From 1c2813ada6eb9443ee9c14802a73e0c416172f3a Mon Sep 17 00:00:00 2001 From: Littlegnal <8847263+littleGnAl@users.noreply.github.com> Date: Fri, 26 May 2023 14:52:09 +0800 Subject: [PATCH] feat: upgrade native sdk 4.2.0 (#1071) * ++ * ++ * ++ * ++ * [AUTO] Update doc 620 * feat: upgrade native sdk dependencies 20230523 * feat: upgrade native sdk dependencies 20230525 * ++ * ++ * feat: upgrade native sdk dependencies 20230525 (#1106) Co-authored-by: LichKing-2234 --------- Co-authored-by: jinyuagora Co-authored-by: littleGnAl Co-authored-by: LichKing-2234 --- android/build.gradle | 8 +- ci/run_flutter_windows_integration_test.sh | 2 +- example/lib/examples/advanced/index.dart | 16 +- .../advanced/media_player/media_player.dart | 4 +- .../media_recorder/media_recorder.dart | 46 +- .../advanced/music_player/music_player.dart | 81 +- .../advanced/precall_test/precall_test.dart | 3 +- .../rtmp_streaming/rtmp_streaming.dart | 8 +- .../send_multi_camera_stream.dart | 53 +- .../set_beauty_effect/set_beauty_effect.dart | 4 + .../start_local_video_transcoder.dart | 61 +- example/pubspec.yaml | 2 +- example/windows/CMakeLists.txt | 3 + ios/agora_rtc_engine.podspec | 6 +- lib/src/agora_base.dart | 871 ++++--- lib/src/agora_base.g.dart | 245 +- lib/src/agora_log.dart | 2 +- lib/src/agora_media_base.dart | 285 +- lib/src/agora_media_base.g.dart | 27 + lib/src/agora_media_engine.dart | 101 +- lib/src/agora_media_player.dart | 184 +- lib/src/agora_media_player_source.dart | 10 +- lib/src/agora_media_player_types.dart | 8 +- lib/src/agora_media_recorder.dart | 37 +- lib/src/agora_music_content_center.dart | 105 +- lib/src/agora_music_content_center.g.dart | 35 + lib/src/agora_rhythm_player.dart | 4 +- lib/src/agora_rtc_engine.dart | 2002 +++++++++----- lib/src/agora_rtc_engine.g.dart | 110 +- lib/src/agora_rtc_engine_ex.dart | 344 ++- lib/src/agora_rtc_engine_ext.dart | 23 +- lib/src/agora_spatial_audio.dart | 69 +- lib/src/audio_device_manager.dart | 82 +- lib/src/binding/agora_base_event_impl.dart | 11 +- .../binding/agora_media_base_event_impl.dart | 179 +- lib/src/binding/agora_media_engine_impl.dart | 123 +- .../agora_media_player_event_impl.dart | 57 +- lib/src/binding/agora_media_player_impl.dart | 122 +- .../agora_media_player_source_event_impl.dart | 5 +- .../binding/agora_media_recorder_impl.dart | 44 +- ...agora_music_content_center_event_impl.dart | 33 +- .../agora_music_content_center_impl.dart | 42 +- .../binding/agora_rtc_engine_event_impl.dart | 71 +- lib/src/binding/agora_rtc_engine_ex_impl.dart | 135 +- lib/src/binding/agora_rtc_engine_impl.dart | 581 +++-- lib/src/binding/agora_spatial_audio_impl.dart | 4 +- .../binding/audio_device_manager_impl.dart | 4 +- .../call_api_event_handler_buffer_ext.dart | 119 +- .../binding/call_api_impl_params_json.dart | 30 +- .../binding/call_api_impl_params_json.g.dart | 28 + lib/src/binding/event_handler_param_json.dart | 377 +-- .../binding/event_handler_param_json.g.dart | 272 +- .../agora_media_engine_impl_override.dart | 1 + .../agora_media_recorder_impl_override.dart | 73 +- ...ra_music_content_center_impl_override.dart | 28 +- lib/src/impl/agora_rtc_engine_impl.dart | 182 +- lib/src/impl/agora_video_view_impl.dart | 3 +- .../impl/global_video_view_controller.dart | 58 +- .../impl/media_player_controller_impl.dart | 16 +- lib/src/impl/media_player_impl.dart | 38 +- ...tive_iris_api_engine_binding_delegate.dart | 37 +- .../impl/native_iris_api_engine_bindings.dart | 201 +- lib/src/impl/video_view_controller_impl.dart | 5 + 
lib/src/render/agora_video_view.dart | 2 +- lib/src/render/video_view_controller.dart | 2 + macos/agora_rtc_engine.podspec | 8 +- pubspec.yaml | 4 +- scripts/artifacts_version.sh | 8 +- scripts/build-ios-arch.sh | 2 +- scripts/build-iris-ios.sh | 2 +- scripts/build-iris-macos-arch.sh | 2 +- scripts/build-iris-macos.sh | 15 +- scripts/code_gen.sh | 16 + scripts/download_unzip_iris_cdn_artifacts.sh | 9 +- scripts/flutter-build-runner.sh | 10 +- shared/darwin/TextureRenderer.h | 4 +- shared/darwin/TextureRenderer.mm | 124 +- shared/darwin/VideoViewController.h | 5 +- shared/darwin/VideoViewController.mm | 34 +- .../integration_test/apis_call_fake_test.dart | 16 + .../eventhandlers_fake_test.dart | 51 +- ...udiodevicemanager_fake_test.generated.dart | 120 - ...patialaudioengine_fake_test.generated.dart | 44 - ...patialaudioengine_fake_test.generated.dart | 32 - ...udioframeobserver_testcases.generated.dart | 6 +- .../mediaengine_fake_test.generated.dart | 317 +-- ...odedframeobserver_testcases.generated.dart | 6 +- ...ideoframeobserver_testcases.generated.dart | 604 +---- ...udiopcmframesink_testcases.generated.dart} | 19 +- ...ospectrumobserver_testcases.generated.dart | 10 +- .../mediaplayer_fake_test.generated.dart | 517 ++-- ...yersourceobserver_testcases.generated.dart | 46 +- ...ideoframeobserver_testcases.generated.dart | 8 +- .../mediarecorder_fake_test.generated.dart | 100 +- ...arecorderobserver_testcases.generated.dart | 53 +- ...usiccontentcenter_fake_test.generated.dart | 138 +- ...entereventhandler_testcases.generated.dart | 55 +- ...odedframeobserver_testcases.generated.dart | 20 +- ...ospectrumobserver_testcases.generated.dart | 10 +- .../rtcengine_fake_test.generated.dart | 2297 ++++------------- ..._metadataobserver_testcases.generated.dart | 6 +- ...ngineeventhandler_testcases.generated.dart | 677 ++--- .../rtcengineex_fake_test.generated.dart | 341 +-- ...ideodevicemanager_fake_test.generated.dart | 32 - .../localspatialaudioengine_testcases.dart | 8 - .../testcases/rtcengine_debug_testcases.dart | 4 - ...ngine_rtcengineeventhandler_testcases.dart | 80 + .../testcases/rtcengine_testcases.dart | 170 +- .../testcases/rtcengineex_testcases.dart | 129 +- test_shard/fake_test_app/lib/main.dart | 30 +- .../fake/fake_iris_method_channel.dart | 5 +- .../fake_agora_video_view_testcases.dart | 6 +- .../mediaengine_smoke_test_testcases.dart | 38 +- .../mediaplayer_smoke_test_testcases.dart | 6 +- test_shard/iris_tester/lib/iris_tester.dart | 18 +- .../common/screenshot_matcher_ext.dart | 8 +- .../common/widget_tester_ext.dart | 7 +- .../integration_test/local_video_view.dart | 1 + .../integration_test/remote_video_view.dart | 1 + ....texture.local.donot_handle_rendermode.png | Bin 1564 -> 1567 bytes ...endermode.with_videomirrormodedisabled.png | Bin 1553 -> 1560 bytes ....texture.local.with_rendermodeadaptive.png | Bin 1551 -> 1558 bytes ...ender.texture.local.with_rendermodefit.png | Bin 1562 -> 1565 bytes ...er.texture.local.with_rendermodehidden.png | Bin 1561 -> 1561 bytes ...texture.remote.donot_handle_rendermode.png | Bin 1590 -> 1599 bytes ...texture.remote.with_default_rendermode.png | Bin 1590 -> 1599 bytes ...ndermodede.with_videoMirrorModeEnabled.png | Bin 1594 -> 1604 bytes ...texture.remote.with_rendermodeadaptive.png | Bin 1613 -> 1602 bytes ...nder.texture.remote.with_rendermodefit.png | Bin 1592 -> 1600 bytes ...r.texture.remote.with_rendermodehidden.png | Bin 1590 -> 1599 bytes tool/ffi_gen/ffigen_config.yaml | 19 + tool/ffi_gen/run_ffi_gen.sh | 24 + 
tool/terra/terra_config_main.yaml | 4 +- .../bin/event_handler_gen_config.dart | 89 +- .../bin/method_call_gen_config.dart | 51 +- tool/testcase_gen/build.sh | 14 + .../testcase_gen/lib/templated_generator.dart | 35 +- windows/CMakeLists.txt | 26 +- windows/cmake/DownloadSDK.cmake | 16 + .../include/agora_rtc_engine/texture_render.h | 23 +- .../agora_rtc_engine/video_view_controller.h | 5 +- windows/texture_render.cc | 116 +- windows/video_view_controller.cc | 29 +- 143 files changed, 6910 insertions(+), 7544 deletions(-) create mode 100644 scripts/code_gen.sh rename test_shard/fake_test_app/integration_test/generated/{mediaplayer_mediaplayeraudioframeobserver_testcases.generated.dart => mediaplayer_audiopcmframesink_testcases.generated.dart} (83%) create mode 100644 test_shard/fake_test_app/integration_test/testcases/rtcengine_rtcengineeventhandler_testcases.dart create mode 100644 tool/ffi_gen/ffigen_config.yaml create mode 100644 tool/ffi_gen/run_ffi_gen.sh create mode 100644 tool/testcase_gen/build.sh diff --git a/android/build.gradle b/android/build.gradle index 3eba9d0bb..85fac6341 100644 --- a/android/build.gradle +++ b/android/build.gradle @@ -47,13 +47,13 @@ dependencies { if (isDev(project)) { implementation fileTree(dir: "libs", include: ["*.jar"]) } else { - api 'io.agora.rtc:iris-rtc:4.1.0-rc.2' - api 'io.agora.rtc:full-sdk:4.1.0-1' - implementation 'io.agora.rtc:full-screen-sharing:4.1.0-1' + api 'io.agora.rtc:iris-rtc:4.2.0-build.3' + api 'io.agora.rtc:full-sdk:4.2.0' + api 'io.agora.rtc:full-screen-sharing:4.2.0' } } static boolean isDev(Project project) { def devFile = project.file('.plugin_dev') return devFile.exists() -} +} \ No newline at end of file diff --git a/ci/run_flutter_windows_integration_test.sh b/ci/run_flutter_windows_integration_test.sh index 9bf20022f..42b488afe 100644 --- a/ci/run_flutter_windows_integration_test.sh +++ b/ci/run_flutter_windows_integration_test.sh @@ -22,7 +22,7 @@ for filename in integration_test/*.dart; do if [[ "$filename" == *.generated.dart ]]; then continue fi - flutter test $filename -d windows + flutter test $filename -d windows --verbose done popd diff --git a/example/lib/examples/advanced/index.dart b/example/lib/examples/advanced/index.dart index e815cea95..b9049614f 100644 --- a/example/lib/examples/advanced/index.dart +++ b/example/lib/examples/advanced/index.dart @@ -38,8 +38,6 @@ final advanced = [ {'name': 'Advanced'}, {'name': 'AudioMixing', 'widget': const AudioMixing()}, {'name': 'ChannelMediaRelay', 'widget': const ChannelMediaRelay()}, - // if (!kIsWeb && (Platform.isAndroid || Platform.isIOS)) - // {'name': 'CustomCaptureAudio', 'widget': const CustomCaptureAudio()}, if (kIsWeb || !(Platform.isAndroid || Platform.isIOS)) {'name': 'DeviceManager', 'widget': const DeviceManager()}, {'name': 'JoinMultipleChannel', 'widget': const JoinMultipleChannel()}, @@ -69,15 +67,14 @@ final advanced = [ if (kIsWeb || !(Platform.isAndroid || Platform.isIOS)) {'name': 'SendMultiCameraStream', 'widget': const SendMultiCameraStream()}, {'name': 'StartRhythmPlayer', 'widget': const StartRhythmPlayer()}, - if (kIsWeb || !(Platform.isAndroid || Platform.isIOS)) - { - 'name': 'StartLocalVideoTranscoder', - 'widget': const StartLocalVideoTranscoder() - }, + { + 'name': 'StartLocalVideoTranscoder', + 'widget': const StartLocalVideoTranscoder() + }, {'name': 'ProcessVideoRawData', 'widget': const ProcessVideoRawData()}, {'name': 'ProcessAudioRawData', 'widget': const ProcessAudioRawData()}, {'name': 'AudioSpectrum', 'widget': const 
AudioSpectrum()}, - {'name': 'MediaRecorder', 'widget': const MediaRecorder()}, + {'name': 'MediaRecorder', 'widget': const MediaRecorderExample()}, {'name': 'PushVideoFrame', 'widget': const PushVideoFrame()}, // {'name': 'PushAudioFrame', 'widget': const PushAudioFrame()}, {'name': 'PushEncodedVideoFrame', 'widget': const PushEncodedVideoFrame()}, @@ -87,5 +84,6 @@ final advanced = [ }, if (kIsWeb || !(Platform.isAndroid || Platform.isIOS)) {'name': 'PreCallTest', 'widget': const PreCallTest()}, - {'name': 'MusicPlayer', 'widget': const MusicPlayerExample()}, + if (Platform.isAndroid || Platform.isIOS) + {'name': 'MusicPlayer', 'widget': const MusicPlayerExample()}, ]; diff --git a/example/lib/examples/advanced/media_player/media_player.dart b/example/lib/examples/advanced/media_player/media_player.dart index 6ca9029b6..10838551a 100644 --- a/example/lib/examples/advanced/media_player/media_player.dart +++ b/example/lib/examples/advanced/media_player/media_player.dart @@ -16,7 +16,7 @@ class _State extends State { late final RtcEngineEx _engine; bool _isReadyPreview = false; - late final MediaPlayerController _mediaPlayerController; + late MediaPlayerController _mediaPlayerController; late final TextEditingController _textEditingController; @@ -65,6 +65,8 @@ class _State extends State { Future _initEngine() async { _engine = createAgoraRtcEngineEx(); + _mediaPlayerController = MediaPlayerController( + rtcEngine: _engine, canvas: const VideoCanvas(uid: 0)); await _engine.initialize(RtcEngineContext( appId: config.appId, channelProfile: ChannelProfileType.channelProfileLiveBroadcasting, diff --git a/example/lib/examples/advanced/media_recorder/media_recorder.dart b/example/lib/examples/advanced/media_recorder/media_recorder.dart index 1878d2cca..22d46c883 100644 --- a/example/lib/examples/advanced/media_recorder/media_recorder.dart +++ b/example/lib/examples/advanced/media_recorder/media_recorder.dart @@ -11,15 +11,15 @@ import 'package:permission_handler/permission_handler.dart'; import 'package:agora_rtc_engine_example/config/agora.config.dart' as config; /// MediaRecorder Example -class MediaRecorder extends StatefulWidget { +class MediaRecorderExample extends StatefulWidget { /// @nodoc - const MediaRecorder({Key? key}) : super(key: key); + const MediaRecorderExample({Key? key}) : super(key: key); @override State createState() => _State(); } -class _State extends State { +class _State extends State { late final RtcEngine _engine; bool isJoined = false, switchCamera = true, switchRender = true; @@ -28,6 +28,7 @@ class _State extends State { bool _isStartedMediaRecording = false; String _recordingFileStoragePath = ''; bool _isReadyPreview = false; + MediaRecorder? 
_mediaRecorder; @override void initState() { @@ -43,7 +44,9 @@ class _State extends State { } Future _dispose() async { - await _engine.getMediaRecorder().release(); + if (_mediaRecorder != null) { + await _engine.destroyMediaRecorder(_mediaRecorder!); + } await _engine.release(); } @@ -52,7 +55,6 @@ class _State extends State { await _engine.initialize(RtcEngineContext( appId: config.appId, )); - await _engine.setLogFilter(LogFilterType.logFilterError); _engine.registerEventHandler(RtcEngineEventHandler( onError: (ErrorCodeType err, String msg) { @@ -116,25 +118,27 @@ class _State extends State { } Future _startMediaRecording() async { - await _engine.getMediaRecorder().setMediaRecorderObserver( - connection: RtcConnection(channelId: _controller.text, localUid: 0), - callback: MediaRecorderObserver( - onRecorderStateChanged: - (RecorderState state, RecorderErrorCode error) { - logSink.log('onRecorderStateChanged state: $state, error: $error'); - }, - onRecorderInfoUpdated: (RecorderInfo info) { - logSink.log('onRecorderInfoUpdated info: ${info.toJson()}'); - }, - )); + _mediaRecorder ??= await _engine.createMediaRecorder( + RecorderStreamInfo(channelId: _controller.text, uid: 0)); + + await _mediaRecorder?.setMediaRecorderObserver(MediaRecorderObserver( + onRecorderStateChanged: (String channelId, int uid, RecorderState state, + RecorderErrorCode error) { + logSink.log( + 'onRecorderStateChanged channelId: $channelId, uid: $uid state: $state, error: $error'); + }, + onRecorderInfoUpdated: (String channelId, int uid, RecorderInfo info) { + logSink.log( + 'onRecorderInfoUpdated channelId: $channelId, uid: $uid, info: ${info.toJson()}'); + }, + )); Directory appDocDir = Platform.isAndroid ? (await getExternalStorageDirectory())! : await getApplicationDocumentsDirectory(); String p = path.join(appDocDir.path, 'example.mp4'); - await _engine.getMediaRecorder().startRecording( - connection: RtcConnection(channelId: _controller.text, localUid: 0), - config: MediaRecorderConfiguration(storagePath: p)); + await _mediaRecorder + ?.startRecording(MediaRecorderConfiguration(storagePath: p)); setState(() { _recordingFileStoragePath = 'Recording file storage path: $p'; _isStartedMediaRecording = true; @@ -142,9 +146,7 @@ class _State extends State { } Future _stopMediaRecording() async { - await _engine - .getMediaRecorder() - .stopRecording(RtcConnection(channelId: _controller.text, localUid: 0)); + await _mediaRecorder?.stopRecording(); setState(() { _recordingFileStoragePath = ''; _isStartedMediaRecording = false; diff --git a/example/lib/examples/advanced/music_player/music_player.dart b/example/lib/examples/advanced/music_player/music_player.dart index 1dd18bf34..3bf025648 100644 --- a/example/lib/examples/advanced/music_player/music_player.dart +++ b/example/lib/examples/advanced/music_player/music_player.dart @@ -282,47 +282,54 @@ class _MusicPlayerExampleState extends State { )); _musicContentCenter.registerEventHandler(MusicContentCenterEventHandler( - onMusicChartsResult: (requestId, status, result) { - logSink.log( - '[onMusicChartsResult], requestId: $requestId, status: $status, result: ${result.toString()}'); - if (status == MusicContentCenterStatusCode.kMusicContentCenterStatusOk) { - if (_currentRequestId == requestId) { + onMusicChartsResult: (String requestId, List result, + MusicContentCenterStatusCode errorCode) { + logSink.log( + '[onMusicChartsResult], requestId: $requestId, errorCode: $errorCode, result: ${result.toString()}'); + if (errorCode == + 
MusicContentCenterStatusCode.kMusicContentCenterStatusOk) { + if (_currentRequestId == requestId) { + setState(() { + _musicChartInfos = result; + }); + } + } + }, + onMusicCollectionResult: (String requestId, MusicCollection result, + MusicContentCenterStatusCode errorCode) { + logSink.log( + '[onMusicCollectionResult], requestId: $requestId, errorCode: $errorCode, result: ${result.toString()}'); + + if (_musicCollectionRequestId == requestId) { + setState(() { + _musicCollection = result; + }); + } else if (_searchMusicRequestId == requestId) { setState(() { - _musicChartInfos = result; + _searchedMusicCollection = result; }); } - } - }, onMusicCollectionResult: (String requestId, - MusicContentCenterStatusCode status, MusicCollection result) { - logSink.log( - '[onMusicCollectionResult], requestId: $requestId, status: $status, result: ${result.toString()}'); - - if (_musicCollectionRequestId == requestId) { - setState(() { - _musicCollection = result; - }); - } else if (_searchMusicRequestId == requestId) { - setState(() { - _searchedMusicCollection = result; - }); - } - }, onPreLoadEvent: (int songCode, int percent, PreloadStatusCode status, - String msg, String lyricUrl) { - logSink.log( - '[onPreLoadEvent], songCode: $songCode, percent: $percent status: $status, msg: $msg, lyricUrl: $lyricUrl'); - if (_selectedMusic.songCode == songCode && - status == PreloadStatusCode.kPreloadStatusCompleted) { - _preloadCompleted?.complete(); - _preloadCompleted = null; - } - }, onLyricResult: (String requestId, String lyricUrl) { - if (_getLyricRequestId == requestId) { - _getLyricCompleted?.complete(lyricUrl); - _getLyricCompleted = null; - } - })); + }, + onPreLoadEvent: (int songCode, int percent, String lyricUrl, + PreloadStatusCode status, MusicContentCenterStatusCode errorCode) { + logSink.log( + '[onPreLoadEvent], songCode: $songCode, percent: $percent status: $status, errorCode: $errorCode, lyricUrl: $lyricUrl'); + if (_selectedMusic.songCode == songCode && + status == PreloadStatusCode.kPreloadStatusCompleted) { + _preloadCompleted?.complete(); + _preloadCompleted = null; + } + }, + onLyricResult: (String requestId, String lyricUrl, + MusicContentCenterStatusCode errorCode) { + if (_getLyricRequestId == requestId) { + _getLyricCompleted?.complete(lyricUrl); + _getLyricCompleted = null; + } + }, + )); - _musicPlayer = await _musicContentCenter.createMusicPlayer(); + _musicPlayer = (await _musicContentCenter.createMusicPlayer())!; _mediaPlayerSourceObserver = MediaPlayerSourceObserver( onPlayerSourceStateChanged: diff --git a/example/lib/examples/advanced/precall_test/precall_test.dart b/example/lib/examples/advanced/precall_test/precall_test.dart index c875146a3..2a03ebae0 100644 --- a/example/lib/examples/advanced/precall_test/precall_test.dart +++ b/example/lib/examples/advanced/precall_test/precall_test.dart @@ -249,7 +249,8 @@ class _State extends State { _isStartEchoTest = !_isStartEchoTest; if (_isStartEchoTest) { - await _engine.startEchoTest(); + await _engine.startEchoTest( + const EchoTestConfiguration(intervalInSeconds: 10)); } else { await _engine.stopEchoTest(); } diff --git a/example/lib/examples/advanced/rtmp_streaming/rtmp_streaming.dart b/example/lib/examples/advanced/rtmp_streaming/rtmp_streaming.dart index 5b62720df..ce63e0e4d 100644 --- a/example/lib/examples/advanced/rtmp_streaming/rtmp_streaming.dart +++ b/example/lib/examples/advanced/rtmp_streaming/rtmp_streaming.dart @@ -80,9 +80,15 @@ class _RtmpStreamingState extends State { _remoteUid = 0; }); }, - 
onLeaveChannel: (RtcConnection connection, RtcStats stats) { + onLeaveChannel: (RtcConnection connection, RtcStats stats) async { logSink.log( '[onLeaveChannel] connection: ${connection.toJson()} stats: ${stats.toJson()}'); + + if (_isStreaming && _rtmpUrlController.text.isNotEmpty) { + await _engine.stopRtmpStream(_rtmpUrlController.text); + _isStreaming = false; + } + setState(() { isJoined = false; }); diff --git a/example/lib/examples/advanced/send_multi_camera_stream/send_multi_camera_stream.dart b/example/lib/examples/advanced/send_multi_camera_stream/send_multi_camera_stream.dart index fc3295d19..5b965cab6 100644 --- a/example/lib/examples/advanced/send_multi_camera_stream/send_multi_camera_stream.dart +++ b/example/lib/examples/advanced/send_multi_camera_stream/send_multi_camera_stream.dart @@ -38,10 +38,7 @@ class _State extends State { } Future _dispose() async { - // await _localVideoController.dispose(); - await _engine.stopPrimaryCameraCapture(); - await _engine.stopSecondaryCameraCapture(); - await _engine.leaveChannel(); + await _leaveChannel(); await _engine.release(); } @@ -101,14 +98,16 @@ class _State extends State { await _engine.enableVideo(); await _engine.setClientRole(role: ClientRoleType.clientRoleBroadcaster); - await _engine.startPrimaryCameraCapture(CameraCapturerConfiguration( - deviceId: _videoDeviceInfos[0].deviceId, - format: VideoFormat( - width: 640, - height: 320, - fps: FrameRate.frameRateFps10.value(), - ), - )); + await _engine.startCameraCapture( + sourceType: VideoSourceType.videoSourceCameraPrimary, + config: CameraCapturerConfiguration( + deviceId: _videoDeviceInfos[0].deviceId, + format: VideoFormat( + width: 640, + height: 320, + fps: FrameRate.frameRateFps10.value(), + ), + )); await _engine.startPreview(); @@ -140,11 +139,10 @@ class _State extends State { } } - void _leaveChannel() async { - await _engine.stopSecondaryCameraCapture(); - await _engine.stopPrimaryCameraCapture(); + Future _leaveChannel() async { + await _engine.stopCameraCapture(VideoSourceType.videoSourceCameraPrimary); + await _engine.stopCameraCapture(VideoSourceType.videoSourceCameraSecondary); await _engine.leaveChannel(); - // setState(() {}); } @override @@ -228,17 +226,20 @@ class _State extends State { !_isStartSecondaryCameraDevice; if (_isStartSecondaryCameraDevice) { - _engine.startSecondaryCameraCapture( - CameraCapturerConfiguration( - deviceId: _videoDeviceInfos[1].deviceId, - format: VideoFormat( - width: 640, - height: 320, - fps: FrameRate.frameRateFps10.value(), - ), - )); + _engine.startCameraCapture( + sourceType: + VideoSourceType.videoSourceCameraSecondary, + config: CameraCapturerConfiguration( + deviceId: _videoDeviceInfos[1].deviceId, + format: VideoFormat( + width: 640, + height: 320, + fps: FrameRate.frameRateFps10.value(), + ), + )); } else { - _engine.stopSecondaryCameraCapture(); + _engine.stopCameraCapture( + VideoSourceType.videoSourceCameraSecondary); } setState(() {}); diff --git a/example/lib/examples/advanced/set_beauty_effect/set_beauty_effect.dart b/example/lib/examples/advanced/set_beauty_effect/set_beauty_effect.dart index 5bfab463b..49795e1f4 100644 --- a/example/lib/examples/advanced/set_beauty_effect/set_beauty_effect.dart +++ b/example/lib/examples/advanced/set_beauty_effect/set_beauty_effect.dart @@ -107,6 +107,10 @@ class _State extends State with KeepRemoteVideoViewsMixin { await _engine.enableVideo(); + await _engine.enableExtension( + provider: "agora_video_filters_clear_vision", + extension: "clear_vision"); + await 
_engine.setClientRole(role: ClientRoleType.clientRoleBroadcaster); await _engine.startPreview(); diff --git a/example/lib/examples/advanced/start_local_video_transcoder/start_local_video_transcoder.dart b/example/lib/examples/advanced/start_local_video_transcoder/start_local_video_transcoder.dart index 3198b342d..b6e40a9c5 100644 --- a/example/lib/examples/advanced/start_local_video_transcoder/start_local_video_transcoder.dart +++ b/example/lib/examples/advanced/start_local_video_transcoder/start_local_video_transcoder.dart @@ -149,7 +149,7 @@ class _State extends State { options: const ChannelMediaOptions( publishCameraTrack: false, publishSecondaryCameraTrack: false, - publishTrancodedVideoTrack: true, + publishTranscodedVideoTrack: true, ), ); } @@ -189,7 +189,7 @@ class _State extends State { if (!(defaultTargetPlatform == TargetPlatform.android || defaultTargetPlatform == TargetPlatform.iOS)) { transcodingVideoStreams.add(const TranscodingVideoStream( - sourceType: MediaSourceType.primaryCameraSource, + sourceType: VideoSourceType.videoSourceCameraPrimary, width: 640, height: 320)); @@ -197,11 +197,20 @@ class _State extends State { format: const VideoFormat(width: 640, height: 320, fps: 30), deviceId: _videoDevices[0].deviceId, ); - await _engine.startPrimaryCameraCapture(config); + + await _engine.startCameraCapture( + sourceType: VideoSourceType.videoSourceCameraPrimary, config: config); + } else { + transcodingVideoStreams.add(const TranscodingVideoStream( + sourceType: VideoSourceType.videoSourceCameraPrimary, + width: 160, + height: 320)); } await _engine .startLocalVideoTranscoder(_createLocalTranscoderConfiguration()); + await _engine.startPreview( + sourceType: VideoSourceType.videoSourceTranscoded); } Future _stopLocalVideoTranscoder() async { @@ -210,14 +219,15 @@ class _State extends State { } if (_isSecondaryCameraSource) { - await _engine.stopSecondaryCameraCapture(); + await _engine + .stopCameraCapture(VideoSourceType.videoSourceCameraSecondary); } if (_isPrimaryScreenSource) { await _engine.stopScreenCapture(); } - await _engine.stopPrimaryCameraCapture(); + await _engine.stopCameraCapture(VideoSourceType.videoSourceCameraPrimary); await _engine.stopLocalVideoTranscoder(); transcodingVideoStreams.clear(); _isSecondaryCameraSource = false; @@ -267,15 +277,16 @@ class _State extends State { _videoDevices.length >= 2 ? 
(v) async { if (!v) { - await _engine.stopSecondaryCameraCapture(); + await _engine.stopCameraCapture( + VideoSourceType.videoSourceCameraSecondary); transcodingVideoStreams.removeWhere((element) => element.sourceType == - MediaSourceType.secondaryCameraSource); + VideoSourceType.videoSourceCameraSecondary); } else { transcodingVideoStreams.add( const TranscodingVideoStream( - sourceType: - MediaSourceType.secondaryCameraSource, + sourceType: VideoSourceType + .videoSourceCameraSecondary, width: 360, height: 240)); @@ -284,7 +295,11 @@ class _State extends State { width: 640, height: 320, fps: 30), deviceId: _videoDevices[1].deviceId, ); - await _engine.startSecondaryCameraCapture(config); + + await _engine.startCameraCapture( + sourceType: + VideoSourceType.videoSourceCameraSecondary, + config: config); } await _engine.updateLocalTranscoderConfiguration( @@ -312,7 +327,7 @@ class _State extends State { await _engine.stopScreenCapture(); transcodingVideoStreams.removeWhere((element) => element.sourceType == - MediaSourceType.primaryScreenSource); + VideoSourceType.videoSourceScreen); } else { SIZE t = const SIZE(width: 360, height: 240); @@ -346,7 +361,7 @@ class _State extends State { transcodingVideoStreams.add( const TranscodingVideoStream( sourceType: - MediaSourceType.primaryScreenSource, + VideoSourceType.videoSourceScreen, x: 110, y: 110, width: 200, @@ -382,7 +397,7 @@ class _State extends State { await _mediaPlayerController.stop(); transcodingVideoStreams.removeWhere((element) => element.sourceType == - MediaSourceType.mediaPlayerSource); + VideoSourceType.videoSourceMediaPlayer); } else { _mediaPlayerSourceObserver ??= MediaPlayerSourceObserver( @@ -403,10 +418,9 @@ class _State extends State { ); transcodingVideoStreams.add(TranscodingVideoStream( - sourceType: MediaSourceType.mediaPlayerSource, - imageUrl: _mediaPlayerController - .getMediaPlayerId() - .toString(), + sourceType: VideoSourceType.videoSourceMediaPlayer, + mediaPlayerId: + _mediaPlayerController.getMediaPlayerId(), width: 360, height: 240, zOrder: 10, @@ -455,10 +469,10 @@ class _State extends State { if (!v) { transcodingVideoStreams.removeWhere((element) => element.sourceType == - MediaSourceType.rtcImagePngSource); + VideoSourceType.videoSourceRtcImagePng); } else { transcodingVideoStreams.add(TranscodingVideoStream( - sourceType: MediaSourceType.rtcImagePngSource, + sourceType: VideoSourceType.videoSourceRtcImagePng, imageUrl: _pngFilePath, x: 220, y: 60, @@ -496,10 +510,10 @@ class _State extends State { if (!v) { transcodingVideoStreams.removeWhere((element) => element.sourceType == - MediaSourceType.rtcImageJpegSource); + VideoSourceType.videoSourceRtcImageJpeg); } else { transcodingVideoStreams.add(TranscodingVideoStream( - sourceType: MediaSourceType.rtcImageJpegSource, + sourceType: VideoSourceType.videoSourceRtcImageJpeg, imageUrl: _jpgFilePath, x: 360, y: 0, @@ -537,10 +551,10 @@ class _State extends State { if (!v) { transcodingVideoStreams.removeWhere((element) => element.sourceType == - MediaSourceType.rtcImageGifSource); + VideoSourceType.videoSourceRtcImageGif); } else { transcodingVideoStreams.add(TranscodingVideoStream( - sourceType: MediaSourceType.rtcImageGifSource, + sourceType: VideoSourceType.videoSourceRtcImageGif, imageUrl: _gifFilePath, x: 360, y: 0, @@ -583,6 +597,7 @@ class _State extends State { canvas: const VideoCanvas( uid: 0, sourceType: VideoSourceType.videoSourceTranscoded, + renderMode: RenderModeType.renderModeFit, ), ), ), diff --git a/example/pubspec.yaml 
b/example/pubspec.yaml index b3c29d015..77e50a60c 100644 --- a/example/pubspec.yaml +++ b/example/pubspec.yaml @@ -37,7 +37,7 @@ dependencies: git: url: https://github.com/AgoraIO-Extensions/RawDataPluginSample.git path: frameworks/flutter/video_raw_data - ref: 8a7a90e78fb1192dc2ef1154bff57f670d462e96 + ref: c1e6426a2aa4381b23e4633a6ef4a9b3b075fa1f dev_dependencies: flutter_test: diff --git a/example/windows/CMakeLists.txt b/example/windows/CMakeLists.txt index d2e41f460..1fd8d9405 100644 --- a/example/windows/CMakeLists.txt +++ b/example/windows/CMakeLists.txt @@ -21,6 +21,9 @@ else() endif() endif() +set(CMAKE_EXE_LINKER_FLAGS_RELEASE "${CMAKE_EXE_LINKER_FLAGS_RELEASE} /DEBUG") +set(CMAKE_SHARED_LINKER_FLAGS_RELEASE "${CMAKE_SHARED_LINKER_FLAGS_RELEASE} /DEBUG") + set(CMAKE_EXE_LINKER_FLAGS_PROFILE "${CMAKE_EXE_LINKER_FLAGS_RELEASE}") set(CMAKE_SHARED_LINKER_FLAGS_PROFILE "${CMAKE_SHARED_LINKER_FLAGS_RELEASE}") set(CMAKE_C_FLAGS_PROFILE "${CMAKE_C_FLAGS_RELEASE}") diff --git a/ios/agora_rtc_engine.podspec b/ios/agora_rtc_engine.podspec index b6fc561d1..4a2cfd76c 100644 --- a/ios/agora_rtc_engine.podspec +++ b/ios/agora_rtc_engine.podspec @@ -17,8 +17,8 @@ Pod::Spec.new do |s| s.source = { :path => '.' } s.source_files = 'Classes/**/*.{h,mm,m,swift}' s.dependency 'Flutter' - s.dependency 'AgoraIrisRTC_iOS', '4.1.0-rc.2' - s.dependency 'AgoraRtcEngine_iOS', '4.1.0' + s.dependency 'AgoraIrisRTC_iOS', '4.2.0-build.3' + s.dependency 'AgoraRtcEngine_iOS', '4.2.0' # s.dependency 'AgoraRtcWrapper' s.platform = :ios, '9.0' s.swift_version = '5.0' @@ -26,4 +26,4 @@ Pod::Spec.new do |s| # Flutter.framework does not contain a i386 slice. s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'EXCLUDED_ARCHS[sdk=iphonesimulator*]' => 'i386' } -end +end \ No newline at end of file diff --git a/lib/src/agora_base.dart b/lib/src/agora_base.dart index bbd80a9f4..be91d728d 100644 --- a/lib/src/agora_base.dart +++ b/lib/src/agora_base.dart @@ -199,7 +199,7 @@ enum ErrorCodeType { @JsonValue(3) errNotReady, - /// 4: RtcEngine does not support the request. Possible reasons include the following:The built-in encryption mode is incorrect, or the SDK fails to load the external encryption library. Check the encryption mode setting, or reload the external encryption library. + /// 4: The RtcEngine does not support the request. Possible reasons include the following:The built-in encryption mode is incorrect, or the SDK fails to load the external encryption library. Check the encryption mode setting, or reload the external encryption library. @JsonValue(4) errNotSupported, @@ -215,7 +215,7 @@ enum ErrorCodeType { @JsonValue(7) errNotInitialized, - /// @nodoc + /// 8: Invalid state. @JsonValue(8) errInvalidState, @@ -243,11 +243,11 @@ enum ErrorCodeType { @JsonValue(14) errNetDown, - /// 17: The request to join the channel is rejected. Possible reasons include the following:The user is already in the channel. Agora recommends using the onConnectionStateChanged callback to get whether the user is in the channel. Do not call this method to join the channel unless you receive the connectionStateDisconnected(1) state.After calling startEchoTest for the call test, the user tries to join the channel without calling stopEchoTest to end the current test. To join a channel, the call test must be ended by calling stopEchoTest. + /// 17: The request to join the channel is rejected. Possible reasons include the following:The user is already in the channel. 
Agora recommends that you use the onConnectionStateChanged callback to determine whether the user exists in the channel. Do not call this method to join the channel unless you receive the connectionStateDisconnected(1) state.After calling startEchoTest for the call test, the user tries to join the channel without calling stopEchoTest to end the current test. To join a channel, the call test must be ended by calling stopEchoTest. @JsonValue(17) errJoinChannelRejected, - /// 18: Fails to leave the channel. Possible reasons include the following:The user has left the channel before calling the leaveChannel [1/2] method. Stop calling this method to clear this error.The user calls the leaveChannel [1/2] method to leave the channel before joining the channel. In this case, no extra operation is needed. + /// 18: Fails to leave the channel. Possible reasons include the following:The user has left the channel before calling the leaveChannel method. Stop calling this method to clear this error.The user calls the leaveChannel method to leave the channel before joining the channel. In this case, no extra operation is needed. @JsonValue(18) errLeaveChannelRejected, @@ -283,7 +283,7 @@ enum ErrorCodeType { @JsonValue(109) errTokenExpired, - /// 110: Invalid token Typical reasons include the following:App Certificate is enabled in Agora Console, but the code still uses App ID for authentication. Once App Certificate is enabled for a project, you must use token-based authentication.The uid used to generate the token is not the same as the uid used to join the channel.Deprecated:This enumerator is deprecated. Use connectionChangedInvalidToken(8) in the onConnectionStateChanged callback instead. + /// 110: Invalid token. Typical reasons include the following:App Certificate is enabled in Agora Console, but the code still uses App ID for authentication. Once App Certificate is enabled for a project, you must use token-based authentication.The uid used to generate the token is not the same as the uid used to join the channel.Deprecated:This enumerator is deprecated. Use connectionChangedInvalidToken(8) in the onConnectionStateChanged callback instead. @JsonValue(110) errInvalidToken, @@ -542,9 +542,7 @@ enum UserOfflineReasonType { @JsonValue(0) userOfflineQuit, - /// 1: The SDK times out and the user drops offline because no data packet is received within a certain period of time. - /// If the user quits the call and the message is not passed to the SDK (due to an unreliable channel), the SDK assumes the user dropped offline. - /// + /// 1: The SDK times out and the user drops offline because no data packet is received within a certain period of time.If the user quits the call and the message is not passed to the SDK (due to an unreliable channel), the SDK assumes the user dropped offline. @JsonValue(1) userOfflineDropped, @@ -613,10 +611,6 @@ enum InterfaceIdType { @JsonValue(11) agoraIidLocalSpatialAudio, - /// The MediaRecorder interface class. - @JsonValue(12) - agoraIidMediaRecorder, - /// @nodoc @JsonValue(13) agoraIidStateSync, @@ -628,6 +622,10 @@ enum InterfaceIdType { /// @nodoc @JsonValue(15) agoraIidMusicContentCenter, + + /// @nodoc + @JsonValue(16) + agoraIidH265Transcoder, } /// @nodoc @@ -674,7 +672,7 @@ enum QualityType { @JsonValue(6) qualityDown, - /// 7: Users cannot detect the network quality. (Not in use.) + /// 7: Users cannot detect the network quality (not in use). 
@JsonValue(7) qualityUnsupported, @@ -754,7 +752,7 @@ extension VideoOrientationExt on VideoOrientation { } } -/// Video frame rate. +/// The video frame rate. @JsonEnum(alwaysCreate: true) enum FrameRate { /// 1: 1 fps @@ -803,8 +801,8 @@ extension FrameRateExt on FrameRate { @JsonEnum(alwaysCreate: true) enum FrameWidth { /// @nodoc - @JsonValue(640) - frameWidth640, + @JsonValue(960) + frameWidth960, } /// @nodoc @@ -824,8 +822,8 @@ extension FrameWidthExt on FrameWidth { @JsonEnum(alwaysCreate: true) enum FrameHeight { /// @nodoc - @JsonValue(360) - frameHeight360, + @JsonValue(540) + frameHeight540, } /// @nodoc @@ -982,6 +980,35 @@ const defaultMinBitrate = -1; /// @nodoc const defaultMinBitrateEqualToTargetBitrate = -2; +/// The highest frame rate supported by the screen sharing device. +@JsonEnum(alwaysCreate: true) +enum ScreenCaptureFramerateCapability { + /// 0: The device supports the frame rate of up to 15 fps. + @JsonValue(0) + screenCaptureFramerateCapability15Fps, + + /// 1: The device supports the frame rate of up to 30 fps. + @JsonValue(1) + screenCaptureFramerateCapability30Fps, + + /// 2: The device supports the frame rate of up to 60 fps. + @JsonValue(2) + screenCaptureFramerateCapability60Fps, +} + +extension ScreenCaptureFramerateCapabilityExt + on ScreenCaptureFramerateCapability { + /// @nodoc + static ScreenCaptureFramerateCapability fromValue(int value) { + return $enumDecode(_$ScreenCaptureFramerateCapabilityEnumMap, value); + } + + /// @nodoc + int value() { + return _$ScreenCaptureFramerateCapabilityEnumMap[this]!; + } +} + /// Video codec types. @JsonEnum(alwaysCreate: true) enum VideoCodecType { @@ -993,7 +1020,7 @@ enum VideoCodecType { @JsonValue(1) videoCodecVp8, - /// 2: Standard H.264. + /// 2: (Default) Standard H.264. @JsonValue(2) videoCodecH264, @@ -1261,7 +1288,7 @@ class EncodedAudioFrameInfo { this.advancedSettings, this.captureTimeMs}); - /// Audio Codec type: AudioCodecType + /// Audio Codec type: AudioCodecType . @JsonKey(name: 'codec') final AudioCodecType? codec; @@ -1388,7 +1415,7 @@ class VideoSubscriptionOptions { /// @nodoc const VideoSubscriptionOptions({this.type, this.encodedFrameOnly}); - /// The video stream type that you want to subscribe to. The default value is videoStreamHigh, indicating that the high-quality video streams are subscribed. See VideoStreamType. + /// The video stream type that you want to subscribe to. The default value is videoStreamHigh, indicating that the high-quality video streams are subscribed. See VideoStreamType . @JsonKey(name: 'type') final VideoStreamType? type; @@ -1481,7 +1508,6 @@ enum CompressionPreference { preferLowLatency, /// 1: (Default) High quality preference. The SDK compresses video frames while maintaining video quality. This preference is suitable for scenarios where video quality is prioritized. - /// @JsonValue(1) preferQuality, } @@ -1502,7 +1528,7 @@ extension CompressionPreferenceExt on CompressionPreference { /// Video encoder preference. @JsonEnum(alwaysCreate: true) enum EncodingPreference { - /// -1: Default preference. The SDK automatically selects the optimal encoding type for encoding based on factors such as platform and device type. + /// -1: Adaptive preference. The SDK automatically selects the optimal encoding type for encoding based on factors such as platform and device type. @JsonValue(-1) preferAuto, @@ -1553,7 +1579,7 @@ class AdvanceOptions { /// Video mirror mode. 
@JsonEnum(alwaysCreate: true) enum VideoMirrorModeType { - /// 0: (Default) The SDK determines the mirror mode. + /// 0: The SDK determines the mirror mode.For the mirror mode of the local video view: If you use a front camera, the SDK enables the mirror mode by default; if you use a rear camera, the SDK disables the mirror mode by default.For the remote user: The mirror mode is disabled by default. @JsonValue(0) videoMirrorModeAuto, @@ -1579,6 +1605,65 @@ extension VideoMirrorModeTypeExt on VideoMirrorModeType { } } +/// The bit mask that indicates the device codec capability. +@JsonEnum(alwaysCreate: true) +enum CodecCapMask { + /// (0): The device does not support encoding or decoding. + @JsonValue(0) + codecCapMaskNone, + + /// (1 << 0): The device supports hardware decoding. + @JsonValue(1 << 0) + codecCapMaskHwDec, + + /// (1 << 1): The device supports hardware encoding. + @JsonValue(1 << 1) + codecCapMaskHwEnc, + + /// (1 << 2): The device supports software decoding. + @JsonValue(1 << 2) + codecCapMaskSwDec, + + /// (1 << 3): The device supports software encoding. + @JsonValue(1 << 3) + codecCapMaskSwEnc, +} + +/// @nodoc +extension CodecCapMaskExt on CodecCapMask { + /// @nodoc + static CodecCapMask fromValue(int value) { + return $enumDecode(_$CodecCapMaskEnumMap, value); + } + + /// @nodoc + int value() { + return _$CodecCapMaskEnumMap[this]!; + } +} + +/// The codec capability of the device. +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class CodecCapInfo { + /// @nodoc + const CodecCapInfo({this.codecType, this.codecCapMask}); + + /// The video codec types. See VideoCodecType . + @JsonKey(name: 'codecType') + final VideoCodecType? codecType; + + /// The bit mask of the codec type. See CodecCapMask . + @JsonKey(name: 'codecCapMask') + final int? codecCapMask; + + /// @nodoc + factory CodecCapInfo.fromJson(Map json) => + _$CodecCapInfoFromJson(json); + + /// @nodoc + Map toJson() => _$CodecCapInfoToJson(this); +} + /// Video encoder configurations. @JsonSerializable(explicitToJson: true, includeIfNull: false) class VideoEncoderConfiguration { @@ -1598,7 +1683,7 @@ class VideoEncoderConfiguration { @JsonKey(name: 'codecType') final VideoCodecType? codecType; - /// The dimensions of the encoded video (px). See VideoDimensions . This parameter measures the video encoding quality in the format of length × width. The default value is 640 × 360. You can set a custom value. + /// The dimensions of the encoded video (px). See VideoDimensions . This parameter measures the video encoding quality in the format of length × width. The default value is 960 × 540. You can set a custom value. @JsonKey(name: 'dimensions') final VideoDimensions? dimensions; @@ -1606,7 +1691,7 @@ class VideoEncoderConfiguration { @JsonKey(name: 'frameRate') final int? frameRate; - /// The encoding bitrate (Kbps) of the video. : (Recommended) Standard bitrate mode. In this mode, the video bitrate is twice the base bitrate.: Adaptive bitrate mode In this mode, the video bitrate is the same as the base bitrate. If you choose this mode in the LIVE_BROADCASTING profile, the video frame rate may be lower than the set value. + /// The encoding bitrate (Kbps) of the video. : (Recommended) Standard bitrate mode. In this mode, the bitrates of the live broadcasting profile are higher than those of the communication profile. : Adaptive bitrate mode In this mode, the bitrates of the live broadcasting profile equal those of the communication profile.
If this mode is selected, the video frame rate of live broadcasting scenarios may be lower than the set value. @JsonKey(name: 'bitrate') final int? bitrate; @@ -1700,11 +1785,11 @@ class SimulcastStreamConfig { @JsonKey(name: 'dimensions') final VideoDimensions? dimensions; - /// @nodoc + /// Video receive bitrate (Kbps), represented by an instantaneous value. The default value is 65. @JsonKey(name: 'kBitrate') final int? kBitrate; - /// The capture frame rate (fps) of the local video. The default value is 5. + /// The frame rate (fps) of the local video. The default value is 5. @JsonKey(name: 'framerate') final int? framerate; @@ -1788,11 +1873,11 @@ class WatermarkOptions { @JsonKey(name: 'visibleInPreview') final bool? visibleInPreview; - /// When the adaptation mode of the watermark is fitModeCoverPosition, it is used to set the area of the watermark image in landscape mode. See fitModeCoverPosition for details. + /// When the adaptation mode of the watermark is fitModeCoverPosition, it is used to set the area of the watermark image in landscape mode. See Rectangle . @JsonKey(name: 'positionInLandscapeMode') final Rectangle? positionInLandscapeMode; - /// When the adaptation mode of the watermark is fitModeCoverPosition, it is used to set the area of the watermark image in portrait mode. See fitModeCoverPosition for details. + /// When the adaptation mode of the watermark is fitModeCoverPosition, it is used to set the area of the watermark image in portrait mode. See Rectangle . @JsonKey(name: 'positionInPortraitMode') final Rectangle? positionInPortraitMode; @@ -1812,7 +1897,7 @@ class WatermarkOptions { Map toJson() => _$WatermarkOptionsToJson(this); } -/// Statistics of the channel. +/// Statistics of a call session. @JsonSerializable(explicitToJson: true, includeIfNull: false) class RtcStats { /// @nodoc @@ -1855,15 +1940,15 @@ class RtcStats { @JsonKey(name: 'duration') final int? duration; - /// Total number of bytes transmitted, represented by an aggregate value. + /// The number of bytes sent. @JsonKey(name: 'txBytes') final int? txBytes; - /// Total number of bytes received, represented by an aggregate value. + /// The number of bytes received. @JsonKey(name: 'rxBytes') final int? rxBytes; - /// Total number of audio bytes sent, represented by an aggregate value. + /// The total number of audio bytes sent, represented by an aggregate value. @JsonKey(name: 'txAudioBytes') final int? txAudioBytes; @@ -1887,7 +1972,7 @@ class RtcStats { @JsonKey(name: 'rxKBitRate') final int? rxKBitRate; - /// Audio receive bitrate (Kbps), represented by an instantaneous value. + /// The bitrate (Kbps) of receiving the audio. @JsonKey(name: 'rxAudioKBitRate') final int? rxAudioKBitRate; @@ -1895,7 +1980,7 @@ class RtcStats { @JsonKey(name: 'txAudioKBitRate') final int? txAudioKBitRate; - /// Video receive bitrate (Kbps), represented by an instantaneous value. + /// The bitrate (Kbps) of receiving the video. @JsonKey(name: 'rxVideoKBitRate') final int? rxVideoKBitRate; @@ -1903,7 +1988,7 @@ class RtcStats { @JsonKey(name: 'txVideoKBitRate') final int? txVideoKBitRate; - /// The client-to-server delay (ms). + /// The client-to-server delay (milliseconds). @JsonKey(name: 'lastmileDelay') final int? lastmileDelay; @@ -1911,7 +1996,7 @@ class RtcStats { @JsonKey(name: 'userCount') final int? 
userCount; - /// Application CPU usage (%).The value of cpuTotalUsage is always reported as 0 in the onLeaveChannel callback.As of Android 8.1, you cannot get the CPU usage from this attribute due to system limitations. + /// Application CPU usage (%).The value of cpuAppUsage is always reported as 0 in the onLeaveChannel callback.As of Android 8.1, you cannot get the CPU usage from this attribute due to system limitations. @JsonKey(name: 'cpuAppUsage') final double? cpuAppUsage; @@ -1919,7 +2004,7 @@ class RtcStats { @JsonKey(name: 'cpuTotalUsage') final double? cpuTotalUsage; - /// The round-trip time delay (ms) from the client to the local router.On Android, to get gatewayRtt, ensure that you add the android.permission.ACCESS_WIFI_STATE permission after in the AndroidManifest.xml file in your project. + /// The round-trip time delay (ms) from the client to the local router.This property is disabled on devices running iOS 14 or later, and enabled on devices running versions earlier than iOS 14 by default. To enable this property on devices running iOS 14 or later, .On Android, to get gatewayRtt, ensure that you add the android.permission.ACCESS_WIFI_STATE permission after in the AndroidManifest.xml file in your project. @JsonKey(name: 'gatewayRtt') final int? gatewayRtt; @@ -1991,79 +2076,6 @@ class RtcStats { Map toJson() => _$RtcStatsToJson(this); } -/// The capture type of the custom video source. -@JsonEnum(alwaysCreate: true) -enum VideoSourceType { - /// (Default) The primary camera. - @JsonValue(0) - videoSourceCameraPrimary, - - /// The camera. - @JsonValue(0) - videoSourceCamera, - - /// The secondary camera. - @JsonValue(1) - videoSourceCameraSecondary, - - /// The primary screen. - @JsonValue(2) - videoSourceScreenPrimary, - - /// The screen. - @JsonValue(2) - videoSourceScreen, - - /// The secondary screen. - @JsonValue(3) - videoSourceScreenSecondary, - - /// The custom video source. - @JsonValue(4) - videoSourceCustom, - - /// The video source from the media player. - @JsonValue(5) - videoSourceMediaPlayer, - - /// The video source is a PNG image. - @JsonValue(6) - videoSourceRtcImagePng, - - /// The video source is a JPEG image. - @JsonValue(7) - videoSourceRtcImageJpeg, - - /// The video source is a GIF image. - @JsonValue(8) - videoSourceRtcImageGif, - - /// The video source is remote video acquired by the network. - @JsonValue(9) - videoSourceRemote, - - /// A transcoded video source. - @JsonValue(10) - videoSourceTranscoded, - - /// An unknown video source. - @JsonValue(100) - videoSourceUnknown, -} - -/// @nodoc -extension VideoSourceTypeExt on VideoSourceType { - /// @nodoc - static VideoSourceType fromValue(int value) { - return $enumDecode(_$VideoSourceTypeEnumMap, value); - } - - /// @nodoc - int value() { - return _$VideoSourceTypeEnumMap[this]!; - } -} - /// The user role in the interactive live streaming. @JsonEnum(alwaysCreate: true) enum ClientRoleType { @@ -2143,7 +2155,7 @@ extension AudienceLatencyLevelTypeExt on AudienceLatencyLevelType { } } -/// The detailed options of a user. +/// Setting of user role properties. @JsonSerializable(explicitToJson: true, includeIfNull: false) class ClientRoleOptions { /// @nodoc @@ -2223,93 +2235,33 @@ extension ExperiencePoorReasonExt on ExperiencePoorReason { } } -/// Audio statistics of the remote user. 
-@JsonSerializable(explicitToJson: true, includeIfNull: false) -class RemoteAudioStats { - /// @nodoc - const RemoteAudioStats( - {this.uid, - this.quality, - this.networkTransportDelay, - this.jitterBufferDelay, - this.audioLossRate, - this.numChannels, - this.receivedSampleRate, - this.receivedBitrate, - this.totalFrozenTime, - this.frozenRate, - this.mosValue, - this.totalActiveTime, - this.publishDuration, - this.qoeQuality, - this.qualityChangedReason}); - - /// The user ID of the remote user. - @JsonKey(name: 'uid') - final int? uid; - - /// The quality of the audio stream sent by the user. See QualityType . - @JsonKey(name: 'quality') - final int? quality; - - /// The network delay (ms) from the sender to the receiver. - @JsonKey(name: 'networkTransportDelay') - final int? networkTransportDelay; - - /// The network delay (ms) from the audio receiver to the jitter buffer.When the receiving end is an audience member and audienceLatencyLevel of ClientRoleOptions is 1, this parameter does not take effect. - @JsonKey(name: 'jitterBufferDelay') - final int? jitterBufferDelay; - - /// The frame loss rate (%) of the remote audio stream in the reported interval. - @JsonKey(name: 'audioLossRate') - final int? audioLossRate; - - /// The number of audio channels. - @JsonKey(name: 'numChannels') - final int? numChannels; - - /// The sampling rate of the received audio stream in the reported interval. - @JsonKey(name: 'receivedSampleRate') - final int? receivedSampleRate; - - /// The average bitrate (Kbps) of the received audio stream in the reported interval. - @JsonKey(name: 'receivedBitrate') - final int? receivedBitrate; - - /// The total freeze time (ms) of the remote audio stream after the remote user joins the channel. In a session, audio freeze occurs when the audio frame loss rate reaches 4%. - @JsonKey(name: 'totalFrozenTime') - final int? totalFrozenTime; - - /// The total audio freeze time as a percentage (%) of the total time when the audio is available. The audio is considered available when the remote user neither stops sending the audio stream nor disables the audio module after joining the channel. - @JsonKey(name: 'frozenRate') - final int? frozenRate; - - /// The quality of the remote audio stream in the reported interval. The quality is determined by the Agora real-time audio MOS (Mean Opinion Score) measurement method. The return value range is [0, 500]. Dividing the return value by 100 gets the MOS score, which ranges from 0 to 5. The higher the score, the better the audio quality.The subjective perception of audio quality corresponding to the Agora real-time audio MOS scores is as follows:MOS scorePerception of audio qualityGreater than 4Excellent. The audio sounds clear and smooth.From 3.5 to 4Good. The audio has some perceptible impairment but still sounds clear.From 3 to 3.5Fair. The audio freezes occasionally and requires attentive listening.From 2.5 to 3Poor. The audio sounds choppy and requires considerable effort to understand.From 2 to 2.5Bad. The audio has occasional noise. Consecutive audio dropouts occur, resulting in some information loss. The users can communicate only with difficulty.Less than 2Very bad. The audio has persistent noise. Consecutive audio dropouts are frequent, resulting in severe information loss. Communication is nearly impossible. - @JsonKey(name: 'mosValue') - final int? 
mosValue; - - /// The total active time (ms) between the start of the audio call and the callback of the remote user.The active time refers to the total duration of the remote user without the mute state. - @JsonKey(name: 'totalActiveTime') - final int? totalActiveTime; - - /// The total duration (ms) of the remote audio stream. - @JsonKey(name: 'publishDuration') - final int? publishDuration; +/// AI noise reduction modes. +@JsonEnum(alwaysCreate: true) +enum AudioAinsMode { + /// 0: (Default) Balance mode. This mode allows for a balanced performance on noise reduction and time delay. + @JsonValue(0) + ainsModeBalanced, - /// The Quality of Experience (QoE) of the local user when receiving a remote audio stream. - @JsonKey(name: 'qoeQuality') - final int? qoeQuality; + /// 1: Aggressive mode. In scenarios where high performance on noise reduction is required, such as live streaming outdoor events, this mode reduces noises more dramatically, but may sometimes affect the original character of the audio. + @JsonValue(1) + ainsModeAggressive, - /// Reasons why the QoE of the local user when receiving a remote audio stream is poor. See ExperiencePoorReason . - @JsonKey(name: 'qualityChangedReason') - final int? qualityChangedReason; + /// 2: Aggressive mode with low latency. The noise reduction delay of this mode is about only half of that of the balance and aggressive modes. It is suitable for scenarios that have high requirements on noise reduction with low latency, such as singing together online in real time. + @JsonValue(2) + ainsModeUltralowlatency, +} +/// @nodoc +extension AudioAinsModeExt on AudioAinsMode { /// @nodoc - factory RemoteAudioStats.fromJson(Map json) => - _$RemoteAudioStatsFromJson(json); + static AudioAinsMode fromValue(int value) { + return $enumDecode(_$AudioAinsModeEnumMap, value); + } /// @nodoc - Map toJson() => _$RemoteAudioStatsToJson(this); + int value() { + return _$AudioAinsModeEnumMap[this]!; + } } /// The audio profile. @@ -2368,11 +2320,11 @@ enum AudioScenarioType { @JsonValue(0) audioScenarioDefault, - /// 3: High-quality audio scenario, where users mainly play music. + /// 3: High-quality audio scenario, where users mainly play music. For example, instrument tutoring. @JsonValue(3) audioScenarioGameStreaming, - /// 5: Chatroom scenario, where users need to frequently switch the user role or mute and unmute the microphone. In this scenario, audience members receive a pop-up window to request permission of using microphones. + /// 5: Chatroom scenario, where users need to frequently switch the user role or mute and unmute the microphone. For example, education scenarios. In this scenario, audience members receive a pop-up window to request permission of using microphones. @JsonValue(5) audioScenarioChatroom, @@ -2490,6 +2442,31 @@ extension ScreenScenarioTypeExt on ScreenScenarioType { } } +/// The video application scenarios. +@JsonEnum(alwaysCreate: true) +enum VideoApplicationScenarioType { + /// 0: (Default) The general scenario.
+ @JsonValue(0) + applicationScenarioGeneral, + + /// 1: The meeting scenario. If set to applicationScenarioMeeting (1), the SDK automatically enables the following strategies:In meeting scenarios where low-quality video streams are required to have a high bitrate, the SDK automatically enables multiple technologies used to deal with network congestion, to enhance the performance of the low-quality streams and to ensure the smooth reception by subscribers.The SDK monitors the number of subscribers to the high-quality video stream in real time and dynamically adjusts its configuration based on the number of subscribers.If nobody subscribes to the high-quality stream, the SDK automatically reduces its bitrate and frame rate to save upstream bandwidth.If someone subscribes to the high-quality stream, the SDK resets the high-quality stream to the VideoEncoderConfiguration configuration used in the most recent calling of setVideoEncoderConfiguration . If no configuration has been set by the user previously, the following values are used:Resolution: (Windows and macOS) 1280 × 720; (Android and iOS) 960 × 540Frame rate: 15 fpsBitrate: (Windows and macOS) 1600 Kbps; (Android and iOS) 1000 KbpsThe SDK monitors the number of subscribers to the low-quality video stream in real time and dynamically enables or disables it based on the number of subscribers.If the user has called setDualStreamMode to never send low-quality video streams (disableSimulcastStream), the dynamic adjustment of the low-quality stream in meeting scenarios will not take effect.If nobody subscribes to the low-quality stream, the SDK automatically disables it to save upstream bandwidth.If someone subscribes to the low-quality stream, the SDK enables the low-quality stream and resets it to the SimulcastStreamConfig configuration used in the most recent calling of setDualStreamMode. If no configuration has been set by the user previously, the following values are used:Resolution: 480 × 272Frame rate: 15 fpsBitrate: 500 Kbps. + @JsonValue(1) + applicationScenarioMeeting, +} + +/// @nodoc +extension VideoApplicationScenarioTypeExt on VideoApplicationScenarioType { + /// @nodoc + static VideoApplicationScenarioType fromValue(int value) { + return $enumDecode(_$VideoApplicationScenarioTypeEnumMap, value); + } + + /// @nodoc + int value() { + return _$VideoApplicationScenarioTypeEnumMap[this]!; + } +} + /// The brightness level of the video image captured by the local camera. @JsonEnum(alwaysCreate: true) enum CaptureBrightnessLevelType { @@ -2689,7 +2666,7 @@ enum LocalVideoStreamError { @JsonValue(8) localVideoStreamErrorDeviceNotFound, - /// 9: (For macOS only) The video capture device currently in use is disconnected (such as being unplugged). + /// 9:(For macOS only) The video capture device currently in use is disconnected (such as being unplugged). @JsonValue(9) localVideoStreamErrorDeviceDisconnected, @@ -2825,7 +2802,7 @@ extension RemoteAudioStateReasonExt on RemoteAudioStateReason { } } -/// The state of the remote video. +/// The state of the remote video stream. @JsonEnum(alwaysCreate: true) enum RemoteVideoState { /// 0: The remote video is in the initial state. The SDK reports this state in the case of remoteVideoStateReasonLocalMuted, remoteVideoStateReasonRemoteMuted, or remoteVideoStateReasonRemoteOffline. @JsonValue(0) remoteVideoStateStopped, @@ -2836,7 +2813,7 @@ enum RemoteVideoState { @JsonValue(1) remoteVideoStateStarting, - /// 2: The remote video stream is decoded and plays normally.
The SDK reports this state in the case of remoteVideoStateReasonNetworkRecovery, remoteVideoStateReasonLocalUnmuted, remoteVideoStateReasonRemoteUnmuted or remoteVideoStateReasonAudioFallbackRecovery. + /// 2: The remote video stream is decoded and plays normally. The SDK reports this state in the case of remoteVideoStateReasonNetworkRecovery, remoteVideoStateReasonLocalUnmuted, remoteVideoStateReasonRemoteUnmuted, or remoteVideoStateReasonAudioFallbackRecovery. @JsonValue(2) remoteVideoStateDecoding, @@ -2873,7 +2850,7 @@ enum RemoteVideoStateReason { @JsonValue(1) remoteVideoStateReasonNetworkCongestion, - /// 2: Network recovery. + /// 2: The network is recovered. @JsonValue(2) remoteVideoStateReasonNetworkRecovery, @@ -2913,9 +2890,13 @@ enum RemoteVideoStateReason { @JsonValue(11) remoteVideoStateReasonVideoStreamTypeChangeToHigh, - /// @nodoc + /// 12: (iOS only) The remote user's app has switched to the background. @JsonValue(12) remoteVideoStateReasonSdkInBackground, + + /// @nodoc + @JsonValue(13) + remoteVideoStateReasonCodecNotSupport, } /// @nodoc @@ -3066,11 +3047,11 @@ class AudioVolumeInfo { /// @nodoc const AudioVolumeInfo({this.uid, this.volume, this.vad, this.voicePitch}); - /// The user ID.In the local user's callback, uid = 0.In the remote users' callback, uid is the user ID of a remote user whose instantaneous volume is one of the three highest. + /// The user ID.In the local user's callback, uid is 0.In the remote users' callback, uid is the user ID of a remote user whose instantaneous volume is the highest. @JsonKey(name: 'uid') final int? uid; - /// The volume of the user. The value ranges between 0 (lowest volume) and 255 (highest volume). + /// The volume of the user. The value ranges between 0 (the lowest volume) and 255 (the highest volume). If the local user enables audio capturing and calls muteLocalAudioStream and sets it as true to mute, the value of volume indicates the volume of the locally captured audio signal. @JsonKey(name: 'volume') final int? volume; @@ -3293,7 +3274,7 @@ enum RtmpStreamPublishState { @JsonValue(0) rtmpStreamPublishStateIdle, - /// 1: The SDK is connecting to Agora's streaming server and the CDN server. + /// 1: The streaming server and CDN server are being connected. @JsonValue(1) rtmpStreamPublishStateConnecting, @@ -3301,16 +3282,15 @@ enum RtmpStreamPublishState { @JsonValue(2) rtmpStreamPublishStateRunning, - /// 3: The RTMP or RTMPS streaming is recovering. When exceptions occur to the CDN, or the streaming is interrupted, the SDK tries to resume RTMP or RTMPS streaming and returns this state.If the SDK successfully resumes the streaming, rtmpStreamPublishStateRunning(2) returns. - /// If the streaming does not resume within 60 seconds or server errors occur, rtmpStreamPublishStateFailure(4) returns. You can also reconnect to the server by calling the stopRtmpStream method. + /// 3: The RTMP or RTMPS streaming is recovering. When exceptions occur to the CDN, or the streaming is interrupted, the SDK tries to resume RTMP or RTMPS streaming and returns this state.If the SDK successfully resumes the streaming, rtmpStreamPublishStateRunning(2) returns.If the streaming does not resume within 60 seconds or server errors occur, rtmpStreamPublishStateFailure(4) returns. If you feel that 60 seconds is too long, you can also actively try to reconnect. @JsonValue(3) rtmpStreamPublishStateRecovering, - /// 4: The RTMP or RTMPS streaming fails. See the errCode parameter for the detailed error information. + /// 4: The RTMP or RTMPS streaming fails. 
After a failure, you can troubleshoot the cause of the error through the returned error code. @JsonValue(4) rtmpStreamPublishStateFailure, - /// 5: The SDK is disconnecting from the Agora streaming server and CDN. When you call stopRtmpStream to stop the streaming normally, the SDK reports the streaming state as rtmpStreamPublishStateDisconnecting and rtmpStreamPublishStateIdle in sequence. + /// 5: The SDK is disconnecting from the Agora streaming server and CDN. When you call stopRtmpStream to stop the Media Push normally, the SDK reports the Media Push state as rtmpStreamPublishStateDisconnecting and rtmpStreamPublishStateIdle in sequence. @JsonValue(5) rtmpStreamPublishStateDisconnecting, } @@ -3331,7 +3311,7 @@ extension RtmpStreamPublishStateExt on RtmpStreamPublishState { /// Error codes of the RTMP or RTMPS streaming. @JsonEnum(alwaysCreate: true) enum RtmpStreamPublishErrorType { - /// 0: The RTMP or RTMPS streaming publishes successfully. + /// 0: The RTMP or RTMPS streaming has not started or has ended. @JsonValue(0) rtmpStreamPublishErrorOk, @@ -3343,11 +3323,11 @@ enum RtmpStreamPublishErrorType { @JsonValue(2) rtmpStreamPublishErrorEncryptedStreamNotAllowed, - /// 3: Timeout for the RTMP or RTMPS streaming. Try to publish the streaming again. + /// 3: Timeout for the RTMP or RTMPS streaming. @JsonValue(3) rtmpStreamPublishErrorConnectionTimeout, - /// 4: An error occurs in Agora's streaming server. Try to publish the streaming again. + /// 4: An error occurs in Agora's streaming server. @JsonValue(4) rtmpStreamPublishErrorInternalServerError, @@ -3355,7 +3335,7 @@ enum RtmpStreamPublishErrorType { @JsonValue(5) rtmpStreamPublishErrorRtmpServerError, - /// 6: The RTMP or RTMPS streaming publishing requests are too frequent. + /// 6: The RTMP or RTMPS streaming publishes too frequently. @JsonValue(6) rtmpStreamPublishErrorTooOften, @@ -3375,11 +3355,11 @@ enum RtmpStreamPublishErrorType { @JsonValue(10) rtmpStreamPublishErrorFormatNotSupported, - /// 11: The user role is not host, so the user cannot use the CDN live streaming function. Check your app code logic. + /// 11: The user role is not host, so the user cannot use the CDN live streaming function. Check your application code logic. @JsonValue(11) rtmpStreamPublishErrorNotBroadcaster, - /// 13: The updateRtmpTranscoding or setLiveTranscoding method is called to update the transcoding configuration in a scenario where there is streaming without transcoding. Check your application code logic. + /// 13: The updateRtmpTranscoding method is called to update the transcoding configuration in a scenario where there is streaming without transcoding. Check your application code logic. @JsonValue(13) rtmpStreamPublishErrorTranscodingNoMixStream, @@ -3387,15 +3367,15 @@ enum RtmpStreamPublishErrorType { @JsonValue(14) rtmpStreamPublishErrorNetDown, - /// 15: Your App ID does not have permission to use the CDN live streaming function. + /// @nodoc @JsonValue(15) rtmpStreamPublishErrorInvalidAppid, - /// @nodoc + /// 16: Your project does not have permission to use streaming services. Refer to Media Push to enable the Media Push permission. @JsonValue(16) rtmpStreamPublishErrorInvalidPrivilege, - /// 100: The streaming has been stopped normally. After you call stopRtmpStream to stop streaming, the SDK returns this value. + /// 100: The streaming has been stopped normally. After you stop the Media Push, the SDK returns this value. 
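+ /// A minimal sketch of observing these codes, assuming an initialized RtcEngine named engine and the onRtmpStreamingStateChanged callback of RtcEngineEventHandler:
+ /// ```dart
+ /// engine.registerEventHandler(RtcEngineEventHandler(
+ ///   onRtmpStreamingStateChanged: (url, state, errCode) {
+ ///     // rtmpStreamUnpublishErrorOk (100) is reported after a normal stop.
+ ///     debugPrint('Media Push $url: $state ($errCode)');
+ ///   },
+ /// ));
+ /// ```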
@JsonValue(100) rtmpStreamUnpublishErrorOk, } @@ -3413,14 +3393,14 @@ extension RtmpStreamPublishErrorTypeExt on RtmpStreamPublishErrorType { } } -/// Events during the media push. +/// Events during the Media Push. @JsonEnum(alwaysCreate: true) enum RtmpStreamingEvent { - /// 1: An error occurs when you add a background image or a watermark image in the media push. + /// 1: An error occurs when you add a background image or a watermark image in the Media Push. @JsonValue(1) rtmpStreamingEventFailedLoadImage, - /// 2: The streaming URL is already being used for CDN live streaming. If you want to start new streaming, use a new streaming URL. + /// 2: The streaming URL is already being used for Media Push. If you want to start new streaming, use a new streaming URL. @JsonValue(2) rtmpStreamingEventUrlAlreadyInUse, @@ -3484,7 +3464,7 @@ class RtcImage { @JsonKey(name: 'zOrder') final int? zOrder; - /// The transparency of the watermark or background image. The value ranges between 0.0 and 1.0:0.0: Completely transparent.1.0: (Default) Opaque. + /// The transparency of the watermark or background image. The range of the value is [0.0,1.0]:0.0: Completely transparent.1.0: (Default) Opaque. @JsonKey(name: 'alpha') final double? alpha; @@ -3522,11 +3502,11 @@ class LiveStreamAdvancedFeature { /// Connection states. @JsonEnum(alwaysCreate: true) enum ConnectionStateType { - /// 1: The SDK is disconnected from the Agora edge server. The state indicates the SDK is in one of the following phases:Theinitial state before calling the joinChannel [2/2] method.The app calls the leaveChannel method. + /// 1: The SDK is disconnected from the Agora edge server. The state indicates the SDK is in one of the following phases:The initial state before calling the joinChannel method.The app calls the leaveChannel method. @JsonValue(1) connectionStateDisconnected, - /// 2: The SDK is connecting to the Agora edge server. This state indicates that the SDK is establishing a connection with the specified channel after the app calls joinChannel [2/2].If the SDK successfully joins the channel, it triggers the onConnectionStateChanged callback and the connection state switches to connectionStateConnected.After the connection is established, the SDK also initializes the media and triggers onJoinChannelSuccess when everything is ready. + /// 2: The SDK is connecting to the Agora edge server. This state indicates that the SDK is establishing a connection with the specified channel after the app calls joinChannel.If the SDK successfully joins the channel, it triggers the onConnectionStateChanged callback and the connection state switches to connectionStateConnected.After the connection is established, the SDK also initializes the media and triggers onJoinChannelSuccess when everything is ready. @JsonValue(2) connectionStateConnecting, @@ -3538,7 +3518,7 @@ enum ConnectionStateType { @JsonValue(4) connectionStateReconnecting, - /// 5: The SDK fails to connect to the Agora edge server or join the channel. This state indicates that the SDK stops trying to rejoin the channel. You must call leaveChannel to leave the channel.You can call joinChannel [2/2] to rejoin the channel.If the SDK is banned from joining the channel by the Agora edge server through the RESTful API, the SDK triggers the onConnectionStateChanged callback. + /// 5: The SDK fails to connect to the Agora edge server or join the channel. This state indicates that the SDK stops trying to rejoin the channel. 
You must call leaveChannel to leave the channel.You can call joinChannel to rejoin the channel.If the SDK is banned from joining the channel by the Agora edge server through the RESTful API, the SDK triggers the onConnectionStateChanged callback. @JsonValue(5) connectionStateFailed, } @@ -3655,7 +3635,7 @@ class LiveTranscoding { @JsonKey(name: 'videoFramerate') final int? videoFramerate; - /// DeprecatedThis parameter is deprecated.Latency mode:true: Low latency with unassured quality.false: (Default) High latency with assured quality. + /// DeprecatedThis member is deprecated.Latency mode:true: Low latency with unassured quality.false: (Default) High latency with assured quality. @JsonKey(name: 'lowLatency') final bool? lowLatency; @@ -3687,7 +3667,7 @@ class LiveTranscoding { @JsonKey(name: 'transcodingExtraInfo') final String? transcodingExtraInfo; - /// DeprecatedThis parameter is deprecated.The metadata sent to the CDN client. + /// DeprecatedObsolete and not recommended for use.The metadata sent to the CDN client. @JsonKey(name: 'metadata') final String? metadata; @@ -3739,7 +3719,7 @@ class LiveTranscoding { Map toJson() => _$LiveTranscodingToJson(this); } -/// The video streams for the video mixing on the local client. +/// The video streams for local video mixing. @JsonSerializable(explicitToJson: true, includeIfNull: false) class TranscodingVideoStream { /// @nodoc @@ -3747,6 +3727,7 @@ class TranscodingVideoStream { {this.sourceType, this.remoteUserUid, this.imageUrl, + this.mediaPlayerId, this.x, this.y, this.width, @@ -3755,43 +3736,47 @@ class TranscodingVideoStream { this.alpha, this.mirror}); - /// The source type of video for the video mixing on the local client. See VideoSourceType . + /// The video source type for local video mixing. See VideoSourceType . @JsonKey(name: 'sourceType') - final MediaSourceType? sourceType; + final VideoSourceType? sourceType; - /// The ID of the remote user.Use this parameter only when the source type of the video for the video mixing on the local client is videoSourceRemote. + /// The user ID of the remote user.Use this parameter only when the source type is videoSourceRemote for local video mixing. @JsonKey(name: 'remoteUserUid') final int? remoteUserUid; - /// The URL of the image. + /// The URL of the image.Use this parameter only when the source type is the image for local video mixing. @JsonKey(name: 'imageUrl') final String? imageUrl; - /// The horizontal displacement of the top-left corner of the video for the video mixing on the client relative to the top-left corner (origin) of the canvas for this video mixing. + /// (Optional) Media player ID. Use the parameter only when you set sourceType to videoSourceMediaPlayer. + @JsonKey(name: 'mediaPlayerId') + final int? mediaPlayerId; + + /// The relative lateral displacement of the top left corner of the video for local video mixing to the origin (the top left corner of the canvas). @JsonKey(name: 'x') final int? x; - /// The vertical displacement of the top-left corner of the video for the video mixing on the client relative to the top-left corner (origin) of the canvas for this video mixing. + /// The relative longitudinal displacement of the top left corner of the captured video to the origin (the top left corner of the canvas). @JsonKey(name: 'y') final int? y; - /// The width (px) of the video for the video mixing on the local client. + /// The width (px) of the video for local video mixing on the canvas. @JsonKey(name: 'width') final int? 
width; - /// The height (px) of the video for the video mixing on the local client. + /// The height (px) of the video for local video mixing on the canvas. @JsonKey(name: 'height') final int? height; - /// The number of the layer to which the video for the video mixing on the local client belongs. The value range is [0,100].0: (Default) The layer is at the bottom.100: The layer is at the top. + /// The number of the layer to which the video for the local video mixing belongs. The value range is [0, 100].0: (Default) The layer is at the bottom.100: The layer is at the top. @JsonKey(name: 'zOrder') final int? zOrder; - /// The transparency of the video for the video mixing on the local client. The value range is [0.0,1.0]. 0.0 means the transparency is completely transparent. 1.0 means the transparency is opaque. + /// The transparency of the video for local video mixing. The value range is [0.0, 1.0]. 0.0 indicates that the video is completely transparent, and 1.0 indicates that it is opaque. @JsonKey(name: 'alpha') final double? alpha; - /// Whether to mirror the video for the video mixing on the local client.true: Mirror the captured video.false: (Default) Do not mirror the captured video.The paramter only works for videos with the source type CAMERA + /// Whether to mirror the video for the local video mixing.true: Mirror the video for the local video mixing.false: (Default) Do not mirror the video for the local video mixing.This parameter only takes effect on video source types that are cameras. @JsonKey(name: 'mirror') final bool? mirror; @@ -3817,11 +3802,11 @@ class LocalTranscoderConfiguration { @JsonKey(name: 'streamCount') final int? streamCount; - /// The video streams for the video mixing on the local client. See TranscodingVideoStream . - @JsonKey(name: 'VideoInputStreams') + /// The video streams for local video mixing. See TranscodingVideoStream . + @JsonKey(name: 'videoInputStreams') final List? videoInputStreams; - /// The encoding configuration of the mixed video stream after the video mixing on the local client. See VideoEncoderConfiguration . + /// The encoding configuration of the mixed video stream after the local video mixing. See VideoEncoderConfiguration . @JsonKey(name: 'videoOutputConfiguration') final VideoEncoderConfiguration? videoOutputConfiguration; @@ -3837,6 +3822,51 @@ class LocalTranscoderConfiguration { Map toJson() => _$LocalTranscoderConfigurationToJson(this); } +/// The error code of the local video mixing failure. +@JsonEnum(alwaysCreate: true) +enum VideoTranscoderError { + /// @nodoc + @JsonValue(0) + vtErrOk, + + /// 1: The selected video source has not started video capture. You need to create a video track for it and start video capture. + @JsonValue(1) + vtErrVideoSourceNotReady, + + /// 2: The video source type is invalid. You need to re-specify the supported video source type. + @JsonValue(2) + vtErrInvalidVideoSourceType, + + /// 3: The image path is invalid. You need to re-specify the correct image path. + @JsonValue(3) + vtErrInvalidImagePath, + + /// 4: The image format is invalid. Make sure the image format is one of PNG, JPEG, or GIF. + @JsonValue(4) + vtErrUnsupportImageFormat, + + /// 5: The video encoding resolution after video mixing is invalid. + @JsonValue(5) + vtErrInvalidLayout, + + /// 20: Unknown internal error. 
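+ /// A minimal local video mixing sketch that can surface these codes, assuming an initialized RtcEngine named engine whose camera capture has already started; startLocalVideoTranscoder and the 640 × 360 layout below are illustrative assumptions:
+ /// ```dart
+ /// await engine.startLocalVideoTranscoder(
+ ///   const LocalTranscoderConfiguration(
+ ///     streamCount: 1,
+ ///     videoInputStreams: [
+ ///       TranscodingVideoStream(
+ ///           sourceType: VideoSourceType.videoSourceCameraPrimary,
+ ///           x: 0, y: 0, width: 640, height: 360, zOrder: 1),
+ ///     ],
+ ///     videoOutputConfiguration: VideoEncoderConfiguration(
+ ///         dimensions: VideoDimensions(width: 640, height: 360)),
+ ///   ),
+ /// );
+ /// ```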
+ @JsonValue(20) + vtErrInternal, +} + +/// @nodoc +extension VideoTranscoderErrorExt on VideoTranscoderError { + /// @nodoc + static VideoTranscoderError fromValue(int value) { + return $enumDecode(_$VideoTranscoderErrorEnumMap, value); + } + + /// @nodoc + int value() { + return _$VideoTranscoderErrorEnumMap[this]!; + } +} + /// Configurations of the last-mile network test. @JsonSerializable(explicitToJson: true, includeIfNull: false) class LastmileProbeConfig { @@ -3934,7 +3964,7 @@ class LastmileProbeResult { const LastmileProbeResult( {this.state, this.uplinkReport, this.downlinkReport, this.rtt}); - /// The status of the last-mile probe test. See LastmileProbeResultState . + /// The status of the last-mile network tests. See LastmileProbeResultState . @JsonKey(name: 'state') final LastmileProbeResultState? state; @@ -3989,11 +4019,11 @@ enum ConnectionChangedReasonType { @JsonValue(6) connectionChangedInvalidAppId, - /// 7: The connection failed since channel name is not valid. Please rejoin the channel with a valid channel name. + /// 7: The connection failed since channel name is not valid. Rejoin the channel with a valid channel name. @JsonValue(7) connectionChangedInvalidChannelName, - /// 8: The connection failed because the token is not valid. Typical reasons include:The App Certificate for the project is enabled in Agora Console, but you do not use a token when joining the channel. If you enable the App Certificate, you must use a token to join the channel.The uid specified when calling joinChannel [2/2] to join the channel is inconsistent with the uid passed in when generating the token. + /// 8: The connection failed because the token is not valid. Possible reasons are as follows:The App Certificate for the project is enabled in Agora Console, but you do not use a token when joining the channel. If you enable the App Certificate, you must use a token to join the channel.The uid specified when calling joinChannel to join the channel is inconsistent with the uid passed in when generating the token. @JsonValue(8) connectionChangedInvalidToken, @@ -4001,7 +4031,7 @@ enum ConnectionChangedReasonType { @JsonValue(9) connectionChangedTokenExpired, - /// 10: The connection is rejected by server. Typical reasons include:The user is already in the channel and still calls a method, for example, joinChannel [2/2], to join the channel. Stop calling this method to clear this error.The user tries to join the channel when conducting a pre-call test. The user needs to call the channel after the call test ends. + /// 10: The connection is rejected by server. Possible reasons are as follows:The user is already in the channel and still calls a method, for example, joinChannel, to join the channel. Stop calling this method to clear this error.The user tries to join a channel while a test call is in progress. The user needs to join the channel after the call test ends. @JsonValue(10) connectionChangedRejectedByServer, @@ -4021,7 +4051,7 @@ enum ConnectionChangedReasonType { @JsonValue(14) connectionChangedKeepAliveTimeout, - /// 15: The SDK has rejoined the channel successfully. + /// 15: The user has rejoined the channel successfully. @JsonValue(15) connectionChangedRejoinSuccess, @@ -4033,21 +4063,21 @@ enum ConnectionChangedReasonType { @JsonValue(17) connectionChangedEchoTest, - /// @nodoc + /// 18: The local IP address was changed by the user. 
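+ /// A minimal sketch of observing these reasons, assuming an initialized RtcEngine named engine and the onConnectionStateChanged callback of RtcEngineEventHandler:
+ /// ```dart
+ /// engine.registerEventHandler(RtcEngineEventHandler(
+ ///   onConnectionStateChanged: (connection, state, reason) {
+ ///     // Distinguish, for example, an IP change from a server ban.
+ ///     debugPrint('${connection.channelId}: $state ($reason)');
+ ///   },
+ /// ));
+ /// ```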
@JsonValue(18) connectionChangedClientIpAddressChangedByUser, - /// @nodoc + /// 19: The user joined the same channel from different devices with the same UID. @JsonValue(19) connectionChangedSameUidLogin, - /// @nodoc + /// 20: The number of hosts in the channel has reached the upper limit. @JsonValue(20) connectionChangedTooManyBroadcasters, /// @nodoc @JsonValue(21) - connectionChangedLicenseVerifyFailed, + connectionChangedLicenseValidationFailure, } /// @nodoc @@ -4255,7 +4285,7 @@ extension VideoViewSetupModeExt on VideoViewSetupMode { } } -/// Attributes of video canvas object. +/// Attributes of the video canvas object. @JsonSerializable(explicitToJson: true, includeIfNull: false) class VideoCanvas { /// @nodoc @@ -4267,7 +4297,8 @@ class VideoCanvas { this.setupMode, this.sourceType, this.mediaPlayerId, - this.cropArea}); + this.cropArea, + this.enableAlphaMask}); /// Video display window. @JsonKey(name: 'view') @@ -4285,22 +4316,26 @@ class VideoCanvas { @JsonKey(name: 'mirrorMode') final VideoMirrorModeType? mirrorMode; - /// Setting mode of the view. See VideoViewSetupMode. + /// Setting mode of the view. See VideoViewSetupMode . @JsonKey(name: 'setupMode') final VideoViewSetupMode? setupMode; - /// The type of the video frame, see VideoSourceType . + /// The type of the video source. See VideoSourceType . @JsonKey(name: 'sourceType') final VideoSourceType? sourceType; - /// The ID of the media player. You can get the media player ID by calling getMediaPlayerId . + /// The ID of the media player. You can get the Device ID by calling getMediaPlayerId . @JsonKey(name: 'mediaPlayerId') final int? mediaPlayerId; - /// (Android and iOS only) (Optional) The display area for the video frame. See Rectangle. width and height represent the video pixel width and height of the area. The default value is null (width or height is 0), which means that the actual resolution of the video frame is displayed. + /// (Optional) Display area of the video frame, see Rectangle . width and height represent the video pixel width and height of the area. The default value is null (width or height is 0), which means that the actual resolution of the video frame is displayed. @JsonKey(name: 'cropArea') final Rectangle? cropArea; + /// (Optional) Whether the receiver enables alpha mask rendering:true: The receiver enables alpha mask rendering.false: (default) The receiver disables alpha mask rendering.Alpha mask rendering can create images with transparent effects and extract portraits from videos. When used in combination with other methods, you can implement effects such as picture-in-picture and watermarking.This property applies to macOS only.The receiver can render alpha channel information only when the sender enables alpha transmission.To enable alpha transmission, . + @JsonKey(name: 'enableAlphaMask') + final bool? enableAlphaMask; + /// @nodoc factory VideoCanvas.fromJson(Map json) => _$VideoCanvasFromJson(json); @@ -4320,7 +4355,7 @@ class BeautyOptions { this.rednessLevel, this.sharpnessLevel}); - /// The contrast level, used with the lighteningLevel parameter. The larger the value, the greater the contrast between light and dark. + /// The contrast level, used with the lighteningLevel parameter. The larger the value, the greater the contrast between light and dark. See LighteningContrastLevel . @JsonKey(name: 'lighteningContrastLevel') final LighteningContrastLevel? lighteningContrastLevel; @@ -4328,11 +4363,11 @@ class BeautyOptions { @JsonKey(name: 'lighteningLevel') final double? 
lighteningLevel; - /// The smoothness level, in the range [0.0,1.0], where 0.0 means the original smoothness. The default value is 0.0. The higher the value, the greater the smoothness level. + /// The smoothness level, in the range [0.0,1.0], where 0.0 means the original smoothness. The default value is 0.0. The greater the value, the greater the smoothness level. @JsonKey(name: 'smoothnessLevel') final double? smoothnessLevel; - /// The redness level, in the range [0.0,1.0], where 0.0 means the original redness. The default value is 0.0. The higher the value, the greater the redness level. + /// The redness level, in the range [0.0,1.0], where 0.0 means the original redness. The default value is 0.0. The larger the value, the greater the redness level. @JsonKey(name: 'rednessLevel') final double? rednessLevel; @@ -4547,14 +4582,14 @@ class ColorEnhanceOptions { Map toJson() => _$ColorEnhanceOptionsToJson(this); } -/// The custom background image. +/// The custom background. @JsonSerializable(explicitToJson: true, includeIfNull: false) class VirtualBackgroundSource { /// @nodoc const VirtualBackgroundSource( {this.backgroundSourceType, this.color, this.source, this.blurDegree}); - /// The type of the custom background image. See backgroundSourceType . + /// The custom background. See backgroundSourceType . @JsonKey(name: 'background_source_type') final BackgroundSourceType? backgroundSourceType; @@ -4578,20 +4613,28 @@ class VirtualBackgroundSource { Map toJson() => _$VirtualBackgroundSourceToJson(this); } -/// The type of the custom background image. +/// The custom background. @JsonEnum(alwaysCreate: true) enum BackgroundSourceType { + /// 0: Process the background as alpha information without replacement, only separating the portrait and the background. After setting this value, you can call startLocalVideoTranscoder to implement the picture-in-picture effect. + @JsonValue(0) + backgroundNone, + /// 1: (Default) The background image is a solid color. @JsonValue(1) backgroundColor, - /// The background image is a file in PNG or JPG format. + /// 2: The background is an image in PNG or JPG format. @JsonValue(2) backgroundImg, - /// The background image is the blurred background. + /// 3: The background is a blurred version of the original background. @JsonValue(3) backgroundBlur, + + /// 4: The background is a local video in MP4, AVI, MKV, FLV, or other supported formats. + @JsonValue(4) + backgroundVideo, } /// @nodoc @@ -4614,11 +4657,11 @@ enum BackgroundBlurDegree { @JsonValue(1) blurDegreeLow, - /// The degree of blurring applied to the custom background image is medium. It is difficult for the user to recognize details in the background. + /// 2: The degree of blurring applied to the custom background image is medium. It is difficult for the user to recognize details in the background. @JsonValue(2) blurDegreeMedium, - /// (Default) The degree of blurring applied to the custom background image is high. The user can barely see any distinguishing features in the background. + /// 3: (Default) The degree of blurring applied to the custom background image is high. The user can barely see any distinguishing features in the background. @JsonValue(3) blurDegreeHigh, } @@ -4683,6 +4726,53 @@ extension SegModelTypeExt on SegModelType { } } +/// The type of the audio track. +@JsonEnum(alwaysCreate: true) +enum AudioTrackType { + /// @nodoc + @JsonValue(-1) + audioTrackInvalid, + + /// 0: Mixable audio tracks. 
You can publish multiple mixable audio tracks in one channel, and the SDK will automatically mix these tracks into one. The latency of mixable audio tracks is higher than that of direct audio tracks. + @JsonValue(0) + audioTrackMixable, + + /// 1: Direct audio tracks. When creating multiple audio tracks of this type, each direct audio track can only be published in one channel and cannot be mixed with others. The latency of direct audio tracks is lower than that of mixable audio tracks. + @JsonValue(1) + audioTrackDirect, +} + +/// @nodoc +extension AudioTrackTypeExt on AudioTrackType { + /// @nodoc + static AudioTrackType fromValue(int value) { + return $enumDecode(_$AudioTrackTypeEnumMap, value); + } + + /// @nodoc + int value() { + return _$AudioTrackTypeEnumMap[this]!; + } +} + +/// The configuration of custom audio tracks. +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class AudioTrackConfig { + /// @nodoc + const AudioTrackConfig({this.enableLocalPlayback}); + + /// Whether to enable the local audio-playback device:true: (Default) Enable the local audio-playback device.false: Do not enable the local audio-playback device. + @JsonKey(name: 'enableLocalPlayback') + final bool? enableLocalPlayback; + + /// @nodoc + factory AudioTrackConfig.fromJson(Map<String, dynamic> json) => + _$AudioTrackConfigFromJson(json); + + /// @nodoc + Map<String, dynamic> toJson() => _$AudioTrackConfigToJson(this); +} + /// The options for SDK preset voice beautifier effects. @JsonEnum(alwaysCreate: true) enum VoiceBeautifierPreset { @@ -4738,7 +4828,7 @@ enum VoiceBeautifierPreset { @JsonValue(0x01030800) timbreTransformationRinging, - /// A ultra-high quality voice, which makes the audio clearer and restores more details.To achieve better audio effect quality, Agora recommends that you set the profile of setAudioProfile to audioProfileMusicHighQuality(4) or audioProfileMusicHighQualityStereo(5) and scenario to audioScenarioGameStreaming(3) before calling setVoiceBeautifierPreset .If you have an audio capturing device that can already restore audio details to a high degree, Agora recommends that you do not enable ultra-high quality; otherwise, the SDK may over-restore audio details, and you may not hear the anticipated voice effect. + /// An ultra-high quality voice, which makes the audio clearer and restores more details.To achieve better audio effect quality, Agora recommends that you set the profile of setAudioProfile to audioProfileMusicHighQuality(4) or audioProfileMusicHighQualityStereo(5) and scenario to audioScenarioGameStreaming(3) before calling setVoiceBeautifierPreset .If you have an audio capturing device that can already restore audio details to a high degree, Agora recommends that you do not enable ultra-high quality; otherwise, the SDK may over-restore audio details, and you may not hear the anticipated voice effect. @JsonValue(0x01040100) ultraHighQualityVoice, } @@ -4876,6 +4966,50 @@ enum VoiceConversionPreset { /// A deep voice. To avoid audio distortion, ensure that you use this enumerator to process a male-sounding voice. 
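+ /// A minimal usage sketch, assuming an initialized RtcEngine named engine and the setVoiceConversionPreset method these presets are passed to:
+ /// ```dart
+ /// await engine.setVoiceConversionPreset(
+ ///     VoiceConversionPreset.voiceChangerBass);
+ /// ```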
@JsonValue(0x03010400) voiceChangerBass, + + /// @nodoc + @JsonValue(0x03010500) + voiceChangerCartoon, + + /// @nodoc + @JsonValue(0x03010600) + voiceChangerChildlike, + + /// @nodoc + @JsonValue(0x03010700) + voiceChangerPhoneOperator, + + /// @nodoc + @JsonValue(0x03010800) + voiceChangerMonster, + + /// @nodoc + @JsonValue(0x03010900) + voiceChangerTransformers, + + /// @nodoc + @JsonValue(0x03010A00) + voiceChangerGroot, + + /// @nodoc + @JsonValue(0x03010B00) + voiceChangerDarthVader, + + /// @nodoc + @JsonValue(0x03010C00) + voiceChangerIronLady, + + /// @nodoc + @JsonValue(0x03010D00) + voiceChangerShinChan, + + /// @nodoc + @JsonValue(0x03010E00) + voiceChangerGirlishMan, + + /// @nodoc + @JsonValue(0x03010F00) + voiceChangerChipmunk, } /// @nodoc @@ -4936,7 +5070,7 @@ class ScreenCaptureParameters { this.highLightColor, this.enableHighLight}); - /// The maximum dimensions to encode the shared region. VideoDimensions . The default value is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges.If the screen dimensions are different from the value of this parameter, Agora applies the following strategies for encoding. Suppose dimensions is set to 1920 × 1080:If the value of the screen dimensions is lower than that of dimensions, for example, 1000 × 1000 pixels, the SDK uses the screen dimensions, that is, 1000 × 1000 pixels, for encoding.If the value of the screen dimensions is higher than that of dimensions, for example, 2000 × 1500, the SDK uses the maximum value under dimensions with the aspect ratio of the screen dimension (4:3) for encoding, that is, 1440 × 1080. + /// The video encoding resolution of the shared screen stream. See VideoDimensions . The default value is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges.If the screen dimensions are different from the value of this parameter, Agora applies the following strategies for encoding. Suppose dimensions is set to 1920 × 1080:If the value of the screen dimensions is lower than that of dimensions, for example, 1000 × 1000 pixels, the SDK uses the screen dimensions, that is, 1000 × 1000 pixels, for encoding.If the value of the screen dimensions is higher than that of dimensions, for example, 2000 × 1500, the SDK uses the maximum value under dimensions with the aspect ratio of the screen dimension (4:3) for encoding, that is, 1440 × 1080. @JsonKey(name: 'dimensions') final VideoDimensions? dimensions; @@ -4968,7 +5102,7 @@ class ScreenCaptureParameters { @JsonKey(name: 'highLightWidth') final int? highLightWidth; - /// (For macOS and Windows only) On Windows platforms, the color of the border in ARGB format. The default value is 0xFF8CBF26. On macOS, COLOR_CLASS refers to NSColor. + /// (For macOS and Windows only) On Windows platforms, the color of the border in ARGB format. The default value is 0xFF8CBF26.On macOS, COLOR_CLASS refers to NSColor. @JsonKey(name: 'highLightColor') final int? highLightColor; @@ -5075,7 +5209,7 @@ extension AudioEncodedFrameObserverPositionExt } } -/// Recording configuration. +/// Recording configurations. @JsonSerializable(explicitToJson: true, includeIfNull: false) class AudioRecordingConfiguration { /// @nodoc @@ -5087,13 +5221,11 @@ class AudioRecordingConfiguration { this.quality, this.recordingChannel}); - /// The absolute path (including the filename extensions) of the recording file. For example: C:\music\audio.mp4. 
- /// Ensure that the path for the recording file exists and is writable. + /// The absolute path (including the filename extensions) of the recording file. For example: C:\music\audio.mp4.Ensure that the directory for the recording file exists and is writable. @JsonKey(name: 'filePath') final String? filePath; - /// Whether to encode the audio data: - /// true: Encode audio data in AAC.false: (Default) Do not encode audio data, but save the recorded audio data directly. + /// Whether to encode the audio data:true: Encode audio data in AAC.false: (Default) Do not encode audio data, but save the recorded audio data directly. @JsonKey(name: 'encode') final bool? encode; @@ -5101,7 +5233,7 @@ class AudioRecordingConfiguration { @JsonKey(name: 'sampleRate') final int? sampleRate; - /// Recording content. See AudioFileRecordingType . + /// The recording content. See AudioFileRecordingType . @JsonKey(name: 'fileRecordingType') final AudioFileRecordingType? fileRecordingType; @@ -5109,7 +5241,7 @@ @JsonKey(name: 'quality') final AudioRecordingQualityType? quality; - /// The audio channel of recording: The parameter supports the following values:1: (Default) Mono.2: Stereo.The actual recorded audio channel is related to the audio channel that you capture.If the captured audio is mono and recordingChannel is 2, the recorded audio is the dual-channel data that is copied from mono data, not stereo.If the captured audio is dual channel and recordingChannel is 1, the recorded audio is the mono data that is mixed by dual-channel data.The integration scheme also affects the final recorded audio channel. Therefore, to record in stereo, technical support for assistance. + /// The audio channel of recording: The parameter supports the following values:1: (Default) Mono.2: Stereo.The actual recorded audio channel is related to the audio channel that you capture.If the captured audio is mono and recordingChannel is 2, the recorded audio is the dual-channel data that is copied from mono data, not stereo.If the captured audio is dual channel and recordingChannel is 1, the recorded audio is the mono data that is mixed by dual-channel data.The integration scheme also affects the final recorded audio channel. If you need to record in stereo, contact technical support. @JsonKey(name: 'recordingChannel') final int? recordingChannel; @@ -5121,7 +5253,7 @@ Map<String, dynamic> toJson() => _$AudioRecordingConfigurationToJson(this); } -/// Observer settings for encoded audio. +/// Observer settings for the encoded audio. @JsonSerializable(explicitToJson: true, includeIfNull: false) class AudioEncodedFrameObserverConfig { /// @nodoc @@ -5156,6 +5288,9 @@ class AudioEncodedFrameObserver { /// Gets the encoded audio data of the local user. /// After calling registerAudioEncodedFrameObserver and setting the encoded audio as audioEncodedFrameObserverPositionRecord, you can get the encoded audio data of the local user from this callback. /// + /// * [channels] The number of channels. + /// 1: Mono. + /// 2: Stereo. If the channel uses stereo, the data is interleaved. /// * [frameBuffer] The audio buffer. /// * [length] The data length (byte). /// * [audioEncodedFrameInfo] Audio information after encoding. See EncodedAudioFrameInfo . /// Gets the encoded audio data of all remote users. 
/// After calling registerAudioEncodedFrameObserver and setting the encoded audio as audioEncodedFrameObserverPositionPlayback, you can get encoded audio data of all remote users through this callback. /// + /// * [samplesPerSec] Recording sample rate (Hz). + /// * [channels] The number of channels.1: Mono.2: Stereo. If the channel uses stereo, the data is interleaved. + /// * [samplesPerChannel] The number of samples per channel in the audio frame. /// * [frameBuffer] The audio buffer. /// * [length] The data length (byte). /// * [audioEncodedFrameInfo] Audio information after encoding. See EncodedAudioFrameInfo . @@ -5174,6 +5312,11 @@ class AudioEncodedFrameObserver { /// Gets the mixed and encoded audio data of the local and all remote users. /// After calling registerAudioEncodedFrameObserver and setting the audio profile as audioEncodedFrameObserverPositionMixed, you can get the mixed and encoded audio data of the local and all remote users through this callback. /// + /// * [samplesPerSec] Recording sample rate (Hz). + /// * [channels] The number of channels. + /// 1: Mono. + /// 2: Stereo. If the channel uses stereo, the data is interleaved. + /// * [samplesPerChannel] The number of samples per channel in the audio frame. /// * [frameBuffer] The audio buffer. /// * [length] The data length (byte). /// * [audioEncodedFrameInfo] Audio information after encoding. See EncodedAudioFrameInfo . @@ -5351,11 +5494,11 @@ enum ChannelMediaRelayEvent { @JsonValue(2) relayEventPacketJoinedSrcChannel, - /// 3: The user joins the destination channel. + /// 3: The user joins the target channel. @JsonValue(3) relayEventPacketJoinedDestChannel, - /// 4: The SDK starts relaying the media stream to the destination channel. + /// 4: The SDK starts relaying the media stream to the target channel. @JsonValue(4) relayEventPacketSentToDestChannel, @@ -5367,7 +5510,7 @@ enum ChannelMediaRelayEvent { @JsonValue(6) relayEventPacketReceivedAudioFromSrc, - /// 7: The destination channel is updated. + /// 7: The target channel is updated. @JsonValue(7) relayEventPacketUpdateDestChannel, @@ -5375,11 +5518,11 @@ enum ChannelMediaRelayEvent { @JsonValue(8) relayEventPacketUpdateDestChannelRefused, - /// 9: The destination channel does not change, which means that the destination channel fails to be updated. + /// 9: The target channel does not change, which means that the target channel fails to be updated. @JsonValue(9) relayEventPacketUpdateDestChannelNotChange, - /// 10: The destination channel name is NULL. + /// 10: The target channel name is NULL. @JsonValue(10) relayEventPacketUpdateDestChannelIsNull, @@ -5387,19 +5530,19 @@ enum ChannelMediaRelayEvent { @JsonValue(11) relayEventVideoProfileUpdate, - /// 12: The SDK successfully pauses relaying the media stream to destination channels. + /// 12: The SDK successfully pauses relaying the media stream to target channels. @JsonValue(12) relayEventPauseSendPacketToDestChannelSuccess, - /// 13: The SDK fails to pause relaying the media stream to destination channels. + /// 13: The SDK fails to pause relaying the media stream to target channels. @JsonValue(13) relayEventPauseSendPacketToDestChannelFailed, - /// 14: The SDK successfully resumes relaying the media stream to destination channels. + /// 14: The SDK successfully resumes relaying the media stream to target channels. @JsonValue(14) relayEventResumeSendPacketToDestChannelSuccess, - /// 15: The SDK fails to resume relaying the media stream to destination channels. 
+ /// 15: The SDK fails to resume relaying the media stream to target channels. @JsonValue(15) relayEventResumeSendPacketToDestChannelFailed, } @@ -5476,22 +5619,22 @@ class ChannelMediaInfo { Map<String, dynamic> toJson() => _$ChannelMediaInfoToJson(this); } -/// Configuration information of relaying media streams across channels. +/// Configuration of cross channel media relay. @JsonSerializable(explicitToJson: true, includeIfNull: false) class ChannelMediaRelayConfiguration { /// @nodoc const ChannelMediaRelayConfiguration( {this.srcInfo, this.destInfos, this.destCount}); - /// The information of the source channel. See ChannelMediainfo. + /// The information of the source channel. See ChannelMediaInfo . It contains the following members:channelName: The name of the source channel. The default value is NULL, which means the SDK applies the name of the current channel.token: The token for joining the source channel. This token is generated with the channelName and uid you set in srcInfo.If you have not enabled the App Certificate, set this parameter as the default value NULL, which means the SDK applies the App ID.If you have enabled the App Certificate, you must use the token generated with the channelName and uid, and the uid must be set as 0.uid: The unique user ID to identify the relay stream in the source channel. Agora recommends leaving the default value of 0 unchanged. @JsonKey(name: 'srcInfo') final ChannelMediaInfo? srcInfo; - /// The information of the destination channel. See ChannelMediainfo. + /// The information of the target channel. See ChannelMediaInfo . It contains the following members:channelName: The name of the target channel.token: The token for joining the target channel. It is generated with the channelName and uid you set in destInfos.If you have not enabled the App Certificate, set this parameter as the default value NULL, which means the SDK applies the App ID.If you have enabled the App Certificate, you must use the token generated with the channelName and uid.If the token of any target channel expires, the whole media relay stops; hence Agora recommends that you specify the same expiration time for the tokens of all the target channels.uid: The unique user ID to identify the relay stream in the target channel. The value ranges from 0 to (2^32-1). To avoid user ID conflicts, this user ID must be different from any other user ID in the target channel. The default value is 0, which means the SDK generates a random user ID. @JsonKey(name: 'destInfos') final List<ChannelMediaInfo>? destInfos; - /// The number of destination channels. The default value is 0, and the value range is from 0 to 4. Ensure that the value of this parameter corresponds to the number of ChannelMediaInfo structs you define in destInfo. + /// The number of target channels. The default value is 0, and the value range is from 0 to 4. Ensure that the value of this parameter corresponds to the number of ChannelMediaInfo structs you define in destInfo. @JsonKey(name: 'destCount') final int? destCount; @@ -5509,7 +5652,7 @@ class UplinkNetworkInfo { /// @nodoc const UplinkNetworkInfo({this.videoEncoderTargetBitrateBps}); - /// @nodoc + /// The target video encoder bitrate (bps). @JsonKey(name: 'video_encoder_target_bitrate_bps') final int? videoEncoderTargetBitrateBps; @@ -5858,7 +6001,8 @@ class EchoTestConfiguration { this.enableAudio, this.enableVideo, this.token, - this.channelId}); + this.channelId, + this.intervalInSeconds}); /// The view used to render the local user's video. 
This parameter is only applicable to scenarios testing video devices, that is, when enableVideo is true. @JsonKey(name: 'view') final int? view; @@ -5872,7 +6016,7 @@ class EchoTestConfiguration { @JsonKey(name: 'enableVideo') final bool? enableVideo; - /// @nodoc + /// The token used to secure the audio and video call loop test. If you do not enable App Certificate in Agora Console, you do not need to pass a value in this parameter; if you have enabled App Certificate in Agora Console, you must pass a token in this parameter; the uid used when you generate the token must be 0xFFFFFFFF, and the channel name used must be the channel name that identifies each audio and video call loop tested. For server-side token generation, see . @JsonKey(name: 'token') final String? token; @@ -5880,6 +6024,10 @@ @JsonKey(name: 'channelId') final String? channelId; + /// The time interval (s) between when you start the call and when the recording plays back. The value range is [2, 10], and the default value is 2. + @JsonKey(name: 'intervalInSeconds') + final int? intervalInSeconds; + /// @nodoc factory EchoTestConfiguration.fromJson(Map<String, dynamic> json) => _$EchoTestConfigurationFromJson(json); @@ -6073,6 +6221,127 @@ class ScreenCaptureParameters2 { Map<String, dynamic> toJson() => _$ScreenCaptureParameters2ToJson(this); } +/// The rendering state of the media frame. +@JsonEnum(alwaysCreate: true) +enum MediaTraceEvent { + /// 0: The video frame has been rendered. + @JsonValue(0) + mediaTraceEventVideoRendered, + + /// 1: The video frame has been decoded. + @JsonValue(1) + mediaTraceEventVideoDecoded, +} + +/// @nodoc +extension MediaTraceEventExt on MediaTraceEvent { + /// @nodoc + static MediaTraceEvent fromValue(int value) { + return $enumDecode(_$MediaTraceEventEnumMap, value); + } + + /// @nodoc + int value() { + return _$MediaTraceEventEnumMap[this]!; + } +} + +/// Indicators during video frame rendering progress. +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class VideoRenderingTracingInfo { + /// @nodoc + const VideoRenderingTracingInfo( + {this.elapsedTime, + this.start2JoinChannel, + this.join2JoinSuccess, + this.joinSuccess2RemoteJoined, + this.remoteJoined2SetView, + this.remoteJoined2UnmuteVideo, + this.remoteJoined2PacketReceived}); + + /// The time interval from calling the startMediaRenderingTracing method to the SDK triggering the onVideoRenderingTracingResult callback. The unit is milliseconds. Agora recommends you call startMediaRenderingTracing before joining a channel. + @JsonKey(name: 'elapsedTime') + final int? elapsedTime; + + /// The time interval from calling startMediaRenderingTracing to calling joinChannel . The unit is milliseconds. A negative number means to call joinChannel after calling startMediaRenderingTracing. + @JsonKey(name: 'start2JoinChannel') + final int? start2JoinChannel; + + /// Time interval from calling joinChannel to successfully joining the channel. The unit is milliseconds. + @JsonKey(name: 'join2JoinSuccess') + final int? join2JoinSuccess; + + /// If the local user calls startMediaRenderingTracing before successfully joining the channel, this value is the time interval from the local user successfully joining the channel to the remote user joining the channel. The unit is milliseconds.If the local user calls startMediaRenderingTracing after successfully joining the channel, the value is the time interval from calling startMediaRenderingTracing to when the remote user joins the channel. 
The unit is milliseconds.If the local user calls startMediaRenderingTracing after the remote user joins the channel, the value is 0 and meaningless.In order to reduce the time of rendering the first frame for remote users, Agora recommends that the local user joins the channel when the remote user is in the channel to reduce this value. + @JsonKey(name: 'joinSuccess2RemoteJoined') + final int? joinSuccess2RemoteJoined; + + /// If the local user calls startMediaRenderingTracing before the remote user joins the channel, this value is the time interval from when the remote user joins the channel to when the local user sets the remote view. The unit is milliseconds.If the local user calls startMediaRenderingTracing after the remote user joins the channel, this value is the time interval from calling startMediaRenderingTracing to setting the remote view. The unit is milliseconds.If the local user calls startMediaRenderingTracing after setting the remote view, the value is 0 and has no effect.In order to reduce the time of rendering the first frame for remote users, Agora recommends that the local user sets the remote view before the remote user joins the channel, or sets the remote view immediately after the remote user joins the channel to reduce this value. + @JsonKey(name: 'remoteJoined2SetView') + final int? remoteJoined2SetView; + + /// If the local user calls startMediaRenderingTracing before the remote user joins the channel, this value is the time interval from the remote user joining the channel to subscribing to the remote video stream. The unit is milliseconds.If the local user calls startMediaRenderingTracing after the remote user joins the channel, this value is the time interval from calling startMediaRenderingTracing to subscribing to the remote video stream. The unit is milliseconds.If the local user calls startMediaRenderingTracing after subscribing to the remote video stream, the value is 0 and has no effect.In order to reduce the time of rendering the first frame for remote users, Agora recommends that after the remote user joins the channel, the local user immediately subscribes to the remote video stream to reduce this value. + @JsonKey(name: 'remoteJoined2UnmuteVideo') + final int? remoteJoined2UnmuteVideo; + + /// If the local user calls startMediaRenderingTracing before the remote user joins the channel, this value is the time interval from when the remote user joins the channel to when the local user receives the remote video stream. The unit is milliseconds.If the local user calls startMediaRenderingTracing after the remote user joins the channel, this value is the time interval from calling startMediaRenderingTracing to receiving the remote video stream. The unit is milliseconds.If the local user calls startMediaRenderingTracing after receiving the remote video stream, the value is 0 and has no effect.In order to reduce the time of rendering the first frame for remote users, Agora recommends that the remote user publishes video streams immediately after joining the channel, and the local user immediately subscribes to remote video streams to reduce this value. + @JsonKey(name: 'remoteJoined2PacketReceived') + final int? 
remoteJoined2PacketReceived; + + /// @nodoc + factory VideoRenderingTracingInfo.fromJson(Map json) => + _$VideoRenderingTracingInfoFromJson(json); + + /// @nodoc + Map toJson() => _$VideoRenderingTracingInfoToJson(this); +} + +/// @nodoc +@JsonEnum(alwaysCreate: true) +enum ConfigFetchType { + /// @nodoc + @JsonValue(1) + configFetchTypeInitialize, + + /// @nodoc + @JsonValue(2) + configFetchTypeJoinChannel, +} + +/// @nodoc +extension ConfigFetchTypeExt on ConfigFetchType { + /// @nodoc + static ConfigFetchType fromValue(int value) { + return $enumDecode(_$ConfigFetchTypeEnumMap, value); + } + + /// @nodoc + int value() { + return _$ConfigFetchTypeEnumMap[this]!; + } +} + +/// @nodoc +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class RecorderStreamInfo { + /// @nodoc + const RecorderStreamInfo({this.channelId, this.uid}); + + /// @nodoc + @JsonKey(name: 'channelId') + final String? channelId; + + /// @nodoc + @JsonKey(name: 'uid') + final int? uid; + + /// @nodoc + factory RecorderStreamInfo.fromJson(Map json) => + _$RecorderStreamInfoFromJson(json); + + /// @nodoc + Map toJson() => _$RecorderStreamInfoToJson(this); +} + /// The spatial audio parameters. @JsonSerializable(explicitToJson: true, includeIfNull: false) class SpatialAudioParams { @@ -6087,35 +6356,35 @@ class SpatialAudioParams { this.speakerAttenuation, this.enableDoppler}); - /// The azimuth angle of the remote user or media player relative to the local user. The value range is [0,360], and the unit is degrees, The values are as follows:0: (Default) 0 degrees, which means directly in front on the horizontal plane.90: 90 degrees, which means directly to the left on the horizontal plane.180: 180 degrees, which means directly behind on the horizontal plane.270: 270 degrees, which means directly to the right on the horizontal plane.360: 360 degrees, which means directly in front on the horizontal plane. + /// @nodoc @JsonKey(name: 'speaker_azimuth') final double? speakerAzimuth; - /// The elevation angle of the remote user or media player relative to the local user. The value range is [-90,90], and the unit is degrees, The values are as follows:0: (Default) 0 degrees, which means that the horizontal plane is not rotated.-90: -90 degrees, which means that the horizontal plane is rotated 90 degrees downwards.90: 90 degrees, which means that the horizontal plane is rotated 90 degrees upwards. + /// @nodoc @JsonKey(name: 'speaker_elevation') final double? speakerElevation; - /// The distance of the remote user or media player relative to the local user. The value range is [1,50], and the unit is meters. The default value is 1 meter. + /// @nodoc @JsonKey(name: 'speaker_distance') final double? speakerDistance; - /// The orientation of the remote user or media player relative to the local user. The value range is [0,180], and the unit is degrees, The values are as follows:0: (Default) 0 degrees, which means that the sound source and listener face the same direction.180: 180 degrees, which means that the sound source and listener face each other. + /// @nodoc @JsonKey(name: 'speaker_orientation') final int? speakerOrientation; - /// Whether to enable audio blurring:true: Enable audio blurring.false: (Default) Disable audio blurring. + /// @nodoc @JsonKey(name: 'enable_blur') final bool? 
enableBlur; - /// Whether to enable air absorption, that is, to simulate the sound attenuation effect of sound transmitting in the air; under a certain transmission distance, the attenuation speed of high-frequency sound is fast, and the attenuation speed of low-frequency sound is slow.true: (Default) Enable air absorption. Make sure that the value of speaker_attenuation is not 0; otherwise, this setting does not take effect.false: Disable air absorption. + /// @nodoc @JsonKey(name: 'enable_air_absorb') final bool? enableAirAbsorb; - /// The sound attenuation coefficient of the remote user or media player. The value range is [0,1]. The values are as follows:0: Broadcast mode, where the volume and timbre are not attenuated with distance, and the volume and timbre heard by local users do not change regardless of distance.(0,0.5): Weak attenuation mode, where the volume and timbre only have a weak attenuation during the propagation, and the sound can travel farther than that in a real environment. enable_air_absorb needs to be enabled at the same time. 0.5: (Default) Simulates the attenuation of the volume in the real environment; the effect is equivalent to not setting the speaker_attenuation parameter.(0.5,1]: Strong attenuation mode, where volume and timbre attenuate rapidly during the propagation. enable_air_absorb needs to be enabled at the same time. + /// @nodoc @JsonKey(name: 'speaker_attenuation') final double? speakerAttenuation; - /// Whether to enable the Doppler effect: When there is a relative displacement between the sound source and the receiver of the sound source, the tone heard by the receiver changes.true: Enable the Doppler effect.false: (Default) Disable the Doppler effect.This parameter is suitable for scenarios where the sound source is moving at high speed (for example, racing games). It is not recommended for common audio and video interactive scenarios (for example, voice chat, cohosting, or online KTV).When this parameter is enabled, Agora recommends that you set a regular period (such as 30 ms), and then call the updatePlayerPositionInfo , updateSelfPosition , and updateRemotePosition methods to continuously update the relative distance between the sound source and the receiver. The following factors can cause the Doppler effect to be unpredictable or the sound to be jittery: the period of updating the distance is too long, the updating period is irregular, or the distance information is lost due to network packet loss or delay. + /// @nodoc @JsonKey(name: 'enable_doppler') final bool? 
enableDoppler; diff --git a/lib/src/agora_base.g.dart b/lib/src/agora_base.g.dart index e9c9252e1..7790ae70d 100644 --- a/lib/src/agora_base.g.dart +++ b/lib/src/agora_base.g.dart @@ -283,6 +283,26 @@ const _$CompressionPreferenceEnumMap = { CompressionPreference.preferQuality: 1, }; +CodecCapInfo _$CodecCapInfoFromJson(Map json) => CodecCapInfo( + codecType: + $enumDecodeNullable(_$VideoCodecTypeEnumMap, json['codecType']), + codecCapMask: json['codecCapMask'] as int?, + ); + +Map _$CodecCapInfoToJson(CodecCapInfo instance) { + final val = {}; + + void writeNotNull(String key, dynamic value) { + if (value != null) { + val[key] = value; + } + } + + writeNotNull('codecType', _$VideoCodecTypeEnumMap[instance.codecType]); + writeNotNull('codecCapMask', instance.codecCapMask); + return val; +} + VideoEncoderConfiguration _$VideoEncoderConfigurationFromJson( Map json) => VideoEncoderConfiguration( @@ -606,52 +626,6 @@ const _$AudienceLatencyLevelTypeEnumMap = { AudienceLatencyLevelType.audienceLatencyLevelUltraLowLatency: 2, }; -RemoteAudioStats _$RemoteAudioStatsFromJson(Map json) => - RemoteAudioStats( - uid: json['uid'] as int?, - quality: json['quality'] as int?, - networkTransportDelay: json['networkTransportDelay'] as int?, - jitterBufferDelay: json['jitterBufferDelay'] as int?, - audioLossRate: json['audioLossRate'] as int?, - numChannels: json['numChannels'] as int?, - receivedSampleRate: json['receivedSampleRate'] as int?, - receivedBitrate: json['receivedBitrate'] as int?, - totalFrozenTime: json['totalFrozenTime'] as int?, - frozenRate: json['frozenRate'] as int?, - mosValue: json['mosValue'] as int?, - totalActiveTime: json['totalActiveTime'] as int?, - publishDuration: json['publishDuration'] as int?, - qoeQuality: json['qoeQuality'] as int?, - qualityChangedReason: json['qualityChangedReason'] as int?, - ); - -Map _$RemoteAudioStatsToJson(RemoteAudioStats instance) { - final val = {}; - - void writeNotNull(String key, dynamic value) { - if (value != null) { - val[key] = value; - } - } - - writeNotNull('uid', instance.uid); - writeNotNull('quality', instance.quality); - writeNotNull('networkTransportDelay', instance.networkTransportDelay); - writeNotNull('jitterBufferDelay', instance.jitterBufferDelay); - writeNotNull('audioLossRate', instance.audioLossRate); - writeNotNull('numChannels', instance.numChannels); - writeNotNull('receivedSampleRate', instance.receivedSampleRate); - writeNotNull('receivedBitrate', instance.receivedBitrate); - writeNotNull('totalFrozenTime', instance.totalFrozenTime); - writeNotNull('frozenRate', instance.frozenRate); - writeNotNull('mosValue', instance.mosValue); - writeNotNull('totalActiveTime', instance.totalActiveTime); - writeNotNull('publishDuration', instance.publishDuration); - writeNotNull('qoeQuality', instance.qoeQuality); - writeNotNull('qualityChangedReason', instance.qualityChangedReason); - return val; -} - VideoFormat _$VideoFormatFromJson(Map json) => VideoFormat( width: json['width'] as int?, height: json['height'] as int?, @@ -724,6 +698,10 @@ const _$VideoSourceTypeEnumMap = { VideoSourceType.videoSourceRtcImageGif: 8, VideoSourceType.videoSourceRemote: 9, VideoSourceType.videoSourceTranscoded: 10, + VideoSourceType.videoSourceCameraThird: 11, + VideoSourceType.videoSourceCameraFourth: 12, + VideoSourceType.videoSourceScreenThird: 13, + VideoSourceType.videoSourceScreenFourth: 14, VideoSourceType.videoSourceUnknown: 100, }; @@ -1007,9 +985,10 @@ TranscodingVideoStream _$TranscodingVideoStreamFromJson( Map json) => 
TranscodingVideoStream( sourceType: - $enumDecodeNullable(_$MediaSourceTypeEnumMap, json['sourceType']), + $enumDecodeNullable(_$VideoSourceTypeEnumMap, json['sourceType']), remoteUserUid: json['remoteUserUid'] as int?, imageUrl: json['imageUrl'] as String?, + mediaPlayerId: json['mediaPlayerId'] as int?, x: json['x'] as int?, y: json['y'] as int?, width: json['width'] as int?, @@ -1029,9 +1008,10 @@ Map _$TranscodingVideoStreamToJson( } } - writeNotNull('sourceType', _$MediaSourceTypeEnumMap[instance.sourceType]); + writeNotNull('sourceType', _$VideoSourceTypeEnumMap[instance.sourceType]); writeNotNull('remoteUserUid', instance.remoteUserUid); writeNotNull('imageUrl', instance.imageUrl); + writeNotNull('mediaPlayerId', instance.mediaPlayerId); writeNotNull('x', instance.x); writeNotNull('y', instance.y); writeNotNull('width', instance.width); @@ -1042,28 +1022,11 @@ Map _$TranscodingVideoStreamToJson( return val; } -const _$MediaSourceTypeEnumMap = { - MediaSourceType.audioPlayoutSource: 0, - MediaSourceType.audioRecordingSource: 1, - MediaSourceType.primaryCameraSource: 2, - MediaSourceType.secondaryCameraSource: 3, - MediaSourceType.primaryScreenSource: 4, - MediaSourceType.secondaryScreenSource: 5, - MediaSourceType.customVideoSource: 6, - MediaSourceType.mediaPlayerSource: 7, - MediaSourceType.rtcImagePngSource: 8, - MediaSourceType.rtcImageJpegSource: 9, - MediaSourceType.rtcImageGifSource: 10, - MediaSourceType.remoteVideoSource: 11, - MediaSourceType.transcodedVideoSource: 12, - MediaSourceType.unknownMediaSource: 100, -}; - LocalTranscoderConfiguration _$LocalTranscoderConfigurationFromJson( Map json) => LocalTranscoderConfiguration( streamCount: json['streamCount'] as int?, - videoInputStreams: (json['VideoInputStreams'] as List?) + videoInputStreams: (json['videoInputStreams'] as List?) ?.map( (e) => TranscodingVideoStream.fromJson(e as Map)) .toList(), @@ -1085,7 +1048,7 @@ Map _$LocalTranscoderConfigurationToJson( } writeNotNull('streamCount', instance.streamCount); - writeNotNull('VideoInputStreams', + writeNotNull('videoInputStreams', instance.videoInputStreams?.map((e) => e.toJson()).toList()); writeNotNull( 'videoOutputConfiguration', instance.videoOutputConfiguration?.toJson()); @@ -1214,6 +1177,7 @@ VideoCanvas _$VideoCanvasFromJson(Map json) => VideoCanvas( cropArea: json['cropArea'] == null ? 
null : Rectangle.fromJson(json['cropArea'] as Map), + enableAlphaMask: json['enableAlphaMask'] as bool?, ); Map _$VideoCanvasToJson(VideoCanvas instance) { @@ -1233,6 +1197,7 @@ Map _$VideoCanvasToJson(VideoCanvas instance) { writeNotNull('sourceType', _$VideoSourceTypeEnumMap[instance.sourceType]); writeNotNull('mediaPlayerId', instance.mediaPlayerId); writeNotNull('cropArea', instance.cropArea?.toJson()); + writeNotNull('enableAlphaMask', instance.enableAlphaMask); return val; } @@ -1398,9 +1363,11 @@ Map _$VirtualBackgroundSourceToJson( } const _$BackgroundSourceTypeEnumMap = { + BackgroundSourceType.backgroundNone: 0, BackgroundSourceType.backgroundColor: 1, BackgroundSourceType.backgroundImg: 2, BackgroundSourceType.backgroundBlur: 3, + BackgroundSourceType.backgroundVideo: 4, }; const _$BackgroundBlurDegreeEnumMap = { @@ -1436,6 +1403,24 @@ const _$SegModelTypeEnumMap = { SegModelType.segModelGreen: 2, }; +AudioTrackConfig _$AudioTrackConfigFromJson(Map json) => + AudioTrackConfig( + enableLocalPlayback: json['enableLocalPlayback'] as bool?, + ); + +Map _$AudioTrackConfigToJson(AudioTrackConfig instance) { + final val = {}; + + void writeNotNull(String key, dynamic value) { + if (value != null) { + val[key] = value; + } + } + + writeNotNull('enableLocalPlayback', instance.enableLocalPlayback); + return val; +} + ScreenCaptureParameters _$ScreenCaptureParametersFromJson( Map json) => ScreenCaptureParameters( @@ -1751,6 +1736,7 @@ EchoTestConfiguration _$EchoTestConfigurationFromJson( enableVideo: json['enableVideo'] as bool?, token: json['token'] as String?, channelId: json['channelId'] as String?, + intervalInSeconds: json['intervalInSeconds'] as int?, ); Map _$EchoTestConfigurationToJson( @@ -1768,6 +1754,7 @@ Map _$EchoTestConfigurationToJson( writeNotNull('enableVideo', instance.enableVideo); writeNotNull('token', instance.token); writeNotNull('channelId', instance.channelId); + writeNotNull('intervalInSeconds', instance.intervalInSeconds); return val; } @@ -1882,6 +1869,59 @@ Map _$ScreenCaptureParameters2ToJson( return val; } +VideoRenderingTracingInfo _$VideoRenderingTracingInfoFromJson( + Map json) => + VideoRenderingTracingInfo( + elapsedTime: json['elapsedTime'] as int?, + start2JoinChannel: json['start2JoinChannel'] as int?, + join2JoinSuccess: json['join2JoinSuccess'] as int?, + joinSuccess2RemoteJoined: json['joinSuccess2RemoteJoined'] as int?, + remoteJoined2SetView: json['remoteJoined2SetView'] as int?, + remoteJoined2UnmuteVideo: json['remoteJoined2UnmuteVideo'] as int?, + remoteJoined2PacketReceived: json['remoteJoined2PacketReceived'] as int?, + ); + +Map _$VideoRenderingTracingInfoToJson( + VideoRenderingTracingInfo instance) { + final val = {}; + + void writeNotNull(String key, dynamic value) { + if (value != null) { + val[key] = value; + } + } + + writeNotNull('elapsedTime', instance.elapsedTime); + writeNotNull('start2JoinChannel', instance.start2JoinChannel); + writeNotNull('join2JoinSuccess', instance.join2JoinSuccess); + writeNotNull('joinSuccess2RemoteJoined', instance.joinSuccess2RemoteJoined); + writeNotNull('remoteJoined2SetView', instance.remoteJoined2SetView); + writeNotNull('remoteJoined2UnmuteVideo', instance.remoteJoined2UnmuteVideo); + writeNotNull( + 'remoteJoined2PacketReceived', instance.remoteJoined2PacketReceived); + return val; +} + +RecorderStreamInfo _$RecorderStreamInfoFromJson(Map json) => + RecorderStreamInfo( + channelId: json['channelId'] as String?, + uid: json['uid'] as int?, + ); + +Map _$RecorderStreamInfoToJson(RecorderStreamInfo 
instance) { + final val = {}; + + void writeNotNull(String key, dynamic value) { + if (value != null) { + val[key] = value; + } + } + + writeNotNull('channelId', instance.channelId); + writeNotNull('uid', instance.uid); + return val; +} + SpatialAudioParams _$SpatialAudioParamsFromJson(Map json) => SpatialAudioParams( speakerAzimuth: (json['speaker_azimuth'] as num?)?.toDouble(), @@ -2062,10 +2102,10 @@ const _$InterfaceIdTypeEnumMap = { InterfaceIdType.agoraIidMediaEngineRegulator: 9, InterfaceIdType.agoraIidCloudSpatialAudio: 10, InterfaceIdType.agoraIidLocalSpatialAudio: 11, - InterfaceIdType.agoraIidMediaRecorder: 12, InterfaceIdType.agoraIidStateSync: 13, InterfaceIdType.agoraIidMetachatService: 14, InterfaceIdType.agoraIidMusicContentCenter: 15, + InterfaceIdType.agoraIidH265Transcoder: 16, }; const _$QualityTypeEnumMap = { @@ -2096,11 +2136,17 @@ const _$FrameRateEnumMap = { }; const _$FrameWidthEnumMap = { - FrameWidth.frameWidth640: 640, + FrameWidth.frameWidth960: 960, }; const _$FrameHeightEnumMap = { - FrameHeight.frameHeight360: 360, + FrameHeight.frameHeight540: 540, +}; + +const _$ScreenCaptureFramerateCapabilityEnumMap = { + ScreenCaptureFramerateCapability.screenCaptureFramerateCapability15Fps: 0, + ScreenCaptureFramerateCapability.screenCaptureFramerateCapability30Fps: 1, + ScreenCaptureFramerateCapability.screenCaptureFramerateCapability60Fps: 2, }; const _$H264PacketizeModeEnumMap = { @@ -2108,6 +2154,14 @@ const _$H264PacketizeModeEnumMap = { H264PacketizeMode.singleNalUnit: 1, }; +const _$CodecCapMaskEnumMap = { + CodecCapMask.codecCapMaskNone: 0, + CodecCapMask.codecCapMaskHwDec: 1, + CodecCapMask.codecCapMaskHwEnc: 2, + CodecCapMask.codecCapMaskSwDec: 4, + CodecCapMask.codecCapMaskSwEnc: 8, +}; + const _$SimulcastStreamModeEnumMap = { SimulcastStreamMode.autoSimulcastStream: -1, SimulcastStreamMode.disableSimulcastStream: 0, @@ -2138,6 +2192,12 @@ const _$ExperiencePoorReasonEnumMap = { ExperiencePoorReason.wifiBluetoothCoexist: 8, }; +const _$AudioAinsModeEnumMap = { + AudioAinsMode.ainsModeBalanced: 0, + AudioAinsMode.ainsModeAggressive: 1, + AudioAinsMode.ainsModeUltralowlatency: 2, +}; + const _$AudioProfileTypeEnumMap = { AudioProfileType.audioProfileDefault: 0, AudioProfileType.audioProfileSpeechStandard: 1, @@ -2165,6 +2225,11 @@ const _$ScreenScenarioTypeEnumMap = { ScreenScenarioType.screenScenarioRdc: 4, }; +const _$VideoApplicationScenarioTypeEnumMap = { + VideoApplicationScenarioType.applicationScenarioGeneral: 0, + VideoApplicationScenarioType.applicationScenarioMeeting: 1, +}; + const _$CaptureBrightnessLevelTypeEnumMap = { CaptureBrightnessLevelType.captureBrightnessLevelInvalid: -1, CaptureBrightnessLevelType.captureBrightnessLevelNormal: 0, @@ -2263,6 +2328,7 @@ const _$RemoteVideoStateReasonEnumMap = { RemoteVideoStateReason.remoteVideoStateReasonVideoStreamTypeChangeToLow: 10, RemoteVideoStateReason.remoteVideoStateReasonVideoStreamTypeChangeToHigh: 11, RemoteVideoStateReason.remoteVideoStateReasonSdkInBackground: 12, + RemoteVideoStateReason.remoteVideoStateReasonCodecNotSupport: 13, }; const _$RemoteUserStateEnumMap = { @@ -2316,6 +2382,16 @@ const _$ConnectionStateTypeEnumMap = { ConnectionStateType.connectionStateFailed: 5, }; +const _$VideoTranscoderErrorEnumMap = { + VideoTranscoderError.vtErrOk: 0, + VideoTranscoderError.vtErrVideoSourceNotReady: 1, + VideoTranscoderError.vtErrInvalidVideoSourceType: 2, + VideoTranscoderError.vtErrInvalidImagePath: 3, + VideoTranscoderError.vtErrUnsupportImageFormat: 4, + 
VideoTranscoderError.vtErrInvalidLayout: 5, + VideoTranscoderError.vtErrInternal: 20, +}; + const _$ConnectionChangedReasonTypeEnumMap = { ConnectionChangedReasonType.connectionChangedConnecting: 0, ConnectionChangedReasonType.connectionChangedJoinSuccess: 1, @@ -2338,7 +2414,7 @@ const _$ConnectionChangedReasonTypeEnumMap = { ConnectionChangedReasonType.connectionChangedClientIpAddressChangedByUser: 18, ConnectionChangedReasonType.connectionChangedSameUidLogin: 19, ConnectionChangedReasonType.connectionChangedTooManyBroadcasters: 20, - ConnectionChangedReasonType.connectionChangedLicenseVerifyFailed: 21, + ConnectionChangedReasonType.connectionChangedLicenseValidationFailure: 21, }; const _$ClientRoleChangeFailedReasonEnumMap = { @@ -2370,6 +2446,12 @@ const _$NetworkTypeEnumMap = { NetworkType.networkTypeMobile4g: 5, }; +const _$AudioTrackTypeEnumMap = { + AudioTrackType.audioTrackInvalid: -1, + AudioTrackType.audioTrackMixable: 0, + AudioTrackType.audioTrackDirect: 1, +}; + const _$VoiceBeautifierPresetEnumMap = { VoiceBeautifierPreset.voiceBeautifierOff: 0, VoiceBeautifierPreset.chatBeautifierMagnetic: 16843008, @@ -2416,6 +2498,17 @@ const _$VoiceConversionPresetEnumMap = { VoiceConversionPreset.voiceChangerSweet: 50397696, VoiceConversionPreset.voiceChangerSolid: 50397952, VoiceConversionPreset.voiceChangerBass: 50398208, + VoiceConversionPreset.voiceChangerCartoon: 50398464, + VoiceConversionPreset.voiceChangerChildlike: 50398720, + VoiceConversionPreset.voiceChangerPhoneOperator: 50398976, + VoiceConversionPreset.voiceChangerMonster: 50399232, + VoiceConversionPreset.voiceChangerTransformers: 50399488, + VoiceConversionPreset.voiceChangerGroot: 50399744, + VoiceConversionPreset.voiceChangerDarthVader: 50400000, + VoiceConversionPreset.voiceChangerIronLady: 50400256, + VoiceConversionPreset.voiceChangerShinChan: 50400512, + VoiceConversionPreset.voiceChangerGirlishMan: 50400768, + VoiceConversionPreset.voiceChangerChipmunk: 50401024, }; const _$HeadphoneEqualizerPresetEnumMap = { @@ -2535,3 +2628,13 @@ const _$ThreadPriorityTypeEnumMap = { ThreadPriorityType.highest: 4, ThreadPriorityType.critical: 5, }; + +const _$MediaTraceEventEnumMap = { + MediaTraceEvent.mediaTraceEventVideoRendered: 0, + MediaTraceEvent.mediaTraceEventVideoDecoded: 1, +}; + +const _$ConfigFetchTypeEnumMap = { + ConfigFetchType.configFetchTypeInitialize: 1, + ConfigFetchType.configFetchTypeJoinChannel: 2, +}; diff --git a/lib/src/agora_log.dart b/lib/src/agora_log.dart index 4f1e6559c..92fb71d29 100644 --- a/lib/src/agora_log.dart +++ b/lib/src/agora_log.dart @@ -106,7 +106,7 @@ class LogConfig { @JsonKey(name: 'filePath') final String? filePath; - /// The size (KB) of an agorasdk.log file. The value range is [128,1024]. The default value is 1,024 KB. If you set fileSizeInKByte to a value lower than 128 KB, the SDK adjusts it to 128 KB. If you set fileSizeInKBytes to a value higher than 1,024 KB, the SDK adjusts it to 1,024 KB. + /// The size (KB) of an agorasdk.log file. The value range is [128,1024]. The default value is 1,024 KB. If you set fileSizeInKByte smaller than 128 KB, the SDK automatically adjusts it to 128 KB; if you set fileSizeInKByte greater than 1,024 KB, the SDK automatically adjusts it to 1,024 KB. @JsonKey(name: 'fileSizeInKB') final int? 
fileSizeInKB; diff --git a/lib/src/agora_media_base.dart b/lib/src/agora_media_base.dart index 85027da47..86e22e465 100644 --- a/lib/src/agora_media_base.dart +++ b/lib/src/agora_media_base.dart @@ -1,12 +1,104 @@ import 'package:agora_rtc_engine/src/binding_forward_export.dart'; part 'agora_media_base.g.dart'; +/// @nodoc +const invalidTrackId = 0xffffffff; + /// @nodoc const defaultConnectionId = 0; /// @nodoc const dummyConnectionId = 4294967295; +/// The type of the video source. +@JsonEnum(alwaysCreate: true) +enum VideoSourceType { + /// 0: (Default) The primary camera. + @JsonValue(0) + videoSourceCameraPrimary, + + /// 0: (Default) The primary camera. + @JsonValue(0) + videoSourceCamera, + + /// 1: The secondary camera. + @JsonValue(1) + videoSourceCameraSecondary, + + /// 2: The primary screen. + @JsonValue(2) + videoSourceScreenPrimary, + + /// 2: The primary screen. + @JsonValue(2) + videoSourceScreen, + + /// 3: The secondary screen. + @JsonValue(3) + videoSourceScreenSecondary, + + /// 4: A custom video source. + @JsonValue(4) + videoSourceCustom, + + /// 5: The media player. + @JsonValue(5) + videoSourceMediaPlayer, + + /// 6: One PNG image. + @JsonValue(6) + videoSourceRtcImagePng, + + /// 7: One JPEG image. + @JsonValue(7) + videoSourceRtcImageJpeg, + + /// 8: One GIF image. + @JsonValue(8) + videoSourceRtcImageGif, + + /// 9: One remote video acquired by the network. + @JsonValue(9) + videoSourceRemote, + + /// 10: One transcoded video source. + @JsonValue(10) + videoSourceTranscoded, + + /// 11: (For Windows and macOS only) The third camera. + @JsonValue(11) + videoSourceCameraThird, + + /// 12: (For Windows and macOS only) The fourth camera. + @JsonValue(12) + videoSourceCameraFourth, + + /// 13: (For Windows and macOS only) The third screen. + @JsonValue(13) + videoSourceScreenThird, + + /// 14: (For Windows and macOS only) The fourth screen. + @JsonValue(14) + videoSourceScreenFourth, + + /// 100: An unknown video source. + @JsonValue(100) + videoSourceUnknown, +} + +/// @nodoc +extension VideoSourceTypeExt on VideoSourceType { + /// @nodoc + static VideoSourceType fromValue(int value) { + return $enumDecode(_$VideoSourceTypeEnumMap, value); + } + + /// @nodoc + int value() { + return _$VideoSourceTypeEnumMap[this]!; + } +} + /// The type of the audio route. @JsonEnum(alwaysCreate: true) enum AudioRoute { @@ -398,7 +490,7 @@ class AudioPcmFrame { @JsonKey(name: 'bytes_per_sample') final BytesPerSample? bytesPerSample; - /// The video frame. + /// The audio frame. @JsonKey(name: 'data_') final List? data; @@ -537,6 +629,35 @@ extension RenderModeTypeExt on RenderModeType { } } +/// @nodoc +@JsonEnum(alwaysCreate: true) +enum CameraVideoSourceType { + /// @nodoc + @JsonValue(0) + cameraSourceFront, + + /// @nodoc + @JsonValue(1) + cameraSourceBack, + + /// @nodoc + @JsonValue(2) + videoSourceUnspecified, +} + +/// @nodoc +extension CameraVideoSourceTypeExt on CameraVideoSourceType { + /// @nodoc + static CameraVideoSourceType fromValue(int value) { + return $enumDecode(_$CameraVideoSourceTypeEnumMap, value); + } + + /// @nodoc + int value() { + return _$CameraVideoSourceTypeEnumMap[this]!; + } +} + /// The external video frame. @JsonSerializable(explicitToJson: true, includeIfNull: false) class ExternalVideoFrame { @@ -557,7 +678,8 @@ class ExternalVideoFrame { this.textureId, this.matrix, this.metadataBuffer, - this.metadataSize}); + this.metadataSize, + this.alphaBuffer}); /// The video type. See VideoBufferType . 
@JsonKey(name: 'type') @@ -603,15 +725,15 @@ class ExternalVideoFrame { @JsonKey(name: 'timestamp') final int? timestamp; - /// This parameter only applies to video data in Texture format. Texture ID of the frame. + /// This parameter only applies to video data in Texture format. Texture ID of the video frame. @JsonKey(name: 'eglType') final EglContextType? eglType; - /// This parameter only applies to video data in Texture format. Incoming 4 x 4 transformational matrix. The typical value is a unit matrix. + /// This parameter only applies to video data in Texture format. Incoming 4 × 4 transformational matrix. The typical value is a unit matrix. @JsonKey(name: 'textureId') final int? textureId; - /// This parameter only applies to video data in Texture format. Incoming 4 x 4 transformational matrix. The typical value is a unit matrix. + /// This parameter only applies to video data in Texture format. Incoming 4 × 4 transformational matrix. The typical value is a unit matrix. @JsonKey(name: 'matrix') final List? matrix; @@ -623,6 +745,10 @@ class ExternalVideoFrame { @JsonKey(name: 'metadata_size') final int? metadataSize; + /// @nodoc + @JsonKey(name: 'alphaBuffer', ignore: true) + final Uint8List? alphaBuffer; + /// @nodoc factory ExternalVideoFrame.fromJson(Map json) => _$ExternalVideoFrameFromJson(json); @@ -686,7 +812,7 @@ extension VideoBufferTypeExt on VideoBufferType { } /// Configurations of the video frame. -/// The video data format is YUV420. Note that the buffer provides a pointer to a pointer. This interface cannot modify the pointer of the buffer, but it can modify the content of the buffer. +/// Note that the buffer provides a pointer to a pointer. This interface cannot modify the pointer of the buffer, but it can modify the content of the buffer. @JsonSerializable(explicitToJson: true, includeIfNull: false) class VideoFrame { /// @nodoc @@ -707,7 +833,8 @@ class VideoFrame { this.metadataSize, this.textureId, this.matrix, - this.alphaBuffer}); + this.alphaBuffer, + this.pixelBuffer}); /// The pixel format. See VideoPixelFormat . @JsonKey(name: 'type') @@ -777,6 +904,10 @@ class VideoFrame { @JsonKey(name: 'alphaBuffer', ignore: true) final Uint8List? alphaBuffer; + /// @nodoc + @JsonKey(name: 'pixelBuffer', ignore: true) + final Uint8List? pixelBuffer; + /// @nodoc factory VideoFrame.fromJson(Map json) => _$VideoFrameFromJson(json); @@ -843,6 +974,21 @@ extension VideoModulePositionExt on VideoModulePosition { } } +/// This class is used to get raw PCM audio. +/// You can inherit this class and implement the onFrame callback to get raw PCM audio. +class AudioPcmFrameSink { + /// @nodoc + const AudioPcmFrameSink({ + this.onFrame, + }); + + /// Occurs each time the player receives an audio frame. + /// After registering the audio frame observer, the callback occurs every time the player receives an audio frame, reporting the detailed information of the audio frame. + /// + /// * [frame] The audio frame information. See AudioPcmFrame. + final void Function(AudioPcmFrame frame)? onFrame; +} + /// The audio frame observer. class AudioFrameObserverBase { /// @nodoc @@ -858,9 +1004,6 @@ class AudioFrameObserverBase { /// /// * [audioFrame] The raw audio data. See AudioFrame . /// * [channelId] The channel ID. - /// - /// Returns - /// Reserved for future use. final void Function(String channelId, AudioFrame audioFrame)? onRecordAudioFrame; @@ -869,9 +1012,6 @@ class AudioFrameObserverBase { /// /// * [audioFrame] The raw audio data. See AudioFrame . 
/// * [channelId] The channel ID. - /// - /// Returns - /// Reserved for future use. final void Function(String channelId, AudioFrame audioFrame)? onPlaybackAudioFrame; @@ -880,14 +1020,12 @@ class AudioFrameObserverBase { /// /// * [audioFrame] The raw audio data. See AudioFrame . /// * [channelId] The channel ID. - /// - /// Returns - /// Reserved for future use. final void Function(String channelId, AudioFrame audioFrame)? onMixedAudioFrame; /// Gets the in-ear monitoring audio frame. /// In order to ensure that the obtained in-ear audio data meets the expectations, Agora recommends that you set the in-ear monitoring audio data format as follows: After calling setEarMonitoringAudioFrameParameters to set the audio data format and registerAudioFrameObserver to register the audio frame observer object, the SDK calculates the sampling interval according to the parameters set in the methods, and triggers the onEarMonitoringAudioFrame callback according to the sampling interval.Due to the limitations of Flutter, this callback does not support sending processed audio data back to the SDK. + /// /// * [audioFrame] The raw audio data. See AudioFrame . final void Function(AudioFrame audioFrame)? onEarMonitoringAudioFrame; } @@ -935,7 +1073,7 @@ class AudioFrame { @JsonKey(name: 'samplesPerChannel') final int? samplesPerChannel; - /// The number of bytes per audio sample, which is usually 16-bit (2 bytes). + /// The number of bytes per sample, which for audio is usually 16-bit (2 bytes). @JsonKey(name: 'bytesPerSample') final BytesPerSample? bytesPerSample; @@ -947,7 +1085,7 @@ class AudioFrame { @JsonKey(name: 'samplesPerSec') final int? samplesPerSec; - /// The data buffer of the audio frame. When the audio frame uses a stereo channel, the data buffer is interleaved.The size of the data buffer is as follows: buffer = samples ×channels × bytesPerSample. + /// The data buffer of the audio frame. When the audio frame uses a stereo channel, the data buffer is interleaved.The size of the data buffer is as follows: buffer = samples × channels × bytesPerSample. @JsonKey(name: 'buffer', ignore: true) final Uint8List? buffer; @@ -1070,9 +1208,6 @@ class AudioFrameObserver extends AudioFrameObserverBase { /// * [channelId] The channel ID. /// * [uid] The user ID of the specified user. /// * [audioFrame] The raw audio data. See AudioFrame . - /// - /// Returns - /// Reserved for future use. final void Function(String channelId, int uid, AudioFrame audioFrame)? onPlaybackAudioFrameBeforeMixing; } @@ -1109,7 +1244,7 @@ class UserAudioSpectrumInfo { @JsonKey(name: 'uid') final int? uid; - /// Audio spectrum information of the remote user.See AudioSpectrumData . + /// Audio spectrum information of the remote user. See AudioSpectrumData . @JsonKey(name: 'spectrumData') final AudioSpectrumData? spectrumData; @@ -1172,75 +1307,39 @@ class VideoFrameObserver { /// @nodoc const VideoFrameObserver({ this.onCaptureVideoFrame, this.onPreEncodeVideoFrame, - this.onSecondaryCameraCaptureVideoFrame, - this.onSecondaryPreEncodeCameraVideoFrame, - this.onScreenCaptureVideoFrame, - this.onPreEncodeScreenVideoFrame, this.onMediaPlayerVideoFrame, - this.onSecondaryScreenCaptureVideoFrame, - this.onSecondaryPreEncodeScreenVideoFrame, this.onRenderVideoFrame, this.onTranscodedVideoFrame, }); /// Occurs each time the SDK receives a video frame captured by the local camera. - /// After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame.
In this callback, you can get the video data captured by the local camera. You can then pre-process the data according to your scenarios.The video data that this callback gets has not been pre-processed, and is not watermarked, cropped, rotated or beautified.If the video data type you get is RGBA, Agora does not support processing the data of the alpha channel.Due to the limitations of Flutter, this callback does not support sending processed video data back to the SDK. + /// After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data captured by the local camera. You can then pre-process the data according to your scenarios.The video data that this callback gets has not been pre-processed, and is not watermarked, cropped, rotated or beautified.If the video data type you get is RGBA, the SDK does not support processing the data of the alpha channel.Due to the limitations of Flutter, this callback does not support sending processed video data back to the SDK. /// - /// * [videoFrame] The video frame. See VideoFrame . - final void Function(VideoFrame videoFrame)? onCaptureVideoFrame; + /// * [sourceType] The type of the video source. See VideoSourceType . + /// * [videoFrame] The video frame. See VideoFrame .The default value of the video frame data format obtained through this callback is as follows: + /// Android: texture + /// iOS: cvPixelBuffer + /// macOS: YUV 420 + /// Windows: YUV 420 + final void Function(VideoSourceType sourceType, VideoFrame videoFrame)? + onCaptureVideoFrame; /// Occurs each time the SDK receives a video frame before encoding. /// After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data before encoding and then process the data according to your particular scenarios.Due to the limitations of Flutter, this callback does not support sending processed video data back to the SDK.The video data that this callback gets has been preprocessed, with its content cropped and rotated, and the image enhanced. /// - /// * [videoFrame] The video frame. See VideoFrame . - final void Function(VideoFrame videoFrame)? onPreEncodeVideoFrame; + /// * [videoFrame] The video frame. See VideoFrame .The default value of the video frame data format obtained through this callback is as follows: + /// Android: texture + /// iOS: cvPixelBuffer + /// macOS: YUV 420 + /// Windows: YUV 420 + /// * [sourceType] The type of the video source. See VideoSourceType . + final void Function(VideoSourceType sourceType, VideoFrame videoFrame)? + onPreEncodeVideoFrame; /// @nodoc - final void Function(VideoFrame videoFrame)? - onSecondaryCameraCaptureVideoFrame; - - /// Gets the video data captured from the second camera before encoding. - /// After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data captured from the second camera before encoding and then process the data according to your particular scenarios.Due to the limitations of Flutter, this callback does not support sending processed video data back to the SDK. - /// - /// * [videoFrame] The video frame. See VideoFrame . - final void Function(VideoFrame videoFrame)? - onSecondaryPreEncodeCameraVideoFrame; - - /// Occurs each time the SDK receives a video frame captured by the screen.
- /// After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data for screen sharing. You can then pre-process the data according to your scenarios.Due to the limitations of Flutter, this callback does not support sending processed video data back to the SDK. - /// - /// * [videoFrame] The video frame. See VideoFrame . - final void Function(VideoFrame videoFrame)? onScreenCaptureVideoFrame; - - /// Gets the video data captured from the screen before encoding. - /// After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data captured from the screen before encoding and then process the data according to your particular scenarios.Due to the limitations of Flutter, this callback does not support sending processed video data back to the SDK. - /// - /// * [videoFrame] The video frame. See VideoFrame . - final void Function(VideoFrame videoFrame)? onPreEncodeScreenVideoFrame; - - /// Gets the video data of the media player. - /// After you successfully register the video frame observer and calling the createMediaPlayer method, the SDK triggers this callback each time when it receives a video frame. In this callback, you can get the video data of the media player. You can then process the data according to your particular scenarios.Due to the limitations of Flutter, this callback does not support sending processed video data back to the SDK. - /// - /// * [videoFrame] The video frame. See VideoFrame . - /// * [mediaPlayerId] The ID of the media player. final void Function(VideoFrame videoFrame, int mediaPlayerId)? onMediaPlayerVideoFrame; - /// @nodoc - final void Function(VideoFrame videoFrame)? - onSecondaryScreenCaptureVideoFrame; - - /// Gets the video data captured from the second screen before encoding. - /// After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data captured from the second screen before encoding and then process the data according to your particular scenarios.Due to the limitations of Flutter, this callback does not support sending processed video data back to the SDK. - /// - /// * [videoFrame] The video frame. See VideoFrame . - final void Function(VideoFrame videoFrame)? - onSecondaryPreEncodeScreenVideoFrame; - /// Occurs each time the SDK receives a video frame sent by the remote user. - /// After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data before encoding. You can then process the data according to your particular scenarios.If the video data type you get is RGBA, Agora does not support processing the data of the alpha channel.Due to the limitations of Flutter, this callback does not support sending processed video data back to the SDK. + /// After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. 
In this callback, you can get the video data sent from the remote end before rendering, and then process it according to the particular scenarios.If the video data type you get is RGBA, the SDK does not support processing the data of the alpha channel.Due to the limitations of Flutter, this callback does not support sending processed video data back to the SDK. /// - /// * [videoFrame] The video frame. See VideoFrame . + /// * [videoFrame] The video frame. See VideoFrame .The default value of the video frame data format obtained through this callback is as follows: + /// Android: texture + /// iOS: cvPixelBuffer + /// macOS: YUV 420 + /// Windows: YUV 420 /// * [remoteUid] The user ID of the remote user who sends the current video frame. /// * [channelId] The channel ID. final void Function(String channelId, int remoteUid, VideoFrame videoFrame)? @@ -1416,7 +1515,7 @@ extension RecorderErrorCodeExt on RecorderErrorCode { } } -/// Configurations for the local audio and video recording. +/// @nodoc @JsonSerializable(explicitToJson: true, includeIfNull: false) class MediaRecorderConfiguration { /// @nodoc const MediaRecorderConfiguration( {this.storagePath, this.containerFormat, this.streamType, this.maxDurationMs, this.recorderInfoUpdateInterval}); - /// The absolute path (including the filename extensions) of the recording file. For example:Windows: C:\Users\\AppData\Local\Agora\\example.mp4iOS: /App Sandbox/Library/Caches/example.mp4macOS: /Library/Logs/example.mp4Android: /storage/emulated/0/Android/data//files/example.mp4Ensure that the directory for the log files exists and is writable. + /// @nodoc @JsonKey(name: 'storagePath') final String? storagePath; - /// The format of the recording file. See MediaRecorderContainerFormat . + /// @nodoc @JsonKey(name: 'containerFormat') final MediaRecorderContainerFormat? containerFormat; - /// The recording content. See MediaRecorderStreamType . + /// @nodoc @JsonKey(name: 'streamType') final MediaRecorderStreamType? streamType; - /// The maximum recording duration, in milliseconds. The default value is 120000. + /// @nodoc @JsonKey(name: 'maxDurationMs') final int? maxDurationMs; - /// The interval (ms) of updating the recording information. The value range is [1000,10000]. Based on the value you set in this parameter, the SDK triggers the onRecorderInfoUpdated callback to report the updated recording information. + /// @nodoc @JsonKey(name: 'recorderInfoUpdateInterval') final int? recorderInfoUpdateInterval; @@ -1455,21 +1554,21 @@ class MediaRecorderConfiguration { Map toJson() => _$MediaRecorderConfigurationToJson(this); } -/// The information about the file that is recorded. +/// @nodoc @JsonSerializable(explicitToJson: true, includeIfNull: false) class RecorderInfo { /// @nodoc const RecorderInfo({this.fileName, this.durationMs, this.fileSize}); - /// The absolute path of the recording file. + /// @nodoc @JsonKey(name: 'fileName') final String? fileName; - /// The recording duration (ms). + /// @nodoc @JsonKey(name: 'durationMs') final int? durationMs; - /// The size (bytes) of the recording file. + /// @nodoc @JsonKey(name: 'fileSize') final int? fileSize; @@ -1481,7 +1580,7 @@ class RecorderInfo { Map toJson() => _$RecorderInfoToJson(this); } -/// The MediaRecorderObserver class. +/// @nodoc class MediaRecorderObserver { /// @nodoc const MediaRecorderObserver({ this.onRecorderStateChanged, this.onRecorderInfoUpdated, }); - /// Occurs when the recording state changes.
- /// When the local audio or video recording state changes, the SDK triggers this callback to report the current recording state and the reason for the change. - /// - /// * [state] The current recording state. See RecorderState . - /// * [error] The reason for the state change. See RecorderErrorCode . - final void Function(RecorderState state, RecorderErrorCode error)? - onRecorderStateChanged; + /// @nodoc + final void Function(String channelId, int uid, RecorderState state, + RecorderErrorCode error)? onRecorderStateChanged; - /// Occurs when the recording information is updated. - /// After you successfully enable the local audio and video recording, the SDK periodically triggers this callback based on the value of recorderInfoUpdateInterval set in MediaRecorderConfiguration . This callback reports the file name, duration, and size of the current recording file. - /// - /// * [info] The information about the file that is recorded. See RecorderInfo . - final void Function(RecorderInfo info)? onRecorderInfoUpdated; + /// @nodoc + final void Function(String channelId, int uid, RecorderInfo info)? + onRecorderInfoUpdated; } diff --git a/lib/src/agora_media_base.g.dart b/lib/src/agora_media_base.g.dart index 7d1bbd67b..9fac5b89f 100644 --- a/lib/src/agora_media_base.g.dart +++ b/lib/src/agora_media_base.g.dart @@ -447,6 +447,27 @@ Map _$RecorderInfoToJson(RecorderInfo instance) { return val; } +const _$VideoSourceTypeEnumMap = { + VideoSourceType.videoSourceCameraPrimary: 0, + VideoSourceType.videoSourceCamera: 0, + VideoSourceType.videoSourceCameraSecondary: 1, + VideoSourceType.videoSourceScreenPrimary: 2, + VideoSourceType.videoSourceScreen: 2, + VideoSourceType.videoSourceScreenSecondary: 3, + VideoSourceType.videoSourceCustom: 4, + VideoSourceType.videoSourceMediaPlayer: 5, + VideoSourceType.videoSourceRtcImagePng: 6, + VideoSourceType.videoSourceRtcImageJpeg: 7, + VideoSourceType.videoSourceRtcImageGif: 8, + VideoSourceType.videoSourceRemote: 9, + VideoSourceType.videoSourceTranscoded: 10, + VideoSourceType.videoSourceCameraThird: 11, + VideoSourceType.videoSourceCameraFourth: 12, + VideoSourceType.videoSourceScreenThird: 13, + VideoSourceType.videoSourceScreenFourth: 14, + VideoSourceType.videoSourceUnknown: 100, +}; + const _$AudioRouteEnumMap = { AudioRoute.routeDefault: -1, AudioRoute.routeHeadset: 0, @@ -497,6 +518,12 @@ const _$RenderModeTypeEnumMap = { RenderModeType.renderModeAdaptive: 3, }; +const _$CameraVideoSourceTypeEnumMap = { + CameraVideoSourceType.cameraSourceFront: 0, + CameraVideoSourceType.cameraSourceBack: 1, + CameraVideoSourceType.videoSourceUnspecified: 2, +}; + const _$MediaPlayerSourceTypeEnumMap = { MediaPlayerSourceType.mediaPlayerSourceDefault: 0, MediaPlayerSourceType.mediaPlayerSourceFullFeatured: 1, diff --git a/lib/src/agora_media_engine.dart b/lib/src/agora_media_engine.dart index 5a13c1791..0736d9ebf 100644 --- a/lib/src/agora_media_engine.dart +++ b/lib/src/agora_media_engine.dart @@ -39,50 +39,52 @@ abstract class MediaEngine { /// Registers an audio frame observer object. /// Call this method to register an audio frame observer object (register a callback). When you need the SDK to trigger onMixedAudioFrame , onRecordAudioFrame , onPlaybackAudioFrame or onEarMonitoringAudioFrame callback, you need to use this method to register the callbacks.Ensure that you call this method before joining a channel. /// - /// * [observer] The observer object instance. See AudioFrameObserver . 
Agora recommends calling after receiving onLeaveChannel to release the audio observer object. + /// * [observer] The observer object instance. See AudioFrameObserver . Agora recommends calling this method after receiving onLeaveChannel to release the audio observer object. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. void registerAudioFrameObserver(AudioFrameObserver observer); - /// Registers a video frame observer object. + /// Registers a raw video frame observer object. /// If you want to obtain the original video data of some remote users (referred to as group A) and the encoded video data of other remote users (referred to as group B), you can refer to the following steps: /// Call registerVideoFrameObserver to register the raw video frame observer before joining the channel. /// Call registerVideoEncodedFrameObserver to register the encoded video frame observer before joining the channel. /// After joining the channel, get the user IDs of group B users through onUserJoined , and then call setRemoteVideoSubscriptionOptions to set the encodedFrameOnly of this group of users to true. - /// Call muteAllRemoteVideoStreams to start receiving the video streams of all remote users. Then: + /// Call muteAllRemoteVideoStreams (false) to start receiving the video streams of all remote users. Then: /// The raw video data of group A users can be obtained through the callback in VideoFrameObserver , and the SDK renders the data by default. - /// The encoded video data of group B users can be obtained through the callback in VideoEncodedFrameObserver . If you want to observe raw video frames (such as YUV or RGBA format), Agora recommends that you implement one VideoFrameObserver class with this method.After registering the class, you need to register the callbacks in the class as required. After you successfully register the video frame observer, the SDK triggers the registered callbacks each time a video frame is received.Ensure that you call this method before joining a channel.When handling the video data returned in the callbacks, pay attention to the changes in the width and height parameters, which may be adapted under the following circumstances:When network conditions deteriorate, the video resolution decreases incrementally.If the user adjusts the video profile, the resolution of the video returned in the callbacks also changes. + /// The encoded video data of group B users can be obtained through the callback in VideoEncodedFrameObserver . If you want to observe raw video frames (such as YUV or RGBA format), Agora recommends that you implement one VideoFrameObserver class with this method.When calling this method to register a video observer, you can register callbacks in the VideoFrameObserver class as needed. 
After you successfully register the video frame observer, the SDK triggers the registered callbacks each time a video frame is received.Ensure that you call this method before joining a channel.When handling the video data returned in the callbacks, pay attention to the changes in the width and height parameters, which may be adapted under the following circumstances:When network conditions deteriorate, the video resolution decreases incrementally.If the user adjusts the video profile, the resolution of the video returned in the callbacks also changes.After registering the raw video observer, you can use the obtained raw video data in various video pre-processing scenarios, such as implementing virtual backgrounds and image enhancement scenarios by yourself. Agora provides some open-source sample projects on GitHub for your reference. /// /// * [observer] The observer object instance. See VideoFrameObserver . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. void registerVideoFrameObserver(VideoFrameObserver observer); /// Registers a receiver object for the encoded video image. - /// If you only want to observe encoded video frames (such as h.264 format) without decoding and rendering the video, Agora recommends that you implement one VideoEncodedFrameObserver class through this method.If you want to obtain the original video data of some remote users (referred to as group A) and the encoded video data of other remote users (referred to as group B), you can refer to the following steps:Call registerVideoFrameObserver to register the raw video frame observer before joining the channel.Call registerVideoEncodedFrameObserver to register the encoded video frame observer before joining the channel.After joining the channel, get the user IDs of group B users through onUserJoined , and then call setRemoteVideoSubscriptionOptions to set the encodedFrameOnly of this group of users to true.Call muteAllRemoteVideoStreams to start receiving the video streams of all remote users. Then:The raw video data of group A users can be obtained through the callback in VideoFrameObserver , and the SDK renders the data by default.The encoded video data of group B users can be obtained through the callback in VideoEncodedFrameObserver .Call this method before joining a channel. + /// If you only want to observe encoded video frames (such as H.264 format) without decoding and rendering the video, Agora recommends that you implement one VideoEncodedFrameObserver class through this method.If you want to obtain the original video data of some remote users (referred to as group A) and the encoded video data of other remote users (referred to as group B), you can refer to the following steps:Call registerVideoFrameObserver to register the raw video frame observer before joining the channel.Call registerVideoEncodedFrameObserver to register the encoded video frame observer before joining the channel.After joining the channel, get the user IDs of group B users through onUserJoined , and then call setRemoteVideoSubscriptionOptions to set the encodedFrameOnly of this group of users to true.Call muteAllRemoteVideoStreams (false) to start receiving the video streams of all remote users.
Then:The raw video data of group A users can be obtained through the callback in VideoFrameObserver , and the SDK renders the data by default.The encoded video data of group B users can be obtained through the callback in VideoEncodedFrameObserver .Call this method before joining a channel. /// /// * [observer] The video frame observer object. See VideoEncodedFrameObserver . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. void registerVideoEncodedFrameObserver(VideoEncodedFrameObserver observer); /// Pushes the external audio frame. /// - /// * [type] The type of the audio recording device. See MediaSourceType . /// * [frame] The external audio frame. See AudioFrame . - /// * [wrap] Whether to use the placeholder. Agora recommends using the default value.true: Use the placeholder.false: (Default) Do not use the placeholder. - /// * [sourceId] The ID of external audio source. If you want to publish a custom external audio source, set this parameter to the ID of the corresponding custom audio track you want to publish. - Future pushAudioFrame( - {required MediaSourceType type, - required AudioFrame frame, - bool wrap = false, - int sourceId = 0}); - - /// @nodoc - Future pushCaptureAudioFrame(AudioFrame frame); - - /// @nodoc - Future pushReverseAudioFrame(AudioFrame frame); - - /// @nodoc - Future pushDirectAudioFrame(AudioFrame frame); + /// * [trackId] The audio track ID. If you want to publish a custom external audio source, set this parameter to the ID of the corresponding custom audio track you want to publish. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + Future pushAudioFrame({required AudioFrame frame, int trackId = 0}); /// Pulls the remote audio data. /// Before calling this method, you need to call setExternalAudioSink to notify the app to enable and set the external rendering.After a successful method call, the app pulls the decoded and mixed audio data for playback.This method only supports pulling data from custom audio source. If you need to pull the data captured by the SDK, do not call this method.Call this method after joining a channel.Once you enable the external audio sink, the app will not retrieve any audio data from the onPlaybackAudioFrame callback.The difference between this method and the onPlaybackAudioFrame callback is as follows:The SDK sends the audio data to the app through the onPlaybackAudioFrame callback. Any delay in processing the audio frames may result in audio jitter.After a successful method call, the app automatically pulls the audio data from the SDK. After setting the audio data parameters, the SDK adjusts the frame buffer and avoids problems caused by jitter in the external audio playback. + /// + /// * [frame] Pointers to AudioFrame . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. Future pullAudioFrame(AudioFrame frame); /// Configures the external video source. @@ -90,53 +92,79 @@ abstract class MediaEngine { /// /// * [enabled] Whether to use the external video source:true: Use the external video source. 
The SDK prepares to accept the external video frame.false: (Default) Do not use the external video source. /// * [useTexture] Whether to use the external video frame in the Texture format.true: Use the external video frame in the Texture format.false: (Default) Do not use the external video frame in the Texture format. - /// * [sourceType] Whether to encode the external video frame, see ExternalVideoSourceType . + /// * [sourceType] Whether the external video frame is encoded. See ExternalVideoSourceType . /// * [encodedVideoOption] Video encoding options. This parameter needs to be set if sourceType is encodedVideoFrame. To set this parameter, contact . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setExternalVideoSource( {required bool enabled, required bool useTexture, ExternalVideoSourceType sourceType = ExternalVideoSourceType.videoFrame, SenderOptions encodedVideoOption = const SenderOptions()}); - /// Sets the external captured audio parameters and chooses whether to publish the audio to the remote user. + /// Sets the external audio source parameters. /// Call this method before joining a channel. /// /// * [enabled] Whether to enable the external audio source:true: Enable the external audio source.false: (Default) Disable the external audio source. - /// * [sampleRate] The sample rate (Hz) of the external audio source, which can be set as 8000, 16000, 32000, 44100, or 48000. + /// * [sampleRate] The sample rate (Hz) of the external audio source which can be set as 8000, 16000, 32000, 44100, or 48000. /// * [channels] The number of channels of the external audio source, which can be set as 1 (Mono) or 2 (Stereo). - /// * [sourceNumber] The number of external audio sources. The value of this parameter should be larger than 0. The SDK creates a corresponding number of custom audio tracks based on this parameter value and names the audio tracks starting from 0. In ChannelMediaOptions , you can set publishCustomAudioSourceId to the audio track ID you want to publish. /// * [localPlayback] Whether to play the external audio source:true: Play the external audio source.false: (Default) Do not play the external source. /// * [publish] Whether to publish audio to the remote users:true: (Default) Publish audio to the remote users.false: Do not publish audio to the remote users. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setExternalAudioSource( {required bool enabled, required int sampleRate, required int channels, - int sourceNumber = 1, bool localPlayback = false, bool publish = true}); + /// Creates a customized audio track. + /// When you need to publish multiple streams of custom captured audio in the channel, you can refer to the following steps:Call this method to create a custom audio track and get the audio track ID.In ChannelMediaOptions of each channel, set publishCustomAudioTrackId to the audio track ID that you want to publish, and set publishCustomAudioTrack to true.If you call pushAudioFrame with trackId set to the audio track ID from step 2, you can publish the corresponding custom audio source in multiple channels. + /// + /// * [trackType] The type of the custom audio track. See AudioTrackType .
+ /// * [config] The configuration of the custom audio track. See AudioTrackConfig . + /// + /// Returns + /// If the method call is successful, the audio track ID is returned as the unique identifier of the audio track.If the method call fails, a negative value is returned. + Future createCustomAudioTrack( + {required AudioTrackType trackType, required AudioTrackConfig config}); + + /// Destroys the specified audio track. + /// + /// * [trackId] The custom audio track ID returned in createCustomAudioTrack . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + Future destroyCustomAudioTrack(int trackId); + /// Sets the external audio sink. /// This method applies to scenarios where you want to use external audio data for playback. After you set the external audio sink, you can call pullAudioFrame to pull remote audio frames. The app can process the remote audio and play it with the audio effects that you want. /// /// * [enabled] Whether to enable or disable the external audio sink:true: Enables the external audio sink.false: (Default) Disables the external audio sink. /// * [sampleRate] The sample rate (Hz) of the external audio sink, which can be set as 16000, 32000, 44100, or 48000. /// * [channels] The number of audio channels of the external audio sink:1: Mono.2: Stereo. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setExternalAudioSink( {required bool enabled, required int sampleRate, required int channels}); /// @nodoc Future enableCustomAudioLocalPlayback( - {required int sourceId, required bool enabled}); - - /// @nodoc - Future setDirectExternalAudioSource( - {required bool enable, bool localPlayback = false}); + {required int trackId, required bool enabled}); /// Pushes the external raw video frame to the SDK. - /// To push the unencoded external raw video frame to the SDK, call createCustomVideoTrack to get the video track ID, set customVideoTrackId as the video track ID you want to publish in the ChannelMediaOptions of each channel, and set publishCustomVideoTrack as true. + /// After calling the createCustomVideoTrack method to get the video track ID, setting the customVideoTrackId parameter to the video track ID you want to publish in the ChannelMediaOptions of each channel, and setting the publishCustomVideoTrack parameter to true, you can call this method to push the unencoded external video frame to the SDK. /// /// * [frame] The external raw video frame to be pushed. See ExternalVideoFrame . /// * [videoTrackId] The video track ID returned by calling the createCustomVideoTrack method. The default value is 0. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. Future pushVideoFrame( {required ExternalVideoFrame frame, int videoTrackId = 0}); @@ -153,15 +181,24 @@ abstract class MediaEngine { /// Unregisters an audio frame observer. /// /// * [observer] The audio frame observer, reporting the reception of each audio frame. See AudioFrameObserver .
+ /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. void unregisterAudioFrameObserver(AudioFrameObserver observer); /// Unregisters the video frame observer. /// /// * [observer] The video observer, reporting the reception of each video frame. See VideoFrameObserver . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. void unregisterVideoFrameObserver(VideoFrameObserver observer); /// Unregisters a receiver object for the encoded video image. /// /// * [observer] The video observer, reporting the reception of each video frame. See VideoEncodedFrameObserver . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. void unregisterVideoEncodedFrameObserver(VideoEncodedFrameObserver observer); } diff --git a/lib/src/agora_media_player.dart b/lib/src/agora_media_player.dart index 829f5bcb8..b4ad188fb 100644 --- a/lib/src/agora_media_player.dart +++ b/lib/src/agora_media_player.dart @@ -11,8 +11,11 @@ abstract class MediaPlayer { /// Opens the media resource. /// This method is called asynchronously.If you need to play a media file, make sure you receive the onPlayerSourceStateChanged callback reporting playerStateOpenCompleted before calling the play method to play the file. /// - /// * [url] The path of the media file. Both local path and online path are supported.On the Android platform, if you need to open a file in URI format, use open . + /// * [url] The path of the media file. Both local path and online path are supported. /// * [startPos] The starting position (ms) for playback. Default value is 0. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future open({required String url, required int startPos}); /// Opens a media file and configures the playback scenarios. @@ -20,31 +23,51 @@ abstract class MediaPlayer { /// /// * [source] Media resources. See MediaSource . /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future openWithMediaSource(MediaSource source); /// Plays the media file. /// After calling open or seek, you can call this method to play the media file. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future play(); /// Pauses the playback. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future pause(); /// Stops playing the media track. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
Future stop(); /// Resumes playing the media file. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future resume(); /// Seeks to a new playback position. - /// After successfully calling this method, you will receive the onPlayerEvent callback, reporting the result of the seek operation to the new playback position.To play the media file from a specific position, do the following:Call this method to seek to the position you want to begin playback.Call the play method to play the media file. + /// After successfully calling this method, you will receive the onPlayerEvent callback, reporting the result of the seek operation to the new playback position.To play the media file from a specific position, do the following:Call this method to seek to the position you want to begin playback.Call the play method to play the media file (see the sketch below). /// /// * [newPos] The new playback position (ms). + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future seek(int newPos); /// Sets the pitch of the current media resource. /// Call this method after calling open . /// /// * [pitch] Sets the pitch of the local music file by the chromatic scale. The default value is 0, which means keeping the original pitch. The value ranges from -12 to 12, and the pitch value between consecutive values is a chromatic value. The greater the absolute value of this parameter, the higher or lower the pitch of the local music file. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setAudioPitch(int pitch); /// Gets the duration of the media resource. @@ -79,25 +102,37 @@ abstract class MediaPlayer { /// If you want to loop, call this method and set the number of the loops.When the loop finishes, the SDK triggers onPlayerSourceStateChanged and reports the playback state as playerStatePlaybackAllLoopsCompleted. /// /// * [loopCount] The number of times the audio effect loops: + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setLoopCount(int loopCount); /// Sets the playback speed. /// Call this method after calling open . /// /// * [speed] The playback speed. Agora recommends that you limit this value to between 50 and 400, defined as follows:50: Half the original speed.100: The original speed.400: 4 times the original speed. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setPlaybackSpeed(int speed); /// Selects the audio track used during playback. /// After getting the track index of the audio file, you can call this method to specify any track to play.
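// A minimal sketch of the seek-then-play sequence described above; the
// function name is illustrative and `url` must point to a playable file.
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> seekThenPlaySketch(MediaPlayer player, String url) async {
  await player.open(url: url, startPos: 0);
  // Wait until onPlayerSourceStateChanged reports playerStateOpenCompleted
  // (observer registration not shown), then start from the desired position:
  await player.seek(30 * 1000); // begin playback at 00:30
  await player.play();
}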
For example, if different tracks of a multi-track file store songs in different languages, you can call this method to set the playback language.You need to call this method after calling getStreamInfo to get the audio stream index value. /// /// * [index] The index of the audio track. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future selectAudioTrack(int index); /// Sets the private options for the media player. - /// The media player supports setting private options by key and value. Under normal circumstances, you do not need to know the private option settings, and just use the default option settings.Ensure that you call this method before open .If you need to push streams with SEI into the CDN, callsetPlayerOptionInInt ("sei_data_with_uuid", 1); otherwise, the loss of SEI might occurs. + /// The media player supports setting private options by key and value. Under normal circumstances, you do not need to know the private option settings, and just use the default option settings.Ensure that you call this method before open .If you need to push streams with SEI into the CDN, call setPlayerOptionInInt("sei_data_with_uuid", 1); otherwise, the loss of SEI might occur. /// /// * [key] The key of the option. /// * [value] The value of the key. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setPlayerOptionInInt({required String key, required int value}); /// @nodoc @@ -122,6 +157,9 @@ abstract class MediaPlayer { /// Sets whether to mute the media file. /// /// * [muted] Whether to mute the media file:true: Mute the media file.false: (Default) Unmute the media file. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future mute(bool muted); /// Reports whether the media resource is muted. @@ -133,6 +171,9 @@ abstract class MediaPlayer { /// Adjusts the local playback volume. /// /// * [volume] The local playback volume, which ranges from 0 to 100:0: Mute.100: (Default) The original volume. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. Future adjustPlayoutVolume(int volume); /// Gets the local playback volume. @@ -145,32 +186,84 @@ abstract class MediaPlayer { /// After connecting to the Agora server, you can call this method to adjust the volume of the media file heard by the remote user. /// /// * [volume] The volume, which ranges from 0 to 400:0: Mute.100: (Default) The original volume.400: Four times the original volume (amplifying the audio signals by four times). + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. Future adjustPublishSignalVolume(int volume); /// Gets the volume of the media file for publishing. /// /// Returns - /// The remote playback volume, if the method call succeeds.< 0: Failure. + /// ≥ 0: The remote playback volume.< 0: Failure.
Future getPublishSignalVolume(); /// Sets the view. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setView(int view); /// Sets the render mode of the media player. /// /// * [renderMode] Sets the render mode of the view. See RenderModeType . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setRenderMode(RenderModeType renderMode); /// Registers a media player observer. /// /// * [observer] The player observer, listening for events during the playback. See MediaPlayerSourceObserver . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. void registerPlayerSourceObserver(MediaPlayerSourceObserver observer); /// Releases a media player observer. /// /// * [observer] The player observer, listening for events during the playback. See MediaPlayerSourceObserver . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. void unregisterPlayerSourceObserver(MediaPlayerSourceObserver observer); + /// Registers an audio frame observer object. + /// + /// * [observer] The audio frame observer, reporting the reception of each audio frame. See AudioPcmFrameSink . + /// * [mode] The use mode of the audio frame. See RawAudioFrameOpModeType . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + void registerAudioFrameObserver( + {required AudioPcmFrameSink observer, + RawAudioFrameOpModeType mode = + RawAudioFrameOpModeType.rawAudioFrameOpModeReadOnly}); + + /// Unregisters an audio frame observer. + /// + /// * [observer] The audio observer. See AudioPcmFrameSink . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + void unregisterAudioFrameObserver(AudioPcmFrameSink observer); + + /// Registers a video frame observer object. + /// You need to implement the MediaPlayerVideoFrameObserver class in this method and register callbacks according to your scenarios. After you successfully register the video frame observer, the SDK triggers the registered callbacks each time a video frame is received. + /// + /// * [observer] The video observer, reporting the reception of each video frame. See MediaPlayerVideoFrameObserver . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + void registerVideoFrameObserver(MediaPlayerVideoFrameObserver observer); + + /// Unregisters the video frame observer. + /// + /// * [observer] The video observer, reporting the reception of each video frame. See MediaPlayerVideoFrameObserver . 
+ /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + void unregisterVideoFrameObserver(MediaPlayerVideoFrameObserver observer); + /// @nodoc void registerMediaPlayerAudioSpectrumObserver( {required AudioSpectrumObserver observer, required int intervalInMS}); @@ -183,12 +276,18 @@ abstract class MediaPlayer { /// In a stereo music file, the left and right channels can store different audio data. According to your needs, you can set the channel mode to original mode, left channel mode, right channel mode, or mixed channel mode. For example, in the KTV scenario, the left channel of the music file stores the musical accompaniment, and the right channel stores the singing voice. If you only need to listen to the accompaniment, call this method to set the channel mode of the music file to left channel mode; if you need to listen to the accompaniment and the singing voice at the same time, call this method to set the channel mode to mixed channel mode.Call this method after calling open .This method only applies to stereo audio files. /// /// * [mode] The channel mode. See AudioDualMonoMode . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setAudioDualMonoMode(AudioDualMonoMode mode); /// @nodoc Future getPlayerSdkVersion(); - /// @nodoc + /// Gets the path of the media resource being played. + /// + /// Returns + /// The path of the media resource being played. Future getPlaySrc(); /// @nodoc @@ -214,11 +313,13 @@ abstract class MediaPlayer { Future switchAgoraCDNSrc({required String src, bool syncPts = false}); /// Switches the media resource being played. - /// You can call this method to switch the media resource to be played according to the current network status. For example:When the network is poor, the media resource to be played is switched to a media resource address with a lower bitrate.When the network is good, the media resource to be played is switched to a media resource address with a higher bitrate.After calling this method, if you receive the onPlayerEvent event in the playerEventSwitchComplete callback, the switch is successful; If you receive the onPlayerEvent event in the playerEventSwitchError callback, the switch fails.Ensure that you call this method after open .To ensure normal playback, pay attention to the following when calling this method:Do not call this method when playback is paused.Do not call the seek method during switching.Before switching the media resource, make sure that the playback position does not exceed the total duration of the media resource to be switched. + /// You can call this method to switch the media resource to be played according to the current network status. 
For example:When the network is poor, the media resource to be played is switched to a media resource address with a lower bitrate.When the network is good, the media resource to be played is switched to a media resource address with a higher bitrate.After calling this method, if you receive the playerEventSwitchComplete event in the onPlayerEvent callback, the switch is successful; If you receive the playerEventSwitchError event in the onPlayerEvent callback, the switch fails.Ensure that you call this method after open .To ensure normal playback, pay attention to the following when calling this method:Do not call this method when playback is paused.Do not call the seek method during switching.Before switching the media resource, make sure that the playback position does not exceed the total duration of the media resource to be switched. /// /// * [src] The URL of the media resource. /// * [syncPts] Whether to synchronize the playback position (ms) before and after the switch:true: Synchronize the playback position before and after the switch.false: (Default) Do not synchronize the playback position before and after the switch.Make sure to set this parameter as false if you need to play live streams, or the switch fails. If you need to play on-demand streams, you can set the value of this parameter according to your scenarios. /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future switchSrc({required String src, bool syncPts = true}); /// Preloads a media resource. @@ -226,89 +327,100 @@ abstract class MediaPlayer { /// /// * [src] The URL of the media resource. /// * [startPos] The starting position (ms) for playing after the media resource is preloaded to the playlist. When preloading a live stream, set this parameter to 0. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future preloadSrc({required String src, required int startPos}); /// Plays preloaded media resources. /// After calling the preloadSrc method to preload the media resource into the playlist, you can call this method to play the preloaded media resource. After calling this method, if you receive the onPlayerSourceStateChanged callback which reports the playerStatePlaying state, the playback is successful.If you want to change the preloaded media resource to be played, you can call this method again and specify the URL of the new media resource that you want to preload. If you want to replay the media resource, you need to call preloadSrc to preload the media resource to the playlist again before playing. If you want to clear the playlist, call the stop method.If you call this method when playback is paused, this method does not take effect until playback is resumed. /// /// * [src] The URL of the media resource in the playlist must be consistent with the src set by the preloadSrc method; otherwise, the media resource cannot be played. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future playPreloadedSrc(String src); /// Unloads media resources that are preloaded. /// This method cannot release the media resource being played. 
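// A hedged sketch of the preload flow documented above: preload a resource,
// play it from the playlist once preloading completes, then unload it. The
// function name is illustrative; completion-callback handling is elided.
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> preloadSketch(MediaPlayer player, String src) async {
  await player.preloadSrc(src: src, startPos: 0);
  // After the SDK reports that preloading completed (see the callbacks
  // documented above), the preloaded entry can be played and released:
  await player.playPreloadedSrc(src);
  await player.unloadSrc(src);
}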
/// /// * [src] The URL of the media resource. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future unloadSrc(String src); /// Enables or disables the spatial audio effect for the media player. /// After successfully setting the spatial audio effect parameters of the media player, the SDK enables the spatial audio effect for the media player, and the local user can hear the media resources with a sense of space.If you need to disable the spatial audio effect for the media player, set the params parameter to null. /// - /// * [params] The spatial audio effect parameters of the media player. See SpatialAudioParams for details. + /// * [params] The spatial audio effect parameters of the media player. See SpatialAudioParams . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setSpatialAudioParams(SpatialAudioParams params); /// @nodoc Future setSoundPositionParams( {required double pan, required double gain}); - - /// Registers an audio frame observer object. - /// You need to implement the MediaPlayerAudioFrameObserver class in this method and register callbacks according to your scenarios. After you successfully register the video frame observer, the SDK triggers the registered callbacks each time a video frame is received. - /// - /// * [observer] The audio frame observer, reporting the reception of each audio frame. See MediaPlayerAudioFrameObserver . - void registerAudioFrameObserver(MediaPlayerAudioFrameObserver observer); - - /// Unregisters an audio observer. - /// - /// * [observer] The audio observer. See MediaPlayerAudioFrameObserver . - void unregisterAudioFrameObserver(MediaPlayerAudioFrameObserver observer); - - /// Registers a video frame observer object. - /// You need to implement the MediaPlayerVideoFrameObserver class in this method and register callbacks according to your scenarios. After you successfully register the video frame observer, the SDK triggers the registered callbacks each time a video frame is received. - /// - /// * [observer] The video observer, reporting the reception of each video frame. See MediaPlayerVideoFrameObserver . - void registerVideoFrameObserver(MediaPlayerVideoFrameObserver observer); - - /// Unregisters the video frame observer. - /// - /// * [observer] The video observer, reporting the reception of each video frame. See MediaPlayerVideoFrameObserver . - void unregisterVideoFrameObserver(MediaPlayerVideoFrameObserver observer); } /// This class provides methods to manage cached media files. abstract class MediaPlayerCacheManager { /// Deletes all cached media files in the media player. /// The cached media file currently being played will not be deleted. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. See MediaPlayerError . Future removeAllCaches(); /// Deletes a cached media file that is the least recently used. /// You can call this method to delete a cached media file when the storage space for the cached files is about to reach its limit. 
After you call this method, the SDK deletes the cached media file that is least used.The cached media file currently being played will not be deleted. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. See MediaPlayerError . Future removeOldCache(); /// Deletes a cached media file. /// The cached media file currently being played will not be deleted. /// /// * [uri] The URI (Uniform Resource Identifier) of the media file to be deleted. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. See MediaPlayerError . Future removeCacheByUri(String uri); /// Sets the storage path for the media files that you want to cache. /// Make sure RtcEngine is initialized before you call this method. /// /// * [path] The absolute path of the media files to be cached. Ensure that the directory for the media files exists and is writable. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. See MediaPlayerError . Future setCacheDir(String path); /// Sets the maximum number of media files that can be cached. /// /// * [count] The maximum number of media files that can be cached. The default value is 1,000. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. See MediaPlayerError . Future setMaxCacheFileCount(int count); /// Sets the maximum size of the aggregate storage space for cached media files. /// /// * [cacheSize] The maximum size (bytes) of the aggregate storage space for cached media files. The default value is 1 GB. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. See MediaPlayerError . Future setMaxCacheFileSize(int cacheSize); /// Sets whether to delete cached media files automatically. /// If you enable this function to remove cached media files automatically, when the cached media files exceed either the number or size limit you set, the SDK automatically deletes the least recently used cache file. /// /// * [enable] Whether to enable the SDK to delete cached media files automatically:true: Delete cached media files automatically.false: (Default) Do not delete cached media files automatically. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. See MediaPlayerError . Future enableAutoRemoveCache(bool enable); /// Gets the storage path of the cached media files. @@ -341,20 +453,6 @@ abstract class MediaPlayerCacheManager { Future getCacheFileCount(); } -/// The audio frame observer for the media player. -class MediaPlayerAudioFrameObserver { - /// @nodoc - const MediaPlayerAudioFrameObserver({ - this.onFrame, - }); - - /// Occurs each time the player receives an audio frame. 
- /// After registering the audio frame observer, the callback occurs every time the player receives an audio frame, reporting the detailed information of the audio frame. - /// - /// * [frame] Audio frame information. See AudioPcmFrame . - final void Function(AudioPcmFrame frame)? onFrame; -} - /// The video frame observer for the media player. class MediaPlayerVideoFrameObserver { /// @nodoc diff --git a/lib/src/agora_media_player_source.dart b/lib/src/agora_media_player_source.dart index 3457f9df3..64bd9c208 100644 --- a/lib/src/agora_media_player_source.dart +++ b/lib/src/agora_media_player_source.dart @@ -17,24 +17,24 @@ class MediaPlayerSourceObserver { this.onAudioVolumeIndication, }); - /// Reports the playback state change. + /// Reports the changes of playback state. /// When the state of the media player changes, the SDK triggers this callback to report the current playback state. /// - /// * [state] The playback state, see MediaPlayerState . + /// * [state] The playback state. See MediaPlayerState . /// * [ec] The error code. See MediaPlayerError . final void Function(MediaPlayerState state, MediaPlayerError ec)? onPlayerSourceStateChanged; /// Reports current playback progress. - /// When playing media files, the SDK triggers this callback every one second to report current playback progress. + /// When playing media files, the SDK triggers this callback every two seconds to report current playback progress. /// /// * [position] The playback position (ms) of media files. final void Function(int positionMs)? onPositionChanged; - /// Reports the playback event. + /// Reports the player events. /// After calling the seek method, the SDK triggers the callback to report the results of the seek operation. /// - /// * [eventCode] The playback event. See MediaPlayerEvent . + /// * [eventCode] The player events. See MediaPlayerEvent . /// * [elapsedTime] The time (ms) when the event occurs. /// * [message] Information about the event. final void Function( diff --git a/lib/src/agora_media_player_types.dart b/lib/src/agora_media_player_types.dart index 51b6fe870..6bf835411 100644 --- a/lib/src/agora_media_player_types.dart +++ b/lib/src/agora_media_player_types.dart @@ -520,7 +520,7 @@ class MediaSource { @JsonKey(name: 'uri') final String? uri; - /// The starting position (ms) for playback. Default value is 0. + /// The starting position (ms) for playback. The default value is 0. @JsonKey(name: 'startPos') final int? startPos; @@ -528,15 +528,15 @@ class MediaSource { @JsonKey(name: 'autoPlay') final bool? autoPlay; - /// Whether to cache the media file when it is being played:true:Enables caching.false: (Default) Disables caching.If you need to enable caching, pass in a value to uri; otherwise, caching is based on the url of the media file.If you enable this function, the Media Player caches part of the media file being played on your local device, and you can play the cached media file without internet connection. The statistics about the media file being cached are updated every second after the media file is played. See CacheStatistics .
+ /// Whether to cache the media file when it is being played:true: Enables caching.false: (Default) Disables caching.Agora only supports caching on-demand audio and video streams that are not transmitted in the HLS protocol.If you need to enable caching, pass in a value to uri; otherwise, caching is based on the url of the media file.If you enable this function, the Media Player caches part of the media file being played on your local device, and you can play the cached media file without internet connection. The statistics about the media file being cached are updated every second after the media file is played. See CacheStatistics . A usage sketch appears below. @JsonKey(name: 'enableCache') final bool? enableCache; - /// Whether the media resource to be opened is a live stream or on-demand video distributed through Media Broadcast service:true: The media resource is a live stream or on-demand video distributed through Media Broadcast service.false: (Default) The media resource is not a live stream or on-demand video distributed through Media Broadcast service.If you need to open a live stream or on-demand video distributed through Broadcast Streaming service, pass in the URL of the media resource to url, and set isAgoraSource as ; otherwise, you don't need to set the isAgoraSource parameter.true + /// Whether the media resource to be opened is a live stream or on-demand video distributed through Media Broadcast service:true: The media resource to be played is a live or on-demand video distributed through Media Broadcast service.false: (Default) The media resource is not a live stream or on-demand video distributed through Media Broadcast service.If you need to open a live stream or on-demand video distributed through Broadcast Streaming service, pass in the URL of the media resource to url, and set isAgoraSource as true; otherwise, you don't need to set the isAgoraSource parameter. @JsonKey(name: 'isAgoraSource') final bool? isAgoraSource; - /// Whether the media resource to be opened is a live stream:true: The media resource is a live stream.false: (Default) The media resource is not a live stream.trueIf the media resource you want to open is a live stream, Agora recommends that you set this parameter as so that the live stream can be loaded more quickly.trueIf the media resource you open is not a live stream, but you set isLiveSource as , the media resource is not to be loaded more quickly. + /// Whether the media resource to be opened is a live stream:true: The media resource is a live stream.false: (Default) The media resource is not a live stream.If the media resource you want to open is a live stream, Agora recommends that you set this parameter as true so that the live stream can be loaded more quickly.If the media resource you open is not a live stream, but you set isLiveSource as true, the media resource will not be loaded more quickly. @JsonKey(name: 'isLiveSource') final bool? isLiveSource; diff --git a/lib/src/agora_media_recorder.dart b/lib/src/agora_media_recorder.dart index 01026f670..f34479016 100644 --- a/lib/src/agora_media_recorder.dart +++ b/lib/src/agora_media_recorder.dart @@ -1,36 +1,13 @@ import 'package:agora_rtc_engine/src/binding_forward_export.dart'; -/// Used for recording audio and video on the client. -/// MediaRecorder can record the following: -/// The audio captured by the local microphone and encoded in AAC format.The video captured by the local camera and encoded by the SDK. +/// @nodoc abstract class MediaRecorder { - /// Registers one MediaRecorderObserver object.
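// A sketch of opening a resource with the MediaSource caching options from
// the hunk above; the function name and URL are placeholders. Caching only
// applies to non-HLS on-demand streams.
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> openWithCacheSketch(MediaPlayer player) async {
  await player.openWithMediaSource(const MediaSource(
    url: 'https://example.com/on-demand/video.mp4',
    startPos: 0,
    autoPlay: true,
    enableCache: true,
  ));
}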
- /// Make sure the RtcEngine is initialized before you call this method. - /// - /// * [connection] The connection information. See RtcConnection . - /// * [callback] The callbacks for recording local audio and video streams. See MediaRecorderObserver . - Future setMediaRecorderObserver( - {required RtcConnection connection, - required MediaRecorderObserver callback}); + /// @nodoc + Future setMediaRecorderObserver(MediaRecorderObserver callback); - /// Starts recording the local audio and video. - /// After successfully getting the MediaRecorder object by calling getMediaRecorder , you can call this method to enable the recoridng of the local audio and video.This method can record the audio captured by the local microphone and encoded in AAC format, and the video captured by the local camera and encoded in H.264 format. The SDK can generate a recording file only when it detects audio and video streams; when there are no audio and video streams to be recorded or the audio and video streams are interrupted for more than five seconds, the SDK stops the recording and triggers the onRecorderStateChanged(recorderStateError, recorderErrorNoStream) callback.Once the recording is started, if the video resolution is changed, the SDK stops the recording; if the sampling rate and audio channel changes, the SDK continues recording and generates audio files respectively.Call this method after joining a channel. - /// - /// * [connection] The connection information. See RtcConnection . - /// * [config] The recording configuration. See MediaRecorderConfiguration . - /// - Future startRecording( - {required RtcConnection connection, - required MediaRecorderConfiguration config}); + /// @nodoc + Future startRecording(MediaRecorderConfiguration config); - /// Stops recording the local audio and video. - /// After calling startRecording , if you want to stop the recording, you must call this method; otherwise, the generated recording files may not be playable. - /// - /// * [connection] The connection information. See RtcConnection . - /// - Future stopRecording(RtcConnection connection); - - /// Release the MediaRecorder object. - /// This method releases the MediaRecorder object and all resources used by the RtcEngine object. After calling this method, if you need to start recording again, you need to call getMediaRecorder again to get the MediaRecorder object. 
- Future release(); + /// @nodoc + Future stopRecording(); } diff --git a/lib/src/agora_music_content_center.dart b/lib/src/agora_music_content_center.dart index 94642e658..0a30d4d71 100644 --- a/lib/src/agora_music_content_center.dart +++ b/lib/src/agora_music_content_center.dart @@ -15,6 +15,10 @@ enum PreloadStatusCode { /// @nodoc @JsonValue(2) kPreloadStatusPreloading, + + /// @nodoc + @JsonValue(3) + kPreloadStatusRemoved, } /// @nodoc @@ -40,6 +44,26 @@ enum MusicContentCenterStatusCode { /// @nodoc @JsonValue(1) kMusicContentCenterStatusErr, + + /// @nodoc + @JsonValue(2) + kMusicContentCenterStatusErrGateway, + + /// @nodoc + @JsonValue(3) + kMusicContentCenterStatusErrPermissionAndResource, + + /// @nodoc + @JsonValue(4) + kMusicContentCenterStatusErrInternalDataParse, + + /// @nodoc + @JsonValue(5) + kMusicContentCenterStatusErrMusicLoading, + + /// @nodoc + @JsonValue(6) + kMusicContentCenterStatusErrMusicDecryption, } /// @nodoc @@ -77,6 +101,53 @@ class MusicChartInfo { Map toJson() => _$MusicChartInfoToJson(this); } +/// @nodoc +@JsonEnum(alwaysCreate: true) +enum MusicCacheStatusType { + /// @nodoc + @JsonValue(0) + musicCacheStatusTypeCached, + + /// @nodoc + @JsonValue(1) + musicCacheStatusTypeCaching, +} + +/// @nodoc +extension MusicCacheStatusTypeExt on MusicCacheStatusType { + /// @nodoc + static MusicCacheStatusType fromValue(int value) { + return $enumDecode(_$MusicCacheStatusTypeEnumMap, value); + } + + /// @nodoc + int value() { + return _$MusicCacheStatusTypeEnumMap[this]!; + } +} + +/// @nodoc +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class MusicCacheInfo { + /// @nodoc + const MusicCacheInfo({this.songCode, this.status}); + + /// @nodoc + @JsonKey(name: 'songCode') + final int? songCode; + + /// @nodoc + @JsonKey(name: 'status') + final MusicCacheStatusType? status; + + /// @nodoc + factory MusicCacheInfo.fromJson(Map json) => + _$MusicCacheInfoFromJson(json); + + /// @nodoc + Map toJson() => _$MusicCacheInfoToJson(this); +} + /// @nodoc abstract class MusicChartCollection { /// @nodoc @@ -242,26 +313,32 @@ class MusicContentCenterEventHandler { }); /// @nodoc - final void Function(String requestId, MusicContentCenterStatusCode status, - List result)? onMusicChartsResult; + final void Function(String requestId, List result, + MusicContentCenterStatusCode errorCode)? onMusicChartsResult; /// @nodoc - final void Function(String requestId, MusicContentCenterStatusCode status, - MusicCollection result)? onMusicCollectionResult; + final void Function(String requestId, MusicCollection result, + MusicContentCenterStatusCode errorCode)? onMusicCollectionResult; /// @nodoc - final void Function(String requestId, String lyricUrl)? onLyricResult; + final void Function(String requestId, String lyricUrl, + MusicContentCenterStatusCode errorCode)? onLyricResult; /// @nodoc - final void Function(int songCode, int percent, PreloadStatusCode status, - String msg, String lyricUrl)? onPreLoadEvent; + final void Function( + int songCode, + int percent, + String lyricUrl, + PreloadStatusCode status, + MusicContentCenterStatusCode errorCode)? 
onPreLoadEvent; } /// @nodoc @JsonSerializable(explicitToJson: true, includeIfNull: false) class MusicContentCenterConfiguration { /// @nodoc - const MusicContentCenterConfiguration({this.appId, this.token, this.mccUid}); + const MusicContentCenterConfiguration( + {this.appId, this.token, this.mccUid, this.maxCacheSize}); /// @nodoc @JsonKey(name: 'appId') final String? appId; @@ -275,6 +352,10 @@ class MusicContentCenterConfiguration { @JsonKey(name: 'mccUid') final int? mccUid; + /// @nodoc + @JsonKey(name: 'maxCacheSize') + final int? maxCacheSize; + /// @nodoc factory MusicContentCenterConfiguration.fromJson(Map json) => _$MusicContentCenterConfigurationFromJson(json); @@ -308,7 +389,7 @@ abstract class MusicContentCenter { void unregisterEventHandler(); /// @nodoc - Future<MediaPlayer> createMusicPlayer(); + Future<MediaPlayer?> createMusicPlayer(); /// @nodoc Future getMusicCharts(); @@ -330,6 +411,12 @@ abstract class MusicContentCenter { /// @nodoc Future preload({required int songCode, String? jsonOption}); + /// @nodoc + Future removeCache(int songCode); + + /// @nodoc + Future<List<MusicCacheInfo>> getCaches(int cacheInfoSize); + /// @nodoc Future isPreloaded(int songCode); diff --git a/lib/src/agora_music_content_center.g.dart b/lib/src/agora_music_content_center.g.dart index cd1c2a009..099ff5f9a 100644 --- a/lib/src/agora_music_content_center.g.dart +++ b/lib/src/agora_music_content_center.g.dart @@ -28,6 +28,32 @@ Map _$MusicChartInfoToJson(MusicChartInfo instance) { return val; } +MusicCacheInfo _$MusicCacheInfoFromJson(Map json) => + MusicCacheInfo( + songCode: json['songCode'] as int?, + status: + $enumDecodeNullable(_$MusicCacheStatusTypeEnumMap, json['status']), + ); + +Map _$MusicCacheInfoToJson(MusicCacheInfo instance) { + final val = {}; + + void writeNotNull(String key, dynamic value) { + if (value != null) { + val[key] = value; + } + } + + writeNotNull('songCode', instance.songCode); + writeNotNull('status', _$MusicCacheStatusTypeEnumMap[instance.status]); + return val; +} + +const _$MusicCacheStatusTypeEnumMap = { + MusicCacheStatusType.musicCacheStatusTypeCached: 0, + MusicCacheStatusType.musicCacheStatusTypeCaching: 1, +}; + MvProperty _$MvPropertyFromJson(Map json) => MvProperty( resolution: json['resolution'] as String?, bandwidth: json['bandwidth'] as String?, @@ -123,6 +149,7 @@ MusicContentCenterConfiguration _$MusicContentCenterConfigurationFromJson( appId: json['appId'] as String?, token: json['token'] as String?, mccUid: json['mccUid'] as int?, + maxCacheSize: json['maxCacheSize'] as int?, ); Map _$MusicContentCenterConfigurationToJson( @@ -138,6 +165,7 @@ Map _$MusicContentCenterConfigurationToJson( writeNotNull('appId', instance.appId); writeNotNull('token', instance.token); writeNotNull('mccUid', instance.mccUid); + writeNotNull('maxCacheSize', instance.maxCacheSize); return val; } @@ -145,9 +173,16 @@ const _$PreloadStatusCodeEnumMap = { PreloadStatusCode.kPreloadStatusCompleted: 0, PreloadStatusCode.kPreloadStatusFailed: 1, PreloadStatusCode.kPreloadStatusPreloading: 2, + PreloadStatusCode.kPreloadStatusRemoved: 3, }; const _$MusicContentCenterStatusCodeEnumMap = { MusicContentCenterStatusCode.kMusicContentCenterStatusOk: 0, MusicContentCenterStatusCode.kMusicContentCenterStatusErr: 1, + MusicContentCenterStatusCode.kMusicContentCenterStatusErrGateway: 2, + MusicContentCenterStatusCode + .kMusicContentCenterStatusErrPermissionAndResource: 3, + MusicContentCenterStatusCode.kMusicContentCenterStatusErrInternalDataParse: 4, + MusicContentCenterStatusCode.kMusicContentCenterStatusErrMusicLoading: 5, +
MusicContentCenterStatusCode.kMusicContentCenterStatusErrMusicDecryption: 6, }; diff --git a/lib/src/agora_rhythm_player.dart b/lib/src/agora_rhythm_player.dart index d59ed7707..3eed312a5 100644 --- a/lib/src/agora_rhythm_player.dart +++ b/lib/src/agora_rhythm_player.dart @@ -20,7 +20,7 @@ enum RhythmPlayerStateType { @JsonValue(813) rhythmPlayerStatePlaying, - /// 814: Failed to start virtual metronome. You can use the reported errorcode to troubleshoot the cause of the error, or you can try to start the virtual metronome again. + /// 814: Failed to start virtual metronome. You can use the reported errorCode to troubleshoot the cause of the error, or you can try to start the virtual metronome again. @JsonValue(814) rhythmPlayerStateFailed, } @@ -45,7 +45,7 @@ enum RhythmPlayerErrorType { @JsonValue(0) rhythmPlayerErrorOk, - /// 1: General error; no clear reason. + /// 1: A general error; no specific reason. @JsonValue(1) rhythmPlayerErrorFailed, diff --git a/lib/src/agora_rtc_engine.dart b/lib/src/agora_rtc_engine.dart index 58b922c76..2fb8e6719 100644 --- a/lib/src/agora_rtc_engine.dart +++ b/lib/src/agora_rtc_engine.dart @@ -16,7 +16,7 @@ enum MediaDeviceType { @JsonValue(1) audioRecordingDevice, - /// 2: Video rendering device. + /// 2: Video rendering device (graphics card). @JsonValue(2) videoRenderDevice, @@ -27,6 +27,14 @@ enum MediaDeviceType { /// 4: Audio playback device for an app. @JsonValue(4) audioApplicationPlayoutDevice, + + /// (For macOS only)5: Virtual audio playback device (virtual sound card). + @JsonValue(5) + audioVirtualPlayoutDevice, + + /// (For macOS only)6: Virtual audio capturing device (virtual sound card). + @JsonValue(6) + audioVirtualRecordingDevice, } /// @nodoc @@ -438,7 +446,7 @@ class LocalVideoStats { @JsonKey(name: 'txPacketLossRate') final int? txPacketLossRate; - /// The brightness level of the video image captured by the local camera. See CaptureBrightnessLevelType. + /// The brightness level of the video image captured by the local camera. See CaptureBrightnessLevelType . @JsonKey(name: 'captureBrightnessLevel') final CaptureBrightnessLevelType? captureBrightnessLevel; @@ -446,7 +454,7 @@ class LocalVideoStats { @JsonKey(name: 'dualStreamEnabled') final bool? dualStreamEnabled; - /// The local video encoding acceleration type. + /// The local video encoding acceleration type. 0: Software encoding is applied without acceleration.1: Hardware encoding is applied for acceleration. @JsonKey(name: 'hwEncoderAccelerating') final int? hwEncoderAccelerating; @@ -458,6 +466,110 @@ class LocalVideoStats { Map toJson() => _$LocalVideoStatsToJson(this); } +/// Audio statistics of the remote user. +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class RemoteAudioStats { + /// @nodoc + const RemoteAudioStats( + {this.uid, + this.quality, + this.networkTransportDelay, + this.jitterBufferDelay, + this.audioLossRate, + this.numChannels, + this.receivedSampleRate, + this.receivedBitrate, + this.totalFrozenTime, + this.frozenRate, + this.mosValue, + this.frozenRateByCustomPlcCount, + this.plcCount, + this.totalActiveTime, + this.publishDuration, + this.qoeQuality, + this.qualityChangedReason, + this.rxAudioBytes}); + + /// The user ID of the remote user. + @JsonKey(name: 'uid') + final int? uid; + + /// The quality of the audio stream sent by the user. See QualityType . + @JsonKey(name: 'quality') + final int? quality; + + /// The network delay (ms) from the sender to the receiver. + @JsonKey(name: 'networkTransportDelay') + final int? 
networkTransportDelay; + + /// The network delay (ms) from the audio receiver to the jitter buffer.When the receiving end is an audience member and audienceLatencyLevel of ClientRoleOptions is 1, this parameter does not take effect. + @JsonKey(name: 'jitterBufferDelay') + final int? jitterBufferDelay; + + /// The frame loss rate (%) of the remote audio stream in the reported interval. + @JsonKey(name: 'audioLossRate') + final int? audioLossRate; + + /// The number of audio channels. + @JsonKey(name: 'numChannels') + final int? numChannels; + + /// The sampling rate of the received audio stream in the reported interval. + @JsonKey(name: 'receivedSampleRate') + final int? receivedSampleRate; + + /// The average bitrate (Kbps) of the received audio stream in the reported interval. + @JsonKey(name: 'receivedBitrate') + final int? receivedBitrate; + + /// The total freeze time (ms) of the remote audio stream after the remote user joins the channel. In a session, audio freeze occurs when the audio frame loss rate reaches 4%. + @JsonKey(name: 'totalFrozenTime') + final int? totalFrozenTime; + + /// The total audio freeze time as a percentage (%) of the total time when the audio is available. The audio is considered available when the remote user neither stops sending the audio stream nor disables the audio module after joining the channel. + @JsonKey(name: 'frozenRate') + final int? frozenRate; + + /// The quality of the remote audio stream in the reported interval. The quality is determined by the Agora real-time audio MOS (Mean Opinion Score) measurement method. The return value range is [0, 500]. Dividing the return value by 100 gets the MOS score, which ranges from 0 to 5. The higher the score, the better the audio quality.The subjective perception of audio quality corresponding to the Agora real-time audio MOS scores is as follows:MOS scorePerception of audio qualityGreater than 4Excellent. The audio sounds clear and smooth.From 3.5 to 4Good. The audio has some perceptible impairment but still sounds clear.From 3 to 3.5Fair. The audio freezes occasionally and requires attentive listening.From 2.5 to 3Poor. The audio sounds choppy and requires considerable effort to understand.From 2 to 2.5Bad. The audio has occasional noise. Consecutive audio dropouts occur, resulting in some information loss. The users can communicate only with difficulty.Less than 2Very bad. The audio has persistent noise. Consecutive audio dropouts are frequent, resulting in severe information loss. Communication is nearly impossible. + @JsonKey(name: 'mosValue') + final int? mosValue; + + /// @nodoc + @JsonKey(name: 'frozenRateByCustomPlcCount') + final int? frozenRateByCustomPlcCount; + + /// @nodoc + @JsonKey(name: 'plcCount') + final int? plcCount; + + /// The total active time (ms) between the start of the audio call and the callback of the remote user.The active time refers to the total duration of the remote user without the mute state. + @JsonKey(name: 'totalActiveTime') + final int? totalActiveTime; + + /// The total duration (ms) of the remote audio stream. + @JsonKey(name: 'publishDuration') + final int? publishDuration; + + /// The Quality of Experience (QoE) of the local user when receiving a remote audio stream. + @JsonKey(name: 'qoeQuality') + final int? qoeQuality; + + /// Reasons why the QoE of the local user when receiving a remote audio stream is poor. See ExperiencePoorReason . + @JsonKey(name: 'qualityChangedReason') + final int? 
qualityChangedReason; + + /// @nodoc + @JsonKey(name: 'rxAudioBytes') + final int? rxAudioBytes; + + /// @nodoc + factory RemoteAudioStats.fromJson(Map json) => + _$RemoteAudioStatsFromJson(json); + + /// @nodoc + Map toJson() => _$RemoteAudioStatsToJson(this); +} + /// Statistics of the remote video stream. @JsonSerializable(explicitToJson: true, includeIfNull: false) class RemoteVideoStats { @@ -465,6 +577,7 @@ class RemoteVideoStats { const RemoteVideoStats( {this.uid, this.delay, + this.e2eDelay, this.width, this.height, this.receivedBitrate, @@ -478,8 +591,8 @@ class RemoteVideoStats { this.avSyncTimeMs, this.totalActiveTime, this.publishDuration, - this.superResolutionType, - this.mosValue}); + this.mosValue, + this.rxVideoBytes}); /// The user ID of the remote user sending the video stream. @JsonKey(name: 'uid') @@ -489,6 +602,10 @@ class RemoteVideoStats { @JsonKey(name: 'delay') final int? delay; + /// End-to-end video latency (ms). That is, the time elapsed from the video capturing on the remote user's end to the receiving and rendering of the video on the local user's end. + @JsonKey(name: 'e2eDelay') + final int? e2eDelay; + /// The width (pixels) of the video. @JsonKey(name: 'width') final int? width; @@ -541,14 +658,14 @@ class RemoteVideoStats { @JsonKey(name: 'publishDuration') final int? publishDuration; - /// The state of super resolution:>0: Super resolution is enabled.=0: Super resolution is not enabled. - @JsonKey(name: 'superResolutionType') - final int? superResolutionType; - /// @nodoc @JsonKey(name: 'mosValue') final int? mosValue; + /// @nodoc + @JsonKey(name: 'rxVideoBytes') + final int? rxVideoBytes; + /// @nodoc factory RemoteVideoStats.fromJson(Map json) => _$RemoteVideoStatsFromJson(json); @@ -818,24 +935,6 @@ class PublisherConfiguration { Map toJson() => _$PublisherConfigurationToJson(this); } -/// @nodoc -@JsonSerializable(explicitToJson: true, includeIfNull: false) -class AudioTrackConfig { - /// @nodoc - const AudioTrackConfig({this.enableLocalPlayback}); - - /// @nodoc - @JsonKey(name: 'enableLocalPlayback') - final bool? enableLocalPlayback; - - /// @nodoc - factory AudioTrackConfig.fromJson(Map json) => - _$AudioTrackConfigFromJson(json); - - /// @nodoc - Map toJson() => _$AudioTrackConfigToJson(this); -} - /// The camera direction. @JsonEnum(alwaysCreate: true) enum CameraDirection { @@ -912,7 +1011,7 @@ class CameraCapturerConfiguration { @JsonKey(name: 'format') final VideoFormat? format; - /// Whether to follow the video aspect ratio set in setVideoEncoderConfiguration :true: (Default) Follow the set video aspect ratio. The SDK crops the captured video according to the set video aspect ratio and synchronously changes the local preview screen and the video frame in onCaptureVideoFrame and onPreEncodeVideoFrame .false: Do not follow the set video aspect ratio. The SDK does not change the aspect ratio of the captured video frame. + /// Whether to follow the video aspect ratio set in setVideoEncoderConfiguration :true: (Default) Follow the set video aspect ratio. The SDK crops the captured video according to the set video aspect ratio and synchronously changes the local preview screen and the video frame in onCaptureVideoFrame and onPreEncodeVideoFrame .false: Do not follow the set video aspect ratio. The SDK does not change the aspect ratio of the captured video frame. @JsonKey(name: 'followEncodeDimensionRatio') final bool?
followEncodeDimensionRatio; @@ -948,7 +1047,7 @@ class ScreenCaptureConfiguration { @JsonKey(name: 'screenRect') final Rectangle? screenRect; - /// (For Windows and macOS only) Window ID.This parameter takes effect only when you want to capture the window. + /// (For Windows and macOS only) Window ID.This parameter takes effect only when you want to capture the window. @JsonKey(name: 'windowId') final int? windowId; @@ -956,7 +1055,7 @@ class ScreenCaptureConfiguration { @JsonKey(name: 'params') final ScreenCaptureParameters? params; - /// (For Windows and macOS only) The relative position of the shared region to the whole screen. See Rectangle .If you do not set this parameter, the SDK shares the whole screen. If the region you set exceeds the boundary of the screen, only the region within in the screen is shared. If you setwidth or height in Rectangle as 0, the whole screen is shared. + /// (For Windows and macOS only) The relative position of the shared region to the whole screen. See Rectangle . If you do not set this parameter, the SDK shares the whole screen. If the region you set exceeds the boundary of the screen, only the region within the screen is shared. If you set width or height in Rectangle as 0, the whole screen is shared. @JsonKey(name: 'regionRect') final Rectangle? regionRect; @@ -1067,7 +1166,9 @@ class ScreenCaptureSourceInfo { this.sourceTitle, this.primaryMonitor, this.isOccluded, - this.minimizeWindow}); + this.position, + this.minimizeWindow, + this.sourceDisplayId}); /// The type of the shared target. See ScreenCaptureSourceType . @JsonKey(name: 'type') @@ -1081,11 +1182,11 @@ class ScreenCaptureSourceInfo { @JsonKey(name: 'sourceName') final String? sourceName; - /// The image content of the thumbnail. See + /// The image content of the thumbnail. See ThumbImageBuffer . @JsonKey(name: 'thumbImage') final ThumbImageBuffer? thumbImage; - /// The image content of the icon. See + /// The image content of the icon. See ThumbImageBuffer . @JsonKey(name: 'iconImage') final ThumbImageBuffer? iconImage; @@ -1105,10 +1206,18 @@ class ScreenCaptureSourceInfo { @JsonKey(name: 'isOccluded') final bool? isOccluded; + /// The position of a window relative to the entire screen space (including all shareable screens). See Rectangle . + @JsonKey(name: 'position') + final Rectangle? position; + /// (For Windows only) Whether the window is minimized:true: The window is minimized.false: The window is not minimized. @JsonKey(name: 'minimizeWindow') final bool? minimizeWindow; + /// (For Windows only) Screen ID where the window is located. If the window is displayed across multiple screens, this parameter indicates the ID of the screen with which the window has the largest intersection area. If the window is located outside of the visible screens, the value of this member is -2. + @JsonKey(name: 'sourceDisplayId') + final int? sourceDisplayId; + /// @nodoc factory ScreenCaptureSourceInfo.fromJson(Map json) => _$ScreenCaptureSourceInfoFromJson(json); @@ -1135,13 +1244,13 @@ class AdvancedAudioOptions { Map toJson() => _$AdvancedAudioOptionsToJson(this); } -/// Image configurations +/// Image configurations. @JsonSerializable(explicitToJson: true, includeIfNull: false) class ImageTrackOptions { /// @nodoc const ImageTrackOptions({this.imageUrl, this.fps, this.mirrorMode}); - /// The URL of the image that you want to use to replace the video feeds. The image must be in PNG format. This method supports adding an image from the local absolute or relative file path.
+ /// The URL of the image that you want to use to replace the video feeds. The image must be in PNG format. This method supports adding an image from the local absolute or relative file path.On the Android platform, adding images from /assets/ is not supported. @JsonKey(name: 'imageUrl') final String? imageUrl; @@ -1162,7 +1271,7 @@ class ImageTrackOptions { } /// The channel media options. -/// Agora supports publishing multiple audio streams and one video stream at the same time and in the same RtcConnection . For example, publishMicrophoneTrack, publishAudioTrack, publishCustomAudioTrack, and publishMediaPlayerAudioTrack can be set as true at the same time, but only one of publishCameraTrack, publishCustomVideoTrack, or publishEncodedVideoTrack can be set as true. +/// Agora supports publishing multiple audio streams and one video stream at the same time and in the same RtcConnection . For example, publishMicrophoneTrack, publishAudioTrack, publishCustomAudioTrack, and publishMediaPlayerAudioTrack can be set as true at the same time, but only one of publishCameraTrack, publishScreenCaptureVideo, publishScreenTrack, publishCustomVideoTrack, or publishEncodedVideoTrack can be set as true.Agora recommends that you set member parameter values yourself according to your business scenario, otherwise the SDK will automatically assign values to member parameters. @JsonSerializable(explicitToJson: true, includeIfNull: false) class ChannelMediaOptions { /// @nodoc @@ -1175,15 +1284,12 @@ class ChannelMediaOptions { this.publishScreenTrack, this.publishSecondaryScreenTrack, this.publishCustomAudioTrack, - this.publishCustomAudioSourceId, - this.publishCustomAudioTrackEnableAec, - this.publishDirectCustomAudioTrack, - this.publishCustomAudioTrackAec, + this.publishCustomAudioTrackId, this.publishCustomVideoTrack, this.publishEncodedVideoTrack, this.publishMediaPlayerAudioTrack, this.publishMediaPlayerVideoTrack, - this.publishTrancodedVideoTrack, + this.publishTranscodedVideoTrack, this.autoSubscribeAudio, this.autoSubscribeVideo, this.enableAudioRecordingOrPlayout, @@ -1201,83 +1307,71 @@ class ChannelMediaOptions { this.customVideoTrackId, this.isAudioFilterable}); - /// Whether to publish the video captured by the camera:true: (Default) Publish the video captured by the camera.false: Do not publish the video captured by the camera. + /// Whether to publish the video captured by the camera:true: Publish the video captured by the camera.false: Do not publish the video captured by the camera. @JsonKey(name: 'publishCameraTrack') final bool? publishCameraTrack; - /// Whether to publish the video captured by the second camera: true: Publish the video captured by the second camera. false: (Default) Do not publish the video captured by the second camera. + /// Whether to publish the video captured by the second camera:true: Publish the video captured by the second camera.false: Do not publish the video captured by the second camera. @JsonKey(name: 'publishSecondaryCameraTrack') final bool? publishSecondaryCameraTrack; - /// Whether to publish the audio captured by the microphone:true: (Default) Publish the audio captured by the microphone.false: Do not publish the audio captured by the microphone. + /// Whether to publish the audio captured by the microphone:true: Publish the audio captured by the microphone.false: Do not publish the audio captured by the microphone. @JsonKey(name: 'publishMicrophoneTrack') final bool?
publishMicrophoneTrack; - /// Whether to publish the video captured from the screen:true: Publish the video captured from the screen.false: (Default) Do not publish the captured video from the screen.This parameter applies to Android and iOS only. + /// Whether to publish the video captured from the screen:true: Publish the video captured from the screen.false: Do not publish the captured video from the screen.This parameter applies to Android and iOS only. @JsonKey(name: 'publishScreenCaptureVideo') final bool? publishScreenCaptureVideo; - /// Whether to publish the audio captured from the screen: true: Publish the audio captured from the screen. false: (Default) Do not publish the audio captured from the screen. This parameter applies to Android and iOS only. + /// Whether to publish the audio captured from the screen:true: Publish the audio captured from the screen.false: Do not publish the audio captured from the screen.This parameter applies to Android and iOS only. @JsonKey(name: 'publishScreenCaptureAudio') final bool? publishScreenCaptureAudio; - /// Whether to publish the video captured from the screen: true: Publish the video captured from the screen. false: (Default) Do not publish the captured video from the screen. + /// Whether to publish the video captured from the screen:true: Publish the video captured from the screen.false: (Default) Do not publish the video captured from the screen. @JsonKey(name: 'publishScreenTrack') final bool? publishScreenTrack; - /// Whether to publish the video captured from the second screen:true: Publish the captured video from the second screen.false: (Default) Do not publish the video captured from the second screen. + /// Whether to publish the video captured from the second screen:true: Publish the video captured from the second screen.false: Do not publish the video captured from the second screen. @JsonKey(name: 'publishSecondaryScreenTrack') final bool? publishSecondaryScreenTrack; - /// Whether to publish the audio captured from a custom source:true: Publish the captured audio from a custom source.false: (Default) Do not publish the audio captured from the custom source. + /// Whether to publish the audio captured from a custom source:true: Publish the audio captured from the custom source.false: Do not publish the captured audio from a custom source. @JsonKey(name: 'publishCustomAudioTrack') final bool? publishCustomAudioTrack; - /// The ID of the custom audio source to publish. The default value is 0.If you have set the value of sourceNumber greater than 1 in setExternalAudioSource , the SDK creates the corresponding number of custom audio tracks and assigns an ID to each audio track starting from 0. - @JsonKey(name: 'publishCustomAudioSourceId') - final int? publishCustomAudioSourceId; - - /// Whether to enable AEC when publishing the audio captured from a custom source:true: Enable AEC when publishing the captured audio from a custom source.false: (Default) Do not enable AEC when publishing the audio captured from the custom source. - @JsonKey(name: 'publishCustomAudioTrackEnableAec') - final bool? publishCustomAudioTrackEnableAec; - - /// @nodoc - @JsonKey(name: 'publishDirectCustomAudioTrack') - final bool? publishDirectCustomAudioTrack; - - /// @nodoc - @JsonKey(name: 'publishCustomAudioTrackAec') - final bool? publishCustomAudioTrackAec; + /// The ID of the custom audio source to publish.
The default value is 0.If you have set sourceNumber in setExternalAudioSource to a value greater than 1, the SDK creates the corresponding number of custom audio tracks and assigns an ID to each audio track, starting from 0. + @JsonKey(name: 'publishCustomAudioTrackId') + final int? publishCustomAudioTrackId; - /// Whether to publish the video captured from a custom source:true: Publish the captured video from a custom source.false: (Default) Do not publish the video captured from the custom source. + /// Whether to publish the video captured from a custom source:true: Publish the video captured from the custom source.false: Do not publish the captured video from a custom source. @JsonKey(name: 'publishCustomVideoTrack') final bool? publishCustomVideoTrack; - /// Whether to publish the encoded video:true: Publish the encoded video.false: (Default) Do not publish the encoded video. + /// Whether to publish the encoded video:true: Publish the encoded video.false: Do not publish the encoded video. @JsonKey(name: 'publishEncodedVideoTrack') final bool? publishEncodedVideoTrack; - /// Whether to publish the audio from the media player:true: Publish the audio from the media player.false: (Default) Do not publish the audio from the media player. + /// Whether to publish the audio from the media player:true: Publish the audio from the media player.false: Do not publish the audio from the media player. @JsonKey(name: 'publishMediaPlayerAudioTrack') final bool? publishMediaPlayerAudioTrack; - /// Whether to publish the video from the media player:true: Publish the video from the media player.false: (Default) Do not publish the video from the media player. + /// Whether to publish the video from the media player:true: Publish the video from the media player.false: Do not publish the video from the media player. @JsonKey(name: 'publishMediaPlayerVideoTrack') final bool? publishMediaPlayerVideoTrack; - /// Whether to publish the local transcoded video:true: Publish the local transcoded video.false: (Default) Do not publish the local transcoded video. - @JsonKey(name: 'publishTrancodedVideoTrack') - final bool? publishTrancodedVideoTrack; + /// Whether to publish the local transcoded video:true: Publish the local transcoded video.false: Do not publish the local transcoded video. + @JsonKey(name: 'publishTranscodedVideoTrack') + final bool? publishTranscodedVideoTrack; - /// Whether to automatically subscribe to all remote audio streams when the user joins a channel:true: (Default) Subscribe to all remote audio streams.false: Do not automatically subscribe to any remote audio streams. + /// Whether to automatically subscribe to all remote audio streams when the user joins a channel:true: Subscribe to all remote audio streams.false: Do not automatically subscribe to any remote audio streams. @JsonKey(name: 'autoSubscribeAudio') final bool? autoSubscribeAudio; - /// Whether to automatically subscribe to all remote video streams when the user joins the channel:true: (Default) Subscribe to all remote video streams.false: Do not automatically subscribe to any remote video streams. + /// Whether to automatically subscribe to all remote video streams when the user joins the channel:true: Subscribe to all remote video streams.false: Do not automatically subscribe to any remote video streams. @JsonKey(name: 'autoSubscribeVideo') final bool? autoSubscribeVideo; - /// Whether to enable audio capturing or playback:true: (Default) Enable audio capturing or playback.false: Do not enable audio capturing or playback. 
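Note: a hedged sketch of how publishCustomAudioTrackId (renamed from publishCustomAudioSourceId) pairs with the sourceNumber setting mentioned above; the setExternalAudioSource parameters shown are assumptions, not confirmed by this patch:

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

/// Publishes the first of two external audio tracks.
Future<void> publishExternalAudio(RtcEngine engine) async {
  // Assumed signature; the docs above only confirm that `sourceNumber` > 1
  // makes the SDK create that many custom audio tracks with IDs from 0.
  await engine.getMediaEngine().setExternalAudioSource(
        enabled: true,
        sampleRate: 48000,
        channels: 2,
        sourceNumber: 2, // the SDK creates track IDs 0 and 1
      );
  await engine.updateChannelMediaOptions(const ChannelMediaOptions(
    publishCustomAudioTrack: true,
    publishCustomAudioTrackId: 0, // renamed from publishCustomAudioSourceId
  ));
}
```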
+ /// Whether to enable audio capturing or playback:true: Enable audio capturing or playback.false: Do not enable audio capturing or playback. @JsonKey(name: 'enableAudioRecordingOrPlayout') final bool? enableAudioRecordingOrPlayout; @@ -1290,17 +1384,14 @@ class ChannelMediaOptions { final ClientRoleType? clientRoleType; /// The latency level of an audience member in interactive live streaming. See AudienceLatencyLevelType . - /// @JsonKey(name: 'audienceLatencyLevel') final AudienceLatencyLevelType? audienceLatencyLevel; /// The default video-stream type. See VideoStreamType . - /// @JsonKey(name: 'defaultVideoStreamType') final VideoStreamType? defaultVideoStreamType; /// The channel profile. See ChannelProfileType . - /// @JsonKey(name: 'channelProfile') final ChannelProfileType? channelProfile; @@ -1312,7 +1403,7 @@ class ChannelMediaOptions { @JsonKey(name: 'mediaPlayerAudioDelayMs') final int? mediaPlayerAudioDelayMs; - /// (Optional) The token generated on your server for authentication. See This parameter takes effect only when calling updateChannelMediaOptions or updateChannelMediaOptionsEx .Ensure that the App ID, channel name, and user name used for creating the token are the same as those used by the initialize method for initializing the RTC engine, and those used by the joinChannel [2/2] and joinChannelEx methods for joining the channel. + /// (Optional) The token generated on your server for authentication. See This parameter takes effect only when calling updateChannelMediaOptions or updateChannelMediaOptionsEx .Ensure that the App ID, channel name, and user name used for creating the token are the same as those used by the initialize method for initializing the RTC engine, and those used by the joinChannel and joinChannelEx methods for joining the channel. @JsonKey(name: 'token') final String? token; @@ -1320,19 +1411,19 @@ class ChannelMediaOptions { @JsonKey(name: 'enableBuiltInMediaEncryption') final bool? enableBuiltInMediaEncryption; - /// Whether to publish the sound of a metronome to remote users:true: (Default) Publish the sound of the metronome. Both the local user and remote users can hear the metronome.false: Do not publish the sound of the metronome. Only the local user can hear the metronome. + /// Whether to publish the sound of a metronome to remote users:true: Publish the sound of the metronome. Both the local user and remote users can hear the metronome.false: Do not publish the sound of the metronome. Only the local user can hear the metronome. @JsonKey(name: 'publishRhythmPlayerTrack') final bool? publishRhythmPlayerTrack; - /// Whether to enable interactive mode:true: Enable interactive mode. Once this mode is enabled and the user role is set as audience, the user can receive remote video streams with low latency.false: (Default) Do not enable interactive mode. If this mode is disabled, the user receives the remote video streams in default settings.This parameter only applies to scenarios involving cohosting across channels. The cohosts need to call the joinChannelEx method to join the other host's channel as an audience member, and set isInteractiveAudience to true.This parameter takes effect only when the user role is clientRoleAudience. + /// Whether to enable interactive mode:true: Enable interactive mode. Once this mode is enabled and the user role is set as audience, the user can receive remote video streams with low latency.false: Do not enable interactive mode.
If this mode is disabled, the user receives the remote video streams in default settings.This parameter only applies to scenarios involving cohosting across channels. The cohosts need to call the joinChannelEx method to join the other host's channel as an audience member, and set isInteractiveAudience to true.This parameter takes effect only when the user role is clientRoleAudience. @JsonKey(name: 'isInteractiveAudience') final bool? isInteractiveAudience; - /// The video track ID returned by calling createCustomVideoTrack the method. The default value is 0. + /// The video track ID returned by calling the createCustomVideoTrack method. The default value is 0. @JsonKey(name: 'customVideoTrackId') final int? customVideoTrackId; - /// Whether the audio stream being published is filtered according to the volume algorithm:true: (Default) The audio stream is filtered. If the audio stream filter is not enabled, this setting does not takes effect.false: The audio stream is not filtered.If you need to enable this function, contact . + /// Whether the audio stream being published is filtered according to the volume algorithm:true: The audio stream is filtered. If the audio stream filter is not enabled, this setting does not take effect.false: The audio stream is not filtered.If you need to enable this function, contact . @JsonKey(name: 'isAudioFilterable') final bool? isAudioFilterable; @@ -1391,6 +1482,14 @@ enum ProxyType { /// 4: Automatic mode. In this mode, the SDK attempts a direct connection to SD-RTN™ and automatically switches to TCP/TLS 443 if the attempt fails. @JsonValue(4) tcpProxyAutoFallbackType, + + /// @nodoc + @JsonValue(5) + httpProxyType, + + /// @nodoc + @JsonValue(6) + httpsProxyType, } /// @nodoc @@ -1568,7 +1667,6 @@ class RtcEngineEventHandler { this.onUserEnableVideo, this.onUserStateChanged, this.onUserEnableLocalVideo, - this.onApiCallExecuted, this.onLocalAudioStats, this.onRemoteAudioStats, this.onLocalVideoStats, @@ -1627,30 +1725,32 @@ class RtcEngineEventHandler { this.onExtensionStopped, this.onExtensionError, this.onUserAccountUpdated, + this.onLocalVideoTranscoderError, + this.onVideoRenderingTracingResult, }); /// Occurs when a user joins a channel. /// This callback notifies the application that a user joins a specified channel. /// /// * [connection] The connection information. See RtcConnection . - /// * [elapsed] The time elapsed (ms) from the local user calling joinChannel [2/2] until the SDK triggers this callback. + /// * [elapsed] The time elapsed (ms) from the local user calling joinChannel until the SDK triggers this callback. final void Function(RtcConnection connection, int elapsed)? onJoinChannelSuccess; /// Occurs when a user rejoins the channel. /// When a user loses connection with the server because of network problems, the SDK automatically tries to reconnect and triggers this callback upon reconnection. /// - /// * [elapsed] Time elapsed (ms) from the local user calling the joinChannel [1/2] or joinChannel [2/2] method until this callback is triggered. + /// * [elapsed] Time elapsed (ms) from the local user calling joinChannel until the SDK triggers this callback. final void Function(RtcConnection connection, int elapsed)? onRejoinChannelSuccess; /// Reports the proxy connection state. - /// You can use this callback to listen for the state of the SDK connecting to a proxy.
For example, when a user calls setCloudProxy and joins a channel successfully, the SDK triggers this callback to report the user ID, the proxy type connected, and the time elapsed fromthe user calling joinChannel [1/2] until this callback is triggered. + /// You can use this callback to listen for the state of the SDK connecting to a proxy. For example, when a user calls setCloudProxy and joins a channel successfully, the SDK triggers this callback to report the user ID, the proxy type connected, and the time elapsed from the user calling joinChannel until this callback is triggered. /// /// * [channel] The channel name. /// * [uid] The user ID. /// * [localProxyIp] Reserved for future use. - /// * [elapsed] The time elapsed (ms) from the user calling joinChannel [1/2] until this callback is triggered. + /// * [elapsed] The time elapsed (ms) from the user calling joinChannel until this callback is triggered. final void Function(String channel, int uid, ProxyType proxyType, String localProxyIp, int elapsed)? onProxyConnected; @@ -1661,14 +1761,14 @@ class RtcEngineEventHandler { /// * [msg] The error message. final void Function(ErrorCodeType err, String msg)? onError; - /// Reports the statistics of the audio stream from each remote user. - /// Deprecated:Please use onRemoteAudioStats instead.The SDK triggers this callback once every two seconds to report the audio quality of each remote user/host sending an audio stream. If a channel has multiple users/hosts sending audio streams, the SDK triggers this callback as many times. + /// Reports the statistics of the audio stream sent by each remote user. + /// Deprecated:Use onRemoteAudioStats instead.The SDK triggers this callback once every two seconds to report the audio quality of each remote user who is sending an audio stream. If a channel has multiple users sending audio streams, the SDK triggers this callback as many times. /// /// * [connection] The connection information. See RtcConnection . /// * [remoteUid] The user ID of the remote user sending the audio stream. /// * [quality] Audio quality of the user. See QualityType . /// * [delay] The network delay (ms) from the sender to the receiver, including the delay caused by audio sampling pre-processing, network transmission, and network jitter buffering. - /// * [lost] The packet loss rate (%) of the audio packet sent from the remote user. + /// * [lost] The packet loss rate (%) of the audio packet sent from the remote user to the receiver. final void Function(RtcConnection connection, int remoteUid, QualityType quality, int delay, int lost)? onAudioQuality; @@ -1679,12 +1779,12 @@ class RtcEngineEventHandler { final void Function(LastmileProbeResult result)? onLastmileProbeResult; /// Reports the volume information of users. - /// By default, this callback is disabled. You can enable it by calling enableAudioVolumeIndication . Once this callback is enabled and users send streams in the channel, the SDK triggers the onAudioVolumeIndication callback according to the time interval set in enableAudioVolumeIndication.
The SDK triggers two independent onAudioVolumeIndication callbacks simultaneously, which separately report the volume information of the local user who sends a stream and the remote users (up to three) whose instantaneous volume is the highest.Once this callback is enabled, if the local user calls the muteLocalAudioStream method for mute, the SDK continues to report the volume indication of the local user.20 seconds after a remote user whose volume is one of the three highest in the channel stops publishing the audio stream, the callback excludes this user's information; 20 seconds after all remote users stop publishing audio streams, the SDK stops triggering the callback for remote users. + /// By default, this callback is disabled. You can enable it by calling enableAudioVolumeIndication . Once this callback is enabled and users send streams in the channel, the SDK triggers the onAudioVolumeIndication callback according to the time interval set in enableAudioVolumeIndication. The SDK triggers two independent onAudioVolumeIndication callbacks simultaneously, which separately report the volume information of the local user who sends a stream and the remote users (up to three) whose instantaneous volume is the highest.Once this callback is enabled, if the local user calls the muteLocalAudioStream method to mute, the SDK continues to report the volume indication of the local user.If a remote user whose volume is one of the three highest in the channel stops publishing the audio stream for 20 seconds, the callback excludes this user's information; if all remote users stop publishing audio streams for 20 seconds, the SDK stops triggering the callback for remote users. /// /// * [connection] The connection information. See RtcConnection . - /// * [speakers] The volume information of the users. See AudioVolumeInfo . An empty speakers array in the callback indicates that no remote user is in the channel or sending a stream at the moment. + /// * [speakers] The volume information of the users. See AudioVolumeInfo . An empty speakers array in the callback indicates that no remote user is in the channel or is sending a stream. /// * [speakerNumber] The total number of users.In the callback for the local user, if the local user is sending streams, the value of speakerNumber is 1.In the callback for remote users, the value range of speakerNumber is [0,3]. If the number of remote users who send streams is greater than or equal to three, the value of speakerNumber is 3. - /// * [totalVolume] The volume of the speaker. The value ranges between 0 (lowest volume) and 255 (highest volume).In the callback for the local user, totalVolume is the volume of the local user who sends a stream.In the callback for remote users, totalVolume is the sum of the volume of the remote users (up to three) whose instantaneous volume are the highest. + /// * [totalVolume] The volume of the speaker. The value range is [0,255].In the callback for the local user, totalVolume is the volume of the local user who sends a stream.In the callback for remote users, totalVolume is the sum of the volume of all remote users (up to three) whose instantaneous volume is the highest. final void Function(RtcConnection connection, List speakers, int speakerNumber, int totalVolume)? onAudioVolumeIndication; @@ -1700,7 +1800,6 @@ class RtcEngineEventHandler { /// /// * [connection] The connection information. See RtcConnection . /// * [stats] Statistics of the RTC engine. See RtcStats . 
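Note: a minimal sketch wiring up the callbacks documented above; registerEventHandler and enableAudioVolumeIndication are existing RtcEngine methods, and the parameter values shown are illustrative only:

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

/// Registers the handlers documented above and enables volume reports.
Future<void> listenForStats(RtcEngine engine) async {
  engine.registerEventHandler(RtcEngineEventHandler(
    onJoinChannelSuccess: (connection, elapsed) =>
        print('joined ${connection.channelId} after ${elapsed}ms'),
    onAudioVolumeIndication:
        (connection, speakers, speakerNumber, totalVolume) {
      // An empty `speakers` list means no remote user is sending a stream.
      for (final s in speakers) {
        print('uid=${s.uid} volume=${s.volume}');
      }
    },
    onRtcStats: (connection, stats) => print('rtt=${stats.lastmileDelay}ms'),
  ));
  // Required before onAudioVolumeIndication fires (interval in ms).
  await engine.enableAudioVolumeIndication(
      interval: 200, smooth: 3, reportVad: true);
}
```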
- /// final void Function(RtcConnection connection, RtcStats stats)? onRtcStats; /// Occurs when the audio device state changes. @@ -1712,7 +1811,13 @@ class RtcEngineEventHandler { final void Function(String deviceId, MediaDeviceType deviceType, MediaDeviceStateType deviceState)? onAudioDeviceStateChanged; - /// @nodoc + /// Reports the playback progress of a music file. + /// After you call the startAudioMixing method to play a music file, the SDK triggers this callback every two seconds to report the playback progress. + /// + /// * [position] The playback progress (ms). + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown, and you need to catch the exception and handle it accordingly. final void Function(int position)? onAudioMixingPositionChanged; /// Occurs when the playback of the local music file finishes. @@ -1738,8 +1843,8 @@ class RtcEngineEventHandler { /// This callback reports the last mile network conditions of each user in the channel. Last mile refers to the connection between the local device and Agora's edge server.The SDK triggers this callback once every two seconds. If a channel includes multiple users, the SDK triggers this callback as many times. /// /// * [connection] The connection information. See RtcConnection . - /// * [remoteUid] The user ID. The network quality of the user with this user ID is reported. - /// * [txQuality] Uplink network quality rating of the user in terms of the transmission bit rate, packet loss rate, average RTT (Round-Trip Time) and jitter of the uplink network. This parameter is a quality rating helping you understand how well the current uplink network conditions can support the selected video encoder configuration. For example, a 1000 Kbps uplink network may be adequate for video frames with a resolution of 640 × 480 and a frame rate of 15 fps in the LIVE_BROADCASTING profile, but may be inadequate for resolutions higher than 1280 × 720. See QualityType . + /// * [remoteUid] The user ID. The network quality of the user with this user ID is reported. If the uid is 0, the local network quality is reported. + /// * [txQuality] Uplink network quality rating of the user in terms of the transmission bit rate, packet loss rate, average RTT (Round-Trip Time) and jitter of the uplink network. This parameter is a quality rating helping you understand how well the current uplink network conditions can support the selected video encoder configuration. For example, a 1000 Kbps uplink network may be adequate for video frames with a resolution of 640 × 480 and a frame rate of 15 fps in the LIVE_BROADCASTING profile, but might be inadequate for resolutions higher than 1280 × 720. See QualityType . /// * [rxQuality] Downlink network quality rating of the user in terms of packet loss rate, average RTT, and jitter of the downlink network. See QualityType . final void Function(RtcConnection connection, int remoteUid, QualityType txQuality, QualityType rxQuality)? onNetworkQuality; @@ -1759,34 +1864,25 @@ class RtcEngineEventHandler { /// Reports the last-mile network quality of the local user. /// This callback reports the last-mile network conditions of the local user before the user joins the channel.
Last mile refers to the connection between the local device and Agora's edge server.Before the user joins the channel, this callback is triggered by the SDK once startLastmileProbeTest is called and reports the last-mile network conditions of the local user. /// - /// * [quality] The last-mile network quality. - /// qualityUnknown(0): The quality is unknown. - /// qualityExcellent(1): The quality is excellent. - /// qualityGood(2): The network quality seems excellent, but the bitrate can be slightly lower than excellent. - /// qualityPoor(3): Users can feel the communication is slightly impaired. - /// qualityBad(4): Users cannot communicate smoothly. - /// qualityVbad(5): The quality is so bad that users can barely communicate. - /// qualityDown(6): The network is down, and users cannot communicate at all. - /// See QualityType . + /// * [quality] The last-mile network quality. qualityUnknown(0): The quality is unknown.qualityExcellent(1): The quality is excellent.qualityGood(2): The network quality seems excellent, but the bitrate can be slightly lower than excellent.qualityPoor(3): Users can feel the communication is slightly impaired.qualityBad(4): Users cannot communicate smoothly.qualityVbad(5): The quality is so bad that users can barely communicate.qualityDown(6): The network is down, and users cannot communicate at all.See QualityType . final void Function(QualityType quality)? onLastmileQuality; /// Occurs when the first local video frame is displayed on the local video view. /// The SDK triggers this callback when the first local video frame is displayed on the local video view. /// - /// * [source] The capture type of the custom video source. See VideoSourceType . - /// * [connection] The connection information. See RtcConnection . + /// * [source] The type of the video source. See VideoSourceType . /// * [width] The width (px) of the first local video frame. /// * [height] The height (px) of the first local video frame. - /// * [elapsed] Time elapsed (ms) from the local user calling joinChannel [2/2] until the SDK triggers this callback. If you call startPreview before calling joinChannel [2/2], then this parameter is the time elapsed from calling the startPreview method until the SDK triggers this callback. + /// * [elapsed] Time elapsed (ms) from the local user calling joinChannel until the SDK triggers this callback. If you call startPreview before calling joinChannel, then this parameter is the time elapsed from calling the startPreview method until the SDK triggers this callback. final void Function( VideoSourceType source, int width, int height, int elapsed)? onFirstLocalVideoFrame; /// Occurs when the first video frame is published. - /// The SDK triggers this callback under one of the following circumstances:The local client enables the video module and calls joinChannel [2/2] successfully.The local client calls muteLocalVideoStream (true) and muteLocalVideoStream(false) in sequence.The local client calls disableVideo and enableVideo in sequence. + /// The SDK triggers this callback under one of the following circumstances:The local client enables the video module and calls joinChannel successfully.The local client calls muteLocalVideoStream (true) and muteLocalVideoStream(false) in sequence.The local client calls disableVideo and enableVideo in sequence. /// /// * [connection] The connection information. See RtcConnection . - /// * [elapsed] Time elapsed (ms) from the local user calling joinChannel [2/2] until the SDK triggers this callback. 
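Note: a sketch of the last-mile probe flow described above; LastmileProbeConfig and startLastmileProbeTest are existing SDK names, while the bitrate values are illustrative assumptions:

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

/// Runs a last-mile probe before joining a channel, as described above.
Future<void> probeNetwork(RtcEngine engine) async {
  engine.registerEventHandler(RtcEngineEventHandler(
    onLastmileQuality: (quality) => print('last-mile quality: $quality'),
    onLastmileProbeResult: (result) =>
        print('uplink bandwidth: ${result.uplinkReport?.availableBandwidth}'),
  ));
  await engine.startLastmileProbeTest(const LastmileProbeConfig(
    probeUplink: true,
    probeDownlink: true,
    expectedUplinkBitrate: 100000, // illustrative, in bps
    expectedDownlinkBitrate: 100000,
  ));
}
```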
+ /// * [elapsed] Time elapsed (ms) from the local user calling joinChannel until the SDK triggers this callback. final void Function(RtcConnection connection, int elapsed)? onFirstLocalVideoFramePublished; @@ -1794,17 +1890,17 @@ class RtcEngineEventHandler { /// The SDK triggers this callback under one of the following circumstances:The remote user joins the channel and sends the video stream.The remote user stops sending the video stream and re-sends it after 15 seconds. Reasons for such an interruption include:The remote user leaves the channel.The remote user drops offline.The remote user calls muteLocalVideoStream to stop sending the video stream.The remote user calls disableVideo to disable video. /// /// * [connection] The connection information. See RtcConnection . - /// * [remoteUid] The ID of the remote user sending the video stream. + /// * [remoteUid] The user ID of the remote user sending the video stream. /// * [width] The width (px) of the video stream. /// * [height] The height (px) of the video stream. - /// * [elapsed] The time elapsed (ms) from the local user calling joinChannel [2/2] until the SDK triggers this callback. + /// * [elapsed] The time elapsed (ms) from the local user calling joinChannel until the SDK triggers this callback. final void Function(RtcConnection connection, int remoteUid, int width, int height, int elapsed)? onFirstRemoteVideoDecoded; /// Occurs when the video size or rotation of a specified user changes. /// /// * [connection] The connection information. See RtcConnection . - /// * [sourceType] The capture type of the custom video source. See VideoSourceType . + /// * [sourceType] The type of the video source. See VideoSourceType . /// * [uid] The ID of the user whose video size or rotation changes. (The uid for the local user is 0. The video is the local user's video preview). /// * [width] The width (pixels) of the video stream. /// * [height] The height (pixels) of the video stream. @@ -1813,9 +1909,9 @@ class RtcEngineEventHandler { int uid, int width, int height, int rotation)? onVideoSizeChanged; /// Occurs when the local video stream state changes. - /// When the state of the local video stream changes (including the state of the video capture and encoding), the SDK triggers this callback to report the current state. This callback indicates the state of the local video stream, including camera capturing and video encoding, and allows you to troubleshoot issues when exceptions occur.The SDK triggers the onLocalVideoStateChanged callback with the state code of localVideoStreamStateFailed and error code of localVideoStreamErrorCaptureFailure in the following situations:The app switches to the background, and the system gets the camera resource.The camera starts normally, but does not output video frames for four consecutive seconds.When the camera outputs the captured video frames, if the video frames are the same for 15 consecutive frames, the SDK triggers the onLocalVideoStateChanged callback with the state code of localVideoStreamStateCapturing and error code of localVideoStreamErrorCaptureFailure. Note that the video frame duplication detection is only available for video frames with a resolution greater than 200 × 200, a frame rate greater than or equal to 10 fps, and a bitrate less than 20 Kbps.For some device models, the SDK does not trigger this callback when the state of the local video changes while the local video capturing device is in use, so you have to make your own timeout judgment. 
+ /// When the state of the local video stream changes (including the state of the video capture and encoding), the SDK triggers this callback to report the current state. This callback indicates the state of the local video stream, including camera capturing and video encoding, and allows you to troubleshoot issues when exceptions occur.The SDK triggers the onLocalVideoStateChanged callback with the state code of localVideoStreamStateFailed and error code of localVideoStreamErrorCaptureFailure in the following situations:The app switches to the background, and the system gets the camera resource.If your app runs in the background on a device running Android 9 or later, you cannot access the camera.If your app runs in the background on a device running Android 6 or later, the camera is occupied by a third-party app. Once the camera is released, the SDK triggers the onLocalVideoStateChanged(localVideoStreamStateCapturing,localVideoStreamErrorOk) callback.The camera starts normally, but does not output video frames for four consecutive seconds.When the camera outputs the captured video frames, if the video frames are the same for 15 consecutive frames, the SDK triggers the onLocalVideoStateChanged callback with the state code of localVideoStreamStateCapturing and error code of localVideoStreamErrorCaptureFailure. Note that the video frame duplication detection is only available for video frames with a resolution greater than 200 × 200, a frame rate greater than or equal to 10 fps, and a bitrate less than 20 Kbps.For some device models, the SDK does not trigger this callback when the state of the local video changes while the local video capturing device is in use, so you have to make your own timeout judgment. /// - /// * [source] The capture type of the custom video source. See VideoSourceType . + /// * [source] The type of the video source. See VideoSourceType . /// * [state] The state of the local video, see LocalVideoStreamState . /// * [error] The detailed error information, see LocalVideoStreamError . final void Function(VideoSourceType source, LocalVideoStreamState state, @@ -1826,9 +1922,9 @@ class RtcEngineEventHandler { /// /// * [connection] The connection information. See RtcConnection . /// * [remoteUid] The ID of the remote user whose video state changes. - /// * [state] The state of the remote video, see RemoteVideoState . - /// * [reason] The reason for the remote video state change, see RemoteVideoStateReason . - /// * [elapsed] Time elapsed (ms) from the local user calling the joinChannel [2/2] method until the SDK triggers this callback. + /// * [state] The state of the remote video. See RemoteVideoState . + /// * [reason] The reason for the remote video state change. See RemoteVideoStateReason . + /// * [elapsed] Time elapsed (ms) from the local user calling the joinChannel method until the SDK triggers this callback. final void Function( RtcConnection connection, int remoteUid, @@ -1838,20 +1934,20 @@ class RtcEngineEventHandler { /// Occurs when the renderer receives the first frame of the remote video. /// - /// * [uid] The ID of the remote user sending the video stream. + /// * [uid] The user ID of the remote user sending the video stream. /// * [connection] The connection information. See RtcConnection . /// * [width] The width (px) of the video stream. /// * [height] The height (px) of the video stream. - /// * [elapsed] The time elapsed (ms) from the local user calling joinChannel [2/2] until the SDK triggers this callback. 
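Note: a sketch of handling the capture-failure cases described above; onProblem is a hypothetical app callback, not an SDK API:

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

/// Surfaces the capture failures documented above to the app layer.
void watchLocalVideo(RtcEngine engine, void Function(String) onProblem) {
  engine.registerEventHandler(RtcEngineEventHandler(
    onLocalVideoStateChanged: (source, state, error) {
      if (state == LocalVideoStreamState.localVideoStreamStateFailed &&
          error == LocalVideoStreamError.localVideoStreamErrorCaptureFailure) {
        // E.g. the app moved to the background and lost the camera resource.
        onProblem('camera capture failed for $source');
      }
    },
  ));
}
```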
+ /// * [elapsed] The time elapsed (ms) from the local user calling joinChannel until the SDK triggers this callback. final void Function(RtcConnection connection, int remoteUid, int width, int height, int elapsed)? onFirstRemoteVideoFrame; - /// Occurs when a remote user (COMMUNICATION)/ host (LIVE_BROADCASTING) joins the channel. - /// In a communication channel, this callback indicates that a remote user joins the channel. The SDK also triggers this callback to report the existing users in the channel when a user joins the channel.In a live-broadcast channel, this callback indicates that a host joins the channel. The SDK also triggers this callback to report the existing hosts in the channel when a host joins the channel. Agora recommends limiting the number of hosts to 17.The SDK triggers this callback under one of the following circumstances:A remote user/host joins the channel by calling the joinChannel [2/2] method.A remote user switches the user role to the host after joining the channel.A remote user/host rejoins the channel after a network interruption. + /// Occurs when a remote user (in the communication profile)/ host (in the live streaming profile) joins the channel. + /// In a communication channel, this callback indicates that a remote user joins the channel. The SDK also triggers this callback to report the existing users in the channel when a user joins the channel.In a live-broadcast channel, this callback indicates that a host joins the channel. The SDK also triggers this callback to report the existing hosts in the channel when a host joins the channel. Agora recommends limiting the number of hosts to 17.The SDK triggers this callback under one of the following circumstances:A remote user/host joins the channel.A remote user switches the user role to the host after joining the channel.A remote user/host rejoins the channel after a network interruption. /// /// * [connection] The connection information. See RtcConnection . /// * [remoteUid] The ID of the user or host who joins the channel. - /// * [elapsed] Time delay (ms) from the local user calling joinChannel [2/2] until this callback is triggered. + /// * [elapsed] Time delay (ms) from the local user calling joinChannel until this callback is triggered. final void Function(RtcConnection connection, int remoteUid, int elapsed)? onUserJoined; @@ -1869,12 +1965,12 @@ class RtcEngineEventHandler { /// /// * [connection] The connection information. See RtcConnection . /// * [remoteUid] The user ID. - /// * [muted] Whether the remote user's audio stream is muted/unmuted:true: User's audio stream is muted.false: User's audio stream is unmuted. + /// * [muted] Whether the remote user's audio stream is muted:true: User's audio stream is muted.false: User's audio stream is unmuted. final void Function(RtcConnection connection, int remoteUid, bool muted)? onUserMuteAudio; - /// Occurs when a remote user stops/resumes publishing the video stream. - /// When a remote user calls muteLocalVideoStream to stop or resume publishing the video stream, the SDK triggers this callback to report the state of the remote user's publishing stream to the local user.This callback can be inaccurate when the number of users (in the communication profile) or hosts (in the live streaming profile) in a channel exceeds 17. + /// Occurs when a remote user stops or resumes publishing the video stream.
+ /// When a remote user calls muteLocalVideoStream to stop or resume publishing the video stream, the SDK triggers this callback to report to the local user the state of the streams published by the remote user.This callback can be inaccurate when the number of users (in the communication profile) or hosts (in the live streaming profile) in a channel exceeds 17. /// /// * [connection] The connection information. See RtcConnection . /// * [remoteUid] The user ID of the remote user. @@ -1882,12 +1978,12 @@ class RtcEngineEventHandler { final void Function(RtcConnection connection, int remoteUid, bool muted)? onUserMuteVideo; - /// Occurs when a remote user enables/disables the video module. + /// Occurs when a remote user enables or disables the video module. /// Once the video module is disabled, the user can only use a voice call. The user cannot send or receive any video.The SDK triggers this callback when a remote user enables or disables the video module by calling the enableVideo or disableVideo method. /// /// * [connection] The connection information. See RtcConnection . /// * [remoteUid] The user ID of the remote user. - /// * [enabled] true: Enable.false: Disable. + /// * [enabled] true: The video module is enabled.false: The video module is disabled. final void Function(RtcConnection connection, int remoteUid, bool enabled)? onUserEnableVideo; @@ -1900,19 +1996,10 @@ class RtcEngineEventHandler { /// /// * [connection] The connection information. See RtcConnection . /// * [remoteUid] The user ID of the remote user. - /// * [enabled] Whether the specified remote user enables/disables the local video capturing function:true: Enable. Other users in the channel can see the video of this remote user.false: Disable. Other users in the channel can no longer receive the video stream from this remote user, while this remote user can still receive the video streams from other users. + /// * [enabled] Whether the specified remote user enables/disables the local video capturing function:true: The video module is enabled. Other users in the channel can see the video of this remote user.false: The video module is disabled. Other users in the channel can no longer receive the video stream from this remote user, while this remote user can still receive the video streams from other users. final void Function(RtcConnection connection, int remoteUid, bool enabled)? onUserEnableLocalVideo; - /// Occurs when a method is executed by the SDK. - /// Deprecated:Deprecated as of v4.1.0. This method can neither accurately characterize the specific API method nor represent the execution result of the API.Agora recommends getting the results of the API implementation through relevant channels and media callbacks. For example, after calling the enableLocalAudio method to enable the microphone, the status of the microphone is returned in the onLocalAudioStateChanged callback. - /// - /// * [err] The error code returned by the SDK when the method call fails. If the SDK returns 0, then the method call is successful. - /// * [api] The method executed by the SDK. - /// * [result] The result of the method call. - final void Function(ErrorCodeType err, String api, String result)? - onApiCallExecuted; - /// Reports the statistics of the local audio stream. /// The SDK triggers this callback once every two seconds. /// @@ -1921,11 +2008,11 @@ class RtcEngineEventHandler { final void Function(RtcConnection connection, LocalAudioStats stats)? 
onLocalAudioStats; - /// Reports the statistics of the audio stream sent by each remote users. - /// The SDK triggers this callback once every two seconds. If a channel includes multiple users, the SDK triggers this callback as many times. + /// Reports the transport-layer statistics of each remote audio stream. + /// The SDK triggers this callback once every two seconds for each remote user who is sending audio streams. If a channel includes multiple remote users, the SDK triggers this callback as many times. /// /// * [connection] The connection information. See RtcConnection . - /// * [stats] Statistics of the received remote audio stream. See RemoteAudioStats . + /// * [stats] The statistics of the received remote audio streams. See RemoteAudioStats . final void Function(RtcConnection connection, RemoteAudioStats stats)? onRemoteAudioStats; @@ -1940,7 +2027,8 @@ class RtcEngineEventHandler { /// Reports the statistics of the video stream sent by each remote user. /// Reports the statistics of the video stream from the remote users. The SDK triggers this callback once every two seconds for each remote user. If a channel has multiple users/hosts sending video streams, the SDK triggers this callback as many times. /// - /// * [stats] Statistics of the remote video stream. + /// * [connection] The connection information. See RtcConnection . + /// * [stats] Statistics of the remote video stream. See RemoteVideoStats . final void Function(RtcConnection connection, RemoteVideoStats stats)? onRemoteVideoStats; @@ -1949,15 +2037,17 @@ class RtcEngineEventHandler { final void Function()? onCameraReady; /// Occurs when the camera focus area changes. + /// The SDK triggers this callback when the local user changes the camera focus position by calling setCameraFocusPositionInPreview .This callback is for Android and iOS only. /// - /// * [x] The x-coordinate of the changed focus area. - /// * [y] The y-coordinate of the changed focus area. - /// * [width] The width of the focus area that changes. - /// * [height] The height of the focus area that changes. + /// * [x] The x-coordinate of the changed camera focus area. + /// * [y] The y-coordinate of the changed camera focus area. + /// * [width] The width of the changed camera focus area. + /// * [height] The height of the changed camera focus area. final void Function(int x, int y, int width, int height)? onCameraFocusAreaChanged; /// Occurs when the camera exposure area changes. + /// The SDK triggers this callback when the local user changes the camera exposure position by calling setCameraExposurePosition .This callback is for Android and iOS only. final void Function(int x, int y, int width, int height)? onCameraExposureAreaChanged; @@ -1969,8 +2059,12 @@ class RtcEngineEventHandler { /// * [vecRectangle] The information of the detected human face. See Rectangle . /// * [vecDistance] The distance between the human face and the device screen (cm). /// * [numFaces] The number of faces detected. If the value is 0, it means that no human face is detected. - final void Function(int imageWidth, int imageHeight, Rectangle vecRectangle, int vecDistance, int numFaces)? onFacePositionChanged; + final void Function( + int imageWidth, + int imageHeight, + List vecRectangle, + List vecDistance, + int numFaces)? onFacePositionChanged; /// Occurs when the video stops playing.
/// Deprecated:Use localVideoStreamStateStopped(0) in the onLocalVideoStateChanged callback instead.The application can use this callback to change the configuration of the view (for example, displaying other pictures in the view) after the video stops playing. @@ -1986,26 +2080,27 @@ class RtcEngineEventHandler { /// Occurs when the state of virtual metronome changes. /// When the state of the virtual metronome changes, the SDK triggers this callback to report the current state of the virtual metronome. This callback indicates the state of the local audio stream and enables you to troubleshoot issues when audio exceptions occur.This callback is for Android and iOS only. - /// * [state] For the current virtual metronome status, see RhythmPlayerStateType. - /// * [errorCode] For the error codes and error messages related to virtual metronome errors, see RhythmPlayerErrorType. + /// + /// * [state] For the current virtual metronome status, see RhythmPlayerStateType . + /// * [errorCode] For the error codes and error messages related to virtual metronome errors, see RhythmPlayerErrorType . final void Function( RhythmPlayerStateType state, RhythmPlayerErrorType errorCode)? onRhythmPlayerStateChanged; /// Occurs when the SDK cannot reconnect to Agora's edge server 10 seconds after its connection to the server is interrupted. - /// The SDK triggers this callback when it cannot connect to the server 10 seconds after calling the joinChannel [2/2] method, regardless of whether it is in the channel. If the SDK fails to rejoin the channel within 20 minutes after disconnecting, the SDK will stop trying to reconnect. + /// The SDK triggers this callback when it cannot connect to the server 10 seconds after calling the joinChannel method, regardless of whether it is in the channel. If the SDK fails to rejoin the channel 20 minutes after being disconnected from Agora's edge server, the SDK stops rejoining the channel. /// /// * [connection] The connection information. See RtcConnection . final void Function(RtcConnection connection)? onConnectionLost; /// Occurs when the connection between the SDK and the server is interrupted. - /// Deprecated:Use onConnectionStateChanged instead.The SDK triggers this callback when it loses connection with the server for more than four seconds after the connection is established. After triggering this callback, the SDK tries to reconnect to the server. You can use this callback to implement pop-up reminders. The difference between this callback and onConnectionLost is:The SDK triggers the onConnectionInterrupted callback when it loses connection with the server for more than four seconds after it successfully joins the channel.The SDK triggers the onConnectionLost callback when it loses connection with the server for more than 10 seconds, whether or not it joins the channel.If the SDK fails to rejoin the channel 20 minutes after being disconnected from Agora's edge server, the SDK stops rejoining the channel. + /// Deprecated:Use onConnectionStateChanged instead.The SDK triggers this callback when it loses connection with the server for more than four seconds after the connection is established. After triggering this callback, the SDK tries to reconnect to the server. You can use this callback to implement pop-up reminders. 
The differences between this callback and onConnectionLost are as follows:The SDK triggers the onConnectionInterrupted callback when it loses connection with the server for more than four seconds after it successfully joins the channel.The SDK triggers the onConnectionLost callback when it loses connection with the server for more than 10 seconds, whether or not it joins the channel.If the SDK fails to rejoin the channel 20 minutes after being disconnected from Agora's edge server, the SDK stops rejoining the channel. /// /// * [connection] The connection information. See RtcConnection . final void Function(RtcConnection connection)? onConnectionInterrupted; /// Occurs when the connection is banned by the Agora server. - /// Deprecated:Please use onConnectionStateChanged instead. + /// Deprecated:Use onConnectionStateChanged instead. /// /// * [connection] The connection information. See RtcConnection . final void Function(RtcConnection connection)? onConnectionBanned; @@ -2016,7 +2111,7 @@ class RtcEngineEventHandler { /// * [connection] The connection information. See RtcConnection . /// * [uid] The ID of the remote user sending the message. /// * [streamId] The stream ID of the received message. - /// * [data] received data. + /// * [data] The data received. /// * [length] The data length (byte). /// * [sentTs] The time when the data stream is sent. final void Function(RtcConnection connection, int remoteUid, int streamId, @@ -2035,13 +2130,13 @@ class RtcEngineEventHandler { ErrorCodeType code, int missed, int cached)? onStreamMessageError; /// Occurs when the token expires. - /// When the token expires during a call, the SDK triggers this callback to remind the app to renew the token.Once you receive this callback, generate a new token on your app server, and call joinChannel [2/2] to rejoin the channel. + /// When the token expires during a call, the SDK triggers this callback to remind the app to renew the token.Once you receive this callback, you need to generate a new token on your app server, and call joinChannel to rejoin the channel. /// /// * [connection] The connection information. See RtcConnection . final void Function(RtcConnection connection)? onRequestToken; /// Occurs when the token expires in 30 seconds. - /// When the token is about to expire in 30 seconds, the SDK triggers this callback to remind the app to renew the token.Upon receiving this callback, generate a new token on your server, and call renewToken to pass the new token to the SDK. + /// When the token is about to expire in 30 seconds, the SDK triggers this callback to remind the app to renew the token.Upon receiving this callback, you need to generate a new token on your server, and call renewToken to pass the new token to the SDK. /// /// * [connection] The connection information. See RtcConnection . /// * [token] The token that expires in 30 seconds. @@ -2053,28 +2148,28 @@ class RtcEngineEventHandler { onLicenseValidationFailure; /// Occurs when the first audio frame is published. - /// The SDK triggers this callback under one of the following circumstances:The local client enables the audio module and calls joinChannel [2/2] successfully.The local client calls muteLocalAudioStream (true) and muteLocalAudioStream(false) in sequence.The local client calls disableAudio and enableAudio in sequence.
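Note: a sketch of the token lifecycle implied by onTokenPrivilegeWillExpire and onRequestToken above; fetchToken stands in for your app-server call and is not an SDK API:

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

/// Keeps the token fresh using the callbacks documented above.
void keepTokenFresh(RtcEngine engine, Future<String> Function() fetchToken) {
  engine.registerEventHandler(RtcEngineEventHandler(
    onTokenPrivilegeWillExpire: (connection, token) async {
      // Fires ~30 seconds before expiry: renew in place.
      await engine.renewToken(await fetchToken());
    },
    onRequestToken: (connection) async {
      // The token has already expired: a new token is needed to rejoin.
      final newToken = await fetchToken();
      print('rejoin ${connection.channelId} with $newToken');
    },
  ));
}
```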
+ /// The SDK triggers this callback under one of the following circumstances:The local client enables the audio module and calls joinChannel successfully.The local client calls muteLocalAudioStream (true) and muteLocalAudioStream(false) in sequence.The local client calls disableAudio and enableAudio in sequence. /// /// * [connection] The connection information. See RtcConnection . - /// * [elapsed] Time elapsed (ms) from the local user calling joinChannel [2/2] until the SDK triggers this callback. + /// * [elapsed] Time elapsed (ms) from the local user calling joinChannel until the SDK triggers this callback. final void Function(RtcConnection connection, int elapsed)? onFirstLocalAudioFramePublished; - /// Occurs when the first audio frame sent by a specified remote user is received. + /// Occurs when the SDK receives the first audio frame from a specific remote user. /// Deprecated:Use onRemoteAudioStateChanged instead. /// /// * [connection] The connection information. See RtcConnection . - /// * [userId] The ID of the remote user sending the audio frames. - /// * [elapsed] The time elapsed (ms) from the local user calling the joinChannel [2/2] method until the SDK triggers this callback. + /// * [userId] The user ID of the remote user. + /// * [elapsed] The time elapsed (ms) from the local user calling joinChannel until the SDK triggers this callback. final void Function(RtcConnection connection, int userId, int elapsed)? onFirstRemoteAudioFrame; /// Occurs when the SDK decodes the first remote audio frame for playback. - /// Deprecated:Use onRemoteAudioStateChanged instead.The SDK triggers this callback under one of the following circumstances:The remote user joins the channel and sends the audio stream.The remote user stops sending the audio stream and re-sends it after 15 seconds, and the possible reasons include:The remote user leaves the channel.The remote user is offline.The remote user calls muteLocalAudioStream to stop sending the video stream.The remote user calls disableAudio to disable video. + /// Deprecated:Use onRemoteAudioStateChanged instead.The SDK triggers this callback under one of the following circumstances:The remote user joins the channel and sends the audio stream for the first time.The remote user's audio is offline and then goes online to re-send audio. It means the local user cannot receive audio for 15 seconds. Reasons for such an interruption include:The remote user leaves the channel.The remote user drops offline.The remote user calls muteLocalAudioStream to stop sending the audio stream.The remote user calls disableAudio to disable audio. /// /// * [connection] The connection information. See RtcConnection . /// * [uid] The user ID of the remote user. - /// * [elapsed] The time elapsed (ms) from the local user calling the joinChannel [2/2] method until the SDK triggers this callback. + /// * [elapsed] The time elapsed (ms) from the local user calling joinChannel until the SDK triggers this callback. final void Function(RtcConnection connection, int uid, int elapsed)? onFirstRemoteAudioDecoded; @@ -2082,7 +2177,7 @@ class RtcEngineEventHandler { /// Occurs when the local audio state changes. /// When the state of the local audio stream changes (including the state of the audio capture and encoding), the SDK triggers this callback to report the current state. This callback indicates the state of the local audio stream, and allows you to troubleshoot issues when audio exceptions occur.When the state is localAudioStreamStateFailed (3), you can view the error information in the error parameter.
@@ -2094,7 +2189,7 @@ class RtcEngineEventHandler { /// * [remoteUid] The ID of the remote user whose audio state changes. /// * [state] The state of the remote audio. See RemoteAudioState . /// * [reason] The reason of the remote audio state change. See RemoteAudioStateReason . - /// * [elapsed] Time elapsed (ms) from the local user calling the joinChannel [2/2] method until the SDK triggers this callback. + /// * [elapsed] Time elapsed (ms) from the local user calling the joinChannel method until the SDK triggers this callback. final void Function( RtcConnection connection, int remoteUid, @@ -2103,10 +2198,10 @@ class RtcEngineEventHandler { int elapsed)? onRemoteAudioStateChanged; /// Occurs when the most active remote speaker is detected. - /// After a successful call of enableAudioVolumeIndication , the SDK continuously detects which remote user has the loudest volume. During the current period, the remote user, who is detected as the loudest for the most times, is the most active user.When the number of users is no less than two and an active remote speaker exists, the SDK triggers this callback and reports the uid of the most active remote speaker.If the most active remote speaker is always the same user, the SDK triggers the onActiveSpeaker callback only once.If the most active remote speaker changes to another user, the SDK triggers this callback again and reports the uid of the new active remote speaker. + /// After a successful call of enableAudioVolumeIndication , the SDK continuously detects which remote user has the loudest volume. During the current period, the remote user whose volume is detected as the loudest for the most times is the most active user.When the number of users is no less than two and an active remote speaker exists, the SDK triggers this callback and reports the uid of the most active remote speaker.If the most active remote speaker is always the same user, the SDK triggers the onActiveSpeaker callback only once.If the most active remote speaker changes to another user, the SDK triggers this callback again and reports the uid of the new active remote speaker. /// /// * [connection] The connection information. See RtcConnection . - /// * [uid] The user ID of the most active remote speaker. + /// * [uid] The user ID of the most active speaker. final void Function(RtcConnection connection, int uid)? onActiveSpeaker; /// @nodoc @@ -2137,8 +2232,8 @@ class RtcEngineEventHandler { ClientRoleType newRole, ClientRoleOptions newRoleOptions)? onClientRoleChanged; - /// Occurs when the user role switch fails in the interactive live streaming. + /// Occurs when the user role switching fails in the interactive live streaming.
+ /// In the live broadcasting channel profile, when the local user calls setClientRole to switch the user role after joining the channel but the switch fails, the SDK triggers this callback to report the reason for the failure and the current user role. /// /// * [connection] The connection information. See RtcConnection . /// * [reason] The reason for a user role switch failure. See ClientRoleChangeFailedReason . @@ -2157,24 +2252,24 @@ final void Function(MediaDeviceType deviceType, int volume, bool muted)? onAudioDeviceVolumeChanged; - /// Occurs when the media push state changes. - /// When the media push state changes, the SDK triggers this callback and reports the URL address and the current state of the media push. This callback indicates the state of the media push. When exceptions occur, you can troubleshoot issues by referring to the detailed error descriptions in the error code parameter. + /// Occurs when the state of Media Push changes. + /// When the state of Media Push changes, the SDK triggers this callback and reports the URL address and the current state of the Media Push. This callback indicates the state of the Media Push. When exceptions occur, you can troubleshoot issues by referring to the detailed error descriptions in the error code parameter. /// - /// * [url] The URL address where the state of the media push changes. - /// * [state] The current state of the media push. See RtmpStreamPublishState . - /// * [errCode] The detailed error information for the media push. See RtmpStreamPublishErrorType . + /// * [url] The URL address where the state of the Media Push changes. + /// * [state] The current state of the Media Push. See RtmpStreamPublishState . + /// * [errCode] The detailed error information for the Media Push. See RtmpStreamPublishErrorType . final void Function(String url, RtmpStreamPublishState state, RtmpStreamPublishErrorType errCode)? onRtmpStreamingStateChanged; - /// Reports events during the media push. + /// Reports events during the Media Push. /// - /// * [url] The URL of media push. - /// * [eventCode] The event code of media push. See RtmpStreamingEvent . + /// * [url] The URL for Media Push. + /// * [eventCode] The event code of Media Push. See RtmpStreamingEvent . final void Function(String url, RtmpStreamingEvent eventCode)? onRtmpStreamingEvent; /// Occurs when the publisher's transcoding is updated. - /// When the LiveTranscoding class in the setLiveTranscoding method updates, the SDK triggers the onTranscodingUpdated callback to report the update information.If you call the setLiveTranscoding method to set the LiveTranscoding class for the first time, the SDK does not trigger this callback. + /// When the LiveTranscoding class in the startRtmpStreamWithTranscoding method updates, the SDK triggers the onTranscodingUpdated callback to report the update information.If you call the startRtmpStreamWithTranscoding method to set the LiveTranscoding class for the first time, the SDK does not trigger this callback. final void Function()? onTranscodingUpdated; /// Occurs when the local audio route changes. @@ -2193,6 +2288,7 @@ onChannelMediaRelayStateChanged; /// Reports events during the media stream relay. + /// Deprecated:This callback is deprecated. /// /// * [code] The event code of channel media relay. See ChannelMediaRelayEvent . final void Function(ChannelMediaRelayEvent code)? onChannelMediaRelayEvent;
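A minimal sketch of consuming the Media Push callbacks described above; the retry strategy in the comments is illustrative only, not prescribed by the SDK:

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

/// Sketch: reacting to Media Push state changes reported by
/// onRtmpStreamingStateChanged and onRtmpStreamingEvent.
RtcEngineEventHandler mediaPushHandler() {
  return RtcEngineEventHandler(
    onRtmpStreamingStateChanged: (String url, RtmpStreamPublishState state,
        RtmpStreamPublishErrorType errCode) {
      switch (state) {
        case RtmpStreamPublishState.rtmpStreamPublishStateRunning:
          print('Media Push running: $url');
          break;
        case RtmpStreamPublishState.rtmpStreamPublishStateFailure:
          // Inspect errCode, then stop and restart the stream if appropriate.
          print('Media Push failed on $url: $errCode');
          break;
        default:
          break;
      }
    },
    onRtmpStreamingEvent: (String url, RtmpStreamingEvent eventCode) {
      print('Media Push event on $url: $eventCode');
    },
  );
}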
@@ -2206,18 +2302,18 @@ onRemoteSubscribeFallbackToAudioOnly; /// Reports the transport-layer statistics of each remote audio stream. - /// Deprecated:Please use onRemoteAudioStats instead.This callback reports the transport-layer statistics, such as the packet loss rate and network time delay, once every two seconds after the local user receives an audio packet from a remote user. During a call, when the user receives the video packet sent by the remote user/host, the callback is triggered every 2 seconds. + /// Deprecated:Use onRemoteAudioStats instead.This callback reports the transport-layer statistics, such as the packet loss rate and network time delay after the local user receives an audio packet from a remote user. During a call, when the user receives the audio packet sent by the remote user, the callback is triggered every 2 seconds. /// /// * [connection] The connection information. See RtcConnection . - /// * [remoteUid] The ID of the remote user sending the audio packets. - /// * [delay] The network delay (ms) from the sender to the receiver. - /// * [lost] The packet loss rate (%) of the audio packet sent from the remote user. - /// * [rxKBitRate] The bitrate of the received audio (Kbps). + /// * [remoteUid] The ID of the remote user sending the audio streams. + /// * [delay] The network delay (ms) from the remote user to the receiver. + /// * [lost] The packet loss rate (%) of the audio packet sent from the remote user to the receiver. + /// * [rxKBitRate] The bitrate of the received audio (Kbps). final void Function(RtcConnection connection, int remoteUid, int delay, int lost, int rxKBitRate)? onRemoteAudioTransportStats; /// Reports the transport-layer statistics of each remote video stream. - /// Deprecated:This callback is deprecated; use onRemoteVideoStats instead.This callback reports the transport-layer statistics, such as the packet loss rate and network time delay, once every two seconds after the local user receives a video packet from a remote user.During a call, when the user receives the video packet sent by the remote user/host, the callback is triggered every 2 seconds. + /// Deprecated:This callback is deprecated. Use onRemoteVideoStats instead.This callback reports the transport-layer statistics, such as the packet loss rate and network time delay after the local user receives a video packet from a remote user.During a call, when the user receives the video packet sent by the remote user/host, the callback is triggered every 2 seconds. /// /// * [connection] The connection information. See RtcConnection . /// * [remoteUid] The ID of the remote user sending the video packets. @@ -2230,8 +2326,9 @@ /// Occurs when the network connection state changes. /// When the network connection state changes, the SDK triggers this callback and reports the current connection state and the reason for the change. /// - /// * [state] The current connection state. - /// * [reason] The reason for a connection state change. + /// * [connection] The connection information. See RtcConnection . + /// * [state] The current connection state. See ConnectionStateType . + /// * [reason] The reason for a connection state change. See ConnectionChangedReasonType . final void Function(RtcConnection connection, ConnectionStateType state, ConnectionChangedReasonType reason)?
onConnectionStateChanged; @@ -2255,12 +2352,12 @@ /// When encryption is enabled by calling enableEncryption , the SDK triggers this callback if an error occurs in encryption or decryption on the sender or the receiver side. /// /// * [connection] The connection information. See RtcConnection . - /// + /// * [errorType] Details about the error type. See EncryptionErrorType . final void Function(RtcConnection connection, EncryptionErrorType errorType)? onEncryptionError; /// Occurs when the SDK cannot get the device permission. - /// When the SDK fails to get the device permission, the SDK triggers this callback to report which device permission cannot be got.This callback is for Android and iOS only. + /// When the SDK fails to get the device permission, the SDK triggers this callback to report which device permission cannot be got. /// /// * [permissionType] The type of the device permission. See PermissionType . final void Function(PermissionType permissionType)? onPermissionError; @@ -2287,8 +2384,8 @@ /// /// * [channel] The channel name. /// * [uid] The user ID of the remote user. - /// * [oldState] The previous subscribing status, see StreamSubscribeState for details. - /// * [newState] The current subscribing status, see StreamSubscribeState for details. + /// * [oldState] The previous subscribing status. See StreamSubscribeState . + /// * [newState] The current subscribing status. See StreamSubscribeState. /// * [elapseSinceLastState] The time elapsed (ms) from the previous state to the current state. final void Function( String channel, @@ -2314,8 +2411,8 @@ /// Occurs when the audio publishing state changes. /// /// * [channel] The channel name. - /// * [oldState] The previous subscribing status. See StreamPublishState . - /// * [newState] The current subscribing status. See StreamPublishState. + /// * [oldState] The previous publishing state. See StreamPublishState . + /// * [newState] The current publishing state. See StreamPublishState. /// * [elapseSinceLastState] The time elapsed (ms) from the previous state to the current state. final void Function( String channel, @@ -2326,9 +2423,9 @@ /// Occurs when the video publishing state changes. /// /// * [channel] The channel name. - /// * [source] The capture type of the custom video source. See VideoSourceType . - /// * [oldState] For the previous publishing state, see StreamPublishState . - /// * [newState] For the current publishing state, see StreamPublishState. + /// * [source] The type of the video source. See VideoSourceType . + /// * [oldState] The previous publishing state. See StreamPublishState . + /// * [newState] The current publishing state. See StreamPublishState. /// * [elapseSinceLastState] The time elapsed (ms) from the previous state to the current state. final void Function( VideoSourceType source, @@ -2377,36 +2474,65 @@ class RtcEngineEventHandler { final void Function( RtcConnection connection, int remoteUid, String userAccount)? onUserAccountUpdated; + + /// Occurs when there's an error during the local video mixing. + /// When you fail to call startLocalVideoTranscoder or updateLocalTranscoderConfiguration , the SDK triggers this callback to report the reason. + /// + /// * [stream] The video streams that cannot be mixed during video mixing. See TranscodingVideoStream . + /// * [error] The reason for local video mixing error. See VideoTranscoderError .
+ final void Function( + TranscodingVideoStream stream, VideoTranscoderError error)? + onLocalVideoTranscoderError; + + /// Video frame rendering event callback. + /// After calling the startMediaRenderingTracing method or joining the channel, the SDK triggers this callback to report the events of video frame rendering and the indicators during the rendering process. Developers can optimize the indicators to improve the efficiency of the first video frame rendering. + /// + /// * [uid] The user ID. + /// * [currentEvent] The current video frame rendering event. See MediaTraceEvent . + /// * [tracingInfo] The indicators during the video frame rendering process. Developers need to reduce the value of indicators as much as possible in order to improve the efficiency of the first video frame rendering. See VideoRenderingTracingInfo . + final void Function( + RtcConnection connection, + int uid, + MediaTraceEvent currentEvent, + VideoRenderingTracingInfo tracingInfo)? onVideoRenderingTracingResult; } /// Video device management methods. abstract class VideoDeviceManager { /// Enumerates the video devices. + /// This method is for Windows and macOS only. /// /// Returns /// Success: A VideoDeviceInfo array including all video devices in the system.Failure: An empty array. Future<List<VideoDeviceInfo>> enumerateVideoDevices(); /// Specifies the video capture device with the device ID. - /// Plugging or unplugging a device does not change its device ID. + /// Plugging or unplugging a device does not change its device ID.This method is for Windows and macOS only. /// /// * [deviceIdUTF8] The device ID. You can get the device ID by calling enumerateVideoDevices .The maximum length is MaxDeviceIdLengthType . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setDevice(String deviceIdUTF8); /// Retrieves the current video capture device. + /// This method is for Windows and macOS only. /// /// Returns /// The video capture device. Future getDevice(); /// Gets the number of video formats supported by the specified video capture device. - /// Video capture devices may support multiple video formats, and each format supports different combinations of video frame width, video frame height, and frame rate.You can call this method to get how many video formats the specified video capture device can support, and then call getCapability to get the specific video frame information in the specified video format. + /// This method is for Windows and macOS only.Video capture devices may support multiple video formats, and each format supports different combinations of video frame width, video frame height, and frame rate.You can call this method to get how many video formats the specified video capture device can support, and then call getCapability to get the specific video frame information in the specified video format. /// /// * [deviceIdUTF8] The ID of the video capture device. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.≤ 0: Failure. Future numberOfCapabilities(String deviceIdUTF8);
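A sketch of the enumeration flow just described (and getCapability, documented next); Windows/macOS only. The getVideoDeviceManager accessor and the named parameters of getCapability follow this SDK version's bindings and should be verified against the source:

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

/// Sketch: pick the first enumerated camera and list its formats.
Future<void> pickFirstCamera(RtcEngine engine) async {
  final VideoDeviceManager vdm = engine.getVideoDeviceManager();

  final devices = await vdm.enumerateVideoDevices();
  if (devices.isEmpty) return;

  final deviceId = devices.first.deviceId;
  if (deviceId == null) return;

  await vdm.setDevice(deviceId);

  // numberOfCapabilities returns how many formats the device supports;
  // index into them with getCapability.
  final count = await vdm.numberOfCapabilities(deviceId);
  for (var i = 0; i < count; i++) {
    final cap = await vdm.getCapability(
        deviceIdUTF8: deviceId, deviceCapabilityNumber: i);
    print('format $i: ${cap.width}x${cap.height}@${cap.fps}');
  }
}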
/// Gets the detailed video frame information of the video capture device in the specified video format. - /// After calling numberOfCapabilities to get the number of video formats supported by the video capture device, you can call this method to get the specific video frame information supported by the specified index number. + /// This method is for Windows and macOS only.After calling numberOfCapabilities to get the number of video formats supported by the video capture device, you can call this method to get the specific video frame information supported by the specified index number. /// /// * [deviceIdUTF8] The ID of the video capture device. /// * [deviceCapabilityNumber] The index number of the video format. If the return value of numberOfCapabilities is i, the value range of this parameter is [0,i). @@ -2423,6 +2549,7 @@ abstract class VideoDeviceManager { Future stopDeviceTest(); /// Releases all the resources occupied by the VideoDeviceManager object. + /// This method is for Windows and macOS only. Future release(); } @@ -2439,7 +2566,8 @@ class RtcEngineContext { this.logConfig, this.threadPriority, this.useExternalEglContext, - this.domainLimit}); + this.domainLimit, + this.autoRegisterAgoraExtensions}); /// The App ID issued by Agora for your project. Only users in apps with the same App ID can join the same channel and communicate with each other. An App ID can only be used to create one RtcEngine instance. To change your App ID, call release to destroy the current RtcEngine instance, and then create a new one. @JsonKey(name: 'appId') @@ -2461,7 +2589,16 @@ class RtcEngineContext { @JsonKey(name: 'areaCode') final int? areaCode; - /// The SDK log files are: agorasdk.log, agorasdk.1.log, agorasdk.2.log, agorasdk.3.log, and agorasdk.4.log.The API call log files are: agoraapi.log, agoraapi.1.log, agoraapi.2.log, agoraapi.3.log, and agoraapi.4.log.The default size for each SDK log file is 1,024 KB; the default size for each API call log file is 2,048 KB. These log files are encoded in UTF-8.The SDK writes the latest logs in agorasdk.log or agoraapi.log.When agorasdk.log is full, the SDK processes the log files in the following order:Delete the agorasdk.4.log file (if any).Rename agorasdk.3.log to agorasdk.4.log.Rename agorasdk.2.log to agorasdk.3.log.Rename agorasdk.1.log to agorasdk.2.log.Create a new agorasdk.log file.The overwrite rules for the agoraapi.log file are the same as for agorasdk.log.The log files that the SDK outputs. See LogConfig .By default, the SDK generates five SDK log files and five API call log files with the following rules: + /// Sets the log file size. See LogConfig .By default, the SDK generates five SDK log files and five API call log files with the following rules: + /// The SDK log files are: agorasdk.log, agorasdk.1.log, agorasdk.2.log, agorasdk.3.log, and agorasdk.4.log. + /// The API call log files are: agoraapi.log, agoraapi.1.log, agoraapi.2.log, agoraapi.3.log, and agoraapi.4.log. + /// The default size for each SDK log file is 1,024 KB; the default size for each API call log file is 2,048 KB. These log files are encoded in UTF-8. + /// The SDK writes the latest logs in agorasdk.log or agoraapi.log. + /// When agorasdk.log is full, the SDK processes the log files in the following order: + /// Delete the agorasdk.4.log file (if any). + /// Rename agorasdk.3.log to agorasdk.4.log. + /// Rename agorasdk.2.log to agorasdk.3.log. + /// Rename agorasdk.1.log to agorasdk.2.log. + /// Create a new agorasdk.log file. + /// The overwrite rules for the agoraapi.log file are the same as for agorasdk.log. @JsonKey(name: 'logConfig') final LogConfig?
logConfig; @@ -2477,6 +2614,10 @@ class RtcEngineContext { @JsonKey(name: 'domainLimit') final bool? domainLimit; + /// Whether to automatically register the Agora extensions when initializing RtcEngine :true: (Default) Automatically register the Agora extensions when initializing RtcEngine.false: Do not register the Agora extensions when initializing RtcEngine. You need to call enableExtension to register the Agora extensions. + @JsonKey(name: 'autoRegisterAgoraExtensions') + final bool? autoRegisterAgoraExtensions; + /// @nodoc factory RtcEngineContext.fromJson(Map json) => _$RtcEngineContextFromJson(json); @@ -2494,7 +2635,7 @@ class MetadataObserver { /// Occurs when the local user receives the metadata. /// - /// * [metadata] The metadata received, see Metadata . + /// * [metadata] The metadata received. See Metadata . final void Function(Metadata metadata)? onMetadataReceived; } @@ -2582,30 +2723,30 @@ class Metadata { Map toJson() => _$MetadataToJson(this); } -/// @nodoc +/// The CDN streaming error. @JsonEnum(alwaysCreate: true) enum DirectCdnStreamingError { - /// @nodoc + /// 0: No error. @JsonValue(0) directCdnStreamingErrorOk, - /// @nodoc + /// 1: A general error; no specific reason. You can try to push the media stream again. @JsonValue(1) directCdnStreamingErrorFailed, - /// @nodoc + /// 2: An error occurs when pushing audio streams. For example, the local audio capture device is not working properly, is occupied by another process, or does not have the required permission. @JsonValue(2) directCdnStreamingErrorAudioPublication, - /// @nodoc + /// 3: An error occurs when pushing video streams. For example, the local video capture device is not working properly, is occupied by another process, or does not have the required permission. @JsonValue(3) directCdnStreamingErrorVideoPublication, - /// @nodoc + /// 4: Fails to connect to the CDN. @JsonValue(4) directCdnStreamingErrorNetConnect, - /// @nodoc + /// 5: The URL is already being used. Use a new URL for streaming. @JsonValue(5) directCdnStreamingErrorBadName, } @@ -2623,26 +2764,26 @@ extension DirectCdnStreamingErrorExt on DirectCdnStreamingError { } } -/// @nodoc +/// The current CDN streaming state. @JsonEnum(alwaysCreate: true) enum DirectCdnStreamingState { - /// @nodoc + /// 0: The initial state before the CDN streaming starts. @JsonValue(0) directCdnStreamingStateIdle, - /// @nodoc + /// 1: Streams are being pushed to the CDN. The SDK returns this value when you call the startDirectCdnStreaming method to push streams to the CDN. @JsonValue(1) directCdnStreamingStateRunning, - /// @nodoc + /// 2: Stops pushing streams to the CDN. The SDK returns this value when you call the stopDirectCdnStreaming method to stop pushing streams to the CDN. @JsonValue(2) directCdnStreamingStateStopped, - /// @nodoc + /// 3: Fails to push streams to the CDN. You can troubleshoot the issue with the information reported by the onDirectCdnStreamingStateChanged callback, and then push streams to the CDN again. @JsonValue(3) directCdnStreamingStateFailed, - /// @nodoc + /// 4: Tries to reconnect the Agora server to the CDN. The SDK attempts to reconnect a maximum of 10 times; if the connection is not restored, the streaming state becomes directCdnStreamingStateFailed. @JsonValue(4) directCdnStreamingStateRecovering, } @@ -2660,7 +2801,7 @@ extension DirectCdnStreamingStateExt on DirectCdnStreamingState { } } -/// @nodoc +/// The statistics of the current CDN streaming.
@JsonSerializable(explicitToJson: true, includeIfNull: false) class DirectCdnStreamingStats { /// @nodoc const DirectCdnStreamingStats( {this.videoWidth, this.videoHeight, this.fps, this.videoBitrate, this.audioBitrate}); - /// @nodoc + /// The width (px) of the video frame. @JsonKey(name: 'videoWidth') final int? videoWidth; - /// @nodoc + /// The height (px) of the video frame. @JsonKey(name: 'videoHeight') final int? videoHeight; - /// @nodoc + /// The frame rate (fps) of the current video frame. @JsonKey(name: 'fps') final int? fps; - /// @nodoc + /// The bitrate (bps) of the current video frame. @JsonKey(name: 'videoBitrate') final int? videoBitrate; - /// @nodoc + /// The bitrate (bps) of the current audio frame. @JsonKey(name: 'audioBitrate') final int? audioBitrate; @@ -2699,7 +2840,7 @@ class DirectCdnStreamingStats { Map toJson() => _$DirectCdnStreamingStatsToJson(this); } -/// @nodoc +/// The DirectCdnStreamingEventHandler interface class is used by the SDK to send event notifications of CDN streaming to your app. Your app receives these notifications through the callbacks of this class. class DirectCdnStreamingEventHandler { /// @nodoc const DirectCdnStreamingEventHandler({ @@ -2707,17 +2848,25 @@ this.onDirectCdnStreamingStateChanged, this.onDirectCdnStreamingStats, }); - /// @nodoc + /// Occurs when the CDN streaming state changes. + /// When the host directly pushes streams to the CDN, if the streaming state changes, the SDK triggers this callback to report the changed streaming state, error codes, and other information. You can troubleshoot issues by referring to this callback. + /// + /// * [state] The current CDN streaming state. See DirectCdnStreamingState . + /// * [error] The CDN streaming error. See DirectCdnStreamingError . + /// * [message] The information about the changed streaming state. final void Function( DirectCdnStreamingState state, DirectCdnStreamingError error, String message)? onDirectCdnStreamingStateChanged; - /// @nodoc + /// Reports the CDN streaming statistics. + /// When the host directly pushes media streams to the CDN, the SDK triggers this callback once every second. + /// + /// * [stats] The statistics of the current CDN streaming. See DirectCdnStreamingStats . final void Function(DirectCdnStreamingStats stats)? onDirectCdnStreamingStats; }
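A sketch of starting direct CDN streaming and consuming the state and error enums documented above. The startDirectCdnStreaming signature here follows this SDK version's bindings and should be checked against the source:

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

/// Sketch: push the camera/microphone to a CDN and watch the state changes.
Future<void> startCdnPush(RtcEngine engine, String publishUrl) async {
  await engine.startDirectCdnStreaming(
    eventHandler: DirectCdnStreamingEventHandler(
      onDirectCdnStreamingStateChanged: (DirectCdnStreamingState state,
          DirectCdnStreamingError error, String message) {
        switch (state) {
          case DirectCdnStreamingState.directCdnStreamingStateRunning:
            // Streams are being pushed to the CDN.
            break;
          case DirectCdnStreamingState.directCdnStreamingStateFailed:
            // Inspect `error` (e.g. directCdnStreamingErrorBadName) and retry.
            print('CDN streaming failed: $error ($message)');
            break;
          default:
            break;
        }
      },
      onDirectCdnStreamingStats: (DirectCdnStreamingStats stats) {
        print('cdn: ${stats.videoWidth}x${stats.videoHeight} '
            '${stats.fps}fps ${stats.videoBitrate}bps');
      },
    ),
    publishUrl: publishUrl,
    options: const DirectCdnStreamingMediaOptions(
      publishCameraTrack: true,
      publishMicrophoneTrack: true,
    ),
  );
}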
-/// @nodoc +/// The media setting options for the host. @JsonSerializable(explicitToJson: true, includeIfNull: false) class DirectCdnStreamingMediaOptions { /// @nodoc const DirectCdnStreamingMediaOptions( {this.publishCameraTrack, this.publishMicrophoneTrack, this.publishCustomAudioTrack, this.publishCustomVideoTrack, this.publishMediaPlayerId, this.customVideoTrackId}); - /// @nodoc + /// Sets whether to publish the video captured by the camera:true: Publish the video captured by the camera.false: (Default) Do not publish the video captured by the camera. @JsonKey(name: 'publishCameraTrack') final bool? publishCameraTrack; - /// @nodoc + /// Sets whether to publish the audio captured by the microphone:true: Publish the audio captured by the microphone.false: (Default) Do not publish the audio captured by the microphone. @JsonKey(name: 'publishMicrophoneTrack') final bool? publishMicrophoneTrack; - /// @nodoc + /// Sets whether to publish the captured audio from a custom source:true: Publish the captured audio from a custom source.false: (Default) Do not publish the captured audio from the custom source. @JsonKey(name: 'publishCustomAudioTrack') final bool? publishCustomAudioTrack; - /// @nodoc + /// Sets whether to publish the captured video from a custom source:true: Publish the captured video from a custom source.false: (Default) Do not publish the captured video from the custom source. @JsonKey(name: 'publishCustomVideoTrack') final bool? publishCustomVideoTrack; @@ -2754,7 +2903,7 @@ class DirectCdnStreamingMediaOptions { @JsonKey(name: 'publishMediaPlayerId') final int? publishMediaPlayerId; - /// @nodoc + /// The video track ID returned by calling the createCustomVideoTrack method. The default value is 0. @JsonKey(name: 'customVideoTrackId') final int? customVideoTrackId; @@ -2811,12 +2960,14 @@ abstract class RtcEngine { /// /// * [context] Configurations for the RtcEngine instance. See RtcEngineContext . /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-1: A general error occurs (no specified reason).-2: The parameter is invalid.-7: The SDK is not initialized.-22: The resource request failed. The SDK fails to allocate resources because your app consumes too much system resource or the system resources are insufficient.-101: The App ID is invalid. Future initialize(RtcEngineContext context); /// Gets the SDK version. /// /// Returns - /// An SDKBuildInfo object. + /// One SDKBuildInfo object. Future getVersion(); /// Gets the warning or error description. @@ -2827,13 +2978,24 @@ abstract class RtcEngine { /// The specific error or warning description. Future getErrorDescription(int code); + /// Queries the current device's supported video codec capabilities. + /// + /// * [size] The size of CodecCapInfo. + /// + /// Returns + /// One CodecCapInfo array indicating the video encoding capability of the device, if the method call succeeds.If the call times out, please modify the call logic and do not invoke the method in the main thread. + Future<List<CodecCapInfo>> queryCodecCapability(int size); + /// Joins a channel with media options. - /// This method enables users to join a channel. Users in the same channel can talk to each other, and multiple users in the same channel can start a group chat. Users with different App IDs cannot call each other.A successful call of this method triggers the following callbacks: The local client: The onJoinChannelSuccess and onConnectionStateChanged callbacks.The remote client: onUserJoined , if the user joining the channel is in the Communication profile or is a host in the Live-broadcasting profile.When the connection between the client and Agora's server is interrupted due to poor network conditions, the SDK tries reconnecting to the server. When the local client successfully rejoins the channel, the SDK triggers the onRejoinChannelSuccess callback on the local client.This method allows users to join only one channel at a time.Ensure that the app ID you use to generate the token is the same app ID that you pass in the initialize method; otherwise, you may fail to join the channel by token. + /// This method enables users to join a channel. Users in the same channel can talk to each other, and multiple users in the same channel can start a group chat.
Users with different App IDs cannot call each other.A successful call of this method triggers the following callbacks:The local client: The onJoinChannelSuccess and onConnectionStateChanged callbacks.The remote client: onUserJoined , if the user joining the channel is in the Communication profile or is a host in the Live-broadcasting profile.When the connection between the client and Agora's server is interrupted due to poor network conditions, the SDK tries reconnecting to the server. When the local client successfully rejoins the channel, the SDK triggers the onRejoinChannelSuccess callback on the local client.This method allows users to join only one channel at a time.Ensure that the app ID you use to generate the token is the same app ID that you pass in the initialize method; otherwise, you may fail to join the channel by token. /// /// * [token] The token generated on your server for authentication. - /// * [channelId] The channel name. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters:All lowercase English letters: a to z.All uppercase English letters: A to Z.All numeric characters: 0 to 9.Space"!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "= ", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," + /// * [channelId] The channel name. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters:All lowercase English letters: a to z.All uppercase English letters: A to Z.All numeric characters: 0 to 9.Space"!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "= ", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," /// * [uid] The user ID. This parameter is used to identify the user in the channel for real-time audio and video interaction. You need to set and manage user IDs yourself, and ensure that each user ID in the same channel is unique. This parameter is a 32-bit unsigned integer. The value range is 1 to 2^32-1. If the user ID is not assigned (or set to 0), the SDK assigns a random user ID and returns it in the onJoinChannelSuccess callback. Your application must record and maintain the returned user ID, because the SDK does not do so. /// * [options] The channel media options. See ChannelMediaOptions . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The parameter is invalid. For example, the token is invalid, the uid parameter is not set to an integer, or the value of a member in ChannelMediaOptions is invalid. You need to pass in a valid parameter and join the channel again.-3: Fails to initialize the RtcEngine object. You need to reinitialize the RtcEngine object.-7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method.-8: The internal state of the RtcEngine object is wrong.
The typical cause is that you call this method to join the channel without calling stopEchoTest to stop the test after calling startEchoTest to start a call loop test. You need to call stopEchoTest before calling this method.-17: The request to join the channel is rejected. The typical cause is that the user is in the channel. Agora recommends that you use the onConnectionStateChanged callback to determine whether the user exists in the channel. Do not call this method to join the channel unless you receive the connectionStateDisconnected(1) state.-102: The channel name is invalid. You need to pass in a valid channel name in channelId to rejoin the channel.-121: The user ID is invalid. You need to pass in a valid user ID in uid to rejoin the channel. Future joinChannel( {required String token, required String channelId, @@ -2843,23 +3005,38 @@ abstract class RtcEngine { /// Updates the channel media options after joining the channel. /// /// * [options] The channel media options. See ChannelMediaOptions . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The value of a member in the ChannelMediaOptions structure is invalid. For example, the token or the user ID is invalid. You need to fill in a valid parameter.-7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method.-8: The internal state of the RtcEngine object is wrong. The possible reason is that the user is not in the channel. Agora recommends that you use the onConnectionStateChanged callback to determine whether the user exists in the channel. If you receive the connectionStateDisconnected (1) or connectionStateFailed (5) state, the user is not in the channel. You need to call joinChannel to join a channel before calling this method. Future updateChannelMediaOptions(ChannelMediaOptions options); - /// Leaves a channel. - /// This method releases all resources related to the session.This method call is asynchronous. When this method returns, it does not necessarily mean that the user has left the channel.After joining the channel, you must call this method or leaveChannel to end the call, otherwise, the next call cannot be started.If you successfully call this method and leave the channel, the following callbacks are triggered:The local client: onLeaveChannel .The remote client: onUserOffline , if the user joining the channel is in the Communication profile, or is a host in the Live-broadcasting profile.If you call release immediately after calling this method, the SDK does not trigger the onLeaveChannel callback. + /// Sets channel options and leaves the channel. + /// If you call release immediately after calling this method, the SDK does not trigger the onLeaveChannel callback. + /// If you have called joinChannelEx to join multiple channels, calling this method leaves the channels joined by both joinChannel and joinChannelEx. + /// This method will release all resources related to the session, leave the channel, that is, hang up or exit the call. This method can be called whether or not a call is currently in progress.After joining the channel, you must call this method to end the call; otherwise, the next call cannot be started.This method call is asynchronous. When this method returns, it does not necessarily mean that the user has left the channel. After actually leaving the channel, the local user triggers the onLeaveChannel callback; after the user in the communication scenario and the host in the live streaming scenario leave the channel, the remote user triggers the onUserOffline callback. + /// /// * [options] The options for leaving the channel. See LeaveChannelOptions . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future leaveChannel({LeaveChannelOptions? options});
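Because failures surface as a thrown AgoraRtcException rather than a return code (per the Returns sections above), a join/leave flow typically looks like the following sketch; the appId, token, and channel values are placeholders:

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> joinAndLeave() async {
  final engine = createAgoraRtcEngine();
  await engine.initialize(const RtcEngineContext(appId: '<your-app-id>'));

  try {
    await engine.joinChannel(
      token: '<your-token>',
      channelId: 'test-channel',
      uid: 0, // 0 lets the SDK assign a uid, reported in onJoinChannelSuccess.
      options: const ChannelMediaOptions(
        channelProfile: ChannelProfileType.channelProfileLiveBroadcasting,
        clientRoleType: ClientRoleType.clientRoleBroadcaster,
      ),
    );
  } on AgoraRtcException catch (e) {
    // e.code carries the negative error documented above, e.g. -2 or -17.
    print('joinChannel failed: ${e.code}');
  }

  // ... call ...

  await engine.leaveChannel();
  await engine.release();
}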
- /// Gets a new token when the current token expires after a period of time. - /// You can use this method to pass a new token to the SDK. A token expires after a certain period of time. In the following two cases, the app should call this method to pass in a new token. Failure to do so will result in the SDK disconnecting from the server.The SDK triggers the onTokenPrivilegeWillExpire callback.The onConnectionStateChanged callback reports connectionChangedTokenExpired(9). + /// Renews the token. + /// You need to call this method to pass a new token to the SDK in either of the following cases; otherwise, the SDK disconnects from the server:The SDK triggers the onTokenPrivilegeWillExpire callback.The onConnectionStateChanged callback reports connectionChangedTokenExpired(9). /// /// * [token] The new token. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The parameter is invalid. For example, the token is invalid. You need to fill in a valid parameter.-7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method. Future renewToken(String token); /// Sets the channel profile. - /// After initializing the SDK, the default channel profile is the live streaming profile. You can call this method to set the usage scenario of Agora channel. The Agora SDK differentiates channel profiles and applies optimization algorithms accordingly. For example, it prioritizes smoothness and low latency for a video call and prioritizes video quality for interactive live video streaming.To ensure the quality of real-time communication, Agora recommends that all users in a channel use the same channel profile.This method must be called and set before joinChannel [2/2], and cannot be set again after joining the channel. + /// After initializing the SDK, the default channel profile is the live streaming profile. You can call this method to set the usage scenario of the channel. For example, it prioritizes smoothness and low latency for a video call, and prioritizes video quality for the interactive live video streaming.To ensure the quality of real-time communication, Agora recommends that all users in a channel use the same channel profile.This method must be called before joinChannel; the channel profile cannot be changed after joining the channel. /// /// * [profile] The channel profile. See ChannelProfileType . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The parameter is invalid.-7: The SDK is not initialized. Future setChannelProfile(ChannelProfileType profile); /// Sets the user role and level in an interactive live streaming channel. @@ -2867,46 +3044,70 @@ /// /// * [role] The user role in the interactive live streaming.
See ClientRoleType . /// * [options] The detailed options of a user, including the user level. See ClientRoleOptions . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-1: A general error occurs (no specified reason).-2: The parameter is invalid.-5: The request is rejected.-7: The SDK is not initialized. Future setClientRole( {required ClientRoleType role, ClientRoleOptions? options}); - /// Starts an audio call test. - /// This method starts an audio call test to determine whether the audio devices (for example, headset and speaker) and the network connection are working properly. To conduct the test, let the user speak for a while, and the recording is played back within the set interval. If the user can hear the recording within the interval, the audio devices and network connection are working properly.Call this method before joining a channel.After calling startEchoTest, you must call stopEchoTest to end the test. Otherwise, the app cannot perform the next echo test, and you cannot join the channel.In the live streaming channels, only a host can call this method. + /// Starts an audio and video call loop test. + /// To test whether the user's local sending and receiving streams are normal, you can call this method to perform an audio and video call loop test, which tests whether the audio and video devices and the user's upstream and downstream networks are working properly.After starting the test, the user needs to make a sound or face the camera. The audio or video is output after about two seconds. If the audio playback is normal, the audio device and the user's upstream and downstream networks are working properly; if the video playback is normal, the video device and the user's upstream and downstream networks are working properly.You can call this method either before or after joining a channel.After calling this method, call stopEchoTest to end the test; otherwise, the user cannot perform the next audio and video call loop test and cannot join the channel.In live streaming scenarios, this method only applies to hosts. + /// + /// * [config] The configuration of the audio and video call loop test. See EchoTestConfiguration . /// - /// * [intervalInSeconds] The time interval (s) between when you speak and when the recording plays back. The value range is [2, 10], and the default value is 10. - Future startEchoTest({int intervalInSeconds = 10}); + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + Future startEchoTest(EchoTestConfiguration config); /// Stops the audio call test. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure.-5(ERR_REFUSED): Failed to stop the echo test. The echo test may not be running. Future stopEchoTest();
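A sketch of the new config-based echo test just described; the EchoTestConfiguration fields (including intervalInSeconds) follow this SDK version and should be verified against the source, and the token/channel values are placeholders:

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

/// Sketch: run the audio and video call loop test, then stop it.
Future<void> runEchoTest(RtcEngine engine) async {
  await engine.startEchoTest(const EchoTestConfiguration(
    enableAudio: true,
    enableVideo: false, // audio-only loop test
    token: '<your-token>',
    channelId: 'echo-test-channel',
    intervalInSeconds: 2, // delay before the captured audio plays back
  ));

  // Speak into the microphone; playback follows after ~intervalInSeconds.
  await Future<void>.delayed(const Duration(seconds: 10));

  // Always stop the test; otherwise the next test or joinChannel fails.
  await engine.stopEchoTest();
}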
/// Enables or disables multi-camera capture. - /// In scenarios where there are existing cameras to capture video, Agora recommends that you use the following steps to capture and publish video with multiple cameras:Call this method to enable multi-channel camera capture.Call startPreview to start the local video preview.Call startSecondaryCameraCapture to start video capture with the second camera.Call joinChannelEx , and set publishSecondaryCameraTrack to true to publish the video stream captured by the second camera in the channel.If you want to disable multi-channel camera capture, use the following steps:Call stopSecondaryCameraCapture .Call this method with enabled set to false.You can call this method before and after startPreview to enable multi-camera capture:If it is enabled before startPreview, the local video preview shows the image captured by the two cameras at the same time.If it is enabled after startPreview, the SDK stops the current camera capture first, and then enables the primary camera and the second camera. The local video preview appears black for a short time, and then automatically returns to normal.When using this function, ensure that the system version is 13.0 or later.The minimum iOS device types that support multi-camera capture are as follows:iPhone XRiPhone XSiPhone XS MaxiPad Pro 3rd generation and later + /// In scenarios where there are existing cameras to capture video, Agora recommends that you use the following steps to capture and publish video with multiple cameras:Call this method to enable multi-channel camera capture.Call startPreview to start the local video preview.Call startCameraCapture , and set sourceType to start video capture with the second camera.Call joinChannelEx , and set publishSecondaryCameraTrack to true to publish the video stream captured by the second camera in the channel.If you want to disable multi-channel camera capture, use the following steps:Call stopCameraCapture .Call this method with enabled set to false.You can call this method before and after startPreview to enable multi-camera capture:If it is enabled before startPreview, the local video preview shows the image captured by the two cameras at the same time.If it is enabled after startPreview, the SDK stops the current camera capture first, and then enables the primary camera and the second camera. The local video preview appears black for a short time, and then automatically returns to normal.When using this function, ensure that the system version is 13.0 or later.The minimum iOS device types that support multi-camera capture are as follows:iPhone XRiPhone XSiPhone XS MaxiPad Pro 3rd generation and later /// /// * [enabled] Whether to enable multi-camera video capture mode:true: Enable multi-camera capture mode; the SDK uses multiple cameras to capture video.false: Disable multi-camera capture mode; the SDK uses a single camera to capture video. /// * [config] Capture configuration for the second camera. See CameraCapturerConfiguration . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future enableMultiCamera( {required bool enabled, required CameraCapturerConfiguration config}); /// Enables the video module. - /// Call this method either before joining a channel or during a call. If this method is called before joining a channel, the call starts in the video mode.
Call disableVideo to disable the video mode.A successful call of this method triggers the onRemoteVideoStateChanged callback on the remote client.This method enables the internal engine and is valid after leaving the channel.This method resets the internal engine and takes some time to take effect. Agora recommends using the following API methods to control the video engine modules separately: enableLocalVideo : Whether to enable the camera to create the local video stream. muteLocalVideoStream : Whether to publish the local video stream. muteRemoteVideoStream : Whether to subscribe to and play the remote video stream. muteAllRemoteVideoStreams : Whether to subscribe to and play all remote video streams. + /// Call this method either before joining a channel or during a call. If this method is called before joining a channel, the call starts in the video mode; if called during a call, the audio call switches to a video call. Call disableVideo to disable the video mode.A successful call of this method triggers the onRemoteVideoStateChanged callback on the remote client.This method enables the internal engine and is valid after leaving the channel.This method resets the internal engine and thus might take some time to take effect. Agora recommends using the following APIs to control the video modules separately: enableLocalVideo : Whether to enable the camera to create the local video stream. muteLocalVideoStream : Whether to publish the local video stream. muteRemoteVideoStream : Whether to subscribe to and play the remote video stream. muteAllRemoteVideoStreams : Whether to subscribe to and play all remote video streams. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future enableVideo(); /// Disables the video module. - /// This method disables video. You can call this method either before or after joining a channel. If you call it before joining a channel, an audio call starts when you join the channel. If you call it after joining a channel, a video call switches to an audio call. Call enableVideo to enable video.A successful call of this method triggers the onUserEnableVideo (false) callback on the remote client.This method affects the internal engine and can be called after leaving the channel.This method resets the internal engine and takes some time to take effect. Agora recommends using the following API methods to control the video engine modules separately: enableLocalVideo : Whether to enable the camera to create the local video stream. muteLocalVideoStream : Whether to publish the local video stream. muteRemoteVideoStream : Whether to subscribe to and play the remote video stream. muteAllRemoteVideoStreams : Whether to subscribe to and play all remote video streams. + /// This method can be called before joining a channel or during a call to disable the video module. If it is called before joining a channel, an audio call starts when you join the channel; if called during a call, a video call switches to an audio call. Call enableVideo to enable the video module.A successful call of this method triggers the onUserEnableVideo (false) callback on the remote client.This method affects the internal engine and can be called after leaving the channel.This method resets the internal engine and thus might take some time to take effect.
Agora recommends using the following APIs to control the video modules separately: enableLocalVideo : Whether to enable the camera to create the local video stream. muteLocalVideoStream : Whether to publish the local video stream. muteRemoteVideoStream : Whether to subscribe to and play the remote video stream. muteAllRemoteVideoStreams : Whether to subscribe to and play all remote video streams. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future disableVideo(); /// Enables the local video preview and specifies the video source for the preview. - /// This method starts the local video preview before joining the channel. Before calling this method, ensure that you do the following: - /// Call setupLocalVideo to set the local preview window. - /// Call enableVideo to enable the video. The local preview enables the mirror mode by default.After the local video preview is enabled, if you call leaveChannel to exit the channel, the local preview remains until you call stopPreview to disable it.The video source type set in this method needs to be consistent with the video source type of VideoCanvas you set in setupLocalVideo . + /// You can call this method to enable local video preview. Before calling this method, ensure that you do the following:Call setupLocalVideo to set the local preview window.Call enableVideo to enable the video.The local preview enables the mirror mode by default.After the local video preview is enabled, if you call leaveChannel to exit the channel, the local preview remains until you call stopPreview to disable it.The video source type set in this method needs to be consistent with the video source type of VideoCanvas you set in setupLocalVideo . + /// + /// * [sourceType] The type of the video source. See VideoSourceType . /// - /// * [sourceType] The type of the video frame, see VideoSourceType . + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future startPreview( {VideoSourceType sourceType = VideoSourceType.videoSourceCameraPrimary}); /// Stops the local video preview. - /// After calling startPreview to start the preview, if you want to close the local video preview, please call this method.Please call this method before joining a channel or after leaving a channel. - /// * [sourceType] The type of the video frame, see VideoSourceType. + /// After calling startPreview to start the preview, if you want to close the local video preview, call this method.Call this method before joining a channel or after leaving a channel. + /// + /// * [sourceType] The type of the video source. See VideoSourceType . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
Future stopPreview( {VideoSourceType sourceType = VideoSourceType.videoSourceCameraPrimary}); @@ -2914,24 +3115,35 @@ abstract class RtcEngine { /// This method starts the last-mile network probe test before joining a channel to get the uplink and downlink last mile network statistics, including the bandwidth, packet loss, jitter, and round-trip time (RTT).Once this method is enabled, the SDK returns the following callbacks: onLastmileQuality : The SDK triggers this callback within two seconds depending on the network conditions. This callback rates the network conditions and is more closely linked to the user experience. onLastmileProbeResult : The SDK triggers this callback within 30 seconds depending on the network conditions. This callback returns the real-time statistics of the network conditions and is more objective.This method applies to the following scenarios:Before a user joins a channel, call this method to check the uplink network quality.In a live streaming channel, call this method to check the uplink network quality before an audience member switches to a host.Do not call other methods before receiving the onLastmileQuality and onLastmileProbeResult callbacks. Otherwise, the callbacks may be interrupted.A host should not call this method after joining a channel (when in a call). /// /// * [config] The configurations of the last-mile network probe test. See LastmileProbeConfig . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future startLastmileProbeTest(LastmileProbeConfig config); /// Stops the last mile network probe test. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future stopLastmileProbeTest(); /// Sets the video encoder configuration. - /// Sets the encoder configuration for the local video.You can call this method either before or after joining a channel. If you don't need to set the video encoder configuration after joining a channel, - /// Agora recommends you calling this method before the enableVideo method to reduce the rendering time of the first video frame. + /// Sets the encoder configuration for the local video.You can call this method either before or after joining a channel. If the user does not need to reset the video encoding properties after joining the channel, Agora recommends calling this method before enableVideo to reduce the time to render the first video frame. /// /// * [config] Video profile. See VideoEncoderConfiguration . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setVideoEncoderConfiguration(VideoEncoderConfiguration config); /// Sets the image enhancement options. - /// Enables or disables image enhancement, and sets the options.Call this method before calling enableVideo or startPreview .This method relies on the video enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. 
/// Sets the image enhancement options.
- /// Enables or disables image enhancement, and sets the options.Call this method before calling enableVideo or startPreview .This method relies on the video enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally.
+ /// Enables or disables image enhancement, and sets the options. Call this method before calling enableVideo or startPreview. This method relies on the video enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally.
///
- /// * [type] The type of the video source. See MediaSourceType .
/// * [enabled] Whether to enable the image enhancement function:true: Enable the image enhancement function.false: (Default) Disable the image enhancement function.
/// * [options] The image enhancement options. See BeautyOptions .
+ /// * [type] The type of the video source, see MediaSourceType.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure. errNotSupported(4): The current device version is below Android 5.0, and this operation is not supported.
Future<void> setBeautyEffectOptions(
    {required bool enabled,
    required BeautyOptions options,
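A minimal sketch of the call above, placed between hunks for reference (enhancement levels are illustrative assumptions; `engine` is an already-initialized `RtcEngine`):

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Sketch: enable image enhancement with moderate levels before
// calling enableVideo()/startPreview().
Future<void> enableBeauty(RtcEngine engine) async {
  await engine.setBeautyEffectOptions(
    enabled: true,
    options: const BeautyOptions(
      lighteningContrastLevel:
          LighteningContrastLevel.lighteningContrastNormal,
      lighteningLevel: 0.6,
      smoothnessLevel: 0.5,
      rednessLevel: 0.1,
      sharpnessLevel: 0.3,
    ),
  );
}
```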
@@ -2943,6 +3155,9 @@ abstract class RtcEngine {
/// * [enabled] Whether to enable low-light enhancement function:true: Enable low-light enhancement function.false: (Default) Disable low-light enhancement function.
/// * [options] The low-light enhancement options. See LowlightEnhanceOptions .
/// * [type] The type of the video source. See MediaSourceType .
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> setLowlightEnhanceOptions(
    {required bool enabled,
    required LowlightEnhanceOptions options,
@@ -2951,9 +3166,12 @@ abstract class RtcEngine {
/// Sets video noise reduction.
/// Underlit environments and low-end video capture devices can cause video images to contain significant noise, which affects video quality. In real-time interactive scenarios, video noise also consumes bitstream resources and reduces encoding efficiency during encoding.You can call this method to enable the video noise reduction feature and set the options of the video noise reduction effect.Call this method after calling enableVideo .Video noise reduction has certain requirements for equipment performance. If your device overheats after you enable video noise reduction, Agora recommends modifying the video noise reduction options to a less performance-consuming level or disabling video noise reduction entirely.Both this method and setExtensionProperty can turn on video noise reduction function:When you use the SDK to capture video, Agora recommends this method (this method only works for video captured by the SDK).When you use an external video source to implement custom video capture, or send an external video source to the SDK, Agora recommends using setExtensionProperty.This method relies on the video enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally.
///
- /// * [type] The type of the video source. See MediaSourceType .
/// * [enabled] Whether to enable video noise reduction:true: Enable video noise reduction.false: (Default) Disable video noise reduction.
/// * [options] The video noise reduction options. See VideoDenoiserOptions .
+ /// * [type] The type of the video source. See MediaSourceType.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> setVideoDenoiserOptions(
    {required bool enabled,
    required VideoDenoiserOptions options,
@@ -2962,146 +3180,205 @@ abstract class RtcEngine {
/// Sets color enhancement.
/// The video images captured by the camera can have color distortion. The color enhancement feature intelligently adjusts video characteristics such as saturation and contrast to enhance the video color richness and color reproduction, making the video more vivid.You can call this method to enable the color enhancement feature and set the options of the color enhancement effect.Call this method after calling enableVideo .The color enhancement feature has certain performance requirements on devices. With color enhancement turned on, Agora recommends that you change the color enhancement level to one that consumes less performance or turn off color enhancement if your device is experiencing severe heat problems.Both this method and setExtensionProperty can enable color enhancement:When you use the SDK to capture video, Agora recommends this method (this method only works for video captured by the SDK).When you use an external video source to implement custom video capture, or send an external video source to the SDK, Agora recommends using setExtensionProperty.This method relies on the video enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally.
///
- /// * [type] The type of the video source. See MediaSourceType .
/// * [enabled] Whether to enable color enhancement:true Enable color enhancement.false: (Default) Disable color enhancement.
/// * [options] The color enhancement options. See ColorEnhanceOptions .
+ /// * [type] The type of the video source. See MediaSourceType.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> setColorEnhanceOptions(
    {required bool enabled,
    required ColorEnhanceOptions options,
    MediaSourceType type = MediaSourceType.primaryCameraSource});
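A short sketch of the color enhancement call above (levels are illustrative assumptions; `engine` is an already-initialized `RtcEngine`):

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Sketch: turn on color enhancement after enableVideo(); higher
// strength consumes more device performance.
Future<void> enableColorEnhancement(RtcEngine engine) async {
  await engine.setColorEnhanceOptions(
    enabled: true,
    options: const ColorEnhanceOptions(
      strengthLevel: 0.5, // range [0.0, 1.0]
      skinProtectLevel: 1.0, // range [0.0, 1.0]
    ),
  );
}
```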
/// Enables/Disables the virtual background.
- /// The virtual background function allows you to replace the original background image of the local user or to blur the background. After successfully enabling the virtual background function, all users in the channel can see the customized background.Call this method before calling enableVideo or startPreview .This function requires a high-performance device. Agora recommends that you use this function on devices with the following chips:Snapdragon 700 series 750G and laterSnapdragon 800 series 835 and laterDimensity 700 series 720 and laterKirin 800 series 810 and laterKirin 900 series 980 and laterDevices with an A9 chip and better, as follows:iPhone 6S and lateriPad Air 3rd generation and lateriPad 5th generation and lateriPad Pro 1st generation and lateriPad mini 5th generation and laterAgora recommends that you use this function in scenarios that meet the following conditions:A high-definition camera device is used, and the environment is uniformly lit.There are few objects in the captured video. Portraits are half-length and unobstructed. Ensure that the background is a solid color that is different from the color of the user's clothing.This method relies on the video enhancement dynamic library libagora_segmentation_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally.
+ /// The virtual background feature enables the local user to replace their original background with a static image, dynamic video, blurred background, or portrait-background segmentation to achieve a picture-in-picture effect. Once the virtual background feature is enabled, all users in the channel can see the custom background. Call this method before calling enableVideo or startPreview. This feature requires high-performance devices. Agora recommends that you implement it on devices equipped with the following chips: Snapdragon 700 series 750G and later; Snapdragon 800 series 835 and later; Dimensity 700 series 720 and later; Kirin 800 series 810 and later; Kirin 900 series 980 and later; devices with an A9 chip and better, as follows: iPhone 6S and later, iPad Air 3rd generation and later, iPad 5th generation and later, iPad Pro 1st generation and later, iPad mini 5th generation and later. Agora recommends that you use this feature in scenarios that meet the following conditions: A high-definition camera device is used, and the environment is uniformly lit. There are few objects in the captured video. Portraits are half-length and unobstructed. Ensure that the background is a solid color that is different from the color of the user's clothing. This method relies on the virtual background dynamic library libagora_segmentation_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally.
///
/// * [enabled] Whether to enable virtual background:true: Enable virtual background.false: Disable virtual background.
- /// * [backgroundSource] The custom background image. See VirtualBackgroundSource . To adapt the resolution of the custom background image to that of the video captured by the SDK, the SDK scales and crops the custom background image while ensuring that the content of the custom background image is not distorted.
- /// * [segproperty] Processing properties for background images. See SegmentationProperty.
- /// * [type] The type of the video source. See MediaSourceType. In this method, this parameter supports only the following two settings: The default value is primaryCameraSource. If you want to use the second camera to capture video, set this parameter to secondaryCameraSource.
+ /// * [backgroundSource] The custom background. See VirtualBackgroundSource. To adapt the resolution of the custom background image to that of the video captured by the SDK, the SDK scales and crops the custom background image while ensuring that the content of the custom background image is not distorted.
+ /// * [segproperty] Processing properties for background images. See SegmentationProperty.
+ /// * [type] The type of the video source. See MediaSourceType. In this method, this parameter supports only the following two settings: The default value is primaryCameraSource. If you want to use the second camera to capture video, set this parameter to secondaryCameraSource.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure. -1: The custom background image does not exist. Check the value of source in VirtualBackgroundSource. -2: The color format of the custom background image is invalid. Check the value of color in VirtualBackgroundSource. -3: The device does not support virtual background.
Future<void> enableVirtualBackground(
    {required bool enabled,
    required VirtualBackgroundSource backgroundSource,
    required SegmentationProperty segproperty,
    MediaSourceType type = MediaSourceType.primaryCameraSource});
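A minimal sketch of enabling a blurred background, under the stated constraints (values are illustrative assumptions; `engine` is an already-initialized `RtcEngine` and the segmentation extension is present on the device):

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Sketch: blur the local user's background before enableVideo()
// or startPreview().
Future<void> enableBlurredBackground(RtcEngine engine) async {
  await engine.enableVirtualBackground(
    enabled: true,
    backgroundSource: const VirtualBackgroundSource(
      backgroundSourceType: BackgroundSourceType.backgroundBlur,
      blurDegree: BackgroundBlurDegree.blurDegreeHigh,
    ),
    segproperty: const SegmentationProperty(
      modelType: SegModelType.segModelAi,
      greenCapacity: 0.5,
    ),
  );
}
```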
- /// Enables/Disables the super resolution algorithm for a remote user's video stream.
- /// This function can effectively improve the resolution of the remote video picture seen by the local user, that is, the width and height (pixels) of the video received by the specified remote user are enlarged to 2 times original size.After calling this method, you can confirm whether super resolution is successfully enabled through the remote video stream statistics ( RemoteVideoStats ) in the onRemoteVideoStats callback:If the parameter superResolutionType >0: Super resolution is enabled.If the parameter superResolutionType =0: Super resolution is not enabled.The super resolution feature requires extra system resources. To balance the visual experience and system consumption, this feature can only be enabled for a single remote user. If the local user uses super resolution on Android, the original resolution of the remote user's video cannot exceed 640 × 360 pixels; if the local user uses super resolution on iOS, the original resolution of the remote user's video cannot exceed 640 × 480 pixels.This method applies to Android and iOS only.This method relies on the super resolution dynamic library libagora_super_resolution_extension.so (Android); AgoraSuperResolutionExtension.xcframework (iOS). If the dynamic library is deleted, the function cannot be enabled normally.Because this method has certain system performance requirements, Agora recommends that you use the following devices or better:Android:VIVO: V1821A, NEX S, 1914A, 1916A, 1962A, 1824BA, X60, X60 ProOPPO: PCCM00, Find X3OnePlus: A6000Xiaomi: Mi 8, Mi 9, Mi 10, Mi 11, MIX3, Redmi K20 ProSAMSUNG: SM-G9600, SM-G9650, SM-N9600, SM-G9708, SM-G960U, SM-G9750, S20, S21HUAWEI: SEA-AL00, ELE-AL00, VOG-AL00, YAL-AL10, HMA-AL00, EVR-AN00, nova 4, nova 5 Pro, nova 6 5G, nova 7 5G, Mate 30, Mate 30 Pro, Mate 40, Mate 40 Pro, P40, P40 Pro, Huawei M6, MatePad 10.8iOS:iPhone XRiPhone XSiPhone XS MaxiPhone 11iPhone 11 ProiPhone 11 Pro MaxiPhone 12iPhone 12 miniiPhone 12 ProiPhone 12 Pro MaxiPhone 12 SE (2nd generation)iPad Pro 11-inch (3rd generation)iPad Pro 12.9-inch (3rd generation)iPad Air 3 (3rd generation)iPad Air 3 (4th generation)
- ///
- /// * [userId] The user ID of the remote user.
- /// * [enable] Whether to enable super resolution for the remote user’s video:true:Enable super resolution.false: Disable super resolution.
- Future<void> enableRemoteSuperResolution(
-     {required int userId, required bool enable});

/// Initializes the video view of a remote user.
- /// This method initializes the video view of a remote stream on the local device. It affects only the video view that the local user sees. Call this method to bind the remote video stream to a video view and to set the rendering and mirror modes of the video view.You need to specify the ID of the remote user in this method. If the remote user ID is unknown to the application, set it after the app receives the onUserJoined callback.To unbind the remote user from the view, set the view parameter to NULL.Once the remote user leaves the channel, the SDK unbinds the remote user.To update the rendering or mirror mode of the remote video view during a call, use the setRemoteRenderMode method.If you use the Agora recording feature, the recording client joins the channel as a dummy client, triggering the onUserJoined callback. Do not bind the dummy client to the app view because the dummy client does not send any video streams. If your app does not recognize the dummy client, bind the remote user to the view when the SDK triggers the onFirstRemoteVideoDecoded callback.
+ /// This method initializes the video view of a remote stream on the local device. It affects only the video view that the local user sees. Call this method to bind the remote video stream to a video view and to set the rendering and mirror modes of the video view. You need to specify the ID of the remote user in this method. If the remote user ID is unknown to the application, set it after the app receives the onUserJoined callback. To unbind the remote user from the view, set the view parameter to NULL. Once the remote user leaves the channel, the SDK unbinds the remote user. To update the rendering or mirror mode of the remote video view during a call, use the setRemoteRenderMode method. If you use the Agora recording function, the recording client joins the channel as a placeholder client, triggering the onUserJoined callback. Do not bind the placeholder client to the app view because the placeholder client does not send any video streams. If your app does not recognize the placeholder client, bind the remote user to the view when the SDK triggers the onFirstRemoteVideoDecoded callback.
///
/// * [canvas] The remote video view and settings. See VideoCanvas .
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> setupRemoteVideo(VideoCanvas canvas);

/// Initializes the local video view.
/// This method initializes the video view of a local stream on the local device. It affects only the video view that the local user sees, not the published local video stream. Call this method to bind the local video stream to a video view and to set the rendering and mirror modes of the video view.After initialization, call this method to set the local video and then join the channel. The local video still binds to the view after you leave the channel. To unbind the local video from the view, set the view parameter as NULL.You can call this method either before or after joining a channel.To update the rendering or mirror mode of the local video view during a call, use the setLocalRenderMode method.
///
- /// * [canvas] Local video display properties. See VideoCanvas .
+ /// * [canvas] The local video view and settings. See VideoCanvas.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> setupLocalVideo(VideoCanvas canvas);
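A sketch of the VideoCanvas settings described above; note that in Flutter the AgoraVideoView widget normally performs this binding for you, so this is purely illustrative (`remoteUid` is a hypothetical ID delivered by the onUserJoined callback):

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Sketch: bind a remote user's stream to a view and set the
// rendering and mirror modes of the view.
Future<void> bindRemoteView(RtcEngine engine, int remoteUid) async {
  await engine.setupRemoteVideo(VideoCanvas(
    uid: remoteUid,
    renderMode: RenderModeType.renderModeHidden,
    mirrorMode: VideoMirrorModeType.videoMirrorModeAuto,
  ));
}
```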
+ /// Sets video application scenarios.
+ /// After successfully calling this method, the SDK will automatically enable the best practice strategies and adjust key performance metrics based on the specified scenario, to optimize the video experience. Ensure that you call this method before joining a channel.
+ ///
+ /// * [scenarioType] The type of video application scenario. See VideoApplicationScenarioType. If set to applicationScenarioMeeting (1), the SDK automatically enables the following strategies: In meeting scenarios where low-quality video streams are required to have a high bitrate, the SDK automatically enables multiple technologies used to deal with network congestions, to enhance the performance of the low-quality streams and to ensure the smooth reception by subscribers. The SDK monitors the number of subscribers to the high-quality video stream in real time and dynamically adjusts its configuration based on the number of subscribers. If nobody subscribes to the high-quality stream, the SDK automatically reduces its bitrate and frame rate to save upstream bandwidth. If someone subscribes to the high-quality stream, the SDK resets the high-quality stream to the VideoEncoderConfiguration configuration used in the most recent call of setVideoEncoderConfiguration. If no configuration has been set by the user previously, the following values are used: Resolution: (Windows and macOS) 1280 × 720; (Android and iOS) 960 × 540. Frame rate: 15 fps. Bitrate: (Windows and macOS) 1600 Kbps; (Android and iOS) 1000 Kbps. The SDK monitors the number of subscribers to the low-quality video stream in real time and dynamically enables or disables it based on the number of subscribers. If the user has called setDualStreamMode to set the mode to never send low-quality video streams (disableSimulcastStream), the dynamic adjustment of the low-quality stream in meeting scenarios does not take effect. If nobody subscribes to the low-quality stream, the SDK automatically disables it to save upstream bandwidth. If someone subscribes to the low-quality stream, the SDK enables the low-quality stream and resets it to the SimulcastStreamConfig configuration used in the most recent call of setDualStreamMode. If no configuration has been set by the user previously, the following values are used: Resolution: 480 × 272. Frame rate: 15 fps. Bitrate: 500 Kbps.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
+ Future<void> setVideoScenario(VideoApplicationScenarioType scenarioType);
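A minimal sketch of the new API added in this patch (illustrative only; `engine` is an already-initialized `RtcEngine`):

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Sketch: declare a meeting scenario before joining a channel so
// the SDK applies the dual-stream strategies described above.
Future<void> useMeetingScenario(RtcEngine engine) async {
  await engine.setVideoScenario(
      VideoApplicationScenarioType.applicationScenarioMeeting);
}
```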
/// Enables the audio module.
- /// The audio mode is enabled by default.This method enables the internal engine and can be called anytime after initialization. It is still valid after one leaves channel.This method enables the audio module and takes some time to take effect. Agora recommends using the following API methods to control the audio module separately: enableLocalAudio : Whether to enable the microphone to create the local audio stream. muteLocalAudioStream : Whether to publish the local audio stream. muteRemoteAudioStream : Whether to subscribe and play the remote audio stream. muteAllRemoteAudioStreams : Whether to subscribe to and play all remote audio streams.
+ /// The audio mode is enabled by default. This method enables the internal engine and can be called anytime after initialization. It remains valid after the user leaves the channel. This method enables the whole audio module and thus might take a while to take effect. Agora recommends using the following APIs to control the audio module separately: enableLocalAudio : Whether to enable the microphone to create the local audio stream. muteLocalAudioStream : Whether to publish the local audio stream. muteRemoteAudioStream : Whether to subscribe and play the remote audio stream. muteAllRemoteAudioStreams : Whether to subscribe to and play all remote audio streams.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> enableAudio();

/// Disables the audio module.
/// This method disables the internal engine and can be called anytime after initialization. It is still valid after one leaves channel.This method resets the internal engine and takes some time to take effect. Agora recommends using the following API methods to control the audio modules separately: enableLocalAudio : Whether to enable the microphone to create the local audio stream. muteLocalAudioStream : Whether to publish the local audio stream. muteRemoteAudioStream : Whether to subscribe and play the remote audio stream. muteAllRemoteAudioStreams : Whether to subscribe to and play all remote audio streams.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> disableAudio();

- /// Sets the audio parameters and application scenarios.
- /// You can call this method either before or after joining a channel.In scenarios requiring high-quality audio, such as online music tutoring, Agora recommends you set profile as audioProfileMusicHighQuality (4).If you want to set the audio scenario, call initialize and set RtcEngineContext struct.
+ /// Sets the audio profile and audio scenario.
+ /// You can call this method either before or after joining a channel. In scenarios requiring high-quality audio, such as online music tutoring, Agora recommends you set profile as audioProfileMusicHighQuality (4) and scenario as audioScenarioGameStreaming (3).
///
/// * [profile] The audio profile, including the sampling rate, bitrate, encoding mode, and the number of channels. See AudioProfileType .
- /// * [scenario] The audio scenarios. See AudioScenarioType.
+ /// * [scenario] The audio scenarios. See AudioScenarioType. Under different audio scenarios, the device uses different volume types.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> setAudioProfile(
    {required AudioProfileType profile,
    AudioScenarioType scenario = AudioScenarioType.audioScenarioDefault});

/// Sets audio scenarios.
/// You can call this method either before or after joining a channel.
- /// * [scenario] The audio scenarios. See AudioScenarioType. Under different audio scenarios, the device uses different volume types.
+ ///
+ /// * [scenario] The audio scenarios. See AudioScenarioType. Under different audio scenarios, the device uses different volume types.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> setAudioScenario(AudioScenarioType scenario);
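A minimal sketch of the profile/scenario pair recommended above for high-quality music use cases (illustrative only; `engine` is an already-initialized `RtcEngine`):

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Sketch: high-quality music profile with the game-streaming
// scenario, e.g. for online music tutoring.
Future<void> useMusicProfile(RtcEngine engine) async {
  await engine.setAudioProfile(
    profile: AudioProfileType.audioProfileMusicHighQuality,
    scenario: AudioScenarioType.audioScenarioGameStreaming,
  );
}
```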
- /// Enables/Disables the local audio capture.
- /// The audio function is enabled by default. This method disables or re-enables the local audio function to stop or restart local audio capturing.This method does not affect receiving or playing the remote audio streams, and enableLocalAudio (false) is applicable to scenarios where the user wants to receive remote audio streams without sending any audio stream to other users in the channel.Once the local audio function is disabled or re-enabled, the SDK triggers the onLocalAudioStateChanged callback, which reports localAudioStreamStateStopped (0) or localAudioStreamStateRecording (1).This method is different from the muteLocalAudioStream method:enableLocalAudio: Disables/Re-enables the local audio capturing and processing. If you disable or re-enable local audio capturing using the enableLocalAudio method, the local user might hear a pause in the remote audio playback.muteLocalAudioStream: Sends/Stops sending the local audio streams.You can call this method either before or after joining a channel. Calling it before joining a channel only sets the device state, and it takes effect immediately after you join the channel.
+ /// Enables or disables the local audio capture.
+ /// The audio function is enabled by default when users join a channel. This method disables or re-enables the local audio function to stop or restart local audio capturing. This method does not affect receiving or playing the remote audio streams, and enableLocalAudio (false) is applicable to scenarios where the user wants to receive remote audio streams without sending any audio stream to other users in the channel. Once the local audio function is disabled or re-enabled, the SDK triggers the onLocalAudioStateChanged callback, which reports localAudioStreamStateStopped (0) or localAudioStreamStateRecording (1). The difference between this method and muteLocalAudioStream is as follows: enableLocalAudio: Disables or re-enables the local audio capturing and processing. If you disable or re-enable local audio capturing using the enableLocalAudio method, the local user might hear a pause in the remote audio playback. muteLocalAudioStream: Sends or stops sending the local audio streams. You can call this method either before or after joining a channel. Calling it before joining a channel only sets the device state, and it takes effect immediately after you join the channel.
///
/// * [enabled] true: (Default) Re-enable the local audio function, that is, to start the local audio capturing device (for example, the microphone).false: Disable the local audio function, that is, to stop local audio capturing.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> enableLocalAudio(bool enabled);
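A sketch contrasting the two controls distinguished above (illustrative only; `engine` is an already-initialized `RtcEngine`):

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Sketch: enableLocalAudio(false) stops capturing altogether,
// while muteLocalAudioStream(true) keeps capturing but stops
// publishing the stream.
Future<void> silenceLocalAudio(RtcEngine engine,
    {required bool stopCapture}) async {
  if (stopCapture) {
    await engine.enableLocalAudio(false);
  } else {
    await engine.muteLocalAudioStream(true);
  }
}
```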
/// Stops or resumes publishing the local audio stream.
/// This method does not affect any ongoing audio recording, because it does not disable the audio capture device.
///
- /// * [mute] Whether to stop publishing the local audio stream.true: Stop publishing the local audio stream.false: (Default) Resumes publishing the local audio stream.
+ /// * [mute] Whether to stop publishing the local audio stream: true: Stops publishing the local audio stream. false: (Default) Resumes publishing the local audio stream.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> muteLocalAudioStream(bool mute);

/// Stops or resumes subscribing to the audio streams of all remote users.
- /// After successfully calling this method, the local user stops or resumes subscribing to the audio streams of all remote users, including all subsequent users.Call this method after joining a channel.If you do not want to subscribe the audio streams of remote users before joining a channel, you can call joinChannel [2/2] and set autoSubscribeAudio as false.
+ /// After successfully calling this method, the local user stops or resumes subscribing to the audio streams of all remote users, including all subsequent users. Call this method after joining a channel. If you do not want to subscribe to the audio streams of remote users before joining a channel, you can set autoSubscribeAudio as false when calling joinChannel.
+ ///
+ /// * [mute] Whether to stop subscribing to the audio streams of all remote users: true: Stops subscribing to the audio streams of all remote users. false: (Default) Subscribes to the audio streams of all remote users by default.
///
- /// * [mute] Whether to stop subscribing to the audio streams of all remote users:true: Stop subscribing to the audio streams of all remote users.false: (Default) Subscribe to the audio streams of all remote users by default.
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> muteAllRemoteAudioStreams(bool mute);

/// @nodoc
Future<void> setDefaultMuteAllRemoteAudioStreams(bool mute);

- /// Cancels or resumes subscribing to the specified remote user's audio stream.
+ /// Stops or resumes subscribing to the audio stream of a specified user.
/// Call this method after joining a channel.
///
/// * [uid] The user ID of the specified user.
- /// * [mute] Whether to stop subscribing to the audio stream of the specified user.
- /// true: Unsubscribe from the specified user's audio stream.false: (Default) Subscribes to the specified user's audio stream.
+ /// * [mute] Whether to stop subscribing to the audio stream of the specified user: true: Stop subscribing to the audio stream of the specified user. false: (Default) Subscribe to the audio stream of the specified user.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> muteRemoteAudioStream({required int uid, required bool mute});

/// Stops or resumes publishing the local video stream.
/// A successful call of this method triggers the onUserMuteVideo callback on the remote client.This method executes faster than the enableLocalVideo (false) method, which controls the sending of the local video stream.This method does not affect any ongoing video recording, because it does not disable the camera.
///
/// * [mute] Whether to stop publishing the local video stream.true: Stop publishing the local video stream.false: (Default) Publish the local video stream.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> muteLocalVideoStream(bool mute);

/// Enables/Disables the local video capture.
- /// This method disables or re-enables the local video capturer, and does not affect receiving the remote video stream.After calling enableVideo , the local video capturer is enabled by default. You can call enableLocalVideo (false) to disable the local video capturer. If you want to re-enable the local video, call enableLocalVideo(true).After the local video capturer is successfully disabled or re-enabled, the SDK triggers the onRemoteVideoStateChanged callback on the remote client.You can call this method either before or after joining a channel.This method enables the internal engine and is valid after leaving the channel.
+ /// This method disables or re-enables the local video capture, and does not affect receiving the remote video stream. After calling enableVideo, the local video capture is enabled by default. You can call enableLocalVideo (false) to disable the local video capture. If you want to re-enable the local video capture, call enableLocalVideo (true). After the local video capture is successfully disabled or re-enabled, the SDK triggers the onRemoteVideoStateChanged callback on the remote client. You can call this method either before or after joining a channel. This method enables the internal engine and is valid after leaving the channel.
+ ///
+ /// * [enabled] Whether to enable the local video capture. true: (Default) Enable the local video capture. false: Disable the local video capture. Once the local video is disabled, the remote users cannot receive the video stream of the local user, while the local user can still receive the video streams of remote users. When set to false, this method does not require a local camera.
///
- /// * [enabled] Whether to enable the local video capture.true: (Default) Enable the local video capture.false: Disables the local video capture. Once the local video is disabled, the remote users can no longer receive the video stream of this user, while this user can still receive the video streams of the other remote users. When set to false, this method does not require a local camera.
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> enableLocalVideo(bool enabled);

/// Stops or resumes subscribing to the video streams of all remote users.
- /// After successfully calling this method, the local user stops or resumes subscribing to the audio streams of all remote users, including all subsequent users.Call this method after joining a channel.If you do not want to subscribe the video streams of remote users before joining a channel, you can call joinChannel [2/2] and set autoSubscribeVideo as false.
+ /// After successfully calling this method, the local user stops or resumes subscribing to the video streams of all remote users, including all subsequent users. Call this method after joining a channel. If you do not want to subscribe to the video streams of remote users before joining a channel, you can call joinChannel and set autoSubscribeVideo as false.
///
/// * [mute] Whether to stop subscribing to the video streams of all remote users.true: Stop subscribing to the video streams of all remote users.false: (Default) Subscribe to the audio streams of all remote users by default.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> muteAllRemoteVideoStreams(bool mute);

/// @nodoc
Future<void> setDefaultMuteAllRemoteVideoStreams(bool mute);

- /// Cancels or resumes subscribing to the specified remote user's video stream.
+ /// Stops or resumes subscribing to the video stream of a specified user.
/// Call this method after joining a channel.
///
/// * [uid] The user ID of the specified user.
- /// * [mute] Whether to subscribe to the specified remote user's video stream.true: Unsubscribe from the specified user's video stream.false: (Default) Subscribes to the specified user's video stream.
+ /// * [mute] Whether to stop subscribing to the specified remote user's video stream: true: Stop subscribing to the video stream of the specified user. false: (Default) Subscribe to the video stream of the specified user.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> muteRemoteVideoStream({required int uid, required bool mute});

/// Sets the stream type of the remote video.
- /// Under limited network conditions, if the publisher has not disabled the dual-stream mode using enableDualStreamMode (false), the receiver can choose to receive either the high-quality video stream or the low-quality video stream. The high-quality video stream has a higher resolution and bitrate, and the low-quality video stream has a lower resolution and bitrate.By default, users receive the high-quality video stream. Call this method if you want to switch to the low-quality video stream. This method allows the app to adjust the corresponding video stream type based on the size of the video window to reduce the bandwidth and resources. The aspect ratio of the low-quality video stream is the same as the high-quality video stream. Once the resolution of the high-quality video stream is set, the system automatically sets the resolution, frame rate, and bitrate of the low-quality video stream.The SDK enables the low-quality video stream auto mode on the sender by default (not actively sending low-quality video streams). The host at the receiving end can call this method to initiate a low-quality video stream stream request on the receiving end, and the sender automatically switches to the low-quality video stream mode after receiving the request.The result of this method returns in the onApiCallExecuted callback.You can call this method either before or after joining a channel. If you call both setRemoteVideoStreamType and setRemoteDefaultVideoStreamType , the setting of setRemoteVideoStreamType takes effect.
+ /// Under limited network conditions, if the publisher has not disabled the dual-stream mode using enableDualStreamMode (false), the receiver can choose to receive either the high-quality video stream or the low-quality video stream. The high-quality video stream has a higher resolution and bitrate, and the low-quality video stream has a lower resolution and bitrate. By default, users receive the high-quality video stream. Call this method if you want to switch to the low-quality video stream. This method allows the app to adjust the corresponding video stream type based on the size of the video window to reduce the bandwidth and resources. The aspect ratio of the low-quality video stream is the same as the high-quality video stream. Once the resolution of the high-quality video stream is set, the system automatically sets the resolution, frame rate, and bitrate of the low-quality video stream. The SDK enables the low-quality video stream auto mode on the sender by default (not actively sending low-quality video streams). The host at the receiving end can call this method to initiate a low-quality video stream request on the receiving end, and the sender automatically switches to the low-quality video stream mode after receiving the request. You can call this method either before or after joining a channel. If you call both setRemoteVideoStreamType and setRemoteDefaultVideoStreamType, the setting of setRemoteVideoStreamType takes effect.
///
/// * [uid] The user ID.
/// * [streamType] The video stream type: VideoStreamType .
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> setRemoteVideoStreamType(
    {required int uid, required VideoStreamType streamType});
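A minimal sketch of switching one remote user to the low-quality stream, e.g. for a small video window (`remoteUid` is a hypothetical ID; `engine` is an already-initialized `RtcEngine`):

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Sketch: request the low-quality stream of a remote user who has
// not disabled dual-stream mode.
Future<void> preferLowStream(RtcEngine engine, int remoteUid) async {
  await engine.setRemoteVideoStreamType(
    uid: remoteUid,
    streamType: VideoStreamType.videoStreamLow,
  );
}
```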
/// Options for subscribing to remote video streams.
- /// When a remote user has enabled dual-stream mode, you can call this method to choose the option for subscribing to the video streams sent by the remote user.If you only register one VideoFrameObserver object, the SDK subscribes to the raw video data and encoded video data by default (the effect is equivalent to setting encodedFrameOnly to false).If you only register one VideoEncodedFrameObserver object, the SDK only subscribes to the encoded video data by default (the effect is equivalent to setting encodedFrameOnly to true).If you register one VideoFrameObserver object and one VideoEncodedFrameObserver object successively, the SDK subscribes to the encoded video data by default (the effect is equivalent to setting encodedFrameOnly to false).If you call this method first with the options parameter set, and then register one VideoFrameObserver or VideoEncodedFrameObserver object, you need to call this method again and set the options parameter as described in the above two items to get the desired results.Agora recommends the following steps:Set autoSubscribeVideo to false when calling joinChannel [2/2] to join a channel.Call this method after receiving the onUserJoined callback to set the subscription options for the specified remote user's video stream.Call the muteRemoteVideoStream method to resume subscribing to the video stream of the specified remote user. If you set encodedFrameOnly to true in the previous step, the SDK triggers the onEncodedVideoFrameReceived callback locally to report the received encoded video frame information.
+ /// When a remote user has enabled dual-stream mode, you can call this method to choose the option for subscribing to the video streams sent by the remote user. If you only register one VideoFrameObserver object, the SDK subscribes to the raw video data and encoded video data by default (the effect is equivalent to setting encodedFrameOnly to false). If you only register one VideoEncodedFrameObserver object, the SDK only subscribes to the encoded video data by default (the effect is equivalent to setting encodedFrameOnly to true). If you register one VideoFrameObserver object and one VideoEncodedFrameObserver object successively, the SDK subscribes to the encoded video data by default (the effect is equivalent to setting encodedFrameOnly to false). If you call this method first with the options parameter set, and then register one VideoFrameObserver or VideoEncodedFrameObserver object, you need to call this method again and set the options parameter as described in the above two items to get the desired results. Agora recommends the following steps: Set autoSubscribeVideo to false when calling joinChannel to join a channel. Call this method after receiving the onUserJoined callback to set the subscription options for the specified remote user's video stream. Call the muteRemoteVideoStream method to resume subscribing to the video stream of the specified remote user. If you set encodedFrameOnly to true in the previous step, the SDK triggers the onEncodedVideoFrameReceived callback locally to report the received encoded video frame information.
///
/// * [uid] The user ID of the remote user.
/// * [options] The video subscription options. See VideoSubscriptionOptions .
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> setRemoteVideoSubscriptionOptions(
    {required int uid, required VideoSubscriptionOptions options});

/// Sets the default stream type of subscription for remote video streams.
- /// The SDK enables the low-quality video stream auto mode on the sender by default (not actively sending low-quality video streams). The host at the receiving end can call this method to initiate a low-quality video stream stream request on the receiving end, and the sender automatically switches to the low-quality video stream mode after receiving the request.Under limited network conditions, if the publisher has not disabled the dual-stream mode using enableDualStreamMode (false), the receiver can choose to receive either the high-quality video stream or the low-quality video stream. The high-quality video stream has a higher resolution and bitrate, and the low-quality video stream has a lower resolution and bitrate.By default, users receive the high-quality video stream. Call this method if you want to switch to the low-quality video stream. This method allows the app to adjust the corresponding video stream type based on the size of the video window to reduce the bandwidth and resources. The aspect ratio of the low-quality video stream is the same as the high-quality video stream. Once the resolution of the high-quality video stream is set, the system automatically sets the resolution, frame rate, and bitrate of the low-quality video stream.The result of this method returns in the onApiCallExecuted callback.Call this method before joining a channel. Agora does not support changing the default subscribed video stream type after joining a channel.If you call both this method and setRemoteVideoStreamType , the SDK applies the settings in the setRemoteVideoStreamType method.
+ /// The SDK enables the low-quality video stream auto mode on the sender by default (not actively sending low-quality video streams). The host at the receiving end can call this method to initiate a low-quality video stream request on the receiving end, and the sender automatically switches to the low-quality video stream mode after receiving the request.
+ /// Under limited network conditions, if the publisher has not disabled the dual-stream mode using enableDualStreamMode (false), the receiver can choose to receive either the high-quality video stream or the low-quality video stream. The high-quality video stream has a higher resolution and bitrate, and the low-quality video stream has a lower resolution and bitrate. By default, users receive the high-quality video stream. Call this method if you want to switch to the low-quality video stream. This method allows the app to adjust the corresponding video stream type based on the size of the video window to reduce the bandwidth and resources. The aspect ratio of the low-quality video stream is the same as the high-quality video stream. Once the resolution of the high-quality video stream is set, the system automatically sets the resolution, frame rate, and bitrate of the low-quality video stream. Call this method before joining a channel. The SDK does not support changing the default subscribed video stream type after joining a channel. If you call both this method and setRemoteVideoStreamType, the SDK applies the settings in the setRemoteVideoStreamType method.
///
/// * [streamType] The default video-stream type. See VideoStreamType .
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> setRemoteDefaultVideoStreamType(VideoStreamType streamType);

/// Set the blocklist of subscriptions for audio streams.
@@ -3109,79 +3386,121 @@ abstract class RtcEngine {
///
/// * [uidList] The user ID list of users that you do not want to subscribe to.If you want to specify the audio streams of a user that you do not want to subscribe to, add the user ID in this list. If you want to remove a user from the blocklist, you need to call the setSubscribeAudioBlocklist method to update the user ID list; this means you only add the uid of users that you do not want to subscribe to in the new user ID list.
/// * [uidNumber] The number of users in the user ID list.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> setSubscribeAudioBlocklist(
    {required List<int> uidList, required int uidNumber});

/// Sets the allowlist of subscriptions for audio streams.
- /// You can call this method to specify the audio streams of a user that you want to subscribe to.If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect.You can call this method either before or after joining a channel.The allowlist is not affected by the setting in muteRemoteAudioStream , muteAllRemoteAudioStreams and autoSubscribeAudio in ChannelMediaOptions .Once the allowlist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel.
+ /// You can call this method to specify the audio streams of a user that you want to subscribe to. If a user is added to the allowlist and blocklist at the same time, only the blocklist takes effect.
+ /// You can call this method either before or after joining a channel.
+ /// The allowlist is not affected by the settings in muteRemoteAudioStream, muteAllRemoteAudioStreams, and autoSubscribeAudio in ChannelMediaOptions. Once the allowlist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel.
///
/// * [uidList] The user ID list of users that you want to subscribe to.If you want to specify the audio streams of a user for subscription, add the user ID in this list. If you want to remove a user from the allowlist, you need to call the setSubscribeAudioAllowlist method to update the user ID list; this means you only add the uid of users that you want to subscribe to in the new user ID list.
/// * [uidNumber] The number of users in the user ID list.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> setSubscribeAudioAllowlist(
    {required List<int> uidList, required int uidNumber});
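A minimal sketch of the allowlist call above (the uids are hypothetical; `engine` is an already-initialized `RtcEngine`):

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Sketch: subscribe only to the audio of two specific users;
// uidNumber mirrors the length of the uid list.
Future<void> allowOnlyTheseSpeakers(RtcEngine engine) async {
  const allowed = <int>[1001, 1002];
  await engine.setSubscribeAudioAllowlist(
    uidList: allowed,
    uidNumber: allowed.length,
  );
}
```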
/// Set the blocklist of subscriptions for video streams.
- /// You can call this method to specify the video streams of a user that you do not want to subscribe to.If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect.Once the blocklist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel.You can call this method either before or after joining a channel.The blocklist is not affected by the setting in muteRemoteVideoStream , muteAllRemoteVideoStreams and autoSubscribeAudio in ChannelMediaOptions .
+ /// You can call this method to specify the video streams of a user that you do not want to subscribe to. If a user is added to the allowlist and blocklist at the same time, only the blocklist takes effect.
+ /// Once the blocklist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel.
+ /// You can call this method either before or after joining a channel.
+ /// The blocklist is not affected by the settings in muteRemoteVideoStream, muteAllRemoteVideoStreams, and autoSubscribeVideo in ChannelMediaOptions.
///
- /// * [uidNumber] The number of users in the user ID list.
/// * [uidList] The user ID list of users that you do not want to subscribe to.If you want to specify the video streams of a user that you do not want to subscribe to, add the user ID of that user in this list. If you want to remove a user from the blocklist, you need to call the setSubscribeVideoBlocklist method to update the user ID list; this means you only add the uid of users that you do not want to subscribe to in the new user ID list.
+ /// * [uidNumber] The number of users in the user ID list.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> setSubscribeVideoBlocklist(
    {required List<int> uidList, required int uidNumber});

/// Set the allowlist of subscriptions for video streams.
- /// You can call this method to specify the video streams of a user that you want to subscribe to.If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect.
- /// Once the allowlist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel.
- /// You can call this method either before or after joining a channel.
- /// The allowlist is not affected by the setting in muteRemoteVideoStream , muteAllRemoteVideoStreams and autoSubscribeAudio in ChannelMediaOptions .
+ /// You can call this method to specify the video streams of a user that you want to subscribe to. If a user is added to the allowlist and blocklist at the same time, only the blocklist takes effect. Once the allowlist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel. You can call this method either before or after joining a channel. The allowlist is not affected by the settings in muteRemoteVideoStream, muteAllRemoteVideoStreams, and autoSubscribeVideo in ChannelMediaOptions.
///
- /// * [uidNumber] The number of users in the user ID list.
/// * [uidList] The user ID list of users that you want to subscribe to.If you want to specify the video streams of a user for subscription, add the user ID of that user in this list. If you want to remove a user from the allowlist, you need to call the setSubscribeVideoAllowlist method to update the user ID list; this means you only add the uid of users that you want to subscribe to in the new user ID list.
- Future<void> setSubscribeVideoAllowlist(
+ /// * [uidNumber] The number of users in the user ID list.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
+ Future<void> setSubscribeVideoAllowlist(
    {required List<int> uidList, required int uidNumber});

/// Enables the reporting of users' volume indication.
- /// This method enables the SDK to regularly report the volume information of the local user who sends a stream and remote users (up to three) whose instantaneous volumes are the highest to the app. Once you call this method and users send streams in the channel, the SDK triggers the onAudioVolumeIndication callback at the time interval set in this method.You can call this method either before or after joining a channel.
+ /// This method enables the SDK to regularly report to the app the volume information of the local user who sends a stream and of remote users (three users at most) whose instantaneous volumes are the highest. Once you call this method and users send streams in the channel, the SDK triggers the onAudioVolumeIndication callback at the time interval set in this method. You can call this method either before or after joining a channel.
///
- /// * [interval] Sets the time interval between two consecutive volume indications:≤ 0: Disables the volume indication.> 0: Time interval (ms) between two consecutive volume indications. You need to set this parameter to an integer multiple of 200. If the value is lower than 200, the SDK automatically adjusts the value to 200.
- /// * [smooth] The smoothing factor sets the sensitivity of the audio volume indicator. The value ranges between 0 and 10. The recommended value is 3. The greater the value, the more sensitive the indicator.
- /// * [reportVad] true: Enable the voice activity detection of the local user. Once it is enabled,the vad parameter of the onAudioVolumeIndication callback reports the voice activity status of the local user.false: (Default) Disable the voice activity detection of the local user. Once it is disabled, the vad parameter of the onAudioVolumeIndication callback does not report the voice activity status of the local user, except for the scenario where the engine automatically detects the voice activity of the local user.
+ /// * [interval] Sets the time interval between two consecutive volume indications: ≤ 0: Disables the volume indication. > 0: Time interval (ms) between two consecutive volume indications. The lowest value is 50.
+ /// * [smooth] The smoothing factor that sets the sensitivity of the audio volume indicator. The value ranges between 0 and 10. The recommended value is 3. The greater the value, the more sensitive the indicator.
+ /// * [reportVad] true: Enables the voice activity detection of the local user. Once it is enabled, the vad parameter of the onAudioVolumeIndication callback reports the voice activity status of the local user. false: (Default) Disables the voice activity detection of the local user. Once it is disabled, the vad parameter of the onAudioVolumeIndication callback does not report the voice activity status of the local user, except for the scenario where the engine automatically detects the voice activity of the local user.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
Future<void> enableAudioVolumeIndication(
    {required int interval, required int smooth, required bool reportVad});
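A minimal sketch of the volume indication settings above (values are illustrative; `engine` is an already-initialized `RtcEngine`):

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Sketch: report speaker volumes every 200 ms with local voice
// activity detection; results arrive via onAudioVolumeIndication.
Future<void> watchVolumes(RtcEngine engine) async {
  await engine.enableAudioVolumeIndication(
    interval: 200,
    smooth: 3,
    reportVad: true,
  );
}
```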
-  /// Starts the audio recording on the client.
-  /// The Agora SDK allows recording during a call. After successfully calling this method, you can record the audio of all the users in the channel and get an audio recording file. Supported formats of the recording file are as follows:WAV: High-fidelity files with typically larger file sizes. For example, the size of a WAV file with a sample rate of 32,000 Hz and a recording duration of 10 minutes is around 73 MB.AAC: Low-fidelity files with typically smaller file sizes. For example, if the sample rate is 32,000 Hz and the recording quality is audioRecordingQualityMedium, the file size for a 10-minute recording is approximately 2 MB.Once the user leaves the channel, the recording automatically stops.Call this method after joining a channel.
+  /// Starts audio recording on the client and sets recording configurations.
+  /// The Agora SDK allows recording during a call. After successfully calling this method, you can record the audio of users in the channel and get an audio recording file. Supported formats of the recording file are as follows: WAV: High-fidelity files with typically larger file sizes. For example, if the sample rate is 32,000 Hz, the file size for a 10-minute recording is approximately 73 MB. AAC: Low-fidelity files with typically smaller file sizes. For example, if the sample rate is 32,000 Hz and the recording quality is audioRecordingQualityMedium, the file size for a 10-minute recording is approximately 2 MB. Once the user leaves the channel, the recording automatically stops. Call this method after joining a channel.
+  ///
+  /// * [config] Recording configurations. See AudioRecordingConfiguration.
   ///
-  /// * [config] Recording configuration. See AudioRecordingConfiguration .
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> startAudioRecording(AudioRecordingConfiguration config);

   /// Registers an encoded audio observer.
-  /// Call this method after joining a channel.You can call this method or the startAudioRecording method to set the audio content and audio quality. Agora recommends not using this method and startAudioRecording at the same time; otherwise, only the method called later takes effect.
+  /// Call this method after joining a channel. You can call this method or startAudioRecording to set the recording type and quality of audio files, but Agora does not recommend using this method and startAudioRecording at the same time. Only the method called later will take effect.
   ///
   /// * [config] Observer settings for the encoded audio. See AudioEncodedFrameObserverConfig.
   /// * [observer] The encoded audio observer. See AudioEncodedFrameObserver.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly.
   void registerAudioEncodedFrameObserver(
       {required AudioEncodedFrameObserverConfig config,
       required AudioEncodedFrameObserver observer});

   /// Stops the audio recording on the client.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> stopAudioRecording();

   /// Creates a media player instance.
   ///
   /// Returns
-  /// The MediaPlayer instance, if the method call succeeds.An empty pointer , if the method call fails.
-  Future<MediaPlayer> createMediaPlayer();
+  /// The MediaPlayer instance, if the method call succeeds. An empty pointer, if the method call fails.
+  Future<MediaPlayer> createMediaPlayer();

   /// Destroys the media player instance.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> destroyMediaPlayer(MediaPlayer mediaPlayer);

+  /// @nodoc
+  Future<MediaRecorder?> createMediaRecorder(RecorderStreamInfo info);
+
+  /// @nodoc
+  Future<void> destroyMediaRecorder(MediaRecorder mediaRecorder);
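To illustrate the media player lifecycle documented above, a hedged sketch; `engine` and the URL are assumptions, and the open() parameters follow the MediaPlayer API as documented elsewhere in this SDK:

```dart
// Sketch only: `engine` is an assumed, already-initialized RtcEngine.
final MediaPlayer player = await engine.createMediaPlayer();
await player.open(url: 'https://example.com/music.mp3', startPos: 0);
// ... use the player (play, seek, attach it to a video view) ...
await engine.destroyMediaPlayer(player); // Release it when done.
```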
+
+  /// Starts playing the music file.
-  /// This method mixes the specified local or online audio file with the audio from the microphone, or replaces the microphone's audio with the specified local or remote audio file. A successful method call triggers the onAudioMixingStateChanged (audioMixingStatePlaying) callback. When the audio mixing file playback finishes, the SDK triggers the onAudioMixingStateChanged(audioMixingStateStopped) callback on the local client.For the audio file formats supported by this method, see What formats of audio files the Agora RTC SDK support.You can call this method either before or after joining a channel. If you need to call startAudioMixing multiple times, ensure that the time interval between calling this method is more than 500 ms.If the local music file does not exist, the SDK does not support the file format, or the the SDK cannot access the music file URL, the SDK reports the warn code 701.
+  /// This method mixes the specified local or online audio file with the audio from the microphone, or replaces the microphone's audio with the specified local or remote audio file. A successful method call triggers the onAudioMixingStateChanged(audioMixingStatePlaying) callback. When the audio mixing file playback finishes, the SDK triggers the onAudioMixingStateChanged(audioMixingStateStopped) callback on the local client. For the audio file formats supported by this method, see What formats of audio files does the Agora RTC SDK support.
+  /// You can call this method either before or after joining a channel. If you need to call startAudioMixing multiple times, ensure that the time interval between calls is more than 500 ms. If the local music file does not exist, the SDK does not support the file format, or the SDK cannot access the music file URL, the SDK reports error code 701.
   ///
-  /// * [filePath] File path:Android: The file path, which needs to be accurate to the file name and suffix. Agora supports using a URI address, an absolute path, or a path that starts with /assets/. You might encounter permission issues if you use an absolute path to access a local file, so Agora recommends using a URI address instead. For example: content://com.android.providers.media.documents/document/audio%3A14441.Windows: The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example: C:\music\audio.mp4.iOS or macOS: The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example: /var/mobile/Containers/Data/audio.mp4.
+  /// * [filePath] File path:
+  /// Android: The file path, which needs to be accurate to the file name and suffix. Agora supports URL addresses, absolute paths, or file paths that start with /assets/. You might encounter permission issues if you use an absolute path to access a local file, so Agora recommends using a URI address instead. For example: content://com.android.providers.media.documents/document/audio%3A14441
+  /// Windows: The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example: C:\music\audio.mp4.
+  /// iOS or macOS: The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example: /var/mobile/Containers/Data/audio.mp4.
   /// * [loopback] Whether to only play music files on the local client: true: Only play music files on the local client so that only the local user can hear the music. false: Publish music files to remote clients so that both the local user and remote users can hear the music.
   /// * [cycle] The number of times the music file plays. ≥ 0: The number of playback times. For example, 0 means that the SDK does not play the music file, while 1 means that the SDK plays it once. -1: Play the audio file in an infinite loop.
   /// * [startPos] The playback position (ms) of the music file.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure. -1: A general error occurs (no specified reason). -2: The parameter is invalid. -3: The SDK is not ready. Possible causes: the audio module is disabled; the program is not complete; the initialization of RtcEngine fails (reinitialize the RtcEngine).
   Future<void> startAudioMixing(
       {required String filePath,
       required bool loopback,
@@ -3190,20 +3509,32 @@ abstract class RtcEngine {
   /// Stops playing and mixing the music file.
   /// This method stops the audio mixing. Call this method when you are in a channel.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> stopAudioMixing();

-  /// Pauses playing the music file.
+  /// Pauses playing and mixing the music file.
   /// Call this method after joining a channel.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> pauseAudioMixing();

   /// Resumes playing and mixing the music file.
   /// This method resumes playing and mixing the music file. Call this method when you are in a channel.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> resumeAudioMixing();

   /// Selects the audio track used during playback.
   /// After getting the track index of the audio file, you can call this method to specify any track to play. For example, if different tracks of a multi-track file store songs in different languages, you can call this method to set the playback language. For the supported formats of audio files, see . You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (audioMixingStatePlaying) callback.
   ///
   /// * [index] The audio track you want to specify. The value range is [0, getAudioTrackCount()].
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> selectAudioTrack(int index);

   /// Gets the index of audio tracks of the current music file.
@@ -3214,39 +3545,48 @@ abstract class RtcEngine {
   Future<int> getAudioTrackCount();

   /// Adjusts the volume during audio mixing.
-  /// This method adjusts the audio mixing volume on both the local client and remote clients.Call this method after the startAudioMixing method.
+  /// This method adjusts the audio mixing volume on both the local client and remote clients. Call this method after startAudioMixing.
   ///
   /// * [volume] Audio mixing volume. The value ranges between 0 and 100. The default value is 100, which means the original volume.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> adjustAudioMixingVolume(int volume);
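A sketch of the audio mixing flow just documented, assuming an initialized RtcEngine named `engine` and a placeholder file path:

```dart
// Sketch only: `engine` is an assumed, already-initialized RtcEngine.
engine.registerEventHandler(RtcEngineEventHandler(
  onAudioMixingStateChanged: (state, reason) {
    // Expect audioMixingStatePlaying after a successful start.
    print('mixing state: $state, reason: $reason');
  },
));

// Mix the file with the microphone audio and publish it, playing it once.
await engine.startAudioMixing(
    filePath: '/path/to/music.mp3', loopback: false, cycle: 1);
```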
   /// Adjusts the volume of audio mixing for publishing.
-  /// This method adjusts the audio mixing volume on the remote clients.Call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged(audioMixingStatePlaying) callback.
+  /// This method adjusts the volume of audio mixing for publishing (sending to other users). Call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged(audioMixingStatePlaying) callback.
   ///
   /// * [volume] The volume of audio mixing for publishing. The value ranges between 0 and 100 (default). 100 represents the original volume.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> adjustAudioMixingPublishVolume(int volume);

   /// Retrieves the audio mixing volume for publishing.
-  /// This method helps to troubleshoot audio volume‑related issues.You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged(audioMixingStatePlaying) callback.
+  /// This method helps troubleshoot audio volume-related issues. You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged(audioMixingStatePlaying) callback.
   ///
   /// Returns
-  /// The audio mixing volume, if this method call succeeds. The value range is [0,100].< 0: Failure.
+  /// ≥ 0: The audio mixing volume, if this method call succeeds. The value range is [0,100]. < 0: Failure.
   Future<int> getAudioMixingPublishVolume();

   /// Adjusts the volume of audio mixing for local playback.
   /// Call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged(audioMixingStatePlaying) callback.
   ///
   /// * [volume] The volume of audio mixing for local playback. The value ranges between 0 and 100 (default). 100 represents the original volume.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> adjustAudioMixingPlayoutVolume(int volume);

   /// Retrieves the audio mixing volume for local playback.
-  /// This method retrieves the audio mixing volume for local playback. You can use it to troubleshoot audio volume related issues.You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged(audioMixingStatePlaying) callback.
+  /// This method helps troubleshoot audio volume-related issues. You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged(audioMixingStatePlaying) callback.
   ///
   /// Returns
-  /// The audio mixing volume, if this method call succeeds. The value range is [0,100].< 0: Failure.
+  /// ≥ 0: The audio mixing volume, if this method call succeeds. The value range is [0,100]. < 0: Failure.
   Future<int> getAudioMixingPlayoutVolume();

   /// Retrieves the duration (ms) of the music file.
-  /// Retrieves the total duration (ms) of the audio file.You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (audioMixingStatePlaying) callback.
+  /// Retrieves the total duration (ms) of the audio. You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged(audioMixingStatePlaying) callback.
   ///
   /// Returns
   /// ≥ 0: The audio mixing duration, if this method call succeeds. < 0: Failure.
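The volume controls above can be combined as in the following sketch; it assumes audio mixing is already playing on `engine`:

```dart
// Sketch only: assumes startAudioMixing already succeeded on `engine`.
await engine.adjustAudioMixingVolume(80);        // local and remote
await engine.adjustAudioMixingPublishVolume(50); // what remote users hear
await engine.adjustAudioMixingPlayoutVolume(90); // what the local user hears

final int publishVolume = await engine.getAudioMixingPublishVolume();
print('publish volume: $publishVolume'); // expected range: 0..100
```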
@@ -3260,9 +3600,12 @@ abstract class RtcEngine {
   Future<int> getAudioMixingCurrentPosition();

   /// Sets the audio mixing position.
-  /// Call this method to set the playback position of the music file to a different starting position, rather than playing the file from the beginning.You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged(audioMixingStatePlaying) callback.
+  /// Call this method to set the playback position of the music file to a different starting position (the default is to play from the beginning). You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged(audioMixingStatePlaying) callback.
   ///
   /// * [pos] Integer. The playback position (ms).
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setAudioMixingPosition(int pos);

   /// Sets the channel mode of the current audio file.
@@ -3270,47 +3613,61 @@ abstract class RtcEngine {
   ///
   /// * [mode] The channel mode. See AudioMixingDualMonoMode.
   ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setAudioMixingDualMonoMode(AudioMixingDualMonoMode mode);

   /// Sets the pitch of the local music file.
   /// When a local music file is mixed with a local human voice, call this method to set the pitch of the local music file only. You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged(audioMixingStatePlaying) callback.
   ///
   /// * [pitch] Sets the pitch of the local music file by the chromatic scale. The default value is 0, which means keeping the original pitch. The value ranges from -12 to 12, and the pitch value between consecutive values is a chromatic value. The greater the absolute value of this parameter, the higher or lower the pitch of the local music file.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setAudioMixingPitch(int pitch);

   /// Retrieves the volume of the audio effects.
-  /// The volume range is [0,100]. The default value is 100, the original volume.Call this method after the playEffect method.
+  /// The volume is an integer ranging from 0 to 100. The default value is 100, which means the original volume. Call this method after playEffect.
   ///
   /// Returns
   /// Volume of the audio effects, if this method call succeeds. < 0: Failure.
   Future<int> getEffectsVolume();

   /// Sets the volume of the audio effects.
-  /// Call this method after the playEffect method.
+  /// Call this method after playEffect.
   ///
   /// * [volume] The playback volume. The value range is [0, 100]. The default value is 100, which represents the original volume.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setEffectsVolume(int volume);
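A sketch for the two file-level adjustments above (channel mode and pitch), under the same assumptions as the earlier mixing example:

```dart
// Sketch only: assumes startAudioMixing already succeeded on `engine`.
// Play only the left channel of the file to both ears...
await engine.setAudioMixingDualMonoMode(
    AudioMixingDualMonoMode.audioMixingDualMonoL);
// ...and lower the music by three semitones.
await engine.setAudioMixingPitch(-3);
```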
   /// Preloads a specified audio effect file into the memory.
-  /// To ensure smooth communication, limit the size of the audio effect file. Agora recommends using this method to preload the audio effect before calling joinChannel [2/2].This method does not support online audio effect files.For the audio file formats supported by this method, see What formats of audio files the Agora RTC SDK support.
+  /// To ensure smooth communication, it is recommended that you limit the size of the audio effect file. You can call this method to preload the audio effect before calling joinChannel. This method does not support online audio effect files. For the audio file formats supported by this method, see What formats of audio files does the Agora RTC SDK support.
   ///
   /// * [soundId] The audio effect ID. The ID of each audio effect file is unique.
-  /// * [filePath] File path:Android: The file path, which needs to be accurate to the file name and suffix. Agora supports using a URI address, an absolute path, or a path that starts with /assets/. You might encounter permission issues if you use an absolute path to access a local file, so Agora recommends using a URI address instead. For example: content://com.android.providers.media.documents/document/audio%3A14441.Windows: The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example: C:\music\audio.mp4.iOS or macOS: The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example: /var/mobile/Containers/Data/audio.mp4.
+  /// * [filePath] File path: Android: The file path, which needs to be accurate to the file name and suffix. Agora supports URL addresses, absolute paths, or file paths that start with /assets/. You might encounter permission issues if you use an absolute path to access a local file, so Agora recommends using a URI address instead. For example: content://com.android.providers.media.documents/document/audio%3A14441. Windows: The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example: C:\music\audio.mp4. iOS or macOS: The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example: /var/mobile/Containers/Data/audio.mp4.
   /// * [startPos] The playback position (ms) of the audio effect file.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> preloadEffect(
       {required int soundId, required String filePath, int startPos = 0});
   /// Plays the specified local or online audio effect file.
-  /// If you use this method to play an online audio effect file, Agora recommends that you cache the online audio effect file to your local device, call preloadEffect to preload the cached audio effect file into memory, and then call this method to play the audio effect. Otherwise, you might encounter playback failures or no sound during playback due to loading timeouts or failures.To play multiple audio effect files at the same time, call this method multiple times with different soundId and filePath. For a better user experience, Agora recommends playing no more than three audio effect files at the same time. After the playback of an audio effect file completes, the SDK triggers the onAudioEffectFinished callback.
+  /// If you use this method to play an online audio effect file, Agora recommends that you cache the online audio effect file to your local device, call preloadEffect to preload the cached audio effect file into memory, and then call this method to play the audio effect. Otherwise, you might encounter playback failures or no sound during playback due to loading timeouts or failures. To play multiple audio effect files at the same time, call this method multiple times with different soundId and filePath. To achieve the optimal user experience, Agora recommends playing no more than three audio effect files at the same time. After the playback of an audio effect file completes, the SDK triggers the onAudioEffectFinished callback.
   ///
   /// * [soundId] The audio effect ID. The ID of each audio effect file is unique. If you have preloaded an audio effect into memory by calling preloadEffect, ensure that the value of this parameter is the same as that of soundId in preloadEffect.
   /// * [filePath] The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example, C:\music\audio.mp4. Supported audio formats include MP3, AAC, M4A, MP4, WAV, and 3GP. See supported audio formats. If you have preloaded an audio effect into memory by calling preloadEffect, ensure that the value of this parameter is the same as that of filePath in preloadEffect.
-  /// * [loopCount] The number of times the audio effect loops.≥ 0: The number of playback times. For example, 1 means looping one time, which means playing the audio effect two times in total.-1: Play the audio effect in an infinite loop.
+  /// * [loopCount] The number of times the audio effect loops. ≥ 0: The number of playback times. For example, 1 means looping one time, which means playing the audio effect two times in total. -1: Play the audio effect in an infinite loop.
   /// * [pitch] The pitch of the audio effect. The value range is 0.5 to 2.0. The default value is 1.0, which means the original pitch. The lower the value, the lower the pitch.
   /// * [pan] The spatial position of the audio effect. The value ranges between -1.0 and 1.0: -1.0: The audio effect is heard on the left of the user. 0.0: The audio effect is heard in front of the user. 1.0: The audio effect is heard on the right of the user.
   /// * [gain] The volume of the audio effect. The value range is 0.0 to 100.0. The default value is 100.0, which means the original volume. The smaller the value, the lower the volume.
   /// * [publish] Whether to publish the audio effect to the remote users: true: Publish the audio effect to the remote users. Both the local user and remote users can hear the audio effect. false: Do not publish the audio effect to the remote users. Only the local user can hear the audio effect.
   /// * [startPos] The playback position (ms) of the audio effect file.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> playEffect(
       {required int soundId,
       required String filePath,
@@ -3321,14 +3678,17 @@ abstract class RtcEngine {
       bool publish = false,
       int startPos = 0});
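A sketch combining preloadEffect and playEffect as documented above; `engine`, the soundId, and the path are assumptions:

```dart
// Sketch only: `engine` is an assumed, already-initialized RtcEngine.
const int soundId = 7;
const String path = '/path/to/ding.mp3'; // placeholder

await engine.preloadEffect(soundId: soundId, filePath: path);

// Play once: original pitch, centered, full volume, audible to remote users.
await engine.playEffect(
    soundId: soundId,
    filePath: path,
    loopCount: 0,
    pitch: 1.0,
    pan: 0.0,
    gain: 100,
    publish: true);
```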
-  /// Plays all audio effects.
+  /// Plays all audio effect files.
   /// After calling preloadEffect multiple times to preload multiple audio effects into the memory, you can call this method to play all the specified audio effects for all users in the channel.
   ///
-  /// * [loopCount] The number of times the audio effect loops:-1: Play the audio effect in an indefinite loop until you call stopEffect or stopAllEffects .0: Play the audio effect once.1: Play the audio effect twice.
+  /// * [loopCount] The number of times the audio effect loops: -1: Play the audio effect files in an indefinite loop until you call stopEffect or stopAllEffects. 0: Play the audio effect once. 1: Play the audio effect twice.
   /// * [pitch] The pitch of the audio effect. The value ranges between 0.5 and 2.0. The default value is 1.0 (original pitch). The lower the value, the lower the pitch.
   /// * [pan] The spatial position of the audio effect. The value ranges between -1.0 and 1.0: -1.0: The audio effect shows on the left. 0: The audio effect shows ahead. 1.0: The audio effect shows on the right.
   /// * [gain] The volume of the audio effect. The value range is [0, 100]. The default value is 100 (original volume). The smaller the value, the lower the volume.
-  /// * [publish] Whether to publish the audio effect to the remote users:true: Publish the audio effect to the remote users. Both the local user and remote users can hear the audio effect.false: Do not publish the audio effect to the remote users. Only the local user can hear the audio effect.
+  /// * [publish] Whether to publish the audio effect to the remote users: true: Publish the audio effect to the remote users. Both the local user and remote users can hear the audio effect. false: (Default) Do not publish the audio effect to the remote users. Only the local user can hear the audio effect.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> playAllEffects(
       {required int loopCount,
       required double pitch,
@@ -3336,56 +3696,86 @@ abstract class RtcEngine {
       required int gain,
       bool publish = false});

-  /// Gets the volume of a specified audio effect.
+  /// Gets the volume of a specified audio effect file.
   ///
-  /// * [soundId] The ID of the audio effect.
+  /// * [soundId] The ID of the audio effect file.
   ///
   /// Returns
-  /// The volume of the specified audio effect, if the method call succeeds. The value range is [0,100]. 100 represents the original volume. < 0: Failure.
+  /// ≥ 0: The volume of the specified audio effect, if the method call succeeds. The value ranges between 0 and 100. 100 represents the original volume. < 0: Failure.
   Future<int> getVolumeOfEffect(int soundId);

   /// Sets the volume of a specified audio effect.
   ///
-  /// * [soundId] The audio effect ID. The ID of each audio effect file is unique.
+  /// * [soundId] The ID of the audio effect. The ID of each audio effect file is unique.
   /// * [volume] The playback volume. The value range is [0, 100]. The default value is 100, which represents the original volume.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setVolumeOfEffect({required int soundId, required int volume});

-  /// Pauses playing a specified audio effect file.
+  /// Pauses a specified audio effect file.
   ///
   /// * [soundId] The audio effect ID. The ID of each audio effect file is unique.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> pauseEffect(int soundId);

-  /// Pauses playing all audio effect files.
+  /// Pauses all audio effects.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> pauseAllEffects();

   /// Resumes playing a specified audio effect.
   ///
   /// * [soundId] The audio effect ID. The ID of each audio effect file is unique.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> resumeEffect(int soundId);

-  /// Resumes playing all audio effects.
+  /// Resumes playing all audio effect files.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> resumeAllEffects();

   /// Stops playing a specified audio effect.
   ///
-  /// * [soundId] The audio effect ID. The ID of each audio effect file is unique.
+  /// * [soundId] The ID of the audio effect. Each audio effect has a unique ID.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> stopEffect(int soundId);

   /// Stops playing all audio effects.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> stopAllEffects();

   /// Releases a specified preloaded audio effect from the memory.
   ///
-  /// * [soundId] The audio effect ID. The ID of each audio effect file is unique.
+  /// * [soundId] The ID of the audio effect. Each audio effect has a unique ID.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> unloadEffect(int soundId);

   /// Releases all preloaded audio effects from the memory.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> unloadAllEffects();
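The pause/resume/stop/unload calls above compose into a simple lifecycle; a sketch under the same assumptions as the playEffect example:

```dart
// Sketch only: assumes the effect with soundId 7 is currently playing.
await engine.pauseEffect(7);
await engine.resumeEffect(7);
await engine.stopEffect(7);
await engine.unloadEffect(7); // Free the preloaded file from memory.
```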
   /// Retrieves the duration of the audio effect file.
   /// Call this method after joining a channel.
   ///
-  /// * [filePath] File path:Android: The file path, which needs to be accurate to the file name and suffix. Agora supports using a URI address, an absolute path, or a path that starts with /assets/. You might encounter permission issues if you use an absolute path to access a local file, so Agora recommends using a URI address instead. For example: content://com.android.providers.media.documents/document/audio%3A14441.Windows: The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example: C:\music\audio.mp4.iOS or macOS: The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example: /var/mobile/Containers/Data/audio.mp4.
+  /// * [filePath] File path:
+  /// Android: The file path, which needs to be accurate to the file name and suffix. Agora supports URL addresses, absolute paths, or file paths that start with /assets/. You might encounter permission issues if you use an absolute path to access a local file, so Agora recommends using a URI address instead. For example: content://com.android.providers.media.documents/document/audio%3A14441
+  /// Windows: The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example: C:\music\audio.mp4.
+  /// iOS or macOS: The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example: /var/mobile/Containers/Data/audio.mp4.
   ///
   /// Returns
   /// The total duration (ms) of the specified audio effect file, if the method call succeeds. < 0: Failure.
@@ -3396,6 +3786,9 @@ abstract class RtcEngine {
   ///
   /// * [soundId] The audio effect ID. The ID of each audio effect file is unique.
   /// * [pos] The playback position (ms) of the audio effect file.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setEffectPosition({required int soundId, required int pos});

   /// Retrieves the playback position of the audio effect file.
@@ -3407,10 +3800,13 @@ abstract class RtcEngine {
   /// The playback position (ms) of the specified audio effect file, if the method call succeeds. < 0: Failure.
   Future<int> getEffectCurrentPosition(int soundId);

-  /// Enables/Disables stereo panning for remote users.
+  /// Enables or disables stereo panning for remote users.
   /// Ensure that you call this method before joining a channel to enable stereo panning for remote users so that the local user can track the position of a remote user by calling setRemoteVoicePosition.
   ///
   /// * [enabled] Whether to enable stereo panning for remote users: true: Enable stereo panning. false: Disable stereo panning.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> enableSoundPositionIndication(bool enabled);

   /// Sets the 2D position (the position on the horizontal plane) of the remote user's voice.
@@ -3419,6 +3815,9 @@ abstract class RtcEngine {
   /// * [uid] The user ID of the remote user.
   /// * [pan] The voice position of the remote user. The value ranges from -1.0 to 1.0: 0.0: (Default) The remote voice comes from the front. -1.0: The remote voice comes from the left. 1.0: The remote voice comes from the right.
   /// * [gain] The volume of the remote user. The value ranges from 0.0 to 100.0. The default value is 100.0 (the original volume of the remote user). The smaller the value, the lower the volume.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setRemoteVoicePosition(
       {required int uid, required double pan, required double gain});
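A sketch of the stereo panning flow above; `engine` and the remote uid are assumptions:

```dart
// Sketch only: call this before joining the channel.
await engine.enableSoundPositionIndication(true);

// After joining: place remote user 42 on the local user's left, full volume.
await engine.setRemoteVoicePosition(uid: 42, pan: -1.0, gain: 100.0);
```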
@@ -3426,51 +3825,72 @@ abstract class RtcEngine {
   /// After enabling the spatial audio effect, you can call setRemoteUserSpatialAudioParams to set the spatial audio effect parameters of the remote user. You can call this method either before or after joining a channel. This method relies on the spatial audio dynamic library libagora_spatial_audio_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally.
   ///
   /// * [enabled] Whether to enable the spatial audio effect: true: Enable the spatial audio effect. false: Disable the spatial audio effect.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> enableSpatialAudio(bool enabled);

   /// Sets the spatial audio effect parameters of the remote user.
   /// Call this method after enableSpatialAudio. After successfully setting the spatial audio effect parameters of the remote user, the local user can hear the remote user with a sense of space.
   ///
   /// * [uid] The user ID. This parameter must be the same as the user ID passed in when the user joined the channel.
-  /// * [params] The spatial audio parameters. See SpatialAudioParams for details.
+  /// * [params] The spatial audio parameters. See SpatialAudioParams.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setRemoteUserSpatialAudioParams(
       {required int uid, required SpatialAudioParams params});

   /// Sets a preset voice beautifier effect.
-  /// Call this method to set a preset voice beautifier effect for the local user who sends an audio stream. After setting a voice beautifier effect, all users in the channel can hear the effect. You can set different voice beautifier effects for different scenarios. For better voice effects, Agora recommends that you call setAudioProfile and set scenario to audioScenarioGameStreaming (3) and profile to audioProfileMusicHighQuality (4) or audioProfileMusicHighQualityStereo(5) before calling this method.You can call this method either before or after joining a channel.Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard(1) or audioProfileIot(6), or the method does not take effect.This method works best with the human voice. Agora does not recommend using this method for audio containing music.After calling setVoiceBeautifierPreset, Agora recommends not calling the following methods, because they can override settings in setVoiceBeautifierPreset: setAudioEffectPreset setAudioEffectParameters setLocalVoicePitch setLocalVoiceEqualization setLocalVoiceReverb setVoiceBeautifierParameters setVoiceConversionPreset This method relies on the voice beautifier dynamic library libagora_audio_beauty_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally.
+  /// Call this method to set a preset voice beautifier effect for the local user who sends an audio stream. After setting a voice beautifier effect, all users in the channel can hear the effect. You can set different voice beautifier effects for different scenarios. For better voice effects, Agora recommends that you call setAudioProfile and set scenario to audioScenarioGameStreaming(3) and profile to audioProfileMusicHighQuality(4) or audioProfileMusicHighQualityStereo(5) before calling this method. You can call this method either before or after joining a channel. Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard(1) or audioProfileIot(6), or the method does not take effect. This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music. After calling setVoiceBeautifierPreset, Agora does not recommend calling the following methods; otherwise, the effect set by setVoiceBeautifierPreset will be overwritten: setAudioEffectPreset, setAudioEffectParameters, setLocalVoicePitch, setLocalVoiceEqualization, setLocalVoiceReverb, setVoiceBeautifierParameters, setVoiceConversionPreset. This method relies on the voice beautifier dynamic library libagora_audio_beauty_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally.
   ///
   /// * [preset] The preset voice beautifier effect options: VoiceBeautifierPreset.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setVoiceBeautifierPreset(VoiceBeautifierPreset preset);

   /// Sets an SDK preset audio effect.
-  /// Call this method to set an SDK preset audio effect for the local user who sends an audio stream. This audio effect does not change the gender characteristics of the original voice.
+  /// Call this method to set an SDK preset audio effect for the local user who sends an audio stream. This audio effect does not change the gender characteristics of the original voice. After setting an audio effect, all users in the channel can hear the effect. To get better audio effect quality, Agora recommends setting the scenario parameter of setAudioProfile to audioScenarioGameStreaming(3) before calling this method. You can call this method either before or after joining a channel. Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard(1) or audioProfileIot(6), or the method does not take effect. This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music. If you call setAudioEffectPreset and set enumerators except for roomAcoustics3dVoice or pitchCorrection, do not call setAudioEffectParameters; otherwise, setAudioEffectPreset is overridden. After calling setAudioEffectPreset, Agora does not recommend calling the following methods; otherwise, the effect set by setAudioEffectPreset will be overwritten: setVoiceBeautifierPreset, setLocalVoicePitch, setLocalVoiceEqualization, setLocalVoiceReverb, setVoiceBeautifierParameters, setVoiceConversionPreset. This method relies on the voice beautifier dynamic library libagora_audio_beauty_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally.
   ///
   /// * [preset] The options for SDK preset audio effects. See AudioEffectPreset.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setAudioEffectPreset(AudioEffectPreset preset);

   /// Sets a preset voice beautifier effect.
-  /// Call this method to set a preset voice beautifier effect for the local user who sends an audio stream. After setting an audio effect, all users in the channel can hear the effect. You can set different voice beautifier effects for different scenarios.
+  /// Call this method to set a preset voice beautifier effect for the local user who sends an audio stream. After setting an audio effect, all users in the channel can hear the effect. You can set different voice beautifier effects for different scenarios. To achieve better audio effect quality, Agora recommends that you call setAudioProfile and set the profile to audioProfileMusicHighQuality(4) or audioProfileMusicHighQualityStereo(5) and scenario to audioScenarioGameStreaming(3) before calling this method. You can call this method either before or after joining a channel. Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard(1) or audioProfileIot(6), or the method does not take effect. This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music. After calling setVoiceConversionPreset, Agora does not recommend calling the following methods; otherwise, the effect set by setVoiceConversionPreset will be overwritten: setAudioEffectPreset, setAudioEffectParameters, setVoiceBeautifierPreset, setVoiceBeautifierParameters, setLocalVoicePitch, setLocalVoiceFormant, setLocalVoiceEqualization, setLocalVoiceReverb. This method relies on the voice beautifier dynamic library libagora_audio_beauty_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally.
   ///
   /// * [preset] The options for the preset voice beautifier effects: VoiceConversionPreset.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setVoiceConversionPreset(VoiceConversionPreset preset);

   /// Sets parameters for SDK preset audio effects.
-  /// Call this method to set the following parameters for the local user who sends an audio stream:3D voice effect: Sets the cycle period of the 3D voice effect.Pitch correction effect: Sets the basic mode and tonic pitch of the pitch correction effect. Different songs have different modes and tonic pitches.
+  /// Call this method to set the following parameters for the local user who sends an audio stream: 3D voice effect: Sets the cycle period of the 3D voice effect. Pitch correction effect: Sets the basic mode and tonic pitch of the pitch correction effect. Different songs have different modes and tonic pitches. Agora recommends binding this method with interface elements to enable users to adjust the pitch correction interactively. After setting the audio parameters, all users in the channel can hear the effect. You can call this method either before or after joining a channel. To get better audio effect quality, Agora recommends setting the scenario parameter of setAudioProfile to audioScenarioGameStreaming(3) before calling this method. Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard(1) or audioProfileIot(6), or the method does not take effect. This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music. After calling setAudioEffectParameters, Agora does not recommend calling the following methods; otherwise, the effect set by setAudioEffectParameters will be overwritten: setAudioEffectPreset, setVoiceBeautifierPreset, setLocalVoicePitch, setLocalVoiceEqualization, setLocalVoiceReverb, setVoiceBeautifierParameters, setVoiceConversionPreset.
+  ///
+  /// * [preset] The options for SDK preset audio effects: roomAcoustics3dVoice, 3D voice effect: Call setAudioProfile and set the profile parameter to audioProfileMusicStandardStereo(3) or audioProfileMusicHighQualityStereo(5) before setting this enumerator; otherwise, the enumerator setting does not take effect. If the 3D voice effect is enabled, users need to use stereo audio playback devices to hear the anticipated voice effect. pitchCorrection, Pitch correction effect: To achieve better audio effect quality, Agora recommends setting the profile parameter in setAudioProfile to audioProfileMusicHighQuality(4) or audioProfileMusicHighQualityStereo(5) before setting this enumerator.
+  /// * [param1] If you set preset to roomAcoustics3dVoice, param1 sets the cycle period of the 3D voice effect. The value range is [1,60] and the unit is seconds. The default value is 10, indicating that the voice moves around you every 10 seconds. If you set preset to pitchCorrection, param1 indicates the basic mode of the pitch correction effect: 1: (Default) Natural major scale. 2: Natural minor scale. 3: Japanese pentatonic scale.
+  /// * [param2] If you set preset to roomAcoustics3dVoice, you need to set param2 to 0. If you set preset to pitchCorrection, param2 indicates the tonic pitch of the pitch correction effect: 1: A, 2: A#, 3: B, 4: (Default) C, 5: C#, 6: D, 7: D#, 8: E, 9: F, 10: F#, 11: G, 12: G#.
   ///
-  /// * [preset] The options for SDK preset audio effects:roomAcoustics3dVoice, 3D voice effect:You need to set the profile parameter in setAudioProfile to audioProfileMusicStandardStereo(3) or audioProfileMusicHighQualityStereo(5) before setting this enumerator; otherwise, the enumerator setting does not take effect.If the 3D voice effect is enabled, users need to use stereo audio playback devices to hear the anticipated voice effect.pitchCorrection; pitch correction effect: To achieve better audio effect quality, Agora recommends setting the profile parameter in setAudioProfile to audioProfileMusicHighQuality(4) or audioProfileMusicHighQualityStereo(5) before setting this enumerator.
-  /// * [param1] If you set preset to roomAcoustics3dVoice, param1 indicates the cycle period of the 3D voice effect. The value range is [1,60], in seconds. The default value is 10, indicating that the voice moves around you every 10 seconds. If you set preset to pitchCorrection, param1 indicates the basic mode of the pitch correction effect:1: (Default) Natural major scale.2: Natural minor scale.3: Japanese pentatonic scale.
-  /// * [param2] If you set preset to roomAcoustics3dVoice, you need to set param2 to 0. If you set preset to pitchCorrection, param2 indicates the tonic pitch of the pitch correction effect:1: A2: A#3: B4: (Default) C5: C#6: D7: D#8: E9: F10: F#11: G12: G#
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setAudioEffectParameters(
       {required AudioEffectPreset preset,
       required int param1,
       required int param2});
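A sketch of the pitch correction parameters above (natural major scale, tonic pitch C), assuming the recommended profile and scenario are set on `engine` first:

```dart
// Sketch only: set the recommended audio profile before the effect.
await engine.setAudioProfile(
    profile: AudioProfileType.audioProfileMusicHighQuality,
    scenario: AudioScenarioType.audioScenarioGameStreaming);

// Pitch correction: param1 = 1 (natural major scale), param2 = 4 (tonic C).
await engine.setAudioEffectParameters(
    preset: AudioEffectPreset.pitchCorrection, param1: 1, param2: 4);
```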
   /// Sets parameters for the preset voice beautifier effects.
-  /// Call this method to set a gender characteristic and a reverberation effect for the singing beautifier effect. This method sets parameters for the local user who sends an audio stream. After setting the audio parameters, all users in the channel can hear the effect.For better voice effects, Agora recommends that you call setAudioProfile and set scenario to audioScenarioGameStreaming(3) and profile to audioProfileMusicHighQuality(4) or audioProfileMusicHighQualityStereo(5) before calling this method.You can call this method either before or after joining a channel.Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard(1)This method works best with the human voice. Agora does not recommend using this method for audio containing music.After calling setVoiceBeautifierParameters, Agora recommends not calling the following methods, because they can override settings in setVoiceBeautifierParameters: setAudioEffectPreset setAudioEffectParameters setVoiceBeautifierPreset setLocalVoicePitch setLocalVoiceEqualization setLocalVoiceReverb setVoiceConversionPreset
+  /// Call this method to set a gender characteristic and a reverberation effect for the singing beautifier effect. This method sets parameters for the local user who sends an audio stream. After setting the audio parameters, all users in the channel can hear the effect. For better voice effects, Agora recommends that you call setAudioProfile and set scenario to audioScenarioGameStreaming(3) and profile to audioProfileMusicHighQuality(4) or audioProfileMusicHighQualityStereo(5) before calling this method. You can call this method either before or after joining a channel. Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard(1) or audioProfileIot(6), or the method does not take effect. This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music. After calling setVoiceBeautifierParameters, Agora does not recommend calling the following methods; otherwise, the effect set by setVoiceBeautifierParameters will be overwritten: setAudioEffectPreset, setAudioEffectParameters, setVoiceBeautifierPreset, setLocalVoicePitch, setLocalVoiceEqualization, setLocalVoiceReverb, setVoiceConversionPreset.
   ///
   /// * [preset] The option for the preset audio effect: SINGING_BEAUTIFIER: The singing beautifier effect.
   /// * [param1] The gender characteristics options for the singing voice: 1: A male-sounding voice. 2: A female-sounding voice.
   /// * [param2] The reverberation effect options for the singing voice: 1: The reverberation effect sounds like singing in a small room. 2: The reverberation effect sounds like singing in a large room. 3: The reverberation effect sounds like singing in a hall.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setVoiceBeautifierParameters(
       {required VoiceBeautifierPreset preset,
       required int param1,
@@ -3485,23 +3905,41 @@ abstract class RtcEngine {

   /// Changes the voice pitch of the local speaker.
   /// You can call this method either before or after joining a channel.
   ///
-  /// * [pitch] The local voice pitch. The value range is [0.5,2.0]. The lower the value, the lower the pitch. The default value is 1 (no change to the pitch).
+  /// * [pitch] The local voice pitch. The value range is [0.5,2.0]. The lower the value, the lower the pitch. The default value is 1.0 (no change to the pitch).
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setLocalVoicePitch(double pitch);

+  /// Sets the formant ratio to change the timbre of the human voice.
+  /// The formant ratio affects the timbre of the voice: the smaller the value, the deeper the sound; the larger the value, the sharper the sound. You can call this method to set the formant ratio of the local audio to change the timbre of the human voice. After you set the formant ratio, all users in the channel can hear the changed voice. If you want to change the timbre and pitch of the voice at the same time, Agora recommends using this method together with setLocalVoicePitch. You can call this method either before or after joining a channel.
+  ///
+  /// * [formantRatio] The formant ratio. The value range is [-1.0, 1.0]. The default value is 0.0, which means the timbre of the voice is not changed. Agora recommends setting this value within the range of [-0.4, 0.6]; otherwise, the voice may be seriously distorted.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
+  Future<void> setLocalVoiceFormant(double formantRatio);
+
   /// Sets the local voice equalization effect.
   /// You can call this method either before or after joining a channel.
   ///
-  /// * [bandFrequency] The band frequency. The value ranges between 0 and 9; representing the respective 10-band center frequencies of the voice effects, including 31, 62, 125, 250, 500, 1k, 2k, 4k, 8k, and 16k Hz.
+  /// * [bandFrequency] The band frequency. The value ranges between 0 and 9, representing the respective 10-band center frequencies of the voice effects, including 31, 62, 125, 250, 500, 1k, 2k, 4k, 8k, and 16k Hz. See AudioEqualizationBandFrequency.
   /// * [bandGain] The gain of each band in dB. The value ranges between -15 and 15. The default value is 0.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setLocalVoiceEqualization(
       {required AudioEqualizationBandFrequency bandFrequency,
       required int bandGain});
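A sketch combining the local voice shaping methods above (pitch, formant, equalization); the values are illustrative only:

```dart
// Sketch only: `engine` is an assumed, already-initialized RtcEngine.
await engine.setLocalVoicePitch(1.2);   // slightly higher pitch
await engine.setLocalVoiceFormant(0.4); // sharper timbre, within [-0.4, 0.6]
await engine.setLocalVoiceEqualization(
    bandFrequency: AudioEqualizationBandFrequency.audioEqualizationBand1k,
    bandGain: 3); // +3 dB around 1 kHz
```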
- /// The SDK also provides the setAudioEffectPreset method, which allows you to directly implement preset reverb effects for such as pop, R&B, and KTV.You can call this method either before or after joining a channel. + /// The SDK provides an easier-to-use method, setAudioEffectPreset , to directly implement preset reverb effects such as pop, R&B, and KTV.You can call this method either before or after joining a channel. /// - /// * [reverbKey] The reverberation key. Agora provides five reverberation keys; see AudioReverbType for details. + /// * [reverbKey] The reverberation key. Agora provides five reverberation keys, see AudioReverbType . /// * [value] The value of the reverberation key. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setLocalVoiceReverb( {required AudioReverbType reverbKey, required int value}); @@ -3509,6 +3947,9 @@ abstract class RtcEngine { /// This method is mainly used in spatial audio effect scenarios. You can select the preset headphone equalizer to listen to the audio to achieve the expected audio experience.If the headphones you use already have a good equalization effect, you may not get a significant improvement when you call this method, and could even diminish the experience. /// /// * [preset] The preset headphone equalization effect. See HeadphoneEqualizerPreset . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-1: A general error occurs (no specified reason). Future setHeadphoneEQPreset(HeadphoneEqualizerPreset preset); /// Sets the low- and high-frequency parameters of the headphone equalizer. @@ -3516,31 +3957,46 @@ abstract class RtcEngine { /// /// * [lowGain] The low-frequency parameters of the headphone equalizer. The value range is [-10,10]. The larger the value, the deeper the sound. /// * [highGain] The high-frequency parameters of the headphone equalizer. The value range is [-10,10]. The larger the value, the sharper the sound. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-1: A general error occurs (no specified reason). Future setHeadphoneEQParameters( {required int lowGain, required int highGain}); /// Sets the log file. - /// Deprecated:Use the mLogConfig parameter in initialize method instead.Specifies an SDK output log file. The log file records all log data for the SDK’s operation. Ensure that the directory for the log file exists and is writable.Ensure that you call this method immediately after calling the initialize method to initialize the RtcEngine , or the output log may not be complete. + /// Deprecated:Use the logConfig parameter in the initialize method instead.Specifies an SDK output log file. The log file records all log data for the SDK’s operation. Ensure that the directory for the log file exists and is writable.Ensure that you call this method immediately after calling initialize , or the output log may not be complete. /// /// * [filePath] The complete path of the log files. These log files are encoded in UTF-8.
+ /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setLogFile(String filePath); /// Sets the log output level of the SDK. - /// Deprecated:Use logConfig in initialize instead.This method sets the output log level of the SDK. You can use one or a combination of the log filter levels. The log level follows the sequence of logFilterOff, logFilterCritical, logFilterError, logFilterWarn, logFilterInfo, and logFilterDebug. Choose a level to see the logs preceding that level.If, for example, you set the log level to logFilterWarn, you see the logs within levels logFilterCritical, logFilterError, and logFilterWarn. + /// Deprecated:Use logConfig in initialize instead.This method sets the output log level of the SDK. You can use one or a combination of the log filter levels. The log level follows the sequence of logFilterOff, logFilterCritical, logFilterError, logFilterWarn, logFilterInfo, and logFilterDebug. Choose a level to see the logs preceding that level.If, for example, you set the log level to logFilterWarn, you see the logs within levels logFilterCritical, logFilterError and logFilterWarn. /// /// * [filter] The output log level of the SDK. See LogFilterType . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setLogFilter(LogFilterType filter); /// Sets the output log level of the SDK. /// Deprecated:This method is deprecated. Use RtcEngineContext instead to set the log output level.Choose a level to see the logs preceding that level. /// /// * [level] The log level: LogLevel . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setLogLevel(LogLevel level); /// Sets the log file size. /// Deprecated:Use the logConfig parameter in initialize instead.By default, the SDK generates five SDK log files and five API call log files with the following rules:The SDK log files are: agorasdk.log, agorasdk.1.log, agorasdk.2.log, agorasdk.3.log, and agorasdk.4.log.The API call log files are: agoraapi.log, agoraapi.1.log, agoraapi.2.log, agoraapi.3.log, and agoraapi.4.log.The default size for each SDK log file is 1,024 KB; the default size for each API call log file is 2,048 KB. These log files are encoded in UTF-8.The SDK writes the latest logs in agorasdk.log or agoraapi.log.When agorasdk.log is full, the SDK processes the log files in the following order:Delete the agorasdk.4.log file (if any).Rename agorasdk.3.log to agorasdk.4.log.Rename agorasdk.2.log to agorasdk.3.log.Rename agorasdk.1.log to agorasdk.2.log.Create a new agorasdk.log file.The overwrite rules for the agoraapi.log file are the same as for agorasdk.log.This method is used to set the size of the agorasdk.log file only and does not effect the agoraapi.log file. /// /// * [fileSizeInKBytes] The size (KB) of an agorasdk.log file. The value range is [128,20480]. The default value is 1,024 KB. If you set fileSizeInKByte smaller than 128 KB, the SDK automatically adjusts it to 128 KB; if you set fileSizeInKByte greater than 20,480 KB, the SDK automatically adjusts it to 20,480 KB. 
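Because the four log setters above are deprecated, the equivalent configuration now goes through `initialize`. A minimal sketch (not part of the patch; the `LogConfig` field names are taken from the plugin's bindings, and the app ID and file path are hypothetical placeholders):

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<RtcEngine> createEngineWithLogConfig() async {
  final engine = createAgoraRtcEngine();
  await engine.initialize(const RtcEngineContext(
    appId: '<your app id>', // placeholder
    logConfig: LogConfig(
      filePath: '<writable dir>/agorasdk.log', // hypothetical path
      fileSizeInKB: 2048, // within the documented [128, 20480] range
      level: LogLevel.logLevelInfo,
    ),
  ));
  return engine;
}
```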
+ /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setLogFileSize(int fileSizeInKBytes); /// @nodoc @@ -3550,18 +4006,24 @@ abstract class RtcEngine { /// After initializing the local video view, you can call this method to update its rendering and mirror modes. It affects only the video view that the local user sees, not the published local video stream.Ensure that you have called the setupLocalVideo method to initialize the local video view before calling this method.During a call, you can call this method as many times as necessary to update the display mode of the local video view. /// /// * [renderMode] The local video display mode. See RenderModeType . - /// * [mirrorMode] The rendering mode of the local video view. See VideoMirrorModeType .If you use a front camera, the SDK enables the mirror mode by default; if you use a rear camera, the SDK disables the mirror mode by default. + /// * [mirrorMode] The mirror mode of the local video view. See VideoMirrorModeType .If you use a front camera, the SDK enables the mirror mode by default; if you use a rear camera, the SDK disables the mirror mode by default. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. Future setLocalRenderMode( {required RenderModeType renderMode, VideoMirrorModeType mirrorMode = VideoMirrorModeType.videoMirrorModeAuto}); /// Updates the display mode of the video view of a remote user. - /// After initializing the video view of a remote user, you can call this method to update its rendering and mirror modes. This method affects only the video view that the local user sees.Please call this method after initializing the remote view by calling the setupRemoteVideo method.During a call, you can call this method as many times as necessary to update the display mode of the video view of a remote user. + /// After initializing the video view of a remote user, you can call this method to update its rendering and mirror modes. This method affects only the video view that the local user sees.Call this method after initializing the remote view by calling the setupRemoteVideo method.During a call, you can call this method as many times as necessary to update the display mode of the video view of a remote user. /// /// * [uid] The user ID of the remote user. /// * [renderMode] The rendering mode of the remote user view. - /// * [mirrorMode] The mirror mode of the remote user view. + /// * [mirrorMode] The mirror mode of the remote user view. See VideoMirrorModeType . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setRemoteRenderMode( {required int uid, required RenderModeType renderMode, @@ -3571,52 +4033,47 @@ abstract class RtcEngine { /// Deprecated:This method is deprecated.Use setupLocalVideo or setLocalRenderMode instead. /// /// * [mirrorMode] The local video mirror mode. See VideoMirrorModeType . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
Future setLocalVideoMirrorMode(VideoMirrorModeType mirrorMode); - /// Enables or disables dual-stream mode. - /// You can call this method to enable or disable the dual-stream mode on the publisher side. Dual streams are a pairing of a high-quality video stream and a low-quality video stream:High-quality video stream: High bitrate, high resolution.Low-quality video stream: Low bitrate, low resolution.After you enable dual-stream mode, you can call setRemoteVideoStreamType to choose to receive either the high-quality video stream or the low-quality video stream on the subscriber side.This method is applicable to all types of streams from the sender, including but not limited to video streams collected from cameras, screen sharing streams, and custom-collected video streams.If you need to enable dual video streams in a multi-channel scenario, you can call the enableDualStreamModeEx method.You can call this method either before or after joining a channel. + /// Enables or disables the dual-stream mode on the sender and sets the low-quality video stream. + /// Deprecated:This method is deprecated as of v4.2.0. Use setDualStreamMode instead.You can call this method to enable or disable the dual-stream mode on the publisher side. Dual streams are a pairing of a high-quality video stream and a low-quality video stream:High-quality video stream: High bitrate, high resolution.Low-quality video stream: Low bitrate, low resolution.After you enable dual-stream mode, you can call setRemoteVideoStreamType to choose to receive either the high-quality video stream or the low-quality video stream on the subscriber side.This method is applicable to all types of streams from the sender, including but not limited to video streams collected from cameras, screen sharing streams, and custom-collected video streams.If you need to enable dual video streams in a multi-channel scenario, you can call the enableDualStreamModeEx method.You can call this method either before or after joining a channel. /// - /// * [streamConfig] The configuration of the low-quality video stream. See SimulcastStreamConfig. - /// * [enabled] Whether to enable dual-stream mode:true: Enable dual-stream mode.false: Disable dual-stream mode. + /// * [enabled] Whether to enable dual-stream mode:true: Enable dual-stream mode.false: (Default) Disable dual-stream mode. + /// * [streamConfig] The configuration of the low-quality video stream. See SimulcastStreamConfig . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future enableDualStreamMode( {required bool enabled, SimulcastStreamConfig? streamConfig}); /// Sets dual-stream mode configuration on the sender, and sets the low-quality video stream. - /// The SDK enables the low-quality video stream auto mode on the sender by default, which is equivalent to calling this method and setting the mode to autoSimulcastStream. If you want to modify this behavior, you can call this method and modify the mode to disableSimulcastStream(always never send low-quality video streams) or enableSimulcastStream (always send low-quality video streams). - /// - /// * [streamConfig] The configuration of the low-quality video stream. See SimulcastStreamConfig . + /// The SDK enables the low-quality video stream auto mode on the sender by default, which is equivalent to calling this method and setting the mode to autoSimulcastStream. 
If you want to modify this behavior, you can call this method and modify the mode to disableSimulcastStream (never send low-quality video streams) or enableSimulcastStream (always send low-quality video streams). /// /// * [mode] The mode in which the video stream is sent. See SimulcastStreamMode . + /// * [streamConfig] The configuration of the low-quality video stream. See SimulcastStreamConfig .When setting mode to disableSimulcastStream, setting streamConfig will not take effect. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setDualStreamMode( {required SimulcastStreamMode mode, SimulcastStreamConfig? streamConfig}); - /// @nodoc - Future enableEchoCancellationExternal( - {required bool enabled, required int audioSourceDelay}); - /// @nodoc Future enableCustomAudioLocalPlayback( - {required int sourceId, required bool enabled}); - - /// @nodoc - Future startPrimaryCustomAudioTrack(AudioTrackConfig config); - - /// @nodoc - Future stopPrimaryCustomAudioTrack(); - - /// @nodoc - Future startSecondaryCustomAudioTrack(AudioTrackConfig config); - - /// @nodoc - Future stopSecondaryCustomAudioTrack(); + {required int trackId, required bool enabled}); /// Sets the format of the captured raw audio data. - /// Sets the audio format for the onRecordAudioFrame callback.Ensure that you call this method before joining a channel.The SDK calculates the sampling interval based on the samplesPerCall, sampleRate and channel parameters set in this method.Sample interval = samplePerCall/(sampleRate × channel). Ensure that the sample interval ≥ 0.01 (s). + /// Sets the audio format for the onRecordAudioFrame callback.Ensure that you call this method before joining a channel.The SDK calculates the sampling interval based on the samplesPerCall, sampleRate and channel parameters set in this method.Sample interval (sec) = samplePerCall/(sampleRate × channel). Ensure that the sample interval ≥ 0.01 (s). /// /// * [sampleRate] The sample rate returned in the onRecordAudioFrame callback, which can be set as 8000, 16000, 32000, 44100, or 48000 Hz. /// * [channel] The number of channels returned in the onRecordAudioFrame callback:1: Mono.2: Stereo. /// * [mode] The use mode of the audio frame. See RawAudioFrameOpModeType . /// * [samplesPerCall] The number of data samples returned in the onRecordAudioFrame callback, such as 1024 for the Media Push. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setRecordingAudioFrameParameters( {required int sampleRate, required int channel, @@ -3624,12 +4081,15 @@ abstract class RtcEngine { required int samplesPerCall}); /// Sets the audio data format for playback. - /// Sets the data format for the onPlaybackAudioFrame callback.Ensure that you call this method before joining a channel.The SDK calculates the sampling interval based on the samplesPerCall, sampleRate and channel parameters set in this method.Sample interval = samplePerCall/(sampleRate × channel). Ensure that the sample interval ≥ 0.01 (s). The SDK triggers the onPlaybackAudioFrame callback according to the sampling interval. 
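Since enableDualStreamMode is deprecated as of v4.2.0, new code would go through setDualStreamMode. A sketch under stated assumptions (the `SimulcastStreamConfig` field names, e.g. `kBitrate`, are assumed from the 4.x Dart bindings; the low-stream dimensions are illustrative):

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> enableSimulcast(RtcEngine engine) async {
  await engine.setDualStreamMode(
    mode: SimulcastStreamMode.enableSimulcastStream,
    streamConfig: const SimulcastStreamConfig(
      dimensions: VideoDimensions(width: 320, height: 180), // low stream size
      kBitrate: 140, // kbps; field name assumed from the bindings
      framerate: 15,
    ),
  );
}
```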
+ /// Sets the data format for the onPlaybackAudioFrame callback.Ensure that you call this method before joining a channel.The SDK calculates the sampling interval based on the samplesPerCall, sampleRate and channel parameters set in this method.Sample interval (sec) = samplePerCall/(sampleRate × channel). Ensure that the sample interval ≥ 0.01 (s). The SDK triggers the onPlaybackAudioFrame callback according to the sampling interval. /// /// * [sampleRate] The sample rate returned in the onPlaybackAudioFrame callback, which can be set as 8000, 16000, 32000, 44100, or 48000 Hz. /// * [channel] The number of channels returned in the onPlaybackAudioFrame callback:1: Mono.2: Stereo. /// * [mode] The use mode of the audio frame. See RawAudioFrameOpModeType . /// * [samplesPerCall] The number of data samples returned in the onPlaybackAudioFrame callback, such as 1024 for the Media Push. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setPlaybackAudioFrameParameters( {required int sampleRate, required int channel, @@ -3639,8 +4099,11 @@ abstract class RtcEngine { /// Sets the audio data format reported by onMixedAudioFrame . /// /// * [sampleRate] The sample rate (Hz) of the audio data, which can be set as 8000, 16000, 32000, 44100, or 48000. - /// * [channel] The number of channels of the audio data, which can be set as 1 (Mono) or 2 (Stereo). + /// * [channel] The number of channels of the audio data, which can be set as 1(Mono) or 2(Stereo). /// * [samplesPerCall] Sets the number of samples. In Media Push scenarios, set it as 1024. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setMixedAudioFrameParameters( {required int sampleRate, required int channel, @@ -3653,6 +4116,9 @@ abstract class RtcEngine { /// * [channel] The number of audio channels reported in the onEarMonitoringAudioFrame callback.1: Mono.2: Stereo. /// * [mode] The use mode of the audio frame. See RawAudioFrameOpModeType . /// * [samplesPerCall] The number of data samples reported in the onEarMonitoringAudioFrame callback, such as 1,024 for the Media Push. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setEarMonitoringAudioFrameParameters( {required int sampleRate, required int channel, @@ -3663,55 +4129,82 @@ abstract class RtcEngine { /// /// * [sampleRate] The sample rate (Hz) of the audio data, which can be set as 8000, 16000, 32000, 44100, or 48000. /// * [channel] The number of channels of the external audio source, which can be set as 1(Mono) or 2(Stereo). + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setPlaybackAudioFrameBeforeMixingParameters( {required int sampleRate, required int channel}); /// Turns on audio spectrum monitoring.
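To make the sampling-interval rule above concrete: with sampleRate = 48000, channel = 1 and samplesPerCall = 1024, the interval is 1024 / (48000 × 1) ≈ 0.021 s, which satisfies the ≥ 0.01 s requirement. A sketch (not part of the patch; assumes `engine` is initialized and not yet in a channel):

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> configureRawAudio(RtcEngine engine) async {
  // 1024 / (48000 × 1) ≈ 0.021 s per onRecordAudioFrame callback.
  await engine.setRecordingAudioFrameParameters(
      sampleRate: 48000,
      channel: 1,
      mode: RawAudioFrameOpModeType.rawAudioFrameOpModeReadOnly,
      samplesPerCall: 1024);
  // 1024 / (48000 × 2) ≈ 0.011 s per onPlaybackAudioFrame callback.
  await engine.setPlaybackAudioFrameParameters(
      sampleRate: 48000,
      channel: 2,
      mode: RawAudioFrameOpModeType.rawAudioFrameOpModeReadOnly,
      samplesPerCall: 1024);
}
```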
/// If you want to obtain the audio spectrum data of local or remote users, you can register the audio spectrum observer and enable audio spectrum monitoring.You can call this method either before or after joining a channel. /// - /// * [intervalInMS] The interval (in milliseconds) at which the SDK triggers the onLocalAudioSpectrum and onRemoteAudioSpectrum callbacks. The default value is 100. Do not set this parameter to less than 10 milliseconds, otherwise the calling of this method fails. + /// * [intervalInMS] The interval (in milliseconds) at which the SDK triggers the onLocalAudioSpectrum and onRemoteAudioSpectrum callbacks. The default value is 100. Do not set this parameter to a value less than 10, otherwise calling this method would fail. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: Invalid parameters. Future enableAudioSpectrumMonitor({int intervalInMS = 100}); /// Disables audio spectrum monitoring. /// After calling enableAudioSpectrumMonitor , if you want to disable audio spectrum monitoring, you can call this method.You can call this method either before or after joining a channel. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future disableAudioSpectrumMonitor(); - /// Registers an audio spectrum observer. + /// Register an audio spectrum observer. /// After successfully registering the audio spectrum observer and calling - /// enableAudioSpectrumMonitor to enable the audio spectrum monitoring, the SDK reports the callback that you implement in the AudioSpectrumObserver class at the time interval you set.You can call this method either before or after joining a channel. + /// enableAudioSpectrumMonitor to enable the audio spectrum monitoring, the SDK reports the callback that you implement in the AudioSpectrumObserver class according to the time interval you set.You can call this method either before or after joining a channel. + /// + /// * [observer] The audio spectrum observer. See AudioSpectrumObserver . /// - /// * [observer] The Audio spectrum observer. See AudioSpectrumObserver . + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. void registerAudioSpectrumObserver(AudioSpectrumObserver observer); /// Unregisters the audio spectrum observer. /// After calling registerAudioSpectrumObserver , if you want to disable audio spectrum monitoring, you can call this method.You can call this method either before or after joining a channel. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. void unregisterAudioSpectrumObserver(AudioSpectrumObserver observer); /// Adjusts the capturing signal volume. /// You can call this method either before or after joining a channel. /// - /// * [volume] The volume of the user. The value range is [0,400].0: Mute.100: (Default) The original volume.400: Four times the original volume (amplifying the audio signals by four times). + /// * [volume] The volume of the user. 
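A sketch of the register/enable/disable/unregister flow described above (not part of the patch; the `AudioSpectrumData` field name used in the callback is an assumption based on the plugin's bindings):

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> watchAudioSpectrum(RtcEngine engine) async {
  final observer = AudioSpectrumObserver(
    onLocalAudioSpectrum: (AudioSpectrumData data) {
      // `dataLength` is an assumed field name for the spectrum size.
      print('local spectrum points: ${data.dataLength}');
    },
  );
  engine.registerAudioSpectrumObserver(observer);
  await engine.enableAudioSpectrumMonitor(intervalInMS: 100); // must be >= 10
  // ...when done:
  await engine.disableAudioSpectrumMonitor();
  engine.unregisterAudioSpectrumObserver(observer);
}
```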
The value range is [0,400].0: Mute.If you only need to mute the audio signal, Agora recommends that you use muteRecordingSignal instead.100: (Default) The original volume.400: Four times the original volume (amplifying the audio signals by four times). + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future adjustRecordingSignalVolume(int volume); /// Whether to mute the recording signal. /// - /// * [mute] true: Mute the recording signal.false: (Default) Do not mute the recording signal. + /// * [mute] true: Mute the recording signal.false: (Default) Do not mute the recording signal.If you have already called adjustRecordingSignalVolume to adjust the volume, then when you call this method and set it to true, the SDK will record the current volume and mute it. To restore the previous volume, call this method again and set it to false. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future muteRecordingSignal(bool mute); /// Adjusts the playback signal volume of all remote users. /// This method adjusts the playback volume that is the mixed volume of all remote users.You can call this method either before or after joining a channel. /// /// * [volume] The volume of the user. The value range is [0,400]. - /// 0: Mute. + /// 0: Mute.If you only need to mute the audio signal, Agora recommends that you use muteRecordingSignal instead. /// 100: (Default) The original volume. /// 400: Four times the original volume (amplifying the audio signals by four times). + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. Future adjustPlaybackSignalVolume(int volume); /// Adjusts the playback signal volume of a specified remote user. /// You can call this method to adjust the playback volume of a specified remote user. To adjust the playback volume of different remote users, call the method as many times, once for each remote user.Call this method after joining a channel.The playback volume here refers to the mixed volume of a specified remote user. /// - /// * [volume] Audio mixing volume. The value ranges between 0 and 100. The default value is 100, which means the original volume. /// * [uid] The user ID of the remote user. + /// * [volume] Audio mixing volume. The value ranges between 0 and 100. The default value is 100, which means the original volume. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future adjustUserPlaybackSignalVolume( {required int uid, required int volume}); @@ -3721,11 +4214,20 @@ abstract class RtcEngine { /// @nodoc Future setRemoteSubscribeFallbackOption(StreamFallbackOptions option); - /// Enables loopback audio capture. - /// If you enable loopback audio capture, the output of the sound card is mixed into the audio stream sent to the other end.Applies to the macOS and Windows platforms only.macOS does not support loopback audio capture of the default sound card.
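The interaction between the volume setters and muteRecordingSignal described above, as a sketch (not part of the patch; the uid and volume values are illustrative):

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> adjustVolumes(RtcEngine engine) async {
  await engine.adjustRecordingSignalVolume(200); // double the captured volume
  await engine.muteRecordingSignal(true);  // SDK remembers 200 and mutes
  await engine.muteRecordingSignal(false); // restores the remembered volume
  await engine.adjustPlaybackSignalVolume(100); // mixed remote volume
  await engine.adjustUserPlaybackSignalVolume(uid: 1001, volume: 50);
}
```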
If you need to use this method, use a virtual sound card and pass its name to the deviceName parameter. Agora recommends that you use Soundflower for loopback audio capture.You can call this method either before or after joining a channel. + /// @nodoc + Future setHighPriorityUserList( + {required List uidList, + required int uidNum, + required StreamFallbackOptions option}); + + /// Enables loopback audio capturing. + /// If you enable loopback audio capturing, the output of the sound card is mixed into the audio stream sent to the other end.This method applies to the macOS and Windows only.macOS does not support loopback audio capture of the default sound card. If you need to use this function, use a virtual sound card and pass its name to the deviceName parameter. Agora recommends using AgoraALD as the virtual sound card for audio capturing.You can call this method either before or after joining a channel. /// - /// * [enabled] Whether to enable loopback audio capture.true: Enable loopback audio capture.false: (Default) Disable loopback audio capture. - /// * [deviceName] macOS: The device name of the virtual sound card. The default is set to null, which means the SDK uses Soundflower for loopback audio capture.Windows: The device name of the sound card. The default is set to null, which means the SDK uses the sound card of your device for loopback audio capture. + /// * [enabled] Whether to enable loopback audio capturing.true: Enable loopback audio capturing.false: (Default) Disable loopback audio capturing. + /// * [deviceName] macOS: The device name of the virtual sound card. The default value is set to NULL, which means using AgoraALD for loopback audio capturing.Windows: The device name of the sound card. The default is set to NULL, which means the SDK uses the sound card of your device for loopback audio capturing. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future enableLoopbackRecording( {required bool enabled, String? deviceName}); @@ -3733,16 +4235,22 @@ abstract class RtcEngine { /// After calling enableLoopbackRecording to enable loopback audio capturing, you can call this method to adjust the volume of the signal captured by the sound card. /// /// * [volume] Audio mixing volume. The value ranges between 0 and 100. The default value is 100, which means the original volume. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future adjustLoopbackSignalVolume(int volume); /// @nodoc Future getLoopbackRecordingVolume(); /// Enables in-ear monitoring. - /// This method enables or disables in-ear monitoring.Users must use wired earphones to hear their own voices.You can call this method either before or after joining a channel. + /// This method enables or disables in-ear monitoring.Users must use earphones (wired or Bluetooth) to hear the in-ear monitoring effect.You can call this method either before or after joining a channel. /// /// * [enabled] Enables or disables in-ear monitoring.true: Enables in-ear monitoring.false: (Default) Disables in-ear monitoring. /// * [includeAudioFilters] The audio filter of in-ear monitoring: See EarMonitoringFilterType . 
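A desktop-only sketch of the loopback capturing flow documented above (not part of the patch; passing null for deviceName selects the default device described in the deviceName bullet):

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> shareSystemAudio(RtcEngine engine) async {
  // macOS requires a virtual sound card such as AgoraALD.
  await engine.enableLoopbackRecording(enabled: true, deviceName: null);
  await engine.adjustLoopbackSignalVolume(80); // attenuate the sound-card mix
}
```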
+ /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.- 8: Make sure the current audio routing is Bluetooth or headset. Future enableInEarMonitoring( {required bool enabled, required EarMonitoringFilterType includeAudioFilters}); @@ -3751,6 +4259,9 @@ abstract class RtcEngine { /// This method applies to Android and iOS only.Users must use wired earphones to hear their own voices.You can call this method either before or after joining a channel. /// /// * [volume] The volume of the in-ear monitor. The value ranges between 0 and 100. The default value is 100. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setInEarMonitoringVolume(int volume); /// Adds an extension to the SDK. @@ -3758,37 +4269,48 @@ abstract class RtcEngine { /// /// * [path] The extension library path and name. For example: /library/libagora_segmentation_extension.dll. /// * [unloadAfterUse] Whether to uninstall the current extension when you no longer using it:true: Uninstall the extension when the RtcEngine is destroyed.false: (Rcommended) Do not uninstall the extension until the process terminates. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future loadExtensionProvider( {required String path, bool unloadAfterUse = false}); /// Sets the properties of the extension provider. /// You can call this method to set the attributes of the extension provider and initialize the relevant parameters according to the type of the provider.Call this method after enableExtension , and before enabling the audio ( enableAudio / enableLocalAudio ) or the video ( enableVideo / enableLocalVideo ). /// - /// * [value] The value of the extension key. - /// * [key] The key of the extension. /// * [provider] The name of the extension provider. + /// * [key] The key of the extension. + /// * [value] The value of the extension key. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setExtensionProviderProperty( {required String provider, required String key, required String value}); /// Registers an extension. /// After the extension is loaded, you can call this method to register the extension.This method applies to Windows only. /// - /// * [type] Type of media source. See MediaSourceType . In this method, this parameter supports only the following two settings:The default value is unknownMediaSource.If you want to use the second camera to capture video, set this parameter to secondaryCameraSource. - /// * [extension] The name of the extension. /// * [provider] The name of the extension provider. + /// * [extension] The name of the extension. + /// * [type] Type of media source. See MediaSourceType .In this method, this parameter supports only the following two settings:The default value is unknownMediaSource.If you want to use the second camera to capture video, set this parameter to secondaryCameraSource. 
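A sketch of enabling in-ear monitoring with the built-in audio filters, per the two methods documented above (not part of the patch; the volume value is illustrative):

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> startEarMonitoring(RtcEngine engine) async {
  await engine.enableInEarMonitoring(
      enabled: true,
      includeAudioFilters:
          EarMonitoringFilterType.earMonitoringFilterBuiltInAudioFilters);
  await engine.setInEarMonitoringVolume(80); // 0 to 100, default 100
}
```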
+ /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future registerExtension( {required String provider, required String extension, MediaSourceType type = MediaSourceType.unknownMediaSource}); - /// Enables/Disables extensions. - /// Ensure that you call this method before joining a channel.If you want to enable multiple extensions, you need to call this method multiple times.The data processing order of different extensions in the SDK is determined by the order in which the extensions are enabled. That is, the extension that is enabled first will process the data first. + /// Enables or disables extensions. + /// To call this method, call it immediately after initializing the RtcEngine object.If you want to enable multiple extensions, you need to call this method multiple times.The data processing order of different extensions in the SDK is determined by the order in which the extensions are enabled. That is, the extension that is enabled first will process the data first. /// - /// * [extension] The name of the extension. /// * [provider] The name of the extension provider. + /// * [extension] The name of the extension. /// * [enable] Whether to enable the extension:true: Enable the extension.false: Disable the extension. - /// * [type] Type of media source. See MediaSourceType . In this method, this parameter supports only the following two settings:The default value is unknownMediaSource.If you want to use the second camera to capture video, set this parameter to secondaryCameraSource. + /// * [type] Type of media source. See MediaSourceType .In this method, this parameter supports only the following two settings:The default value is unknownMediaSource.If you want to use the second camera to capture video, set this parameter to secondaryCameraSource. /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-3: The extension library is not loaded. Agora recommends that you check the storage location or the name of the dynamic library. Future enableExtension( {required String provider, required String extension, @@ -3802,7 +4324,10 @@ abstract class RtcEngine { /// * [extension] The name of the extension. /// * [key] The key of the extension. /// * [value] The value of the extension key. - /// * [type] The type of the video source. See MediaSourceType . + /// * [type] The type of the video source, see MediaSourceType . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setExtensionProperty( {required String provider, required String extension, @@ -3815,8 +4340,8 @@ abstract class RtcEngine { /// * [provider] The name of the extension provider. /// * [extension] The name of the extension. /// * [key] The key of the extension. - /// * [type] Source type of the extension. See MediaSourceType . /// * [bufLen] Maximum length of the JSON string indicating the extension property. The maximum value is 512 bytes. + /// * [type] Source type of the extension. See MediaSourceType . /// /// Returns /// The extension information, if the method call succeeds.An empty string, if the method call fails. 
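The extension call order described above (load, configure the provider, enable the extension, then enable media) as a sketch; this is not part of the patch, and all provider/extension/key names below are hypothetical placeholders:

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> setUpExtension(RtcEngine engine) async {
  await engine.loadExtensionProvider(path: '<extension library path>');
  await engine.setExtensionProviderProperty(
      provider: '<provider name>', // hypothetical
      key: '<provider key>',       // hypothetical
      value: '<value>');
  await engine.enableExtension(
      provider: '<provider name>',
      extension: '<extension name>', // hypothetical
      enable: true);
  await engine.enableVideo(); // enable media only after the extension is ready
}
```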
@@ -3828,9 +4353,12 @@ abstract class RtcEngine { MediaSourceType type = MediaSourceType.unknownMediaSource}); /// Sets the camera capture configuration. - /// This method is for Android and iOS only.Call this method before calling joinChannel [2/2] , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera. + /// This method is for Android and iOS only.Call this method before calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera. + /// + /// * [config] The camera capture configuration. See CameraCapturerConfiguration . /// - /// * [config] The camera capturer configuration. See CameraCapturerConfiguration . + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setCameraCapturerConfiguration( CameraCapturerConfiguration config); @@ -3847,39 +4375,44 @@ abstract class RtcEngine { /// Destroys the specified video track. /// /// * [videoTrackId] The video track ID returned by calling the createCustomVideoTrack method. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future destroyCustomVideoTrack(int videoTrackId); /// @nodoc Future destroyCustomEncodedVideoTrack(int videoTrackId); /// Switches between front and rear cameras. - /// This method needs to be called after the camera is started (for example, by calling startPreview or joinChannel [2/2] ).This method is for Android and iOS only. + /// This method needs to be called after the camera is started (for example, by calling startPreview or joinChannel ).This method is for Android and iOS only. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future switchCamera(); /// Checks whether the device supports camera zoom. - /// Call this method after enabling the local camera, for example, by calling joinChannel [2/2] , enableVideo , or enableLocalVideo ,depending on which method you use to turn on your local camera. - /// This method is for Android and iOS only. + /// Call this method after enabling the local camera, for example, by calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera.This method is for Android and iOS only. /// /// Returns /// true: The device supports camera zoom.false: The device does not support camera zoom. Future isCameraZoomSupported(); /// Checks whether the device camera supports face detection. - /// Call this method after enabling the local camera, for example, by calling joinChannel [2/2] , enableVideo , or enableLocalVideo ,depending on which method you use to turn on your local camera.This method is for Android and iOS only. + /// Call this method after enabling the local camera, for example, by calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera.This method is for Android and iOS only. /// /// Returns /// true: The device camera supports face detection.false: The device camera does not support face detection. 
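A sketch of the custom video track lifecycle around destroyCustomVideoTrack (not part of the patch; the ChannelMediaOptions field names are taken from the 4.x bindings, and pushing the external frames is omitted):

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> publishCustomTrack(RtcEngine engine) async {
  final int trackId = await engine.createCustomVideoTrack();
  await engine.updateChannelMediaOptions(ChannelMediaOptions(
    publishCustomVideoTrack: true,
    customVideoTrackId: trackId,
    publishCameraTrack: false,
  ));
  // ...push external frames tagged with `trackId`, then clean up:
  await engine.destroyCustomVideoTrack(trackId);
}
```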
Future isCameraFaceDetectSupported(); /// Checks whether the device supports camera flash. - /// Call this method after enabling the local camera, for example, by calling joinChannel [2/2] , enableVideo , or enableLocalVideo ,depending on which method you use to turn on your local camera.This method is for Android and iOS only.The app enables the front camera by default. If your front camera does not support enabling the flash, this method returns false. If you want to check whether the rear camera supports the flash function, call switchCamera before this method.On iPads with system version 15, even if isCameraTorchSupported returns true, you might fail to successfully enable the flash by calling setCameraTorchOn due to system issues. + /// Call this method after enabling the local camera, for example, by calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera.This method is for Android and iOS only.The app enables the front camera by default. If your front camera does not support enabling the flash, this method returns false. If you want to check whether the rear camera supports the flash function, call switchCamera before this method.On iPads with system version 15, even if isCameraTorchSupported returns true, you might fail to successfully enable the flash by calling setCameraTorchOn due to system issues. /// /// Returns - /// true: The device supports enabling the flash.false: The device does not support enabling the flash. + /// true: The device supports camera flash.false: The device does not support camera flash. Future isCameraTorchSupported(); /// Check whether the device supports the manual focus function. - /// Call this method after enabling the local camera, for example, by calling joinChannel [2/2] , enableVideo , or enableLocalVideo ,depending on which method you use to turn on your local camera. + /// Call this method after enabling the local camera, for example, by calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera. /// This method is for Android and iOS only. /// /// Returns @@ -3887,90 +4420,116 @@ abstract class RtcEngine { Future isCameraFocusSupported(); /// Checks whether the device supports the face auto-focus function. - /// This method is for Android and iOS only.Call this method after enabling the local camera, for example, by calling joinChannel [2/2] , enableVideo , or enableLocalVideo ,depending on which method you use to turn on your local camera. + /// This method is for Android and iOS only.Call this method after enabling the local camera, for example, by calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera. /// /// Returns /// true: The device supports the face auto-focus function.false: The device does not support the face auto-focus function. Future isCameraAutoFocusFaceModeSupported(); /// Sets the camera zoom ratio. - /// This method is for Android and iOS only.Call this method before calling joinChannel [2/2] , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera. + /// This method is for Android and iOS only.Call this method before calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera. /// /// * [factor] The camera zoom ratio. The value ranges between 1.0 and the maximum zoom supported by the device. 
You can get the maximum zoom ratio supported by the device by calling the getCameraMaxZoomFactor method. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setCameraZoomFactor(double factor); - /// Enables/Disables face detection for the local user. - /// You can call this method either before or after joining a channel.This method is for Android and iOS only.Once face detection is enabled, the SDK triggers the onFacePositionChanged callback to report the face information of the local user, which includes the following:The width and height of the local video.The position of the human face in the local view.The distance between the human face and the screen.This method needs to be called after the camera is started (for example, by calling startPreviewjoinChannel [2/2]). + /// Enables or disables face detection for the local user. + /// You can call this method either before or after joining a channel.This method is for Android and iOS only.Once face detection is enabled, the SDK triggers the onFacePositionChanged callback to report the face information of the local user, which includes the following:The width and height of the local video.The position of the human face in the local view.The distance between the human face and the screen.This method needs to be called after the camera is started (for example, by calling joinChannel). /// /// * [enabled] Whether to enable face detection for the local user:true: Enable face detection.false: (Default) Disable face detection. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future enableFaceDetection(bool enabled); /// Gets the maximum zoom ratio supported by the camera. - /// This method is for Android and iOS only.Call this method after enabling the local camera, for example, by calling joinChannel [2/2] , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera. + /// This method is for Android and iOS only.Call this method after enabling the local camera, for example, by calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera. /// /// Returns /// The maximum zoom factor. Future getCameraMaxZoomFactor(); /// Sets the camera manual focus position. - /// This method needs to be called after the camera is started (for example, by calling startPreview or joinChannel [2/2] ). After a successful method call, the SDK triggers the onCameraFocusAreaChanged callback.This method is for Android and iOS only. + /// This method needs to be called after the camera is started (for example, by calling startPreview or joinChannel ). After a successful method call, the SDK triggers the onCameraFocusAreaChanged callback.This method is for Android and iOS only. /// /// * [positionX] The horizontal coordinate of the touchpoint in the view. /// * [positionY] The vertical coordinate of the touchpoint in the view. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.
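A sketch combining the zoom capability check with getCameraMaxZoomFactor and setCameraZoomFactor (not part of the patch; mobile only, and the camera must already be started as described above):

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> zoomCamera(RtcEngine engine) async {
  if (await engine.isCameraZoomSupported()) {
    final double maxZoom = await engine.getCameraMaxZoomFactor();
    // Clamp the requested ratio to the supported range [1.0, maxZoom].
    await engine.setCameraZoomFactor(maxZoom < 2.0 ? maxZoom : 2.0);
  }
}
```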
Future setCameraFocusPositionInPreview( {required double positionX, required double positionY}); /// Enables the camera flash. - /// This method is for Android and iOS only.Call this method before calling joinChannel [2/2] , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera. + /// This method is for Android and iOS only.Call this method before calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera. /// /// * [isOn] Whether to turn on the camera flash:true: Turn on the flash.false: (Default) Turn off the flash. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setCameraTorchOn(bool isOn); - /// Sets whether to enable face autofocus. - /// By default, the SDK disables face autofocus on Android and enables face autofocus on iOS. To set face autofocus, call this method.This method is for Android and iOS only.Call this method after the camera is started, such as after joinChannel [2/2] , enableVideo , or enableLocalVideo . + /// Enables the camera auto-face focus function. + /// By default, the SDK disables face autofocus on Android and enables face autofocus on iOS. To set face autofocus, call this method.This method is for Android and iOS only.Call this method after the camera is started, such as after joinChannel , enableVideo or enableLocalVideo . /// - /// * [enabled] Whether to enable face autofocus:true: Enable face autofocus.false: Disable face autofocus. + /// * [enabled] Whether to enable face autofocus:true: Enable the camera auto-face focus function.false: Disable face autofocus. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setCameraAutoFocusFaceModeEnabled(bool enabled); /// Checks whether the device supports manual exposure. - /// Call this method after enabling the local camera, for example, by calling joinChannel [2/2] , enableVideo , or enableLocalVideo ,depending on which method you use to turn on your local camera.This method is for Android and iOS only. + /// Call this method after enabling the local camera, for example, by calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera.This method is for Android and iOS only. /// /// Returns /// true: The device supports manual exposure.false: The device does not support manual exposure. Future isCameraExposurePositionSupported(); /// Sets the camera exposure position. - /// This method needs to be called after the camera is started (for example, by calling startPreview or joinChannel [2/2] ).After a successful method call, the SDK triggers the onCameraExposureAreaChanged callback.This method is for Android and iOS only. + /// This method needs to be called after the camera is started (for example, by calling startPreview or joinChannel ).After a successful method call, the SDK triggers the onCameraExposureAreaChanged callback.This method is for Android and iOS only. /// /// * [positionXinView] The horizontal coordinate of the touchpoint in the view. /// * [positionYinView] The vertical coordinate of the touchpoint in the view. 
+ /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setCameraExposurePosition( {required double positionXinView, required double positionYinView}); /// Checks whether the device supports auto exposure. - /// Call this method after enabling the local camera, for example, by calling joinChannel [2/2] , enableVideo , or enableLocalVideo ,depending on which method you use to turn on your local camera.This method applies to iOS only. + /// This method applies to iOS only.Call this method before calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera. /// /// Returns /// true: The device supports auto exposure.false: The device does not support auto exposure. Future isCameraAutoExposureFaceModeSupported(); /// Sets whether to enable auto exposure. - /// This method applies to iOS only.Call this method before calling joinChannel [2/2] , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera. + /// This method applies to iOS only.Call this method before calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera. + /// + /// * [enabled] Whether to enable auto exposure:true: Enable auto exposure.false: Disable auto exposure. /// - /// * [enabled] Whether to enable auto exposure: - /// true: Enable auto exposure.false: Disable auto exposure. + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setCameraAutoExposureFaceModeEnabled(bool enabled); /// Sets the default audio playback route. /// This method applies to Android and iOS only.Ensure that you call this method before joining a channel. If you need to change the audio route after joining a channel, call setEnableSpeakerphone .Most mobile phones have two audio routes: an earpiece at the top, and a speakerphone at the bottom. The earpiece plays at a lower volume, and the speakerphone at a higher volume. When setting the default audio route, you determine whether audio playback comes through the earpiece or speakerphone when no external audio device is connected. /// /// * [defaultToSpeaker] Whether to set the speakerphone as the default audio route:true: Set the speakerphone as the default audio route.false: Set the earpiece as the default audio route. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setDefaultAudioRouteToSpeakerphone(bool defaultToSpeaker); /// Enables/Disables the audio route to the speakerphone. - /// This method is for Android and iOS only.After a successful method call, the SDK triggers the onAudioRoutingChanged callback.You can call this method before joining a channel, when in a channel, or after leaving a channel. 
However, Agora recommends calling this method only when you are in a channel to change the audio route temporarily.If you do not have a clear requirement for transient settings, Agora recommends calling setDefaultAudioRouteToSpeakerphone to set the audio route.Any user behavior or audio-related API call might change the transient setting of setEnableSpeakerphone. + /// If the default audio route of the SDK (see Set the Audio Route) or the setting in setDefaultAudioRouteToSpeakerphone cannot meet your requirements, you can call setEnableSpeakerphone to switch the current audio route. After a successful method call, the SDK triggers the onAudioRoutingChanged callback.This method only sets the audio route in the current channel and does not influence the default audio route. If the user leaves the current channel and joins another channel, the default audio route is used.This method applies to Android and iOS only.Call this method after joining a channel.If the user uses an external audio playback device such as a Bluetooth or wired headset, this method does not take effect, and the SDK plays audio through the external device. When the user uses multiple external devices, the SDK plays audio through the last connected device. /// - /// * [speakerOn] Whether to set the speakerphone as the default audio route:true: Set the speakerphone as the audio route temporarily.false: Do not set the speakerphone as the audio route. + /// * [speakerOn] Sets whether to enable the speakerphone or earpiece:true: Set the audio route to the speakerphone.false: Set the audio route to the earpiece. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setEnableSpeakerphone(bool speakerOn); /// Checks whether the speakerphone is enabled. @@ -3981,39 +4540,52 @@ abstract class RtcEngine { Future isSpeakerphoneEnabled(); /// Gets a list of shareable screens and windows. - /// You can call this method before sharing a screen or window to get a list of shareable screens and windows, which enables a user to use thumbnails in the list to choose a particular screen or window to share. This list also contains important information such as window ID and screen ID, with which you can call startScreenCaptureByWindowId or startScreenCaptureByDisplayId to start the sharing. + /// You can call this method before sharing a screen or window to get a list of shareable screens and windows, which enables a user to use thumbnails in the list to easily choose a particular screen or window to share. This list also contains important information such as window ID and screen ID, with which you can call startScreenCaptureByWindowId or startScreenCaptureByDisplayId to start the sharing.This method applies to macOS and Windows only. /// /// * [thumbSize] The target size of the screen or window thumbnail (the width and height are in pixels). The SDK scales the original image to make the length of the longest side of the image the same as that of the target size without distorting the original image. For example, if the original image is 400 × 300 and thumbSize is 100 × 100, the actual size of the thumbnail is 100 × 75. If the target size is larger than the original size, the thumbnail is the original image and the SDK does not scale it.
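A sketch of the audio-route calls above: the default route is fixed before joining, and the transient route is switched after joining (not part of the patch; assumes `engine` is initialized):

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> configureAudioRoute(RtcEngine engine) async {
  // Before joining a channel: speakerphone as the default route.
  await engine.setDefaultAudioRouteToSpeakerphone(true);
  // After joining: switch this channel's route to the earpiece.
  await engine.setEnableSpeakerphone(false);
  print('speakerphone on: ${await engine.isSpeakerphoneEnabled()}');
}
```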
/// * [iconSize] The target size of the icon corresponding to the application program (the width and height are in pixels). The SDK scales the original image to make the length of the longest side of the image the same as that of the target size without distorting the original image. For example, if the original image is 400 × 300 and iconSize is 100 × 100, the actual size of the icon is 100 × 75. If the target size is larger than the original size, the icon is the original image and the SDK does not scale it. - /// * [includeScreen] Whether the SDK returns the screen information in addition to the window information:true: The SDK returns screen and window information.false: The SDK returns the window information only. + /// * [includeScreen] Whether the SDK returns the screen information in addition to the window information:true: The SDK returns screen and window information.false: The SDK returns window information only. + /// + /// Returns + /// The ScreenCaptureSourceInfo array. Future<List<ScreenCaptureSourceInfo>> getScreenCaptureSources( {required SIZE thumbSize, required SIZE iconSize, required bool includeScreen}); - /// @nodoc + /// Sets the operational permission of the SDK on the audio session. + /// The SDK and the app can both configure the audio session by default. If you need to only use the app to configure the audio session, this method restricts the operational permission of the SDK on the audio session.You can call this method either before or after joining a channel. Once you call this method to restrict the operational permission of the SDK on the audio session, the restriction takes effect when the SDK needs to change the audio session.This method is only available for iOS platforms.This method does not restrict the operational permission of the app on the audio session. + /// + /// * [restriction] The operational permission of the SDK on the audio session. See AudioSessionOperationRestriction . This parameter is in bit mask format, and each bit corresponds to a permission. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setAudioSessionOperationRestriction( AudioSessionOperationRestriction restriction); - /// Shares the screen by specifying the display ID. - /// This method shares a screen or part of the screen.There are two ways to start screen sharing, you can choose one according to the actual needs:Call this method before joining a channel, and then call joinChannel [2/2] to join a channel and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing.Call this method after joining a channel, and then call updateChannelMediaOptions and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing.This method is for Windows and macOS only. + /// Captures the screen by specifying the display ID. + /// This method shares a screen or part of the screen.There are two ways to start screen sharing, you can choose one according to your needs:Call this method before joining a channel, and then call joinChannel to join a channel and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing.Call this method after joining a channel, and then call updateChannelMediaOptions and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing.This method is for Windows and macOS only.
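As a usage sketch of the source-picking flow above (macOS/Windows): enumerate sources with getScreenCaptureSources, then hand the chosen window ID to startScreenCaptureByWindowId. Assumes an initialized RtcEngine named engine; the sourceId field access follows the 6.x API surface and should be read as illustrative:

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> pickAndShareWindow(RtcEngine engine) async {
  final sources = await engine.getScreenCaptureSources(
    thumbSize: const SIZE(width: 100, height: 100),
    iconSize: const SIZE(width: 32, height: 32),
    includeScreen: true,
  );
  if (sources.isEmpty) return;
  // A real app would render the thumbnails and let the user pick one.
  final source = sources.first;
  await engine.startScreenCaptureByWindowId(
    windowId: source.sourceId ?? 0, // illustrative field access
    regionRect: const Rectangle(x: 0, y: 0, width: 0, height: 0), // whole window
    captureParams: const ScreenCaptureParameters(
      dimensions: VideoDimensions(width: 1920, height: 1080),
      frameRate: 15,
    ),
  );
}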
/// /// * [displayId] The display ID of the screen to be shared. - /// * [regionRect] (Optional) Sets the relative location of the region to the screen. If you do not set this parameter, the SDK shares the whole screen. See Rectangle . If the specified region overruns the screen, the SDK shares only the region within it; if you set width or height as 0, the SDK shares the whole screen. + /// * [regionRect] (Optional) Sets the relative location of the region to the screen. If you do not set this parameter, the SDK shares the whole screen. See Rectangle . /// * [captureParams] Screen sharing configurations. The default video dimension is 1920 x 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The parameter is invalid.-8: The screen sharing state is invalid. Probably because you have shared other screens or windows. Try calling stopScreenCapture to stop the current sharing and start sharing the screen again. Future startScreenCaptureByDisplayId( {required int displayId, required Rectangle regionRect, required ScreenCaptureParameters captureParams}); - /// Shares the whole or part of a screen by specifying the screen rect. - /// There are two ways to start screen sharing, you can choose one according to the actual needs: - /// Call this method before joining a channel, and then call joinChannel [2/2] to join a channel and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing. - /// Call this method after joining a channel, and then call updateChannelMediaOptions and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing. Deprecated:This method is deprecated. Use startScreenCaptureByDisplayId instead. Agora strongly recommends using startScreenCaptureByDisplayId if you need to start screen sharing on a device connected to another display.This method shares a screen or part of the screen. You need to specify the area of the screen to be shared.This method applies to Windows only. + /// Captures the whole or part of a screen by specifying the screen rect. + /// There are two ways to start screen sharing, you can choose one according to your needs:Call this method before joining a channel, and then call joinChannel to join a channel and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing.Call this method after joining a channel, and then call updateChannelMediaOptions and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing.Deprecated:This method is deprecated. Use startScreenCaptureByDisplayId instead. Agora strongly recommends using startScreenCaptureByDisplayId if you need to start screen sharing on a device connected to another display.This method shares a screen or part of the screen. You need to specify the area of the screen to be shared.This method applies to Windows only. /// /// * [screenRect] Sets the relative location of the screen to the virtual screen. /// * [regionRect] Rectangle . If the specified region overruns the screen, the SDK shares only the region within it; if you set width or height as 0, the SDK shares the whole screen. - /// * [captureParams] The screen sharing encoding parameters. The default video dimension is 1920 x 1080, that is, 2,073,600 pixels. 
Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters . + /// * [captureParams] The screen sharing encoding parameters. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The parameter is invalid.-8: The screen sharing state is invalid. Probably because you have shared other screens or windows. Try calling stopScreenCapture to stop the current sharing and start sharing the screen again. Future startScreenCaptureByScreenRect( {required Rectangle screenRect, required Rectangle regionRect, @@ -4026,14 +4598,15 @@ abstract class RtcEngine { /// The DeviceInfo object that identifies the audio device information.Not null: Success.Null: Failure. Future getAudioDeviceInfo(); - /// Shares the whole or part of a window by specifying the window ID. - /// There are two ways to start screen sharing, you can choose one according to the actual needs: - /// Call this method before joining a channel, and then call joinChannel [2/2] to join a channel and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing. - /// Call this method after joining a channel, and then call updateChannelMediaOptions and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing. This method shares a window or part of the window. You need to specify the ID of the window to be shared.Applies to the macOS and Windows platforms only.The window sharing feature of the Agora SDK relies on WGC (Windows Graphics Capture) or GDI (Graphics Device Interface) capture, and WGC cannot be set to disable mouse capture on systems earlier than Windows 10 2004. Therefore, captureMouseCursor(false) might not work when you start window sharing on a device with a system earlier than Windows 10 2004. See ScreenCaptureParameters .This method supports window sharing of UWP (Universal Windows Platform) applications. Agora tests the mainstream UWP applications by using the lastest SDK, see details as follows: + /// Captures the whole or part of a window by specifying the window ID. + /// There are two ways to start screen sharing, you can choose one according to your needs:Call this method before joining a channel, and then call joinChannel to join a channel and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing.Call this method after joining a channel, and then call updateChannelMediaOptions and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing.This method captures a window or part of the window. You need to specify the ID of the window to be captured.This method applies to macOS and Windows only.The window sharing feature of the Agora SDK relies on WGC (Windows Graphics Capture) or GDI (Graphics Device Interface) capture, and WGC cannot be set to disable mouse capture on systems earlier than Windows 10 2004. Therefore, captureMouseCursor(false) might not work when you start window sharing on a device with a system earlier than Windows 10 2004. See ScreenCaptureParameters .This method supports window sharing of UWP (Universal Windows Platform) applications.
Agora tests the mainstream UWP applications by using the latest SDK, see details as follows: /// /// * [windowId] The ID of the window to be shared. /// * [regionRect] (Optional) Sets the relative location of the region to the screen. If you do not set this parameter, the SDK shares the whole screen. See Rectangle . If the specified region overruns the window, the SDK shares only the region within it; if you set width or height as 0, the SDK shares the whole window. - /// * [captureParams] Screen sharing configurations. The default video dimension is 1920 x 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters . + /// * [captureParams] Screen sharing configurations. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The parameter is invalid.-8: The screen sharing state is invalid. Probably because you have shared other screens or windows. Try calling stopScreenCapture to stop the current sharing and start sharing the screen again. Future startScreenCaptureByWindowId( {required int windowId, required Rectangle regionRect, @@ -4043,42 +4616,94 @@ abstract class RtcEngine { /// A content hint suggests the type of the content being shared, so that the SDK applies different optimization algorithms to different types of content. If you don't call this method, the default content hint is contentHintNone.You can call this method either before or after you start screen sharing. /// /// * [contentHint] The content hint for screen sharing. See VideoContentHint . - Future setScreenCaptureContentHint(VideoContentHint contentHint); - - /// Sets the screen sharing scenario. - /// When you start screen sharing or window sharing, you can call this method to set the screen sharing scenario. The SDK adjusts the video quality and experience of the sharing according to the scenario.This method applies to macOS and Windows only. /// - /// * [screenScenario] The screen sharing scenario. See ScreenScenarioType . - Future setScreenCaptureScenario(ScreenScenarioType screenScenario); + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The parameter is invalid.-8: The screen sharing state is invalid. Probably because you have shared other screens or windows. Try calling stopScreenCapture to stop the current sharing and start sharing the screen again. + Future setScreenCaptureContentHint(VideoContentHint contentHint); - /// Updates the screen sharing region. + /// Updates the screen capturing region. /// Call this method after starting screen sharing or window sharing. /// /// * [regionRect] The relative location of the screen-share area to the screen or window. If you do not set this parameter, the SDK shares the whole screen or window. See Rectangle . If the specified region overruns the screen or window, the SDK shares only the region within it; if you set width or height as 0, the SDK shares the whole screen or window.
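A one-line sketch of the region update just described, assuming sharing has already started and engine is an initialized RtcEngine (names illustrative):

Future<void> shareTopLeftQuarter(RtcEngine engine) async {
  // Narrow the shared area to a 960x540 region at the top-left corner;
  // width/height of 0 would share the whole screen or window again.
  await engine.updateScreenCaptureRegion(
    const Rectangle(x: 0, y: 0, width: 960, height: 540),
  );
}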
+ /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The parameter is invalid.-8: The screen sharing state is invalid. Probably because you have shared other screens or windows. Try calling stopScreenCapture to stop the current sharing and start sharing the screen again. Future updateScreenCaptureRegion(Rectangle regionRect); - /// Updates the screen sharing parameters. + /// Updates the screen capturing parameters. /// This method is for Windows and macOS only.Call this method after starting screen sharing or window sharing. /// - /// * [captureParams] The screen sharing encoding parameters. The default video dimension is 1920 x 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters + /// * [captureParams] The screen sharing encoding parameters. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// -2: The parameter is invalid. + /// -8: The screen sharing state is invalid. Probably because you have shared other screens or windows. Try calling stopScreenCapture to stop the current sharing and start sharing the screen again. Future updateScreenCaptureParameters( ScreenCaptureParameters captureParams); - /// Starts screen sharing. - /// There are two ways to start screen sharing, you can choose one according to your needs:Call this method before joining a channel, then call joinChannel [2/2] to join channel and set publishScreenCaptureVideo to true to start screen sharing.Call this method after joining a channel, then call updateChannelMediaOptions and set publishScreenCaptureVideo to true to start screen sharing.This method applies to Android and iOS only.On the iOS platform, screen sharing is only available on iOS 12.0 and later.The billing for the screen sharing stream is based on the dimensions in ScreenVideoParameters. When you do not pass in a value, Agora bills you at 1280 × 720; when you pass a value in, Agora bills you at that value. If you are using the custom audio source instead of the SDK to capture audio, Agora recommends you add the keep-alive processing logic to your application to avoid screen sharing stopping when the application goes to the background.This feature requires high-performance device, and Agora recommends that you use it on iPhone X and later models.This method relies on the iOS screen sharing dynamic library AgoraReplayKitExtension.xcframework. 
If the dynamic library is deleted, screen sharing cannot be enabled normally.On the Android platform, make sure the user has granted the app screen capture permission.On Android 9 and later, to avoid the application being killed by the system after going to the background, Agora recommends you add the foreground service permission android.permission.FOREGROUND_SERVICE to the /app/Manifests/AndroidManifest.xml file.Due to performance limitations, screen sharing is not supported on Android TV.Due to system limitations, if you are using Huawei phones, do not adjust the video encoding resolution of the screen sharing stream during the screen sharing, or you could experience crashes.Due to system limitations, some Xiaomi devices do not support capturing system audio during screen sharing.To avoid system audio capture failure when screen sharing, Agora recommends that you set the audio application scenario to audioScenarioGameStreaming by using the setAudioScenario method before joining the channel. + /// Starts screen capture. + /// There are two ways to start screen sharing, you can choose one according to your needs:Call this method before joining a channel, then call joinChannel to join a channel and set publishScreenCaptureVideo to true to start screen sharing.Call this method after joining a channel, then call updateChannelMediaOptions and set publishScreenCaptureVideo to true to start screen sharing.This method applies to Android and iOS only.On the iOS platform, screen sharing is only available on iOS 12.0 and later.The billing for the screen sharing stream is based on the dimensions in ScreenVideoParameters. When you do not pass in a value, Agora bills you at 1280 × 720; when you pass a value in, Agora bills you at that value. If you are using the custom audio source instead of the SDK to capture audio, Agora recommends you add the keep-alive processing logic to your application to avoid screen sharing stopping when the application goes to the background.This feature requires a high-performance device, and Agora recommends that you use it on iPhone X and later models.This method relies on the iOS screen sharing dynamic library AgoraReplayKitExtension.xcframework. If the dynamic library is deleted, screen sharing cannot be enabled normally.On the Android platform, make sure the user has granted the app screen capture permission.On Android 9 and later, to avoid the application being killed by the system after going to the background, Agora recommends you add the foreground service permission android.permission.FOREGROUND_SERVICE to the /app/Manifests/AndroidManifest.xml file.Due to performance limitations, screen sharing is not supported on Android TV.Due to system limitations, if you are using Huawei phones, do not adjust the video encoding resolution of the screen sharing stream during the screen sharing, or you could experience crashes.Due to system limitations, some Xiaomi devices do not support capturing system audio during screen sharing.To avoid system audio capture failure when screen sharing, Agora recommends that you set the audio application scenario to audioScenarioGameStreaming by using the setAudioScenario method before joining the channel. /// /// * [captureParams] The screen sharing encoding parameters. The default video dimension is 1920 x 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters2 .
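A mobile-side sketch of the join-first flow described above: start the capture, then publish it with updateChannelMediaOptions. Assumes an initialized RtcEngine named engine that has already joined a channel; the option values are illustrative:

Future<void> startMobileScreenShare(RtcEngine engine) async {
  // Android/iOS: capture the device screen, including system audio.
  await engine.startScreenCapture(
    const ScreenCaptureParameters2(captureAudio: true, captureVideo: true),
  );
  // Publish the captured tracks on the existing connection.
  await engine.updateChannelMediaOptions(
    const ChannelMediaOptions(
      publishScreenCaptureVideo: true,
      publishScreenCaptureAudio: true,
      publishCameraTrack: false,
    ),
  );
}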
+ /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The parameter is null. Future startScreenCapture(ScreenCaptureParameters2 captureParams); - /// Updates the screen sharing parameters. + /// Starts screen capture. + /// This method, as well as startScreenCapture , startScreenCaptureByDisplayId , and startScreenCaptureByWindowId , all have the capability to start screen capture, with the following differences:startScreenCapture only applies to Android and iOS, whereas this method only applies to Windows and macOS.startScreenCaptureByDisplayId and startScreenCaptureByWindowId only support capturing video from a single screen or window. By calling this method and specifying the sourceType parameter, you can capture multiple video streams used for local video mixing or multi-channel publishing.This method applies to macOS and Windows only.If you call this method to start screen capture, Agora recommends that you call stopScreenCaptureBySourceType to stop the capture and avoid using stopScreenCapture . + /// + /// * [sourceType] The type of the video source. See VideoSourceType .Windows supports up to four screen capture video streams.macOS supports only one screen capture video stream. You can only set this parameter to videoSourceScreen(2). + /// * [config] The configuration of the captured screen. See ScreenCaptureConfiguration . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + Future startScreenCaptureBySourceType( + {required VideoSourceType sourceType, + required ScreenCaptureConfiguration config}); + + /// Updates the screen capturing parameters. /// If the system audio is not captured when screen sharing is enabled, and then you want to update the parameter configuration and publish the system audio, you can refer to the following steps:Call this method, and set captureAudio to true.Call updateChannelMediaOptions , and set publishScreenCaptureAudio to true to publish the audio captured by the screen.This method applies to Android and iOS only.On the iOS platform, screen sharing is only available on iOS 12.0 and later. /// /// * [captureParams] The screen sharing encoding parameters. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters2 . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// -2: The parameter is invalid. + /// -8: The screen sharing state is invalid. Probably because you have shared other screens or windows. Try calling stopScreenCapture to stop the current sharing and start sharing the screen again. Future updateScreenCapture(ScreenCaptureParameters2 captureParams); - /// Stops screen sharing. + /// Queries the highest frame rate supported by the device during screen sharing. + /// + /// Returns + /// The highest frame rate supported by the device, if the method is called successfully. See ScreenCaptureFramerateCapability .< 0: Failure. + Future queryScreenCaptureCapability(); + + /// Sets the screen sharing scenario.
+ /// When you start screen sharing or window sharing, you can call this method to set the screen sharing scenario. The SDK adjusts the video quality and experience of the sharing according to the scenario.Agora recommends that you call this method before joining a channel. + /// + /// * [screenScenario] The screen sharing scenario. See ScreenScenarioType . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + Future setScreenCaptureScenario(ScreenScenarioType screenScenario); + + /// Stops screen capture. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future stopScreenCapture(); + /// Stops screen capture. + /// After calling startScreenCaptureBySourceType to start capturing video from one or more screens, you can call this method and set the sourceType parameter to stop capturing from the specified screens.This method applies to macOS and Windows only.If you call startScreenCapture , startScreenCaptureByWindowId , or startScreenCaptureByDisplayId to start screen capture, Agora recommends that you call stopScreenCapture instead to stop the capture. + /// + /// * [sourceType] The type of the video source. See VideoSourceType . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + Future stopScreenCaptureBySourceType(VideoSourceType sourceType); + /// Retrieves the call ID. /// When a user joins a channel on a client, a callId is generated to identify the call from the client. Some methods, such as rate and complain , must be called after the call ends to submit feedback to the SDK. These methods require the callId parameter.Call this method after joining a channel. /// @@ -4090,8 +4715,11 @@ abstract class RtcEngine { /// Ensure that you call this method after leaving a channel. /// /// * [callId] The current call ID. You can get the call ID by calling getCallId . - /// * [rating] The rating of the call. The value is between 1 (lowest score) and 5 (highest score). If you set a value out of this range, the SDK returns the -2 (ERR_INVALID_ARGUMENT) error. + /// * [rating] The rating of the call. The value is between 1 (the lowest score) and 5 (the highest score). If you set a value out of this range, the SDK returns the -2 (ERR_INVALID_ARGUMENT) error. /// * [description] A description of the call. The string length should be less than 800 bytes. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2 (ERR_INVALID_ARGUMENT).-3 (ERR_NOT_READY). Future rate( {required String callId, required int rating, @@ -4102,76 +4730,111 @@ abstract class RtcEngine { /// /// * [callId] The current call ID. You can get the call ID by calling getCallId . /// * [description] A description of the call. The string length should be less than 800 bytes.
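A sketch of the feedback flow built from getCallId, rate, and complain described above (assumes engine is an initialized RtcEngine; the strings are placeholders):

Future<void> submitCallFeedback(RtcEngine engine) async {
  // Fetch the call ID while the call context is available; rate and
  // complain are meant to be called after the call ends.
  final callId = await engine.getCallId();
  await engine.rate(callId: callId, rating: 4, description: 'minor choppiness');
  await engine.complain(callId: callId, description: 'echo during the first minute');
}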
+ /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The parameter is invalid.-3: The SDK is not ready. Possible reasons include the following:The initialization of RtcEngine fails. Reinitialize the RtcEngine.No user has joined the channel when the method is called. Please check your code logic.The user has not left the channel when the rate or complain method is called. Please check your code logic.The audio module is disabled. The program is not complete. Future complain({required String callId, required String description}); - /// Starts Media Push without transcoding. - /// Ensure that you enable the media push service before using this function.Call this method after joining a channel.Only hosts in the LIVE_BROADCASTING profile can call this method.If you want to retry pushing streams after a failed push, make sure to call stopRtmpStream first, then call this method to retry pushing streams; otherwise, the SDK returns the same error code as the last failed push.You can call this method to push an audio or video stream to the specified CDN address. This method can push media streams to only one CDN address at a time, so if you need to push streams to multiple addresses, call this method multiple times.After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the Media Push. + /// Starts pushing media streams to a CDN without transcoding. + /// Ensure that you enable the Media Push service before using this function. See Enable Media Push. + /// Call this method after joining a channel. + /// Only hosts in the LIVE_BROADCASTING profile can call this method. + /// If you want to retry pushing streams after a failed push, make sure to call stopRtmpStream first, then call this method to retry pushing streams; otherwise, the SDK returns the same error code as the last failed push. + /// Agora recommends that you use the server-side Media Push function. You can call this method to push an audio or video stream to the specified CDN address. This method can push media streams to only one CDN address at a time, so if you need to push streams to multiple addresses, call this method multiple times.After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the streaming. + /// + /// * [url] The address of Media Push. The format is RTMP or RTMPS. The character length cannot exceed 1024 bytes. Special characters such as Chinese characters are not supported. /// - /// * [url] The address of media push. The format is RTMP or RTMPS. The character length cannot exceed 1024 bytes. Special characters such as Chinese characters are not supported. + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. + /// -2: The URL is null or the string length is 0. + /// -7: The SDK is not initialized before calling this method. + /// -19: The Media Push URL is already in use, use another URL instead. Future startRtmpStreamWithoutTranscoding(String url); /// Starts Media Push and sets the transcoding configuration. - /// You can call this method to push an audio or video stream to the specified CDN address and set the transcoding configuration.
This method can push media streams to only one CDN address at a time, so if you need to push streams to multiple addresses, call this method multiple times.After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the Media Push.Ensure that you enable the media push service before using this function.Call this method after joining a channel.Only hosts in the LIVE_BROADCASTING profile can call this method.If you want to retry pushing streams after a failed push, make sure to call stopRtmpStream first, then call this method to retry pushing streams; otherwise, the SDK returns the same error code as the last failed push. + /// Agora recommends that you use the server-side Media Push function. You can call this method to push a live audio-and-video stream to the specified CDN address and set the transcoding configuration. This method can push media streams to only one CDN address at a time, so if you need to push streams to multiple addresses, call this method multiple times.After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the streaming.Ensure that you enable the Media Push service before using this function. See Enable Media Push.Call this method after joining a channel.Only hosts in the LIVE_BROADCASTING profile can call this method.If you want to retry pushing streams after a failed push, make sure to call stopRtmpStream first, then call this method to retry pushing streams; otherwise, the SDK returns the same error code as the last failed push. + /// + /// * [url] The address of Media Push. The format is RTMP or RTMPS. The character length cannot exceed 1024 bytes. Special characters such as Chinese characters are not supported. + /// * [transcoding] The transcoding configuration for Media Push. See LiveTranscoding . /// - /// * [url] The address of media push. The format is RTMP or RTMPS. The character length cannot exceed 1024 bytes. Special characters such as Chinese characters are not supported. - /// * [transcoding] The transcoding configuration for media push. See LiveTranscoding . + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The URL is null or the string length is 0.-7: The SDK is not initialized before calling this method.-19: The Media Push URL is already in use, use another URL instead. Future startRtmpStreamWithTranscoding( {required String url, required LiveTranscoding transcoding}); /// Updates the transcoding configuration. - /// After you start pushing media streams to CDN with transcoding, you can dynamically update the transcoding configuration according to the scenario. The SDK triggers the onTranscodingUpdated callback after the transcoding configuration is updated. + /// Agora recommends that you use the server-side Media Push function. After you start pushing media streams to CDN with transcoding, you can dynamically update the transcoding configuration according to the scenario. The SDK triggers the onTranscodingUpdated callback after the transcoding configuration is updated. /// - /// * [transcoding] The transcoding configuration for media push. See LiveTranscoding . + /// * [transcoding] The transcoding configuration for Media Push. See LiveTranscoding . 
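A sketch of the transcoded Media Push flow around these APIs, assuming a joined host with uid hostUid and an initialized RtcEngine named engine; the URL is a placeholder and the LiveTranscoding/TranscodingUser field names follow the 6.x API surface rather than this patch:

Future<void> pushToCdn(RtcEngine engine, int hostUid) async {
  const url = 'rtmp://example.com/live/demo'; // placeholder address
  final transcoding = LiveTranscoding(
    width: 1280,
    height: 720,
    videoBitrate: 1130,
    transcodingUsers: [
      TranscodingUser(uid: hostUid, x: 0, y: 0, width: 1280, height: 720),
    ],
  );
  await engine.startRtmpStreamWithTranscoding(url: url, transcoding: transcoding);
  // Layout changes go through updateRtmpTranscoding, not a restart.
  await engine.updateRtmpTranscoding(transcoding);
  // Stop pushing to this address when the live session ends.
  await engine.stopRtmpStream(url);
}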
/// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future updateRtmpTranscoding(LiveTranscoding transcoding); /// Stops pushing media streams to a CDN. - /// You can call this method to stop the live stream on the specified CDN address. This method can stop pushing media streams to only one CDN address at a time, so if you need to stop pushing streams to multiple addresses, call this method multiple times.After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the streaming. + /// Agora recommends that you use the server-side Media Push function. You can call this method to stop the live stream on the specified CDN address. This method can stop pushing media streams to only one CDN address at a time, so if you need to stop pushing streams to multiple addresses, call this method multiple times.After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the streaming. + /// + /// * [url] The address of Media Push. The format is RTMP or RTMPS. The character length cannot exceed 1024 bytes. Special characters such as Chinese characters are not supported. /// - /// * [url] The address of media push. The format is RTMP or RTMPS. The character length cannot exceed 1024 bytes. Special characters such as Chinese characters are not supported. + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future stopRtmpStream(String url); /// Starts the local video mixing. - /// After calling this method, you can merge multiple video streams into one video stream locally. Common scenarios include the following:In a live streaming scenario with cohosts or when using the Media Push function, you can locally mix the videos of multiple hosts into one.In scenarios where you capture multiple local video streams (for example, video captured by cameras, screen sharing streams, video files, or pictures), you can merge them into one video stream and then publish the mixed video stream after joining the channel. + /// After calling this method, you can merge multiple video streams into one video stream locally. For example, you can merge the video streams captured by the camera, screen sharing, media player, remote video, video files, images, etc. into one video stream, and then publish the mixed video stream to the channel.Local video mixing requires more CPU resources. 
Therefore, Agora recommends enabling this function on devices with higher performance.If you need to mix locally captured video streams, the SDK supports the following capture combinations:On the Windows platform, it supports up to 4 video streams captured by cameras + 4 screen sharing streams.On the macOS platform, it supports up to 4 video streams captured by cameras + 1 screen sharing stream.On Android and iOS platforms, it supports video streams captured by up to 2 cameras (the device itself needs to support dual cameras or supports external cameras) + 1 screen sharing stream.If you need to mix the locally collected video streams, you need to call this method after startCameraCapture or startScreenCaptureBySourceType . If you want to publish the mixed video stream to the channel, you need to set publishTranscodedVideoTrack in ChannelMediaOptions to true when calling joinChannel or updateChannelMediaOptions . + /// + /// * [config] Configuration of the local video mixing, see LocalTranscoderConfiguration .The maximum resolution of each video stream participating in the local video mixing is 4096 × 2160. If this limit is exceeded, video mixing does not take effect.The maximum resolution of the mixed video stream is 4096 × 2160. /// - /// * [config] Configuration of the local video mixing. See LocalTranscoderConfiguration . + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future startLocalVideoTranscoder(LocalTranscoderConfiguration config); - /// Update the local video mixing configuration. - /// After calling startLocalVideoTranscoder , call this method if you want to update the local video mixing configuration. + /// Updates the local video mixing configuration. + /// After calling startLocalVideoTranscoder , call this method if you want to update the local video mixing configuration.If you want to update the video source type used for local video mixing, such as adding a second camera or screen to capture video, you need to call this method after startCameraCapture or startScreenCaptureBySourceType . /// /// * [config] Configuration of the local video mixing, see LocalTranscoderConfiguration . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future updateLocalTranscoderConfiguration( LocalTranscoderConfiguration config); /// Stops the local video mixing. /// After calling startLocalVideoTranscoder , call this method if you want to stop the local video mixing. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. Future stopLocalVideoTranscoder(); - /// Starts video capture with a primary camera. + /// Starts camera capture. + /// You can call this method to start capturing video from one or more cameras by specifying sourceType.On the iOS platform, if you want to enable multi-camera capture, you need to call enableMultiCamera and set enabled to true before calling this method. /// - /// * [config] The configuration of the video capture with a primary camera. See CameraCapturerConfiguration . - Future startPrimaryCameraCapture(CameraCapturerConfiguration config); - - /// Starts video capture with a secondary camera.
+ /// * [sourceType] The type of the video source. See VideoSourceType .On the mobile platforms, you can capture video from up to 2 cameras, provided the device has dual cameras or supports an external camera.On the desktop platforms, you can capture video from up to 4 cameras. + /// * [config] The configuration of the video capture. See CameraCapturerConfiguration .On the iOS platform, this parameter has no practical function. Use the config parameter in enableMultiCamera instead to set the video capture configuration. /// - /// * [config] The configuration of the video capture with a primary camera. See CameraCapturerConfiguration . - Future startSecondaryCameraCapture(CameraCapturerConfiguration config); - - /// Stops capturing video through a primary camera. - /// You can call this method to stop capturing video through the primary camera after calling the startPrimaryCameraCapture . - Future stopPrimaryCameraCapture(); + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + Future startCameraCapture( + {required VideoSourceType sourceType, + required CameraCapturerConfiguration config}); - /// Stops capturing video through the second camera. - /// startSecondaryCameraCapture You can call this method to stop capturing video through the second camera after calling the .On the iOS platform, if you want to disable multi-camera capture, you need to call enableMultiCamera after calling this method and set enabled to false. - Future stopSecondaryCameraCapture(); + /// Stops camera capture. + /// After calling startCameraCapture to start capturing video through one or more cameras, you can call this method and set the sourceType parameter to stop the capture from the specified cameras.On the iOS platform, if you want to disable multi-camera capture, you need to call enableMultiCamera after calling this method and set enabled to false.If you are using the local video mixing function, calling this method can cause the local video mixing to be interrupted. + /// + /// * [sourceType] The type of the video source. See VideoSourceType . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + Future stopCameraCapture(VideoSourceType sourceType); /// Sets the rotation angle of the captured video. - /// When the video capture device does not have the gravity sensing function, you can call this method to manually adjust the rotation angle of the captured video. + /// This method applies to Windows only.When the video capture device does not have the gravity sensing function, you can call this method to manually adjust the rotation angle of the captured video. /// /// * [type] The video source type. See VideoSourceType . /// * [orientation] The clockwise rotation angle. See VideoOrientation . /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setCameraDeviceOrientation( {required VideoSourceType type, required VideoOrientation orientation}); @@ -4179,42 +4842,30 @@ abstract class RtcEngine { Future setScreenCaptureOrientation( {required VideoSourceType type, required VideoOrientation orientation}); - /// Starts sharing the primary screen. 
- /// - /// * [config] The configuration of the captured screen. See ScreenCaptureConfiguration . - Future startPrimaryScreenCapture(ScreenCaptureConfiguration config); - - /// Starts sharing a secondary screen. - /// - /// * [config] The configuration of the captured screen. See ScreenCaptureConfiguration . - Future startSecondaryScreenCapture(ScreenCaptureConfiguration config); - - /// Stop sharing the first screen. - /// After calling startPrimaryScreenCapture , you can call this method to stop sharing the first screen. - Future stopPrimaryScreenCapture(); - - /// Stops sharing the secondary screen. - /// After calling startSecondaryScreenCapture , you can call this method to stop sharing the secondary screen. - Future stopSecondaryScreenCapture(); - /// Gets the current connection state of the SDK. /// You can call this method either before or after joining a channel. /// /// Returns - /// The current connection state. + /// The current connection state. See ConnectionStateType . Future getConnectionState(); - /// Adds event handlers. - /// The SDK uses the RtcEngineEventHandler class to send callbacks to the app. The app inherits the methods of this class to receive these callbacks. All methods in this interface class have default (empty) implementations. Therefore, the application can only inherit some required events. In the callbacks, avoid time-consuming tasks or calling APIs that can block the thread, such as the sendStreamMessage method. + /// Adds event handlers. + /// The SDK uses the RtcEngineEventHandler class to send callbacks to the app. The app inherits the methods of this class to receive these callbacks. All methods in this class have default (empty) implementations. Therefore, apps only need to inherit callbacks according to their scenarios. In the callbacks, avoid time-consuming tasks or calling APIs that can block the thread, such as the sendStreamMessage method. /// Otherwise, the SDK may not work properly. /// - /// * [eventHandler] Callback events to be added. + /// * [eventHandler] Callback events to be added. See RtcEngineEventHandler . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. void registerEventHandler(RtcEngineEventHandler eventHandler); - /// Removes the specified IRtcEngineEventHandler instance. + /// Removes the specified callback handler. /// This method removes the specified callback handler. For callback events that you want to listen for only once, call this method to remove the relevant callback handler after you have received them. /// /// * [eventHandler] The callback handler to be deleted. See RtcEngineEventHandler . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. void unregisterEventHandler(RtcEngineEventHandler eventHandler); /// @nodoc Future setRemoteUserPriority( {required int uid, required PriorityType userPriority}); /// Sets the built-in encryption mode. - /// Deprecated:Use enableEncryption instead.The Agora SDK supports built-in encryption, which is set to the AES-128-GCM mode by default. Call this method to use other encryption modes. All users in the same channel must use the same encryption mode and secret.
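To illustrate the registerEventHandler / unregisterEventHandler contract above (only the callbacks you implement are invoked), a sketch assuming an initialized RtcEngine named engine:

void listenForEvents(RtcEngine engine) {
  final handler = RtcEngineEventHandler(
    onJoinChannelSuccess: (RtcConnection connection, int elapsed) {
      print('joined ${connection.channelId} after ${elapsed}ms');
    },
    onAudioRoutingChanged: (int routing) {
      print('audio route changed: $routing');
    },
  );
  engine.registerEventHandler(handler);
  // Keep the same handler instance around so it can be removed later:
  // engine.unregisterEventHandler(handler);
}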
Refer to the information related to the AES encryption algorithm on the differences between the encryption modes.Before calling this method, please call setEncryptionSecret to enable the built-in encryption function. + /// Deprecated:Use enableEncryption instead.The SDK supports built-in encryption schemes; AES-128-GCM is used by default. Call this method to use other encryption modes. All users in the same channel must use the same encryption mode and secret. Refer to the information related to the AES encryption algorithm on the differences between the encryption modes.Before calling this method, please call setEncryptionSecret to enable the built-in encryption function. /// /// * [encryptionMode] The following encryption modes:"aes-128-xts": 128-bit AES encryption, XTS mode."aes-128-ecb": 128-bit AES encryption, ECB mode."aes-256-xts": 256-bit AES encryption, XTS mode."sm4-128-ecb": 128-bit SM4 encryption, ECB mode."aes-128-gcm": 128-bit AES encryption, GCM mode."aes-256-gcm": 256-bit AES encryption, GCM mode."": When this parameter is set as null, the encryption mode is set as "aes-128-gcm" by default. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setEncryptionMode(String encryptionMode); /// Enables built-in encryption with an encryption password before users join a channel. - /// Deprecated:This method is deprecated. Use enableEncryption instead.Before joining the channel, you need to call this method to set the secret parameter to enable the built-in encryption. All users in the same channel should use the same secret. The secret is automatically cleared once a user leaves the channel. If you do not specify the secret or secret is set as null, the built-in encryption is disabled.Do not use this method for CDN live streaming.For optimal transmission, ensure that the encrypted data size does not exceed the original data size + 16 bytes. 16 bytes is the maximum padding size for AES encryption. + /// Deprecated:Use enableEncryption instead.Before joining the channel, you need to call this method to set the secret parameter to enable the built-in encryption. All users in the same channel should use the same secret. The secret is automatically cleared once a user leaves the channel. If you do not specify the secret or secret is set as null, the built-in encryption is disabled.Do not use this method for Media Push.For optimal transmission, ensure that the encrypted data size does not exceed the original data size + 16 bytes. 16 bytes is the maximum padding size for AES encryption. /// /// * [secret] The encryption password. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setEncryptionSecret(String secret); - /// Enables/Disables the built-in encryption. + /// Enables or disables the built-in encryption. /// In scenarios requiring high security, Agora recommends calling this method to enable the built-in encryption before joining a channel.All users in the same channel must use the same encryption mode and encryption key. After the user leaves the channel, the SDK automatically disables the built-in encryption.
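A sketch of enabling the built-in encryption (enableEncryption) before joining, per the guidance here; assumes engine is an initialized RtcEngine, and the key string is a placeholder that must be distributed to all users out of band:

Future<void> enableBuiltInEncryption(RtcEngine engine) async {
  await engine.enableEncryption(
    enabled: true,
    config: const EncryptionConfig(
      encryptionMode: EncryptionMode.aes128Gcm, // all users must match this mode
      encryptionKey: 'replace-with-a-shared-secret', // placeholder key
    ),
  );
}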
To enable the built-in encryption, call this method before the user joins the channel again.If you enable the built-in encryption, you cannot use the Media Push function. /// /// * [enabled] Whether to enable built-in encryption:true: Enable the built-in encryption.false: Disable the built-in encryption. /// * [config] Built-in encryption configurations. See EncryptionConfig . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: An invalid parameter is used. Set the parameter with a valid value.-4: The built-in encryption mode is incorrect or the SDK fails to load the external encryption library. Check the enumeration or reload the external encryption library.-7: The SDK is not initialized. Initialize the RtcEngine instance before calling this method. Future enableEncryption( {required bool enabled, required EncryptionConfig config}); @@ -4255,20 +4915,29 @@ abstract class RtcEngine { /// A failed method call triggers the onStreamMessageError callback on the remote client.Ensure that you call createDataStream to create a data channel before calling this method.In live streaming scenarios, this method only applies to hosts. /// /// * [streamId] The data stream ID. You can get the data stream ID by calling createDataStream. - /// * [data] The data to be sent. + /// * [data] The message to be sent. /// * [length] The length of the data. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future sendStreamMessage( {required int streamId, required Uint8List data, required int length}); /// Adds a watermark image to the local video. - /// This method adds a PNG watermark image to the local video in the live streaming. Once the watermark image is added, all the audience in the channel (CDN audience included), and the capturing device can see and capture it. Agora supports adding only one watermark image onto the local video, and the newly watermark image replaces the previous one.The watermark coordinates are dependent on the settings in the setVideoEncoderConfiguration method:If the orientation mode of the encoding video ( OrientationMode ) is fixed landscape mode or the adaptive landscape mode, the watermark uses the landscape orientation.If the orientation mode of the encoding video (OrientationMode) is fixed portrait mode or the adaptive portrait mode, the watermark uses the portrait orientation.When setting the watermark position, the region must be less than the dimensions set in the setVideoEncoderConfiguration method; otherwise, the watermark image will be cropped.Ensure that call this method after enableVideo .If you only want to add a watermark to the media push, you can call this method or the setLiveTranscoding method.This method supports adding a watermark image in the PNG file format only. 
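A sketch of the data-stream flow referenced above (createDataStream, then sendStreamMessage); assumes an in-channel RtcEngine named engine, and the DataStreamConfig shape follows the 6.x API surface:

import 'dart:convert';
import 'dart:typed_data';

Future<void> sendTextOverDataStream(RtcEngine engine) async {
  // Create the stream once per channel session and reuse the returned ID.
  final streamId = await engine.createDataStream(
    const DataStreamConfig(syncWithAudio: false, ordered: true),
  );
  final bytes = Uint8List.fromList(utf8.encode('hello'));
  await engine.sendStreamMessage(
      streamId: streamId, data: bytes, length: bytes.length);
}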
Supported pixel formats of the PNG image are RGBA, RGB, Palette, Gray, and Alpha_gray.If the dimensions of the PNG image differ from your settings in this method, the image will be cropped or zoomed to conform to your settings.If you have enabled the local video preview by calling the startPreview method, you can use the visibleInPreview member to set whether or not the watermark is visible in the preview.If you have enabled the mirror mode for the local video, the watermark on the local video is also mirrored. To avoid mirroring the watermark, Agora recommends that you do not use the mirror and watermark functions for the local video at the same time. You can implement the watermark function in your application layer. + /// This method adds a PNG watermark image to the local video in the live streaming. Once the watermark image is added, all the audience in the channel (CDN audience included), and the capturing device can see and capture it. The Agora SDK supports adding only one watermark image onto a local video or CDN live stream. The newly added watermark image replaces the previous one.The watermark coordinates are dependent on the settings in the setVideoEncoderConfiguration method:If the orientation mode of the encoding video ( OrientationMode ) is fixed landscape mode or the adaptive landscape mode, the watermark uses the landscape orientation.If the orientation mode of the encoding video (OrientationMode) is fixed portrait mode or the adaptive portrait mode, the watermark uses the portrait orientation.When setting the watermark position, the region must be less than the dimensions set in the setVideoEncoderConfiguration method; otherwise, the watermark image will be cropped.Ensure that you call this method after enableVideo .If you only want to add a watermark to the media push, you can call this method or the setLiveTranscoding method.This method supports adding a watermark image in the PNG file format only. Supported pixel formats of the PNG image are RGBA, RGB, Palette, Gray, and Alpha_gray.If the dimensions of the PNG image differ from your settings in this method, the image will be cropped or zoomed to conform to your settings.If you have enabled the local video preview by calling the startPreview method, you can use the visibleInPreview member to set whether or not the watermark is visible in the preview.If you have enabled the mirror mode for the local video, the watermark on the local video is also mirrored. To avoid mirroring the watermark, Agora recommends that you do not use the mirror and watermark functions for the local video at the same time. You can implement the watermark function in your application layer. /// /// * [watermarkUrl] The local file path of the watermark image to be added. This method supports adding a watermark image from the local absolute or relative file path. - /// * [options] The options of the watermark image to be added. + /// * [options] The options of the watermark image to be added. See WatermarkOptions . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future addVideoWatermark( {required String watermarkUrl, required WatermarkOptions options}); /// Removes the watermark image from the video stream.
+ /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. Future clearVideoWatermarks(); /// @nodoc @@ -4278,9 +4947,12 @@ abstract class RtcEngine { Future resumeAudio(); /// Enables interoperability with the Agora Web SDK (applicable only in the live streaming scenarios). - /// Deprecated:The SDK automatically enables interoperability with the Web SDK, so you no longer need to call this method.This method enables or disables interoperability with the Agora Web SDK. If the channel has Web SDK users, ensure that you call this method, or the video of the Native user will be a black screen for the Web user.This method is only applicable in live streaming scenarios, and interoperability is enabled by default in communication scenarios. + /// Deprecated:The SDK automatically enables interoperability with the Web SDK, so you no longer need to call this method.You can call this method to enable or disable interoperability with the Agora Web SDK. If a channel has Web SDK users, ensure that you call this method, or the video of the Native user will be a black screen for the Web user.This method is only applicable in live streaming scenarios, and interoperability is enabled by default in communication scenarios. /// - /// * [enabled] Whether to enable interoperability with the Agora Web SDK.true: Enable interoperability.false: (Default) Disable interoperability. + /// * [enabled] Whether to enable interoperability:true: Enable interoperability.false: (Default) Disable interoperability. + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. Future enableWebSdkInteroperability(bool enabled); /// Reports customized messages. @@ -4294,14 +4966,23 @@ abstract class RtcEngine { /// Registers the metadata observer. /// You need to implement the MetadataObserver class and specify the metadata type in this method. This method enables you to add synchronized metadata in the video stream for more diversified - /// live interactive streaming, such as sending shopping links, digital coupons, and online quizzes.Call this method before joinChannel [2/2]. + /// live interactive streaming, such as sending shopping links, digital coupons, and online quizzes.Call this method before joinChannel. /// /// * [observer] The metadata observer. See MetadataObserver . /// * [type] The metadata type. The SDK currently only supports videoMetadata. + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. void registerMediaMetadataObserver( {required MetadataObserver observer, required MetadataType type}); /// Unregisters the specified metadata observer. + /// + /// * [observer] The metadata observer. See MetadataObserver . + /// * [type] The metadata type. The SDK currently only supports videoMetadata. + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure.
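+ ///
+ /// A minimal usage sketch (assuming an initialized RtcEngine named engine; pass the same observer instance to register and unregister):
+ /// ```dart
+ /// final observer = MetadataObserver(
+ ///   onMetadataReceived: (metadata) {
+ ///     // Handle metadata received from remote senders here.
+ ///   },
+ /// );
+ /// engine.registerMediaMetadataObserver(
+ ///     observer: observer, type: MetadataType.videoMetadata);
+ /// // When the observer is no longer needed:
+ /// engine.unregisterMediaMetadataObserver(
+ ///     observer: observer, type: MetadataType.videoMetadata);
+ /// ```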
void unregisterMediaMetadataObserver( {required MetadataObserver observer, required MetadataType type}); @@ -4321,26 +5002,50 @@ abstract class RtcEngine { required int userId, required String location}); + /// Sets whether to enable the AI noise reduction function and sets the noise reduction mode. + /// You can call this method to enable the AI noise reduction function. Once enabled, the SDK automatically detects and reduces stationary and non-stationary noise from your audio on the premise of ensuring the quality of human voice. Stationary noise refers to a noise signal with constant average statistical properties and negligibly small fluctuations of level within the period of observation. Common sources of stationary noises are:Television;Air conditioner;Machinery, etc.Non-stationary noise refers to a noise signal with huge fluctuations of level within the period of observation. Common sources of non-stationary noises are:Thunder;Explosion;Cracking, etc. + /// + /// * [enabled] Whether to enable the AI noise reduction function:true: Enable the AI noise reduction.false: (Default) Disable the AI noise reduction. + /// * [mode] The AI noise reduction mode. See AudioAinsMode . + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. + Future setAINSMode( + {required bool enabled, required AudioAinsMode mode}); + /// Registers a user account. - /// Once registered, the user account can be used to identify the local user when the user joins the channel. After the registration is successful, the user account can identify the identity of the local user, and the user can use it to join the channel.After the user successfully registers a user account, the SDK triggers the onLocalUserRegistered callback on the local client, reporting the user ID and user account of the local user.This method is optional. To join a channel with a user account, you can choose either of the following ways:Call registerLocalUserAccount to create a user account, and then call joinChannelWithUserAccount to join the channel.Call the joinChannelWithUserAccount method to join the channel.The difference between the two ways is that the time elapsed between calling the registerLocalUserAccount method and joining the channel is shorter than directly calling joinChannelWithUserAccount.Ensure that you set the userAccount parameter; otherwise, this method does not take effect.Ensure that the userAccount is unique in the channel.To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. + /// Once registered, the user account can be used to identify the local user when the user joins the channel. After the registration is successful, the user account can identify the identity of the local user, and the user can use it to join the channel.After the user successfully registers a user account, the SDK triggers the onLocalUserRegistered callback on the local client, reporting the user ID and account of the local user.This method is optional.
To join a channel with a user account, you can choose either of the following ways:Call registerLocalUserAccount to create a user account, and then call joinChannelWithUserAccount to join the channel.Call the joinChannelWithUserAccount method to join the channel.The difference between the two ways is that the time elapsed between calling the registerLocalUserAccount method and joining the channel is shorter than directly calling joinChannelWithUserAccount.Ensure that you set the userAccount parameter; otherwise, this method does not take effect.Ensure that the userAccount is unique in the channel.To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. /// /// * [appId] The App ID of your project on Agora Console. - /// * [userAccount] The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as NULL. Supported characters are (89 in total):The 26 lowercase English letters: a to z.The 26 uppercase English letters: A to Z.All numeric characters: 0 to 9.Space"!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "= ", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," + /// * [userAccount] The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as NULL. Supported characters are as follows (89 in total):The 26 lowercase English letters: a to z.The 26 uppercase English letters: A to Z.All numeric characters: 0 to 9.Space"!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "= ", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. Future registerLocalUserAccount( {required String appId, required String userAccount}); - /// Joins a channel with a User Account and Token. - /// This method allows a user to join the channel with the user account and a token. After the user successfully joins the channel, the SDK triggers the following callbacks:The local client: onLocalUserRegistered , onJoinChannelSuccess and onConnectionStateChanged callbacks.The remote client: onUserJoined and onUserInfoUpdated , if the user joining the channel is in the communication profile or is a host in the live streaming profile.Once a user joins the channel, the user subscribes to the audio and video streams of all the other users in the channel by default, giving rise to usage and billing calculation. To stop subscribing to a specified stream or all remote streams, call the corresponding mute methods.To ensure smooth communication, use the same parameter type to identify the user.
For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. + /// Joins the channel with a user account, and configures whether to automatically subscribe to audio or video streams after joining the channel. + /// This method allows a user to join the channel with the user account. After the user successfully joins the channel, the SDK triggers the following callbacks:The local client: onLocalUserRegistered , onJoinChannelSuccess and onConnectionStateChanged callbacks.The remote client: The onUserJoined callback, if the user is in the COMMUNICATION profile, and the onUserInfoUpdated callback if the user is a host in the LIVE_BROADCASTING profile.Once a user joins the channel, the user subscribes to the audio and video streams of all the other users in the channel by default, giving rise to usage and billing calculation. To stop subscribing to a specified stream or all remote streams, call the corresponding mute methods.To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. /// - /// * [userAccount] The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as NULL. Supported characters are (89 in total): - /// The 26 lowercase English letters: a to z. - /// The 26 uppercase English letters: A to Z. + /// * [token] The token generated on your server for authentication. + /// * [channelId] The channel name. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters: + /// All lowercase English letters: a to z. + /// All uppercase English letters: A to Z. /// All numeric characters: 0 to 9. /// Space - /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "= ", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," - /// * [token] The token generated on your server for authentication. - /// * [channelId] The channel name. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters:All lowercase English letters: a to z.All uppercase English letters: A to Z.All numeric characters: 0 to 9.Space"!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "= ", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," - /// * [options] The channel media options. See ChannelMediaOptions. 
+ /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "= ", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," + /// * [userAccount] The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as NULL. Supported characters are (89 in total):The 26 lowercase English letters: a to z.The 26 uppercase English letters: A to Z.All numeric characters: 0 to 9.Space"!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "= ", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," + /// * [options] The channel media options. See ChannelMediaOptions . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. + /// -2: The parameter is invalid. For example, the token is invalid, the uid parameter is not set to an integer, or the value of a member in ChannelMediaOptions is invalid. You need to pass in a valid parameter and join the channel again. + /// -3: Failes to initialize the RtcEngine object. You need to reinitialize the RtcEngine object. + /// -7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method. + /// -8: The internal state of the RtcEngine object is wrong. The typical cause is that you call this method to join the channel without calling startEchoTest to stop the test after calling stopEchoTest to start a call loop test. You need to call stopEchoTest before calling this method. + /// -17: The request to join the channel is rejected. The typical cause is that the user is in the channel. Agora recommends that you use the onConnectionStateChanged callback to determine whether the user exists in the channel. Do not call this method to join the channel unless you receive the connectionStateDisconnected(1) state. + /// -102: The channel name is invalid. You need to pass in a valid channelname in channelId to rejoin the channel. + /// -121: The user ID is invalid. You need to pass in a valid user ID in uid to rejoin the channel. Future joinChannelWithUserAccount( {required String token, required String channelId, @@ -4348,76 +5053,129 @@ abstract class RtcEngine { ChannelMediaOptions? options}); /// Joins the channel with a user account, and configures whether to automatically subscribe to audio or video streams after joining the channel. - /// To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type.Once a user joins the channel, the user subscribes to the audio and video streams of all the other users in the channel by default, giving rise to usage and billing calculation. To stop subscribing to a specified stream or all remote streams, call the corresponding mute methods.This method allows a user to join the channel with the user account. 
After the user successfully joins the channel, the SDK triggers the following callbacks:The local client: onLocalUserRegistered , onJoinChannelSuccess and onConnectionStateChanged callbacks.The remote client: The onUserJoined callback if the user is in the COMMUNICATION profile, and the onUserInfoUpdated callback if the user is a host in the LIVE_BROADCASTING profile. + /// To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. Once a user joins the channel, the user subscribes to the audio and video streams of all the other users in the channel by default, giving rise to usage and billing calculation. To stop subscribing to a specified stream or all remote streams, call the corresponding mute methods.This method allows a user to join the channel with the user account. After the user successfully joins the channel, the SDK triggers the following callbacks:The local client: onLocalUserRegistered , onJoinChannelSuccess and onConnectionStateChanged callbacks.The remote client: The onUserJoined callback, if the user is in the COMMUNICATION profile, and the onUserInfoUpdated callback if the user is a host in the LIVE_BROADCASTING profile. /// - /// * [userAccount] The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as NULL. Supported characters are (89 in total):The 26 lowercase English letters: a to z.The 26 uppercase English letters: A to Z.All numeric characters: 0 to 9.Space"!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "= ", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," /// * [token] The token generated on your server for authentication. - /// * [channelId] The channel name. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters:All lowercase English letters: a to z.All uppercase English letters: A to Z.All numeric characters: 0 to 9.Space"!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "= ", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," + /// * [channelId] The channel name. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters:All lowercase English letters: a to z.All uppercase English letters: A to Z.All numeric characters: 0 to 9.Space"!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "= ", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," + /// * [userAccount] The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. 
You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as NULL. Supported characters are (89 in total):The 26 lowercase English letters: a to z.The 26 uppercase English letters: A to Z.All numeric characters: 0 to 9.Space"!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "= ", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," /// * [options] The channel media options. See ChannelMediaOptions . + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. Future joinChannelWithUserAccountEx( {required String token, required String channelId, required String userAccount, required ChannelMediaOptions options}); - /// Gets the user information by passing in the User Account. - /// After a remote user joins the channel, the SDK gets the UID and User Account of the remote user, caches them in a mapping table object, and triggers the onUserInfoUpdated callback on the local client. After receiving the callback, you can call this method to get the user account of the remote user from the UserInfo object by passing in the user ID. + /// Gets the user information by passing in the user account. + /// After a remote user joins the channel, the SDK gets the user ID and account of the remote user, caches them in a mapping table object, and triggers the onUserInfoUpdated callback on the local client. After receiving the callback, you can call this method to get the user ID of the remote user from the UserInfo object by passing in the user account. /// /// * [userAccount] The user account. /// /// Returns - /// The UserInfo object that identifies the user information.A pointer to the UserInfo instance, if the method call succeeds.If the call fails, returns NULL. + /// The UserInfo instance, if the method call succeeds. + /// If the call fails, returns NULL. Future getUserInfoByUserAccount(String userAccount); /// Gets the user information by passing in the user ID. - /// After a remote user joins the channel, the SDK gets the UID and User Account of the remote user, caches them in a mapping table object, and triggers the onUserInfoUpdated callback on the local client. After receiving the callback, you can call this method to get the user account of the remote user from the UserInfo object by passing in the user ID. + /// After a remote user joins the channel, the SDK gets the user ID and account of the remote user, caches them in a mapping table object, and triggers the onUserInfoUpdated callback on the local client. After receiving the callback, you can call this method to get the user account of the remote user from the UserInfo object by passing in the user ID. /// /// * [uid] The user ID. /// /// Returns - /// The UserInfo object that identifies the user information.A pointer to the UserInfo instance, if the method call succeeds.If the call fails, returns NULL. + /// The UserInfo instance, if the method call succeeds.If the call fails, returns NULL. Future getUserInfoByUid(int uid); + /// Starts relaying media streams across channels or updates channels for media relay. + /// The first successful call to this method starts relaying media streams from the source channel to the destination channels.
To relay the media stream to other channels, or exit one of the current media relays, you can call this method again to update the destination channels.After a successful method call, the SDK triggers the onChannelMediaRelayStateChanged callback, and this callback returns the state of the media stream relay. Common states are as follows:If the onChannelMediaRelayStateChanged callback returns relayStateRunning (2) and relayOk (0), it means that the SDK starts relaying media streams from the source channel to the destination channel.If the onChannelMediaRelayStateChanged callback returns relayStateFailure (3), an exception occurs during the media stream relay.Call this method after joining the channel.This method takes effect only when you are a host in a live streaming channel.The relaying media streams across channels function needs to be enabled by contacting technical support.Agora does not support string user accounts in this API. + /// + /// * [configuration] The configuration of the media stream relay. See ChannelMediaRelayConfiguration . + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure.-1: A general error occurs (no specified reason).-2: The parameter is invalid.-7: The method call was rejected. It may be because the SDK has not been initialized successfully, or the user role is not a host.-8: Internal state error. Probably because the user is not a broadcaster. + Future startOrUpdateChannelMediaRelay( + ChannelMediaRelayConfiguration configuration); + /// Starts relaying media streams across channels. This method can be used to implement scenarios such as co-host across channels. - /// After a successful method call, the SDK triggers the onChannelMediaRelayStateChanged and onChannelMediaRelayEvent callbacks, and these callbacks return the state and events of the media stream relay.If the onChannelMediaRelayStateChanged callback returns relayStateRunning (2) and relayOk (0), and the onChannelMediaRelayEvent callback returns relayEventPacketSentToDestChannel (4), it means that the SDK starts relaying media streams between the source channel and the destination channel.If the onChannelMediaRelayStateChanged callback returnsrelayStateFailure (3), an exception occurs during the media stream relay.Call this method after joining the channel.This method takes effect only when you are a host in a live streaming channel.After a successful method call, if you want to call this method again, ensure that you call the stopChannelMediaRelay method to quit the current relay.The relaying media streams across channels function needs to be enabled.We do not support string user accounts in this API. + /// Deprecated:This method is deprecated.
Use startOrUpdateChannelMediaRelay instead.After a successful method call, the SDK triggers the onChannelMediaRelayStateChanged and onChannelMediaRelayEvent callbacks, and these callbacks return the state and events of the media stream relay.If the onChannelMediaRelayStateChanged callback returns relayStateRunning (2) and relayOk (0), and the onChannelMediaRelayEvent callback returns relayEventPacketSentToDestChannel (4), it means that the SDK starts relaying media streams between the source channel and the target channel.If the onChannelMediaRelayStateChanged callback returns relayStateFailure (3), an exception occurs during the media stream relay.Call this method after joining the channel.This method takes effect only when you are a host in a live streaming channel.After a successful method call, if you want to call this method again, ensure that you call the stopChannelMediaRelay method to quit the current relay.The relaying media streams across channels function needs to be enabled by contacting technical support.Agora does not support string user accounts in this API. /// /// * [configuration] The configuration of the media stream relay. See ChannelMediaRelayConfiguration . + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure.-1: A general error occurs (no specified reason).-2: The parameter is invalid.-7: The method call was rejected. It may be because the SDK has not been initialized successfully, or the user role is not a host.-8: Internal state error. Probably because the user is not a broadcaster. Future startChannelMediaRelay( ChannelMediaRelayConfiguration configuration); /// Updates the channels for media stream relay. - /// After the media relay starts, if you want to relay the media stream to more channels, or leave the current relay channel, you can call this method.After a successful method call, the SDK triggers the onChannelMediaRelayEvent callback with the relayEventPacketUpdateDestChannel (7) state code.Call the method after successfully calling the startChannelMediaRelay method and receiving onChannelMediaRelayStateChanged (relayStateRunning, relayOk); otherwise, the method call fails. + /// Deprecated:This method is deprecated. Use startOrUpdateChannelMediaRelay instead.After the media relay starts, if you want to relay the media stream to more channels, or leave the current relay channel, you can call this method.After a successful method call, the SDK triggers the onChannelMediaRelayEvent callback with the relayEventPacketUpdateDestChannel (7) state code.Call the method after successfully calling the startChannelMediaRelay method and receiving onChannelMediaRelayStateChanged (relayStateRunning, relayOk); otherwise, the method call fails. + /// + /// * [configuration] The configuration of the media stream relay. See ChannelMediaRelayConfiguration . /// - /// * [configuration] The configuration of the media stream relay. + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. Future updateChannelMediaRelay( ChannelMediaRelayConfiguration configuration); - /// Stops the media stream relay. Once the relay stops, the host quits all the destination channels. + /// Stops the media stream relay. Once the relay stops, the host quits all the target channels.
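+ ///
+ /// A relay lifecycle sketch (assuming an initialized RtcEngine named engine; channel names are placeholders and tokens are omitted for brevity):
+ /// ```dart
+ /// await engine.startOrUpdateChannelMediaRelay(
+ ///     const ChannelMediaRelayConfiguration(
+ ///   srcInfo: ChannelMediaInfo(channelName: 'src-channel', uid: 0),
+ ///   destInfos: [ChannelMediaInfo(channelName: 'dest-channel', uid: 0)],
+ ///   destCount: 1,
+ /// ));
+ /// // When the relay is no longer needed:
+ /// await engine.stopChannelMediaRelay();
+ /// ```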
/// After a successful method call, the SDK triggers the onChannelMediaRelayStateChanged callback. If the callback reports relayStateIdle (0) and relayOk (0), the host successfully stops the relay.If the method call fails, the SDK triggers the onChannelMediaRelayStateChanged callback with the relayErrorServerNoResponse (2) or relayErrorServerConnectionLost (8) status code. You can call the leaveChannel method to leave the channel, and the media stream relay automatically stops. + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. Future stopChannelMediaRelay(); - /// Pauses the media stream relay to all destination channels. - /// After the cross-channel media stream relay starts, you can call this method to pause relaying media streams to all destination channels; after the pause, if you want to resume the relay, call resumeAllChannelMediaRelay .After a successful method call, the SDK triggers the onChannelMediaRelayEvent callback to report whether the media stream relay is successfully paused.Call this method after the startChannelMediaRelay method. + /// Pauses the media stream relay to all target channels. + /// After the cross-channel media stream relay starts, you can call this method to pause relaying media streams to all target channels; after the pause, if you want to resume the relay, call resumeAllChannelMediaRelay .Call this method after startOrUpdateChannelMediaRelay . + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. Future pauseAllChannelMediaRelay(); - /// @nodoc + /// Resumes the media stream relay to all target channels. + /// After calling the pauseAllChannelMediaRelay method, you can call this method to resume relaying media streams to all target channels.Call this method after pauseAllChannelMediaRelay . + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. Future resumeAllChannelMediaRelay(); - /// @nodoc + /// Sets the audio profile of the audio streams directly pushed to the CDN by the host. + /// When you set the publishMicrophoneTrack or publishCustomAudioTrack in the DirectCdnStreamingMediaOptions as true to capture audio, you can call this method to set the audio profile. + /// + /// * [profile] The audio profile, including the sampling rate, bitrate, encoding mode, and the number of channels. See AudioProfileType . + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. Future setDirectCdnStreamingAudioConfiguration( AudioProfileType profile); - /// @nodoc + /// Sets the video profile of the media streams directly pushed to the CDN by the host. + /// This method only affects video streams captured by cameras or screens, or from custom video capture sources.
That is, when you set publishCameraTrack or publishCustomVideoTrack in DirectCdnStreamingMediaOptions as true to capture videos, you can call this method to set the video profiles.If your local camera does not support the video resolution you set, the SDK automatically adjusts the video resolution to a value that is closest to your settings for capture, encoding or streaming, with the same aspect ratio as the resolution you set. You can get the actual resolution of the video streams through the onDirectCdnStreamingStats callback. + /// + /// * [config] Video profile. See VideoEncoderConfiguration .During CDN live streaming, Agora only supports setting OrientationMode as orientationModeFixedLandscape or orientationModeFixedPortrait. + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. Future setDirectCdnStreamingVideoConfiguration( VideoEncoderConfiguration config); - /// @nodoc + /// Starts pushing media streams to the CDN directly. + /// Agora does not support pushing media streams to one URL repeatedly.Media options: Agora does not support setting the value of publishCameraTrack and publishCustomVideoTrack as true, or the value of publishMicrophoneTrack and publishCustomAudioTrack as true at the same time. When choosing media setting options ( DirectCdnStreamingMediaOptions ), you can refer to the following examples:If you want to push audio and video streams published by the host to the CDN, the media setting options should be set as follows:publishCustomAudioTrack is set as true and call the pushAudioFrame methodpublishCustomVideoTrack is set as true and call the pushVideoFrame methodpublishCameraTrack is set as false (the default value)publishMicrophoneTrack is set as false (the default value)As of v4.2.0, the Agora SDK supports audio-only live streaming. You can set publishCustomAudioTrack or publishMicrophoneTrack in DirectCdnStreamingMediaOptions as true and call pushAudioFrame to push audio streams. Agora only supports pushing one audio and video stream or one audio stream to the CDN. + /// + /// * [eventHandler] See onDirectCdnStreamingStateChanged and onDirectCdnStreamingStats . + /// * [publishUrl] The CDN live streaming URL. + /// * [options] The media setting options for the host. See DirectCdnStreamingMediaOptions . + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. Future startDirectCdnStreaming( {required DirectCdnStreamingEventHandler eventHandler, required String publishUrl, required DirectCdnStreamingMediaOptions options}); - /// @nodoc + /// Stops pushing media streams to the CDN directly. + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. Future stopDirectCdnStreaming(); /// @nodoc @@ -4430,6 +5188,9 @@ abstract class RtcEngine { /// * [sound1] The absolute path or URL address (including the filename extensions) of the file for the downbeat. For example, C:\music\audio.mp4. For the audio file formats supported by this method, see What formats of audio files does the Agora RTC SDK support. /// * [sound2] The absolute path or URL address (including the filename extensions) of the file for the upbeats.
For example, C:\music\audio.mp4. For the audio file formats supported by this method, see What formats of audio files does the Agora RTC SDK support. /// * [config] The metronome configuration. See AgoraRhythmPlayerConfig . + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure.-22: Cannot find audio effect files. Please set the correct paths for sound1 and sound2. Future startRhythmPlayer( {required String sound1, required String sound2, @@ -4437,15 +5198,18 @@ abstract class RtcEngine { /// Disables the virtual metronome. /// After calling startRhythmPlayer , you can call this method to disable the virtual metronome.This method is for Android and iOS only. + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. Future stopRhythmPlayer(); /// Configures the virtual metronome. - /// This method is for Android and iOS only. - /// After enabling the virtual metronome, the SDK plays the specified audio effect file from the beginning, and controls the playback duration of each file according to beatsPerMinute you set in AgoraRhythmPlayerConfig . For example, if you set beatsPerMinute as 60, the SDK plays one beat every second. If the file duration exceeds the beat duration, the SDK only plays the audio within the beat duration. - /// By default, the sound of the virtual metronome is published in the channel. If you do not want the sound to be heard by the remote users, you can set publishRhythmPlayerTrack in ChannelMediaOptions as false. - /// After calling startRhythmPlayer , you can call this method to reconfigure the virtual metronome.After successfully calling this method, the SDK triggers the onRhythmPlayerStateChanged callback locally to report the status of the virtual metronome. + /// This method is for Android and iOS only.After enabling the virtual metronome, the SDK plays the specified audio effect file from the beginning, and controls the playback duration of each file according to beatsPerMinute you set in AgoraRhythmPlayerConfig . For example, if you set beatsPerMinute as 60, the SDK plays one beat every second. If the file duration exceeds the beat duration, the SDK only plays the audio within the beat duration.By default, the sound of the virtual metronome is published in the channel. If you do not want the sound to be heard by the remote users, you can set publishRhythmPlayerTrack in ChannelMediaOptions as false.After calling startRhythmPlayer , you can call this method to reconfigure the virtual metronome.After successfully calling this method, the SDK triggers the onRhythmPlayerStateChanged callback locally to report the status of the virtual metronome. /// /// * [config] The metronome configuration. See AgoraRhythmPlayerConfig . + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. Future configRhythmPlayer(AgoraRhythmPlayerConfig config); /// Takes a snapshot of a video stream. @@ -4453,42 +5217,56 @@ abstract class RtcEngine { /// /// * [uid] The user ID. Set uid as 0 if you want to take a snapshot of the local user's video. /// * [filePath] The local path (including filename extensions) of the snapshot.
For example:Windows: C:\Users\\AppData\Local\Agora\\example.jpgiOS: /App Sandbox/Library/Caches/example.jpgmacOS: ~/Library/Logs/example.jpgAndroid: /storage/emulated/0/Android/data//files/example.jpgEnsure that the path you specify exists and is writable. + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. Future takeSnapshot({required int uid, required String filePath}); /// Enables or disables video screenshot and upload. - /// When video screenshot and upload function is enabled, the SDK takes screenshots and upload videos sent by local users based on the type and frequency of the module you set in ContentInspectConfig . After video screenshot and upload, the Agora server sends the callback notification to your app server in HTTPS requests and sends all screenshots to the third-party cloud storage service.Before calling this method, ensure that you contact to enbale Agora video screenshot and upload service. This method relies on the video content moderation library libagora_ci_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. + /// When the video screenshot and upload function is enabled, the SDK takes screenshots and uploads videos sent by local users based on the type and frequency of the module you set in ContentInspectConfig . After video screenshot and upload, the Agora server sends the callback notification to your app server in HTTPS requests and sends all screenshots to the third-party cloud storage service.Before calling this method, ensure that the video screenshot upload service has been activated. This method relies on the video screenshot and upload dynamic library libagora_content_inspect_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. /// /// * [enabled] Whether to enable video screenshot and upload:true: Enables video screenshot and upload. false: Disables video screenshot and upload. /// * [config] Configuration of video screenshot and upload. See ContentInspectConfig . + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. Future enableContentInspect( {required bool enabled, required ContentInspectConfig config}); /// Adjusts the volume of the custom external audio source when it is published in the channel. - /// Ensure you have called the setExternalAudioSource method to create an external audio track before calling this method.If you want to change the volume of the audio to be published, you need to call this method again. + /// Ensure you have called the createCustomAudioTrack method to create an external audio track before calling this method.If you want to change the volume of the audio to be published, you need to call this method again. /// - /// * [sourceId] The ID of external audio source. If you want to publish a custom external audio source, set this parameter to the ID of the corresponding custom audio track you want to publish. + /// * [trackId] The audio track ID. Set this parameter to the custom audio track ID returned in createCustomAudioTrack. /// * [volume] The volume of the audio source. The value can range from 0 to 100. 0 means mute; 100 means the original volume.
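+ ///
+ /// A minimal sketch (assuming an initialized RtcEngine named engine and a trackId previously returned by createCustomAudioTrack):
+ /// ```dart
+ /// // Publish the custom audio track at half of its original volume.
+ /// await engine.adjustCustomAudioPublishVolume(trackId: trackId, volume: 50);
+ /// ```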
+ /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. Future adjustCustomAudioPublishVolume( - {required int sourceId, required int volume}); + {required int trackId, required int volume}); /// @nodoc Future adjustCustomAudioPlayoutVolume( - {required int sourceId, required int volume}); + {required int trackId, required int volume}); - /// Sets the Agora cloud proxy service. - /// When users' network access is restricted by a firewall, configure the firewall to allow specific IP addresses and ports provided by Agora; then, call this method to enable the cloud proxy and set the cloud proxy type with the proxyType parameter.After successfully connecting to the cloud proxy, the SDK triggers the onConnectionStateChanged (connectionStateConnecting, connectionChangedSettingProxyServer) callback.To disable the cloud proxy that has been set, call the setCloudProxy (noneProxy).To change the cloud proxy type that has been set, call the setCloudProxy (noneProxy) first, and then call the setCloudProxy to set the proxyType you want.Agora recommends that you call this method before joining the channel or after leaving the channel.When a user is behind a firewall and uses the Force UDP cloud proxy, the services for the Media Push and cohosting across channels are not available.When you use the Force TCP cloud proxy, note that an error would occur when calling the startAudioMixing method to play online music files in the HTTP protocol. The services for the Media Push and cohosting across channels use the cloud proxy with the TCP protocol. + /// Sets up the cloud proxy service. + /// When users' network access is restricted by a firewall, configure the firewall to allow specific IP addresses and ports provided by Agora; then, call this method to enable the cloud proxy and set the cloud proxy type with the proxyType parameter.After successfully connecting to the cloud proxy, the SDK triggers the onConnectionStateChanged (connectionStateConnecting, connectionChangedSettingProxyServer) callback.To disable the cloud proxy that has been set, call the setCloudProxy (noneProxy).To change the cloud proxy type that has been set, call the setCloudProxy (noneProxy) first, and then call the setCloudProxy to set the proxyType you want.Agora recommends that you call this method after joining a channel.When a user is behind a firewall and uses the Force UDP cloud proxy, the services for Media Push and cohosting across channels are not available.When you use the Force TCP cloud proxy, note that an error would occur when calling the startAudioMixing method to play online music files in the HTTP protocol. The services for Media Push and cohosting across channels use the cloud proxy with the TCP protocol. /// /// * [proxyType] The type of the cloud proxy. See CloudProxyType .This parameter is mandatory. The SDK reports an error if you do not pass in a value. + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly. < 0: Failure.-2: The parameter is invalid.-7: The SDK is not initialized. Future setCloudProxy(CloudProxyType proxyType); /// @nodoc Future setLocalAccessPoint(LocalAccessPointConfiguration config); /// Sets audio advanced options.
- /// If you have advanced audio processing requirements, such as capturing and sending stereo audio, you can call this method to set advanced audio options.This method is for Android and iOS only.Call this method after calling joinChannel [2/2] , enableAudio and enableLocalAudio . + /// If you have advanced audio processing requirements, such as capturing and sending stereo audio, you can call this method to set advanced audio options.Call this method after calling joinChannel , enableAudio and enableLocalAudio . /// - /// * [options] The advanced options for audio. See AdvancedAudioOptions. + /// * [options] The advanced options for audio. See AdvancedAudioOptions . /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. Future setAdvancedAudioOptions( {required AdvancedAudioOptions options, int sourceType = 0}); @@ -4500,10 +5278,17 @@ abstract class RtcEngine { /// /// * [enable] Whether to replace the current video feeds with custom images:true: Replace the current video feeds with custom images.false: (Default) Do not replace the current video feeds with custom images. /// * [options] Image configurations. See ImageTrackOptions . + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. Future enableVideoImageSource( {required bool enable, required ImageTrackOptions options}); - /// @nodoc + /// Gets the current Monotonic Time of the SDK. + /// Monotonic Time refers to a monotonically increasing time series whose value increases over time. The unit is milliseconds.In custom video capture and custom audio capture scenarios, in order to ensure audio and video synchronization, Agora recommends that you call this method to obtain the current Monotonic Time of the SDK, and then pass this value into the timestamp parameter in the captured video frame ( VideoFrame ) and audio frame ( AudioFrame ). + /// + /// Returns + /// ≥0: The method call is successful, and returns the current Monotonic Time of the SDK (in milliseconds).< 0: Failure. Future getCurrentMonotonicTimeInMs(); /// @nodoc @@ -4516,6 +5301,35 @@ abstract class RtcEngine { /// ≥ 0: The method call is successful, and the local network connection type is returned.0: The SDK disconnects from the network.1: The network type is LAN.2: The network type is Wi-Fi (including hotspots).3: The network type is mobile 2G.4: The network type is mobile 3G.5: The network type is mobile 4G.6: The network type is mobile 5G.< 0: The method call failed with an error code.-1: The network type is unknown. Future getNetworkType(); + /// Provides technical preview functionalities or special customizations by configuring the SDK with JSON options. + /// + /// * [parameters] The parameters to be set, in a JSON string. + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. + Future setParameters(String parameters); + + /// Enables tracing the video frame rendering process.
+ /// The SDK starts tracing the rendering status of the video frames in the channel from the moment this method is successfully called and reports information about the event through the onVideoRenderingTracingResult callback.By default, the SDK starts tracing the video rendering event automatically when the local user successfully joins the channel. You can call this method at an appropriate time according to the actual application scenario to customize the tracing process.After the local user leaves the current channel, the SDK automatically resets the time point to the next time when the user successfully joins the channel. + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure.-7: The method is called before RtcEngine is initialized. + Future startMediaRenderingTracing(); + + /// Enables audio and video frame instant rendering. + /// After successfully calling this method, the SDK enables the instant frame rendering mode, which can speed up the first frame rendering speed after the user joins the channel.Once the instant rendering function is enabled, it can only be canceled by calling the release method to destroy the RtcEngine object.In this mode, the SDK uses Agora's custom encryption algorithm to shorten the time required to establish transmission links, and the security is reduced compared to the standard DTLS (Datagram Transport Layer Security). If the application scenario requires higher security standards, Agora recommends that you do not use this method. + /// + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure.-7: The method is called before RtcEngine is initialized. + Future enableInstantMediaRendering(); + + /// Gets the current NTP (Network Time Protocol) time. + /// In the real-time chorus scenario, especially when the downlink connections are inconsistent due to network issues among multiple receiving ends, you can call this method to obtain the current NTP time as the reference time, in order to align the lyrics and music of multiple receiving ends and achieve chorus synchronization. + /// + /// Returns + /// The Unix timestamp (ms) of the current NTP time. + Future getNtpWallTimeInMs(); + /// Gets the AudioDeviceManager object to manage audio devices. /// /// Returns @@ -4541,13 +5355,6 @@ abstract class RtcEngine { /// One MediaEngine object. MediaEngine getMediaEngine(); - /// Gets one MediaRecorder object. - /// Make sure the RtcEngine is initialized before you call this method. - /// - /// Returns - /// One MediaRecorder object. - MediaRecorder getMediaRecorder(); - /// Gets one LocalSpatialAudioEngine object. /// Make sure the RtcEngine is initialized before you call this method. /// @@ -4555,31 +5362,36 @@ abstract class RtcEngine { /// One LocalSpatialAudioEngine object. LocalSpatialAudioEngine getLocalSpatialAudioEngine(); - /// Sends media affiliate information. - /// If the media attachment information is successfully sent, the receiver will receive the onMetadataReceived callback. + /// Sends media metadata. + /// If the metadata is sent successfully, the SDK triggers the onMetadataReceived callback on the receiver. + /// + /// * [metadata] Media metadata. See Metadata . + /// * [sourceType] The type of the video source. See VideoSourceType .
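+ ///
+ /// A minimal sketch (assuming an initialized RtcEngine named engine with a registered metadata observer; the payload is illustrative and requires dart:convert and dart:typed_data):
+ /// ```dart
+ /// final Uint8List payload = Uint8List.fromList(utf8.encode('hello'));
+ /// await engine.sendMetaData(
+ ///   metadata: Metadata(buffer: payload, size: payload.length),
+ ///   sourceType: VideoSourceType.videoSourceCamera,
+ /// );
+ /// ```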
/// - /// * [metadata] Media metadata. See Metadata . + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. Future sendMetaData( {required Metadata metadata, required VideoSourceType sourceType}); - /// Sets the maximum size of media metadata information. - /// After calling registerMediaMetadataObserver , you can call this method to set the maximum size of media metadata information. + /// Sets the maximum size of the media metadata. + /// After calling registerMediaMetadataObserver , you can call this method to set the maximum size of the media metadata. + /// + /// * [size] The maximum size of media metadata. /// - /// * [size] Sets the maximum size of media metadata information. + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. Future setMaxMetadataSize(int size); /// Unregisters the encoded audio frame observer. /// /// * [observer] The encoded audio observer. See AudioEncodedFrameObserver . - void unregisterAudioEncodedFrameObserver(AudioEncodedFrameObserver observer); - - /// Provides technical preview functionalities or special customizations by configuring the SDK with JSON options. /// - /// * [parameters] Pointer to the set parameters in a JSON string. - Future setParameters(String parameters); + /// Returns + /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown; you need to catch the exception and handle it accordingly.< 0: Failure. + void unregisterAudioEncodedFrameObserver(AudioEncodedFrameObserver observer); - /// Gets the C++ handle of the native SDK. - /// This method retrieves the C++ handle of the SDK, for example for registering the audio and video frame observer. + /// Gets the C++ handle of the Native SDK. + /// This method retrieves the C++ handle of the SDK, which is used for registering the audio and video frame observer. /// /// Returns /// The native handle of the SDK. @@ -4614,7 +5426,7 @@ extension QualityReportFormatTypeExt on QualityReportFormatType { /// Media device states. @JsonEnum(alwaysCreate: true) enum MediaDeviceStateType { - /// 0: The device is ready for use. + /// @nodoc @JsonValue(0) mediaDeviceStateIdle, @@ -4630,7 +5442,7 @@ enum MediaDeviceStateType { @JsonValue(4) mediaDeviceStateNotPresent, - /// 8: The device is not connected. + /// 8: The device is unplugged. @JsonValue(8) mediaDeviceStateUnplugged, } @@ -4959,7 +5771,7 @@ class SDKBuildInfo { @JsonKey(name: 'build') final int? build; - /// SDK version information. String, such as 4.0.0. + /// SDK version information. String format, such as 6.0.0. @JsonKey(name: 'version') final String?
version; diff --git a/lib/src/agora_rtc_engine.g.dart b/lib/src/agora_rtc_engine.g.dart index be4272e2e..d276e05fe 100644 --- a/lib/src/agora_rtc_engine.g.dart +++ b/lib/src/agora_rtc_engine.g.dart @@ -102,10 +102,64 @@ const _$CaptureBrightnessLevelTypeEnumMap = { CaptureBrightnessLevelType.captureBrightnessLevelDark: 2, }; +RemoteAudioStats _$RemoteAudioStatsFromJson(Map<String, dynamic> json) => + RemoteAudioStats( + uid: json['uid'] as int?, + quality: json['quality'] as int?, + networkTransportDelay: json['networkTransportDelay'] as int?, + jitterBufferDelay: json['jitterBufferDelay'] as int?, + audioLossRate: json['audioLossRate'] as int?, + numChannels: json['numChannels'] as int?, + receivedSampleRate: json['receivedSampleRate'] as int?, + receivedBitrate: json['receivedBitrate'] as int?, + totalFrozenTime: json['totalFrozenTime'] as int?, + frozenRate: json['frozenRate'] as int?, + mosValue: json['mosValue'] as int?, + frozenRateByCustomPlcCount: json['frozenRateByCustomPlcCount'] as int?, + plcCount: json['plcCount'] as int?, + totalActiveTime: json['totalActiveTime'] as int?, + publishDuration: json['publishDuration'] as int?, + qoeQuality: json['qoeQuality'] as int?, + qualityChangedReason: json['qualityChangedReason'] as int?, + rxAudioBytes: json['rxAudioBytes'] as int?, + ); + +Map<String, dynamic> _$RemoteAudioStatsToJson(RemoteAudioStats instance) { + final val = <String, dynamic>{}; + + void writeNotNull(String key, dynamic value) { + if (value != null) { + val[key] = value; + } + } + + writeNotNull('uid', instance.uid); + writeNotNull('quality', instance.quality); + writeNotNull('networkTransportDelay', instance.networkTransportDelay); + writeNotNull('jitterBufferDelay', instance.jitterBufferDelay); + writeNotNull('audioLossRate', instance.audioLossRate); + writeNotNull('numChannels', instance.numChannels); + writeNotNull('receivedSampleRate', instance.receivedSampleRate); + writeNotNull('receivedBitrate', instance.receivedBitrate); + writeNotNull('totalFrozenTime', instance.totalFrozenTime); + writeNotNull('frozenRate', instance.frozenRate); + writeNotNull('mosValue', instance.mosValue); + writeNotNull( + 'frozenRateByCustomPlcCount', instance.frozenRateByCustomPlcCount); + writeNotNull('plcCount', instance.plcCount); + writeNotNull('totalActiveTime', instance.totalActiveTime); + writeNotNull('publishDuration', instance.publishDuration); + writeNotNull('qoeQuality', instance.qoeQuality); + writeNotNull('qualityChangedReason', instance.qualityChangedReason); + writeNotNull('rxAudioBytes', instance.rxAudioBytes); + return val; +} + RemoteVideoStats _$RemoteVideoStatsFromJson(Map<String, dynamic> json) => RemoteVideoStats( uid: json['uid'] as int?, delay: json['delay'] as int?, + e2eDelay: json['e2eDelay'] as int?, width: json['width'] as int?, height: json['height'] as int?, receivedBitrate: json['receivedBitrate'] as int?, @@ -120,8 +174,8 @@ RemoteVideoStats _$RemoteVideoStatsFromJson(Map<String, dynamic> json) => avSyncTimeMs: json['avSyncTimeMs'] as int?, totalActiveTime: json['totalActiveTime'] as int?, publishDuration: json['publishDuration'] as int?, - superResolutionType: json['superResolutionType'] as int?, mosValue: json['mosValue'] as int?, + rxVideoBytes: json['rxVideoBytes'] as int?, ); Map<String, dynamic> _$RemoteVideoStatsToJson(RemoteVideoStats instance) { final val = <String, dynamic>{}; void writeNotNull(String key, dynamic value) { if (value != null) { val[key] = value; } } writeNotNull('uid', instance.uid); writeNotNull('delay', instance.delay); + writeNotNull('e2eDelay', instance.e2eDelay); writeNotNull('width', instance.width); writeNotNull('height', instance.height);
writeNotNull('receivedBitrate', instance.receivedBitrate); @@ -148,8 +203,8 @@ Map<String, dynamic> _$RemoteVideoStatsToJson(RemoteVideoStats instance) { writeNotNull('avSyncTimeMs', instance.avSyncTimeMs); writeNotNull('totalActiveTime', instance.totalActiveTime); writeNotNull('publishDuration', instance.publishDuration); - writeNotNull('superResolutionType', instance.superResolutionType); writeNotNull('mosValue', instance.mosValue); + writeNotNull('rxVideoBytes', instance.rxVideoBytes); return val; } @@ -312,24 +367,6 @@ Map<String, dynamic> _$PublisherConfigurationToJson( return val; } -AudioTrackConfig _$AudioTrackConfigFromJson(Map<String, dynamic> json) => - AudioTrackConfig( - enableLocalPlayback: json['enableLocalPlayback'] as bool?, - ); - -Map<String, dynamic> _$AudioTrackConfigToJson(AudioTrackConfig instance) { - final val = <String, dynamic>{}; - - void writeNotNull(String key, dynamic value) { - if (value != null) { - val[key] = value; - } - } - - writeNotNull('enableLocalPlayback', instance.enableLocalPlayback); - return val; -} - CameraCapturerConfiguration _$CameraCapturerConfigurationFromJson( Map<String, dynamic> json) => CameraCapturerConfiguration( @@ -462,7 +499,11 @@ ScreenCaptureSourceInfo _$ScreenCaptureSourceInfoFromJson( sourceTitle: json['sourceTitle'] as String?, primaryMonitor: json['primaryMonitor'] as bool?, isOccluded: json['isOccluded'] as bool?, + position: json['position'] == null + ? null + : Rectangle.fromJson(json['position'] as Map<String, dynamic>), minimizeWindow: json['minimizeWindow'] as bool?, + sourceDisplayId: json['sourceDisplayId'] as int?, ); Map<String, dynamic> _$ScreenCaptureSourceInfoToJson( @@ -484,7 +525,9 @@ Map<String, dynamic> _$ScreenCaptureSourceInfoToJson( writeNotNull('sourceTitle', instance.sourceTitle); writeNotNull('primaryMonitor', instance.primaryMonitor); writeNotNull('isOccluded', instance.isOccluded); + writeNotNull('position', instance.position?.toJson()); writeNotNull('minimizeWindow', instance.minimizeWindow); + writeNotNull('sourceDisplayId', instance.sourceDisplayId); return val; } @@ -554,19 +597,14 @@ ChannelMediaOptions _$ChannelMediaOptionsFromJson(Map<String, dynamic> json) => publishScreenTrack: json['publishScreenTrack'] as bool?, publishSecondaryScreenTrack: json['publishSecondaryScreenTrack'] as bool?, publishCustomAudioTrack: json['publishCustomAudioTrack'] as bool?, - publishCustomAudioSourceId: json['publishCustomAudioSourceId'] as int?, - publishCustomAudioTrackEnableAec: - json['publishCustomAudioTrackEnableAec'] as bool?, - publishDirectCustomAudioTrack: - json['publishDirectCustomAudioTrack'] as bool?, - publishCustomAudioTrackAec: json['publishCustomAudioTrackAec'] as bool?, + publishCustomAudioTrackId: json['publishCustomAudioTrackId'] as int?, publishCustomVideoTrack: json['publishCustomVideoTrack'] as bool?, publishEncodedVideoTrack: json['publishEncodedVideoTrack'] as bool?, publishMediaPlayerAudioTrack: json['publishMediaPlayerAudioTrack'] as bool?, publishMediaPlayerVideoTrack: json['publishMediaPlayerVideoTrack'] as bool?, - publishTrancodedVideoTrack: json['publishTrancodedVideoTrack'] as bool?, + publishTranscodedVideoTrack: json['publishTranscodedVideoTrack'] as bool?, autoSubscribeAudio: json['autoSubscribeAudio'] as bool?, autoSubscribeVideo: json['autoSubscribeVideo'] as bool?, enableAudioRecordingOrPlayout: @@ -610,14 +648,7 @@ Map<String, dynamic> _$ChannelMediaOptionsToJson(ChannelMediaOptions instance) { writeNotNull( 'publishSecondaryScreenTrack', instance.publishSecondaryScreenTrack); writeNotNull('publishCustomAudioTrack', instance.publishCustomAudioTrack); - writeNotNull( - 'publishCustomAudioSourceId', instance.publishCustomAudioSourceId); -
writeNotNull('publishCustomAudioTrackEnableAec', - instance.publishCustomAudioTrackEnableAec); - writeNotNull( - 'publishDirectCustomAudioTrack', instance.publishDirectCustomAudioTrack); - writeNotNull( - 'publishCustomAudioTrackAec', instance.publishCustomAudioTrackAec); + writeNotNull('publishCustomAudioTrackId', instance.publishCustomAudioTrackId); writeNotNull('publishCustomVideoTrack', instance.publishCustomVideoTrack); writeNotNull('publishEncodedVideoTrack', instance.publishEncodedVideoTrack); writeNotNull( @@ -625,7 +656,7 @@ Map<String, dynamic> _$ChannelMediaOptionsToJson(ChannelMediaOptions instance) { writeNotNull( 'publishMediaPlayerVideoTrack', instance.publishMediaPlayerVideoTrack); writeNotNull( - 'publishTrancodedVideoTrack', instance.publishTrancodedVideoTrack); + 'publishTranscodedVideoTrack', instance.publishTranscodedVideoTrack); writeNotNull('autoSubscribeAudio', instance.autoSubscribeAudio); writeNotNull('autoSubscribeVideo', instance.autoSubscribeVideo); writeNotNull( @@ -795,6 +826,7 @@ RtcEngineContext _$RtcEngineContextFromJson(Map<String, dynamic> json) => _$ThreadPriorityTypeEnumMap, json['threadPriority']), useExternalEglContext: json['useExternalEglContext'] as bool?, domainLimit: json['domainLimit'] as bool?, + autoRegisterAgoraExtensions: json['autoRegisterAgoraExtensions'] as bool?, ); Map<String, dynamic> _$RtcEngineContextToJson(RtcEngineContext instance) { @@ -818,6 +850,8 @@ Map<String, dynamic> _$RtcEngineContextToJson(RtcEngineContext instance) { 'threadPriority', _$ThreadPriorityTypeEnumMap[instance.threadPriority]); writeNotNull('useExternalEglContext', instance.useExternalEglContext); writeNotNull('domainLimit', instance.domainLimit); + writeNotNull( + 'autoRegisterAgoraExtensions', instance.autoRegisterAgoraExtensions); return val; } @@ -1031,6 +1065,8 @@ const _$MediaDeviceTypeEnumMap = { MediaDeviceType.videoRenderDevice: 2, MediaDeviceType.videoCaptureDevice: 3, MediaDeviceType.audioApplicationPlayoutDevice: 4, + MediaDeviceType.audioVirtualPlayoutDevice: 5, + MediaDeviceType.audioVirtualRecordingDevice: 6, }; const _$AudioMixingStateTypeEnumMap = { @@ -1113,6 +1149,8 @@ const _$ProxyTypeEnumMap = { ProxyType.tcpProxyType: 2, ProxyType.localProxyType: 3, ProxyType.tcpProxyAutoFallbackType: 4, + ProxyType.httpProxyType: 5, + ProxyType.httpsProxyType: 6, }; const _$MetadataTypeEnumMap = { diff --git a/lib/src/agora_rtc_engine_ex.dart b/lib/src/agora_rtc_engine_ex.dart index fb7a28d68..819267788 100644 --- a/lib/src/agora_rtc_engine_ex.dart +++ b/lib/src/agora_rtc_engine_ex.dart @@ -27,83 +27,110 @@ class RtcConnection { /// Inherited from RtcEngine . abstract class RtcEngineEx implements RtcEngine { /// Joins a channel with the connection ID. - /// You can call this method multiple times to join more than one channel.If you are already in a channel, you cannot rejoin it with the same user ID.If you want to join the same channel from different devices, ensure that the user IDs are different for all devices.Ensure that the app ID you use to generate the token is the same as RtcEngine the app ID used when creating the instance. + /// You can call this method multiple times to join more than one channel.If you are already in a channel, you cannot rejoin it with the same user ID.If you want to join the same channel from different devices, ensure that the user IDs are different for all devices.Ensure that the app ID you use to generate the token is the same as the app ID used when creating the RtcEngine instance. /// - /// * [options] The channel media options. See ChannelMediaOptions .
/// * [token] The token generated on your server for authentication. /// * [connection] The connection information. See RtcConnection . + /// * [options] The channel media options. See ChannelMediaOptions . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. + /// -2: The parameter is invalid. For example, the token is invalid, the uid parameter is not set to an integer, or the value of a member in ChannelMediaOptions is invalid. You need to pass in a valid parameter and join the channel again. + /// -3: Fails to initialize the RtcEngine object. You need to reinitialize the RtcEngine object. + /// -7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method. + /// -8: The internal state of the RtcEngine object is wrong. The typical cause is that you call this method to join the channel without calling stopEchoTest to stop the test after calling startEchoTest to start a call loop test. You need to call stopEchoTest before calling this method. + /// -17: The request to join the channel is rejected. The typical cause is that the user is in the channel. Agora recommends that you use the onConnectionStateChanged callback to determine whether the user exists in the channel. Do not call this method to join the channel unless you receive the connectionStateDisconnected(1) state. + /// -102: The channel name is invalid. You need to pass in a valid channel name in channelId to rejoin the channel. + /// -121: The user ID is invalid. You need to pass in a valid user ID in uid to rejoin the channel. Future<void> joinChannelEx( {required String token, required RtcConnection connection, required ChannelMediaOptions options}); - /// Leaves a channel. + /// Sets channel options and leaves the channel. + /// This method lets the user leave the channel, for example, by hanging up or exiting the call.After calling joinChannelEx to join the channel, this method must be called to end the call before starting the next call.This method can be called whether or not a call is currently in progress. This method releases all resources related to the session.This method call is asynchronous. When this method returns, it does not necessarily mean that the user has left the channel. After you leave the channel, the SDK triggers the onLeaveChannel callback.After actually leaving the channel, the local user triggers the onLeaveChannel callback; after the user in the communication scenario and the host in the live streaming scenario leave the channel, the remote user triggers the onUserOffline callback.If you call release immediately after calling this method, the SDK does not trigger the onLeaveChannel callback.If you have joined channels by calling both joinChannel and joinChannelEx, calling leaveChannel leaves all of those channels. /// /// * [connection] The connection information. See RtcConnection . /// * [options] The options for leaving the channel. See LeaveChannelOptions .This parameter only supports the stopMicrophoneRecording member in the LeaveChannelOptions settings; setting other members does not take effect. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future<void> leaveChannelEx( {required RtcConnection connection, LeaveChannelOptions?
options}); /// Updates the channel media options after joining the channel. /// - /// * [connection] The connection information. See RtcConnection . /// * [options] The channel media options. See ChannelMediaOptions . + /// * [connection] The connection information. See RtcConnection . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. + /// -2: The value of a member in the ChannelMediaOptions structure is invalid. For example, the token or the user ID is invalid. You need to fill in a valid parameter. + /// -7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method. + /// -8: The internal state of the RtcEngine object is wrong. The possible reason is that the user is not in the channel. Agora recommends that you use the onConnectionStateChanged callback to determine whether the user exists in the channel. If you receive the connectionStateDisconnected (1) or connectionStateFailed (5) state, the user is not in the channel. You need to call joinChannel to join a channel before calling this method. Future updateChannelMediaOptionsEx( {required ChannelMediaOptions options, required RtcConnection connection}); /// Sets the encoder configuration for the local video. - /// Each configuration profile corresponds to a set of video parameters, including the resolution, frame rate, and bitrate.The config specified in this method is the maximum values under ideal network conditions. If the network condition is not good, the video engine cannot use the config renders local video, which automatically reduces to an appropriate video parameter setting. + /// Each configuration profile corresponds to a set of video parameters, including the resolution, frame rate, and bitrate.The config specified in this method is the maximum value under ideal network conditions. If the video engine cannot render the video using the specified config due to unreliable network conditions, the parameters further down the list are considered until a successful configuration is found. /// - /// * [connection] The connection information. See RtcConnection . /// * [config] Video profile. See VideoEncoderConfiguration . + /// * [connection] The connection information. See RtcConnection . /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setVideoEncoderConfigurationEx( {required VideoEncoderConfiguration config, required RtcConnection connection}); /// Initializes the video view of a remote user. - /// This method initializes the video view of a remote stream on the local device. It affects only the video view that the local user sees. Call this method to bind the remote video stream to a video view and to set the rendering and mirror modes of the video view.The application specifies the uid of the remote video in the VideoCanvas method before the remote user joins the channel.If the remote uid is unknown to the application, set it after the application receives the onUserJoined callback. If the Video Recording function is enabled, the Video Recording Service joins the channel as a dummy client, causing other clients to also receive the onUserJoined callback. 
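To make the joinChannelEx/leaveChannelEx flow above concrete, here is a minimal sketch, assuming an initialized RtcEngineEx, a server-generated token, and placeholder channel name and uid:

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> joinAndLeaveSecondChannel(RtcEngineEx engine, String token) async {
  // The second connection must use a uid different from the first channel's.
  const connection = RtcConnection(channelId: 'demo_channel_2', localUid: 1002);
  try {
    await engine.joinChannelEx(
      token: token,
      connection: connection,
      options: const ChannelMediaOptions(
        channelProfile: ChannelProfileType.channelProfileLiveBroadcasting,
        clientRoleType: ClientRoleType.clientRoleBroadcaster,
      ),
    );
  } on AgoraRtcException catch (e) {
    // The error codes documented above (-2, -7, -8, -17, ...) surface here.
    print('joinChannelEx failed: ${e.code}');
  }
  // Later: end the call on this connection only.
  await engine.leaveChannelEx(connection: connection);
}
```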
Do not bind the dummy client to the application view because the dummy client does not send any video streams.To unbind the remote user from the view, set the view parameter to NULL.Once the remote user leaves the channel, the SDK unbinds the remote user. + /// This method initializes the video view of a remote stream on the local device. It affects only the video view that the local user sees. Call this method to bind the remote video stream to a video view and to set the rendering and mirror modes of the video view.The application specifies the uid of the remote video in the VideoCanvas method before the remote user joins the channel.If the remote uid is unknown to the application, set it after the application receives the onUserJoined callback. If the Video Recording function is enabled, the Video Recording Service joins the channel as a dummy client, causing other clients to also receive the onUserJoined callback. Do not bind the dummy client to the application view because the dummy client does not send any video streams.To unbind the remote user from the view, set the view parameter to NULL.Once the remote user leaves the channel, the SDK unbinds the remote user.To update the rendering or mirror mode of the remote video view during a call, use the setRemoteRenderModeEx method. /// - /// * [connection] The connection information. See RtcConnection . /// * [canvas] The remote video view settings. See VideoCanvas . + /// * [connection] The connection information. See RtcConnection . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setupRemoteVideoEx( {required VideoCanvas canvas, required RtcConnection connection}); /// Stops or resumes receiving the audio stream of a specified user. - /// This method is used to stops or resumes receiving the audio stream of a specified user. You can call this method before or after joining a channel. If a user leaves a channel, the settings in this method become invalid. /// - /// * [connection] The connection information. See RtcConnection . /// * [uid] The ID of the specified user. /// * [mute] Whether to stop receiving the audio stream of the specified user:true: Stop receiving the audio stream of the specified user.false: (Default) Resume receiving the audio stream of the specified user. + /// * [connection] The connection information. See RtcConnection . /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. Future muteRemoteAudioStreamEx( {required int uid, required bool mute, required RtcConnection connection}); /// Stops or resumes receiving the video stream of a specified user. - /// This method is used to stops or resumes receiving the video stream of a specified user. You can call this method before or after joining a channel. If a user leaves a channel, the settings in this method become invalid. + /// This method is used to stop or resume receiving the video stream of a specified user. You can call this method before or after joining a channel. If a user leaves a channel, the settings in this method become invalid. /// - /// * [connection] The connection information. See RtcConnection . /// * [uid] The user ID of the remote user. 
/// * [mute] Whether to stop receiving the video stream of the specified user:true: Stop receiving the video stream of the specified user.false: (Default) Resume receiving the video stream of the specified user. + /// * [connection] The connection information. See RtcConnection . /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future<void> muteRemoteVideoStreamEx( {required int uid, required bool mute, required RtcConnection connection}); /// Sets the stream type of the remote video. - /// Under limited network conditions, if the publisher has not disabled the dual-stream mode using enableDualStreamModeEx (false), the receiver can choose to receive either the high-quality video stream or the low-quality video stream. The high-quality video stream has a higher resolution and bitrate, and the low-quality video stream has a lower resolution and bitrate. - /// By default, users receive the high-quality video stream. Call this method if you want to switch to the low-quality video stream. This method allows the app to adjust the corresponding video stream type based on the size of the video window to reduce the bandwidth and resources. The aspect ratio of the low-quality video stream is the same as the high-quality video stream. Once the resolution of the high-quality video stream is set, the system automatically sets the resolution, frame rate, and bitrate of the low-quality video stream. - /// The SDK enables the low-quality video stream auto mode on the sender by default (not actively sending low-quality video streams). The host at the receiving end can call this method to initiate a low-quality video stream stream request on the receiving end, and the sender automatically switches to the low-quality video stream mode after receiving the request. - /// The result of this method returns in the onApiCallExecuted callback. + /// Under limited network conditions, if the publisher has not disabled the dual-stream mode using enableDualStreamModeEx (false), the receiver can choose to receive either the high-quality video stream or the low-quality video stream. The high-quality video stream has a higher resolution and bitrate, and the low-quality video stream has a lower resolution and bitrate.By default, users receive the high-quality video stream. Call this method if you want to switch to the low-quality video stream. This method allows the app to adjust the corresponding video stream type based on the size of the video window to reduce the bandwidth and resources. The aspect ratio of the low-quality video stream is the same as the high-quality video stream. Once the resolution of the high-quality video stream is set, the system automatically sets the resolution, frame rate, and bitrate of the low-quality video stream.The SDK enables the low-quality video stream auto mode on the sender by default (not actively sending low-quality video streams). The host at the receiving end can call this method to initiate a low-quality video stream request on the receiving end, and the sender automatically switches to the low-quality video stream mode after receiving the request. /// /// * [uid] The user ID. - /// * [streamType] The video stream type. See VideoStreamType. + /// * [streamType] The video stream type. See VideoStreamType . /// * [connection] The connection information. See RtcConnection .
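A minimal sketch of requesting the low-quality stream for one remote user, e.g. when the user's view shrinks to a thumbnail; assumes an active joinChannelEx session and that the sender has not disabled dual-stream mode:

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> switchToLowStream(
    RtcEngineEx engine, RtcConnection connection, int remoteUid) async {
  // The sender starts publishing the low-quality stream on request.
  await engine.setRemoteVideoStreamTypeEx(
    uid: remoteUid,
    streamType: VideoStreamType.videoStreamLow,
    connection: connection,
  );
}
```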
/// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future<void> setRemoteVideoStreamTypeEx( {required int uid, required VideoStreamType streamType, @@ -112,94 +139,98 @@ abstract class RtcEngineEx implements RtcEngine { /// Stops or resumes publishing the local audio stream. /// This method does not affect any ongoing audio recording, because it does not disable the audio capture device. /// - /// * [connection] The connection information. See RtcConnection . /// * [mute] Whether to stop publishing the local audio stream:true: Stops publishing the local audio stream.false: (Default) Resumes publishing the local audio stream. + /// * [connection] The connection information. See RtcConnection . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. Future<void> muteLocalAudioStreamEx( {required bool mute, required RtcConnection connection}); /// Stops or resumes publishing the local video stream. /// A successful call of this method triggers the onUserMuteVideo callback on the remote client.This method does not affect any ongoing video recording, because it does not disable the camera. /// - /// * [connection] The connection information. See RtcConnection . /// * [mute] Whether to stop publishing the local video stream.true: Stop publishing the local video stream.false: (Default) Publish the local video stream. + /// * [connection] The connection information. See RtcConnection . /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future<void> muteLocalVideoStreamEx( {required bool mute, required RtcConnection connection}); /// Stops or resumes subscribing to the audio streams of all remote users. - /// After successfully calling this method, the local user stops or resumes subscribing to the audio streams of all remote users, including the ones who join the channel subsequent to this call.Call this method after joining a channel.If you do not want to subscribe the audio streams of remote users before joining a channel, you can set autoSubscribeAudio as false when calling joinChannel [2/2] . + /// After successfully calling this method, the local user stops or resumes subscribing to the audio streams of all remote users, including the ones who join the channel subsequent to this call.Call this method after joining a channel.If you do not want to subscribe to the audio streams of remote users before joining a channel, you can set autoSubscribeAudio as false when calling joinChannel . /// + /// * [mute] Whether to stop subscribing to the audio streams of all remote users:true: Stops subscribing to the audio streams of all remote users.false: (Default) Subscribes to the audio streams of all remote users. /// * [connection] The connection information. See RtcConnection . - /// * [mute] Whether to stop subscribing to the audio streams of all remote users:true: Stops subscribing to the audio streams of all remote users.false: (Default) Subscribes to the audio streams of all remote users.
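A minimal sketch of pausing publication on one connection while any other connection stays live, assuming both were joined via joinChannelEx:

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> pausePublishing(RtcEngineEx engine, RtcConnection connection) async {
  // Neither call disables the capture devices; they only stop publishing.
  await engine.muteLocalAudioStreamEx(mute: true, connection: connection);
  await engine.muteLocalVideoStreamEx(mute: true, connection: connection);
}
```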
+ /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. Future<void> muteAllRemoteAudioStreamsEx( {required bool mute, required RtcConnection connection}); /// Stops or resumes subscribing to the video streams of all remote users. /// After successfully calling this method, the local user stops or resumes subscribing to the video streams of all remote users, including all subsequent users. /// + /// * [mute] Whether to stop subscribing to the video streams of all remote users.true: Stop subscribing to the video streams of all remote users.false: (Default) Subscribe to the video streams of all remote users. /// * [connection] The connection information. See RtcConnection . - /// * [mute] Whether to stop subscribing to the video streams of all remote users. - /// true: Stop subscribing to the video streams of all remote users. - /// false: (Default) Subscribe to the audio streams of all remote users by default. /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. Future<void> muteAllRemoteVideoStreamsEx( {required bool mute, required RtcConnection connection}); /// Set the blocklist of subscriptions for audio streams. - /// You can call this method to specify the audio streams of a user that you do not want to subscribe to. You can call this method either before or after joining a channel. - /// The blocklist is not affected by the setting in muteRemoteAudioStream , muteAllRemoteAudioStreams , and autoSubscribeAudio in ChannelMediaOptions . - /// Once the blocklist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel. - /// If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect. + /// You can call this method to specify the audio streams of a user that you do not want to subscribe to.You can call this method either before or after joining a channel.The blocklist is not affected by the setting in muteRemoteAudioStream , muteAllRemoteAudioStreams , and autoSubscribeAudio in ChannelMediaOptions .Once the blocklist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel.If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect. /// - /// * [connection] The connection information. See RtcConnection . + /// * [uidList] The user ID list of users that you do not want to subscribe to.If you want to specify the audio streams of a user that you do not want to subscribe to, add the user ID in this list.
If you want to remove a user from the blocklist, you need to call the setSubscribeAudioBlocklist method to update the user ID list; this means you only add the uid of users that you do not want to subscribe to in the new user ID list. + /// * [connection] The connection information. See RtcConnection . /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setSubscribeAudioBlocklistEx( {required List uidList, required int uidNumber, required RtcConnection connection}); /// Sets the allowlist of subscriptions for audio streams. - /// You can call this method to specify the audio streams of a user that you want to subscribe to. If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect.You can call this method either before or after joining a channel.The allowlist is not affected by the setting in muteRemoteAudioStream , muteAllRemoteAudioStreams and autoSubscribeAudio in ChannelMediaOptions . - /// Once the allowlist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel. + /// You can call this method to specify the audio streams of a user that you want to subscribe to.If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect.You can call this method either before or after joining a channel.The allowlist is not affected by the setting in muteRemoteAudioStream , muteAllRemoteAudioStreams and autoSubscribeAudio in ChannelMediaOptions .Once the allowlist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel. /// - /// * [connection] The connection information. See RtcConnection . + /// * [uidList] The user ID list of users that you want to subscribe to.If you want to specify the audio streams of a user for subscription, add the user ID in this list. If you want to remove a user from the allowlist, you need to call the setSubscribeAudioAllowlist method to update the user ID list; this means you only add the uid of users that you want to subscribe to in the new user ID list. /// * [uidNumber] The number of users in the user ID list. - /// * [uidList] The user ID list of users that you want to subscribe to. - /// If you want to specify the audio streams of a user for subscription, add the user ID in this list. If you want to remove a user from the allowlist, you need to call the setSubscribeAudioAllowlist method to update the user ID list; this means you only add the uid of users that you want to subscribe to in the new user ID list. + /// * [connection] The connection information. See RtcConnection . /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setSubscribeAudioAllowlistEx( {required List uidList, required int uidNumber, required RtcConnection connection}); /// Set the blocklist of subscriptions for video streams. - /// You can call this method to specify the video streams of a user that you do not want to subscribe to. If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect. - /// Once the blocklist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel. 
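A minimal sketch of the audio blocklist API, assuming an active joinChannelEx session; the uids are placeholders, and each call replaces the previous blocklist in full:

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> blockAudioFrom(RtcEngineEx engine, RtcConnection connection) async {
  const blocked = <int>[2001, 2002]; // hypothetical user IDs
  await engine.setSubscribeAudioBlocklistEx(
    uidList: blocked,
    uidNumber: blocked.length,
    connection: connection,
  );
}
```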
- /// You can call this method either before or after joining a channel. - /// The blocklist is not affected by the setting in muteRemoteVideoStream , muteAllRemoteVideoStreams and autoSubscribeAudio in ChannelMediaOptions . + /// You can call this method to specify the video streams of a user that you do not want to subscribe to.If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect.Once the blocklist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel.You can call this method either before or after joining a channel.The blocklist is not affected by the setting in muteRemoteVideoStream , muteAllRemoteVideoStreams and autoSubscribeVideo in ChannelMediaOptions . /// - /// * [connection] The connection information. See RtcConnection . + /// * [uidList] The user ID list of users that you do not want to subscribe to.If you want to specify the video streams of a user that you do not want to subscribe to, add the user ID of that user in this list. If you want to remove a user from the blocklist, you need to call the setSubscribeVideoBlocklist method to update the user ID list; this means you only add the uid of users that you do not want to subscribe to in the new user ID list. /// * [uidNumber] The number of users in the user ID list. - /// * [uidList] The user ID list of users that you do not want to subscribe to. - /// If you want to specify the video streams of a user that you do not want to subscribe to, add the user ID of that user in this list. If you want to remove a user from the blocklist, you need to call the setSubscribeVideoBlocklist method to update the user ID list; this means you only add the uid of users that you do not want to subscribe to in the new user ID list. + /// * [connection] The connection information. See RtcConnection . /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future<void> setSubscribeVideoBlocklistEx( {required List<int> uidList, required int uidNumber, required RtcConnection connection}); /// Set the allowlist of subscriptions for video streams. - /// You can call this method to specify the video streams of a user that you want to subscribe to.If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect. - /// Once the allowlist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel. - /// You can call this method either before or after joining a channel. - /// The allowlist is not affected by the setting in muteRemoteVideoStream , muteAllRemoteVideoStreams and autoSubscribeAudio in ChannelMediaOptions . + /// You can call this method to specify the video streams of a user that you want to subscribe to.If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect.Once the allowlist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel.You can call this method either before or after joining a channel.The allowlist is not affected by the setting in muteRemoteVideoStream , muteAllRemoteVideoStreams and autoSubscribeVideo in ChannelMediaOptions . /// - /// * [connection] The connection information. See RtcConnection . - /// * [uidNumber] The number of users in the user ID list.
/// * [uidList] The user ID list of users that you want to subscribe to.If you want to specify the video streams of a user for subscription, add the user ID of that user in this list. If you want to remove a user from the allowlist, you need to call the setSubscribeVideoAllowlist method to update the user ID list; this means you only add the uid of users that you want to subscribe to in the new user ID list. + /// * [uidNumber] The number of users in the user ID list. + /// * [connection] The connection information. See RtcConnection . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future setSubscribeVideoAllowlistEx( {required List uidList, required int uidNumber, @@ -208,9 +239,12 @@ abstract class RtcEngineEx implements RtcEngine { /// Options for subscribing to remote video streams. /// When a remote user has enabled dual-stream mode, you can call this method to choose the option for subscribing to the video streams sent by the remote user. /// - /// * [connection] The connection information. See RtcConnection . - /// * [options] The video subscription options. See VideoSubscriptionOptions . /// * [uid] The user ID of the remote user. + /// * [options] The video subscription options. See VideoSubscriptionOptions . + /// * [connection] The connection information. See RtcConnection . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. Future setRemoteVideoSubscriptionOptionsEx( {required int uid, required VideoSubscriptionOptions options, @@ -219,11 +253,13 @@ abstract class RtcEngineEx implements RtcEngine { /// Sets the 2D position (the position on the horizontal plane) of the remote user's voice. /// This method sets the voice position and volume of a remote user.When the local user calls this method to set the voice position of a remote user, the voice difference between the left and right channels allows the local user to track the real-time position of the remote user, creating a sense of space. This method applies to massive multiplayer online games, such as Battle Royale games.For the best voice positioning, Agora recommends using a wired headset.Call this method after joining a channel. /// - /// * [connection] The connection information. See RtcConnection . /// * [uid] The user ID of the remote user. /// * [pan] The voice position of the remote user. The value ranges from -1.0 to 1.0:-1.0: The remote voice comes from the left.0.0: (Default) The remote voice comes from the front.1.0: The remote voice comes from the right. /// * [gain] The volume of the remote user. The value ranges from 0.0 to 100.0. The default value is 100.0 (the original volume of the remote user). The smaller the value, the lower the volume. + /// * [connection] The connection information. See RtcConnection . /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
Future<void> setRemoteVoicePositionEx( {required int uid, required double pan, @@ -236,32 +272,54 @@ abstract class RtcEngineEx implements RtcEngine { required SpatialAudioParams params, required RtcConnection connection}); - /// @nodoc + /// Sets the video display mode of a specified remote user. + /// After initializing the video view of a remote user, you can call this method to update its rendering and mirror modes. This method affects only the video view that the local user sees.Call this method after initializing the remote view by calling the setupRemoteVideo method.During a call, you can call this method as many times as necessary to update the display mode of the video view of a remote user. + /// + /// * [uid] The user ID of the remote user. + /// * [renderMode] The video display mode of the remote user. See RenderModeType . + /// * [mirrorMode] The mirror mode of the remote user view. See VideoMirrorModeType . + /// * [connection] The connection information. See RtcConnection . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future<void> setRemoteRenderModeEx( {required int uid, required RenderModeType renderMode, required VideoMirrorModeType mirrorMode, required RtcConnection connection}); - /// Enables loopback audio capture. - /// If you enable loopback audio capture, the output of the sound card is mixed into the audio stream sent to the other end.macOS does not support loopback audio capture of the default sound card. If you need to use this method, use a virtual sound card and pass its name to the deviceName parameter. Agora recommends that you use Soundflower for loopback audio capture.You can call this method either before or after joining a channel. + /// Enables loopback audio capturing. + /// If you enable loopback audio capturing, the output of the sound card is mixed into the audio stream sent to the other end.This method applies to macOS and Windows only.macOS does not support loopback audio capture of the default sound card. If you need to use this function, use a virtual sound card and pass its name to the deviceName parameter. Agora recommends using AgoraALD as the virtual sound card for audio capturing.This method only supports using one sound card for audio capturing. /// - /// * [deviceName] macOS: The device name of the virtual sound card. The default is set to null, which means the SDK uses Soundflower for loopback audio capture.Windows: The device name of the sound card. The default is set to null, which means the SDK uses the sound card of your device for loopback audio capture. /// * [connection] The connection information. See RtcConnection . - /// * [enabled] Sets whether to enable loopback audio capture: - /// true: Enable loopback audio capture.false: (Default) Disable loopback audio capture. + /// * [enabled] Sets whether to enable loopback audio capturing:true: Enable loopback audio capturing.false: (Default) Disable loopback audio capturing. + /// * [deviceName] macOS: The device name of the virtual sound card. The default value is set to NULL, which means using AgoraALD for loopback audio capturing. + /// Windows: The device name of the sound card. The default is set to NULL, which means the SDK uses the sound card of your device for loopback audio capturing.
/// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future enableLoopbackRecordingEx( {required RtcConnection connection, required bool enabled, String? deviceName}); + /// @nodoc + Future adjustRecordingSignalVolumeEx( + {required int volume, required RtcConnection connection}); + + /// @nodoc + Future muteRecordingSignalEx( + {required bool mute, required RtcConnection connection}); + /// Adjusts the playback signal volume of a specified remote user. /// You can call this method to adjust the playback volume of a specified remote user. To adjust the playback volume of different remote users, call the method as many times, once for each remote user.Call this method after joining a channel.The playback volume here refers to the mixed volume of a specified remote user. /// + /// * [uid] The user ID of the remote user. /// * [volume] Audio mixing volume. The value ranges between 0 and 100. The default value is 100, which means the original volume. /// * [connection] The connection information. See RtcConnection . - /// * [uid] The user ID of the remote user. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future adjustUserPlaybackSignalVolumeEx( {required int uid, required int volume, @@ -273,7 +331,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection . /// /// Returns - /// The current connection state. + /// The current connection state. See ConnectionStateType . Future getConnectionStateEx(RtcConnection connection); /// @nodoc @@ -285,23 +343,25 @@ abstract class RtcEngineEx implements RtcEngine { /// Creates a data stream. /// Creates a data stream. Each user can create up to five data streams in a single channel.Compared with createDataStreamEx , this method does not support data reliability. If a data packet is not received five seconds after it was sent, the SDK directly discards the data. /// - /// * [connection] The connection information. See RtcConnection . /// * [config] The configurations for the data stream. See DataStreamConfig . + /// * [connection] The connection information. See RtcConnection . /// /// Returns - /// < 0: Failure. + /// ID of the created data stream, if the method call succeeds.< 0: Failure. Future createDataStreamEx( {required DataStreamConfig config, required RtcConnection connection}); /// Sends data stream messages. - /// After calling createDataStreamEx , you can call this method to send data stream messages to all users in the channel.The SDK has the following restrictions on this method:Up to 30 packets can be sent per second in a channel with each packet having a maximum size of 1 kB.Each client can send up to 6 KB of data per second.Each user can have up to five data streams simultaneously.A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. 
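A minimal sketch of adjusting one remote user's playback volume on a connection, assuming the local user has already joined via joinChannelEx:

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> duckRemoteUser(
    RtcEngineEx engine, RtcConnection connection, int remoteUid) async {
  await engine.adjustUserPlaybackSignalVolumeEx(
    uid: remoteUid,
    volume: 50, // 0-100; 100 keeps the original volume
    connection: connection,
  );
}
```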
+ /// After calling createDataStreamEx , you can call this method to send data stream messages to all users in the channel.The SDK has the following restrictions on this method:Up to 60 packets can be sent per second in a channel with each packet having a maximum size of 1 KB.Each client can send up to 30 KB of data per second.Each user can have up to five data streams simultaneously.A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. /// A failed method call triggers the onStreamMessageError callback on the remote client.Ensure that you call createDataStreamEx to create a data channel before calling this method.This method applies only to the COMMUNICATION profile or to the hosts in the LIVE_BROADCASTING profile. If an audience in the LIVE_BROADCASTING profile calls this method, the audience may be switched to a host. /// - /// * [connection] The connection information. See RtcConnection . /// * [streamId] The data stream ID. You can get the data stream ID by calling createDataStreamEx. - /// * [data] The data to be sent. + /// * [data] The message to be sent. /// * [length] The length of the data. + /// * [connection] The connection information. See RtcConnection . /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future sendStreamMessageEx( {required int streamId, required Uint8List data, @@ -309,12 +369,15 @@ abstract class RtcEngineEx implements RtcEngine { required RtcConnection connection}); /// Adds a watermark image to the local video. - /// This method adds a PNG watermark image to the local video in the live streaming. Once the watermark image is added, all the audience in the channel (CDN audience included), and the capturing device can see and capture it. Agora supports adding only one watermark image onto the local video, and the newly watermark image replaces the previous one.The watermark coordinatesare dependent on the settings in the setVideoEncoderConfigurationEx method:If the orientation mode of the encoding video ( OrientationMode ) is fixed landscape mode or the adaptive landscape mode, the watermark uses the landscape orientation.If the orientation mode of the encoding video (OrientationMode) is fixed portrait mode or the adaptive portrait mode, the watermark uses the portrait orientation.When setting the watermark position, the region must be less than the setVideoEncoderConfigurationEx dimensions set in the method; otherwise, the watermark image will be cropped.Ensure that you have called enableVideo before calling this method.This method supports adding a watermark image in the PNG file format only. Supported pixel formats of the PNG image are RGBA, RGB, Palette, Gray, and Alpha_gray.If the dimensions of the PNG image differ from your settings in this method, the image will be cropped or zoomed to conform to your settings.If you have enabled the local video preview by calling the startPreview method, you can use the visibleInPreview member to set whether or not the watermark is visible in the preview.If you have enabled the mirror mode for the local video, the watermark on the local video is also mirrored. To avoid mirroring the watermark, Agora recommends that you do not use the mirror and watermark functions for the local video at the same time. You can implement the watermark function in your application layer. 
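A minimal sketch of the data stream APIs above: create an unreliable, unordered stream on a connection and send a short text message, staying well under the per-packet limit. Assumes an active joinChannelEx session:

```dart
import 'dart:convert';
import 'dart:typed_data';

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> sendHello(RtcEngineEx engine, RtcConnection connection) async {
  final int streamId = await engine.createDataStreamEx(
    config: const DataStreamConfig(syncWithAudio: false, ordered: false),
    connection: connection,
  );
  final Uint8List data = Uint8List.fromList(utf8.encode('hello'));
  await engine.sendStreamMessageEx(
    streamId: streamId,
    data: data,
    length: data.length,
    connection: connection,
  );
}
```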
+ /// This method adds a PNG watermark image to the local video in the live streaming. Once the watermark image is added, all the audience in the channel (CDN audience included), and the capturing device can see and capture it. The Agora SDK supports adding only one watermark image onto a local video or CDN live stream. The newly added watermark image replaces the previous one. + /// The watermark coordinates are dependent on the settings in the setVideoEncoderConfigurationEx method:If the orientation mode of the encoding video ( OrientationMode ) is fixed landscape mode or the adaptive landscape mode, the watermark uses the landscape orientation.If the orientation mode of the encoding video (OrientationMode) is fixed portrait mode or the adaptive portrait mode, the watermark uses the portrait orientation.When setting the watermark position, the region must be less than the dimensions set in the setVideoEncoderConfigurationEx method; otherwise, the watermark image will be cropped.Ensure that you have called enableVideo before calling this method.This method supports adding a watermark image in the PNG file format only. Supported pixel formats of the PNG image are RGBA, RGB, Palette, Gray, and Alpha_gray.If the dimensions of the PNG image differ from your settings in this method, the image will be cropped or zoomed to conform to your settings.If you have enabled the local video preview by calling the startPreview method, you can use the visibleInPreview member to set whether or not the watermark is visible in the preview.If you have enabled the mirror mode for the local video, the watermark on the local video is also mirrored. To avoid mirroring the watermark, Agora recommends that you do not use the mirror and watermark functions for the local video at the same time. You can implement the watermark function in your application layer. /// - /// * [connection] The connection information. See RtcConnection . - /// * [options] The options of the watermark image to be added. /// * [watermarkUrl] The local file path of the watermark image to be added. This method supports adding a watermark image from the local absolute or relative file path. + /// * [options] The options of the watermark image to be added. See WatermarkOptions . + /// * [connection] The connection information. See RtcConnection . /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future addVideoWatermarkEx( {required String watermarkUrl, required WatermarkOptions options, @@ -323,6 +386,9 @@ abstract class RtcEngineEx implements RtcEngine { /// Removes the watermark image from the video stream. /// /// * [connection] The connection information. See RtcConnection . + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future clearVideoWatermarkEx(RtcConnection connection); /// Agora supports reporting and analyzing customized messages. @@ -338,15 +404,13 @@ abstract class RtcEngineEx implements RtcEngine { /// Enables the reporting of users' volume indication. /// This method enables the SDK to regularly report the volume information to the app of the local user who sends a stream and remote users (three users at most) whose instantaneous volumes are the highest. 
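A minimal sketch of adding a PNG watermark on a connection; the file path and geometry are placeholders, and the region must stay within the dimensions set via setVideoEncoderConfigurationEx:

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> stampWatermark(RtcEngineEx engine, RtcConnection connection) async {
  await engine.addVideoWatermarkEx(
    watermarkUrl: '/path/to/watermark.png', // hypothetical local file path
    options: const WatermarkOptions(
      visibleInPreview: true,
      positionInLandscapeMode: Rectangle(x: 16, y: 16, width: 200, height: 80),
    ),
    connection: connection,
  );
}
```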
Once you call this method and users send streams in the channel, the SDK triggers the onAudioVolumeIndication callback at the time interval set in this method. /// + /// * [interval] Sets the time interval between two consecutive volume indications:≤ 0: Disables the volume indication.> 0: Time interval (ms) between two consecutive volume indications. The lowest value is 50. + /// * [smooth] The smoothing factor that sets the sensitivity of the audio volume indicator. The value ranges between 0 and 10. The recommended value is 3. The greater the value, the more sensitive the indicator. + /// * [reportVad] true: Enables the voice activity detection of the local user. Once it is enabled, the vad parameter of the onAudioVolumeIndication callback reports the voice activity status of the local user.false: (Default) Disables the voice activity detection of the local user. Once it is disabled, the vad parameter of the onAudioVolumeIndication callback does not report the voice activity status of the local user, except for the scenario where the engine automatically detects the voice activity of the local user. /// * [connection] The connection information. See RtcConnection . - /// * [reportVad] true: Enable the voice activity detection of the local user. Once it is enabled,the vad parameter of the onAudioVolumeIndication callback reports the voice activity status of the local user. - /// false: (Default) Disable the voice activity detection of the local user. Once it is disabled, the vad parameter of the onAudioVolumeIndication callback does not report the voice activity status of the local user, except for the scenario where the engine automatically detects the voice activity of the local user. - /// - /// * [smooth] The smoothing factor sets the sensitivity of the audio volume indicator. The value ranges between 0 and 10. The recommended value is 3. The greater the value, the more sensitive the indicator. - /// * [interval] Sets the time interval between two consecutive volume indications: - /// ≤ 0: Disables the volume indication. - /// > 0: Time interval (ms) between two consecutive volume indications. You need to set this parameter to an integer multiple of 200. If the value is lower than 200, the SDK automatically adjusts the value to 200. /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. Future enableAudioVolumeIndicationEx( {required int interval, required int smooth, @@ -354,79 +418,120 @@ abstract class RtcEngineEx implements RtcEngine { required RtcConnection connection}); /// Starts pushing media streams to a CDN without transcoding. - /// Ensure that you enable the media push service before using this function. + /// Ensure that you enable the Media Push service before using this function. See Enable Media Push. /// Call this method after joining a channel. /// Only hosts in the LIVE_BROADCASTING profile can call this method. /// If you want to retry pushing streams after a failed push, make sure to call stopRtmpStream first, then call this method to retry pushing streams; otherwise, the SDK returns the same error code as the last failed push. - /// You can call this method to push an audio or video stream to the specified CDN address. 
   Future<void> enableAudioVolumeIndicationEx(
       {required int interval,
       required int smooth,
@@ -354,79 +418,120 @@ abstract class RtcEngineEx implements RtcEngine {
       required RtcConnection connection});

   /// Starts pushing media streams to a CDN without transcoding.
-  /// Ensure that you enable the media push service before using this function.
+  /// Ensure that you enable the Media Push service before using this function. See Enable Media Push.
   /// Call this method after joining a channel.
   /// Only hosts in the LIVE_BROADCASTING profile can call this method.
   /// If you want to retry pushing streams after a failed push, make sure to call stopRtmpStream first, then call this method to retry pushing streams; otherwise, the SDK returns the same error code as the last failed push.
-  /// You can call this method to push an audio or video stream to the specified CDN address. This method can push media streams to only one CDN address at a time, so if you need to push streams to multiple addresses, call this method multiple times.After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the streaming.
+  /// Agora recommends that you use the server-side Media Push function. You can call this method to push an audio or video stream to the specified CDN address. This method can push media streams to only one CDN address at a time, so if you need to push streams to multiple addresses, call this method multiple times. After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the streaming.
   ///
+  /// * [url] The address of Media Push. The format is RTMP or RTMPS. The character length cannot exceed 1024 bytes. Special characters such as Chinese characters are not supported.
   /// * [connection] The connection information. See RtcConnection .
-  /// * [url] The address of media push. The format is RTMP or RTMPS. The character length cannot exceed 1024 bytes. Special characters such as Chinese characters are not supported.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
+  /// -2: The URL is null or the string length is 0.
+  /// -7: The SDK is not initialized before calling this method.
+  /// -19: The Media Push URL is already in use, use another URL instead.
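+  ///
+  /// A minimal usage sketch (an editor's illustration; the RTMP URL is a
+  /// placeholder, and the error codes follow the list above):
+  ///
+  /// ```dart
+  /// try {
+  ///   await engine.startRtmpStreamWithoutTranscodingEx(
+  ///     url: 'rtmp://push.example.com/live/stream1',
+  ///     connection: connection,
+  ///   );
+  /// } on AgoraRtcException catch (e) {
+  ///   // e.code == -19: the URL is already in use; pick another URL.
+  /// }
+  /// ```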
   Future<void> startRtmpStreamWithoutTranscodingEx(
       {required String url, required RtcConnection connection});

   /// Starts Media Push and sets the transcoding configuration.
-  /// You can call this method to push a live audio-and-video stream to the specified CDN address and set the transcoding configuration. This method can push media streams to only one CDN address at a time, so if you need to push streams to multiple addresses, call this method multiple times.After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the streaming.Ensure that you enable the Media Push service before using this function. Call this method after joining a channel.Only hosts in the LIVE_BROADCASTING profile can call this method.If you want to retry pushing streams after a failed push, make sure to call stopRtmpStreamEx first, then call this method to retry pushing streams; otherwise, the SDK returns the same error code as the last failed push.
+  /// Agora recommends that you use the server-side Media Push function. You can call this method to push a live audio-and-video stream to the specified CDN address and set the transcoding configuration. This method can push media streams to only one CDN address at a time, so if you need to push streams to multiple addresses, call this method multiple times. After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the streaming. Ensure that you enable the Media Push service before using this function. Call this method after joining a channel. Only hosts in the LIVE_BROADCASTING profile can call this method. If you want to retry pushing streams after a failed push, make sure to call stopRtmpStreamEx first, then call this method to retry pushing streams; otherwise, the SDK returns the same error code as the last failed push.
   ///
+  /// * [url] The address of Media Push. The format is RTMP or RTMPS. The character length cannot exceed 1024 bytes. Special characters such as Chinese characters are not supported.
+  /// * [transcoding] The transcoding configuration for Media Push. See LiveTranscoding.
   /// * [connection] The connection information. See RtcConnection .
-  /// * [transcoding] The transcoding configuration for media push. See LiveTranscoding .
-  /// * [url] The address of media push. The format is RTMP or RTMPS. The character length cannot exceed 1024 bytes. Special characters such as Chinese characters are not supported.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure. -2: The URL is null or the string length is 0. -7: The SDK is not initialized before calling this method. -19: The Media Push URL is already in use, use another URL instead.
   Future<void> startRtmpStreamWithTranscodingEx(
       {required String url,
       required LiveTranscoding transcoding,
       required RtcConnection connection});

   /// Updates the transcoding configuration.
-  /// After you start pushing media streams to CDN with transcoding, you can dynamically update the transcoding configuration according to the scenario. The SDK triggers the onTranscodingUpdated callback after the transcoding configuration is updated.
+  /// Agora recommends that you use the server-side Media Push function. After you start pushing media streams to CDN with transcoding, you can dynamically update the transcoding configuration according to the scenario. The SDK triggers the onTranscodingUpdated callback after the transcoding configuration is updated.
   ///
+  /// * [transcoding] The transcoding configuration for Media Push. See LiveTranscoding.
   /// * [connection] The connection information. See RtcConnection .
-  /// * [transcoding] The transcoding configuration for media push. See LiveTranscoding .
   ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> updateRtmpTranscodingEx(
       {required LiveTranscoding transcoding,
       required RtcConnection connection});

   /// Stops pushing media streams to a CDN.
-  /// You can call this method to stop the live stream on the specified CDN address. This method can stop pushing media streams to only one CDN address at a time, so if you need to stop pushing streams to multiple addresses, call this method multiple times.After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the streaming.
+  /// Agora recommends that you use the server-side Media Push function. You can call this method to stop the live stream on the specified CDN address. This method can stop pushing media streams to only one CDN address at a time, so if you need to stop pushing streams to multiple addresses, call this method multiple times. After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the streaming.
+  ///
+  /// * [url] The address of Media Push. The format is RTMP or RTMPS. The character length cannot exceed 1024 bytes. Special characters such as Chinese characters are not supported.
   ///
-  /// * [url] The address of media push. The format is RTMP or RTMPS. The character length cannot exceed 1024 bytes. Special characters such as Chinese characters are not supported.
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> stopRtmpStreamEx(
       {required String url, required RtcConnection connection});

+  /// Starts relaying media streams across channels or updates channels for media relay.
+  /// The first successful call to this method starts relaying media streams from the source channel to the destination channels. To relay the media stream to other channels, or exit one of the current media relays, you can call this method again to update the destination channels. After a successful method call, the SDK triggers the onChannelMediaRelayStateChanged callback, and this callback returns the state of the media stream relay. Common states are as follows: If the onChannelMediaRelayStateChanged callback returns relayStateRunning (2) and relayOk (0), it means that the SDK starts relaying media streams from the source channel to the destination channel. If the onChannelMediaRelayStateChanged callback returns relayStateFailure (3), an exception occurs during the media stream relay. Call this method after joining the channel. This method takes effect only when you are a host in a live streaming channel. The relaying media streams across channels function needs to be enabled by contacting technical support. Agora does not support string user accounts in this API.
+  ///
+  /// * [configuration] The configuration of the media stream relay. See ChannelMediaRelayConfiguration.
+  /// * [connection] The connection information. See RtcConnection.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure. -1: A general error occurs (no specified reason). -2: The parameter is invalid. -7: The method call was rejected. It may be because the SDK has not been initialized successfully, or the user role is not a host. -8: Internal state error. Probably because the user is not an audience member.
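+  ///
+  /// A minimal usage sketch (an editor's illustration; the
+  /// ChannelMediaRelayConfiguration and ChannelMediaRelayInfo field names
+  /// are assumptions based on the class docs, and all channel names, uids,
+  /// and tokens are placeholders):
+  ///
+  /// ```dart
+  /// final config = ChannelMediaRelayConfiguration(
+  ///   srcInfo: const ChannelMediaRelayInfo(
+  ///       channelName: 'source', uid: 0, token: 'source-token'),
+  ///   destInfos: const [
+  ///     ChannelMediaRelayInfo(
+  ///         channelName: 'dest', uid: 1001, token: 'dest-token'),
+  ///   ],
+  ///   destCount: 1,
+  /// );
+  /// await engine.startOrUpdateChannelMediaRelayEx(
+  ///     configuration: config, connection: connection);
+  /// ```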
+  Future<void> startOrUpdateChannelMediaRelayEx(
+      {required ChannelMediaRelayConfiguration configuration,
+      required RtcConnection connection});
+
   /// Starts relaying media streams across channels. This method can be used to implement scenarios such as co-host across channels.
-  /// After a successful method call, the SDK triggers the onChannelMediaRelayStateChanged and onChannelMediaRelayEvent callbacks, and these callbacks return the state and events of the media stream relay.If the onChannelMediaRelayStateChanged callback returns relayStateRunning (2) and relayOk (0), and the onChannelMediaRelayEvent callback returns relayEventPacketSentToDestChannel (4), it means that the SDK starts relaying media streams between the source channel and the destination channel.If the onChannelMediaRelayStateChanged callback returnsrelayStateFailure (3), an exception occurs during the media stream relay.Call this method after joining the channel.This method takes effect only when you are a host in a live streaming channel.After a successful method call, if you want to call this method again, ensure that you call the stopChannelMediaRelayEx method to quit the current relay.The relaying media streams across channels function needs to be enabled.We do not support string user accounts in this API.
+  /// Deprecated: This method is deprecated. Use startOrUpdateChannelMediaRelayEx instead. After a successful method call, the SDK triggers the onChannelMediaRelayStateChanged and onChannelMediaRelayEvent callbacks, and these callbacks return the state and events of the media stream relay. If the onChannelMediaRelayStateChanged callback returns relayStateRunning (2) and relayOk (0), and the onChannelMediaRelayEvent callback returns relayEventPacketSentToDestChannel (4), it means that the SDK starts relaying media streams between the source channel and the target channel. If the onChannelMediaRelayStateChanged callback returns relayStateFailure (3), an exception occurs during the media stream relay. Call this method after joining the channel. This method takes effect only when you are a host in a live streaming channel. After a successful method call, if you want to call this method again, ensure that you call the stopChannelMediaRelayEx method to quit the current relay. The relaying media streams across channels function needs to be enabled by contacting technical support. Agora does not support string user accounts in this API.
   ///
   /// * [configuration] The configuration of the media stream relay. See ChannelMediaRelayConfiguration .
   /// * [connection] The connection information. See RtcConnection .
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure. -1: A general error occurs (no specified reason). -2: The parameter is invalid. -7: The method call was rejected. It may be because the SDK has not been initialized successfully, or the user role is not a host. -8: Internal state error. Probably because the user is not an audience member.
   Future<void> startChannelMediaRelayEx(
       {required ChannelMediaRelayConfiguration configuration,
       required RtcConnection connection});

   /// Updates the channels for media stream relay.
-  /// After the media relay starts, if you want to relay the media stream to more channels, or leave the current relay channel, you can call the updateChannelMediaRelay method.After a successful method call, the SDK triggers the onChannelMediaRelayEvent callback with the relayEventPacketUpdateDestChannel (7) state code.Call the method after successfully calling the startChannelMediaRelayEx method and receiving onChannelMediaRelayStateChanged (relayStateRunning, relayOk); otherwise, the method call fails.
+  /// Deprecated: This method is deprecated. Use startOrUpdateChannelMediaRelayEx instead. After the media relay starts, if you want to relay the media stream to more channels, or leave the current relay channel, you can call this method. After a successful method call, the SDK triggers the onChannelMediaRelayEvent callback with the relayEventPacketUpdateDestChannel (7) state code. Call the method after successfully calling the startChannelMediaRelayEx method and receiving onChannelMediaRelayStateChanged (relayStateRunning, relayOk); otherwise, the method call fails.
   ///
-  /// * [connection] The connection information. See RtcConnection .
   /// * [configuration] The configuration of the media stream relay. See ChannelMediaRelayConfiguration .
+  /// * [connection] The connection information. See RtcConnection .
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> updateChannelMediaRelayEx(
       {required ChannelMediaRelayConfiguration configuration,
       required RtcConnection connection});

-  /// Stops the media stream relay. Once the relay stops, the host quits all the destination channels.
+  /// Stops the media stream relay. Once the relay stops, the host quits all the target channels.
   /// After a successful method call, the SDK triggers the onChannelMediaRelayStateChanged callback. If the callback reports relayStateIdle (0) and relayOk (0), the host successfully stops the relay. If the method call fails, the SDK triggers the onChannelMediaRelayStateChanged callback with the relayErrorServerNoResponse (2) or relayErrorServerConnectionLost (8) status code. You can call the leaveChannel method to leave the channel, and the media stream relay automatically stops.
   ///
   /// * [connection] The connection information. See RtcConnection .
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> stopChannelMediaRelayEx(RtcConnection connection);

-  /// Pauses the media stream relay to all destination channels.
-  /// After the cross-channel media stream relay starts, you can call this method to pause relaying media streams to all destination channels; after the pause, if you want to resume the relay, call resumeAllChannelMediaRelay .After a successful method call, the SDK triggers the onChannelMediaRelayEvent callback to report whether the media stream relay is successfully paused.Call this method after startChannelMediaRelayEx .
+  /// Pauses the media stream relay to all target channels.
+  /// After the cross-channel media stream relay starts, you can call this method to pause relaying media streams to all target channels; after the pause, if you want to resume the relay, call resumeAllChannelMediaRelay. Call this method after startOrUpdateChannelMediaRelayEx.
   ///
   /// * [connection] The connection information. See RtcConnection .
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> pauseAllChannelMediaRelayEx(RtcConnection connection);

-  /// Resumes the media stream relay to all destination channels.
-  /// After calling the pauseAllChannelMediaRelayEx method, you can call this method to resume relaying media streams to all destination channels.After a successful method call, the SDK triggers the onChannelMediaRelayEvent callback to report whether the media stream relay is successfully resumed.Call this method after pauseAllChannelMediaRelayEx .
+  /// Resumes the media stream relay to all target channels.
+  /// After calling the pauseAllChannelMediaRelayEx method, you can call this method to resume relaying media streams to all target channels. Call this method after pauseAllChannelMediaRelayEx.
   ///
   /// * [connection] The connection information. See RtcConnection .
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> resumeAllChannelMediaRelayEx(RtcConnection connection);

   /// @nodoc
@@ -437,50 +542,65 @@ abstract class RtcEngineEx implements RtcEngine {
   Future<UserInfo> getUserInfoByUidEx(
       {required int uid, required RtcConnection connection});

-  /// @nodoc
-  Future<void> setVideoProfileEx(
-      {required int width,
-      required int height,
-      required int frameRate,
-      required int bitrate});
   /// Enables or disables dual-stream mode on the sender side.
-  /// This method is applicable to all types of streams from the sender, including but not limited to video streams collected from cameras, screen sharing streams, and custom-collected video streams.If you need to enable dual video streams in a multi-channel scenario, you can call the enableDualStreamModeEx method.You can call this method either before or after joining a channel.After you enable dual-stream mode, you can call setRemoteVideoStreamType to choose to receive either the high-quality video stream or the low-quality video stream on the subscriber side.You can call this method to enable or disable the dual-stream mode on the publisher side. Dual streams are a pairing of a high-quality video stream and a low-quality video stream:High-quality video stream: High bitrate, high resolution.Low-quality video stream: Low bitrate, low resolution.
-  ///
-  /// * [connection] The connection information. See RtcConnection .
-  /// * [streamConfig] The configuration of the low-quality video stream. See SimulcastStreamConfig .
+  /// After you enable dual-stream mode, you can call setRemoteVideoStreamType to choose to receive either the high-quality video stream or the low-quality video stream on the subscriber side. You can call this method to enable or disable the dual-stream mode on the publisher side. Dual streams are a pairing of a high-quality video stream and a low-quality video stream: High-quality video stream: High bitrate, high resolution. Low-quality video stream: Low bitrate, low resolution. This method is applicable to all types of streams from the sender, including but not limited to video streams collected from cameras, screen sharing streams, and custom-collected video streams.
   ///
   /// * [enabled] Whether to enable dual-stream mode:
   /// true: Enable dual-stream mode.
   /// false: (Default) Disable dual-stream mode.
+  /// * [streamConfig] The configuration of the low-quality video stream. See SimulcastStreamConfig.
+  /// * [connection] The connection information. See RtcConnection.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
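+  ///
+  /// A minimal usage sketch (an editor's illustration; the low-quality
+  /// stream dimensions are arbitrary, and the SimulcastStreamConfig field
+  /// names are assumptions based on the class docs):
+  ///
+  /// ```dart
+  /// await engine.enableDualStreamModeEx(
+  ///   enabled: true,
+  ///   streamConfig: const SimulcastStreamConfig(
+  ///     dimensions: VideoDimensions(width: 160, height: 120),
+  ///   ),
+  ///   connection: connection,
+  /// );
+  /// ```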
   Future<void> enableDualStreamModeEx(
       {required bool enabled,
       required SimulcastStreamConfig streamConfig,
       required RtcConnection connection});

-  /// Sets dual-stream mode on the sender side.
-  /// The SDK enables the low-quality video stream auto mode on the sender by default, which is equivalent to calling this method and setting the mode to autoSimulcastStream. If you want to modify this behavior, you can call this method and modify the mode to disableSimulcastStream(never sends low-quality video streams) or enableSimulcastStream(sends low-quality video streams).The difference and between this method and enableDualStreamModeEx is as follows:When calling this method and setting mode to disableSimulcastStream, it has the same effect as enableDualStreamModeEx(false).When calling this method and setting mode to enableSimulcastStream, it has the same effect as enableDualStreamModeEx(true).Both methods can be called before and after joining a channel. If they are used at the same time, the settings in the method called later shall prevail.
+  /// Sets the dual-stream mode on the sender side.
+  /// The SDK enables the low-quality video stream auto mode on the sender by default, which is equivalent to calling this method and setting the mode to autoSimulcastStream. If you want to modify this behavior, you can call this method and modify the mode to disableSimulcastStream (never send low-quality video streams) or enableSimulcastStream (always send low-quality video streams). The difference and connection between this method and enableDualStreamModeEx is as follows: When calling this method and setting mode to disableSimulcastStream, it has the same effect as enableDualStreamModeEx(false). When calling this method and setting mode to enableSimulcastStream, it has the same effect as enableDualStreamModeEx(true). Both methods can be called before and after joining a channel. If both methods are used, the settings in the method called later take precedence.
   ///
-  /// * [connection] The connection information. See RtcConnection .
+  /// * [mode] The mode in which the video stream is sent. See SimulcastStreamMode.
   /// * [streamConfig] The configuration of the low-quality video stream. See SimulcastStreamConfig .
+  /// * [connection] The connection information. See RtcConnection.
   ///
-  /// * [mode] The mode in which the video stream is sent. See SimulcastStreamMode .
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setDualStreamModeEx(
       {required SimulcastStreamMode mode,
       required SimulcastStreamConfig streamConfig,
       required RtcConnection connection});

-  @override
-  Future<void> enableWirelessAccelerate(bool enabled);
+  /// @nodoc
+  Future<void> setHighPriorityUserListEx(
+      {required List<int> uidList,
+      required int uidNum,
+      required StreamFallbackOptions option,
+      required RtcConnection connection});

   /// Takes a snapshot of a video stream.
   /// The method is asynchronous, and the SDK has not taken the snapshot when the method call returns. After a successful method call, the SDK triggers the onSnapshotTaken callback to report whether the snapshot is successfully taken, as well as the details for that snapshot. This method takes a snapshot of a video stream from the specified user, generates a JPG image, and saves it to the specified path. Call this method after the joinChannelEx method. This method takes a snapshot of the published video stream specified in ChannelMediaOptions. If the user's video has been preprocessed, for example, watermarked or beautified, the resulting snapshot includes the pre-processing effect.
   ///
-  /// * [filePath] The local path (including filename extensions) of the snapshot. For example:Windows: C:\Users\\AppData\Local\Agora\\example.jpgiOS: /App Sandbox/Library/Caches/example.jpgmacOS: ~/Library/Logs/example.jpgAndroid: /storage/emulated/0/Android/data//files/example.jpgEnsure that the path you specify exists and is writable.
-  /// * [uid] The user ID. Set uid as 0 if you want to take a snapshot of the local user's video.
   /// * [connection] The connection information. See RtcConnection .
+  /// * [uid] The user ID. Set uid as 0 if you want to take a snapshot of the local user's video.
+  /// * [filePath] The local path (including filename extensions) of the snapshot. For example: Windows: C:\Users\\AppData\Local\Agora\\example.jpg; iOS: /App Sandbox/Library/Caches/example.jpg; macOS: ~/Library/Logs/example.jpg; Android: /storage/emulated/0/Android/data//files/example.jpg. Ensure that the path you specify exists and is writable.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
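+  ///
+  /// A minimal usage sketch (an editor's illustration; the path is a
+  /// placeholder and must exist and be writable on the target platform):
+  ///
+  /// ```dart
+  /// await engine.takeSnapshotEx(
+  ///   connection: connection,
+  ///   uid: 0, // 0 = snapshot of the local user's video
+  ///   filePath: '/path/to/snapshots/example.jpg',
+  /// );
+  /// // The result arrives asynchronously via the onSnapshotTaken callback.
+  /// ```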
   Future<void> takeSnapshotEx(
       {required RtcConnection connection,
       required int uid,
       required String filePath});
+
+  /// Enables tracing the video frame rendering process.
+  /// By default, the SDK starts tracing the video rendering event automatically when the local user successfully joins the channel. You can call this method at an appropriate time according to the actual application scenario to customize the tracing process.
+  /// After the local user leaves the current channel, the SDK automatically resets the time point to the next time when the user successfully joins the channel.
+  /// The SDK starts tracing the rendering status of the video frames in the channel from the moment this method is successfully called and reports information about the event through the onVideoRenderingTracingResult callback.
+  ///
+  /// * [connection] The connection information. See RtcConnection.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
+  Future<void> startMediaRenderingTracingEx(RtcConnection connection);
 }
diff --git a/lib/src/agora_rtc_engine_ext.dart b/lib/src/agora_rtc_engine_ext.dart
index 451e4c75c..fa94a2da0 100644
--- a/lib/src/agora_rtc_engine_ext.dart
+++ b/lib/src/agora_rtc_engine_ext.dart
@@ -7,7 +7,12 @@ import 'impl/media_player_impl.dart';

 /// @nodoc
 extension RtcEngineExt on RtcEngine {
-  /// @nodoc
+  /// Obtains the actual absolute path of the Asset through the relative path of the Asset.
+  ///
+  /// * [assetPath] The flutter -> assets field configured in the pubspec.yaml file.
+  ///
+  /// Returns
+  /// The actual path of the Asset.
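+  ///
+  /// A minimal usage sketch (an editor's illustration; assumes an asset
+  /// declared under flutter -> assets in pubspec.yaml, e.g. assets/effect.mp3):
+  ///
+  /// ```dart
+  /// final path = await engine.getAssetAbsolutePath('assets/effect.mp3');
+  /// // `path` can then be passed to APIs that expect a local file path.
+  /// ```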
   Future<String?> getAssetAbsolutePath(String assetPath) async {
     final impl = this as RtcEngineImpl;
     final p = await impl.engineMethodChannel
@@ -31,26 +36,26 @@ class AgoraRtcException implements Exception {
   String toString() => 'AgoraRtcException($code, $message)';
 }

-/// Creates the RtcEngine object.
-/// Currently, the Agora RTC SDK v4.0.0 supports creating only one RtcEngine object for an app.
+/// Creates one RtcEngine object.
+/// Currently, the Agora RTC SDK v6.x supports creating only one RtcEngine object for each app.
 ///
 /// Returns
-/// RtcEngine object.
+/// One RtcEngine object.
 RtcEngine createAgoraRtcEngine() {
   return impl.RtcEngineImpl.create();
 }

-/// Creates an RtcEngineEx object.
-/// Currentluy, the Agora RTC v4.x SDK supports creating only one RtcEngineEx object for each app.
+/// Creates one RtcEngineEx object.
+/// Currently, the Agora RTC v6.x SDK supports creating only one RtcEngineEx object for each app.
 ///
 /// Returns
-/// An RtcEngineEx object.
+/// One RtcEngineEx object.
 RtcEngineEx createAgoraRtcEngineEx() {
   return impl.RtcEngineImpl.create();
 }

-/// Gets an MediaPlayerCacheManager instance.
-/// Make sure the RtcEngine is initialized before you call this method.
+/// Gets one MediaPlayerCacheManager instance.
+/// When you successfully call this method, the SDK returns a media player cache manager instance. The cache manager is a singleton pattern, so multiple calls to this method return the same instance. Make sure the RtcEngine is initialized before you call this method.
 ///
 /// Returns
 /// The MediaPlayerCacheManager instance.
diff --git a/lib/src/agora_spatial_audio.dart b/lib/src/agora_spatial_audio.dart
index a89eb7b63..9d5ce02ae 100644
--- a/lib/src/agora_spatial_audio.dart
+++ b/lib/src/agora_spatial_audio.dart
@@ -90,21 +90,30 @@ abstract class BaseSpatialAudioEngine {
   Future<void> release();

   /// Sets the maximum number of streams that a user can receive in a specified audio reception range.
-  /// If the number of receivable streams exceeds the set value, the local user receives the maxCount streams that are closest to the local user. If there are users who belong to the same team as the local user in the room, the local user receives the audio of the teammates first. For example, when maxCount is set to 3, if there are five remote users in the room, two of whom belong to the same team as the local user, and three of whom belong to different teams but are within the audio reception range of the local user, the local user can hear the two teammates and the one user from a different team closest to the local user.
+  /// If the number of receivable streams exceeds the set value, the local user receives the maxCount streams that are closest to the local user.
   ///
-  /// * [maxCount] The maximum number of streams that a user can receive within a specified audio reception range.
+  /// * [maxCount] The maximum number of streams that a user can receive within a specified audio reception range. The value of this parameter should be ≤ 16, and the default value is 10.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setMaxAudioRecvCount(int maxCount);

   /// Sets the audio reception range of the local user.
   /// After the setting is successful, the local user can only hear the remote users within the setting range or belonging to the same team. You can call this method at any time to update the audio reception range.
   ///
-  /// * [range] The maximum audio reception range. The unit is meters. The value must be greater than 0.
+  /// * [range] The maximum audio reception range. The unit is meters. The value of this parameter must be greater than 0, and the default value is 20.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setAudioRecvRange(double range);

   /// Sets the length (in meters) of the game engine distance per unit.
   /// In a game engine, the unit of distance is customized, while in the Agora spatial audio algorithm, distance is measured in meters. By default, the SDK converts the game engine distance per unit to one meter. You can call this method to convert the game engine distance per unit to a specified number of meters.
   ///
-  /// * [unit] The number of meters that the game engine distance per unit is equal to. This parameter must be greater than 0.00. For example, setting unit as 2.00 means the game engine distance per unit equals 2 meters.The larger the value is, the faster the sound heard by the local user attenuates when the remote user moves far away from the local user.
+  /// * [unit] The number of meters that the game engine distance per unit is equal to. The value of this parameter must be greater than 0.00, and the default value is 1.00. For example, setting unit as 2.00 means the game engine distance per unit equals 2 meters. The larger the value is, the faster the sound heard by the local user attenuates when the remote user moves far away from the local user.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setDistanceUnit(double unit);

   /// Updates the spatial position of the local user.
@@ -114,6 +123,9 @@ abstract class BaseSpatialAudioEngine {
   /// * [axisForward] The unit vector of the x axis in the coordinate system. This parameter is an array of length 3, and the three values represent the front, right, and top coordinates in turn.
   /// * [axisRight] The unit vector of the y axis in the coordinate system. This parameter is an array of length 3, and the three values represent the front, right, and top coordinates in turn.
   /// * [axisUp] The unit vector of the z axis in the coordinate system. This parameter is an array of length 3, and the three values represent the front, right, and top coordinates in turn.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
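+  ///
+  /// A minimal usage sketch (an editor's illustration; `localSpatial` is an
+  /// assumed LocalSpatialAudioEngine instance, and the origin position and
+  /// axis vectors below are placeholders for your game engine's coordinates):
+  ///
+  /// ```dart
+  /// await localSpatial.updateSelfPosition(
+  ///   position: [0.0, 0.0, 0.0],
+  ///   axisForward: [1.0, 0.0, 0.0],
+  ///   axisRight: [0.0, 1.0, 0.0],
+  ///   axisUp: [0.0, 0.0, 1.0],
+  /// );
+  /// ```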
   Future<void> updateSelfPosition(
       {required List<double> position,
       required List<double> axisForward,
@@ -133,6 +145,9 @@ abstract class BaseSpatialAudioEngine {
   ///
   /// * [playerId] The ID of the media player.
   /// * [positionInfo] The spatial position of the media player. See RemoteVoicePositionInfo .
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> updatePlayerPositionInfo(
       {required int playerId, required RemoteVoicePositionInfo positionInfo});

@@ -140,15 +155,21 @@ abstract class BaseSpatialAudioEngine {
   Future<void> setParameters(String params);

   /// Stops or resumes publishing the local audio stream.
-  /// This method does not affect any ongoing audio recording, because it does not disable the audio capture device.Call this method after joinChannel [2/2] .When using the spatial audio effect, if you need to set whether to publish the local audio stream, Agora recommends calling this method instead of the muteLocalAudioStream method under RtcEngine .
+  /// This method does not affect any ongoing audio recording, because it does not disable the audio capture device. Call this method after joinChannel. When using the spatial audio effect, if you need to set whether to publish the local audio stream, Agora recommends calling this method instead of the muteLocalAudioStream method in RtcEngine.
+  ///
+  /// * [mute] Whether to stop publishing the local audio stream: true: Stop publishing the local audio stream. false: Publish the local audio stream.
   ///
-  /// * [mute] Whether to stop publishing the local audio stream.true: Stop publishing the local audio stream.false: Publish the local audio stream.
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> muteLocalAudioStream(bool mute);

   /// Stops or resumes subscribing to the audio streams of all remote users.
-  /// After successfully calling this method, the local user stops or resumes subscribing to the audio streams of all remote users, including all subsequent users.Call this method after joinChannel [2/2] .When using the spatial audio effect, if you need to set whether to stop subscribing to the audio streams of all remote users, Agora recommends calling this method instead of the muteAllRemoteAudioStreams method under RtcEngine .After calling this method, you need to call updateSelfPosition and updateRemotePosition to update the spatial location of the local user and the remote user; otherwise, the settings in this method do not take effect.
+  /// After successfully calling this method, the local user stops or resumes subscribing to the audio streams of all remote users, including all subsequent users. Call this method after joinChannel. When using the spatial audio effect, if you need to set whether to stop subscribing to the audio streams of all remote users, Agora recommends calling this method instead of the muteAllRemoteAudioStreams method in RtcEngine. After calling this method, you need to call updateSelfPosition and updateRemotePosition to update the spatial location of the local user and the remote user; otherwise, the settings in this method do not take effect.
   ///
-  /// * [mute] Whether to stop subscribing to the audio streams of all remote users:true: Stops subscribing to the audio streams of all remote users.false: Subscribe to the audio streams of all remote users.
+  /// * [mute] Whether to stop subscribing to the audio streams of all remote users: true: Stop subscribing to the audio streams of all remote users. false: Subscribe to the audio streams of all remote users.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> muteAllRemoteAudioStreams(bool mute);

   /// Sets the sound insulation area.
@@ -156,6 +177,9 @@ abstract class BaseSpatialAudioEngine {
   ///
   /// * [zones] Sound insulation area settings. See SpatialAudioZone.
   /// * [zoneCount] The number of sound insulation areas.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
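+  ///
+  /// A minimal usage sketch (an editor's illustration; constructing the
+  /// SpatialAudioZone entries is omitted, and `localSpatial` is the assumed
+  /// LocalSpatialAudioEngine instance from the snippet above):
+  ///
+  /// ```dart
+  /// // `zones` is a List<SpatialAudioZone> you have configured elsewhere.
+  /// await localSpatial.setZones(zones: zones, zoneCount: zones.length);
+  /// ```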
   Future<void> setZones(
       {required List<SpatialAudioZone> zones, required int zoneCount});
@@ -163,17 +187,23 @@ abstract class BaseSpatialAudioEngine {
   ///
   /// * [playerId] The ID of the media player.
   /// * [attenuation] The sound attenuation coefficient of the remote user or media player. The value range is [0,1]. The values are as follows: 0: Broadcast mode, where the volume and timbre are not attenuated with distance, and the volume and timbre heard by local users do not change regardless of distance. (0,0.5): Weak attenuation mode, that is, the volume and timbre are only weakly attenuated during the propagation process, and the sound can travel farther than the real environment. 0.5: (Default) Simulates the attenuation of the volume in the real environment; the effect is equivalent to not setting the speaker_attenuation parameter. (0.5,1]: Strong attenuation mode, that is, the volume and timbre attenuate rapidly during the propagation process.
-  /// * [forceSet] Whether to force the sound attenuation effect of the media player:true: Force attenuation to set the attenuation of the user. At this time, the attenuation coefficient of the sound insulation area set in the audioAttenuation in the SpatialAudioZone does not take effect for the user.false: Do not force attenuation e to set the user's sound attenuationffect, as shown in the following two cases.If the sound source and listener are inside and outside the sound isolation area, the sound attenuation effect is determined by the audioAttenuation in SpatialAudioZone.If the sound source and the listener are in the same sound insulation area or outside the same sound insulation area, the sound attenuation effect is determined by attenuation in this method.
+  /// * [forceSet] Whether to force the sound attenuation effect of the media player: true: Force attenuation to set the attenuation of the media player. At this time, the attenuation coefficient of the sound insulation area set in the audioAttenuation in the SpatialAudioZone does not take effect for the media player. false: Do not force attenuation to set the sound attenuation effect of the media player, as shown in the following two cases. If the sound source and listener are inside and outside the sound isolation area, the sound attenuation effect is determined by the audioAttenuation in SpatialAudioZone. If the sound source and the listener are in the same sound insulation area or outside the same sound insulation area, the sound attenuation effect is determined by attenuation in this method.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setPlayerAttenuation(
       {required int playerId,
       required double attenuation,
       required bool forceSet});

   /// Stops or resumes subscribing to the audio stream of a specified user.
-  /// Call this method after joinChannel [2/2] .When using the spatial audio effect, if you need to set whether to stop subscribing to the audio stream of a specified user, Agora recommends calling this method instead of the muteRemoteAudioStream method under RtcEngine .
+  /// Call this method after joinChannel. When using the spatial audio effect, if you need to set whether to stop subscribing to the audio stream of a specified user, Agora recommends calling this method instead of the muteRemoteAudioStream method in RtcEngine.
   ///
   /// * [uid] The user ID. This parameter must be the same as the user ID passed in when the user joined the channel.
   /// * [mute] Whether to subscribe to the specified remote user's audio stream. true: Stop subscribing to the audio stream of the specified user. false: (Default) Subscribe to the audio stream of the specified user. The SDK decides whether to subscribe according to the distance between the local user and the remote user.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
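+  ///
+  /// A minimal usage sketch (an editor's illustration; uid 1001 is a
+  /// placeholder):
+  ///
+  /// ```dart
+  /// // Stop subscribing to user 1001's audio while keeping other streams.
+  /// await localSpatial.muteRemoteAudioStream(uid: 1001, mute: true);
+  /// ```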
   Future<void> muteRemoteAudioStream({required int uid, required bool mute});
 }

@@ -182,13 +212,19 @@ abstract class LocalSpatialAudioEngine implements BaseSpatialAudioEngine {
   /// Initializes LocalSpatialAudioEngine .
   /// Before calling other methods of the LocalSpatialAudioEngine class, you need to call this method to initialize LocalSpatialAudioEngine. The SDK supports creating only one LocalSpatialAudioEngine instance for an app.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> initialize();

   /// Updates the spatial position of the specified remote user.
-  /// After successfully calling this method, the SDK calculates the spatial audio parameters based on the relative position of the local and remote user.Call this method after the joinChannel [2/2] method.
+  /// After successfully calling this method, the SDK calculates the spatial audio parameters based on the relative position of the local and remote user. Call this method after joinChannel.
   ///
   /// * [uid] The user ID. This parameter must be the same as the user ID passed in when the user joined the channel.
   /// * [posInfo] The spatial position of the remote user. See RemoteVoicePositionInfo .
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
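+  ///
+  /// A minimal usage sketch (an editor's illustration; the
+  /// RemoteVoicePositionInfo field names follow the class docs, and the
+  /// coordinates are placeholders):
+  ///
+  /// ```dart
+  /// await localSpatial.updateRemotePosition(
+  ///   uid: 1001,
+  ///   posInfo: const RemoteVoicePositionInfo(
+  ///     position: [5.0, 0.0, 0.0], // 5 m in front of the listener
+  ///     forward: [-1.0, 0.0, 0.0],
+  ///   ),
+  /// );
+  /// ```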
   Future<void> updateRemotePosition(
       {required int uid, required RemoteVoicePositionInfo posInfo});

@@ -202,6 +238,9 @@ abstract class LocalSpatialAudioEngine implements BaseSpatialAudioEngine {
   /// After successfully calling this method, the local user no longer hears the specified remote user. After leaving the channel, to avoid wasting resources, you can also call this method to delete the spatial position of the specified remote user.
   ///
   /// * [uid] The user ID. This parameter must be the same as the user ID passed in when the user joined the channel.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> removeRemotePosition(int uid);

   /// @nodoc
@@ -210,6 +249,9 @@ abstract class LocalSpatialAudioEngine implements BaseSpatialAudioEngine {
   /// Removes the spatial positions of all remote users.
   /// After successfully calling this method, the local user no longer hears any remote users. After leaving the channel, to avoid wasting resources, you can also call this method to delete the spatial positions of all remote users.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> clearRemotePositions();

   /// @nodoc
@@ -219,7 +261,10 @@ abstract class LocalSpatialAudioEngine implements BaseSpatialAudioEngine {
   ///
   /// * [uid] The user ID. This parameter must be the same as the user ID passed in when the user joined the channel.
   /// * [attenuation] For the user's sound attenuation coefficient, the value range is [0,1]. The values are as follows: 0: Broadcast mode, where the volume and timbre are not attenuated with distance, and the volume and timbre heard by local users do not change regardless of distance. (0,0.5): Weak attenuation mode, that is, the volume and timbre are only weakly attenuated during the propagation process, and the sound can travel farther than the real environment. 0.5: (Default) Simulates the attenuation of the volume in the real environment; the effect is equivalent to not setting the speaker_attenuation parameter. (0.5,1]: Strong attenuation mode, that is, the volume and timbre attenuate rapidly during the propagation process.
-  /// * [forceSet] Whether to force the user's sound attenuation effect:true: Force attenuation to set the sound attenuation of the user. At this time, the attenuation coefficient of the sound insulation area set in the audioAttenuation in the SpatialAudioZone does not take effect for the user.If the sound source and listener are inside and outside the sound isolation area, the sound attenuation effect is determined by the audioAttenuation in SpatialAudioZone.If the sound source and the listener are in the same sound insulation area or outside the same sound insulation area, the sound attenuation effect is determined by attenuation in this method.false: Do not force attenuation to set the user's sound attenuation effect, as shown in the following two cases.
+  /// * [forceSet] Whether to force the user's sound attenuation effect: true: Force attenuation to set the sound attenuation of the user. At this time, the attenuation coefficient of the sound insulation area set in the audioAttenuation of the SpatialAudioZone does not take effect for the user. false: Do not force attenuation to set the user's sound attenuation effect, as shown in the following two cases. If the sound source and listener are inside and outside the sound isolation area, the sound attenuation effect is determined by the audioAttenuation in SpatialAudioZone. If the sound source and the listener are in the same sound insulation area or outside the same sound insulation area, the sound attenuation effect is determined by attenuation in this method.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setRemoteAudioAttenuation(
       {required int uid, required double attenuation, required bool forceSet});
 }
diff --git a/lib/src/audio_device_manager.dart b/lib/src/audio_device_manager.dart
index f90a327d6..1c98b8a50 100644
--- a/lib/src/audio_device_manager.dart
+++ b/lib/src/audio_device_manager.dart
@@ -25,29 +25,37 @@ extension MaxDeviceIdLengthTypeExt on MaxDeviceIdLengthType {
 /// Audio device management methods.
 abstract class AudioDeviceManager {
   /// Enumerates the audio playback devices.
+  /// This method is for Windows and macOS only.
   ///
   /// Returns
   /// Success: Returns an AudioDeviceInfo array, which includes all the audio playback devices. Failure: An empty array.
   Future<List<AudioDeviceInfo>> enumeratePlaybackDevices();

   /// Enumerates the audio capture devices.
+  /// This method is for Windows and macOS only.
   ///
   /// Returns
   /// Success: An AudioDeviceInfo array, which includes all the audio capture devices. Failure: An empty array.
   Future<List<AudioDeviceInfo>> enumerateRecordingDevices();

   /// Sets the audio playback device.
+  /// This method is for Windows and macOS only. You can call this method to change the audio route currently being used, but this does not change the default audio route. For example, if the default audio route is speaker 1 and you call this method to set the audio route as speaker 2 before joining a channel and then start a device test, the SDK conducts the device test on speaker 2. After the device test is completed and you join a channel, the SDK still uses speaker 1, the default audio route.
   ///
   /// * [deviceId] The ID of the specified audio playback device. You can get the device ID by calling enumeratePlaybackDevices . Connecting or disconnecting the audio device does not change the value of deviceId.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setPlaybackDevice(String deviceId);

   /// Retrieves the audio playback device associated with the device ID.
+  /// This method is for Windows and macOS only.
   ///
   /// Returns
   /// The current audio playback device.
   Future<String> getPlaybackDevice();

   /// Retrieves the audio playback device associated with the device ID.
+  /// This method is for Windows and macOS only.
   ///
   /// Returns
   /// An AudioDeviceInfo object, which contains the ID and device name of the audio devices.
@@ -59,39 +67,52 @@ abstract class AudioDeviceManager {
   /// @nodoc
   Future<int> getPlaybackDeviceVolume();

-  /// Sets the audio recording device.
+  /// Sets the audio capture device.
+  /// This method is for Windows and macOS only. You can call this method to change the audio route currently being used, but this does not change the default audio route. For example, if the default audio route is the microphone and you call this method to set the audio route as Bluetooth earphones before joining a channel and then start a device test, the SDK conducts the device test on the Bluetooth earphones. After the device test is completed and you join a channel, the SDK still uses the microphone for audio capturing.
+  ///
+  /// * [deviceId] The ID of the audio capture device. You can get the device ID by calling enumerateRecordingDevices . Connecting or disconnecting the audio device does not change the value of deviceId.
   ///
-  /// * [deviceId] The ID of the audio recording device. You can get the device ID by calling enumerateRecordingDevices . Plugging or unplugging the audio device does not change the value of deviceId.
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
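+  ///
+  /// A minimal usage sketch (an editor's illustration; `adm` is assumed to
+  /// be an AudioDeviceManager instance, e.g. obtained via
+  /// engine.getAudioDeviceManager(), and deviceId is nullable on
+  /// AudioDeviceInfo):
+  ///
+  /// ```dart
+  /// final devices = await adm.enumerateRecordingDevices();
+  /// if (devices.isNotEmpty) {
+  ///   // Route capture to the first enumerated device.
+  ///   await adm.setRecordingDevice(devices.first.deviceId!);
+  /// }
+  /// ```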
   Future<void> setRecordingDevice(String deviceId);

   /// Gets the current audio recording device.
+  /// This method is for Windows and macOS only.
   ///
   /// Returns
   /// The current audio recording device.
   Future<String> getRecordingDevice();

   /// Retrieves the volume of the audio recording device.
+  /// This method is for Windows and macOS only.
   ///
   /// Returns
   /// An AudioDeviceInfo object, which includes the device ID and device name.
   Future<AudioDeviceInfo> getRecordingDeviceInfo();

-  /// Sets the volume of the audio recording device.
+  /// Sets the volume of the audio capture device.
+  /// This method is for Windows and macOS only.
+  ///
+  /// * [volume] The volume of the audio recording device. The value range is [0,255]. 0 means no sound, 255 means maximum volume.
   ///
-  /// * [volume] The volume of the audio recording device. The value range is [0,255].
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setRecordingDeviceVolume(int volume);

   /// @nodoc
   Future<int> getRecordingDeviceVolume();

   /// Sets the loopback device.
-  /// The SDK uses the current playback device as the loopback device by default. If you want to specify another audio device as the loopback device, call this method, and set deviceId to the loopback device you want to specify.This method applies to Windows only.The scenarios where this method is applicable are as follows:Use app A to play music through a Bluetooth headset; when using app B for a video conference, play through the speakers.If the loopback device is set as the Bluetooth headset, the SDK publishes the music in app A to the remote end.If the loopback device is set as the speaker, the SDK does not publish the music in app A to the remote end.If you set the loopback device as the Bluetooth headset, and then use a wired headset to play the music in app A, you need to call this method again, set the loopback device as the wired headset, and the SDK continues to publish the music in app A to remote end.
+  /// The SDK uses the current playback device as the loopback device by default. If you want to specify another audio device as the loopback device, call this method, and set deviceId to the loopback device you want to specify. You can call this method to change the audio route currently being used, but this does not change the default audio route. For example, if the default audio route is the microphone and you call this method to set the audio route as a sound card before joining a channel and then start a device test, the SDK conducts the device test on the sound card. After the device test is completed and you join a channel, the SDK still uses the microphone for audio capturing. This method is for Windows and macOS only. The scenarios where this method is applicable are as follows: Use app A to play music through a Bluetooth headset; when using app B for a video conference, play through the speakers. If the loopback device is set as the Bluetooth headset, the SDK publishes the music in app A to the remote end. If the loopback device is set as the speaker, the SDK does not publish the music in app A to the remote end. If you set the loopback device as the Bluetooth headset, and then use a wired headset to play the music in app A, you need to call this method again, set the loopback device as the wired headset, and the SDK continues to publish the music in app A to the remote end.
   ///
   /// * [deviceId] Specifies the loopback device of the SDK. You can get the device ID by calling enumeratePlaybackDevices . Connecting or disconnecting the audio device does not change the value of deviceId. The maximum length is MaxDeviceIdLengthType .
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> setLoopbackDevice(String deviceId);

   /// Gets the current loopback device.
-  /// This method applies to Windows only.
+  /// This method is for Windows and macOS only.
   ///
   /// Returns
   /// The ID of the current loopback device.
@@ -110,49 +131,78 @@ abstract class AudioDeviceManager {
   Future<bool> getRecordingDeviceMute();

   /// Starts the audio playback device test.
-  /// This method tests whether the audio playback device works properly. Once a user starts the test, the SDK plays an audio file specified by the user. If the user can hear the audio, the playback device works properly.After calling this method, the SDK triggers the onAudioVolumeIndication callback every 100 ms, reporting uid = 1 and the volume information of the playback device.Ensure that you call this method before joining a channel.
+  /// This method tests whether the audio playback device works properly. Once a user starts the test, the SDK plays an audio file specified by the user. If the user can hear the audio, the playback device works properly. After calling this method, the SDK triggers the onAudioVolumeIndication callback every 100 ms, reporting uid = 1 and the volume information of the playback device. Ensure that you call this method before joining a channel. This method is for Windows and macOS only.
   ///
   /// * [testAudioFilePath] The path of the audio file. The data format is string in UTF-8. Supported file formats: wav, mp3, m4a, and aac. Supported file sample rates: 8000, 16000, 32000, 44100, and 48000 Hz.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> startPlaybackDeviceTest(String testAudioFilePath);

   /// Stops the audio playback device test.
-  /// This method stops the audio playback device test. You must call this method to stop the test after calling the startPlaybackDeviceTest method.Ensure that you call this method before joining a channel.
+  /// This method stops the audio playback device test. You must call this method to stop the test after calling the startPlaybackDeviceTest method. This method is for Windows and macOS only. Ensure that you call this method before joining a channel.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
   Future<void> stopPlaybackDeviceTest();

   /// Starts the audio capture device test.
   /// This method tests whether the audio capture device works properly. After calling this method, the SDK triggers the onAudioVolumeIndication callback at the time interval set in this method, which reports uid = 0 and the volume information of the capturing device. This method is for Windows and macOS only. Ensure that you call this method before joining a channel.
   ///
-  /// * [indicationInterval] The time interval (ms) at which the SDK triggers the onAudioVolumeIndication callback. Agora recommends a setting greater than 200 ms. This value must not be less than 10 ms; otherwise, you can not receive the onAudioVolumeIndication callback.
+  /// * [indicationInterval] The time interval (ms) at which the SDK triggers the onAudioVolumeIndication callback. Agora recommends setting a value greater than 200 ms. This value must not be less than 10 ms; otherwise, you cannot receive the onAudioVolumeIndication callback.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when the method call fails, the AgoraRtcException exception is thrown and you need to catch the exception and handle it accordingly. < 0: Failure.
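+  ///
+  /// A minimal usage sketch (an editor's illustration; `adm` is the assumed
+  /// AudioDeviceManager instance from the snippet above):
+  ///
+  /// ```dart
+  /// // Probe the capture device; volumes arrive via onAudioVolumeIndication.
+  /// await adm.startRecordingDeviceTest(200);
+  /// await Future<void>.delayed(const Duration(seconds: 5));
+  /// await adm.stopRecordingDeviceTest();
+  /// ```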
/// Starts the audio capture device test.
/// This method tests whether the audio capture device works properly. After calling this method, the SDK triggers the onAudioVolumeIndication callback at the time interval set in this method, which reports uid = 0 and the volume information of the capturing device. This method is for Windows and macOS only. Ensure that you call this method before joining a channel.
///
- /// * [indicationInterval] The time interval (ms) at which the SDK triggers the onAudioVolumeIndication callback. Agora recommends a setting greater than 200 ms. This value must not be less than 10 ms; otherwise, you can not receive the onAudioVolumeIndication callback.
+ /// * [indicationInterval] The time interval (ms) at which the SDK triggers the onAudioVolumeIndication callback. Agora recommends setting a value greater than 200 ms. This value must not be less than 10 ms; otherwise, you cannot receive the onAudioVolumeIndication callback.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown. You need to catch the exception and handle it accordingly.
Future<void> startRecordingDeviceTest(int indicationInterval);

/// Stops the audio capture device test.
- /// This method stops the audio capture device test. You must call this method to stop the test after calling the startRecordingDeviceTest method.Ensure that you call this method before joining a channel.
+ /// This method stops the audio capture device test. You must call this method to stop the test after calling the startRecordingDeviceTest method. This method is for Windows and macOS only. Ensure that you call this method before joining a channel.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown. You need to catch the exception and handle it accordingly.
Future<void> stopRecordingDeviceTest();

/// Starts an audio device loopback test.
- /// This method tests whether the local audio capture device and playback device are working properly. Once the test starts, the audio recording device records the local audio, and the audio playback device plays the captured audio. The SDK triggers two independent onAudioVolumeIndication callbacks at the time interval set in this method, which reports the volume information of the capture device (uid = 0) and the volume information of the playback device (uid = 1) respectively.Ensure that you call this method before joining a channel.This method tests local audio devices and does not report the network conditions.
+ /// This method tests whether the local audio capture device and playback device are working properly. After starting the test, the audio capture device records the local audio, and the audio playback device plays the captured audio. The SDK triggers two independent onAudioVolumeIndication callbacks at the time interval set in this method, which report the volume information of the capture device (uid = 0) and the volume information of the playback device (uid = 1) respectively. This method is for Windows and macOS only. Ensure that you call this method before joining a channel. This method tests local audio devices and does not report the network conditions.
///
/// * [indicationInterval] The time interval (ms) at which the SDK triggers the onAudioVolumeIndication callback. Agora recommends setting a value greater than 200 ms. This value must not be less than 10 ms; otherwise, you cannot receive the onAudioVolumeIndication callback.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown. You need to catch the exception and handle it accordingly.
Future<void> startAudioDeviceLoopbackTest(int indicationInterval);
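The loopback test pairs naturally with an onAudioVolumeIndication handler; a sketch under the same assumptions as above (interval and duration are arbitrary):

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> runLoopbackTest(RtcEngine engine) async {
  engine.registerEventHandler(RtcEngineEventHandler(
    onAudioVolumeIndication:
        (connection, speakers, speakerNumber, totalVolume) {
      // uid 0 reports the capture device, uid 1 the playback device.
      print('volume info: $speakers');
    },
  ));
  final adm = engine.getAudioDeviceManager();
  await adm.startAudioDeviceLoopbackTest(300); // > 200 ms recommended
  await Future<void>.delayed(const Duration(seconds: 5));
  await adm.stopAudioDeviceLoopbackTest();
}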
/// Stops the audio device loopback test.
- /// Ensure that you call this method before joining a channel.Ensure that you call this method to stop the loopback test after calling the startAudioDeviceLoopbackTest method.
+ /// This method is for Windows and macOS only. Ensure that you call this method before joining a channel. Ensure that you call this method to stop the loopback test after calling the startAudioDeviceLoopbackTest method.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown. You need to catch the exception and handle it accordingly.
Future<void> stopAudioDeviceLoopbackTest();

/// Sets the audio playback device used by the SDK to follow the system default audio playback device.
+ /// This method is for Windows and macOS only.
+ ///
+ /// * [enable] Whether to follow the system default audio playback device: true: Follow the system default audio playback device. The SDK immediately switches the audio playback device when the system default audio playback device changes. false: Do not follow the system default audio playback device. The SDK switches the audio playback device to the system default audio playback device only when the currently used audio playback device is disconnected.
///
- /// * [enable] Whether to follow the system default audio playback device:true: Follow. The SDK immediately switches the audio playback device when the system default audio playback device changes.false: Do not follow. The SDK switches the audio playback device to the system default audio playback device only when the currently used audio playback device is disconnected.
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown. You need to catch the exception and handle it accordingly.
Future<void> followSystemPlaybackDevice(bool enable);

/// Sets the audio recording device used by the SDK to follow the system default audio recording device.
+ /// This method is for Windows and macOS only.
+ ///
+ /// * [enable] Whether to follow the system default audio recording device: true: Follow the system default audio recording device. The SDK immediately switches the audio recording device when the system default audio recording device changes. false: Do not follow the system default audio recording device. The SDK switches the audio recording device to the system default audio recording device only when the currently used audio recording device is disconnected.
///
- /// * [enable] Whether to follow the system default audio recording device:true: Follow. The SDK immediately switches the audio recording device when the system default audio recording device changes.false: Do not follow. The SDK switches the audio recording device to the system default audio recording device only when the currently used audio recording device is disconnected.
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown. You need to catch the exception and handle it accordingly.
Future<void> followSystemRecordingDevice(bool enable);
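A minimal sketch of the two follow-system toggles together (the chosen true/false values are only an illustration):

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> configureDeviceFollowing(AudioDeviceManager adm) async {
  // Let playback track the OS default device as it changes...
  await adm.followSystemPlaybackDevice(true);
  // ...but keep the selected capture device until it disconnects.
  await adm.followSystemRecordingDevice(false);
}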
/// Sets whether the loopback device follows the system default playback device.
- /// This method applies to Windows only.
+ /// This method is for Windows and macOS only.
///
- /// * [enable] Whether to follow the system default audio playback device:true: Follow. When the default playback device of the system is changed, the SDK immediately switches to the loopback device.false: Do not follow. The SDK switches the audio loopback device to the system default audio playback device only when the current audio playback device is disconnected.
+ /// * [enable] Whether to follow the system default audio playback device: true: Follow the system default audio playback device. When the default playback device of the system is changed, the SDK immediately switches to the loopback device. false: Do not follow the system default audio playback device. The SDK switches the audio loopback device to the system default audio playback device only when the current audio playback device is disconnected.
+ ///
+ /// Returns
+ /// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown. You need to catch the exception and handle it accordingly.
Future<void> followSystemLoopbackDevice(bool enable);

/// Releases all the resources occupied by the AudioDeviceManager object.
diff --git a/lib/src/binding/agora_base_event_impl.dart b/lib/src/binding/agora_base_event_impl.dart
index e5111ac4b..d0cdb7478 100644
--- a/lib/src/binding/agora_base_event_impl.dart
+++ b/lib/src/binding/agora_base_event_impl.dart
@@ -1,9 +1,10 @@
+/// GENERATED BY terra, DO NOT MODIFY BY HAND.
+
+// ignore_for_file: public_member_api_docs, unused_local_variable
 import 'package:agora_rtc_engine/src/binding_forward_export.dart';
 import 'package:agora_rtc_engine/src/binding/impl_forward_export.dart';
 import 'package:iris_method_channel/iris_method_channel.dart';
-
-// ignore_for_file: public_member_api_docs, unused_local_variable
-
 class AudioEncodedFrameObserverWrapper implements EventLoopEventHandler {
   const AudioEncodedFrameObserverWrapper(this.audioEncodedFrameObserver);
   final AudioEncodedFrameObserver audioEncodedFrameObserver;
@@ -22,7 +23,7 @@ class AudioEncodedFrameObserverWrapper implements EventLoopEventHandler {
   bool handleEventInternal(
       String eventName, String eventData, List<Uint8List> buffers) {
     switch (eventName) {
-      case 'OnRecordAudioEncodedFrame':
+      case 'onRecordAudioEncodedFrame':
         if (audioEncodedFrameObserver.onRecordAudioEncodedFrame == null) {
           return true;
         }
@@ -45,7 +46,7 @@ class AudioEncodedFrameObserverWrapper implements EventLoopEventHandler {
             frameBuffer, length, audioEncodedFrameInfo);
         return true;

-      case 'OnPlaybackAudioEncodedFrame':
+      case 'onPlaybackAudioEncodedFrame':
         if (audioEncodedFrameObserver.onPlaybackAudioEncodedFrame == null) {
           return true;
         }
@@ -68,7 +69,7 @@ class AudioEncodedFrameObserverWrapper implements EventLoopEventHandler {
             frameBuffer, length, audioEncodedFrameInfo);
         return true;

-      case 'OnMixedAudioEncodedFrame':
+      case 'onMixedAudioEncodedFrame':
         if (audioEncodedFrameObserver.onMixedAudioEncodedFrame == null) {
           return true;
         }
diff --git a/lib/src/binding/agora_media_base_event_impl.dart b/lib/src/binding/agora_media_base_event_impl.dart
index 298ddcd15..c98cf6a29 100644
--- a/lib/src/binding/agora_media_base_event_impl.dart
+++ b/lib/src/binding/agora_media_base_event_impl.dart
@@ -1,8 +1,59 @@
+/// GENERATED BY terra, DO NOT MODIFY BY HAND.
+ +// ignore_for_file: public_member_api_docs, unused_local_variable import 'package:agora_rtc_engine/src/binding_forward_export.dart'; import 'package:agora_rtc_engine/src/binding/impl_forward_export.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -// ignore_for_file: public_member_api_docs, unused_local_variable +class AudioPcmFrameSinkWrapper implements EventLoopEventHandler { + const AudioPcmFrameSinkWrapper(this.audioPcmFrameSink); + final AudioPcmFrameSink audioPcmFrameSink; + @override + bool operator ==(Object other) { + if (other.runtimeType != runtimeType) { + return false; + } + return other is AudioPcmFrameSinkWrapper && + other.audioPcmFrameSink == audioPcmFrameSink; + } + + @override + int get hashCode => audioPcmFrameSink.hashCode; + @override + bool handleEventInternal( + String eventName, String eventData, List buffers) { + switch (eventName) { + case 'onFrame': + if (audioPcmFrameSink.onFrame == null) { + return true; + } + final jsonMap = jsonDecode(eventData); + AudioPcmFrameSinkOnFrameJson paramJson = + AudioPcmFrameSinkOnFrameJson.fromJson(jsonMap); + paramJson = paramJson.fillBuffers(buffers); + AudioPcmFrame? frame = paramJson.frame; + if (frame == null) { + return true; + } + frame = frame.fillBuffers(buffers); + audioPcmFrameSink.onFrame!(frame); + return true; + } + return false; + } + + @override + bool handleEvent( + String eventName, String eventData, List buffers) { + if (!eventName.startsWith('AudioPcmFrameSink')) return false; + final newEvent = eventName.replaceFirst('AudioPcmFrameSink_', ''); + if (handleEventInternal(newEvent, eventData, buffers)) { + return true; + } + + return false; + } +} class AudioFrameObserverBaseWrapper implements EventLoopEventHandler { const AudioFrameObserverBaseWrapper(this.audioFrameObserverBase); @@ -313,12 +364,13 @@ class VideoFrameObserverWrapper implements EventLoopEventHandler { VideoFrameObserverOnCaptureVideoFrameJson paramJson = VideoFrameObserverOnCaptureVideoFrameJson.fromJson(jsonMap); paramJson = paramJson.fillBuffers(buffers); + VideoSourceType? sourceType = paramJson.sourceType; VideoFrame? videoFrame = paramJson.videoFrame; - if (videoFrame == null) { + if (sourceType == null || videoFrame == null) { return true; } videoFrame = videoFrame.fillBuffers(buffers); - videoFrameObserver.onCaptureVideoFrame!(videoFrame); + videoFrameObserver.onCaptureVideoFrame!(sourceType, videoFrame); return true; case 'onPreEncodeVideoFrame': @@ -329,78 +381,13 @@ class VideoFrameObserverWrapper implements EventLoopEventHandler { VideoFrameObserverOnPreEncodeVideoFrameJson paramJson = VideoFrameObserverOnPreEncodeVideoFrameJson.fromJson(jsonMap); paramJson = paramJson.fillBuffers(buffers); + VideoSourceType? sourceType = paramJson.sourceType; VideoFrame? videoFrame = paramJson.videoFrame; - if (videoFrame == null) { - return true; - } - videoFrame = videoFrame.fillBuffers(buffers); - videoFrameObserver.onPreEncodeVideoFrame!(videoFrame); - return true; - - case 'onSecondaryCameraCaptureVideoFrame': - if (videoFrameObserver.onSecondaryCameraCaptureVideoFrame == null) { - return true; - } - final jsonMap = jsonDecode(eventData); - VideoFrameObserverOnSecondaryCameraCaptureVideoFrameJson paramJson = - VideoFrameObserverOnSecondaryCameraCaptureVideoFrameJson.fromJson( - jsonMap); - paramJson = paramJson.fillBuffers(buffers); - VideoFrame? 
videoFrame = paramJson.videoFrame; - if (videoFrame == null) { - return true; - } - videoFrame = videoFrame.fillBuffers(buffers); - videoFrameObserver.onSecondaryCameraCaptureVideoFrame!(videoFrame); - return true; - - case 'onSecondaryPreEncodeCameraVideoFrame': - if (videoFrameObserver.onSecondaryPreEncodeCameraVideoFrame == null) { - return true; - } - final jsonMap = jsonDecode(eventData); - VideoFrameObserverOnSecondaryPreEncodeCameraVideoFrameJson paramJson = - VideoFrameObserverOnSecondaryPreEncodeCameraVideoFrameJson.fromJson( - jsonMap); - paramJson = paramJson.fillBuffers(buffers); - VideoFrame? videoFrame = paramJson.videoFrame; - if (videoFrame == null) { - return true; - } - videoFrame = videoFrame.fillBuffers(buffers); - videoFrameObserver.onSecondaryPreEncodeCameraVideoFrame!(videoFrame); - return true; - - case 'onScreenCaptureVideoFrame': - if (videoFrameObserver.onScreenCaptureVideoFrame == null) { - return true; - } - final jsonMap = jsonDecode(eventData); - VideoFrameObserverOnScreenCaptureVideoFrameJson paramJson = - VideoFrameObserverOnScreenCaptureVideoFrameJson.fromJson(jsonMap); - paramJson = paramJson.fillBuffers(buffers); - VideoFrame? videoFrame = paramJson.videoFrame; - if (videoFrame == null) { - return true; - } - videoFrame = videoFrame.fillBuffers(buffers); - videoFrameObserver.onScreenCaptureVideoFrame!(videoFrame); - return true; - - case 'onPreEncodeScreenVideoFrame': - if (videoFrameObserver.onPreEncodeScreenVideoFrame == null) { - return true; - } - final jsonMap = jsonDecode(eventData); - VideoFrameObserverOnPreEncodeScreenVideoFrameJson paramJson = - VideoFrameObserverOnPreEncodeScreenVideoFrameJson.fromJson(jsonMap); - paramJson = paramJson.fillBuffers(buffers); - VideoFrame? videoFrame = paramJson.videoFrame; - if (videoFrame == null) { + if (sourceType == null || videoFrame == null) { return true; } videoFrame = videoFrame.fillBuffers(buffers); - videoFrameObserver.onPreEncodeScreenVideoFrame!(videoFrame); + videoFrameObserver.onPreEncodeVideoFrame!(sourceType, videoFrame); return true; case 'onMediaPlayerVideoFrame': @@ -420,40 +407,6 @@ class VideoFrameObserverWrapper implements EventLoopEventHandler { videoFrameObserver.onMediaPlayerVideoFrame!(videoFrame, mediaPlayerId); return true; - case 'onSecondaryScreenCaptureVideoFrame': - if (videoFrameObserver.onSecondaryScreenCaptureVideoFrame == null) { - return true; - } - final jsonMap = jsonDecode(eventData); - VideoFrameObserverOnSecondaryScreenCaptureVideoFrameJson paramJson = - VideoFrameObserverOnSecondaryScreenCaptureVideoFrameJson.fromJson( - jsonMap); - paramJson = paramJson.fillBuffers(buffers); - VideoFrame? videoFrame = paramJson.videoFrame; - if (videoFrame == null) { - return true; - } - videoFrame = videoFrame.fillBuffers(buffers); - videoFrameObserver.onSecondaryScreenCaptureVideoFrame!(videoFrame); - return true; - - case 'onSecondaryPreEncodeScreenVideoFrame': - if (videoFrameObserver.onSecondaryPreEncodeScreenVideoFrame == null) { - return true; - } - final jsonMap = jsonDecode(eventData); - VideoFrameObserverOnSecondaryPreEncodeScreenVideoFrameJson paramJson = - VideoFrameObserverOnSecondaryPreEncodeScreenVideoFrameJson.fromJson( - jsonMap); - paramJson = paramJson.fillBuffers(buffers); - VideoFrame? 
videoFrame = paramJson.videoFrame; - if (videoFrame == null) { - return true; - } - videoFrame = videoFrame.fillBuffers(buffers); - videoFrameObserver.onSecondaryPreEncodeScreenVideoFrame!(videoFrame); - return true; - case 'onRenderVideoFrame': if (videoFrameObserver.onRenderVideoFrame == null) { return true; @@ -531,12 +484,18 @@ class MediaRecorderObserverWrapper implements EventLoopEventHandler { MediaRecorderObserverOnRecorderStateChangedJson paramJson = MediaRecorderObserverOnRecorderStateChangedJson.fromJson(jsonMap); paramJson = paramJson.fillBuffers(buffers); + String? channelId = paramJson.channelId; + int? uid = paramJson.uid; RecorderState? state = paramJson.state; RecorderErrorCode? error = paramJson.error; - if (state == null || error == null) { + if (channelId == null || + uid == null || + state == null || + error == null) { return true; } - mediaRecorderObserver.onRecorderStateChanged!(state, error); + mediaRecorderObserver.onRecorderStateChanged!( + channelId, uid, state, error); return true; case 'onRecorderInfoUpdated': @@ -547,12 +506,14 @@ class MediaRecorderObserverWrapper implements EventLoopEventHandler { MediaRecorderObserverOnRecorderInfoUpdatedJson paramJson = MediaRecorderObserverOnRecorderInfoUpdatedJson.fromJson(jsonMap); paramJson = paramJson.fillBuffers(buffers); + String? channelId = paramJson.channelId; + int? uid = paramJson.uid; RecorderInfo? info = paramJson.info; - if (info == null) { + if (channelId == null || uid == null || info == null) { return true; } info = info.fillBuffers(buffers); - mediaRecorderObserver.onRecorderInfoUpdated!(info); + mediaRecorderObserver.onRecorderInfoUpdated!(channelId, uid, info); return true; } return false; diff --git a/lib/src/binding/agora_media_engine_impl.dart b/lib/src/binding/agora_media_engine_impl.dart index 254c7558b..2051b23d4 100644 --- a/lib/src/binding/agora_media_engine_impl.dart +++ b/lib/src/binding/agora_media_engine_impl.dart @@ -1,7 +1,9 @@ +/// GENERATED BY terra, DO NOT MODIFY BY HAND. + +// ignore_for_file: public_member_api_docs, unused_local_variable, annotate_overrides import 'package:agora_rtc_engine/src/binding_forward_export.dart'; import 'package:agora_rtc_engine/src/binding/impl_forward_export.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -// ignore_for_file: public_member_api_docs, unused_local_variable, annotate_overrides class MediaEngineImpl implements MediaEngine { MediaEngineImpl(this.irisMethodChannel); @@ -74,75 +76,10 @@ class MediaEngineImpl implements MediaEngine { @override Future pushAudioFrame( - {required MediaSourceType type, - required AudioFrame frame, - bool wrap = false, - int sourceId = 0}) async { + {required AudioFrame frame, int trackId = 0}) async { final apiType = '${isOverrideClassName ? className : 'MediaEngine'}_pushAudioFrame'; - final param = createParams({ - 'type': type.value(), - 'frame': frame.toJson(), - 'wrap': wrap, - 'sourceId': sourceId - }); - final List buffers = []; - buffers.addAll(frame.collectBufferList()); - final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); - if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); - } - final rm = callApiResult.data; - final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); - } - } - - @override - Future pushCaptureAudioFrame(AudioFrame frame) async { - final apiType = - '${isOverrideClassName ? 
className : 'MediaEngine'}_pushCaptureAudioFrame'; - final param = createParams({'frame': frame.toJson()}); - final List buffers = []; - buffers.addAll(frame.collectBufferList()); - final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); - if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); - } - final rm = callApiResult.data; - final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); - } - } - - @override - Future pushReverseAudioFrame(AudioFrame frame) async { - final apiType = - '${isOverrideClassName ? className : 'MediaEngine'}_pushReverseAudioFrame'; - final param = createParams({'frame': frame.toJson()}); - final List buffers = []; - buffers.addAll(frame.collectBufferList()); - final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); - if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); - } - final rm = callApiResult.data; - final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); - } - } - - @override - Future pushDirectAudioFrame(AudioFrame frame) async { - final apiType = - '${isOverrideClassName ? className : 'MediaEngine'}_pushDirectAudioFrame'; - final param = createParams({'frame': frame.toJson()}); + final param = createParams({'frame': frame.toJson(), 'trackId': trackId}); final List buffers = []; buffers.addAll(frame.collectBufferList()); final callApiResult = await irisMethodChannel.invokeMethod( @@ -209,7 +146,6 @@ class MediaEngineImpl implements MediaEngine { {required bool enabled, required int sampleRate, required int channels, - int sourceNumber = 1, bool localPlayback = false, bool publish = true}) async { final apiType = @@ -218,7 +154,6 @@ class MediaEngineImpl implements MediaEngine { 'enabled': enabled, 'sampleRate': sampleRate, 'channels': channels, - 'sourceNumber': sourceNumber, 'localPlayback': localPlayback, 'publish': publish }); @@ -235,14 +170,30 @@ class MediaEngineImpl implements MediaEngine { } @override - Future setExternalAudioSink( - {required bool enabled, - required int sampleRate, - required int channels}) async { + Future createCustomAudioTrack( + {required AudioTrackType trackType, + required AudioTrackConfig config}) async { final apiType = - '${isOverrideClassName ? className : 'MediaEngine'}_setExternalAudioSink'; + '${isOverrideClassName ? className : 'MediaEngine'}_createCustomAudioTrack'; final param = createParams( - {'enabled': enabled, 'sampleRate': sampleRate, 'channels': channels}); + {'trackType': trackType.value(), 'config': config.toJson()}); + final List buffers = []; + buffers.addAll(config.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + return result as int; + } + + @override + Future destroyCustomAudioTrack(int trackId) async { + final apiType = + '${isOverrideClassName ? 
className : 'MediaEngine'}_destroyCustomAudioTrack'; + final param = createParams({'trackId': trackId}); final callApiResult = await irisMethodChannel.invokeMethod( IrisMethodCall(apiType, jsonEncode(param), buffers: null)); if (callApiResult.irisReturnCode < 0) { @@ -256,11 +207,14 @@ class MediaEngineImpl implements MediaEngine { } @override - Future enableCustomAudioLocalPlayback( - {required int sourceId, required bool enabled}) async { + Future setExternalAudioSink( + {required bool enabled, + required int sampleRate, + required int channels}) async { final apiType = - '${isOverrideClassName ? className : 'MediaEngine'}_enableCustomAudioLocalPlayback'; - final param = createParams({'sourceId': sourceId, 'enabled': enabled}); + '${isOverrideClassName ? className : 'MediaEngine'}_setExternalAudioSink'; + final param = createParams( + {'enabled': enabled, 'sampleRate': sampleRate, 'channels': channels}); final callApiResult = await irisMethodChannel.invokeMethod( IrisMethodCall(apiType, jsonEncode(param), buffers: null)); if (callApiResult.irisReturnCode < 0) { @@ -274,12 +228,11 @@ class MediaEngineImpl implements MediaEngine { } @override - Future setDirectExternalAudioSource( - {required bool enable, bool localPlayback = false}) async { + Future enableCustomAudioLocalPlayback( + {required int trackId, required bool enabled}) async { final apiType = - '${isOverrideClassName ? className : 'MediaEngine'}_setDirectExternalAudioSource'; - final param = - createParams({'enable': enable, 'localPlayback': localPlayback}); + '${isOverrideClassName ? className : 'MediaEngine'}_enableCustomAudioLocalPlayback'; + final param = createParams({'trackId': trackId, 'enabled': enabled}); final callApiResult = await irisMethodChannel.invokeMethod( IrisMethodCall(apiType, jsonEncode(param), buffers: null)); if (callApiResult.irisReturnCode < 0) { diff --git a/lib/src/binding/agora_media_player_event_impl.dart b/lib/src/binding/agora_media_player_event_impl.dart index ed2aa3863..4a5b33ec0 100644 --- a/lib/src/binding/agora_media_player_event_impl.dart +++ b/lib/src/binding/agora_media_player_event_impl.dart @@ -1,61 +1,10 @@ +/// GENERATED BY terra, DO NOT MODIFY BY HAND. + +// ignore_for_file: public_member_api_docs, unused_local_variable import 'package:agora_rtc_engine/src/binding_forward_export.dart'; import 'package:agora_rtc_engine/src/binding/impl_forward_export.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -// ignore_for_file: public_member_api_docs, unused_local_variable - -class MediaPlayerAudioFrameObserverWrapper implements EventLoopEventHandler { - const MediaPlayerAudioFrameObserverWrapper( - this.mediaPlayerAudioFrameObserver); - final MediaPlayerAudioFrameObserver mediaPlayerAudioFrameObserver; - @override - bool operator ==(Object other) { - if (other.runtimeType != runtimeType) { - return false; - } - return other is MediaPlayerAudioFrameObserverWrapper && - other.mediaPlayerAudioFrameObserver == mediaPlayerAudioFrameObserver; - } - - @override - int get hashCode => mediaPlayerAudioFrameObserver.hashCode; - @override - bool handleEventInternal( - String eventName, String eventData, List buffers) { - switch (eventName) { - case 'onFrame': - if (mediaPlayerAudioFrameObserver.onFrame == null) { - return true; - } - final jsonMap = jsonDecode(eventData); - MediaPlayerAudioFrameObserverOnFrameJson paramJson = - MediaPlayerAudioFrameObserverOnFrameJson.fromJson(jsonMap); - paramJson = paramJson.fillBuffers(buffers); - AudioPcmFrame? 
frame = paramJson.frame; - if (frame == null) { - return true; - } - frame = frame.fillBuffers(buffers); - mediaPlayerAudioFrameObserver.onFrame!(frame); - return true; - } - return false; - } - - @override - bool handleEvent( - String eventName, String eventData, List buffers) { - if (!eventName.startsWith('MediaPlayerAudioFrameObserver')) return false; - final newEvent = - eventName.replaceFirst('MediaPlayerAudioFrameObserver_', ''); - if (handleEventInternal(newEvent, eventData, buffers)) { - return true; - } - - return false; - } -} - class MediaPlayerVideoFrameObserverWrapper implements EventLoopEventHandler { const MediaPlayerVideoFrameObserverWrapper( this.mediaPlayerVideoFrameObserver); diff --git a/lib/src/binding/agora_media_player_impl.dart b/lib/src/binding/agora_media_player_impl.dart index 98e961449..28a21abd2 100644 --- a/lib/src/binding/agora_media_player_impl.dart +++ b/lib/src/binding/agora_media_player_impl.dart @@ -1,7 +1,9 @@ +/// GENERATED BY terra, DO NOT MODIFY BY HAND. + +// ignore_for_file: public_member_api_docs, unused_local_variable, annotate_overrides import 'package:agora_rtc_engine/src/binding_forward_export.dart'; import 'package:agora_rtc_engine/src/binding/impl_forward_export.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -// ignore_for_file: public_member_api_docs, unused_local_variable, annotate_overrides class MediaPlayerImpl implements MediaPlayer { MediaPlayerImpl(this.irisMethodChannel); @@ -566,6 +568,68 @@ class MediaPlayerImpl implements MediaPlayer { throw UnimplementedError('Unimplement for unregisterPlayerSourceObserver'); } + @override + void registerAudioFrameObserver( + {required AudioPcmFrameSink observer, + RawAudioFrameOpModeType mode = + RawAudioFrameOpModeType.rawAudioFrameOpModeReadOnly}) { +// Implementation template +// final apiType = '${isOverrideClassName ? className : 'MediaPlayer'}_registerAudioFrameObserver'; +// final param = createParams({// 'observer':observer, 'mode':mode.value()// }); +// final callApiResult = irisMethodChannel.invokeMethod(IrisMethodCall(apiType, jsonEncode(param), buffers:null)); +// if (callApiResult.irisReturnCode < 0) { +// throw AgoraRtcException(code: callApiResult.irisReturnCode); +// } +// final rm = callApiResult.data; +// final result = rm['result']; + throw UnimplementedError('Unimplement for registerAudioFrameObserver'); + } + + @override + void unregisterAudioFrameObserver(AudioPcmFrameSink observer) { +// Implementation template +// final apiType = '${isOverrideClassName ? className : 'MediaPlayer'}_unregisterAudioFrameObserver'; +// final param = createParams({// 'observer':observer// }); +// final callApiResult = irisMethodChannel.invokeMethod(IrisMethodCall(apiType, jsonEncode(param), buffers:null)); +// if (callApiResult.irisReturnCode < 0) { +// throw AgoraRtcException(code: callApiResult.irisReturnCode); +// } +// final rm = callApiResult.data; +// final result = rm['result']; +// if (result < 0) { +// throw AgoraRtcException(code: result); +// } + throw UnimplementedError('Unimplement for unregisterAudioFrameObserver'); + } + + @override + void registerVideoFrameObserver(MediaPlayerVideoFrameObserver observer) { +// Implementation template +// final apiType = '${isOverrideClassName ? 
className : 'MediaPlayer'}_registerVideoFrameObserver'; +// final param = createParams({// 'observer':observer// }); +// final callApiResult = irisMethodChannel.invokeMethod(IrisMethodCall(apiType, jsonEncode(param), buffers:null)); +// if (callApiResult.irisReturnCode < 0) { +// throw AgoraRtcException(code: callApiResult.irisReturnCode); +// } +// final rm = callApiResult.data; +// final result = rm['result']; + throw UnimplementedError('Unimplement for registerVideoFrameObserver'); + } + + @override + void unregisterVideoFrameObserver(MediaPlayerVideoFrameObserver observer) { +// Implementation template +// final apiType = '${isOverrideClassName ? className : 'MediaPlayer'}_unregisterVideoFrameObserver'; +// final param = createParams({// 'observer':observer// }); +// final callApiResult = irisMethodChannel.invokeMethod(IrisMethodCall(apiType, jsonEncode(param), buffers:null)); +// if (callApiResult.irisReturnCode < 0) { +// throw AgoraRtcException(code: callApiResult.irisReturnCode); +// } +// final rm = callApiResult.data; +// final result = rm['result']; + throw UnimplementedError('Unimplement for unregisterVideoFrameObserver'); + } + @override void registerMediaPlayerAudioSpectrumObserver( {required AudioSpectrumObserver observer, required int intervalInMS}) { @@ -873,62 +937,6 @@ class MediaPlayerImpl implements MediaPlayer { throw AgoraRtcException(code: result); } } - - @override - void registerAudioFrameObserver(MediaPlayerAudioFrameObserver observer) { -// Implementation template -// final apiType = '${isOverrideClassName ? className : 'MediaPlayer'}_registerAudioFrameObserver'; -// final param = createParams({// 'observer':observer// }); -// final callApiResult = irisMethodChannel.invokeMethod(IrisMethodCall(apiType, jsonEncode(param), buffers:null)); -// if (callApiResult.irisReturnCode < 0) { -// throw AgoraRtcException(code: callApiResult.irisReturnCode); -// } -// final rm = callApiResult.data; -// final result = rm['result']; - throw UnimplementedError('Unimplement for registerAudioFrameObserver'); - } - - @override - void unregisterAudioFrameObserver(MediaPlayerAudioFrameObserver observer) { -// Implementation template -// final apiType = '${isOverrideClassName ? className : 'MediaPlayer'}_unregisterAudioFrameObserver'; -// final param = createParams({// 'observer':observer// }); -// final callApiResult = irisMethodChannel.invokeMethod(IrisMethodCall(apiType, jsonEncode(param), buffers:null)); -// if (callApiResult.irisReturnCode < 0) { -// throw AgoraRtcException(code: callApiResult.irisReturnCode); -// } -// final rm = callApiResult.data; -// final result = rm['result']; - throw UnimplementedError('Unimplement for unregisterAudioFrameObserver'); - } - - @override - void registerVideoFrameObserver(MediaPlayerVideoFrameObserver observer) { -// Implementation template -// final apiType = '${isOverrideClassName ? className : 'MediaPlayer'}_registerVideoFrameObserver'; -// final param = createParams({// 'observer':observer// }); -// final callApiResult = irisMethodChannel.invokeMethod(IrisMethodCall(apiType, jsonEncode(param), buffers:null)); -// if (callApiResult.irisReturnCode < 0) { -// throw AgoraRtcException(code: callApiResult.irisReturnCode); -// } -// final rm = callApiResult.data; -// final result = rm['result']; - throw UnimplementedError('Unimplement for registerVideoFrameObserver'); - } - - @override - void unregisterVideoFrameObserver(MediaPlayerVideoFrameObserver observer) { -// Implementation template -// final apiType = '${isOverrideClassName ? 
className : 'MediaPlayer'}_unregisterVideoFrameObserver'; -// final param = createParams({// 'observer':observer// }); -// final callApiResult = irisMethodChannel.invokeMethod(IrisMethodCall(apiType, jsonEncode(param), buffers:null)); -// if (callApiResult.irisReturnCode < 0) { -// throw AgoraRtcException(code: callApiResult.irisReturnCode); -// } -// final rm = callApiResult.data; -// final result = rm['result']; - throw UnimplementedError('Unimplement for unregisterVideoFrameObserver'); - } } class MediaPlayerCacheManagerImpl implements MediaPlayerCacheManager { diff --git a/lib/src/binding/agora_media_player_source_event_impl.dart b/lib/src/binding/agora_media_player_source_event_impl.dart index 87876567a..34a0b36c3 100644 --- a/lib/src/binding/agora_media_player_source_event_impl.dart +++ b/lib/src/binding/agora_media_player_source_event_impl.dart @@ -1,9 +1,10 @@ +/// GENERATED BY terra, DO NOT MODIFY BY HAND. + +// ignore_for_file: public_member_api_docs, unused_local_variable import 'package:agora_rtc_engine/src/binding_forward_export.dart'; import 'package:agora_rtc_engine/src/binding/impl_forward_export.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -// ignore_for_file: public_member_api_docs, unused_local_variable - class MediaPlayerSourceObserverWrapper implements EventLoopEventHandler { const MediaPlayerSourceObserverWrapper(this.mediaPlayerSourceObserver); final MediaPlayerSourceObserver mediaPlayerSourceObserver; diff --git a/lib/src/binding/agora_media_recorder_impl.dart b/lib/src/binding/agora_media_recorder_impl.dart index f49a990c2..53d90b4c0 100644 --- a/lib/src/binding/agora_media_recorder_impl.dart +++ b/lib/src/binding/agora_media_recorder_impl.dart @@ -1,7 +1,9 @@ +/// GENERATED BY terra, DO NOT MODIFY BY HAND. + +// ignore_for_file: public_member_api_docs, unused_local_variable, annotate_overrides import 'package:agora_rtc_engine/src/binding_forward_export.dart'; import 'package:agora_rtc_engine/src/binding/impl_forward_export.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -// ignore_for_file: public_member_api_docs, unused_local_variable, annotate_overrides class MediaRecorderImpl implements MediaRecorder { MediaRecorderImpl(this.irisMethodChannel); @@ -21,17 +23,12 @@ class MediaRecorderImpl implements MediaRecorder { String get className => 'MediaRecorder'; @override - Future setMediaRecorderObserver( - {required RtcConnection connection, - required MediaRecorderObserver callback}) async { + Future setMediaRecorderObserver(MediaRecorderObserver callback) async { final apiType = '${isOverrideClassName ? className : 'MediaRecorder'}_setMediaRecorderObserver'; - final param = - createParams({'connection': connection.toJson(), 'callback': callback}); - final List buffers = []; - buffers.addAll(connection.collectBufferList()); + final param = createParams({'callback': callback}); final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); if (callApiResult.irisReturnCode < 0) { throw AgoraRtcException(code: callApiResult.irisReturnCode); } @@ -43,15 +40,11 @@ class MediaRecorderImpl implements MediaRecorder { } @override - Future startRecording( - {required RtcConnection connection, - required MediaRecorderConfiguration config}) async { + Future startRecording(MediaRecorderConfiguration config) async { final apiType = '${isOverrideClassName ? 
className : 'MediaRecorder'}_startRecording'; - final param = createParams( - {'connection': connection.toJson(), 'config': config.toJson()}); + final param = createParams({'config': config.toJson()}); final List buffers = []; - buffers.addAll(connection.collectBufferList()); buffers.addAll(config.collectBufferList()); final callApiResult = await irisMethodChannel.invokeMethod( IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); @@ -66,28 +59,9 @@ class MediaRecorderImpl implements MediaRecorder { } @override - Future stopRecording(RtcConnection connection) async { + Future stopRecording() async { final apiType = '${isOverrideClassName ? className : 'MediaRecorder'}_stopRecording'; - final param = createParams({'connection': connection.toJson()}); - final List buffers = []; - buffers.addAll(connection.collectBufferList()); - final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); - if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); - } - final rm = callApiResult.data; - final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); - } - } - - @override - Future release() async { - final apiType = - '${isOverrideClassName ? className : 'MediaRecorder'}_release'; final param = createParams({}); final callApiResult = await irisMethodChannel.invokeMethod( IrisMethodCall(apiType, jsonEncode(param), buffers: null)); diff --git a/lib/src/binding/agora_music_content_center_event_impl.dart b/lib/src/binding/agora_music_content_center_event_impl.dart index 58d2bd4c7..8eb988735 100644 --- a/lib/src/binding/agora_music_content_center_event_impl.dart +++ b/lib/src/binding/agora_music_content_center_event_impl.dart @@ -1,9 +1,10 @@ +/// GENERATED BY terra, DO NOT MODIFY BY HAND. + +// ignore_for_file: public_member_api_docs, unused_local_variable import 'package:agora_rtc_engine/src/binding_forward_export.dart'; import 'package:agora_rtc_engine/src/binding/impl_forward_export.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -// ignore_for_file: public_member_api_docs, unused_local_variable - class MusicContentCenterEventHandlerWrapper implements EventLoopEventHandler { const MusicContentCenterEventHandlerWrapper( this.musicContentCenterEventHandler); @@ -33,14 +34,14 @@ class MusicContentCenterEventHandlerWrapper implements EventLoopEventHandler { jsonMap); paramJson = paramJson.fillBuffers(buffers); String? requestId = paramJson.requestId; - MusicContentCenterStatusCode? status = paramJson.status; List? result = paramJson.result; - if (requestId == null || status == null || result == null) { + MusicContentCenterStatusCode? errorCode = paramJson.errorCode; + if (requestId == null || result == null || errorCode == null) { return true; } result = result.map((e) => e.fillBuffers(buffers)).toList(); musicContentCenterEventHandler.onMusicChartsResult!( - requestId, status, result); + requestId, result, errorCode); return true; case 'onMusicCollectionResult': @@ -53,13 +54,13 @@ class MusicContentCenterEventHandlerWrapper implements EventLoopEventHandler { jsonMap); paramJson = paramJson.fillBuffers(buffers); String? requestId = paramJson.requestId; - MusicContentCenterStatusCode? status = paramJson.status; MusicCollection? result = paramJson.result; - if (requestId == null || status == null || result == null) { + MusicContentCenterStatusCode? 
errorCode = paramJson.errorCode; + if (requestId == null || result == null || errorCode == null) { return true; } musicContentCenterEventHandler.onMusicCollectionResult!( - requestId, status, result); + requestId, result, errorCode); return true; case 'onLyricResult': @@ -72,10 +73,12 @@ class MusicContentCenterEventHandlerWrapper implements EventLoopEventHandler { paramJson = paramJson.fillBuffers(buffers); String? requestId = paramJson.requestId; String? lyricUrl = paramJson.lyricUrl; - if (requestId == null || lyricUrl == null) { + MusicContentCenterStatusCode? errorCode = paramJson.errorCode; + if (requestId == null || lyricUrl == null || errorCode == null) { return true; } - musicContentCenterEventHandler.onLyricResult!(requestId, lyricUrl); + musicContentCenterEventHandler.onLyricResult!( + requestId, lyricUrl, errorCode); return true; case 'onPreLoadEvent': @@ -88,18 +91,18 @@ class MusicContentCenterEventHandlerWrapper implements EventLoopEventHandler { paramJson = paramJson.fillBuffers(buffers); int? songCode = paramJson.songCode; int? percent = paramJson.percent; - PreloadStatusCode? status = paramJson.status; - String? msg = paramJson.msg; String? lyricUrl = paramJson.lyricUrl; + PreloadStatusCode? status = paramJson.status; + MusicContentCenterStatusCode? errorCode = paramJson.errorCode; if (songCode == null || percent == null || + lyricUrl == null || status == null || - msg == null || - lyricUrl == null) { + errorCode == null) { return true; } musicContentCenterEventHandler.onPreLoadEvent!( - songCode, percent, status, msg, lyricUrl); + songCode, percent, lyricUrl, status, errorCode); return true; } return false; diff --git a/lib/src/binding/agora_music_content_center_impl.dart b/lib/src/binding/agora_music_content_center_impl.dart index c49a97bff..32e6d0d01 100644 --- a/lib/src/binding/agora_music_content_center_impl.dart +++ b/lib/src/binding/agora_music_content_center_impl.dart @@ -1,7 +1,9 @@ +/// GENERATED BY terra, DO NOT MODIFY BY HAND. + +// ignore_for_file: public_member_api_docs, unused_local_variable, annotate_overrides import 'package:agora_rtc_engine/src/binding_forward_export.dart'; import 'package:agora_rtc_engine/src/binding/impl_forward_export.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -// ignore_for_file: public_member_api_docs, unused_local_variable, annotate_overrides class MusicChartCollectionImpl implements MusicChartCollection { MusicChartCollectionImpl(this.irisMethodChannel); @@ -281,7 +283,7 @@ class MusicContentCenterImpl implements MusicContentCenter { } @override - Future createMusicPlayer() async { + Future createMusicPlayer() async { final apiType = '${isOverrideClassName ? className : 'MusicContentCenter'}_createMusicPlayer'; final param = createParams({}); @@ -390,6 +392,42 @@ class MusicContentCenterImpl implements MusicContentCenter { } } + @override + Future removeCache(int songCode) async { + final apiType = + '${isOverrideClassName ? className : 'MusicContentCenter'}_removeCache'; + final param = createParams({'songCode': songCode}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + + @override + Future> getCaches(int cacheInfoSize) async { + final apiType = + '${isOverrideClassName ? 
className : 'MusicContentCenter'}_getCaches';
+    final param = createParams({'cacheInfoSize': cacheInfoSize});
+    final callApiResult = await irisMethodChannel.invokeMethod(
+        IrisMethodCall(apiType, jsonEncode(param), buffers: null));
+    if (callApiResult.irisReturnCode < 0) {
+      throw AgoraRtcException(code: callApiResult.irisReturnCode);
+    }
+    final rm = callApiResult.data;
+    final result = rm['result'];
+    if (result < 0) {
+      throw AgoraRtcException(code: result);
+    }
+    final getCachesJson = MusicContentCenterGetCachesJson.fromJson(rm);
+    return getCachesJson.cacheInfo;
+  }
+
  @override
  Future<bool> isPreloaded(int songCode) async {
    final apiType =
diff --git a/lib/src/binding/agora_rtc_engine_event_impl.dart b/lib/src/binding/agora_rtc_engine_event_impl.dart
index 76a8769b5..c6115c0d5 100644
--- a/lib/src/binding/agora_rtc_engine_event_impl.dart
+++ b/lib/src/binding/agora_rtc_engine_event_impl.dart
@@ -1,9 +1,10 @@
+/// GENERATED BY terra, DO NOT MODIFY BY HAND.
+
+// ignore_for_file: public_member_api_docs, unused_local_variable
 import 'package:agora_rtc_engine/src/binding_forward_export.dart';
 import 'package:agora_rtc_engine/src/binding/impl_forward_export.dart';
 import 'package:iris_method_channel/iris_method_channel.dart';
-
-// ignore_for_file: public_member_api_docs, unused_local_variable
-
 class RtcEngineEventHandlerWrapper implements EventLoopEventHandler {
   const RtcEngineEventHandlerWrapper(this.rtcEngineEventHandler);
   final RtcEngineEventHandler rtcEngineEventHandler;
@@ -655,23 +656,6 @@ class RtcEngineEventHandlerWrapper implements EventLoopEventHandler {
             connection, remoteUid, enabled);
         return true;

-      case 'onApiCallExecuted':
-        if (rtcEngineEventHandler.onApiCallExecuted == null) {
-          return true;
-        }
-        final jsonMap = jsonDecode(eventData);
-        RtcEngineEventHandlerOnApiCallExecutedJson paramJson =
-            RtcEngineEventHandlerOnApiCallExecutedJson.fromJson(jsonMap);
-        paramJson = paramJson.fillBuffers(buffers);
-        ErrorCodeType? err = paramJson.err;
-        String? api = paramJson.api;
-        String? result = paramJson.result;
-        if (err == null || api == null || result == null) {
-          return true;
-        }
-        rtcEngineEventHandler.onApiCallExecuted!(err, api, result);
-        return true;
-
       case 'onLocalAudioStatsEx':
         if (rtcEngineEventHandler.onLocalAudioStats == null) {
           return true;
@@ -802,8 +786,8 @@
         paramJson = paramJson.fillBuffers(buffers);
         int? imageWidth = paramJson.imageWidth;
         int? imageHeight = paramJson.imageHeight;
-        Rectangle? vecRectangle = paramJson.vecRectangle;
-        int? vecDistance = paramJson.vecDistance;
+        List<Rectangle>? vecRectangle = paramJson.vecRectangle;
+        List<int>? vecDistance = paramJson.vecDistance;
        int?
numFaces = paramJson.numFaces; if (imageWidth == null || imageHeight == null || @@ -812,7 +796,7 @@ class RtcEngineEventHandlerWrapper implements EventLoopEventHandler { numFaces == null) { return true; } - vecRectangle = vecRectangle.fillBuffers(buffers); + vecRectangle = vecRectangle.map((e) => e.fillBuffers(buffers)).toList(); rtcEngineEventHandler.onFacePositionChanged!( imageWidth, imageHeight, vecRectangle, vecDistance, numFaces); return true; @@ -1783,6 +1767,49 @@ class RtcEngineEventHandlerWrapper implements EventLoopEventHandler { rtcEngineEventHandler.onUserAccountUpdated!( connection, remoteUid, userAccount); return true; + + case 'onLocalVideoTranscoderError': + if (rtcEngineEventHandler.onLocalVideoTranscoderError == null) { + return true; + } + final jsonMap = jsonDecode(eventData); + RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson paramJson = + RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson.fromJson( + jsonMap); + paramJson = paramJson.fillBuffers(buffers); + TranscodingVideoStream? stream = paramJson.stream; + VideoTranscoderError? error = paramJson.error; + if (stream == null || error == null) { + return true; + } + stream = stream.fillBuffers(buffers); + rtcEngineEventHandler.onLocalVideoTranscoderError!(stream, error); + return true; + + case 'onVideoRenderingTracingResultEx': + if (rtcEngineEventHandler.onVideoRenderingTracingResult == null) { + return true; + } + final jsonMap = jsonDecode(eventData); + RtcEngineEventHandlerOnVideoRenderingTracingResultJson paramJson = + RtcEngineEventHandlerOnVideoRenderingTracingResultJson.fromJson( + jsonMap); + paramJson = paramJson.fillBuffers(buffers); + RtcConnection? connection = paramJson.connection; + int? uid = paramJson.uid; + MediaTraceEvent? currentEvent = paramJson.currentEvent; + VideoRenderingTracingInfo? tracingInfo = paramJson.tracingInfo; + if (connection == null || + uid == null || + currentEvent == null || + tracingInfo == null) { + return true; + } + connection = connection.fillBuffers(buffers); + tracingInfo = tracingInfo.fillBuffers(buffers); + rtcEngineEventHandler.onVideoRenderingTracingResult!( + connection, uid, currentEvent, tracingInfo); + return true; } return false; } diff --git a/lib/src/binding/agora_rtc_engine_ex_impl.dart b/lib/src/binding/agora_rtc_engine_ex_impl.dart index 71c3baf25..65f53d52e 100644 --- a/lib/src/binding/agora_rtc_engine_ex_impl.dart +++ b/lib/src/binding/agora_rtc_engine_ex_impl.dart @@ -1,7 +1,9 @@ +/// GENERATED BY terra, DO NOT MODIFY BY HAND. + +// ignore_for_file: public_member_api_docs, unused_local_variable, annotate_overrides import 'package:agora_rtc_engine/src/binding_forward_export.dart'; import 'package:agora_rtc_engine/src/binding/impl_forward_export.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -// ignore_for_file: public_member_api_docs, unused_local_variable, annotate_overrides class RtcEngineExImpl extends RtcEngineImpl implements RtcEngineEx { RtcEngineExImpl(IrisMethodChannel irisMethodChannel) @@ -533,6 +535,48 @@ class RtcEngineExImpl extends RtcEngineImpl implements RtcEngineEx { } } + @override + Future adjustRecordingSignalVolumeEx( + {required int volume, required RtcConnection connection}) async { + final apiType = + '${isOverrideClassName ? 
className : 'RtcEngineEx'}_adjustRecordingSignalVolumeEx'; + final param = + createParams({'volume': volume, 'connection': connection.toJson()}); + final List buffers = []; + buffers.addAll(connection.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + + @override + Future muteRecordingSignalEx( + {required bool mute, required RtcConnection connection}) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngineEx'}_muteRecordingSignalEx'; + final param = + createParams({'mute': mute, 'connection': connection.toJson()}); + final List buffers = []; + buffers.addAll(connection.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future adjustUserPlaybackSignalVolumeEx( {required int uid, @@ -853,6 +897,31 @@ class RtcEngineExImpl extends RtcEngineImpl implements RtcEngineEx { } } + @override + Future startOrUpdateChannelMediaRelayEx( + {required ChannelMediaRelayConfiguration configuration, + required RtcConnection connection}) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngineEx'}_startOrUpdateChannelMediaRelayEx'; + final param = createParams({ + 'configuration': configuration.toJson(), + 'connection': connection.toJson() + }); + final List buffers = []; + buffers.addAll(configuration.collectBufferList()); + buffers.addAll(connection.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future startChannelMediaRelayEx( {required ChannelMediaRelayConfiguration configuration, @@ -1007,32 +1076,6 @@ class RtcEngineExImpl extends RtcEngineImpl implements RtcEngineEx { return getUserInfoByUidExJson.userInfo; } - @override - Future setVideoProfileEx( - {required int width, - required int height, - required int frameRate, - required int bitrate}) async { - final apiType = - '${isOverrideClassName ? 
className : 'RtcEngineEx'}_setVideoProfileEx'; - final param = createParams({ - 'width': width, - 'height': height, - 'frameRate': frameRate, - 'bitrate': bitrate - }); - final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: null)); - if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); - } - final rm = callApiResult.data; - final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); - } - } - @override Future enableDualStreamModeEx( {required bool enabled, @@ -1088,12 +1131,23 @@ class RtcEngineExImpl extends RtcEngineImpl implements RtcEngineEx { } @override - Future enableWirelessAccelerate(bool enabled) async { + Future setHighPriorityUserListEx( + {required List uidList, + required int uidNum, + required StreamFallbackOptions option, + required RtcConnection connection}) async { final apiType = - '${isOverrideClassName ? className : 'RtcEngineEx'}_enableWirelessAccelerate'; - final param = createParams({'enabled': enabled}); + '${isOverrideClassName ? className : 'RtcEngineEx'}_setHighPriorityUserListEx'; + final param = createParams({ + 'uidList': uidList, + 'uidNum': uidNum, + 'option': option.value(), + 'connection': connection.toJson() + }); + final List buffers = []; + buffers.addAll(connection.collectBufferList()); final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); if (callApiResult.irisReturnCode < 0) { throw AgoraRtcException(code: callApiResult.irisReturnCode); } @@ -1126,4 +1180,23 @@ class RtcEngineExImpl extends RtcEngineImpl implements RtcEngineEx { throw AgoraRtcException(code: result); } } + + @override + Future startMediaRenderingTracingEx(RtcConnection connection) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngineEx'}_startMediaRenderingTracingEx'; + final param = createParams({'connection': connection.toJson()}); + final List buffers = []; + buffers.addAll(connection.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } } diff --git a/lib/src/binding/agora_rtc_engine_impl.dart b/lib/src/binding/agora_rtc_engine_impl.dart index 7dc847c8f..90dc00fa3 100644 --- a/lib/src/binding/agora_rtc_engine_impl.dart +++ b/lib/src/binding/agora_rtc_engine_impl.dart @@ -1,7 +1,9 @@ +/// GENERATED BY terra, DO NOT MODIFY BY HAND. + +// ignore_for_file: public_member_api_docs, unused_local_variable, annotate_overrides import 'package:agora_rtc_engine/src/binding_forward_export.dart'; import 'package:agora_rtc_engine/src/binding/impl_forward_export.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -// ignore_for_file: public_member_api_docs, unused_local_variable, annotate_overrides class VideoDeviceManagerImpl implements VideoDeviceManager { VideoDeviceManagerImpl(this.irisMethodChannel); @@ -244,6 +246,26 @@ class RtcEngineImpl implements RtcEngine { return result as String; } + @override + Future> queryCodecCapability(int size) async { + final apiType = + '${isOverrideClassName ? 
className : 'RtcEngine'}_queryCodecCapability';
+    final param = createParams({'size': size});
+    final callApiResult = await irisMethodChannel.invokeMethod(
+        IrisMethodCall(apiType, jsonEncode(param), buffers: null));
+    if (callApiResult.irisReturnCode < 0) {
+      throw AgoraRtcException(code: callApiResult.irisReturnCode);
+    }
+    final rm = callApiResult.data;
+    final result = rm['result'];
+    if (result < 0) {
+      throw AgoraRtcException(code: result);
+    }
+    final queryCodecCapabilityJson =
+        RtcEngineQueryCodecCapabilityJson.fromJson(rm);
+    return queryCodecCapabilityJson.codecInfo;
+  }
+
  @override
  Future<void> joinChannel(
      {required String token,
@@ -361,12 +383,14 @@
  }

  @override
-  Future<void> startEchoTest({int intervalInSeconds = 10}) async {
+  Future<void> startEchoTest(EchoTestConfiguration config) async {
    final apiType =
        '${isOverrideClassName ? className : 'RtcEngine'}_startEchoTest';
-    final param = createParams({'intervalInSeconds': intervalInSeconds});
+    final param = createParams({'config': config.toJson()});
+    final List<Uint8List> buffers = [];
+    buffers.addAll(config.collectBufferList());
    final callApiResult = await irisMethodChannel.invokeMethod(
-        IrisMethodCall(apiType, jsonEncode(param), buffers: null));
+        IrisMethodCall(apiType, jsonEncode(param), buffers: buffers));
    if (callApiResult.irisReturnCode < 0) {
      throw AgoraRtcException(code: callApiResult.irisReturnCode);
    }
@@ -668,13 +692,14 @@
  }

  @override
-  Future<void> enableRemoteSuperResolution(
-      {required int userId, required bool enable}) async {
+  Future<void> setupRemoteVideo(VideoCanvas canvas) async {
    final apiType =
-        '${isOverrideClassName ? className : 'RtcEngine'}_enableRemoteSuperResolution';
-    final param = createParams({'userId': userId, 'enable': enable});
+        '${isOverrideClassName ? className : 'RtcEngine'}_setupRemoteVideo';
+    final param = createParams({'canvas': canvas.toJson()});
+    final List<Uint8List> buffers = [];
+    buffers.addAll(canvas.collectBufferList());
    final callApiResult = await irisMethodChannel.invokeMethod(
-        IrisMethodCall(apiType, jsonEncode(param), buffers: null));
+        IrisMethodCall(apiType, jsonEncode(param), buffers: buffers));
    if (callApiResult.irisReturnCode < 0) {
      throw AgoraRtcException(code: callApiResult.irisReturnCode);
    }
@@ -686,9 +711,9 @@
  }

  @override
-  Future<void> setupRemoteVideo(VideoCanvas canvas) async {
+  Future<void> setupLocalVideo(VideoCanvas canvas) async {
    final apiType =
-        '${isOverrideClassName ? className : 'RtcEngine'}_setupRemoteVideo';
+        '${isOverrideClassName ? className : 'RtcEngine'}_setupLocalVideo';
    final param = createParams({'canvas': canvas.toJson()});
    final List<Uint8List> buffers = [];
    buffers.addAll(canvas.collectBufferList());
@@ -705,14 +730,13 @@
  }

  @override
-  Future<void> setupLocalVideo(VideoCanvas canvas) async {
+  Future<void> setVideoScenario(
+      VideoApplicationScenarioType scenarioType) async {
    final apiType =
-        '${isOverrideClassName ? className : 'RtcEngine'}_setupLocalVideo';
-    final param = createParams({'canvas': canvas.toJson()});
-    final List<Uint8List> buffers = [];
-    buffers.addAll(canvas.collectBufferList());
+        '${isOverrideClassName ?
className : 'RtcEngine'}_setVideoScenario'; + final param = createParams({'scenarioType': scenarioType.value()}); final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); if (callApiResult.irisReturnCode < 0) { throw AgoraRtcException(code: callApiResult.irisReturnCode); } @@ -1169,7 +1193,7 @@ class RtcEngineImpl implements RtcEngine { } @override - Future createMediaPlayer() async { + Future createMediaPlayer() async { final apiType = '${isOverrideClassName ? className : 'RtcEngine'}_createMediaPlayer'; final param = createParams({}); @@ -1200,6 +1224,40 @@ class RtcEngineImpl implements RtcEngine { } } + @override + Future createMediaRecorder(RecorderStreamInfo info) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_createMediaRecorder'; + final param = createParams({'info': info.toJson()}); + final List buffers = []; + buffers.addAll(info.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + return result as MediaRecorder; + } + + @override + Future destroyMediaRecorder(MediaRecorder mediaRecorder) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_destroyMediaRecorder'; + final param = createParams({'mediaRecorder': mediaRecorder}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future startAudioMixing( {required String filePath, @@ -2003,6 +2061,23 @@ class RtcEngineImpl implements RtcEngine { } } + @override + Future setLocalVoiceFormant(double formantRatio) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_setLocalVoiceFormant'; + final param = createParams({'formantRatio': formantRatio}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future setLocalVoiceEqualization( {required AudioEqualizationBandFrequency bandFrequency, @@ -2262,103 +2337,12 @@ class RtcEngineImpl implements RtcEngine { final result = rm['result']; } - @override - Future enableEchoCancellationExternal( - {required bool enabled, required int audioSourceDelay}) async { - final apiType = - '${isOverrideClassName ? 
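
// --- Editor's illustrative sketch, not part of the generated bindings. ---
// Media recorders are now created per stream with
// createMediaRecorder(RecorderStreamInfo) and released with
// destroyMediaRecorder, replacing the removed getMediaRecorder getter (see
// further below). RecorderStreamInfo/MediaRecorderConfiguration field names
// are assumed from the 4.2.0 public API; the storage path is a placeholder.
Future<void> recordRemoteUser(RtcEngine engine) async {
  final recorder = await engine.createMediaRecorder(
      const RecorderStreamInfo(channelId: 'demo', uid: 2000));
  if (recorder == null) return;
  await recorder.startRecording(const MediaRecorderConfiguration(
      storagePath: '/path/to/demo.mp4'));
  // ... capture for a while ...
  await recorder.stopRecording();
  await engine.destroyMediaRecorder(recorder);
  // Also new in 4.2.0: shift the local voice formant within [-1.0, 1.0].
  await engine.setLocalVoiceFormant(0.5);
}
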
className : 'RtcEngine'}_enableEchoCancellationExternal'; - final param = createParams( - {'enabled': enabled, 'audioSourceDelay': audioSourceDelay}); - final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: null)); - if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); - } - final rm = callApiResult.data; - final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); - } - } - @override Future enableCustomAudioLocalPlayback( - {required int sourceId, required bool enabled}) async { + {required int trackId, required bool enabled}) async { final apiType = '${isOverrideClassName ? className : 'RtcEngine'}_enableCustomAudioLocalPlayback'; - final param = createParams({'sourceId': sourceId, 'enabled': enabled}); - final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: null)); - if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); - } - final rm = callApiResult.data; - final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); - } - } - - @override - Future startPrimaryCustomAudioTrack(AudioTrackConfig config) async { - final apiType = - '${isOverrideClassName ? className : 'RtcEngine'}_startPrimaryCustomAudioTrack'; - final param = createParams({'config': config.toJson()}); - final List buffers = []; - buffers.addAll(config.collectBufferList()); - final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); - if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); - } - final rm = callApiResult.data; - final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); - } - } - - @override - Future stopPrimaryCustomAudioTrack() async { - final apiType = - '${isOverrideClassName ? className : 'RtcEngine'}_stopPrimaryCustomAudioTrack'; - final param = createParams({}); - final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: null)); - if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); - } - final rm = callApiResult.data; - final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); - } - } - - @override - Future startSecondaryCustomAudioTrack(AudioTrackConfig config) async { - final apiType = - '${isOverrideClassName ? className : 'RtcEngine'}_startSecondaryCustomAudioTrack'; - final param = createParams({'config': config.toJson()}); - final List buffers = []; - buffers.addAll(config.collectBufferList()); - final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); - if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); - } - final rm = callApiResult.data; - final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); - } - } - - @override - Future stopSecondaryCustomAudioTrack() async { - final apiType = - '${isOverrideClassName ? 
className : 'RtcEngine'}_stopSecondaryCustomAudioTrack'; - final param = createParams({}); + final param = createParams({'trackId': trackId, 'enabled': enabled}); final callApiResult = await irisMethodChannel.invokeMethod( IrisMethodCall(apiType, jsonEncode(param), buffers: null)); if (callApiResult.irisReturnCode < 0) { @@ -2664,6 +2648,27 @@ class RtcEngineImpl implements RtcEngine { } } + @override + Future setHighPriorityUserList( + {required List uidList, + required int uidNum, + required StreamFallbackOptions option}) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_setHighPriorityUserList'; + final param = createParams( + {'uidList': uidList, 'uidNum': uidNum, 'option': option.value()}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future enableLoopbackRecording( {required bool enabled, String? deviceName}) async { @@ -3452,24 +3457,6 @@ class RtcEngineImpl implements RtcEngine { } } - @override - Future setScreenCaptureScenario( - ScreenScenarioType screenScenario) async { - final apiType = - '${isOverrideClassName ? className : 'RtcEngine'}_setScreenCaptureScenario'; - final param = createParams({'screenScenario': screenScenario.value()}); - final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: null)); - if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); - } - final rm = callApiResult.data; - final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); - } - } - @override Future updateScreenCaptureRegion(Rectangle regionRect) async { final apiType = @@ -3524,9 +3511,25 @@ class RtcEngineImpl implements RtcEngine { } final rm = callApiResult.data; final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); + } + + @override + Future startScreenCaptureBySourceType( + {required VideoSourceType sourceType, + required ScreenCaptureConfiguration config}) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_startScreenCaptureBySourceType'; + final param = createParams( + {'sourceType': sourceType.value(), 'config': config.toJson()}); + final List buffers = []; + buffers.addAll(config.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); } + final rm = callApiResult.data; + final result = rm['result']; } @override @@ -3550,9 +3553,9 @@ class RtcEngineImpl implements RtcEngine { } @override - Future stopScreenCapture() async { + Future queryScreenCaptureCapability() async { final apiType = - '${isOverrideClassName ? className : 'RtcEngine'}_stopScreenCapture'; + '${isOverrideClassName ? 
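
// --- Editor's illustrative sketch, not part of the generated bindings. ---
// setHighPriorityUserList (added above) is the single-channel counterpart of
// the Ex variant shown earlier, and startScreenCaptureBySourceType replaces
// the removed primary/secondary screen-capture methods. displayId is a
// placeholder; ScreenCaptureConfiguration fields are assumed from the 4.2.0 API.
Future<void> pinUsersAndShareScreen(RtcEngine engine) async {
  await engine.setHighPriorityUserList(
    uidList: const [2000],
    uidNum: 1,
    option: StreamFallbackOptions.streamFallbackOptionVideoStreamLow,
  );
  await engine.startScreenCaptureBySourceType(
    sourceType: VideoSourceType.videoSourceScreenPrimary,
    config: const ScreenCaptureConfiguration(
      isCaptureWindow: false,
      displayId: 0,
    ),
  );
}
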
className : 'RtcEngine'}_queryScreenCaptureCapability'; final param = createParams({}); final callApiResult = await irisMethodChannel.invokeMethod( IrisMethodCall(apiType, jsonEncode(param), buffers: null)); @@ -3561,11 +3564,55 @@ class RtcEngineImpl implements RtcEngine { } final rm = callApiResult.data; final result = rm['result']; + return result as int; + } + + @override + Future setScreenCaptureScenario( + ScreenScenarioType screenScenario) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_setScreenCaptureScenario'; + final param = createParams({'screenScenario': screenScenario.value()}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; if (result < 0) { throw AgoraRtcException(code: result); } } + @override + Future stopScreenCapture() async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_stopScreenCapture'; + final param = createParams({}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + } + + @override + Future stopScreenCaptureBySourceType(VideoSourceType sourceType) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_stopScreenCaptureBySourceType'; + final param = createParams({'sourceType': sourceType.value()}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + } + @override Future getCallId() async { final apiType = @@ -3754,31 +3801,13 @@ class RtcEngineImpl implements RtcEngine { } @override - Future startPrimaryCameraCapture( - CameraCapturerConfiguration config) async { - final apiType = - '${isOverrideClassName ? className : 'RtcEngine'}_startPrimaryCameraCapture'; - final param = createParams({'config': config.toJson()}); - final List buffers = []; - buffers.addAll(config.collectBufferList()); - final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); - if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); - } - final rm = callApiResult.data; - final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); - } - } - - @override - Future startSecondaryCameraCapture( - CameraCapturerConfiguration config) async { + Future startCameraCapture( + {required VideoSourceType sourceType, + required CameraCapturerConfiguration config}) async { final apiType = - '${isOverrideClassName ? className : 'RtcEngine'}_startSecondaryCameraCapture'; - final param = createParams({'config': config.toJson()}); + '${isOverrideClassName ? 
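
// --- Editor's illustrative sketch, not part of the generated bindings. ---
// queryScreenCaptureCapability (above) reports the highest capture level the
// device supports, and a capture started per source is stopped with the
// matching stopScreenCaptureBySourceType call.
Future<void> stopScreenShare(RtcEngine engine) async {
  final capability = await engine.queryScreenCaptureCapability();
  print('screen capture capability level: $capability');
  await engine.stopScreenCaptureBySourceType(
      VideoSourceType.videoSourceScreenPrimary);
}
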
className : 'RtcEngine'}_startCameraCapture'; + final param = createParams( + {'sourceType': sourceType.value(), 'config': config.toJson()}); final List buffers = []; buffers.addAll(config.collectBufferList()); final callApiResult = await irisMethodChannel.invokeMethod( @@ -3794,27 +3823,10 @@ class RtcEngineImpl implements RtcEngine { } @override - Future stopPrimaryCameraCapture() async { + Future stopCameraCapture(VideoSourceType sourceType) async { final apiType = - '${isOverrideClassName ? className : 'RtcEngine'}_stopPrimaryCameraCapture'; - final param = createParams({}); - final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: null)); - if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); - } - final rm = callApiResult.data; - final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); - } - } - - @override - Future stopSecondaryCameraCapture() async { - final apiType = - '${isOverrideClassName ? className : 'RtcEngine'}_stopSecondaryCameraCapture'; - final param = createParams({}); + '${isOverrideClassName ? className : 'RtcEngine'}_stopCameraCapture'; + final param = createParams({'sourceType': sourceType.value()}); final callApiResult = await irisMethodChannel.invokeMethod( IrisMethodCall(apiType, jsonEncode(param), buffers: null)); if (callApiResult.irisReturnCode < 0) { @@ -3867,80 +3879,6 @@ class RtcEngineImpl implements RtcEngine { } } - @override - Future startPrimaryScreenCapture( - ScreenCaptureConfiguration config) async { - final apiType = - '${isOverrideClassName ? className : 'RtcEngine'}_startPrimaryScreenCapture'; - final param = createParams({'config': config.toJson()}); - final List buffers = []; - buffers.addAll(config.collectBufferList()); - final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); - if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); - } - final rm = callApiResult.data; - final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); - } - } - - @override - Future startSecondaryScreenCapture( - ScreenCaptureConfiguration config) async { - final apiType = - '${isOverrideClassName ? className : 'RtcEngine'}_startSecondaryScreenCapture'; - final param = createParams({'config': config.toJson()}); - final List buffers = []; - buffers.addAll(config.collectBufferList()); - final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); - if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); - } - final rm = callApiResult.data; - final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); - } - } - - @override - Future stopPrimaryScreenCapture() async { - final apiType = - '${isOverrideClassName ? 
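
// --- Editor's illustrative sketch, not part of the generated bindings. ---
// The removed start/stopPrimaryCameraCapture and start/stopSecondaryCameraCapture
// pairs collapse into a single API keyed by VideoSourceType. The device id is
// a placeholder that a real app would obtain from VideoDeviceManager.
Future<void> runSecondCamera(RtcEngine engine) async {
  await engine.startCameraCapture(
    sourceType: VideoSourceType.videoSourceCameraSecondary,
    config: const CameraCapturerConfiguration(
      deviceId: '<camera-device-id>',
      format: VideoFormat(width: 1280, height: 720, fps: 30),
    ),
  );
  // ... publish with ChannelMediaOptions(publishSecondaryCameraTrack: true) ...
  await engine.stopCameraCapture(VideoSourceType.videoSourceCameraSecondary);
}
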
className : 'RtcEngine'}_stopPrimaryScreenCapture'; - final param = createParams({}); - final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: null)); - if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); - } - final rm = callApiResult.data; - final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); - } - } - - @override - Future stopSecondaryScreenCapture() async { - final apiType = - '${isOverrideClassName ? className : 'RtcEngine'}_stopSecondaryScreenCapture'; - final param = createParams({}); - final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: null)); - if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); - } - final rm = callApiResult.data; - final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); - } - } - @override Future getConnectionState() async { final apiType = @@ -4309,6 +4247,24 @@ class RtcEngineImpl implements RtcEngine { } } + @override + Future setAINSMode( + {required bool enabled, required AudioAinsMode mode}) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_setAINSMode'; + final param = createParams({'enabled': enabled, 'mode': mode.value()}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future registerLocalUserAccount( {required String appId, required String userAccount}) async { @@ -4421,6 +4377,26 @@ class RtcEngineImpl implements RtcEngine { return getUserInfoByUidJson.userInfo; } + @override + Future startOrUpdateChannelMediaRelay( + ChannelMediaRelayConfiguration configuration) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_startOrUpdateChannelMediaRelay'; + final param = createParams({'configuration': configuration.toJson()}); + final List buffers = []; + buffers.addAll(configuration.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future startChannelMediaRelay( ChannelMediaRelayConfiguration configuration) async { @@ -4712,10 +4688,10 @@ class RtcEngineImpl implements RtcEngine { @override Future adjustCustomAudioPublishVolume( - {required int sourceId, required int volume}) async { + {required int trackId, required int volume}) async { final apiType = '${isOverrideClassName ? 
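
// --- Editor's illustrative sketch, not part of the generated bindings. ---
// setAINSMode is new in 4.2.0 and startOrUpdateChannelMediaRelay supersedes
// the separate start/update relay calls. The AudioAinsMode value name is
// assumed from the generated enum; tokens are placeholders.
Future<void> tuneAudioAndRelay(RtcEngine engine) async {
  await engine.setAINSMode(enabled: true, mode: AudioAinsMode.ainsModeBalanced);
  await engine.startOrUpdateChannelMediaRelay(
      const ChannelMediaRelayConfiguration(
    srcInfo: ChannelMediaInfo(channelName: 'src', token: '<token>', uid: 0),
    destInfos: [ChannelMediaInfo(channelName: 'dst', token: '<token>', uid: 0)],
    destCount: 1,
  ));
}
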
className : 'RtcEngine'}_adjustCustomAudioPublishVolume'; - final param = createParams({'sourceId': sourceId, 'volume': volume}); + final param = createParams({'trackId': trackId, 'volume': volume}); final callApiResult = await irisMethodChannel.invokeMethod( IrisMethodCall(apiType, jsonEncode(param), buffers: null)); if (callApiResult.irisReturnCode < 0) { @@ -4730,10 +4706,10 @@ class RtcEngineImpl implements RtcEngine { @override Future adjustCustomAudioPlayoutVolume( - {required int sourceId, required int volume}) async { + {required int trackId, required int volume}) async { final apiType = '${isOverrideClassName ? className : 'RtcEngine'}_adjustCustomAudioPlayoutVolume'; - final param = createParams({'sourceId': sourceId, 'volume': volume}); + final param = createParams({'trackId': trackId, 'volume': volume}); final callApiResult = await irisMethodChannel.invokeMethod( IrisMethodCall(apiType, jsonEncode(param), buffers: null)); if (callApiResult.irisReturnCode < 0) { @@ -4888,6 +4864,72 @@ class RtcEngineImpl implements RtcEngine { return result as int; } + @override + Future setParameters(String parameters) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_setParameters'; + final param = createParams({'parameters': parameters}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + + @override + Future startMediaRenderingTracing() async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_startMediaRenderingTracing'; + final param = createParams({}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + + @override + Future enableInstantMediaRendering() async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_enableInstantMediaRendering'; + final param = createParams({}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + + @override + Future getNtpWallTimeInMs() async { + final apiType = + '${isOverrideClassName ? 
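
// --- Editor's illustrative sketch, not part of the generated bindings. ---
// Pairs the new rendering-tracing and NTP helpers added above with the
// onVideoRenderingTracingResult callback defined later in this patch.
Future<void> traceFirstFrame(RtcEngine engine) async {
  await engine.enableInstantMediaRendering(); // call before joining a channel
  await engine.startMediaRenderingTracing(); // mark the tracing start point
  final ntpMs = await engine.getNtpWallTimeInMs();
  print('NTP wall clock: $ntpMs ms');
}
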
className : 'RtcEngine'}_getNtpWallTimeInMs'; + final param = createParams({}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + return result as int; + } + @override AudioDeviceManager getAudioDeviceManager() { // Implementation template @@ -4948,21 +4990,6 @@ class RtcEngineImpl implements RtcEngine { throw UnimplementedError('Unimplement for getMediaEngine'); } - @override - MediaRecorder getMediaRecorder() { -// Implementation template -// final apiType = '${isOverrideClassName ? className : 'RtcEngine'}_getMediaRecorder'; -// final param = createParams({// // }); -// final callApiResult = irisMethodChannel.invokeMethod(IrisMethodCall(apiType, jsonEncode(param), buffers:null)); -// if (callApiResult.irisReturnCode < 0) { -// throw AgoraRtcException(code: callApiResult.irisReturnCode); -// } -// final rm = callApiResult.data; -// final result = rm['result']; -// return result as MediaRecorder; - throw UnimplementedError('Unimplement for getMediaRecorder'); - } - @override LocalSpatialAudioEngine getLocalSpatialAudioEngine() { // Implementation template @@ -5025,20 +5052,6 @@ class RtcEngineImpl implements RtcEngine { 'Unimplement for unregisterAudioEncodedFrameObserver'); } - @override - Future setParameters(String parameters) async { - final apiType = - '${isOverrideClassName ? className : 'RtcEngine'}_setParameters'; - final param = createParams({'parameters': parameters}); - final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: null)); - if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); - } - final rm = callApiResult.data; - final result = rm['result']; - } - @override Future getNativeHandle() async { final apiType = diff --git a/lib/src/binding/agora_spatial_audio_impl.dart b/lib/src/binding/agora_spatial_audio_impl.dart index 1eef4d39c..6905a64df 100644 --- a/lib/src/binding/agora_spatial_audio_impl.dart +++ b/lib/src/binding/agora_spatial_audio_impl.dart @@ -1,7 +1,9 @@ +/// GENERATED BY terra, DO NOT MODIFY BY HAND. + +// ignore_for_file: public_member_api_docs, unused_local_variable, annotate_overrides import 'package:agora_rtc_engine/src/binding_forward_export.dart'; import 'package:agora_rtc_engine/src/binding/impl_forward_export.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -// ignore_for_file: public_member_api_docs, unused_local_variable, annotate_overrides class BaseSpatialAudioEngineImpl implements BaseSpatialAudioEngine { BaseSpatialAudioEngineImpl(this.irisMethodChannel); diff --git a/lib/src/binding/audio_device_manager_impl.dart b/lib/src/binding/audio_device_manager_impl.dart index 02ac33bc5..d59978c29 100644 --- a/lib/src/binding/audio_device_manager_impl.dart +++ b/lib/src/binding/audio_device_manager_impl.dart @@ -1,7 +1,9 @@ +/// GENERATED BY terra, DO NOT MODIFY BY HAND. 
+ +// ignore_for_file: public_member_api_docs, unused_local_variable, annotate_overrides import 'package:agora_rtc_engine/src/binding_forward_export.dart'; import 'package:agora_rtc_engine/src/binding/impl_forward_export.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -// ignore_for_file: public_member_api_docs, unused_local_variable, annotate_overrides class AudioDeviceManagerImpl implements AudioDeviceManager { AudioDeviceManagerImpl(this.irisMethodChannel); diff --git a/lib/src/binding/call_api_event_handler_buffer_ext.dart b/lib/src/binding/call_api_event_handler_buffer_ext.dart index c3f11fe1d..67b9d22d4 100644 --- a/lib/src/binding/call_api_event_handler_buffer_ext.dart +++ b/lib/src/binding/call_api_event_handler_buffer_ext.dart @@ -1,6 +1,7 @@ -import 'package:agora_rtc_engine/src/binding_forward_export.dart'; +/// GENERATED BY terra, DO NOT MODIFY BY HAND. // ignore_for_file: public_member_api_docs, unused_local_variable, prefer_is_empty +import 'package:agora_rtc_engine/src/binding_forward_export.dart'; extension LocalVideoStatsBufferExt on LocalVideoStats { LocalVideoStats fillBuffers(List bufferList) { @@ -14,6 +15,18 @@ extension LocalVideoStatsBufferExt on LocalVideoStats { } } +extension RemoteAudioStatsBufferExt on RemoteAudioStats { + RemoteAudioStats fillBuffers(List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + extension RemoteVideoStatsBufferExt on RemoteVideoStats { RemoteVideoStats fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; @@ -88,18 +101,6 @@ extension PublisherConfigurationBufferExt on PublisherConfiguration { } } -extension AudioTrackConfigBufferExt on AudioTrackConfig { - AudioTrackConfig fillBuffers(List bufferList) { - if (bufferList.isEmpty) return this; - return this; - } - - List collectBufferList() { - final bufferList = []; - return bufferList; - } -} - extension CameraCapturerConfigurationBufferExt on CameraCapturerConfiguration { CameraCapturerConfiguration fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; @@ -455,6 +456,18 @@ extension AdvanceOptionsBufferExt on AdvanceOptions { } } +extension CodecCapInfoBufferExt on CodecCapInfo { + CodecCapInfo fillBuffers(List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + extension VideoEncoderConfigurationBufferExt on VideoEncoderConfiguration { VideoEncoderConfiguration fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; @@ -551,18 +564,6 @@ extension ClientRoleOptionsBufferExt on ClientRoleOptions { } } -extension RemoteAudioStatsBufferExt on RemoteAudioStats { - RemoteAudioStats fillBuffers(List bufferList) { - if (bufferList.isEmpty) return this; - return this; - } - - List collectBufferList() { - final bufferList = []; - return bufferList; - } -} - extension VideoFormatBufferExt on VideoFormat { VideoFormat fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; @@ -847,6 +848,18 @@ extension SegmentationPropertyBufferExt on SegmentationProperty { } } +extension AudioTrackConfigBufferExt on AudioTrackConfig { + AudioTrackConfig fillBuffers(List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + extension ScreenCaptureParametersBufferExt on ScreenCaptureParameters { ScreenCaptureParameters 
fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; @@ -1027,6 +1040,30 @@ extension ScreenCaptureParameters2BufferExt on ScreenCaptureParameters2 { } } +extension VideoRenderingTracingInfoBufferExt on VideoRenderingTracingInfo { + VideoRenderingTracingInfo fillBuffers(List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + +extension RecorderStreamInfoBufferExt on RecorderStreamInfo { + RecorderStreamInfo fillBuffers(List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + extension SpatialAudioParamsBufferExt on SpatialAudioParams { SpatialAudioParams fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; @@ -1122,6 +1159,10 @@ extension ExternalVideoFrameBufferExt on ExternalVideoFrame { if (bufferList.length > 1) { metadataBuffer = bufferList[1]; } + Uint8List? alphaBuffer; + if (bufferList.length > 2) { + alphaBuffer = bufferList[2]; + } return ExternalVideoFrame( type: type, format: format, @@ -1138,7 +1179,8 @@ extension ExternalVideoFrameBufferExt on ExternalVideoFrame { textureId: textureId, matrix: matrix, metadataBuffer: metadataBuffer, - metadataSize: metadataSize); + metadataSize: metadataSize, + alphaBuffer: alphaBuffer); } List collectBufferList() { @@ -1149,6 +1191,9 @@ extension ExternalVideoFrameBufferExt on ExternalVideoFrame { if (metadataBuffer != null) { bufferList.add(metadataBuffer!); } + if (alphaBuffer != null) { + bufferList.add(alphaBuffer!); + } return bufferList; } } @@ -1176,6 +1221,10 @@ extension VideoFrameBufferExt on VideoFrame { if (bufferList.length > 4) { alphaBuffer = bufferList[4]; } + Uint8List? pixelBuffer; + if (bufferList.length > 5) { + pixelBuffer = bufferList[5]; + } return VideoFrame( type: type, width: width, @@ -1193,7 +1242,8 @@ extension VideoFrameBufferExt on VideoFrame { metadataSize: metadataSize, textureId: textureId, matrix: matrix, - alphaBuffer: alphaBuffer); + alphaBuffer: alphaBuffer, + pixelBuffer: pixelBuffer); } List collectBufferList() { @@ -1213,6 +1263,9 @@ extension VideoFrameBufferExt on VideoFrame { if (alphaBuffer != null) { bufferList.add(alphaBuffer!); } + if (pixelBuffer != null) { + bufferList.add(pixelBuffer!); + } return bufferList; } } @@ -1460,6 +1513,18 @@ extension MusicChartInfoBufferExt on MusicChartInfo { } } +extension MusicCacheInfoBufferExt on MusicCacheInfo { + MusicCacheInfo fillBuffers(List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + extension MvPropertyBufferExt on MvProperty { MvProperty fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; diff --git a/lib/src/binding/call_api_impl_params_json.dart b/lib/src/binding/call_api_impl_params_json.dart index 74844735e..40e1fb1e6 100644 --- a/lib/src/binding/call_api_impl_params_json.dart +++ b/lib/src/binding/call_api_impl_params_json.dart @@ -1,7 +1,8 @@ -import 'package:agora_rtc_engine/src/binding_forward_export.dart'; -part 'call_api_impl_params_json.g.dart'; +/// GENERATED BY terra, DO NOT MODIFY BY HAND. 
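
// --- Editor's illustrative sketch, not part of the generated bindings. ---
// The buffer extensions above now ship ExternalVideoFrame.alphaBuffer as a
// third out-of-band buffer (after `buffer` and `metadataBuffer`), collected by
// collectBufferList() and consumed by MediaEngine.pushVideoFrame. Assumes
// `import 'dart:typed_data';` and RGBA/alpha planes of matching dimensions.
Future<void> pushFrameWithAlpha(
    MediaEngine mediaEngine, Uint8List rgba, Uint8List alpha) async {
  await mediaEngine.pushVideoFrame(
      frame: ExternalVideoFrame(
    type: VideoBufferType.videoBufferRawData,
    format: VideoPixelFormat.videoPixelRgba,
    buffer: rgba,
    stride: 640,
    height: 360,
    alphaBuffer: alpha, // new in 4.2.0
    timestamp: DateTime.now().millisecondsSinceEpoch,
  ));
}
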
// ignore_for_file: public_member_api_docs, unused_local_variable +import 'package:agora_rtc_engine/src/binding_forward_export.dart'; +part 'call_api_impl_params_json.g.dart'; @JsonSerializable(explicitToJson: true) class VideoDeviceManagerGetDeviceJson { @@ -28,6 +29,19 @@ class VideoDeviceManagerGetCapabilityJson { _$VideoDeviceManagerGetCapabilityJsonToJson(this); } +@JsonSerializable(explicitToJson: true) +class RtcEngineQueryCodecCapabilityJson { + const RtcEngineQueryCodecCapabilityJson(this.codecInfo); + + @JsonKey(name: 'codecInfo') + final List codecInfo; + factory RtcEngineQueryCodecCapabilityJson.fromJson( + Map json) => + _$RtcEngineQueryCodecCapabilityJsonFromJson(json); + Map toJson() => + _$RtcEngineQueryCodecCapabilityJsonToJson(this); +} + @JsonSerializable(explicitToJson: true) class RtcEngineGetExtensionPropertyJson { const RtcEngineGetExtensionPropertyJson(this.value); @@ -361,6 +375,18 @@ class MusicContentCenterSearchMusicJson { _$MusicContentCenterSearchMusicJsonToJson(this); } +@JsonSerializable(explicitToJson: true) +class MusicContentCenterGetCachesJson { + const MusicContentCenterGetCachesJson(this.cacheInfo); + + @JsonKey(name: 'cacheInfo') + final List cacheInfo; + factory MusicContentCenterGetCachesJson.fromJson(Map json) => + _$MusicContentCenterGetCachesJsonFromJson(json); + Map toJson() => + _$MusicContentCenterGetCachesJsonToJson(this); +} + @JsonSerializable(explicitToJson: true) class MusicContentCenterGetLyricJson { const MusicContentCenterGetLyricJson(this.requestId); diff --git a/lib/src/binding/call_api_impl_params_json.g.dart b/lib/src/binding/call_api_impl_params_json.g.dart index 71a96b221..95a728a1e 100644 --- a/lib/src/binding/call_api_impl_params_json.g.dart +++ b/lib/src/binding/call_api_impl_params_json.g.dart @@ -32,6 +32,20 @@ Map _$VideoDeviceManagerGetCapabilityJsonToJson( 'capability': instance.capability.toJson(), }; +RtcEngineQueryCodecCapabilityJson _$RtcEngineQueryCodecCapabilityJsonFromJson( + Map json) => + RtcEngineQueryCodecCapabilityJson( + (json['codecInfo'] as List) + .map((e) => CodecCapInfo.fromJson(e as Map)) + .toList(), + ); + +Map _$RtcEngineQueryCodecCapabilityJsonToJson( + RtcEngineQueryCodecCapabilityJson instance) => + { + 'codecInfo': instance.codecInfo.map((e) => e.toJson()).toList(), + }; + RtcEngineGetExtensionPropertyJson _$RtcEngineGetExtensionPropertyJsonFromJson( Map json) => RtcEngineGetExtensionPropertyJson( @@ -369,6 +383,20 @@ Map _$MusicContentCenterSearchMusicJsonToJson( 'requestId': instance.requestId, }; +MusicContentCenterGetCachesJson _$MusicContentCenterGetCachesJsonFromJson( + Map json) => + MusicContentCenterGetCachesJson( + (json['cacheInfo'] as List) + .map((e) => MusicCacheInfo.fromJson(e as Map)) + .toList(), + ); + +Map _$MusicContentCenterGetCachesJsonToJson( + MusicContentCenterGetCachesJson instance) => + { + 'cacheInfo': instance.cacheInfo.map((e) => e.toJson()).toList(), + }; + MusicContentCenterGetLyricJson _$MusicContentCenterGetLyricJsonFromJson( Map json) => MusicContentCenterGetLyricJson( diff --git a/lib/src/binding/event_handler_param_json.dart b/lib/src/binding/event_handler_param_json.dart index 74717fc46..2bf878ede 100644 --- a/lib/src/binding/event_handler_param_json.dart +++ b/lib/src/binding/event_handler_param_json.dart @@ -1,7 +1,8 @@ -import 'package:agora_rtc_engine/src/binding_forward_export.dart'; -part 'event_handler_param_json.g.dart'; +/// GENERATED BY terra, DO NOT MODIFY BY HAND. 
// ignore_for_file: public_member_api_docs, unused_local_variable, prefer_is_empty +import 'package:agora_rtc_engine/src/binding_forward_export.dart'; +part 'event_handler_param_json.g.dart'; @JsonSerializable(explicitToJson: true) class RtcEngineEventHandlerOnJoinChannelSuccessJson { @@ -1041,38 +1042,6 @@ extension RtcEngineEventHandlerOnUserEnableLocalVideoJsonBufferExt } } -@JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnApiCallExecutedJson { - const RtcEngineEventHandlerOnApiCallExecutedJson( - {this.err, this.api, this.result}); - - @JsonKey(name: 'err') - final ErrorCodeType? err; - @JsonKey(name: 'api') - final String? api; - @JsonKey(name: 'result') - final String? result; - factory RtcEngineEventHandlerOnApiCallExecutedJson.fromJson( - Map json) => - _$RtcEngineEventHandlerOnApiCallExecutedJsonFromJson(json); - Map toJson() => - _$RtcEngineEventHandlerOnApiCallExecutedJsonToJson(this); -} - -extension RtcEngineEventHandlerOnApiCallExecutedJsonBufferExt - on RtcEngineEventHandlerOnApiCallExecutedJson { - RtcEngineEventHandlerOnApiCallExecutedJson fillBuffers( - List bufferList) { - if (bufferList.isEmpty) return this; - return this; - } - - List collectBufferList() { - final bufferList = []; - return bufferList; - } -} - @JsonSerializable(explicitToJson: true) class RtcEngineEventHandlerOnLocalAudioStatsJson { const RtcEngineEventHandlerOnLocalAudioStatsJson( @@ -1300,9 +1269,9 @@ class RtcEngineEventHandlerOnFacePositionChangedJson { @JsonKey(name: 'imageHeight') final int? imageHeight; @JsonKey(name: 'vecRectangle') - final Rectangle? vecRectangle; + final List? vecRectangle; @JsonKey(name: 'vecDistance') - final int? vecDistance; + final List? vecDistance; @JsonKey(name: 'numFaces') final int? numFaces; factory RtcEngineEventHandlerOnFacePositionChangedJson.fromJson( @@ -2941,6 +2910,70 @@ extension RtcEngineEventHandlerOnUserAccountUpdatedJsonBufferExt } } +@JsonSerializable(explicitToJson: true) +class RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson { + const RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson( + {this.stream, this.error}); + + @JsonKey(name: 'stream') + final TranscodingVideoStream? stream; + @JsonKey(name: 'error') + final VideoTranscoderError? error; + factory RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson.fromJson( + Map json) => + _$RtcEngineEventHandlerOnLocalVideoTranscoderErrorJsonFromJson(json); + Map toJson() => + _$RtcEngineEventHandlerOnLocalVideoTranscoderErrorJsonToJson(this); +} + +extension RtcEngineEventHandlerOnLocalVideoTranscoderErrorJsonBufferExt + on RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson { + RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson fillBuffers( + List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + +@JsonSerializable(explicitToJson: true) +class RtcEngineEventHandlerOnVideoRenderingTracingResultJson { + const RtcEngineEventHandlerOnVideoRenderingTracingResultJson( + {this.connection, this.uid, this.currentEvent, this.tracingInfo}); + + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'uid') + final int? uid; + @JsonKey(name: 'currentEvent') + final MediaTraceEvent? currentEvent; + @JsonKey(name: 'tracingInfo') + final VideoRenderingTracingInfo? 
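
// --- Editor's illustrative sketch, not part of the generated bindings. ---
// onFacePositionChanged now reports one Rectangle and one distance per
// detected face (List-valued vecRectangle/vecDistance), and the new
// onLocalVideoTranscoderError surfaces per-stream transcoder failures.
// Callback shapes are inferred from the JSON classes above.
void listenForFacesAndTranscoderErrors(RtcEngine engine) {
  engine.registerEventHandler(RtcEngineEventHandler(
    onFacePositionChanged: (int imageWidth, int imageHeight,
        List<Rectangle> vecRectangle, List<int> vecDistance, int numFaces) {
      print('$numFaces face(s); distances: $vecDistance');
    },
    onLocalVideoTranscoderError:
        (TranscodingVideoStream stream, VideoTranscoderError error) {
      print('transcoder error $error on source ${stream.sourceType}');
    },
  ));
}
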
tracingInfo; + factory RtcEngineEventHandlerOnVideoRenderingTracingResultJson.fromJson( + Map json) => + _$RtcEngineEventHandlerOnVideoRenderingTracingResultJsonFromJson(json); + Map toJson() => + _$RtcEngineEventHandlerOnVideoRenderingTracingResultJsonToJson(this); +} + +extension RtcEngineEventHandlerOnVideoRenderingTracingResultJsonBufferExt + on RtcEngineEventHandlerOnVideoRenderingTracingResultJson { + RtcEngineEventHandlerOnVideoRenderingTracingResultJson fillBuffers( + List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + @JsonSerializable(explicitToJson: true) class MetadataObserverOnMetadataReceivedJson { const MetadataObserverOnMetadataReceivedJson({this.metadata}); @@ -3157,6 +3190,30 @@ extension AudioEncodedFrameObserverOnMixedAudioEncodedFrameJsonBufferExt } } +@JsonSerializable(explicitToJson: true) +class AudioPcmFrameSinkOnFrameJson { + const AudioPcmFrameSinkOnFrameJson({this.frame}); + + @JsonKey(name: 'frame') + final AudioPcmFrame? frame; + factory AudioPcmFrameSinkOnFrameJson.fromJson(Map json) => + _$AudioPcmFrameSinkOnFrameJsonFromJson(json); + Map toJson() => _$AudioPcmFrameSinkOnFrameJsonToJson(this); +} + +extension AudioPcmFrameSinkOnFrameJsonBufferExt + on AudioPcmFrameSinkOnFrameJson { + AudioPcmFrameSinkOnFrameJson fillBuffers(List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + @JsonSerializable(explicitToJson: true) class AudioFrameObserverBaseOnRecordAudioFrameJson { const AudioFrameObserverBaseOnRecordAudioFrameJson( @@ -3410,8 +3467,11 @@ extension VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJsonBufferExt @JsonSerializable(explicitToJson: true) class VideoFrameObserverOnCaptureVideoFrameJson { - const VideoFrameObserverOnCaptureVideoFrameJson({this.videoFrame}); + const VideoFrameObserverOnCaptureVideoFrameJson( + {this.sourceType, this.videoFrame}); + @JsonKey(name: 'sourceType') + final VideoSourceType? sourceType; @JsonKey(name: 'videoFrame') final VideoFrame? videoFrame; factory VideoFrameObserverOnCaptureVideoFrameJson.fromJson( @@ -3437,8 +3497,11 @@ extension VideoFrameObserverOnCaptureVideoFrameJsonBufferExt @JsonSerializable(explicitToJson: true) class VideoFrameObserverOnPreEncodeVideoFrameJson { - const VideoFrameObserverOnPreEncodeVideoFrameJson({this.videoFrame}); + const VideoFrameObserverOnPreEncodeVideoFrameJson( + {this.sourceType, this.videoFrame}); + @JsonKey(name: 'sourceType') + final VideoSourceType? sourceType; @JsonKey(name: 'videoFrame') final VideoFrame? videoFrame; factory VideoFrameObserverOnPreEncodeVideoFrameJson.fromJson( @@ -3462,117 +3525,6 @@ extension VideoFrameObserverOnPreEncodeVideoFrameJsonBufferExt } } -@JsonSerializable(explicitToJson: true) -class VideoFrameObserverOnSecondaryCameraCaptureVideoFrameJson { - const VideoFrameObserverOnSecondaryCameraCaptureVideoFrameJson( - {this.videoFrame}); - - @JsonKey(name: 'videoFrame') - final VideoFrame? 
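
// --- Editor's illustrative sketch, not part of the generated bindings. ---
// The capture and pre-encode frame callbacks gain a leading VideoSourceType,
// replacing the per-source (secondary camera/screen) callbacks removed below;
// AudioPcmFrameSink.onFrame supersedes the old media-player audio frame
// observer. Assumes a MediaEngine obtained from RtcEngine.getMediaEngine().
void observeCapturedFrames(MediaEngine mediaEngine) {
  mediaEngine.registerVideoFrameObserver(VideoFrameObserver(
    onCaptureVideoFrame: (VideoSourceType sourceType, VideoFrame videoFrame) {
      print('$sourceType frame: ${videoFrame.width}x${videoFrame.height}');
    },
  ));
}
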
videoFrame; - factory VideoFrameObserverOnSecondaryCameraCaptureVideoFrameJson.fromJson( - Map json) => - _$VideoFrameObserverOnSecondaryCameraCaptureVideoFrameJsonFromJson(json); - Map toJson() => - _$VideoFrameObserverOnSecondaryCameraCaptureVideoFrameJsonToJson(this); -} - -extension VideoFrameObserverOnSecondaryCameraCaptureVideoFrameJsonBufferExt - on VideoFrameObserverOnSecondaryCameraCaptureVideoFrameJson { - VideoFrameObserverOnSecondaryCameraCaptureVideoFrameJson fillBuffers( - List bufferList) { - if (bufferList.isEmpty) return this; - return this; - } - - List collectBufferList() { - final bufferList = []; - return bufferList; - } -} - -@JsonSerializable(explicitToJson: true) -class VideoFrameObserverOnSecondaryPreEncodeCameraVideoFrameJson { - const VideoFrameObserverOnSecondaryPreEncodeCameraVideoFrameJson( - {this.videoFrame}); - - @JsonKey(name: 'videoFrame') - final VideoFrame? videoFrame; - factory VideoFrameObserverOnSecondaryPreEncodeCameraVideoFrameJson.fromJson( - Map json) => - _$VideoFrameObserverOnSecondaryPreEncodeCameraVideoFrameJsonFromJson( - json); - Map toJson() => - _$VideoFrameObserverOnSecondaryPreEncodeCameraVideoFrameJsonToJson(this); -} - -extension VideoFrameObserverOnSecondaryPreEncodeCameraVideoFrameJsonBufferExt - on VideoFrameObserverOnSecondaryPreEncodeCameraVideoFrameJson { - VideoFrameObserverOnSecondaryPreEncodeCameraVideoFrameJson fillBuffers( - List bufferList) { - if (bufferList.isEmpty) return this; - return this; - } - - List collectBufferList() { - final bufferList = []; - return bufferList; - } -} - -@JsonSerializable(explicitToJson: true) -class VideoFrameObserverOnScreenCaptureVideoFrameJson { - const VideoFrameObserverOnScreenCaptureVideoFrameJson({this.videoFrame}); - - @JsonKey(name: 'videoFrame') - final VideoFrame? videoFrame; - factory VideoFrameObserverOnScreenCaptureVideoFrameJson.fromJson( - Map json) => - _$VideoFrameObserverOnScreenCaptureVideoFrameJsonFromJson(json); - Map toJson() => - _$VideoFrameObserverOnScreenCaptureVideoFrameJsonToJson(this); -} - -extension VideoFrameObserverOnScreenCaptureVideoFrameJsonBufferExt - on VideoFrameObserverOnScreenCaptureVideoFrameJson { - VideoFrameObserverOnScreenCaptureVideoFrameJson fillBuffers( - List bufferList) { - if (bufferList.isEmpty) return this; - return this; - } - - List collectBufferList() { - final bufferList = []; - return bufferList; - } -} - -@JsonSerializable(explicitToJson: true) -class VideoFrameObserverOnPreEncodeScreenVideoFrameJson { - const VideoFrameObserverOnPreEncodeScreenVideoFrameJson({this.videoFrame}); - - @JsonKey(name: 'videoFrame') - final VideoFrame? 
videoFrame; - factory VideoFrameObserverOnPreEncodeScreenVideoFrameJson.fromJson( - Map json) => - _$VideoFrameObserverOnPreEncodeScreenVideoFrameJsonFromJson(json); - Map toJson() => - _$VideoFrameObserverOnPreEncodeScreenVideoFrameJsonToJson(this); -} - -extension VideoFrameObserverOnPreEncodeScreenVideoFrameJsonBufferExt - on VideoFrameObserverOnPreEncodeScreenVideoFrameJson { - VideoFrameObserverOnPreEncodeScreenVideoFrameJson fillBuffers( - List bufferList) { - if (bufferList.isEmpty) return this; - return this; - } - - List collectBufferList() { - final bufferList = []; - return bufferList; - } -} - @JsonSerializable(explicitToJson: true) class VideoFrameObserverOnMediaPlayerVideoFrameJson { const VideoFrameObserverOnMediaPlayerVideoFrameJson( @@ -3603,63 +3555,6 @@ extension VideoFrameObserverOnMediaPlayerVideoFrameJsonBufferExt } } -@JsonSerializable(explicitToJson: true) -class VideoFrameObserverOnSecondaryScreenCaptureVideoFrameJson { - const VideoFrameObserverOnSecondaryScreenCaptureVideoFrameJson( - {this.videoFrame}); - - @JsonKey(name: 'videoFrame') - final VideoFrame? videoFrame; - factory VideoFrameObserverOnSecondaryScreenCaptureVideoFrameJson.fromJson( - Map json) => - _$VideoFrameObserverOnSecondaryScreenCaptureVideoFrameJsonFromJson(json); - Map toJson() => - _$VideoFrameObserverOnSecondaryScreenCaptureVideoFrameJsonToJson(this); -} - -extension VideoFrameObserverOnSecondaryScreenCaptureVideoFrameJsonBufferExt - on VideoFrameObserverOnSecondaryScreenCaptureVideoFrameJson { - VideoFrameObserverOnSecondaryScreenCaptureVideoFrameJson fillBuffers( - List bufferList) { - if (bufferList.isEmpty) return this; - return this; - } - - List collectBufferList() { - final bufferList = []; - return bufferList; - } -} - -@JsonSerializable(explicitToJson: true) -class VideoFrameObserverOnSecondaryPreEncodeScreenVideoFrameJson { - const VideoFrameObserverOnSecondaryPreEncodeScreenVideoFrameJson( - {this.videoFrame}); - - @JsonKey(name: 'videoFrame') - final VideoFrame? videoFrame; - factory VideoFrameObserverOnSecondaryPreEncodeScreenVideoFrameJson.fromJson( - Map json) => - _$VideoFrameObserverOnSecondaryPreEncodeScreenVideoFrameJsonFromJson( - json); - Map toJson() => - _$VideoFrameObserverOnSecondaryPreEncodeScreenVideoFrameJsonToJson(this); -} - -extension VideoFrameObserverOnSecondaryPreEncodeScreenVideoFrameJsonBufferExt - on VideoFrameObserverOnSecondaryPreEncodeScreenVideoFrameJson { - VideoFrameObserverOnSecondaryPreEncodeScreenVideoFrameJson fillBuffers( - List bufferList) { - if (bufferList.isEmpty) return this; - return this; - } - - List collectBufferList() { - final bufferList = []; - return bufferList; - } -} - @JsonSerializable(explicitToJson: true) class VideoFrameObserverOnRenderVideoFrameJson { const VideoFrameObserverOnRenderVideoFrameJson( @@ -3722,8 +3617,12 @@ extension VideoFrameObserverOnTranscodedVideoFrameJsonBufferExt @JsonSerializable(explicitToJson: true) class MediaRecorderObserverOnRecorderStateChangedJson { const MediaRecorderObserverOnRecorderStateChangedJson( - {this.state, this.error}); + {this.channelId, this.uid, this.state, this.error}); + @JsonKey(name: 'channelId') + final String? channelId; + @JsonKey(name: 'uid') + final int? uid; @JsonKey(name: 'state') final RecorderState? 
state; @JsonKey(name: 'error') @@ -3751,8 +3650,13 @@ extension MediaRecorderObserverOnRecorderStateChangedJsonBufferExt @JsonSerializable(explicitToJson: true) class MediaRecorderObserverOnRecorderInfoUpdatedJson { - const MediaRecorderObserverOnRecorderInfoUpdatedJson({this.info}); + const MediaRecorderObserverOnRecorderInfoUpdatedJson( + {this.channelId, this.uid, this.info}); + @JsonKey(name: 'channelId') + final String? channelId; + @JsonKey(name: 'uid') + final int? uid; @JsonKey(name: 'info') final RecorderInfo? info; factory MediaRecorderObserverOnRecorderInfoUpdatedJson.fromJson( @@ -3776,33 +3680,6 @@ extension MediaRecorderObserverOnRecorderInfoUpdatedJsonBufferExt } } -@JsonSerializable(explicitToJson: true) -class MediaPlayerAudioFrameObserverOnFrameJson { - const MediaPlayerAudioFrameObserverOnFrameJson({this.frame}); - - @JsonKey(name: 'frame') - final AudioPcmFrame? frame; - factory MediaPlayerAudioFrameObserverOnFrameJson.fromJson( - Map json) => - _$MediaPlayerAudioFrameObserverOnFrameJsonFromJson(json); - Map toJson() => - _$MediaPlayerAudioFrameObserverOnFrameJsonToJson(this); -} - -extension MediaPlayerAudioFrameObserverOnFrameJsonBufferExt - on MediaPlayerAudioFrameObserverOnFrameJson { - MediaPlayerAudioFrameObserverOnFrameJson fillBuffers( - List bufferList) { - if (bufferList.isEmpty) return this; - return this; - } - - List collectBufferList() { - final bufferList = []; - return bufferList; - } -} - @JsonSerializable(explicitToJson: true) class MediaPlayerVideoFrameObserverOnFrameJson { const MediaPlayerVideoFrameObserverOnFrameJson({this.frame}); @@ -4149,14 +4026,14 @@ extension MediaPlayerSourceObserverOnAudioVolumeIndicationJsonBufferExt @JsonSerializable(explicitToJson: true) class MusicContentCenterEventHandlerOnMusicChartsResultJson { const MusicContentCenterEventHandlerOnMusicChartsResultJson( - {this.requestId, this.status, this.result}); + {this.requestId, this.result, this.errorCode}); @JsonKey(name: 'requestId') final String? requestId; - @JsonKey(name: 'status') - final MusicContentCenterStatusCode? status; @JsonKey(name: 'result') final List? result; + @JsonKey(name: 'error_code') + final MusicContentCenterStatusCode? errorCode; factory MusicContentCenterEventHandlerOnMusicChartsResultJson.fromJson( Map json) => _$MusicContentCenterEventHandlerOnMusicChartsResultJsonFromJson(json); @@ -4181,14 +4058,14 @@ extension MusicContentCenterEventHandlerOnMusicChartsResultJsonBufferExt @JsonSerializable(explicitToJson: true) class MusicContentCenterEventHandlerOnMusicCollectionResultJson { const MusicContentCenterEventHandlerOnMusicCollectionResultJson( - {this.requestId, this.status, this.result}); + {this.requestId, this.result, this.errorCode}); @JsonKey(name: 'requestId') final String? requestId; - @JsonKey(name: 'status') - final MusicContentCenterStatusCode? status; @JsonKey(name: 'result', ignore: true) final MusicCollection? result; + @JsonKey(name: 'error_code') + final MusicContentCenterStatusCode? errorCode; factory MusicContentCenterEventHandlerOnMusicCollectionResultJson.fromJson( Map json) => _$MusicContentCenterEventHandlerOnMusicCollectionResultJsonFromJson(json); @@ -4213,12 +4090,14 @@ extension MusicContentCenterEventHandlerOnMusicCollectionResultJsonBufferExt @JsonSerializable(explicitToJson: true) class MusicContentCenterEventHandlerOnLyricResultJson { const MusicContentCenterEventHandlerOnLyricResultJson( - {this.requestId, this.lyricUrl}); + {this.requestId, this.lyricUrl, this.errorCode}); @JsonKey(name: 'requestId') final String? 
requestId; @JsonKey(name: 'lyricUrl') final String? lyricUrl; + @JsonKey(name: 'error_code') + final MusicContentCenterStatusCode? errorCode; factory MusicContentCenterEventHandlerOnLyricResultJson.fromJson( Map json) => _$MusicContentCenterEventHandlerOnLyricResultJsonFromJson(json); @@ -4243,18 +4122,22 @@ extension MusicContentCenterEventHandlerOnLyricResultJsonBufferExt @JsonSerializable(explicitToJson: true) class MusicContentCenterEventHandlerOnPreLoadEventJson { const MusicContentCenterEventHandlerOnPreLoadEventJson( - {this.songCode, this.percent, this.status, this.msg, this.lyricUrl}); + {this.songCode, + this.percent, + this.lyricUrl, + this.status, + this.errorCode}); @JsonKey(name: 'songCode') final int? songCode; @JsonKey(name: 'percent') final int? percent; - @JsonKey(name: 'status') - final PreloadStatusCode? status; - @JsonKey(name: 'msg') - final String? msg; @JsonKey(name: 'lyricUrl') final String? lyricUrl; + @JsonKey(name: 'status') + final PreloadStatusCode? status; + @JsonKey(name: 'error_code') + final MusicContentCenterStatusCode? errorCode; factory MusicContentCenterEventHandlerOnPreLoadEventJson.fromJson( Map json) => _$MusicContentCenterEventHandlerOnPreLoadEventJsonFromJson(json); diff --git a/lib/src/binding/event_handler_param_json.g.dart b/lib/src/binding/event_handler_param_json.g.dart index 80f04f050..f2def2db9 100644 --- a/lib/src/binding/event_handler_param_json.g.dart +++ b/lib/src/binding/event_handler_param_json.g.dart @@ -71,6 +71,8 @@ const _$ProxyTypeEnumMap = { ProxyType.tcpProxyType: 2, ProxyType.localProxyType: 3, ProxyType.tcpProxyAutoFallbackType: 4, + ProxyType.httpProxyType: 5, + ProxyType.httpsProxyType: 6, }; RtcEngineEventHandlerOnErrorJson _$RtcEngineEventHandlerOnErrorJsonFromJson( @@ -296,6 +298,8 @@ const _$MediaDeviceTypeEnumMap = { MediaDeviceType.videoRenderDevice: 2, MediaDeviceType.videoCaptureDevice: 3, MediaDeviceType.audioApplicationPlayoutDevice: 4, + MediaDeviceType.audioVirtualPlayoutDevice: 5, + MediaDeviceType.audioVirtualRecordingDevice: 6, }; const _$MediaDeviceStateTypeEnumMap = { @@ -481,6 +485,10 @@ const _$VideoSourceTypeEnumMap = { VideoSourceType.videoSourceRtcImageGif: 8, VideoSourceType.videoSourceRemote: 9, VideoSourceType.videoSourceTranscoded: 10, + VideoSourceType.videoSourceCameraThird: 11, + VideoSourceType.videoSourceCameraFourth: 12, + VideoSourceType.videoSourceScreenThird: 13, + VideoSourceType.videoSourceScreenFourth: 14, VideoSourceType.videoSourceUnknown: 100, }; @@ -649,6 +657,7 @@ const _$RemoteVideoStateReasonEnumMap = { RemoteVideoStateReason.remoteVideoStateReasonVideoStreamTypeChangeToLow: 10, RemoteVideoStateReason.remoteVideoStateReasonVideoStreamTypeChangeToHigh: 11, RemoteVideoStateReason.remoteVideoStateReasonSdkInBackground: 12, + RemoteVideoStateReason.remoteVideoStateReasonCodecNotSupport: 13, }; RtcEngineEventHandlerOnFirstRemoteVideoFrameJson @@ -822,23 +831,6 @@ Map _$RtcEngineEventHandlerOnUserEnableLocalVideoJsonToJson( 'enabled': instance.enabled, }; -RtcEngineEventHandlerOnApiCallExecutedJson - _$RtcEngineEventHandlerOnApiCallExecutedJsonFromJson( - Map json) => - RtcEngineEventHandlerOnApiCallExecutedJson( - err: $enumDecodeNullable(_$ErrorCodeTypeEnumMap, json['err']), - api: json['api'] as String?, - result: json['result'] as String?, - ); - -Map _$RtcEngineEventHandlerOnApiCallExecutedJsonToJson( - RtcEngineEventHandlerOnApiCallExecutedJson instance) => - { - 'err': _$ErrorCodeTypeEnumMap[instance.err], - 'api': instance.api, - 'result': instance.result, - }; - 
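
// --- Editor's illustrative sketch, not part of the generated bindings. ---
// The music-center callbacks drop the leading status parameter in favour of a
// trailing errorCode (serialized as 'error_code'), and onPreLoadEvent now
// orders lyricUrl before status. Parameter order follows the JSON classes
// above; exact callback signatures should be checked against
// agora_music_content_center.dart.
final mccHandler = MusicContentCenterEventHandler(
  onMusicChartsResult: (String requestId, List<MusicChartInfo> result,
      MusicContentCenterStatusCode errorCode) {
    print('charts: ${result.length} entries, error: $errorCode');
  },
  onPreLoadEvent: (int songCode, int percent, String lyricUrl,
      PreloadStatusCode status, MusicContentCenterStatusCode errorCode) {
    print('preload $songCode: $percent%, status: $status');
  },
);
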
RtcEngineEventHandlerOnLocalAudioStatsJson _$RtcEngineEventHandlerOnLocalAudioStatsJsonFromJson( Map json) => @@ -975,11 +967,12 @@ RtcEngineEventHandlerOnFacePositionChangedJson RtcEngineEventHandlerOnFacePositionChangedJson( imageWidth: json['imageWidth'] as int?, imageHeight: json['imageHeight'] as int?, - vecRectangle: json['vecRectangle'] == null - ? null - : Rectangle.fromJson( - json['vecRectangle'] as Map), - vecDistance: json['vecDistance'] as int?, + vecRectangle: (json['vecRectangle'] as List?) + ?.map((e) => Rectangle.fromJson(e as Map)) + .toList(), + vecDistance: (json['vecDistance'] as List?) + ?.map((e) => e as int) + .toList(), numFaces: json['numFaces'] as int?, ); @@ -988,7 +981,7 @@ Map _$RtcEngineEventHandlerOnFacePositionChangedJsonToJson( { 'imageWidth': instance.imageWidth, 'imageHeight': instance.imageHeight, - 'vecRectangle': instance.vecRectangle?.toJson(), + 'vecRectangle': instance.vecRectangle?.map((e) => e.toJson()).toList(), 'vecDistance': instance.vecDistance, 'numFaces': instance.numFaces, }; @@ -1822,7 +1815,7 @@ const _$ConnectionChangedReasonTypeEnumMap = { ConnectionChangedReasonType.connectionChangedClientIpAddressChangedByUser: 18, ConnectionChangedReasonType.connectionChangedSameUidLogin: 19, ConnectionChangedReasonType.connectionChangedTooManyBroadcasters: 20, - ConnectionChangedReasonType.connectionChangedLicenseVerifyFailed: 21, + ConnectionChangedReasonType.connectionChangedLicenseValidationFailure: 21, }; RtcEngineEventHandlerOnWlAccMessageJson @@ -2217,6 +2210,68 @@ Map _$RtcEngineEventHandlerOnUserAccountUpdatedJsonToJson( 'userAccount': instance.userAccount, }; +RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson + _$RtcEngineEventHandlerOnLocalVideoTranscoderErrorJsonFromJson( + Map json) => + RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson( + stream: json['stream'] == null + ? null + : TranscodingVideoStream.fromJson( + json['stream'] as Map), + error: + $enumDecodeNullable(_$VideoTranscoderErrorEnumMap, json['error']), + ); + +Map + _$RtcEngineEventHandlerOnLocalVideoTranscoderErrorJsonToJson( + RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson instance) => + { + 'stream': instance.stream?.toJson(), + 'error': _$VideoTranscoderErrorEnumMap[instance.error], + }; + +const _$VideoTranscoderErrorEnumMap = { + VideoTranscoderError.vtErrOk: 0, + VideoTranscoderError.vtErrVideoSourceNotReady: 1, + VideoTranscoderError.vtErrInvalidVideoSourceType: 2, + VideoTranscoderError.vtErrInvalidImagePath: 3, + VideoTranscoderError.vtErrUnsupportImageFormat: 4, + VideoTranscoderError.vtErrInvalidLayout: 5, + VideoTranscoderError.vtErrInternal: 20, +}; + +RtcEngineEventHandlerOnVideoRenderingTracingResultJson + _$RtcEngineEventHandlerOnVideoRenderingTracingResultJsonFromJson( + Map json) => + RtcEngineEventHandlerOnVideoRenderingTracingResultJson( + connection: json['connection'] == null + ? null + : RtcConnection.fromJson( + json['connection'] as Map), + uid: json['uid'] as int?, + currentEvent: $enumDecodeNullable( + _$MediaTraceEventEnumMap, json['currentEvent']), + tracingInfo: json['tracingInfo'] == null + ? 
null + : VideoRenderingTracingInfo.fromJson( + json['tracingInfo'] as Map), + ); + +Map + _$RtcEngineEventHandlerOnVideoRenderingTracingResultJsonToJson( + RtcEngineEventHandlerOnVideoRenderingTracingResultJson instance) => + { + 'connection': instance.connection?.toJson(), + 'uid': instance.uid, + 'currentEvent': _$MediaTraceEventEnumMap[instance.currentEvent], + 'tracingInfo': instance.tracingInfo?.toJson(), + }; + +const _$MediaTraceEventEnumMap = { + MediaTraceEvent.mediaTraceEventVideoRendered: 0, + MediaTraceEvent.mediaTraceEventVideoDecoded: 1, +}; + MetadataObserverOnMetadataReceivedJson _$MetadataObserverOnMetadataReceivedJsonFromJson( Map json) => @@ -2345,6 +2400,20 @@ Map 'audioEncodedFrameInfo': instance.audioEncodedFrameInfo?.toJson(), }; +AudioPcmFrameSinkOnFrameJson _$AudioPcmFrameSinkOnFrameJsonFromJson( + Map json) => + AudioPcmFrameSinkOnFrameJson( + frame: json['frame'] == null + ? null + : AudioPcmFrame.fromJson(json['frame'] as Map), + ); + +Map _$AudioPcmFrameSinkOnFrameJsonToJson( + AudioPcmFrameSinkOnFrameJson instance) => + { + 'frame': instance.frame?.toJson(), + }; + AudioFrameObserverBaseOnRecordAudioFrameJson _$AudioFrameObserverBaseOnRecordAudioFrameJsonFromJson( Map json) => @@ -2491,6 +2560,8 @@ VideoFrameObserverOnCaptureVideoFrameJson _$VideoFrameObserverOnCaptureVideoFrameJsonFromJson( Map json) => VideoFrameObserverOnCaptureVideoFrameJson( + sourceType: + $enumDecodeNullable(_$VideoSourceTypeEnumMap, json['sourceType']), videoFrame: json['videoFrame'] == null ? null : VideoFrame.fromJson(json['videoFrame'] as Map), @@ -2499,6 +2570,7 @@ VideoFrameObserverOnCaptureVideoFrameJson Map _$VideoFrameObserverOnCaptureVideoFrameJsonToJson( VideoFrameObserverOnCaptureVideoFrameJson instance) => { + 'sourceType': _$VideoSourceTypeEnumMap[instance.sourceType], 'videoFrame': instance.videoFrame?.toJson(), }; @@ -2506,6 +2578,8 @@ VideoFrameObserverOnPreEncodeVideoFrameJson _$VideoFrameObserverOnPreEncodeVideoFrameJsonFromJson( Map json) => VideoFrameObserverOnPreEncodeVideoFrameJson( + sourceType: + $enumDecodeNullable(_$VideoSourceTypeEnumMap, json['sourceType']), videoFrame: json['videoFrame'] == null ? null : VideoFrame.fromJson(json['videoFrame'] as Map), @@ -2514,68 +2588,7 @@ VideoFrameObserverOnPreEncodeVideoFrameJson Map _$VideoFrameObserverOnPreEncodeVideoFrameJsonToJson( VideoFrameObserverOnPreEncodeVideoFrameJson instance) => { - 'videoFrame': instance.videoFrame?.toJson(), - }; - -VideoFrameObserverOnSecondaryCameraCaptureVideoFrameJson - _$VideoFrameObserverOnSecondaryCameraCaptureVideoFrameJsonFromJson( - Map json) => - VideoFrameObserverOnSecondaryCameraCaptureVideoFrameJson( - videoFrame: json['videoFrame'] == null - ? null - : VideoFrame.fromJson(json['videoFrame'] as Map), - ); - -Map _$VideoFrameObserverOnSecondaryCameraCaptureVideoFrameJsonToJson( - VideoFrameObserverOnSecondaryCameraCaptureVideoFrameJson instance) => - { - 'videoFrame': instance.videoFrame?.toJson(), - }; - -VideoFrameObserverOnSecondaryPreEncodeCameraVideoFrameJson - _$VideoFrameObserverOnSecondaryPreEncodeCameraVideoFrameJsonFromJson( - Map json) => - VideoFrameObserverOnSecondaryPreEncodeCameraVideoFrameJson( - videoFrame: json['videoFrame'] == null - ? 
null - : VideoFrame.fromJson(json['videoFrame'] as Map), - ); - -Map _$VideoFrameObserverOnSecondaryPreEncodeCameraVideoFrameJsonToJson( - VideoFrameObserverOnSecondaryPreEncodeCameraVideoFrameJson instance) => - { - 'videoFrame': instance.videoFrame?.toJson(), - }; - -VideoFrameObserverOnScreenCaptureVideoFrameJson - _$VideoFrameObserverOnScreenCaptureVideoFrameJsonFromJson( - Map json) => - VideoFrameObserverOnScreenCaptureVideoFrameJson( - videoFrame: json['videoFrame'] == null - ? null - : VideoFrame.fromJson(json['videoFrame'] as Map), - ); - -Map _$VideoFrameObserverOnScreenCaptureVideoFrameJsonToJson( - VideoFrameObserverOnScreenCaptureVideoFrameJson instance) => - { - 'videoFrame': instance.videoFrame?.toJson(), - }; - -VideoFrameObserverOnPreEncodeScreenVideoFrameJson - _$VideoFrameObserverOnPreEncodeScreenVideoFrameJsonFromJson( - Map json) => - VideoFrameObserverOnPreEncodeScreenVideoFrameJson( - videoFrame: json['videoFrame'] == null - ? null - : VideoFrame.fromJson(json['videoFrame'] as Map), - ); - -Map _$VideoFrameObserverOnPreEncodeScreenVideoFrameJsonToJson( - VideoFrameObserverOnPreEncodeScreenVideoFrameJson instance) => - { + 'sourceType': _$VideoSourceTypeEnumMap[instance.sourceType], 'videoFrame': instance.videoFrame?.toJson(), }; @@ -2596,38 +2609,6 @@ Map _$VideoFrameObserverOnMediaPlayerVideoFrameJsonToJson( 'mediaPlayerId': instance.mediaPlayerId, }; -VideoFrameObserverOnSecondaryScreenCaptureVideoFrameJson - _$VideoFrameObserverOnSecondaryScreenCaptureVideoFrameJsonFromJson( - Map json) => - VideoFrameObserverOnSecondaryScreenCaptureVideoFrameJson( - videoFrame: json['videoFrame'] == null - ? null - : VideoFrame.fromJson(json['videoFrame'] as Map), - ); - -Map _$VideoFrameObserverOnSecondaryScreenCaptureVideoFrameJsonToJson( - VideoFrameObserverOnSecondaryScreenCaptureVideoFrameJson instance) => - { - 'videoFrame': instance.videoFrame?.toJson(), - }; - -VideoFrameObserverOnSecondaryPreEncodeScreenVideoFrameJson - _$VideoFrameObserverOnSecondaryPreEncodeScreenVideoFrameJsonFromJson( - Map json) => - VideoFrameObserverOnSecondaryPreEncodeScreenVideoFrameJson( - videoFrame: json['videoFrame'] == null - ? 
null - : VideoFrame.fromJson(json['videoFrame'] as Map), - ); - -Map _$VideoFrameObserverOnSecondaryPreEncodeScreenVideoFrameJsonToJson( - VideoFrameObserverOnSecondaryPreEncodeScreenVideoFrameJson instance) => - { - 'videoFrame': instance.videoFrame?.toJson(), - }; - VideoFrameObserverOnRenderVideoFrameJson _$VideoFrameObserverOnRenderVideoFrameJsonFromJson( Map json) => @@ -2666,6 +2647,8 @@ MediaRecorderObserverOnRecorderStateChangedJson _$MediaRecorderObserverOnRecorderStateChangedJsonFromJson( Map json) => MediaRecorderObserverOnRecorderStateChangedJson( + channelId: json['channelId'] as String?, + uid: json['uid'] as int?, state: $enumDecodeNullable(_$RecorderStateEnumMap, json['state']), error: $enumDecodeNullable(_$RecorderErrorCodeEnumMap, json['error']), ); @@ -2673,6 +2656,8 @@ MediaRecorderObserverOnRecorderStateChangedJson Map _$MediaRecorderObserverOnRecorderStateChangedJsonToJson( MediaRecorderObserverOnRecorderStateChangedJson instance) => { + 'channelId': instance.channelId, + 'uid': instance.uid, 'state': _$RecorderStateEnumMap[instance.state], 'error': _$RecorderErrorCodeEnumMap[instance.error], }; @@ -2695,6 +2680,8 @@ MediaRecorderObserverOnRecorderInfoUpdatedJson _$MediaRecorderObserverOnRecorderInfoUpdatedJsonFromJson( Map json) => MediaRecorderObserverOnRecorderInfoUpdatedJson( + channelId: json['channelId'] as String?, + uid: json['uid'] as int?, info: json['info'] == null ? null : RecorderInfo.fromJson(json['info'] as Map), @@ -2703,24 +2690,11 @@ MediaRecorderObserverOnRecorderInfoUpdatedJson Map _$MediaRecorderObserverOnRecorderInfoUpdatedJsonToJson( MediaRecorderObserverOnRecorderInfoUpdatedJson instance) => { + 'channelId': instance.channelId, + 'uid': instance.uid, 'info': instance.info?.toJson(), }; -MediaPlayerAudioFrameObserverOnFrameJson - _$MediaPlayerAudioFrameObserverOnFrameJsonFromJson( - Map json) => - MediaPlayerAudioFrameObserverOnFrameJson( - frame: json['frame'] == null - ? null - : AudioPcmFrame.fromJson(json['frame'] as Map), - ); - -Map _$MediaPlayerAudioFrameObserverOnFrameJsonToJson( - MediaPlayerAudioFrameObserverOnFrameJson instance) => - { - 'frame': instance.frame?.toJson(), - }; - MediaPlayerVideoFrameObserverOnFrameJson _$MediaPlayerVideoFrameObserverOnFrameJsonFromJson( Map json) => @@ -2965,11 +2939,11 @@ MusicContentCenterEventHandlerOnMusicChartsResultJson Map json) => MusicContentCenterEventHandlerOnMusicChartsResultJson( requestId: json['requestId'] as String?, - status: $enumDecodeNullable( - _$MusicContentCenterStatusCodeEnumMap, json['status']), result: (json['result'] as List?) 
?.map((e) => MusicChartInfo.fromJson(e as Map)) .toList(), + errorCode: $enumDecodeNullable( + _$MusicContentCenterStatusCodeEnumMap, json['error_code']), ); Map @@ -2977,13 +2951,20 @@ Map MusicContentCenterEventHandlerOnMusicChartsResultJson instance) => { 'requestId': instance.requestId, - 'status': _$MusicContentCenterStatusCodeEnumMap[instance.status], 'result': instance.result?.map((e) => e.toJson()).toList(), + 'error_code': + _$MusicContentCenterStatusCodeEnumMap[instance.errorCode], }; const _$MusicContentCenterStatusCodeEnumMap = { MusicContentCenterStatusCode.kMusicContentCenterStatusOk: 0, MusicContentCenterStatusCode.kMusicContentCenterStatusErr: 1, + MusicContentCenterStatusCode.kMusicContentCenterStatusErrGateway: 2, + MusicContentCenterStatusCode + .kMusicContentCenterStatusErrPermissionAndResource: 3, + MusicContentCenterStatusCode.kMusicContentCenterStatusErrInternalDataParse: 4, + MusicContentCenterStatusCode.kMusicContentCenterStatusErrMusicLoading: 5, + MusicContentCenterStatusCode.kMusicContentCenterStatusErrMusicDecryption: 6, }; MusicContentCenterEventHandlerOnMusicCollectionResultJson @@ -2991,8 +2972,8 @@ MusicContentCenterEventHandlerOnMusicCollectionResultJson Map json) => MusicContentCenterEventHandlerOnMusicCollectionResultJson( requestId: json['requestId'] as String?, - status: $enumDecodeNullable( - _$MusicContentCenterStatusCodeEnumMap, json['status']), + errorCode: $enumDecodeNullable( + _$MusicContentCenterStatusCodeEnumMap, json['error_code']), ); Map { 'requestId': instance.requestId, - 'status': _$MusicContentCenterStatusCodeEnumMap[instance.status], + 'error_code': _$MusicContentCenterStatusCodeEnumMap[instance.errorCode], }; MusicContentCenterEventHandlerOnLyricResultJson @@ -3009,6 +2990,8 @@ MusicContentCenterEventHandlerOnLyricResultJson MusicContentCenterEventHandlerOnLyricResultJson( requestId: json['requestId'] as String?, lyricUrl: json['lyricUrl'] as String?, + errorCode: $enumDecodeNullable( + _$MusicContentCenterStatusCodeEnumMap, json['error_code']), ); Map _$MusicContentCenterEventHandlerOnLyricResultJsonToJson( @@ -3016,6 +2999,7 @@ Map _$MusicContentCenterEventHandlerOnLyricResultJsonToJson( { 'requestId': instance.requestId, 'lyricUrl': instance.lyricUrl, + 'error_code': _$MusicContentCenterStatusCodeEnumMap[instance.errorCode], }; MusicContentCenterEventHandlerOnPreLoadEventJson @@ -3024,10 +3008,11 @@ MusicContentCenterEventHandlerOnPreLoadEventJson MusicContentCenterEventHandlerOnPreLoadEventJson( songCode: json['songCode'] as int?, percent: json['percent'] as int?, + lyricUrl: json['lyricUrl'] as String?, status: $enumDecodeNullable(_$PreloadStatusCodeEnumMap, json['status']), - msg: json['msg'] as String?, - lyricUrl: json['lyricUrl'] as String?, + errorCode: $enumDecodeNullable( + _$MusicContentCenterStatusCodeEnumMap, json['error_code']), ); Map _$MusicContentCenterEventHandlerOnPreLoadEventJsonToJson( @@ -3035,13 +3020,14 @@ Map _$MusicContentCenterEventHandlerOnPreLoadEventJsonToJson( { 'songCode': instance.songCode, 'percent': instance.percent, - 'status': _$PreloadStatusCodeEnumMap[instance.status], - 'msg': instance.msg, 'lyricUrl': instance.lyricUrl, + 'status': _$PreloadStatusCodeEnumMap[instance.status], + 'error_code': _$MusicContentCenterStatusCodeEnumMap[instance.errorCode], }; const _$PreloadStatusCodeEnumMap = { PreloadStatusCode.kPreloadStatusCompleted: 0, PreloadStatusCode.kPreloadStatusFailed: 1, PreloadStatusCode.kPreloadStatusPreloading: 2, + PreloadStatusCode.kPreloadStatusRemoved: 3, }; diff --git 
a/lib/src/impl/agora_media_engine_impl_override.dart b/lib/src/impl/agora_media_engine_impl_override.dart index 0c8c770d0..1d3407198 100644 --- a/lib/src/impl/agora_media_engine_impl_override.dart +++ b/lib/src/impl/agora_media_engine_impl_override.dart @@ -120,6 +120,7 @@ class MediaEngineImpl extends media_engine_impl_binding.MediaEngineImpl buffers.add(frame.buffer ?? Uint8List.fromList([])); buffers.add(Uint8List.fromList([])); buffers.add(frame.metadataBuffer ?? Uint8List.fromList([])); + buffers.add(frame.alphaBuffer ?? Uint8List.fromList([])); final callApiResult = await irisMethodChannel.invokeMethod( IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); diff --git a/lib/src/impl/agora_media_recorder_impl_override.dart b/lib/src/impl/agora_media_recorder_impl_override.dart index 965435c0b..45fbec5ad 100644 --- a/lib/src/impl/agora_media_recorder_impl_override.dart +++ b/lib/src/impl/agora_media_recorder_impl_override.dart @@ -1,5 +1,7 @@ -import 'package:agora_rtc_engine/src/binding_forward_export.dart'; -import 'package:agora_rtc_engine/src/binding/impl_forward_export.dart'; +import 'dart:convert'; +import 'dart:typed_data'; + +import 'package:agora_rtc_engine/src/agora_media_base.dart'; import 'package:agora_rtc_engine/src/binding/agora_media_recorder_impl.dart' as media_recorder_impl_binding; import 'package:agora_rtc_engine/src/binding/agora_media_base_event_impl.dart' @@ -12,11 +14,11 @@ import 'package:iris_method_channel/iris_method_channel.dart'; class MediaRecorderObserverWrapperOverride extends media_base_event_b.MediaRecorderObserverWrapper { const MediaRecorderObserverWrapperOverride( - this.connection, + this.strNativeHandle, MediaRecorderObserver mediaRecorderObserver, ) : super(mediaRecorderObserver); - final RtcConnection connection; + final String strNativeHandle; @override bool operator ==(Object other) { @@ -25,64 +27,56 @@ class MediaRecorderObserverWrapperOverride } return other is MediaRecorderObserverWrapperOverride && other.mediaRecorderObserver == mediaRecorderObserver && - other.connection == connection; + other.strNativeHandle == strNativeHandle; } @override - int get hashCode => Object.hash(mediaRecorderObserver, connection); + int get hashCode => Object.hash(mediaRecorderObserver, strNativeHandle); @override - bool handleEvent( + bool handleEventInternal( String eventName, String eventData, List buffers) { - if (!eventName.startsWith('MediaRecorderObserver')) return false; - final newEvent = eventName.replaceFirst('MediaRecorderObserver_', ''); - final jsonMap = jsonDecode(eventData); - final ct = jsonMap['connection']; - if (ct == null) { - return false; - } - - final rtcConnection = RtcConnection.fromJson(ct); - - if (rtcConnection.channelId != connection.channelId || - rtcConnection.localUid != connection.localUid) { + final ct = jsonMap['nativeHandle']; + if (ct == null || ct != strNativeHandle) { return false; } - if (handleEventInternal(newEvent, eventData, buffers)) { - return true; - } - - return false; + return super.handleEventInternal(eventName, eventData, buffers); } } class MediaRecorderImpl extends media_recorder_impl_binding.MediaRecorderImpl with ScopedDisposableObjectMixin { - MediaRecorderImpl._(IrisMethodChannel irisMethodChannel) + MediaRecorderImpl._(IrisMethodChannel irisMethodChannel, this.strNativeHandle) : super(irisMethodChannel); - factory MediaRecorderImpl.create(IrisMethodChannel irisMethodChannel) { - return MediaRecorderImpl._(irisMethodChannel); + factory MediaRecorderImpl.fromNativeHandle( + IrisMethodChannel 
irisMethodChannel, String strNativeHandle) { + return MediaRecorderImpl._(irisMethodChannel, strNativeHandle); } final TypedScopedKey _mediaRecorderScopedKey = const TypedScopedKey(MediaRecorderImpl); + final String strNativeHandle; + @override - Future setMediaRecorderObserver( - {required RtcConnection connection, - required MediaRecorderObserver callback}) async { - const apiType = 'MediaRecorder_setMediaRecorderObserver'; + Map createParams(Map param) { + return { + 'nativeHandle': strNativeHandle, + ...param, + }; + } - final param = createParams({'connection': connection.toJson()}); + @override + Future setMediaRecorderObserver(MediaRecorderObserver callback) async { + const apiType = 'MediaRecorder_setMediaRecorderObserver'; - final List buffers = []; - buffers.addAll(connection.collectBufferList()); + final param = createParams({}); final eventHandlerWrapper = - MediaRecorderObserverWrapperOverride(connection, callback); + MediaRecorderObserverWrapperOverride(strNativeHandle, callback); await irisMethodChannel.registerEventHandler( ScopedEvent( @@ -93,15 +87,8 @@ class MediaRecorderImpl extends media_recorder_impl_binding.MediaRecorderImpl jsonEncode(param)); } - @override - Future release() async { - markDisposed(); - - await irisMethodChannel.unregisterEventHandlers(_mediaRecorderScopedKey); - } - @override Future dispose() async { - await release(); + await irisMethodChannel.unregisterEventHandlers(_mediaRecorderScopedKey); } } diff --git a/lib/src/impl/agora_music_content_center_impl_override.dart b/lib/src/impl/agora_music_content_center_impl_override.dart index 57f008f68..4d9bfb061 100644 --- a/lib/src/impl/agora_music_content_center_impl_override.dart +++ b/lib/src/impl/agora_music_content_center_impl_override.dart @@ -8,7 +8,6 @@ import 'package:agora_rtc_engine/src/binding/agora_music_content_center_event_im as event_binding; import 'package:agora_rtc_engine/src/binding/agora_music_content_center_impl.dart' as binding; -import 'package:agora_rtc_engine/src/binding/call_api_event_handler_buffer_ext.dart'; import 'package:agora_rtc_engine/src/binding/event_handler_param_json.dart'; import 'package:agora_rtc_engine/src/impl/agora_music_content_center_impl_json.dart'; @@ -60,27 +59,6 @@ class MusicContentCenterEventHandlerWrapper bool handleEventInternal( String eventName, String eventData, List buffers) { switch (eventName) { - case 'onMusicChartsResult': - if (musicContentCenterEventHandler.onMusicChartsResult == null) { - return true; - } - final jsonMap = jsonDecode(eventData); - MusicContentCenterEventHandlerOnMusicChartsResultJson paramJson = - MusicContentCenterEventHandlerOnMusicChartsResultJson.fromJson( - jsonMap); - paramJson = paramJson.fillBuffers(buffers); - String? requestId = paramJson.requestId; - MusicContentCenterStatusCode? status = paramJson.status; - List? result = paramJson.result; - if (requestId == null || status == null || result == null) { - return true; - } - result = result.map((e) => e.fillBuffers(buffers)).toList(); - musicContentCenterEventHandler.onMusicChartsResult!( - requestId, status, result); - - return true; - case 'onMusicCollectionResult': if (musicContentCenterEventHandler.onMusicCollectionResult == null) { return true; @@ -91,8 +69,8 @@ class MusicContentCenterEventHandlerWrapper jsonMap); paramJson = paramJson.fillBuffers(buffers); String? requestId = paramJson.requestId; - MusicContentCenterStatusCode? status = paramJson.status; - if (requestId == null || status == null) { + MusicContentCenterStatusCode? 
errorCode = paramJson.errorCode;
+        if (requestId == null || errorCode == null) {
           return true;
         }
@@ -101,7 +79,7 @@ class MusicContentCenterEventHandlerWrapper
         final musicCollectionImpl = MusicCollectionImpl(musicCollectionJson);

         musicContentCenterEventHandler.onMusicCollectionResult!(
-            requestId, status, musicCollectionImpl);
+            requestId, musicCollectionImpl, errorCode);

         return true;
       }
diff --git a/lib/src/impl/agora_rtc_engine_impl.dart b/lib/src/impl/agora_rtc_engine_impl.dart
index f84f55e3e..766a2df35 100644
--- a/lib/src/impl/agora_rtc_engine_impl.dart
+++ b/lib/src/impl/agora_rtc_engine_impl.dart
@@ -44,6 +44,29 @@ import 'global_video_view_controller.dart';

 // ignore_for_file: public_member_api_docs

+int? _mockRtcEngineNativeHandle;
+@visibleForTesting
+void setMockRtcEngineNativeHandle(int? mockRtcEngineNativeHandle) {
+  assert(() {
+    _mockRtcEngineNativeHandle = mockRtcEngineNativeHandle;
+    return true;
+  }());
+}
+
+// On a 64-bit system, the native handle ptr value (an unsigned 64-bit integer)
+// can be up to 2^64 - 1, which may be greater than the Dart int max value
+// (2^63 - 1), so we can not json-decode a payload that carries such a big
+// native handle ptr value and parse it directly.
+//
+// Since Dart SDK 2.0, `int.parse` accepts hexadecimal strings in the full
+// unsigned int64 range:
+// https://github.com/dart-lang/language/blob/ee1135e0c22391cee17bf3ee262d6a04582d25de/archive/newsletter/20170929.md#semantics
+//
+// So we retrieve the native handle ptr value from the json string directly,
+// and parse an int from its hexadecimal representation here.
+int _string2IntPtr(String stringPtr) {
+  BigInt nativeHandleBI = BigInt.parse(stringPtr);
+  return int.parse('0x${nativeHandleBI.toRadixString(16)}');
+}
+
 extension RtcEngineExt on RtcEngine {
   GlobalVideoViewController get globalVideoViewController =>
       (this as RtcEngineImpl)._globalVideoViewController!;
@@ -190,7 +213,8 @@ class RtcEngineImpl extends rtc_engine_ex_binding.RtcEngineExImpl

   static RtcEngineEx create({IrisMethodChannel? irisMethodChannel}) {
     if (_instance != null) return _instance!;

-    _instance = RtcEngineImpl._(irisMethodChannel ?? IrisMethodChannel());
+    _instance = RtcEngineImpl._(irisMethodChannel ??
+        IrisMethodChannel(IrisApiEngineNativeBindingDelegateProvider()));

     return _instance!;
   }
@@ -214,9 +238,9 @@ class RtcEngineImpl extends rtc_engine_ex_binding.RtcEngineExImpl
               .provideNativeBindingDelegate() as NativeIrisApiEngineBindingsDelegate;
       nativeBindingDelegate.initialize();

-      nativeBindingDelegate.binding.FreeIrisVideoFrameBufferManager(
+      nativeBindingDelegate.binding.FreeIrisRtcRendering(
           ffi.Pointer.fromAddress(
-              _globalVideoViewController!.videoFrameBufferManagerIntPtr));
+              _globalVideoViewController!.irisRtcRenderingHandle));

       return true;
     }());
@@ -242,8 +266,17 @@ class RtcEngineImpl extends rtc_engine_ex_binding.RtcEngineExImpl
       externalFilesDir = androidInitResult['externalFilesDir'] ??
''; } - await irisMethodChannel - .initilize(IrisApiEngineNativeBindingDelegateProvider()); + List args = []; + assert(() { + if (_mockRtcEngineNativeHandle != null) { + args.add(_mockRtcEngineNativeHandle!); + } + return true; + }()); + + await irisMethodChannel.initilize(args); + await _initializeInternal(context); + await super.initialize(context); await irisMethodChannel.invokeMethod(IrisMethodCall( @@ -261,8 +294,6 @@ class RtcEngineImpl extends rtc_engine_ex_binding.RtcEngineExImpl } } - await _initializeInternal(context); - _rtcEngineState.isInitialzed = true; _isReleased = false; _initializingCompleter?.complete(null); @@ -301,6 +332,8 @@ class RtcEngineImpl extends rtc_engine_ex_binding.RtcEngineExImpl .detachVideoFrameBufferManager(irisMethodChannel.getNativeHandle()); _globalVideoViewController = null; + await irisMethodChannel.unregisterEventHandlers(_rtcEngineImplScopedKey); + await super.release(sync: sync); irisMethodChannel.removeHotRestartListener(_hotRestartListener); @@ -342,13 +375,13 @@ class RtcEngineImpl extends rtc_engine_ex_binding.RtcEngineExImpl } @override - Future createMediaPlayer() async { + Future createMediaPlayer() async { const apiType = 'RtcEngine_createMediaPlayer'; final param = createParams({}); final callApiResult = await irisMethodChannel .invokeMethod(IrisMethodCall(apiType, jsonEncode(param))); if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); + return null; } final rm = callApiResult.data; final result = rm['result']; @@ -609,13 +642,6 @@ class RtcEngineImpl extends rtc_engine_ex_binding.RtcEngineExImpl () => media_engine_impl.MediaEngineImpl.create(irisMethodChannel)); } - @override - MediaRecorder getMediaRecorder() { - return _objectPool.putIfAbsent( - const TypedScopedKey(MediaRecorderImpl), - () => media_recorder_impl.MediaRecorderImpl.create(irisMethodChannel)); - } - @override LocalSpatialAudioEngine getLocalSpatialAudioEngine() { return _objectPool @@ -707,19 +733,16 @@ class RtcEngineImpl extends rtc_engine_ex_binding.RtcEngineExImpl } @override - Future startEchoTest({int intervalInSeconds = 10}) async { - const apiType = 'RtcEngine_startEchoTest2'; - final param = createParams({'intervalInSeconds': intervalInSeconds}); - final callApiResult = await irisMethodChannel - .invokeMethod(IrisMethodCall(apiType, jsonEncode(param))); + Future startEchoTest(EchoTestConfiguration config) async { + const apiType = 'RtcEngine_startEchoTest3'; + final param = createParams({'config': config.toJson()}); + final List buffers = []; + buffers.addAll(config.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); if (callApiResult.irisReturnCode < 0) { throw AgoraRtcException(code: callApiResult.irisReturnCode); } - final rm = callApiResult.data; - final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); - } } @override @@ -986,36 +1009,105 @@ class RtcEngineImpl extends rtc_engine_ex_binding.RtcEngineExImpl final nativeHandleIntPtr = resultStr.substring(resultStr.indexOf(':') + 1, resultStr.length - 1); - BigInt nativeHandleBI = BigInt.parse(nativeHandleIntPtr); - int nativeHandleBIHexInt = - int.parse('0x${nativeHandleBI.toRadixString(16)}'); + int nativeHandleBIHexInt = _string2IntPtr(nativeHandleIntPtr); return nativeHandleBIHexInt; } + @override + Future createMediaRecorder(RecorderStreamInfo info) async { + final apiType = + '${isOverrideClassName ? 
className : 'RtcEngine'}_createMediaRecorder'; + final param = createParams({'info': info.toJson()}); + final List buffers = []; + buffers.addAll(info.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + return media_recorder_impl.MediaRecorderImpl.fromNativeHandle( + irisMethodChannel, result); + } + + @override + Future destroyMediaRecorder(MediaRecorder mediaRecorder) async { + final impl = mediaRecorder as media_recorder_impl.MediaRecorderImpl; + + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_destroyMediaRecorder'; + final param = createParams({'nativeHandle': impl.strNativeHandle}); + await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + } + + @override + Future startScreenCaptureBySourceType( + {required VideoSourceType sourceType, + required ScreenCaptureConfiguration config}) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_startScreenCapture2'; + final param = createParams( + {'sourceType': sourceType.value(), 'config': config.toJson()}); + final List buffers = []; + buffers.addAll(config.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + + @override + Future stopScreenCaptureBySourceType(VideoSourceType sourceType) async { + final apiType = + '${isOverrideClassName ? 
className : 'RtcEngine'}_stopScreenCapture2';
+    final param = createParams({'sourceType': sourceType.value()});
+    final callApiResult = await irisMethodChannel.invokeMethod(
+        IrisMethodCall(apiType, jsonEncode(param), buffers: null));
+    if (callApiResult.irisReturnCode < 0) {
+      throw AgoraRtcException(code: callApiResult.irisReturnCode);
+    }
+    final rm = callApiResult.data;
+    final result = rm['result'];
+    if (result < 0) {
+      throw AgoraRtcException(code: result);
+    }
+  }
+
   /////////// debug ////////
   /// [type] see [VideoSourceType], only [VideoSourceType.videoSourceCamera], [VideoSourceType.videoSourceRemote] are supported
   Future startDumpVideo(int type, String dir) async {
-    await irisMethodChannel.invokeMethod(IrisMethodCall(
-      'StartDumpVideo',
-      jsonEncode({
-        'nativeHandle':
-            _globalVideoViewController!.videoFrameBufferManagerIntPtr,
-        'type': type,
-        'dir': dir,
-      }),
-    ));
+    await setParameters(
+        "{\"engine.video.enable_video_dump\":{\"mode\": 0, \"enable\": true}}");
+
+    // await irisMethodChannel.invokeMethod(IrisMethodCall(
+    //   'StartDumpVideo',
+    //   jsonEncode({
+    //     'nativeHandle': _globalVideoViewController!.irisRtcRenderingHandle,
+    //     'type': type,
+    //     'dir': dir,
+    //   }),
+    // ));
   }

   Future stopDumpVideo() async {
-    await irisMethodChannel.invokeMethod(IrisMethodCall(
-      'StopDumpVideo',
-      jsonEncode({
-        'nativeHandle':
-            _globalVideoViewController!.videoFrameBufferManagerIntPtr
-      }),
-    ));
+    await setParameters(
+        "{\"engine.video.enable_video_dump\":{\"mode\": 0, \"enable\": false}}");
+
+    // await irisMethodChannel.invokeMethod(IrisMethodCall(
+    //   'StopDumpVideo',
+    //   jsonEncode(
+    //       {'nativeHandle': _globalVideoViewController!.irisRtcRenderingHandle}),
+    //   ));
   }

   //////////////////////////
diff --git a/lib/src/impl/agora_video_view_impl.dart b/lib/src/impl/agora_video_view_impl.dart
index 4a2733221..eae56396b 100644
--- a/lib/src/impl/agora_video_view_impl.dart
+++ b/lib/src/impl/agora_video_view_impl.dart
@@ -326,7 +326,8 @@ class _AgoraRtcRenderTextureState extends State
     final sourceTypeInt = sourceType.value();
     // int value of `VideoSourceType.videoSourceScreen` and `VideoSourceType.videoSourceScreenPrimary` is the same
     return sourceTypeInt == VideoSourceType.videoSourceScreenPrimary.value() ||
-        sourceTypeInt == VideoSourceType.videoSourceScreenSecondary.value();
+        sourceTypeInt == VideoSourceType.videoSourceScreenSecondary.value() ||
+        sourceTypeInt == VideoSourceType.videoSourceTranscoded.value();
   }

   Widget _applyMirrorMode(VideoMirrorModeType mirrorMode, Widget child,
diff --git a/lib/src/impl/global_video_view_controller.dart b/lib/src/impl/global_video_view_controller.dart
index 7f1a0b435..f3de82f4f 100644
--- a/lib/src/impl/global_video_view_controller.dart
+++ b/lib/src/impl/global_video_view_controller.dart
@@ -15,28 +15,27 @@ class GlobalVideoViewController {
   final MethodChannel methodChannel =
       const MethodChannel('agora_rtc_ng/video_view_controller');

-  int _videoFrameBufferManagerIntPtr = 0;
-  int get videoFrameBufferManagerIntPtr => _videoFrameBufferManagerIntPtr;
+  int _irisRtcRenderingHandle = 0;
+  int get irisRtcRenderingHandle => _irisRtcRenderingHandle;

   final Map> _destroyTextureRenderCompleters = {};

   bool _isDetachVFBMing = false;
   Future attachVideoFrameBufferManager(int irisRtcEngineIntPtr) async {
-    if (_videoFrameBufferManagerIntPtr != 0) {
+    if (_irisRtcRenderingHandle != 0) {
       return;
     }

     final CallApiResult result =
         await irisMethodChannel.invokeMethod(IrisMethodCall(
-      'CreateIrisVideoFrameBufferManager',
+      'CreateIrisRtcRendering',
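+      // The payload below carries the engine's native handle; the native
+      // binding delegate (see native_iris_api_engine_binding_delegate.dart in
+      // this patch) uses it to create the IrisRtcRendering instance that
+      // texture renderers later pull frames from.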
jsonEncode({'irisRtcEngineNativeHandle': irisRtcEngineIntPtr}), )); - _videoFrameBufferManagerIntPtr = - result.data['videoFrameBufferManagerNativeHandle'] ?? 0; + _irisRtcRenderingHandle = result.data['irisRtcRenderingHandle'] ?? 0; } Future detachVideoFrameBufferManager(int irisRtcEngineIntPtr) async { - if (_videoFrameBufferManagerIntPtr == 0) { + if (_irisRtcRenderingHandle == 0) { return; } @@ -54,59 +53,28 @@ class GlobalVideoViewController { _destroyTextureRenderCompleters.clear(); await irisMethodChannel.invokeMethod(IrisMethodCall( - 'FreeIrisVideoFrameBufferManager', + 'FreeIrisRtcRendering', jsonEncode({ 'irisRtcEngineNativeHandle': irisRtcEngineIntPtr, - 'videoFrameBufferManagerNativeHandle': _videoFrameBufferManagerIntPtr, + 'irisRtcRenderingHandle': _irisRtcRenderingHandle, }), )); - _videoFrameBufferManagerIntPtr = 0; + _irisRtcRenderingHandle = 0; } - Future createTextureRender( - int uid, String channelId, int videoSourceType) async { + Future createTextureRender(int uid, String channelId, + int videoSourceType, int videoViewSetupMode) async { final textureId = await methodChannel.invokeMethod('createTextureRender', { - 'videoFrameBufferManagerNativeHandle': _videoFrameBufferManagerIntPtr, + 'irisRtcRenderingHandle': _irisRtcRenderingHandle, 'uid': uid, 'channelId': channelId, 'videoSourceType': videoSourceType, + 'videoViewSetupMode': videoViewSetupMode, }); return textureId ?? kTextureNotInit; } - /// [videoSourceType] definition: - /// - /// ```c++ - /// typedef enum IrisVideoSourceType { - /// kVideoSourceTypeCameraPrimary, - /// kVideoSourceTypeCameraSecondary, - /// kVideoSourceTypeScreenPrimary, - /// kVideoSourceTypeScreenSecondary, - /// kVideoSourceTypeCustom, - /// kVideoSourceTypeMediaPlayer, - /// kVideoSourceTypeRtcImagePng, - /// kVideoSourceTypeRtcImageJpeg, - /// kVideoSourceTypeRtcImageGif, - /// kVideoSourceTypeRemote, - /// kVideoSourceTypeTranscoded, - /// kVideoSourceTypePreEncode, - /// kVideoSourceTypePreEncodeSecondaryCamera, - /// kVideoSourceTypePreEncodeScreen, - /// kVideoSourceTypePreEncodeSecondaryScreen, - /// kVideoSourceTypeUnknown, - /// } IrisVideoSourceType; - /// ``` - Future updateTextureRenderData( - int textureId, int uid, String channelId, int videoSourceType) async { - await methodChannel.invokeMethod('updateTextureRenderData', { - 'textureId': textureId, - 'uid': uid, - 'channelId': channelId, - 'videoSourceType': videoSourceType, - }); - } - /// Call `IrisVideoFrameBufferManager.DisableVideoFrameBuffer` in the native side Future destroyTextureRender(int textureId) async { _destroyTextureRenderCompleters.putIfAbsent( diff --git a/lib/src/impl/media_player_controller_impl.dart b/lib/src/impl/media_player_controller_impl.dart index 8c85db682..79150f1d1 100644 --- a/lib/src/impl/media_player_controller_impl.dart +++ b/lib/src/impl/media_player_controller_impl.dart @@ -174,8 +174,11 @@ class MediaPlayerControllerImpl } @override - void registerAudioFrameObserver(MediaPlayerAudioFrameObserver observer) { - _mediaPlayer?.registerAudioFrameObserver(observer); + void registerAudioFrameObserver( + {required AudioPcmFrameSink observer, + RawAudioFrameOpModeType mode = + RawAudioFrameOpModeType.rawAudioFrameOpModeReadOnly}) { + _mediaPlayer?.registerAudioFrameObserver(observer: observer, mode: mode); } @override @@ -308,7 +311,7 @@ class MediaPlayerControllerImpl } @override - void unregisterAudioFrameObserver(MediaPlayerAudioFrameObserver observer) { + void unregisterAudioFrameObserver(AudioPcmFrameSink observer) { 
_mediaPlayer?.unregisterAudioFrameObserver(observer); } @@ -349,12 +352,9 @@ class MediaPlayerControllerImpl @override Future createTextureRender( - int uid, String channelId, int videoSourceType) { + int uid, String channelId, int videoSourceType, int videoViewSetupMode) { return super.createTextureRender( - getMediaPlayerId(), - channelId, - videoSourceType, - ); + getMediaPlayerId(), channelId, videoSourceType, videoViewSetupMode); } @override diff --git a/lib/src/impl/media_player_impl.dart b/lib/src/impl/media_player_impl.dart index 95a44b941..fec4ecc55 100644 --- a/lib/src/impl/media_player_impl.dart +++ b/lib/src/impl/media_player_impl.dart @@ -38,21 +38,21 @@ class _MediaPlayerScopedKey extends TypedScopedKey { int get hashCode => Object.hash(type, mediaPlayerId); } -class MediaPlayerAudioFrameObserverWrapper - extends media_player_event_binding.MediaPlayerAudioFrameObserverWrapper { - const MediaPlayerAudioFrameObserverWrapper(this.mediaPlayerId, - MediaPlayerAudioFrameObserver mediaPlayerAudioFrameObserver) - : super(mediaPlayerAudioFrameObserver); +class AudioPcmFrameSinkWrapper + extends media_base_event_binding.AudioPcmFrameSinkWrapper { + const AudioPcmFrameSinkWrapper( + this.mediaPlayerId, AudioPcmFrameSink audioPcmFrameSink) + : super(audioPcmFrameSink); final int mediaPlayerId; @override - bool handleEvent( + bool handleEventInternal( String eventName, String eventData, List buffers) { final jsonMap = Map.from(jsonDecode(eventData)); if (jsonMap.containsKey('playerId') && jsonMap['playerId'] == mediaPlayerId) { - return super.handleEvent(eventName, eventData, buffers); + return super.handleEventInternal(eventName, eventData, buffers); } return false; @@ -68,12 +68,12 @@ class MediaPlayerVideoFrameObserverWrapper final int mediaPlayerId; @override - bool handleEvent( + bool handleEventInternal( String eventName, String eventData, List buffers) { final jsonMap = Map.from(jsonDecode(eventData)); if (jsonMap.containsKey('playerId') && jsonMap['playerId'] == mediaPlayerId) { - return super.handleEvent(eventName, eventData, buffers); + return super.handleEventInternal(eventName, eventData, buffers); } return false; @@ -90,12 +90,12 @@ class MediaPlayerSourceObserverWrapper final int mediaPlayerId; @override - bool handleEvent( + bool handleEventInternal( String eventName, String eventData, List buffers) { final jsonMap = Map.from(jsonDecode(eventData)); if (jsonMap.containsKey('playerId') && jsonMap['playerId'] == mediaPlayerId) { - return super.handleEvent(eventName, eventData, buffers); + return super.handleEventInternal(eventName, eventData, buffers); } return false; @@ -111,13 +111,14 @@ class AudioSpectrumObserverWrapper final int mediaPlayerId; @override - bool handleEvent( + bool handleEventInternal( String eventName, String eventData, List buffers) { final jsonMap = Map.from(jsonDecode(eventData)); if (jsonMap.containsKey('playerId') && jsonMap['playerId'] == mediaPlayerId) { - return super.handleEvent(eventName, eventData, buffers); + return super.handleEventInternal(eventName, eventData, buffers); } + return false; } } @@ -223,9 +224,11 @@ class MediaPlayerImpl extends agora_media_player_impl_binding.MediaPlayerImpl @override void registerAudioFrameObserver( - MediaPlayerAudioFrameObserver observer) async { + {required AudioPcmFrameSink observer, + RawAudioFrameOpModeType mode = + RawAudioFrameOpModeType.rawAudioFrameOpModeReadOnly}) async { final eventHandlerWrapper = - MediaPlayerAudioFrameObserverWrapper(getMediaPlayerId(), observer); + 
AudioPcmFrameSinkWrapper(getMediaPlayerId(), observer); final param = createParams({}); await irisMethodChannel.registerEventHandler( @@ -238,10 +241,9 @@ class MediaPlayerImpl extends agora_media_player_impl_binding.MediaPlayerImpl } @override - void unregisterAudioFrameObserver( - MediaPlayerAudioFrameObserver observer) async { + void unregisterAudioFrameObserver(AudioPcmFrameSink observer) async { final eventHandlerWrapper = - MediaPlayerAudioFrameObserverWrapper(getMediaPlayerId(), observer); + AudioPcmFrameSinkWrapper(getMediaPlayerId(), observer); final param = createParams({}); await irisMethodChannel.unregisterEventHandler( diff --git a/lib/src/impl/native_iris_api_engine_binding_delegate.dart b/lib/src/impl/native_iris_api_engine_binding_delegate.dart index 7ce874a18..4dbd17cf9 100644 --- a/lib/src/impl/native_iris_api_engine_binding_delegate.dart +++ b/lib/src/impl/native_iris_api_engine_binding_delegate.dart @@ -36,18 +36,20 @@ class NativeIrisApiEngineBindingsDelegate extends NativeBindingDelegate { } @override - ffi.Pointer createNativeApiEngine( - List>? args) { + CreateNativeApiEngineResult createNativeApiEngine( + List> args) { ffi.Pointer enginePtr = ffi.nullptr; assert(() { - if (args != null && args.isNotEmpty) { + if (args.isNotEmpty) { assert(args.length == 1); enginePtr = args[0]; } return true; }()); - return _binding.CreateIrisApiEngine(enginePtr); + final apiEnginePtr = _binding.CreateIrisApiEngine(enginePtr); + + return CreateNativeApiEngineResult(apiEnginePtr); } void _response(ffi.Pointer param, Map result) { @@ -93,40 +95,27 @@ class NativeIrisApiEngineBindingsDelegate extends NativeBindingDelegate { return _binding.StopDumpVideo( ffi.Pointer.fromAddress(videoFrameBufferManagerIntPtr)); } - case 'CreateIrisVideoFrameBufferManager': + case 'CreateIrisRtcRendering': { final data = jsonDecode(methodCall.params); assert(data.containsKey('irisRtcEngineNativeHandle')); final irisRtcEngineNativeHandle = data['irisRtcEngineNativeHandle'] as int; - final bufferManager = _binding.CreateIrisVideoFrameBufferManager(); - _binding.Attach( - ffi.Pointer.fromAddress(irisRtcEngineNativeHandle), - bufferManager, - ); + final bufferManager = _binding.CreateIrisRtcRendering( + ffi.Pointer.fromAddress(irisRtcEngineNativeHandle)); - final result = { - 'videoFrameBufferManagerNativeHandle': bufferManager.address - }; + final result = {'irisRtcRenderingHandle': bufferManager.address}; _response(param, result); return 0; } - case 'FreeIrisVideoFrameBufferManager': + case 'FreeIrisRtcRendering': { final data = jsonDecode(methodCall.params); final videoFrameBufferManagerIntPtr = - data['videoFrameBufferManagerNativeHandle'] as int; - final irisRtcEngineNativeHandle = - data['irisRtcEngineNativeHandle'] as int; - - _binding.Detach( - ffi.Pointer.fromAddress(irisRtcEngineNativeHandle), - ffi.Pointer.fromAddress(videoFrameBufferManagerIntPtr), - ); - - _binding.FreeIrisVideoFrameBufferManager( + data['irisRtcRenderingHandle'] as int; + _binding.FreeIrisRtcRendering( ffi.Pointer.fromAddress(videoFrameBufferManagerIntPtr)); _response(param, {}); diff --git a/lib/src/impl/native_iris_api_engine_bindings.dart b/lib/src/impl/native_iris_api_engine_bindings.dart index a7a88b6ec..cc3ce2f9c 100644 --- a/lib/src/impl/native_iris_api_engine_bindings.dart +++ b/lib/src/impl/native_iris_api_engine_bindings.dart @@ -54,6 +54,89 @@ class NativeIrisApiEngineBinding { late final _InitIrisLogger = _InitIrisLoggerPtr.asFunction< void Function(ffi.Pointer, int, int)>(); + IrisRtcRenderingHandle 
CreateIrisRtcRendering( + ffi.Pointer iris_api_engine_handle, + ) { + return _CreateIrisRtcRendering( + iris_api_engine_handle, + ); + } + + late final _CreateIrisRtcRenderingPtr = _lookup< + ffi.NativeFunction< + IrisRtcRenderingHandle Function( + ffi.Pointer)>>('CreateIrisRtcRendering'); + late final _CreateIrisRtcRendering = _CreateIrisRtcRenderingPtr.asFunction< + IrisRtcRenderingHandle Function(ffi.Pointer)>(); + + void FreeIrisRtcRendering( + IrisRtcRenderingHandle handle, + ) { + return _FreeIrisRtcRendering( + handle, + ); + } + + late final _FreeIrisRtcRenderingPtr = + _lookup>( + 'FreeIrisRtcRendering'); + late final _FreeIrisRtcRendering = _FreeIrisRtcRenderingPtr.asFunction< + void Function(IrisRtcRenderingHandle)>(); + + /// See `IrisRtcRendering::AddVideoFrameCacheKey` + void AddVideoFrameCacheKey( + IrisRtcRenderingHandle handle, + IrisRtcVideoFrameConfig arg1, + ) { + return _AddVideoFrameCacheKey( + handle, + arg1, + ); + } + + late final _AddVideoFrameCacheKeyPtr = _lookup< + ffi.NativeFunction< + ffi.Void Function(IrisRtcRenderingHandle, + IrisRtcVideoFrameConfig)>>('AddVideoFrameCacheKey'); + late final _AddVideoFrameCacheKey = _AddVideoFrameCacheKeyPtr.asFunction< + void Function(IrisRtcRenderingHandle, IrisRtcVideoFrameConfig)>(); + + void RemoveVideoFrameCacheKey( + IrisRtcRenderingHandle handle, + IrisRtcVideoFrameConfig arg1, + ) { + return _RemoveVideoFrameCacheKey( + handle, + arg1, + ); + } + + late final _RemoveVideoFrameCacheKeyPtr = _lookup< + ffi.NativeFunction< + ffi.Void Function(IrisRtcRenderingHandle, + IrisRtcVideoFrameConfig)>>('RemoveVideoFrameCacheKey'); + late final _RemoveVideoFrameCacheKey = + _RemoveVideoFrameCacheKeyPtr.asFunction< + void Function(IrisRtcRenderingHandle, IrisRtcVideoFrameConfig)>(); + + /// See `IrisRtcRendering::GetVideoFrameCache` + int GetVideoFrameCache( + IrisRtcRenderingHandle handle, + IrisRtcVideoFrameConfig arg1, + ) { + return _GetVideoFrameCache( + handle, + arg1, + ); + } + + late final _GetVideoFrameCachePtr = _lookup< + ffi.NativeFunction< + ffi.Int32 Function(IrisRtcRenderingHandle, + IrisRtcVideoFrameConfig)>>('GetVideoFrameCache'); + late final _GetVideoFrameCache = _GetVideoFrameCachePtr.asFunction< + int Function(IrisRtcRenderingHandle, IrisRtcVideoFrameConfig)>(); + IrisVideoFrameBufferManagerPtr CreateIrisVideoFrameBufferManager() { return _CreateIrisVideoFrameBufferManager(); } @@ -438,11 +521,13 @@ abstract class IrisAppType { } abstract class IrisLogLevel { - static const int levelTrace = 0; - static const int levelDebug = 1; - static const int levelInfo = 2; - static const int levelWarn = 3; - static const int levelErr = 4; + static const int LOG_LEVEL_TRACE = 0; + static const int LOG_LEVEL_DEBUG = 1; + static const int LOG_LEVEL_INFO = 2; + static const int LOG_LEVEL_WARN = 3; + static const int LOG_LEVEL_ERROR = 4; + static const int LOG_LEVEL_CRITICAL = 5; + static const int LOG_LEVEL_OFF = 6; } class EventParam extends ffi.Struct { @@ -470,11 +555,87 @@ class IrisCEventHandler extends ffi.Struct { typedef Func_Event = ffi .Pointer)>>; +/// The agora::media::base::VideoFrame C projection but remove some unsupported property in C, +/// e.g., agora::media::base::VideoFrame.sharedContext, agora::media::base::VideoFrame.textureId, etc. +/// +/// NOTE: If the agora::media::base::VideoFrame is updated, make sure this struct be up to date. +/// TODO(littlegnal): maybe we can use terra to generate the C projection. 
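+///
+/// Layout note: each `external` field below mirrors one C struct member in
+/// declaration order; the `@ffi.Int32()` / `@ffi.Int64()` annotations pin the
+/// integer widths to the C ABI, and `@ffi.Array.multi([16])` declares the
+/// fixed-length `float matrix[16]` member.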
+class IrisCVideoFrame extends ffi.Struct { + /// The agora::media::base::VideoFrame.type, but convert it to int type + @ffi.Int32() + external int type; + + @ffi.Int32() + external int width; + + @ffi.Int32() + external int height; + + @ffi.Int32() + external int yStride; + + @ffi.Int32() + external int uStride; + + @ffi.Int32() + external int vStride; + + external ffi.Pointer yBuffer; + + external ffi.Pointer uBuffer; + + external ffi.Pointer vBuffer; + + @ffi.Int32() + external int rotation; + + @ffi.Int64() + external int renderTimeMs; + + @ffi.Int32() + external int avsync_type; + + external ffi.Pointer metadata_buffer; + + @ffi.Int32() + external int metadata_size; + + @ffi.Array.multi([16]) + external ffi.Array matrix; + + external ffi.Pointer alphaBuffer; +} + +class IrisRtcVideoFrameConfig extends ffi.Struct { + /// int value of agora::rtc::VIDEO_SOURCE_TYPE + @ffi.Int32() + external int video_source_type; + + /// int value of agora::media::base::VIDEO_PIXEL_FORMAT. use in convertFrame() + @ffi.Int32() + external int video_frame_format; + + @ffi.Uint32() + external int uid; + + @ffi.Array.multi([512]) + external ffi.Array channelId; +} + +abstract class GET_VIDEO_FRAME_CACHE_RETURN_TYPE { + static const int OK = 0; + static const int RESIZED = 1; + static const int NO_CACHE = 2; +} + +typedef IrisRtcRenderingHandle = ffi.Pointer; + abstract class IRIS_VIDEO_PROCESS_ERR { static const int ERR_OK = 0; static const int ERR_NULL_POINTER = 1; static const int ERR_SIZE_NOT_MATCHING = 2; static const int ERR_BUFFER_EMPTY = 5; + static const int ERR_FRAM_TYPE_NOT_MATCHING = 6; } class IrisVideoFrameBufferConfig extends ffi.Struct { @@ -500,11 +661,19 @@ abstract class IrisVideoSourceType { static const int kVideoSourceTypeRtcImageGif = 8; static const int kVideoSourceTypeRemote = 9; static const int kVideoSourceTypeTranscoded = 10; - static const int kVideoSourceTypePreEncode = 11; - static const int kVideoSourceTypePreEncodeSecondaryCamera = 12; - static const int kVideoSourceTypePreEncodeScreen = 13; - static const int kVideoSourceTypePreEncodeSecondaryScreen = 14; - static const int kVideoSourceTypeUnknown = 15; + static const int kVideoSourceTypeCameraThird = 11; + static const int kVideoSourceTypeCameraFourth = 12; + static const int kVideoSourceTypeScreenThird = 13; + static const int kVideoSourceTypeScreenFourth = 14; + static const int kVideoSourceTypePreEncodeCameraPrimary = 40; + static const int kVideoSourceTypePreEncodeCameraSecondary = 41; + static const int kVideoSourceTypePreEncodeCameraThird = 42; + static const int kVideoSourceTypePreEncodeCameraFourth = 43; + static const int kVideoSourceTypePreEncodeScreenPrimary = 44; + static const int kVideoSourceTypePreEncodeScreenSecondary = 45; + static const int kVideoSourceTypePreEncodeScreenThrid = 46; + static const int kVideoSourceTypePreEncodeScreenFourth = 47; + static const int kVideoSourceTypeUnknown = 100; } class IrisCVideoFrameBuffer extends ffi.Struct { @@ -518,10 +687,10 @@ class IrisCVideoFrameBuffer extends ffi.Struct { } abstract class IrisVideoFrameType { - static const int kVideoFrameTypeYUV420 = 0; - static const int kVideoFrameTypeYUV422 = 1; - static const int kVideoFrameTypeRGBA = 2; - static const int kVideoFrameTypeBGRA = 3; + static const int kVideoFrameTypeYUV420 = 1; + static const int kVideoFrameTypeYUV422 = 16; + static const int kVideoFrameTypeRGBA = 4; + static const int kVideoFrameTypeBGRA = 2; } typedef Func_VideoFrame = ffi.Pointer< @@ -584,6 +753,8 @@ class IrisVideoFrame extends ffi.Struct { 
@ffi.Array.multi([16]) external ffi.Array matrix; + + external ffi.Pointer alphaBuffer; } typedef IrisVideoFrameBufferManagerPtr = ffi.Pointer; @@ -602,3 +773,5 @@ const int kBasicResultLength = 65536; const int kEventResultLenght = 1024; const int kBasicStringLength = 512; + +const int kDefaultLogFileSize = 5242880; diff --git a/lib/src/impl/video_view_controller_impl.dart b/lib/src/impl/video_view_controller_impl.dart index a042b89f1..37099849f 100644 --- a/lib/src/impl/video_view_controller_impl.dart +++ b/lib/src/impl/video_view_controller_impl.dart @@ -1,4 +1,5 @@ import 'package:agora_rtc_engine/src/agora_base.dart'; +import 'package:agora_rtc_engine/src/agora_media_base.dart'; import 'package:agora_rtc_engine/src/agora_rtc_engine_ex.dart'; import 'package:agora_rtc_engine/src/impl/agora_rtc_engine_impl.dart'; import 'package:agora_rtc_engine/src/render/video_view_controller.dart'; @@ -126,6 +127,7 @@ mixin VideoViewControllerBaseMixin implements VideoViewControllerBase { int uid, String channelId, int videoSourceType, + int videoViewSetupMode, ) async { if (_isCreatedRender) { return _textureId; @@ -135,6 +137,7 @@ mixin VideoViewControllerBaseMixin implements VideoViewControllerBase { uid, channelId, videoSourceType, + videoViewSetupMode, ); _isCreatedRender = true; @@ -151,6 +154,8 @@ mixin VideoViewControllerBaseMixin implements VideoViewControllerBase { canvas.uid!, connection?.channelId ?? '', canvas.sourceType?.value() ?? getVideoSourceType(), + canvas.setupMode?.value() ?? + VideoViewSetupMode.videoViewSetupReplace.value(), ); } } else {} diff --git a/lib/src/render/agora_video_view.dart b/lib/src/render/agora_video_view.dart index 36ae442b2..65e748f75 100644 --- a/lib/src/render/agora_video_view.dart +++ b/lib/src/render/agora_video_view.dart @@ -15,7 +15,7 @@ class AgoraVideoView extends StatefulWidget { /// Controls the type of video to render:If you want to render video of the RtcEngine, see VideoViewController .If you want to render video of the media player, see MediaPlayerController . final VideoViewControllerBase controller; - /// Callback when [AgoraVideoView] created. + /// @nodoc final void Function(int viewId)? onAgoraVideoViewCreated; @override diff --git a/lib/src/render/video_view_controller.dart b/lib/src/render/video_view_controller.dart index 05004759f..bf4061703 100644 --- a/lib/src/render/video_view_controller.dart +++ b/lib/src/render/video_view_controller.dart @@ -1,4 +1,5 @@ import 'package:agora_rtc_engine/src/agora_base.dart'; +import 'package:agora_rtc_engine/src/agora_media_base.dart'; import 'package:agora_rtc_engine/src/agora_rtc_engine.dart'; import 'package:agora_rtc_engine/src/agora_rtc_engine_ex.dart'; import 'package:agora_rtc_engine/src/impl/video_view_controller_impl.dart'; @@ -39,6 +40,7 @@ abstract class VideoViewControllerBase { int uid, String channelId, int videoSourceType, + int videoViewSetupMode, ); @internal diff --git a/macos/agora_rtc_engine.podspec b/macos/agora_rtc_engine.podspec index 55e3880c3..b80a286c2 100644 --- a/macos/agora_rtc_engine.podspec +++ b/macos/agora_rtc_engine.podspec @@ -15,10 +15,10 @@ A new flutter plugin project. s.source = { :path => '.' 
} s.source_files = 'Classes/**/*.{h,mm}', 'Classes/File.swift' s.dependency 'FlutterMacOS' - # s.dependency 'AgoraRtcWrapper' - s.dependency 'AgoraRtcEngine_macOS', '4.1.0' - s.dependency 'AgoraIrisRTC_macOS', '4.1.0-rc.2' + # s.dependency 'AgoraRtcWrapper' + s.dependency 'AgoraRtcEngine_macOS', '4.2.0' + s.dependency 'AgoraIrisRTC_macOS', '4.2.0-build.3' s.platform = :osx, '10.11' s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES' } -end +end \ No newline at end of file diff --git a/pubspec.yaml b/pubspec.yaml index 870a2c609..5a464a477 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -2,7 +2,7 @@ name: agora_rtc_engine description: >- Flutter plugin of Agora RTC SDK, allow you to simply integrate Agora Video Calling or Live Video Streaming to your app with just a few lines of code. -version: 6.1.1+1 +version: 6.2.0-dev.1 homepage: https://www.agora.io repository: https://github.com/AgoraIO-Extensions/Agora-Flutter-SDK/tree/main environment: @@ -17,7 +17,7 @@ dependencies: ffi: '>=1.1.2' async: ^2.8.2 meta: ^1.7.0 - iris_method_channel: 1.1.0-rc.5 + iris_method_channel: 1.1.0 dev_dependencies: flutter_test: sdk: flutter diff --git a/scripts/artifacts_version.sh b/scripts/artifacts_version.sh index ef2606ead..0e80b3b7f 100644 --- a/scripts/artifacts_version.sh +++ b/scripts/artifacts_version.sh @@ -1,6 +1,6 @@ set -e -export IRIS_CDN_URL_ANDROID="https://download.agora.io/sdk/release/iris_4.1.0_DCG_Android_Video_20230105_0846.zip" -export IRIS_CDN_URL_IOS="https://download.agora.io/sdk/release/iris_4.1.0_DCG_iOS_Video_20230105_0846.zip" -export IRIS_CDN_URL_MACOS="https://download.agora.io/sdk/release/iris_4.1.0_DCG_Mac_Video_20230105_0846.zip" -export IRIS_CDN_URL_WINDOWS="https://download.agora.io/sdk/release/iris_4.1.0_DCG_Windows_Video_20230105_0846.zip" +export IRIS_CDN_URL_ANDROID="https://download.agora.io/sdk/release/iris_4.2.0-build.3_DCG_Android_Video_20230525_0541.zip" +export IRIS_CDN_URL_IOS="https://download.agora.io/sdk/release/iris_4.2.0-build.3_DCG_iOS_Video_20230525_0544.zip" +export IRIS_CDN_URL_MACOS="https://download.agora.io/sdk/release/iris_4.2.0-build.3_DCG_Mac_Video_20230525_0545.zip" +export IRIS_CDN_URL_WINDOWS="https://download.agora.io/sdk/release/iris_4.2.0-build.3_DCG_Windows_Video_20230525_0541.zip" diff --git a/scripts/build-ios-arch.sh b/scripts/build-ios-arch.sh index 533d4fe0f..01bf98eb4 100644 --- a/scripts/build-ios-arch.sh +++ b/scripts/build-ios-arch.sh @@ -15,7 +15,7 @@ build() { mkdir -p ./build/ios/"$1" cd ./build/ios/"$1" || exit if [ "$1" = "OS64COMBINED" ]; then - archs="armv7 arm64" + archs="arm64" elif [ "$1" = "SIMULATOR64" ]; then archs="x86_64" # archs="arm64 x86_64" diff --git a/scripts/build-iris-ios.sh b/scripts/build-iris-ios.sh index 82de6d880..1ba5af106 100644 --- a/scripts/build-iris-ios.sh +++ b/scripts/build-iris-ios.sh @@ -39,4 +39,4 @@ rm -rf ${AGORA_FLUTTER_PROJECT_PATH}/ios/libs/ALL_ARCHITECTURE # fi # cp -RP "$IRIS_PROJECT_PATH/third_party/agora/rtc/include/" "$AGORA_FLUTTER_PROJECT_PATH/integration_test_app/iris_integration_test/third_party/agora/rtc/include" -cp -RP "${IRIS_PROJECT_PATH}/build/ios/ALL_ARCHITECTURE/output/dcg/${BUILD_TYPE}/AgoraRtcWrapper.xcframework" "$AGORA_FLUTTER_PROJECT_PATH/ios/" \ No newline at end of file +cp -RP "${IRIS_PROJECT_PATH}/build/ios/ALL_ARCHITECTURE/output/dcg/${BUILD_TYPE}/AgoraRtcWrapper.xcframework" "$AGORA_FLUTTER_PROJECT_PATH/ios/libs" \ No newline at end of file diff --git a/scripts/build-iris-macos-arch.sh b/scripts/build-iris-macos-arch.sh index dd58b621c..d4f2160e8 100644 --- 
a/scripts/build-iris-macos-arch.sh +++ b/scripts/build-iris-macos-arch.sh @@ -14,7 +14,7 @@ build() { cd "$root_path" || exit mkdir -p ./build/mac/"$1" cd ./build/mac/"$1" || exit - archs="arm64 x86_64" + archs="x86_64" cmake \ -G "Xcode" \ -DCMAKE_TOOLCHAIN_FILE="$root_path"/cmake/ios.toolchain.cmake \ diff --git a/scripts/build-iris-macos.sh b/scripts/build-iris-macos.sh index e76a3876a..e1a1b16c7 100644 --- a/scripts/build-iris-macos.sh +++ b/scripts/build-iris-macos.sh @@ -20,19 +20,20 @@ if [[ -d "$AGORA_FLUTTER_PROJECT_PATH/macos/AgoraRtcWrapper.framework" ]]; then rm -rf $AGORA_FLUTTER_PROJECT_PATH/macos/AgoraRtcWrapper.framework fi +if [[ -d "${AGORA_FLUTTER_PROJECT_PATH}/macos/libs" ]]; then + rm -rf "${AGORA_FLUTTER_PROJECT_PATH}/macos/libs" +fi + +mkdir -p "${AGORA_FLUTTER_PROJECT_PATH}/macos/libs" + +# /Users/fenglang/codes/aw/iris/build/mac/MAC/output/dcg/Debug/AgoraRtcWrapper.framework echo "Copying $IRIS_PROJECT_PATH/build/mac/MAC/output/$IRIS_TYPE/$BUILD_TYPE/AgoraRtcWrapper.framework $AGORA_FLUTTER_PROJECT_PATH/macos/AgoraRtcWrapper.framework" -cp -RP "$IRIS_PROJECT_PATH/build/mac/MAC/output/$IRIS_TYPE/$BUILD_TYPE/AgoraRtcWrapper.framework" "$AGORA_FLUTTER_PROJECT_PATH/macos/" +cp -RP "$IRIS_PROJECT_PATH/build/mac/MAC/output/$IRIS_TYPE/$BUILD_TYPE/AgoraRtcWrapper.framework" "$AGORA_FLUTTER_PROJECT_PATH/macos/libs" cp -RP "$IRIS_PROJECT_PATH/build/mac/MAC/output/$IRIS_TYPE/$BUILD_TYPE/$BUILD_TYPE/IrisDebugger.framework" "$AGORA_FLUTTER_PROJECT_PATH/test_shard/iris_tester/macos/" rm -rf $AGORA_FLUTTER_PROJECT_PATH/third_party/include mkdir -p $AGORA_FLUTTER_PROJECT_PATH/third_party/include cp -RP $IRIS_PROJECT_PATH/build/mac/MAC/output/$IRIS_TYPE/$BUILD_TYPE/AgoraRtcWrapper.framework/Headers/* $AGORA_FLUTTER_PROJECT_PATH/third_party/include/ -if [[ -d "${AGORA_FLUTTER_PROJECT_PATH}/macos/libs" ]]; then - rm -rf "${AGORA_FLUTTER_PROJECT_PATH}/macos/libs" -fi - -mkdir -p "${AGORA_FLUTTER_PROJECT_PATH}/macos/libs" - echo "Copying Agora RTC engine frameworks" cp -RP ${IRIS_PROJECT_PATH}/third_party/agora/$IRIS_TYPE/libs/$NATIVE_SDK_PATH_NAME/libs/* "${AGORA_FLUTTER_PROJECT_PATH}/macos/libs/" \ No newline at end of file diff --git a/scripts/code_gen.sh b/scripts/code_gen.sh new file mode 100644 index 000000000..993a31074 --- /dev/null +++ b/scripts/code_gen.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash +set -e +set -x + +TERRA_PATH=$1 +MY_PATH=$(realpath $(dirname "$0")) +PROJECT_ROOT=$(realpath ${MY_PATH}/..) 
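+# Rough pipeline (comment for orientation, inferred from the tool names):
+# terra regenerates the API code, flutter-build-runner.sh runs build_runner to
+# refresh the *.g.dart files, and testcase_gen rebuilds the generated fake tests.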
+ +pushd ${PROJECT_ROOT} + +flutter packages get +bash ${PROJECT_ROOT}/tool/terra/build.sh ${TERRA_PATH} +bash ${MY_PATH}/flutter-build-runner.sh +bash ${PROJECT_ROOT}/tool/testcase_gen/build.sh + +popd \ No newline at end of file diff --git a/scripts/download_unzip_iris_cdn_artifacts.sh b/scripts/download_unzip_iris_cdn_artifacts.sh index e2829f0ff..dbbad9adc 100644 --- a/scripts/download_unzip_iris_cdn_artifacts.sh +++ b/scripts/download_unzip_iris_cdn_artifacts.sh @@ -35,7 +35,7 @@ if [[ ${PLATFORM} == "Android" ]];then mkdir -p "${IRIS_TESTER_PATH}/android/libs/${ABI}" fi - cp -RP "${UNZIP_PATH}/ALL_ARCHITECTURE/Release/${ABI}/libIrisDebugger.so" "${IRIS_TESTER_PATH}/android/libs/${ABI}/libIrisDebugger.so" + cp -RP "${UNZIP_PATH}/Debugger/ALL_ARCHITECTURE/${ABI}/libIrisDebugger.so" "${IRIS_TESTER_PATH}/android/libs/${ABI}/libIrisDebugger.so" ls ${IRIS_TESTER_PATH}/android/libs/${ABI}/ done; @@ -44,15 +44,16 @@ if [[ ${PLATFORM} == "Android" ]];then fi if [[ ${PLATFORM} == "MAC" ]];then - cp -RP "${UNZIP_PATH}/MAC/Release/Release/IrisDebugger.framework" "${IRIS_TESTER_PATH}/macos/" + cp -RP "${UNZIP_PATH}/Debugger/MAC/IrisDebugger.framework" "${IRIS_TESTER_PATH}/macos/" fi if [[ ${PLATFORM} == "iOS" ]];then - cp -RP "${UNZIP_PATH}/ALL_ARCHITECTURE/Release/Release/IrisDebugger.xcframework" "${IRIS_TESTER_PATH}/ios/" + cp -RP "${UNZIP_PATH}/Debugger/ALL_ARCHITECTURE/IrisDebugger.xcframework" "${IRIS_TESTER_PATH}/ios/" fi if [[ ${PLATFORM} == "Windows" ]];then - cp -RP "${UNZIP_PATH}/x64/Release/Release/IrisDebugger.dll" "${IRIS_TESTER_PATH}/windows/IrisDebugger.dll" + cp -RP "${UNZIP_PATH}/Debugger/x64/IrisDebugger.dll" "${IRIS_TESTER_PATH}/windows/IrisDebugger.dll" + cp -RP "${UNZIP_PATH}/Debugger/x64/IrisDebugger.lib" "${IRIS_TESTER_PATH}/windows/IrisDebugger.lib" fi # pushd ${UNZIP_PATH} diff --git a/scripts/flutter-build-runner.sh b/scripts/flutter-build-runner.sh index 176e64c01..3e393a642 100644 --- a/scripts/flutter-build-runner.sh +++ b/scripts/flutter-build-runner.sh @@ -2,10 +2,14 @@ set -e -AGORA_FLUTTER_PROJECT_PATH=$(pwd) +MY_PATH=$(realpath $(dirname "$0")) +AGORA_FLUTTER_PROJECT_PATH=$(realpath ${MY_PATH}/..) 
+ +pushd ${AGORA_FLUTTER_PROJECT_PATH} rm -rf $AGORA_FLUTTER_PROJECT_PATH/example/macos/Flutter/ephemeral rm -rf $AGORA_FLUTTER_PROJECT_PATH/example/windows/Flutter/ephemeral +rm -rf $AGORA_FLUTTER_PROJECT_PATH/example/linux/Flutter/ephemeral rm -rf $AGORA_FLUTTER_PROJECT_PATH/example/ios/.symlinks rm -rf $AGORA_FLUTTER_PROJECT_PATH/test_shard/fake_test_app/macos/Flutter/ephemeral @@ -20,4 +24,6 @@ rm -rf $AGORA_FLUTTER_PROJECT_PATH/test_shard/rendering_test/macos/Flutter/ephem rm -rf $AGORA_FLUTTER_PROJECT_PATH/test_shard/rendering_test/windows/Flutter/ephemeral rm -rf $AGORA_FLUTTER_PROJECT_PATH/test_shard/rendering_test/ios/.symlinks -flutter packages pub run build_runner build --delete-conflicting-outputs \ No newline at end of file +flutter packages pub run build_runner build --delete-conflicting-outputs + +popd \ No newline at end of file diff --git a/shared/darwin/TextureRenderer.h b/shared/darwin/TextureRenderer.h index 3ca463926..3e457732f 100644 --- a/shared/darwin/TextureRenderer.h +++ b/shared/darwin/TextureRenderer.h @@ -12,9 +12,9 @@ - (instancetype) initWithTextureRegistry:(NSObject *)textureRegistry messenger:(NSObject *)messenger - videoFrameBufferManager:(void *)manager; +irisRtcRenderingHandle:(void *)irisRtcRenderingHandle; -- (void)updateData:(NSNumber *)uid channelId:(NSString *)channelId videoSourceType:(NSNumber *)videoSourceType; +- (void)updateData:(NSNumber *)uid channelId:(NSString *)channelId videoSourceType:(NSNumber *)videoSourceType videoViewSetupMode:(NSNumber *)videoViewSetupMode; - (void)dispose; diff --git a/shared/darwin/TextureRenderer.mm b/shared/darwin/TextureRenderer.mm index 3cb30b55c..a9fd3a604 100644 --- a/shared/darwin/TextureRenderer.mm +++ b/shared/darwin/TextureRenderer.mm @@ -1,8 +1,10 @@ #import #import "TextureRenderer.h" -#import -#import -#import +#import +#import +#import + +#import using namespace agora::iris; @@ -12,47 +14,53 @@ @interface TextureRender () @property(nonatomic, strong) FlutterMethodChannel *channel; @property(nonatomic) CVPixelBufferRef buffer_cache; @property(nonatomic, strong) dispatch_semaphore_t lock; -@property(nonatomic) agora::iris::IrisVideoFrameBufferManager *videoFrameBufferManager; -@property(nonatomic) NSDictionary *cvBufferProperties; +@property(nonatomic) agora::iris::IrisRtcRendering *irisRtcRendering; +@property(nonatomic, assign) int delegateId; +@property(nonatomic, assign) BOOL isDirtyBuffer; @end namespace { -class RendererDelegate : public IrisVideoFrameBufferDelegate { +class RendererDelegate : public agora::iris::VideoFrameObserverDelegate { public: - RendererDelegate(void *renderer) : renderer_(renderer) {} - - void OnVideoFrameReceived(const IrisVideoFrame &video_frame, - const IrisVideoFrameBufferConfig *config, - bool resize) override { + RendererDelegate(void *renderer) : renderer_(renderer) { } + + void OnVideoFrameReceived(const void *videoFrame, + const IrisRtcVideoFrameConfig &config, bool resize) override { @autoreleasepool { TextureRender *renderer = (__bridge TextureRender *)renderer_; - if (renderer.buffer_cache != NULL && resize) { - CVBufferRelease(renderer.buffer_cache); - renderer.buffer_cache = NULL; - } + agora::media::base::VideoFrame *vf = (agora::media::base::VideoFrame *)videoFrame; - CVPixelBufferRef buffer = renderer.buffer_cache; - if (renderer.buffer_cache == NULL) { - CVPixelBufferCreate(kCFAllocatorDefault, video_frame.width, - video_frame.height, kCVPixelFormatType_32BGRA, - (__bridge CFDictionaryRef)renderer.cvBufferProperties, &buffer); - - [renderer.channel 
invokeMethod:@"onSizeChanged" - arguments:@{@"width": @(video_frame.width), - @"height": @(video_frame.height)}]; + if (vf->width == 0 || vf->height == 0) { + return; } - CVPixelBufferLockBaseAddress(buffer, 0); - void *copyBaseAddress = CVPixelBufferGetBaseAddress(buffer); - memcpy(copyBaseAddress, video_frame.y_buffer, - video_frame.y_buffer_length); - CVPixelBufferUnlockBaseAddress(buffer, 0); - - renderer.buffer_cache = buffer; - [renderer.textureRegistry textureFrameAvailable:renderer.textureId]; + CVPixelBufferRef _Nullable pixelBuffer = reinterpret_cast(vf->pixelBuffer); + if (pixelBuffer) { + if (resize) { + [renderer.channel invokeMethod:@"onSizeChanged" + arguments:@{@"width": @(vf->width), + @"height": @(vf->height)}]; + } + + dispatch_semaphore_wait(renderer.lock, DISPATCH_TIME_FOREVER); + if (!renderer.isDirtyBuffer) { + // Ensure the previous retained `CVPixelBufferRef` be released. + if (renderer.buffer_cache) { + CVBufferRelease(renderer.buffer_cache); + } + + renderer.buffer_cache = CVPixelBufferRetain(pixelBuffer); + renderer.isDirtyBuffer = YES; + } + dispatch_semaphore_signal(renderer.lock); + + if (renderer.isDirtyBuffer) { + [renderer.textureRegistry textureFrameAvailable:renderer.textureId]; + } + } } } @@ -71,11 +79,11 @@ @implementation TextureRender - (instancetype) initWithTextureRegistry:(NSObject *)textureRegistry messenger:(NSObject *)messenger - videoFrameBufferManager:(void *)manager { + irisRtcRenderingHandle:(void *)irisRtcRenderingHandle { self = [super init]; if (self) { self.textureRegistry = textureRegistry; - self.videoFrameBufferManager = (IrisVideoFrameBufferManager *)manager; + self.irisRtcRendering = (agora::iris::IrisRtcRendering *)irisRtcRenderingHandle; self.textureId = [self.textureRegistry registerTexture:self]; self.channel = [FlutterMethodChannel methodChannelWithName: @@ -85,50 +93,48 @@ - (instancetype) initWithTextureRegistry:(NSObject *)tex self.lock = dispatch_semaphore_create(1); - self.cvBufferProperties = @{ - (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA), - (__bridge NSString *)kCVPixelBufferIOSurfacePropertiesKey: @{}, - (__bridge NSString *)kCVPixelBufferOpenGLCompatibilityKey: @YES, - (__bridge NSString *)kCVPixelBufferMetalCompatibilityKey: @YES, - }; - self.delegate = new ::RendererDelegate((__bridge void *)self); + self.buffer_cache = NULL; + self.isDirtyBuffer = YES; } return self; } -- (void)updateData:(NSNumber *)uid channelId:(NSString *)channelId videoSourceType:(NSNumber *)videoSourceType { - IrisVideoFrameBuffer buffer(kVideoFrameTypeBGRA, - self.delegate, 16); - IrisVideoFrameBufferConfig config; - - config.id = [uid unsignedIntValue]; - config.type = (IrisVideoSourceType)[videoSourceType intValue]; - - if (channelId && (NSNull *)channelId != [NSNull null]) { - strcpy(config.key, [channelId UTF8String]); - - } else { - strcpy(config.key, ""); - } +- (void)updateData:(NSNumber *)uid channelId:(NSString *)channelId videoSourceType:(NSNumber *)videoSourceType videoViewSetupMode:(NSNumber *)videoViewSetupMode { + IrisRtcVideoFrameConfig config; + config.video_frame_format = agora::media::base::VIDEO_PIXEL_FORMAT::VIDEO_CVPIXEL_NV12; + config.uid = [uid unsignedIntValue]; + config.video_source_type = [videoSourceType intValue]; + if (channelId && (NSNull *)channelId != [NSNull null]) { + strcpy(config.channelId, [channelId UTF8String]); + } else { + strcpy(config.channelId, ""); + } + config.video_view_setup_mode = [videoViewSetupMode intValue]; - 
self.videoFrameBufferManager->EnableVideoFrameBuffer(buffer, &config); + self.delegateId = self.irisRtcRendering->AddVideoFrameObserverDelegate(config, self.delegate); } - (void)dispose { - self.videoFrameBufferManager->DisableVideoFrameBuffer(self.delegate); + self.irisRtcRendering->RemoveVideoFrameObserverDelegate(self.delegateId); if (self.delegate) { delete self.delegate; + self.delegate = NULL; } [self.textureRegistry unregisterTexture:self.textureId]; - if (self.buffer_cache) { + if (self.isDirtyBuffer) { CVPixelBufferRelease(self.buffer_cache); self.buffer_cache = NULL; } } - (CVPixelBufferRef _Nullable)copyPixelBuffer { - return CVPixelBufferRetain(self.buffer_cache); + dispatch_semaphore_wait(self.lock, DISPATCH_TIME_FOREVER); + CVPixelBufferRef buffer_temp = CVPixelBufferRetain(self.buffer_cache); + self.isDirtyBuffer = NO; + dispatch_semaphore_signal(self.lock); + + return buffer_temp; } - (void)onTextureUnregistered:(NSObject *)texture { diff --git a/shared/darwin/VideoViewController.h b/shared/darwin/VideoViewController.h index afc34cd0d..973dc9e72 100644 --- a/shared/darwin/VideoViewController.h +++ b/shared/darwin/VideoViewController.h @@ -16,10 +16,11 @@ - (BOOL)destroyPlatformRender:(int64_t)platformRenderId; -- (int64_t)createTextureRender:(intptr_t)videoFrameBufferManagerIntPtr +- (int64_t)createTextureRender:(intptr_t)irisRtcRenderingHandle uid:(NSNumber *)uid channelId:(NSString *)channelId - videoSourceType:(NSNumber *)videoSourceType; + videoSourceType:(NSNumber *)videoSourceType + videoViewSetupMode:(NSNumber *)videoViewSetupMode; - (BOOL)destroyTextureRender:(int64_t)textureId; diff --git a/shared/darwin/VideoViewController.mm b/shared/darwin/VideoViewController.mm index 702424ac7..e28d2f62e 100644 --- a/shared/darwin/VideoViewController.mm +++ b/shared/darwin/VideoViewController.mm @@ -2,7 +2,7 @@ #import "VideoViewController.h" #import "TextureRenderer.h" #import -#import +#import @interface VideoViewController () @property(nonatomic, weak) NSObject *textureRegistry; @@ -44,29 +44,22 @@ - (instancetype)initWith:(NSObject *)textureRegistry - (void)onMethodCall:(FlutterMethodCall *)call result:(FlutterResult)result { if ([@"createTextureRender" isEqualToString:call.method]) { NSDictionary *data = call.arguments; - NSNumber *videoFrameBufferManagerNativeHandle = data[@"videoFrameBufferManagerNativeHandle"]; + NSNumber *irisRtcRenderingHandle = data[@"irisRtcRenderingHandle"]; NSNumber *uid = data[@"uid"]; NSString *channelId = data[@"channelId"]; NSNumber *videoSourceType = data[@"videoSourceType"]; + NSNumber *videoViewSetupMode = data[@"videoViewSetupMode"]; - int64_t textureId = [self createTextureRender:(intptr_t)[videoFrameBufferManagerNativeHandle longLongValue] + int64_t textureId = [self createTextureRender:(intptr_t)[irisRtcRenderingHandle longLongValue] uid:uid channelId:channelId - videoSourceType:videoSourceType]; + videoSourceType:videoSourceType + videoViewSetupMode:videoViewSetupMode]; result(@(textureId)); } else if ([@"destroyTextureRender" isEqualToString:call.method]) { NSNumber *textureIdValue = call.arguments; BOOL success = [self destroyTextureRender: [textureIdValue longLongValue]]; result(@(success)); - } else if ([@"updateTextureRenderData" isEqualToString:call.method]) { - NSDictionary *data = call.arguments; - int64_t textureId = [data[@"uid"] longLongValue]; - NSNumber *uid = data[@"uid"]; - NSString *channelId = data[@"channelId"]; - NSNumber *videoSourceType = data[@"videoSourceType"]; - - [self updateTextureRenderData:textureId uid:uid 
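Taken together, the renderer now retains the SDK-provided NV12 `CVPixelBufferRef` and hands it to Flutter from `copyPixelBuffer`, with the semaphore plus `isDirtyBuffer` flag ensuring the texture is only re-signaled once a genuinely new frame has arrived. For orientation, the Dart side is expected to drive the `createTextureRender` handler above roughly like this; the argument keys come straight from the handler, while the channel name and the wrapper function are assumptions for illustration:

import 'package:flutter/services.dart';

// Sketch only: invokes the platform handler shown above.
// The channel name 'agora_rtc_ng' is an assumption; the argument keys
// mirror what the Objective-C side reads out of `call.arguments`.
Future<int?> createTextureRender({
  required int irisRtcRenderingHandle,
  required int uid,
  required String channelId,
  required int videoSourceType,
  required int videoViewSetupMode,
}) {
  const channel = MethodChannel('agora_rtc_ng');
  return channel.invokeMethod<int>('createTextureRender', {
    'irisRtcRenderingHandle': irisRtcRenderingHandle,
    'uid': uid,
    'channelId': channelId,
    'videoSourceType': videoSourceType,
    'videoViewSetupMode': videoViewSetupMode,
  });
}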
channelId:channelId videoSourceType:videoSourceType]; - result(@(YES)); } } @@ -78,25 +71,22 @@ - (BOOL)destroyPlatformRender:(int64_t)platformRenderId { return true; } -- (int64_t)createTextureRender:(intptr_t)videoFrameBufferManagerIntPtr +- (int64_t)createTextureRender:(intptr_t)irisRtcRenderingHandle uid:(NSNumber *)uid channelId:(NSString *)channelId - videoSourceType:(NSNumber *)videoSourceType { - agora::iris::IrisVideoFrameBufferManager *videoFrameBufferManager = reinterpret_cast(videoFrameBufferManagerIntPtr); + videoSourceType:(NSNumber *)videoSourceType + videoViewSetupMode:(NSNumber *)videoViewSetupMode { + agora::iris::IrisRtcRendering *irisRtcRendering = reinterpret_cast(irisRtcRenderingHandle); TextureRender *textureRender = [[TextureRender alloc] initWithTextureRegistry:self.textureRegistry messenger:self.messenger - videoFrameBufferManager:videoFrameBufferManager]; + irisRtcRenderingHandle:irisRtcRendering]; int64_t textureId = [textureRender textureId]; - [textureRender updateData:uid channelId:channelId videoSourceType:videoSourceType]; + [textureRender updateData:uid channelId:channelId videoSourceType:videoSourceType videoViewSetupMode:videoViewSetupMode]; self.textureRenders[@(textureId)] = textureRender; return textureId; } -- (void)updateTextureRenderData:(int64_t)textureId uid:(NSNumber *)uid channelId:(NSString *)channelId videoSourceType:(NSNumber *)videoSourceType { - [self.textureRenders[@(textureId)] updateData:uid channelId:channelId videoSourceType:videoSourceType]; -} - - (BOOL)destroyTextureRender:(int64_t)textureId { TextureRender *textureRender = [self.textureRenders objectForKey:@(textureId)]; if (textureRender != nil) { diff --git a/test_shard/fake_test_app/integration_test/apis_call_fake_test.dart b/test_shard/fake_test_app/integration_test/apis_call_fake_test.dart index 2575efb8f..d0cbf1316 100644 --- a/test_shard/fake_test_app/integration_test/apis_call_fake_test.dart +++ b/test_shard/fake_test_app/integration_test/apis_call_fake_test.dart @@ -1,3 +1,4 @@ +import 'package:flutter_test/flutter_test.dart'; import 'package:integration_test/integration_test.dart'; import 'generated/audiodevicemanager_fake_test.generated.dart' as audiodevicemanager; @@ -14,9 +15,24 @@ import 'testcases/rtcengineex_testcases.dart' as rtcengineex; import 'generated/videodevicemanager_fake_test.generated.dart' as videodevicemanager; +import 'package:iris_tester/iris_tester.dart'; +import 'package:agora_rtc_engine/src/impl/agora_rtc_engine_impl.dart'; + void main() { IntegrationTestWidgetsFlutterBinding.ensureInitialized(); + IrisTester irisTester = IrisTester(); + + setUp(() { + irisTester.initialize(); + setMockRtcEngineNativeHandle(irisTester.getfakeRtcEngineHandle()); + }); + + tearDown(() { + irisTester.dispose(); + setMockRtcEngineNativeHandle(null); + }); + audiodevicemanager.audioDeviceManagerSmokeTestCases(); localspatialaudioengine.testCases(); diff --git a/test_shard/fake_test_app/integration_test/eventhandlers_fake_test.dart b/test_shard/fake_test_app/integration_test/eventhandlers_fake_test.dart index b4e483737..ffca1b8d5 100644 --- a/test_shard/fake_test_app/integration_test/eventhandlers_fake_test.dart +++ b/test_shard/fake_test_app/integration_test/eventhandlers_fake_test.dart @@ -1,5 +1,7 @@ +import 'package:flutter_test/flutter_test.dart'; import 'package:integration_test/integration_test.dart'; -import 'generated/rtcengine_rtcengineeventhandler_testcases.generated.dart' +import 'package:iris_tester/iris_tester.dart'; +import 
'testcases/rtcengine_rtcengineeventhandler_testcases.dart' as rtcengine_rtcengineeventhandler; import 'generated/rtcengine_audiospectrumobserver_testcases.generated.dart' as rtcengine_audiospectrumobserver; @@ -7,16 +9,14 @@ import 'generated/rtcengine_audioencodedframeobserver_testcases.generated.dart' as rtcengine_audioencodedframeobserver; import 'generated/rtcengine_metadataobserver_testcases.generated.dart' as rtcengine_metadataobserver; -import 'generated/mediaengine_audioframeobserver_testcases.generated.dart' - as mediaengine_audioframeobserver; import 'generated/mediaengine_videoframeobserver_testcases.generated.dart' as mediaengine_videoframeobserver; import 'generated/mediaengine_videoencodedframeobserver_testcases.generated.dart' as mediaengine_videoencodedframeobserver; import 'generated/mediaplayer_audiospectrumobserver_testcases.generated.dart' as mediaplayer_audiospectrumobserver; -import 'generated/mediaplayer_mediaplayeraudioframeobserver_testcases.generated.dart' - as mediaplayer_mediaplayeraudioframeobserver; +import 'generated/mediaplayer_audiopcmframesink_testcases.generated.dart' + as mediaplayer_audiopcmframesink; import 'generated/mediaplayer_mediaplayersourceobserver_testcases.generated.dart' as mediaplayer_mediaplayersourceobserver; import 'generated/mediaplayer_mediaplayervideoframeobserver_testcases.generated.dart' @@ -25,28 +25,43 @@ import 'generated/mediarecorder_mediarecorderobserver_testcases.generated.dart' as mediarecorder_mediarecorderobserver; import 'generated/musiccontentcenter_musiccontentcentereventhandler_testcases.generated.dart' as musiccontentcenter_musiccontentcentereventhandler; +import 'package:agora_rtc_engine/src/impl/agora_rtc_engine_impl.dart'; void main() { + IntegrationTestWidgetsFlutterBinding.ensureInitialized(); + + IrisTester irisTester = IrisTester(); + + setUp(() { + irisTester.initialize(); + setMockRtcEngineNativeHandle(irisTester.getfakeRtcEngineHandle()); + }); + + tearDown(() { + irisTester.dispose(); + setMockRtcEngineNativeHandle(null); + }); + // RtcEngine events - rtcengine_rtcengineeventhandler.generatedTestCases(); - rtcengine_audiospectrumobserver.generatedTestCases(); - rtcengine_audioencodedframeobserver.generatedTestCases(); - rtcengine_metadataobserver.generatedTestCases(); + rtcengine_rtcengineeventhandler.testCases(irisTester); + rtcengine_audiospectrumobserver.generatedTestCases(irisTester); + rtcengine_audioencodedframeobserver.generatedTestCases(irisTester); + rtcengine_metadataobserver.generatedTestCases(irisTester); // MediaEngine events - mediaengine_audioframeobserver.generatedTestCases(); - mediaengine_videoframeobserver.generatedTestCases(); - mediaengine_videoencodedframeobserver.generatedTestCases(); + mediaengine_videoframeobserver.generatedTestCases(irisTester); + mediaengine_videoencodedframeobserver.generatedTestCases(irisTester); // MediaPlayerController events - mediaplayer_audiospectrumobserver.generatedTestCases(); - mediaplayer_mediaplayeraudioframeobserver.generatedTestCases(); - mediaplayer_mediaplayersourceobserver.generatedTestCases(); - mediaplayer_mediaplayervideoframeobserver.generatedTestCases(); + mediaplayer_audiospectrumobserver.generatedTestCases(irisTester); + mediaplayer_audiopcmframesink.generatedTestCases(irisTester); + mediaplayer_mediaplayersourceobserver.generatedTestCases(irisTester); + mediaplayer_mediaplayervideoframeobserver.generatedTestCases(irisTester); // MediaRecorder events - mediarecorder_mediarecorderobserver.generatedTestCases(); + 
mediarecorder_mediarecorderobserver.generatedTestCases(irisTester); // MusicContentCenter events - musiccontentcenter_musiccontentcentereventhandler.generatedTestCases(); + musiccontentcenter_musiccontentcentereventhandler + .generatedTestCases(irisTester); } diff --git a/test_shard/fake_test_app/integration_test/generated/audiodevicemanager_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/audiodevicemanager_fake_test.generated.dart index ec9eb88da..44a3411a5 100644 --- a/test_shard/fake_test_app/integration_test/generated/audiodevicemanager_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/audiodevicemanager_fake_test.generated.dart @@ -13,10 +13,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'enumeratePlaybackDevices', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -50,10 +46,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'enumerateRecordingDevices', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -87,10 +79,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'setPlaybackDevice', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -127,10 +115,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'getPlaybackDevice', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -164,10 +148,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'getPlaybackDeviceInfo', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -201,10 +181,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'setPlaybackDeviceVolume', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -241,10 +217,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'getPlaybackDeviceVolume', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -278,10 +250,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'setRecordingDevice', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr 
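The shape of the test refactor above, condensed: each fake-test entrypoint now owns a single shared IrisTester, resets the mock native engine handle around every case, and passes the tester into the generated suites instead of letting each case create its own debug engine. A minimal skeleton, using only identifiers that appear in the hunks above:

import 'package:agora_rtc_engine/src/impl/agora_rtc_engine_impl.dart';
import 'package:flutter_test/flutter_test.dart';
import 'package:integration_test/integration_test.dart';
import 'package:iris_tester/iris_tester.dart';

void main() {
  IntegrationTestWidgetsFlutterBinding.ensureInitialized();

  final irisTester = IrisTester();

  setUp(() {
    // Fresh fake engine per test; point the bindings at it.
    irisTester.initialize();
    setMockRtcEngineNativeHandle(irisTester.getfakeRtcEngineHandle());
  });

  tearDown(() {
    irisTester.dispose();
    setMockRtcEngineNativeHandle(null);
  });

  // Each generated suite now receives the shared tester, e.g.:
  // rtcengine_metadataobserver.generatedTestCases(irisTester);
}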
= irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -318,10 +286,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'getRecordingDevice', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -355,10 +319,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'getRecordingDeviceInfo', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -392,10 +352,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'setRecordingDeviceVolume', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -432,10 +388,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'getRecordingDeviceVolume', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -469,10 +421,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'setLoopbackDevice', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -509,10 +457,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'getLoopbackDevice', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -546,10 +490,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'setPlaybackDeviceMute', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -586,10 +526,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'getPlaybackDeviceMute', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -623,10 +559,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'setRecordingDeviceMute', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const 
String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -663,10 +595,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'getRecordingDeviceMute', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -700,10 +628,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'startPlaybackDeviceTest', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -740,10 +664,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'stopPlaybackDeviceTest', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -777,10 +697,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'startRecordingDeviceTest', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -817,10 +733,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'stopRecordingDeviceTest', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -854,10 +766,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'startAudioDeviceLoopbackTest', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -894,10 +802,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'stopAudioDeviceLoopbackTest', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -931,10 +835,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'followSystemPlaybackDevice', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -971,10 +871,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'followSystemRecordingDevice', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1011,10 +907,6 @@ void 
audioDeviceManagerSmokeTestCases() { testWidgets( 'followSystemLoopbackDevice', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1051,10 +943,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'release', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1088,10 +976,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'getPlaybackDefaultDevice', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1125,10 +1009,6 @@ void audioDeviceManagerSmokeTestCases() { testWidgets( 'getRecordingDefaultDevice', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); diff --git a/test_shard/fake_test_app/integration_test/generated/basespatialaudioengine_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/basespatialaudioengine_fake_test.generated.dart index 92c654237..070494aa0 100644 --- a/test_shard/fake_test_app/integration_test/generated/basespatialaudioengine_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/basespatialaudioengine_fake_test.generated.dart @@ -12,10 +12,6 @@ void localSpatialAudioEngineSmokeTestCases() { testWidgets( 'release', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -50,10 +46,6 @@ void localSpatialAudioEngineSmokeTestCases() { testWidgets( 'setMaxAudioRecvCount', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -91,10 +83,6 @@ void localSpatialAudioEngineSmokeTestCases() { testWidgets( 'setAudioRecvRange', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -132,10 +120,6 @@ void localSpatialAudioEngineSmokeTestCases() { testWidgets( 'setDistanceUnit', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -173,10 +157,6 @@ void localSpatialAudioEngineSmokeTestCases() { testWidgets( 
'updatePlayerPositionInfo', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -221,10 +201,6 @@ void localSpatialAudioEngineSmokeTestCases() { testWidgets( 'setParameters', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -262,10 +238,6 @@ void localSpatialAudioEngineSmokeTestCases() { testWidgets( 'muteLocalAudioStream', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -303,10 +275,6 @@ void localSpatialAudioEngineSmokeTestCases() { testWidgets( 'muteAllRemoteAudioStreams', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -344,10 +312,6 @@ void localSpatialAudioEngineSmokeTestCases() { testWidgets( 'setZones', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -387,10 +351,6 @@ void localSpatialAudioEngineSmokeTestCases() { testWidgets( 'setPlayerAttenuation', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -432,10 +392,6 @@ void localSpatialAudioEngineSmokeTestCases() { testWidgets( 'muteRemoteAudioStream', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); diff --git a/test_shard/fake_test_app/integration_test/generated/localspatialaudioengine_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/localspatialaudioengine_fake_test.generated.dart index f726aec41..ee4a9d829 100644 --- a/test_shard/fake_test_app/integration_test/generated/localspatialaudioengine_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/localspatialaudioengine_fake_test.generated.dart @@ -13,10 +13,6 @@ void localSpatialAudioEngineSmokeTestCases() { testWidgets( 'initialize', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -51,10 +47,6 @@ void localSpatialAudioEngineSmokeTestCases() { testWidgets( 'updateRemotePosition', (WidgetTester 
tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -99,10 +91,6 @@ void localSpatialAudioEngineSmokeTestCases() { testWidgets( 'updateRemotePositionEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -154,10 +142,6 @@ void localSpatialAudioEngineSmokeTestCases() { testWidgets( 'removeRemotePosition', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -195,10 +179,6 @@ void localSpatialAudioEngineSmokeTestCases() { testWidgets( 'removeRemotePositionEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -243,10 +223,6 @@ void localSpatialAudioEngineSmokeTestCases() { testWidgets( 'clearRemotePositions', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -281,10 +257,6 @@ void localSpatialAudioEngineSmokeTestCases() { testWidgets( 'clearRemotePositionsEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -327,10 +299,6 @@ void localSpatialAudioEngineSmokeTestCases() { testWidgets( 'setRemoteAudioAttenuation', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); diff --git a/test_shard/fake_test_app/integration_test/generated/mediaengine_audioframeobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaengine_audioframeobserver_testcases.generated.dart index 3cdfd29a0..d256d4a09 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaengine_audioframeobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaengine_audioframeobserver_testcases.generated.dart @@ -10,14 +10,10 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { testWidgets( 'onPlaybackAudioFrameBeforeMixing', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - 
setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', diff --git a/test_shard/fake_test_app/integration_test/generated/mediaengine_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaengine_fake_test.generated.dart index 3772ea144..6df1cc1a9 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaengine_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaengine_fake_test.generated.dart @@ -14,10 +14,6 @@ void mediaEngineSmokeTestCases() { testWidgets( 'registerAudioFrameObserver', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -62,10 +58,6 @@ void mediaEngineSmokeTestCases() { testWidgets( 'registerVideoFrameObserver', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -79,16 +71,12 @@ void mediaEngineSmokeTestCases() { try { final VideoFrameObserver observer = VideoFrameObserver( - onCaptureVideoFrame: (VideoFrame videoFrame) {}, - onPreEncodeVideoFrame: (VideoFrame videoFrame) {}, - onSecondaryCameraCaptureVideoFrame: (VideoFrame videoFrame) {}, - onSecondaryPreEncodeCameraVideoFrame: (VideoFrame videoFrame) {}, - onScreenCaptureVideoFrame: (VideoFrame videoFrame) {}, - onPreEncodeScreenVideoFrame: (VideoFrame videoFrame) {}, + onCaptureVideoFrame: + (VideoSourceType sourceType, VideoFrame videoFrame) {}, + onPreEncodeVideoFrame: + (VideoSourceType sourceType, VideoFrame videoFrame) {}, onMediaPlayerVideoFrame: (VideoFrame videoFrame, int mediaPlayerId) {}, - onSecondaryScreenCaptureVideoFrame: (VideoFrame videoFrame) {}, - onSecondaryPreEncodeScreenVideoFrame: (VideoFrame videoFrame) {}, onRenderVideoFrame: (String channelId, int remoteUid, VideoFrame videoFrame) {}, onTranscodedVideoFrame: (VideoFrame videoFrame) {}, @@ -117,10 +105,6 @@ void mediaEngineSmokeTestCases() { testWidgets( 'registerVideoEncodedFrameObserver', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -162,10 +146,6 @@ void mediaEngineSmokeTestCases() { testWidgets( 'pushAudioFrame', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -178,7 +158,6 @@ void mediaEngineSmokeTestCases() { final mediaEngine = rtcEngine.getMediaEngine(); try { - const MediaSourceType type = MediaSourceType.audioPlayoutSource; const AudioFrameType frameType = AudioFrameType.frameTypePcm16; const BytesPerSample frameBytesPerSample = BytesPerSample.twoBytesPerSample; @@ -198,13 +177,10 @@ void mediaEngineSmokeTestCases() { renderTimeMs: frameRenderTimeMs, avsyncType: frameAvsyncType, ); - const bool wrap = true; - const int sourceId = 
10; + const int trackId = 10; await mediaEngine.pushAudioFrame( - type: type, frame: frame, - wrap: wrap, - sourceId: sourceId, + trackId: trackId, ); } catch (e) { if (e is! AgoraRtcException) { @@ -224,190 +200,9 @@ void mediaEngineSmokeTestCases() { // skip: !(), ); - testWidgets( - 'pushCaptureAudioFrame', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - - final mediaEngine = rtcEngine.getMediaEngine(); - - try { - const AudioFrameType frameType = AudioFrameType.frameTypePcm16; - const BytesPerSample frameBytesPerSample = - BytesPerSample.twoBytesPerSample; - const int frameSamplesPerChannel = 10; - const int frameChannels = 10; - const int frameSamplesPerSec = 10; - Uint8List frameBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int frameRenderTimeMs = 10; - const int frameAvsyncType = 10; - final AudioFrame frame = AudioFrame( - type: frameType, - samplesPerChannel: frameSamplesPerChannel, - bytesPerSample: frameBytesPerSample, - channels: frameChannels, - samplesPerSec: frameSamplesPerSec, - buffer: frameBuffer, - renderTimeMs: frameRenderTimeMs, - avsyncType: frameAvsyncType, - ); - await mediaEngine.pushCaptureAudioFrame( - frame, - ); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint('[pushCaptureAudioFrame] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. - rethrow; - } - } - - await mediaEngine.release(); - await rtcEngine.release(); - }, -// skip: !(), - ); - - testWidgets( - 'pushReverseAudioFrame', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - - final mediaEngine = rtcEngine.getMediaEngine(); - - try { - const AudioFrameType frameType = AudioFrameType.frameTypePcm16; - const BytesPerSample frameBytesPerSample = - BytesPerSample.twoBytesPerSample; - const int frameSamplesPerChannel = 10; - const int frameChannels = 10; - const int frameSamplesPerSec = 10; - Uint8List frameBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int frameRenderTimeMs = 10; - const int frameAvsyncType = 10; - final AudioFrame frame = AudioFrame( - type: frameType, - samplesPerChannel: frameSamplesPerChannel, - bytesPerSample: frameBytesPerSample, - channels: frameChannels, - samplesPerSec: frameSamplesPerSec, - buffer: frameBuffer, - renderTimeMs: frameRenderTimeMs, - avsyncType: frameAvsyncType, - ); - await mediaEngine.pushReverseAudioFrame( - frame, - ); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint('[pushReverseAudioFrame] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. 
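The audio-push surface shrinks to a single entry point here: the sibling calls being removed (pushCaptureAudioFrame above, pushReverseAudioFrame and pushDirectAudioFrame below) are gone, and pushAudioFrame now addresses a custom audio track by trackId instead of the old type/wrap/sourceId trio. A minimal sketch of the new call, assuming the track id and PCM bytes are produced elsewhere; the 480-samples-at-48 kHz framing is illustrative only:

import 'dart:typed_data';

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Push one 10 ms mono PCM16 frame to a custom audio track.
Future<void> pushPcmFrame(
    MediaEngine mediaEngine, int trackId, Uint8List pcmBytes) {
  final frame = AudioFrame(
    type: AudioFrameType.frameTypePcm16,
    samplesPerChannel: 480, // 10 ms at 48 kHz (illustrative)
    bytesPerSample: BytesPerSample.twoBytesPerSample,
    channels: 1,
    samplesPerSec: 48000,
    buffer: pcmBytes,
    renderTimeMs: 0,
    avsyncType: 0,
  );
  return mediaEngine.pushAudioFrame(frame: frame, trackId: trackId);
}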
- rethrow; - } - } - - await mediaEngine.release(); - await rtcEngine.release(); - }, -// skip: !(), - ); - - testWidgets( - 'pushDirectAudioFrame', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - - final mediaEngine = rtcEngine.getMediaEngine(); - - try { - const AudioFrameType frameType = AudioFrameType.frameTypePcm16; - const BytesPerSample frameBytesPerSample = - BytesPerSample.twoBytesPerSample; - const int frameSamplesPerChannel = 10; - const int frameChannels = 10; - const int frameSamplesPerSec = 10; - Uint8List frameBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int frameRenderTimeMs = 10; - const int frameAvsyncType = 10; - final AudioFrame frame = AudioFrame( - type: frameType, - samplesPerChannel: frameSamplesPerChannel, - bytesPerSample: frameBytesPerSample, - channels: frameChannels, - samplesPerSec: frameSamplesPerSec, - buffer: frameBuffer, - renderTimeMs: frameRenderTimeMs, - avsyncType: frameAvsyncType, - ); - await mediaEngine.pushDirectAudioFrame( - frame, - ); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint('[pushDirectAudioFrame] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. - rethrow; - } - } - - await mediaEngine.release(); - await rtcEngine.release(); - }, -// skip: !(), - ); - testWidgets( 'pullAudioFrame', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -463,10 +258,6 @@ void mediaEngineSmokeTestCases() { testWidgets( 'setExternalVideoSource', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -519,10 +310,6 @@ void mediaEngineSmokeTestCases() { testWidgets( 'setExternalAudioSource', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -538,14 +325,12 @@ void mediaEngineSmokeTestCases() { const bool enabled = true; const int sampleRate = 10; const int channels = 10; - const int sourceNumber = 10; const bool localPlayback = true; const bool publish = true; await mediaEngine.setExternalAudioSource( enabled: enabled, sampleRate: sampleRate, channels: channels, - sourceNumber: sourceNumber, localPlayback: localPlayback, publish: publish, ); @@ -568,12 +353,8 @@ void mediaEngineSmokeTestCases() { ); testWidgets( - 'setExternalAudioSink', + 'destroyCustomAudioTrack', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const 
String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -586,17 +367,13 @@ void mediaEngineSmokeTestCases() { final mediaEngine = rtcEngine.getMediaEngine(); try { - const bool enabled = true; - const int sampleRate = 10; - const int channels = 10; - await mediaEngine.setExternalAudioSink( - enabled: enabled, - sampleRate: sampleRate, - channels: channels, + const int trackId = 10; + await mediaEngine.destroyCustomAudioTrack( + trackId, ); } catch (e) { if (e is! AgoraRtcException) { - debugPrint('[setExternalAudioSink] error: ${e.toString()}'); + debugPrint('[destroyCustomAudioTrack] error: ${e.toString()}'); rethrow; } @@ -613,12 +390,8 @@ void mediaEngineSmokeTestCases() { ); testWidgets( - 'enableCustomAudioLocalPlayback', + 'setExternalAudioSink', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -631,15 +404,17 @@ void mediaEngineSmokeTestCases() { final mediaEngine = rtcEngine.getMediaEngine(); try { - const int sourceId = 10; const bool enabled = true; - await mediaEngine.enableCustomAudioLocalPlayback( - sourceId: sourceId, + const int sampleRate = 10; + const int channels = 10; + await mediaEngine.setExternalAudioSink( enabled: enabled, + sampleRate: sampleRate, + channels: channels, ); } catch (e) { if (e is! AgoraRtcException) { - debugPrint('[enableCustomAudioLocalPlayback] error: ${e.toString()}'); + debugPrint('[setExternalAudioSink] error: ${e.toString()}'); rethrow; } @@ -656,12 +431,8 @@ void mediaEngineSmokeTestCases() { ); testWidgets( - 'setDirectExternalAudioSource', + 'enableCustomAudioLocalPlayback', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -674,15 +445,15 @@ void mediaEngineSmokeTestCases() { final mediaEngine = rtcEngine.getMediaEngine(); try { - const bool enable = true; - const bool localPlayback = true; - await mediaEngine.setDirectExternalAudioSource( - enable: enable, - localPlayback: localPlayback, + const int trackId = 10; + const bool enabled = true; + await mediaEngine.enableCustomAudioLocalPlayback( + trackId: trackId, + enabled: enabled, ); } catch (e) { if (e is! 
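Likewise, local playback of injected audio is now keyed by track: enableCustomAudioLocalPlayback takes the trackId plus an enabled flag, and a finished track is torn down with the newly tested destroyCustomAudioTrack. A short lifecycle sketch, assuming the track id was obtained earlier from the engine:

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Monitor a custom audio track locally, then release it.
Future<void> monitorThenDestroy(MediaEngine mediaEngine, int trackId) async {
  await mediaEngine.enableCustomAudioLocalPlayback(
    trackId: trackId,
    enabled: true,
  );
  // ... push frames while monitoring ...
  await mediaEngine.enableCustomAudioLocalPlayback(
    trackId: trackId,
    enabled: false,
  );
  await mediaEngine.destroyCustomAudioTrack(trackId);
}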
AgoraRtcException) { - debugPrint('[setDirectExternalAudioSource] error: ${e.toString()}'); + debugPrint('[enableCustomAudioLocalPlayback] error: ${e.toString()}'); rethrow; } @@ -701,10 +472,6 @@ void mediaEngineSmokeTestCases() { testWidgets( 'pushVideoFrame', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -733,6 +500,7 @@ void mediaEngineSmokeTestCases() { const List frameMatrix = []; Uint8List frameMetadataBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); const int frameMetadataSize = 10; + Uint8List frameAlphaBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); final ExternalVideoFrame frame = ExternalVideoFrame( type: frameType, format: frameFormat, @@ -750,6 +518,7 @@ void mediaEngineSmokeTestCases() { matrix: frameMatrix, metadataBuffer: frameMetadataBuffer, metadataSize: frameMetadataSize, + alphaBuffer: frameAlphaBuffer, ); const int videoTrackId = 10; await mediaEngine.pushVideoFrame( @@ -777,10 +546,6 @@ void mediaEngineSmokeTestCases() { testWidgets( 'pushEncodedVideoImage', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -852,10 +617,6 @@ void mediaEngineSmokeTestCases() { testWidgets( 'release', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -890,10 +651,6 @@ void mediaEngineSmokeTestCases() { testWidgets( 'unregisterAudioFrameObserver', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -938,10 +695,6 @@ void mediaEngineSmokeTestCases() { testWidgets( 'unregisterVideoFrameObserver', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -955,16 +708,12 @@ void mediaEngineSmokeTestCases() { try { final VideoFrameObserver observer = VideoFrameObserver( - onCaptureVideoFrame: (VideoFrame videoFrame) {}, - onPreEncodeVideoFrame: (VideoFrame videoFrame) {}, - onSecondaryCameraCaptureVideoFrame: (VideoFrame videoFrame) {}, - onSecondaryPreEncodeCameraVideoFrame: (VideoFrame videoFrame) {}, - onScreenCaptureVideoFrame: (VideoFrame videoFrame) {}, - onPreEncodeScreenVideoFrame: (VideoFrame videoFrame) {}, + onCaptureVideoFrame: + (VideoSourceType sourceType, VideoFrame videoFrame) {}, + onPreEncodeVideoFrame: + (VideoSourceType sourceType, VideoFrame videoFrame) {}, onMediaPlayerVideoFrame: (VideoFrame videoFrame, int mediaPlayerId) {}, - onSecondaryScreenCaptureVideoFrame: (VideoFrame videoFrame) {}, - onSecondaryPreEncodeScreenVideoFrame: (VideoFrame videoFrame) {}, onRenderVideoFrame: (String channelId, int remoteUid, VideoFrame videoFrame) {}, 
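The pushVideoFrame hunk above also introduces the new alphaBuffer field on ExternalVideoFrame, which carries a per-pixel alpha plane alongside the color data. A minimal sketch of pushing one raw RGBA frame with alpha; the buffer-type and pixel-format choices, the dimensions, and the track id are assumptions for illustration:

import 'dart:typed_data';

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Push a single 640x360 RGBA frame plus its alpha plane to a video track.
Future<void> pushFrameWithAlpha(MediaEngine mediaEngine, int videoTrackId,
    Uint8List rgbaBytes, Uint8List alphaBytes) {
  final frame = ExternalVideoFrame(
    type: VideoBufferType.videoBufferRawData,
    format: VideoPixelFormat.videoPixelRgba,
    buffer: rgbaBytes,
    stride: 640,
    height: 360,
    timestamp: 0,
    alphaBuffer: alphaBytes, // new in this upgrade
  );
  return mediaEngine.pushVideoFrame(
      frame: frame, videoTrackId: videoTrackId);
}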
onTranscodedVideoFrame: (VideoFrame videoFrame) {}, @@ -993,10 +742,6 @@ void mediaEngineSmokeTestCases() { testWidgets( 'unregisterVideoEncodedFrameObserver', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); diff --git a/test_shard/fake_test_app/integration_test/generated/mediaengine_videoencodedframeobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaengine_videoencodedframeobserver_testcases.generated.dart index e80e53aff..4c776f574 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaengine_videoencodedframeobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaengine_videoencodedframeobserver_testcases.generated.dart @@ -10,14 +10,10 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { testWidgets( 'onEncodedVideoFrameReceived', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', diff --git a/test_shard/fake_test_app/integration_test/generated/mediaengine_videoframeobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaengine_videoframeobserver_testcases.generated.dart index 7a4fc5c5f..e4a851e8f 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaengine_videoframeobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaengine_videoframeobserver_testcases.generated.dart @@ -10,14 +10,10 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { testWidgets( 'onCaptureVideoFrame', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -27,7 +23,8 @@ void generatedTestCases() { final onCaptureVideoFrameCompleter = Completer(); final theVideoFrameObserver = VideoFrameObserver( - onCaptureVideoFrame: (VideoFrame videoFrame) { + onCaptureVideoFrame: + (VideoSourceType sourceType, VideoFrame videoFrame) { onCaptureVideoFrameCompleter.complete(true); }, ); @@ -40,6 +37,8 @@ void generatedTestCases() { await Future.delayed(const Duration(milliseconds: 500)); { + const VideoSourceType sourceType = + VideoSourceType.videoSourceCameraPrimary; const VideoPixelFormat videoFrameType = VideoPixelFormat.videoPixelDefault; const int videoFrameWidth = 10; @@ -59,6 +58,7 @@ void generatedTestCases() { const int videoFrameTextureId = 10; const List videoFrameMatrix = []; Uint8List videoFrameAlphaBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); + Uint8List videoFramePixelBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); 
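
Two new pieces of data appear throughout the updated hunks above: VideoFrame gains a pixelBuffer field (alongside the existing alphaBuffer), and raw-frame events now carry the originating VideoSourceType. A minimal sketch of the new payload, using only names visible in this patch (assumes the test file's imports, e.g. agora_rtc_engine and dart:typed_data):

    // Sketch: a 4.2.0 raw-frame event pairs the frame with its source.
    const VideoSourceType sourceType =
        VideoSourceType.videoSourceCameraPrimary;
    final VideoFrame videoFrame = VideoFrame(
      type: VideoPixelFormat.videoPixelDefault,
      width: 10,
      height: 10,
      alphaBuffer: Uint8List.fromList([1, 2, 3, 4, 5]),
      pixelBuffer: Uint8List.fromList([1, 2, 3, 4, 5]), // new in 4.2.0
    );
    final eventJson = {
      'sourceType': sourceType.value(),
      'videoFrame': videoFrame.toJson(),
    };
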
final VideoFrame videoFrame = VideoFrame( type: videoFrameType, width: videoFrameWidth, @@ -77,9 +77,11 @@ void generatedTestCases() { textureId: videoFrameTextureId, matrix: videoFrameMatrix, alphaBuffer: videoFrameAlphaBuffer, + pixelBuffer: videoFramePixelBuffer, ); final eventJson = { + 'sourceType': sourceType.value(), 'videoFrame': videoFrame.toJson(), }; @@ -106,10 +108,6 @@ void generatedTestCases() { testWidgets( 'onPreEncodeVideoFrame', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -119,7 +117,8 @@ void generatedTestCases() { final onPreEncodeVideoFrameCompleter = Completer(); final theVideoFrameObserver = VideoFrameObserver( - onPreEncodeVideoFrame: (VideoFrame videoFrame) { + onPreEncodeVideoFrame: + (VideoSourceType sourceType, VideoFrame videoFrame) { onPreEncodeVideoFrameCompleter.complete(true); }, ); @@ -132,6 +131,8 @@ void generatedTestCases() { await Future.delayed(const Duration(milliseconds: 500)); { + const VideoSourceType sourceType = + VideoSourceType.videoSourceCameraPrimary; const VideoPixelFormat videoFrameType = VideoPixelFormat.videoPixelDefault; const int videoFrameWidth = 10; @@ -151,6 +152,7 @@ void generatedTestCases() { const int videoFrameTextureId = 10; const List videoFrameMatrix = []; Uint8List videoFrameAlphaBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); + Uint8List videoFramePixelBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); final VideoFrame videoFrame = VideoFrame( type: videoFrameType, width: videoFrameWidth, @@ -169,9 +171,11 @@ void generatedTestCases() { textureId: videoFrameTextureId, matrix: videoFrameMatrix, alphaBuffer: videoFrameAlphaBuffer, + pixelBuffer: videoFramePixelBuffer, ); final eventJson = { + 'sourceType': sourceType.value(), 'videoFrame': videoFrame.toJson(), }; @@ -195,385 +199,9 @@ void generatedTestCases() { timeout: const Timeout(Duration(minutes: 1)), ); - testWidgets( - 'onSecondaryCameraCaptureVideoFrame', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: 'app_id', - areaCode: AreaCode.areaCodeGlob.value(), - )); - final mediaEngine = rtcEngine.getMediaEngine(); - - final onSecondaryCameraCaptureVideoFrameCompleter = Completer(); - final theVideoFrameObserver = VideoFrameObserver( - onSecondaryCameraCaptureVideoFrame: (VideoFrame videoFrame) { - onSecondaryCameraCaptureVideoFrameCompleter.complete(true); - }, - ); - - mediaEngine.registerVideoFrameObserver( - theVideoFrameObserver, - ); - -// Delay 500 milliseconds to ensure the registerVideoFrameObserver call completed. 
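
The tests removed from here on (onSecondaryCameraCaptureVideoFrame, onSecondaryPreEncodeCameraVideoFrame, onScreenCaptureVideoFrame, onPreEncodeScreenVideoFrame, and their secondary-screen counterparts) do not represent lost coverage: in 4.2.0 every capture path reports through onCaptureVideoFrame/onPreEncodeVideoFrame, and the new VideoSourceType argument distinguishes them. A sketch of the consolidated observer; the videoSourceScreenPrimary and videoSourceCameraSecondary member names are assumed from the 4.x VideoSourceType enum and do not appear in this patch:

    // Sketch: one callback now covers all capture sources; dispatch on
    // sourceType instead of registering per-source callbacks.
    final observer = VideoFrameObserver(
      onCaptureVideoFrame: (VideoSourceType sourceType, VideoFrame frame) {
        if (sourceType == VideoSourceType.videoSourceScreenPrimary) {
          // previously onScreenCaptureVideoFrame
        } else if (sourceType == VideoSourceType.videoSourceCameraSecondary) {
          // previously onSecondaryCameraCaptureVideoFrame
        }
      },
    );
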
- await Future.delayed(const Duration(milliseconds: 500)); - - { - const VideoPixelFormat videoFrameType = - VideoPixelFormat.videoPixelDefault; - const int videoFrameWidth = 10; - const int videoFrameHeight = 10; - const int videoFrameYStride = 10; - const int videoFrameUStride = 10; - const int videoFrameVStride = 10; - Uint8List videoFrameYBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFrameUBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFrameVBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int videoFrameRotation = 10; - const int videoFrameRenderTimeMs = 10; - const int videoFrameAvsyncType = 10; - Uint8List videoFrameMetadataBuffer = - Uint8List.fromList([1, 2, 3, 4, 5]); - const int videoFrameMetadataSize = 10; - const int videoFrameTextureId = 10; - const List videoFrameMatrix = []; - Uint8List videoFrameAlphaBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - final VideoFrame videoFrame = VideoFrame( - type: videoFrameType, - width: videoFrameWidth, - height: videoFrameHeight, - yStride: videoFrameYStride, - uStride: videoFrameUStride, - vStride: videoFrameVStride, - yBuffer: videoFrameYBuffer, - uBuffer: videoFrameUBuffer, - vBuffer: videoFrameVBuffer, - rotation: videoFrameRotation, - renderTimeMs: videoFrameRenderTimeMs, - avsyncType: videoFrameAvsyncType, - metadataBuffer: videoFrameMetadataBuffer, - metadataSize: videoFrameMetadataSize, - textureId: videoFrameTextureId, - matrix: videoFrameMatrix, - alphaBuffer: videoFrameAlphaBuffer, - ); - - final eventJson = { - 'videoFrame': videoFrame.toJson(), - }; - - irisTester.fireEvent( - 'VideoFrameObserver_onSecondaryCameraCaptureVideoFrame', - params: eventJson); - } - - final eventCalled = - await onSecondaryCameraCaptureVideoFrameCompleter.future; - expect(eventCalled, isTrue); - - { - mediaEngine.unregisterVideoFrameObserver( - theVideoFrameObserver, - ); - } -// Delay 500 milliseconds to ensure the unregisterVideoFrameObserver call completed. - await Future.delayed(const Duration(milliseconds: 500)); - - await rtcEngine.release(); - }, - timeout: const Timeout(Duration(minutes: 1)), - ); - - testWidgets( - 'onSecondaryPreEncodeCameraVideoFrame', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: 'app_id', - areaCode: AreaCode.areaCodeGlob.value(), - )); - final mediaEngine = rtcEngine.getMediaEngine(); - - final onSecondaryPreEncodeCameraVideoFrameCompleter = Completer(); - final theVideoFrameObserver = VideoFrameObserver( - onSecondaryPreEncodeCameraVideoFrame: (VideoFrame videoFrame) { - onSecondaryPreEncodeCameraVideoFrameCompleter.complete(true); - }, - ); - - mediaEngine.registerVideoFrameObserver( - theVideoFrameObserver, - ); - -// Delay 500 milliseconds to ensure the registerVideoFrameObserver call completed. 
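
The media-engine hunks earlier in this patch follow the same 4.2.0 theme for custom audio: setDirectExternalAudioSource is gone, and custom audio now revolves around integer track IDs, with enableCustomAudioLocalPlayback taking trackId/enabled instead of sourceId/enabled and destroyCustomAudioTrack releasing a track. A sketch restricted to the calls those hunks exercise (trackId is the tests' placeholder value; a real ID would come from a track-creation call that these hunks do not show):

    // Sketch: 4.2.0 custom-audio lifecycle as exercised by the smoke tests.
    const int trackId = 10; // placeholder, as in the generated tests
    await mediaEngine.enableCustomAudioLocalPlayback(
      trackId: trackId, // was sourceId before 4.2.0
      enabled: true,
    );
    await mediaEngine.destroyCustomAudioTrack(trackId);
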
- await Future.delayed(const Duration(milliseconds: 500)); - - { - const VideoPixelFormat videoFrameType = - VideoPixelFormat.videoPixelDefault; - const int videoFrameWidth = 10; - const int videoFrameHeight = 10; - const int videoFrameYStride = 10; - const int videoFrameUStride = 10; - const int videoFrameVStride = 10; - Uint8List videoFrameYBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFrameUBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFrameVBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int videoFrameRotation = 10; - const int videoFrameRenderTimeMs = 10; - const int videoFrameAvsyncType = 10; - Uint8List videoFrameMetadataBuffer = - Uint8List.fromList([1, 2, 3, 4, 5]); - const int videoFrameMetadataSize = 10; - const int videoFrameTextureId = 10; - const List videoFrameMatrix = []; - Uint8List videoFrameAlphaBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - final VideoFrame videoFrame = VideoFrame( - type: videoFrameType, - width: videoFrameWidth, - height: videoFrameHeight, - yStride: videoFrameYStride, - uStride: videoFrameUStride, - vStride: videoFrameVStride, - yBuffer: videoFrameYBuffer, - uBuffer: videoFrameUBuffer, - vBuffer: videoFrameVBuffer, - rotation: videoFrameRotation, - renderTimeMs: videoFrameRenderTimeMs, - avsyncType: videoFrameAvsyncType, - metadataBuffer: videoFrameMetadataBuffer, - metadataSize: videoFrameMetadataSize, - textureId: videoFrameTextureId, - matrix: videoFrameMatrix, - alphaBuffer: videoFrameAlphaBuffer, - ); - - final eventJson = { - 'videoFrame': videoFrame.toJson(), - }; - - irisTester.fireEvent( - 'VideoFrameObserver_onSecondaryPreEncodeCameraVideoFrame', - params: eventJson); - } - - final eventCalled = - await onSecondaryPreEncodeCameraVideoFrameCompleter.future; - expect(eventCalled, isTrue); - - { - mediaEngine.unregisterVideoFrameObserver( - theVideoFrameObserver, - ); - } -// Delay 500 milliseconds to ensure the unregisterVideoFrameObserver call completed. - await Future.delayed(const Duration(milliseconds: 500)); - - await rtcEngine.release(); - }, - timeout: const Timeout(Duration(minutes: 1)), - ); - - testWidgets( - 'onScreenCaptureVideoFrame', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: 'app_id', - areaCode: AreaCode.areaCodeGlob.value(), - )); - final mediaEngine = rtcEngine.getMediaEngine(); - - final onScreenCaptureVideoFrameCompleter = Completer(); - final theVideoFrameObserver = VideoFrameObserver( - onScreenCaptureVideoFrame: (VideoFrame videoFrame) { - onScreenCaptureVideoFrameCompleter.complete(true); - }, - ); - - mediaEngine.registerVideoFrameObserver( - theVideoFrameObserver, - ); - -// Delay 500 milliseconds to ensure the registerVideoFrameObserver call completed. 
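
Every observer test in this generated file follows the same fire-and-await skeleton, so it is worth reading once rather than per hunk. A condensed sketch (rtcEngine, mediaEngine, and irisTester come from the surrounding test context; the event name follows the Observer_callback convention used throughout this file):

    // Sketch: the generated observer-test skeleton.
    final completer = Completer<bool>();
    final observer = VideoFrameObserver(
      onCaptureVideoFrame: (VideoSourceType sourceType, VideoFrame frame) {
        completer.complete(true);
      },
    );
    mediaEngine.registerVideoFrameObserver(observer);
    // Give the register call time to land before faking the native event.
    await Future.delayed(const Duration(milliseconds: 500));
    irisTester.fireEvent('VideoFrameObserver_onCaptureVideoFrame', params: {
      'sourceType': VideoSourceType.videoSourceCameraPrimary.value(),
      'videoFrame': VideoFrame().toJson(),
    });
    expect(await completer.future, isTrue);
    mediaEngine.unregisterVideoFrameObserver(observer);
    await rtcEngine.release();
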
- await Future.delayed(const Duration(milliseconds: 500)); - - { - const VideoPixelFormat videoFrameType = - VideoPixelFormat.videoPixelDefault; - const int videoFrameWidth = 10; - const int videoFrameHeight = 10; - const int videoFrameYStride = 10; - const int videoFrameUStride = 10; - const int videoFrameVStride = 10; - Uint8List videoFrameYBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFrameUBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFrameVBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int videoFrameRotation = 10; - const int videoFrameRenderTimeMs = 10; - const int videoFrameAvsyncType = 10; - Uint8List videoFrameMetadataBuffer = - Uint8List.fromList([1, 2, 3, 4, 5]); - const int videoFrameMetadataSize = 10; - const int videoFrameTextureId = 10; - const List videoFrameMatrix = []; - Uint8List videoFrameAlphaBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - final VideoFrame videoFrame = VideoFrame( - type: videoFrameType, - width: videoFrameWidth, - height: videoFrameHeight, - yStride: videoFrameYStride, - uStride: videoFrameUStride, - vStride: videoFrameVStride, - yBuffer: videoFrameYBuffer, - uBuffer: videoFrameUBuffer, - vBuffer: videoFrameVBuffer, - rotation: videoFrameRotation, - renderTimeMs: videoFrameRenderTimeMs, - avsyncType: videoFrameAvsyncType, - metadataBuffer: videoFrameMetadataBuffer, - metadataSize: videoFrameMetadataSize, - textureId: videoFrameTextureId, - matrix: videoFrameMatrix, - alphaBuffer: videoFrameAlphaBuffer, - ); - - final eventJson = { - 'videoFrame': videoFrame.toJson(), - }; - - irisTester.fireEvent('VideoFrameObserver_onScreenCaptureVideoFrame', - params: eventJson); - } - - final eventCalled = await onScreenCaptureVideoFrameCompleter.future; - expect(eventCalled, isTrue); - - { - mediaEngine.unregisterVideoFrameObserver( - theVideoFrameObserver, - ); - } -// Delay 500 milliseconds to ensure the unregisterVideoFrameObserver call completed. - await Future.delayed(const Duration(milliseconds: 500)); - - await rtcEngine.release(); - }, - timeout: const Timeout(Duration(minutes: 1)), - ); - - testWidgets( - 'onPreEncodeScreenVideoFrame', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: 'app_id', - areaCode: AreaCode.areaCodeGlob.value(), - )); - final mediaEngine = rtcEngine.getMediaEngine(); - - final onPreEncodeScreenVideoFrameCompleter = Completer(); - final theVideoFrameObserver = VideoFrameObserver( - onPreEncodeScreenVideoFrame: (VideoFrame videoFrame) { - onPreEncodeScreenVideoFrameCompleter.complete(true); - }, - ); - - mediaEngine.registerVideoFrameObserver( - theVideoFrameObserver, - ); - -// Delay 500 milliseconds to ensure the registerVideoFrameObserver call completed. 
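
A second cross-cutting change in these files: generatedTestCases now receives the IrisTester as a parameter, and the per-test IrisTester()/createDebugApiEngine()/setMockIrisMethodChannelNativeHandle() boilerplate is deleted. Only the new signature appears in this patch, so the caller-side wiring below is an assumption:

    // Sketch (assumed wiring): one shared IrisTester per suite, handed to
    // the generated entry point instead of being re-created in every test.
    void main() {
      final irisTester = IrisTester();
      generatedTestCases(irisTester);
    }
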
- await Future.delayed(const Duration(milliseconds: 500)); - - { - const VideoPixelFormat videoFrameType = - VideoPixelFormat.videoPixelDefault; - const int videoFrameWidth = 10; - const int videoFrameHeight = 10; - const int videoFrameYStride = 10; - const int videoFrameUStride = 10; - const int videoFrameVStride = 10; - Uint8List videoFrameYBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFrameUBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFrameVBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int videoFrameRotation = 10; - const int videoFrameRenderTimeMs = 10; - const int videoFrameAvsyncType = 10; - Uint8List videoFrameMetadataBuffer = - Uint8List.fromList([1, 2, 3, 4, 5]); - const int videoFrameMetadataSize = 10; - const int videoFrameTextureId = 10; - const List videoFrameMatrix = []; - Uint8List videoFrameAlphaBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - final VideoFrame videoFrame = VideoFrame( - type: videoFrameType, - width: videoFrameWidth, - height: videoFrameHeight, - yStride: videoFrameYStride, - uStride: videoFrameUStride, - vStride: videoFrameVStride, - yBuffer: videoFrameYBuffer, - uBuffer: videoFrameUBuffer, - vBuffer: videoFrameVBuffer, - rotation: videoFrameRotation, - renderTimeMs: videoFrameRenderTimeMs, - avsyncType: videoFrameAvsyncType, - metadataBuffer: videoFrameMetadataBuffer, - metadataSize: videoFrameMetadataSize, - textureId: videoFrameTextureId, - matrix: videoFrameMatrix, - alphaBuffer: videoFrameAlphaBuffer, - ); - - final eventJson = { - 'videoFrame': videoFrame.toJson(), - }; - - irisTester.fireEvent('VideoFrameObserver_onPreEncodeScreenVideoFrame', - params: eventJson); - } - - final eventCalled = await onPreEncodeScreenVideoFrameCompleter.future; - expect(eventCalled, isTrue); - - { - mediaEngine.unregisterVideoFrameObserver( - theVideoFrameObserver, - ); - } -// Delay 500 milliseconds to ensure the unregisterVideoFrameObserver call completed. 
- await Future.delayed(const Duration(milliseconds: 500)); - - await rtcEngine.release(); - }, - timeout: const Timeout(Duration(minutes: 1)), - ); - testWidgets( 'onMediaPlayerVideoFrame', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -615,6 +243,7 @@ void generatedTestCases() { const int videoFrameTextureId = 10; const List videoFrameMatrix = []; Uint8List videoFrameAlphaBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); + Uint8List videoFramePixelBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); final VideoFrame videoFrame = VideoFrame( type: videoFrameType, width: videoFrameWidth, @@ -633,6 +262,7 @@ void generatedTestCases() { textureId: videoFrameTextureId, matrix: videoFrameMatrix, alphaBuffer: videoFrameAlphaBuffer, + pixelBuffer: videoFramePixelBuffer, ); const int mediaPlayerId = 10; @@ -661,201 +291,9 @@ void generatedTestCases() { timeout: const Timeout(Duration(minutes: 1)), ); - testWidgets( - 'onSecondaryScreenCaptureVideoFrame', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: 'app_id', - areaCode: AreaCode.areaCodeGlob.value(), - )); - final mediaEngine = rtcEngine.getMediaEngine(); - - final onSecondaryScreenCaptureVideoFrameCompleter = Completer(); - final theVideoFrameObserver = VideoFrameObserver( - onSecondaryScreenCaptureVideoFrame: (VideoFrame videoFrame) { - onSecondaryScreenCaptureVideoFrameCompleter.complete(true); - }, - ); - - mediaEngine.registerVideoFrameObserver( - theVideoFrameObserver, - ); - -// Delay 500 milliseconds to ensure the registerVideoFrameObserver call completed. 
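
The onMediaPlayerVideoFrame hunk just above shows that media-player frames are not source-typed; the callback keeps its single-frame shape and is routed by player ID instead, with pixelBuffer the only addition to its payload. A sketch:

    // Sketch: media-player frames dispatch on mediaPlayerId, not on
    // VideoSourceType; one engine can host several players.
    final observer = VideoFrameObserver(
      onMediaPlayerVideoFrame: (VideoFrame videoFrame, int mediaPlayerId) {
        // identify the player via mediaPlayerId
      },
    );
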
- await Future.delayed(const Duration(milliseconds: 500)); - - { - const VideoPixelFormat videoFrameType = - VideoPixelFormat.videoPixelDefault; - const int videoFrameWidth = 10; - const int videoFrameHeight = 10; - const int videoFrameYStride = 10; - const int videoFrameUStride = 10; - const int videoFrameVStride = 10; - Uint8List videoFrameYBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFrameUBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFrameVBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int videoFrameRotation = 10; - const int videoFrameRenderTimeMs = 10; - const int videoFrameAvsyncType = 10; - Uint8List videoFrameMetadataBuffer = - Uint8List.fromList([1, 2, 3, 4, 5]); - const int videoFrameMetadataSize = 10; - const int videoFrameTextureId = 10; - const List videoFrameMatrix = []; - Uint8List videoFrameAlphaBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - final VideoFrame videoFrame = VideoFrame( - type: videoFrameType, - width: videoFrameWidth, - height: videoFrameHeight, - yStride: videoFrameYStride, - uStride: videoFrameUStride, - vStride: videoFrameVStride, - yBuffer: videoFrameYBuffer, - uBuffer: videoFrameUBuffer, - vBuffer: videoFrameVBuffer, - rotation: videoFrameRotation, - renderTimeMs: videoFrameRenderTimeMs, - avsyncType: videoFrameAvsyncType, - metadataBuffer: videoFrameMetadataBuffer, - metadataSize: videoFrameMetadataSize, - textureId: videoFrameTextureId, - matrix: videoFrameMatrix, - alphaBuffer: videoFrameAlphaBuffer, - ); - - final eventJson = { - 'videoFrame': videoFrame.toJson(), - }; - - irisTester.fireEvent( - 'VideoFrameObserver_onSecondaryScreenCaptureVideoFrame', - params: eventJson); - } - - final eventCalled = - await onSecondaryScreenCaptureVideoFrameCompleter.future; - expect(eventCalled, isTrue); - - { - mediaEngine.unregisterVideoFrameObserver( - theVideoFrameObserver, - ); - } -// Delay 500 milliseconds to ensure the unregisterVideoFrameObserver call completed. - await Future.delayed(const Duration(milliseconds: 500)); - - await rtcEngine.release(); - }, - timeout: const Timeout(Duration(minutes: 1)), - ); - - testWidgets( - 'onSecondaryPreEncodeScreenVideoFrame', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: 'app_id', - areaCode: AreaCode.areaCodeGlob.value(), - )); - final mediaEngine = rtcEngine.getMediaEngine(); - - final onSecondaryPreEncodeScreenVideoFrameCompleter = Completer(); - final theVideoFrameObserver = VideoFrameObserver( - onSecondaryPreEncodeScreenVideoFrame: (VideoFrame videoFrame) { - onSecondaryPreEncodeScreenVideoFrameCompleter.complete(true); - }, - ); - - mediaEngine.registerVideoFrameObserver( - theVideoFrameObserver, - ); - -// Delay 500 milliseconds to ensure the registerVideoFrameObserver call completed. 
- await Future.delayed(const Duration(milliseconds: 500)); - - { - const VideoPixelFormat videoFrameType = - VideoPixelFormat.videoPixelDefault; - const int videoFrameWidth = 10; - const int videoFrameHeight = 10; - const int videoFrameYStride = 10; - const int videoFrameUStride = 10; - const int videoFrameVStride = 10; - Uint8List videoFrameYBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFrameUBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - Uint8List videoFrameVBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - const int videoFrameRotation = 10; - const int videoFrameRenderTimeMs = 10; - const int videoFrameAvsyncType = 10; - Uint8List videoFrameMetadataBuffer = - Uint8List.fromList([1, 2, 3, 4, 5]); - const int videoFrameMetadataSize = 10; - const int videoFrameTextureId = 10; - const List videoFrameMatrix = []; - Uint8List videoFrameAlphaBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); - final VideoFrame videoFrame = VideoFrame( - type: videoFrameType, - width: videoFrameWidth, - height: videoFrameHeight, - yStride: videoFrameYStride, - uStride: videoFrameUStride, - vStride: videoFrameVStride, - yBuffer: videoFrameYBuffer, - uBuffer: videoFrameUBuffer, - vBuffer: videoFrameVBuffer, - rotation: videoFrameRotation, - renderTimeMs: videoFrameRenderTimeMs, - avsyncType: videoFrameAvsyncType, - metadataBuffer: videoFrameMetadataBuffer, - metadataSize: videoFrameMetadataSize, - textureId: videoFrameTextureId, - matrix: videoFrameMatrix, - alphaBuffer: videoFrameAlphaBuffer, - ); - - final eventJson = { - 'videoFrame': videoFrame.toJson(), - }; - - irisTester.fireEvent( - 'VideoFrameObserver_onSecondaryPreEncodeScreenVideoFrame', - params: eventJson); - } - - final eventCalled = - await onSecondaryPreEncodeScreenVideoFrameCompleter.future; - expect(eventCalled, isTrue); - - { - mediaEngine.unregisterVideoFrameObserver( - theVideoFrameObserver, - ); - } -// Delay 500 milliseconds to ensure the unregisterVideoFrameObserver call completed. 
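
Likewise, the onRenderVideoFrame hunk just below is untouched by the sourceType consolidation: rendered remote frames stay keyed by channel and sender. A sketch:

    // Sketch: remote (render-side) frames are identified by channelId and
    // remoteUid rather than by a capture source.
    final observer = VideoFrameObserver(
      onRenderVideoFrame:
          (String channelId, int remoteUid, VideoFrame videoFrame) {
        // identify the sender via (channelId, remoteUid)
      },
    );
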
- await Future.delayed(const Duration(milliseconds: 500)); - - await rtcEngine.release(); - }, - timeout: const Timeout(Duration(minutes: 1)), - ); - testWidgets( 'onRenderVideoFrame', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -900,6 +338,7 @@ void generatedTestCases() { const int videoFrameTextureId = 10; const List videoFrameMatrix = []; Uint8List videoFrameAlphaBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); + Uint8List videoFramePixelBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); final VideoFrame videoFrame = VideoFrame( type: videoFrameType, width: videoFrameWidth, @@ -918,6 +357,7 @@ void generatedTestCases() { textureId: videoFrameTextureId, matrix: videoFrameMatrix, alphaBuffer: videoFrameAlphaBuffer, + pixelBuffer: videoFramePixelBuffer, ); final eventJson = { @@ -949,10 +389,6 @@ void generatedTestCases() { testWidgets( 'onTranscodedVideoFrame', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -994,6 +430,7 @@ void generatedTestCases() { const int videoFrameTextureId = 10; const List videoFrameMatrix = []; Uint8List videoFrameAlphaBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); + Uint8List videoFramePixelBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); final VideoFrame videoFrame = VideoFrame( type: videoFrameType, width: videoFrameWidth, @@ -1012,6 +449,7 @@ void generatedTestCases() { textureId: videoFrameTextureId, matrix: videoFrameMatrix, alphaBuffer: videoFrameAlphaBuffer, + pixelBuffer: videoFramePixelBuffer, ); final eventJson = { diff --git a/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayeraudioframeobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaplayer_audiopcmframesink_testcases.generated.dart similarity index 83% rename from test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayeraudioframeobserver_testcases.generated.dart rename to test_shard/fake_test_app/integration_test/generated/mediaplayer_audiopcmframesink_testcases.generated.dart index 85296f52d..baaf81dec 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayeraudioframeobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaplayer_audiopcmframesink_testcases.generated.dart @@ -10,14 +10,10 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { testWidgets( 'onFrame', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -28,14 +24,18 @@ void generatedTestCases() { await mediaPlayerController.initialize(); final onFrameCompleter = Completer(); - final theMediaPlayerAudioFrameObserver = 
MediaPlayerAudioFrameObserver( + final theAudioPcmFrameSink = AudioPcmFrameSink( onFrame: (AudioPcmFrame frame) { onFrameCompleter.complete(true); }, ); + const RawAudioFrameOpModeType mode = + RawAudioFrameOpModeType.rawAudioFrameOpModeReadOnly; + mediaPlayerController.registerAudioFrameObserver( - theMediaPlayerAudioFrameObserver, + observer: theAudioPcmFrameSink, + mode: mode, ); // Delay 500 milliseconds to ensure the registerAudioFrameObserver call completed. @@ -62,8 +62,7 @@ void generatedTestCases() { 'frame': frame.toJson(), }; - irisTester.fireEvent('MediaPlayerAudioFrameObserver_onFrame', - params: eventJson); + irisTester.fireEvent('AudioPcmFrameSink_onFrame', params: eventJson); } final eventCalled = await onFrameCompleter.future; @@ -71,7 +70,7 @@ void generatedTestCases() { { mediaPlayerController.unregisterAudioFrameObserver( - theMediaPlayerAudioFrameObserver, + theAudioPcmFrameSink, ); } // Delay 500 milliseconds to ensure the unregisterAudioFrameObserver call completed. diff --git a/test_shard/fake_test_app/integration_test/generated/mediaplayer_audiospectrumobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaplayer_audiospectrumobserver_testcases.generated.dart index 436f7fa85..51c127312 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaplayer_audiospectrumobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaplayer_audiospectrumobserver_testcases.generated.dart @@ -10,14 +10,10 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { testWidgets( 'onLocalAudioSpectrum', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -79,10 +75,6 @@ void generatedTestCases() { testWidgets( 'onRemoteAudioSpectrum', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', diff --git a/test_shard/fake_test_app/integration_test/generated/mediaplayer_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaplayer_fake_test.generated.dart index 540524edc..673a28b0b 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaplayer_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaplayer_fake_test.generated.dart @@ -14,10 +14,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'getMediaPlayerId', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -54,10 +50,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'open', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - 
setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -99,10 +91,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'openWithMediaSource', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -157,10 +145,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'play', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -197,10 +181,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'pause', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -237,10 +217,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'stop', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -277,10 +253,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'resume', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -317,10 +289,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'seek', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -360,10 +328,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'setAudioPitch', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -403,10 +367,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'getDuration', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -443,10 +403,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'getPlayPosition', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -483,10 +439,6 @@ void mediaPlayerControllerSmokeTestCases() { 
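
Later hunks in this file track the rename already seen in the audiopcmframesink test file above: MediaPlayerAudioFrameObserver becomes AudioPcmFrameSink, and registerAudioFrameObserver on the controller now also takes a RawAudioFrameOpModeType mode. A sketch using only the names those hunks show:

    // Sketch: 4.2.0 PCM-frame registration on the media player controller.
    final sink = AudioPcmFrameSink(
      onFrame: (AudioPcmFrame frame) {
        // raw PCM frames from the player land here
      },
    );
    mediaPlayerController.registerAudioFrameObserver(
      observer: sink,
      mode: RawAudioFrameOpModeType.rawAudioFrameOpModeReadOnly,
    );
    // ... later:
    mediaPlayerController.unregisterAudioFrameObserver(sink);
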
testWidgets( 'getStreamCount', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -523,10 +475,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'getStreamInfo', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -566,10 +514,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'setLoopCount', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -609,10 +553,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'setPlaybackSpeed', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -652,10 +592,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'selectAudioTrack', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -695,10 +631,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'setPlayerOptionInInt', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -740,10 +672,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'setPlayerOptionInString', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -785,10 +713,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'takeScreenshot', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -828,10 +752,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'selectInternalSubtitle', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -871,10 +791,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'setExternalSubtitle', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = 
irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -914,10 +830,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'getState', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -954,10 +866,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'mute', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -997,10 +905,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'getMute', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1037,10 +941,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'adjustPlayoutVolume', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1080,10 +980,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'getPlayoutVolume', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1120,10 +1016,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'adjustPublishSignalVolume', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1163,10 +1055,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'getPublishSignalVolume', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1203,10 +1091,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'setView', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1246,10 +1130,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'setRenderMode', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', 
defaultValue: ''); @@ -1289,10 +1169,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'registerPlayerSourceObserver', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1346,10 +1222,6 @@ void mediaPlayerControllerSmokeTestCases() { testWidgets( 'unregisterPlayerSourceObserver', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1401,12 +1273,8 @@ void mediaPlayerControllerSmokeTestCases() { ); testWidgets( - 'registerMediaPlayerAudioSpectrumObserver', + 'registerAudioFrameObserver', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1421,19 +1289,18 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - final AudioSpectrumObserver observer = AudioSpectrumObserver( - onLocalAudioSpectrum: (AudioSpectrumData data) {}, - onRemoteAudioSpectrum: (List spectrums, int spectrumNumber) {}, + final AudioPcmFrameSink observer = AudioPcmFrameSink( + onFrame: (AudioPcmFrame frame) {}, ); - const int intervalInMS = 10; - mediaPlayerController.registerMediaPlayerAudioSpectrumObserver( + const RawAudioFrameOpModeType mode = + RawAudioFrameOpModeType.rawAudioFrameOpModeReadOnly; + mediaPlayerController.registerAudioFrameObserver( observer: observer, - intervalInMS: intervalInMS, + mode: mode, ); } catch (e) { if (e is! AgoraRtcException) { - debugPrint( - '[registerMediaPlayerAudioSpectrumObserver] error: ${e.toString()}'); + debugPrint('[registerAudioFrameObserver] error: ${e.toString()}'); rethrow; } @@ -1450,12 +1317,8 @@ void mediaPlayerControllerSmokeTestCases() { ); testWidgets( - 'unregisterMediaPlayerAudioSpectrumObserver', + 'unregisterAudioFrameObserver', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1470,17 +1333,15 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - final AudioSpectrumObserver observer = AudioSpectrumObserver( - onLocalAudioSpectrum: (AudioSpectrumData data) {}, - onRemoteAudioSpectrum: (List spectrums, int spectrumNumber) {}, + final AudioPcmFrameSink observer = AudioPcmFrameSink( + onFrame: (AudioPcmFrame frame) {}, ); - mediaPlayerController.unregisterMediaPlayerAudioSpectrumObserver( + mediaPlayerController.unregisterAudioFrameObserver( observer, ); } catch (e) { if (e is! 
AgoraRtcException) { - debugPrint( - '[unregisterMediaPlayerAudioSpectrumObserver] error: ${e.toString()}'); + debugPrint('[unregisterAudioFrameObserver] error: ${e.toString()}'); rethrow; } @@ -1497,12 +1358,8 @@ void mediaPlayerControllerSmokeTestCases() { ); testWidgets( - 'setAudioDualMonoMode', + 'registerVideoFrameObserver', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1517,13 +1374,16 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const AudioDualMonoMode mode = AudioDualMonoMode.audioDualMonoStereo; - await mediaPlayerController.setAudioDualMonoMode( - mode, + final MediaPlayerVideoFrameObserver observer = + MediaPlayerVideoFrameObserver( + onFrame: (VideoFrame frame) {}, + ); + mediaPlayerController.registerVideoFrameObserver( + observer, ); } catch (e) { if (e is! AgoraRtcException) { - debugPrint('[setAudioDualMonoMode] error: ${e.toString()}'); + debugPrint('[registerVideoFrameObserver] error: ${e.toString()}'); rethrow; } @@ -1540,12 +1400,8 @@ void mediaPlayerControllerSmokeTestCases() { ); testWidgets( - 'getPlayerSdkVersion', + 'unregisterVideoFrameObserver', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1560,10 +1416,16 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - await mediaPlayerController.getPlayerSdkVersion(); + final MediaPlayerVideoFrameObserver observer = + MediaPlayerVideoFrameObserver( + onFrame: (VideoFrame frame) {}, + ); + mediaPlayerController.unregisterVideoFrameObserver( + observer, + ); } catch (e) { if (e is! AgoraRtcException) { - debugPrint('[getPlayerSdkVersion] error: ${e.toString()}'); + debugPrint('[unregisterVideoFrameObserver] error: ${e.toString()}'); rethrow; } @@ -1580,12 +1442,8 @@ void mediaPlayerControllerSmokeTestCases() { ); testWidgets( - 'getPlaySrc', + 'registerMediaPlayerAudioSpectrumObserver', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1600,10 +1458,19 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - await mediaPlayerController.getPlaySrc(); + final AudioSpectrumObserver observer = AudioSpectrumObserver( + onLocalAudioSpectrum: (AudioSpectrumData data) {}, + onRemoteAudioSpectrum: (List spectrums, int spectrumNumber) {}, + ); + const int intervalInMS = 10; + mediaPlayerController.registerMediaPlayerAudioSpectrumObserver( + observer: observer, + intervalInMS: intervalInMS, + ); } catch (e) { if (e is! 
AgoraRtcException) { - debugPrint('[getPlaySrc] error: ${e.toString()}'); + debugPrint( + '[registerMediaPlayerAudioSpectrumObserver] error: ${e.toString()}'); rethrow; } @@ -1620,12 +1487,8 @@ void mediaPlayerControllerSmokeTestCases() { ); testWidgets( - 'openWithAgoraCDNSrc', + 'unregisterMediaPlayerAudioSpectrumObserver', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1640,15 +1503,17 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const String src = "hello"; - const int startPos = 10; - await mediaPlayerController.openWithAgoraCDNSrc( - src: src, - startPos: startPos, + final AudioSpectrumObserver observer = AudioSpectrumObserver( + onLocalAudioSpectrum: (AudioSpectrumData data) {}, + onRemoteAudioSpectrum: (List spectrums, int spectrumNumber) {}, + ); + mediaPlayerController.unregisterMediaPlayerAudioSpectrumObserver( + observer, ); } catch (e) { if (e is! AgoraRtcException) { - debugPrint('[openWithAgoraCDNSrc] error: ${e.toString()}'); + debugPrint( + '[unregisterMediaPlayerAudioSpectrumObserver] error: ${e.toString()}'); rethrow; } @@ -1665,12 +1530,8 @@ void mediaPlayerControllerSmokeTestCases() { ); testWidgets( - 'getAgoraCDNLineCount', + 'setAudioDualMonoMode', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1685,10 +1546,13 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - await mediaPlayerController.getAgoraCDNLineCount(); + const AudioDualMonoMode mode = AudioDualMonoMode.audioDualMonoStereo; + await mediaPlayerController.setAudioDualMonoMode( + mode, + ); } catch (e) { if (e is! AgoraRtcException) { - debugPrint('[getAgoraCDNLineCount] error: ${e.toString()}'); + debugPrint('[setAudioDualMonoMode] error: ${e.toString()}'); rethrow; } @@ -1705,12 +1569,8 @@ void mediaPlayerControllerSmokeTestCases() { ); testWidgets( - 'switchAgoraCDNLineByIndex', + 'getPlayerSdkVersion', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1725,13 +1585,10 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const int index = 10; - await mediaPlayerController.switchAgoraCDNLineByIndex( - index, - ); + await mediaPlayerController.getPlayerSdkVersion(); } catch (e) { if (e is! 
AgoraRtcException) { - debugPrint('[switchAgoraCDNLineByIndex] error: ${e.toString()}'); + debugPrint('[getPlayerSdkVersion] error: ${e.toString()}'); rethrow; } @@ -1748,12 +1605,8 @@ void mediaPlayerControllerSmokeTestCases() { ); testWidgets( - 'getCurrentAgoraCDNIndex', + 'getPlaySrc', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1768,10 +1621,10 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - await mediaPlayerController.getCurrentAgoraCDNIndex(); + await mediaPlayerController.getPlaySrc(); } catch (e) { if (e is! AgoraRtcException) { - debugPrint('[getCurrentAgoraCDNIndex] error: ${e.toString()}'); + debugPrint('[getPlaySrc] error: ${e.toString()}'); rethrow; } @@ -1788,12 +1641,8 @@ void mediaPlayerControllerSmokeTestCases() { ); testWidgets( - 'enableAutoSwitchAgoraCDN', + 'openWithAgoraCDNSrc', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1808,13 +1657,15 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const bool enable = true; - await mediaPlayerController.enableAutoSwitchAgoraCDN( - enable, + const String src = "hello"; + const int startPos = 10; + await mediaPlayerController.openWithAgoraCDNSrc( + src: src, + startPos: startPos, ); } catch (e) { if (e is! AgoraRtcException) { - debugPrint('[enableAutoSwitchAgoraCDN] error: ${e.toString()}'); + debugPrint('[openWithAgoraCDNSrc] error: ${e.toString()}'); rethrow; } @@ -1831,12 +1682,8 @@ void mediaPlayerControllerSmokeTestCases() { ); testWidgets( - 'renewAgoraCDNSrcToken', + 'getAgoraCDNLineCount', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1851,15 +1698,10 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const String token = "hello"; - const int ts = 10; - await mediaPlayerController.renewAgoraCDNSrcToken( - token: token, - ts: ts, - ); + await mediaPlayerController.getAgoraCDNLineCount(); } catch (e) { if (e is! 
AgoraRtcException) { - debugPrint('[renewAgoraCDNSrcToken] error: ${e.toString()}'); + debugPrint('[getAgoraCDNLineCount] error: ${e.toString()}'); rethrow; } @@ -1876,12 +1718,8 @@ void mediaPlayerControllerSmokeTestCases() { ); testWidgets( - 'switchAgoraCDNSrc', + 'switchAgoraCDNLineByIndex', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1896,15 +1734,13 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const String src = "hello"; - const bool syncPts = true; - await mediaPlayerController.switchAgoraCDNSrc( - src: src, - syncPts: syncPts, + const int index = 10; + await mediaPlayerController.switchAgoraCDNLineByIndex( + index, ); } catch (e) { if (e is! AgoraRtcException) { - debugPrint('[switchAgoraCDNSrc] error: ${e.toString()}'); + debugPrint('[switchAgoraCDNLineByIndex] error: ${e.toString()}'); rethrow; } @@ -1921,12 +1757,8 @@ void mediaPlayerControllerSmokeTestCases() { ); testWidgets( - 'switchSrc', + 'getCurrentAgoraCDNIndex', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1941,15 +1773,10 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const String src = "hello"; - const bool syncPts = true; - await mediaPlayerController.switchSrc( - src: src, - syncPts: syncPts, - ); + await mediaPlayerController.getCurrentAgoraCDNIndex(); } catch (e) { if (e is! AgoraRtcException) { - debugPrint('[switchSrc] error: ${e.toString()}'); + debugPrint('[getCurrentAgoraCDNIndex] error: ${e.toString()}'); rethrow; } @@ -1966,12 +1793,8 @@ void mediaPlayerControllerSmokeTestCases() { ); testWidgets( - 'preloadSrc', + 'enableAutoSwitchAgoraCDN', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1986,15 +1809,13 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const String src = "hello"; - const int startPos = 10; - await mediaPlayerController.preloadSrc( - src: src, - startPos: startPos, + const bool enable = true; + await mediaPlayerController.enableAutoSwitchAgoraCDN( + enable, ); } catch (e) { if (e is! 
AgoraRtcException) { - debugPrint('[preloadSrc] error: ${e.toString()}'); + debugPrint('[enableAutoSwitchAgoraCDN] error: ${e.toString()}'); rethrow; } @@ -2011,12 +1832,8 @@ void mediaPlayerControllerSmokeTestCases() { ); testWidgets( - 'playPreloadedSrc', + 'renewAgoraCDNSrcToken', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2031,13 +1848,15 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const String src = "hello"; - await mediaPlayerController.playPreloadedSrc( - src, + const String token = "hello"; + const int ts = 10; + await mediaPlayerController.renewAgoraCDNSrcToken( + token: token, + ts: ts, ); } catch (e) { if (e is! AgoraRtcException) { - debugPrint('[playPreloadedSrc] error: ${e.toString()}'); + debugPrint('[renewAgoraCDNSrcToken] error: ${e.toString()}'); rethrow; } @@ -2054,12 +1873,8 @@ void mediaPlayerControllerSmokeTestCases() { ); testWidgets( - 'unloadSrc', + 'switchAgoraCDNSrc', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2075,12 +1890,14 @@ void mediaPlayerControllerSmokeTestCases() { try { const String src = "hello"; - await mediaPlayerController.unloadSrc( - src, + const bool syncPts = true; + await mediaPlayerController.switchAgoraCDNSrc( + src: src, + syncPts: syncPts, ); } catch (e) { if (e is! AgoraRtcException) { - debugPrint('[unloadSrc] error: ${e.toString()}'); + debugPrint('[switchAgoraCDNSrc] error: ${e.toString()}'); rethrow; } @@ -2097,12 +1914,8 @@ void mediaPlayerControllerSmokeTestCases() { ); testWidgets( - 'setSpatialAudioParams', + 'switchSrc', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2117,30 +1930,15 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const double paramsSpeakerAzimuth = 10.0; - const double paramsSpeakerElevation = 10.0; - const double paramsSpeakerDistance = 10.0; - const int paramsSpeakerOrientation = 10; - const bool paramsEnableBlur = true; - const bool paramsEnableAirAbsorb = true; - const double paramsSpeakerAttenuation = 10.0; - const bool paramsEnableDoppler = true; - const SpatialAudioParams params = SpatialAudioParams( - speakerAzimuth: paramsSpeakerAzimuth, - speakerElevation: paramsSpeakerElevation, - speakerDistance: paramsSpeakerDistance, - speakerOrientation: paramsSpeakerOrientation, - enableBlur: paramsEnableBlur, - enableAirAbsorb: paramsEnableAirAbsorb, - speakerAttenuation: paramsSpeakerAttenuation, - enableDoppler: paramsEnableDoppler, - ); - await mediaPlayerController.setSpatialAudioParams( - params, + const String src = "hello"; + const bool syncPts = true; + await mediaPlayerController.switchSrc( + src: src, + syncPts: syncPts, ); } catch (e) { if (e is! 
AgoraRtcException) { - debugPrint('[setSpatialAudioParams] error: ${e.toString()}'); + debugPrint('[switchSrc] error: ${e.toString()}'); rethrow; } @@ -2157,12 +1955,8 @@ void mediaPlayerControllerSmokeTestCases() { ); testWidgets( - 'setSoundPositionParams', + 'preloadSrc', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2177,15 +1971,15 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - const double pan = 10.0; - const double gain = 10.0; - await mediaPlayerController.setSoundPositionParams( - pan: pan, - gain: gain, + const String src = "hello"; + const int startPos = 10; + await mediaPlayerController.preloadSrc( + src: src, + startPos: startPos, ); } catch (e) { if (e is! AgoraRtcException) { - debugPrint('[setSoundPositionParams] error: ${e.toString()}'); + debugPrint('[preloadSrc] error: ${e.toString()}'); rethrow; } @@ -2202,12 +1996,8 @@ void mediaPlayerControllerSmokeTestCases() { ); testWidgets( - 'registerAudioFrameObserver', + 'playPreloadedSrc', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2222,16 +2012,13 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - final MediaPlayerAudioFrameObserver observer = - MediaPlayerAudioFrameObserver( - onFrame: (AudioPcmFrame frame) {}, - ); - mediaPlayerController.registerAudioFrameObserver( - observer, + const String src = "hello"; + await mediaPlayerController.playPreloadedSrc( + src, ); } catch (e) { if (e is! AgoraRtcException) { - debugPrint('[registerAudioFrameObserver] error: ${e.toString()}'); + debugPrint('[playPreloadedSrc] error: ${e.toString()}'); rethrow; } @@ -2248,12 +2035,8 @@ void mediaPlayerControllerSmokeTestCases() { ); testWidgets( - 'unregisterAudioFrameObserver', + 'unloadSrc', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2268,16 +2051,13 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - final MediaPlayerAudioFrameObserver observer = - MediaPlayerAudioFrameObserver( - onFrame: (AudioPcmFrame frame) {}, - ); - mediaPlayerController.unregisterAudioFrameObserver( - observer, + const String src = "hello"; + await mediaPlayerController.unloadSrc( + src, ); } catch (e) { if (e is! 
AgoraRtcException) { - debugPrint('[unregisterAudioFrameObserver] error: ${e.toString()}'); + debugPrint('[unloadSrc] error: ${e.toString()}'); rethrow; } @@ -2294,12 +2074,8 @@ void mediaPlayerControllerSmokeTestCases() { ); testWidgets( - 'registerVideoFrameObserver', + 'setSpatialAudioParams', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2314,16 +2090,30 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - final MediaPlayerVideoFrameObserver observer = - MediaPlayerVideoFrameObserver( - onFrame: (VideoFrame frame) {}, + const double paramsSpeakerAzimuth = 10.0; + const double paramsSpeakerElevation = 10.0; + const double paramsSpeakerDistance = 10.0; + const int paramsSpeakerOrientation = 10; + const bool paramsEnableBlur = true; + const bool paramsEnableAirAbsorb = true; + const double paramsSpeakerAttenuation = 10.0; + const bool paramsEnableDoppler = true; + const SpatialAudioParams params = SpatialAudioParams( + speakerAzimuth: paramsSpeakerAzimuth, + speakerElevation: paramsSpeakerElevation, + speakerDistance: paramsSpeakerDistance, + speakerOrientation: paramsSpeakerOrientation, + enableBlur: paramsEnableBlur, + enableAirAbsorb: paramsEnableAirAbsorb, + speakerAttenuation: paramsSpeakerAttenuation, + enableDoppler: paramsEnableDoppler, ); - mediaPlayerController.registerVideoFrameObserver( - observer, + await mediaPlayerController.setSpatialAudioParams( + params, ); } catch (e) { if (e is! AgoraRtcException) { - debugPrint('[registerVideoFrameObserver] error: ${e.toString()}'); + debugPrint('[setSpatialAudioParams] error: ${e.toString()}'); rethrow; } @@ -2340,12 +2130,8 @@ void mediaPlayerControllerSmokeTestCases() { ); testWidgets( - 'unregisterVideoFrameObserver', + 'setSoundPositionParams', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2360,16 +2146,15 @@ void mediaPlayerControllerSmokeTestCases() { await mediaPlayerController.initialize(); try { - final MediaPlayerVideoFrameObserver observer = - MediaPlayerVideoFrameObserver( - onFrame: (VideoFrame frame) {}, - ); - mediaPlayerController.unregisterVideoFrameObserver( - observer, + const double pan = 10.0; + const double gain = 10.0; + await mediaPlayerController.setSoundPositionParams( + pan: pan, + gain: gain, ); } catch (e) { if (e is! 
AgoraRtcException) { - debugPrint('[unregisterVideoFrameObserver] error: ${e.toString()}'); + debugPrint('[setSoundPositionParams] error: ${e.toString()}'); rethrow; } diff --git a/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayersourceobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayersourceobserver_testcases.generated.dart index d02f21c15..545dd9ffb 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayersourceobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayersourceobserver_testcases.generated.dart @@ -10,14 +10,10 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { testWidgets( 'onPlayerSourceStateChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -75,10 +71,6 @@ void generatedTestCases() { testWidgets( 'onPositionChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -132,10 +124,6 @@ void generatedTestCases() { testWidgets( 'onPlayerEvent', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -195,10 +183,6 @@ void generatedTestCases() { testWidgets( 'onMetaData', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -254,10 +238,6 @@ void generatedTestCases() { testWidgets( 'onPlayBufferUpdated', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -311,10 +291,6 @@ void generatedTestCases() { testWidgets( 'onPreloadEvent', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -371,10 +347,6 @@ void generatedTestCases() { testWidgets( 'onCompleted', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine 
rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -424,10 +396,6 @@ void generatedTestCases() { testWidgets( 'onAgoraCDNTokenWillExpire', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -478,10 +446,6 @@ void generatedTestCases() { testWidgets( 'onPlayerSrcInfoChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -547,10 +511,6 @@ void generatedTestCases() { testWidgets( 'onPlayerInfoUpdated', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -618,10 +578,6 @@ void generatedTestCases() { testWidgets( 'onAudioVolumeIndication', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', diff --git a/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayervideoframeobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayervideoframeobserver_testcases.generated.dart index ea46c67b1..d5c69c37a 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayervideoframeobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayervideoframeobserver_testcases.generated.dart @@ -10,14 +10,10 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { testWidgets( 'onFrame', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -59,6 +55,7 @@ void generatedTestCases() { const int frameTextureId = 10; const List frameMatrix = []; Uint8List frameAlphaBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); + Uint8List framePixelBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); final VideoFrame frame = VideoFrame( type: frameType, width: frameWidth, @@ -77,6 +74,7 @@ void generatedTestCases() { textureId: frameTextureId, matrix: frameMatrix, alphaBuffer: frameAlphaBuffer, + pixelBuffer: framePixelBuffer, ); final eventJson = { diff --git a/test_shard/fake_test_app/integration_test/generated/mediarecorder_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediarecorder_fake_test.generated.dart index d75bf2507..e7c3dcfe3 100644 
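The mediarecorder hunks that follow all apply the same 4.2.0 migration: the recorder is no longer obtained per RtcConnection via getMediaRecorder() and released directly, but created per recorded stream and destroyed through the engine, and the observer callbacks gain the channelId/uid of that stream. A minimal sketch of the migrated lifecycle, assuming only the calls these generated tests exercise (all literal values are placeholders):

    final rtcEngine = createAgoraRtcEngine();
    await rtcEngine.initialize(RtcEngineContext(appId: '<app_id>'));

    // 4.2.0: the recorder is bound to one stream at creation time.
    final mediaRecorder = (await rtcEngine.createMediaRecorder(
        RecorderStreamInfo(channelId: 'hello', uid: 0)))!;

    // Callbacks now identify the recorded stream by channelId/uid.
    await mediaRecorder.setMediaRecorderObserver(MediaRecorderObserver(
      onRecorderStateChanged: (String channelId, int uid, RecorderState state,
          RecorderErrorCode error) {},
      onRecorderInfoUpdated: (String channelId, int uid, RecorderInfo info) {},
    ));

    await mediaRecorder.startRecording(const MediaRecorderConfiguration(
      containerFormat: MediaRecorderContainerFormat.formatMp4,
      streamType: MediaRecorderStreamType.streamTypeBoth,
    ));
    await mediaRecorder.stopRecording();

    // Destruction goes through the engine instead of mediaRecorder.release().
    await rtcEngine.destroyMediaRecorder(mediaRecorder);
    await rtcEngine.release();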
--- a/test_shard/fake_test_app/integration_test/generated/mediarecorder_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediarecorder_fake_test.generated.dart @@ -13,10 +13,6 @@ void mediaRecorderSmokeTestCases() { testWidgets( 'setMediaRecorderObserver', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -26,23 +22,18 @@ void mediaRecorderSmokeTestCases() { areaCode: AreaCode.areaCodeGlob.value(), )); - final mediaRecorder = rtcEngine.getMediaRecorder(); + final mediaRecorder = (await rtcEngine.createMediaRecorder( + RecorderStreamInfo(channelId: 'hello', uid: 0)))!; try { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( - channelId: connectionChannelId, - localUid: connectionLocalUid, - ); final MediaRecorderObserver callback = MediaRecorderObserver( - onRecorderStateChanged: - (RecorderState state, RecorderErrorCode error) {}, - onRecorderInfoUpdated: (RecorderInfo info) {}, + onRecorderStateChanged: (String channelId, int uid, + RecorderState state, RecorderErrorCode error) {}, + onRecorderInfoUpdated: + (String channelId, int uid, RecorderInfo info) {}, ); await mediaRecorder.setMediaRecorderObserver( - connection: connection, - callback: callback, + callback, ); } catch (e) { if (e is! AgoraRtcException) { @@ -56,7 +47,7 @@ void mediaRecorderSmokeTestCases() { } } - await mediaRecorder.release(); + await rtcEngine.destroyMediaRecorder(mediaRecorder); await rtcEngine.release(); }, // skip: !(), @@ -65,10 +56,6 @@ void mediaRecorderSmokeTestCases() { testWidgets( 'startRecording', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -78,15 +65,10 @@ void mediaRecorderSmokeTestCases() { areaCode: AreaCode.areaCodeGlob.value(), )); - final mediaRecorder = rtcEngine.getMediaRecorder(); + final mediaRecorder = (await rtcEngine.createMediaRecorder( + RecorderStreamInfo(channelId: 'hello', uid: 0)))!; try { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( - channelId: connectionChannelId, - localUid: connectionLocalUid, - ); const MediaRecorderContainerFormat configContainerFormat = MediaRecorderContainerFormat.formatMp4; const MediaRecorderStreamType configStreamType = @@ -102,8 +84,7 @@ void mediaRecorderSmokeTestCases() { recorderInfoUpdateInterval: configRecorderInfoUpdateInterval, ); await mediaRecorder.startRecording( - connection: connection, - config: config, + config, ); } catch (e) { if (e is! 
AgoraRtcException) { @@ -117,7 +98,7 @@ void mediaRecorderSmokeTestCases() { } } - await mediaRecorder.release(); + await rtcEngine.destroyMediaRecorder(mediaRecorder); await rtcEngine.release(); }, // skip: !(), @@ -126,10 +107,6 @@ void mediaRecorderSmokeTestCases() { testWidgets( 'stopRecording', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -139,18 +116,11 @@ void mediaRecorderSmokeTestCases() { areaCode: AreaCode.areaCodeGlob.value(), )); - final mediaRecorder = rtcEngine.getMediaRecorder(); + final mediaRecorder = (await rtcEngine.createMediaRecorder( + RecorderStreamInfo(channelId: 'hello', uid: 0)))!; try { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( - channelId: connectionChannelId, - localUid: connectionLocalUid, - ); - await mediaRecorder.stopRecording( - connection, - ); + await mediaRecorder.stopRecording(); } catch (e) { if (e is! AgoraRtcException) { debugPrint('[stopRecording] error: ${e.toString()}'); @@ -163,45 +133,7 @@ void mediaRecorderSmokeTestCases() { } } - await mediaRecorder.release(); - await rtcEngine.release(); - }, -// skip: !(), - ); - - testWidgets( - 'release', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - - final mediaRecorder = rtcEngine.getMediaRecorder(); - - try { - await mediaRecorder.release(); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint('[release] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. 
- rethrow; - } - } - - await mediaRecorder.release(); + await rtcEngine.destroyMediaRecorder(mediaRecorder); await rtcEngine.release(); }, // skip: !(), diff --git a/test_shard/fake_test_app/integration_test/generated/mediarecorder_mediarecorderobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediarecorder_mediarecorderobserver_testcases.generated.dart index 56420a92b..c58a406aa 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediarecorder_mediarecorderobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediarecorder_mediarecorderobserver_testcases.generated.dart @@ -10,49 +10,43 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { testWidgets( 'onRecorderStateChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', areaCode: AreaCode.areaCodeGlob.value(), )); - final mediaRecorder = rtcEngine.getMediaRecorder(); + final mediaRecorder = (await rtcEngine.createMediaRecorder( + RecorderStreamInfo(channelId: 'hello', uid: 0)))!; final onRecorderStateChangedCompleter = Completer(); final theMediaRecorderObserver = MediaRecorderObserver( - onRecorderStateChanged: (RecorderState state, RecorderErrorCode error) { + onRecorderStateChanged: (String channelId, int uid, RecorderState state, + RecorderErrorCode error) { onRecorderStateChangedCompleter.complete(true); }, ); - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( - channelId: connectionChannelId, - localUid: connectionLocalUid, - ); - await mediaRecorder.setMediaRecorderObserver( - connection: connection, - callback: theMediaRecorderObserver, + theMediaRecorderObserver, ); // Delay 500 milliseconds to ensure the setMediaRecorderObserver call completed. await Future.delayed(const Duration(milliseconds: 500)); { + const String channelId = "hello"; + const int uid = 10; const RecorderState state = RecorderState.recorderStateError; const RecorderErrorCode error = RecorderErrorCode.recorderErrorNone; final eventJson = { + 'channelId': channelId, + 'uid': uid, 'state': state.value(), 'error': error.value(), }; @@ -68,7 +62,7 @@ void generatedTestCases() { // Delay 500 milliseconds to ensure the call completed. 
await Future.delayed(const Duration(milliseconds: 500)); - await mediaRecorder.release(); + await rtcEngine.destroyMediaRecorder(mediaRecorder); await rtcEngine.release(); }, timeout: const Timeout(Duration(minutes: 1)), @@ -77,41 +71,32 @@ void generatedTestCases() { testWidgets( 'onRecorderInfoUpdated', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', areaCode: AreaCode.areaCodeGlob.value(), )); - final mediaRecorder = rtcEngine.getMediaRecorder(); + final mediaRecorder = (await rtcEngine.createMediaRecorder( + RecorderStreamInfo(channelId: 'hello', uid: 0)))!; final onRecorderInfoUpdatedCompleter = Completer(); final theMediaRecorderObserver = MediaRecorderObserver( - onRecorderInfoUpdated: (RecorderInfo info) { + onRecorderInfoUpdated: (String channelId, int uid, RecorderInfo info) { onRecorderInfoUpdatedCompleter.complete(true); }, ); - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( - channelId: connectionChannelId, - localUid: connectionLocalUid, - ); - await mediaRecorder.setMediaRecorderObserver( - connection: connection, - callback: theMediaRecorderObserver, + theMediaRecorderObserver, ); // Delay 500 milliseconds to ensure the setMediaRecorderObserver call completed. await Future.delayed(const Duration(milliseconds: 500)); { + const String channelId = "hello"; + const int uid = 10; const String infoFileName = "hello"; const int infoDurationMs = 10; const int infoFileSize = 10; @@ -122,6 +107,8 @@ void generatedTestCases() { ); final eventJson = { + 'channelId': channelId, + 'uid': uid, 'info': info.toJson(), }; @@ -136,7 +123,7 @@ void generatedTestCases() { // Delay 500 milliseconds to ensure the call completed. 
await Future.delayed(const Duration(milliseconds: 500)); - await mediaRecorder.release(); + await rtcEngine.destroyMediaRecorder(mediaRecorder); await rtcEngine.release(); }, timeout: const Timeout(Duration(minutes: 1)), diff --git a/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_fake_test.generated.dart index e734c028a..897f128f6 100644 --- a/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_fake_test.generated.dart @@ -13,10 +13,6 @@ void musicContentCenterSmokeTestCases() { testWidgets( 'initialize', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -32,11 +28,13 @@ void musicContentCenterSmokeTestCases() { const String configurationAppId = "hello"; const String configurationToken = "hello"; const int configurationMccUid = 10; + const int configurationMaxCacheSize = 10; const MusicContentCenterConfiguration configuration = MusicContentCenterConfiguration( appId: configurationAppId, token: configurationToken, mccUid: configurationMccUid, + maxCacheSize: configurationMaxCacheSize, ); await musicContentCenter.initialize( configuration, @@ -62,10 +60,6 @@ void musicContentCenterSmokeTestCases() { testWidgets( 'renewToken', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -103,10 +97,6 @@ void musicContentCenterSmokeTestCases() { testWidgets( 'release', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -141,10 +131,6 @@ void musicContentCenterSmokeTestCases() { testWidgets( 'registerEventHandler', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -159,13 +145,17 @@ void musicContentCenterSmokeTestCases() { try { final MusicContentCenterEventHandler eventHandler = MusicContentCenterEventHandler( - onMusicChartsResult: (String requestId, - MusicContentCenterStatusCode status, List result) {}, - onMusicCollectionResult: (String requestId, - MusicContentCenterStatusCode status, MusicCollection result) {}, - onLyricResult: (String requestId, String lyricUrl) {}, - onPreLoadEvent: (int songCode, int percent, PreloadStatusCode status, - String msg, String lyricUrl) {}, + onMusicChartsResult: (String requestId, List result, + MusicContentCenterStatusCode errorCode) {}, + onMusicCollectionResult: (String requestId, MusicCollection result, + MusicContentCenterStatusCode errorCode) {}, + onLyricResult: (String requestId, String lyricUrl, + MusicContentCenterStatusCode errorCode) {}, + onPreLoadEvent: (int songCode, + int percent, + String lyricUrl, + 
PreloadStatusCode status, + MusicContentCenterStatusCode errorCode) {}, ); musicContentCenter.registerEventHandler( eventHandler, @@ -191,10 +181,6 @@ void musicContentCenterSmokeTestCases() { testWidgets( 'unregisterEventHandler', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -229,10 +215,6 @@ void musicContentCenterSmokeTestCases() { testWidgets( 'getMusicCharts', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -267,10 +249,6 @@ void musicContentCenterSmokeTestCases() { testWidgets( 'getMusicCollectionByMusicChartId', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -315,10 +293,6 @@ void musicContentCenterSmokeTestCases() { testWidgets( 'searchMusic', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -362,10 +336,6 @@ void musicContentCenterSmokeTestCases() { testWidgets( 'preload', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -403,12 +373,82 @@ void musicContentCenterSmokeTestCases() { ); testWidgets( - 'isPreloaded', + 'removeCache', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + final musicContentCenter = rtcEngine.getMusicContentCenter(); + try { + const int songCode = 10; + await musicContentCenter.removeCache( + songCode, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[removeCache] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await musicContentCenter.release(); + await rtcEngine.release(); + }, +// skip: !(), + ); + + testWidgets( + 'getCaches', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + final musicContentCenter = rtcEngine.getMusicContentCenter(); + + try { + const int cacheInfoSize = 10; + await musicContentCenter.getCaches( + cacheInfoSize, + ); + } catch (e) { + if (e is! 
AgoraRtcException) { + debugPrint('[getCaches] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await musicContentCenter.release(); + await rtcEngine.release(); + }, +// skip: !(), + ); + + testWidgets( + 'isPreloaded', + (WidgetTester tester) async { String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -446,10 +486,6 @@ void musicContentCenterSmokeTestCases() { testWidgets( 'getLyric', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); diff --git a/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_musiccontentcentereventhandler_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_musiccontentcentereventhandler_testcases.generated.dart index bd5353f60..c9683d01e 100644 --- a/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_musiccontentcentereventhandler_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_musiccontentcentereventhandler_testcases.generated.dart @@ -10,14 +10,10 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { testWidgets( 'onMusicChartsResult', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -31,8 +27,8 @@ void generatedTestCases() { final onMusicChartsResultCompleter = Completer(); final theMusicContentCenterEventHandler = MusicContentCenterEventHandler( - onMusicChartsResult: (String requestId, - MusicContentCenterStatusCode status, List result) { + onMusicChartsResult: (String requestId, List result, + MusicContentCenterStatusCode errorCode) { onMusicChartsResultCompleter.complete(true); }, ); @@ -46,14 +42,14 @@ void generatedTestCases() { { const String requestId = "hello"; - const MusicContentCenterStatusCode status = - MusicContentCenterStatusCode.kMusicContentCenterStatusOk; const List result = []; + const MusicContentCenterStatusCode errorCode = + MusicContentCenterStatusCode.kMusicContentCenterStatusOk; final eventJson = { 'requestId': requestId, - 'status': status.value(), 'result': result, + 'errorCode': errorCode.value(), }; irisTester.fireEvent( @@ -79,10 +75,6 @@ void generatedTestCases() { testWidgets( 'onMusicCollectionResult', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -96,8 +88,8 @@ void generatedTestCases() { final onMusicCollectionResultCompleter = Completer(); final theMusicContentCenterEventHandler = MusicContentCenterEventHandler( - onMusicCollectionResult: (String requestId, - MusicContentCenterStatusCode status, MusicCollection result) { + 
onMusicCollectionResult: (String requestId, MusicCollection result, + MusicContentCenterStatusCode errorCode) { onMusicCollectionResultCompleter.complete(true); }, ); @@ -111,12 +103,12 @@ void generatedTestCases() { { const String requestId = "hello"; - const MusicContentCenterStatusCode status = + const MusicContentCenterStatusCode errorCode = MusicContentCenterStatusCode.kMusicContentCenterStatusOk; final eventJson = { 'requestId': requestId, - 'status': status.value(), + 'errorCode': errorCode.value(), }; irisTester.fireEvent( @@ -142,10 +134,6 @@ void generatedTestCases() { testWidgets( 'onLyricResult', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -159,7 +147,8 @@ void generatedTestCases() { final onLyricResultCompleter = Completer(); final theMusicContentCenterEventHandler = MusicContentCenterEventHandler( - onLyricResult: (String requestId, String lyricUrl) { + onLyricResult: (String requestId, String lyricUrl, + MusicContentCenterStatusCode errorCode) { onLyricResultCompleter.complete(true); }, ); @@ -174,10 +163,13 @@ void generatedTestCases() { { const String requestId = "hello"; const String lyricUrl = "hello"; + const MusicContentCenterStatusCode errorCode = + MusicContentCenterStatusCode.kMusicContentCenterStatusOk; final eventJson = { 'requestId': requestId, 'lyricUrl': lyricUrl, + 'errorCode': errorCode.value(), }; irisTester.fireEvent('MusicContentCenterEventHandler_onLyricResult', @@ -202,10 +194,6 @@ void generatedTestCases() { testWidgets( 'onPreLoadEvent', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -219,8 +207,8 @@ void generatedTestCases() { final onPreLoadEventCompleter = Completer(); final theMusicContentCenterEventHandler = MusicContentCenterEventHandler( - onPreLoadEvent: (int songCode, int percent, PreloadStatusCode status, - String msg, String lyricUrl) { + onPreLoadEvent: (int songCode, int percent, String lyricUrl, + PreloadStatusCode status, MusicContentCenterStatusCode errorCode) { onPreLoadEventCompleter.complete(true); }, ); @@ -235,17 +223,18 @@ void generatedTestCases() { { const int songCode = 10; const int percent = 10; + const String lyricUrl = "hello"; const PreloadStatusCode status = PreloadStatusCode.kPreloadStatusCompleted; - const String msg = "hello"; - const String lyricUrl = "hello"; + const MusicContentCenterStatusCode errorCode = + MusicContentCenterStatusCode.kMusicContentCenterStatusOk; final eventJson = { 'songCode': songCode, 'percent': percent, - 'status': status.value(), - 'msg': msg, 'lyricUrl': lyricUrl, + 'status': status.value(), + 'errorCode': errorCode.value(), }; irisTester.fireEvent('MusicContentCenterEventHandler_onPreLoadEvent', diff --git a/test_shard/fake_test_app/integration_test/generated/rtcengine_audioencodedframeobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/rtcengine_audioencodedframeobserver_testcases.generated.dart index b70361e88..da026a008 100644 --- 
a/test_shard/fake_test_app/integration_test/generated/rtcengine_audioencodedframeobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/rtcengine_audioencodedframeobserver_testcases.generated.dart @@ -10,14 +10,10 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { testWidgets( 'onRecordAudioEncodedFrame', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -85,7 +81,7 @@ void generatedTestCases() { }; irisTester.fireEvent( - 'AudioEncodedFrameObserver_OnRecordAudioEncodedFrame', + 'AudioEncodedFrameObserver_onRecordAudioEncodedFrame', params: eventJson); } @@ -108,10 +104,6 @@ void generatedTestCases() { testWidgets( 'onPlaybackAudioEncodedFrame', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -179,7 +171,7 @@ void generatedTestCases() { }; irisTester.fireEvent( - 'AudioEncodedFrameObserver_OnPlaybackAudioEncodedFrame', + 'AudioEncodedFrameObserver_onPlaybackAudioEncodedFrame', params: eventJson); } @@ -202,10 +194,6 @@ void generatedTestCases() { testWidgets( 'onMixedAudioEncodedFrame', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -273,7 +261,7 @@ void generatedTestCases() { }; irisTester.fireEvent( - 'AudioEncodedFrameObserver_OnMixedAudioEncodedFrame', + 'AudioEncodedFrameObserver_onMixedAudioEncodedFrame', params: eventJson); } diff --git a/test_shard/fake_test_app/integration_test/generated/rtcengine_audiospectrumobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/rtcengine_audiospectrumobserver_testcases.generated.dart index b31d8b7ed..b71f0e344 100644 --- a/test_shard/fake_test_app/integration_test/generated/rtcengine_audiospectrumobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/rtcengine_audiospectrumobserver_testcases.generated.dart @@ -10,14 +10,10 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { testWidgets( 'onLocalAudioSpectrum', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -73,10 +69,6 @@ void generatedTestCases() { testWidgets( 'onRemoteAudioSpectrum', (WidgetTester tester) async { - final irisTester = IrisTester(); - final 
debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', diff --git a/test_shard/fake_test_app/integration_test/generated/rtcengine_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/rtcengine_fake_test.generated.dart index 211fce3d8..3cec4ae77 100644 --- a/test_shard/fake_test_app/integration_test/generated/rtcengine_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/rtcengine_fake_test.generated.dart @@ -14,10 +14,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'release', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -51,10 +47,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'initialize', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -84,6 +76,7 @@ void rtcEngineSmokeTestCases() { const int contextAreaCode = 10; const bool contextUseExternalEglContext = true; const bool contextDomainLimit = true; + const bool contextAutoRegisterAgoraExtensions = true; const RtcEngineContext context = RtcEngineContext( appId: contextAppId, channelProfile: contextChannelProfile, @@ -94,6 +87,7 @@ void rtcEngineSmokeTestCases() { threadPriority: contextThreadPriority, useExternalEglContext: contextUseExternalEglContext, domainLimit: contextDomainLimit, + autoRegisterAgoraExtensions: contextAutoRegisterAgoraExtensions, ); await rtcEngine.initialize( context, @@ -117,10 +111,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'getVersion', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -151,10 +141,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'getErrorDescription', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -188,10 +174,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'joinChannel', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -221,15 +203,12 @@ void rtcEngineSmokeTestCases() { const bool optionsPublishScreenTrack = true; const bool optionsPublishSecondaryScreenTrack = true; const bool optionsPublishCustomAudioTrack = true; - const int optionsPublishCustomAudioSourceId = 10; - const bool optionsPublishCustomAudioTrackEnableAec = true; - const bool optionsPublishDirectCustomAudioTrack = true; - const bool optionsPublishCustomAudioTrackAec = true; + const int optionsPublishCustomAudioTrackId = 10; 
const bool optionsPublishCustomVideoTrack = true; const bool optionsPublishEncodedVideoTrack = true; const bool optionsPublishMediaPlayerAudioTrack = true; const bool optionsPublishMediaPlayerVideoTrack = true; - const bool optionsPublishTrancodedVideoTrack = true; + const bool optionsPublishTranscodedVideoTrack = true; const bool optionsAutoSubscribeAudio = true; const bool optionsAutoSubscribeVideo = true; const bool optionsEnableAudioRecordingOrPlayout = true; @@ -251,16 +230,12 @@ void rtcEngineSmokeTestCases() { publishScreenTrack: optionsPublishScreenTrack, publishSecondaryScreenTrack: optionsPublishSecondaryScreenTrack, publishCustomAudioTrack: optionsPublishCustomAudioTrack, - publishCustomAudioSourceId: optionsPublishCustomAudioSourceId, - publishCustomAudioTrackEnableAec: - optionsPublishCustomAudioTrackEnableAec, - publishDirectCustomAudioTrack: optionsPublishDirectCustomAudioTrack, - publishCustomAudioTrackAec: optionsPublishCustomAudioTrackAec, + publishCustomAudioTrackId: optionsPublishCustomAudioTrackId, publishCustomVideoTrack: optionsPublishCustomVideoTrack, publishEncodedVideoTrack: optionsPublishEncodedVideoTrack, publishMediaPlayerAudioTrack: optionsPublishMediaPlayerAudioTrack, publishMediaPlayerVideoTrack: optionsPublishMediaPlayerVideoTrack, - publishTrancodedVideoTrack: optionsPublishTrancodedVideoTrack, + publishTranscodedVideoTrack: optionsPublishTranscodedVideoTrack, autoSubscribeAudio: optionsAutoSubscribeAudio, autoSubscribeVideo: optionsAutoSubscribeVideo, enableAudioRecordingOrPlayout: optionsEnableAudioRecordingOrPlayout, @@ -303,10 +278,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'updateChannelMediaOptions', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -333,15 +304,12 @@ void rtcEngineSmokeTestCases() { const bool optionsPublishScreenTrack = true; const bool optionsPublishSecondaryScreenTrack = true; const bool optionsPublishCustomAudioTrack = true; - const int optionsPublishCustomAudioSourceId = 10; - const bool optionsPublishCustomAudioTrackEnableAec = true; - const bool optionsPublishDirectCustomAudioTrack = true; - const bool optionsPublishCustomAudioTrackAec = true; + const int optionsPublishCustomAudioTrackId = 10; const bool optionsPublishCustomVideoTrack = true; const bool optionsPublishEncodedVideoTrack = true; const bool optionsPublishMediaPlayerAudioTrack = true; const bool optionsPublishMediaPlayerVideoTrack = true; - const bool optionsPublishTrancodedVideoTrack = true; + const bool optionsPublishTranscodedVideoTrack = true; const bool optionsAutoSubscribeAudio = true; const bool optionsAutoSubscribeVideo = true; const bool optionsEnableAudioRecordingOrPlayout = true; @@ -363,16 +331,12 @@ void rtcEngineSmokeTestCases() { publishScreenTrack: optionsPublishScreenTrack, publishSecondaryScreenTrack: optionsPublishSecondaryScreenTrack, publishCustomAudioTrack: optionsPublishCustomAudioTrack, - publishCustomAudioSourceId: optionsPublishCustomAudioSourceId, - publishCustomAudioTrackEnableAec: - optionsPublishCustomAudioTrackEnableAec, - publishDirectCustomAudioTrack: optionsPublishDirectCustomAudioTrack, - publishCustomAudioTrackAec: optionsPublishCustomAudioTrackAec, + publishCustomAudioTrackId: optionsPublishCustomAudioTrackId, publishCustomVideoTrack: optionsPublishCustomVideoTrack, publishEncodedVideoTrack: 
optionsPublishEncodedVideoTrack, publishMediaPlayerAudioTrack: optionsPublishMediaPlayerAudioTrack, publishMediaPlayerVideoTrack: optionsPublishMediaPlayerVideoTrack, - publishTrancodedVideoTrack: optionsPublishTrancodedVideoTrack, + publishTranscodedVideoTrack: optionsPublishTranscodedVideoTrack, autoSubscribeAudio: optionsAutoSubscribeAudio, autoSubscribeVideo: optionsAutoSubscribeVideo, enableAudioRecordingOrPlayout: optionsEnableAudioRecordingOrPlayout, @@ -412,10 +376,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'leaveChannel', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -456,10 +416,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'renewToken', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -493,10 +449,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setChannelProfile', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -531,10 +483,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setClientRole', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -574,10 +522,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'startEchoTest', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -588,9 +532,22 @@ void rtcEngineSmokeTestCases() { )); try { - const int intervalInSeconds = 10; + const int configView = 10; + const bool configEnableAudio = true; + const bool configEnableVideo = true; + const String configToken = "hello"; + const String configChannelId = "hello"; + const int configIntervalInSeconds = 10; + const EchoTestConfiguration config = EchoTestConfiguration( + view: configView, + enableAudio: configEnableAudio, + enableVideo: configEnableVideo, + token: configToken, + channelId: configChannelId, + intervalInSeconds: configIntervalInSeconds, + ); await rtcEngine.startEchoTest( - intervalInSeconds: intervalInSeconds, + config, ); } catch (e) { if (e is! 
AgoraRtcException) { @@ -611,10 +568,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'stopEchoTest', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -645,10 +598,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'enableMultiCamera', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -701,10 +650,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'enableVideo', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -735,10 +680,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'disableVideo', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -769,10 +710,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'startPreview', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -807,10 +744,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'stopPreview', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -845,10 +778,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'startLastmileProbeTest', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -891,10 +820,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'stopLastmileProbeTest', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -925,10 +850,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setVideoEncoderConfiguration', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -996,10 +917,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setBeautyEffectOptions', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - 
setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1049,10 +966,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setLowlightEnhanceOptions', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1097,10 +1010,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setVideoDenoiserOptions', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1145,10 +1054,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setColorEnhanceOptions', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1191,10 +1096,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'enableVirtualBackground', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1207,7 +1108,7 @@ void rtcEngineSmokeTestCases() { try { const bool enabled = true; const BackgroundSourceType backgroundSourceBackgroundSourceType = - BackgroundSourceType.backgroundColor; + BackgroundSourceType.backgroundNone; const BackgroundBlurDegree backgroundSourceBlurDegree = BackgroundBlurDegree.blurDegreeLow; const int backgroundSourceColor = 10; @@ -1248,52 +1149,9 @@ void rtcEngineSmokeTestCases() { }, ); - testWidgets( - 'enableRemoteSuperResolution', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - - try { - const int userId = 10; - const bool enable = true; - await rtcEngine.enableRemoteSuperResolution( - userId: userId, - enable: enable, - ); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint('[enableRemoteSuperResolution] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. 
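// [editor's note] Two 4.2.0 changes meet in this stretch: the generated
// enableVirtualBackground test now seeds BackgroundSourceType.backgroundNone,
// which suggests backgroundNone became the enum's first value, and the
// enableRemoteSuperResolution test is deleted outright, matching that API's
// removal from the native 4.2.0 SDK. A typical background-blur call under
// the 6.x Dart signature (parameter names assumed from that signature):
//
//   await rtcEngine.enableVirtualBackground(
//     enabled: true,
//     backgroundSource: const VirtualBackgroundSource(
//       backgroundSourceType: BackgroundSourceType.backgroundBlur,
//       blurDegree: BackgroundBlurDegree.blurDegreeHigh,
//     ),
//     segproperty: const SegmentationProperty(),
//   );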
- rethrow; - } - } - - await rtcEngine.release(); - }, - ); - testWidgets( 'setupRemoteVideo', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1324,6 +1182,7 @@ void rtcEngineSmokeTestCases() { const int canvasView = 10; const int canvasUid = 10; const int canvasMediaPlayerId = 10; + const bool canvasEnableAlphaMask = true; const VideoCanvas canvas = VideoCanvas( view: canvasView, uid: canvasUid, @@ -1333,6 +1192,7 @@ void rtcEngineSmokeTestCases() { sourceType: canvasSourceType, mediaPlayerId: canvasMediaPlayerId, cropArea: canvasCropArea, + enableAlphaMask: canvasEnableAlphaMask, ); await rtcEngine.setupRemoteVideo( canvas, @@ -1356,10 +1216,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setupLocalVideo', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1390,6 +1246,7 @@ void rtcEngineSmokeTestCases() { const int canvasView = 10; const int canvasUid = 10; const int canvasMediaPlayerId = 10; + const bool canvasEnableAlphaMask = true; const VideoCanvas canvas = VideoCanvas( view: canvasView, uid: canvasUid, @@ -1399,6 +1256,7 @@ void rtcEngineSmokeTestCases() { sourceType: canvasSourceType, mediaPlayerId: canvasMediaPlayerId, cropArea: canvasCropArea, + enableAlphaMask: canvasEnableAlphaMask, ); await rtcEngine.setupLocalVideo( canvas, @@ -1420,12 +1278,42 @@ void rtcEngineSmokeTestCases() { ); testWidgets( - 'enableAudio', + 'setVideoScenario', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + try { + const VideoApplicationScenarioType scenarioType = + VideoApplicationScenarioType.applicationScenarioGeneral; + await rtcEngine.setVideoScenario( + scenarioType, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[setVideoScenario] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
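// [editor's note] Two additions surface here: VideoCanvas gains an
// enableAlphaMask flag (wired into both setupRemoteVideo and setupLocalVideo
// above), and setVideoScenario is a brand-new RtcEngine method. Sketches
// under the signatures the generated tests exercise; the alpha-mask
// semantics (compositing a stream that carries alpha data) are an assumption
// from the field name, and applicationScenarioMeeting is assumed to sit
// alongside the general preset used above:
//
//   await rtcEngine.setupRemoteVideo(const VideoCanvas(
//     uid: 12345,              // hypothetical remote uid
//     enableAlphaMask: true,
//   ));
//
//   await rtcEngine.setVideoScenario(
//     VideoApplicationScenarioType.applicationScenarioMeeting,
//   );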
+ rethrow; + } + } + + await rtcEngine.release(); + }, + ); + testWidgets( + 'enableAudio', + (WidgetTester tester) async { String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1456,10 +1344,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'disableAudio', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1490,10 +1374,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setAudioProfile', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1530,10 +1410,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setAudioScenario', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1568,10 +1444,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'enableLocalAudio', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1605,10 +1477,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'muteLocalAudioStream', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1642,10 +1510,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'muteAllRemoteAudioStreams', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1679,10 +1543,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setDefaultMuteAllRemoteAudioStreams', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1717,10 +1577,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'muteRemoteAudioStream', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1756,10 +1612,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'muteLocalVideoStream', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: 
''); @@ -1793,10 +1645,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'enableLocalVideo', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1830,10 +1678,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'muteAllRemoteVideoStreams', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1867,10 +1711,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setDefaultMuteAllRemoteVideoStreams', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1905,10 +1745,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'muteRemoteVideoStream', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1944,10 +1780,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setRemoteVideoStreamType', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1983,10 +1815,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setRemoteVideoSubscriptionOptions', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2028,10 +1856,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setRemoteDefaultVideoStreamType', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2066,10 +1890,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'enableAudioVolumeIndication', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2107,10 +1927,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'startAudioRecording', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2159,10 +1975,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'registerAudioEncodedFrameObserver', (WidgetTester tester) async { - final 
irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2215,10 +2027,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'stopAudioRecording', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2249,10 +2057,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'startAudioMixing', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2292,10 +2096,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'stopAudioMixing', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2326,10 +2126,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'pauseAudioMixing', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2360,10 +2156,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'resumeAudioMixing', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2394,10 +2186,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'selectAudioTrack', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2431,10 +2219,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'getAudioTrackCount', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2465,10 +2249,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'adjustAudioMixingVolume', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2502,10 +2282,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'adjustAudioMixingPublishVolume', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const 
String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2539,10 +2315,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'getAudioMixingPublishVolume', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2573,10 +2345,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'adjustAudioMixingPlayoutVolume', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2610,10 +2378,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'getAudioMixingPlayoutVolume', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2644,10 +2408,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'getAudioMixingDuration', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2678,10 +2438,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'getAudioMixingCurrentPosition', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2712,10 +2468,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setAudioMixingPosition', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2749,10 +2501,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setAudioMixingDualMonoMode', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2787,10 +2535,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setAudioMixingPitch', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2824,10 +2568,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'getEffectsVolume', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2858,10 +2598,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setEffectsVolume', (WidgetTester tester) 
async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2895,10 +2631,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'preloadEffect', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2936,10 +2668,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'playEffect', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2987,10 +2715,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'playAllEffects', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3032,10 +2756,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'getVolumeOfEffect', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3069,10 +2789,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setVolumeOfEffect', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3108,10 +2824,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'pauseEffect', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3145,10 +2857,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'pauseAllEffects', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3179,10 +2887,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'resumeEffect', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3216,10 +2920,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'resumeAllEffects', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ 
-3250,10 +2950,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'stopEffect', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3287,10 +2983,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'stopAllEffects', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3321,10 +3013,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'unloadEffect', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3358,10 +3046,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'unloadAllEffects', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3392,10 +3076,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'getEffectDuration', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3429,10 +3109,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setEffectPosition', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3468,10 +3144,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'getEffectCurrentPosition', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3505,10 +3177,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'enableSoundPositionIndication', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3542,10 +3210,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setRemoteVoicePosition', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3583,10 +3247,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'enableSpatialAudio', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - 
setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3620,10 +3280,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setRemoteUserSpatialAudioParams', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3677,10 +3333,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setVoiceBeautifierPreset', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3715,10 +3367,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setAudioEffectPreset', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3752,10 +3400,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setVoiceConversionPreset', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3790,10 +3434,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setAudioEffectParameters', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3831,10 +3471,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setVoiceBeautifierParameters', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3873,10 +3509,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setVoiceConversionParameters', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3915,10 +3547,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setLocalVoicePitch', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3950,12 +3578,41 @@ void rtcEngineSmokeTestCases() { ); testWidgets( - 'setLocalVoiceEqualization', + 'setLocalVoiceFormant', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + 
defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + try { + const double formantRatio = 10.0; + await rtcEngine.setLocalVoiceFormant( + formantRatio, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[setLocalVoiceFormant] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngine.release(); + }, + ); + testWidgets( + 'setLocalVoiceEqualization', + (WidgetTester tester) async { String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -3992,10 +3649,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setLocalVoiceReverb', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -4031,10 +3684,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setHeadphoneEQPreset', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -4069,10 +3718,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setHeadphoneEQParameters', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -4108,10 +3753,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setLogFile', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -4145,10 +3786,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setLogFilter', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -4182,10 +3819,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setLogLevel', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -4219,10 +3852,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setLogFileSize', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -4256,10 +3885,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'uploadLogFile', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - 
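// [editor's note] setLocalVoiceFormant, exercised a few hunks up, is new in
// 4.2.0. The fake test feeds it 10.0, but the documented real-world range is
// roughly [-1.0, 1.0] with 0.0 as the natural timbre (range taken from the
// 4.2.0 release notes; treat it as an assumption here):
//
//   await rtcEngine.setLocalVoiceFormant(0.6);  // fuller, thicker voice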
setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -4293,10 +3918,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setLocalRenderMode', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -4333,10 +3954,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setRemoteRenderMode', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -4375,10 +3992,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setLocalVideoMirrorMode', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -4413,10 +4026,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'enableDualStreamMode', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -4464,10 +4073,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setDualStreamMode', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -4514,12 +4119,8 @@ void rtcEngineSmokeTestCases() { ); testWidgets( - 'enableEchoCancellationExternal', + 'enableCustomAudioLocalPlayback', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -4530,15 +4131,15 @@ void rtcEngineSmokeTestCases() { )); try { + const int trackId = 10; const bool enabled = true; - const int audioSourceDelay = 10; - await rtcEngine.enableEchoCancellationExternal( + await rtcEngine.enableCustomAudioLocalPlayback( + trackId: trackId, enabled: enabled, - audioSourceDelay: audioSourceDelay, ); } catch (e) { if (e is! 
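// [editor's note] enableEchoCancellationExternal is gone; in its place the
// test drives enableCustomAudioLocalPlayback, now keyed on a trackId rather
// than the old sourceId. The id below is hypothetical; in real code it would
// come from the track-creation API (see the note after the removed
// start/stopPrimary/SecondaryCustomAudioTrack tests further down):
//
//   const int trackId = 0;  // hypothetical id from createCustomAudioTrack
//   await rtcEngine.enableCustomAudioLocalPlayback(
//     trackId: trackId,
//     enabled: true,
//   );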
AgoraRtcException) { - debugPrint('[enableEchoCancellationExternal] error: ${e.toString()}'); + debugPrint('[enableCustomAudioLocalPlayback] error: ${e.toString()}'); rethrow; } @@ -4553,12 +4154,8 @@ void rtcEngineSmokeTestCases() { ); testWidgets( - 'enableCustomAudioLocalPlayback', + 'setRecordingAudioFrameParameters', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -4569,15 +4166,21 @@ void rtcEngineSmokeTestCases() { )); try { - const int sourceId = 10; - const bool enabled = true; - await rtcEngine.enableCustomAudioLocalPlayback( - sourceId: sourceId, - enabled: enabled, + const int sampleRate = 10; + const int channel = 10; + const RawAudioFrameOpModeType mode = + RawAudioFrameOpModeType.rawAudioFrameOpModeReadOnly; + const int samplesPerCall = 10; + await rtcEngine.setRecordingAudioFrameParameters( + sampleRate: sampleRate, + channel: channel, + mode: mode, + samplesPerCall: samplesPerCall, ); } catch (e) { if (e is! AgoraRtcException) { - debugPrint('[enableCustomAudioLocalPlayback] error: ${e.toString()}'); + debugPrint( + '[setRecordingAudioFrameParameters] error: ${e.toString()}'); rethrow; } @@ -4592,12 +4195,8 @@ void rtcEngineSmokeTestCases() { ); testWidgets( - 'startPrimaryCustomAudioTrack', + 'setPlaybackAudioFrameParameters', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -4608,16 +4207,21 @@ void rtcEngineSmokeTestCases() { )); try { - const bool configEnableLocalPlayback = true; - const AudioTrackConfig config = AudioTrackConfig( - enableLocalPlayback: configEnableLocalPlayback, - ); - await rtcEngine.startPrimaryCustomAudioTrack( - config, + const int sampleRate = 10; + const int channel = 10; + const RawAudioFrameOpModeType mode = + RawAudioFrameOpModeType.rawAudioFrameOpModeReadOnly; + const int samplesPerCall = 10; + await rtcEngine.setPlaybackAudioFrameParameters( + sampleRate: sampleRate, + channel: channel, + mode: mode, + samplesPerCall: samplesPerCall, ); } catch (e) { if (e is! AgoraRtcException) { - debugPrint('[startPrimaryCustomAudioTrack] error: ${e.toString()}'); + debugPrint( + '[setPlaybackAudioFrameParameters] error: ${e.toString()}'); rethrow; } @@ -4632,12 +4236,8 @@ void rtcEngineSmokeTestCases() { ); testWidgets( - 'stopPrimaryCustomAudioTrack', + 'setMixedAudioFrameParameters', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -4648,10 +4248,17 @@ void rtcEngineSmokeTestCases() { )); try { - await rtcEngine.stopPrimaryCustomAudioTrack(); + const int sampleRate = 10; + const int channel = 10; + const int samplesPerCall = 10; + await rtcEngine.setMixedAudioFrameParameters( + sampleRate: sampleRate, + channel: channel, + samplesPerCall: samplesPerCall, + ); } catch (e) { if (e is! 
AgoraRtcException) { - debugPrint('[stopPrimaryCustomAudioTrack] error: ${e.toString()}'); + debugPrint('[setMixedAudioFrameParameters] error: ${e.toString()}'); rethrow; } @@ -4666,217 +4273,8 @@ void rtcEngineSmokeTestCases() { ); testWidgets( - 'startSecondaryCustomAudioTrack', + 'setEarMonitoringAudioFrameParameters', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - - try { - const bool configEnableLocalPlayback = true; - const AudioTrackConfig config = AudioTrackConfig( - enableLocalPlayback: configEnableLocalPlayback, - ); - await rtcEngine.startSecondaryCustomAudioTrack( - config, - ); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint('[startSecondaryCustomAudioTrack] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. - rethrow; - } - } - - await rtcEngine.release(); - }, - ); - - testWidgets( - 'stopSecondaryCustomAudioTrack', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - - try { - await rtcEngine.stopSecondaryCustomAudioTrack(); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint('[stopSecondaryCustomAudioTrack] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. - rethrow; - } - } - - await rtcEngine.release(); - }, - ); - - testWidgets( - 'setRecordingAudioFrameParameters', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - - try { - const int sampleRate = 10; - const int channel = 10; - const RawAudioFrameOpModeType mode = - RawAudioFrameOpModeType.rawAudioFrameOpModeReadOnly; - const int samplesPerCall = 10; - await rtcEngine.setRecordingAudioFrameParameters( - sampleRate: sampleRate, - channel: channel, - mode: mode, - samplesPerCall: samplesPerCall, - ); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint( - '[setRecordingAudioFrameParameters] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. 
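// [editor's note] The four start/stopPrimaryCustomAudioTrack and
// start/stopSecondaryCustomAudioTrack tests are deleted here: 4.2.0 replaces
// the fixed primary/secondary pair with explicitly created tracks. A sketch
// of the assumed replacement lifecycle (createCustomAudioTrack and
// destroyCustomAudioTrack per the 4.2.0 API surface; treat the exact
// signatures as assumptions):
//
//   final int trackId = await rtcEngine.createCustomAudioTrack(
//     trackType: AudioTrackType.audioTrackMixable,
//     config: const AudioTrackConfig(enableLocalPlayback: true),
//   );
//   // ...push PCM frames tagged with this trackId via the media engine...
//   await rtcEngine.destroyCustomAudioTrack(trackId);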
- rethrow; - } - } - - await rtcEngine.release(); - }, - ); - - testWidgets( - 'setPlaybackAudioFrameParameters', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - - try { - const int sampleRate = 10; - const int channel = 10; - const RawAudioFrameOpModeType mode = - RawAudioFrameOpModeType.rawAudioFrameOpModeReadOnly; - const int samplesPerCall = 10; - await rtcEngine.setPlaybackAudioFrameParameters( - sampleRate: sampleRate, - channel: channel, - mode: mode, - samplesPerCall: samplesPerCall, - ); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint( - '[setPlaybackAudioFrameParameters] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. - rethrow; - } - } - - await rtcEngine.release(); - }, - ); - - testWidgets( - 'setMixedAudioFrameParameters', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - - try { - const int sampleRate = 10; - const int channel = 10; - const int samplesPerCall = 10; - await rtcEngine.setMixedAudioFrameParameters( - sampleRate: sampleRate, - channel: channel, - samplesPerCall: samplesPerCall, - ); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint('[setMixedAudioFrameParameters] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. 
- rethrow; - } - } - - await rtcEngine.release(); - }, - ); - - testWidgets( - 'setEarMonitoringAudioFrameParameters', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -4918,10 +4316,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setPlaybackAudioFrameBeforeMixingParameters', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -4958,10 +4352,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'enableAudioSpectrumMonitor', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -4995,10 +4385,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'disableAudioSpectrumMonitor', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5029,10 +4415,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'registerAudioSpectrumObserver', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5069,10 +4451,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'unregisterAudioSpectrumObserver', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5110,10 +4488,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'adjustRecordingSignalVolume', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5147,10 +4521,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'muteRecordingSignal', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5184,10 +4554,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'adjustPlaybackSignalVolume', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5221,10 +4587,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'adjustUserPlaybackSignalVolume', 
(WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5260,10 +4622,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setLocalPublishFallbackOption', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5298,10 +4656,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setRemoteSubscribeFallbackOption', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5337,10 +4691,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'enableLoopbackRecording', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5376,10 +4726,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'adjustLoopbackSignalVolume', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5413,10 +4759,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'getLoopbackRecordingVolume', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5447,10 +4789,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'enableInEarMonitoring', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5487,10 +4825,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setInEarMonitoringVolume', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5524,10 +4858,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'loadExtensionProvider', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5563,10 +4893,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setExtensionProviderProperty', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - 
setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5604,10 +4930,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'registerExtension', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5645,10 +4967,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'enableExtension', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5688,10 +5006,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setExtensionProperty', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5733,10 +5047,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'getExtensionProperty', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5778,10 +5088,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setCameraCapturerConfiguration', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5832,10 +5138,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'destroyCustomVideoTrack', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5869,10 +5171,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'destroyCustomEncodedVideoTrack', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5906,10 +5204,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'switchCamera', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5940,10 +5234,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'isCameraZoomSupported', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -5974,10 +5264,6 @@ void 
rtcEngineSmokeTestCases() { testWidgets( 'isCameraFaceDetectSupported', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6008,10 +5294,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'isCameraTorchSupported', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6042,10 +5324,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'isCameraFocusSupported', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6076,10 +5354,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'isCameraAutoFocusFaceModeSupported', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6111,10 +5385,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setCameraZoomFactor', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6148,10 +5418,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'enableFaceDetection', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6185,10 +5451,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'getCameraMaxZoomFactor', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6219,10 +5481,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setCameraFocusPositionInPreview', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6259,10 +5517,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setCameraTorchOn', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6296,10 +5550,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setCameraAutoFocusFaceModeEnabled', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = 
irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6334,10 +5584,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'isCameraExposurePositionSupported', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6369,10 +5615,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setCameraExposurePosition', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6408,10 +5650,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'isCameraAutoExposureFaceModeSupported', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6443,10 +5681,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setCameraAutoExposureFaceModeEnabled', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6481,10 +5715,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setDefaultAudioRouteToSpeakerphone', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6519,10 +5749,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setEnableSpeakerphone', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6556,10 +5782,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'isSpeakerphoneEnabled', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6590,10 +5812,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'getScreenCaptureSources', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6641,10 +5859,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setAudioSessionOperationRestriction', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = 
const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6681,10 +5895,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'startScreenCaptureByDisplayId', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6757,10 +5967,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'startScreenCaptureByScreenRect', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6842,10 +6048,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'getAudioDeviceInfo', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6876,10 +6078,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'startScreenCaptureByWindowId', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6952,10 +6150,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setScreenCaptureContentHint', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -6987,12 +6181,8 @@ void rtcEngineSmokeTestCases() { ); testWidgets( - 'setScreenCaptureScenario', + 'updateScreenCaptureRegion', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -7003,14 +6193,22 @@ void rtcEngineSmokeTestCases() { )); try { - const ScreenScenarioType screenScenario = - ScreenScenarioType.screenScenarioDocument; - await rtcEngine.setScreenCaptureScenario( - screenScenario, + const int regionRectX = 10; + const int regionRectY = 10; + const int regionRectWidth = 10; + const int regionRectHeight = 10; + const Rectangle regionRect = Rectangle( + x: regionRectX, + y: regionRectY, + width: regionRectWidth, + height: regionRectHeight, + ); + await rtcEngine.updateScreenCaptureRegion( + regionRect, ); } catch (e) { if (e is! 
AgoraRtcException) { - debugPrint('[setScreenCaptureScenario] error: ${e.toString()}'); + debugPrint('[updateScreenCaptureRegion] error: ${e.toString()}'); rethrow; } @@ -7025,58 +6223,8 @@ void rtcEngineSmokeTestCases() { ); testWidgets( - 'updateScreenCaptureRegion', + 'updateScreenCaptureParameters', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - - try { - const int regionRectX = 10; - const int regionRectY = 10; - const int regionRectWidth = 10; - const int regionRectHeight = 10; - const Rectangle regionRect = Rectangle( - x: regionRectX, - y: regionRectY, - width: regionRectWidth, - height: regionRectHeight, - ); - await rtcEngine.updateScreenCaptureRegion( - regionRect, - ); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint('[updateScreenCaptureRegion] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. - rethrow; - } - } - - await rtcEngine.release(); - }, - ); - - testWidgets( - 'updateScreenCaptureParameters', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -7136,10 +6284,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'startScreenCapture', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -7204,12 +6348,101 @@ void rtcEngineSmokeTestCases() { ); testWidgets( - 'updateScreenCapture', + 'startScreenCaptureBySourceType', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + try { + const VideoSourceType sourceType = + VideoSourceType.videoSourceCameraPrimary; + const int screenRectX = 10; + const int screenRectY = 10; + const int screenRectWidth = 10; + const int screenRectHeight = 10; + const Rectangle configScreenRect = Rectangle( + x: screenRectX, + y: screenRectY, + width: screenRectWidth, + height: screenRectHeight, + ); + const int dimensionsWidth = 10; + const int dimensionsHeight = 10; + const VideoDimensions paramsDimensions = VideoDimensions( + width: dimensionsWidth, + height: dimensionsHeight, + ); + const int paramsFrameRate = 10; + const int paramsBitrate = 10; + const bool paramsCaptureMouseCursor = true; + const bool paramsWindowFocus = true; + const List paramsExcludeWindowList = []; + const int paramsExcludeWindowCount = 10; + const int paramsHighLightWidth = 10; + const int paramsHighLightColor = 10; + const bool paramsEnableHighLight = true; + 
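// Editorial note: two things worth calling out about the generated tests in
// this hunk. First, every test used to build its own IrisTester debug engine
// (the removed `final irisTester = IrisTester(); ...` preambles throughout);
// after this change that wiring is presumably done once by the shared test
// harness rather than per test. Second, the constants assembled here and in
// the other tests are placeholders (10, true, "hello") chosen only to exercise
// parameter serialization over the fake method channel; they are not
// meaningful capture settings.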
const ScreenCaptureParameters configParams = ScreenCaptureParameters( + dimensions: paramsDimensions, + frameRate: paramsFrameRate, + bitrate: paramsBitrate, + captureMouseCursor: paramsCaptureMouseCursor, + windowFocus: paramsWindowFocus, + excludeWindowList: paramsExcludeWindowList, + excludeWindowCount: paramsExcludeWindowCount, + highLightWidth: paramsHighLightWidth, + highLightColor: paramsHighLightColor, + enableHighLight: paramsEnableHighLight, + ); + const int regionRectX = 10; + const int regionRectY = 10; + const int regionRectWidth = 10; + const int regionRectHeight = 10; + const Rectangle configRegionRect = Rectangle( + x: regionRectX, + y: regionRectY, + width: regionRectWidth, + height: regionRectHeight, + ); + const bool configIsCaptureWindow = true; + const int configDisplayId = 10; + const int configWindowId = 10; + const ScreenCaptureConfiguration config = ScreenCaptureConfiguration( + isCaptureWindow: configIsCaptureWindow, + displayId: configDisplayId, + screenRect: configScreenRect, + windowId: configWindowId, + params: configParams, + regionRect: configRegionRect, + ); + await rtcEngine.startScreenCaptureBySourceType( + sourceType: sourceType, + config: config, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[startScreenCaptureBySourceType] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngine.release(); + }, + ); + testWidgets( + 'updateScreenCapture', + (WidgetTester tester) async { String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -7274,12 +6507,72 @@ void rtcEngineSmokeTestCases() { ); testWidgets( - 'stopScreenCapture', + 'queryScreenCaptureCapability', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + try { + await rtcEngine.queryScreenCaptureCapability(); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[queryScreenCaptureCapability] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngine.release(); + }, + ); + + testWidgets( + 'setScreenCaptureScenario', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + try { + const ScreenScenarioType screenScenario = + ScreenScenarioType.screenScenarioDocument; + await rtcEngine.setScreenCaptureScenario( + screenScenario, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[setScreenCaptureScenario] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
+ rethrow; + } + } + + await rtcEngine.release(); + }, + ); + testWidgets( + 'stopScreenCapture', + (WidgetTester tester) async { String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -7308,12 +6601,42 @@ void rtcEngineSmokeTestCases() { ); testWidgets( - 'getCallId', + 'stopScreenCaptureBySourceType', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + try { + const VideoSourceType sourceType = + VideoSourceType.videoSourceCameraPrimary; + await rtcEngine.stopScreenCaptureBySourceType( + sourceType, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[stopScreenCaptureBySourceType] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + await rtcEngine.release(); + }, + ); + + testWidgets( + 'getCallId', + (WidgetTester tester) async { String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -7344,10 +6667,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'rate', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -7385,10 +6704,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'complain', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -7424,10 +6739,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'startRtmpStreamWithoutTranscoding', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -7462,10 +6773,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'startRtmpStreamWithTranscoding', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -7552,10 +6859,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'updateRtmpTranscoding', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -7640,10 +6943,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'stopRtmpStream', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', 
defaultValue: ''); @@ -7677,10 +6976,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'startLocalVideoTranscoder', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -7763,10 +7058,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'updateLocalTranscoderConfiguration', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -7850,10 +7141,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'stopLocalVideoTranscoder', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -7882,12 +7169,8 @@ void rtcEngineSmokeTestCases() { ); testWidgets( - 'startPrimaryCameraCapture', + 'startCameraCapture', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -7898,6 +7181,8 @@ void rtcEngineSmokeTestCases() { )); try { + const VideoSourceType sourceType = + VideoSourceType.videoSourceCameraPrimary; const CameraDirection configCameraDirection = CameraDirection.cameraRear; const int formatWidth = 10; @@ -7916,12 +7201,13 @@ void rtcEngineSmokeTestCases() { format: configFormat, followEncodeDimensionRatio: configFollowEncodeDimensionRatio, ); - await rtcEngine.startPrimaryCameraCapture( - config, + await rtcEngine.startCameraCapture( + sourceType: sourceType, + config: config, ); } catch (e) { if (e is! 
AgoraRtcException) { - debugPrint('[startPrimaryCameraCapture] error: ${e.toString()}'); + debugPrint('[startCameraCapture] error: ${e.toString()}'); rethrow; } @@ -7936,380 +7222,26 @@ void rtcEngineSmokeTestCases() { ); testWidgets( - 'startSecondaryCameraCapture', + 'stopCameraCapture', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - - try { - const CameraDirection configCameraDirection = - CameraDirection.cameraRear; - const int formatWidth = 10; - const int formatHeight = 10; - const int formatFps = 10; - const VideoFormat configFormat = VideoFormat( - width: formatWidth, - height: formatHeight, - fps: formatFps, - ); - const String configDeviceId = "hello"; - const bool configFollowEncodeDimensionRatio = true; - const CameraCapturerConfiguration config = CameraCapturerConfiguration( - cameraDirection: configCameraDirection, - deviceId: configDeviceId, - format: configFormat, - followEncodeDimensionRatio: configFollowEncodeDimensionRatio, - ); - await rtcEngine.startSecondaryCameraCapture( - config, - ); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint('[startSecondaryCameraCapture] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. - rethrow; - } - } - - await rtcEngine.release(); - }, - ); - - testWidgets( - 'stopPrimaryCameraCapture', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - - try { - await rtcEngine.stopPrimaryCameraCapture(); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint('[stopPrimaryCameraCapture] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. - rethrow; - } - } - - await rtcEngine.release(); - }, - ); - - testWidgets( - 'stopSecondaryCameraCapture', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - - try { - await rtcEngine.stopSecondaryCameraCapture(); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint('[stopSecondaryCameraCapture] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. 
- rethrow; - } - } - - await rtcEngine.release(); - }, - ); - - testWidgets( - 'setCameraDeviceOrientation', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - - try { - const VideoSourceType type = VideoSourceType.videoSourceCameraPrimary; - const VideoOrientation orientation = VideoOrientation.videoOrientation0; - await rtcEngine.setCameraDeviceOrientation( - type: type, - orientation: orientation, - ); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint('[setCameraDeviceOrientation] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. - rethrow; - } - } - - await rtcEngine.release(); - }, - ); - - testWidgets( - 'setScreenCaptureOrientation', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - - try { - const VideoSourceType type = VideoSourceType.videoSourceCameraPrimary; - const VideoOrientation orientation = VideoOrientation.videoOrientation0; - await rtcEngine.setScreenCaptureOrientation( - type: type, - orientation: orientation, - ); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint('[setScreenCaptureOrientation] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. 
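// Editorial note: every smoke test in this file shares the guard seen above:
// non-AgoraRtcException errors are rethrown, and among SDK errors only code -4
// (ERR_NOT_SUPPORTED) is swallowed, since many camera and screen-capture APIs
// are legitimately unavailable on a given test platform. A minimal sketch of
// that pattern, using only types already referenced by this file:
Future<void> ignoreNotSupported(Future<void> Function() apiCall) async {
  try {
    await apiCall();
  } catch (e) {
    // Anything that is not an SDK exception is a real test failure.
    if (e is! AgoraRtcException) rethrow;
    // -4 maps to the native ERR_NOT_SUPPORTED; everything else is rethrown.
    if (e.code != -4) rethrow;
  }
}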
- rethrow; - } - } - - await rtcEngine.release(); - }, - ); - - testWidgets( - 'startPrimaryScreenCapture', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - - try { - const int screenRectX = 10; - const int screenRectY = 10; - const int screenRectWidth = 10; - const int screenRectHeight = 10; - const Rectangle configScreenRect = Rectangle( - x: screenRectX, - y: screenRectY, - width: screenRectWidth, - height: screenRectHeight, - ); - const int dimensionsWidth = 10; - const int dimensionsHeight = 10; - const VideoDimensions paramsDimensions = VideoDimensions( - width: dimensionsWidth, - height: dimensionsHeight, - ); - const int paramsFrameRate = 10; - const int paramsBitrate = 10; - const bool paramsCaptureMouseCursor = true; - const bool paramsWindowFocus = true; - const List paramsExcludeWindowList = []; - const int paramsExcludeWindowCount = 10; - const int paramsHighLightWidth = 10; - const int paramsHighLightColor = 10; - const bool paramsEnableHighLight = true; - const ScreenCaptureParameters configParams = ScreenCaptureParameters( - dimensions: paramsDimensions, - frameRate: paramsFrameRate, - bitrate: paramsBitrate, - captureMouseCursor: paramsCaptureMouseCursor, - windowFocus: paramsWindowFocus, - excludeWindowList: paramsExcludeWindowList, - excludeWindowCount: paramsExcludeWindowCount, - highLightWidth: paramsHighLightWidth, - highLightColor: paramsHighLightColor, - enableHighLight: paramsEnableHighLight, - ); - const int regionRectX = 10; - const int regionRectY = 10; - const int regionRectWidth = 10; - const int regionRectHeight = 10; - const Rectangle configRegionRect = Rectangle( - x: regionRectX, - y: regionRectY, - width: regionRectWidth, - height: regionRectHeight, - ); - const bool configIsCaptureWindow = true; - const int configDisplayId = 10; - const int configWindowId = 10; - const ScreenCaptureConfiguration config = ScreenCaptureConfiguration( - isCaptureWindow: configIsCaptureWindow, - displayId: configDisplayId, - screenRect: configScreenRect, - windowId: configWindowId, - params: configParams, - regionRect: configRegionRect, - ); - await rtcEngine.startPrimaryScreenCapture( - config, - ); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint('[startPrimaryScreenCapture] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. 
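// Editorial note: the hunks around here delete the start/stopPrimary- and
// Secondary- capture tests because 4.2.0 replaces those paired entry points
// with single calls parameterized by VideoSourceType (see the
// startCameraCapture / startScreenCaptureBySourceType tests added above).
// A sketch of the migration; videoSourceScreenPrimary is my assumption for
// the screen case (the generated test simply reuses videoSourceCameraPrimary
// as a placeholder), and the config values are illustrative only:
Future<void> startPrimaryScreenShare(RtcEngine rtcEngine) async {
  const ScreenCaptureConfiguration config =
      ScreenCaptureConfiguration(isCaptureWindow: false, displayId: 0);
  // Before 4.2.0: await rtcEngine.startPrimaryScreenCapture(config);
  await rtcEngine.startScreenCaptureBySourceType(
    sourceType: VideoSourceType.videoSourceScreenPrimary,
    config: config,
  );
  // Before 4.2.0: await rtcEngine.stopPrimaryScreenCapture();
  await rtcEngine.stopScreenCaptureBySourceType(
      VideoSourceType.videoSourceScreenPrimary);
}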
- rethrow; - } - } - - await rtcEngine.release(); - }, - ); - - testWidgets( - 'startSecondaryScreenCapture', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - - try { - const int screenRectX = 10; - const int screenRectY = 10; - const int screenRectWidth = 10; - const int screenRectHeight = 10; - const Rectangle configScreenRect = Rectangle( - x: screenRectX, - y: screenRectY, - width: screenRectWidth, - height: screenRectHeight, - ); - const int dimensionsWidth = 10; - const int dimensionsHeight = 10; - const VideoDimensions paramsDimensions = VideoDimensions( - width: dimensionsWidth, - height: dimensionsHeight, - ); - const int paramsFrameRate = 10; - const int paramsBitrate = 10; - const bool paramsCaptureMouseCursor = true; - const bool paramsWindowFocus = true; - const List paramsExcludeWindowList = []; - const int paramsExcludeWindowCount = 10; - const int paramsHighLightWidth = 10; - const int paramsHighLightColor = 10; - const bool paramsEnableHighLight = true; - const ScreenCaptureParameters configParams = ScreenCaptureParameters( - dimensions: paramsDimensions, - frameRate: paramsFrameRate, - bitrate: paramsBitrate, - captureMouseCursor: paramsCaptureMouseCursor, - windowFocus: paramsWindowFocus, - excludeWindowList: paramsExcludeWindowList, - excludeWindowCount: paramsExcludeWindowCount, - highLightWidth: paramsHighLightWidth, - highLightColor: paramsHighLightColor, - enableHighLight: paramsEnableHighLight, - ); - const int regionRectX = 10; - const int regionRectY = 10; - const int regionRectWidth = 10; - const int regionRectHeight = 10; - const Rectangle configRegionRect = Rectangle( - x: regionRectX, - y: regionRectY, - width: regionRectWidth, - height: regionRectHeight, - ); - const bool configIsCaptureWindow = true; - const int configDisplayId = 10; - const int configWindowId = 10; - const ScreenCaptureConfiguration config = ScreenCaptureConfiguration( - isCaptureWindow: configIsCaptureWindow, - displayId: configDisplayId, - screenRect: configScreenRect, - windowId: configWindowId, - params: configParams, - regionRect: configRegionRect, - ); - await rtcEngine.startSecondaryScreenCapture( - config, + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + try { + const VideoSourceType sourceType = + VideoSourceType.videoSourceCameraPrimary; + await rtcEngine.stopCameraCapture( + sourceType, ); } catch (e) { if (e is! 
AgoraRtcException) { - debugPrint('[startSecondaryScreenCapture] error: ${e.toString()}'); + debugPrint('[stopCameraCapture] error: ${e.toString()}'); rethrow; } @@ -8324,12 +7256,8 @@ void rtcEngineSmokeTestCases() { ); testWidgets( - 'stopPrimaryScreenCapture', + 'setCameraDeviceOrientation', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -8340,10 +7268,15 @@ void rtcEngineSmokeTestCases() { )); try { - await rtcEngine.stopPrimaryScreenCapture(); + const VideoSourceType type = VideoSourceType.videoSourceCameraPrimary; + const VideoOrientation orientation = VideoOrientation.videoOrientation0; + await rtcEngine.setCameraDeviceOrientation( + type: type, + orientation: orientation, + ); } catch (e) { if (e is! AgoraRtcException) { - debugPrint('[stopPrimaryScreenCapture] error: ${e.toString()}'); + debugPrint('[setCameraDeviceOrientation] error: ${e.toString()}'); rethrow; } @@ -8358,12 +7291,8 @@ void rtcEngineSmokeTestCases() { ); testWidgets( - 'stopSecondaryScreenCapture', + 'setScreenCaptureOrientation', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -8374,10 +7303,15 @@ void rtcEngineSmokeTestCases() { )); try { - await rtcEngine.stopSecondaryScreenCapture(); + const VideoSourceType type = VideoSourceType.videoSourceCameraPrimary; + const VideoOrientation orientation = VideoOrientation.videoOrientation0; + await rtcEngine.setScreenCaptureOrientation( + type: type, + orientation: orientation, + ); } catch (e) { if (e is! 
AgoraRtcException) { - debugPrint('[stopSecondaryScreenCapture] error: ${e.toString()}'); + debugPrint('[setScreenCaptureOrientation] error: ${e.toString()}'); rethrow; } @@ -8394,10 +7328,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'getConnectionState', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -8428,10 +7358,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'registerEventHandler', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -8503,7 +7429,6 @@ void rtcEngineSmokeTestCases() { (RtcConnection connection, int remoteUid, int state) {}, onUserEnableLocalVideo: (RtcConnection connection, int remoteUid, bool enabled) {}, - onApiCallExecuted: (ErrorCodeType err, String api, String result) {}, onLocalAudioStats: (RtcConnection connection, LocalAudioStats stats) {}, onRemoteAudioStats: @@ -8516,7 +7441,7 @@ void rtcEngineSmokeTestCases() { onCameraFocusAreaChanged: (int x, int y, int width, int height) {}, onCameraExposureAreaChanged: (int x, int y, int width, int height) {}, onFacePositionChanged: (int imageWidth, int imageHeight, - Rectangle vecRectangle, int vecDistance, int numFaces) {}, + List vecRectangle, List vecDistance, int numFaces) {}, onVideoStopped: () {}, onAudioMixingStateChanged: (AudioMixingStateType state, AudioMixingReasonType reason) {}, @@ -8617,6 +7542,12 @@ void rtcEngineSmokeTestCases() { (String provider, String extension, int error, String message) {}, onUserAccountUpdated: (RtcConnection connection, int remoteUid, String userAccount) {}, + onLocalVideoTranscoderError: + (TranscodingVideoStream stream, VideoTranscoderError error) {}, + onVideoRenderingTracingResult: (RtcConnection connection, + int uid, + MediaTraceEvent currentEvent, + VideoRenderingTracingInfo tracingInfo) {}, ); rtcEngine.registerEventHandler( eventHandler, @@ -8640,10 +7571,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'unregisterEventHandler', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -8715,7 +7642,6 @@ void rtcEngineSmokeTestCases() { (RtcConnection connection, int remoteUid, int state) {}, onUserEnableLocalVideo: (RtcConnection connection, int remoteUid, bool enabled) {}, - onApiCallExecuted: (ErrorCodeType err, String api, String result) {}, onLocalAudioStats: (RtcConnection connection, LocalAudioStats stats) {}, onRemoteAudioStats: @@ -8728,7 +7654,7 @@ void rtcEngineSmokeTestCases() { onCameraFocusAreaChanged: (int x, int y, int width, int height) {}, onCameraExposureAreaChanged: (int x, int y, int width, int height) {}, onFacePositionChanged: (int imageWidth, int imageHeight, - Rectangle vecRectangle, int vecDistance, int numFaces) {}, + List vecRectangle, List vecDistance, int numFaces) {}, onVideoStopped: () {}, onAudioMixingStateChanged: (AudioMixingStateType state, AudioMixingReasonType reason) {}, @@ -8829,6 +7755,12 @@ void rtcEngineSmokeTestCases() { (String provider, 
String extension, int error, String message) {}, onUserAccountUpdated: (RtcConnection connection, int remoteUid, String userAccount) {}, + onLocalVideoTranscoderError: + (TranscodingVideoStream stream, VideoTranscoderError error) {}, + onVideoRenderingTracingResult: (RtcConnection connection, + int uid, + MediaTraceEvent currentEvent, + VideoRenderingTracingInfo tracingInfo) {}, ); rtcEngine.unregisterEventHandler( eventHandler, @@ -8852,10 +7784,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setRemoteUserPriority', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -8891,10 +7819,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setEncryptionMode', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -8928,10 +7852,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setEncryptionSecret', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -8965,10 +7885,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'enableEncryption', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -9011,10 +7927,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'sendStreamMessage', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -9052,10 +7964,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'addVideoWatermark', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -9128,10 +8036,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'clearVideoWatermarks', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -9162,10 +8066,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'pauseAudio', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -9196,10 +8096,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'resumeAudio', (WidgetTester tester) async { - final irisTester = IrisTester(); - final 
debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -9230,10 +8126,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'enableWebSdkInteroperability', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -9267,10 +8159,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'sendCustomReportMessage', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -9312,10 +8200,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'registerMediaMetadataObserver', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -9353,10 +8237,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'unregisterMediaMetadataObserver', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -9395,10 +8275,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'startAudioFrameDump', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -9444,10 +8320,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'stopAudioFrameDump', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -9483,12 +8355,43 @@ void rtcEngineSmokeTestCases() { ); testWidgets( - 'registerLocalUserAccount', + 'setAINSMode', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + try { + const bool enabled = true; + const AudioAinsMode mode = AudioAinsMode.ainsModeBalanced; + await rtcEngine.setAINSMode( + enabled: enabled, + mode: mode, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[setAINSMode] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
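// Editorial note: setAINSMode is newly covered here with native SDK 4.2.0; it
// toggles AI noise suppression on the captured audio. A sketch mirroring the
// added test (ainsModeBalanced is taken from the test above; the existence of
// other AudioAinsMode values is an assumption on my part):
Future<void> enableAiNoiseSuppression(RtcEngine rtcEngine) async {
  await rtcEngine.setAINSMode(
    enabled: true,
    // Balanced mode trades some suppression strength for voice fidelity.
    mode: AudioAinsMode.ainsModeBalanced,
  );
}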
+ rethrow; + } + } + + await rtcEngine.release(); + }, + ); + testWidgets( + 'registerLocalUserAccount', + (WidgetTester tester) async { String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -9524,10 +8427,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'joinChannelWithUserAccount', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -9557,15 +8456,12 @@ void rtcEngineSmokeTestCases() { const bool optionsPublishScreenTrack = true; const bool optionsPublishSecondaryScreenTrack = true; const bool optionsPublishCustomAudioTrack = true; - const int optionsPublishCustomAudioSourceId = 10; - const bool optionsPublishCustomAudioTrackEnableAec = true; - const bool optionsPublishDirectCustomAudioTrack = true; - const bool optionsPublishCustomAudioTrackAec = true; + const int optionsPublishCustomAudioTrackId = 10; const bool optionsPublishCustomVideoTrack = true; const bool optionsPublishEncodedVideoTrack = true; const bool optionsPublishMediaPlayerAudioTrack = true; const bool optionsPublishMediaPlayerVideoTrack = true; - const bool optionsPublishTrancodedVideoTrack = true; + const bool optionsPublishTranscodedVideoTrack = true; const bool optionsAutoSubscribeAudio = true; const bool optionsAutoSubscribeVideo = true; const bool optionsEnableAudioRecordingOrPlayout = true; @@ -9587,16 +8483,12 @@ void rtcEngineSmokeTestCases() { publishScreenTrack: optionsPublishScreenTrack, publishSecondaryScreenTrack: optionsPublishSecondaryScreenTrack, publishCustomAudioTrack: optionsPublishCustomAudioTrack, - publishCustomAudioSourceId: optionsPublishCustomAudioSourceId, - publishCustomAudioTrackEnableAec: - optionsPublishCustomAudioTrackEnableAec, - publishDirectCustomAudioTrack: optionsPublishDirectCustomAudioTrack, - publishCustomAudioTrackAec: optionsPublishCustomAudioTrackAec, + publishCustomAudioTrackId: optionsPublishCustomAudioTrackId, publishCustomVideoTrack: optionsPublishCustomVideoTrack, publishEncodedVideoTrack: optionsPublishEncodedVideoTrack, publishMediaPlayerAudioTrack: optionsPublishMediaPlayerAudioTrack, publishMediaPlayerVideoTrack: optionsPublishMediaPlayerVideoTrack, - publishTrancodedVideoTrack: optionsPublishTrancodedVideoTrack, + publishTranscodedVideoTrack: optionsPublishTranscodedVideoTrack, autoSubscribeAudio: optionsAutoSubscribeAudio, autoSubscribeVideo: optionsAutoSubscribeVideo, enableAudioRecordingOrPlayout: optionsEnableAudioRecordingOrPlayout, @@ -9639,10 +8531,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'joinChannelWithUserAccountEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -9672,15 +8560,12 @@ void rtcEngineSmokeTestCases() { const bool optionsPublishScreenTrack = true; const bool optionsPublishSecondaryScreenTrack = true; const bool optionsPublishCustomAudioTrack = true; - const int optionsPublishCustomAudioSourceId = 10; - const bool optionsPublishCustomAudioTrackEnableAec = true; - const bool optionsPublishDirectCustomAudioTrack = true; - const bool optionsPublishCustomAudioTrackAec = true; + const int optionsPublishCustomAudioTrackId = 10; const bool 
optionsPublishCustomVideoTrack = true; const bool optionsPublishEncodedVideoTrack = true; const bool optionsPublishMediaPlayerAudioTrack = true; const bool optionsPublishMediaPlayerVideoTrack = true; - const bool optionsPublishTrancodedVideoTrack = true; + const bool optionsPublishTranscodedVideoTrack = true; const bool optionsAutoSubscribeAudio = true; const bool optionsAutoSubscribeVideo = true; const bool optionsEnableAudioRecordingOrPlayout = true; @@ -9702,16 +8587,12 @@ void rtcEngineSmokeTestCases() { publishScreenTrack: optionsPublishScreenTrack, publishSecondaryScreenTrack: optionsPublishSecondaryScreenTrack, publishCustomAudioTrack: optionsPublishCustomAudioTrack, - publishCustomAudioSourceId: optionsPublishCustomAudioSourceId, - publishCustomAudioTrackEnableAec: - optionsPublishCustomAudioTrackEnableAec, - publishDirectCustomAudioTrack: optionsPublishDirectCustomAudioTrack, - publishCustomAudioTrackAec: optionsPublishCustomAudioTrackAec, + publishCustomAudioTrackId: optionsPublishCustomAudioTrackId, publishCustomVideoTrack: optionsPublishCustomVideoTrack, publishEncodedVideoTrack: optionsPublishEncodedVideoTrack, publishMediaPlayerAudioTrack: optionsPublishMediaPlayerAudioTrack, publishMediaPlayerVideoTrack: optionsPublishMediaPlayerVideoTrack, - publishTrancodedVideoTrack: optionsPublishTrancodedVideoTrack, + publishTranscodedVideoTrack: optionsPublishTranscodedVideoTrack, autoSubscribeAudio: optionsAutoSubscribeAudio, autoSubscribeVideo: optionsAutoSubscribeVideo, enableAudioRecordingOrPlayout: optionsEnableAudioRecordingOrPlayout, @@ -9754,10 +8635,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'getUserInfoByUserAccount', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -9791,10 +8668,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'getUserInfoByUid', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -9828,10 +8701,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'stopChannelMediaRelay', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -9862,10 +8731,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'pauseAllChannelMediaRelay', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -9896,10 +8761,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'resumeAllChannelMediaRelay', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -9930,10 +8791,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setDirectCdnStreamingAudioConfiguration', 
(WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -9968,10 +8825,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setDirectCdnStreamingVideoConfiguration', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -10040,10 +8893,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'startDirectCdnStreaming', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -10102,10 +8951,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'stopDirectCdnStreaming', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -10136,10 +8981,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'updateDirectCdnStreamingMediaOptions', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -10190,10 +9031,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'startRhythmPlayer', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -10236,10 +9073,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'stopRhythmPlayer', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -10270,10 +9103,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'configRhythmPlayer', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -10312,10 +9141,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'takeSnapshot', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -10351,10 +9176,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'enableContentInspect', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - 
setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr);
-
       String engineAppId = const String.fromEnvironment('TEST_APP_ID',
           defaultValue: '');
@@ -10397,10 +9218,6 @@ void rtcEngineSmokeTestCases() {
   testWidgets(
     'adjustCustomAudioPublishVolume',
     (WidgetTester tester) async {
-      final irisTester = IrisTester();
-      final debugApiEngineIntPtr = irisTester.createDebugApiEngine();
-      setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr);
-
       String engineAppId = const String.fromEnvironment('TEST_APP_ID',
           defaultValue: '');
@@ -10411,10 +9228,10 @@ void rtcEngineSmokeTestCases() {
       ));

       try {
-        const int sourceId = 10;
+        const int trackId = 10;
         const int volume = 10;
         await rtcEngine.adjustCustomAudioPublishVolume(
-          sourceId: sourceId,
+          trackId: trackId,
           volume: volume,
         );
       } catch (e) {
@@ -10436,10 +9253,6 @@ void rtcEngineSmokeTestCases() {
   testWidgets(
     'adjustCustomAudioPlayoutVolume',
     (WidgetTester tester) async {
-      final irisTester = IrisTester();
-      final debugApiEngineIntPtr = irisTester.createDebugApiEngine();
-      setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr);
-
       String engineAppId = const String.fromEnvironment('TEST_APP_ID',
           defaultValue: '');
@@ -10450,10 +9263,10 @@ void rtcEngineSmokeTestCases() {
       ));

       try {
-        const int sourceId = 10;
+        const int trackId = 10;
         const int volume = 10;
         await rtcEngine.adjustCustomAudioPlayoutVolume(
-          sourceId: sourceId,
+          trackId: trackId,
           volume: volume,
         );
       } catch (e) {
@@ -10475,10 +9288,6 @@ void rtcEngineSmokeTestCases() {
   testWidgets(
     'setCloudProxy',
     (WidgetTester tester) async {
-      final irisTester = IrisTester();
-      final debugApiEngineIntPtr = irisTester.createDebugApiEngine();
-      setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr);
-
       String engineAppId = const String.fromEnvironment('TEST_APP_ID',
           defaultValue: '');
@@ -10512,10 +9321,6 @@ void rtcEngineSmokeTestCases() {
   testWidgets(
     'setAdvancedAudioOptions',
     (WidgetTester tester) async {
-      final irisTester = IrisTester();
-      final debugApiEngineIntPtr = irisTester.createDebugApiEngine();
-      setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr);
-
       String engineAppId = const String.fromEnvironment('TEST_APP_ID',
           defaultValue: '');
@@ -10554,10 +9359,6 @@ void rtcEngineSmokeTestCases() {
   testWidgets(
     'setAVSyncSource',
     (WidgetTester tester) async {
-      final irisTester = IrisTester();
-      final debugApiEngineIntPtr = irisTester.createDebugApiEngine();
-      setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr);
-
       String engineAppId = const String.fromEnvironment('TEST_APP_ID',
           defaultValue: '');
@@ -10593,10 +9394,6 @@ void rtcEngineSmokeTestCases() {
   testWidgets(
     'enableVideoImageSource',
     (WidgetTester tester) async {
-      final irisTester = IrisTester();
-      final debugApiEngineIntPtr = irisTester.createDebugApiEngine();
-      setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr);
-
       String engineAppId = const String.fromEnvironment('TEST_APP_ID',
           defaultValue: '');
@@ -10640,10 +9437,6 @@ void rtcEngineSmokeTestCases() {
   testWidgets(
     'getCurrentMonotonicTimeInMs',
     (WidgetTester tester) async {
-      final irisTester = IrisTester();
-      final debugApiEngineIntPtr = irisTester.createDebugApiEngine();
-      setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr);
-
       String engineAppId = const String.fromEnvironment('TEST_APP_ID',
           defaultValue: '');
@@ -10674,10 +9467,6 @@ void rtcEngineSmokeTestCases() {
   testWidgets(
     'enableWirelessAccelerate',
     (WidgetTester tester) async {
-      final irisTester = IrisTester();
-      final debugApiEngineIntPtr = irisTester.createDebugApiEngine();
-      setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr);
-
       String engineAppId = const String.fromEnvironment('TEST_APP_ID',
           defaultValue: '');
@@ -10711,10 +9500,6 @@ void rtcEngineSmokeTestCases() {
   testWidgets(
     'getNetworkType',
     (WidgetTester tester) async {
-      final irisTester = IrisTester();
-      final debugApiEngineIntPtr = irisTester.createDebugApiEngine();
-      setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr);
-
       String engineAppId = const String.fromEnvironment('TEST_APP_ID',
           defaultValue: '');
@@ -10743,12 +9528,8 @@ void rtcEngineSmokeTestCases() {
   );

   testWidgets(
-    'getAudioDeviceManager',
+    'setParameters',
     (WidgetTester tester) async {
-      final irisTester = IrisTester();
-      final debugApiEngineIntPtr = irisTester.createDebugApiEngine();
-      setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr);
-
       String engineAppId = const String.fromEnvironment('TEST_APP_ID',
           defaultValue: '');
@@ -10759,10 +9540,13 @@ void rtcEngineSmokeTestCases() {
       ));

       try {
-        rtcEngine.getAudioDeviceManager();
+        const String parameters = "hello";
+        await rtcEngine.setParameters(
+          parameters,
+        );
       } catch (e) {
         if (e is! AgoraRtcException) {
-          debugPrint('[getAudioDeviceManager] error: ${e.toString()}');
+          debugPrint('[setParameters] error: ${e.toString()}');
           rethrow;
         }
@@ -10777,12 +9561,8 @@ void rtcEngineSmokeTestCases() {
   );

   testWidgets(
-    'getVideoDeviceManager',
+    'startMediaRenderingTracing',
     (WidgetTester tester) async {
-      final irisTester = IrisTester();
-      final debugApiEngineIntPtr = irisTester.createDebugApiEngine();
-      setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr);
-
       String engineAppId = const String.fromEnvironment('TEST_APP_ID',
           defaultValue: '');
@@ -10793,10 +9573,10 @@ void rtcEngineSmokeTestCases() {
       ));

       try {
-        rtcEngine.getVideoDeviceManager();
+        await rtcEngine.startMediaRenderingTracing();
       } catch (e) {
         if (e is! AgoraRtcException) {
-          debugPrint('[getVideoDeviceManager] error: ${e.toString()}');
+          debugPrint('[startMediaRenderingTracing] error: ${e.toString()}');
           rethrow;
         }
@@ -10811,12 +9591,38 @@ void rtcEngineSmokeTestCases() {
   );

   testWidgets(
-    'getMusicContentCenter',
+    'enableInstantMediaRendering',
     (WidgetTester tester) async {
-      final irisTester = IrisTester();
-      final debugApiEngineIntPtr = irisTester.createDebugApiEngine();
-      setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr);
+      String engineAppId = const String.fromEnvironment('TEST_APP_ID',
+          defaultValue: '');
+
+      RtcEngine rtcEngine = createAgoraRtcEngine();
+      await rtcEngine.initialize(RtcEngineContext(
+        appId: engineAppId,
+        areaCode: AreaCode.areaCodeGlob.value(),
+      ));
+
+      try {
+        await rtcEngine.enableInstantMediaRendering();
+      } catch (e) {
+        if (e is! AgoraRtcException) {
+          debugPrint('[enableInstantMediaRendering] error: ${e.toString()}');
+          rethrow;
+        }
+
+        if (e.code != -4) {
+          // Only the 'not supported' error (-4) is expected here.
+          rethrow;
+        }
+      }
+
+      await rtcEngine.release();
+    },
+  );
+  testWidgets(
+    'getNtpWallTimeInMs',
+    (WidgetTester tester) async {
       String engineAppId = const String.fromEnvironment('TEST_APP_ID',
           defaultValue: '');
@@ -10827,10 +9633,10 @@ void rtcEngineSmokeTestCases() {
       ));

       try {
-        rtcEngine.getMusicContentCenter();
+        await rtcEngine.getNtpWallTimeInMs();
       } catch (e) {
         if (e is! AgoraRtcException) {
-          debugPrint('[getMusicContentCenter] error: ${e.toString()}');
+          debugPrint('[getNtpWallTimeInMs] error: ${e.toString()}');
           rethrow;
         }
@@ -10845,12 +9651,38 @@ void rtcEngineSmokeTestCases() {
   );

   testWidgets(
-    'getMediaEngine',
+    'getAudioDeviceManager',
     (WidgetTester tester) async {
-      final irisTester = IrisTester();
-      final debugApiEngineIntPtr = irisTester.createDebugApiEngine();
-      setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr);
+      String engineAppId = const String.fromEnvironment('TEST_APP_ID',
+          defaultValue: '');
+
+      RtcEngine rtcEngine = createAgoraRtcEngine();
+      await rtcEngine.initialize(RtcEngineContext(
+        appId: engineAppId,
+        areaCode: AreaCode.areaCodeGlob.value(),
+      ));
+      try {
+        rtcEngine.getAudioDeviceManager();
+      } catch (e) {
+        if (e is! AgoraRtcException) {
+          debugPrint('[getAudioDeviceManager] error: ${e.toString()}');
+          rethrow;
+        }
+
+        if (e.code != -4) {
+          // Only the 'not supported' error (-4) is expected here.
+          rethrow;
+        }
+      }
+
+      await rtcEngine.release();
+    },
+  );
+
+  testWidgets(
+    'getVideoDeviceManager',
+    (WidgetTester tester) async {
       String engineAppId = const String.fromEnvironment('TEST_APP_ID',
           defaultValue: '');
@@ -10861,10 +9693,10 @@ void rtcEngineSmokeTestCases() {
       ));

       try {
-        rtcEngine.getMediaEngine();
+        rtcEngine.getVideoDeviceManager();
       } catch (e) {
         if (e is! AgoraRtcException) {
-          debugPrint('[getMediaEngine] error: ${e.toString()}');
+          debugPrint('[getVideoDeviceManager] error: ${e.toString()}');
           rethrow;
         }
@@ -10879,12 +9711,38 @@ void rtcEngineSmokeTestCases() {
   );

   testWidgets(
-    'getMediaRecorder',
+    'getMusicContentCenter',
     (WidgetTester tester) async {
-      final irisTester = IrisTester();
-      final debugApiEngineIntPtr = irisTester.createDebugApiEngine();
-      setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr);
+      String engineAppId = const String.fromEnvironment('TEST_APP_ID',
+          defaultValue: '');
+
+      RtcEngine rtcEngine = createAgoraRtcEngine();
+      await rtcEngine.initialize(RtcEngineContext(
+        appId: engineAppId,
+        areaCode: AreaCode.areaCodeGlob.value(),
+      ));
+
+      try {
+        rtcEngine.getMusicContentCenter();
+      } catch (e) {
+        if (e is! AgoraRtcException) {
+          debugPrint('[getMusicContentCenter] error: ${e.toString()}');
+          rethrow;
+        }
+
+        if (e.code != -4) {
+          // Only the 'not supported' error (-4) is expected here.
+          rethrow;
+        }
+      }
+      await rtcEngine.release();
+    },
+  );
+
+  testWidgets(
+    'getMediaEngine',
+    (WidgetTester tester) async {
       String engineAppId = const String.fromEnvironment('TEST_APP_ID',
           defaultValue: '');
@@ -10895,10 +9753,10 @@ void rtcEngineSmokeTestCases() {
       ));

       try {
-        rtcEngine.getMediaRecorder();
+        rtcEngine.getMediaEngine();
       } catch (e) {
         if (e is!
AgoraRtcException) { - debugPrint('[getMediaRecorder] error: ${e.toString()}'); + debugPrint('[getMediaEngine] error: ${e.toString()}'); rethrow; } @@ -10915,10 +9773,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'getLocalSpatialAudioEngine', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -10949,10 +9803,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'sendMetaData', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -10998,10 +9848,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'setMaxMetadataSize', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -11035,10 +9881,6 @@ void rtcEngineSmokeTestCases() { testWidgets( 'unregisterAudioEncodedFrameObserver', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -11077,50 +9919,9 @@ void rtcEngineSmokeTestCases() { }, ); - testWidgets( - 'setParameters', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - - try { - const String parameters = "hello"; - await rtcEngine.setParameters( - parameters, - ); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint('[setParameters] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. 
- rethrow; - } - } - - await rtcEngine.release(); - }, - ); - testWidgets( 'getNativeHandle', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); diff --git a/test_shard/fake_test_app/integration_test/generated/rtcengine_metadataobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/rtcengine_metadataobserver_testcases.generated.dart index 147abfa14..2c94a0225 100644 --- a/test_shard/fake_test_app/integration_test/generated/rtcengine_metadataobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/rtcengine_metadataobserver_testcases.generated.dart @@ -10,14 +10,10 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { testWidgets( 'onMetadataReceived', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', diff --git a/test_shard/fake_test_app/integration_test/generated/rtcengine_rtcengineeventhandler_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/rtcengine_rtcengineeventhandler_testcases.generated.dart index fffa151ca..8fe926ca8 100644 --- a/test_shard/fake_test_app/integration_test/generated/rtcengine_rtcengineeventhandler_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/rtcengine_rtcengineeventhandler_testcases.generated.dart @@ -10,14 +10,10 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { testWidgets( 'onJoinChannelSuccess', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -77,10 +73,6 @@ void generatedTestCases() { testWidgets( 'onRejoinChannelSuccess', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -140,10 +132,6 @@ void generatedTestCases() { testWidgets( 'onProxyConnected', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -205,10 +193,6 @@ void generatedTestCases() { testWidgets( 'onError', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = 
irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -263,10 +247,6 @@ void generatedTestCases() { testWidgets( 'onAudioQuality', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -333,10 +313,6 @@ void generatedTestCases() { testWidgets( 'onLastmileProbeResult', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -415,10 +391,6 @@ void generatedTestCases() { testWidgets( 'onAudioVolumeIndication', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -483,10 +455,6 @@ void generatedTestCases() { testWidgets( 'onLeaveChannel', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -620,10 +588,6 @@ void generatedTestCases() { testWidgets( 'onRtcStats', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -757,10 +721,6 @@ void generatedTestCases() { testWidgets( 'onAudioDeviceStateChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -820,10 +780,6 @@ void generatedTestCases() { testWidgets( 'onAudioMixingPositionChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -878,10 +834,6 @@ void generatedTestCases() { testWidgets( 'onAudioMixingFinished', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -930,10 +882,6 @@ void generatedTestCases() { testWidgets( 'onAudioEffectFinished', (WidgetTester tester) async { - final irisTester = IrisTester(); - 
final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -986,10 +934,6 @@ void generatedTestCases() { testWidgets( 'onVideoDeviceStateChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -1049,10 +993,6 @@ void generatedTestCases() { testWidgets( 'onNetworkQuality', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -1117,10 +1057,6 @@ void generatedTestCases() { testWidgets( 'onIntraRequestReceived', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -1178,10 +1114,6 @@ void generatedTestCases() { testWidgets( 'onUplinkNetworkInfoUpdated', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -1238,10 +1170,6 @@ void generatedTestCases() { testWidgets( 'onDownlinkNetworkInfoUpdated', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -1307,10 +1235,6 @@ void generatedTestCases() { testWidgets( 'onLastmileQuality', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -1363,10 +1287,6 @@ void generatedTestCases() { testWidgets( 'onFirstLocalVideoFrame', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -1426,10 +1346,6 @@ void generatedTestCases() { testWidgets( 'onFirstLocalVideoFramePublished', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -1492,10 +1408,6 @@ void generatedTestCases() { testWidgets( 
'onFirstRemoteVideoDecoded', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -1563,10 +1475,6 @@ void generatedTestCases() { testWidgets( 'onVideoSizeChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -1640,10 +1548,6 @@ void generatedTestCases() { testWidgets( 'onLocalVideoStateChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -1703,10 +1607,6 @@ void generatedTestCases() { testWidgets( 'onRemoteVideoStateChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -1778,10 +1678,6 @@ void generatedTestCases() { testWidgets( 'onFirstRemoteVideoFrame', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -1848,10 +1744,6 @@ void generatedTestCases() { testWidgets( 'onUserJoined', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -1913,10 +1805,6 @@ void generatedTestCases() { testWidgets( 'onUserOffline', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -1980,10 +1868,6 @@ void generatedTestCases() { testWidgets( 'onUserMuteAudio', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -2045,10 +1929,6 @@ void generatedTestCases() { testWidgets( 'onUserMuteVideo', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -2110,10 +1990,6 @@ void 
generatedTestCases() { testWidgets( 'onUserEnableVideo', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -2176,10 +2052,6 @@ void generatedTestCases() { testWidgets( 'onUserStateChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -2242,10 +2114,6 @@ void generatedTestCases() { testWidgets( 'onUserEnableLocalVideo', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -2305,73 +2173,9 @@ void generatedTestCases() { timeout: const Timeout(Duration(minutes: 1)), ); - testWidgets( - 'onApiCallExecuted', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: 'app_id', - areaCode: AreaCode.areaCodeGlob.value(), - )); - - final onApiCallExecutedCompleter = Completer(); - final theRtcEngineEventHandler = RtcEngineEventHandler( - onApiCallExecuted: (ErrorCodeType err, String api, String result) { - onApiCallExecutedCompleter.complete(true); - }, - ); - - rtcEngine.registerEventHandler( - theRtcEngineEventHandler, - ); - -// Delay 500 milliseconds to ensure the registerEventHandler call completed. - await Future.delayed(const Duration(milliseconds: 500)); - - { - const ErrorCodeType err = ErrorCodeType.errOk; - const String api = "hello"; - const String result = "hello"; - - final eventJson = { - 'err': err.value(), - 'api': api, - 'result': result, - }; - - irisTester.fireEvent('RtcEngineEventHandler_onApiCallExecuted', - params: eventJson); - irisTester.fireEvent('RtcEngineEventHandlerEx_onApiCallExecuted', - params: eventJson); - } - - final eventCalled = await onApiCallExecutedCompleter.future; - expect(eventCalled, isTrue); - - { - rtcEngine.unregisterEventHandler( - theRtcEngineEventHandler, - ); - } -// Delay 500 milliseconds to ensure the unregisterEventHandler call completed. 
- await Future.delayed(const Duration(milliseconds: 500)); - - await rtcEngine.release(); - }, - timeout: const Timeout(Duration(minutes: 1)), - ); - testWidgets( 'onLocalAudioStats', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -2444,10 +2248,6 @@ void generatedTestCases() { testWidgets( 'onRemoteAudioStats', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -2486,10 +2286,13 @@ void generatedTestCases() { const int statsTotalFrozenTime = 10; const int statsFrozenRate = 10; const int statsMosValue = 10; + const int statsFrozenRateByCustomPlcCount = 10; + const int statsPlcCount = 10; const int statsTotalActiveTime = 10; const int statsPublishDuration = 10; const int statsQoeQuality = 10; const int statsQualityChangedReason = 10; + const int statsRxAudioBytes = 10; const RemoteAudioStats stats = RemoteAudioStats( uid: statsUid, quality: statsQuality, @@ -2502,10 +2305,13 @@ void generatedTestCases() { totalFrozenTime: statsTotalFrozenTime, frozenRate: statsFrozenRate, mosValue: statsMosValue, + frozenRateByCustomPlcCount: statsFrozenRateByCustomPlcCount, + plcCount: statsPlcCount, totalActiveTime: statsTotalActiveTime, publishDuration: statsPublishDuration, qoeQuality: statsQoeQuality, qualityChangedReason: statsQualityChangedReason, + rxAudioBytes: statsRxAudioBytes, ); final eventJson = { @@ -2538,10 +2344,6 @@ void generatedTestCases() { testWidgets( 'onLocalVideoStats', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -2650,10 +2452,6 @@ void generatedTestCases() { testWidgets( 'onRemoteVideoStats', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -2685,6 +2483,7 @@ void generatedTestCases() { VideoStreamType.videoStreamHigh; const int statsUid = 10; const int statsDelay = 10; + const int statsE2eDelay = 10; const int statsWidth = 10; const int statsHeight = 10; const int statsReceivedBitrate = 10; @@ -2697,11 +2496,12 @@ void generatedTestCases() { const int statsAvSyncTimeMs = 10; const int statsTotalActiveTime = 10; const int statsPublishDuration = 10; - const int statsSuperResolutionType = 10; const int statsMosValue = 10; + const int statsRxVideoBytes = 10; const RemoteVideoStats stats = RemoteVideoStats( uid: statsUid, delay: statsDelay, + e2eDelay: statsE2eDelay, width: statsWidth, height: statsHeight, receivedBitrate: statsReceivedBitrate, @@ -2715,8 +2515,8 @@ void generatedTestCases() { avSyncTimeMs: statsAvSyncTimeMs, totalActiveTime: statsTotalActiveTime, publishDuration: statsPublishDuration, - superResolutionType: 
statsSuperResolutionType, mosValue: statsMosValue, + rxVideoBytes: statsRxVideoBytes, ); final eventJson = { @@ -2749,10 +2549,6 @@ void generatedTestCases() { testWidgets( 'onCameraReady', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -2801,10 +2597,6 @@ void generatedTestCases() { testWidgets( 'onCameraFocusAreaChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -2863,10 +2655,6 @@ void generatedTestCases() { testWidgets( 'onCameraExposureAreaChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -2924,89 +2712,9 @@ void generatedTestCases() { timeout: const Timeout(Duration(minutes: 1)), ); - testWidgets( - 'onFacePositionChanged', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: 'app_id', - areaCode: AreaCode.areaCodeGlob.value(), - )); - - final onFacePositionChangedCompleter = Completer(); - final theRtcEngineEventHandler = RtcEngineEventHandler( - onFacePositionChanged: (int imageWidth, int imageHeight, - Rectangle vecRectangle, int vecDistance, int numFaces) { - onFacePositionChangedCompleter.complete(true); - }, - ); - - rtcEngine.registerEventHandler( - theRtcEngineEventHandler, - ); - -// Delay 500 milliseconds to ensure the registerEventHandler call completed. - await Future.delayed(const Duration(milliseconds: 500)); - - { - const int imageWidth = 10; - const int imageHeight = 10; - const int vecRectangleX = 10; - const int vecRectangleY = 10; - const int vecRectangleWidth = 10; - const int vecRectangleHeight = 10; - const Rectangle vecRectangle = Rectangle( - x: vecRectangleX, - y: vecRectangleY, - width: vecRectangleWidth, - height: vecRectangleHeight, - ); - const int vecDistance = 10; - const int numFaces = 10; - - final eventJson = { - 'imageWidth': imageWidth, - 'imageHeight': imageHeight, - 'vecRectangle': vecRectangle.toJson(), - 'vecDistance': vecDistance, - 'numFaces': numFaces, - }; - - irisTester.fireEvent('RtcEngineEventHandler_onFacePositionChanged', - params: eventJson); - irisTester.fireEvent('RtcEngineEventHandlerEx_onFacePositionChanged', - params: eventJson); - } - - final eventCalled = await onFacePositionChangedCompleter.future; - expect(eventCalled, isTrue); - - { - rtcEngine.unregisterEventHandler( - theRtcEngineEventHandler, - ); - } -// Delay 500 milliseconds to ensure the unregisterEventHandler call completed. 
- await Future.delayed(const Duration(milliseconds: 500)); - - await rtcEngine.release(); - }, - timeout: const Timeout(Duration(minutes: 1)), - // TODO(littlegnal): Enable after the API signature fixed. - skip: true, - ); - testWidgets( 'onVideoStopped', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -3055,10 +2763,6 @@ void generatedTestCases() { testWidgets( 'onAudioMixingStateChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -3117,10 +2821,6 @@ void generatedTestCases() { testWidgets( 'onRhythmPlayerStateChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -3179,10 +2879,6 @@ void generatedTestCases() { testWidgets( 'onConnectionLost', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -3240,10 +2936,6 @@ void generatedTestCases() { testWidgets( 'onConnectionInterrupted', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -3301,10 +2993,6 @@ void generatedTestCases() { testWidgets( 'onConnectionBanned', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -3362,10 +3050,6 @@ void generatedTestCases() { testWidgets( 'onStreamMessage', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -3434,10 +3118,6 @@ void generatedTestCases() { testWidgets( 'onStreamMessageError', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -3506,10 +3186,6 @@ void generatedTestCases() { testWidgets( 'onRequestToken', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = 
irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -3567,10 +3243,6 @@ void generatedTestCases() { testWidgets( 'onTokenPrivilegeWillExpire', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -3631,10 +3303,6 @@ void generatedTestCases() { testWidgets( 'onLicenseValidationFailure', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -3696,10 +3364,6 @@ void generatedTestCases() { testWidgets( 'onFirstLocalAudioFramePublished', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -3762,10 +3426,6 @@ void generatedTestCases() { testWidgets( 'onFirstRemoteAudioFrame', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -3823,17 +3483,11 @@ void generatedTestCases() { await rtcEngine.release(); }, timeout: const Timeout(Duration(minutes: 1)), - // TODO(littlegnal): Enable after iris fixed. 
- skip: true, ); testWidgets( 'onFirstRemoteAudioDecoded', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -3897,10 +3551,6 @@ void generatedTestCases() { testWidgets( 'onLocalAudioStateChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -3965,10 +3615,6 @@ void generatedTestCases() { testWidgets( 'onRemoteAudioStateChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -4040,10 +3686,6 @@ void generatedTestCases() { testWidgets( 'onActiveSpeaker', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -4103,10 +3745,6 @@ void generatedTestCases() { testWidgets( 'onContentInspectResult', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -4160,10 +3798,6 @@ void generatedTestCases() { testWidgets( 'onSnapshotTaken', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -4232,10 +3866,6 @@ void generatedTestCases() { testWidgets( 'onClientRoleChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -4304,10 +3934,6 @@ void generatedTestCases() { testWidgets( 'onClientRoleChangeFailed', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -4371,10 +3997,6 @@ void generatedTestCases() { testWidgets( 'onAudioDeviceVolumeChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await 
rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -4433,10 +4055,6 @@ void generatedTestCases() { testWidgets( 'onRtmpStreamingStateChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -4498,10 +4116,6 @@ void generatedTestCases() { testWidgets( 'onRtmpStreamingEvent', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -4557,10 +4171,6 @@ void generatedTestCases() { testWidgets( 'onTranscodingUpdated', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -4609,10 +4219,6 @@ void generatedTestCases() { testWidgets( 'onAudioRoutingChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -4665,10 +4271,6 @@ void generatedTestCases() { testWidgets( 'onChannelMediaRelayStateChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -4727,10 +4329,6 @@ void generatedTestCases() { testWidgets( 'onChannelMediaRelayEvent', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -4784,10 +4382,6 @@ void generatedTestCases() { testWidgets( 'onLocalPublishFallbackToAudioOnly', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -4843,10 +4437,6 @@ void generatedTestCases() { testWidgets( 'onRemoteSubscribeFallbackToAudioOnly', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -4905,10 +4495,6 @@ void generatedTestCases() { testWidgets( 'onRemoteAudioTransportStats', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - 
setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -4977,10 +4563,6 @@ void generatedTestCases() { testWidgets( 'onRemoteVideoTransportStats', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -5049,10 +4631,6 @@ void generatedTestCases() { testWidgets( 'onConnectionStateChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -5117,10 +4695,6 @@ void generatedTestCases() { testWidgets( 'onWlAccMessage', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -5187,10 +4761,6 @@ void generatedTestCases() { testWidgets( 'onWlAccStats', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -5267,10 +4837,6 @@ void generatedTestCases() { testWidgets( 'onNetworkTypeChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -5330,10 +4896,6 @@ void generatedTestCases() { testWidgets( 'onEncryptionError', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -5395,10 +4957,6 @@ void generatedTestCases() { testWidgets( 'onPermissionError', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -5451,10 +5009,6 @@ void generatedTestCases() { testWidgets( 'onLocalUserRegistered', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -5509,10 +5063,6 @@ void generatedTestCases() { testWidgets( 'onUserInfoUpdated', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = 
irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -5572,10 +5122,6 @@ void generatedTestCases() { testWidgets( 'onUploadLogResult', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -5640,10 +5186,6 @@ void generatedTestCases() { testWidgets( 'onAudioSubscribeStateChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -5710,10 +5252,6 @@ void generatedTestCases() { testWidgets( 'onVideoSubscribeStateChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -5780,10 +5318,6 @@ void generatedTestCases() { testWidgets( 'onAudioPublishStateChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -5846,10 +5380,6 @@ void generatedTestCases() { testWidgets( 'onVideoPublishStateChanged', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -5915,10 +5445,6 @@ void generatedTestCases() { testWidgets( 'onExtensionEvent', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -5978,10 +5504,6 @@ void generatedTestCases() { testWidgets( 'onExtensionStarted', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -6036,10 +5558,6 @@ void generatedTestCases() { testWidgets( 'onExtensionStopped', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -6094,10 +5612,6 @@ void generatedTestCases() { testWidgets( 'onExtensionError', (WidgetTester tester) async { - final 
irisTester = IrisTester();
-      final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle();
-      setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr);
-
       RtcEngine rtcEngine = createAgoraRtcEngine();
       await rtcEngine.initialize(RtcEngineContext(
         appId: 'app_id',
@@ -6157,10 +5671,6 @@ void generatedTestCases() {
   testWidgets(
     'onUserAccountUpdated',
     (WidgetTester tester) async {
-      final irisTester = IrisTester();
-      final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle();
-      setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr);
-
       RtcEngine rtcEngine = createAgoraRtcEngine();
       await rtcEngine.initialize(RtcEngineContext(
         appId: 'app_id',
@@ -6219,5 +5729,170 @@ void generatedTestCases() {
     },
     timeout: const Timeout(Duration(minutes: 1)),
   );
+
+  testWidgets(
+    'onLocalVideoTranscoderError',
+    (WidgetTester tester) async {
+      RtcEngine rtcEngine = createAgoraRtcEngine();
+      await rtcEngine.initialize(RtcEngineContext(
+        appId: 'app_id',
+        areaCode: AreaCode.areaCodeGlob.value(),
+      ));
+
+      final onLocalVideoTranscoderErrorCompleter = Completer();
+      final theRtcEngineEventHandler = RtcEngineEventHandler(
+        onLocalVideoTranscoderError:
+            (TranscodingVideoStream stream, VideoTranscoderError error) {
+          onLocalVideoTranscoderErrorCompleter.complete(true);
+        },
+      );
+
+      rtcEngine.registerEventHandler(
+        theRtcEngineEventHandler,
+      );
+
+// Delay 500 milliseconds to ensure the registerEventHandler call completed.
+      await Future.delayed(const Duration(milliseconds: 500));
+
+      {
+        const VideoSourceType streamSourceType =
+            VideoSourceType.videoSourceCameraPrimary;
+        const int streamRemoteUserUid = 10;
+        const String streamImageUrl = "hello";
+        const int streamMediaPlayerId = 10;
+        const int streamX = 10;
+        const int streamY = 10;
+        const int streamWidth = 10;
+        const int streamHeight = 10;
+        const int streamZOrder = 10;
+        const double streamAlpha = 10.0;
+        const bool streamMirror = true;
+        const TranscodingVideoStream stream = TranscodingVideoStream(
+          sourceType: streamSourceType,
+          remoteUserUid: streamRemoteUserUid,
+          imageUrl: streamImageUrl,
+          mediaPlayerId: streamMediaPlayerId,
+          x: streamX,
+          y: streamY,
+          width: streamWidth,
+          height: streamHeight,
+          zOrder: streamZOrder,
+          alpha: streamAlpha,
+          mirror: streamMirror,
+        );
+        const VideoTranscoderError error = VideoTranscoderError.vtErrOk;
+
+        final eventJson = {
+          'stream': stream.toJson(),
+          'error': error.value(),
+        };
+
+        irisTester.fireEvent(
+            'RtcEngineEventHandler_onLocalVideoTranscoderError',
+            params: eventJson);
+        irisTester.fireEvent(
+            'RtcEngineEventHandlerEx_onLocalVideoTranscoderError',
+            params: eventJson);
+      }
+
+      final eventCalled = await onLocalVideoTranscoderErrorCompleter.future;
+      expect(eventCalled, isTrue);
+
+      {
+        rtcEngine.unregisterEventHandler(
+          theRtcEngineEventHandler,
+        );
+      }
+// Delay 500 milliseconds to ensure the unregisterEventHandler call completed.
+      await Future.delayed(const Duration(milliseconds: 500));
+
+      await rtcEngine.release();
+    },
+    timeout: const Timeout(Duration(minutes: 1)),
+  );
+
+  testWidgets(
+    'onVideoRenderingTracingResult',
+    (WidgetTester tester) async {
+      RtcEngine rtcEngine = createAgoraRtcEngine();
+      await rtcEngine.initialize(RtcEngineContext(
+        appId: 'app_id',
+        areaCode: AreaCode.areaCodeGlob.value(),
+      ));
+
+      final onVideoRenderingTracingResultCompleter = Completer();
+      final theRtcEngineEventHandler = RtcEngineEventHandler(
+        onVideoRenderingTracingResult: (RtcConnection connection,
+            int uid,
+            MediaTraceEvent currentEvent,
+            VideoRenderingTracingInfo tracingInfo) {
+          onVideoRenderingTracingResultCompleter.complete(true);
+        },
+      );
+
+      rtcEngine.registerEventHandler(
+        theRtcEngineEventHandler,
+      );
+
+// Delay 500 milliseconds to ensure the registerEventHandler call completed.
+      await Future.delayed(const Duration(milliseconds: 500));
+
+      {
+        const String connectionChannelId = "hello";
+        const int connectionLocalUid = 10;
+        const RtcConnection connection = RtcConnection(
+          channelId: connectionChannelId,
+          localUid: connectionLocalUid,
+        );
+        const int uid = 10;
+        const MediaTraceEvent currentEvent =
+            MediaTraceEvent.mediaTraceEventVideoRendered;
+        const int tracingInfoElapsedTime = 10;
+        const int tracingInfoStart2JoinChannel = 10;
+        const int tracingInfoJoin2JoinSuccess = 10;
+        const int tracingInfoJoinSuccess2RemoteJoined = 10;
+        const int tracingInfoRemoteJoined2SetView = 10;
+        const int tracingInfoRemoteJoined2UnmuteVideo = 10;
+        const int tracingInfoRemoteJoined2PacketReceived = 10;
+        const VideoRenderingTracingInfo tracingInfo = VideoRenderingTracingInfo(
+          elapsedTime: tracingInfoElapsedTime,
+          start2JoinChannel: tracingInfoStart2JoinChannel,
+          join2JoinSuccess: tracingInfoJoin2JoinSuccess,
+          joinSuccess2RemoteJoined: tracingInfoJoinSuccess2RemoteJoined,
+          remoteJoined2SetView: tracingInfoRemoteJoined2SetView,
+          remoteJoined2UnmuteVideo: tracingInfoRemoteJoined2UnmuteVideo,
+          remoteJoined2PacketReceived: tracingInfoRemoteJoined2PacketReceived,
+        );
+
+        final eventJson = {
+          'connection': connection.toJson(),
+          'uid': uid,
+          'currentEvent': currentEvent.value(),
+          'tracingInfo': tracingInfo.toJson(),
+        };
+
+        irisTester.fireEvent(
+            'RtcEngineEventHandler_onVideoRenderingTracingResult',
+            params: eventJson);
+        irisTester.fireEvent(
+            'RtcEngineEventHandlerEx_onVideoRenderingTracingResult',
+            params: eventJson);
+      }
+
+      final eventCalled = await onVideoRenderingTracingResultCompleter.future;
+      expect(eventCalled, isTrue);
+
+      {
+        rtcEngine.unregisterEventHandler(
+          theRtcEngineEventHandler,
+        );
+      }
+// Delay 500 milliseconds to ensure the unregisterEventHandler call completed.
+      await Future.delayed(const Duration(milliseconds: 500));
+
+      await rtcEngine.release();
+    },
+    timeout: const Timeout(Duration(minutes: 1)),
+  );
 }
diff --git a/test_shard/fake_test_app/integration_test/generated/rtcengineex_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/rtcengineex_fake_test.generated.dart
index 80aab0451..0ef99e739 100644
--- a/test_shard/fake_test_app/integration_test/generated/rtcengineex_fake_test.generated.dart
+++ b/test_shard/fake_test_app/integration_test/generated/rtcengineex_fake_test.generated.dart
@@ -14,10 +14,6 @@ void rtcEngineExSmokeTestCases() {
   testWidgets(
     'joinChannelEx',
     (WidgetTester tester) async {
-      final irisTester = IrisTester();
-      final debugApiEngineIntPtr = irisTester.createDebugApiEngine();
-      setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr);
-
       String engineAppId = const String.fromEnvironment('TEST_APP_ID',
           defaultValue: '');
@@ -51,15 +47,12 @@ void rtcEngineExSmokeTestCases() {
       const bool optionsPublishScreenTrack = true;
       const bool optionsPublishSecondaryScreenTrack = true;
       const bool optionsPublishCustomAudioTrack = true;
-      const int optionsPublishCustomAudioSourceId = 10;
-      const bool optionsPublishCustomAudioTrackEnableAec = true;
-      const bool optionsPublishDirectCustomAudioTrack = true;
-      const bool optionsPublishCustomAudioTrackAec = true;
+      const int optionsPublishCustomAudioTrackId = 10;
       const bool optionsPublishCustomVideoTrack = true;
       const bool optionsPublishEncodedVideoTrack = true;
       const bool optionsPublishMediaPlayerAudioTrack = true;
       const bool optionsPublishMediaPlayerVideoTrack = true;
-      const bool optionsPublishTrancodedVideoTrack = true;
+      const bool optionsPublishTranscodedVideoTrack = true;
       const bool optionsAutoSubscribeAudio = true;
       const bool optionsAutoSubscribeVideo = true;
       const bool optionsEnableAudioRecordingOrPlayout = true;
@@ -81,16 +74,12 @@ void rtcEngineExSmokeTestCases() {
         publishScreenTrack: optionsPublishScreenTrack,
         publishSecondaryScreenTrack: optionsPublishSecondaryScreenTrack,
         publishCustomAudioTrack: optionsPublishCustomAudioTrack,
-        publishCustomAudioSourceId: optionsPublishCustomAudioSourceId,
-        publishCustomAudioTrackEnableAec:
-            optionsPublishCustomAudioTrackEnableAec,
-        publishDirectCustomAudioTrack: optionsPublishDirectCustomAudioTrack,
-        publishCustomAudioTrackAec: optionsPublishCustomAudioTrackAec,
+        publishCustomAudioTrackId: optionsPublishCustomAudioTrackId,
         publishCustomVideoTrack: optionsPublishCustomVideoTrack,
         publishEncodedVideoTrack: optionsPublishEncodedVideoTrack,
         publishMediaPlayerAudioTrack: optionsPublishMediaPlayerAudioTrack,
         publishMediaPlayerVideoTrack: optionsPublishMediaPlayerVideoTrack,
-        publishTrancodedVideoTrack: optionsPublishTrancodedVideoTrack,
+        publishTranscodedVideoTrack: optionsPublishTranscodedVideoTrack,
         autoSubscribeAudio: optionsAutoSubscribeAudio,
         autoSubscribeVideo: optionsAutoSubscribeVideo,
         enableAudioRecordingOrPlayout: optionsEnableAudioRecordingOrPlayout,
@@ -133,10 +122,6 @@ void rtcEngineExSmokeTestCases() {
   testWidgets(
     'leaveChannelEx',
     (WidgetTester tester) async {
-      final irisTester = IrisTester();
-      final debugApiEngineIntPtr = irisTester.createDebugApiEngine();
-      setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr);
-
       String engineAppId = const String.fromEnvironment('TEST_APP_ID',
           defaultValue: '');
@@ -185,10 +170,6 @@ void rtcEngineExSmokeTestCases() {
   testWidgets(
     'updateChannelMediaOptionsEx',
     (WidgetTester tester) async {
-      final irisTester = IrisTester();
-      final debugApiEngineIntPtr =
irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -215,15 +196,12 @@ void rtcEngineExSmokeTestCases() { const bool optionsPublishScreenTrack = true; const bool optionsPublishSecondaryScreenTrack = true; const bool optionsPublishCustomAudioTrack = true; - const int optionsPublishCustomAudioSourceId = 10; - const bool optionsPublishCustomAudioTrackEnableAec = true; - const bool optionsPublishDirectCustomAudioTrack = true; - const bool optionsPublishCustomAudioTrackAec = true; + const int optionsPublishCustomAudioTrackId = 10; const bool optionsPublishCustomVideoTrack = true; const bool optionsPublishEncodedVideoTrack = true; const bool optionsPublishMediaPlayerAudioTrack = true; const bool optionsPublishMediaPlayerVideoTrack = true; - const bool optionsPublishTrancodedVideoTrack = true; + const bool optionsPublishTranscodedVideoTrack = true; const bool optionsAutoSubscribeAudio = true; const bool optionsAutoSubscribeVideo = true; const bool optionsEnableAudioRecordingOrPlayout = true; @@ -245,16 +223,12 @@ void rtcEngineExSmokeTestCases() { publishScreenTrack: optionsPublishScreenTrack, publishSecondaryScreenTrack: optionsPublishSecondaryScreenTrack, publishCustomAudioTrack: optionsPublishCustomAudioTrack, - publishCustomAudioSourceId: optionsPublishCustomAudioSourceId, - publishCustomAudioTrackEnableAec: - optionsPublishCustomAudioTrackEnableAec, - publishDirectCustomAudioTrack: optionsPublishDirectCustomAudioTrack, - publishCustomAudioTrackAec: optionsPublishCustomAudioTrackAec, + publishCustomAudioTrackId: optionsPublishCustomAudioTrackId, publishCustomVideoTrack: optionsPublishCustomVideoTrack, publishEncodedVideoTrack: optionsPublishEncodedVideoTrack, publishMediaPlayerAudioTrack: optionsPublishMediaPlayerAudioTrack, publishMediaPlayerVideoTrack: optionsPublishMediaPlayerVideoTrack, - publishTrancodedVideoTrack: optionsPublishTrancodedVideoTrack, + publishTranscodedVideoTrack: optionsPublishTranscodedVideoTrack, autoSubscribeAudio: optionsAutoSubscribeAudio, autoSubscribeVideo: optionsAutoSubscribeVideo, enableAudioRecordingOrPlayout: optionsEnableAudioRecordingOrPlayout, @@ -302,10 +276,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'setVideoEncoderConfigurationEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -381,10 +351,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'setupRemoteVideoEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -415,6 +381,7 @@ void rtcEngineExSmokeTestCases() { const int canvasView = 10; const int canvasUid = 10; const int canvasMediaPlayerId = 10; + const bool canvasEnableAlphaMask = true; const VideoCanvas canvas = VideoCanvas( view: canvasView, uid: canvasUid, @@ -424,6 +391,7 @@ void rtcEngineExSmokeTestCases() { sourceType: canvasSourceType, mediaPlayerId: canvasMediaPlayerId, cropArea: canvasCropArea, + enableAlphaMask: canvasEnableAlphaMask, ); const String connectionChannelId = "hello"; const int connectionLocalUid = 10; @@ -455,10 +423,6 @@ 
void rtcEngineExSmokeTestCases() { testWidgets( 'muteRemoteAudioStreamEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -502,10 +466,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'muteRemoteVideoStreamEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -549,10 +509,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'setRemoteVideoStreamTypeEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -596,10 +552,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'muteLocalAudioStreamEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -641,10 +593,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'muteLocalVideoStreamEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -686,10 +634,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'muteAllRemoteAudioStreamsEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -731,10 +675,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'muteAllRemoteVideoStreamsEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -776,10 +716,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'setRemoteVideoSubscriptionOptionsEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -829,10 +765,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'setRemoteVoicePositionEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -878,10 +810,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'setRemoteUserSpatialAudioParamsEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final 
debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -943,10 +871,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'setRemoteRenderModeEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -993,10 +917,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'enableLoopbackRecordingEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1038,12 +958,90 @@ void rtcEngineExSmokeTestCases() { ); testWidgets( - 'adjustUserPlaybackSignalVolumeEx', + 'adjustRecordingSignalVolumeEx', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngineEx rtcEngineEx = createAgoraRtcEngineEx(); + await rtcEngineEx.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + try { + const int volume = 10; + const String connectionChannelId = "hello"; + const int connectionLocalUid = 10; + const RtcConnection connection = RtcConnection( + channelId: connectionChannelId, + localUid: connectionLocalUid, + ); + await rtcEngineEx.adjustRecordingSignalVolumeEx( + volume: volume, + connection: connection, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[adjustRecordingSignalVolumeEx] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngineEx.release(); + }, +// skip: !(), + ); + + testWidgets( + 'muteRecordingSignalEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngineEx rtcEngineEx = createAgoraRtcEngineEx(); + await rtcEngineEx.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + try { + const bool mute = true; + const String connectionChannelId = "hello"; + const int connectionLocalUid = 10; + const RtcConnection connection = RtcConnection( + channelId: connectionChannelId, + localUid: connectionLocalUid, + ); + await rtcEngineEx.muteRecordingSignalEx( + mute: mute, + connection: connection, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[muteRecordingSignalEx] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
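The generated comment above, "Only not supported error supported.", is shorthand for the convention used throughout these smoke tests: every call runs against a fake engine, so an AgoraRtcException with code -4 (treated by these tests as "not supported") is tolerated, while any other failure still fails the test. A small helper capturing that pattern; ignoreNotSupported is our name, not an SDK or repo API:

import 'package:agora_rtc_engine/agora_rtc_engine.dart';
import 'package:flutter/foundation.dart';

// Run [body] and swallow only AgoraRtcException(-4) ("not supported"),
// so genuinely broken bindings still surface as test failures.
Future<void> ignoreNotSupported(
    String label, Future<void> Function() body) async {
  try {
    await body();
  } on AgoraRtcException catch (e) {
    if (e.code != -4) rethrow;
    debugPrint('[$label] not supported on this platform, skipping');
  }
}

With it, a try/catch like the one above would collapse to: await ignoreNotSupported('muteRecordingSignalEx', () => rtcEngineEx.muteRecordingSignalEx(mute: mute, connection: connection));
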
+ rethrow; + } + } + await rtcEngineEx.release(); + }, +// skip: !(), + ); + + testWidgets( + 'adjustUserPlaybackSignalVolumeEx', + (WidgetTester tester) async { String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1088,10 +1086,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'getConnectionStateEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1131,10 +1125,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'enableEncryptionEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1185,10 +1175,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'sendStreamMessageEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1234,10 +1220,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'addVideoWatermarkEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1318,10 +1300,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'clearVideoWatermarkEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1361,10 +1339,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'sendCustomReportMessageEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1414,10 +1388,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'enableAudioVolumeIndicationEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1463,10 +1433,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'startRtmpStreamWithoutTranscodingEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1509,10 +1475,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'startRtmpStreamWithTranscodingEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - 
setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1608,10 +1570,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'updateRtmpTranscodingEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1704,10 +1662,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'stopRtmpStreamEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1749,10 +1703,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'stopChannelMediaRelayEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1792,10 +1742,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'pauseAllChannelMediaRelayEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1835,10 +1781,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'resumeAllChannelMediaRelayEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1878,10 +1820,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'getUserInfoByUserAccountEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1923,10 +1861,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'getUserInfoByUidEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -1965,57 +1899,9 @@ void rtcEngineExSmokeTestCases() { // skip: !(), ); - testWidgets( - 'setVideoProfileEx', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngineEx rtcEngineEx = createAgoraRtcEngineEx(); - await rtcEngineEx.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - - try { - const int width = 10; - const int height = 10; - const int frameRate = 10; - const int bitrate = 10; - await rtcEngineEx.setVideoProfileEx( - width: width, - height: height, - 
frameRate: frameRate, - bitrate: bitrate, - ); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint('[setVideoProfileEx] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. - rethrow; - } - } - - await rtcEngineEx.release(); - }, -// skip: !(), - ); - testWidgets( 'enableDualStreamModeEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2071,10 +1957,6 @@ void rtcEngineExSmokeTestCases() { testWidgets( 'setDualStreamModeEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2129,12 +2011,8 @@ void rtcEngineExSmokeTestCases() { ); testWidgets( - 'enableWirelessAccelerate', + 'takeSnapshotEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2145,13 +2023,22 @@ void rtcEngineExSmokeTestCases() { )); try { - const bool enabled = true; - await rtcEngineEx.enableWirelessAccelerate( - enabled, + const String connectionChannelId = "hello"; + const int connectionLocalUid = 10; + const RtcConnection connection = RtcConnection( + channelId: connectionChannelId, + localUid: connectionLocalUid, + ); + const int uid = 10; + const String filePath = "hello"; + await rtcEngineEx.takeSnapshotEx( + connection: connection, + uid: uid, + filePath: filePath, ); } catch (e) { if (e is! AgoraRtcException) { - debugPrint('[enableWirelessAccelerate] error: ${e.toString()}'); + debugPrint('[takeSnapshotEx] error: ${e.toString()}'); rethrow; } @@ -2167,12 +2054,8 @@ void rtcEngineExSmokeTestCases() { ); testWidgets( - 'takeSnapshotEx', + 'startMediaRenderingTracingEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -2189,16 +2072,12 @@ void rtcEngineExSmokeTestCases() { channelId: connectionChannelId, localUid: connectionLocalUid, ); - const int uid = 10; - const String filePath = "hello"; - await rtcEngineEx.takeSnapshotEx( - connection: connection, - uid: uid, - filePath: filePath, + await rtcEngineEx.startMediaRenderingTracingEx( + connection, ); } catch (e) { if (e is! 
AgoraRtcException) { - debugPrint('[takeSnapshotEx] error: ${e.toString()}'); + debugPrint('[startMediaRenderingTracingEx] error: ${e.toString()}'); rethrow; } diff --git a/test_shard/fake_test_app/integration_test/generated/videodevicemanager_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/videodevicemanager_fake_test.generated.dart index 0445e6259..5d7fc38b1 100644 --- a/test_shard/fake_test_app/integration_test/generated/videodevicemanager_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/videodevicemanager_fake_test.generated.dart @@ -13,10 +13,6 @@ void videoDeviceManagerSmokeTestCases() { testWidgets( 'enumerateVideoDevices', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -51,10 +47,6 @@ void videoDeviceManagerSmokeTestCases() { testWidgets( 'setDevice', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -92,10 +84,6 @@ void videoDeviceManagerSmokeTestCases() { testWidgets( 'getDevice', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -130,10 +118,6 @@ void videoDeviceManagerSmokeTestCases() { testWidgets( 'numberOfCapabilities', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -171,10 +155,6 @@ void videoDeviceManagerSmokeTestCases() { testWidgets( 'getCapability', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -214,10 +194,6 @@ void videoDeviceManagerSmokeTestCases() { testWidgets( 'startDeviceTest', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -255,10 +231,6 @@ void videoDeviceManagerSmokeTestCases() { testWidgets( 'stopDeviceTest', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -293,10 +265,6 @@ void videoDeviceManagerSmokeTestCases() { testWidgets( 'release', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const 
String.fromEnvironment('TEST_APP_ID', defaultValue: ''); diff --git a/test_shard/fake_test_app/integration_test/testcases/localspatialaudioengine_testcases.dart b/test_shard/fake_test_app/integration_test/testcases/localspatialaudioengine_testcases.dart index 3c2460d35..4d2a7fb34 100644 --- a/test_shard/fake_test_app/integration_test/testcases/localspatialaudioengine_testcases.dart +++ b/test_shard/fake_test_app/integration_test/testcases/localspatialaudioengine_testcases.dart @@ -16,10 +16,6 @@ void testCases() { testWidgets( 'updateSelfPosition', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -62,10 +58,6 @@ void testCases() { testWidgets( 'updateSelfPositionEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); diff --git a/test_shard/fake_test_app/integration_test/testcases/rtcengine_debug_testcases.dart b/test_shard/fake_test_app/integration_test/testcases/rtcengine_debug_testcases.dart index 584cc4069..e6cdcfe8d 100644 --- a/test_shard/fake_test_app/integration_test/testcases/rtcengine_debug_testcases.dart +++ b/test_shard/fake_test_app/integration_test/testcases/rtcengine_debug_testcases.dart @@ -13,10 +13,6 @@ void testCases() { testWidgets( 'startDumpVideo/stopDumpVideo fake test', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); diff --git a/test_shard/fake_test_app/integration_test/testcases/rtcengine_rtcengineeventhandler_testcases.dart b/test_shard/fake_test_app/integration_test/testcases/rtcengine_rtcengineeventhandler_testcases.dart new file mode 100644 index 000000000..c14f6379d --- /dev/null +++ b/test_shard/fake_test_app/integration_test/testcases/rtcengine_rtcengineeventhandler_testcases.dart @@ -0,0 +1,80 @@ +import 'dart:async'; +import 'dart:io'; +import 'dart:typed_data'; + +import 'package:agora_rtc_engine/agora_rtc_engine.dart'; +import 'package:flutter/foundation.dart'; +import 'package:flutter/services.dart'; +import 'package:flutter_test/flutter_test.dart'; +import 'package:integration_test/integration_test.dart'; +import 'package:iris_tester/iris_tester.dart'; +import '../generated/rtcengine_rtcengineeventhandler_testcases.generated.dart' + as generated; +import 'package:path/path.dart' as path; +import 'package:iris_method_channel/iris_method_channel.dart'; + +void testCases(IrisTester irisTester) { + generated.generatedTestCases(irisTester); + + testWidgets( + 'onFacePositionChanged', + (WidgetTester tester) async { + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: 'app_id', + areaCode: AreaCode.areaCodeGlob.value(), + )); + + final onFacePositionChangedCompleter = Completer(); + final theRtcEngineEventHandler = RtcEngineEventHandler( + onFacePositionChanged: (int imageWidth, int imageHeight, + List vecRectangle, List vecDistance, int numFaces) { + onFacePositionChangedCompleter.complete(true); + }, + ); + + 
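One pattern worth naming before the test continues below with registration and firing: each handwritten event test fires the same payload twice, under both the RtcEngineEventHandler_ and RtcEngineEventHandlerEx_ keys, presumably because the fake can deliver the callback through either native path. A tiny helper would make that explicit; fireRtcEngineEvent is a hypothetical name, not in the repo:

import 'package:iris_tester/iris_tester.dart';

// Fire a fake RtcEngine event under both key prefixes, mirroring the
// paired fireEvent calls in the generated and handwritten tests.
void fireRtcEngineEvent(
    IrisTester irisTester, String eventName, Map<String, dynamic> params) {
  irisTester.fireEvent('RtcEngineEventHandler_$eventName', params: params);
  irisTester.fireEvent('RtcEngineEventHandlerEx_$eventName', params: params);
}
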
rtcEngine.registerEventHandler( + theRtcEngineEventHandler, + ); + +// Delay 500 milliseconds to ensure the registerEventHandler call completed. + await Future.delayed(const Duration(milliseconds: 500)); + + { + const int imageWidth = 10; + const int imageHeight = 10; + const List vecRectangle = []; + const List vecDistance = []; + const int numFaces = 10; + + final eventJson = { + 'imageWidth': imageWidth, + 'imageHeight': imageHeight, + 'vecRectangle': vecRectangle, + 'vecDistance': vecDistance, + 'numFaces': numFaces, + }; + + irisTester.fireEvent('RtcEngineEventHandler_onFacePositionChanged', + params: eventJson); + irisTester.fireEvent('RtcEngineEventHandlerEx_onFacePositionChanged', + params: eventJson); + } + + final eventCalled = await onFacePositionChangedCompleter.future; + expect(eventCalled, isTrue); + + { + rtcEngine.unregisterEventHandler( + theRtcEngineEventHandler, + ); + } +// Delay 500 milliseconds to ensure the unregisterEventHandler call completed. + await Future.delayed(const Duration(milliseconds: 500)); + + await rtcEngine.release(); + }, + timeout: const Timeout(Duration(minutes: 1)), + skip: !(Platform.isAndroid || Platform.isIOS), + ); +} diff --git a/test_shard/fake_test_app/integration_test/testcases/rtcengine_testcases.dart b/test_shard/fake_test_app/integration_test/testcases/rtcengine_testcases.dart index 7ee38c628..ffb971d6a 100644 --- a/test_shard/fake_test_app/integration_test/testcases/rtcengine_testcases.dart +++ b/test_shard/fake_test_app/integration_test/testcases/rtcengine_testcases.dart @@ -18,10 +18,6 @@ void testCases() { testWidgets( 'startChannelMediaRelay', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -72,10 +68,6 @@ void testCases() { testWidgets( 'updateChannelMediaRelay', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -126,10 +118,6 @@ void testCases() { testWidgets( 'setLocalAccessPoint', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -192,10 +180,6 @@ void testCases() { testWidgets( 'setSubscribeAudioBlocklist', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -231,10 +215,6 @@ void testCases() { testWidgets( 'setSubscribeAudioAllowlist', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -270,10 +250,6 @@ void testCases() { testWidgets( 'setSubscribeVideoBlocklist', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = 
irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -309,10 +285,6 @@ void testCases() { testWidgets( 'setSubscribeVideoAllowlist', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -344,4 +316,146 @@ void testCases() { await rtcEngine.release(); }, ); + + testWidgets( + 'createMediaRecorder', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + final mediaRecorder = await rtcEngine + .createMediaRecorder(RecorderStreamInfo(channelId: 'hello', uid: 0)); + + await rtcEngine.destroyMediaRecorder(mediaRecorder!); + + await rtcEngine.release(); + }, + ); + + testWidgets( + 'queryCodecCapability', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + try { + const int size = 0; + await rtcEngine.queryCodecCapability( + size, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[queryCodecCapability] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngine.release(); + }, + ); + + testWidgets( + 'setHighPriorityUserList', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + try { + const List uidList = [1]; + const int uidNum = 1; + const StreamFallbackOptions option = + StreamFallbackOptions.streamFallbackOptionDisabled; + await rtcEngine.setHighPriorityUserList( + uidList: uidList, + uidNum: uidNum, + option: option, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[setHighPriorityUserList] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
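setHighPriorityUserList, exercised above, is one of the APIs this 4.2.0 upgrade introduces. Against a real engine, the intent is to name the uids whose streams should be the last to degrade under downlink pressure. A minimal sketch with placeholder uids; prioritizeSpeakers is our name, and the engine is assumed to be initialized and in a channel:

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Sketch: keep uids 101 and 102 at high priority so their streams
// resist fallback when bandwidth degrades.
Future<void> prioritizeSpeakers(RtcEngine rtcEngine) async {
  const uids = [101, 102];
  await rtcEngine.setHighPriorityUserList(
    uidList: uids,
    uidNum: uids.length,
    option: StreamFallbackOptions.streamFallbackOptionDisabled,
  );
}
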
+ rethrow; + } + } + + await rtcEngine.release(); + }, + ); + + testWidgets( + 'startOrUpdateChannelMediaRelay', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + try { + const String srcInfoChannelName = "hello"; + const String srcInfoToken = "hello"; + const int srcInfoUid = 10; + const ChannelMediaInfo configurationSrcInfo = ChannelMediaInfo( + channelName: srcInfoChannelName, + token: srcInfoToken, + uid: srcInfoUid, + ); + const List configurationDestInfos = [ + configurationSrcInfo + ]; + const int configurationDestCount = 1; + const ChannelMediaRelayConfiguration configuration = + ChannelMediaRelayConfiguration( + srcInfo: configurationSrcInfo, + destInfos: configurationDestInfos, + destCount: configurationDestCount, + ); + await rtcEngine.startOrUpdateChannelMediaRelay( + configuration, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[startOrUpdateChannelMediaRelay] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngine.release(); + }, + ); } diff --git a/test_shard/fake_test_app/integration_test/testcases/rtcengineex_testcases.dart b/test_shard/fake_test_app/integration_test/testcases/rtcengineex_testcases.dart index 53cc3f013..1ab733957 100644 --- a/test_shard/fake_test_app/integration_test/testcases/rtcengineex_testcases.dart +++ b/test_shard/fake_test_app/integration_test/testcases/rtcengineex_testcases.dart @@ -15,10 +15,6 @@ void testCases() { testWidgets( 'setSubscribeAudioBlocklistEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -62,10 +58,6 @@ void testCases() { testWidgets( 'setSubscribeAudioAllowlistEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -109,10 +101,6 @@ void testCases() { testWidgets( 'setSubscribeVideoBlocklistEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -156,10 +144,6 @@ void testCases() { testWidgets( 'setSubscribeVideoAllowlistEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -203,10 +187,6 @@ void testCases() { testWidgets( 'startChannelMediaRelayEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -265,10 +245,6 @@ void testCases() { 
testWidgets( 'updateChannelMediaRelayEx', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -323,4 +299,109 @@ void testCases() { }, // skip: !(), ); + + testWidgets( + 'startOrUpdateChannelMediaRelayEx', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngineEx rtcEngineEx = createAgoraRtcEngineEx(); + await rtcEngineEx.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + try { + const String srcInfoChannelName = "hello"; + const String srcInfoToken = "hello"; + const int srcInfoUid = 10; + const ChannelMediaInfo configurationSrcInfo = ChannelMediaInfo( + channelName: srcInfoChannelName, + token: srcInfoToken, + uid: srcInfoUid, + ); + const List configurationDestInfos = [ + configurationSrcInfo + ]; + const int configurationDestCount = 1; + const ChannelMediaRelayConfiguration configuration = + ChannelMediaRelayConfiguration( + srcInfo: configurationSrcInfo, + destInfos: configurationDestInfos, + destCount: configurationDestCount, + ); + const String connectionChannelId = "hello"; + const int connectionLocalUid = 10; + const RtcConnection connection = RtcConnection( + channelId: connectionChannelId, + localUid: connectionLocalUid, + ); + await rtcEngineEx.startOrUpdateChannelMediaRelayEx( + configuration: configuration, + connection: connection, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint( + '[startOrUpdateChannelMediaRelayEx] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngineEx.release(); + }, +// skip: !(), + ); + + testWidgets( + 'setHighPriorityUserListEx', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngineEx rtcEngineEx = createAgoraRtcEngineEx(); + await rtcEngineEx.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + try { + const List uidList = [1]; + const int uidNum = 1; + const StreamFallbackOptions option = + StreamFallbackOptions.streamFallbackOptionDisabled; + const String connectionChannelId = "hello"; + const int connectionLocalUid = 10; + const RtcConnection connection = RtcConnection( + channelId: connectionChannelId, + localUid: connectionLocalUid, + ); + await rtcEngineEx.setHighPriorityUserListEx( + uidList: uidList, + uidNum: uidNum, + option: option, + connection: connection, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[setHighPriorityUserListEx] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
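startOrUpdateChannelMediaRelayEx, tested just above, reflects another 4.2.0 consolidation: one call now covers what startChannelMediaRelay(Ex) and updateChannelMediaRelay(Ex) did separately (the older calls are still tested earlier in this file). A hedged sketch of the non-Ex variant; relayToOneChannel and the channel names, tokens, and uids are all placeholders:

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Sketch: relay the current channel's media to one destination channel.
// Assumes `rtcEngine` is initialized and hosting in the source channel.
Future<void> relayToOneChannel(RtcEngine rtcEngine) async {
  const srcInfo = ChannelMediaInfo(channelName: 'src', token: '', uid: 0);
  const destInfo = ChannelMediaInfo(channelName: 'dst', token: '', uid: 0);
  await rtcEngine.startOrUpdateChannelMediaRelay(
    const ChannelMediaRelayConfiguration(
      srcInfo: srcInfo,
      destInfos: [destInfo],
      destCount: 1,
    ),
  );
}
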
+ rethrow; + } + } + + await rtcEngineEx.release(); + }, +// skip: !(), + ); } diff --git a/test_shard/fake_test_app/lib/main.dart b/test_shard/fake_test_app/lib/main.dart index 418ccb09a..07ab34bc8 100644 --- a/test_shard/fake_test_app/lib/main.dart +++ b/test_shard/fake_test_app/lib/main.dart @@ -72,7 +72,7 @@ class _MyHomePageState extends State { }); } - @override + @override void initState() { super.initState(); @@ -80,25 +80,25 @@ class _MyHomePageState extends State { } Future _init() async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); + // final irisTester = IrisTester(); + // final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); + // setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); + // String engineAppId = const String.fromEnvironment('TEST_APP_ID', + // defaultValue: ''); - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); + // RtcEngine rtcEngine = createAgoraRtcEngine(); + // await rtcEngine.initialize(RtcEngineContext( + // appId: engineAppId, + // areaCode: AreaCode.areaCodeGlob.value(), + // )); - final audioDeviceManager = rtcEngine.getAudioDeviceManager(); + // final audioDeviceManager = rtcEngine.getAudioDeviceManager(); - await audioDeviceManager.getRecordingDefaultDevice(); + // await audioDeviceManager.getRecordingDefaultDevice(); - await audioDeviceManager.release(); - await rtcEngine.release(); + // await audioDeviceManager.release(); + // await rtcEngine.release(); } @override diff --git a/test_shard/integration_test_app/integration_test/fake/fake_iris_method_channel.dart b/test_shard/integration_test_app/integration_test/fake/fake_iris_method_channel.dart index 88b5ff26b..08cd73e21 100644 --- a/test_shard/integration_test_app/integration_test/fake/fake_iris_method_channel.dart +++ b/test_shard/integration_test_app/integration_test/fake/fake_iris_method_channel.dart @@ -2,10 +2,13 @@ import 'package:flutter/foundation.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; class FakeIrisMethodChannel extends IrisMethodChannel { + FakeIrisMethodChannel(NativeBindingsProvider provider) : super(provider); final List methodCallQueue = []; @override - Future initilize(NativeBindingsProvider provider) async {} + Future initilize(List args) async { + return null; + } @override Future invokeMethod(IrisMethodCall methodCall) async { diff --git a/test_shard/integration_test_app/integration_test/testcases/fake_agora_video_view_testcases.dart b/test_shard/integration_test_app/integration_test/testcases/fake_agora_video_view_testcases.dart index b6430c1d6..d3cd821a4 100644 --- a/test_shard/integration_test_app/integration_test/testcases/fake_agora_video_view_testcases.dart +++ b/test_shard/integration_test_app/integration_test/testcases/fake_agora_video_view_testcases.dart @@ -7,6 +7,7 @@ import 'package:flutter/foundation.dart'; import 'package:flutter/material.dart'; import 'package:flutter_test/flutter_test.dart'; import 'package:agora_rtc_engine/src/impl/agora_rtc_engine_impl.dart'; +import 'package:agora_rtc_engine/src/impl/native_iris_api_engine_binding_delegate.dart'; import '../fake/fake_iris_method_channel.dart'; class _RenderViewWidget extends StatefulWidget { @@ -78,7 +79,8 @@ class _RenderViewWidgetState extends 
State<_RenderViewWidget> { void testCases() { group('Test with FakeIrisMethodChannel', () { - final FakeIrisMethodChannel irisMethodChannel = FakeIrisMethodChannel(); + final FakeIrisMethodChannel irisMethodChannel = + FakeIrisMethodChannel(IrisApiEngineNativeBindingDelegateProvider()); final RtcEngine rtcEngine = RtcEngineImpl.create(irisMethodChannel: irisMethodChannel); @@ -137,8 +139,6 @@ void testCases() { skip: !(Platform.isAndroid || Platform.isIOS), ); - - testWidgets( 'Switch local/remote AgoraVideoView with RtcConnection', (WidgetTester tester) async { diff --git a/test_shard/integration_test_app/integration_test/testcases/mediaengine_smoke_test_testcases.dart b/test_shard/integration_test_app/integration_test/testcases/mediaengine_smoke_test_testcases.dart index 60c3c585a..fffef6b01 100644 --- a/test_shard/integration_test_app/integration_test/testcases/mediaengine_smoke_test_testcases.dart +++ b/test_shard/integration_test_app/integration_test/testcases/mediaengine_smoke_test_testcases.dart @@ -113,23 +113,27 @@ void testCases() { Completer onPreEncodeVideoFrameCalledCompleter = Completer(); final VideoFrameObserver observer = VideoFrameObserver( - onCaptureVideoFrame: (videoFrame) { - debugPrint('[onCaptureVideoFrame] videoFrame: ${videoFrame.toJson()}'); - if (eventCalledCompleter.isCompleted) return; - eventCalledCompleter.complete(true); - }, onRenderVideoFrame: - (String channelId, int remoteUid, VideoFrame videoFrame) { - // logSink.log( - // '[onRenderVideoFrame] channelId: $channelId, remoteUid: $remoteUid, videoFrame: ${videoFrame.toJson()}'); - debugPrint( - '[onRenderVideoFrame] channelId: $channelId, remoteUid: $remoteUid, videoFrame: ${videoFrame.toJson()}'); - if (onRenderVideoFrameCalledCompleter.isCompleted) return; - onRenderVideoFrameCalledCompleter.complete(true); - }, onPreEncodeVideoFrame: (VideoFrame videoFrame) { - debugPrint('[onPreEncodeVideoFrame] videoFrame: ${videoFrame.toJson()}'); - if (onPreEncodeVideoFrameCalledCompleter.isCompleted) return; - onPreEncodeVideoFrameCalledCompleter.complete(true); - }); + onCaptureVideoFrame: (sourceType, videoFrame) { + debugPrint('[onCaptureVideoFrame] videoFrame: ${videoFrame.toJson()}'); + if (eventCalledCompleter.isCompleted) return; + eventCalledCompleter.complete(true); + }, + onRenderVideoFrame: + (String channelId, int remoteUid, VideoFrame videoFrame) { + // logSink.log( + // '[onRenderVideoFrame] channelId: $channelId, remoteUid: $remoteUid, videoFrame: ${videoFrame.toJson()}'); + debugPrint( + '[onRenderVideoFrame] channelId: $channelId, remoteUid: $remoteUid, videoFrame: ${videoFrame.toJson()}'); + if (onRenderVideoFrameCalledCompleter.isCompleted) return; + onRenderVideoFrameCalledCompleter.complete(true); + }, + onPreEncodeVideoFrame: (sourceType, VideoFrame videoFrame) { + debugPrint( + '[onPreEncodeVideoFrame] videoFrame: ${videoFrame.toJson()}'); + if (onPreEncodeVideoFrameCalledCompleter.isCompleted) return; + onPreEncodeVideoFrameCalledCompleter.complete(true); + }, + ); mediaEngine.registerVideoFrameObserver( observer, diff --git a/test_shard/integration_test_app/integration_test/testcases/mediaplayer_smoke_test_testcases.dart b/test_shard/integration_test_app/integration_test/testcases/mediaplayer_smoke_test_testcases.dart index d9274ba2c..d1b5793a2 100644 --- a/test_shard/integration_test_app/integration_test/testcases/mediaplayer_smoke_test_testcases.dart +++ b/test_shard/integration_test_app/integration_test/testcases/mediaplayer_smoke_test_testcases.dart @@ -7,7 +7,6 @@ import 
'package:integration_test/integration_test.dart'; import 'package:integration_test_app/main.dart' as app; void testCases() { - testWidgets( 'registerAudioFrameObserver smoke test', (WidgetTester tester) async { @@ -40,15 +39,14 @@ void testCases() { .registerPlayerSourceObserver(mediaPlayerSourceObserver); Completer? eventCalledCompleter = Completer(); - final MediaPlayerAudioFrameObserver observer = - MediaPlayerAudioFrameObserver( + final AudioPcmFrameSink observer = AudioPcmFrameSink( onFrame: (AudioPcmFrame frame) { if (eventCalledCompleter == null) return; eventCalledCompleter.complete(true); }, ); mediaPlayerController.registerAudioFrameObserver( - observer, + observer: observer, ); await rtcEngine.enableVideo(); diff --git a/test_shard/iris_tester/lib/iris_tester.dart b/test_shard/iris_tester/lib/iris_tester.dart index 93247fb11..e2f0e34b7 100644 --- a/test_shard/iris_tester/lib/iris_tester.dart +++ b/test_shard/iris_tester/lib/iris_tester.dart @@ -27,22 +27,20 @@ class IrisTester { IrisTester({NativeIrisTesterBinding? nativeIrisTesterBinding}) { _nativeIrisTesterBinding = nativeIrisTesterBinding ?? NativeIrisTesterBinding(_loadLib()); - _debugApiEnginePtr = _nativeIrisTesterBinding.CreateDebugApiEngine(); } late final NativeIrisTesterBinding _nativeIrisTesterBinding; - late final ffi.Pointer _debugApiEnginePtr; + late ffi.Pointer _fakeRtcEngineHandle; - @Deprecated('Use getDebugApiEngineNativeHandle instead.') - int createDebugApiEngine() { - return _debugApiEnginePtr.address; + int getfakeRtcEngineHandle() { + return _fakeRtcEngineHandle.address; } - int getDebugApiEngineNativeHandle() { - return _debugApiEnginePtr.address; + void initialize() { + _fakeRtcEngineHandle = _nativeIrisTesterBinding.CreateFakeRtcEngine(); } void dispose() { - calloc.free(_debugApiEnginePtr); + calloc.free(_fakeRtcEngineHandle); } void expectCalled(String funcName, String params) { @@ -94,8 +92,8 @@ class IrisTester { ..ref.length = bufferListLengthPtr ..ref.buffer_count = bufferLength; - final ret = _nativeIrisTesterBinding.TriggerEventWithFakeApiEngine( - _debugApiEnginePtr, apiParam); + final ret = _nativeIrisTesterBinding.TriggerEventWithFakeRtcEngine( + _fakeRtcEngineHandle, apiParam); if (ret != 0) { debugPrint( diff --git a/test_shard/rendering_test/integration_test/common/screenshot_matcher_ext.dart b/test_shard/rendering_test/integration_test/common/screenshot_matcher_ext.dart index 88ea1c2ba..711e019ed 100644 --- a/test_shard/rendering_test/integration_test/common/screenshot_matcher_ext.dart +++ b/test_shard/rendering_test/integration_test/common/screenshot_matcher_ext.dart @@ -75,7 +75,13 @@ Future matchScreenShotDesktop( ); debugPrint('compareImages result: $result'); - expect(result < 0.01, isTrue); + if (Platform.isMacOS) { + // TODO(littlegnal): Need more tolerance after upgrade to the native sdk 4.2.0, see if + // we can reduce the result later. 
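Context for the branch below: compareImages yields a difference score where smaller means closer to the golden image, and the macOS baselines shifted slightly with the 4.2.0 native renderer, hence the temporarily looser macOS bound. A one-line helper expressing the same policy; screenshotTolerance is our name, the thresholds are the ones used below:

import 'dart:io';

// Golden-screenshot mismatch tolerance: looser on macOS until the
// 4.2.0 renderer drift is reduced (see the TODO above).
double screenshotTolerance() => Platform.isMacOS ? 0.07 : 0.01;

so the assertion reads expect(result < screenshotTolerance(), isTrue).
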
+      expect(result < 0.07, isTrue);
+    } else {
+      expect(result < 0.01, isTrue);
+    }
   }
 
   return;
diff --git a/test_shard/rendering_test/integration_test/common/widget_tester_ext.dart b/test_shard/rendering_test/integration_test/common/widget_tester_ext.dart
index dc654fedc..8f882b008 100644
--- a/test_shard/rendering_test/integration_test/common/widget_tester_ext.dart
+++ b/test_shard/rendering_test/integration_test/common/widget_tester_ext.dart
@@ -3,9 +3,10 @@ import 'package:flutter_test/flutter_test.dart';
 import 'package:integration_test/integration_test.dart';
 
 Future waitFrame(WidgetTester tester) async {
-  await tester.pumpAndSettle(const Duration(seconds: 10));
-  // Need `pumpAndSettle` again since there's a `setState` logic inside the `AgoraVideoView`
-  await tester.pumpAndSettle(const Duration(seconds: 10));
+  // Call `pumpAndSettle` more times to ensure the video rendered
+  for (int i = 0; i < 5; i++) {
+    await tester.pumpAndSettle(const Duration(seconds: 10));
+  }
 }
 
 Future waitDisposed(
diff --git a/test_shard/rendering_test/integration_test/local_video_view.dart b/test_shard/rendering_test/integration_test/local_video_view.dart
index 908f7fed3..06fcce380 100644
--- a/test_shard/rendering_test/integration_test/local_video_view.dart
+++ b/test_shard/rendering_test/integration_test/local_video_view.dart
@@ -33,6 +33,7 @@ class _LocalVideoViewState extends State {
   late final MediaPlayerController mediaPlayerController;
   late final MediaPlayerVideoFrameObserver observer;
   late final MediaPlayerSourceObserver mediaPlayerSourceObserver;
+  bool _isOnFrameCalled = false;
 
   @override
   void initState() {
diff --git a/test_shard/rendering_test/integration_test/remote_video_view.dart b/test_shard/rendering_test/integration_test/remote_video_view.dart
index 89a5b71c3..78944bfcd 100644
--- a/test_shard/rendering_test/integration_test/remote_video_view.dart
+++ b/test_shard/rendering_test/integration_test/remote_video_view.dart
@@ -35,6 +35,7 @@ class _RemoteVideoViewState extends State {
   late final MediaPlayerSourceObserver mediaPlayerSourceObserver;
   late final VideoFrameObserver videoFrameObserver;
   bool isMpkJoined = false;
+  bool _isOnFrameCalled = false;
 
   static const int _myUid = 12345;
   static const int _mpkRemoteUid = 67890;

[GIT binary patches omitted: regenerated macOS golden screenshots under test_shard/rendering_test/screenshot/ (macos.agora_video_view_render.texture.local.*.png and macos.agora_video_view_render.texture.remote.*.png, covering all local/remote render-mode and mirror-mode variants). The payloads are base85-encoded PNG deltas with no human-readable content; the new baselines reflect the slightly different output of the 4.2.0 native renderer noted in the TODO above.]

diff --git a/tool/ffi_gen/ffigen_config.yaml b/tool/ffi_gen/ffigen_config.yaml
new file mode 100644
index 000000000..0e20a1d08
--- /dev/null
+++ b/tool/ffi_gen/ffigen_config.yaml
@@ -0,0 +1,19 @@
+name: 'NativeIrisApiEngineBinding'
+description: 'Bindings to IrisApiEngine'
+
+output: 'lib/src/impl/native_iris_api_engine_bindings.dart'
+
+headers:
+  entry-points:
+    - 'tmp_ffi_gen_include/iris_rtc_c_api.h'
+  include-directives:
+    - 'tmp_ffi_gen_include/iris_rtc_c_api.h'
+    - 'tmp_ffi_gen_include/iris_base.h'
+    - 'tmp_ffi_gen_include/iris_video_processor_c.h'
+    - 'tmp_ffi_gen_include/iris_rtc_rendering_c.h'
+
+preamble: |
+  // ignore_for_file: camel_case_types, non_constant_identifier_names
+
+llvm-path:
+  - '/usr/local/opt/llvm'
\ No newline at end of file
diff --git a/tool/ffi_gen/run_ffi_gen.sh b/tool/ffi_gen/run_ffi_gen.sh new file mode 100644 index
diff --git a/tool/ffi_gen/run_ffi_gen.sh b/tool/ffi_gen/run_ffi_gen.sh new file mode 100644 index 000000000..fadcb1df7 --- /dev/null +++ b/tool/ffi_gen/run_ffi_gen.sh @@ -0,0 +1,24 @@ +#!/usr/bin/env bash +set -e +set -x + +IRIS_PATH=$1 +MY_PATH=$(realpath $(dirname "$0")) +PROJECT_ROOT=$(realpath ${MY_PATH}/../..) +TMP_FFI_GEN_INCLUDE_DIR_NAME=${PROJECT_ROOT}/tmp_ffi_gen_include + +rm -rf ${TMP_FFI_GEN_INCLUDE_DIR_NAME} +mkdir ${TMP_FFI_GEN_INCLUDE_DIR_NAME} + +cp -RP ${IRIS_PATH}/src/interface/* ${TMP_FFI_GEN_INCLUDE_DIR_NAME} +cp -RP ${IRIS_PATH}/src/interface/rtc/* ${TMP_FFI_GEN_INCLUDE_DIR_NAME} +# cp -RP ${MY_PATH}/ffigen_config.yaml ${TMP_FFI_GEN_INCLUDE_DIR_NAME}/ffigen_config.yaml + +pushd ${PROJECT_ROOT} + +flutter packages get +flutter pub run ffigen --config=${MY_PATH}/ffigen_config.yaml + +popd + +rm -rf ${TMP_FFI_GEN_INCLUDE_DIR_NAME} \ No newline at end of file diff --git a/tool/terra/terra_config_main.yaml b/tool/terra/terra_config_main.yaml index e19e7c42a..3d9b61934 100644 --- a/tool/terra/terra_config_main.yaml +++ b/tool/terra/terra_config_main.yaml @@ -1,9 +1,9 @@ -include: shared:rtc_4.1.0/shared_configs.yaml +include: shared:rtc_4.2.0/shared_configs.yaml language: dart legacy_renders: - - DartSyntaxRenderBeforeNative420 + - DartSyntaxRender - DartEventHandlerParamJsonRender - DartCallApiIrisMethodChannelRender - DartEventHandlerIrisMethodChannelRender diff --git a/tool/testcase_gen/bin/event_handler_gen_config.dart b/tool/testcase_gen/bin/event_handler_gen_config.dart index 66acc4b33..b66fc0398 100644 --- a/tool/testcase_gen/bin/event_handler_gen_config.dart +++ b/tool/testcase_gen/bin/event_handler_gen_config.dart @@ -17,16 +17,12 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { {{TEST_CASES_CONTENT}} } ''', testCaseTemplate: ''' testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -45,6 +41,9 @@ testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { eventPrefixOverride: 'RtcEngineEventHandlerEx', registerFunctionName: 'registerEventHandler', unregisterFunctionName: 'unregisterEventHandler', + skipMemberFunctions: [ + 'onFacePositionChanged', + ], ), EventHandlerTemplatedTestCase( callerObjClassName:
'RtcEngine', @@ -103,16 +97,12 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { {{TEST_CASES_CONTENT}} } ''', testCaseTemplate: ''' testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -145,16 +135,12 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { {{TEST_CASES_CONTENT}} } ''', testCaseTemplate: ''' testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -187,16 +173,12 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { {{TEST_CASES_CONTENT}} } ''', testCaseTemplate: ''' testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -230,16 +212,12 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { {{TEST_CASES_CONTENT}} } ''', testCaseTemplate: ''' testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -273,16 +251,12 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { {{TEST_CASES_CONTENT}} } ''', testCaseTemplate: ''' testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -316,16 +290,12 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester 
irisTester) { {{TEST_CASES_CONTENT}} } ''', testCaseTemplate: ''' testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -361,16 +331,12 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { {{TEST_CASES_CONTENT}} } ''', testCaseTemplate: ''' testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -394,7 +360,7 @@ testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { ), EventHandlerTemplatedTestCase( callerObjClassName: 'MediaPlayer', - className: 'MediaPlayerAudioFrameObserver', + className: 'AudioPcmFrameSink', testCaseFileTemplate: ''' $defaultHeader @@ -406,16 +372,12 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { {{TEST_CASES_CONTENT}} } ''', testCaseTemplate: ''' testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -451,16 +413,12 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { {{TEST_CASES_CONTENT}} } ''', testCaseTemplate: ''' testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', @@ -496,27 +454,24 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { {{TEST_CASES_CONTENT}} } ''', testCaseTemplate: ''' testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id', areaCode: AreaCode.areaCodeGlob.value(), )); - final mediaRecorder = rtcEngine.getMediaRecorder(); + final mediaRecorder = (await rtcEngine.createMediaRecorder( + RecorderStreamInfo(channelId: 'hello', uid: 0)))!; {{TEST_CASE_BODY}} - await mediaRecorder.release(); + await rtcEngine.destroyMediaRecorder(mediaRecorder); await rtcEngine.release(); }, timeout: const Timeout(Duration(minutes: 1)), @@ -541,16 +496,12 @@ import 'package:flutter_test/flutter_test.dart'; import 'package:iris_tester/iris_tester.dart'; import 'package:iris_method_channel/iris_method_channel.dart'; -void generatedTestCases() { +void generatedTestCases(IrisTester irisTester) { {{TEST_CASES_CONTENT}} } ''', testCaseTemplate: ''' testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - RtcEngine rtcEngine = createAgoraRtcEngine(); await rtcEngine.initialize(RtcEngineContext( appId: 'app_id',
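With this change the generated suites share one IrisTester created by the caller instead of constructing one inside every testWidgets body. A minimal sketch of the calling side under the new generatedTestCases(IrisTester) signature (the import path and function placement are illustrative; the real wiring lives in test_shard/fake_test_app/lib/main.dart):

import 'package:iris_tester/iris_tester.dart';

// Illustrative import of one generated suite; the real app imports all of them.
import 'generated/rtcengine_rtcengineeventhandler_testcases.generated.dart' as suite;

void main() {
  // Create the tester once and hand its debug engine handle to the mock
  // method channel, exactly as the removed per-test template lines did.
  // setMockIrisMethodChannelNativeHandle comes from the plugin's test
  // utilities; its import is elided in this sketch.
  final irisTester = IrisTester();
  final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle();
  setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr);

  // Every generated suite now reuses the same tester.
  suite.generatedTestCases(irisTester);
}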
diff --git a/tool/testcase_gen/bin/method_call_gen_config.dart b/tool/testcase_gen/bin/method_call_gen_config.dart index ca3bcf76b..d2776854b 100644 --- a/tool/testcase_gen/bin/method_call_gen_config.dart +++ b/tool/testcase_gen/bin/method_call_gen_config.dart @@ -79,10 +79,6 @@ void rtcEngineSmokeTestCases() { ''', testCaseTemplate: ''' testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -114,6 +110,7 @@ testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { outputDir: outputDir, skipMemberFunctions: [ 'destroyMediaPlayer', + 'destroyMediaRecorder', // These cases should handle the list size manually. 'setLocalAccessPoint', 'startChannelMediaRelay', @@ -122,6 +119,9 @@ testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { 'setSubscribeAudioAllowlist', 'setSubscribeVideoBlocklist', 'setSubscribeVideoAllowlist', + 'queryCodecCapability', + 'setHighPriorityUserList', + 'startOrUpdateChannelMediaRelay', ], outputFileSuffixName: 'fake_test', ), @@ -144,10 +144,6 @@ void rtcEngineExSmokeTestCases() { ''', testCaseTemplate: ''' testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -186,6 +182,8 @@ testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { 'setSubscribeVideoAllowlistEx', 'startChannelMediaRelayEx', 'updateChannelMediaRelayEx', + 'startOrUpdateChannelMediaRelayEx', + 'setHighPriorityUserListEx', ], outputFileSuffixName: 'fake_test', ), @@ -207,10 +205,6 @@ void audioDeviceManagerSmokeTestCases() { ''', testCaseTemplate: ''' testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -264,10 +258,6 @@ void videoDeviceManagerSmokeTestCases() { ''', testCaseTemplate: ''' testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -323,10
+313,6 @@ void mediaPlayerControllerSmokeTestCases() { ''', testCaseTemplate: ''' testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -384,10 +370,6 @@ void mediaEngineSmokeTestCases() { ''', testCaseTemplate: ''' testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -442,10 +424,6 @@ void mediaRecorderSmokeTestCases() { ''', testCaseTemplate: ''' testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -455,7 +433,8 @@ testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { areaCode: AreaCode.areaCodeGlob.value(), )); - final mediaRecorder = rtcEngine.getMediaRecorder(); + final mediaRecorder = (await rtcEngine.createMediaRecorder( + RecorderStreamInfo(channelId: 'hello', uid: 0)))!; try { {{TEST_CASE_BODY}} @@ -471,7 +450,7 @@ testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { } } - await mediaRecorder.release(); + await rtcEngine.destroyMediaRecorder(mediaRecorder); await rtcEngine.release(); }, // skip: {{TEST_CASE_SKIP}}, @@ -500,10 +479,6 @@ void localSpatialAudioEngineSmokeTestCases() { ''', testCaseTemplate: ''' testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -558,10 +533,6 @@ void localSpatialAudioEngineSmokeTestCases() { ''', testCaseTemplate: ''' testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: ''); @@ -619,10 +590,6 @@ void musicContentCenterSmokeTestCases() { ''', testCaseTemplate: ''' testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - String engineAppId = const String.fromEnvironment('TEST_APP_ID', defaultValue: '');
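The recorder templates reflect a 4.2.0 API change: MediaRecorder is no longer a per-engine singleton obtained with getMediaRecorder, but is created per recorded stream and destroyed explicitly through the engine. A minimal migration sketch (channelId and uid are placeholder values, and rtcEngine is assumed to be an initialized RtcEngine):

// 4.1.x: final mediaRecorder = rtcEngine.getMediaRecorder();
// 4.2.0: create one recorder per recorded stream.
final mediaRecorder = (await rtcEngine.createMediaRecorder(
    RecorderStreamInfo(channelId: 'hello', uid: 0)))!;

// ... setMediaRecorderObserver / startRecording / stopRecording ...

// Release it through the engine instead of mediaRecorder.release().
await rtcEngine.destroyMediaRecorder(mediaRecorder);
await rtcEngine.release();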
diff --git a/tool/testcase_gen/build.sh b/tool/testcase_gen/build.sh new file mode 100644 index 000000000..ec3b9abde --- /dev/null +++ b/tool/testcase_gen/build.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash +set -e +set -x + +MY_PATH=$(realpath $(dirname "$0")) +PROJECT_ROOT=$(realpath ${MY_PATH}/../../) + +dart pub get + +dart run ${MY_PATH}/bin/testcase_gen.dart \ + --gen-fake-test --output-dir=${PROJECT_ROOT}/test_shard/fake_test_app/integration_test/generated + +dart run ${MY_PATH}/bin/testcase_gen.dart \ + --gen-integration-test --output-dir=${PROJECT_ROOT}/test_shard/integration_test_app/integration_test/generated \ No newline at end of file diff --git a/tool/testcase_gen/lib/templated_generator.dart b/tool/testcase_gen/lib/templated_generator.dart index fc463949a..06e1e1b1d 100644 --- a/tool/testcase_gen/lib/templated_generator.dart +++ b/tool/testcase_gen/lib/templated_generator.dart @@ -14,6 +14,7 @@ abstract class TemplatedTestCase { required this.testCaseTemplate, required this.outputDir, this.outputFileSuffixName = 'testcases', + this.skipMemberFunctions = const [], }); final String className; @@ -21,6 +22,7 @@ abstract class TemplatedTestCase { final String testCaseTemplate; final String outputDir; final String outputFileSuffixName; + final List<String> skipMemberFunctions; } class MethoCallTemplatedTestCase extends TemplatedTestCase { @@ -30,7 +32,7 @@ class MethoCallTemplatedTestCase extends TemplatedTestCase { required String testCaseTemplate, required String outputDir, required this.methodInvokeObjectName, - this.skipMemberFunctions = const [], + List<String> skipMemberFunctions = const [], required String outputFileSuffixName, }) : super( className: className, @@ -38,10 +40,10 @@ class MethoCallTemplatedTestCase extends TemplatedTestCase { testCaseTemplate: testCaseTemplate, outputDir: outputDir, outputFileSuffixName: outputFileSuffixName, + skipMemberFunctions: skipMemberFunctions, ); final String methodInvokeObjectName; - final List<String> skipMemberFunctions; } class EventHandlerTemplatedTestCase extends TemplatedTestCase { @@ -56,11 +58,13 @@ class EventHandlerTemplatedTestCase extends TemplatedTestCase { required this.registerFunctionName, required this.unregisterFunctionName, this.isUpperFirstCaseOfEventName = false, + List<String> skipMemberFunctions = const [], }) : super( className: className, testCaseFileTemplate: testCaseFileTemplate, testCaseTemplate: testCaseTemplate, outputDir: outputDir, + skipMemberFunctions: skipMemberFunctions, ); final String callerObjClassName; @@ -95,10 +99,26 @@ class TemplatedGenerator extends DefaultGenerator { outputFileName = '${templated.className.toLowerCase()}_${templated.outputFileSuffixName}.generated.dart'; } else if (templated is EventHandlerTemplatedTestCase) { + late Clazz templatedCallerObjClazz; + late Clazz templatedClazz; + try { + templatedCallerObjClazz = + parseResult.getClazz(templated.callerObjClassName)[0]; + } catch (e) { + stderr.writeln( + 'Cannot find the callerObjClassName: ${templated.callerObjClassName}; make sure the class name is correct.'); + } + + try { + templatedClazz = parseResult.getClazz(templated.className)[0]; + } catch (e) { + stderr.writeln( + 'Cannot find the className: ${templated.className}; make sure the class name is correct.'); + } output = _generateEventHandlerCasesWithTemplate( parseResult: parseResult, - callerObjClazz: parseResult.getClazz(templated.callerObjClassName)[0], - eventHandlerClazz: parseResult.getClazz(templated.className)[0], + callerObjClazz: templatedCallerObjClazz, + eventHandlerClazz: templatedClazz, testCaseTemplate: templated.testCaseFileTemplate, testCasesContentTemplate: templated.testCaseTemplate, callerObjName: templated.callerObjName, @@ -106,6 +126,7 @@ class TemplatedGenerator extends DefaultGenerator { registerFunctionName: templated.registerFunctionName, unregisterFunctionName: templated.unregisterFunctionName, isUpperFirstCaseOfEventName: templated.isUpperFirstCaseOfEventName, + skipMemberFunctions: templated.skipMemberFunctions, ); outputFileName =
'${templated.callerObjClassName.toLowerCase()}_${templated.className.toLowerCase()}_${templated.outputFileSuffixName}.generated.dart'; @@ -136,6 +157,7 @@ class TemplatedGenerator extends DefaultGenerator { required String registerFunctionName, required String unregisterFunctionName, required bool isUpperFirstCaseOfEventName, + List<String> skipMemberFunctions = const [], }) { // final fields = clazz.fields; final eventHandlerName = 'the${eventHandlerClazz.name}'; @@ -168,6 +190,10 @@ class TemplatedGenerator extends DefaultGenerator { String eventName = field.name; + if (skipMemberFunctions.contains(eventName)) { + continue; + } + StringBuffer jsonBuffer = StringBuffer(); StringBuffer pb = StringBuffer(); _createParameterInitializedList( @@ -278,7 +304,6 @@ await Future.delayed(const Duration(milliseconds: 500)); if (functionName == methodName) { StringBuffer pb = StringBuffer(); - _createParameterInitializedList( parseResult, pb, method.parameters, [eventHandlerClazz.name]); diff --git a/windows/CMakeLists.txt b/windows/CMakeLists.txt index 60eeb7cba..4e9271475 100644 --- a/windows/CMakeLists.txt +++ b/windows/CMakeLists.txt @@ -12,20 +12,24 @@ project(${PROJECT_NAME} LANGUAGES CXX) # not be changed set(PLUGIN_NAME "agora_rtc_engine_plugin") -set(IRIS_SDK_DOWNLOAD_URL "https://download.agora.io/sdk/release/iris_4.1.0_DCG_Windows_Video_20221220_0216.zip") -set(IRIS_SDK_DOWNLOAD_NAME "iris_4.1.0_DCG_Windows") +set(IRIS_SDK_DOWNLOAD_URL "https://download.agora.io/sdk/release/iris_4.2.0-build.3_DCG_Windows_Video_20230525_0541.zip") +set(IRIS_SDK_DOWNLOAD_NAME "iris_4.2.0-build.3_DCG_Windows") set(RTC_SDK_DOWNLOAD_NAME "Agora_Native_SDK_for_Windows_FULL") set(IRIS_SDK_VERSION "v3_6_2_fix.1") # Add this project's cmake/ directory to the module path. set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}/cmake") +message("CMAKE_CURRENT_SOURCE_DIR: ${CMAKE_CURRENT_SOURCE_DIR}") + set(IRIS_DOWNLOAD_PATH "${CMAKE_CURRENT_SOURCE_DIR}/third_party/iris") set(IRIS_SDK_PATH "${IRIS_DOWNLOAD_PATH}/${IRIS_SDK_DOWNLOAD_NAME}/x64") set(IRIS_SDK_BIN_PATH "${IRIS_SDK_PATH}/Release") -set(RTC_SDK_PATH "${IRIS_DOWNLOAD_PATH}/${IRIS_SDK_DOWNLOAD_NAME}/DCG") -set(RTC_SDK_BIN_PATH "${RTC_SDK_PATH}/${RTC_SDK_DOWNLOAD_NAME}/sdk/x86_64") +set(RTC_SDK_PATH "${IRIS_DOWNLOAD_PATH}/${IRIS_SDK_DOWNLOAD_NAME}/DCG/${RTC_SDK_DOWNLOAD_NAME}/sdk") +set(RTC_SDK_BIN_PATH "${RTC_SDK_PATH}/x86_64") + +message("RTC_SDK_PATH: ${RTC_SDK_PATH}") if(NOT EXISTS "${CMAKE_CURRENT_SOURCE_DIR}/.plugin_dev") # Download and extract the SDK binary distribution (executes DownloadSDK.cmake). @@ -63,8 +67,9 @@ target_compile_definitions(${PLUGIN_NAME} PRIVATE FLUTTER_PLUGIN_IMPL) target_include_directories(${PLUGIN_NAME} INTERFACE "${CMAKE_CURRENT_SOURCE_DIR}/include" PRIVATE - "${IRIS_SDK_PATH}/include" - ) + "${IRIS_DOWNLOAD_PATH}/${IRIS_SDK_DOWNLOAD_NAME}/include" +) + target_link_libraries(${PLUGIN_NAME} PRIVATE flutter flutter_wrapper_plugin @@ -88,10 +93,17 @@ foreach (RTC_LIB ${RTC_LIBS}) list(APPEND BUNDLED_LIBRARIES ${RTC_LIB}) endforeach (RTC_LIB ${RTC_LIBS}) +message("include dir: ${IRIS_DOWNLOAD_PATH}/${IRIS_SDK_DOWNLOAD_NAME}/include") +file(GLOB HEADERS ${IRIS_DOWNLOAD_PATH}/${IRIS_SDK_DOWNLOAD_NAME}/include/*.h) +foreach (HEADER ${HEADERS}) + message("Dir: ${IRIS_DOWNLOAD_PATH}/${IRIS_SDK_DOWNLOAD_NAME}/include") + message("Header: ${HEADER}") +endforeach (HEADER ${HEADERS}) + # List of absolute paths to libraries that should be bundled with the plugin.
# This list could contain prebuilt libraries, or libraries created by an # external build triggered from this build file. set(agora_rtc_engine_bundled_libraries ${BUNDLED_LIBRARIES} PARENT_SCOPE -) +) \ No newline at end of file diff --git a/windows/cmake/DownloadSDK.cmake b/windows/cmake/DownloadSDK.cmake index a2fbc16b1..b72412501 100644 --- a/windows/cmake/DownloadSDK.cmake +++ b/windows/cmake/DownloadSDK.cmake @@ -62,5 +62,21 @@ function(DOWNLOAD_SDK_BY_URL download_url download_dir) COMMAND ${CMAKE_COMMAND} -E tar xzf "${SDK_DOWNLOAD_DIR}/${SDK_DOWNLOAD_FILENAME}" WORKING_DIRECTORY ${SDK_DOWNLOAD_DIR} ) + + STRING(REGEX REPLACE "(_Video_[0-9]+_[0-9]+)$" "" IRIS_EXTRACTED_DIR_NAME ${SDK_DISTRIBUTION}) + + set(THIRD_PARTY_INCLUDE_DIR "${SDK_DOWNLOAD_DIR}/${IRIS_EXTRACTED_DIR_NAME}/include") + file(MAKE_DIRECTORY ${THIRD_PARTY_INCLUDE_DIR}) + + # Copy all third-party headers into a single include dir to avoid a deeply nested file hierarchy + message(STATUS "Copy iris headers to ${THIRD_PARTY_INCLUDE_DIR}") + file(GLOB IRIS_PUBLIC_HEADERS ${SDK_DOWNLOAD_DIR}/${IRIS_EXTRACTED_DIR_NAME}/x64/include/*.h) + file(COPY ${IRIS_PUBLIC_HEADERS} + DESTINATION ${THIRD_PARTY_INCLUDE_DIR}) + + message(STATUS "Copy native sdk headers to ${THIRD_PARTY_INCLUDE_DIR}") + file(GLOB NATIVE_SDK_PUBLIC_HEADERS ${SDK_DOWNLOAD_DIR}/${IRIS_EXTRACTED_DIR_NAME}/DCG/Agora_Native_SDK_for_Windows_FULL/sdk/high_level_api/include/*.h) + file(COPY ${NATIVE_SDK_PUBLIC_HEADERS} + DESTINATION ${THIRD_PARTY_INCLUDE_DIR}) endif () endfunction() diff --git a/windows/include/agora_rtc_engine/texture_render.h b/windows/include/agora_rtc_engine/texture_render.h index e7e74ba11..f001afc7c 100644 --- a/windows/include/agora_rtc_engine/texture_render.h +++ b/windows/include/agora_rtc_engine/texture_render.h @@ -8,24 +8,23 @@ #include #include -#include "iris_rtc_raw_data.h" -#include "iris_video_processor_cxx.h" +#include "iris_rtc_rendering_cxx.h" -class TextureRender : public agora::iris::IrisVideoFrameBufferDelegate +class TextureRender : public agora::iris::VideoFrameObserverDelegate { public: TextureRender(flutter::BinaryMessenger *messenger, flutter::TextureRegistrar *registrar, - agora::iris::IrisVideoFrameBufferManager *videoFrameBufferManager); + agora::iris::IrisRtcRendering *iris_rtc_rendering); virtual ~TextureRender(); int64_t texture_id(); - virtual void OnVideoFrameReceived(const IrisVideoFrame &video_frame, - const IrisVideoFrameBufferConfig *config, + virtual void OnVideoFrameReceived(const void *videoFrame, + const IrisRtcVideoFrameConfig &config, bool resize) override; - void UpdateData(unsigned int uid, const std::string &channelId, unsigned int videoSourceType); + void UpdateData(unsigned int uid, const std::string &channelId, unsigned int videoSourceType, unsigned int videoViewSetupMode); // Checks if texture registrar, texture id and texture are available.
bool TextureRegistered() @@ -33,12 +32,14 @@ class TextureRender : public agora::iris::IrisVideoFrameBufferDelegate return registrar_ && texture_ && texture_id_ > -1; } + void Dispose(); + private: const FlutterDesktopPixelBuffer *CopyPixelBuffer(size_t width, size_t height); public: flutter::TextureRegistrar *registrar_; - agora::iris::IrisVideoFrameBufferManager *videoFrameBufferManager_; + agora::iris::IrisRtcRendering *iris_rtc_rendering_; std::unique_ptr<flutter::MethodChannel<flutter::EncodableValue>> method_channel_; int64_t texture_id_ = -1; @@ -51,6 +52,12 @@ class TextureRender : public agora::iris::IrisVideoFrameBufferDelegate std::unique_ptr<flutter::TextureVariant> texture_; std::unique_ptr<FlutterDesktopPixelBuffer> flutter_desktop_pixel_buffer_ = nullptr; + + // IrisRtcVideoFrameConfig config_; + + int delegate_id_; + + bool is_dirty_; }; #endif // TEXTURE_RENDER_H_ \ No newline at end of file diff --git a/windows/include/agora_rtc_engine/video_view_controller.h b/windows/include/agora_rtc_engine/video_view_controller.h index 667c3b18c..16ff3006c 100644 --- a/windows/include/agora_rtc_engine/video_view_controller.h +++ b/windows/include/agora_rtc_engine/video_view_controller.h @@ -26,10 +26,11 @@ class VideoViewController bool DestroyPlatformRender(int64_t platformRenderId); int64_t CreateTextureRender( - const intptr_t &videoFrameBufferManagerNativeHandle, + const intptr_t &irisRtcRenderingHandle, unsigned int uid, const std::string &channelId, - unsigned int videoSourceType); + unsigned int videoSourceType, + unsigned int videoViewSetupMode); bool DestroyTextureRender(int64_t textureId); diff --git a/windows/texture_render.cc b/windows/texture_render.cc index dfd2fbb61..a4551d904 100644 --- a/windows/texture_render.cc +++ b/windows/texture_render.cc @@ -2,18 +2,17 @@ #include -#include "iris_rtc_raw_data.h" -#include "iris_video_processor_cxx.h" +#include "AgoraMediaBase.h" using namespace flutter; -using namespace agora::iris; -using namespace agora::iris::rtc; TextureRender::TextureRender(flutter::BinaryMessenger *messenger, flutter::TextureRegistrar *registrar, - agora::iris::IrisVideoFrameBufferManager *videoFrameBufferManager) + agora::iris::IrisRtcRendering *iris_rtc_rendering) : registrar_(registrar), - videoFrameBufferManager_(videoFrameBufferManager) + iris_rtc_rendering_(iris_rtc_rendering), + delegate_id_(agora::iris::INVALID_DELEGATE_ID), + is_dirty_(false) { // Create flutter desktop pixelbuffer texture; texture_ = @@ -34,50 +33,43 @@ TextureRender::TextureRender(flutter::BinaryMessenger *messenger, TextureRender::~TextureRender() { - if (videoFrameBufferManager_) - { - videoFrameBufferManager_->DisableVideoFrameBuffer(this); - videoFrameBufferManager_ = nullptr; - } - - const std::lock_guard lock(buffer_mutex_); - - if (registrar_ && texture_id_ != -1) - { - registrar_->UnregisterTexture(texture_id_); - - registrar_ = nullptr; - texture_id_ = -1; - } + Dispose(); } int64_t TextureRender::texture_id() { return texture_id_; } -void TextureRender::OnVideoFrameReceived(const IrisVideoFrame &video_frame, - const IrisVideoFrameBufferConfig *config, +void TextureRender::OnVideoFrameReceived(const void *videoFrame, + const IrisRtcVideoFrameConfig &config, bool resize) { std::lock_guard lock_guard(buffer_mutex_); - const uint32_t bytes_per_pixel = 4; - const uint32_t pixels_total = video_frame.width * video_frame.height; - const uint32_t data_size = pixels_total * bytes_per_pixel; - if (buffer_.size() != data_size) + if (!is_dirty_) { - buffer_.resize(data_size); + const agora::media::base::VideoFrame *video_frame = static_cast<const agora::media::base::VideoFrame *>(videoFrame); - flutter::EncodableMap args = { - {EncodableValue("width"), EncodableValue(video_frame.width)}, - {EncodableValue("height"), EncodableValue(video_frame.height)}}; - method_channel_->InvokeMethod("onSizeChanged", std::make_unique<EncodableValue>(EncodableValue(args))); - } + const uint32_t bytes_per_pixel = 4; + const uint32_t pixels_total = video_frame->width * video_frame->height; + const uint32_t data_size = pixels_total * bytes_per_pixel; + + if (buffer_.size() != data_size) + { + buffer_.resize(data_size); - std::copy(static_cast<uint8_t *>(video_frame.y_buffer), static_cast<uint8_t *>(video_frame.y_buffer) + data_size, buffer_.data()); + flutter::EncodableMap args = { + {EncodableValue("width"), EncodableValue(video_frame->width)}, + {EncodableValue("height"), EncodableValue(video_frame->height)}}; + method_channel_->InvokeMethod("onSizeChanged", std::make_unique<EncodableValue>(EncodableValue(args))); + } - frame_width_ = video_frame.width; - frame_height_ = video_frame.height; - if (TextureRegistered() + std::copy(static_cast<uint8_t *>(video_frame->yBuffer), static_cast<uint8_t *>(video_frame->yBuffer) + data_size, buffer_.data()); + + frame_width_ = video_frame->width; + frame_height_ = video_frame->height; + + is_dirty_ = true; + } + if (TextureRegistered() && is_dirty_) { registrar_->MarkTextureFrameAvailable(texture_id_); } @@ -88,11 +80,18 @@ TextureRender::CopyPixelBuffer(size_t width, size_t height) { std::unique_lock buffer_lock(buffer_mutex_); + is_dirty_ = false; + if (!TextureRegistered()) { return nullptr; } + if (frame_width_ == 0 || frame_height_ == 0) + { + return nullptr; + } + if (!flutter_desktop_pixel_buffer_) { flutter_desktop_pixel_buffer_ = @@ -117,24 +116,43 @@ TextureRender::CopyPixelBuffer(size_t width, size_t height) return flutter_desktop_pixel_buffer_.get(); } -void TextureRender::UpdateData(unsigned int uid, const std::string &channelId, unsigned int videoSourceType) +void TextureRender::UpdateData(unsigned int uid, const std::string &channelId, unsigned int videoSourceType, unsigned int videoViewSetupMode) { - IrisVideoFrameBuffer buffer(kVideoFrameTypeRGBA, this, 16); - IrisVideoFrameBufferConfig config; - - config.id = uid; - config.type = (IrisVideoSourceType)videoSourceType; - + IrisRtcVideoFrameConfig config; + config.uid = uid; + config.video_source_type = videoSourceType; + config.video_frame_format = agora::media::base::VIDEO_PIXEL_FORMAT::VIDEO_PIXEL_RGBA; if (!channelId.empty()) { - strcpy_s(config.key, channelId.c_str()); + strcpy_s(config.channelId, channelId.c_str()); } else { - strcpy_s(config.key, ""); + strcpy_s(config.channelId, ""); } - if (videoFrameBufferManager_) + config.video_view_setup_mode = videoViewSetupMode; + + if (iris_rtc_rendering_) + { + delegate_id_ = iris_rtc_rendering_->AddVideoFrameObserverDelegate(config, this); + } +} + +void TextureRender::Dispose() +{ + if (iris_rtc_rendering_) { - videoFrameBufferManager_->EnableVideoFrameBuffer(buffer, &config); + iris_rtc_rendering_->RemoveVideoFrameObserverDelegate(delegate_id_); + iris_rtc_rendering_ = nullptr; + } + + const std::lock_guard lock(buffer_mutex_); + + if (registrar_ && texture_id_ != -1) + { + registrar_->UnregisterTexture(texture_id_); + + registrar_ = nullptr; + texture_id_ = -1; } } \ No newline at end of file
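On the Dart side, the new videoViewSetupMode argument that is plumbed down to CreateTextureRender corresponds to VideoCanvas.setupMode in the plugin's public API. A minimal sketch of a remote texture-backed view that would exercise this path (the uid and channel values are placeholders, and rtcEngine is assumed to be an initialized RtcEngine):

// The setupMode on the canvas travels through the method channel to the
// native CreateTextureRender call shown in this patch.
AgoraVideoView(
  controller: VideoViewController.remote(
    rtcEngine: rtcEngine,
    canvas: const VideoCanvas(
      uid: 1234,
      setupMode: VideoViewSetupMode.videoViewSetupReplace,
    ),
    connection: const RtcConnection(channelId: 'ch', localUid: 0),
    useFlutterTexture: true,
  ),
)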
diff --git a/windows/video_view_controller.cc b/windows/video_view_controller.cc index dc08bc3ef..d5702302f 100644 --- a/windows/video_view_controller.cc +++ b/windows/video_view_controller.cc @@ -8,8 +8,6 @@ #include #include -#include "iris_rtc_raw_data.h" -#include "iris_video_processor_cxx.h" template <typename T> static bool GetValueFromEncodableMap(const
flutter::EncodableMap *map, @@ -60,10 +58,10 @@ void VideoViewController::HandleMethodCall( return; } - intptr_t videoFrameBufferManagerNativeHandle; - if (!GetValueFromEncodableMap(arguments, "videoFrameBufferManagerNativeHandle", videoFrameBufferManagerNativeHandle)) + intptr_t irisRtcRenderingHandle; + if (!GetValueFromEncodableMap(arguments, "irisRtcRenderingHandle", irisRtcRenderingHandle)) { - result->Error("Invalid arguments", "No videoFrameBufferManagerNativeHandle provided."); + result->Error("Invalid arguments", "No irisRtcRenderingHandle provided."); return; } @@ -95,7 +93,14 @@ void VideoViewController::HandleMethodCall( return; } - auto textureId = CreateTextureRender(videoFrameBufferManagerNativeHandle, static_cast<unsigned int>(uid), channelId, videoSourceType); + int32_t videoViewSetupMode; + if (!GetValueFromEncodableMap(arguments, "videoViewSetupMode", videoViewSetupMode)) + { + result->Error("Invalid arguments", "No videoViewSetupMode provided."); + return; + } + + auto textureId = CreateTextureRender(irisRtcRenderingHandle, static_cast<unsigned int>(uid), channelId, videoSourceType, videoViewSetupMode); result->Success(flutter::EncodableValue(textureId)); } @@ -128,19 +133,20 @@ bool VideoViewController::DestroyPlatformRender(int64_t platformRenderId) } int64_t VideoViewController::CreateTextureRender( - const intptr_t &videoFrameBufferManagerNativeHandle, + const intptr_t &irisRtcRenderingHandle, unsigned int uid, const std::string &channelId, - unsigned int videoSourceType) + unsigned int videoSourceType, + unsigned int videoViewSetupMode) { - agora::iris::IrisVideoFrameBufferManager *videoFrameBufferManager = reinterpret_cast<agora::iris::IrisVideoFrameBufferManager *>(videoFrameBufferManagerNativeHandle); + agora::iris::IrisRtcRendering *iris_rtc_rendering = reinterpret_cast<agora::iris::IrisRtcRendering *>(irisRtcRenderingHandle); std::unique_ptr<TextureRender> textureRender = std::make_unique<TextureRender>( messenger_, texture_registrar_, - videoFrameBufferManager); + iris_rtc_rendering); int64_t texture_id = textureRender->texture_id(); - textureRender.get()->UpdateData(uid, channelId, videoSourceType); + textureRender.get()->UpdateData(uid, channelId, videoSourceType, videoViewSetupMode); renderers_[texture_id] = std::move(textureRender); return texture_id; @@ -151,6 +157,7 @@ bool VideoViewController::DestroyTextureRender(int64_t textureId) auto it = renderers_.find(textureId); if (it != renderers_.end()) { + it->second->Dispose(); renderers_.erase(it); return true; }