diff --git a/Android/APIExample-Audio/app/build.gradle b/Android/APIExample-Audio/app/build.gradle index 034142e8a..2a09cec4f 100644 --- a/Android/APIExample-Audio/app/build.gradle +++ b/Android/APIExample-Audio/app/build.gradle @@ -48,7 +48,7 @@ dependencies { implementation fileTree(dir: "${localSdkPath}", include: ['*.jar', '*.aar']) } else{ - def agora_sdk_version = "4.2.2" + def agora_sdk_version = "4.2.3" // case 1: full single lib with voice only implementation "io.agora.rtc:voice-sdk:${agora_sdk_version}" // case 2: partial libs with voice only diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/ReadyFragment.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/ReadyFragment.java index 7e81063ae..f183c3616 100644 --- a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/ReadyFragment.java +++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/ReadyFragment.java @@ -98,6 +98,7 @@ private void runOnPermissionGranted(@NonNull Runnable runnable) { permissionList.add(Permission.WRITE_EXTERNAL_STORAGE); permissionList.add(Permission.RECORD_AUDIO); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { + permissionList.add(Manifest.permission.READ_PHONE_STATE); permissionList.add(Manifest.permission.BLUETOOTH_CONNECT); } diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/common/widget/WaveformView.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/common/widget/WaveformView.java new file mode 100644 index 000000000..839ebb022 --- /dev/null +++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/common/widget/WaveformView.java @@ -0,0 +1,209 @@ +package io.agora.api.example.common.widget; + +import android.content.Context; +import android.content.res.TypedArray; +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.LinearGradient; +import android.graphics.Paint; +import android.graphics.Shader; +import 
android.util.AttributeSet; +import android.view.View; + +import androidx.annotation.Nullable; + +import java.util.ArrayList; + +import io.agora.api.example.R; + +public class WaveformView extends View { + private ArrayList datas = new ArrayList<>(); + private short max = 100; + private float mWidth; + private float mHeight; + private float space =1f; + private Paint mWavePaint; + private Paint baseLinePaint; + private int mWaveColor = Color.WHITE; + private int mBaseLineColor = Color.WHITE; + private float waveStrokeWidth = 4f; + private int invalidateTime = 1000 / 100; + private long drawTime; + private boolean isMaxConstant = false; + + public WaveformView(Context context) { + this(context, null); + } + + public WaveformView(Context context, @Nullable AttributeSet attrs) { + this(context, attrs, 0); + } + + public WaveformView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) { + super(context, attrs, defStyleAttr); + init(attrs, defStyleAttr); + } + + private void init(AttributeSet attrs, int defStyle) { + final TypedArray a = getContext().obtainStyledAttributes( + attrs, R.styleable.WaveView, defStyle, 0); + mWaveColor = a.getColor( + R.styleable.WaveView_waveColor, + mWaveColor); + mBaseLineColor = a.getColor( + R.styleable.WaveView_baselineColor, + mBaseLineColor); + + waveStrokeWidth = a.getDimension( + R.styleable.WaveView_waveStokeWidth, + waveStrokeWidth); + + max = (short) a.getInt(R.styleable.WaveView_maxValue, max); + invalidateTime = a.getInt(R.styleable.WaveView_invalidateTime, invalidateTime); + + space = a.getDimension(R.styleable.WaveView_space, space); + a.recycle(); + initPainters(); + + } + + private void initPainters() { + mWavePaint = new Paint(); + mWavePaint.setColor(mWaveColor);// 画笔为color + mWavePaint.setStrokeWidth(waveStrokeWidth);// 设置画笔粗细 + mWavePaint.setAntiAlias(true); + mWavePaint.setFilterBitmap(true); + mWavePaint.setStrokeCap(Paint.Cap.ROUND); + mWavePaint.setStyle(Paint.Style.FILL); + Shader shader = new 
LinearGradient(0, 0, 1000, 0, 0xffffffff, 0xFFe850ee, Shader.TileMode.CLAMP); + mWavePaint.setShader(shader); + baseLinePaint = new Paint(); + baseLinePaint.setColor(mBaseLineColor);// 画笔为color + baseLinePaint.setStrokeWidth(1f);// 设置画笔粗细 + baseLinePaint.setAntiAlias(true); + baseLinePaint.setFilterBitmap(true); + baseLinePaint.setStyle(Paint.Style.FILL); + } + + public short getMax() { + return max; + } + + public void setMax(short max) { + this.max = max; + } + + public float getSpace() { + return space; + } + + public void setSpace(float space) { + this.space = space; + } + + public int getmWaveColor() { + return mWaveColor; + } + + public void setmWaveColor(int mWaveColor) { + this.mWaveColor = mWaveColor; + invalidateNow(); + } + + public int getmBaseLineColor() { + return mBaseLineColor; + } + + public void setmBaseLineColor(int mBaseLineColor) { + this.mBaseLineColor = mBaseLineColor; + invalidateNow(); + } + + public float getWaveStrokeWidth() { + return waveStrokeWidth; + } + + public void setWaveStrokeWidth(float waveStrokeWidth) { + this.waveStrokeWidth = waveStrokeWidth; + invalidateNow(); + } + + public int getInvalidateTime() { + return invalidateTime; + } + + public void setInvalidateTime(int invalidateTime) { + this.invalidateTime = invalidateTime; + } + + public boolean isMaxConstant() { + return isMaxConstant; + } + + public void setMaxConstant(boolean maxConstant) { + isMaxConstant = maxConstant; + } + + /** + * 如果改变相应配置 需要刷新相应的paint设置 + */ + public void invalidateNow() { + initPainters(); + invalidate(); + } + + public void addData(short data) { + + if (data < 0) { + data = (short) -data; + } + if (data > max && !isMaxConstant) { + max = data; + } + if (datas.size() > mWidth / space) { + synchronized (this) { + datas.remove(0); + datas.add(data); + } + } else { + datas.add(data); + } + if (System.currentTimeMillis() - drawTime > invalidateTime) { + invalidate(); + drawTime = System.currentTimeMillis(); + } + + } + + public void clear() { + 
datas.clear(); + invalidateNow(); + } + + + @Override + protected void onDraw(Canvas canvas) { + canvas.translate(0, mHeight / 2); + drawBaseLine(canvas); + drawWave(canvas); + } + + @Override + protected void onSizeChanged(int w, int h, int oldw, int oldh) { + mWidth = w; + mHeight = h; + } + + private void drawWave(Canvas mCanvas) { + for (int i = 0; i < datas.size(); i++) { + float x = (i) * space; + float y = (float) datas.get(i) / max * mHeight / 2; + mCanvas.drawLine(x, -y, x, y, mWavePaint); + } + + } + + private void drawBaseLine(Canvas mCanvas) { + mCanvas.drawLine(0, 0, mWidth, 0, baseLinePaint); + } +} \ No newline at end of file diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java index 214494c25..3e30223ca 100644 --- a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java +++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java @@ -105,7 +105,7 @@ public class VoiceEffects extends BaseFragment implements View.OnClickListener, private EditText et_channel; private Button join; private Spinner audioProfile, audioScenario, - chatBeautifier, timbreTransformation, voiceChanger, styleTransformation, roomAcoustics, pitchCorrection, _pitchModeOption, _pitchValueOption, voiceConversion, + chatBeautifier, timbreTransformation, voiceChanger, styleTransformation, roomAcoustics, pitchCorrection, _pitchModeOption, _pitchValueOption, voiceConversion, ainsMode, customBandFreq, customReverbKey; private ViewGroup _voice3DLayout, _pitchModeLayout, _pitchValueLayout; private SeekBar _voice3DCircle, customPitch, customBandGain, customReverbValue, customVoiceFormant; @@ -152,6 +152,7 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat _pitchValueLayout = 
view.findViewById(R.id.audio_pitch_value_layout); + _pitchValueOption = view.findViewById(R.id.audio_pitch_value_option); + voiceConversion = view.findViewById(R.id.audio_voice_conversion); + ainsMode = view.findViewById(R.id.audio_ains_mode); + chatBeautifier.setOnItemSelectedListener(this); + timbreTransformation.setOnItemSelectedListener(this); @@ -163,6 +164,7 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat _voice3DCircle.setOnSeekBarChangeListener(this); + _pitchModeOption.setOnItemSelectedListener(this); + _pitchValueOption.setOnItemSelectedListener(this); + ainsMode.setOnItemSelectedListener(this); // Customize Voice Effects Layout customPitch = view.findViewById(R.id.audio_custom_pitch); // engine.setLocalVoicePitch() @@ -201,6 +203,7 @@ private void resetControlLayoutByJoined() { _pitchModeLayout.setVisibility(View.GONE); _pitchValueLayout.setVisibility(View.GONE); voiceConversion.setEnabled(joined); + ainsMode.setEnabled(joined); customPitch.setEnabled(joined); customBandFreq.setEnabled(joined); @@ -216,6 +219,7 @@ private void resetControlLayoutByJoined() { roomAcoustics.setSelection(0); pitchCorrection.setSelection(0); voiceConversion.setSelection(0); + ainsMode.setSelection(0); customPitch.setProgress(0); customBandGain.setProgress(0); @@ -615,11 +619,26 @@ public void onItemSelected(AdapterView parent, View view, int position, long return; } + if(parent == _pitchModeOption || parent == _pitchValueOption){ int effectOption1 = getPitch1Value(_pitchModeOption.getSelectedItem().toString()); int effectOption2 = getPitch2Value(_pitchValueOption.getSelectedItem().toString()); engine.setAudioEffectParameters(PITCH_CORRECTION, effectOption1, effectOption2); } + + if(parent == ainsMode){ + boolean enable = position > 0; + /* + The AI noise suppression modes: + 0: (Default) Balance mode. This mode allows for a balanced performance on noise suppression and time delay. + 1: Aggressive mode. 
In scenarios where high performance on noise suppression is required, such as live streaming + outdoor events, this mode reduces noises more dramatically, but sometimes may affect the original character of the audio. + 2: Aggressive mode with low latency. The noise suppression delay of this mode is about only half of that of the balance + and aggressive modes. It is suitable for scenarios that have high requirements on noise suppression with low latency, + such as singing together online in real time. + */ + engine.setAINSMode(enable, position - 1); + } } private int getVoiceConversionValue(String label) { diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/audio/AudioWaveform.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/audio/AudioWaveform.java new file mode 100644 index 000000000..1f72db793 --- /dev/null +++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/audio/AudioWaveform.java @@ -0,0 +1,314 @@ +package io.agora.api.example.examples.audio; + +import android.content.Context; +import android.os.Bundle; +import android.util.Log; +import android.view.LayoutInflater; +import android.view.View; +import android.view.ViewGroup; + +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; + +import java.util.Locale; + +import io.agora.api.example.MainApplication; +import io.agora.api.example.R; +import io.agora.api.example.annotation.Example; +import io.agora.api.example.common.BaseFragment; +import io.agora.api.example.common.model.Examples; +import io.agora.api.example.databinding.FragmentAudioWaveformBinding; +import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.TokenUtils; +import io.agora.rtc2.ChannelMediaOptions; +import io.agora.rtc2.Constants; +import io.agora.rtc2.IRtcEngineEventHandler; +import io.agora.rtc2.RtcEngine; +import io.agora.rtc2.RtcEngineConfig; + +@Example( + index = 7, + group = Examples.ADVANCED, + name = 
R.string.item_audiowaveform, + actionId = R.id.action_mainFragment_to_AudioWaveform, + tipsId = R.string.audiorouter_palyer +) +public class AudioWaveform extends BaseFragment { + private static final String TAG = "AudioWaveform"; + private FragmentAudioWaveformBinding mBinding; + private RtcEngine engine; + private int myUid; + private boolean joined = false; + + @Override + public void onCreate(@Nullable Bundle savedInstanceState) { + super.onCreate(savedInstanceState); + // Check if the context is valid + Context context = getContext(); + if (context == null) { + return; + } + try { + RtcEngineConfig config = new RtcEngineConfig(); + /* + * The context of Android Activity + */ + config.mContext = context.getApplicationContext(); + /* + * The App ID issued to you by Agora. See How to get the App ID + */ + config.mAppId = getString(R.string.agora_app_id); + /* Sets the channel profile of the Agora RtcEngine. + CHANNEL_PROFILE_COMMUNICATION(0): (Default) The Communication profile. + Use this profile in one-on-one calls or group calls, where all users can talk freely. + CHANNEL_PROFILE_LIVE_BROADCASTING(1): The Live-Broadcast profile. Users in a live-broadcast + channel have a role as either broadcaster or audience. A broadcaster can both send and receive streams; + an audience can only receive streams.*/ + config.mChannelProfile = Constants.CHANNEL_PROFILE_LIVE_BROADCASTING; + /* + * IRtcEngineEventHandler is an abstract class providing default implementation. + * The SDK uses this class to report to the app on SDK runtime events. + */ + config.mEventHandler = iRtcEngineEventHandler; + config.mAreaCode = ((MainApplication) requireActivity().getApplication()).getGlobalSettings().getAreaCode(); + engine = RtcEngine.create(config); + /* + * This parameter is for reporting the usages of APIExample to agora background. + * Generally, it is not necessary for you to set this parameter. 
+ */ + engine.setParameters("{" + + "\"rtc.report_app_scenario\":" + + "{" + + "\"appScenario\":" + 100 + "," + + "\"serviceType\":" + 11 + "," + + "\"appVersion\":\"" + RtcEngine.getSdkVersion() + "\"" + + "}" + + "}"); + /* setting the local access point if the private cloud ip was set, otherwise the config will be invalid.*/ + engine.setLocalAccessPoint(((MainApplication) requireActivity().getApplication()).getGlobalSettings().getPrivateCloudConfig()); + + } catch (Exception e) { + e.printStackTrace(); + requireActivity().onBackPressed(); + } + } + + @Override + public void onDestroy() { + super.onDestroy(); + /*leaveChannel and Destroy the RtcEngine instance*/ + if (engine != null) { + engine.leaveChannel(); + } + handler.post(RtcEngine::destroy); + engine = null; + } + + @Nullable + @Override + public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { + mBinding = FragmentAudioWaveformBinding.inflate(inflater); + return mBinding.getRoot(); + } + + @Override + public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) { + super.onViewCreated(view, savedInstanceState); + mBinding.btnJoin.setOnClickListener(v -> { + if (!joined) { + CommonUtil.hideInputBoard(requireActivity(), mBinding.etChannel); + joinChannel(mBinding.etChannel.getText().toString()); + joined = true; + mBinding.btnJoin.setText(R.string.leave); + mBinding.waveformView.clear(); + } else { + engine.leaveChannel(); + joined = false; + mBinding.btnJoin.setText(R.string.join); + } + }); + } + + + /** + * @param channelId Specify the channel name that you want to join. + * Users that input the same channel name join the same channel. + */ + private void joinChannel(String channelId) { + + engine.enableAudio(); + engine.setDefaultAudioRoutetoSpeakerphone(true); + + /* + * Enables the reporting of users' volume indication. 
+ * + * @param interval Sets the time interval between two consecutive volume indications + * ≤ 0: Disables the volume indication. + * > 0: Time interval (ms) between two consecutive volume indications. The lowest value is 50. + * @param smooth The smoothing factor that sets the sensitivity of the audio volume indicator. The value ranges between 0 and 10. + * The recommended value is 3. The greater the value, the more sensitive the indicator. + * @param reportVad true: Enables the voice activity detection of the local user. Once it is enabled, + * the vad parameter of the onAudioVolumeIndication callback reports the voice activity status of the local user. + * false: (Default) Disables the voice activity detection of the local user. Once it is disabled, + * the vad parameter of the onAudioVolumeIndication callback does not report the voice activity status of the local user, + * except for the scenario where the engine automatically detects the voice activity of the local user. + */ + engine.enableAudioVolumeIndication(1000, 3, true); + + /*In the demo, the default is to enter as the anchor.*/ + ChannelMediaOptions option = new ChannelMediaOptions(); + option.channelProfile = Constants.CHANNEL_PROFILE_LIVE_BROADCASTING; + option.clientRoleType = Constants.CLIENT_ROLE_BROADCASTER; + option.autoSubscribeAudio = true; + option.autoSubscribeVideo = true; + option.publishMicrophoneTrack = true; + + /*Please configure accessToken in the string_config file. + * A temporary token generated in Console. A temporary token is valid for 24 hours. For details, see + * https://docs.agora.io/en/Agora%20Platform/token?platform=All%20Platforms#get-a-temporary-token + * A token generated at the server. This applies to scenarios with high-security requirements. For details, see + * https://docs.agora.io/en/cloud-recording/token_server_java?platform=Java*/ + TokenUtils.gen(requireContext(), channelId, 0, ret -> { + + /* Allows a user to join a channel. 
+ if you do not specify the uid, we will generate the uid for you*/ + int res = engine.joinChannel(ret, channelId, 0, option); + if (res != 0) { + // Usually happens with invalid parameters + // Error code description can be found at: + // en: https://docs.agora.io/en/Voice/API%20Reference/java/classio_1_1agora_1_1rtc_1_1_i_rtc_engine_event_handler_1_1_error_code.html + // cn: https://docs.agora.io/cn/Voice/API%20Reference/java/classio_1_1agora_1_1rtc_1_1_i_rtc_engine_event_handler_1_1_error_code.html + showAlert(RtcEngine.getErrorDescription(Math.abs(res))); + Log.e(TAG, RtcEngine.getErrorDescription(Math.abs(res))); + return; + } + // Prevent repeated entry + mBinding.btnJoin.setEnabled(false); + }); + + } + + + /** + * IRtcEngineEventHandler is an abstract class providing default implementation. + * The SDK uses this class to report to the app on SDK runtime events. + */ + private final IRtcEngineEventHandler iRtcEngineEventHandler = new IRtcEngineEventHandler() { + /** + * Error code description can be found at: + * en: ... + * cn: ... + */ + @Override + public void onError(int error) { + Log.w(TAG, String.format("onError code %d message %s", error, RtcEngine.getErrorDescription(error))); + runOnUIThread(() -> mBinding.btnJoin.setEnabled(true)); + } + + /**Occurs when a user leaves the channel. + * @param stats With this callback, the application retrieves the channel information, + * such as the call duration and statistics.*/ + @Override + public void onLeaveChannel(RtcStats stats) { + super.onLeaveChannel(stats); + Log.i(TAG, String.format("local user %d leaveChannel!", myUid)); + showLongToast(String.format(Locale.US, "local user %d leaveChannel!", myUid)); + } + + /**Occurs when the local user joins a specified channel. + * The channel name assignment is based on channelName specified in the joinChannel method. + * If the uid is not specified when joinChannel is called, the server automatically assigns a uid. 
+ * @param channel Channel name + * @param uid User ID + * @param elapsed Time elapsed (ms) from the user calling joinChannel until this callback is triggered*/ + @Override + public void onJoinChannelSuccess(String channel, int uid, int elapsed) { + Log.i(TAG, String.format("onJoinChannelSuccess channel %s uid %d", channel, uid)); + showLongToast(String.format(Locale.US, "onJoinChannelSuccess channel %s uid %d", channel, uid)); + myUid = uid; + joined = true; + runOnUIThread(() -> mBinding.btnJoin.setEnabled(true)); + } + + /**Since v2.9.0. + * This callback indicates the state change of the remote audio stream. + * PS: This callback does not work properly when the number of users (in the Communication profile) or + * broadcasters (in the Live-broadcast profile) in the channel exceeds 17. + * @param uid ID of the user whose audio state changes. + * @param state State of the remote audio + * REMOTE_AUDIO_STATE_STOPPED(0): The remote audio is in the default state, probably due + * to REMOTE_AUDIO_REASON_LOCAL_MUTED(3), REMOTE_AUDIO_REASON_REMOTE_MUTED(5), + * or REMOTE_AUDIO_REASON_REMOTE_OFFLINE(7). + * REMOTE_AUDIO_STATE_STARTING(1): The first remote audio packet is received. + * REMOTE_AUDIO_STATE_DECODING(2): The remote audio stream is decoded and plays normally, + * probably due to REMOTE_AUDIO_REASON_NETWORK_RECOVERY(2), + * REMOTE_AUDIO_REASON_LOCAL_UNMUTED(4) or REMOTE_AUDIO_REASON_REMOTE_UNMUTED(6). + * REMOTE_AUDIO_STATE_FROZEN(3): The remote audio is frozen, probably due to + * REMOTE_AUDIO_REASON_NETWORK_CONGESTION(1). + * REMOTE_AUDIO_STATE_FAILED(4): The remote audio fails to start, probably due to + * REMOTE_AUDIO_REASON_INTERNAL(0). + * @param reason The reason of the remote audio state change. + * REMOTE_AUDIO_REASON_INTERNAL(0): Internal reasons. + * REMOTE_AUDIO_REASON_NETWORK_CONGESTION(1): Network congestion. + * REMOTE_AUDIO_REASON_NETWORK_RECOVERY(2): Network recovery. 
+ * REMOTE_AUDIO_REASON_LOCAL_MUTED(3): The local user stops receiving the remote audio + * stream or disables the audio module. + * REMOTE_AUDIO_REASON_LOCAL_UNMUTED(4): The local user resumes receiving the remote audio + * stream or enables the audio module. + * REMOTE_AUDIO_REASON_REMOTE_MUTED(5): The remote user stops sending the audio stream or + * disables the audio module. + * REMOTE_AUDIO_REASON_REMOTE_UNMUTED(6): The remote user resumes sending the audio stream + * or enables the audio module. + * REMOTE_AUDIO_REASON_REMOTE_OFFLINE(7): The remote user leaves the channel. + * @param elapsed Time elapsed (ms) from the local user calling the joinChannel method + * until the SDK triggers this callback.*/ + @Override + public void onRemoteAudioStateChanged(int uid, int state, int reason, int elapsed) { + super.onRemoteAudioStateChanged(uid, state, reason, elapsed); + Log.i(TAG, "onRemoteAudioStateChanged->" + uid + ", state->" + state + ", reason->" + reason); + } + + /**Occurs when a remote user (Communication)/host (Live Broadcast) joins the channel. + * @param uid ID of the user whose audio state changes. + * @param elapsed Time delay (ms) from the local user calling joinChannel/setClientRole + * until this callback is triggered.*/ + @Override + public void onUserJoined(int uid, int elapsed) { + super.onUserJoined(uid, elapsed); + Log.i(TAG, "onUserJoined->" + uid); + showLongToast(String.format(Locale.US, "user %d joined!", uid)); + } + + /**Occurs when a remote user (Communication)/host (Live Broadcast) leaves the channel. + * @param uid ID of the user whose audio state changes. + * @param reason Reason why the user goes offline: + * USER_OFFLINE_QUIT(0): The user left the current channel. + * USER_OFFLINE_DROPPED(1): The SDK timed out and the user dropped offline because no data + * packet was received within a certain period of time. 
If a user quits the + * call and the message is not passed to the SDK (due to an unreliable channel), + * the SDK assumes the user dropped offline. + * USER_OFFLINE_BECOME_AUDIENCE(2): (Live broadcast only.) The client role switched from + * the host to the audience.*/ + @Override + public void onUserOffline(int uid, int reason) { + Log.i(TAG, String.format("user %d offline! reason:%d", uid, reason)); + showLongToast(String.format(Locale.US, "user %d offline! reason:%d", uid, reason)); + } + + /** + * Reports the volume information of users. + * + * @param speakers The volume information of the users. See AudioVolumeInfo. + * An empty speakers array in the callback indicates that no remote user is in the channel or is sending a stream. + * @param totalVolume The volume of the speaker. The value range is [0,255]. + * In the callback for the local user, totalVolume is the volume of the local user who sends a stream. + * In the callback for remote users, totalVolume is the sum of the volume of all remote users (up to three) + * whose instantaneous volume is the highest. If the user calls startAudioMixing [2/2], then totalVolume is the volume after audio mixing. 
+ */ + @Override + public void onAudioVolumeIndication(AudioVolumeInfo[] speakers, int totalVolume) { + super.onAudioVolumeIndication(speakers, totalVolume); + runOnUIThread(() -> mBinding.waveformView.addData((short) totalVolume)); + } + }; +} diff --git a/Android/APIExample-Audio/app/src/main/res/layout/fragment_audio_waveform.xml b/Android/APIExample-Audio/app/src/main/res/layout/fragment_audio_waveform.xml new file mode 100644 index 000000000..d3ca4fa96 --- /dev/null +++ b/Android/APIExample-Audio/app/src/main/res/layout/fragment_audio_waveform.xml @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/iOS/APIExample/APIExample/Examples/Advanced/AudioWaveform/zh-Hans.lproj/AudioWaveform.strings b/iOS/APIExample/APIExample/Examples/Advanced/AudioWaveform/zh-Hans.lproj/AudioWaveform.strings new file mode 100644 index 000000000..b42ff128a --- /dev/null +++ b/iOS/APIExample/APIExample/Examples/Advanced/AudioWaveform/zh-Hans.lproj/AudioWaveform.strings @@ -0,0 +1,33 @@ + +/* Class = "UILabel"; text = "PlaybackVolume"; ObjectID = "07c-He-s8j"; */ +"07c-He-s8j.text" = "播放音量"; + +/* Class = "UILabel"; text = "RecordingVolume"; ObjectID = "DJt-Y7-fkM"; */ +"DJt-Y7-fkM.text" = "采集音量"; + +/* Class = "UITextField"; placeholder = "Enter channel name"; ObjectID = "GWc-L5-fZV"; */ +"GWc-L5-fZV.placeholder" = "输入频道名"; + +/* Class = "UILabel"; text = "Audio Scenario"; ObjectID = "Q0E-5B-IED"; */ +"Q0E-5B-IED.text" = "音频使用场景"; + +/* Class = "UILabel"; text = "InEar Monitoring Volume"; ObjectID = "VMe-lv-SUb"; */ +"VMe-lv-SUb.text" = "耳返音量"; + +/* Class = "UILabel"; text = "Audio Profile"; ObjectID = "iUn-XK-AS2"; */ +"iUn-XK-AS2.text" = "音频参数配置"; + +/* Class = "UIButton"; normalTitle = "Button"; ObjectID = "iZP-Ce-Oxt"; */ +"iZP-Ce-Oxt.normalTitle" = 
"Button"; + +/* Class = "UILabel"; text = "InEar Monitoring"; ObjectID = "iru-5f-bbo"; */ +"iru-5f-bbo.text" = "耳返"; + +/* Class = "UIViewController"; title = "Join Channel Audio"; ObjectID = "jxp-ZN-2yG"; */ +"jxp-ZN-2yG.title" = "实时音频通话/直播"; + +/* Class = "UIButton"; normalTitle = "Join"; ObjectID = "kbN-ZR-nNn"; */ +"kbN-ZR-nNn.normalTitle" = "加入频道"; + +/* Class = "UIButton"; normalTitle = "Button"; ObjectID = "myR-6e-1zj"; */ +"myR-6e-1zj.normalTitle" = "Button"; diff --git a/iOS/APIExample/APIExample/Examples/Advanced/AuidoRouterPlayer/AuidoRouterPlayer.swift b/iOS/APIExample/APIExample/Examples/Advanced/AuidoRouterPlayer/AuidoRouterPlayer.swift new file mode 100644 index 000000000..7ebce97d4 --- /dev/null +++ b/iOS/APIExample/APIExample/Examples/Advanced/AuidoRouterPlayer/AuidoRouterPlayer.swift @@ -0,0 +1,368 @@ +// +// JoinChannelVideo.swift +// APIExample +// +// Created by 张乾泽 on 2020/4/17. +// Copyright © 2020 Agora Corp. All rights reserved. +// +import UIKit +import AGEVideoLayout +import AgoraRtcKit +import IJKMediaFramework + +enum ThirdPlayerType: String { + case ijk = "ijkplayer" + case origin = "avplayer" +} + +class AuidoRouterPlayerEntry : UIViewController +{ + @IBOutlet weak var joinButton: UIButton! + @IBOutlet weak var channelTextField: UITextField! + let identifier = "AuidoRouterPlayer" + @IBOutlet var resolutionBtn: UIButton! + @IBOutlet var fpsBtn: UIButton! + @IBOutlet var orientationBtn: UIButton! + @IBOutlet weak var chosePlayerButton: UIButton! + var width:Int = 960, height:Int = 540, orientation:AgoraVideoOutputOrientationMode = .adaptative, fps = 15 + private var playerType: ThirdPlayerType = .ijk + + override func viewDidLoad() { + super.viewDidLoad() + } + + + @IBAction func onChosePlayerType(_ sender: UIButton) { + let alert = UIAlertController(title: "Player Type(ijkplayer/avplayer)".localized, + message: nil, + preferredStyle: UIDevice.current.userInterfaceIdiom == .pad ? 
UIAlertController.Style.alert : UIAlertController.Style.actionSheet) + alert.addAction(getPlayerAction(ThirdPlayerType.ijk.rawValue)) + alert.addAction(getPlayerAction(ThirdPlayerType.origin.rawValue)) + alert.addCancelAction() + present(alert, animated: true, completion: nil) + } + func getPlayerAction(_ title: String) -> UIAlertAction{ + return UIAlertAction(title: title, style: .default, handler: {[unowned self] action in + self.chosePlayerButton.setTitle(title, for: .normal) + self.playerType = ThirdPlayerType(rawValue: title) ?? .ijk + }) + } + func getResolutionAction(width:Int, height:Int) -> UIAlertAction{ + return UIAlertAction(title: "\(width)x\(height)", style: .default, handler: {[unowned self] action in + self.width = width + self.height = height + self.resolutionBtn.setTitle("\(width)x\(height)", for: .normal) + }) + } + + func getFpsAction(_ fps:Int) -> UIAlertAction{ + return UIAlertAction(title: "\(fps)fps", style: .default, handler: {[unowned self] action in + self.fps = fps + self.fpsBtn.setTitle("\(fps)fps", for: .normal) + }) + } + + func getOrientationAction(_ orientation:AgoraVideoOutputOrientationMode) -> UIAlertAction{ + return UIAlertAction(title: "\(orientation.description())", style: .default, handler: {[unowned self] action in + self.orientation = orientation + self.orientationBtn.setTitle("\(orientation.description())", for: .normal) + }) + } + + @IBAction func setResolution(){ + let alert = UIAlertController(title: "Set Resolution".localized, message: nil, preferredStyle: UIDevice.current.userInterfaceIdiom == .pad ? 
UIAlertController.Style.alert : UIAlertController.Style.actionSheet) + alert.addAction(getResolutionAction(width: 90, height: 90)) + alert.addAction(getResolutionAction(width: 160, height: 120)) + alert.addAction(getResolutionAction(width: 320, height: 240)) + alert.addAction(getResolutionAction(width: 960, height: 540)) + alert.addAction(getResolutionAction(width: 1280, height: 720)) + alert.addCancelAction() + present(alert, animated: true, completion: nil) + } + + @IBAction func setFps(){ + let alert = UIAlertController(title: "Set Fps".localized, message: nil, preferredStyle: UIDevice.current.userInterfaceIdiom == .pad ? UIAlertController.Style.alert : UIAlertController.Style.actionSheet) + alert.addAction(getFpsAction(10)) + alert.addAction(getFpsAction(15)) + alert.addAction(getFpsAction(24)) + alert.addAction(getFpsAction(30)) + alert.addAction(getFpsAction(60)) + alert.addCancelAction() + present(alert, animated: true, completion: nil) + } + + @IBAction func setOrientation(){ + let alert = UIAlertController(title: "Set Orientation".localized, message: nil, preferredStyle: UIDevice.current.userInterfaceIdiom == .pad ? UIAlertController.Style.alert : UIAlertController.Style.actionSheet) + alert.addAction(getOrientationAction(.adaptative)) + alert.addAction(getOrientationAction(.fixedLandscape)) + alert.addAction(getOrientationAction(.fixedPortrait)) + alert.addCancelAction() + present(alert, animated: true, completion: nil) + } + + @IBAction func doJoinPressed(sender: UIButton) { + guard let channelName = channelTextField.text else {return} + //resign channel text field + channelTextField.resignFirstResponder() + + let storyBoard: UIStoryboard = UIStoryboard(name: identifier, bundle: nil) + // create new view controller every time to ensure we get a clean vc + guard let newViewController = storyBoard.instantiateViewController(withIdentifier: identifier) as? 
BaseViewController else {return} + newViewController.title = channelName + newViewController.configs = ["channelName":channelName, + "resolution":CGSize(width: width, height: height), + "fps": fps, + "orientation": orientation, + "playerType": playerType.rawValue] + navigationController?.pushViewController(newViewController, animated: true) + } +} + +class AuidoRouterPlayerMain: BaseViewController { + var localVideo = Bundle.loadVideoView(type: .local, audioOnly: false) + var remoteVideo = Bundle.loadVideoView(type: .remote, audioOnly: false) + @IBOutlet weak var playerView: UIView! + @IBOutlet weak var speakerSwitch: UISwitch! + @IBOutlet weak var container: AGEVideoContainer! + var agoraKit: AgoraRtcEngineKit! + private let videoString = "https://agora-adc-artifacts.s3.cn-north-1.amazonaws.com.cn/resources/sample.mp4" + private lazy var ijkPlayer: IJKAVMoviePlayerController? = { + let player = IJKAVMoviePlayerController(contentURL: URL(string: videoString)) + player?.view.autoresizingMask = [.flexibleWidth, .flexibleHeight] + player?.view.frame = playerView.bounds + player?.scalingMode = .aspectFit + player?.shouldAutoplay = true + player?.prepareToPlay() + player?.playbackVolume = 30 + player?.allowsMediaAirPlay = true + player?.isDanmakuMediaAirPlay = true + return player + }() + private lazy var avPlayer: AVPlayerViewController? 
= { + guard let url = URL(string: videoString) else { return nil } + let player = AVPlayer(url: url) + player.volume = 30 + let playerVC = AVPlayerViewController() + playerVC.player = player + playerVC.view.autoresizingMask = [.flexibleWidth, .flexibleHeight] + playerVC.view.frame = playerView.bounds + playerVC.delegate = self + player.play() + return playerVC + }() + + // indicate if current instance has joined channel + var isJoined: Bool = false + + override func viewDidLoad() { + super.viewDidLoad() + // layout render view + localVideo.setPlaceholder(text: "Local Host".localized) + remoteVideo.setPlaceholder(text: "Remote Host".localized) + container.layoutStream(views: [localVideo, remoteVideo]) + + // set up agora instance when view loaded + let config = AgoraRtcEngineConfig() + config.appId = KeyCenter.AppId + config.areaCode = GlobalSettings.shared.area + config.channelProfile = .liveBroadcasting + agoraKit = AgoraRtcEngineKit.sharedEngine(with: config, delegate: self) + // Configuring Privatization Parameters + Util.configPrivatization(agoraKit: agoraKit) + + agoraKit.setLogFile(LogUtils.sdkLogPath()) + + // get channel name from configs + guard let channelName = configs["channelName"] as? String, + let resolution = configs["resolution"] as? CGSize, + let fps = configs["fps"] as? Int, + let orientation = configs["orientation"] as? AgoraVideoOutputOrientationMode else {return} + + // make myself a broadcaster + agoraKit.setClientRole(GlobalSettings.shared.getUserRole()) + // enable video module and set up video encoding configs + agoraKit.enableVideo() + agoraKit.enableAudio() + agoraKit.setVideoEncoderConfiguration(AgoraVideoEncoderConfiguration(size: resolution, + frameRate: AgoraVideoFrameRate(rawValue: fps) ?? 
.fps15, + bitrate: AgoraVideoBitrateStandard, + orientationMode: orientation, mirrorMode: .auto)) + + // set up local video to render your local camera preview + let videoCanvas = AgoraRtcVideoCanvas() + videoCanvas.uid = 0 + // the view to be binded + videoCanvas.view = localVideo.videoView + videoCanvas.renderMode = .hidden + agoraKit.setupLocalVideo(videoCanvas) + // you have to call startPreview to see local video + agoraKit.startPreview() + + // Set audio route to speaker + agoraKit.setEnableSpeakerphone(true) + + // keep audio session + agoraKit.setParameters("{\"che.audio.keep.audiosession\": true}") + + // start joining channel + // 1. Users can only see each other after they join the + // same channel successfully using the same app id. + // 2. If app certificate is turned on at dashboard, token is needed + // when joining channel. The channel name and uid used to calculate + // the token has to match the ones used for channel join + let option = AgoraRtcChannelMediaOptions() + option.publishCameraTrack = GlobalSettings.shared.getUserRole() == .broadcaster + option.publishMicrophoneTrack = GlobalSettings.shared.getUserRole() == .broadcaster + option.clientRoleType = GlobalSettings.shared.getUserRole() + NetworkManager.shared.generateToken(channelName: channelName, success: { token in + let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option) + if result != 0 { + // Usually happens with invalid parameters + // Error code description can be found at: + // en: https://api-ref.agora.io/en/voice-sdk/macos/3.x/Constants/AgoraErrorCode.html#content + // cn: https://docs.agora.io/cn/Voice/API%20Reference/oc/Constants/AgoraErrorCode.html + self.showAlert(title: "Error", message: "joinChannel call failed: \(result), please check your params") + } + }) + } + + override func viewDidAppear(_ animated: Bool) { + super.viewDidAppear(animated) + let playerType = ThirdPlayerType(rawValue: configs["playerType"] as! 
String) + if playerType == .ijk { + setupIJKPlayer() + } else { + setupAVPlayer() + } + } + + private func setupIJKPlayer() { + guard let ijkPlayerView = ijkPlayer?.view else { return } + playerView.addSubview(ijkPlayerView) + } + + private func setupAVPlayer() { + guard let avPlayerView = avPlayer?.view else { return } + playerView.addSubview(avPlayerView) + } + + @IBAction func onSpeakerSwitch(_ sender: UISwitch) { + agoraKit.setEnableSpeakerphone(sender.isOn) + } + + deinit { + agoraKit.disableAudio() + agoraKit.disableVideo() + + if isJoined { + agoraKit.stopPreview() + agoraKit.leaveChannel { (stats) -> Void in + LogUtils.log(message: "left channel, duration: \(stats.duration)", level: .info) + } + } + AgoraRtcEngineKit.destroy() + let playerType = ThirdPlayerType(rawValue: configs["playerType"] as! String) + if playerType == .origin { + avPlayer?.player?.pause() + } else { + ijkPlayer?.shutdown() + } + } +} + +extension AuidoRouterPlayerMain: AVPlayerViewControllerDelegate { + func playerViewController(_ playerViewController: AVPlayerViewController, willEndFullScreenPresentationWithAnimationCoordinator coordinator: UIViewControllerTransitionCoordinator) { + // The system pauses when returning from full screen, we need to 'resume' manually. 
+ coordinator.animate(alongsideTransition: nil) { transitionContext in + playerViewController.player?.play() + } + } +} + +/// agora rtc engine delegate events +extension AuidoRouterPlayerMain: AgoraRtcEngineDelegate { + /// callback when warning occured for agora sdk, warning can usually be ignored, still it's nice to check out + /// what is happening + /// Warning code description can be found at: + /// en: https://api-ref.agora.io/en/voice-sdk/ios/3.x/Constants/AgoraWarningCode.html + /// cn: https://docs.agora.io/cn/Voice/API%20Reference/oc/Constants/AgoraWarningCode.html + /// @param warningCode warning code of the problem + func rtcEngine(_ engine: AgoraRtcEngineKit, didOccurWarning warningCode: AgoraWarningCode) { + LogUtils.log(message: "warning: \(warningCode.description)", level: .warning) + } + + /// callback when error occured for agora sdk, you are recommended to display the error descriptions on demand + /// to let user know something wrong is happening + /// Error code description can be found at: + /// en: https://api-ref.agora.io/en/voice-sdk/macos/3.x/Constants/AgoraErrorCode.html#content + /// cn: https://docs.agora.io/cn/Voice/API%20Reference/oc/Constants/AgoraErrorCode.html + /// @param errorCode error code of the problem + func rtcEngine(_ engine: AgoraRtcEngineKit, didOccurError errorCode: AgoraErrorCode) { + LogUtils.log(message: "error: \(errorCode)", level: .error) +// self.showAlert(title: "Error", message: "Error \(errorCode.description) occur") + } + + func rtcEngine(_ engine: AgoraRtcEngineKit, didJoinChannel channel: String, withUid uid: UInt, elapsed: Int) { + self.isJoined = true + LogUtils.log(message: "Join \(channel) with uid \(uid) elapsed \(elapsed)ms", level: .info) + } + + /// callback when a remote user is joinning the channel, note audience in live broadcast mode will NOT trigger this event + /// @param uid uid of remote joined user + /// @param elapsed time elapse since current sdk instance join the channel in ms + func 
rtcEngine(_ engine: AgoraRtcEngineKit, didJoinedOfUid uid: UInt, elapsed: Int) { + LogUtils.log(message: "remote user join: \(uid) \(elapsed)ms", level: .info) + + // Only one remote video view is available for this + // tutorial. Here we check if there exists a surface + // view tagged as this uid. + let videoCanvas = AgoraRtcVideoCanvas() + videoCanvas.uid = uid + // the view to be binded + videoCanvas.view = remoteVideo.videoView + videoCanvas.renderMode = .hidden + agoraKit.setupRemoteVideo(videoCanvas) + } + + /// callback when a remote user is leaving the channel, note audience in live broadcast mode will NOT trigger this event + /// @param uid uid of remote joined user + /// @param reason reason why this user left, note this event may be triggered when the remote user + /// become an audience in live broadcasting profile + func rtcEngine(_ engine: AgoraRtcEngineKit, didOfflineOfUid uid: UInt, reason: AgoraUserOfflineReason) { + LogUtils.log(message: "remote user left: \(uid) reason \(reason)", level: .info) + + // to unlink your view from sdk, so that your view reference will be released + // note the video will stay at its last frame, to completely remove it + // you will need to remove the EAGL sublayer from your binded view + let videoCanvas = AgoraRtcVideoCanvas() + videoCanvas.uid = uid + // the view to be binded + videoCanvas.view = nil + videoCanvas.renderMode = .hidden + agoraKit.setupRemoteVideo(videoCanvas) + } + + /// Reports the statistics of the current call. The SDK triggers this callback once every two seconds after the user joins the channel. + /// @param stats stats struct + func rtcEngine(_ engine: AgoraRtcEngineKit, reportRtcStats stats: AgoraChannelStats) { + localVideo.statsInfo?.updateChannelStats(stats) + } + + /// Reports the statistics of the uploading local audio streams once every two seconds. 
+ /// @param stats stats struct + func rtcEngine(_ engine: AgoraRtcEngineKit, localAudioStats stats: AgoraRtcLocalAudioStats) { + localVideo.statsInfo?.updateLocalAudioStats(stats) + } + + /// Reports the statistics of the video stream from each remote user/host. + /// @param stats stats struct + func rtcEngine(_ engine: AgoraRtcEngineKit, remoteVideoStats stats: AgoraRtcRemoteVideoStats) { + remoteVideo.statsInfo?.updateVideoStats(stats) + } + + /// Reports the statistics of the audio stream from each remote user/host. + /// @param stats stats struct for current call statistics + func rtcEngine(_ engine: AgoraRtcEngineKit, remoteAudioStats stats: AgoraRtcRemoteAudioStats) { + remoteVideo.statsInfo?.updateAudioStats(stats) + } +} diff --git a/iOS/APIExample/APIExample/Examples/Advanced/AuidoRouterPlayer/Base.lproj/AuidoRouterPlayer.storyboard b/iOS/APIExample/APIExample/Examples/Advanced/AuidoRouterPlayer/Base.lproj/AuidoRouterPlayer.storyboard new file mode 100644 index 000000000..826971e21 --- /dev/null +++ b/iOS/APIExample/APIExample/Examples/Advanced/AuidoRouterPlayer/Base.lproj/AuidoRouterPlayer.storyboard @@ -0,0 +1,139 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/iOS/APIExample/APIExample/Examples/Advanced/AuidoRouterPlayer/zh-Hans.lproj/AuidoRouterPlayer.strings b/iOS/APIExample/APIExample/Examples/Advanced/AuidoRouterPlayer/zh-Hans.lproj/AuidoRouterPlayer.strings new file mode 100644 index 000000000..b1f01b9bd --- /dev/null +++ b/iOS/APIExample/APIExample/Examples/Advanced/AuidoRouterPlayer/zh-Hans.lproj/AuidoRouterPlayer.strings @@ -0,0 +1,25 @@ + +/* Class = "UITextField"; placeholder = "Enter channel name"; ObjectID = "GWc-L5-fZV"; */ +"GWc-L5-fZV.placeholder" = "输入频道名"; + +/* Class = "UINavigationItem"; title = "Join Channel"; ObjectID = 
"Iy0-Dq-h5x"; */ +"Iy0-Dq-h5x.title" = "加入频道"; + +/* Class = "UIButton"; normalTitle = "Button"; ObjectID = "VpM-9W-auG"; */ +"VpM-9W-auG.normalTitle" = "Button"; + +/* Class = "UIButton"; normalTitle = "Join"; ObjectID = "kbN-ZR-nNn"; */ +"kbN-ZR-nNn.normalTitle" = "加入频道"; + +/* Class = "UIButton"; normalTitle = "Button"; ObjectID = "kf0-3f-UI5"; */ +"kf0-3f-UI5.normalTitle" = "Button"; + +/* Class = "UIViewController"; title = "Join Channel Video"; ObjectID = "p70-sh-D1h"; */ +"p70-sh-D1h.title" = "视频实时通话"; + +/* Class = "UIButton"; normalTitle = "Button"; ObjectID = "wHl-zh-dFe"; */ +"wHl-zh-dFe.normalTitle" = "Button"; + +"M22-MV-Wnj.text" = "扬声器"; + +"nd3-pG-lkL.title" = "播放器类型选项(ijkplauer/原生)"; diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ContentInspect/Base.lproj/ContentInspect.storyboard b/iOS/APIExample/APIExample/Examples/Advanced/ContentInspect/Base.lproj/ContentInspect.storyboard index 3e415eb8d..59e2b05f6 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ContentInspect/Base.lproj/ContentInspect.storyboard +++ b/iOS/APIExample/APIExample/Examples/Advanced/ContentInspect/Base.lproj/ContentInspect.storyboard @@ -1,9 +1,9 @@ - + - + @@ -18,11 +18,11 @@ - + -