
Commit 16196c2

Merge pull request #32 from GetStream/feat/allow-handling-audio-interruptions
feat: Added support for audio focus interruption handling
2 parents bdda90e + aa2eddb commit 16196c2

15 files changed: 358 additions & 5 deletions

.github/workflows/build.yml

Lines changed: 1 addition & 1 deletion
@@ -25,7 +25,7 @@ jobs:
       - name: Dart Format Check
         run: dart format lib/ test/ --set-exit-if-changed
       - name: Import Sorter Check
-        run: flutter pub run import_sorter:main --no-comments --exit-if-changed
+        run: dart pub run import_sorter:main --no-comments --exit-if-changed
       - name: Dart Analyze Check
         run: flutter analyze
       - name: Dart Test Check

CHANGELOG.md

Lines changed: 3 additions & 0 deletions
@@ -1,6 +1,9 @@
 
 # Changelog
 
+[1.0.7] - 2025-06-10
+* Added `handleCallInterruptionCallbacks` method to provide an option to handle system audio interruptions like incoming calls or other media playing
+
 [1.0.6] - 2025-05-27
 * [iOS] Added native audio route picker for iOS
 * [Android] Expanded the mapping for audio device types

android/src/main/java/io/getstream/webrtc/flutter/FlutterWebRTCPlugin.java

Lines changed: 5 additions & 1 deletion
@@ -45,7 +45,11 @@ public class FlutterWebRTCPlugin implements FlutterPlugin, ActivityAware, EventC
   public EventChannel.EventSink eventSink;
 
   public FlutterWebRTCPlugin() {
-    sharedSingleton = this;
+    if (sharedSingleton == null) {
+      sharedSingleton = this;
+    } else {
+      Log.w(TAG, "Warning - Multiple plugin instances detected. Keeping existing singleton.");
+    }
   }
 
   public static FlutterWebRTCPlugin sharedSingleton;

android/src/main/java/io/getstream/webrtc/flutter/MethodCallHandlerImpl.java

Lines changed: 45 additions & 0 deletions
@@ -24,6 +24,7 @@
 import io.getstream.webrtc.flutter.audio.AudioProcessingFactoryProvider;
 import io.getstream.webrtc.flutter.audio.AudioProcessingController;
 import io.getstream.webrtc.flutter.audio.AudioSwitchManager;
+import io.getstream.webrtc.flutter.audio.AudioFocusManager;
 import io.getstream.webrtc.flutter.audio.AudioUtils;
 import io.getstream.webrtc.flutter.audio.LocalAudioTrack;
 // import io.getstream.webrtc.flutter.audio.PlaybackSamplesReadyCallbackAdapter;

@@ -124,6 +125,8 @@ public class MethodCallHandlerImpl implements MethodCallHandler, StateProvider {
 
   private AudioDeviceModule audioDeviceModule;
 
+  private AudioFocusManager audioFocusManager;
+
   private FlutterRTCFrameCryptor frameCryptor;
 
   private Activity activity;

@@ -147,6 +150,10 @@ static private void resultError(String method, String error, Result result) {
   }
 
   void dispose() {
+    if (audioFocusManager != null) {
+      audioFocusManager.setAudioFocusChangeListener(null);
+      audioFocusManager = null;
+    }
     for (final MediaStream mediaStream : localStreams.values()) {
       streamDispose(mediaStream);
       mediaStream.dispose();

@@ -161,6 +168,7 @@ void dispose() {
     }
     mPeerConnectionObservers.clear();
   }
+
   private void initialize(boolean bypassVoiceProcessing, int networkIgnoreMask, boolean forceSWCodec, List<String> forceSWCodecList,
                           @Nullable ConstraintsMap androidAudioConfiguration) {
     if (mFactory != null) {

@@ -359,6 +367,43 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) {
         result.success(null);
         break;
       }
+      case "handleCallInterruptionCallbacks": {
+        String interruptionSource = call.argument("androidInterruptionSource");
+        AudioFocusManager.InterruptionSource source;
+
+        switch (interruptionSource) {
+          case "audioFocusOnly":
+            source = AudioFocusManager.InterruptionSource.AUDIO_FOCUS_ONLY;
+            break;
+          case "telephonyOnly":
+            source = AudioFocusManager.InterruptionSource.TELEPHONY_ONLY;
+            break;
+          case "audioFocusAndTelephony":
+            source = AudioFocusManager.InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY;
+            break;
+          default:
+            source = AudioFocusManager.InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY;
+            break;
+        }
+
+        audioFocusManager = new AudioFocusManager(context, source);
+        audioFocusManager.setAudioFocusChangeListener(new AudioFocusManager.AudioFocusChangeListener() {
+          @Override
+          public void onInterruptionStart() {
+            ConstraintsMap params = new ConstraintsMap();
+            params.putString("event", "onInterruptionStart");
+            FlutterWebRTCPlugin.sharedSingleton.sendEvent(params.toMap());
+          }
+
+          @Override
+          public void onInterruptionEnd() {
+            ConstraintsMap params = new ConstraintsMap();
+            params.putString("event", "onInterruptionEnd");
+            FlutterWebRTCPlugin.sharedSingleton.sendEvent(params.toMap());
+          }
+        });
+        result.success(null);
+      }
       case "createPeerConnection": {
         Map<String, Object> constraints = call.argument("constraints");
         Map<String, Object> configuration = call.argument("configuration");
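
The new case maps the "androidInterruptionSource" string sent over the method channel onto AudioFocusManager.InterruptionSource, defaulting to AUDIO_FOCUS_AND_TELEPHONY. A minimal sketch of that mapping as a standalone helper — the helper name is hypothetical, and the null guard is an addition, since switching on a null String (e.g. if the Dart side omits the argument) throws a NullPointerException:

  // Hypothetical helper mirroring the switch in onMethodCall above.
  // The null check is an assumption added here, not part of the commit.
  static AudioFocusManager.InterruptionSource parseInterruptionSource(String raw) {
    if (raw == null) {
      return AudioFocusManager.InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY;
    }
    switch (raw) {
      case "audioFocusOnly":
        return AudioFocusManager.InterruptionSource.AUDIO_FOCUS_ONLY;
      case "telephonyOnly":
        return AudioFocusManager.InterruptionSource.TELEPHONY_ONLY;
      case "audioFocusAndTelephony":
      default:
        return AudioFocusManager.InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY;
    }
  }
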
android/src/main/java/io/getstream/webrtc/flutter/audio/AudioFocusManager.java

Lines changed: 209 additions & 0 deletions

@@ -0,0 +1,209 @@ (new file)

package io.getstream.webrtc.flutter.audio;

import android.content.Context;
import android.media.AudioAttributes;
import android.media.AudioFocusRequest;
import android.media.AudioManager;
import android.os.Build;
import android.telephony.PhoneStateListener;
import android.telephony.TelephonyCallback;
import android.telephony.TelephonyManager;
import android.util.Log;

import io.getstream.webrtc.flutter.utils.ConstraintsMap;

public class AudioFocusManager {
  private static final String TAG = "AudioFocusManager";

  public enum InterruptionSource {
    AUDIO_FOCUS_ONLY,
    TELEPHONY_ONLY,
    AUDIO_FOCUS_AND_TELEPHONY
  }

  private AudioManager audioManager;
  private TelephonyManager telephonyManager;

  private PhoneStateListener phoneStateListener;
  private AudioFocusChangeListener focusChangeListener;

  private TelephonyCallback telephonyCallback;
  private AudioFocusRequest audioFocusRequest;

  private InterruptionSource interruptionSource;
  private Context context;

  public interface AudioFocusChangeListener {
    void onInterruptionStart();
    void onInterruptionEnd();
  }

  public AudioFocusManager(Context context) {
    this(context, InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY);
  }

  public AudioFocusManager(Context context, InterruptionSource interruptionSource) {
    this.context = context;
    this.interruptionSource = interruptionSource;

    if (interruptionSource == InterruptionSource.AUDIO_FOCUS_ONLY ||
        interruptionSource == InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY) {
      audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
    }

    if (interruptionSource == InterruptionSource.TELEPHONY_ONLY ||
        interruptionSource == InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY) {
      telephonyManager = (TelephonyManager) context.getSystemService(Context.TELEPHONY_SERVICE);
    }
  }

  public void setAudioFocusChangeListener(AudioFocusChangeListener listener) {
    this.focusChangeListener = listener;

    if (listener != null) {
      startMonitoring();
    } else {
      stopMonitoring();
    }
  }

  public void startMonitoring() {
    if (interruptionSource == InterruptionSource.AUDIO_FOCUS_ONLY ||
        interruptionSource == InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY) {
      requestAudioFocusInternal();
    }

    if (interruptionSource == InterruptionSource.TELEPHONY_ONLY ||
        interruptionSource == InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY) {
      registerTelephonyListener();
    }
  }

  public void stopMonitoring() {
    if (interruptionSource == InterruptionSource.AUDIO_FOCUS_ONLY ||
        interruptionSource == InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY) {
      abandonAudioFocusInternal();
    }

    if (interruptionSource == InterruptionSource.TELEPHONY_ONLY ||
        interruptionSource == InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY) {
      unregisterTelephonyListener();
    }
  }

  private void requestAudioFocusInternal() {
    if (audioManager == null) {
      Log.w(TAG, "AudioManager is null, cannot request audio focus");
      return;
    }

    AudioManager.OnAudioFocusChangeListener onAudioFocusChangeListener = focusChange -> {
      switch (focusChange) {
        case AudioManager.AUDIOFOCUS_LOSS:
        case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
          Log.d(TAG, "Audio focus lost");
          if (focusChangeListener != null) {
            focusChangeListener.onInterruptionStart();
          }
          break;
        case AudioManager.AUDIOFOCUS_GAIN:
          Log.d(TAG, "Audio focus gained");
          if (focusChangeListener != null) {
            focusChangeListener.onInterruptionEnd();
          }
          break;
      }
    };

    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
      AudioAttributes audioAttributes = new AudioAttributes.Builder()
          .setUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION)
          .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
          .build();

      audioFocusRequest = new AudioFocusRequest.Builder(AudioManager.AUDIOFOCUS_GAIN)
          .setAudioAttributes(audioAttributes)
          .setOnAudioFocusChangeListener(onAudioFocusChangeListener)
          .build();

      audioManager.requestAudioFocus(audioFocusRequest);
    } else {
      audioManager.requestAudioFocus(onAudioFocusChangeListener,
          AudioManager.STREAM_VOICE_CALL,
          AudioManager.AUDIOFOCUS_GAIN);
    }
  }

  private void registerTelephonyListener() {
    if (telephonyManager == null) {
      Log.w(TAG, "TelephonyManager is null, cannot register telephony listener");
      return;
    }

    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) {
      // Use TelephonyCallback for Android 12+ (API 31+)
      class CallStateCallback extends TelephonyCallback implements TelephonyCallback.CallStateListener {
        @Override
        public void onCallStateChanged(int state) {
          handleCallStateChange(state);
        }
      }
      telephonyCallback = new CallStateCallback();
      telephonyManager.registerTelephonyCallback(context.getMainExecutor(), telephonyCallback);
    } else {
      // Use PhoneStateListener for older Android versions
      phoneStateListener = new PhoneStateListener() {
        @Override
        public void onCallStateChanged(int state, String phoneNumber) {
          handleCallStateChange(state);
        }
      };
      telephonyManager.listen(phoneStateListener, PhoneStateListener.LISTEN_CALL_STATE);
    }
  }

  private void handleCallStateChange(int state) {
    if (focusChangeListener == null) {
      return;
    }

    switch (state) {
      case TelephonyManager.CALL_STATE_RINGING:
      case TelephonyManager.CALL_STATE_OFFHOOK:
        Log.d(TAG, "Phone call interruption began");
        focusChangeListener.onInterruptionStart();
        break;
      case TelephonyManager.CALL_STATE_IDLE:
        Log.d(TAG, "Phone call interruption ended");
        focusChangeListener.onInterruptionEnd();
        break;
    }
  }

  private void abandonAudioFocusInternal() {
    if (audioManager == null) {
      return;
    }

    int result;
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O && audioFocusRequest != null) {
      result = audioManager.abandonAudioFocusRequest(audioFocusRequest);
    } else {
      result = audioManager.abandonAudioFocus(null);
    }
  }

  private void unregisterTelephonyListener() {
    if (telephonyManager == null) {
      return;
    }

    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S && telephonyCallback != null) {
      telephonyManager.unregisterTelephonyCallback(telephonyCallback);
      telephonyCallback = null;
    } else if (phoneStateListener != null) {
      telephonyManager.listen(phoneStateListener, PhoneStateListener.LISTEN_NONE);
      phoneStateListener = null;
    }
  }
}
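
A minimal usage sketch of the class above, assuming an Android Context and a main-thread caller; the listener bodies are illustrative:

  AudioFocusManager focusManager = new AudioFocusManager(
      context, AudioFocusManager.InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY);

  // A non-null listener triggers startMonitoring(): audio focus is requested
  // and (for this source) a telephony call-state listener is registered.
  focusManager.setAudioFocusChangeListener(new AudioFocusManager.AudioFocusChangeListener() {
    @Override
    public void onInterruptionStart() {
      // Another app or an incoming phone call took the audio path;
      // e.g. pause or mute local WebRTC audio here.
    }

    @Override
    public void onInterruptionEnd() {
      // Focus returned or the phone call ended; resume audio here.
    }
  });

  // Passing null triggers stopMonitoring(): focus is abandoned and the
  // telephony listener is unregistered (this is what dispose() relies on).
  focusManager.setAudioFocusChangeListener(null);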

common/darwin/Classes/FlutterWebRTCPlugin.h

Lines changed: 2 additions & 0 deletions
@@ -56,7 +56,9 @@ typedef void (^CapturerStopHandler)(CompletionHandler _Nonnull handler);
 @property(nonatomic, strong) NSObject<FlutterBinaryMessenger>* _Nonnull messenger;
 @property(nonatomic, strong) RTCCameraVideoCapturer* _Nullable videoCapturer;
 @property(nonatomic, strong) FlutterRTCFrameCapturer* _Nullable frameCapturer;
+#if TARGET_OS_IPHONE
 @property(nonatomic, strong) AVAudioSessionPort _Nullable preferredInput;
+#endif
 @property (nonatomic, strong) VideoEffectProcessor* _Nullable videoEffectProcessor;
 
 @property(nonatomic, strong) NSString* _Nonnull focusMode;

common/darwin/Classes/FlutterWebRTCPlugin.m

Lines changed: 25 additions & 0 deletions
@@ -106,7 +106,9 @@ @implementation FlutterWebRTCPlugin {
   id _textures;
   BOOL _speakerOn;
   BOOL _speakerOnButPreferBluetooth;
+#if TARGET_OS_IPHONE
   AVAudioSessionPort _preferredInput;
+#endif
   AudioManager* _audioManager;
 #if TARGET_OS_IPHONE
   FLutterRTCVideoPlatformViewFactory *_platformViewFactory;

@@ -125,7 +127,9 @@ + (FlutterWebRTCPlugin *)sharedSingleton
 
 @synthesize messenger = _messenger;
 @synthesize eventSink = _eventSink;
+#if TARGET_OS_IPHONE
 @synthesize preferredInput = _preferredInput;
+#endif
 @synthesize audioManager = _audioManager;
 
 + (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {

@@ -250,6 +254,19 @@ - (void)didSessionRouteChange:(NSNotification*)notification {
 #endif
 }
 
+- (void)handleInterruption:(NSNotification*)notification {
+#if TARGET_OS_IPHONE
+  NSDictionary* info = notification.userInfo;
+  AVAudioSessionInterruptionType type = [info[AVAudioSessionInterruptionTypeKey] unsignedIntegerValue];
+
+  if (type == AVAudioSessionInterruptionTypeBegan) {
+    postEvent(self.eventSink, @{@"event": @"onInterruptionStart"});
+  } else if (type == AVAudioSessionInterruptionTypeEnded) {
+    postEvent(self.eventSink, @{@"event": @"onInterruptionEnd"});
+  }
+#endif
+}
+
 - (void)initialize:(NSArray*)networkIgnoreMask
     bypassVoiceProcessing:(BOOL)bypassVoiceProcessing {
   // RTCSetMinDebugLogLevel(RTCLoggingSeverityVerbose);

@@ -312,6 +329,14 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
     NSArray* names = argsMap[@"names"];
 
     [self mediaStreamTrackSetVideoEffects:trackId names:names];
+  } else if ([@"handleCallInterruptionCallbacks" isEqualToString:call.method]) {
+#if TARGET_OS_IPHONE
+    [[NSNotificationCenter defaultCenter] addObserver:self
+                                             selector:@selector(handleInterruption:)
+                                                 name:AVAudioSessionInterruptionNotification
+                                               object:[AVAudioSession sharedInstance]];
+#endif
+    result(@"");
   } else if ([@"createPeerConnection" isEqualToString:call.method]) {
     NSDictionary* argsMap = call.arguments;
     NSDictionary* configuration = argsMap[@"configuration"];

ios/stream_webrtc_flutter.podspec

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@
 #
 Pod::Spec.new do |s|
   s.name             = 'stream_webrtc_flutter'
-  s.version          = '1.0.6'
+  s.version          = '1.0.7'
   s.summary          = 'Flutter WebRTC plugin for iOS.'
   s.description      = <<-DESC
 A new flutter plugin project.
