Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

feat: web/native device consistency management #243

Merged
merged 12 commits into from
Mar 7, 2023
110 changes: 50 additions & 60 deletions example/lib/widgets/controls.dart
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,6 @@ class _ControlsWidgetState extends State<ControlsWidget> {
List<MediaDevice>? _audioInputs;
List<MediaDevice>? _audioOutputs;
List<MediaDevice>? _videoInputs;
MediaDevice? _selectedVideoInput;

StreamSubscription? _subscription;

Expand Down Expand Up @@ -61,7 +60,6 @@ class _ControlsWidgetState extends State<ControlsWidget> {
_audioInputs = devices.where((d) => d.kind == 'audioinput').toList();
_audioOutputs = devices.where((d) => d.kind == 'audiooutput').toList();
_videoInputs = devices.where((d) => d.kind == 'videoinput').toList();
_selectedVideoInput = _videoInputs?.first;
setState(() {});
}

Expand Down Expand Up @@ -94,23 +92,18 @@ class _ControlsWidgetState extends State<ControlsWidget> {
}

void _selectAudioOutput(MediaDevice device) async {
  // Delegate to Room so the selection stays consistent across web and
  // native: Room.setAudioOutputDevice updates Hardware and persists the
  // choice in roomOptions itself, so no direct Hardware call is needed.
  await widget.room.setAudioOutputDevice(device);
  setState(() {});
}

void _selectAudioInput(MediaDevice device) async {
  // Delegate to Room: Room.setAudioInputDevice already re-routes the
  // local audio tracks (web) or selects via Hardware (native) and
  // records the device in roomOptions, so the direct Hardware call
  // would be redundant.
  await widget.room.setAudioInputDevice(device);
  setState(() {});
}

void _selectVideoInput(MediaDevice device) async {
  // Room.setVideoInputDevice performs the switchCamera call, guarded by
  // its own selected-device check, and records the selection in
  // roomOptions — the widget no longer tracks the selected device
  // locally (the _selectedVideoInput field was removed).
  await widget.room.setVideoInputDevice(device);
  setState(() {});
}

void _toggleCamera() async {
Expand Down Expand Up @@ -293,8 +286,7 @@ class _ControlsWidgetState extends State<ControlsWidget> {
value: device,
child: ListTile(
leading: (device.deviceId ==
Hardware
.instance.selectedAudioInput?.deviceId)
widget.room.selectedAudioInputDeviceId)
? const Icon(
EvaIcons.checkmarkSquare,
color: Colors.white,
Expand All @@ -317,45 +309,43 @@ class _ControlsWidgetState extends State<ControlsWidget> {
icon: const Icon(EvaIcons.micOff),
tooltip: 'un-mute audio',
),
if (!lkPlatformIs(PlatformType.web))
PopupMenuButton<MediaDevice>(
icon: const Icon(Icons.volume_up),
itemBuilder: (BuildContext context) {
return [
const PopupMenuItem<MediaDevice>(
value: null,
child: ListTile(
leading: Icon(
EvaIcons.speaker,
color: Colors.white,
),
title: Text('Select Audio Output'),
PopupMenuButton<MediaDevice>(
icon: const Icon(Icons.volume_up),
itemBuilder: (BuildContext context) {
return [
const PopupMenuItem<MediaDevice>(
value: null,
child: ListTile(
leading: Icon(
EvaIcons.speaker,
color: Colors.white,
),
title: Text('Select Audio Output'),
),
if (_audioOutputs != null)
..._audioOutputs!.map((device) {
return PopupMenuItem<MediaDevice>(
value: device,
child: ListTile(
leading: (device.deviceId ==
Hardware
.instance.selectedAudioOutput?.deviceId)
? const Icon(
EvaIcons.checkmarkSquare,
color: Colors.white,
)
: const Icon(
EvaIcons.square,
color: Colors.white,
),
title: Text(device.label),
),
onTap: () => _selectAudioOutput(device),
);
}).toList()
];
},
),
),
if (_audioOutputs != null)
..._audioOutputs!.map((device) {
return PopupMenuItem<MediaDevice>(
value: device,
child: ListTile(
leading: (device.deviceId ==
widget.room.selectedAudioOutputDeviceId)
? const Icon(
EvaIcons.checkmarkSquare,
color: Colors.white,
)
: const Icon(
EvaIcons.square,
color: Colors.white,
),
title: Text(device.label),
),
onTap: () => _selectAudioOutput(device),
);
}).toList()
];
},
),
if (participant.isCameraEnabled())
PopupMenuButton<MediaDevice>(
icon: const Icon(EvaIcons.video),
Expand All @@ -377,16 +367,16 @@ class _ControlsWidgetState extends State<ControlsWidget> {
return PopupMenuItem<MediaDevice>(
value: device,
child: ListTile(
leading:
(device.deviceId == _selectedVideoInput?.deviceId)
? const Icon(
EvaIcons.checkmarkSquare,
color: Colors.white,
)
: const Icon(
EvaIcons.square,
color: Colors.white,
),
leading: (device.deviceId ==
widget.room.selectedVideoInputDeviceId)
? const Icon(
EvaIcons.checkmarkSquare,
color: Colors.white,
)
: const Icon(
EvaIcons.square,
color: Colors.white,
),
title: Text(device.label),
),
onTap: () => _selectVideoInput(device),
Expand Down
70 changes: 5 additions & 65 deletions example/lib/widgets/participant.dart
Original file line number Diff line number Diff line change
Expand Up @@ -82,20 +82,12 @@ abstract class _ParticipantWidgetState<T extends ParticipantWidget>
VideoTrack? get activeVideoTrack;
TrackPublication? get videoPublication;
TrackPublication? get firstAudioPublication;
List<MediaDevice>? _audioOutputs;
MediaDevice? _selectedAudioDevice;

@override
void initState() {
super.initState();
widget.participant.addListener(_onParticipantChanged);
_onParticipantChanged();
Hardware.instance.audioOutputs().then((value) {
setState(() {
_audioOutputs = value;
_selectedAudioDevice = _audioOutputs?.firstOrNull;
});
});
}

@override
Expand All @@ -116,14 +108,6 @@ abstract class _ParticipantWidgetState<T extends ParticipantWidget>
// since the updated values are computed properties.
void _onParticipantChanged() => setState(() {});

void _onSelectAudioOutput(MediaDevice device) {
var audioTrack = firstAudioPublication?.track as RemoteAudioTrack;
audioTrack.setAudioOutput(device.deviceId);
setState(() {
_selectedAudioDevice = device;
});
}

// Widgets to show above the info bar
List<Widget> extraWidgets(bool isScreenShare) => [];

Expand All @@ -146,8 +130,10 @@ abstract class _ParticipantWidgetState<T extends ParticipantWidget>
InkWell(
onTap: () => setState(() => _visible = !_visible),
child: activeVideoTrack != null && !activeVideoTrack!.muted
? VideoTrackRenderer(activeVideoTrack!,
fit: RTCVideoViewObjectFit.RTCVideoViewObjectFitContain)
? VideoTrackRenderer(
activeVideoTrack!,
fit: RTCVideoViewObjectFit.RTCVideoViewObjectFitContain,
)
: const NoVideoWidget(),
),

Expand All @@ -158,9 +144,7 @@ abstract class _ParticipantWidgetState<T extends ParticipantWidget>
crossAxisAlignment: CrossAxisAlignment.stretch,
mainAxisSize: MainAxisSize.min,
children: [
...extraWidgets(
widget.isScreenShare,
),
...extraWidgets(widget.isScreenShare),
ParticipantInfoWidget(
title: widget.participant.name.isNotEmpty
? '${widget.participant.name} (${widget.participant.identity})'
Expand Down Expand Up @@ -227,13 +211,6 @@ class _RemoteParticipantWidgetState
pub: firstAudioPublication!,
icon: EvaIcons.volumeUp,
),
if (lkPlatformIs(PlatformType.web))
RemoteTrackAudioOutputSelectMenuWidget(
audioOutputs: _audioOutputs ?? [],
selected: _selectedAudioDevice,
onSelected: _onSelectAudioOutput,
icon: EvaIcons.speaker,
),
],
),
];
Expand Down Expand Up @@ -276,40 +253,3 @@ class RemoteTrackPublicationMenuWidget extends StatelessWidget {
),
);
}

/// Popup menu that lets the user choose an audio output device for a
/// remote track.
///
/// Renders an [icon]-labelled [PopupMenuButton] listing [audioOutputs];
/// the entry whose `deviceId` matches [selected] shows a check mark, and
/// tapping an entry invokes [onSelected] with that device.
class RemoteTrackAudioOutputSelectMenuWidget extends StatelessWidget {
  final IconData icon;
  final List<MediaDevice> audioOutputs;
  final MediaDevice? selected;
  final Function(MediaDevice) onSelected;

  const RemoteTrackAudioOutputSelectMenuWidget({
    required this.audioOutputs,
    required this.onSelected,
    required this.selected,
    required this.icon,
    Key? key,
  }) : super(key: key);

  /// Builds a single menu entry; choosing it calls [onSelected] with [device].
  PopupMenuEntry<Function> _buildItem(MediaDevice device) {
    final isSelected = device.deviceId == selected?.deviceId;
    return PopupMenuItem(
      value: () => onSelected(device),
      child: ListTile(
        trailing: isSelected
            ? const Icon(Icons.check, color: Colors.white)
            : null,
        title: Text(device.label),
      ),
    );
  }

  @override
  Widget build(BuildContext context) {
    return Material(
      color: Colors.black.withOpacity(0.3),
      child: PopupMenuButton<Function>(
        tooltip: 'Select AudioOutput',
        icon: Icon(icon),
        // The menu's values are closures; running the chosen one fires
        // the selection callback.
        onSelected: (value) => value(),
        itemBuilder: (BuildContext context) =>
            [for (final device in audioOutputs) _buildItem(device)],
      ),
    );
  }
}
100 changes: 100 additions & 0 deletions lib/src/core/room.dart
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import 'dart:async';

import 'package:collection/collection.dart';
import 'package:flutter/foundation.dart';
import 'package:livekit_client/src/hardware/hardware.dart';
import 'package:livekit_client/src/support/app_state.dart';
import 'package:meta/meta.dart';

Expand Down Expand Up @@ -343,6 +344,7 @@ class Room extends DisposableChangeNotifier with EventsEmittable<RoomEvent> {
event.stream,
trackSid,
receiver: event.receiver,
audioOutputOptions: roomOptions.defaultAudioOutputOptions,
);
} on TrackSubscriptionExceptionEvent catch (event) {
logger.severe('addSubscribedMediaTrack() throwed ${event}');
Expand Down Expand Up @@ -640,3 +642,101 @@ extension RoomDebugMethods on Room {
switchCandidate: switchCandidate);
}
}

/// Room extension methods for managing audio, video.
/// Room extension methods for managing audio, video.
extension RoomHardwareManagementMethods on Room {
  /// Get current audio output device.
  ///
  /// Prefers the device recorded in [roomOptions]; falls back to the
  /// global [Hardware] selection.
  String? get selectedAudioOutputDeviceId =>
      roomOptions.defaultAudioOutputOptions.deviceId ??
      Hardware.instance.selectedAudioOutput?.deviceId;

  /// Get current audio input device.
  String? get selectedAudioInputDeviceId =>
      roomOptions.defaultAudioCaptureOptions.deviceId ??
      Hardware.instance.selectedAudioInput?.deviceId;

  /// Get current video input device.
  String? get selectedVideoInputDeviceId =>
      roomOptions.defaultCameraCaptureOptions.deviceId ??
      Hardware.instance.selectedVideoInput?.deviceId;

  /// Get mobile device's speaker status.
  bool? get speakerOn => roomOptions.defaultAudioOutputOptions.speakerOn;

  /// Set audio output device.
  ///
  /// On web, re-routes every remote participant's audio track to [device]
  /// via `setSinkId`; on other platforms delegates to [Hardware]. The
  /// choice is persisted in [roomOptions] so future tracks pick it up.
  Future<void> setAudioOutputDevice(MediaDevice device) async {
    if (lkPlatformIs(PlatformType.web)) {
      // Use for-loops (not forEach) so each setSinkId Future is awaited
      // and errors are not silently dropped.
      for (final participant in participants.values) {
        for (final audioTrack in participant.audioTracks) {
          await audioTrack.track?.setSinkId(device.deviceId);
        }
      }
      Hardware.instance.selectedAudioOutput = device;
    } else {
      await Hardware.instance.selectAudioOutput(device);
    }
    engine.roomOptions = engine.roomOptions.copyWith(
      defaultAudioOutputOptions: roomOptions.defaultAudioOutputOptions.copyWith(
        deviceId: device.deviceId,
      ),
    );
  }

  /// Set audio input device.
  ///
  /// On web, switches the local participant's audio tracks to [device];
  /// on other platforms delegates to [Hardware]. The choice is persisted
  /// in [roomOptions].
  Future<void> setAudioInputDevice(MediaDevice device) async {
    if (lkPlatformIs(PlatformType.web) && localParticipant != null) {
      for (final audioTrack in localParticipant!.audioTracks) {
        await audioTrack.track?.setDeviceId(device.deviceId);
      }
      Hardware.instance.selectedAudioInput = device;
    } else {
      await Hardware.instance.selectAudioInput(device);
    }
    engine.roomOptions = engine.roomOptions.copyWith(
      defaultAudioCaptureOptions:
          roomOptions.defaultAudioCaptureOptions.copyWith(
        deviceId: device.deviceId,
      ),
    );
  }

  /// Set video input device.
  ///
  /// Switches the local camera track to [device] when the selection
  /// actually changes; no-op if there is no local video track. The
  /// choice is persisted in [roomOptions].
  Future<void> setVideoInputDevice(MediaDevice device) async {
    final track = localParticipant?.videoTracks.firstOrNull?.track;
    if (track == null) return;
    if (selectedVideoInputDeviceId != device.deviceId) {
      await track.switchCamera(device.deviceId);
      Hardware.instance.selectedVideoInput = device;
    }
    engine.roomOptions = engine.roomOptions.copyWith(
      defaultCameraCaptureOptions:
          roomOptions.defaultCameraCaptureOptions.copyWith(
        deviceId: device.deviceId,
      ),
    );
  }

  /// Toggle the mobile device's speakerphone.
  ///
  /// Only effective on iOS/Android; the state is persisted in
  /// [roomOptions] so [applyAudioSpeakerSettings] can re-apply it.
  Future<void> setSpeakerOn(bool speakerOn) async {
    if (lkPlatformIs(PlatformType.iOS) || lkPlatformIs(PlatformType.android)) {
      await Hardware.instance.setSpeakerphoneOn(speakerOn);
      engine.roomOptions = engine.roomOptions.copyWith(
        defaultAudioOutputOptions:
            roomOptions.defaultAudioOutputOptions.copyWith(
          speakerOn: speakerOn,
        ),
      );
    }
  }

  /// Apply audio output device settings.
  ///
  /// Re-applies a previously recorded speakerphone preference (mobile
  /// platforms only); no-op when no preference was set.
  @internal
  Future<void> applyAudioSpeakerSettings() async {
    // Copy to a local to promote the nullable and avoid the `!` assert.
    final speakerOn = roomOptions.defaultAudioOutputOptions.speakerOn;
    if (speakerOn != null &&
        (lkPlatformIs(PlatformType.iOS) ||
            lkPlatformIs(PlatformType.android))) {
      await Hardware.instance.setSpeakerphoneOn(speakerOn);
    }
  }
}
Loading