Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -116,6 +116,7 @@ class GetUserMediaImpl {
private final Map<String, VideoCapturerInfo> mVideoCapturers = new HashMap<>();
private final Map<String, SurfaceTextureHelper> mSurfaceTextureHelpers = new HashMap<>();
private final Map<String, VideoSource> mVideoSources = new HashMap<>();
private final Map<String, AudioSource> mAudioSources = new HashMap<>();

private final StateProvider stateProvider;
private final Context applicationContext;
Expand Down Expand Up @@ -383,6 +384,8 @@ private ConstraintsMap getUserAudio(ConstraintsMap constraints, MediaStream stre
PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory();
AudioSource audioSource = pcFactory.createAudioSource(audioConstraints);

mAudioSources.put(trackId, audioSource);

if (deviceId != null) {
try {
if (VERSION.SDK_INT >= VERSION_CODES.M) {
Expand Down Expand Up @@ -712,6 +715,47 @@ private Integer getConstrainInt(@Nullable ConstraintsMap constraintsMap, String
return null;
}

/**
 * Clones the local track identified by {@code trackId} into a new local track
 * that shares the original's source (capturer for video, audio source for audio).
 *
 * @param trackId id of the existing local track to clone.
 * @return a ConstraintsMap describing the newly created track
 *         ("id", "label", "kind", "enabled", "readyState", "remote").
 */
public ConstraintsMap cloneTrack(String trackId) {
    String newTrackId = stateProvider.getNextTrackUUID();
    LocalTrack originalLocalTrack = stateProvider.getLocalTrack(trackId);

    PeerConnectionFactory pcFactory = stateProvider.getPeerConnectionFactory();
    ConstraintsMap trackParams = new ConstraintsMap();

    if (originalLocalTrack instanceof LocalVideoTrack) {
        VideoSource videoSource = mVideoSources.get(trackId);

        // The clone shares the original's capturer pipeline, so register the
        // same helper/source under the new id.
        mSurfaceTextureHelpers.put(newTrackId, mSurfaceTextureHelpers.get(trackId));
        mVideoSources.put(newTrackId, videoSource);

        VideoTrack track = pcFactory.createVideoTrack(newTrackId, videoSource);
        LocalVideoTrack localVideoTrack = new LocalVideoTrack(track);

        videoSource.setVideoProcessor(localVideoTrack);
        stateProvider.putLocalTrack(track.id(), localVideoTrack);

        trackParams.putBoolean("enabled", track.enabled());
        trackParams.putString("kind", "video");
        trackParams.putString("readyState", track.state().toString());
    } else {
        AudioSource audioSource = mAudioSources.get(trackId);

        // Register the shared source under the new id so the clone can later
        // be looked up (and itself be cloned), mirroring the video branch.
        mAudioSources.put(newTrackId, audioSource);

        // FIX: the clone must be created with the NEW id. The previous code
        // passed the original trackId, producing a second track with a
        // duplicate id that clobbered the original in stateProvider.
        AudioTrack track = pcFactory.createAudioTrack(newTrackId, audioSource);

        stateProvider.putLocalTrack(track.id(), new LocalAudioTrack(track));

        trackParams.putBoolean("enabled", track.enabled());
        trackParams.putString("kind", "audio");
        trackParams.putString("readyState", track.state().toString());
    }

    trackParams.putString("id", newTrackId);
    trackParams.putString("label", newTrackId);
    trackParams.putBoolean("remote", false);

    return trackParams;
}

private ConstraintsMap getUserVideo(ConstraintsMap constraints, MediaStream mediaStream) {
ConstraintsMap videoConstraintsMap = null;
ConstraintsMap videoConstraintsMandatory = null;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -546,6 +546,16 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) {
result.success(null);
break;
}
case "trackClone": {
String trackId = call.argument("trackId");
String peerConnectionId = call.argument("peerConnectionId");


ConstraintsMap map = getUserMediaImpl.cloneTrack(trackId);

result.success(map.toMap());
break;
}
case "restartIce": {
String peerConnectionId = call.argument("peerConnectionId");
restartIce(peerConnectionId);
Expand Down
2 changes: 2 additions & 0 deletions common/darwin/Classes/FlutterRTCMediaStream.h
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@

@interface FlutterWebRTCPlugin (RTCMediaStream)

- (RTCVideoTrack*)cloneTrack:(nonnull NSString*)trackId;

- (void)getUserMedia:(nonnull NSDictionary*)constraints result:(nonnull FlutterResult)result;

- (void)createLocalMediaStream:(nonnull FlutterResult)result;
Expand Down
52 changes: 52 additions & 0 deletions common/darwin/Classes/FlutterRTCMediaStream.m
Original file line number Diff line number Diff line change
Expand Up @@ -321,6 +321,58 @@ - (int)getConstrainInt:(NSDictionary*)constraints forKey:(NSString*)key {
return 0;
}

/**
 * Clones the local track identified by trackId, sharing the original track's
 * source. The clone is registered in self.localTracks under a fresh UUID and
 * added to every local stream that already contains the original track.
 *
 * Returns the cloned track, or the (possibly nil) original track when its
 * kind is neither "audio" nor "video".
 *
 * NOTE(review): the header declares this method as returning RTCVideoTrack*,
 * but it can also return an RTCAudioTrack* — confirm and align the header
 * declaration with this RTCMediaStreamTrack* signature.
 */
- (RTCMediaStreamTrack*)cloneTrack:(nonnull NSString*)trackId {
  NSString* newTrackId = [[NSUUID UUID] UUIDString];

  RTCMediaStreamTrack* originalTrack = [self trackForId:trackId peerConnectionId:nil];

  if (originalTrack != nil && [originalTrack.kind isEqualToString:@"audio"]) {
    RTCAudioTrack* originalAudioTrack = (RTCAudioTrack*)originalTrack;

    // FIX: build the clone from the ORIGINAL track's source and with the NEW
    // id. The previous code called audioTrackWithTrackId:trackId, which both
    // reused the original id (duplicate-id collision) and ignored the
    // original's audio source entirely.
    RTCAudioTrack* audioTrack =
        [self.peerConnectionFactory audioTrackWithSource:originalAudioTrack.source
                                                 trackId:newTrackId];

    LocalAudioTrack* localAudioTrack = [[LocalAudioTrack alloc] initWithTrack:audioTrack];

    audioTrack.settings = originalAudioTrack.settings;
    [self.localTracks setObject:localAudioTrack forKey:newTrackId];

    // Add the clone to every local stream that contains the original track.
    for (NSString* streamId in self.localStreams) {
      RTCMediaStream* stream = [self.localStreams objectForKey:streamId];
      for (RTCAudioTrack* track in stream.audioTracks) {
        if ([trackId isEqualToString:track.trackId]) {
          [stream addAudioTrack:audioTrack];
        }
      }
    }

    return audioTrack;
  } else if (originalTrack != nil && [originalTrack.kind isEqualToString:@"video"]) {
    RTCVideoTrack* originalVideoTrack = (RTCVideoTrack*)originalTrack;
    RTCVideoSource* videoSource = originalVideoTrack.source;
    // Only look up the LocalVideoTrack wrapper here, where the kind is known
    // to be video (the old code cast unconditionally at the top).
    LocalVideoTrack* originalLocalTrack = self.localTracks[trackId];

    RTCVideoTrack* videoTrack = [self.peerConnectionFactory videoTrackWithSource:videoSource
                                                                         trackId:newTrackId];
    LocalVideoTrack* localVideoTrack =
        [[LocalVideoTrack alloc] initWithTrack:videoTrack
                               videoProcessing:originalLocalTrack.processing];

    videoTrack.settings = originalVideoTrack.settings;
    [self.localTracks setObject:localVideoTrack forKey:newTrackId];

    for (NSString* streamId in self.localStreams) {
      RTCMediaStream* stream = [self.localStreams objectForKey:streamId];
      for (RTCVideoTrack* track in stream.videoTracks) {
        // FIX: compare against each stream track's id. The previous code
        // compared trackId with itself (always true), so the clone was added
        // to every local stream regardless of membership.
        if ([trackId isEqualToString:track.trackId]) {
          [stream addVideoTrack:videoTrack];
        }
      }
    }

    return videoTrack;
  }

  return originalTrack;
}

/**
* Initializes a new {@link RTCVideoTrack} which satisfies specific constraints,
* adds it to a specific {@link RTCMediaStream}, and reports success to a
Expand Down
10 changes: 9 additions & 1 deletion common/darwin/Classes/FlutterWebRTCPlugin.m
Original file line number Diff line number Diff line change
Expand Up @@ -921,7 +921,15 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
audioSource.volume = [volume doubleValue];
}
result(nil);
} else if ([@"setMicrophoneMute" isEqualToString:call.method]) {
}
else if ([@"trackClone" isEqualToString:call.method]){
NSDictionary* argsMap = call.arguments;
NSString* trackId = argsMap[@"trackId"];
RTCMediaStreamTrack* track = [self cloneTrack:trackId];

result([self mediaTrackToMap:track]);
}
else if ([@"setMicrophoneMute" isEqualToString:call.method]) {
NSDictionary* argsMap = call.arguments;
NSString* trackId = argsMap[@"trackId"];
NSNumber* mute = argsMap[@"mute"];
Expand Down
17 changes: 17 additions & 0 deletions lib/src/native/media_stream_track_impl.dart
Original file line number Diff line number Diff line change
Expand Up @@ -129,4 +129,21 @@ class MediaStreamTrackNative extends MediaStreamTrack {
<String, dynamic>{'trackId': _trackId},
);
}

/// Clones this track on the platform side and wraps the result in a new
/// [MediaStreamTrackNative].
///
/// Throws an [Exception] when the platform channel yields no response.
@override
Future<MediaStreamTrack> clone() async {
  final args = <String, dynamic>{
    'trackId': _trackId,
    'peerConnectionId': _peerConnectionId,
  };
  final response = await WebRTC.invokeMethod('trackClone', args);

  if (response == null) {
    throw Exception('clone return null, something wrong');
  }

  return MediaStreamTrackNative.fromMap(response, peerConnectionId);
}
}