Skip to content

Commit

Permalink
Merge pull request #200 from flutter-webrtc/feat/desktop-capture-testing
Browse files Browse the repository at this point in the history
Screen sharing support
  • Loading branch information
cloudwebrtc authored Oct 21, 2022
2 parents 45be971 + a92cc75 commit a6f4958
Show file tree
Hide file tree
Showing 5 changed files with 408 additions and 17 deletions.
1 change: 1 addition & 0 deletions android/app/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle"

android {
compileSdkVersion 28
ndkVersion "21.4.7075529"

lintOptions {
disable 'InvalidPackage'
Expand Down
63 changes: 55 additions & 8 deletions lib/src/call_sample/call_sample.dart
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import 'package:flutter/material.dart';
import 'dart:core';
import '../widgets/screen_select_dialog.dart';
import 'signaling.dart';
import 'package:flutter_webrtc/flutter_webrtc.dart';

Expand Down Expand Up @@ -136,15 +137,19 @@ class _CallSampleState extends State<CallSample> {
title: Text("title"),
content: Text("accept?"),
actions: <Widget>[
TextButton(
child: Text("reject"),
MaterialButton(
child: Text(
'Reject',
style: TextStyle(color: Colors.red),
),
onPressed: () => Navigator.of(context).pop(false),
),
TextButton(
child: Text("accept"),
onPressed: () {
Navigator.of(context).pop(true);
},
MaterialButton(
child: Text(
'Accept',
style: TextStyle(color: Colors.green),
),
onPressed: () => Navigator.of(context).pop(true),
),
],
);
Expand Down Expand Up @@ -201,6 +206,41 @@ class _CallSampleState extends State<CallSample> {
_signaling?.switchCamera();
}

/// Prompts the user for a screen-capture source and, on success, asks the
/// signaling layer to switch the outgoing video to screen sharing.
///
/// On desktop a [ScreenSelectDialog] lets the user pick a window/screen
/// first; on web the browser's own picker is used via getDisplayMedia.
/// Does nothing if the user cancels or capture fails.
Future<void> selectScreenSourceDialog(BuildContext context) async {
  MediaStream? screenStream;
  if (WebRTC.platformIsDesktop) {
    // Desktop: the user must pick a concrete capture source up front.
    final source = await showDialog<DesktopCapturerSource>(
      context: context,
      builder: (context) => ScreenSelectDialog(),
    );
    if (source != null) {
      try {
        final constraints = <String, dynamic>{
          'video': {
            'deviceId': {'exact': source.id},
            'mandatory': {'frameRate': 30.0}
          }
        };
        final stream =
            await navigator.mediaDevices.getDisplayMedia(constraints);
        stream.getVideoTracks()[0].onEnded = () {
          print(
              'By adding a listener on onEnded you can: 1) catch stop video sharing on Web');
        };
        screenStream = stream;
      } catch (e) {
        // Capture can fail (permission denied, source gone, ...); log only.
        print(e);
      }
    }
  } else if (WebRTC.platformIsWeb) {
    // Web: the browser shows its own source picker.
    screenStream =
        await navigator.mediaDevices.getDisplayMedia(<String, dynamic>{
      'audio': false,
      'video': true,
    });
  }
  if (screenStream != null) {
    _signaling?.switchToScreenSharing(screenStream);
  }
}

/// Delegates mic muting to the signaling layer; no-op before signaling
/// is initialized (note the `?.`).
_muteMic() {
  _signaling?.muteMic();
}
Expand Down Expand Up @@ -254,14 +294,20 @@ class _CallSampleState extends State<CallSample> {
floatingActionButtonLocation: FloatingActionButtonLocation.centerFloat,
floatingActionButton: _inCalling
? SizedBox(
width: 200.0,
width: 240.0,
child: Row(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: <Widget>[
FloatingActionButton(
child: const Icon(Icons.switch_camera),
tooltip: 'Camera',
onPressed: _switchCamera,
),
FloatingActionButton(
child: const Icon(Icons.desktop_mac),
tooltip: 'Screen Sharing',
onPressed: () => selectScreenSourceDialog(context),
),
FloatingActionButton(
onPressed: _hangUp,
tooltip: 'Hangup',
Expand All @@ -270,6 +316,7 @@ class _CallSampleState extends State<CallSample> {
),
FloatingActionButton(
child: const Icon(Icons.mic_off),
tooltip: 'Mute Mic',
onPressed: _muteMic,
)
]))
Expand Down
52 changes: 44 additions & 8 deletions lib/src/call_sample/signaling.dart
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,11 @@ enum CallState {
CallStateBye,
}

/// Which device currently feeds the outgoing video track
/// (camera capture vs. screen capture).
enum VideoSource {
  Camera,
  Screen,
}

class Session {
Session({required this.sid, required this.pid});
String pid;
Expand All @@ -49,6 +54,8 @@ class Signaling {
Map<String, Session> _sessions = {};
MediaStream? _localStream;
List<MediaStream> _remoteStreams = <MediaStream>[];
List<RTCRtpSender> _senders = <RTCRtpSender>[];
VideoSource _videoSource = VideoSource.Camera;

Function(SignalingState state)? onSignalingStateChange;
Function(Session session, CallState state)? onCallStateChange;
Expand All @@ -60,8 +67,7 @@ class Signaling {
onDataChannelMessage;
Function(Session session, RTCDataChannel dc)? onDataChannel;

// SDP semantics for every peer connection. Unified Plan only — the old
// Plan B fallback for Windows was removed. (The diff residue left two
// getters with the same name here, which does not compile.)
String get sdpSemantics => 'unified-plan';

Map<String, dynamic> _iceServers = {
'iceServers': [
Expand Down Expand Up @@ -99,7 +105,29 @@ class Signaling {

/// Toggles the outgoing video source.
///
/// When currently screen sharing, points every video sender back at the
/// local camera track and republishes the camera stream; otherwise flips
/// between front/back cameras.
void switchCamera() {
  if (_localStream == null) return;
  if (_videoSource != VideoSource.Camera) {
    // Returning from screen share: restore the camera track on all
    // video senders. (The stray unconditional Helper.switchCamera call
    // left over from the old implementation is removed — it flipped the
    // camera even when merely restoring from screen share.)
    for (final sender in _senders) {
      // track may be null after a replaceTrack; avoid the `!` assertion.
      if (sender.track?.kind == 'video') {
        sender.replaceTrack(_localStream!.getVideoTracks()[0]);
      }
    }
    _videoSource = VideoSource.Camera;
    onLocalStream?.call(_localStream!);
  } else {
    // Already on camera: just flip front/back.
    Helper.switchCamera(_localStream!.getVideoTracks()[0]);
  }
}

/// Switches the outgoing video from camera to the given screen-capture
/// [stream] by swapping the track on every video sender.
///
/// No-op when there is no local stream yet or screen sharing is already
/// active. The camera stream is kept so [switchCamera] can restore it.
void switchToScreenSharing(MediaStream stream) {
  if (_localStream != null && _videoSource != VideoSource.Screen) {
    // Hoist the screen track lookup out of the loop.
    final screenTrack = stream.getVideoTracks()[0];
    for (final sender in _senders) {
      // track may be null (e.g. after a prior replaceTrack); the original
      // `sender.track!` would throw in that case.
      if (sender.track?.kind == 'video') {
        sender.replaceTrack(screenTrack);
      }
    }
    onLocalStream?.call(stream);
    _videoSource = VideoSource.Screen;
  }
}

Expand Down Expand Up @@ -193,7 +221,6 @@ class Signaling {
newSession.remoteCandidates.clear();
}
onCallStateChange?.call(newSession, CallState.CallStateNew);

onCallStateChange?.call(newSession, CallState.CallStateRinging);
}
break;
Expand Down Expand Up @@ -381,8 +408,8 @@ class Signaling {
onAddRemoteStream?.call(newSession, event.streams[0]);
}
};
_localStream!.getTracks().forEach((track) {
pc.addTrack(track, _localStream!);
_localStream!.getTracks().forEach((track) async {
_senders.add(await pc.addTrack(track, _localStream!));
});
break;
}
Expand Down Expand Up @@ -492,7 +519,7 @@ class Signaling {
try {
RTCSessionDescription s =
await session.pc!.createOffer(media == 'data' ? _dcConstraints : {});
await session.pc!.setLocalDescription(s);
await session.pc!.setLocalDescription(_fixSdp(s));
_send('offer', {
'to': session.pid,
'from': _selfId,
Expand All @@ -505,11 +532,18 @@ class Signaling {
}
}

/// Rewrites the H.264 profile-level-id in [s]'s SDP in place and returns [s].
///
/// Replaces 640c1f with 42e032 — presumably to force a profile every
/// platform can negotiate; confirm against the codecs actually in use.
RTCSessionDescription _fixSdp(RTCSessionDescription s) {
  s.sdp = s.sdp!
      .replaceAll('profile-level-id=640c1f', 'profile-level-id=42e032');
  return s;
}

Future<void> _createAnswer(Session session, String media) async {
try {
RTCSessionDescription s =
await session.pc!.createAnswer(media == 'data' ? _dcConstraints : {});
await session.pc!.setLocalDescription(s);
await session.pc!.setLocalDescription(_fixSdp(s));
_send('answer', {
'to': session.pid,
'from': _selfId,
Expand Down Expand Up @@ -565,5 +599,7 @@ class Signaling {

await session.pc?.close();
await session.dc?.close();
_senders.clear();
_videoSource = VideoSource.Camera;
}
}
Loading

0 comments on commit a6f4958

Please sign in to comment.