diff --git a/example/lib/pages/room.dart b/example/lib/pages/room.dart index 3defac25..f1820c64 100644 --- a/example/lib/pages/room.dart +++ b/example/lib/pages/room.dart @@ -190,31 +190,40 @@ class _RoomPageState extends State { @override Widget build(BuildContext context) => Scaffold( - body: Column( + body: Stack( children: [ - Expanded( - child: participantTracks.isNotEmpty - ? ParticipantWidget.widgetFor(participantTracks.first) - : Container()), - SizedBox( - height: 100, - child: ListView.builder( - scrollDirection: Axis.horizontal, - itemCount: math.max(0, participantTracks.length - 1), - itemBuilder: (BuildContext context, int index) => SizedBox( - width: 100, - height: 100, - child: - ParticipantWidget.widgetFor(participantTracks[index + 1]), - ), - ), + Column( + children: [ + Expanded( + child: participantTracks.isNotEmpty + ? ParticipantWidget.widgetFor(participantTracks.first, + showStatsLayer: true) + : Container()), + if (widget.room.localParticipant != null) + SafeArea( + top: false, + child: ControlsWidget( + widget.room, widget.room.localParticipant!), + ) + ], ), - if (widget.room.localParticipant != null) - SafeArea( - top: false, - child: - ControlsWidget(widget.room, widget.room.localParticipant!), - ), + Positioned( + left: 0, + right: 0, + top: 0, + child: SizedBox( + height: 120, + child: ListView.builder( + scrollDirection: Axis.horizontal, + itemCount: math.max(0, participantTracks.length - 1), + itemBuilder: (BuildContext context, int index) => SizedBox( + width: 180, + height: 120, + child: ParticipantWidget.widgetFor( + participantTracks[index + 1]), + ), + ), + )), ], ), ); diff --git a/example/lib/widgets/participant.dart b/example/lib/widgets/participant.dart index d0b202dd..af167a57 100644 --- a/example/lib/widgets/participant.dart +++ b/example/lib/widgets/participant.dart @@ -7,20 +7,24 @@ import 'package:livekit_example/theme.dart'; import 'no_video.dart'; import 'participant_info.dart'; +import 'participant_stats.dart'; 
abstract class ParticipantWidget extends StatefulWidget { // Convenience method to return relevant widget for participant - static ParticipantWidget widgetFor(ParticipantTrack participantTrack) { + static ParticipantWidget widgetFor(ParticipantTrack participantTrack, + {bool showStatsLayer = false}) { if (participantTrack.participant is LocalParticipant) { return LocalParticipantWidget( participantTrack.participant as LocalParticipant, participantTrack.videoTrack, - participantTrack.isScreenShare); + participantTrack.isScreenShare, + showStatsLayer); } else if (participantTrack.participant is RemoteParticipant) { return RemoteParticipantWidget( participantTrack.participant as RemoteParticipant, participantTrack.videoTrack, - participantTrack.isScreenShare); + participantTrack.isScreenShare, + showStatsLayer); } throw UnimplementedError('Unknown participant type'); } @@ -29,6 +33,7 @@ abstract class ParticipantWidget extends StatefulWidget { abstract final Participant participant; abstract final VideoTrack? videoTrack; abstract final bool isScreenShare; + abstract final bool showStatsLayer; final VideoQuality quality; const ParticipantWidget({ @@ -44,11 +49,14 @@ class LocalParticipantWidget extends ParticipantWidget { final VideoTrack? videoTrack; @override final bool isScreenShare; + @override + final bool showStatsLayer; const LocalParticipantWidget( this.participant, this.videoTrack, - this.isScreenShare, { + this.isScreenShare, + this.showStatsLayer, { Key? key, }) : super(key: key); @@ -63,11 +71,14 @@ class RemoteParticipantWidget extends ParticipantWidget { final VideoTrack? videoTrack; @override final bool isScreenShare; + @override + final bool showStatsLayer; const RemoteParticipantWidget( this.participant, this.videoTrack, - this.isScreenShare, { + this.isScreenShare, + this.showStatsLayer, { Key? 
key, }) : super(key: key); @@ -136,7 +147,13 @@ abstract class _ParticipantWidgetState ) : const NoVideoWidget(), ), - + if (widget.showStatsLayer) + Positioned( + top: 30, + right: 30, + child: ParticipantStatsWidget( + participant: widget.participant, + )), // Bottom bar Align( alignment: Alignment.bottomCenter, diff --git a/example/lib/widgets/participant_stats.dart b/example/lib/widgets/participant_stats.dart new file mode 100644 index 00000000..b00db4ba --- /dev/null +++ b/example/lib/widgets/participant_stats.dart @@ -0,0 +1,139 @@ +import 'package:flutter/material.dart'; +import 'package:livekit_client/livekit_client.dart'; + +enum StatsType { + kUnknown, + kLocalAudioSender, + kLocalVideoSender, + kRemoteAudioReceiver, + kRemoteVideoReceiver, +} + +class ParticipantStatsWidget extends StatefulWidget { + const ParticipantStatsWidget({Key? key, required this.participant}) + : super(key: key); + final Participant participant; + @override + State createState() => _ParticipantStatsWidgetState(); +} + +class _ParticipantStatsWidgetState extends State { + List> listeners = []; + StatsType statsType = StatsType.kUnknown; + Map stats = {}; + + void _setUpListener(Track track) { + var listener = track.createListener(); + listeners.add(listener); + if (track is LocalVideoTrack) { + statsType = StatsType.kLocalVideoSender; + listener.on((event) { + setState(() { + stats['video tx'] = 'total sent ${event.currentBitrate.toInt()} kbps'; + event.stats.forEach((key, value) { + stats['layer-$key'] = + '${value.frameWidth ?? 0}x${value.frameHeight ?? 0} ${value.framesPerSecond?.toDouble() ?? 0} fps, ${event.bitrateForLayers[key] ?? 0} kbps'; + }); + var firstStats = + event.stats['f'] ?? event.stats['h'] ?? event.stats['q']; + if (firstStats != null) { + stats['encoder'] = firstStats.encoderImplementation ?? 
''; + stats['video codec'] = + '${firstStats.mimeType}, ${firstStats.clockRate}hz, pt: ${firstStats.payloadType}'; + stats['qualityLimitationReason'] = + firstStats.qualityLimitationReason ?? ''; + } + }); + }); + } else if (track is RemoteVideoTrack) { + statsType = StatsType.kRemoteVideoReceiver; + listener.on((event) { + setState(() { + stats['video rx'] = '${event.currentBitrate.toInt()} kbps'; + stats['video codec'] = + '${event.stats.mimeType}, ${event.stats.clockRate}hz, pt: ${event.stats.payloadType}'; + stats['video size'] = + '${event.stats.frameWidth}x${event.stats.frameHeight} ${event.stats.framesPerSecond?.toDouble()}fps'; + stats['video jitter'] = '${event.stats.jitter} s'; + stats['video decoder'] = '${event.stats.decoderImplementation}'; + //stats['video packets lost'] = '${event.stats.packetsLost}'; + //stats['video packets received'] = '${event.stats.packetsReceived}'; + stats['video frames received'] = '${event.stats.framesReceived}'; + stats['video frames decoded'] = '${event.stats.framesDecoded}'; + stats['video frames dropped'] = '${event.stats.framesDropped}'; + }); + }); + } else if (track is LocalAudioTrack) { + statsType = StatsType.kLocalAudioSender; + listener.on((event) { + setState(() { + stats['audio tx'] = '${event.currentBitrate.toInt()} kbps'; + stats['audio codec'] = + '${event.stats.mimeType}, ${event.stats.clockRate}hz, ${event.stats.channels}ch, pt: ${event.stats.payloadType}'; + }); + }); + } else if (track is RemoteAudioTrack) { + statsType = StatsType.kRemoteAudioReceiver; + listener.on((event) { + setState(() { + stats['audio rx'] = '${event.currentBitrate.toInt()} kbps'; + stats['audio codec'] = + '${event.stats.mimeType}, ${event.stats.clockRate}hz, ${event.stats.channels}ch, pt: ${event.stats.payloadType}'; + stats['audio jitter'] = '${event.stats.jitter} s'; + //stats['audio concealed samples'] = + // '${event.stats.concealedSamples} / ${event.stats.concealmentEvents}'; + stats['audio packets lost'] = 
'${event.stats.packetsLost}'; + stats['audio packets received'] = '${event.stats.packetsReceived}'; + }); + }); + } + } + + _onParticipantChanged() { + for (var element in listeners) { + element.dispose(); + } + listeners.clear(); + for (var track in [ + ...widget.participant.videoTracks, + ...widget.participant.audioTracks + ]) { + if (track.track != null) { + _setUpListener(track.track!); + } + } + } + + @override + void initState() { + super.initState(); + widget.participant.addListener(_onParticipantChanged); + // trigger initial change + _onParticipantChanged(); + } + + @override + void deactivate() { + for (var element in listeners) { + element.dispose(); + } + widget.participant.removeListener(_onParticipantChanged); + super.deactivate(); + } + + num sendBitrate = 0; + + @override + Widget build(BuildContext context) { + return Container( + color: Colors.black.withOpacity(0.3), + padding: const EdgeInsets.symmetric( + vertical: 8, + horizontal: 8, + ), + child: Column( + children: + stats.entries.map((e) => Text('${e.key}: ${e.value}')).toList()), + ); + } +} diff --git a/lib/src/events.dart b/lib/src/events.dart index ac2e4c72..424c3c0a 100644 --- a/lib/src/events.dart +++ b/lib/src/events.dart @@ -7,6 +7,7 @@ import 'participant/remote.dart'; import 'publication/local.dart'; import 'publication/remote.dart'; import 'publication/track_publication.dart'; +import 'track/stats.dart'; import 'track/track.dart'; import 'types/other.dart'; import 'types/participant_permissions.dart'; @@ -435,3 +436,57 @@ class AudioPlaybackStatusChanged with RoomEvent { String toString() => '${runtimeType}' 'Audio Playback Status Changed, isPlaying: ${isPlaying})'; } + +class AudioSenderStatsEvent with TrackEvent { + final AudioSenderStats stats; + final num currentBitrate; + const AudioSenderStatsEvent({ + required this.stats, + required this.currentBitrate, + }); + + @override + String toString() => '${runtimeType}' + 'stats: ${stats})'; +} + +class VideoSenderStatsEvent with 
TrackEvent { + final Map stats; + final Map bitrateForLayers; + final num currentBitrate; + const VideoSenderStatsEvent({ + required this.stats, + required this.currentBitrate, + required this.bitrateForLayers, + }); + + @override + String toString() => '${runtimeType}' + 'stats: ${stats})'; +} + +class AudioReceiverStatsEvent with TrackEvent { + final AudioReceiverStats stats; + final num currentBitrate; + const AudioReceiverStatsEvent({ + required this.stats, + required this.currentBitrate, + }); + + @override + String toString() => '${runtimeType}' + 'stats: ${stats})'; +} + +class VideoReceiverStatsEvent with TrackEvent { + final VideoReceiverStats stats; + final num currentBitrate; + const VideoReceiverStatsEvent({ + required this.stats, + required this.currentBitrate, + }); + + @override + String toString() => '${runtimeType}' + 'stats: ${stats})'; +} diff --git a/lib/src/track/local/audio.dart b/lib/src/track/local/audio.dart index fce84022..71c41d1f 100644 --- a/lib/src/track/local/audio.dart +++ b/lib/src/track/local/audio.dart @@ -1,12 +1,15 @@ import 'dart:async'; +import 'package:collection/collection.dart'; import 'package:flutter_webrtc/flutter_webrtc.dart' as rtc; import 'package:meta/meta.dart'; +import '../../events.dart'; import '../../proto/livekit_models.pb.dart' as lk_models; import '../../types/other.dart'; import '../audio_management.dart'; import '../options.dart'; +import '../stats.dart'; import 'local.dart'; class LocalAudioTrack extends LocalTrack @@ -25,6 +28,58 @@ class LocalAudioTrack extends LocalTrack } } + num? _currentBitrate; + get currentBitrate => _currentBitrate; + AudioSenderStats? 
prevStats; + + @override + Future monitorSender() async { + if (sender == null) { + _currentBitrate = 0; + return; + } + final stats = await getSenderStats(); + + if (stats != null && prevStats != null && sender != null) { + _currentBitrate = computeBitrateForSenderStats(stats, prevStats); + events.emit( + AudioSenderStatsEvent(stats: stats, currentBitrate: currentBitrate)); + } + + prevStats = stats; + } + + Future getSenderStats() async { + if (sender == null) { + return null; + } + + final stats = await sender!.getStats(); + AudioSenderStats? senderStats; + for (var v in stats) { + if (v.type == 'outbound-rtp') { + senderStats ??= AudioSenderStats(v.id, v.timestamp); + senderStats.packetsSent = getNumValFromReport(v.values, 'packetsSent'); + senderStats.packetsLost = getNumValFromReport(v.values, 'packetsLost'); + senderStats.bytesSent = getNumValFromReport(v.values, 'bytesSent'); + senderStats.roundTripTime = + getNumValFromReport(v.values, 'roundTripTime'); + senderStats.jitter = getNumValFromReport(v.values, 'jitter'); + + final c = stats.firstWhereOrNull((element) => element.type == 'codec'); + if (c != null) { + senderStats.mimeType = getStringValFromReport(c.values, 'mimeType'); + senderStats.payloadType = + getNumValFromReport(c.values, 'payloadType'); + senderStats.channels = getNumValFromReport(c.values, 'channels'); + senderStats.clockRate = getNumValFromReport(c.values, 'clockRate'); + } + break; + } + } + return senderStats; + } + // private constructor @internal LocalAudioTrack( @@ -45,7 +100,7 @@ class LocalAudioTrack extends LocalTrack static Future create([ AudioCaptureOptions? 
options, ]) async { - options ??= const AudioCaptureOptions(); + options ??= const AudioCaptureOptions(); final stream = await LocalTrack.createStream(options); return LocalAudioTrack( diff --git a/lib/src/track/local/local.dart b/lib/src/track/local/local.dart index 171a4dd2..9e399b78 100644 --- a/lib/src/track/local/local.dart +++ b/lib/src/track/local/local.dart @@ -1,3 +1,5 @@ +import 'dart:async'; + import 'package:flutter/material.dart'; import 'package:flutter_webrtc/flutter_webrtc.dart' as rtc; @@ -15,6 +17,7 @@ import '../../types/other.dart'; import '../options.dart'; import '../remote/audio.dart'; import '../remote/video.dart'; +import '../stats.dart'; import '../track.dart'; import 'audio.dart'; import 'video.dart'; @@ -195,7 +198,7 @@ abstract class LocalTrack extends Track { } logger.fine('$objectId.publish()'); - + startMonitor(); _published = true; return true; } @@ -209,8 +212,26 @@ abstract class LocalTrack extends Track { } logger.fine('$objectId.unpublish()'); - + stopMonitor(); _published = false; return true; } + + Timer? 
_monitorTimer; + + Future monitorSender(); + + @internal + void startMonitor() { + _monitorTimer ??= + Timer.periodic(const Duration(milliseconds: monitorFrequency), (_) { + monitorSender(); + }); + } + + @internal + void stopMonitor() { + _monitorTimer?.cancel(); + _monitorTimer = null; + } } diff --git a/lib/src/track/local/video.dart b/lib/src/track/local/video.dart index fb1b5839..fa35cfb2 100644 --- a/lib/src/track/local/video.dart +++ b/lib/src/track/local/video.dart @@ -1,9 +1,12 @@ +import 'package:collection/collection.dart'; import 'package:flutter_webrtc/flutter_webrtc.dart' as rtc; +import '../../events.dart'; import '../../logger.dart'; import '../../proto/livekit_models.pb.dart' as lk_models; import '../../types/other.dart'; import '../options.dart'; +import '../stats.dart'; import '../track.dart'; import 'audio.dart'; import 'local.dart'; @@ -15,6 +18,99 @@ class LocalVideoTrack extends LocalTrack with VideoTrack { @override covariant VideoCaptureOptions currentOptions; + num? _currentBitrate; + get currentBitrate => _currentBitrate; + Map? prevStats; + final Map _bitrateFoLayers = {}; + + @override + Future monitorSender() async { + if (sender == null) { + _currentBitrate = 0; + return; + } + List stats = []; + try { + stats = await getSenderStats(); + } catch (e) { + logger.warning('Failed to get sender stats: $e'); + return; + } + Map statsMap = {}; + + for (var s in stats) { + statsMap[s.rid ?? 
'f'] = s; + } + + if (prevStats != null) { + num totalBitrate = 0; + statsMap.forEach((key, s) { + final prev = prevStats![key]; + var bitRateForlayer = computeBitrateForSenderStats(s, prev).toInt(); + _bitrateFoLayers[key] = bitRateForlayer; + totalBitrate += bitRateForlayer; + }); + _currentBitrate = totalBitrate; + events.emit(VideoSenderStatsEvent( + stats: statsMap, + currentBitrate: currentBitrate, + bitrateForLayers: _bitrateFoLayers, + )); + } + + prevStats = statsMap; + } + + Future> getSenderStats() async { + if (sender == null) { + return []; + } + + final stats = await sender!.getStats(); + List items = []; + for (var v in stats) { + if (v.type == 'outbound-rtp') { + VideoSenderStats vs = VideoSenderStats(v.id, v.timestamp); + vs.frameHeight = getNumValFromReport(v.values, 'frameHeight'); + vs.frameWidth = getNumValFromReport(v.values, 'frameWidth'); + vs.framesPerSecond = getNumValFromReport(v.values, 'framesPerSecond'); + vs.firCount = getNumValFromReport(v.values, 'firCount'); + vs.pliCount = getNumValFromReport(v.values, 'pliCount'); + vs.nackCount = getNumValFromReport(v.values, 'nackCount'); + vs.packetsSent = getNumValFromReport(v.values, 'packetsSent'); + vs.bytesSent = getNumValFromReport(v.values, 'bytesSent'); + vs.framesSent = getNumValFromReport(v.values, 'framesSent'); + vs.rid = getStringValFromReport(v.values, 'rid'); + vs.encoderImplementation = + getStringValFromReport(v.values, 'encoderImplementation'); + vs.retransmittedPacketsSent = + getNumValFromReport(v.values, 'retransmittedPacketsSent'); + vs.qualityLimitationReason = + getStringValFromReport(v.values, 'qualityLimitationReason'); + vs.qualityLimitationResolutionChanges = + getNumValFromReport(v.values, 'qualityLimitationResolutionChanges'); + + // locate the appropriate remote-inbound-rtp item + final remoteId = getStringValFromReport(v.values, 'remoteId'); + final r = stats.firstWhereOrNull((element) => element.id == remoteId); + if (r != null) { + vs.jitter = 
getNumValFromReport(r.values, 'jitter'); + vs.packetsLost = getNumValFromReport(r.values, 'packetsLost'); + vs.roundTripTime = getNumValFromReport(r.values, 'roundTripTime'); + } + final c = stats.firstWhereOrNull((element) => element.type == 'codec'); + if (c != null) { + vs.mimeType = getStringValFromReport(c.values, 'mimeType'); + vs.payloadType = getNumValFromReport(c.values, 'payloadType'); + vs.channels = getNumValFromReport(c.values, 'channels'); + vs.clockRate = getNumValFromReport(c.values, 'clockRate'); + } + items.add(vs); + } + } + return items; + } + // Private constructor LocalVideoTrack._( String name, @@ -34,7 +130,7 @@ class LocalVideoTrack extends LocalTrack with VideoTrack { static Future createCameraTrack([ CameraCaptureOptions? options, ]) async { - options ??= const CameraCaptureOptions(); + options ??= const CameraCaptureOptions(); final stream = await LocalTrack.createStream(options); return LocalVideoTrack._( @@ -53,7 +149,7 @@ class LocalVideoTrack extends LocalTrack with VideoTrack { static Future createScreenShareTrack([ ScreenShareCaptureOptions? options, ]) async { - options ??= const ScreenShareCaptureOptions(); + options ??= const ScreenShareCaptureOptions(); final stream = await LocalTrack.createStream(options); return LocalVideoTrack._( @@ -73,7 +169,7 @@ class LocalVideoTrack extends LocalTrack with VideoTrack { static Future> createScreenShareTracksWithAudio([ ScreenShareCaptureOptions? 
options, ]) async { - options ??= const ScreenShareCaptureOptions(captureScreenAudio: true); + options ??= const ScreenShareCaptureOptions(captureScreenAudio: true); final stream = await LocalTrack.createStream(options); diff --git a/lib/src/track/remote/audio.dart b/lib/src/track/remote/audio.dart index 2f537719..8c7f8ce3 100644 --- a/lib/src/track/remote/audio.dart +++ b/lib/src/track/remote/audio.dart @@ -1,10 +1,13 @@ +import 'package:collection/collection.dart'; import 'package:flutter_webrtc/flutter_webrtc.dart' as rtc; +import '../../events.dart'; import '../../internal/events.dart'; import '../../proto/livekit_models.pb.dart' as lk_models; import '../../types/other.dart'; import '../audio_management.dart'; import '../local/local.dart'; +import '../stats.dart'; import 'remote.dart'; import '../web/_audio_api.dart' if (dart.library.html) '../web/_audio_html.dart' @@ -58,4 +61,72 @@ class RemoteAudioTrack extends RemoteTrack audio.setSinkId(getCid(), deviceId); _deviceId = deviceId; } + + AudioReceiverStats? prevStats; + num? _currentBitrate; + get currentBitrate => _currentBitrate; + + @override + Future monitorReceiver() async { + if (receiver == null) { + _currentBitrate = 0; + return; + } + final stats = await getReceiverStats(); + + if (stats != null && prevStats != null && receiver != null) { + _currentBitrate = computeBitrateForReceiverStats(stats, prevStats); + events.emit(AudioReceiverStatsEvent( + stats: stats, currentBitrate: currentBitrate)); + } + + prevStats = stats; + } + + Future getReceiverStats() async { + if (receiver == null) { + return null; + } + + final stats = await receiver!.getStats(); + AudioReceiverStats? 
receiverStats; + for (var v in stats) { + if (v.type == 'inbound-rtp') { + receiverStats ??= AudioReceiverStats(v.id, v.timestamp); + + receiverStats.jitter = getNumValFromReport(v.values, 'jitter'); + receiverStats.packetsLost = + getNumValFromReport(v.values, 'packetsLost'); + receiverStats.jitterBufferDelay = + getNumValFromReport(v.values, 'jitterBufferDelay'); + receiverStats.bytesReceived = + getNumValFromReport(v.values, 'bytesReceived'); + receiverStats.packetsReceived = + getNumValFromReport(v.values, 'packetsReceived'); + receiverStats.concealedSamples = + getNumValFromReport(v.values, 'concealedSamples'); + receiverStats.concealmentEvents = + getNumValFromReport(v.values, 'concealmentEvents'); + receiverStats.silentConcealedSamples = + getNumValFromReport(v.values, 'silentConcealedSamples'); + receiverStats.silentConcealmentEvents = + getNumValFromReport(v.values, 'silentConcealmentEvents'); + receiverStats.totalAudioEnergy = + getNumValFromReport(v.values, 'totalAudioEnergy'); + receiverStats.totalSamplesDuration = + getNumValFromReport(v.values, 'totalSamplesDuration'); + + final c = stats.firstWhereOrNull((element) => element.type == 'codec'); + if (c != null) { + receiverStats.mimeType = getStringValFromReport(c.values, 'mimeType'); + receiverStats.payloadType = + getNumValFromReport(c.values, 'payloadType'); + receiverStats.channels = getNumValFromReport(c.values, 'channels'); + receiverStats.clockRate = getNumValFromReport(c.values, 'clockRate'); + } + break; + } + } + return receiverStats; + } } diff --git a/lib/src/track/remote/remote.dart b/lib/src/track/remote/remote.dart index e42916d0..1236d543 100644 --- a/lib/src/track/remote/remote.dart +++ b/lib/src/track/remote/remote.dart @@ -1,7 +1,11 @@ +import 'dart:async'; + import 'package:flutter_webrtc/flutter_webrtc.dart' as rtc; +import 'package:meta/meta.dart'; import '../../proto/livekit_models.pb.dart' as lk_models; import '../../types/other.dart'; +import '../stats.dart'; import 
'../track.dart'; abstract class RemoteTrack extends Track { @@ -22,6 +26,7 @@ abstract class RemoteTrack extends Track { final didStart = await super.start(); if (didStart) { await enable(); + startMonitor(); } return didStart; } @@ -32,6 +37,25 @@ abstract class RemoteTrack extends Track { if (didStop) { await disable(); } + stopMonitor(); return didStop; } + + Timer? _monitorTimer; + + Future monitorReceiver(); + + @internal + void startMonitor() { + _monitorTimer ??= + Timer.periodic(const Duration(milliseconds: monitorFrequency), (_) { + monitorReceiver(); + }); + } + + @internal + void stopMonitor() { + _monitorTimer?.cancel(); + _monitorTimer = null; + } } diff --git a/lib/src/track/remote/video.dart b/lib/src/track/remote/video.dart index 7124405d..6217207f 100644 --- a/lib/src/track/remote/video.dart +++ b/lib/src/track/remote/video.dart @@ -1,12 +1,15 @@ +import 'package:collection/collection.dart'; import 'package:flutter_webrtc/flutter_webrtc.dart' as rtc; +import 'package:meta/meta.dart'; +import '../../events.dart'; import '../../proto/livekit_models.pb.dart' as lk_models; import '../../types/other.dart'; import '../local/local.dart'; +import '../stats.dart'; import 'remote.dart'; class RemoteVideoTrack extends RemoteTrack with VideoTrack { - // RemoteVideoTrack(String name, TrackSource source, rtc.MediaStream stream, rtc.MediaStreamTrack track, {rtc.RTCRtpReceiver? receiver}) @@ -18,4 +21,81 @@ class RemoteVideoTrack extends RemoteTrack with VideoTrack { track, receiver: receiver, ); + + VideoReceiverStats? prevStats; + num? _currentBitrate; + get currentBitrate => _currentBitrate; + + @internal + String? 
getDecoderImplementation() { + return prevStats?.decoderImplementation; + } + + @override + Future monitorReceiver() async { + if (receiver == null) { + _currentBitrate = 0; + return; + } + final stats = await getReceiverStats(); + + if (stats != null && prevStats != null && receiver != null) { + _currentBitrate = computeBitrateForReceiverStats(stats, prevStats); + events.emit(VideoReceiverStatsEvent( + stats: stats, currentBitrate: currentBitrate)); + } + + prevStats = stats; + } + + Future getReceiverStats() async { + if (receiver == null) { + return null; + } + + final stats = await receiver!.getStats(); + VideoReceiverStats? receiverStats; + for (var v in stats) { + if (v.type == 'inbound-rtp') { + receiverStats ??= VideoReceiverStats(v.id, v.timestamp); + receiverStats.jitter = getNumValFromReport(v.values, 'jitter'); + receiverStats.jitterBufferDelay = + getNumValFromReport(v.values, 'jitterBufferDelay'); + receiverStats.bytesReceived = + getNumValFromReport(v.values, 'bytesReceived'); + receiverStats.packetsLost = + getNumValFromReport(v.values, 'packetsLost'); + receiverStats.framesDecoded = + getNumValFromReport(v.values, 'framesDecoded'); + receiverStats.framesDropped = + getNumValFromReport(v.values, 'framesDropped'); + receiverStats.framesReceived = + getNumValFromReport(v.values, 'framesReceived'); + receiverStats.packetsReceived = + getNumValFromReport(v.values, 'packetsReceived'); + receiverStats.framesPerSecond = + getNumValFromReport(v.values, 'framesPerSecond'); + receiverStats.frameWidth = getNumValFromReport(v.values, 'frameWidth'); + receiverStats.frameHeight = + getNumValFromReport(v.values, 'frameHeight'); + receiverStats.pliCount = getNumValFromReport(v.values, 'pliCount'); + receiverStats.firCount = getNumValFromReport(v.values, 'firCount'); + receiverStats.nackCount = getNumValFromReport(v.values, 'nackCount'); + receiverStats.decoderImplementation = + getStringValFromReport(v.values, 'decoderImplementation'); + + final c = 
stats.firstWhereOrNull((element) => element.type == 'codec'); + if (c != null) { + receiverStats.mimeType = getStringValFromReport(c.values, 'mimeType'); + receiverStats.payloadType = + getNumValFromReport(c.values, 'payloadType'); + receiverStats.channels = getNumValFromReport(c.values, 'channels'); + receiverStats.clockRate = getNumValFromReport(c.values, 'clockRate'); + } + break; + } + } + + return receiverStats; + } } diff --git a/lib/src/track/stats.dart b/lib/src/track/stats.dart new file mode 100644 index 00000000..262f4beb --- /dev/null +++ b/lib/src/track/stats.dart @@ -0,0 +1,200 @@ +import 'package:flutter/foundation.dart'; + +import '../proto/livekit_models.pb.dart'; + +const monitorFrequency = 2000; + +class CodecStats { + String? mimeType; + num? payloadType; + num? channels; + num? clockRate; +} + +// key stats for senders and receivers +class SenderStats extends CodecStats { + SenderStats(this.streamId, this.timestamp); + + /// number of packets sent + num? packetsSent; + + /// number of bytes sent + num? bytesSent; + + /// jitter as perceived by remote + num? jitter; + + /// packets reported lost by remote + num? packetsLost; + + /// RTT reported by remote + num? roundTripTime; + + /// ID of the outbound stream + String streamId; + + String? encoderImplementation; + + num timestamp; +} + +class AudioSenderStats extends SenderStats { + AudioSenderStats(String streamId, num timestamp) : super(streamId, timestamp); + TrackType type = TrackType.AUDIO; +} + +class VideoSenderStats extends SenderStats { + VideoSenderStats(String streamId, num timestamp) : super(streamId, timestamp); + TrackType type = TrackType.VIDEO; + + num? firCount; + + num? pliCount; + + num? nackCount; + + String? rid; + + num? frameWidth; + + num? frameHeight; + + num? framesSent; + + num? framesPerSecond; + + // bandwidth, cpu, other, none + String? qualityLimitationReason; + + num? qualityLimitationResolutionChanges; + + num? 
retransmittedPacketsSent; +} + +class ReceiverStats extends CodecStats { + ReceiverStats(this.streamId, this.timestamp); + num? jitterBufferDelay; + + /// packets reported lost by remote + num? packetsLost; + + /// number of packets sent + num? packetsReceived; + + num? bytesReceived; + + String streamId; + + num? jitter; + + num timestamp; +} + +class AudioReceiverStats extends ReceiverStats { + AudioReceiverStats(String streamId, num timestamp) + : super(streamId, timestamp); + TrackType type = TrackType.AUDIO; + + num? concealedSamples; + + num? concealmentEvents; + + num? silentConcealedSamples; + + num? silentConcealmentEvents; + + num? totalAudioEnergy; + + num? totalSamplesDuration; +} + +class VideoReceiverStats extends ReceiverStats { + VideoReceiverStats(String streamId, num timestamp) + : super(streamId, timestamp); + + TrackType type = TrackType.VIDEO; + + num? framesDecoded; + + num? framesDropped; + + num? framesReceived; + + num? framesPerSecond; + + num? frameWidth; + + num? frameHeight; + + num? firCount; + + num? pliCount; + + num? nackCount; + + String? decoderImplementation; +} + +num computeBitrateForSenderStats( + SenderStats currentStats, + SenderStats? prevStats, +) { + if (prevStats == null) { + return 0; + } + num? bytesNow; + num? bytesPrev; + bytesNow = currentStats.bytesSent; + bytesPrev = prevStats.bytesSent; + if (bytesNow == null || bytesPrev == null) { + return 0; + } + if (kIsWeb) { + return ((bytesNow - bytesPrev) * 8) / + (currentStats.timestamp - prevStats.timestamp); + } + + return ((bytesNow - bytesPrev) * 8 * 1000) / + (currentStats.timestamp - prevStats.timestamp); +} + +num computeBitrateForReceiverStats( + ReceiverStats currentStats, + ReceiverStats? prevStats, +) { + if (prevStats == null) { + return 0; + } + num? bytesNow; + num? 
bytesPrev; + + bytesNow = currentStats.bytesReceived; + bytesPrev = prevStats.bytesReceived; + + if (bytesNow == null || bytesPrev == null) { + return 0; + } + if (kIsWeb) { + return ((bytesNow - bytesPrev) * 8) / + (currentStats.timestamp - prevStats.timestamp); + } + + return ((bytesNow - bytesPrev) * 8 * 1000) / + (currentStats.timestamp - prevStats.timestamp); +} + +num? getNumValFromReport(Map values, String key) { + if (values.containsKey(key)) { + return (values[key] is String) + ? num.tryParse(values[key]) + : values[key] as num; + } + return null; +} + +String? getStringValFromReport(Map values, String key) { + if (values.containsKey(key)) { + return values[key] as String; + } + return null; +}