From e4b582c9b7bedbaaca39ed105fffef0acefcb9d4 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" Date: Sun, 24 Nov 2024 16:14:51 +0000 Subject: [PATCH] [doc] daily update 2024-11-24 --- lib/src/agora_base.dart | 30 +++--- lib/src/agora_media_base.dart | 14 +-- lib/src/agora_media_engine.dart | 17 +--- lib/src/agora_media_player.dart | 15 +-- lib/src/agora_media_player_source.dart | 6 +- lib/src/agora_media_player_types.dart | 2 +- lib/src/agora_rtc_engine.dart | 124 +++++++++++-------------- lib/src/agora_rtc_engine_ex.dart | 9 +- 8 files changed, 97 insertions(+), 120 deletions(-) diff --git a/lib/src/agora_base.dart b/lib/src/agora_base.dart index 0a8a35a08..fdc18c7aa 100644 --- a/lib/src/agora_base.dart +++ b/lib/src/agora_base.dart @@ -692,7 +692,7 @@ enum QualityType { @JsonValue(7) qualityUnsupported, - /// 8: Detecting the network quality. + /// 8: The last-mile network probe test is in progress. @JsonValue(8) qualityDetecting, } @@ -930,7 +930,7 @@ extension OrientationModeExt on OrientationMode { /// Video degradation preferences when the bandwidth is a constraint. @JsonEnum(alwaysCreate: true) enum DegradationPreference { - /// 0: (Default) Prefers to reduce the video frame rate while maintaining video resolution during video encoding under limited bandwidth. This degradation preference is suitable for scenarios where video quality is prioritized. + /// 0: Prefers to reduce the video frame rate while maintaining video resolution during video encoding under limited bandwidth. This degradation preference is suitable for scenarios where video quality is prioritized. Deprecated: This enumerator is deprecated. Use other enumerations instead. @JsonValue(0) maintainQuality, @@ -1623,7 +1623,7 @@ enum CompressionPreference { @JsonValue(0) preferLowLatency, - /// 1: (Default) High quality preference. The SDK compresses video frames while maintaining video quality. This preference is suitable for scenarios where video quality is prioritized. + /// 1: High quality preference. The SDK compresses video frames while maintaining video quality. This preference is suitable for scenarios where video quality is prioritized. @JsonValue(1) preferQuality, } @@ -1859,7 +1859,7 @@ class VideoEncoderConfiguration { @JsonKey(name: 'frameRate') final int? frameRate; - /// The encoding bitrate (Kbps) of the video. This parameter does not need to be set; keeping the default value standardBitrate is sufficient. The SDK automatically matches the most suitable bitrate based on the video resolution and frame rate you have set. For the correspondence between video resolution and frame rate, see. standardBitrate (0): (Recommended) Standard bitrate mode. compatibleBitrate (-1): Adaptive bitrate mode. In general, Agora suggests that you do not use this value. + /// The encoding bitrate (Kbps) of the video.. This parameter does not need to be set; keeping the default value standardBitrate is sufficient. The SDK automatically matches the most suitable bitrate based on the video resolution and frame rate you have set. For the correspondence between video resolution and frame rate, see. standardBitrate (0): (Recommended) Standard bitrate mode. compatibleBitrate (-1): Adaptive bitrate mode. In general, Agora suggests that you do not use this value. @JsonKey(name: 'bitrate') final int? bitrate; @@ -2933,7 +2933,7 @@ enum LocalVideoStreamReason { @JsonValue(8) localVideoStreamReasonDeviceNotFound, - /// 9: (macOS only) The video capture device currently in use is disconnected (such as being unplugged). 
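The VideoEncoderConfiguration hunk above describes how the bitrate field interacts with standardBitrate and the frame rate you set. A minimal Dart sketch of that configuration, assuming `engine` is an already initialized RtcEngine and that the resolution shown is only an illustrative choice:

// Hedged sketch: keep bitrate at 0 (standardBitrate) so the SDK matches the
// bitrate to the resolution and frame rate, as the note above recommends.
await engine.setVideoEncoderConfiguration(
  const VideoEncoderConfiguration(
    dimensions: VideoDimensions(width: 1280, height: 720),
    frameRate: 15,
    bitrate: 0, // standardBitrate
  ),
);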
+ /// 9: (macOS and Windows only) The video capture device currently in use is disconnected (such as being unplugged). @JsonValue(9) localVideoStreamReasonDeviceDisconnected, @@ -2974,7 +2974,7 @@ enum LocalVideoStreamReason { @JsonValue(20) localVideoStreamReasonScreenCaptureWindowNotSupported, - /// 21: (Windows only) The screen has not captured any data available for window sharing. + /// 21: (Windows and Android only) The currently captured window has no data. @JsonValue(21) localVideoStreamReasonScreenCaptureFailure, @@ -3980,7 +3980,7 @@ class LiveTranscoding { @JsonKey(name: 'height') final int? height; - /// The encoding bitrate (Kbps) of the video. This parameter does not need to be set; keeping the default value standardBitrate is sufficient. The SDK automatically matches the most suitable bitrate based on the video resolution and frame rate you have set. For the correspondence between video resolution and frame rate, see. + /// The encoding bitrate (Kbps) of the video.. This parameter does not need to be set; keeping the default value standardBitrate is sufficient. The SDK automatically matches the most suitable bitrate based on the video resolution and frame rate you have set. For the correspondence between video resolution and frame rate, see. @JsonKey(name: 'videoBitrate') final int? videoBitrate; @@ -4893,11 +4893,11 @@ class VideoDenoiserOptions { /// @nodoc const VideoDenoiserOptions({this.mode, this.level}); - /// Video noise reduction mode. + /// Video noise reduction mode.. @JsonKey(name: 'mode') final VideoDenoiserMode? mode; - /// Video noise reduction level. + /// Video noise reduction level.. @JsonKey(name: 'level') final VideoDenoiserLevel? level; @@ -4934,18 +4934,18 @@ extension VideoDenoiserModeExt on VideoDenoiserMode { } } -/// The video noise reduction level. +/// Video noise reduction level. @JsonEnum(alwaysCreate: true) enum VideoDenoiserLevel { /// 0: (Default) Promotes video quality during video noise reduction. balances performance consumption and video noise reduction quality. The performance consumption is moderate, the video noise reduction speed is moderate, and the overall video quality is optimal. @JsonValue(0) videoDenoiserLevelHighQuality, - /// 1: Promotes reducing performance consumption during video noise reduction. prioritizes reducing performance consumption over video noise reduction quality. The performance consumption is lower, and the video noise reduction speed is faster. To avoid a noticeable shadowing effect (shadows trailing behind moving objects) in the processed video, Agora recommends that you use this settinging when the camera is fixed. + /// 1: Promotes reducing performance consumption during video noise reduction. It prioritizes reducing performance consumption over video noise reduction quality. The performance consumption is lower, and the video noise reduction speed is faster. To avoid a noticeable shadowing effect (shadows trailing behind moving objects) in the processed video, Agora recommends that you use this setting when the camera is fixed. @JsonValue(1) videoDenoiserLevelFast, - /// 2: Enhanced video noise reduction. prioritizes video noise reduction quality over reducing performance consumption. The performance consumption is higher, the video noise reduction speed is slower, and the video noise reduction quality is better. If videoDenoiserLevelHighQuality is not enough for your video noise reduction needs, you can use this enumerator. 
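Since the hunks above document VideoDenoiserOptions and VideoDenoiserLevel, here is a hedged Dart sketch of combining them with setVideoDenoiserOptions (documented further down in this patch); it assumes `engine` is an initialized RtcEngine with video enabled, and the videoDenoiserAutoMode value is an assumption about VideoDenoiserMode rather than something shown in this hunk:

// Hedged sketch: enable video noise reduction with the default quality level.
await engine.setVideoDenoiserOptions(
  enabled: true,
  options: const VideoDenoiserOptions(
    mode: VideoDenoiserMode.videoDenoiserAutoMode, // assumed enum value
    level: VideoDenoiserLevel.videoDenoiserLevelHighQuality,
  ),
);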
+ /// @nodoc @JsonValue(2) videoDenoiserLevelStrength, } @@ -5006,7 +5006,7 @@ class VirtualBackgroundSource { @JsonKey(name: 'source') final String? source; - /// The degree of blurring applied to the custom background image. This parameter takes effect only when the type of the custom background image is backgroundBlur. + /// The degree of blurring applied to the custom background image.. This parameter takes effect only when the type of the custom background image is backgroundBlur. @JsonKey(name: 'blur_degree') final BackgroundBlurDegree? blurDegree; @@ -5988,7 +5988,7 @@ class ChannelMediaRelayConfiguration { /// The information of the target channel ChannelMediaInfo. It contains the following members: channelName : The name of the target channel. token : The token for joining the target channel. It is generated with the channelName and uid you set in destInfos. /// If you have not enabled the App Certificate, set this parameter as the default value NULL, which means the SDK applies the App ID. - /// If you have enabled the App Certificate, you must use the token generated with the channelName and uid. If the token of any target channel expires, the whole media relay stops; hence Agora recommends that you specify the same expiration time for the tokens of all the target channels. uid : The unique user ID to identify the relay stream in the target channel. The value ranges from 0 to (2 32 -1). To avoid user ID conflicts, this user ID must be different from any other user ID in the target channel. The default value is 0, which means the SDK generates a random user ID. + /// If you have enabled the App Certificate, you must use the token generated with the channelName and uid. If the token of any target channel expires, the whole media relay stops; hence Agora recommends that you specify the same expiration time for the tokens of all the target channels. uid : The unique user ID to identify the relay stream in the target channel. The value ranges from 0 to (2 32 -1). To avoid user ID conflicts, this user ID must be different from any other user ID in the target channel. The default value is 0, which means the SDK generates a random UID. @JsonKey(name: 'destInfos') final List? destInfos; @@ -6518,7 +6518,7 @@ class ScreenVideoParameters { @JsonKey(name: 'bitrate') final int? bitrate; - /// The content hint for screen sharing. + /// The content hint for screen sharing.. @JsonKey(name: 'contentHint') final VideoContentHint? contentHint; diff --git a/lib/src/agora_media_base.dart b/lib/src/agora_media_base.dart index e9323f3f9..3824f5a85 100644 --- a/lib/src/agora_media_base.dart +++ b/lib/src/agora_media_base.dart @@ -632,11 +632,11 @@ extension VideoPixelFormatExt on VideoPixelFormat { /// Video display modes. @JsonEnum(alwaysCreate: true) enum RenderModeType { - /// 1: Hidden mode. Uniformly scale the video until one of its dimension fits the boundary (zoomed to fit). One dimension of the video may have clipped contents. + /// 1: Hidden mode. The priority is to fill the window. Any excess video that does not match the window size will be cropped. @JsonValue(1) renderModeHidden, - /// 2: Fit mode. Uniformly scale the video until one of its dimension fits the boundary (zoomed to fit). Areas that are not filled due to disparity in the aspect ratio are filled with black. + /// 2: Fit mode. The priority is to ensure that all video content is displayed. Any areas of the window that are not filled due to the mismatch between video size and window size will be filled with black. 
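The blurDegree note above only takes effect when the background type is backgroundBlur; a rough Dart sketch of that combination, assuming `engine` is an initialized RtcEngine, and assuming the BackgroundBlurDegree value name and the default SegmentationProperty shown here:

// Hedged sketch: blur the real background instead of replacing it.
await engine.enableVirtualBackground(
  enabled: true,
  backgroundSource: const VirtualBackgroundSource(
    backgroundSourceType: BackgroundSourceType.backgroundBlur,
    blurDegree: BackgroundBlurDegree.blurDegreeHigh, // assumed value name
  ),
  segproperty: const SegmentationProperty(),
);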
@JsonValue(2) renderModeFit, @@ -970,7 +970,9 @@ class VideoFrame { @JsonKey(name: 'matrix') final List? matrix; - /// The alpha channel data output by using portrait segmentation algorithm. This data matches the size of the video frame, with each pixel value ranging from [0,255], where 0 represents the background and 255 represents the foreground (portrait). By setting this parameter, you can render the video background into various effects, such as transparent, solid color, image, video, etc. In custom video rendering scenarios, ensure that both the video frame and alphaBuffer are of the Full Range type; other types may cause abnormal alpha data rendering. + /// The alpha channel data output by using portrait segmentation algorithm. This data matches the size of the video frame, with each pixel value ranging from [0,255], where 0 represents the background and 255 represents the foreground (portrait). By setting this parameter, you can render the video background into various effects, such as transparent, solid color, image, video, etc. + /// In custom video rendering scenarios, ensure that both the video frame and alphaBuffer are of the Full Range type; other types may cause abnormal alpha data rendering. + /// Make sure that alphaBuffer is exactly the same size as the video frame (width × height), otherwise it may cause the app to crash. @JsonKey(name: 'alphaBuffer', ignore: true) final Uint8List? alphaBuffer; @@ -978,7 +980,7 @@ class VideoFrame { @JsonKey(name: 'pixelBuffer', ignore: true) final Uint8List? pixelBuffer; - /// The meta information in the video frame. To use this parameter, please contact. + /// The meta information in the video frame. To use this parameter, contact. @VideoFrameMetaInfoConverter() @JsonKey(name: 'metaInfo') final VideoFrameMetaInfo? metaInfo; @@ -1068,7 +1070,7 @@ class AudioPcmFrameSink { /// /// After registering the audio frame observer, the callback occurs every time the player receives an audio frame, reporting the detailed information of the audio frame. /// - /// * [frame] The audio frame information. See AudioPcmFrame. + /// * [frame] The audio frame information.. See AudioPcmFrame. final void Function(AudioPcmFrame frame)? onFrame; } @@ -1393,7 +1395,7 @@ class AudioSpectrumObserver { /// /// After successfully calling registerAudioSpectrumObserver to implement the onRemoteAudioSpectrum callback in the AudioSpectrumObserver and calling enableAudioSpectrumMonitor to enable audio spectrum monitoring, the SDK will trigger the callback as the time interval you set to report the received remote audio data spectrum. /// - /// * [spectrums] The audio spectrum information of the remote user, see UserAudioSpectrumInfo. The number of arrays is the number of remote users monitored by the SDK. If the array is null, it means that no audio spectrum of remote users is detected. + /// * [spectrums] The audio spectrum information of the remote user. See UserAudioSpectrumInfo. The number of arrays is the number of remote users monitored by the SDK. If the array is null, it means that no audio spectrum of remote users is detected. /// * [spectrumNumber] The number of remote users. final void Function( List spectrums, int spectrumNumber)? diff --git a/lib/src/agora_media_engine.dart b/lib/src/agora_media_engine.dart index 0db692f67..7c4914f4f 100644 --- a/lib/src/agora_media_engine.dart +++ b/lib/src/agora_media_engine.dart @@ -48,13 +48,7 @@ abstract class MediaEngine { /// Registers a raw video frame observer object. 
/// - /// If you want to obtain the original video data of some remote users (referred to as group A) and the encoded video data of other remote users (referred to as group B), you can refer to the following steps: - /// Call registerVideoFrameObserver to register the raw video frame observer before joining the channel. - /// Call registerVideoEncodedFrameObserver to register the encoded video frame observer before joining the channel. - /// After joining the channel, get the user IDs of group B users through onUserJoined, and then call setRemoteVideoSubscriptionOptions to set the encodedFrameOnly of this group of users to true. - /// Call muteAllRemoteVideoStreams (false) to start receiving the video streams of all remote users. Then: - /// The raw video data of group A users can be obtained through the callback in VideoFrameObserver, and the SDK renders the data by default. - /// The encoded video data of group B users can be obtained through the callback in VideoEncodedFrameObserver. If you want to observe raw video frames (such as YUV or RGBA format), Agora recommends that you implement one VideoFrameObserver class with this method. When calling this method to register a video observer, you can register callbacks in the VideoFrameObserver class as needed. After you successfully register the video frame observer, the SDK triggers the registered callbacks each time a video frame is received. + /// If you want to observe raw video frames (such as YUV or RGBA format), Agora recommends that you implement one VideoFrameObserver class with this method. When calling this method to register a video observer, you can register callbacks in the VideoFrameObserver class as needed. After you successfully register the video frame observer, the SDK triggers the registered callbacks each time a video frame is received. /// /// * [observer] The observer instance. See VideoFrameObserver. /// @@ -65,14 +59,7 @@ abstract class MediaEngine { /// Registers a receiver object for the encoded video image. /// - /// If you only want to observe encoded video frames (such as h.264 format) without decoding and rendering the video, Agora recommends that you implement one VideoEncodedFrameObserver class through this method. If you want to obtain the original video data of some remote users (referred to as group A) and the encoded video data of other remote users (referred to as group B), you can refer to the following steps: - /// Call registerVideoFrameObserver to register the raw video frame observer before joining the channel. - /// Call registerVideoEncodedFrameObserver to register the encoded video frame observer before joining the channel. - /// After joining the channel, get the user IDs of group B users through onUserJoined, and then call setRemoteVideoSubscriptionOptions to set the encodedFrameOnly of this group of users to true. - /// Call muteAllRemoteVideoStreams (false) to start receiving the video streams of all remote users. Then: - /// The raw video data of group A users can be obtained through the callback in VideoFrameObserver, and the SDK renders the data by default. - /// The encoded video data of group B users can be obtained through the callback in VideoEncodedFrameObserver. - /// Call this method before joining a channel. + /// If you only want to observe encoded video frames (such as H.264 format) without decoding and rendering the video, Agora recommends that you implement one VideoEncodedFrameObserver class through this method. Call this method before joining a channel. 
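To make the registration order described above concrete, here is a hedged Dart sketch; `engine`, `token`, and `channelId` are assumed to exist, and the onCaptureVideoFrame callback signature is an assumption about the VideoFrameObserver class in this SDK version:

// Hedged sketch: register the raw video frame observer before joining the channel.
final mediaEngine = engine.getMediaEngine();
mediaEngine.registerVideoFrameObserver(VideoFrameObserver(
  onCaptureVideoFrame: (sourceType, videoFrame) {
    // Inspect videoFrame.width, videoFrame.height, videoFrame.yBuffer, etc.
  },
));
await engine.joinChannel(
  token: token,
  channelId: channelId,
  uid: 0,
  options: const ChannelMediaOptions(),
);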
/// /// * [observer] The video frame observer object. See VideoEncodedFrameObserver. /// diff --git a/lib/src/agora_media_player.dart b/lib/src/agora_media_player.dart index f93b0f1c9..4eaf31414 100644 --- a/lib/src/agora_media_player.dart +++ b/lib/src/agora_media_player.dart @@ -105,8 +105,7 @@ abstract class MediaPlayer { /// * [index] The index of the media stream. This parameter must be less than the return value of getStreamCount. /// /// Returns - /// If the call succeeds, returns the detailed information of the media stream. See PlayerStreamInfo. - /// If the call fails, returns NULL. + /// If the call succeeds, returns the detailed information of the media stream. See PlayerStreamInfo. If the method call fails, returns NULL. Future getStreamInfo(int index); /// Sets the loop playback. @@ -125,8 +124,8 @@ /// /// Call this method after calling open. /// - /// * [speed] The playback speed. Agora recommends that you limit this value to a range between 50 and 400, which is defined as follows: - /// 50: Half the original speed. + /// * [speed] The playback speed. Agora recommends that you set this to a value between 30 and 400, defined as follows: + /// 30: 0.3 times the original speed. /// 100: The original speed. /// 400: 4 times the original speed. /// @@ -351,7 +350,7 @@ abstract class MediaPlayer { /// /// You can call this method to switch the media resource to be played according to the current network status. For example: /// When the network is poor, the media resource to be played is switched to a media resource address with a lower bitrate. - /// When the network is good, the media resource to be played is switched to a media resource address with a higher bitrate. After calling this method, if you receive the playerEventSwitchComplete event in the onPlayerEvent callback, the switch is successful; If you receive the playerEventSwitchError event in the onPlayerEvent callback, the switch fails. + /// When the network is good, the media resource to be played is switched to a media resource address with a higher bitrate. After calling this method, if you receive the onPlayerEvent callback reporting the playerEventSwitchComplete event, the switch is successful. If the switch fails, the SDK automatically retries 3 times. If it still fails, you receive the onPlayerEvent callback reporting the playerEventSwitchError event, indicating that an error occurred during media resource switching. /// Ensure that you call this method after open. /// To ensure normal playback, pay attention to the following when calling this method: /// Do not call this method when playback is paused. /// Before switching the media resource, make sure that the playback position does not exceed the total duration of the media resource to be switched. /// /// * [src] The URL of the media resource. - /// * [syncPts] Whether to synchronize the playback position (ms) before and after the switch: true : Synchronize the playback position before and after the switch. false : (Default) Do not synchronize the playback position before and after the switch. Make sure to set this parameter as false if you need to play live streams, or the switch fails. If you need to play on-demand streams, you can set the value of this parameter according to your scenarios. + /// * [syncPts] Whether to synchronize the playback position (ms) before and after the switch: true : Synchronize the playback position before and after the switch.
false : (Default) Do not synchronize the playback position before and after the switch. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -367,7 +366,9 @@ abstract class MediaPlayer { /// Preloads a media resource. /// - /// You can call this method to preload a media resource into the playlist. If you need to preload multiple media resources, you can call this method multiple times. If the preload is successful and you want to play the media resource, call playPreloadedSrc; if you want to clear the playlist, call stop. Agora does not support preloading duplicate media resources to the playlist. However, you can preload the media resources that are being played to the playlist again. + /// You can call this method to preload a media resource into the playlist. If you need to preload multiple media resources, you can call this method multiple times. If the preload is successful and you want to play the media resource, call playPreloadedSrc; if you want to clear the playlist, call stop. + /// Before calling this method, ensure that you have called open or openWithMediaSource to open the media resource successfully. + /// Agora does not support preloading duplicate media resources to the playlist. However, you can preload the media resources that are being played to the playlist again. /// /// * [src] The URL of the media resource. /// * [startPos] The starting position (ms) for playing after the media resource is preloaded to the playlist. When preloading a live stream, set this parameter to 0. diff --git a/lib/src/agora_media_player_source.dart b/lib/src/agora_media_player_source.dart index 3042da599..d772c08ea 100644 --- a/lib/src/agora_media_player_source.dart +++ b/lib/src/agora_media_player_source.dart @@ -40,7 +40,7 @@ class MediaPlayerSourceObserver { /// /// After calling the seek method, the SDK triggers the callback to report the results of the seek operation. /// - /// * [eventCode] The player events. See MediaPlayerEvent. + /// * [eventCode] The player event. See MediaPlayerEvent. /// * [elapsedTime] The time (ms) when the event occurs. /// * [message] Information about the event. final void Function( @@ -58,8 +58,8 @@ class MediaPlayerSourceObserver { /// Reports the playback duration that the buffered data can support. /// /// When playing online media resources, the SDK triggers this callback every two seconds to report the playback duration that the currently buffered data can support. - /// When the playback duration supported by the buffered data is less than the threshold (0 by default), the SDK returns playerEventBufferLow. - /// When the playback duration supported by the buffered data is greater than the threshold (0 by default), the SDK returns playerEventBufferRecover. + /// When the playback duration supported by the buffered data is less than the threshold (0 by default), the SDK returns playerEventBufferLow (6). + /// When the playback duration supported by the buffered data is greater than the threshold (0 by default), the SDK returns playerEventBufferRecover (7). /// /// * [playCachedBuffer] The playback duration (ms) that the buffered data can support. final void Function(int playCachedBuffer)? 
onPlayBufferUpdated; diff --git a/lib/src/agora_media_player_types.dart b/lib/src/agora_media_player_types.dart index 7c74ce72b..4d695d4b5 100644 --- a/lib/src/agora_media_player_types.dart +++ b/lib/src/agora_media_player_types.dart @@ -582,7 +582,7 @@ class MediaSource { @JsonKey(name: 'startPos') final int? startPos; - /// Whether to enable autoplay once the media file is opened: true : (Default) Enables autoplay. false : Disables autoplay. If autoplay is disabled, you need to call the play method to play a media file after it is opened. + /// Whether to enable autoplay once the media file is opened: true : (Default) Yes. false : No. If autoplay is disabled, you need to call the play method to play a media file after it is opened. @JsonKey(name: 'autoPlay') final bool? autoPlay; diff --git a/lib/src/agora_rtc_engine.dart b/lib/src/agora_rtc_engine.dart index e09c6b292..070d2edac 100644 --- a/lib/src/agora_rtc_engine.dart +++ b/lib/src/agora_rtc_engine.dart @@ -454,7 +454,7 @@ class LocalVideoStats { @JsonKey(name: 'dualStreamEnabled') final bool? dualStreamEnabled; - /// The local video encoding acceleration type. + /// The local video encoding acceleration type.. /// 0: Software encoding is applied without acceleration. /// 1: Hardware encoding is applied for acceleration. @JsonKey(name: 'hwEncoderAccelerating') @@ -553,7 +553,7 @@ class RemoteAudioStats { @JsonKey(name: 'publishDuration') final int? publishDuration; - /// The Quality of Experience (QoE) of the local user when receiving a remote audio stream. + /// The Quality of Experience (QoE) of the local user when receiving a remote audio stream.. @JsonKey(name: 'qoeQuality') final int? qoeQuality; @@ -1234,7 +1234,7 @@ class ScreenCaptureSourceInfo { @JsonKey(name: 'isOccluded') final bool? isOccluded; - /// The position of a window relative to the entire screen space (including all shareable screens). See Rectangle. + /// The position of a window relative to the entire screen space (including all shareable screens). See Rectangle.. @JsonKey(name: 'position') final Rectangle? position; @@ -1390,7 +1390,7 @@ class ChannelMediaOptions { @JsonKey(name: 'publishCustomAudioTrack') final bool? publishCustomAudioTrack; - /// The ID of the custom audio source to publish. The default value is 0. If you have set sourceNumber in setExternalAudioSource to a value greater than 1, the SDK creates the corresponding number of custom audio tracks and assigns an ID to each audio track, starting from 0. + /// The ID of the custom audio track to be published. The default value is 0. You can obtain the custom audio track ID through the createCustomAudioTrack method. @JsonKey(name: 'publishCustomAudioTrackId') final int? publishCustomAudioTrackId; @@ -1414,7 +1414,7 @@ class ChannelMediaOptions { @JsonKey(name: 'publishTranscodedVideoTrack') final bool? publishTranscodedVideoTrack; - /// @nodoc + /// Whether to publish the mixed audio track: true : Publish the mixed audio track. false : Do not publish the mixed audio track. @JsonKey(name: 'publishMixedAudioTrack') final bool? publishMixedAudioTrack; @@ -1825,7 +1825,7 @@ class RtcEngineEventHandler { /// Reports the last mile network quality of each user in the channel. /// - /// This callback reports the last mile network conditions of each user in the channel. Last mile refers to the connection between the local device and Agora's edge server. The SDK triggers this callback once every two seconds. If a channel includes multiple users, the SDK triggers this callback as many times. 
This callback provides feedback on network quality through sending and receiving broadcast packets within the channel. Excessive broadcast packets can lead to broadcast storms. To prevent broadcast storms from causing a large amount of data transmission within the channel, this callback supports feedback on the network quality of up to 4 remote hosts simultaneously by default. txQuality is when the user is not sending a stream; rxQuality is when the user is not receiving a stream. + /// This callback reports the last mile network conditions of each user in the channel. Last mile refers to the connection between the local device and Agora's edge server. The SDK triggers this callback once every two seconds. If a channel includes multiple users, the SDK triggers this callback as many times. This callback provides feedback on network quality through sending and receiving broadcast packets within the channel. Excessive broadcast packets can lead to broadcast storms. To prevent broadcast storms from causing a large amount of data transmission within the channel, this callback supports feedback on the network quality of up to 4 remote hosts simultaneously by default. txQuality is Unknown when the user is not sending a stream; rxQuality is Unknown when the user is not receiving a stream. /// /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The user ID. The network quality of the user with this user ID is reported. If the uid is 0, the local network quality is reported. @@ -1851,7 +1851,7 @@ class RtcEngineEventHandler { /// /// This callback reports the last-mile network conditions of the local user before the user joins the channel. Last mile refers to the connection between the local device and Agora's edge server. Before the user joins the channel, this callback is triggered by the SDK once startLastmileProbeTest is called and reports the last-mile network conditions of the local user. /// - /// * [quality] The last-mile network quality. qualityUnknown (0): The quality is unknown. qualityExcellent (1): The quality is excellent. qualityGood (2): The network quality seems excellent, but the bitrate can be slightly lower than excellent. qualityPoor (3): Users can feel the communication is slightly impaired. qualityBad (4): Users cannot communicate smoothly. qualityVbad (5): The quality is so bad that users can barely communicate. qualityDown (6): The network is down, and users cannot communicate at all. See QualityType. + /// * [quality] The last-mile network quality. qualityUnknown (0): The quality is unknown. qualityExcellent (1): The quality is excellent. qualityGood (2): The network quality seems excellent, but the bitrate can be slightly lower than excellent. qualityPoor (3): Users can feel the communication is slightly impaired. qualityBad (4): Users cannot communicate smoothly. qualityVbad (5): The quality is so bad that users can barely communicate. qualityDown (6): The network is down, and users cannot communicate at all. qualityDetecting (8): The last-mile probe test is in progress. See QualityType. final void Function(QualityType quality)? onLastmileQuality; /// Occurs when the first local video frame is displayed on the local video view. @@ -2143,7 +2143,7 @@ class RtcEngineEventHandler { /// The SDK triggers this callback when the local user receives the stream message that the remote user sends by calling the sendStreamMessage method. /// /// * [connection] The connection information. See RtcConnection. 
- /// * [uid] The ID of the remote user sending the message. + /// * [remoteUid] The ID of the remote user sending the message. /// * [streamId] The stream ID of the received message. /// * [data] The data received. /// * [length] The data length (byte). @@ -2158,7 +2158,7 @@ class RtcEngineEventHandler { /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The ID of the remote user sending the message. /// * [streamId] The stream ID of the received message. - /// * [code] The error code. See ErrorCodeType. + /// * [code] Error code. See ErrorCodeType. /// * [missed] The number of lost messages. /// * [cached] Number of incoming cached messages when the data stream is interrupted. final void Function(RtcConnection connection, int remoteUid, int streamId, @@ -2465,7 +2465,7 @@ class RtcEngineEventHandler { /// Video frame rendering event callback. /// - /// After calling the startMediaRenderingTracing method or joining the channel, the SDK triggers this callback to report the events of video frame rendering and the indicators during the rendering process. Developers can optimize the indicators to improve the efficiency of the first video frame rendering. + /// After calling the startMediaRenderingTracing method or joining a channel, the SDK triggers this callback to report the events of video frame rendering and the indicators during the rendering process. Developers can optimize the indicators to improve the efficiency of the first video frame rendering. /// /// * [connection] The connection information. See RtcConnection. /// * [uid] The user ID. @@ -3079,10 +3079,10 @@ abstract class RtcEngine { /// Gets the warning or error description. /// - /// * [code] The error code or warning code reported by the SDK. + /// * [code] The error code reported by the SDK. /// /// Returns - /// The specific error or warning description. + /// The specific error description. Future getErrorDescription(int code); /// Queries the video codec capabilities of the SDK. @@ -3364,7 +3364,9 @@ abstract class RtcEngine { /// /// * [enabled] Whether to enable the image enhancement function: true : Enable the image enhancement function. false : (Default) Disable the image enhancement function. /// * [options] The image enhancement options. See BeautyOptions. - /// * [type] Source type of the extension. See MediaSourceType. + /// * [type] The type of the media source to which the filter effect is applied. See MediaSourceType. In this method, this parameter supports only the following two settings: + /// Use the default value primaryCameraSource if you use camera to capture local video. + /// Set this parameter to customVideoSource if you use custom video source. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -3375,17 +3377,13 @@ abstract class RtcEngine { /// Sets low-light enhancement. /// - /// The low-light enhancement feature can adaptively adjust the brightness value of the video captured in situations with low or uneven lighting, such as backlit, cloudy, or dark scenes. It restores or highlights the image details and improves the overall visual effect of the video. You can call this method to enable the color enhancement feature and set the options of the color enhancement effect. - /// Call this method after calling enableVideo. - /// Dark light enhancement has certain requirements for equipment performance. 
The low-light enhancement feature has certain performance requirements on devices. If your device overheats after you enable low-light enhancement, Agora recommends modifying the low-light enhancement options to a less performance-consuming level or disabling low-light enhancement entirely. - /// Both this method and setExtensionProperty can turn on low-light enhancement: - /// When you use the SDK to capture video, Agora recommends this method (this method only works for video captured by the SDK). - /// When you use an external video source to implement custom video capture, or send an external video source to the SDK, Agora recommends using setExtensionProperty. - /// This method relies on the image enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. + /// You can call this method to enable the color enhancement feature and set the options of the color enhancement effect. /// /// * [enabled] Whether to enable low-light enhancement: true : Enable low-light enhancement. false : (Default) Disable low-light enhancement. /// * [options] The low-light enhancement options. See LowlightEnhanceOptions. - /// * [type] The type of the video source. See MediaSourceType. + /// * [type] The type of the media source to which the filter effect is applied. See MediaSourceType. In this method, this parameter supports only the following two settings: + /// Use the default value primaryCameraSource if you use camera to capture local video. + /// Set this parameter to customVideoSource if you use custom video source. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -3396,17 +3394,13 @@ abstract class RtcEngine { /// Sets video noise reduction. /// - /// Underlit environments and low-end video capture devices can cause video images to contain significant noise, which affects video quality. In real-time interactive scenarios, video noise also consumes bitstream resources and reduces encoding efficiency during encoding. You can call this method to enable the video noise reduction feature and set the options of the video noise reduction effect. - /// Call this method after calling enableVideo. - /// Video noise reduction has certain requirements for equipment performance. If your device overheats after you enable video noise reduction, Agora recommends modifying the video noise reduction options to a less performance-consuming level or disabling video noise reduction entirely. - /// Both this method and setExtensionProperty can turn on video noise reduction function: - /// When you use the SDK to capture video, Agora recommends this method (this method only works for video captured by the SDK). - /// When you use an external video source to implement custom video capture, or send an external video source to the SDK, Agora recommends using setExtensionProperty. - /// This method relies on the image enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. + /// You can call this method to enable the video noise reduction feature and set the options of the video noise reduction effect. 
If the noise reduction implemented by this method does not meet your needs, Agora recommends that you call the setBeautyEffectOptions method to enable the beauty and skin smoothing function to achieve better video noise reduction effects. The recommended BeautyOptions settings for intense noise reduction effect are as follows: lighteningContrastLevel lighteningContrastNormal lighteningLevel : 0.0 smoothnessLevel : 0.5 rednessLevel : 0.0 sharpnessLevel : 0.1 /// /// * [enabled] Whether to enable video noise reduction: true : Enable video noise reduction. false : (Default) Disable video noise reduction. /// * [options] The video noise reduction options. See VideoDenoiserOptions. - /// * [type] The type of the video source. See MediaSourceType. + /// * [type] The type of the media source to which the filter effect is applied. See MediaSourceType. In this method, this parameter supports only the following two settings: + /// Use the default value primaryCameraSource if you use camera to capture local video. + /// Set this parameter to customVideoSource if you use custom video source. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -3420,14 +3414,13 @@ abstract class RtcEngine { /// The video images captured by the camera can have color distortion. The color enhancement feature intelligently adjusts video characteristics such as saturation and contrast to enhance the video color richness and color reproduction, making the video more vivid. You can call this method to enable the color enhancement feature and set the options of the color enhancement effect. /// Call this method after calling enableVideo. /// The color enhancement feature has certain performance requirements on devices. With color enhancement turned on, Agora recommends that you change the color enhancement level to one that consumes less performance or turn off color enhancement if your device is experiencing severe heat problems. - /// Both this method and setExtensionProperty can enable color enhancement: - /// When you use the SDK to capture video, Agora recommends this method (this method only works for video captured by the SDK). - /// When you use an external video source to implement custom video capture, or send an external video source to the SDK, Agora recommends using setExtensionProperty. /// This method relies on the image enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. /// /// * [enabled] Whether to enable color enhancement: true Enable color enhancement. false : (Default) Disable color enhancement. /// * [options] The color enhancement options. See ColorEnhanceOptions. - /// * [type] The type of the video source. See MediaSourceType. + /// * [type] The type of the media source to which the filter effect is applied. See MediaSourceType. In this method, this parameter supports only the following two settings: + /// Use the default value primaryCameraSource if you use camera to capture local video. + /// Set this parameter to customVideoSource if you use custom video source. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. 
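The denoising fallback above lists concrete BeautyOptions values; applying them could look roughly like the following Dart sketch, assuming `engine` is an initialized RtcEngine and that setBeautyEffectOptions takes the named parameters shown:

// Hedged sketch: the intense noise reduction fallback described above.
await engine.setBeautyEffectOptions(
  enabled: true,
  options: const BeautyOptions(
    lighteningContrastLevel: LighteningContrastLevel.lighteningContrastNormal,
    lighteningLevel: 0.0,
    smoothnessLevel: 0.5,
    rednessLevel: 0.0,
    sharpnessLevel: 0.1,
  ),
);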
@@ -3459,9 +3452,9 @@ abstract class RtcEngine { /// * [enabled] Whether to enable virtual background: true : Enable virtual background. false : Disable virtual background. /// * [backgroundSource] The custom background. See VirtualBackgroundSource. To adapt the resolution of the custom background image to that of the video captured by the SDK, the SDK scales and crops the custom background image while ensuring that the content of the custom background image is not distorted. /// * [segproperty] Processing properties for background images. See SegmentationProperty. - /// * [type] The type of the video source. See MediaSourceType. In this method, this parameter supports only the following two settings: - /// The default value is primaryCameraSource. - /// If you want to use the second camera to capture video, set this parameter to secondaryCameraSource. + /// * [type] The type of the media source to which the filter effect is applied. See MediaSourceType. In this method, this parameter supports only the following two settings: + /// Use the default value primaryCameraSource if you use camera to capture local video. + /// Set this parameter to customVideoSource if you use custom video source. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -3515,7 +3508,7 @@ /// If someone subscribes to the low-quality stream, the SDK enables the low-quality stream and resets it to the SimulcastStreamConfig configuration used in the most recent calling of setDualStreamMode. If no configuration has been set by the user previously, the following values are used: /// Resolution: 480 × 272 /// Frame rate: 15 fps - /// Bitrate: 500 Kbps applicationScenario1v1 (2) is suitable for 1v1 video call scenarios. To meet the requirements for low latency and high-quality video in this scenario, the SDK optimizes its strategies, improving performance in terms of video quality, first frame rendering, latency on mid-to-low-end devices, and smoothness under weak network conditions. + /// Bitrate: 500 Kbps applicationScenario1v1 (2) This is applicable to the 1v1 video call scenario. To meet the requirements for low latency and high-quality video in this scenario, the SDK optimizes its strategies, improving performance in terms of video quality, first frame rendering, latency on mid-to-low-end devices, and smoothness under weak network conditions. applicationScenarioLiveshow (3) This is applicable to the live show scenario. In response to the high demands for first-frame rendering time and image quality, the SDK focuses on reducing first-frame rendering time and enhancing overall image quality, even in poor network conditions or on low-end devices. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -3678,14 +3671,10 @@ abstract class RtcEngine { /// Options for subscribing to remote video streams. /// - /// When a remote user has enabled dual-stream mode, you can call this method to choose the option for subscribing to the video streams sent by the remote user. - /// If you only register one VideoFrameObserver object, the SDK subscribes to the raw video data and encoded video data by default (the effect is equivalent to setting encodedFrameOnly to false).
- /// If you only register one VideoEncodedFrameObserver object, the SDK only subscribes to the encoded video data by default (the effect is equivalent to setting encodedFrameOnly to true). - /// If you register one VideoFrameObserver object and one VideoEncodedFrameObserver object successively, the SDK subscribes to the encoded video data by default (the effect is equivalent to setting encodedFrameOnly to false). - /// If you call this method first with the options parameter set, and then register one VideoFrameObserver or VideoEncodedFrameObserver object, you need to call this method again and set the options parameter as described in the above two items to get the desired results. Agora recommends the following steps: - /// Set autoSubscribeVideo to false when calling joinChannel to join a channel. - /// Call this method after receiving the onUserJoined callback to set the subscription options for the specified remote user's video stream. - /// Call the muteRemoteVideoStream method to resume subscribing to the video stream of the specified remote user. If you set encodedFrameOnly to true in the previous step, the SDK triggers the onEncodedVideoFrameReceived callback locally to report the received encoded video frame information. + /// When a remote user has enabled dual-stream mode, you can call this method to choose the option for subscribing to the video streams sent by the remote user. The default subscription behavior of the SDK for remote video streams depends on the type of registered video observer: + /// If the VideoFrameObserver observer is registered, the default is to subscribe to both raw data and encoded data. + /// If the VideoEncodedFrameObserver observer is registered, the default is to subscribe only to the encoded data. + /// If both types of observers are registered, the default behavior follows the last registered video observer. For example, if the last registered observer is the VideoFrameObserver observer, the default is to subscribe to both raw data and encoded data. If you want to modify the default behavior, or set different subscription options for different uids, you can call this method to set it. /// /// * [uid] The user ID of the remote user. /// * [options] The video subscription options. See VideoSubscriptionOptions. @@ -3897,7 +3886,7 @@ abstract class RtcEngine { /// Adjusts the volume during audio mixing. /// - /// This method adjusts the audio mixing volume on both the local client and remote clients. + /// This method adjusts the audio mixing volume on both the local client and remote clients. This method does not affect the volume of the audio file set in the playEffect method. /// /// * [volume] Audio mixing volume. The value ranges between 0 and 100. The default value is 100, which means the original volume. /// @@ -4815,11 +4804,11 @@ abstract class RtcEngine { /// /// If you enable loopback audio capturing, the output of the sound card is mixed into the audio stream sent to the other end. /// This method applies to the macOS and Windows only. - /// macOS does not support loopback audio capture of the default sound card. If you need to use this function, use a virtual sound card and pass its name to the deviceName parameter. Agora recommends using AgoraALD as the virtual sound card for audio capturing. + /// The macOS system's default sound card does not support recording functionality. As of v4.5.0, when you call this method for the first time, the SDK will automatically install the built-in AgoraALD virtual sound card developed by Agora. 
After successful installation, the audio routing will automatically switch to the virtual sound card and use it for audio capturing. /// You can call this method either before or after joining a channel. /// If you call the disableAudio method to disable the audio module, audio capturing will be disabled as well. If you need to enable audio capturing, call the enableAudio method to enable the audio module and then call the enableLoopbackRecording method. /// - /// * [enabled] Sets whether to enable loopback audio capturing. true : Enable loopback audio capturing. false : (Default) Disable loopback audio capturing. + /// * [enabled] Sets whether to enable loopback audio capturing. true : Enable sound card capturing. You can find the name of the virtual sound card in your system's Audio Devices > Output. false : Disable sound card capturing. The name of the virtual sound card will not be shown in your system's Audio Devices > Output. /// * [deviceName] macOS: The device name of the virtual sound card. The default value is set to NULL, which means using AgoraALD for loopback audio capturing. /// Windows: The device name of the sound card. The default is set to NULL, which means the SDK uses the sound card of your device for loopback audio capturing. /// @@ -4960,7 +4949,7 @@ abstract class RtcEngine { /// Checks whether the device camera supports face detection. /// - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). + /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// This method is for Android and iOS only. /// /// Returns @@ -4969,7 +4958,7 @@ abstract class RtcEngine { /// Checks whether the device supports camera flash. /// - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). + /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// This method is for Android and iOS only. /// The app enables the front camera by default. If your front camera does not support flash, this method returns false. If you want to check whether the rear camera supports the flash function, call switchCamera before this method. /// On iPads with system version 15, even if isCameraTorchSupported returns true, you might fail to successfully enable the flash by calling setCameraTorchOn due to system issues. @@ -4980,7 +4969,7 @@ abstract class RtcEngine { /// Check whether the device supports the manual focus function. /// - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). + /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// This method is for Android and iOS only. /// /// Returns @@ -4989,7 +4978,7 @@ abstract class RtcEngine { /// Checks whether the device supports the face auto-focus function. /// - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). 
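The flash notes above mention isCameraTorchSupported and setCameraTorchOn together; a minimal Dart sketch of that check-then-set pattern, assuming `engine` is an initialized RtcEngine whose camera capture has already reached the local video state described above:

// Hedged sketch: query the capability first, then toggle the torch.
final torchSupported = await engine.isCameraTorchSupported();
if (torchSupported) {
  await engine.setCameraTorchOn(true);
}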
+ /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// This method is for Android and iOS only. /// /// Returns @@ -5021,7 +5010,7 @@ /// Gets the maximum zoom ratio supported by the camera. /// - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). + /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// This method is for Android and iOS only. /// /// Returns @@ -5065,7 +5054,7 @@ /// Checks whether the device supports manual exposure. /// - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). + /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// This method is for Android and iOS only. /// /// Returns @@ -5089,7 +5078,7 @@ /// Queries whether the current camera supports adjusting exposure value. /// /// This method is for Android and iOS only. - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). + /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// Before calling setCameraExposureFactor, Agora recommends that you call this method to query whether the current camera supports adjusting the exposure value. /// By calling this method, you adjust the exposure value of the currently active camera, that is, the camera specified when calling setCameraCapturerConfiguration. /// @@ -5113,7 +5102,7 @@ /// Checks whether the device supports auto exposure. /// - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). + /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// This method applies to iOS only. /// /// Returns @@ -5213,8 +5202,8 @@ /// /// You can call this method before sharing a screen or window to get a list of shareable screens and windows, which enables a user to use thumbnails in the list to easily choose a particular screen or window to share. This list also contains important information such as window ID and screen ID, with which you can call startScreenCaptureByWindowId or startScreenCaptureByDisplayId to start the sharing. This method applies to macOS and Windows only. /// - /// * [thumbSize] The target size of the screen or window thumbnail (the width and height are in pixels). The SDK scales the original image to make the length of the longest side of the image the same as that of the target size without distorting the original image. For example, if the original image is 400 × 300 and thumbSize is 100 × 100, the actual size of the thumbnail is 100 × 75.
If the target size is larger than the original size, the thumbnail is the original image and the SDK does not scale it. - /// * [iconSize] The target size of the icon corresponding to the application program (the width and height are in pixels). The SDK scales the original image to make the length of the longest side of the image the same as that of the target size without distorting the original image. For example, if the original image is 400 × 300 and iconSize is 100 × 100, the actual size of the icon is 100 × 75. If the target size is larger than the original size, the icon is the original image and the SDK does not scale it. + /// * [thumbSize] The target size of the screen or window thumbnail (the width and height are in pixels).. The SDK scales the original image to make the length of the longest side of the image the same as that of the target size without distorting the original image. For example, if the original image is 400 × 300 and thumbSize is 100 × 100, the actual size of the thumbnail is 100 × 75. If the target size is larger than the original size, the thumbnail is the original image and the SDK does not scale it. + /// * [iconSize] The target size of the icon corresponding to the application program (the width and height are in pixels).. The SDK scales the original image to make the length of the longest side of the image the same as that of the target size without distorting the original image. For example, if the original image is 400 × 300 and iconSize is 100 × 100, the actual size of the icon is 100 × 75. If the target size is larger than the original size, the icon is the original image and the SDK does not scale it. /// * [includeScreen] Whether the SDK returns the screen information in addition to the window information: true : The SDK returns screen and window information. false : The SDK returns window information only. /// /// Returns @@ -5321,7 +5310,7 @@ abstract class RtcEngine { /// This method is for Windows and macOS only. /// Call this method after starting screen sharing or window sharing. /// - /// * [captureParams] The screen sharing encoding parameters. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters. The video properties of the screen sharing stream only need to be set through this parameter, and are unrelated to setVideoEncoderConfiguration. + /// * [captureParams] The screen sharing encoding parameters. See ScreenCaptureParameters. The video properties of the screen sharing stream only need to be set through this parameter, and are unrelated to setVideoEncoderConfiguration. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -5335,7 +5324,7 @@ abstract class RtcEngine { /// When you do not pass in a value, Agora bills you at 1280 × 720. /// When you pass in a value, Agora bills you at that value. /// - /// * [captureParams] The screen sharing encoding parameters. The default video dimension is 1920 x 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters2. + /// * [captureParams] The screen sharing encoding parameters. See ScreenCaptureParameters2. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. 
You need to catch the exception and handle it accordingly.
@@ -5349,7 +5338,7 @@ abstract class RtcEngine {
 /// This method is for Android and iOS only.
 /// On the iOS platform, screen sharing is only available on iOS 12.0 and later.
 ///
- /// * [captureParams] The screen sharing encoding parameters. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters2.
+ /// * [captureParams] The screen sharing encoding parameters. See ScreenCaptureParameters2.
 ///
 /// Returns
 /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly.
@@ -5597,9 +5586,8 @@ abstract class RtcEngine {
 /// Sends data stream messages.
 ///
 /// After calling createDataStream, you can call this method to send data stream messages to all users in the channel. The SDK has the following restrictions on this method:
- /// Each user can have up to five data streams simultaneously.
- /// Up to 60 packets can be sent per second in a data stream with each packet having a maximum size of 1 KB.
- /// Up to 30 KB of data can be sent per second in a data stream. A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. A failed method call triggers the onStreamMessageError callback on the remote client.
+ /// Each client within the channel can have up to 5 data channels simultaneously, with a total shared packet bitrate limit of 30 KB/s for all data channels.
+ /// Each data channel can send up to 60 packets per second, with each packet being a maximum of 1 KB. A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. A failed method call triggers the onStreamMessageError callback on the remote client.
 /// This method needs to be called after createDataStream and joining the channel.
 /// In live streaming scenarios, this method only applies to hosts.
 ///
@@ -5670,7 +5658,7 @@ abstract class RtcEngine {
 /// live interactive streaming, such as sending shopping links, digital coupons, and online quizzes. Call this method before joinChannel.
 ///
 /// * [observer] The metadata observer. See MetadataObserver.
- /// * [type] The metadata type. The SDK currently only supports videoMetadata.
+ /// * [type] The metadata type. The SDK currently only supports videoMetadata.
 ///
 /// Returns
 /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly.
@@ -5681,7 +5669,7 @@ abstract class RtcEngine {
 /// Unregisters the specified metadata observer.
 ///
 /// * [observer] The metadata observer. See MetadataObserver.
- /// * [type] The metadata type. The SDK currently only supports videoMetadata.
+ /// * [type] The metadata type. The SDK currently only supports videoMetadata.
 ///
 /// Returns
 /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly.
@@ -5990,7 +5978,7 @@ abstract class RtcEngine {
 /// When video screenshot and upload function is enabled, the SDK takes screenshots and uploads videos sent by local users based on the type and frequency of the module you set in ContentInspectConfig.
After video screenshot and upload, the Agora server sends the callback notification to your app server in HTTPS requests and sends all screenshots to the third-party cloud storage service.
 ///
 /// * [enabled] Whether to enable video screenshot and upload: true : Enables video screenshot and upload. false : Disables video screenshot and upload.
- /// * [config] Screenshot and upload configuration. See ContentInspectConfig. When the video moderation module is set to video moderation via Agora self-developed extension(contentInspectSupervision), the video screenshot and upload dynamic library libagora_content_inspect_extension.dll is required. Deleting this library disables the screenshot and upload feature.
+ /// * [config] Screenshot and upload configuration. See ContentInspectConfig.
 ///
 /// Returns
 /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly.
@@ -6026,7 +6014,7 @@ abstract class RtcEngine {
 /// Sets up cloud proxy service.
 ///
 /// When users' network access is restricted by a firewall, configure the firewall to allow specific IP addresses and ports provided by Agora; then, call this method to enable the cloud proxy and set the cloud proxy type with the proxyType parameter. After successfully connecting to the cloud proxy, the SDK triggers the onConnectionStateChanged (connectionStateConnecting, connectionChangedSettingProxyServer) callback. To disable the cloud proxy that has been set, call the setCloudProxy (noneProxy). To change the cloud proxy type that has been set, call the setCloudProxy (noneProxy) first, and then call the setCloudProxy to set the proxyType you want.
- /// Agora recommends that you call this method after joining a channel.
+ /// Agora recommends that you call this method before joining a channel.
 /// When a user is behind a firewall and uses the Force UDP cloud proxy, the services for Media Push and cohosting across channels are not available.
 /// When you use the Force TCP cloud proxy, note that an error would occur when calling the startAudioMixing method to play online music files in the HTTP protocol. The services for Media Push and cohosting across channels use the cloud proxy with the TCP protocol.
 ///
diff --git a/lib/src/agora_rtc_engine_ex.dart b/lib/src/agora_rtc_engine_ex.dart
index d155f28f9..3407e8bc8 100644
--- a/lib/src/agora_rtc_engine_ex.dart
+++ b/lib/src/agora_rtc_engine_ex.dart
@@ -427,9 +427,8 @@ abstract class RtcEngineEx implements RtcEngine {
 /// Sends data stream messages.
 ///
 /// A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. A failed method call triggers the onStreamMessageError callback on the remote client. The SDK has the following restrictions on this method:
- /// Each user can have up to five data streams simultaneously.
- /// Up to 60 packets can be sent per second in a data stream with each packet having a maximum size of 1 KB.
- /// Up to 30 KB of data can be sent per second in a data stream. After calling createDataStreamEx, you can call this method to send data stream messages to all users in the channel.
+ /// Each client within the channel can have up to 5 data channels simultaneously, with a total shared packet bitrate limit of 30 KB/s for all data channels.
+ /// Each data channel can send up to 60 packets per second, with each packet being a maximum of 1 KB.
After calling createDataStreamEx, you can call this method to send data stream messages to all users in the channel.
 /// Call this method after joinChannelEx.
 /// Ensure that you call createDataStreamEx to create a data channel before calling this method.
 /// This method applies only to the COMMUNICATION profile or to the hosts in the LIVE_BROADCASTING profile. If an audience in the LIVE_BROADCASTING profile calls this method, the audience may be switched to a host.
@@ -699,7 +698,7 @@ abstract class RtcEngineEx implements RtcEngine {
 /// This method can take screenshots for multiple video streams and upload them. When video screenshot and upload function is enabled, the SDK takes screenshots and uploads videos sent by local users based on the type and frequency of the module you set in ContentInspectConfig. After video screenshot and upload, the Agora server sends the callback notification to your app server in HTTPS requests and sends all screenshots to the third-party cloud storage service.
 ///
 /// * [enabled] Whether to enable video screenshot and upload: true : Enables video screenshot and upload. false : Disables video screenshot and upload.
- /// * [config] Screenshot and upload configuration. See ContentInspectConfig. When the video moderation module is set to video moderation via Agora self-developed extension(contentInspectSupervision), the video screenshot and upload dynamic library libagora_content_inspect_extension.dll is required. Deleting this library disables the screenshot and upload feature.
+ /// * [config] Screenshot and upload configuration. See ContentInspectConfig.
 /// * [connection] The connection information. See RtcConnection.
 ///
 /// Returns
@@ -726,7 +725,7 @@ abstract class RtcEngineEx implements RtcEngine {
 /// Gets the call ID with the connection ID.
 ///
- /// When a user joins a channel on a client, a callId is generated to identify the call from the client. You can call this method to get the callId parameter, and pass it in when calling methods such as rate and complain.
+ /// When a user joins a channel on a client, a callId is generated to identify the call from the client. You can call this method to get callId, and pass it in when calling methods such as rate and complain.
 ///
 /// * [connection] The connection information. See RtcConnection.
 ///
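
Both sendStreamMessage hunks above restate the data-channel limits as up to 5 channels per client, 60 packets per second per channel, 1 KB per packet, and 30 KB/s shared across channels. The sketch below is a minimal illustration of keeping a send under the per-packet cap; it assumes an engine that has already joined a channel and a streamId previously obtained from createDataStream (or createDataStreamEx), and the named parameters of sendStreamMessage shown here should be verified against your SDK version.

import 'dart:convert';
import 'dart:typed_data';

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Illustrative: send one short text message on an existing data stream.
// streamId comes from a prior createDataStream/createDataStreamEx call
// (each client may hold at most five streams at a time).
Future<void> sendText(RtcEngine engine, int streamId, String text) async {
  final Uint8List data = Uint8List.fromList(utf8.encode(text));
  if (data.length > 1024) {
    // Packets are capped at 1 KB; split or shorten before sending. The caller
    // also has to stay under 60 packets/s per channel and 30 KB/s overall.
    throw ArgumentError('data stream packets are limited to 1 KB');
  }
  // Remote users receive this via onStreamMessage; failures surface through
  // onStreamMessageError on the remote side.
  await engine.sendStreamMessage(
      streamId: streamId, data: data, length: data.length);
}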
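
The RtcEngine camera hunks above consistently move the precondition from localVideoStreamStateEncoding (2) to localVideoStreamStateCapturing (1). The following sketch shows one way an app could respect that ordering before adjusting exposure; it is illustrative only, assuming the 6.x Dart API of this repository, where the capability query is taken to be isCameraExposureSupported and setCameraExposureFactor takes a positional double. The factor 0.5 is a placeholder.

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Illustrative only: touch exposure once local capture has actually started,
// i.e. after onLocalVideoStateChanged reports localVideoStreamStateCapturing.
void adjustExposureWhenCapturing(RtcEngine engine) {
  engine.registerEventHandler(RtcEngineEventHandler(
    onLocalVideoStateChanged: (source, state, reason) async {
      if (state == LocalVideoStreamState.localVideoStreamStateCapturing) {
        // Query support first, as the docs above recommend, then adjust.
        final supported = await engine.isCameraExposureSupported();
        if (supported) {
          await engine.setCameraExposureFactor(0.5); // placeholder value
        }
      }
    },
  ));
}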
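
The getScreenCaptureSources hunk spells out how thumbSize and iconSize are applied: the longest side is scaled to the target while the aspect ratio is preserved, so a 400 × 300 source with a 100 × 100 target yields 100 × 75, and a target larger than the source leaves the image untouched. A small self-contained sketch of that rule follows; the helper and its record return type are illustrative, not SDK types, and the scaling choice simply reproduces the documented example.

// Illustrative reimplementation of the documented thumbnail rule;
// not an SDK API, just the arithmetic described above.
({int width, int height}) scaleToTarget({
  required int srcW,
  required int srcH,
  required int targetW,
  required int targetH,
}) {
  // If the target is larger than the source, the SDK keeps the original size.
  if (targetW >= srcW && targetH >= srcH) {
    return (width: srcW, height: srcH);
  }
  // Otherwise the longest side of the source is scaled to the target.
  final double scale = srcW >= srcH ? targetW / srcW : targetH / srcH;
  return (width: (srcW * scale).round(), height: (srcH * scale).round());
}

void main() {
  // 400 × 300 with a 100 × 100 target -> (width: 100, height: 75).
  print(scaleToTarget(srcW: 400, srcH: 300, targetW: 100, targetH: 100));
}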
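
Several hunks above trim the captureParams descriptions for screen sharing down to a reference to ScreenCaptureParameters / ScreenCaptureParameters2. For the Android/iOS path, a minimal start/stop sketch is shown below; it assumes the positional startScreenCapture(ScreenCaptureParameters2) and stopScreenCapture() signatures of the 6.x Dart API, and it does not cover publishing the screen track through ChannelMediaOptions.

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Illustrative mobile screen-sharing flow (Android/iOS; iOS 12.0+ per the docs).
Future<void> shareScreen(RtcEngine engine) async {
  // Capture both audio and video; encoding details can be set via videoParams.
  await engine.startScreenCapture(const ScreenCaptureParameters2(
    captureAudio: true,
    captureVideo: true,
  ));
}

Future<void> stopSharing(RtcEngine engine) async {
  await engine.stopScreenCapture();
}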