diff --git a/stream-webrtc-android/api/stream-webrtc-android.api b/stream-webrtc-android/api/stream-webrtc-android.api index 340a37874..2dd67c4df 100644 --- a/stream-webrtc-android/api/stream-webrtc-android.api +++ b/stream-webrtc-android/api/stream-webrtc-android.api @@ -21,9 +21,16 @@ public class org/webrtc/AudioSource : org/webrtc/MediaSource { public class org/webrtc/AudioTrack : org/webrtc/MediaStreamTrack { public fun (J)V + public fun addSink (Lorg/webrtc/AudioTrackSink;)V + public fun dispose ()V + public fun removeSink (Lorg/webrtc/AudioTrackSink;)V public fun setVolume (D)V } +public abstract interface class org/webrtc/AudioTrackSink { + public abstract fun onData (Ljava/nio/ByteBuffer;IIIIJ)V +} + public class org/webrtc/BuiltinAudioDecoderFactoryFactory : org/webrtc/AudioDecoderFactoryFactory { public fun ()V public fun createNativeAudioDecoderFactory ()J @@ -450,6 +457,59 @@ public class org/webrtc/FileVideoCapturer : org/webrtc/VideoCapturer { public fun tick ()V } +public class org/webrtc/FrameCryptor { + public fun (J)V + public fun dispose ()V + public fun getKeyIndex ()I + public fun getNativeFrameCryptor ()J + public fun isEnabled ()Z + public fun setEnabled (Z)V + public fun setKeyIndex (I)V + public fun setObserver (Lorg/webrtc/FrameCryptor$Observer;)V +} + +public final class org/webrtc/FrameCryptor$FrameCryptionState : java/lang/Enum { + public static final field DECRYPTIONFAILED Lorg/webrtc/FrameCryptor$FrameCryptionState; + public static final field ENCRYPTIONFAILED Lorg/webrtc/FrameCryptor$FrameCryptionState; + public static final field INTERNALERROR Lorg/webrtc/FrameCryptor$FrameCryptionState; + public static final field KEYRATCHETED Lorg/webrtc/FrameCryptor$FrameCryptionState; + public static final field MISSINGKEY Lorg/webrtc/FrameCryptor$FrameCryptionState; + public static final field NEW Lorg/webrtc/FrameCryptor$FrameCryptionState; + public static final field OK Lorg/webrtc/FrameCryptor$FrameCryptionState; + public static fun valueOf (Ljava/lang/String;)Lorg/webrtc/FrameCryptor$FrameCryptionState; + public static fun values ()[Lorg/webrtc/FrameCryptor$FrameCryptionState; +} + +public abstract interface class org/webrtc/FrameCryptor$Observer { + public abstract fun onFrameCryptionStateChanged (Ljava/lang/String;Lorg/webrtc/FrameCryptor$FrameCryptionState;)V +} + +public final class org/webrtc/FrameCryptorAlgorithm : java/lang/Enum { + public static final field AES_GCM Lorg/webrtc/FrameCryptorAlgorithm; + public static fun valueOf (Ljava/lang/String;)Lorg/webrtc/FrameCryptorAlgorithm; + public static fun values ()[Lorg/webrtc/FrameCryptorAlgorithm; +} + +public class org/webrtc/FrameCryptorFactory { + public fun ()V + public static fun createFrameCryptorForRtpReceiver (Lorg/webrtc/PeerConnectionFactory;Lorg/webrtc/RtpReceiver;Ljava/lang/String;Lorg/webrtc/FrameCryptorAlgorithm;Lorg/webrtc/FrameCryptorKeyProvider;)Lorg/webrtc/FrameCryptor; + public static fun createFrameCryptorForRtpSender (Lorg/webrtc/PeerConnectionFactory;Lorg/webrtc/RtpSender;Ljava/lang/String;Lorg/webrtc/FrameCryptorAlgorithm;Lorg/webrtc/FrameCryptorKeyProvider;)Lorg/webrtc/FrameCryptor; + public static fun createFrameCryptorKeyProvider (Z[BI[BIIZ)Lorg/webrtc/FrameCryptorKeyProvider; +} + +public class org/webrtc/FrameCryptorKeyProvider { + public fun (J)V + public fun dispose ()V + public fun exportKey (Ljava/lang/String;I)[B + public fun exportSharedKey (I)[B + public fun getNativeKeyProvider ()J + public fun ratchetKey (Ljava/lang/String;I)[B + public fun ratchetSharedKey (I)[B + public 
fun setKey (Ljava/lang/String;I[B)Z + public fun setSharedKey (I[B)Z + public fun setSifTrailer ([B)V +} + public abstract interface class org/webrtc/FrameDecryptor { public abstract fun getNativeFrameDecryptor ()J } @@ -566,7 +626,7 @@ public class org/webrtc/LibaomAv1Decoder : org/webrtc/WrappedNativeVideoDecoder public class org/webrtc/LibaomAv1Encoder : org/webrtc/WrappedNativeVideoEncoder { public fun ()V - public fun createNativeVideoEncoder ()J + public fun createNative (J)J public fun isHardwareEncoder ()Z } @@ -577,7 +637,7 @@ public class org/webrtc/LibvpxVp8Decoder : org/webrtc/WrappedNativeVideoDecoder public class org/webrtc/LibvpxVp8Encoder : org/webrtc/WrappedNativeVideoEncoder { public fun ()V - public fun createNativeVideoEncoder ()J + public fun createNative (J)J public fun isHardwareEncoder ()Z } @@ -588,7 +648,7 @@ public class org/webrtc/LibvpxVp9Decoder : org/webrtc/WrappedNativeVideoDecoder public class org/webrtc/LibvpxVp9Encoder : org/webrtc/WrappedNativeVideoEncoder { public fun ()V - public fun createNativeVideoEncoder ()J + public fun createNative (J)J public fun isHardwareEncoder ()Z } @@ -702,6 +762,7 @@ public class org/webrtc/MediaStreamTrack { public fun dispose ()V public fun enabled ()Z public fun id ()Ljava/lang/String; + public fun isDisposed ()Z public fun kind ()Ljava/lang/String; public fun setEnabled (Z)Z public fun state ()Lorg/webrtc/MediaStreamTrack$State; @@ -1057,18 +1118,17 @@ public final class org/webrtc/PeerConnection$PortPrunePolicy : java/lang/Enum { public class org/webrtc/PeerConnection$RTCConfiguration { public field activeResetSrtpParams Z - public field allowCodecSwitching Ljava/lang/Boolean; public field audioJitterBufferFastAccelerate Z public field audioJitterBufferMaxPackets I public field bundlePolicy Lorg/webrtc/PeerConnection$BundlePolicy; public field candidateNetworkPolicy Lorg/webrtc/PeerConnection$CandidateNetworkPolicy; public field certificate Lorg/webrtc/RtcCertificatePem; - public field combinedAudioVideoBwe Ljava/lang/Boolean; public field continualGatheringPolicy Lorg/webrtc/PeerConnection$ContinualGatheringPolicy; public field cryptoOptions Lorg/webrtc/CryptoOptions; public field disableIPv6OnWifi Z public field enableCpuOveruseDetection Z public field enableDscp Z + public field enableIceGatheringOnAnyAddressPorts Z public field enableImplicitRollback Z public field iceBackupCandidatePairPingInterval I public field iceCandidatePoolSize I @@ -1514,7 +1574,7 @@ public final class org/webrtc/SimulcastAlignedVideoEncoderFactory : org/webrtc/V public class org/webrtc/SimulcastVideoEncoder : org/webrtc/WrappedNativeVideoEncoder { public fun (Lorg/webrtc/VideoEncoderFactory;Lorg/webrtc/VideoEncoderFactory;Lorg/webrtc/VideoCodecInfo;)V - public fun createNativeVideoEncoder ()J + public fun createNative (J)J public fun isHardwareEncoder ()Z } @@ -1703,9 +1763,9 @@ public class org/webrtc/VideoCodecInfo { public final field name Ljava/lang/String; public final field params Ljava/util/Map; public final field payload I - public field scalabilityModes [I + public final field scalabilityModes Ljava/util/List; public fun (ILjava/lang/String;Ljava/util/Map;)V - public fun (Ljava/lang/String;Ljava/util/Map;)V + public fun (Ljava/lang/String;Ljava/util/Map;Ljava/util/List;)V public fun equals (Ljava/lang/Object;)Z public fun hashCode ()I public fun toString ()Ljava/lang/String; @@ -1766,7 +1826,7 @@ public class org/webrtc/VideoDecoderFallback : org/webrtc/WrappedNativeVideoDeco } public abstract interface class 
org/webrtc/VideoEncoder { - public fun createNativeVideoEncoder ()J + public fun createNative (J)J public abstract fun encode (Lorg/webrtc/VideoFrame;Lorg/webrtc/VideoEncoder$EncodeInfo;)Lorg/webrtc/VideoCodecStatus; public fun getEncoderInfo ()Lorg/webrtc/VideoEncoder$EncoderInfo; public abstract fun getImplementationName ()Ljava/lang/String; @@ -1885,7 +1945,7 @@ public abstract interface class org/webrtc/VideoEncoderFactory$VideoEncoderSelec public class org/webrtc/VideoEncoderFallback : org/webrtc/WrappedNativeVideoEncoder { public fun (Lorg/webrtc/VideoEncoder;Lorg/webrtc/VideoEncoder;)V - public fun createNativeVideoEncoder ()J + public fun createNative (J)J public fun isHardwareEncoder ()Z } @@ -2028,7 +2088,7 @@ public abstract class org/webrtc/WrappedNativeVideoDecoder : org/webrtc/VideoDec public abstract class org/webrtc/WrappedNativeVideoEncoder : org/webrtc/VideoEncoder { public fun ()V - public abstract fun createNativeVideoEncoder ()J + public abstract fun createNative (J)J public final fun encode (Lorg/webrtc/VideoFrame;Lorg/webrtc/VideoEncoder$EncodeInfo;)Lorg/webrtc/VideoCodecStatus; public final fun getImplementationName ()Ljava/lang/String; public final fun getScalingSettings ()Lorg/webrtc/VideoEncoder$ScalingSettings; @@ -2070,6 +2130,8 @@ public abstract interface class org/webrtc/audio/AudioDeviceModule { public abstract fun getNativeAudioDeviceModulePointer ()J public abstract fun release ()V public abstract fun setMicrophoneMute (Z)V + public fun setNoiseSuppressorEnabled (Z)Z + public fun setPreferredMicrophoneFieldDimension (F)Z public abstract fun setSpeakerMute (Z)V } @@ -2084,6 +2146,7 @@ public class org/webrtc/audio/JavaAudioDeviceModule : org/webrtc/audio/AudioDevi public static fun isBuiltInNoiseSuppressorSupported ()Z public fun release ()V public fun setMicrophoneMute (Z)V + public fun setNoiseSuppressorEnabled (Z)Z public fun setPreferredInputDevice (Landroid/media/AudioDeviceInfo;)V public fun setSpeakerMute (Z)V } @@ -2136,7 +2199,6 @@ public class org/webrtc/audio/JavaAudioDeviceModule$Builder { public fun createAudioDeviceModule ()Lorg/webrtc/audio/JavaAudioDeviceModule; public fun setAudioAttributes (Landroid/media/AudioAttributes;)Lorg/webrtc/audio/JavaAudioDeviceModule$Builder; public fun setAudioFormat (I)Lorg/webrtc/audio/JavaAudioDeviceModule$Builder; - public fun setAudioRecordDataCallback (Lorg/webrtc/audio/AudioRecordDataCallback;)Lorg/webrtc/audio/JavaAudioDeviceModule$Builder; public fun setAudioRecordErrorCallback (Lorg/webrtc/audio/JavaAudioDeviceModule$AudioRecordErrorCallback;)Lorg/webrtc/audio/JavaAudioDeviceModule$Builder; public fun setAudioRecordStateCallback (Lorg/webrtc/audio/JavaAudioDeviceModule$AudioRecordStateCallback;)Lorg/webrtc/audio/JavaAudioDeviceModule$Builder; public fun setAudioSource (I)Lorg/webrtc/audio/JavaAudioDeviceModule$Builder;
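The API dump above adds a runtime noise-suppressor toggle to the audio device module. A minimal usage sketch, assuming an Android `appContext` is available; the toggle's return-value semantics are inferred from its `(Z)Z` descriptor, and the builder flag shown alongside it is the pre-existing `setUseHardwareNoiseSuppressor` option:

```java
// Sketch only, not part of the diff: toggling noise suppression at runtime.
import android.content.Context;
import org.webrtc.audio.JavaAudioDeviceModule;

final class AdmExample {
  static JavaAudioDeviceModule createAdm(Context appContext) {
    return JavaAudioDeviceModule.builder(appContext)
        .setUseHardwareNoiseSuppressor(true) // existing builder flag
        .createAudioDeviceModule();
  }

  static boolean setSuppression(JavaAudioDeviceModule adm, boolean enabled) {
    // Per the (Z)Z descriptor this returns a boolean, presumably whether the
    // toggle could be applied on this device.
    return adm.setNoiseSuppressorEnabled(enabled);
  }
}
```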
diff --git a/stream-webrtc-android/libs/arm64-v8a/libjingle_peerconnection_so.so b/stream-webrtc-android/libs/arm64-v8a/libjingle_peerconnection_so.so index abbfa238a..12d52f4b1 100755 Binary files a/stream-webrtc-android/libs/arm64-v8a/libjingle_peerconnection_so.so and b/stream-webrtc-android/libs/arm64-v8a/libjingle_peerconnection_so.so differ diff --git a/stream-webrtc-android/libs/armeabi-v7a/libjingle_peerconnection_so.so b/stream-webrtc-android/libs/armeabi-v7a/libjingle_peerconnection_so.so index b14cc059a..1d01f1930 100755 Binary files a/stream-webrtc-android/libs/armeabi-v7a/libjingle_peerconnection_so.so and b/stream-webrtc-android/libs/armeabi-v7a/libjingle_peerconnection_so.so differ diff --git a/stream-webrtc-android/libs/x86/libjingle_peerconnection_so.so b/stream-webrtc-android/libs/x86/libjingle_peerconnection_so.so index 238ae7cf6..cd113d823 100755 Binary files a/stream-webrtc-android/libs/x86/libjingle_peerconnection_so.so and b/stream-webrtc-android/libs/x86/libjingle_peerconnection_so.so differ diff --git a/stream-webrtc-android/libs/x86_64/libjingle_peerconnection_so.so b/stream-webrtc-android/libs/x86_64/libjingle_peerconnection_so.so index f98e29ad0..73d74f483 100755 Binary files a/stream-webrtc-android/libs/x86_64/libjingle_peerconnection_so.so and b/stream-webrtc-android/libs/x86_64/libjingle_peerconnection_so.so differ diff --git a/stream-webrtc-android/src/main/java/org/webrtc/AudioTrack.java b/stream-webrtc-android/src/main/java/org/webrtc/AudioTrack.java index ca745db63..856ed1c2f 100644 --- a/stream-webrtc-android/src/main/java/org/webrtc/AudioTrack.java +++ b/stream-webrtc-android/src/main/java/org/webrtc/AudioTrack.java @@ -10,8 +10,12 @@ package org.webrtc; +import java.util.IdentityHashMap; + /** Java wrapper for a C++ AudioTrackInterface */ public class AudioTrack extends MediaStreamTrack { + private final IdentityHashMap<AudioTrackSink, Long> sinks = new IdentityHashMap<AudioTrackSink, Long>(); + public AudioTrack(long nativeTrack) { super(nativeTrack); } @@ -23,10 +27,54 @@ public void setVolume(double volume) { nativeSetVolume(getNativeAudioTrack(), volume); } + /** + * Adds an AudioTrackSink to the track. This callback is only + * called for remote audio tracks. + * + * Repeated addSink calls will not add the sink multiple times. + */ + public void addSink(AudioTrackSink sink) { + if (sink == null) { + throw new IllegalArgumentException("The AudioTrackSink is not allowed to be null"); + } + if (!sinks.containsKey(sink)) { + final long nativeSink = nativeWrapSink(sink); + sinks.put(sink, nativeSink); + nativeAddSink(getNativeMediaStreamTrack(), nativeSink); + } + } + + /** + * Removes an AudioTrackSink from the track. + * + * If the AudioTrackSink was not attached to the track, this is a no-op. + */ + public void removeSink(AudioTrackSink sink) { + final Long nativeSink = sinks.remove(sink); + if (nativeSink != null) { + nativeRemoveSink(getNativeMediaStreamTrack(), nativeSink); + nativeFreeSink(nativeSink); + } + } + + @Override + public void dispose() { + for (long nativeSink : sinks.values()) { + nativeRemoveSink(getNativeMediaStreamTrack(), nativeSink); + nativeFreeSink(nativeSink); + } + sinks.clear(); + super.dispose(); + } + /** Returns a pointer to webrtc::AudioTrackInterface. */ long getNativeAudioTrack() { return getNativeMediaStreamTrack(); } private static native void nativeSetVolume(long track, double volume); -} + private static native void nativeAddSink(long track, long nativeSink); + private static native void nativeRemoveSink(long track, long nativeSink); + private static native long nativeWrapSink(AudioTrackSink sink); + private static native void nativeFreeSink(long sink); +} \ No newline at end of file diff --git a/stream-webrtc-android/src/main/java/org/webrtc/AudioTrackSink.java b/stream-webrtc-android/src/main/java/org/webrtc/AudioTrackSink.java new file mode 100644 index 000000000..8414a47fc --- /dev/null +++ b/stream-webrtc-android/src/main/java/org/webrtc/AudioTrackSink.java @@ -0,0 +1,27 @@ +/* + * Copyright 2023 The WebRTC project authors. All Rights Reserved.
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import java.nio.ByteBuffer; + +/** + * Java version of rtc::AudioTrackSinkInterface. + */ +public interface AudioTrackSink { + /** + * Implementations should copy the audio data into a local copy if they wish + * to use the data after this function returns. + */ + @CalledByNative + void onData(ByteBuffer audioData, int bitsPerSample, int sampleRate, + int numberOfChannels, int numberOfFrames, + long absoluteCaptureTimestampMs); +} \ No newline at end of file
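`AudioTrackSink` gives remote tracks a PCM tap analogous to `VideoSink`. A minimal consumer sketch, under the assumption that the native side delivers 16-bit host-endian PCM (the class name and level math are illustrative, not part of the diff):

```java
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import org.webrtc.AudioTrackSink;

final class PeakLevelSink implements AudioTrackSink {
  private volatile double peak;

  @Override
  public void onData(ByteBuffer audioData, int bitsPerSample, int sampleRate,
      int numberOfChannels, int numberOfFrames, long absoluteCaptureTimestampMs) {
    if (bitsPerSample != 16) {
      return; // assumption: 16-bit PCM; bail out defensively otherwise
    }
    // Per the javadoc, the buffer is only valid during this callback, so
    // everything is read synchronously here.
    ByteBuffer pcm = audioData.duplicate().order(ByteOrder.nativeOrder());
    double max = 0;
    while (pcm.remaining() >= 2) {
      max = Math.max(max, Math.abs(pcm.getShort() / 32768.0));
    }
    peak = max;
  }

  double currentPeak() {
    return peak;
  }
}
```

Attaching is a one-liner (`remoteAudioTrack.addSink(new PeakLevelSink())`); as the wrapper above documents, repeated `addSink` calls are deduplicated, and the overridden `dispose()` releases any sinks still attached.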
diff --git a/stream-webrtc-android/src/main/java/org/webrtc/FrameCryptor.java b/stream-webrtc-android/src/main/java/org/webrtc/FrameCryptor.java new file mode 100644 index 000000000..f19206c4d --- /dev/null +++ b/stream-webrtc-android/src/main/java/org/webrtc/FrameCryptor.java @@ -0,0 +1,108 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; + +public class FrameCryptor { + public enum FrameCryptionState { + NEW, + OK, + ENCRYPTIONFAILED, + DECRYPTIONFAILED, + MISSINGKEY, + KEYRATCHETED, + INTERNALERROR; + + @CalledByNative("FrameCryptionState") + static FrameCryptionState fromNativeIndex(int nativeIndex) { + return values()[nativeIndex]; + } + } + + public static interface Observer { + @CalledByNative("Observer") + void onFrameCryptionStateChanged(String participantId, FrameCryptionState newState); + } + + private long nativeFrameCryptor; + private long observerPtr; + + public long getNativeFrameCryptor() { + return nativeFrameCryptor; + } + + @CalledByNative + public FrameCryptor(long nativeFrameCryptor) { + this.nativeFrameCryptor = nativeFrameCryptor; + this.observerPtr = 0; + } + + public void setEnabled(boolean enabled) { + checkFrameCryptorExists(); + nativeSetEnabled(nativeFrameCryptor, enabled); + } + + public boolean isEnabled() { + checkFrameCryptorExists(); + return nativeIsEnabled(nativeFrameCryptor); + } + + public int getKeyIndex() { + checkFrameCryptorExists(); + return nativeGetKeyIndex(nativeFrameCryptor); + } + + public void setKeyIndex(int index) { + checkFrameCryptorExists(); + nativeSetKeyIndex(nativeFrameCryptor, index); + } + + public void dispose() { + checkFrameCryptorExists(); + nativeUnSetObserver(nativeFrameCryptor); + JniCommon.nativeReleaseRef(nativeFrameCryptor); + nativeFrameCryptor = 0; + if (observerPtr != 0) { + JniCommon.nativeReleaseRef(observerPtr); + observerPtr = 0; + } + } + + public void setObserver(@Nullable Observer observer) { + checkFrameCryptorExists(); + long newPtr = nativeSetObserver(nativeFrameCryptor, observer); + if (observerPtr != 0) { + JniCommon.nativeReleaseRef(observerPtr); + observerPtr = 0; + } + observerPtr = newPtr; + } + + private void checkFrameCryptorExists() { + if (nativeFrameCryptor == 0) { + throw new IllegalStateException("FrameCryptor has been disposed."); + } + } + + private static native void nativeSetEnabled(long frameCryptorPointer, boolean enabled); + private static native boolean nativeIsEnabled(long frameCryptorPointer); + private static native void nativeSetKeyIndex(long frameCryptorPointer, int index); + private static native int nativeGetKeyIndex(long frameCryptorPointer); + private static native long nativeSetObserver(long frameCryptorPointer, Observer observer); + private static native void nativeUnSetObserver(long frameCryptorPointer); +} \ No newline at end of file diff --git a/stream-webrtc-android/src/main/java/org/webrtc/FrameCryptorAlgorithm.java b/stream-webrtc-android/src/main/java/org/webrtc/FrameCryptorAlgorithm.java new file mode 100644 index 000000000..121656cc9 --- /dev/null +++ b/stream-webrtc-android/src/main/java/org/webrtc/FrameCryptorAlgorithm.java @@ -0,0 +1,21 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.webrtc; + +public enum FrameCryptorAlgorithm { + AES_GCM, +} \ No newline at end of file
diff --git a/stream-webrtc-android/src/main/java/org/webrtc/FrameCryptorFactory.java b/stream-webrtc-android/src/main/java/org/webrtc/FrameCryptorFactory.java new file mode 100644 index 000000000..337eea8c5 --- /dev/null +++ b/stream-webrtc-android/src/main/java/org/webrtc/FrameCryptorFactory.java @@ -0,0 +1,44 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.webrtc; + +public class FrameCryptorFactory { + public static FrameCryptorKeyProvider createFrameCryptorKeyProvider( + boolean sharedKey, byte[] ratchetSalt, int ratchetWindowSize, byte[] uncryptedMagicBytes, int failureTolerance, int keyRingSize, boolean discardFrameWhenCryptorNotReady) { + return nativeCreateFrameCryptorKeyProvider(sharedKey, ratchetSalt, ratchetWindowSize, uncryptedMagicBytes, failureTolerance, keyRingSize, discardFrameWhenCryptorNotReady); + } + + public static FrameCryptor createFrameCryptorForRtpSender(PeerConnectionFactory factory, RtpSender rtpSender, + String participantId, FrameCryptorAlgorithm algorithm, FrameCryptorKeyProvider keyProvider) { + return nativeCreateFrameCryptorForRtpSender(factory.getNativeOwnedFactoryAndThreads(), rtpSender.getNativeRtpSender(), participantId, + algorithm.ordinal(), keyProvider.getNativeKeyProvider()); + } + + public static FrameCryptor createFrameCryptorForRtpReceiver(PeerConnectionFactory factory, RtpReceiver rtpReceiver, + String participantId, FrameCryptorAlgorithm algorithm, FrameCryptorKeyProvider keyProvider) { + return nativeCreateFrameCryptorForRtpReceiver(factory.getNativeOwnedFactoryAndThreads(), rtpReceiver.getNativeRtpReceiver(), participantId, + algorithm.ordinal(), keyProvider.getNativeKeyProvider()); + } + + private static native FrameCryptor nativeCreateFrameCryptorForRtpSender(long factory, + long rtpSender, String participantId, int algorithm, long nativeFrameCryptorKeyProvider); + private static native FrameCryptor nativeCreateFrameCryptorForRtpReceiver(long factory, + long rtpReceiver, String participantId, int algorithm, long nativeFrameCryptorKeyProvider); + + private static native FrameCryptorKeyProvider nativeCreateFrameCryptorKeyProvider( + boolean sharedKey, byte[] ratchetSalt, int ratchetWindowSize, byte[] uncryptedMagicBytes, int failureTolerance, int keyRingSize, boolean discardFrameWhenCryptorNotReady); +} \ No newline at end of file
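A hypothetical wiring of the factory for sender-side E2EE, shown only to make the parameter flow concrete; the salt, key bytes, participant id, and numeric arguments are placeholders chosen for illustration, not values mandated by this diff:

```java
// Sketch, not part of the diff: one shared-key provider driving a sender-side
// frame cryptor, with state changes logged via the Observer callback.
import android.util.Log;
import java.nio.charset.StandardCharsets;
import org.webrtc.FrameCryptor;
import org.webrtc.FrameCryptorAlgorithm;
import org.webrtc.FrameCryptorFactory;
import org.webrtc.FrameCryptorKeyProvider;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.RtpSender;

final class E2eeSetup {
  /** Wires a shared-key FrameCryptor onto an already-negotiated audio sender. */
  static FrameCryptor enableSenderEncryption(
      PeerConnectionFactory factory, RtpSender audioSender, byte[] keyBytes) {
    FrameCryptorKeyProvider keyProvider = FrameCryptorFactory.createFrameCryptorKeyProvider(
        /* sharedKey= */ true,
        "demo-salt".getBytes(StandardCharsets.UTF_8), // placeholder ratchet salt
        /* ratchetWindowSize= */ 16,
        /* uncryptedMagicBytes= */ new byte[0],
        /* failureTolerance= */ -1,   // placeholder values; tune per deployment
        /* keyRingSize= */ 16,
        /* discardFrameWhenCryptorNotReady= */ false);
    keyProvider.setSharedKey(/* index= */ 0, keyBytes);

    FrameCryptor cryptor = FrameCryptorFactory.createFrameCryptorForRtpSender(
        factory, audioSender, "local-participant",
        FrameCryptorAlgorithm.AES_GCM, keyProvider);
    cryptor.setObserver((participantId, state) ->
        Log.d("E2EE", participantId + " -> " + state));
    cryptor.setEnabled(true);
    return cryptor;
  }
}
```

The receiving side is symmetric via `createFrameCryptorForRtpReceiver`, typically sharing the same key provider so that `ratchetSharedKey` advances both directions together.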
diff --git a/stream-webrtc-android/src/main/java/org/webrtc/FrameCryptorKeyProvider.java b/stream-webrtc-android/src/main/java/org/webrtc/FrameCryptorKeyProvider.java new file mode 100644 index 000000000..956632611 --- /dev/null +++ b/stream-webrtc-android/src/main/java/org/webrtc/FrameCryptorKeyProvider.java @@ -0,0 +1,93 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.webrtc; + +import java.util.ArrayList; + +public class FrameCryptorKeyProvider { + private long nativeKeyProvider; + + @CalledByNative + public FrameCryptorKeyProvider(long nativeKeyProvider) { + this.nativeKeyProvider = nativeKeyProvider; + } + + public long getNativeKeyProvider() { + return nativeKeyProvider; + } + + public boolean setSharedKey(int index, byte[] key) { + checkKeyProviderExists(); + return nativeSetSharedKey(nativeKeyProvider, index, key); + } + + public byte[] ratchetSharedKey(int index) { + checkKeyProviderExists(); + return nativeRatchetSharedKey(nativeKeyProvider, index); + } + + public byte[] exportSharedKey(int index) { + checkKeyProviderExists(); + return nativeExportSharedKey(nativeKeyProvider, index); + } + + public boolean setKey(String participantId, int index, byte[] key) { + checkKeyProviderExists(); + return nativeSetKey(nativeKeyProvider, participantId, index, key); + } + + public byte[] ratchetKey(String participantId, int index) { + checkKeyProviderExists(); + return nativeRatchetKey(nativeKeyProvider, participantId, index); + } + + public byte[] exportKey(String participantId, int index) { + checkKeyProviderExists(); + return nativeExportKey(nativeKeyProvider, participantId, index); + } + + public void setSifTrailer(byte[] sifTrailer) { + checkKeyProviderExists(); + nativeSetSifTrailer(nativeKeyProvider, sifTrailer); + } + + public void dispose() { + checkKeyProviderExists(); + JniCommon.nativeReleaseRef(nativeKeyProvider); + nativeKeyProvider = 0; + } + + private void checkKeyProviderExists() { + if (nativeKeyProvider == 0) { + throw new IllegalStateException("FrameCryptorKeyProvider has been disposed."); + } + } + private static native boolean nativeSetSharedKey( + long keyProviderPointer, int index, byte[] key); + private static native byte[] nativeRatchetSharedKey( + long keyProviderPointer, int index); + private static native byte[] nativeExportSharedKey( + long keyProviderPointer, int index); + private static native boolean nativeSetKey( + long keyProviderPointer, String participantId, int index, byte[] key); + private static native byte[] nativeRatchetKey( + long keyProviderPointer, String participantId, int index); + private static native byte[] nativeExportKey( + long keyProviderPointer, String participantId, int index); + private static native void nativeSetSifTrailer( + long keyProviderPointer, byte[] sifTrailer); +} \ No newline at end of file diff --git a/stream-webrtc-android/src/main/java/org/webrtc/H264Utils.java b/stream-webrtc-android/src/main/java/org/webrtc/H264Utils.java index abb79c658..33fb43a8c 100644 --- a/stream-webrtc-android/src/main/java/org/webrtc/H264Utils.java +++ b/stream-webrtc-android/src/main/java/org/webrtc/H264Utils.java @@ -12,6 +12,7 @@ import java.util.Map; import java.util.HashMap; +import java.util.ArrayList; /** Container for static helper functions related to dealing with H264 codecs. */ class H264Utils { @@ -23,30 +24,30 @@ class H264Utils { public static final String H264_PROFILE_CONSTRAINED_HIGH = "640c"; public static final String H264_LEVEL_3_1 = "1f"; // 31 in hex.
public static final String H264_CONSTRAINED_HIGH_3_1 = - H264_PROFILE_CONSTRAINED_HIGH + H264_LEVEL_3_1; + H264_PROFILE_CONSTRAINED_HIGH + H264_LEVEL_3_1; public static final String H264_CONSTRAINED_BASELINE_3_1 = - H264_PROFILE_CONSTRAINED_BASELINE + H264_LEVEL_3_1; + H264_PROFILE_CONSTRAINED_BASELINE + H264_LEVEL_3_1; public static Map<String, String> getDefaultH264Params(boolean isHighProfile) { final Map<String, String> params = new HashMap<>(); params.put(VideoCodecInfo.H264_FMTP_LEVEL_ASYMMETRY_ALLOWED, "1"); params.put(VideoCodecInfo.H264_FMTP_PACKETIZATION_MODE, "1"); params.put(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID, - isHighProfile ? VideoCodecInfo.H264_CONSTRAINED_HIGH_3_1 - : VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1); + isHighProfile ? VideoCodecInfo.H264_CONSTRAINED_HIGH_3_1 + : VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1); return params; } public static VideoCodecInfo DEFAULT_H264_BASELINE_PROFILE_CODEC = - new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ false)); + new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ false), new ArrayList<>()); public static VideoCodecInfo DEFAULT_H264_HIGH_PROFILE_CODEC = - new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ true)); + new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ true), new ArrayList<>()); public static boolean isSameH264Profile( - Map<String, String> params1, Map<String, String> params2) { + Map<String, String> params1, Map<String, String> params2) { return nativeIsSameH264Profile(params1, params2); } private static native boolean nativeIsSameH264Profile( - Map<String, String> params1, Map<String, String> params2); -} + Map<String, String> params1, Map<String, String> params2); +} \ No newline at end of file diff --git a/stream-webrtc-android/src/main/java/org/webrtc/HardwareVideoEncoder.java b/stream-webrtc-android/src/main/java/org/webrtc/HardwareVideoEncoder.java index 6d0edcf97..db3e2b688 100644 --- a/stream-webrtc-android/src/main/java/org/webrtc/HardwareVideoEncoder.java +++ b/stream-webrtc-android/src/main/java/org/webrtc/HardwareVideoEncoder.java @@ -185,9 +185,9 @@ public void waitForZero() { * @throws IllegalArgumentException if colorFormat is unsupported */ public HardwareVideoEncoder(MediaCodecWrapperFactory mediaCodecWrapperFactory, String codecName, - VideoCodecMimeType codecType, Integer surfaceColorFormat, Integer yuvColorFormat, - Map<String, String> params, int keyFrameIntervalSec, int forceKeyFrameIntervalMs, - BitrateAdjuster bitrateAdjuster, EglBase14.Context sharedContext) { + VideoCodecMimeType codecType, Integer surfaceColorFormat, Integer yuvColorFormat, + Map<String, String> params, int keyFrameIntervalSec, int forceKeyFrameIntervalMs, + BitrateAdjuster bitrateAdjuster, EglBase14.Context sharedContext) { this.mediaCodecWrapperFactory = mediaCodecWrapperFactory; this.codecName = codecName; this.codecType = codecType; @@ -211,7 +211,7 @@ public VideoCodecStatus initEncode(Settings settings, Callback callback) { automaticResizeOn = settings.automaticResizeOn; if (settings.width % REQUIRED_RESOLUTION_ALIGNMENT != 0 - || settings.height % REQUIRED_RESOLUTION_ALIGNMENT != 0) { + || settings.height % REQUIRED_RESOLUTION_ALIGNMENT != 0) { Logging.e(TAG, "MediaCodec requires 2x2 alignment."); return VideoCodecStatus.ERR_SIZE; } @@ -225,9 +225,9 @@ public VideoCodecStatus initEncode(Settings settings, Callback callback) { adjustedBitrate = bitrateAdjuster.getAdjustedBitrateBps(); Logging.d(TAG, - "initEncode name: " + codecName + " type: " + codecType + " width: " + width - + " height: " + height + " framerate_fps: " + settings.maxFramerate - + " bitrate_kbps: " + settings.startBitrate + " surface mode: " +
useSurfaceMode); + "initEncode name: " + codecName + " type: " + codecType + " width: " + width + + " height: " + height + " framerate_fps: " + settings.maxFramerate + + " bitrate_kbps: " + settings.startBitrate + " surface mode: " + useSurfaceMode); return initEncodeInternal(); } @@ -253,7 +253,7 @@ private VideoCodecStatus initEncodeInternal() { format.setInteger(MediaFormat.KEY_BITRATE_MODE, BITRATE_MODE_CBR); format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat); format.setFloat( - MediaFormat.KEY_FRAME_RATE, (float) bitrateAdjuster.getAdjustedFramerateFps()); + MediaFormat.KEY_FRAME_RATE, (float) bitrateAdjuster.getAdjustedFramerateFps()); format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec); if (codecType == VideoCodecMimeType.H264) { String profileLevelId = params.get(VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID); @@ -279,13 +279,13 @@ private VideoCodecStatus initEncodeInternal() { if (isEncodingStatisticsSupported()) { format.setInteger(MediaFormat.KEY_VIDEO_ENCODING_STATISTICS_LEVEL, - MediaFormat.VIDEO_ENCODING_STATISTICS_LEVEL_1); + MediaFormat.VIDEO_ENCODING_STATISTICS_LEVEL_1); isEncodingStatisticsEnabled = true; } Logging.d(TAG, "Format: " + format); codec.configure( - format, null /* surface */, null /* crypto */, MediaCodec.CONFIGURE_FLAG_ENCODE); + format, null /* surface */, null /* crypto */, MediaCodec.CONFIGURE_FLAG_ENCODE); if (useSurfaceMode) { textureEglBase = EglBase.createEgl14(sharedContext, EglBase.CONFIG_RECORDABLE); @@ -297,7 +297,7 @@ private VideoCodecStatus initEncodeInternal() { updateInputFormat(codec.getInputFormat()); codec.start(); - } catch (IllegalStateException e) { + } catch (IllegalArgumentException | IllegalStateException e) { Logging.e(TAG, "initEncodeInternal failed", e); release(); return VideoCodecStatus.FALLBACK_SOFTWARE; @@ -392,16 +392,16 @@ public VideoCodecStatus encode(VideoFrame videoFrame, EncodeInfo encodeInfo) { } EncodedImage.Builder builder = EncodedImage.builder() - .setCaptureTimeNs(videoFrame.getTimestampNs()) - .setEncodedWidth(videoFrame.getBuffer().getWidth()) - .setEncodedHeight(videoFrame.getBuffer().getHeight()) - .setRotation(videoFrame.getRotation()); + .setCaptureTimeNs(videoFrame.getTimestampNs()) + .setEncodedWidth(videoFrame.getBuffer().getWidth()) + .setEncodedHeight(videoFrame.getBuffer().getHeight()) + .setRotation(videoFrame.getRotation()); outputBuilders.offer(builder); long presentationTimestampUs = nextPresentationTimestampUs; // Round frame duration down to avoid bitrate overshoot. long frameDurationUs = - (long) (TimeUnit.SECONDS.toMicros(1) / bitrateAdjuster.getAdjustedFramerateFps()); + (long) (TimeUnit.SECONDS.toMicros(1) / bitrateAdjuster.getAdjustedFramerateFps()); nextPresentationTimestampUs += frameDurationUs; final VideoCodecStatus returnValue; @@ -421,7 +421,7 @@ public VideoCodecStatus encode(VideoFrame videoFrame, EncodeInfo encodeInfo) { } private VideoCodecStatus encodeTextureBuffer( - VideoFrame videoFrame, long presentationTimestampUs) { + VideoFrame videoFrame, long presentationTimestampUs) { encodeThreadChecker.checkIsOnValidThread(); try { // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway, @@ -429,7 +429,7 @@ private VideoCodecStatus encodeTextureBuffer( GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); // It is not necessary to release this frame because it doesn't own the buffer. 
VideoFrame derotatedFrame = - new VideoFrame(videoFrame.getBuffer(), 0 /* rotation */, videoFrame.getTimestampNs()); + new VideoFrame(videoFrame.getBuffer(), 0 /* rotation */, videoFrame.getTimestampNs()); videoFrameDrawer.drawFrame(derotatedFrame, textureDrawer, null /* additionalRenderMatrix */); textureEglBase.swapBuffers(TimeUnit.MICROSECONDS.toNanos(presentationTimestampUs)); } catch (RuntimeException e) { @@ -466,8 +466,8 @@ private VideoCodecStatus encodeByteBuffer(VideoFrame videoFrame, long presentati if (buffer.capacity() < frameSizeBytes) { Logging.e(TAG, - "Input buffer size: " + buffer.capacity() - + " is smaller than frame size: " + frameSizeBytes); + "Input buffer size: " + buffer.capacity() + + " is smaller than frame size: " + frameSizeBytes); return VideoCodecStatus.ERROR; } @@ -475,7 +475,7 @@ private VideoCodecStatus encodeByteBuffer(VideoFrame videoFrame, long presentati try { codec.queueInputBuffer( - index, 0 /* offset */, frameSizeBytes, presentationTimestampUs, 0 /* flags */); + index, 0 /* offset */, frameSizeBytes, presentationTimestampUs, 0 /* flags */); } catch (IllegalStateException e) { Logging.e(TAG, "queueInputBuffer failed", e); // IllegalStateException thrown when the codec is in the wrong state. @@ -528,8 +528,8 @@ public EncoderInfo getEncoderInfo() { // value to be 16. Additionally, this encoder produces a single stream. So it should not require // alignment for all layers. return new EncoderInfo( - /* requestedResolutionAlignment= */ REQUIRED_RESOLUTION_ALIGNMENT, - /* applyAlignmentToAllSimulcastLayers= */ false); + /* requestedResolutionAlignment= */ REQUIRED_RESOLUTION_ALIGNMENT, + /* applyAlignmentToAllSimulcastLayers= */ false); } private VideoCodecStatus resetCodec(int newWidth, int newHeight, boolean newUseSurfaceMode) { @@ -540,7 +540,7 @@ private VideoCodecStatus resetCodec(int newWidth, int newHeight, boolean newUseS } if (newWidth % REQUIRED_RESOLUTION_ALIGNMENT != 0 - || newHeight % REQUIRED_RESOLUTION_ALIGNMENT != 0) { + || newHeight % REQUIRED_RESOLUTION_ALIGNMENT != 0) { Logging.e(TAG, "MediaCodec requires 2x2 alignment."); return VideoCodecStatus.ERR_SIZE; } @@ -604,7 +604,7 @@ protected void deliverEncodedImage() { if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size); if (info.size > 0 - && (codecType == VideoCodecMimeType.H264 || codecType == VideoCodecMimeType.H265)) { + && (codecType == VideoCodecMimeType.H264 || codecType == VideoCodecMimeType.H265)) { // In case of H264 and H265 config buffer contains SPS and PPS headers. Presence of these // headers makes IDR frame a truly keyframe. Some encoders issue IDR frames without SPS // and PPS. We save config buffer here to prepend it to all IDR frames encoder delivers. @@ -637,8 +637,8 @@ protected void deliverEncodedImage() { final Runnable releaseCallback; if (isKeyFrame && configBuffer != null) { Logging.d(TAG, - "Prepending config buffer of size " + configBuffer.capacity() - + " to output buffer with offset " + info.offset + ", size " + info.size); + "Prepending config buffer of size " + configBuffer.capacity() + + " to output buffer with offset " + info.offset + ", size " + info.size); frameBuffer = ByteBuffer.allocateDirect(info.size + configBuffer.capacity()); configBuffer.rewind(); frameBuffer.put(configBuffer); @@ -663,7 +663,7 @@ protected void deliverEncodedImage() { } final EncodedImage.FrameType frameType = isKeyFrame ? 
EncodedImage.FrameType.VideoFrameKey - : EncodedImage.FrameType.VideoFrameDelta; + : EncodedImage.FrameType.VideoFrameDelta; EncodedImage.Builder builder = outputBuilders.poll(); builder.setBuffer(frameBuffer, releaseCallback); @@ -746,9 +746,9 @@ private void updateInputFormat(MediaFormat format) { } Logging.d(TAG, - "updateInputFormat format: " + format + " stride: " + stride - + " sliceHeight: " + sliceHeight + " isSemiPlanar: " + isSemiPlanar - + " frameSizeBytes: " + frameSizeBytes); + "updateInputFormat format: " + format + " stride: " + stride + + " sliceHeight: " + sliceHeight + " isSemiPlanar: " + isSemiPlanar + + " frameSizeBytes: " + frameSizeBytes); } protected boolean isEncodingStatisticsSupported() { @@ -779,12 +779,12 @@ protected void fillInputBuffer(ByteBuffer buffer, VideoFrame.Buffer frame) { VideoFrame.I420Buffer i420 = frame.toI420(); if (isSemiPlanar) { YuvHelper.I420ToNV12(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(), - i420.getDataV(), i420.getStrideV(), buffer, i420.getWidth(), i420.getHeight(), stride, - sliceHeight); + i420.getDataV(), i420.getStrideV(), buffer, i420.getWidth(), i420.getHeight(), stride, + sliceHeight); } else { YuvHelper.I420Copy(i420.getDataY(), i420.getStrideY(), i420.getDataU(), i420.getStrideU(), - i420.getDataV(), i420.getStrideV(), buffer, i420.getWidth(), i420.getHeight(), stride, - sliceHeight); + i420.getDataV(), i420.getStrideV(), buffer, i420.getWidth(), i420.getHeight(), stride, + sliceHeight); } i420.release(); } @@ -801,4 +801,4 @@ protected boolean isSemiPlanar(int colorFormat) { throw new IllegalArgumentException("Unsupported colorFormat: " + colorFormat); } } -} +} \ No newline at end of file diff --git a/stream-webrtc-android/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java b/stream-webrtc-android/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java index d43fc27fa..c46dd9c9c 100644 --- a/stream-webrtc-android/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java +++ b/stream-webrtc-android/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java @@ -41,7 +41,7 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory { // HW H.264 encoder on below devices has poor bitrate control - actual // bitrates deviates a lot from the target value. private static final List<String> H264_HW_EXCEPTION_MODELS = - Arrays.asList("SAMSUNG-SGH-I337", "Nexus 7", "Nexus 4"); + Arrays.asList("SAMSUNG-SGH-I337", "Nexus 7", "Nexus 4"); @Nullable private final EglBase14.Context sharedContext; private final boolean enableIntelVp8Encoder; @@ -57,9 +57,9 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory { * @param enableH264HighProfile true if H264 High Profile enabled. */ public HardwareVideoEncoderFactory( - EglBase.Context sharedContext, boolean enableIntelVp8Encoder, boolean enableH264HighProfile) { + EglBase.Context sharedContext, boolean enableIntelVp8Encoder, boolean enableH264HighProfile) { this(sharedContext, enableIntelVp8Encoder, enableH264HighProfile, - /* codecAllowedPredicate= */ null); + /* codecAllowedPredicate= */ null); } /** @@ -73,7 +73,7 @@ public HardwareVideoEncoderFactory( * when predicate is not provided. */ public HardwareVideoEncoderFactory(EglBase.Context sharedContext, boolean enableIntelVp8Encoder, - boolean enableH264HighProfile, @Nullable Predicate<MediaCodecInfo> codecAllowedPredicate) { + boolean enableH264HighProfile, @Nullable Predicate<MediaCodecInfo> codecAllowedPredicate) { // Texture mode requires EglBase14.
if (sharedContext instanceof EglBase14.Context) { this.sharedContext = (EglBase14.Context) sharedContext; @@ -104,15 +104,15 @@ public VideoEncoder createEncoder(VideoCodecInfo input) { String codecName = info.getName(); String mime = type.mimeType(); Integer surfaceColorFormat = MediaCodecUtils.selectColorFormat( - MediaCodecUtils.TEXTURE_COLOR_FORMATS, info.getCapabilitiesForType(mime)); + MediaCodecUtils.TEXTURE_COLOR_FORMATS, info.getCapabilitiesForType(mime)); Integer yuvColorFormat = MediaCodecUtils.selectColorFormat( - MediaCodecUtils.ENCODER_COLOR_FORMATS, info.getCapabilitiesForType(mime)); + MediaCodecUtils.ENCODER_COLOR_FORMATS, info.getCapabilitiesForType(mime)); if (type == VideoCodecMimeType.H264) { boolean isHighProfile = H264Utils.isSameH264Profile( - input.params, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true)); + input.params, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true)); boolean isBaselineProfile = H264Utils.isSameH264Profile( - input.params, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false)); + input.params, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false)); if (!isHighProfile && !isBaselineProfile) { return null; @@ -123,9 +123,9 @@ public VideoEncoder createEncoder(VideoCodecInfo input) { } return new HardwareVideoEncoder(new MediaCodecWrapperFactoryImpl(), codecName, type, - surfaceColorFormat, yuvColorFormat, input.params, PERIODIC_KEY_FRAME_INTERVAL_S, - getForcedKeyFrameIntervalMs(type, codecName), createBitrateAdjuster(type, codecName), - sharedContext); + surfaceColorFormat, yuvColorFormat, input.params, PERIODIC_KEY_FRAME_INTERVAL_S, + getForcedKeyFrameIntervalMs(type, codecName), createBitrateAdjuster(type, codecName), + sharedContext); } @Override @@ -134,8 +134,8 @@ public VideoCodecInfo[] getSupportedCodecs() { // Generate a list of supported codecs in order of preference: // VP8, VP9, H264 (high profile), H264 (baseline profile), AV1 and H265. for (VideoCodecMimeType type : - new VideoCodecMimeType[] {VideoCodecMimeType.VP8, VideoCodecMimeType.VP9, - VideoCodecMimeType.H264, VideoCodecMimeType.AV1, VideoCodecMimeType.H265}) { + new VideoCodecMimeType[] {VideoCodecMimeType.VP8, VideoCodecMimeType.VP9, + VideoCodecMimeType.H264, VideoCodecMimeType.AV1, VideoCodecMimeType.H265}) { MediaCodecInfo codec = findCodecForType(type); if (codec != null) { String name = type.name(); @@ -143,11 +143,11 @@ public VideoCodecInfo[] getSupportedCodecs() { // supported by the decoder. if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) { supportedCodecInfos.add(new VideoCodecInfo( - name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true))); + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true), new ArrayList<>())); } supportedCodecInfos.add(new VideoCodecInfo( - name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false))); + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false), new ArrayList<>())); } } @@ -181,8 +181,8 @@ private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecMimeType type) { } // Check for a supported color format. 
if (MediaCodecUtils.selectColorFormat( - MediaCodecUtils.ENCODER_COLOR_FORMATS, info.getCapabilitiesForType(type.mimeType())) - == null) { + MediaCodecUtils.ENCODER_COLOR_FORMATS, info.getCapabilitiesForType(type.mimeType())) + == null) { return false; } return isHardwareSupportedInCurrentSdk(info, type) && isMediaCodecAllowed(info); @@ -213,17 +213,17 @@ private boolean isHardwareSupportedInCurrentSdkVp8(MediaCodecInfo info) { String name = info.getName(); // QCOM Vp8 encoder is always supported. return name.startsWith(QCOM_PREFIX) - // Exynos VP8 encoder is supported in M or later. - || (name.startsWith(EXYNOS_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) - // Intel Vp8 encoder is always supported, with the intel encoder enabled. - || (name.startsWith(INTEL_PREFIX) && enableIntelVp8Encoder); + // Exynos VP8 encoder is supported in M or later. + || (name.startsWith(EXYNOS_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) + // Intel Vp8 encoder is always supported, with the intel encoder enabled. + || (name.startsWith(INTEL_PREFIX) && enableIntelVp8Encoder); } private boolean isHardwareSupportedInCurrentSdkVp9(MediaCodecInfo info) { String name = info.getName(); return (name.startsWith(QCOM_PREFIX) || name.startsWith(EXYNOS_PREFIX)) - // Both QCOM and Exynos VP9 encoders are supported in N or later. - && Build.VERSION.SDK_INT >= Build.VERSION_CODES.N; + // Both QCOM and Exynos VP9 encoders are supported in N or later. + && Build.VERSION.SDK_INT >= Build.VERSION_CODES.N; } private boolean isHardwareSupportedInCurrentSdkH264(MediaCodecInfo info) { @@ -273,6 +273,6 @@ private BitrateAdjuster createBitrateAdjuster(VideoCodecMimeType type, String co private boolean isH264HighProfileSupported(MediaCodecInfo info) { return enableH264HighProfile && Build.VERSION.SDK_INT > Build.VERSION_CODES.M - && info.getName().startsWith(EXYNOS_PREFIX); + && info.getName().startsWith(EXYNOS_PREFIX); } -} +} \ No newline at end of file diff --git a/stream-webrtc-android/src/main/java/org/webrtc/HardwareVideoEncoderWrapper.java b/stream-webrtc-android/src/main/java/org/webrtc/HardwareVideoEncoderWrapper.java index 6ec884a1e..04c3f965e 100644 --- a/stream-webrtc-android/src/main/java/org/webrtc/HardwareVideoEncoderWrapper.java +++ b/stream-webrtc-android/src/main/java/org/webrtc/HardwareVideoEncoderWrapper.java @@ -191,8 +191,8 @@ public String getImplementationName() { } @Override - public long createNativeVideoEncoder() { - return internalEncoder.createNativeVideoEncoder(); + public long createNative(long webrtcEnvRef) { + return internalEncoder.createNative(webrtcEnvRef); } @Override @@ -214,4 +214,4 @@ public VideoEncoder.ResolutionBitrateLimits[] getResolutionBitrateLimits() { public VideoEncoder.EncoderInfo getEncoderInfo() { return internalEncoder.getEncoderInfo(); } -} +} \ No newline at end of file diff --git a/stream-webrtc-android/src/main/java/org/webrtc/LibaomAv1Encoder.java b/stream-webrtc-android/src/main/java/org/webrtc/LibaomAv1Encoder.java index 569a719f4..285a4da18 100644 --- a/stream-webrtc-android/src/main/java/org/webrtc/LibaomAv1Encoder.java +++ b/stream-webrtc-android/src/main/java/org/webrtc/LibaomAv1Encoder.java @@ -9,17 +9,24 @@ */ package org.webrtc; +import java.util.List; public class LibaomAv1Encoder extends WrappedNativeVideoEncoder { @Override - public long createNativeVideoEncoder() { - return nativeCreateEncoder(); + public long createNative(long webrtcEnvRef) { + return nativeCreate(webrtcEnvRef); } - static native long nativeCreateEncoder(); + static 
native long nativeCreate(long webrtcEnvRef); @Override public boolean isHardwareEncoder() { return false; } -} + + static List<String> scalabilityModes() { + return nativeGetSupportedScalabilityModes(); + } + + static native List<String> nativeGetSupportedScalabilityModes(); +} \ No newline at end of file diff --git a/stream-webrtc-android/src/main/java/org/webrtc/LibvpxVp8Encoder.java b/stream-webrtc-android/src/main/java/org/webrtc/LibvpxVp8Encoder.java index 4be9e52c1..2a671ea44 100644 --- a/stream-webrtc-android/src/main/java/org/webrtc/LibvpxVp8Encoder.java +++ b/stream-webrtc-android/src/main/java/org/webrtc/LibvpxVp8Encoder.java @@ -12,14 +12,14 @@ public class LibvpxVp8Encoder extends WrappedNativeVideoEncoder { @Override - public long createNativeVideoEncoder() { - return nativeCreateEncoder(); + public long createNative(long webrtcEnvRef) { + return nativeCreate(webrtcEnvRef); } - static native long nativeCreateEncoder(); + static native long nativeCreate(long webrtcEnvRef); @Override public boolean isHardwareEncoder() { return false; } -} +} \ No newline at end of file diff --git a/stream-webrtc-android/src/main/java/org/webrtc/LibvpxVp9Encoder.java b/stream-webrtc-android/src/main/java/org/webrtc/LibvpxVp9Encoder.java index 1211ae93f..b0b3932a8 100644 --- a/stream-webrtc-android/src/main/java/org/webrtc/LibvpxVp9Encoder.java +++ b/stream-webrtc-android/src/main/java/org/webrtc/LibvpxVp9Encoder.java @@ -9,14 +9,15 @@ */ package org.webrtc; +import java.util.List; public class LibvpxVp9Encoder extends WrappedNativeVideoEncoder { @Override - public long createNativeVideoEncoder() { - return nativeCreateEncoder(); + public long createNative(long webrtcEnvRef) { + return nativeCreate(webrtcEnvRef); } - static native long nativeCreateEncoder(); + static native long nativeCreate(long webrtcEnvRef); @Override public boolean isHardwareEncoder() { @@ -24,4 +25,10 @@ public boolean isHardwareEncoder() { } static native boolean nativeIsSupported(); -} + + static List<String> scalabilityModes() { + return nativeGetSupportedScalabilityModes(); + } + + static native List<String> nativeGetSupportedScalabilityModes(); +} \ No newline at end of file
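These `scalabilityModes()` accessors feed the reworked `VideoCodecInfo`, whose `scalabilityModes` field changed from `int[]` to a `List` per the API dump above. A sketch of constructing codec info with explicit SVC modes; treating the elements as `String` names such as "L1T2" is an assumption inferred from the erased `Ljava/util/List;` descriptor, not something the diff states:

```java
// Illustrative only: VideoCodecInfo now takes scalability modes as a List
// (assumed List<String> of standard names like "L1T2") instead of an int[].
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import org.webrtc.VideoCodecInfo;

final class Codecs {
  static VideoCodecInfo vp9WithTemporalLayers() {
    Map<String, String> params = new HashMap<>(); // VP9 takes no fmtp params here
    return new VideoCodecInfo("VP9", params, Arrays.asList("L1T1", "L1T2", "L1T3"));
  }
}
```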
diff --git a/stream-webrtc-android/src/main/java/org/webrtc/MediaCodecUtils.java b/stream-webrtc-android/src/main/java/org/webrtc/MediaCodecUtils.java index 5417fec4d..b3b204034 100644 --- a/stream-webrtc-android/src/main/java/org/webrtc/MediaCodecUtils.java +++ b/stream-webrtc-android/src/main/java/org/webrtc/MediaCodecUtils.java @@ -27,10 +27,11 @@ class MediaCodecUtils { // Prefixes for supported hardware encoder/decoder component names. static final String EXYNOS_PREFIX = "OMX.Exynos."; static final String INTEL_PREFIX = "OMX.Intel."; + static final String MARVELL_PREFIX = "OMX.Marvell."; static final String NVIDIA_PREFIX = "OMX.Nvidia."; static final String QCOM_PREFIX = "OMX.qcom."; static final String[] SOFTWARE_IMPLEMENTATION_PREFIXES = { - "OMX.google.", "OMX.SEC.", "c2.android"}; + "OMX.google.", "OMX.SEC.", "c2.android"}; // NV12 color format supported by QCOM codec, but not declared in MediaCodec - // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h @@ -41,26 +42,27 @@ class MediaCodecUtils { // Color formats supported by hardware decoder - in order of preference. static final int[] DECODER_COLOR_FORMATS = new int[] {CodecCapabilities.COLOR_FormatYUV420Planar, - CodecCapabilities.COLOR_FormatYUV420SemiPlanar, - CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar, - MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka, - MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka, - MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka, - MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m}; + CodecCapabilities.COLOR_FormatYUV420SemiPlanar, + CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar, + MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar32m4ka, + MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar16m4ka, + MediaCodecUtils.COLOR_QCOM_FORMATYVU420PackedSemiPlanar64x32Tile2m8ka, + MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m, + MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible}; // Color formats supported by hardware encoder - in order of preference. static final int[] ENCODER_COLOR_FORMATS = { - MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar, - MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar, - MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar, - MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m}; + MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar, + MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar, + MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar, + MediaCodecUtils.COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m}; // Color formats supported by texture mode encoding - in order of preference. static final int[] TEXTURE_COLOR_FORMATS = - new int[] {MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface}; + new int[] {MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface}; static @Nullable Integer selectColorFormat( - int[] supportedColorFormats, CodecCapabilities capabilities) { + int[] supportedColorFormats, CodecCapabilities capabilities) { for (int supportedColorFormat : supportedColorFormats) { for (int codecColorFormat : capabilities.colorFormats) { if (codecColorFormat == supportedColorFormat) { @@ -127,4 +129,4 @@ private static boolean isSoftwareOnlyQOrHigher(android.media.MediaCodecInfo code private MediaCodecUtils() { // This class should not be instantiated. } -} +} \ No newline at end of file diff --git a/stream-webrtc-android/src/main/java/org/webrtc/MediaCodecVideoDecoderFactory.java b/stream-webrtc-android/src/main/java/org/webrtc/MediaCodecVideoDecoderFactory.java index 9a73bc49f..54e354593 100644 --- a/stream-webrtc-android/src/main/java/org/webrtc/MediaCodecVideoDecoderFactory.java +++ b/stream-webrtc-android/src/main/java/org/webrtc/MediaCodecVideoDecoderFactory.java @@ -11,6 +11,7 @@ package org.webrtc; import static org.webrtc.MediaCodecUtils.EXYNOS_PREFIX; +import static org.webrtc.MediaCodecUtils.MARVELL_PREFIX; import static org.webrtc.MediaCodecUtils.QCOM_PREFIX; import android.media.MediaCodecInfo; @@ -38,7 +39,7 @@ class MediaCodecVideoDecoderFactory implements VideoDecoderFactory { * allowed when predicate is not provided.
*/ public MediaCodecVideoDecoderFactory(@Nullable EglBase.Context sharedContext, - @Nullable Predicate<MediaCodecInfo> codecAllowedPredicate) { + @Nullable Predicate<MediaCodecInfo> codecAllowedPredicate) { this.sharedContext = sharedContext; this.codecAllowedPredicate = codecAllowedPredicate; } @@ -55,8 +56,8 @@ public VideoDecoder createDecoder(VideoCodecInfo codecType) { CodecCapabilities capabilities = info.getCapabilitiesForType(type.mimeType()); return new AndroidVideoDecoder(new MediaCodecWrapperFactoryImpl(), info.getName(), type, - MediaCodecUtils.selectColorFormat(MediaCodecUtils.DECODER_COLOR_FORMATS, capabilities), - sharedContext); + MediaCodecUtils.selectColorFormat(MediaCodecUtils.DECODER_COLOR_FORMATS, capabilities), + sharedContext); } @Override @@ -65,18 +66,18 @@ public VideoCodecInfo[] getSupportedCodecs() { // Generate a list of supported codecs in order of preference: // VP8, VP9, H264 (high profile), H264 (baseline profile), AV1 and H265. for (VideoCodecMimeType type : - new VideoCodecMimeType[] {VideoCodecMimeType.VP8, VideoCodecMimeType.VP9, - VideoCodecMimeType.H264, VideoCodecMimeType.AV1, VideoCodecMimeType.H265}) { + new VideoCodecMimeType[] {VideoCodecMimeType.VP8, VideoCodecMimeType.VP9, + VideoCodecMimeType.H264, VideoCodecMimeType.AV1, VideoCodecMimeType.H265}) { MediaCodecInfo codec = findCodecForType(type); if (codec != null) { String name = type.name(); if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) { supportedCodecInfos.add(new VideoCodecInfo( - name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true))); + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true), new ArrayList<>())); } supportedCodecInfos.add(new VideoCodecInfo( - name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false))); + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false), new ArrayList<>())); } } @@ -111,8 +112,8 @@ private boolean isSupportedCodec(MediaCodecInfo info, VideoCodecMimeType type) { } // Check for a supported color format. if (MediaCodecUtils.selectColorFormat( - MediaCodecUtils.DECODER_COLOR_FORMATS, info.getCapabilitiesForType(type.mimeType())) - == null) { + MediaCodecUtils.DECODER_COLOR_FORMATS, info.getCapabilitiesForType(type.mimeType())) + == null) { return false; } return isCodecAllowed(info); @@ -127,8 +128,8 @@ private boolean isCodecAllowed(MediaCodecInfo info) { private boolean isH264HighProfileSupported(MediaCodecInfo info) { String name = info.getName(); - // Support H.264 HP decoding on QCOM chips. - if (name.startsWith(QCOM_PREFIX)) { + // Support H.264 HP decoding on QCOM and Marvell chips. + if (name.startsWith(QCOM_PREFIX) || name.startsWith(MARVELL_PREFIX)) { return true; } // Support H.264 HP decoding on Exynos chips for Android M and above.
@@ -137,4 +138,4 @@ private boolean isH264HighProfileSupported(MediaCodecInfo info) { } return false; } -} +} \ No newline at end of file diff --git a/stream-webrtc-android/src/main/java/org/webrtc/MediaStreamTrack.java b/stream-webrtc-android/src/main/java/org/webrtc/MediaStreamTrack.java index 2e4c3e18f..aaf300644 100644 --- a/stream-webrtc-android/src/main/java/org/webrtc/MediaStreamTrack.java +++ b/stream-webrtc-android/src/main/java/org/webrtc/MediaStreamTrack.java @@ -121,9 +121,13 @@ private void checkMediaStreamTrackExists() { } } + public boolean isDisposed() { + return nativeTrack == 0; + } + private static native String nativeGetId(long track); private static native String nativeGetKind(long track); private static native boolean nativeGetEnabled(long track); private static native boolean nativeSetEnabled(long track, boolean enabled); private static native State nativeGetState(long track); -} +} \ No newline at end of file
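With `isDisposed()` public, callers can guard against use-after-dispose races instead of catching the `IllegalStateException` that `checkMediaStreamTrackExists()` throws. An illustrative helper, not taken from the diff:

```java
import org.webrtc.MediaStreamTrack;

final class Tracks {
  /** Enables or disables a track unless it already raced with dispose(). */
  static boolean setEnabledSafely(MediaStreamTrack track, boolean enabled) {
    if (track == null || track.isDisposed()) {
      return false; // setEnabled() would throw IllegalStateException here
    }
    return track.setEnabled(enabled);
  }
}
```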

     synchronized (nativeNetworkObservers) {
       nativeNetworkObservers.add(nativeObserver);
@@ -189,7 +189,7 @@ private NetworkChangeDetector.ConnectionType getCurrentConnectionType() {
   }

   private NetworkChangeDetector createNetworkChangeDetector(
-      Context appContext, String fieldTrialsString) {
+    Context appContext, String fieldTrialsString) {
     return networkChangeDetectorFactory.create(new NetworkChangeDetector.Observer() {
       @Override
       public void onConnectionTypeChanged(NetworkChangeDetector.ConnectionType newConnectionType) {
@@ -208,7 +208,7 @@ public void onNetworkDisconnect(long networkHandle) {

       @Override
       public void onNetworkPreference(
-          List<NetworkChangeDetector.ConnectionType> types, int preference) {
+        List<NetworkChangeDetector.ConnectionType> types, int preference) {
         notifyObserversOfNetworkPreference(types, preference);
       }
@@ -226,11 +226,14 @@ private void updateCurrentConnectionType(NetworkChangeDetector.ConnectionType ne
   /** Alerts all observers of a connection change. */
   private void notifyObserversOfConnectionTypeChange(
-      NetworkChangeDetector.ConnectionType newConnectionType) {
-    List<Long> nativeObservers = getNativeNetworkObserversSync();
-    for (Long nativeObserver : nativeObservers) {
-      nativeNotifyConnectionTypeChanged(nativeObserver);
+    NetworkChangeDetector.ConnectionType newConnectionType) {
+
+    synchronized (nativeNetworkObservers) {
+      for (Long nativeObserver : nativeNetworkObservers) {
+        nativeNotifyConnectionTypeChanged(nativeObserver);
+      }
     }
+
     // This avoids calling external methods while locking on an object.
     List<NetworkObserver> javaObservers;
     synchronized (networkObservers) {
@@ -242,26 +245,29 @@ private void notifyObserversOfConnectionTypeChange(
   }

   private void notifyObserversOfNetworkConnect(
-      NetworkChangeDetector.NetworkInformation networkInfo) {
-    List<Long> nativeObservers = getNativeNetworkObserversSync();
-    for (Long nativeObserver : nativeObservers) {
-      nativeNotifyOfNetworkConnect(nativeObserver, networkInfo);
+    NetworkChangeDetector.NetworkInformation networkInfo) {
+    synchronized (nativeNetworkObservers) {
+      for (Long nativeObserver : nativeNetworkObservers) {
+        nativeNotifyOfNetworkConnect(nativeObserver, networkInfo);
+      }
     }
   }

   private void notifyObserversOfNetworkDisconnect(long networkHandle) {
-    List<Long> nativeObservers = getNativeNetworkObserversSync();
-    for (Long nativeObserver : nativeObservers) {
-      nativeNotifyOfNetworkDisconnect(nativeObserver, networkHandle);
+    synchronized (nativeNetworkObservers) {
+      for (Long nativeObserver : nativeNetworkObservers) {
+        nativeNotifyOfNetworkDisconnect(nativeObserver, networkHandle);
+      }
     }
   }

   private void notifyObserversOfNetworkPreference(
-      List<NetworkChangeDetector.ConnectionType> types, int preference) {
-    List<Long> nativeObservers = getNativeNetworkObserversSync();
-    for (NetworkChangeDetector.ConnectionType type : types) {
-      for (Long nativeObserver : nativeObservers) {
-        nativeNotifyOfNetworkPreference(nativeObserver, type, preference);
+    List<NetworkChangeDetector.ConnectionType> types, int preference) {
+    synchronized (nativeNetworkObservers) {
+      for (NetworkChangeDetector.ConnectionType type : types) {
+        for (Long nativeObserver : nativeNetworkObservers) {
+          nativeNotifyOfNetworkPreference(nativeObserver, type, preference);
+        }
       }
     }
   }
@@ -270,24 +276,18 @@ private void updateObserverActiveNetworkList(long nativeObserver) {
     List<NetworkChangeDetector.NetworkInformation> networkInfoList;
     synchronized (networkChangeDetectorLock) {
       networkInfoList =
-          (networkChangeDetector == null) ? null : networkChangeDetector.getActiveNetworkList();
+        (networkChangeDetector == null) ? null : networkChangeDetector.getActiveNetworkList();
     }
     if (networkInfoList == null) {
       return;
     }

     NetworkChangeDetector.NetworkInformation[] networkInfos =
-        new NetworkChangeDetector.NetworkInformation[networkInfoList.size()];
+      new NetworkChangeDetector.NetworkInformation[networkInfoList.size()];
     networkInfos = networkInfoList.toArray(networkInfos);
     nativeNotifyOfActiveNetworkList(nativeObserver, networkInfos);
   }

-  private List<Long> getNativeNetworkObserversSync() {
-    synchronized (nativeNetworkObservers) {
-      return new ArrayList<>(nativeNetworkObservers);
-    }
-  }
-
   /**
    * Adds an observer for any connection type changes.
    *
@@ -329,16 +329,16 @@ public static boolean isOnline() {

   private native void nativeNotifyConnectionTypeChanged(long nativeAndroidNetworkMonitor);

   private native void nativeNotifyOfNetworkConnect(
-      long nativeAndroidNetworkMonitor, NetworkChangeDetector.NetworkInformation networkInfo);
+    long nativeAndroidNetworkMonitor, NetworkChangeDetector.NetworkInformation networkInfo);

   private native void nativeNotifyOfNetworkDisconnect(
-      long nativeAndroidNetworkMonitor, long networkHandle);
+    long nativeAndroidNetworkMonitor, long networkHandle);

   private native void nativeNotifyOfActiveNetworkList(
-      long nativeAndroidNetworkMonitor, NetworkChangeDetector.NetworkInformation[] networkInfos);
+    long nativeAndroidNetworkMonitor, NetworkChangeDetector.NetworkInformation[] networkInfos);

   private native void nativeNotifyOfNetworkPreference(
-      long nativeAndroidNetworkMonitor, NetworkChangeDetector.ConnectionType type, int preference);
+    long nativeAndroidNetworkMonitor, NetworkChangeDetector.ConnectionType type, int preference);

   // For testing only.
   @Nullable
@@ -357,11 +357,11 @@ int getNumObservers() {

   // For testing only.
   static NetworkMonitorAutoDetect createAndSetAutoDetectForTest(
-      Context context, String fieldTrialsString) {
+    Context context, String fieldTrialsString) {
     NetworkMonitor networkMonitor = getInstance();
     NetworkChangeDetector networkChangeDetector =
-        networkMonitor.createNetworkChangeDetector(context, fieldTrialsString);
+      networkMonitor.createNetworkChangeDetector(context, fieldTrialsString);
     networkMonitor.networkChangeDetector = networkChangeDetector;
     return (NetworkMonitorAutoDetect) networkChangeDetector;
   }
-}
+}
\ No newline at end of file
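Note: native observers are now notified while holding the nativeNetworkObservers lock, replacing the snapshot copy from the removed getNativeNetworkObserversSync(); Java observers are still copied out first so that external callbacks never run under the lock. Registering a Java observer is unchanged; a minimal sketch, assuming NetworkMonitor.NetworkObserver remains a single-method interface so a lambda is accepted:

    NetworkMonitor.getInstance().addObserver(
        newConnectionType -> Logging.d("App", "Connection type: " + newConnectionType));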
+ " [" + username + ":" + password + "] [" + tlsCertPolicy + "] [" + hostname - + "] [" + tlsAlpnProtocols + "] [" + tlsEllipticCurves + "]"; + + "] [" + tlsAlpnProtocols + "] [" + tlsEllipticCurves + "]"; } @Override @@ -257,15 +257,15 @@ public boolean equals(@Nullable Object obj) { } IceServer other = (IceServer) obj; return (uri.equals(other.uri) && urls.equals(other.urls) && username.equals(other.username) - && password.equals(other.password) && tlsCertPolicy.equals(other.tlsCertPolicy) - && hostname.equals(other.hostname) && tlsAlpnProtocols.equals(other.tlsAlpnProtocols) - && tlsEllipticCurves.equals(other.tlsEllipticCurves)); + && password.equals(other.password) && tlsCertPolicy.equals(other.tlsCertPolicy) + && hostname.equals(other.hostname) && tlsAlpnProtocols.equals(other.tlsAlpnProtocols) + && tlsEllipticCurves.equals(other.tlsEllipticCurves)); } @Override public int hashCode() { Object[] values = {uri, urls, username, password, tlsCertPolicy, hostname, tlsAlpnProtocols, - tlsEllipticCurves}; + tlsEllipticCurves}; return Arrays.hashCode(values); } @@ -325,7 +325,7 @@ public Builder setTlsEllipticCurves(List tlsEllipticCurves) { public IceServer createIceServer() { return new IceServer(urls.get(0), urls, username, password, tlsCertPolicy, hostname, - tlsAlpnProtocols, tlsEllipticCurves); + tlsAlpnProtocols, tlsEllipticCurves); } } @@ -528,7 +528,6 @@ public static class RTCConfiguration { public boolean enableCpuOveruseDetection; public boolean suspendBelowMinBitrate; @Nullable public Integer screencastMinBitrate; - @Nullable public Boolean combinedAudioVideoBwe; // Use "Unknown" to represent no preference of adapter types, not the // preference of adapters of unknown types. public AdapterType networkPreference; @@ -541,11 +540,6 @@ public static class RTCConfiguration { // every offer/answer negotiation.This is only intended to be a workaround for crbug.com/835958 public boolean activeResetSrtpParams; - // Whether this client is allowed to switch encoding codec mid-stream. This is a workaround for - // a WebRTC bug where the receiver could get confussed if a codec switch happened mid-call. - // Null indicates no change to currently configured value. - @Nullable public Boolean allowCodecSwitching; - /** * Defines advanced optional cryptographic settings related to SRTP and * frame encryption for native WebRTC. Setting this will overwrite any @@ -573,6 +567,17 @@ public static class RTCConfiguration { */ public boolean offerExtmapAllowMixed; + /** + * When this flag is set, ports not bound to any specific network interface + * will be used, in addition to normal ports bound to the enumerated + * interfaces. Without this flag, these "any address" ports would only be + * used when network enumeration fails or is disabled. But under certain + * conditions, these ports may succeed where others fail, so they may allow + * the application to work in a wider variety of environments, at the expense + * of having to allocate additional candidates. + */ + public boolean enableIceGatheringOnAnyAddressPorts; + // TODO(deadbeef): Instead of duplicating the defaults here, we should do // something to pick up the defaults from C++. The Objective-C equivalent // of RTCConfiguration does that. 
@@ -607,15 +612,14 @@ public RTCConfiguration(List<IceServer> iceServers) {
       enableCpuOveruseDetection = true;
       suspendBelowMinBitrate = false;
       screencastMinBitrate = null;
-      combinedAudioVideoBwe = null;
       networkPreference = AdapterType.UNKNOWN;
       sdpSemantics = SdpSemantics.UNIFIED_PLAN;
       activeResetSrtpParams = false;
       cryptoOptions = null;
       turnLoggingId = null;
-      allowCodecSwitching = null;
       enableImplicitRollback = false;
       offerExtmapAllowMixed = true;
+      enableIceGatheringOnAnyAddressPorts = false;
     }

     @CalledByNative("RTCConfiguration")
@@ -788,12 +792,6 @@ Integer getScreencastMinBitrate() {
       return screencastMinBitrate;
     }

-    @Nullable
-    @CalledByNative("RTCConfiguration")
-    Boolean getCombinedAudioVideoBwe() {
-      return combinedAudioVideoBwe;
-    }
-
     @CalledByNative("RTCConfiguration")
     AdapterType getNetworkPreference() {
       return networkPreference;
@@ -809,12 +807,6 @@ boolean getActiveResetSrtpParams() {
       return activeResetSrtpParams;
     }

-    @Nullable
-    @CalledByNative("RTCConfiguration")
-    Boolean getAllowCodecSwitching() {
-      return allowCodecSwitching;
-    }
-
     @Nullable
     @CalledByNative("RTCConfiguration")
     CryptoOptions getCryptoOptions() {
@@ -836,6 +828,11 @@ boolean getEnableImplicitRollback() {
     boolean getOfferExtmapAllowMixed() {
       return offerExtmapAllowMixed;
     }
+
+    @CalledByNative("RTCConfiguration")
+    boolean getEnableIceGatheringOnAnyAddressPorts() {
+      return enableIceGatheringOnAnyAddressPorts;
+    }
   };

   private final List<MediaStream> localStreams = new ArrayList<>();
@@ -932,7 +929,7 @@ public boolean addIceCandidate(IceCandidate candidate) {

   public void addIceCandidate(IceCandidate candidate, AddIceObserver observer) {
     nativeAddIceCandidateWithObserver(
-        candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp, observer);
+      candidate.sdpMid, candidate.sdpMLineIndex, candidate.sdp, observer);
   }

   public boolean removeIceCandidates(final IceCandidate[] candidates) {
@@ -1119,7 +1116,7 @@ public RtpTransceiver addTransceiver(MediaStreamTrack track) {
   }

   public RtpTransceiver addTransceiver(
-      MediaStreamTrack track, @Nullable RtpTransceiver.RtpTransceiverInit init) {
+    MediaStreamTrack track, @Nullable RtpTransceiver.RtpTransceiverInit init) {
     if (track == null) {
       throw new NullPointerException("No MediaStreamTrack specified for addTransceiver.");
     }
@@ -1127,7 +1124,7 @@ public RtpTransceiver addTransceiver(
       init = new RtpTransceiver.RtpTransceiverInit();
     }
     RtpTransceiver newTransceiver =
-        nativeAddTransceiverWithTrack(track.getNativeMediaStreamTrack(), init);
+      nativeAddTransceiverWithTrack(track.getNativeMediaStreamTrack(), init);
     if (newTransceiver == null) {
       throw new IllegalStateException("C++ addTransceiver failed.");
     }
@@ -1140,7 +1137,7 @@ public RtpTransceiver addTransceiver(MediaStreamTrack.MediaType mediaType) {
   }

   public RtpTransceiver addTransceiver(
-      MediaStreamTrack.MediaType mediaType, @Nullable RtpTransceiver.RtpTransceiverInit init) {
+    MediaStreamTrack.MediaType mediaType, @Nullable RtpTransceiver.RtpTransceiverInit init) {
     if (mediaType == null) {
       throw new NullPointerException("No MediaType specified for addTransceiver.");
     }
@@ -1311,9 +1308,9 @@ public static long createNativePeerConnectionObserver(Observer observer) {

   private static native void nativeFreeOwnedPeerConnection(long ownedPeerConnection);
   private native boolean nativeSetConfiguration(RTCConfiguration config);
   private native boolean nativeAddIceCandidate(
-      String sdpMid, int sdpMLineIndex, String iceCandidateSdp);
+    String sdpMid, int sdpMLineIndex, String iceCandidateSdp);
   private native void nativeAddIceCandidateWithObserver(
-      String sdpMid, int sdpMLineIndex, String iceCandidateSdp, AddIceObserver observer);
+    String sdpMid, int sdpMLineIndex, String iceCandidateSdp, AddIceObserver observer);
   private native boolean nativeRemoveIceCandidates(final IceCandidate[] candidates);
   private native boolean nativeAddLocalStream(long stream);
   private native void nativeRemoveLocalStream(long stream);
@@ -1328,9 +1325,9 @@ private native void nativeAddIceCandidateWithObserver(
   private native RtpSender nativeAddTrack(long track, List<String> streamIds);
   private native boolean nativeRemoveTrack(long sender);
   private native RtpTransceiver nativeAddTransceiverWithTrack(
-      long track, RtpTransceiver.RtpTransceiverInit init);
+    long track, RtpTransceiver.RtpTransceiverInit init);
   private native RtpTransceiver nativeAddTransceiverOfType(
-      MediaStreamTrack.MediaType mediaType, RtpTransceiver.RtpTransceiverInit init);
+    MediaStreamTrack.MediaType mediaType, RtpTransceiver.RtpTransceiverInit init);
   private native boolean nativeStartRtcEventLog(int file_descriptor, int max_size_bytes);
   private native void nativeStopRtcEventLog();
-}
+}
\ No newline at end of file
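Note: with combinedAudioVideoBwe and allowCodecSwitching dropped from RTCConfiguration, the one new field that round-trips to native is enableIceGatheringOnAnyAddressPorts. A minimal sketch of opting in (iceServers, factory, and observer are assumed to exist in scope):

    PeerConnection.RTCConfiguration config = new PeerConnection.RTCConfiguration(iceServers);
    config.enableIceGatheringOnAnyAddressPorts = true; // default is false
    PeerConnection peerConnection = factory.createPeerConnection(config, observer);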
diff --git a/stream-webrtc-android/src/main/java/org/webrtc/RtpParameters.java b/stream-webrtc-android/src/main/java/org/webrtc/RtpParameters.java
index 01f8082c5..108b70ff3 100644
--- a/stream-webrtc-android/src/main/java/org/webrtc/RtpParameters.java
+++ b/stream-webrtc-android/src/main/java/org/webrtc/RtpParameters.java
@@ -76,6 +76,7 @@ public static class Encoding {
     // If non-null, scale the width and height down by this factor for video. If null,
     // implementation default scaling factor will be used.
     @Nullable public Double scaleResolutionDownBy;
+    // Scalability modes are used to represent simulcast and SVC layers.
     @Nullable public String scalabilityMode;
     // SSRC to be used by this encoding.
     // Can't be changed between getParameters/setParameters.
@@ -93,9 +94,9 @@ public Encoding(String rid, boolean active, Double scaleResolutionDownBy) {

     @CalledByNative("Encoding")
     Encoding(String rid, boolean active, double bitratePriority, @Priority int networkPriority,
-        Integer maxBitrateBps, Integer minBitrateBps, Integer maxFramerate,
-        Integer numTemporalLayers, Double scaleResolutionDownBy, String scalabilityMode, Long ssrc,
-        boolean adaptiveAudioPacketTime) {
+      Integer maxBitrateBps, Integer minBitrateBps, Integer maxFramerate,
+      Integer numTemporalLayers, Double scaleResolutionDownBy, String scalabilityMode,
+      Long ssrc, boolean adaptiveAudioPacketTime) {
       this.rid = rid;
       this.active = active;
       this.bitratePriority = bitratePriority;
@@ -195,7 +196,7 @@ public static class Codec {

     @CalledByNative("Codec")
     Codec(int payloadType, String name, MediaStreamTrack.MediaType kind, Integer clockRate,
-        Integer numChannels, Map<String, String> parameters) {
+      Integer numChannels, Map<String, String> parameters) {
       this.payloadType = payloadType;
       this.name = name;
       this.kind = kind;
@@ -307,7 +308,7 @@ public boolean getEncrypted() {

   @CalledByNative
   RtpParameters(String transactionId, DegradationPreference degradationPreference, Rtcp rtcp,
-      List<HeaderExtension> headerExtensions, List<Encoding> encodings, List<Codec> codecs) {
+    List<HeaderExtension> headerExtensions, List<Encoding> encodings, List<Codec> codecs) {
     this.transactionId = transactionId;
     this.degradationPreference = degradationPreference;
     this.rtcp = rtcp;
@@ -345,4 +346,4 @@ List<Encoding> getEncodings() {

   List<Codec> getCodecs() {
     return codecs;
   }
-}
+}
\ No newline at end of file
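Note: Encoding.scalabilityMode is set per sender encoding. A minimal sketch of requesting an SVC mode on an existing RtpSender, assuming the negotiated codec (e.g. VP9 or AV1) supports the mode string:

    RtpParameters parameters = sender.getParameters();
    for (RtpParameters.Encoding encoding : parameters.encodings) {
      encoding.scalabilityMode = "L3T3"; // 3 spatial x 3 temporal layers
    }
    sender.setParameters(parameters);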
diff --git a/stream-webrtc-android/src/main/java/org/webrtc/SimulcastVideoEncoder.java b/stream-webrtc-android/src/main/java/org/webrtc/SimulcastVideoEncoder.java
index da39a1b6a..1ed098a1b 100644
--- a/stream-webrtc-android/src/main/java/org/webrtc/SimulcastVideoEncoder.java
+++ b/stream-webrtc-android/src/main/java/org/webrtc/SimulcastVideoEncoder.java
@@ -2,26 +2,26 @@

 public class SimulcastVideoEncoder extends WrappedNativeVideoEncoder {

-    static native long nativeCreateEncoder(VideoEncoderFactory primary, VideoEncoderFactory fallback, VideoCodecInfo info);
+  static native long nativeCreateEncoder(long webrtcEnvRef, VideoEncoderFactory primary, VideoEncoderFactory fallback, VideoCodecInfo info);

-    VideoEncoderFactory primary;
-    VideoEncoderFactory fallback;
-    VideoCodecInfo info;
+  VideoEncoderFactory primary;
+  VideoEncoderFactory fallback;
+  VideoCodecInfo info;

-    public SimulcastVideoEncoder(VideoEncoderFactory primary, VideoEncoderFactory fallback, VideoCodecInfo info) {
-        this.primary = primary;
-        this.fallback = fallback;
-        this.info = info;
-    }
+  public SimulcastVideoEncoder(VideoEncoderFactory primary, VideoEncoderFactory fallback, VideoCodecInfo info) {
+    this.primary = primary;
+    this.fallback = fallback;
+    this.info = info;
+  }

-    @Override
-    public long createNativeVideoEncoder() {
-        return nativeCreateEncoder(primary, fallback, info);
-    }
+  @Override
+  public long createNative(long webrtcEnvRef) {
+    return nativeCreateEncoder(webrtcEnvRef, primary, fallback, info);
+  }

-    @Override
-    public boolean isHardwareEncoder() {
-        return false;
-    }
+  @Override
+  public boolean isHardwareEncoder() {
+    return false;
+  }

-}
+}
\ No newline at end of file
diff --git a/stream-webrtc-android/src/main/java/org/webrtc/SoftwareVideoEncoderFactory.java b/stream-webrtc-android/src/main/java/org/webrtc/SoftwareVideoEncoderFactory.java
index 7f4c457b9..5fc074bf4 100644
--- a/stream-webrtc-android/src/main/java/org/webrtc/SoftwareVideoEncoderFactory.java
+++ b/stream-webrtc-android/src/main/java/org/webrtc/SoftwareVideoEncoderFactory.java
@@ -26,16 +26,15 @@ public SoftwareVideoEncoderFactory() {
   @Nullable
   @Override
   public VideoEncoder createEncoder(VideoCodecInfo info) {
-    long nativeEncoder = nativeCreateEncoder(nativeFactory, info);
-    if (nativeEncoder == 0) {
+    if (!nativeIsSupported(nativeFactory, info)) {
       Logging.w(TAG, "Trying to create encoder for unsupported format. " + info);
       return null;
     }

     return new WrappedNativeVideoEncoder() {
       @Override
-      public long createNativeVideoEncoder() {
-        return nativeEncoder;
+      public long createNative(long webrtcEnvRef) {
+        return nativeCreate(nativeFactory, webrtcEnvRef, info);
       }

       @Override
@@ -52,7 +51,9 @@ public VideoCodecInfo[] getSupportedCodecs() {

   private static native long nativeCreateFactory();

-  private static native long nativeCreateEncoder(long factory, VideoCodecInfo videoCodecInfo);
+  private static native boolean nativeIsSupported(long factory, VideoCodecInfo info);
+
+  private static native long nativeCreate(long factory, long webrtcEnvRef, VideoCodecInfo info);

   private static native List<VideoCodecInfo> nativeGetSupportedCodecs(long factory);
-}
+}
\ No newline at end of file
diff --git a/stream-webrtc-android/src/main/java/org/webrtc/VideoCodecInfo.java b/stream-webrtc-android/src/main/java/org/webrtc/VideoCodecInfo.java
index 86d67d6d5..5426676ce 100644
--- a/stream-webrtc-android/src/main/java/org/webrtc/VideoCodecInfo.java
+++ b/stream-webrtc-android/src/main/java/org/webrtc/VideoCodecInfo.java
@@ -14,6 +14,8 @@
 import java.util.Arrays;
 import java.util.Locale;
 import java.util.Map;
+import java.util.List;
+import java.util.ArrayList;

 /**
  * Represent a video codec as encoded in SDP.
@@ -28,21 +30,22 @@ public class VideoCodecInfo {
   public static final String H264_PROFILE_CONSTRAINED_HIGH = "640c";
   public static final String H264_LEVEL_3_1 = "1f"; // 31 in hex.
   public static final String H264_CONSTRAINED_HIGH_3_1 =
-      H264_PROFILE_CONSTRAINED_HIGH + H264_LEVEL_3_1;
+    H264_PROFILE_CONSTRAINED_HIGH + H264_LEVEL_3_1;
   public static final String H264_CONSTRAINED_BASELINE_3_1 =
-      H264_PROFILE_CONSTRAINED_BASELINE + H264_LEVEL_3_1;
+    H264_PROFILE_CONSTRAINED_BASELINE + H264_LEVEL_3_1;

   public final String name;
   public final Map<String, String> params;
-  public int[] scalabilityModes;
+  public final List<String> scalabilityModes;
+
   @Deprecated public final int payload;

   @CalledByNative
-  public VideoCodecInfo(String name, Map<String, String> params) {
+  public VideoCodecInfo(String name, Map<String, String> params, List<String> scalabilityModes) {
     this.payload = 0;
     this.name = name;
     this.params = params;
-    this.scalabilityModes = new int[0];
+    this.scalabilityModes = scalabilityModes;
   }

   @Deprecated
@@ -50,7 +53,7 @@ public VideoCodecInfo(int payload, String name, Map<String, String> params) {
     this.payload = payload;
     this.name = name;
     this.params = params;
-    this.scalabilityModes = new int[0];
+    this.scalabilityModes = new ArrayList<>();
   }

   @Override
@@ -88,14 +91,7 @@ Map<String, String> getParams() {
   }

   @CalledByNative
-  int[] getScalabilityModes() {
+  List<String> getScalabilityModes() {
     return scalabilityModes;
   }
-
-  @CalledByNative
-  void setScalabilityModes(int[] values) {
-    scalabilityModes = values;
-  }
-
-
-}
+}
\ No newline at end of file
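Note: the three-argument constructor is now the only non-deprecated way to build a VideoCodecInfo, and scalabilityModes can no longer be mutated from native code (the setter is gone). A minimal sketch, assuming the list elements are SDP-style scalability mode names:

    Map<String, String> params = new HashMap<>();
    List<String> modes = new ArrayList<>(Arrays.asList("L1T1", "L1T3"));
    VideoCodecInfo vp9 = new VideoCodecInfo("VP9", params, modes);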
diff --git a/stream-webrtc-android/src/main/java/org/webrtc/VideoEncoder.java b/stream-webrtc-android/src/main/java/org/webrtc/VideoEncoder.java
index 0d8cf830a..b71c1d32e 100644
--- a/stream-webrtc-android/src/main/java/org/webrtc/VideoEncoder.java
+++ b/stream-webrtc-android/src/main/java/org/webrtc/VideoEncoder.java
@@ -33,14 +33,14 @@ public class Settings {
     // TODO(bugs.webrtc.org/10720): Remove.
     @Deprecated
     public Settings(int numberOfCores, int width, int height, int startBitrate, int maxFramerate,
-        int numberOfSimulcastStreams, boolean automaticResizeOn) {
+      int numberOfSimulcastStreams, boolean automaticResizeOn) {
       this(numberOfCores, width, height, startBitrate, maxFramerate, numberOfSimulcastStreams,
-          automaticResizeOn, new VideoEncoder.Capabilities(false /* lossNotification */));
+        automaticResizeOn, new VideoEncoder.Capabilities(false /* lossNotification */));
     }

     @CalledByNative("Settings")
     public Settings(int numberOfCores, int width, int height, int startBitrate, int maxFramerate,
-        int numberOfSimulcastStreams, boolean automaticResizeOn, Capabilities capabilities) {
+      int numberOfSimulcastStreams, boolean automaticResizeOn, Capabilities capabilities) {
       this.numberOfCores = numberOfCores;
       this.width = width;
       this.height = height;
@@ -208,7 +208,7 @@ public class ResolutionBitrateLimits {
     public final int maxBitrateBps;

     public ResolutionBitrateLimits(
-        int frameSizePixels, int minStartBitrateBps, int minBitrateBps, int maxBitrateBps) {
+      int frameSizePixels, int minStartBitrateBps, int minBitrateBps, int maxBitrateBps) {
       this.frameSizePixels = frameSizePixels;
       this.minStartBitrateBps = minStartBitrateBps;
       this.minBitrateBps = minBitrateBps;
@@ -275,7 +275,7 @@ public class EncoderInfo {
     public final boolean applyAlignmentToAllSimulcastLayers;

     public EncoderInfo(
-        int requestedResolutionAlignment, boolean applyAlignmentToAllSimulcastLayers) {
+      int requestedResolutionAlignment, boolean applyAlignmentToAllSimulcastLayers) {
       this.requestedResolutionAlignment = requestedResolutionAlignment;
       this.applyAlignmentToAllSimulcastLayers = applyAlignmentToAllSimulcastLayers;
     }
@@ -310,11 +310,11 @@ public interface Callback {
    * encoder (e.g., an Android platform encoder), or alternatively 2) a native
    * encoder (e.g., a software encoder or a C++ encoder adapter).
    *
-   * For case 1), createNativeVideoEncoder() should return zero.
+   * For case 1), createNative() should return zero.
    * In this case, we expect the native library to call the encoder through
    * JNI using the Java interface declared below.
    *
-   * For case 2), createNativeVideoEncoder() should return a non-zero value.
+   * For case 2), createNative() should return a non-zero value.
    * In this case, we expect the native library to treat the returned value as
    * a raw pointer of type webrtc::VideoEncoder* (ownership is transferred to
    * the caller). The native library should then directly call the
@@ -323,7 +323,7 @@ public interface Callback {
    * UnsupportedOperationException.
    */
   @CalledByNative
-  default long createNativeVideoEncoder() {
+  default long createNative(long webrtcEnvRef) {
     return 0;
   }
@@ -380,6 +380,6 @@ default ResolutionBitrateLimits[] getResolutionBitrateLimits() {
   @CalledByNative
   default EncoderInfo getEncoderInfo() {
     return new EncoderInfo(
-        /* requestedResolutionAlignment= */ 1, /* applyAlignmentToAllSimulcastLayers= */ false);
+      /* requestedResolutionAlignment= */ 1, /* applyAlignmentToAllSimulcastLayers= */ false);
   }
-}
+}
\ No newline at end of file
diff --git a/stream-webrtc-android/src/main/java/org/webrtc/VideoEncoderFallback.java b/stream-webrtc-android/src/main/java/org/webrtc/VideoEncoderFallback.java
index fa36b7c98..106a61577 100644
--- a/stream-webrtc-android/src/main/java/org/webrtc/VideoEncoderFallback.java
+++ b/stream-webrtc-android/src/main/java/org/webrtc/VideoEncoderFallback.java
@@ -23,8 +23,8 @@ public VideoEncoderFallback(VideoEncoder fallback, VideoEncoder primary) {
   }

   @Override
-  public long createNativeVideoEncoder() {
-    return nativeCreateEncoder(fallback, primary);
+  public long createNative(long webrtcEnvRef) {
+    return nativeCreate(webrtcEnvRef, fallback, primary);
   }

   @Override
@@ -32,5 +32,6 @@ public boolean isHardwareEncoder() {
     return primary.isHardwareEncoder();
   }

-  private static native long nativeCreateEncoder(VideoEncoder fallback, VideoEncoder primary);
-}
+  private static native long nativeCreate(
+    long webrtcEnvRef, VideoEncoder fallback, VideoEncoder primary);
+}
\ No newline at end of file
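Note: every wrapped native encoder now receives a webrtcEnvRef at creation time and forwards it to its JNI factory, as VideoEncoderFallback does above. A sketch of what a custom wrapper looks like after this change (nativeCreateMyEncoder is a hypothetical JNI entry point, not part of the library):

    public final class MyNativeEncoder extends WrappedNativeVideoEncoder {
      @Override
      public long createNative(long webrtcEnvRef) {
        // Returns a raw webrtc::VideoEncoder* as a jlong; ownership transfers to the caller.
        return nativeCreateMyEncoder(webrtcEnvRef);
      }

      @Override
      public boolean isHardwareEncoder() {
        return false;
      }

      private static native long nativeCreateMyEncoder(long webrtcEnvRef);
    }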
diff --git a/stream-webrtc-android/src/main/java/org/webrtc/WrappedNativeVideoEncoder.java b/stream-webrtc-android/src/main/java/org/webrtc/WrappedNativeVideoEncoder.java
index 7d0908a6a..a61fb5738 100644
--- a/stream-webrtc-android/src/main/java/org/webrtc/WrappedNativeVideoEncoder.java
+++ b/stream-webrtc-android/src/main/java/org/webrtc/WrappedNativeVideoEncoder.java
@@ -14,7 +14,7 @@
  * Wraps a native webrtc::VideoEncoder.
 */
 public abstract class WrappedNativeVideoEncoder implements VideoEncoder {
-  @Override public abstract long createNativeVideoEncoder();
+  @Override public abstract long createNative(long webrtcEnvRef);
   @Override public abstract boolean isHardwareEncoder();

   @Override
@@ -46,4 +46,4 @@ public final ScalingSettings getScalingSettings() {
   public final String getImplementationName() {
     throw new UnsupportedOperationException("Not implemented.");
   }
-}
+}
\ No newline at end of file
diff --git a/stream-webrtc-android/src/main/java/org/webrtc/audio/AudioDeviceModule.java b/stream-webrtc-android/src/main/java/org/webrtc/audio/AudioDeviceModule.java
index 502c68cc9..ce7d44aff 100644
--- a/stream-webrtc-android/src/main/java/org/webrtc/audio/AudioDeviceModule.java
+++ b/stream-webrtc-android/src/main/java/org/webrtc/audio/AudioDeviceModule.java
@@ -35,4 +35,22 @@ public interface AudioDeviceModule {

   /** Control muting/unmuting the microphone. */
   void setMicrophoneMute(boolean mute);
-}
+
+  /**
+   * Enable or disable the built-in noise suppressor. Returns true if enabling was successful,
+   * otherwise false is returned.
+   */
+  default boolean setNoiseSuppressorEnabled(boolean enabled) {
+    return false;
+  }
+
+  /**
+   * Sets the preferred field dimension for the built-in microphone. Returns
+   * true if setting was successful, otherwise false is returned.
+   * This functionality can be implemented with
+   * {@code android.media.MicrophoneDirection.setPreferredMicrophoneFieldDimension}.
+   */
+  default boolean setPreferredMicrophoneFieldDimension(float dimension) {
+    return false;
+  }
+}
\ No newline at end of file
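Note: both new interface methods default to returning false, so existing AudioDeviceModule implementations keep compiling; JavaAudioDeviceModule overrides the noise-suppressor one below. A minimal usage sketch (applicationContext is assumed to exist in scope):

    JavaAudioDeviceModule adm = JavaAudioDeviceModule.builder(applicationContext)
        .setUseHardwareNoiseSuppressor(true)
        .createAudioDeviceModule();
    boolean applied = adm.setNoiseSuppressorEnabled(false); // false if the effect is unavailable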
diff --git a/stream-webrtc-android/src/main/java/org/webrtc/audio/JavaAudioDeviceModule.java b/stream-webrtc-android/src/main/java/org/webrtc/audio/JavaAudioDeviceModule.java
index f675fd4fa..48f20d152 100644
--- a/stream-webrtc-android/src/main/java/org/webrtc/audio/JavaAudioDeviceModule.java
+++ b/stream-webrtc-android/src/main/java/org/webrtc/audio/JavaAudioDeviceModule.java
@@ -51,7 +51,6 @@ public static class Builder {
     private AudioAttributes audioAttributes;
     private boolean useLowLatency;
    private boolean enableVolumeLogger;
-    private AudioRecordDataCallback audioRecordDataCallback;

     private Builder(Context context) {
       this.context = context;
@@ -222,16 +221,6 @@ public Builder setEnableVolumeLogger(boolean enableVolumeLogger) {
       return this;
     }

-    /**
-     * Can be used to gain access to the raw ByteBuffer from the recording device before it's
-     * fed into WebRTC. You can use this to manipulate the ByteBuffer (e.g. audio filters).
-     * Make sure that the operation is fast.
-     */
-    public Builder setAudioRecordDataCallback(AudioRecordDataCallback audioRecordDataCallback) {
-      this.audioRecordDataCallback = audioRecordDataCallback;
-      return this;
-    }
-
     /**
     * Construct an AudioDeviceModule based on the supplied arguments. The caller takes ownership
     * and is responsible for calling release().
@@ -265,13 +254,13 @@ public JavaAudioDeviceModule createAudioDeviceModule() {
         executor = WebRtcAudioRecord.newDefaultScheduler();
       }
       final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, executor, audioManager,
-          audioSource, audioFormat, audioRecordErrorCallback, audioRecordStateCallback,
-          samplesReadyCallback, audioRecordDataCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor);
+        audioSource, audioFormat, audioRecordErrorCallback, audioRecordStateCallback,
+        samplesReadyCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor);
       final WebRtcAudioTrack audioOutput =
-          new WebRtcAudioTrack(context, audioManager, audioAttributes, audioTrackErrorCallback,
-              audioTrackStateCallback, useLowLatency, enableVolumeLogger);
+        new WebRtcAudioTrack(context, audioManager, audioAttributes, audioTrackErrorCallback,
+          audioTrackStateCallback, useLowLatency, enableVolumeLogger);
       return new JavaAudioDeviceModule(context, audioManager, audioInput, audioOutput,
-          inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput);
+        inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput);
     }
   }

@@ -298,8 +287,11 @@ public static interface AudioRecordStateCallback {
    * Contains audio sample information.
    */
   public static class AudioSamples {
+    /** See {@link AudioRecord#getAudioFormat()} */
     private final int audioFormat;
+    /** See {@link AudioRecord#getChannelCount()} */
     private final int channelCount;
+    /** See {@link AudioRecord#getSampleRate()} */
     private final int sampleRate;

     private final byte[] data;
@@ -381,8 +373,8 @@ public static boolean isBuiltInNoiseSuppressorSupported() {
   private long nativeAudioDeviceModule;

   private JavaAudioDeviceModule(Context context, AudioManager audioManager,
-      WebRtcAudioRecord audioInput, WebRtcAudioTrack audioOutput, int inputSampleRate,
-      int outputSampleRate, boolean useStereoInput, boolean useStereoOutput) {
+    WebRtcAudioRecord audioInput, WebRtcAudioTrack audioOutput, int inputSampleRate,
+    int outputSampleRate, boolean useStereoInput, boolean useStereoOutput) {
     this.context = context;
     this.audioManager = audioManager;
     this.audioInput = audioInput;
@@ -398,7 +390,7 @@ public long getNativeAudioDeviceModulePointer() {
     synchronized (nativeLock) {
       if (nativeAudioDeviceModule == 0) {
         nativeAudioDeviceModule = nativeCreateAudioDeviceModule(context, audioManager, audioInput,
-            audioOutput, inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput);
+          audioOutput, inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput);
       }
       return nativeAudioDeviceModule;
     }
@@ -426,6 +418,12 @@ public void setMicrophoneMute(boolean mute) {
     audioInput.setMicrophoneMute(mute);
   }

+  @Override
+  public boolean setNoiseSuppressorEnabled(boolean enabled) {
+    Logging.d(TAG, "setNoiseSuppressorEnabled: " + enabled);
+    return audioInput.setNoiseSuppressorEnabled(enabled);
+  }
+
   /**
   * Start to prefer a specific {@link AudioDeviceInfo} device for recording. Typically this should
   * only be used if a client gives an explicit option for choosing a physical device to record
@@ -439,6 +437,6 @@ public void setPreferredInputDevice(AudioDeviceInfo preferredInputDevice) {
   }

   private static native long nativeCreateAudioDeviceModule(Context context,
-      AudioManager audioManager, WebRtcAudioRecord audioInput, WebRtcAudioTrack audioOutput,
-      int inputSampleRate, int outputSampleRate, boolean useStereoInput, boolean useStereoOutput);
-}
+    AudioManager audioManager, WebRtcAudioRecord audioInput, WebRtcAudioTrack audioOutput,
+    int inputSampleRate, int outputSampleRate, boolean useStereoInput, boolean useStereoOutput);
+}
\ No newline at end of file
diff --git a/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioEffects.java b/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioEffects.java
index a9ff1011b..9b2c120a4 100644
--- a/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioEffects.java
+++ b/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioEffects.java
@@ -31,9 +31,9 @@ class WebRtcAudioEffects {
   // UUIDs for Software Audio Effects that we want to avoid using.
   // The implementor field will be set to "The Android Open Source Project".
   private static final UUID AOSP_ACOUSTIC_ECHO_CANCELER =
-      UUID.fromString("bb392ec0-8d4d-11e0-a896-0002a5d5c51b");
+    UUID.fromString("bb392ec0-8d4d-11e0-a896-0002a5d5c51b");
   private static final UUID AOSP_NOISE_SUPPRESSOR =
-      UUID.fromString("c06c8400-8e06-11e0-9cb6-0002a5d5c51b");
+    UUID.fromString("c06c8400-8e06-11e0-9cb6-0002a5d5c51b");

   // Contains the available effect descriptors returned from the
   // AudioEffect.getEffects() call. This result is cached to avoid doing the
@@ -104,6 +104,19 @@ public boolean setNS(boolean enable) {
     return true;
   }

+  // Toggles an existing NoiseSuppressor to be enabled or disabled.
+  // Returns true if the toggle succeeded, otherwise false is returned (this is also the
+  // case if no NoiseSuppressor was present).
+  public boolean toggleNS(boolean enable) {
+    if (ns == null) {
+      Logging.e(TAG, "Attempting to enable or disable nonexistent NoiseSuppressor.");
+      return false;
+    }
+    Logging.d(TAG, "toggleNS(" + enable + ")");
+    return ns.setEnabled(enable) == AudioEffect.SUCCESS;
+  }
+
   public void enable(int audioSession) {
     Logging.d(TAG, "enable(audioSession=" + audioSession + ")");
     assertTrue(aec == null);
@@ -116,10 +129,10 @@ public void enable(int audioSession) {
     for (Descriptor d : AudioEffect.queryEffects()) {
       if (effectTypeIsVoIP(d.type)) {
         Logging.d(TAG,
-            "name: " + d.name + ", "
-                + "mode: " + d.connectMode + ", "
-                + "implementor: " + d.implementor + ", "
-                + "UUID: " + d.uuid);
+          "name: " + d.name + ", "
+            + "mode: " + d.connectMode + ", "
+            + "implementor: " + d.implementor + ", "
+            + "UUID: " + d.uuid);
       }
     }
   }
@@ -135,8 +148,8 @@ public void enable(int audioSession) {
         Logging.e(TAG, "Failed to set the AcousticEchoCanceler state");
       }
       Logging.d(TAG,
-          "AcousticEchoCanceler: was " + (enabled ? "enabled" : "disabled") + ", enable: "
-              + enable + ", is now: " + (aec.getEnabled() ? "enabled" : "disabled"));
+        "AcousticEchoCanceler: was " + (enabled ? "enabled" : "disabled") + ", enable: "
+          + enable + ", is now: " + (aec.getEnabled() ? "enabled" : "disabled"));
"enabled" : "disabled")); } else { Logging.e(TAG, "Failed to create the AcousticEchoCanceler instance"); } @@ -153,8 +166,8 @@ public void enable(int audioSession) { Logging.e(TAG, "Failed to set the NoiseSuppressor state"); } Logging.d(TAG, - "NoiseSuppressor: was " + (enabled ? "enabled" : "disabled") + ", enable: " + enable - + ", is now: " + (ns.getEnabled() ? "enabled" : "disabled")); + "NoiseSuppressor: was " + (enabled ? "enabled" : "disabled") + ", enable: " + enable + + ", is now: " + (ns.getEnabled() ? "enabled" : "disabled")); } else { Logging.e(TAG, "Failed to create the NoiseSuppressor instance"); } @@ -185,7 +198,7 @@ public void release() { // AutomaticGainControl.isAvailable() returns false. private boolean effectTypeIsVoIP(UUID type) { return (AudioEffect.EFFECT_TYPE_AEC.equals(type) && isAcousticEchoCancelerSupported()) - || (AudioEffect.EFFECT_TYPE_NS.equals(type) && isNoiseSuppressorSupported()); + || (AudioEffect.EFFECT_TYPE_NS.equals(type) && isNoiseSuppressorSupported()); } // Helper method which throws an exception when an assertion has failed. @@ -224,4 +237,4 @@ private static boolean isEffectTypeAvailable(UUID effectType, UUID blockListedUu } return false; } -} +} \ No newline at end of file diff --git a/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java b/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java index 3f97d2928..0086529f8 100644 --- a/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java +++ b/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java @@ -99,12 +99,11 @@ class WebRtcAudioRecord { private volatile boolean microphoneMute; private final AtomicReference audioSourceMatchesRecordingSessionRef = - new AtomicReference<>(); + new AtomicReference<>(); private byte[] emptyBytes; private final @Nullable AudioRecordErrorCallback errorCallback; private final @Nullable AudioRecordStateCallback stateCallback; - private final @Nullable AudioRecordDataCallback audioRecordDataCallback; private final @Nullable SamplesReadyCallback audioSamplesReadyCallback; private final boolean isAcousticEchoCancelerSupported; private final boolean isNoiseSuppressorSupported; @@ -150,27 +149,20 @@ public void run() { long captureTimeNs = 0; if (Build.VERSION.SDK_INT >= 24) { if (audioRecord.getTimestamp(audioTimestamp, AudioTimestamp.TIMEBASE_MONOTONIC) - == AudioRecord.SUCCESS) { + == AudioRecord.SUCCESS) { captureTimeNs = audioTimestamp.nanoTime; } } - - // Allow the client to intercept the ByteBuffer (to modify it) - if (audioRecordDataCallback != null) { - audioRecordDataCallback.onAudioDataRecorded(audioRecord.getAudioFormat(), - audioRecord.getChannelCount(), audioRecord.getSampleRate(), byteBuffer); - } - nativeDataIsRecorded(nativeAudioRecord, bytesRead, captureTimeNs); } if (audioSamplesReadyCallback != null) { // Copy the entire byte buffer array. The start of the byteBuffer is not necessarily // at index 0. 
diff --git a/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java b/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java
index 3f97d2928..0086529f8 100644
--- a/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java
+++ b/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java
@@ -99,12 +99,11 @@ class WebRtcAudioRecord {
   private volatile boolean microphoneMute;
   private final AtomicReference<Boolean> audioSourceMatchesRecordingSessionRef =
-      new AtomicReference<>();
+    new AtomicReference<>();
   private byte[] emptyBytes;

   private final @Nullable AudioRecordErrorCallback errorCallback;
   private final @Nullable AudioRecordStateCallback stateCallback;
-  private final @Nullable AudioRecordDataCallback audioRecordDataCallback;
   private final @Nullable SamplesReadyCallback audioSamplesReadyCallback;
   private final boolean isAcousticEchoCancelerSupported;
   private final boolean isNoiseSuppressorSupported;
@@ -150,27 +149,20 @@ public void run() {
           long captureTimeNs = 0;
           if (Build.VERSION.SDK_INT >= 24) {
             if (audioRecord.getTimestamp(audioTimestamp, AudioTimestamp.TIMEBASE_MONOTONIC)
-                == AudioRecord.SUCCESS) {
+              == AudioRecord.SUCCESS) {
               captureTimeNs = audioTimestamp.nanoTime;
             }
           }
-
-          // Allow the client to intercept the ByteBuffer (to modify it)
-          if (audioRecordDataCallback != null) {
-            audioRecordDataCallback.onAudioDataRecorded(audioRecord.getAudioFormat(),
-                audioRecord.getChannelCount(), audioRecord.getSampleRate(), byteBuffer);
-          }
-
           nativeDataIsRecorded(nativeAudioRecord, bytesRead, captureTimeNs);
         }
         if (audioSamplesReadyCallback != null) {
           // Copy the entire byte buffer array. The start of the byteBuffer is not necessarily
           // at index 0.
           byte[] data = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.arrayOffset(),
-              byteBuffer.capacity() + byteBuffer.arrayOffset());
+            byteBuffer.capacity() + byteBuffer.arrayOffset());
           audioSamplesReadyCallback.onWebRtcAudioRecordSamplesReady(
-              new JavaAudioDeviceModule.AudioSamples(audioRecord.getAudioFormat(),
-                  audioRecord.getChannelCount(), audioRecord.getSampleRate(), data));
+            new JavaAudioDeviceModule.AudioSamples(audioRecord.getAudioFormat(),
+              audioRecord.getChannelCount(), audioRecord.getSampleRate(), data));
         }
       } else {
         String errorMessage = "AudioRecord.read failed: " + bytesRead;
@@ -203,19 +195,17 @@ public void stopThread() {

   @CalledByNative
   WebRtcAudioRecord(Context context, AudioManager audioManager) {
     this(context, newDefaultScheduler() /* scheduler */, audioManager, DEFAULT_AUDIO_SOURCE,
-        DEFAULT_AUDIO_FORMAT, null /* errorCallback */, null /* stateCallback */,
-        null /* audioSamplesReadyCallback */, null /* audioRecordCallback */,
-        WebRtcAudioEffects.isAcousticEchoCancelerSupported(),
-        WebRtcAudioEffects.isNoiseSuppressorSupported());
+      DEFAULT_AUDIO_FORMAT, null /* errorCallback */, null /* stateCallback */,
+      null /* audioSamplesReadyCallback */, WebRtcAudioEffects.isAcousticEchoCancelerSupported(),
+      WebRtcAudioEffects.isNoiseSuppressorSupported());
   }

   public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler,
-      AudioManager audioManager, int audioSource, int audioFormat,
-      @Nullable AudioRecordErrorCallback errorCallback,
-      @Nullable AudioRecordStateCallback stateCallback,
-      @Nullable SamplesReadyCallback audioSamplesReadyCallback,
-      @Nullable AudioRecordDataCallback audioRecordDataCallback,
-      boolean isAcousticEchoCancelerSupported, boolean isNoiseSuppressorSupported) {
+    AudioManager audioManager, int audioSource, int audioFormat,
+    @Nullable AudioRecordErrorCallback errorCallback,
+    @Nullable AudioRecordStateCallback stateCallback,
+    @Nullable SamplesReadyCallback audioSamplesReadyCallback,
+    boolean isAcousticEchoCancelerSupported, boolean isNoiseSuppressorSupported) {
     if (isAcousticEchoCancelerSupported && !WebRtcAudioEffects.isAcousticEchoCancelerSupported()) {
       throw new IllegalArgumentException("HW AEC not supported");
     }
@@ -230,7 +220,6 @@ public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler,
     this.errorCallback = errorCallback;
     this.stateCallback = stateCallback;
     this.audioSamplesReadyCallback = audioSamplesReadyCallback;
-    this.audioRecordDataCallback = audioRecordDataCallback;
     this.isAcousticEchoCancelerSupported = isAcousticEchoCancelerSupported;
     this.isNoiseSuppressorSupported = isNoiseSuppressorSupported;
     Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo());
@@ -326,7 +315,7 @@ private int initRecording(int sampleRate, int channels) {
       // Use the AudioRecord.Builder class on Android M (23) and above.
       // Throws IllegalArgumentException.
       audioRecord = createAudioRecordOnMOrHigher(
-          audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes);
+        audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes);
       audioSourceMatchesRecordingSessionRef.set(null);
       if (preferredDevice != null) {
         setPreferredDevice(preferredDevice);
@@ -335,7 +324,7 @@ private int initRecording(int sampleRate, int channels) {
       // Use the old AudioRecord constructor for API levels below 23.
       // Throws UnsupportedOperationException.
       audioRecord = createAudioRecordOnLowerThanM(
-          audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes);
+        audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes);
       audioSourceMatchesRecordingSessionRef.set(null);
     }
    } catch (IllegalArgumentException | UnsupportedOperationException e) {
@@ -355,12 +344,12 @@ private int initRecording(int sampleRate, int channels) {
     // Check number of active recording sessions. Should be zero but we have seen conflict cases
     // and adding a log for it can help us figure out details about conflicting sessions.
     final int numActiveRecordingSessions =
-        logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */);
+      logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */);
     if (numActiveRecordingSessions != 0) {
       // Log the conflict as a warning since initialization did in fact succeed. Most likely, the
       // upcoming call to startRecording() will fail under these conditions.
       Logging.w(
-          TAG, "Potential microphone conflict. Active sessions: " + numActiveRecordingSessions);
+        TAG, "Potential microphone conflict. Active sessions: " + numActiveRecordingSessions);
     }
     return framesPerBuffer;
   }
@@ -373,7 +362,7 @@ private int initRecording(int sampleRate, int channels) {
   @TargetApi(Build.VERSION_CODES.M)
   void setPreferredDevice(@Nullable AudioDeviceInfo preferredDevice) {
     Logging.d(
-        TAG, "setPreferredDevice " + (preferredDevice != null ? preferredDevice.getId() : null));
+      TAG, "setPreferredDevice " + (preferredDevice != null ? preferredDevice.getId() : null));
     this.preferredDevice = preferredDevice;
     if (audioRecord != null) {
       if (!audioRecord.setPreferredDevice(preferredDevice)) {
@@ -391,13 +380,13 @@ private boolean startRecording() {
       audioRecord.startRecording();
     } catch (IllegalStateException e) {
       reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION,
-          "AudioRecord.startRecording failed: " + e.getMessage());
+        "AudioRecord.startRecording failed: " + e.getMessage());
       return false;
     }
     if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
       reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH,
-          "AudioRecord.startRecording failed - incorrect state: "
-              + audioRecord.getRecordingState());
+        "AudioRecord.startRecording failed - incorrect state: "
+          + audioRecord.getRecordingState());
       return false;
     }
     audioThread = new AudioRecordThread("AudioRecordJavaThread");
@@ -430,40 +419,40 @@ private boolean stopRecording() {

   @TargetApi(Build.VERSION_CODES.M)
   private static AudioRecord createAudioRecordOnMOrHigher(
-      int audioSource, int sampleRate, int channelConfig, int audioFormat, int bufferSizeInBytes) {
+    int audioSource, int sampleRate, int channelConfig, int audioFormat, int bufferSizeInBytes) {
     Logging.d(TAG, "createAudioRecordOnMOrHigher");
     return new AudioRecord.Builder()
-        .setAudioSource(audioSource)
-        .setAudioFormat(new AudioFormat.Builder()
-                            .setEncoding(audioFormat)
-                            .setSampleRate(sampleRate)
-                            .setChannelMask(channelConfig)
-                            .build())
-        .setBufferSizeInBytes(bufferSizeInBytes)
-        .build();
+      .setAudioSource(audioSource)
+      .setAudioFormat(new AudioFormat.Builder()
+        .setEncoding(audioFormat)
+        .setSampleRate(sampleRate)
+        .setChannelMask(channelConfig)
+        .build())
+      .setBufferSizeInBytes(bufferSizeInBytes)
+      .build();
   }

   private static AudioRecord createAudioRecordOnLowerThanM(
-      int audioSource, int sampleRate, int channelConfig, int audioFormat, int bufferSizeInBytes) {
+    int audioSource, int sampleRate, int channelConfig, int audioFormat, int bufferSizeInBytes) {
     Logging.d(TAG, "createAudioRecordOnLowerThanM");
     return new AudioRecord(audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes);
   }

   private void logMainParameters() {
     Logging.d(TAG,
-        "AudioRecord: "
-            + "session ID: " + audioRecord.getAudioSessionId() + ", "
-            + "channels: " + audioRecord.getChannelCount() + ", "
-            + "sample rate: " + audioRecord.getSampleRate());
+      "AudioRecord: "
+        + "session ID: " + audioRecord.getAudioSessionId() + ", "
+        + "channels: " + audioRecord.getChannelCount() + ", "
+        + "sample rate: " + audioRecord.getSampleRate());
   }

   @TargetApi(Build.VERSION_CODES.M)
   private void logMainParametersExtended() {
     if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
       Logging.d(TAG,
-          "AudioRecord: "
-              // The frame count of the native AudioRecord buffer.
-              + "buffer size in frames: " + audioRecord.getBufferSizeInFrames());
+        "AudioRecord: "
+          // The frame count of the native AudioRecord buffer.
+          + "buffer size in frames: " + audioRecord.getBufferSizeInFrames());
     }
   }

@@ -492,8 +481,8 @@ private int logRecordingConfigurations(AudioRecord audioRecord, boolean verifyAu
         // as its client parameters. If these do not match, recording might work but under invalid
         // conditions.
         audioSourceMatchesRecordingSessionRef.set(
-            verifyAudioConfig(audioRecord.getAudioSource(), audioRecord.getAudioSessionId(),
-                audioRecord.getFormat(), audioRecord.getRoutedDevice(), configs));
+          verifyAudioConfig(audioRecord.getAudioSource(), audioRecord.getAudioSessionId(),
+            audioRecord.getFormat(), audioRecord.getRoutedDevice(), configs));
       }
     }
     return numActiveRecordingSessions;
@@ -511,9 +500,9 @@ private int channelCountToConfiguration(int channels) {
   }

   private native void nativeCacheDirectBufferAddress(
-      long nativeAudioRecordJni, ByteBuffer byteBuffer);
+    long nativeAudioRecordJni, ByteBuffer byteBuffer);
   private native void nativeDataIsRecorded(
-      long nativeAudioRecordJni, int bytes, long captureTimestampNs);
+    long nativeAudioRecordJni, int bytes, long captureTimestampNs);

   // Sets all recorded samples to zero if `mute` is true, i.e., ensures that
   // the microphone is muted.
@@ -522,6 +511,18 @@ public void setMicrophoneMute(boolean mute) {
     microphoneMute = mute;
   }

+  // Sets whether NoiseSuppressor should be enabled or disabled.
+  // Returns true if the enabling was successful, otherwise false is returned (this is also the
+  // case if the NoiseSuppressor effect is not supported).
+  public boolean setNoiseSuppressorEnabled(boolean enabled) {
+    if (!WebRtcAudioEffects.isNoiseSuppressorSupported()) {
+      Logging.e(TAG, "Noise suppressor is not supported.");
+      return false;
+    }
+    Logging.w(TAG, "setNoiseSuppressorEnabled(" + enabled + ")");
+    return effects.toggleNS(enabled);
+  }
+
   // Releases the native AudioRecord resources.
   private void releaseAudioResources() {
     Logging.d(TAG, "releaseAudioResources");
@@ -542,7 +543,7 @@ private void reportWebRtcAudioRecordInitError(String errorMessage) {
   }

   private void reportWebRtcAudioRecordStartError(
-      AudioRecordStartErrorCode errorCode, String errorMessage) {
+    AudioRecordStartErrorCode errorCode, String errorMessage) {
     Logging.e(TAG, "Start recording error: " + errorCode + ". " + errorMessage);
" + errorMessage); WebRtcAudioUtils.logAudioState(TAG, context, audioManager); logRecordingConfigurations(audioRecord, false /* verifyAudioConfig */); @@ -617,7 +618,7 @@ private void scheduleLogRecordingConfigurationsTask(AudioRecord audioRecord) { @TargetApi(Build.VERSION_CODES.N) private static boolean logActiveRecordingConfigs( - int session, List configs) { + int session, List configs) { assertTrue(!configs.isEmpty()); final Iterator it = configs.iterator(); Logging.d(TAG, "AudioRecordingConfigurations: "); @@ -627,54 +628,54 @@ private static boolean logActiveRecordingConfigs( // The audio source selected by the client. final int audioSource = config.getClientAudioSource(); conf.append(" client audio source=") - .append(WebRtcAudioUtils.audioSourceToString(audioSource)) - .append(", client session id=") - .append(config.getClientAudioSessionId()) - // Compare with our own id (based on AudioRecord#getAudioSessionId()). - .append(" (") - .append(session) - .append(")") - .append("\n"); + .append(WebRtcAudioUtils.audioSourceToString(audioSource)) + .append(", client session id=") + .append(config.getClientAudioSessionId()) + // Compare with our own id (based on AudioRecord#getAudioSessionId()). + .append(" (") + .append(session) + .append(")") + .append("\n"); // Audio format at which audio is recorded on this Android device. Note that it may differ // from the client application recording format (see getClientFormat()). AudioFormat format = config.getFormat(); conf.append(" Device AudioFormat: ") - .append("channel count=") - .append(format.getChannelCount()) - .append(", channel index mask=") - .append(format.getChannelIndexMask()) - // Only AudioFormat#CHANNEL_IN_MONO is guaranteed to work on all devices. - .append(", channel mask=") - .append(WebRtcAudioUtils.channelMaskToString(format.getChannelMask())) - .append(", encoding=") - .append(WebRtcAudioUtils.audioEncodingToString(format.getEncoding())) - .append(", sample rate=") - .append(format.getSampleRate()) - .append("\n"); + .append("channel count=") + .append(format.getChannelCount()) + .append(", channel index mask=") + .append(format.getChannelIndexMask()) + // Only AudioFormat#CHANNEL_IN_MONO is guaranteed to work on all devices. + .append(", channel mask=") + .append(WebRtcAudioUtils.channelMaskToString(format.getChannelMask())) + .append(", encoding=") + .append(WebRtcAudioUtils.audioEncodingToString(format.getEncoding())) + .append(", sample rate=") + .append(format.getSampleRate()) + .append("\n"); // Audio format at which the client application is recording audio. format = config.getClientFormat(); conf.append(" Client AudioFormat: ") - .append("channel count=") - .append(format.getChannelCount()) - .append(", channel index mask=") - .append(format.getChannelIndexMask()) - // Only AudioFormat#CHANNEL_IN_MONO is guaranteed to work on all devices. - .append(", channel mask=") - .append(WebRtcAudioUtils.channelMaskToString(format.getChannelMask())) - .append(", encoding=") - .append(WebRtcAudioUtils.audioEncodingToString(format.getEncoding())) - .append(", sample rate=") - .append(format.getSampleRate()) - .append("\n"); + .append("channel count=") + .append(format.getChannelCount()) + .append(", channel index mask=") + .append(format.getChannelIndexMask()) + // Only AudioFormat#CHANNEL_IN_MONO is guaranteed to work on all devices. 
+ .append(", channel mask=") + .append(WebRtcAudioUtils.channelMaskToString(format.getChannelMask())) + .append(", encoding=") + .append(WebRtcAudioUtils.audioEncodingToString(format.getEncoding())) + .append(", sample rate=") + .append(format.getSampleRate()) + .append("\n"); // Audio input device used for this recording session. final AudioDeviceInfo device = config.getAudioDevice(); if (device != null) { assertTrue(device.isSource()); conf.append(" AudioDevice: ") - .append("type=") - .append(WebRtcAudioUtils.deviceTypeToString(device.getType())) - .append(", id=") - .append(device.getId()); + .append("type=") + .append(WebRtcAudioUtils.deviceTypeToString(device.getType())) + .append(", id=") + .append(device.getId()); } Logging.d(TAG, conf.toString()); } @@ -685,7 +686,7 @@ private static boolean logActiveRecordingConfigs( // configuration (same as AudioRecord's). @TargetApi(Build.VERSION_CODES.N) private static boolean verifyAudioConfig(int source, int session, AudioFormat format, - AudioDeviceInfo device, List configs) { + AudioDeviceInfo device, List configs) { assertTrue(!configs.isEmpty()); final Iterator it = configs.iterator(); while (it.hasNext()) { @@ -695,19 +696,19 @@ private static boolean verifyAudioConfig(int source, int session, AudioFormat fo continue; } if ((config.getClientAudioSource() == source) - && (config.getClientAudioSessionId() == session) - // Check the client format (should match the format of the AudioRecord instance). - && (config.getClientFormat().getEncoding() == format.getEncoding()) - && (config.getClientFormat().getSampleRate() == format.getSampleRate()) - && (config.getClientFormat().getChannelMask() == format.getChannelMask()) - && (config.getClientFormat().getChannelIndexMask() == format.getChannelIndexMask()) - // Ensure that the device format is properly configured. - && (config.getFormat().getEncoding() != AudioFormat.ENCODING_INVALID) - && (config.getFormat().getSampleRate() > 0) - // For the channel mask, either the position or index-based value must be valid. - && ((config.getFormat().getChannelMask() != AudioFormat.CHANNEL_INVALID) - || (config.getFormat().getChannelIndexMask() != AudioFormat.CHANNEL_INVALID)) - && checkDeviceMatch(configDevice, device)) { + && (config.getClientAudioSessionId() == session) + // Check the client format (should match the format of the AudioRecord instance). + && (config.getClientFormat().getEncoding() == format.getEncoding()) + && (config.getClientFormat().getSampleRate() == format.getSampleRate()) + && (config.getClientFormat().getChannelMask() == format.getChannelMask()) + && (config.getClientFormat().getChannelIndexMask() == format.getChannelIndexMask()) + // Ensure that the device format is properly configured. + && (config.getFormat().getEncoding() != AudioFormat.ENCODING_INVALID) + && (config.getFormat().getSampleRate() > 0) + // For the channel mask, either the position or index-based value must be valid. 
+ && ((config.getFormat().getChannelMask() != AudioFormat.CHANNEL_INVALID) + || (config.getFormat().getChannelIndexMask() != AudioFormat.CHANNEL_INVALID)) + && checkDeviceMatch(configDevice, device)) { Logging.d(TAG, "verifyAudioConfig: PASS"); return true; } @@ -746,9 +747,9 @@ static ScheduledExecutorService newDefaultScheduler() { public Thread newThread(Runnable r) { Thread thread = Executors.defaultThreadFactory().newThread(r); thread.setName(String.format("WebRtcAudioRecordScheduler-%s-%s", - nextSchedulerId.getAndIncrement(), nextThreadId.getAndIncrement())); + nextSchedulerId.getAndIncrement(), nextThreadId.getAndIncrement())); return thread; } }); } -} +} \ No newline at end of file diff --git a/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioTrack.java b/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioTrack.java index 2b34e3401..cf1358a86 100644 --- a/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioTrack.java +++ b/stream-webrtc-android/src/main/java/org/webrtc/audio/WebRtcAudioTrack.java @@ -154,13 +154,13 @@ public void stopThread() { @CalledByNative WebRtcAudioTrack(Context context, AudioManager audioManager) { this(context, audioManager, null /* audioAttributes */, null /* errorCallback */, - null /* stateCallback */, false /* useLowLatency */, true /* enableVolumeLogger */); + null /* stateCallback */, false /* useLowLatency */, true /* enableVolumeLogger */); } WebRtcAudioTrack(Context context, AudioManager audioManager, - @Nullable AudioAttributes audioAttributes, @Nullable AudioTrackErrorCallback errorCallback, - @Nullable AudioTrackStateCallback stateCallback, boolean useLowLatency, - boolean enableVolumeLogger) { + @Nullable AudioAttributes audioAttributes, @Nullable AudioTrackErrorCallback errorCallback, + @Nullable AudioTrackStateCallback stateCallback, boolean useLowLatency, + boolean enableVolumeLogger) { threadChecker.detachThread(); this.context = context; this.audioManager = audioManager; @@ -181,8 +181,8 @@ public void setNativeAudioTrack(long nativeAudioTrack) { private int initPlayout(int sampleRate, int channels, double bufferSizeFactor) { threadChecker.checkIsOnValidThread(); Logging.d(TAG, - "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels - + ", bufferSizeFactor=" + bufferSizeFactor + ")"); + "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels + + ", bufferSizeFactor=" + bufferSizeFactor + ")"); final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8); byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND)); Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity()); @@ -197,8 +197,8 @@ private int initPlayout(int sampleRate, int channels, double bufferSizeFactor) { // Note that this size doesn't guarantee a smooth playback under load. final int channelConfig = channelCountToConfiguration(channels); final int minBufferSizeInBytes = (int) (AudioTrack.getMinBufferSize(sampleRate, channelConfig, - AudioFormat.ENCODING_PCM_16BIT) - * bufferSizeFactor); + AudioFormat.ENCODING_PCM_16BIT) + * bufferSizeFactor); Logging.d(TAG, "minBufferSizeInBytes: " + minBufferSizeInBytes); // For the streaming mode, data must be written to the audio sink in // chunks of size (given by byteBuffer.capacity()) less than or equal @@ -230,7 +230,7 @@ private int initPlayout(int sampleRate, int channels, double bufferSizeFactor) { if (useLowLatency && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { // On API level 26 or higher, we can use a low latency mode. 
@@ -230,7 +230,7 @@ private int initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
       if (useLowLatency && Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
         // On API level 26 or higher, we can use a low latency mode.
         audioTrack = createAudioTrackOnOreoOrHigher(
-            sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes);
+          sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes);
       } else {
         // As we are on API level 21 or higher, it is possible to use a special AudioTrack
         // constructor that uses AudioAttributes and AudioFormat as input. It allows us to
@@ -238,7 +238,7 @@ private int initPlayout(int sampleRate, int channels, double bufferSizeFactor) {
         // and to allow certain platforms or routing policies to use this information for more
         // refined volume or routing decisions.
         audioTrack = createAudioTrackBeforeOreo(
-            sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes);
+          sampleRate, channelConfig, minBufferSizeInBytes, audioAttributes);
       }
     } catch (IllegalArgumentException e) {
       reportWebRtcAudioTrackInitError(e.getMessage());
@@ -279,13 +279,13 @@ private boolean startPlayout() {
       audioTrack.play();
     } catch (IllegalStateException e) {
       reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_EXCEPTION,
-          "AudioTrack.play failed: " + e.getMessage());
+        "AudioTrack.play failed: " + e.getMessage());
       releaseAudioResources();
       return false;
     }
     if (audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
       reportWebRtcAudioTrackStartError(AudioTrackStartErrorCode.AUDIO_TRACK_START_STATE_MISMATCH,
-          "AudioTrack.play failed - incorrect state :" + audioTrack.getPlayState());
+        "AudioTrack.play failed - incorrect state :" + audioTrack.getPlayState());
       releaseAudioResources();
       return false;
     }
@@ -375,18 +375,18 @@ private int GetPlayoutUnderrunCount() {
 
   private void logMainParameters() {
     Logging.d(TAG,
-        "AudioTrack: "
-            + "session ID: " + audioTrack.getAudioSessionId() + ", "
-            + "channels: " + audioTrack.getChannelCount() + ", "
-            + "sample rate: " + audioTrack.getSampleRate()
-            + ", "
-            // Gain (>=1.0) expressed as linear multiplier on sample values.
-            + "max gain: " + AudioTrack.getMaxVolume());
+      "AudioTrack: "
+        + "session ID: " + audioTrack.getAudioSessionId() + ", "
+        + "channels: " + audioTrack.getChannelCount() + ", "
+        + "sample rate: " + audioTrack.getSampleRate()
+        + ", "
+        // Gain (>=1.0) expressed as linear multiplier on sample values.
+        + "max gain: " + AudioTrack.getMaxVolume());
   }
 
   private static void logNativeOutputSampleRate(int requestedSampleRateInHz) {
     final int nativeOutputSampleRate =
-        AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_VOICE_CALL);
+      AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_VOICE_CALL);
     Logging.d(TAG, "nativeOutputSampleRate: " + nativeOutputSampleRate);
     if (requestedSampleRateInHz != nativeOutputSampleRate) {
       Logging.w(TAG, "Unable to use fast mode since requested sample rate is not native");
@@ -395,9 +395,9 @@ private static void logNativeOutputSampleRate(int requestedSampleRateInHz) {
 
   private static AudioAttributes getAudioAttributes(@Nullable AudioAttributes overrideAttributes) {
     AudioAttributes.Builder attributesBuilder =
-        new AudioAttributes.Builder()
-            .setUsage(DEFAULT_USAGE)
-            .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH);
+      new AudioAttributes.Builder()
+        .setUsage(DEFAULT_USAGE)
+        .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH);
 
     if (overrideAttributes != null) {
       if (overrideAttributes.getUsage() != AudioAttributes.USAGE_UNKNOWN) {
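Reviewer note: the getAudioAttributes() hunk above shows the defaults (DEFAULT_USAGE plus CONTENT_TYPE_SPEECH) and the start of the override handling. A hedged sketch of that merge policy, assuming DEFAULT_USAGE is USAGE_VOICE_COMMUNICATION and that the content type falls back the same way the usage does (the content-type branch is outside this hunk, so it is an assumption here); AttributeMerge is a hypothetical helper.

```java
import android.media.AudioAttributes;

final class AttributeMerge {
  // Fields left UNKNOWN in the override fall back to the VoIP defaults.
  static AudioAttributes merge(AudioAttributes override) {
    AudioAttributes.Builder builder = new AudioAttributes.Builder()
        .setUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION) // assumed DEFAULT_USAGE
        .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH);
    if (override != null) {
      if (override.getUsage() != AudioAttributes.USAGE_UNKNOWN) {
        builder.setUsage(override.getUsage());
      }
      if (override.getContentType() != AudioAttributes.CONTENT_TYPE_UNKNOWN) {
        builder.setContentType(override.getContentType());
      }
    }
    return builder.build();
  }
}
```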
@@ -420,18 +420,18 @@ private static AudioAttributes getAudioAttributes(@Nullable AudioAttributes over
   // It allows certain platforms or routing policies to use this information for more
   // refined volume or routing decisions.
   private static AudioTrack createAudioTrackBeforeOreo(int sampleRateInHz, int channelConfig,
-      int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) {
+    int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) {
     Logging.d(TAG, "createAudioTrackBeforeOreo");
     logNativeOutputSampleRate(sampleRateInHz);
 
     // Create an audio track where the audio usage is for VoIP and the content type is speech.
     return new AudioTrack(getAudioAttributes(overrideAttributes),
-        new AudioFormat.Builder()
-            .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
-            .setSampleRate(sampleRateInHz)
-            .setChannelMask(channelConfig)
-            .build(),
-        bufferSizeInBytes, AudioTrack.MODE_STREAM, AudioManager.AUDIO_SESSION_ID_GENERATE);
+      new AudioFormat.Builder()
+        .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
+        .setSampleRate(sampleRateInHz)
+        .setChannelMask(channelConfig)
+        .build(),
+      bufferSizeInBytes, AudioTrack.MODE_STREAM, AudioManager.AUDIO_SESSION_ID_GENERATE);
   }
 
   // Creates an AudioTrack instance using AudioAttributes and AudioFormat as input.
@@ -441,37 +441,37 @@ private static AudioTrack createAudioTrackBeforeOreo(int sampleRateInHz, int cha
   // The behavior of the low-latency mode may be device dependent, use at your own risk.
   @TargetApi(Build.VERSION_CODES.O)
   private static AudioTrack createAudioTrackOnOreoOrHigher(int sampleRateInHz, int channelConfig,
-      int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) {
+    int bufferSizeInBytes, @Nullable AudioAttributes overrideAttributes) {
     Logging.d(TAG, "createAudioTrackOnOreoOrHigher");
     logNativeOutputSampleRate(sampleRateInHz);
     // Create an audio track where the audio usage is for VoIP and the content type is speech.
     return new AudioTrack.Builder()
-        .setAudioAttributes(getAudioAttributes(overrideAttributes))
-        .setAudioFormat(new AudioFormat.Builder()
-            .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
-            .setSampleRate(sampleRateInHz)
-            .setChannelMask(channelConfig)
-            .build())
-        .setBufferSizeInBytes(bufferSizeInBytes)
-        .setPerformanceMode(AudioTrack.PERFORMANCE_MODE_LOW_LATENCY)
-        .setTransferMode(AudioTrack.MODE_STREAM)
-        .setSessionId(AudioManager.AUDIO_SESSION_ID_GENERATE)
-        .build();
+      .setAudioAttributes(getAudioAttributes(overrideAttributes))
+      .setAudioFormat(new AudioFormat.Builder()
+        .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
+        .setSampleRate(sampleRateInHz)
+        .setChannelMask(channelConfig)
+        .build())
+      .setBufferSizeInBytes(bufferSizeInBytes)
+      .setPerformanceMode(AudioTrack.PERFORMANCE_MODE_LOW_LATENCY)
+      .setTransferMode(AudioTrack.MODE_STREAM)
+      .setSessionId(AudioManager.AUDIO_SESSION_ID_GENERATE)
+      .build();
   }
 
   @TargetApi(Build.VERSION_CODES.Q)
   private static AudioAttributes.Builder applyAttributesOnQOrHigher(
-      AudioAttributes.Builder builder, AudioAttributes overrideAttributes) {
+    AudioAttributes.Builder builder, AudioAttributes overrideAttributes) {
     return builder.setAllowedCapturePolicy(overrideAttributes.getAllowedCapturePolicy());
   }
 
   private void logBufferSizeInFrames() {
     if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
       Logging.d(TAG,
-          "AudioTrack: "
-          // The effective size of the AudioTrack buffer that the app writes to.
-          + "buffer size in frames: " + audioTrack.getBufferSizeInFrames());
+        "AudioTrack: "
+        // The effective size of the AudioTrack buffer that the app writes to.
+        + "buffer size in frames: " + audioTrack.getBufferSizeInFrames());
     }
   }
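Reviewer note: the createAudioTrackOnOreoOrHigher() hunk requests PERFORMANCE_MODE_LOW_LATENCY, and logNativeOutputSampleRate() above warns "Unable to use fast mode since requested sample rate is not native". A minimal sketch of that precondition, under the assumption (implied by the warning, not stated by this patch) that the fast output path generally engages only at the device's native rate; FastPathCheck is illustrative.

```java
import android.media.AudioManager;
import android.media.AudioTrack;

final class FastPathCheck {
  // True if the requested rate matches the native output rate for the
  // voice-call stream, which is what the warning above is testing for.
  static boolean canUseFastMode(int requestedSampleRateInHz) {
    int nativeRate = AudioTrack.getNativeOutputSampleRate(AudioManager.STREAM_VOICE_CALL);
    return requestedSampleRateInHz == nativeRate;
  }
}
```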
+ + "buffer size in frames: " + audioTrack.getBufferSizeInFrames()); } } @@ -491,9 +491,9 @@ private int getInitialBufferSizeInFrames() { private void logBufferCapacityInFrames() { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) { Logging.d(TAG, - "AudioTrack: " - // Maximum size of the AudioTrack buffer in frames. - + "buffer capacity in frames: " + audioTrack.getBufferCapacityInFrames()); + "AudioTrack: " + // Maximum size of the AudioTrack buffer in frames. + + "buffer capacity in frames: " + audioTrack.getBufferCapacityInFrames()); } } @@ -526,7 +526,7 @@ private int channelCountToConfiguration(int channels) { } private static native void nativeCacheDirectBufferAddress( - long nativeAudioTrackJni, ByteBuffer byteBuffer); + long nativeAudioTrackJni, ByteBuffer byteBuffer); private static native void nativeGetPlayoutData(long nativeAudioTrackJni, int bytes); // Sets all samples to be played out to zero if `mute` is true, i.e., @@ -554,7 +554,7 @@ private void reportWebRtcAudioTrackInitError(String errorMessage) { } private void reportWebRtcAudioTrackStartError( - AudioTrackStartErrorCode errorCode, String errorMessage) { + AudioTrackStartErrorCode errorCode, String errorMessage) { Logging.e(TAG, "Start playout error: " + errorCode + ". " + errorMessage); WebRtcAudioUtils.logAudioState(TAG, context, audioManager); if (errorCallback != null) { @@ -582,4 +582,4 @@ private void doAudioTrackStateCallback(int audioState) { } } } -} +} \ No newline at end of file