diff --git a/AUTHORS b/AUTHORS new file mode 100644 index 0000000..ad7bdbd --- /dev/null +++ b/AUTHORS @@ -0,0 +1,133 @@ +# Names should be added to this file like so: +# Name or Organization + +Adam Fedor +Akshay Shah +Alex Henrie +Alexander Brauckmann +Alexandre Gouaillard +Andrew MacDonald +Andrey Efremov +Anil Kumar +Ben Strong +Bob Withers +Bridger Maxwell +Chris Tserng +Christophe Dumez +Cody Barnes +Colin Plumb +Cyril Lashkevich +David Porter +Dax Booysen +Danail Kirov +Dharmesh Chauhan +Dirk-Jan C. Binnema +Dmitry Lizin +Eric Rescorla, RTFM Inc. +Frederik Riedel, Frogg GmbH +Giji Gangadharan +Graham Yoakum +Gustavo Garcia +Hugues Ekra +Jake Hilton +James H. Brown +Jan Kalab +Jens Nielsen +Jiawei Ou +Jie Mao +Jose Antonio Olivera Ortega +Kiran Thind +Luke Weber +Maksim Khobat +Mallikarjuna Rao V +Manish Jethani +Martin Storsjo +Matthias Liebig +Maxim Potapov +Michael Iedema +Mike Gilbert +Mo Zanaty +Pali Rohar +Paul Kapustin +Philipp Hancke +Peng Yu +Rafael Lopez Diez +Ralph Giles +Riku Voipio +Robert Bares +Robert Nagy +Ryan Yoakum +Satender Saroha +Sarah Thompson +Saul Kravitz +Silviu Caragea +Stefan Gula +Steve Reid +Tarun Chawla +Trevor Hayes +Uladzislau Susha +Vladimir Beloborodov +Vicken Simonian +Victor Costan +Xiaohong Xu +Xiaolei Yu +Yura Yaroshevich +Yuriy Pavlyshak +Hans Knoechel +Korniltsev Anatoly +Todd Wong +Sergio Garcia Murillo +Maxim Pavlov +Yusuke Suzuki +Piasy Xu +Tomas Popela +Jan Grulich +Jiwon Kim +Eike Rathke +Michel Promonet +Min Wang +Ramprakash Jelari +CZ Theng +Miguel Paris +Raman Budny +Stephan Hartmann +Lennart Grahl + +&yet LLC <*@andyet.com> +8x8 Inc. <*@sip-communicator.org> +8x8 Inc. <*@8x8.com> +Agora IO <*@agora.io> +ARM Holdings <*@arm.com> +BroadSoft Inc. <*@broadsoft.com> +Facebook Inc. <*@fb.com> +Google Inc. <*@google.com> +HyperConnect Inc. <*@hpcnt.com> +Life On Air Inc. 
<*@lifeonair.com> +Intel Corporation <*@intel.com> +Microsoft Corporation <*@microsoft.com> +MIPS Technologies <*@mips.com> +Mozilla Foundation <*@mozilla.com> +NVIDIA Corporation <*@nvidia.com> +Opera Software ASA <*@opera.com> +Optical Tone Ltd <*@opticaltone.com> +Pengutronix e.K. <*@pengutronix.de> +RingCentral, Inc. <*@ringcentral.com> +Signal Messenger, LLC <*@signal.org> +Sinch AB <*@sinch.com> +struktur AG <*@struktur.de> +Telenor Digital AS <*@telenor.com> +Temasys Communications <*@temasys.io> +The Chromium Authors <*@chromium.org> +The WebRTC Authors <*@webrtc.org> +Twilio, Inc. <*@twilio.com> +Videxio AS <*@videxio.com> +Vidyo, Inc. <*@vidyo.com> +Vonage Holdings Corp. <*@vonage.com> +Wire Swiss GmbH <*@wire.com> +Vewd Software AS <*@vewd.com> +Highfive, Inc. <*@highfive.com> +CoSMo Software Consulting, Pte Ltd <*@cosmosoftware.io> +Tuple, LLC <*@tuple.app> +Videona Socialmedia <*@videona.com> +Threema GmbH <*@threema.ch> +Dennis Angelo diff --git a/BUILD.gn b/BUILD.gn new file mode 100644 index 0000000..9d834e5 --- /dev/null +++ b/BUILD.gn @@ -0,0 +1,757 @@ +# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +# This is the root build file for GN. GN will start processing by loading this +# file, and recursively load all dependencies until all dependencies are either +# resolved or known not to exist (which will cause the build to fail). So if +# you add a new build file, there must be some path of dependencies from this +# file to your new one or GN won't know about it. 
+ +import("//build/config/linux/pkg_config.gni") +import("//build/config/sanitizers/sanitizers.gni") +import("webrtc.gni") +if (rtc_enable_protobuf) { + import("//third_party/protobuf/proto_library.gni") +} +if (is_android) { + import("//build/config/android/config.gni") + import("//build/config/android/rules.gni") +} + +if (!build_with_chromium) { + # This target should (transitively) cause everything to be built; if you run + # 'ninja default' and then 'ninja all', the second build should do no work. + group("default") { + testonly = true + deps = [ ":webrtc" ] + if (rtc_build_examples) { + deps += [ "examples" ] + } + if (rtc_build_tools) { + deps += [ "rtc_tools" ] + } + if (rtc_include_tests) { + deps += [ + ":rtc_unittests", + ":slow_tests", + ":video_engine_tests", + ":voip_unittests", + ":webrtc_nonparallel_tests", + ":webrtc_perf_tests", + "common_audio:common_audio_unittests", + "common_video:common_video_unittests", + "examples:examples_unittests", + "media:rtc_media_unittests", + "modules:modules_tests", + "modules:modules_unittests", + "modules/audio_coding:audio_coding_tests", + "modules/audio_processing:audio_processing_tests", + "modules/remote_bitrate_estimator:rtp_to_text", + "modules/rtp_rtcp:test_packet_masks_metrics", + "modules/video_capture:video_capture_internal_impl", + "pc:peerconnection_unittests", + "pc:rtc_pc_unittests", + "rtc_tools:rtp_generator", + "rtc_tools:video_replay", + "stats:rtc_stats_unittests", + "system_wrappers:system_wrappers_unittests", + "test", + "video:screenshare_loopback", + "video:sv_loopback", + "video:video_loopback", + ] + if (!is_asan) { + # Do not build :webrtc_lib_link_test because lld complains on some OS + # (e.g. when target_os = "mac") when is_asan=true. For more details, + # see bugs.webrtc.org/11027#c5. 
+ deps += [ ":webrtc_lib_link_test" ] + } + if (is_android) { + deps += [ + "examples:android_examples_junit_tests", + "sdk/android:android_instrumentation_test_apk", + "sdk/android:android_sdk_junit_tests", + ] + } else { + deps += [ "modules/video_capture:video_capture_tests" ] + } + if (rtc_enable_protobuf) { + deps += [ + "audio:low_bandwidth_audio_test", + "logging:rtc_event_log_rtp_dump", + "tools_webrtc/perf:webrtc_dashboard_upload", + ] + } + } + } +} + +# Abseil Flags by default doesn't register command line flags on mobile +# platforms, WebRTC tests requires them (e.g. on simualtors) so this +# config will be applied to testonly targets globally (see webrtc.gni). +config("absl_flags_configs") { + defines = [ "ABSL_FLAGS_STRIP_NAMES=0" ] +} + +config("library_impl_config") { + # Build targets that contain WebRTC implementation need this macro to + # be defined in order to correctly export symbols when is_component_build + # is true. + # For more info see: rtc_base/build/rtc_export.h. + defines = [ "WEBRTC_LIBRARY_IMPL" ] +} + +# Contains the defines and includes in common.gypi that are duplicated both as +# target_defaults and direct_dependent_settings. +config("common_inherited_config") { + defines = [] + cflags = [] + ldflags = [] + + if (rtc_enable_symbol_export || is_component_build) { + defines = [ "WEBRTC_ENABLE_SYMBOL_EXPORT" ] + } + + if (build_with_mozilla) { + defines += [ "WEBRTC_MOZILLA_BUILD" ] + } + + if (!rtc_builtin_ssl_root_certificates) { + defines += [ "WEBRTC_EXCLUDE_BUILT_IN_SSL_ROOT_CERTS" ] + } + + if (rtc_disable_check_msg) { + defines += [ "RTC_DISABLE_CHECK_MSG" ] + } + + # Some tests need to declare their own trace event handlers. If this define is + # not set, the first time TRACE_EVENT_* is called it will store the return + # value for the current handler in an static variable, so that subsequent + # changes to the handler for that TRACE_EVENT_* will be ignored. 
+ # So when tests are included, we set this define, making it possible to use + # different event handlers in different tests. + if (rtc_include_tests) { + defines += [ "WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=1" ] + } else { + defines += [ "WEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=0" ] + } + if (build_with_chromium) { + defines += [ "WEBRTC_CHROMIUM_BUILD" ] + include_dirs = [ + # The overrides must be included first as that is the mechanism for + # selecting the override headers in Chromium. + "../webrtc_overrides", + + # Allow includes to be prefixed with webrtc/ in case it is not an + # immediate subdirectory of the top-level. + ".", + + # Just like the root WebRTC directory is added to include path, the + # corresponding directory tree with generated files needs to be added too. + # Note: this path does not change depending on the current target, e.g. + # it is always "//gen/third_party/webrtc" when building with Chromium. + # See also: http://cs.chromium.org/?q=%5C"default_include_dirs + # https://gn.googlesource.com/gn/+/master/docs/reference.md#target_gen_dir + target_gen_dir, + ] + } + if (build_with_owt) { + include_dirs = [ + # The overrides must be included first as that is the mechanism for + # selecting the override headers in Chromium. + #"../webrtc_overrides", + + # Allow includes to be prefixed with webrtc/ in case it is not an + # immediate subdirectory of the top-level. + ".", + + # Just like the root WebRTC directory is added to include path, the + # corresponding directory tree with generated files needs to be added too. + # Note: this path does not change depending on the current target, e.g. + # it is always "//gen/third_party/webrtc" when building with Chromium. 
+ # See also: http://cs.chromium.org/?q=%5C"default_include_dirs + # https://gn.googlesource.com/gn/+/master/docs/reference.md#target_gen_dir + target_gen_dir, + ] + } + if (is_posix || is_fuchsia) { + defines += [ "WEBRTC_POSIX" ] + } + if (is_ios) { + defines += [ + "WEBRTC_MAC", + "WEBRTC_IOS", + ] + } + if (is_linux) { + defines += [ "WEBRTC_LINUX" ] + } + if (is_mac) { + defines += [ "WEBRTC_MAC" ] + } + if (is_fuchsia) { + defines += [ "WEBRTC_FUCHSIA" ] + } + if (is_win) { + defines += [ "WEBRTC_WIN" ] + } + if (is_android) { + defines += [ + "WEBRTC_LINUX", + "WEBRTC_ANDROID", + ] + + if (build_with_mozilla) { + defines += [ "WEBRTC_ANDROID_OPENSLES" ] + } + } + if (is_chromeos) { + defines += [ "CHROMEOS" ] + } + + if (rtc_sanitize_coverage != "") { + assert(is_clang, "sanitizer coverage requires clang") + cflags += [ "-fsanitize-coverage=${rtc_sanitize_coverage}" ] + ldflags += [ "-fsanitize-coverage=${rtc_sanitize_coverage}" ] + } + + if (is_ubsan) { + cflags += [ "-fsanitize=float-cast-overflow" ] + } + + if (!rtc_use_h265) { + defines += [ "DISABLE_H265" ] + } +} + +# TODO(bugs.webrtc.org/9693): Remove the possibility to suppress this warning +# as soon as WebRTC compiles without it. +config("no_exit_time_destructors") { + if (is_clang) { + cflags = [ "-Wno-exit-time-destructors" ] + } +} + +# TODO(bugs.webrtc.org/9693): Remove the possibility to suppress this warning +# as soon as WebRTC compiles without it. +config("no_global_constructors") { + if (is_clang) { + cflags = [ "-Wno-global-constructors" ] + } +} + +config("rtc_prod_config") { + # Ideally, WebRTC production code (but not test code) should have these flags. 
+ if (is_clang) { + cflags = [ + "-Wexit-time-destructors", + "-Wglobal-constructors", + ] + } +} + +config("common_config") { + cflags = [] + cflags_c = [] + cflags_cc = [] + cflags_objc = [] + defines = [] + + if (rtc_enable_protobuf) { + defines += [ "WEBRTC_ENABLE_PROTOBUF=1" ] + } else { + defines += [ "WEBRTC_ENABLE_PROTOBUF=0" ] + } + + if (rtc_include_internal_audio_device) { + defines += [ "WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE" ] + } + + if (rtc_libvpx_build_vp9) { + defines += [ "RTC_ENABLE_VP9" ] + } + + if (rtc_enable_sctp) { + defines += [ "HAVE_SCTP" ] + } + + if (rtc_enable_external_auth) { + defines += [ "ENABLE_EXTERNAL_AUTH" ] + } + + if (rtc_use_h264) { + defines += [ "WEBRTC_USE_H264" ] + } + + if (rtc_use_absl_mutex) { + defines += [ "WEBRTC_ABSL_MUTEX" ] + } + + if (rtc_disable_logging) { + defines += [ "RTC_DISABLE_LOGGING" ] + } + + if (rtc_disable_trace_events) { + defines += [ "RTC_DISABLE_TRACE_EVENTS" ] + } + + if (rtc_disable_metrics) { + defines += [ "RTC_DISABLE_METRICS" ] + } + + if (rtc_exclude_transient_suppressor) { + defines += [ "WEBRTC_EXCLUDE_TRANSIENT_SUPPRESSOR" ] + } + + if (rtc_exclude_audio_processing_module) { + defines += [ "WEBRTC_EXCLUDE_AUDIO_PROCESSING_MODULE" ] + } + + cflags = [] + + if (build_with_chromium || build_with_owt) { + defines += [ + # NOTICE: Since common_inherited_config is used in public_configs for our + # targets, there's no point including the defines in that config here. + # TODO(kjellander): Cleanup unused ones and move defines closer to the + # source when webrtc:4256 is completed. + "HAVE_WEBRTC_VIDEO", + "LOGGING_INSIDE_WEBRTC", + ] + } else { + if (is_posix || is_fuchsia) { + cflags_c += [ + # TODO(bugs.webrtc.org/9029): enable commented compiler flags. + # Some of these flags should also be added to cflags_objc. + + # "-Wextra", (used when building C++ but not when building C) + # "-Wmissing-prototypes", (C/Obj-C only) + # "-Wmissing-declarations", (ensure this is always used C/C++, etc..) 
+ "-Wstrict-prototypes", + + # "-Wpointer-arith", (ensure this is always used C/C++, etc..) + # "-Wbad-function-cast", (C/Obj-C only) + # "-Wnested-externs", (C/Obj-C only) + ] + cflags_objc += [ "-Wstrict-prototypes" ] + cflags_cc = [ + "-Wnon-virtual-dtor", + + # This is enabled for clang; enable for gcc as well. + "-Woverloaded-virtual", + ] + } + + if (is_clang) { + cflags += [ + "-Wc++11-narrowing", + "-Wimplicit-fallthrough", + "-Wthread-safety", + "-Winconsistent-missing-override", + "-Wundef", + ] + + # use_xcode_clang only refers to the iOS toolchain, host binaries use + # chromium's clang always. + if (!is_nacl && + (!use_xcode_clang || current_toolchain == host_toolchain)) { + # Flags NaCl (Clang 3.7) and Xcode 7.3 (Clang clang-703.0.31) do not + # recognize. + cflags += [ "-Wunused-lambda-capture" ] + } + } + + if (is_win && !is_clang) { + # MSVC warning suppressions (needed to use Abseil). + # TODO(bugs.webrtc.org/9274): Remove these warnings as soon as MSVC allows + # external headers warning suppression (or fix them upstream). 
+ cflags += [ "/wd4702" ] # unreachable code + + # MSVC 2019 warning suppressions for C++17 compiling + cflags += + [ "/wd5041" ] # out-of-line definition for constexpr static data + # member is not needed and is deprecated in C++17 + } + } + + if (current_cpu == "arm64") { + defines += [ "WEBRTC_ARCH_ARM64" ] + defines += [ "WEBRTC_HAS_NEON" ] + } + + if (current_cpu == "arm") { + defines += [ "WEBRTC_ARCH_ARM" ] + if (arm_version >= 7) { + defines += [ "WEBRTC_ARCH_ARM_V7" ] + if (arm_use_neon) { + defines += [ "WEBRTC_HAS_NEON" ] + } + } + } + + if (current_cpu == "mipsel") { + defines += [ "MIPS32_LE" ] + if (mips_float_abi == "hard") { + defines += [ "MIPS_FPU_LE" ] + } + if (mips_arch_variant == "r2") { + defines += [ "MIPS32_R2_LE" ] + } + if (mips_dsp_rev == 1) { + defines += [ "MIPS_DSP_R1_LE" ] + } else if (mips_dsp_rev == 2) { + defines += [ + "MIPS_DSP_R1_LE", + "MIPS_DSP_R2_LE", + ] + } + } + + if (is_android && !is_clang) { + # The Android NDK doesn"t provide optimized versions of these + # functions. Ensure they are disabled for all compilers. + cflags += [ + "-fno-builtin-cos", + "-fno-builtin-sin", + "-fno-builtin-cosf", + "-fno-builtin-sinf", + ] + } + + if (use_fuzzing_engine && optimize_for_fuzzing) { + # Used in Chromium's overrides to disable logging + defines += [ "WEBRTC_UNSAFE_FUZZER_MODE" ] + } + + if (!build_with_chromium && rtc_win_undef_unicode) { + cflags += [ + "/UUNICODE", + "/U_UNICODE", + ] + } +} + +config("common_objc") { + frameworks = [ "Foundation.framework" ] + + if (rtc_use_metal_rendering) { + defines = [ "RTC_SUPPORTS_METAL" ] + } +} + +if (!build_with_chromium) { + # Target to build all the WebRTC production code. + rtc_static_library("webrtc") { + # Only the root target and the test should depend on this. 
+ visibility = [ + ".:default", + ":webrtc_lib_link_test", + ] + + if (build_with_owt) { + visibility += [ "//talk/owt" ] + } + sources = [] + complete_static_lib = true + suppressed_configs += [ "//build/config/compiler:thin_archive" ] + defines = [] + + deps = [ + ":webrtc_common", + "api:create_peerconnection_factory", + "api:libjingle_peerconnection_api", + "api:rtc_error", + "api:transport_api", + "api/crypto", + "api/rtc_event_log:rtc_event_log_factory", + "api/task_queue", + "api/task_queue:default_task_queue_factory", + "audio", + "call", + "common_audio", + "common_video", + "logging:rtc_event_log_api", + "media", + "modules", + "modules/video_capture:video_capture_internal_impl", + "p2p:rtc_p2p", + "pc:libjingle_peerconnection", + "pc:peerconnection", + "pc:rtc_pc", + "pc:rtc_pc_base", + "rtc_base", + "sdk", + "video", + ] + + if (rtc_include_builtin_audio_codecs) { + deps += [ + "api/audio_codecs:builtin_audio_decoder_factory", + "api/audio_codecs:builtin_audio_encoder_factory", + ] + } + + if (rtc_include_builtin_video_codecs) { + deps += [ + "api/video_codecs:builtin_video_decoder_factory", + "api/video_codecs:builtin_video_encoder_factory", + ] + } + + if (build_with_mozilla) { + deps += [ + "api/video:video_frame", + "api/video:video_rtp_headers", + ] + } else { + deps += [ + "api", + "logging", + "p2p", + "pc", + "stats", + ] + } + + if (rtc_enable_protobuf) { + deps += [ "logging:rtc_event_log_proto" ] + } + } + + if (rtc_include_tests && !is_asan) { + rtc_executable("webrtc_lib_link_test") { + testonly = true + + sources = [ "webrtc_lib_link_test.cc" ] + deps = [ + # NOTE: Don't add deps here. If this test fails to link, it means you + # need to add stuff to the webrtc static lib target above. + ":webrtc", + ] + } + } +} + +rtc_source_set("webrtc_common") { + # Client code SHOULD NOT USE THIS TARGET, but for now it needs to be public + # because there exists client code that uses it. 
+ # TODO(bugs.webrtc.org/9808): Move to private visibility as soon as that + # client code gets updated. + visibility = [ "*" ] + sources = [ "common_types.h" ] +} + +if (use_libfuzzer || use_afl) { + # This target is only here for gn to discover fuzzer build targets under + # webrtc/test/fuzzers/. + group("webrtc_fuzzers_dummy") { + testonly = true + deps = [ "test/fuzzers:webrtc_fuzzer_main" ] + } +} + +if (rtc_include_tests) { + rtc_test("rtc_unittests") { + testonly = true + + deps = [ + ":webrtc_common", + "api:compile_all_headers", + "api:rtc_api_unittests", + "api/audio/test:audio_api_unittests", + "api/audio_codecs/test:audio_codecs_api_unittests", + "api/transport:stun_unittest", + "api/video/test:rtc_api_video_unittests", + "api/video_codecs/test:video_codecs_api_unittests", + "call:fake_network_pipe_unittests", + "p2p:libstunprober_unittests", + "p2p:rtc_p2p_unittests", + "rtc_base:rtc_base_approved_unittests", + "rtc_base:rtc_base_unittests", + "rtc_base:rtc_json_unittests", + "rtc_base:rtc_numerics_unittests", + "rtc_base:rtc_operations_chain_unittests", + "rtc_base:rtc_task_queue_unittests", + "rtc_base:sigslot_unittest", + "rtc_base:weak_ptr_unittests", + "rtc_base/experiments:experiments_unittests", + "rtc_base/synchronization:sequence_checker_unittests", + "rtc_base/task_utils:pending_task_safety_flag_unittests", + "rtc_base/task_utils:to_queued_task_unittests", + "sdk:sdk_tests", + "test:rtp_test_utils", + "test:test_main", + "test/network:network_emulation_unittests", + ] + + if (rtc_enable_protobuf) { + deps += [ "logging:rtc_event_log_tests" ] + } + + if (is_android) { + # Do not use Chromium's launcher. native_unittests defines its own JNI_OnLoad. 
+ use_default_launcher = false + + deps += [ + "sdk/android:native_unittests", + "sdk/android:native_unittests_java", + "//testing/android/native_test:native_test_support", + ] + shard_timeout = 900 + } + + if (is_ios || is_mac) { + deps += [ "sdk:rtc_unittests_objc" ] + } + } + + rtc_test("benchmarks") { + testonly = true + deps = [ + "rtc_base/synchronization:mutex_benchmark", + "test:benchmark_main", + ] + } + + # This runs tests that must run in real time and therefore can take some + # time to execute. They are in a separate executable to avoid making the + # regular unittest suite too slow to run frequently. + rtc_test("slow_tests") { + testonly = true + deps = [ + "rtc_base/task_utils:repeating_task_unittests", + "test:test_main", + ] + } + + # TODO(pbos): Rename test suite, this is no longer "just" for video targets. + video_engine_tests_resources = [ + "resources/foreman_cif_short.yuv", + "resources/voice_engine/audio_long16.pcm", + ] + + if (is_ios) { + bundle_data("video_engine_tests_bundle_data") { + testonly = true + sources = video_engine_tests_resources + outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ] + } + } + + rtc_test("video_engine_tests") { + testonly = true + deps = [ + "audio:audio_tests", + + # TODO(eladalon): call_tests aren't actually video-specific, so we + # should move them to a more appropriate test suite. 
+ "call:call_tests", + "call/adaptation:resource_adaptation_tests", + "test:test_common", + "test:test_main", + "test:video_test_common", + "video:video_tests", + "video/adaptation:video_adaptation_tests", + ] + data = video_engine_tests_resources + if (is_android) { + deps += [ "//testing/android/native_test:native_test_native_code" ] + shard_timeout = 900 + } + if (is_ios) { + deps += [ ":video_engine_tests_bundle_data" ] + } + } + + webrtc_perf_tests_resources = [ + "resources/ConferenceMotion_1280_720_50.yuv", + "resources/audio_coding/speech_mono_16kHz.pcm", + "resources/audio_coding/speech_mono_32_48kHz.pcm", + "resources/audio_coding/testfile32kHz.pcm", + "resources/difficult_photo_1850_1110.yuv", + "resources/foreman_cif.yuv", + "resources/paris_qcif.yuv", + "resources/photo_1850_1110.yuv", + "resources/presentation_1850_1110.yuv", + "resources/voice_engine/audio_long16.pcm", + "resources/web_screenshot_1850_1110.yuv", + ] + + if (is_ios) { + bundle_data("webrtc_perf_tests_bundle_data") { + testonly = true + sources = webrtc_perf_tests_resources + outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ] + } + } + + rtc_test("webrtc_perf_tests") { + testonly = true + deps = [ + "audio:audio_perf_tests", + "call:call_perf_tests", + "modules/audio_coding:audio_coding_perf_tests", + "modules/audio_processing:audio_processing_perf_tests", + "pc:peerconnection_perf_tests", + "test:test_main", + "video:video_full_stack_tests", + "video:video_pc_full_stack_tests", + ] + + data = webrtc_perf_tests_resources + if (is_android) { + deps += [ "//testing/android/native_test:native_test_native_code" ] + shard_timeout = 4500 + } + if (is_ios) { + deps += [ ":webrtc_perf_tests_bundle_data" ] + } + } + + rtc_test("webrtc_nonparallel_tests") { + testonly = true + deps = [ "rtc_base:rtc_base_nonparallel_tests" ] + if (is_android) { + deps += [ "//testing/android/native_test:native_test_support" ] + shard_timeout = 900 + } + } + + rtc_test("voip_unittests") { + testonly = 
true + deps = [ + "api/voip:voip_engine_factory_unittests", + "audio/voip/test:audio_channel_unittests", + "audio/voip/test:audio_egress_unittests", + "audio/voip/test:audio_ingress_unittests", + "audio/voip/test:voip_core_unittests", + "test:test_main", + ] + } +} + +# ---- Poisons ---- +# +# Here is one empty dummy target for each poison type (needed because +# "being poisonous with poison type foo" is implemented as "depends on +# //:poison_foo"). +# +# The set of poison_* targets needs to be kept in sync with the +# `all_poison_types` list in webrtc.gni. +# +group("poison_audio_codecs") { +} + +group("poison_default_task_queue") { +} + +group("poison_rtc_json") { +} + +group("poison_software_video_codecs") { +} diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..7b48e69 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,68 @@ +# Contributors Code of Conduct + +Google and the WebRTC team are committed to preserving and fostering a diverse, welcoming and open +community. The WebRTC project is open to contributors from all walks of life and should be a +harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender +identity and expression, level of experience, nationality, personal appearance, race, religion, or +sexual identity and orientation. + +## Scope +This Code of Conduct applies to our repos and organizations, mailing lists, blog content, and any +other WebRTC-supported communication group, as well as any private communication initiated in the +context of these spaces. 
+ +## Standards +Examples of behavior that contributes to creating a positive environment include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic address, without explicit +permission +* Other conduct which could reasonably be considered inappropriate in a professional setting + +## Responsibilities + +You are empowered to politely engage when you feel that you or others are disrespected. The person +making you feel uncomfortable may not be aware of what they are doing - politely bringing their +behavior to their attention is encouraged. + +If you are uncomfortable speaking up, or feel that your concerns are not being duly considered, you +can email community@webrtc.org to request involvement from a community manager. All concerns shared +with community managers will be kept confidential. While all reports will be taken seriously, the +WebRTC community managers may not act on complaints that they feel are not violations of this code +of conduct. 
+ +## Enforcement + +Consequences for failing to comply with this policy may include, at the sole discretion of the +WebRTC community managers: + +* a request for an apology; +* a private or public warning or reprimand; +* a temporary ban from the mailing list, blog, WebRTC repository or organization, or other +WebRTC-supported communication group, including loss of committer status; +* a permanent ban from any of the above, or from all current and future WebRTC-supported or +Google-supported communities, including loss of committer status. + +Participants warned to stop any harassing behavior are expected to comply immediately; failure to do +so will result in an escalation of consequences. + +The decisions of the WebRTC community managers may be appealed via community-appeals@webrtc.org. + +## Acknowledgements + +This Code of Conduct is based on Contributor Covenant, version 1.4, +available [here](http://contributor-covenant.org/version/1/4) and [Chromium](https://chromium.googlesource.com/chromium/src/+/master/CODE_OF_CONDUCT.md) + +## License + +This Code of Conduct is available for reuse under the Creative Commons Zero (CC0) license. \ No newline at end of file diff --git a/DEPS b/DEPS new file mode 100644 index 0000000..1ca0b29 --- /dev/null +++ b/DEPS @@ -0,0 +1,3283 @@ +# This file contains dependencies for WebRTC. + +gclient_gn_args_file = 'src/build/config/gclient_args.gni' +gclient_gn_args = [ + 'mac_xcode_version', +] + +vars = { + # By default, we should check out everything needed to run on the main + # chromium waterfalls. More info at: crbug.com/570091. + 'checkout_configuration': 'default', + 'checkout_instrumented_libraries': 'checkout_linux and checkout_configuration == "default"', + 'chromium_revision': '54b8c1d09983552ff9d18cdcad8e47344b4fcff7', + + # This can be overridden, e.g. with custom_vars, to download a nonstandard + # Xcode version in build/mac_toolchain.py + # instead of downloading the prebuilt pinned revision. 
+ 'mac_xcode_version': 'default', +} + +deps = { + # TODO(kjellander): Move this to be Android-only once the libevent dependency + # in base/third_party/libevent is solved. + 'src/base': + 'https://chromium.googlesource.com/chromium/src/base@d08d95a8bd36fa1e1ec0801a8c6d066dc6eca271', + 'src/build': + 'https://chromium.googlesource.com/chromium/src/build@960c227c19b6bf6f15aa85969729425e39a7091f', + 'src/buildtools': + 'https://chromium.googlesource.com/chromium/src/buildtools@b00ad0af636401e5eb4b5d0ab01b65164dca1914', + # Gradle 4.3-rc4. Used for testing Android Studio project generation for WebRTC. + 'src/examples/androidtests/third_party/gradle': { + 'url': 'https://chromium.googlesource.com/external/github.com/gradle/gradle.git@89af43c4d0506f69980f00dde78c97b2f81437f8', + 'condition': 'checkout_android', + }, + 'src/ios': { + 'url': 'https://chromium.googlesource.com/chromium/src/ios@b0b7693530f7e28d91eb1d1b2454c3400c6168b4', + 'condition': 'checkout_ios', + }, + 'src/testing': + 'https://chromium.googlesource.com/chromium/src/testing@e3ac1706db1bf70a8c2660551ebe3b481aaed683', + 'src/third_party': + 'https://chromium.googlesource.com/chromium/src/third_party@fb8c70e76ac12220febe4ef3cda42535a4354a22', + + 'src/buildtools/linux64': { + 'packages': [ + { + 'package': 'gn/gn/linux-amd64', + 'version': 'git_revision:e327ffdc503815916db2543ec000226a8df45163', + } + ], + 'dep_type': 'cipd', + 'condition': 'checkout_linux', + }, + 'src/buildtools/mac': { + 'packages': [ + { + 'package': 'gn/gn/mac-amd64', + 'version': 'git_revision:e327ffdc503815916db2543ec000226a8df45163', + } + ], + 'dep_type': 'cipd', + 'condition': 'checkout_mac', + }, + 'src/buildtools/win': { + 'packages': [ + { + 'package': 'gn/gn/windows-amd64', + 'version': 'git_revision:e327ffdc503815916db2543ec000226a8df45163', + } + ], + 'dep_type': 'cipd', + 'condition': 'checkout_win', + }, + + 'src/buildtools/clang_format/script': + 
'https://chromium.googlesource.com/chromium/llvm-project/cfe/tools/clang-format.git@96636aa0e9f047f17447f2d45a094d0b59ed7917', + 'src/buildtools/third_party/libc++/trunk': + 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxx.git@d9040c75cfea5928c804ab7c235fed06a63f743a', + 'src/buildtools/third_party/libc++abi/trunk': + 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libcxxabi.git@196ba1aaa8ac285d94f4ea8d9836390a45360533', + 'src/buildtools/third_party/libunwind/trunk': + 'https://chromium.googlesource.com/external/github.com/llvm/llvm-project/libunwind.git@d999d54f4bca789543a2eb6c995af2d9b5a1f3ed', + + 'src/tools/clang/dsymutil': { + 'packages': [ + { + 'package': 'chromium/llvm-build-tools/dsymutil', + 'version': 'M56jPzDv1620Rnm__jTMYS62Zi8rxHVq7yw0qeBFEgkC', + } + ], + 'condition': 'checkout_mac', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_system_sdk': { + 'packages': [ + { + 'package': 'chromium/third_party/android_system_sdk', + 'version': 'no8ss5nRg6uYDM08HboypuIQuix7bS1kVqRGyWmwP-YC', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_build_tools/aapt2': { + 'packages': [ + { + 'package': 'chromium/third_party/android_build_tools/aapt2', + 'version': 'R2k5wwOlIaS6sjv2TIyHotiPJod-6KqnZO8NH-KFK8sC', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_build_tools/bundletool': { + 'packages': [ + { + 'package': 'chromium/third_party/android_tools_bundletool', + 'version': 'Yyhy8FicC3R0ATRzWqGNh4ffsCLz_0nu_BjmNPAdhvIC', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/boringssl/src': + 'https://boringssl.googlesource.com/boringssl.git@74161f485b5d54fe963cbd3d081b718ec84d2e00', + 'src/third_party/breakpad/breakpad': + 'https://chromium.googlesource.com/breakpad/breakpad.git@e3a62dc5502dec6ab451061769d7efaf5b7ffba8', + 'src/third_party/catapult': + 
'https://chromium.googlesource.com/catapult.git@d56f379356d9fdbafd64e4f8545ac89e37485fab', + 'src/third_party/ced/src': { + 'url': 'https://chromium.googlesource.com/external/github.com/google/compact_enc_det.git@ba412eaaacd3186085babcd901679a48863c7dd5', + }, + 'src/third_party/colorama/src': + 'https://chromium.googlesource.com/external/colorama.git@799604a1041e9b3bc5d2789ecbd7e8db2e18e6b8', + 'src/third_party/depot_tools': + 'https://chromium.googlesource.com/chromium/tools/depot_tools.git@65eb98d6ed3a80086d31a54f3c0165c7b466a967', + 'src/third_party/ffmpeg': + 'https://chromium.googlesource.com/chromium/third_party/ffmpeg.git@d2dd36c03501e995e8ce2d792d834392b2e62bfe', + 'src/third_party/findbugs': { + 'url': 'https://chromium.googlesource.com/chromium/deps/findbugs.git@4275d9ac8610db6b1bc9a5e887f97e41b33fac67', + 'condition': 'checkout_android', + }, + # Used for embedded builds. CrOS & Linux use the system version. + 'src/third_party/fontconfig/src': { + 'url': 'https://chromium.googlesource.com/external/fontconfig.git@452be8125f0e2a18a7dfef469e05d19374d36307', + 'condition': 'checkout_linux', + }, + 'src/third_party/freetype/src': + 'https://chromium.googlesource.com/chromium/src/third_party/freetype2.git@f9f6adb625c48ef15b5d61a3ac1709a068ea95a3', + 'src/third_party/harfbuzz-ng/src': + 'https://chromium.googlesource.com/external/github.com/harfbuzz/harfbuzz.git@d03eecb4d63e1cdac77a08d081179c28440b2d18', + 'src/third_party/google_benchmark/src': { + 'url': 'https://chromium.googlesource.com/external/github.com/google/benchmark.git@367119482ff4abc3d73e4a109b410090fc281337', + }, + # WebRTC-only dependency (not present in Chromium). 
+ 'src/third_party/gtest-parallel': + 'https://chromium.googlesource.com/external/github.com/google/gtest-parallel@aabba21acd68a8814c70a6c2937f1625de715411', + 'src/third_party/google-truth': { + 'packages': [ + { + 'package': 'chromium/third_party/google-truth', + 'version': 'u8oovXxp24lStqX4d54htRovta-75Sy2w7ijg1TL07gC', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + 'src/third_party/googletest/src': + 'https://chromium.googlesource.com/external/github.com/google/googletest.git@4fe018038f87675c083d0cfb6a6b57c274fb1753', + 'src/third_party/icu': { + 'url': 'https://chromium.googlesource.com/chromium/deps/icu.git@79326efe26e5440f530963704c3c0ff965b3a4ac', + }, + 'src/third_party/jdk': { + 'packages': [ + { + 'package': 'chromium/third_party/jdk', + 'version': 'PfRSnxe8Od6WU4zBXomq-zsgcJgWmm3z4gMQNB-r2QcC', + }, + { + 'package': 'chromium/third_party/jdk/extras', + 'version': 'fkhuOQ3r-zKtWEdKplpo6k0vKkjl-LY_rJTmtzFCQN4C', + }, + ], + 'condition': 'host_os == "linux" and checkout_android', + 'dep_type': 'cipd', + }, + 'src/third_party/jsoncpp/source': + 'https://chromium.googlesource.com/external/github.com/open-source-parsers/jsoncpp.git@645250b6690785be60ab6780ce4b58698d884d11', # from svn 248 + 'src/third_party/junit/src': { + 'url': 'https://chromium.googlesource.com/external/junit.git@64155f8a9babcfcf4263cf4d08253a1556e75481', + 'condition': 'checkout_android', + }, + # Used for building libFuzzers (only supports Linux). 
+ 'src/third_party/libFuzzer/src': + 'https://chromium.googlesource.com/chromium/llvm-project/compiler-rt/lib/fuzzer.git@debe7d2d1982e540fbd6bd78604bf001753f9e74', + 'src/third_party/libjpeg_turbo': + 'https://chromium.googlesource.com/chromium/deps/libjpeg_turbo.git@9d4f8005bc6c888e66b00fd00188531ee9bd3344', + 'src/third_party/libsrtp': + 'https://chromium.googlesource.com/chromium/deps/libsrtp.git@650611720ecc23e0e6b32b0e3100f8b4df91696c', + 'src/third_party/libaom/source/libaom': + 'https://aomedia.googlesource.com/aom.git@4dcbd921c3a686185c705eeccb154da52858d23b', + 'src/third_party/libunwindstack': { + 'url': 'https://chromium.googlesource.com/chromium/src/third_party/libunwindstack.git@11659d420a71e7323b379ea8781f07c6f384bc7e', + 'condition': 'checkout_android', + }, + 'src/third_party/perfetto': + 'https://android.googlesource.com/platform/external/perfetto.git@c4451adcce85638c2c52dbcc8606c04144500aa8', + 'src/third_party/libvpx/source/libvpx': + 'https://chromium.googlesource.com/webm/libvpx.git@a1cee8dc919df1980d802e1a9bce1259ec34cba8', + 'src/third_party/libyuv': + 'https://chromium.googlesource.com/libyuv/libyuv.git@6afd9becdf58822b1da6770598d8597c583ccfad', + 'src/third_party/lss': { + 'url': 'https://chromium.googlesource.com/linux-syscall-support.git@f70e2f1641e280e777edfdad7f73a2cfa38139c7', + 'condition': 'checkout_android or checkout_linux', + }, + 'src/third_party/mockito/src': { + 'url': 'https://chromium.googlesource.com/external/mockito/mockito.git@04a2a289a4222f80ad20717c25144981210d2eac', + 'condition': 'checkout_android', + }, + + # Used by boringssl. 
+ 'src/third_party/nasm': { + 'url': 'https://chromium.googlesource.com/chromium/deps/nasm.git@19f3fad68da99277b2882939d3b2fa4c4b8d51d9' + }, + + 'src/third_party/openh264/src': + 'https://chromium.googlesource.com/external/github.com/cisco/openh264@3dd5b80bc4f172dd82925bb259cb7c82348409c5', + 'src/third_party/r8': { + 'packages': [ + { + 'package': 'chromium/third_party/r8', + 'version': 'vvymFSkKtWKWNmfz0PL_0H8MD8V40P--A9aUfxfpF6QC', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + 'src/third_party/proguard': { + 'packages': [ + { + 'package': 'chromium/third_party/proguard', + 'version': 'Fd91BJFVlmiO6c46YMTsdy7n2f5Sk2hVVGlzPLvqZPsC', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + 'src/third_party/requests/src': { + 'url': 'https://chromium.googlesource.com/external/github.com/kennethreitz/requests.git@refs/tags/v2.23.0', + 'condition': 'checkout_android', + }, + 'src/third_party/ub-uiautomator/lib': { + 'url': 'https://chromium.googlesource.com/chromium/third_party/ub-uiautomator.git@00270549ce3161ae72ceb24712618ea28b4f9434', + 'condition': 'checkout_android', + }, + 'src/third_party/usrsctp/usrsctplib': + 'https://chromium.googlesource.com/external/github.com/sctplab/usrsctp@a8c51df76caae94254b1e59999405f739467490e', + # Dependency used by libjpeg-turbo. 
+ 'src/third_party/yasm/binaries': { + 'url': 'https://chromium.googlesource.com/chromium/deps/yasm/binaries.git@52f9b3f4b0aa06da24ef8b123058bb61ee468881', + 'condition': 'checkout_win', + }, + 'src/tools': + 'https://chromium.googlesource.com/chromium/src/tools@ae2d09af5d3b541f25b9bf47c454667a1f84d58b', + 'src/tools/swarming_client': + 'https://chromium.googlesource.com/infra/luci/client-py.git@4c095d04179dc725a300085ae21fe3b79900d072', + + 'src/third_party/accessibility_test_framework': { + 'packages': [ + { + 'package': 'chromium/third_party/accessibility-test-framework', + 'version': 'b5ec1e56e58e56bc1a0c77d43111c37f9b512c8a', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_support_test_runner': { + 'packages': [ + { + 'package': 'chromium/third_party/android_support_test_runner', + 'version': '96d4bf848cd210fdcbca6bcc8c1b4b39cbd93141', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/bazel': { + 'packages': [ + { + 'package': 'chromium/third_party/bazel', + 'version': 'VjMsf48QUWw8n7XtJP2AuSjIGmbQeYdWdwyxVvIRLmAC', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/bouncycastle': { + 'packages': [ + { + 'package': 'chromium/third_party/bouncycastle', + 'version': 'c078e87552ba26e776566fdaf0f22cd8712743d0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/byte_buddy': { + 'packages': [ + { + 'package': 'chromium/third_party/byte_buddy', + 'version': 'c9b53316603fc2d997c899c7ca1707f809b918cd', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/byte_buddy/android_sdk_build_tools_25_0_2': { + 'packages': [ + { + 'package': 'chromium/third_party/android_sdk/public/build-tools', + 'version': 'kwIs2vdfTm93yEP8LG5aSnchN4BVEdVxbqQtF4XpPdkC', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/espresso': { + 
'packages': [ + { + 'package': 'chromium/third_party/espresso', + 'version': 'y8fIfH8Leo2cPm7iGCYnBxZpwOlgLv8rm2mlcmJlvGsC', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/guava': { + 'packages': [ + { + 'package': 'chromium/third_party/guava', + 'version': 'y8Zx7cKTiOunLhOrfC4hOt5kDQrLJ_Rq7ISDmXkPdYsC', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/hamcrest': { + 'packages': [ + { + 'package': 'chromium/third_party/hamcrest', + 'version': '37eccfc658fe79695d6abb6dd497463c4372032f', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_ndk': { + 'url': 'https://chromium.googlesource.com/android_ndk.git@27c0a8d090c666a50e40fceb4ee5b40b1a2d3f87', + 'condition': 'checkout_android', + }, + + 'src/third_party/android_sdk/public': { + 'packages': [ + { + 'package': 'chromium/third_party/android_sdk/public/build-tools/30.0.1', + 'version': '8LZujEmLjSh0g3JciDA3cslSptxKs9HOa_iUPXkOeYQC', + }, + { + 'package': 'chromium/third_party/android_sdk/public/emulator', + 'version': 'A4EvXZUIuQho0QRDJopMUpgyp6NA3aiDQjGKPUKbowMC', + }, + { + 'package': 'chromium/third_party/android_sdk/public/extras', + 'version': 'ppQ4TnqDvBHQ3lXx5KPq97egzF5X2FFyOrVHkGmiTMQC', + }, + { + 'package': 'chromium/third_party/android_sdk/public/patcher', + 'version': 'I6FNMhrXlpB-E1lOhMlvld7xt9lBVNOO83KIluXDyA0C', + }, + { + 'package': 'chromium/third_party/android_sdk/public/platform-tools', + 'version': '8tF0AOj7Dwlv4j7_nfkhxWB0jzrvWWYjEIpirt8FIWYC', + }, + { + 'package': 'chromium/third_party/android_sdk/public/platforms/android-30', + 'version': 'YMUu9EHNZ__2Xcxl-KsaSf-dI5TMt_P62IseUVsxktMC', + }, + { + 'package': 'chromium/third_party/android_sdk/public/sources/android-29', + 'version': '4gxhM8E62bvZpQs7Q3d0DinQaW0RLCIefhXrQBFkNy8C', + }, + { + 'package': 'chromium/third_party/android_sdk/public/cmdline-tools', + 'version': 
'ijpIFSitwBfaEdO9VXBGPqDHUVzPimXy_whw3aHTN9oC', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/icu4j': { + 'packages': [ + { + 'package': 'chromium/third_party/icu4j', + 'version': 'e87e5bed2b4935913ee26a3ebd0b723ee2344354', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/objenesis': { + 'packages': [ + { + 'package': 'chromium/third_party/objenesis', + 'version': 'tknDblENYi8IaJYyD6tUahUyHYZlzJ_Y74_QZSz4DpIC', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/robolectric': { + 'packages': [ + { + 'package': 'chromium/third_party/robolectric', + 'version': 'iC6RDM5EH3GEAzR-1shW_Mg0FeeNE5shq1okkFfuuNQC', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/sqlite4java': { + 'packages': [ + { + 'package': 'chromium/third_party/sqlite4java', + 'version': 'LofjKH9dgXIAJhRYCPQlMFywSwxYimrfDeBmaHc-Z5EC', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/turbine': { + 'packages': [ + { + 'package': 'chromium/third_party/turbine', + 'version': 'O_jNDJ4VdwYKBSDbd2BJ3mknaTFoVkvE7Po8XIiKy8sC', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/turbine/src': { + 'url': 'https://chromium.googlesource.com/external/github.com/google/turbine.git' + '@' + '0f2a5024fe4a9bb745bcd5ac7c655cebe11649bc', + 'condition': 'checkout_android', + }, + + 'src/third_party/xstream': { + 'packages': [ + { + 'package': 'chromium/third_party/xstream', + 'version': '4278b1b78b86ab7a1a29e64d5aec9a47a9aab0fe', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/tools/luci-go': { + 'packages': [ + { + 'package': 'infra/tools/luci/isolate/${{platform}}', + 'version': 'git_revision:b022173f8069cf8001d4cf2a87ce7c5f0eae220f', + }, + { + 'package': 'infra/tools/luci/isolated/${{platform}}', + 'version': 
'git_revision:b022173f8069cf8001d4cf2a87ce7c5f0eae220f', + }, + { + 'package': 'infra/tools/luci/swarming/${{platform}}', + 'version': 'git_revision:b022173f8069cf8001d4cf2a87ce7c5f0eae220f', + }, + ], + 'dep_type': 'cipd', + }, + + # Everything coming after this is automatically updated by the auto-roller. + # === ANDROID_DEPS Generated Code Start === + # Generated by //third_party/android_deps/fetch_all.py + 'src/third_party/android_deps/libs/android_arch_core_common': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/android_arch_core_common', + 'version': 'version:1.1.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/android_arch_core_runtime': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/android_arch_core_runtime', + 'version': 'version:1.1.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/android_arch_lifecycle_common': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/android_arch_lifecycle_common', + 'version': 'version:1.1.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/android_arch_lifecycle_common_java8': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/android_arch_lifecycle_common_java8', + 'version': 'version:1.1.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/android_arch_lifecycle_livedata': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/android_arch_lifecycle_livedata', + 'version': 'version:1.1.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/android_arch_lifecycle_livedata_core': { + 'packages': [ + { + 'package': 
'chromium/third_party/android_deps/libs/android_arch_lifecycle_livedata_core', + 'version': 'version:1.1.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/android_arch_lifecycle_runtime': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/android_arch_lifecycle_runtime', + 'version': 'version:1.1.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/android_arch_lifecycle_viewmodel': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/android_arch_lifecycle_viewmodel', + 'version': 'version:1.1.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_activity_activity': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_activity_activity', + 'version': 'version:1.1.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_annotation_annotation': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_annotation_annotation', + 'version': 'version:1.1.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_annotation_annotation_experimental': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_annotation_annotation_experimental', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_appcompat_appcompat': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_appcompat_appcompat', + 'version': 'version:1.2.0-beta01-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 
'src/third_party/android_deps/libs/androidx_appcompat_appcompat_resources': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_appcompat_appcompat_resources', + 'version': 'version:1.2.0-beta01-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_arch_core_core_common': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_arch_core_core_common', + 'version': 'version:2.1.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_arch_core_core_runtime': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_arch_core_core_runtime', + 'version': 'version:2.1.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_asynclayoutinflater_asynclayoutinflater': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_asynclayoutinflater_asynclayoutinflater', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_cardview_cardview': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_cardview_cardview', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_collection_collection': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_collection_collection', + 'version': 'version:1.1.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_concurrent_concurrent_futures': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_concurrent_concurrent_futures', + 'version': 'version:1.0.0-cr0', + }, 
+ ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_coordinatorlayout_coordinatorlayout': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_coordinatorlayout_coordinatorlayout', + 'version': 'version:1.1.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_core_core': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_core_core', + 'version': 'version:1.3.0-beta01-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_cursoradapter_cursoradapter': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_cursoradapter_cursoradapter', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_customview_customview': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_customview_customview', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_documentfile_documentfile': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_documentfile_documentfile', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_drawerlayout_drawerlayout': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_drawerlayout_drawerlayout', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_exifinterface_exifinterface': { + 'packages': [ + { + 'package': 
'chromium/third_party/android_deps/libs/androidx_exifinterface_exifinterface', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_fragment_fragment': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_fragment_fragment', + 'version': 'version:1.2.5-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_gridlayout_gridlayout': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_gridlayout_gridlayout', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_interpolator_interpolator': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_interpolator_interpolator', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_leanback_leanback': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_leanback_leanback', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_leanback_leanback_preference': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_leanback_leanback_preference', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_legacy_legacy_preference_v14': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_legacy_legacy_preference_v14', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 
'src/third_party/android_deps/libs/androidx_legacy_legacy_support_core_ui': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_legacy_legacy_support_core_ui', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_legacy_legacy_support_core_utils': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_legacy_legacy_support_core_utils', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_legacy_legacy_support_v13': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_legacy_legacy_support_v13', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_legacy_legacy_support_v4': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_legacy_legacy_support_v4', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_lifecycle_lifecycle_common': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_lifecycle_lifecycle_common', + 'version': 'version:2.2.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_lifecycle_lifecycle_common_java8': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_lifecycle_lifecycle_common_java8', + 'version': 'version:2.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_lifecycle_lifecycle_livedata': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_lifecycle_lifecycle_livedata', + 
'version': 'version:2.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_lifecycle_lifecycle_livedata_core': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_lifecycle_lifecycle_livedata_core', + 'version': 'version:2.2.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_lifecycle_lifecycle_runtime': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_lifecycle_lifecycle_runtime', + 'version': 'version:2.2.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_lifecycle_lifecycle_viewmodel': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_lifecycle_lifecycle_viewmodel', + 'version': 'version:2.2.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_lifecycle_lifecycle_viewmodel_savedstate': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_lifecycle_lifecycle_viewmodel_savedstate', + 'version': 'version:2.2.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_loader_loader': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_loader_loader', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_localbroadcastmanager_localbroadcastmanager': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_localbroadcastmanager_localbroadcastmanager', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_media_media': 
{ + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_media_media', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_mediarouter_mediarouter': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_mediarouter_mediarouter', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_multidex_multidex': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_multidex_multidex', + 'version': 'version:2.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_palette_palette': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_palette_palette', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_preference_preference': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_preference_preference', + 'version': 'version:1.1.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_print_print': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_print_print', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_recyclerview_recyclerview': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_recyclerview_recyclerview', + 'version': 'version:1.1.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_savedstate_savedstate': { + 'packages': [ + { + 'package': 
'chromium/third_party/android_deps/libs/androidx_savedstate_savedstate', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_slice_slice_builders': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_slice_slice_builders', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_slice_slice_core': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_slice_slice_core', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_slidingpanelayout_slidingpanelayout': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_slidingpanelayout_slidingpanelayout', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_swiperefreshlayout_swiperefreshlayout': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_swiperefreshlayout_swiperefreshlayout', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_test_core': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_test_core', + 'version': 'version:1.2.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_test_espresso_espresso_contrib': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_contrib', + 'version': 'version:3.2.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 
'src/third_party/android_deps/libs/androidx_test_espresso_espresso_core': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_core', + 'version': 'version:3.2.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_test_espresso_espresso_idling_resource': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_idling_resource', + 'version': 'version:3.2.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_test_espresso_espresso_intents': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_intents', + 'version': 'version:3.2.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_test_espresso_espresso_web': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_test_espresso_espresso_web', + 'version': 'version:3.2.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_test_ext_junit': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_test_ext_junit', + 'version': 'version:1.1.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_test_monitor': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_test_monitor', + 'version': 'version:1.2.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_test_rules': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_test_rules', + 'version': 'version:1.2.0-cr0', + }, + ], + 'condition': 'checkout_android', + 
'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_test_runner': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_test_runner', + 'version': 'version:1.2.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_test_uiautomator_uiautomator': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_test_uiautomator_uiautomator', + 'version': 'version:2.2.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_transition_transition': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_transition_transition', + 'version': 'version:1.2.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_tvprovider_tvprovider': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_tvprovider_tvprovider', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_vectordrawable_vectordrawable': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_vectordrawable_vectordrawable', + 'version': 'version:1.1.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_vectordrawable_vectordrawable_animated': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_vectordrawable_vectordrawable_animated', + 'version': 'version:1.1.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_versionedparcelable_versionedparcelable': { + 'packages': [ + { + 'package': 
'chromium/third_party/android_deps/libs/androidx_versionedparcelable_versionedparcelable', + 'version': 'version:1.1.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_viewpager2_viewpager2': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_viewpager2_viewpager2', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_viewpager_viewpager': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_viewpager_viewpager', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/androidx_webkit_webkit': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/androidx_webkit_webkit', + 'version': 'version:1.3.0-rc01-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/backport_util_concurrent_backport_util_concurrent': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/backport_util_concurrent_backport_util_concurrent', + 'version': 'version:3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/classworlds_classworlds': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/classworlds_classworlds', + 'version': 'version:1.1-alpha-2-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_animated_vector_drawable': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_animated_vector_drawable', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 
'src/third_party/android_deps/libs/com_android_support_appcompat_v7': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_appcompat_v7', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_asynclayoutinflater': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_asynclayoutinflater', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_cardview_v7': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_cardview_v7', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_collections': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_collections', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_coordinatorlayout': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_coordinatorlayout', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_cursoradapter': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_cursoradapter', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_customview': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_customview', + 'version': 'version:28.0.0-cr0', + }, + ], + 
'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_design': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_design', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_documentfile': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_documentfile', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_drawerlayout': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_drawerlayout', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_gridlayout_v7': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_gridlayout_v7', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_interpolator': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_interpolator', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_leanback_v17': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_leanback_v17', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_loader': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_loader', + 'version': 
'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_localbroadcastmanager': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_localbroadcastmanager', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_mediarouter_v7': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_mediarouter_v7', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_multidex': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_multidex', + 'version': 'version:1.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_palette_v7': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_palette_v7', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_preference_leanback_v17': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_preference_leanback_v17', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_preference_v14': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_preference_v14', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_preference_v7': { + 'packages': [ + { + 'package': 
'chromium/third_party/android_deps/libs/com_android_support_preference_v7', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_print': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_print', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_recyclerview_v7': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_recyclerview_v7', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_slidingpanelayout': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_slidingpanelayout', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_support_annotations': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_support_annotations', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_support_compat': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_support_compat', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_support_core_ui': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_support_core_ui', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 
'src/third_party/android_deps/libs/com_android_support_support_core_utils': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_support_core_utils', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_support_fragment': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_support_fragment', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_support_media_compat': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_support_media_compat', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_support_v13': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_support_v13', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_support_v4': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_support_v4', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_support_vector_drawable': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_support_vector_drawable', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_swiperefreshlayout': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_swiperefreshlayout', + 
'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_transition': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_transition', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_versionedparcelable': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_versionedparcelable', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_support_viewpager': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_support_viewpager', + 'version': 'version:28.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_tools_build_jetifier_jetifier_core': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_tools_build_jetifier_jetifier_core', + 'version': 'version:1.0.0-beta08-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_tools_build_jetifier_jetifier_processor': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_tools_build_jetifier_jetifier_processor', + 'version': 'version:1.0.0-beta08-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_android_tools_desugar_jdk_libs': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_android_tools_desugar_jdk_libs', + 'version': 'version:1.0.5-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 
'src/third_party/android_deps/libs/com_github_ben_manes_caffeine_caffeine': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_github_ben_manes_caffeine_caffeine', + 'version': 'version:2.8.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_github_kevinstern_software_and_algorithms': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_github_kevinstern_software_and_algorithms', + 'version': 'version:1.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_android_gms_play_services_auth': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_auth', + 'version': 'version:17.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_android_gms_play_services_auth_api_phone': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_auth_api_phone', + 'version': 'version:17.1.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_android_gms_play_services_auth_base': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_auth_base', + 'version': 'version:17.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_android_gms_play_services_base': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_base', + 'version': 'version:17.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_android_gms_play_services_basement': { + 'packages': [ + { + 'package': 
'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_basement', + 'version': 'version:17.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_android_gms_play_services_cast': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_cast', + 'version': 'version:17.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_android_gms_play_services_cast_framework': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_cast_framework', + 'version': 'version:17.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_android_gms_play_services_clearcut': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_clearcut', + 'version': 'version:17.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_android_gms_play_services_fido': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_fido', + 'version': 'version:18.1.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_android_gms_play_services_flags': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_flags', + 'version': 'version:17.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_android_gms_play_services_gcm': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_gcm', + 'version': 
'version:17.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_android_gms_play_services_iid': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_iid', + 'version': 'version:17.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_android_gms_play_services_instantapps': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_instantapps', + 'version': 'version:17.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_android_gms_play_services_location': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_location', + 'version': 'version:17.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_android_gms_play_services_phenotype': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_phenotype', + 'version': 'version:17.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_android_gms_play_services_places_placereport': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_places_placereport', + 'version': 'version:17.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_android_gms_play_services_stats': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_stats', + 'version': 'version:17.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 
'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_android_gms_play_services_tasks': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_tasks', + 'version': 'version:17.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_android_gms_play_services_vision': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_vision', + 'version': 'version:18.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_android_gms_play_services_vision_common': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_android_gms_play_services_vision_common', + 'version': 'version:18.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_android_material_material': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_android_material_material', + 'version': 'version:1.2.0-alpha06-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_auto_auto_common': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_auto_auto_common', + 'version': 'version:0.10-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_auto_service_auto_service': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_auto_service_auto_service', + 'version': 'version:1.0-rc6-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_auto_service_auto_service_annotations': { + 'packages': [ + { + 'package': 
'chromium/third_party/android_deps/libs/com_google_auto_service_auto_service_annotations', + 'version': 'version:1.0-rc6-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_auto_value_auto_value_annotations': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_auto_value_auto_value_annotations', + 'version': 'version:1.7-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_code_findbugs_jFormatString': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_code_findbugs_jformatstring', + 'version': 'version:3.0.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_code_findbugs_jsr305': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_code_findbugs_jsr305', + 'version': 'version:3.0.2-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_code_gson_gson': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_code_gson_gson', + 'version': 'version:2.8.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_dagger_dagger': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_dagger_dagger', + 'version': 'version:2.26-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_dagger_dagger_compiler': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_dagger_dagger_compiler', + 'version': 'version:2.26-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 
'src/third_party/android_deps/libs/com_google_dagger_dagger_producers': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_dagger_dagger_producers', + 'version': 'version:2.26-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_dagger_dagger_spi': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_dagger_dagger_spi', + 'version': 'version:2.26-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_errorprone_error_prone_annotation': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_annotation', + 'version': 'version:2.4.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_errorprone_error_prone_annotations': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_annotations', + 'version': 'version:2.4.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_errorprone_error_prone_check_api': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_check_api', + 'version': 'version:2.4.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_errorprone_error_prone_core': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_core', + 'version': 'version:2.4.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_errorprone_error_prone_type_annotations': { + 'packages': [ + { + 'package': 
'chromium/third_party/android_deps/libs/com_google_errorprone_error_prone_type_annotations', + 'version': 'version:2.4.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_errorprone_javac': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_javac', + 'version': 'version:9+181-r4173-1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_errorprone_javac_shaded': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_errorprone_javac_shaded', + 'version': 'version:9-dev-r4023-3-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_googlejavaformat_google_java_format': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_googlejavaformat_google_java_format', + 'version': 'version:1.5-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_guava_failureaccess': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_guava_failureaccess', + 'version': 'version:1.0.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_guava_guava': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_guava_guava', + 'version': 'version:27.1-jre-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_guava_listenablefuture': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_guava_listenablefuture', + 'version': 'version:1.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 
'src/third_party/android_deps/libs/com_google_j2objc_j2objc_annotations': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_j2objc_j2objc_annotations', + 'version': 'version:1.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_protobuf_protobuf_java': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_protobuf_protobuf_java', + 'version': 'version:3.4.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_google_protobuf_protobuf_javalite': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_google_protobuf_protobuf_javalite', + 'version': 'version:3.12.2-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_googlecode_java_diff_utils_diffutils': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_googlecode_java_diff_utils_diffutils', + 'version': 'version:1.3.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_squareup_javapoet': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_squareup_javapoet', + 'version': 'version:1.11.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/com_squareup_javawriter': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/com_squareup_javawriter', + 'version': 'version:2.1.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/commons_cli_commons_cli': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/commons_cli_commons_cli', + 'version': 'version:1.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', 
+ }, + + 'src/third_party/android_deps/libs/javax_annotation_javax_annotation_api': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/javax_annotation_javax_annotation_api', + 'version': 'version:1.3.2-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/javax_annotation_jsr250_api': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/javax_annotation_jsr250_api', + 'version': 'version:1.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/javax_inject_javax_inject': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/javax_inject_javax_inject', + 'version': 'version:1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/nekohtml_nekohtml': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/nekohtml_nekohtml', + 'version': 'version:1.9.6.2-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/nekohtml_xercesMinimal': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/nekohtml_xercesminimal', + 'version': 'version:1.9.6.2-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/net_ltgt_gradle_incap_incap': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/net_ltgt_gradle_incap_incap', + 'version': 'version:0.2-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/net_sf_kxml_kxml2': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/net_sf_kxml_kxml2', + 'version': 'version:2.3.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_ant_ant': { + 
'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_ant_ant', + 'version': 'version:1.8.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_ant_ant_launcher': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_ant_ant_launcher', + 'version': 'version:1.8.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_ant_tasks': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_ant_tasks', + 'version': 'version:2.1.3-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_artifact': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_artifact', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_artifact_manager': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_artifact_manager', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_error_diagnostics': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_error_diagnostics', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_model': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_model', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 
'src/third_party/android_deps/libs/org_apache_maven_maven_plugin_registry': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_plugin_registry', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_profile': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_profile', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_project': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_project', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_repository_metadata': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_repository_metadata', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_maven_settings': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_maven_settings', + 'version': 'version:2.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_wagon_wagon_file': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_wagon_wagon_file', + 'version': 'version:1.0-beta-6-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_wagon_wagon_http_lightweight': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_wagon_wagon_http_lightweight', + 'version': 
'version:1.0-beta-6-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_wagon_wagon_http_shared': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_wagon_wagon_http_shared', + 'version': 'version:1.0-beta-6-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_apache_maven_wagon_wagon_provider_api': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_apache_maven_wagon_wagon_provider_api', + 'version': 'version:1.0-beta-6-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_ccil_cowan_tagsoup_tagsoup': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_ccil_cowan_tagsoup_tagsoup', + 'version': 'version:1.2.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_checkerframework_checker_compat_qual': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_checkerframework_checker_compat_qual', + 'version': 'version:2.5.3-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_checkerframework_checker_qual': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_checkerframework_checker_qual', + 'version': 'version:2.10.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_checkerframework_dataflow_shaded': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_checkerframework_dataflow_shaded', + 'version': 'version:3.1.2-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_codehaus_mojo_animal_sniffer_annotations': { + 
'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_codehaus_mojo_animal_sniffer_annotations', + 'version': 'version:1.17-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_codehaus_plexus_plexus_container_default': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_codehaus_plexus_plexus_container_default', + 'version': 'version:1.0-alpha-9-stable-1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_codehaus_plexus_plexus_interpolation': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_codehaus_plexus_plexus_interpolation', + 'version': 'version:1.11-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_codehaus_plexus_plexus_utils': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_codehaus_plexus_plexus_utils', + 'version': 'version:1.5.15-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_jdom_jdom2': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_jdom_jdom2', + 'version': 'version:2.0.6-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_jetbrains_annotations': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_annotations', + 'version': 'version:13.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_jetbrains_kotlin_kotlin_stdlib': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlin_kotlin_stdlib', + 'version': 'version:1.3.50-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 
'src/third_party/android_deps/libs/org_jetbrains_kotlin_kotlin_stdlib_common': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlin_kotlin_stdlib_common', + 'version': 'version:1.3.50-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_metadata_jvm': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_jetbrains_kotlinx_kotlinx_metadata_jvm', + 'version': 'version:0.1.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_ow2_asm_asm': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_ow2_asm_asm', + 'version': 'version:7.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_ow2_asm_asm_analysis': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_ow2_asm_asm_analysis', + 'version': 'version:7.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_ow2_asm_asm_commons': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_ow2_asm_asm_commons', + 'version': 'version:7.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_ow2_asm_asm_tree': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_ow2_asm_asm_tree', + 'version': 'version:7.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_ow2_asm_asm_util': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_ow2_asm_asm_util', + 'version': 'version:7.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 
'src/third_party/android_deps/libs/org_pcollections_pcollections': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_pcollections_pcollections', + 'version': 'version:2.1.2-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_annotations': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_annotations', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_junit': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_junit', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_pluginapi': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_pluginapi', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_plugins_maven_dependency_resolver': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_plugins_maven_dependency_resolver', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_resources': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_resources', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_robolectric': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_robolectric', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 
'src/third_party/android_deps/libs/org_robolectric_sandbox': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_sandbox', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_shadowapi': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadowapi', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_shadows_framework': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadows_framework', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_shadows_multidex': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadows_multidex', + 'version': 'version:4.3.1-cr1', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_shadows_playservices': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_shadows_playservices', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_utils': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_utils', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 'src/third_party/android_deps/libs/org_robolectric_utils_reflector': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_robolectric_utils_reflector', + 'version': 'version:4.3.1-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + 
'src/third_party/android_deps/libs/org_threeten_threeten_extra': { + 'packages': [ + { + 'package': 'chromium/third_party/android_deps/libs/org_threeten_threeten_extra', + 'version': 'version:1.5.0-cr0', + }, + ], + 'condition': 'checkout_android', + 'dep_type': 'cipd', + }, + + # === ANDROID_DEPS Generated Code End === +} + +hooks = [ + { + # This clobbers when necessary (based on get_landmines.py). It should be + # an early hook but it will need to be run after syncing Chromium and + # setting up the links, so the script actually exists. + 'name': 'landmines', + 'pattern': '.', + 'action': [ + 'python', + 'src/build/landmines.py', + '--landmine-scripts', + 'src/tools_webrtc/get_landmines.py', + '--src-dir', + 'src', + ], + }, + { + # Ensure that the DEPS'd "depot_tools" has its self-update capability + # disabled. + 'name': 'disable_depot_tools_selfupdate', + 'pattern': '.', + 'action': [ + 'python', + 'src/third_party/depot_tools/update_depot_tools_toggle.py', + '--disable', + ], + }, + { + 'name': 'sysroot_arm', + 'pattern': '.', + 'condition': 'checkout_linux and checkout_arm', + 'action': ['python', 'src/build/linux/sysroot_scripts/install-sysroot.py', + '--arch=arm'], + }, + { + 'name': 'sysroot_arm64', + 'pattern': '.', + 'condition': 'checkout_linux and checkout_arm64', + 'action': ['python', 'src/build/linux/sysroot_scripts/install-sysroot.py', + '--arch=arm64'], + }, + { + 'name': 'sysroot_x86', + 'pattern': '.', + 'condition': 'checkout_linux and (checkout_x86 or checkout_x64)', + # TODO(mbonadei): change to --arch=x86. + 'action': ['python', 'src/build/linux/sysroot_scripts/install-sysroot.py', + '--arch=i386'], + }, + { + 'name': 'sysroot_mips', + 'pattern': '.', + 'condition': 'checkout_linux and checkout_mips', + # TODO(mbonadei): change to --arch=mips. 
+ 'action': ['python', 'src/build/linux/sysroot_scripts/install-sysroot.py', + '--arch=mipsel'], + }, + { + 'name': 'sysroot_x64', + 'pattern': '.', + 'condition': 'checkout_linux and checkout_x64', + # TODO(mbonadei): change to --arch=x64. + 'action': ['python', 'src/build/linux/sysroot_scripts/install-sysroot.py', + '--arch=amd64'], + }, + { + # Case-insensitivity for the Win SDK. Must run before win_toolchain below. + 'name': 'ciopfs_linux', + 'pattern': '.', + 'condition': 'checkout_win and host_os == "linux"', + 'action': [ 'python', + 'src/third_party/depot_tools/download_from_google_storage.py', + '--no_resume', + '--no_auth', + '--bucket', 'chromium-browser-clang/ciopfs', + '-s', 'src/build/ciopfs.sha1', + ] + }, + { + # Update the Windows toolchain if necessary. Must run before 'clang' below. + 'name': 'win_toolchain', + 'pattern': '.', + 'condition': 'checkout_win', + 'action': ['python', 'src/build/vs_toolchain.py', 'update', '--force'], + }, + { + # Update the Mac toolchain if necessary. + 'name': 'mac_toolchain', + 'pattern': '.', + 'condition': 'checkout_mac', + 'action': ['python', 'src/build/mac_toolchain.py'], + }, + { + # Note: On Win, this should run after win_toolchain, as it may use it. + 'name': 'clang', + 'pattern': '.', + 'action': ['python', 'src/tools/clang/scripts/update.py'], + }, + { + # Update LASTCHANGE. + 'name': 'lastchange', + 'pattern': '.', + 'action': ['python', 'src/build/util/lastchange.py', + '-o', 'src/build/util/LASTCHANGE'], + }, + # Pull clang-format binaries using checked-in hashes. 
+ { + 'name': 'clang_format_win', + 'pattern': '.', + 'condition': 'host_os == "win"', + 'action': [ 'download_from_google_storage', + '--no_resume', + '--platform=win32', + '--no_auth', + '--bucket', 'chromium-clang-format', + '-s', 'src/buildtools/win/clang-format.exe.sha1', + ], + }, + { + 'name': 'clang_format_mac', + 'pattern': '.', + 'condition': 'host_os == "mac"', + 'action': [ 'download_from_google_storage', + '--no_resume', + '--platform=darwin', + '--no_auth', + '--bucket', 'chromium-clang-format', + '-s', 'src/buildtools/mac/clang-format.sha1', + ], + }, + { + 'name': 'clang_format_linux', + 'pattern': '.', + 'condition': 'host_os == "linux"', + 'action': [ 'download_from_google_storage', + '--no_resume', + '--platform=linux*', + '--no_auth', + '--bucket', 'chromium-clang-format', + '-s', 'src/buildtools/linux64/clang-format.sha1', + ], + }, + # Pull rc binaries using checked-in hashes. + { + 'name': 'rc_win', + 'pattern': '.', + 'condition': 'checkout_win and host_os == "win"', + 'action': [ 'python', + 'src/third_party/depot_tools/download_from_google_storage.py', + '--no_resume', + '--no_auth', + '--bucket', 'chromium-browser-clang/rc', + '-s', 'src/build/toolchain/win/rc/win/rc.exe.sha1', + ], + }, + { + 'name': 'rc_mac', + 'pattern': '.', + 'condition': 'checkout_win and host_os == "mac"', + 'action': [ 'python', + 'src/third_party/depot_tools/download_from_google_storage.py', + '--no_resume', + '--no_auth', + '--bucket', 'chromium-browser-clang/rc', + '-s', 'src/build/toolchain/win/rc/mac/rc.sha1', + ], + }, + { + 'name': 'rc_linux', + 'pattern': '.', + 'condition': 'checkout_win and host_os == "linux"', + 'action': [ 'python', + 'src/third_party/depot_tools/download_from_google_storage.py', + '--no_resume', + '--no_auth', + '--bucket', 'chromium-browser-clang/rc', + '-s', 'src/build/toolchain/win/rc/linux64/rc.sha1', + ], + }, + { + 'name': 'test_fonts', + 'pattern': '.', + 'action': [ 'download_from_google_storage', + '--no_resume', + 
'--extract', + '--no_auth', + '--bucket', 'chromium-fonts', + '-s', 'src/third_party/test_fonts/test_fonts.tar.gz.sha1', + ], + }, + { + 'name': 'msan_chained_origins', + 'pattern': '.', + 'condition': 'checkout_instrumented_libraries', + 'action': [ 'python', + 'src/third_party/depot_tools/download_from_google_storage.py', + "--no_resume", + "--no_auth", + "--bucket", "chromium-instrumented-libraries", + "-s", "src/third_party/instrumented_libraries/binaries/msan-chained-origins-trusty.tgz.sha1", + ], + }, + { + 'name': 'msan_no_origins', + 'pattern': '.', + 'condition': 'checkout_instrumented_libraries', + 'action': [ 'python', + 'src/third_party/depot_tools/download_from_google_storage.py', + "--no_resume", + "--no_auth", + "--bucket", "chromium-instrumented-libraries", + "-s", "src/third_party/instrumented_libraries/binaries/msan-no-origins-trusty.tgz.sha1", + ], + }, + { + # Download test resources, i.e. video and audio files from Google Storage. + 'pattern': '.', + 'action': ['download_from_google_storage', + '--directory', + '--recursive', + '--num_threads=10', + '--no_auth', + '--quiet', + '--bucket', 'chromium-webrtc-resources', + 'src/resources'], + }, +] + +recursedeps = [] + +# Define rules for which include paths are allowed in our source. +include_rules = [ + # Base is only used to build Android APK tests and may not be referenced by + # WebRTC production code. + "-base", + "-chromium", + "+external/webrtc/webrtc", # Android platform build. + "+libyuv", + + # These should eventually move out of here. + "+common_types.h", + + "+WebRTC", + "+api", + "+modules/include", + "+rtc_base", + "+test", + "+rtc_tools", + + # Abseil allowlist. Keep this in sync with abseil-in-webrtc.md. 
+ "+absl/algorithm/algorithm.h", + "+absl/algorithm/container.h", + "+absl/base/attributes.h", + "+absl/base/config.h", + "+absl/base/const_init.h", + "+absl/base/macros.h", + "+absl/container/inlined_vector.h", + "+absl/memory/memory.h", + "+absl/meta/type_traits.h", + "+absl/strings/ascii.h", + "+absl/strings/match.h", + "+absl/strings/str_replace.h", + "+absl/strings/string_view.h", + "+absl/types/optional.h", + "+absl/types/variant.h", + + # Abseil flags are allowed in tests and tools. + "+absl/flags", +] + +specific_include_rules = { + "webrtc_lib_link_test\.cc": [ + "+media/engine", + "+modules/audio_device", + "+modules/audio_processing", + ] +} diff --git a/ENG_REVIEW_OWNERS b/ENG_REVIEW_OWNERS new file mode 100644 index 0000000..de5f240 --- /dev/null +++ b/ENG_REVIEW_OWNERS @@ -0,0 +1,11 @@ +# Current list of eng reviewers mostly for the purpose of reviewing +# dependencies on third-party directories (because //ENG_REVIEW_OWNERS is +# included by //third_party/OWNERS). + +# People listed in this file will only have to coordinate with chromium's eng +# review owners to ensure that the added dependency was OK. + +danilchap@webrtc.org +kwiberg@webrtc.org +mbonadei@webrtc.org +phoglund@webrtc.org diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..4c41b7b --- /dev/null +++ b/LICENSE @@ -0,0 +1,29 @@ +Copyright (c) 2011, The WebRTC project authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. 
+ + * Neither the name of Google nor the names of its contributors may + be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/OWNERS b/OWNERS new file mode 100644 index 0000000..d6a7842 --- /dev/null +++ b/OWNERS @@ -0,0 +1,22 @@ +henrika@webrtc.org +juberti@webrtc.org +kwiberg@webrtc.org +mflodman@webrtc.org +stefan@webrtc.org +tommi@webrtc.org +per-file .gitignore=* +per-file .gn=mbonadei@webrtc.org +per-file *.gn=mbonadei@webrtc.org +per-file *.gni=mbonadei@webrtc.org +per-file AUTHORS=* +per-file DEPS=* +per-file pylintrc=phoglund@webrtc.org +per-file WATCHLISTS=* +per-file abseil-in-webrtc.md=danilchap@webrtc.org +per-file abseil-in-webrtc.md=kwiberg@webrtc.org +per-file abseil-in-webrtc.md=mbonadei@webrtc.org +per-file style-guide.md=danilchap@webrtc.org +per-file style-guide.md=kwiberg@webrtc.org +per-file native-api.md=kwiberg@webrtc.org + +# COMPONENT: Internals>WebRTC diff --git a/PATENTS b/PATENTS new file mode 100644 index 0000000..190607a --- /dev/null +++ b/PATENTS @@ -0,0 +1,24 @@ +Additional IP Rights Grant (Patents) + +"This implementation" means the copyrightable works distributed by +Google as part of the WebRTC code 
package. + +Google hereby grants to you a perpetual, worldwide, non-exclusive, +no-charge, irrevocable (except as stated in this section) patent +license to make, have made, use, offer to sell, sell, import, +transfer, and otherwise run, modify and propagate the contents of this +implementation of the WebRTC code package, where such license applies +only to those patent claims, both currently owned by Google and +acquired in the future, licensable by Google that are necessarily +infringed by this implementation of the WebRTC code package. This +grant does not include claims that would be infringed only as a +consequence of further modification of this implementation. If you or +your agent or exclusive licensee institute or order or agree to the +institution of patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that this +implementation of the WebRTC code package or any code incorporated +within this implementation of the WebRTC code package constitutes +direct or contributory patent infringement, or inducement of patent +infringement, then any patent rights granted to you under this License +for this implementation of the WebRTC code package shall terminate as +of the date such litigation is filed. diff --git a/PRESUBMIT.py b/PRESUBMIT.py new file mode 100755 index 0000000..fad8627 --- /dev/null +++ b/PRESUBMIT.py @@ -0,0 +1,1291 @@ +# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import json +import os +import re +import sys +from collections import defaultdict +from contextlib import contextmanager + +# Files and directories that are *skipped* by cpplint in the presubmit script. +CPPLINT_EXCEPTIONS = [ + 'api/video_codecs/video_decoder.h', + 'common_types.cc', + 'common_types.h', + 'examples/objc', + 'media/base/stream_params.h', + 'media/base/video_common.h', + 'media/sctp/sctp_transport.cc', + 'modules/audio_coding', + 'modules/audio_device', + 'modules/audio_processing', + 'modules/desktop_capture', + 'modules/include/module_common_types.h', + 'modules/utility', + 'modules/video_capture', + 'p2p/base/pseudo_tcp.cc', + 'p2p/base/pseudo_tcp.h', + 'rtc_base', + 'sdk/android/src/jni', + 'sdk/objc', + 'system_wrappers', + 'test', + 'tools_webrtc', + 'voice_engine', +] + +# These filters will always be removed, even if the caller specifies a filter +# set, as they are problematic or broken in some way. +# +# Justifications for each filter: +# - build/c++11 : Rvalue ref checks are unreliable (false positives), +# include file and feature blocklists are +# google3-specific. +# - runtime/references : Mutable references are not banned by the Google +# C++ style guide anymore (starting from May 2020). +# - whitespace/operators: Same as above (doesn't seem sufficient to eliminate +# all move-related errors). +DISABLED_LINT_FILTERS = [ + '-build/c++11', + '-runtime/references', + '-whitespace/operators', +] + +# List of directories of "supported" native APIs. That means changes to headers +# will be done in a compatible way following this scheme: +# 1. Non-breaking changes are made. +# 2. The old APIs as marked as deprecated (with comments). +# 3. Deprecation is announced to discuss-webrtc@googlegroups.com and +# webrtc-users@google.com (internal list). +# 4. (later) The deprecated APIs are removed. +NATIVE_API_DIRS = ( + 'api', # All subdirectories of api/ are included as well. 
+ 'media/base', + 'media/engine', + 'modules/audio_device/include', + 'pc', +) + +# These directories should not be used but are maintained only to avoid breaking +# some legacy downstream code. +LEGACY_API_DIRS = ( + 'common_audio/include', + 'modules/audio_coding/include', + 'modules/audio_processing/include', + 'modules/congestion_controller/include', + 'modules/include', + 'modules/remote_bitrate_estimator/include', + 'modules/rtp_rtcp/include', + 'modules/rtp_rtcp/source', + 'modules/utility/include', + 'modules/video_coding/codecs/h264/include', + 'modules/video_coding/codecs/vp8/include', + 'modules/video_coding/codecs/vp9/include', + 'modules/video_coding/include', + 'rtc_base', + 'system_wrappers/include', +) + +# NOTE: The set of directories in API_DIRS should be the same as those +# listed in the table in native-api.md. +API_DIRS = NATIVE_API_DIRS[:] + LEGACY_API_DIRS[:] + +# TARGET_RE matches a GN target, and extracts the target name and the contents. +TARGET_RE = re.compile( + r'(?P\s*)(?P\w+)\("(?P\w+)"\) {' + r'(?P.*?)' + r'(?P=indent)}', + re.MULTILINE | re.DOTALL) + +# SOURCES_RE matches a block of sources inside a GN target. +SOURCES_RE = re.compile(r'sources \+?= \[(?P.*?)\]', + re.MULTILINE | re.DOTALL) + +# DEPS_RE matches a block of sources inside a GN target. +DEPS_RE = re.compile(r'\bdeps \+?= \[(?P.*?)\]', + re.MULTILINE | re.DOTALL) + +# FILE_PATH_RE matchies a file path. +FILE_PATH_RE = re.compile(r'"(?P(\w|\/)+)(?P\.\w+)"') + + +def FindSrcDirPath(starting_dir): + """Returns the abs path to the src/ dir of the project.""" + src_dir = starting_dir + while os.path.basename(src_dir) != 'src': + src_dir = os.path.normpath(os.path.join(src_dir, os.pardir)) + return src_dir + + +@contextmanager +def _AddToPath(*paths): + original_sys_path = sys.path + sys.path.extend(paths) + try: + yield + finally: + # Restore sys.path to what it was before. 
+ sys.path = original_sys_path + + +def VerifyNativeApiHeadersListIsValid(input_api, output_api): + """Ensures the list of native API header directories is up to date.""" + non_existing_paths = [] + native_api_full_paths = [ + input_api.os_path.join(input_api.PresubmitLocalPath(), + *path.split('/')) for path in API_DIRS] + for path in native_api_full_paths: + if not os.path.isdir(path): + non_existing_paths.append(path) + if non_existing_paths: + return [output_api.PresubmitError( + 'Directories to native API headers have changed which has made the ' + 'list in PRESUBMIT.py outdated.\nPlease update it to the current ' + 'location of our native APIs.', + non_existing_paths)] + return [] + + +API_CHANGE_MSG = """ +You seem to be changing native API header files. Please make sure that you: + 1. Make compatible changes that don't break existing clients. Usually + this is done by keeping the existing method signatures unchanged. + 2. Mark the old stuff as deprecated (see RTC_DEPRECATED macro). + 3. Create a timeline and plan for when the deprecated stuff will be + removed. (The amount of time we give users to change their code + should be informed by how much work it is for them. If they just + need to replace one name with another or something equally + simple, 1-2 weeks might be good; if they need to do serious work, + up to 3 months may be called for.) + 4. Update/inform existing downstream code owners to stop using the + deprecated stuff. (Send announcements to + discuss-webrtc@googlegroups.com and webrtc-users@google.com.) + 5. Remove the deprecated stuff, once the agreed-upon amount of time + has passed. 
+Related files: +""" + + +def CheckNativeApiHeaderChanges(input_api, output_api): + """Checks to remind proper changing of native APIs.""" + files = [] + source_file_filter = lambda x: input_api.FilterSourceFile( + x, files_to_check=[r'.+\.(gn|gni|h)$']) + for f in input_api.AffectedSourceFiles(source_file_filter): + for path in API_DIRS: + dn = os.path.dirname(f.LocalPath()) + if path == 'api': + # Special case: Subdirectories included. + if dn == 'api' or dn.startswith('api/'): + files.append(f.LocalPath()) + else: + # Normal case: Subdirectories not included. + if dn == path: + files.append(f.LocalPath()) + + if files: + return [output_api.PresubmitNotifyResult(API_CHANGE_MSG, files)] + return [] + + +def CheckNoIOStreamInHeaders(input_api, output_api, + source_file_filter): + """Checks to make sure no .h files include .""" + files = [] + pattern = input_api.re.compile(r'^#include\s*', + input_api.re.MULTILINE) + file_filter = lambda x: (input_api.FilterSourceFile(x) + and source_file_filter(x)) + for f in input_api.AffectedSourceFiles(file_filter): + if not f.LocalPath().endswith('.h'): + continue + contents = input_api.ReadFile(f) + if pattern.search(contents): + files.append(f) + + if len(files): + return [output_api.PresubmitError( + 'Do not #include in header files, since it inserts static ' + + 'initialization into every file including the header. Instead, ' + + '#include . 
See http://crbug.com/94794', + files)] + return [] + + +def CheckNoPragmaOnce(input_api, output_api, + source_file_filter): + """Make sure that banned functions are not used.""" + files = [] + pattern = input_api.re.compile(r'^#pragma\s+once', + input_api.re.MULTILINE) + file_filter = lambda x: (input_api.FilterSourceFile(x) + and source_file_filter(x)) + for f in input_api.AffectedSourceFiles(file_filter): + if not f.LocalPath().endswith('.h'): + continue + contents = input_api.ReadFile(f) + if pattern.search(contents): + files.append(f) + + if files: + return [output_api.PresubmitError( + 'Do not use #pragma once in header files.\n' + 'See http://www.chromium.org/developers/coding-style#TOC-File-headers', + files)] + return [] + + +def CheckNoFRIEND_TEST(input_api, output_api, # pylint: disable=invalid-name + source_file_filter): + """Make sure that gtest's FRIEND_TEST() macro is not used, the + FRIEND_TEST_ALL_PREFIXES() macro from testsupport/gtest_prod_util.h should be + used instead since that allows for FLAKY_, FAILS_ and DISABLED_ prefixes.""" + problems = [] + + file_filter = lambda f: (f.LocalPath().endswith(('.cc', '.h')) + and source_file_filter(f)) + for f in input_api.AffectedFiles(file_filter=file_filter): + for line_num, line in f.ChangedContents(): + if 'FRIEND_TEST(' in line: + problems.append(' %s:%d' % (f.LocalPath(), line_num)) + + if not problems: + return [] + return [output_api.PresubmitPromptWarning('WebRTC\'s code should not use ' + 'gtest\'s FRIEND_TEST() macro. 
Include testsupport/gtest_prod_util.h and ' + 'use FRIEND_TEST_ALL_PREFIXES() instead.\n' + '\n'.join(problems))] + + +def IsLintDisabled(disabled_paths, file_path): + """ Checks if a file is disabled for lint check.""" + for path in disabled_paths: + if file_path == path or os.path.dirname(file_path).startswith(path): + return True + return False + + +def CheckApprovedFilesLintClean(input_api, output_api, + source_file_filter=None): + """Checks that all new or non-exempt .cc and .h files pass cpplint.py. + This check is based on CheckChangeLintsClean in + depot_tools/presubmit_canned_checks.py but has less filters and only checks + added files.""" + result = [] + + # Initialize cpplint. + import cpplint + # Access to a protected member _XX of a client class + # pylint: disable=W0212 + cpplint._cpplint_state.ResetErrorCounts() + + lint_filters = cpplint._Filters() + lint_filters.extend(DISABLED_LINT_FILTERS) + cpplint._SetFilters(','.join(lint_filters)) + + # Create a platform independent exempt list for cpplint. + disabled_paths = [input_api.os_path.join(*path.split('/')) + for path in CPPLINT_EXCEPTIONS] + + # Use the strictest verbosity level for cpplint.py (level 1) which is the + # default when running cpplint.py from command line. To make it possible to + # work with not-yet-converted code, we're only applying it to new (or + # moved/renamed) files and files not listed in CPPLINT_EXCEPTIONS. + verbosity_level = 1 + files = [] + for f in input_api.AffectedSourceFiles(source_file_filter): + # Note that moved/renamed files also count as added. 
+ if f.Action() == 'A' or not IsLintDisabled(disabled_paths, + f.LocalPath()): + files.append(f.AbsoluteLocalPath()) + + for file_name in files: + cpplint.ProcessFile(file_name, verbosity_level) + + if cpplint._cpplint_state.error_count > 0: + if input_api.is_committing: + res_type = output_api.PresubmitError + else: + res_type = output_api.PresubmitPromptWarning + result = [res_type('Changelist failed cpplint.py check.')] + + return result + + +def CheckNoSourcesAbove(input_api, gn_files, output_api): + # Disallow referencing source files with paths above the GN file location. + source_pattern = input_api.re.compile(r' +sources \+?= \[(.*?)\]', + re.MULTILINE | re.DOTALL) + file_pattern = input_api.re.compile(r'"((\.\./.*?)|(//.*?))"') + violating_gn_files = set() + violating_source_entries = [] + for gn_file in gn_files: + contents = input_api.ReadFile(gn_file) + for source_block_match in source_pattern.finditer(contents): + # Find all source list entries starting with ../ in the source block + # (exclude overrides entries). + for file_list_match in file_pattern.finditer(source_block_match.group(1)): + source_file = file_list_match.group(1) + if 'overrides/' not in source_file: + violating_source_entries.append(source_file) + violating_gn_files.add(gn_file) + if violating_gn_files: + return [output_api.PresubmitError( + 'Referencing source files above the directory of the GN file is not ' + 'allowed. 
Please introduce new GN targets in the proper location ' + 'instead.\n' + 'Invalid source entries:\n' + '%s\n' + 'Violating GN files:' % '\n'.join(violating_source_entries), + items=violating_gn_files)] + return [] + + +def CheckAbseilDependencies(input_api, gn_files, output_api): + """Checks that Abseil dependencies are declared in `absl_deps`.""" + absl_re = re.compile(r'third_party/abseil-cpp', re.MULTILINE | re.DOTALL) + target_types_to_check = [ + 'rtc_library', + 'rtc_source_set', + 'rtc_static_library', + 'webrtc_fuzzer_test', + ] + error_msg = ('Abseil dependencies in target "%s" (file: %s) ' + 'should be moved to the "absl_deps" parameter.') + errors = [] + + for gn_file in gn_files: + gn_file_content = input_api.ReadFile(gn_file) + for target_match in TARGET_RE.finditer(gn_file_content): + target_type = target_match.group('target_type') + target_name = target_match.group('target_name') + target_contents = target_match.group('target_contents') + if target_type in target_types_to_check: + for deps_match in DEPS_RE.finditer(target_contents): + deps = deps_match.group('deps').splitlines() + for dep in deps: + if re.search(absl_re, dep): + errors.append( + output_api.PresubmitError(error_msg % (target_name, + gn_file.LocalPath()))) + break # no need to warn more than once per target + return errors + + +def CheckNoMixingSources(input_api, gn_files, output_api): + """Disallow mixing C, C++ and Obj-C/Obj-C++ in the same target. + + See bugs.webrtc.org/7743 for more context. + """ + + def _MoreThanOneSourceUsed(*sources_lists): + sources_used = 0 + for source_list in sources_lists: + if len(source_list): + sources_used += 1 + return sources_used > 1 + + errors = defaultdict(lambda: []) + for gn_file in gn_files: + gn_file_content = input_api.ReadFile(gn_file) + for target_match in TARGET_RE.finditer(gn_file_content): + # list_of_sources is a list of tuples of the form + # (c_files, cc_files, objc_files) that keeps track of all the sources + # defined in a target. 
A GN target can have more that on definition of + # sources (since it supports if/else statements). + # E.g.: + # rtc_static_library("foo") { + # if (is_win) { + # sources = [ "foo.cc" ] + # } else { + # sources = [ "foo.mm" ] + # } + # } + # This is allowed and the presubmit check should support this case. + list_of_sources = [] + c_files = [] + cc_files = [] + objc_files = [] + target_name = target_match.group('target_name') + target_contents = target_match.group('target_contents') + for sources_match in SOURCES_RE.finditer(target_contents): + if '+=' not in sources_match.group(0): + if c_files or cc_files or objc_files: + list_of_sources.append((c_files, cc_files, objc_files)) + c_files = [] + cc_files = [] + objc_files = [] + for file_match in FILE_PATH_RE.finditer(sources_match.group(1)): + file_path = file_match.group('file_path') + extension = file_match.group('extension') + if extension == '.c': + c_files.append(file_path + extension) + if extension == '.cc': + cc_files.append(file_path + extension) + if extension in ['.m', '.mm']: + objc_files.append(file_path + extension) + list_of_sources.append((c_files, cc_files, objc_files)) + for c_files_list, cc_files_list, objc_files_list in list_of_sources: + if _MoreThanOneSourceUsed(c_files_list, cc_files_list, objc_files_list): + all_sources = sorted(c_files_list + cc_files_list + objc_files_list) + errors[gn_file.LocalPath()].append((target_name, all_sources)) + if errors: + return [output_api.PresubmitError( + 'GN targets cannot mix .c, .cc and .m (or .mm) source files.\n' + 'Please create a separate target for each collection of sources.\n' + 'Mixed sources: \n' + '%s\n' + 'Violating GN files:\n%s\n' % (json.dumps(errors, indent=2), + '\n'.join(errors.keys())))] + return [] + + +def CheckNoPackageBoundaryViolations(input_api, gn_files, output_api): + cwd = input_api.PresubmitLocalPath() + with _AddToPath(input_api.os_path.join( + cwd, 'tools_webrtc', 'presubmit_checks_lib')): + from check_package_boundaries 
import CheckPackageBoundaries + build_files = [os.path.join(cwd, gn_file.LocalPath()) for gn_file in gn_files] + errors = CheckPackageBoundaries(cwd, build_files)[:5] + if errors: + return [output_api.PresubmitError( + 'There are package boundary violations in the following GN files:', + long_text='\n\n'.join(str(err) for err in errors))] + return [] + + +def _ReportFileAndLine(filename, line_num): + """Default error formatter for _FindNewViolationsOfRule.""" + return '%s (line %s)' % (filename, line_num) + + +def CheckNoWarningSuppressionFlagsAreAdded(gn_files, input_api, output_api, + error_formatter=_ReportFileAndLine): + """Make sure that warning suppression flags are not added wihtout a reason.""" + msg = ('Usage of //build/config/clang:extra_warnings is discouraged ' + 'in WebRTC.\n' + 'If you are not adding this code (e.g. you are just moving ' + 'existing code) or you want to add an exception,\n' + 'you can add a comment on the line that causes the problem:\n\n' + '"-Wno-odr" # no-presubmit-check TODO(bugs.webrtc.org/BUG_ID)\n' + '\n' + 'Affected files:\n') + errors = [] # 2-element tuples with (file, line number) + clang_warn_re = input_api.re.compile(r'//build/config/clang:extra_warnings') + no_presubmit_re = input_api.re.compile( + r'# no-presubmit-check TODO\(bugs\.webrtc\.org/\d+\)') + for f in gn_files: + for line_num, line in f.ChangedContents(): + if clang_warn_re.search(line) and not no_presubmit_re.search(line): + errors.append(error_formatter(f.LocalPath(), line_num)) + if errors: + return [output_api.PresubmitError(msg, errors)] + return [] + + +def CheckNoTestCaseUsageIsAdded(input_api, output_api, source_file_filter, + error_formatter=_ReportFileAndLine): + error_msg = ('Usage of legacy GoogleTest API detected!\nPlease use the ' + 'new API: https://github.com/google/googletest/blob/master/' + 'googletest/docs/primer.md#beware-of-the-nomenclature.\n' + 'Affected files:\n') + errors = [] # 2-element tuples with (file, line number) + test_case_re 
= input_api.re.compile(r'TEST_CASE') + file_filter = lambda f: (source_file_filter(f) + and f.LocalPath().endswith('.cc')) + for f in input_api.AffectedSourceFiles(file_filter): + for line_num, line in f.ChangedContents(): + if test_case_re.search(line): + errors.append(error_formatter(f.LocalPath(), line_num)) + if errors: + return [output_api.PresubmitError(error_msg, errors)] + return [] + + +def CheckNoStreamUsageIsAdded(input_api, output_api, + source_file_filter, + error_formatter=_ReportFileAndLine): + """Make sure that no more dependencies on stringstream are added.""" + error_msg = ('Usage of , and in WebRTC is ' + 'deprecated.\n' + 'This includes the following types:\n' + 'std::istringstream, std::ostringstream, std::wistringstream, ' + 'std::wostringstream,\n' + 'std::wstringstream, std::ostream, std::wostream, std::istream,' + 'std::wistream,\n' + 'std::iostream, std::wiostream.\n' + 'If you are not adding this code (e.g. you are just moving ' + 'existing code),\n' + 'you can add a comment on the line that causes the problem:\n\n' + '#include // no-presubmit-check TODO(webrtc:8982)\n' + 'std::ostream& F() { // no-presubmit-check TODO(webrtc:8982)\n' + '\n' + 'If you are adding new code, consider using ' + 'rtc::SimpleStringBuilder\n' + '(in rtc_base/strings/string_builder.h).\n' + 'Affected files:\n') + errors = [] # 2-element tuples with (file, line number) + include_re = input_api.re.compile(r'#include <(i|o|s)stream>') + usage_re = input_api.re.compile(r'std::(w|i|o|io|wi|wo|wio)(string)*stream') + no_presubmit_re = input_api.re.compile( + r'// no-presubmit-check TODO\(webrtc:8982\)') + file_filter = lambda x: (input_api.FilterSourceFile(x) + and source_file_filter(x)) + + def _IsException(file_path): + is_test = any(file_path.endswith(x) for x in ['_test.cc', '_tests.cc', + '_unittest.cc', + '_unittests.cc']) + return (file_path.startswith('examples') or + file_path.startswith('test') or + is_test) + + + for f in 
input_api.AffectedSourceFiles(file_filter): + # Usage of stringstream is allowed under examples/ and in tests. + if f.LocalPath() == 'PRESUBMIT.py' or _IsException(f.LocalPath()): + continue + for line_num, line in f.ChangedContents(): + if ((include_re.search(line) or usage_re.search(line)) + and not no_presubmit_re.search(line)): + errors.append(error_formatter(f.LocalPath(), line_num)) + if errors: + return [output_api.PresubmitError(error_msg, errors)] + return [] + + +def CheckPublicDepsIsNotUsed(gn_files, input_api, output_api): + """Checks that public_deps is not used without a good reason.""" + result = [] + no_presubmit_check_re = input_api.re.compile( + r'# no-presubmit-check TODO\(webrtc:\d+\)') + error_msg = ('public_deps is not recommended in WebRTC BUILD.gn files ' + 'because it doesn\'t map well to downstream build systems.\n' + 'Used in: %s (line %d).\n' + 'If you are not adding this code (e.g. you are just moving ' + 'existing code) or you have a good reason, you can add this ' + 'comment (verbatim) on the line that causes the problem:\n\n' + 'public_deps = [ # no-presubmit-check TODO(webrtc:8603)\n') + for affected_file in gn_files: + for (line_number, affected_line) in affected_file.ChangedContents(): + if 'public_deps' in affected_line: + surpressed = no_presubmit_check_re.search(affected_line) + if not surpressed: + result.append( + output_api.PresubmitError(error_msg % (affected_file.LocalPath(), + line_number))) + return result + + +def CheckCheckIncludesIsNotUsed(gn_files, input_api, output_api): + result = [] + error_msg = ('check_includes overrides are not allowed since it can cause ' + 'incorrect dependencies to form. It effectively means that your ' + 'module can include any .h file without depending on its ' + 'corresponding target. 
There are some exceptional cases when ' + 'this is allowed: if so, get approval from a .gn owner in the ' + 'root OWNERS file.\n' + 'Used in: %s (line %d).') + no_presubmit_re = input_api.re.compile( + r'# no-presubmit-check TODO\(bugs\.webrtc\.org/\d+\)') + for affected_file in gn_files: + for (line_number, affected_line) in affected_file.ChangedContents(): + if ('check_includes' in affected_line + and not no_presubmit_re.search(affected_line)): + result.append( + output_api.PresubmitError(error_msg % (affected_file.LocalPath(), + line_number))) + return result + + +def CheckGnChanges(input_api, output_api): + file_filter = lambda x: (input_api.FilterSourceFile( + x, files_to_check=(r'.+\.(gn|gni)$',), + files_to_skip=(r'.*/presubmit_checks_lib/testdata/.*',))) + + gn_files = [] + for f in input_api.AffectedSourceFiles(file_filter): + gn_files.append(f) + + result = [] + if gn_files: + result.extend(CheckNoSourcesAbove(input_api, gn_files, output_api)) + result.extend(CheckNoMixingSources(input_api, gn_files, output_api)) + result.extend(CheckAbseilDependencies(input_api, gn_files, output_api)) + result.extend(CheckNoPackageBoundaryViolations(input_api, gn_files, + output_api)) + result.extend(CheckPublicDepsIsNotUsed(gn_files, input_api, output_api)) + result.extend(CheckCheckIncludesIsNotUsed(gn_files, input_api, output_api)) + result.extend(CheckNoWarningSuppressionFlagsAreAdded(gn_files, input_api, + output_api)) + return result + + +def CheckGnGen(input_api, output_api): + """Runs `gn gen --check` with default args to detect mismatches between + #includes and dependencies in the BUILD.gn files, as well as general build + errors. 
+ """ + with _AddToPath(input_api.os_path.join( + input_api.PresubmitLocalPath(), 'tools_webrtc', 'presubmit_checks_lib')): + from build_helpers import RunGnCheck + errors = RunGnCheck(FindSrcDirPath(input_api.PresubmitLocalPath()))[:5] + if errors: + return [output_api.PresubmitPromptWarning( + 'Some #includes do not match the build dependency graph. Please run:\n' + ' gn gen --check ', + long_text='\n\n'.join(errors))] + return [] + + +def CheckUnwantedDependencies(input_api, output_api, source_file_filter): + """Runs checkdeps on #include statements added in this + change. Breaking - rules is an error, breaking ! rules is a + warning. + """ + # Copied from Chromium's src/PRESUBMIT.py. + + # We need to wait until we have an input_api object and use this + # roundabout construct to import checkdeps because this file is + # eval-ed and thus doesn't have __file__. + src_path = FindSrcDirPath(input_api.PresubmitLocalPath()) + checkdeps_path = input_api.os_path.join(src_path, 'buildtools', 'checkdeps') + if not os.path.exists(checkdeps_path): + return [output_api.PresubmitError( + 'Cannot find checkdeps at %s\nHave you run "gclient sync" to ' + 'download all the DEPS entries?' 
% checkdeps_path)] + with _AddToPath(checkdeps_path): + import checkdeps + from cpp_checker import CppChecker + from rules import Rule + + added_includes = [] + for f in input_api.AffectedFiles(file_filter=source_file_filter): + if not CppChecker.IsCppFile(f.LocalPath()): + continue + + changed_lines = [line for _, line in f.ChangedContents()] + added_includes.append([f.LocalPath(), changed_lines]) + + deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath()) + + error_descriptions = [] + warning_descriptions = [] + for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes( + added_includes): + description_with_path = '%s\n %s' % (path, rule_description) + if rule_type == Rule.DISALLOW: + error_descriptions.append(description_with_path) + else: + warning_descriptions.append(description_with_path) + + results = [] + if error_descriptions: + results.append(output_api.PresubmitError( + 'You added one or more #includes that violate checkdeps rules.\n' + 'Check that the DEPS files in these locations contain valid rules.\n' + 'See https://cs.chromium.org/chromium/src/buildtools/checkdeps/ for ' + 'more details about checkdeps.', + error_descriptions)) + if warning_descriptions: + results.append(output_api.PresubmitPromptOrNotify( + 'You added one or more #includes of files that are temporarily\n' + 'allowed but being removed. Can you avoid introducing the\n' + '#include? See relevant DEPS file(s) for details and contacts.\n' + 'See https://cs.chromium.org/chromium/src/buildtools/checkdeps/ for ' + 'more details about checkdeps.', + warning_descriptions)) + return results + + +def CheckCommitMessageBugEntry(input_api, output_api): + """Check that bug entries are well-formed in commit message.""" + bogus_bug_msg = ( + 'Bogus Bug entry: %s. Please specify the issue tracker prefix and the ' + 'issue number, separated by a colon, e.g. 
webrtc:123 or chromium:12345.') + results = [] + for bug in input_api.change.BugsFromDescription(): + bug = bug.strip() + if bug.lower() == 'none': + continue + if 'b/' not in bug and ':' not in bug: + try: + if int(bug) > 100000: + # Rough indicator for current chromium bugs. + prefix_guess = 'chromium' + else: + prefix_guess = 'webrtc' + results.append('Bug entry requires issue tracker prefix, e.g. %s:%s' % + (prefix_guess, bug)) + except ValueError: + results.append(bogus_bug_msg % bug) + elif not (re.match(r'\w+:\d+', bug) or re.match(r'b/\d+', bug)): + results.append(bogus_bug_msg % bug) + return [output_api.PresubmitError(r) for r in results] + + +def CheckChangeHasBugField(input_api, output_api): + """Requires that the changelist is associated with a bug. + + This check is stricter than the one in depot_tools/presubmit_canned_checks.py + since it fails the presubmit if the bug field is missing or doesn't contain + a bug reference. + + This supports both 'BUG=' and 'Bug:' since we are in the process of migrating + to Gerrit and it encourages the usage of 'Bug:'. + """ + if input_api.change.BugsFromDescription(): + return [] + else: + return [output_api.PresubmitError( + 'The "Bug: [bug number]" footer is mandatory. 
Please create a bug and ' + 'reference it using either of:\n' + ' * https://bugs.webrtc.org - reference it using Bug: webrtc:XXXX\n' + ' * https://crbug.com - reference it using Bug: chromium:XXXXXX')] + + +def CheckJSONParseErrors(input_api, output_api, source_file_filter): + """Check that JSON files do not contain syntax errors.""" + + def FilterFile(affected_file): + return (input_api.os_path.splitext(affected_file.LocalPath())[1] == '.json' + and source_file_filter(affected_file)) + + def GetJSONParseError(input_api, filename): + try: + contents = input_api.ReadFile(filename) + input_api.json.loads(contents) + except ValueError as e: + return e + return None + + results = [] + for affected_file in input_api.AffectedFiles( + file_filter=FilterFile, include_deletes=False): + parse_error = GetJSONParseError(input_api, + affected_file.AbsoluteLocalPath()) + if parse_error: + results.append(output_api.PresubmitError('%s could not be parsed: %s' % + (affected_file.LocalPath(), + parse_error))) + return results + + +def RunPythonTests(input_api, output_api): + def Join(*args): + return input_api.os_path.join(input_api.PresubmitLocalPath(), *args) + + test_directories = [ + input_api.PresubmitLocalPath(), + Join('rtc_tools', 'py_event_log_analyzer'), + Join('audio', 'test', 'unittests'), + ] + [ + root for root, _, files in os.walk(Join('tools_webrtc')) + if any(f.endswith('_test.py') for f in files) + ] + + tests = [] + for directory in test_directories: + tests.extend( + input_api.canned_checks.GetUnitTestsInDirectory( + input_api, + output_api, + directory, + allowlist=[r'.+_test\.py$'])) + return input_api.RunTests(tests, parallel=True) + + +def CheckUsageOfGoogleProtobufNamespace(input_api, output_api, + source_file_filter): + """Checks that the namespace google::protobuf has not been used.""" + files = [] + pattern = input_api.re.compile(r'google::protobuf') + proto_utils_path = os.path.join('rtc_base', 'protobuf_utils.h') + file_filter = lambda x: 
(input_api.FilterSourceFile(x) + and source_file_filter(x)) + for f in input_api.AffectedSourceFiles(file_filter): + if f.LocalPath() in [proto_utils_path, 'PRESUBMIT.py']: + continue + contents = input_api.ReadFile(f) + if pattern.search(contents): + files.append(f) + + if files: + return [output_api.PresubmitError( + 'Please avoid to use namespace `google::protobuf` directly.\n' + 'Add a using directive in `%s` and include that header instead.' + % proto_utils_path, files)] + return [] + + +def _LicenseHeader(input_api): + """Returns the license header regexp.""" + # Accept any year number from 2003 to the current year + current_year = int(input_api.time.strftime('%Y')) + allowed_years = (str(s) for s in reversed(xrange(2003, current_year + 1))) + years_re = '(' + '|'.join(allowed_years) + ')' + license_header = ( + r'.*? Copyright( \(c\))? %(year)s The WebRTC [Pp]roject [Aa]uthors\. ' + r'All [Rr]ights [Rr]eserved\.\n' + r'.*?\n' + r'.*? Use of this source code is governed by a BSD-style license\n' + r'.*? that can be found in the LICENSE file in the root of the source\n' + r'.*? tree\. An additional intellectual property rights grant can be ' + r'found\n' + r'.*? in the file PATENTS\. All contributing project authors may\n' + r'.*? be found in the AUTHORS file in the root of the source tree\.\n' + ) % { + 'year': years_re, + } + return license_header + + +def CommonChecks(input_api, output_api): + """Checks common to both upload and commit.""" + results = [] + # Filter out files that are in objc or ios dirs from being cpplint-ed since + # they do not follow C++ lint rules. 
+ exception_list = input_api.DEFAULT_FILES_TO_SKIP + ( + r".*\bobjc[\\\/].*", + r".*objc\.[hcm]+$", + ) + source_file_filter = lambda x: input_api.FilterSourceFile(x, None, + exception_list) + results.extend(CheckApprovedFilesLintClean( + input_api, output_api, source_file_filter)) + results.extend(input_api.canned_checks.CheckLicense( + input_api, output_api, _LicenseHeader(input_api))) + results.extend(input_api.canned_checks.RunPylint(input_api, output_api, + files_to_skip=(r'^base[\\\/].*\.py$', + r'^build[\\\/].*\.py$', + r'^buildtools[\\\/].*\.py$', + r'^infra[\\\/].*\.py$', + r'^ios[\\\/].*\.py$', + r'^out.*[\\\/].*\.py$', + r'^testing[\\\/].*\.py$', + r'^third_party[\\\/].*\.py$', + r'^tools[\\\/].*\.py$', + # TODO(phoglund): should arguably be checked. + r'^tools_webrtc[\\\/]mb[\\\/].*\.py$', + r'^xcodebuild.*[\\\/].*\.py$',), + pylintrc='pylintrc')) + + # TODO(nisse): talk/ is no more, so make below checks simpler? + # WebRTC can't use the presubmit_canned_checks.PanProjectChecks function since + # we need to have different license checks in talk/ and webrtc/ directories. + # Instead, hand-picked checks are included below. + + # .m and .mm files are ObjC files. For simplicity we will consider .h files in + # ObjC subdirectories ObjC headers. + objc_filter_list = (r'.+\.m$', r'.+\.mm$', r'.+objc\/.+\.h$') + # Skip long-lines check for DEPS and GN files. + build_file_filter_list = (r'.+\.gn$', r'.+\.gni$', 'DEPS') + # Also we will skip most checks for third_party directory. 
+ third_party_filter_list = (r'^third_party[\\\/].+',) + eighty_char_sources = lambda x: input_api.FilterSourceFile(x, + files_to_skip=build_file_filter_list + objc_filter_list + + third_party_filter_list) + hundred_char_sources = lambda x: input_api.FilterSourceFile(x, + files_to_check=objc_filter_list) + non_third_party_sources = lambda x: input_api.FilterSourceFile(x, + files_to_skip=third_party_filter_list) + + results.extend(input_api.canned_checks.CheckLongLines( + input_api, output_api, maxlen=80, source_file_filter=eighty_char_sources)) + results.extend(input_api.canned_checks.CheckLongLines( + input_api, output_api, maxlen=100, + source_file_filter=hundred_char_sources)) + results.extend(input_api.canned_checks.CheckChangeHasNoTabs( + input_api, output_api, source_file_filter=non_third_party_sources)) + results.extend(input_api.canned_checks.CheckChangeHasNoStrayWhitespace( + input_api, output_api, source_file_filter=non_third_party_sources)) + results.extend(input_api.canned_checks.CheckAuthorizedAuthor( + input_api, output_api, bot_whitelist=[ + 'chromium-webrtc-autoroll@webrtc-ci.iam.gserviceaccount.com' + ])) + results.extend(input_api.canned_checks.CheckChangeTodoHasOwner( + input_api, output_api, source_file_filter=non_third_party_sources)) + results.extend(input_api.canned_checks.CheckPatchFormatted( + input_api, output_api)) + results.extend(CheckNativeApiHeaderChanges(input_api, output_api)) + results.extend(CheckNoIOStreamInHeaders( + input_api, output_api, source_file_filter=non_third_party_sources)) + results.extend(CheckNoPragmaOnce( + input_api, output_api, source_file_filter=non_third_party_sources)) + results.extend(CheckNoFRIEND_TEST( + input_api, output_api, source_file_filter=non_third_party_sources)) + results.extend(CheckGnChanges(input_api, output_api)) + results.extend(CheckUnwantedDependencies( + input_api, output_api, source_file_filter=non_third_party_sources)) + results.extend(CheckJSONParseErrors( + input_api, output_api, 
source_file_filter=non_third_party_sources)) + results.extend(RunPythonTests(input_api, output_api)) + results.extend(CheckUsageOfGoogleProtobufNamespace( + input_api, output_api, source_file_filter=non_third_party_sources)) + results.extend(CheckOrphanHeaders( + input_api, output_api, source_file_filter=non_third_party_sources)) + results.extend(CheckNewlineAtTheEndOfProtoFiles( + input_api, output_api, source_file_filter=non_third_party_sources)) + results.extend(CheckNoStreamUsageIsAdded( + input_api, output_api, non_third_party_sources)) + results.extend(CheckNoTestCaseUsageIsAdded( + input_api, output_api, non_third_party_sources)) + results.extend(CheckAddedDepsHaveTargetApprovals(input_api, output_api)) + results.extend(CheckApiDepsFileIsUpToDate(input_api, output_api)) + results.extend(CheckAbslMemoryInclude( + input_api, output_api, non_third_party_sources)) + results.extend(CheckBannedAbslMakeUnique( + input_api, output_api, non_third_party_sources)) + results.extend(CheckObjcApiSymbols( + input_api, output_api, non_third_party_sources)) + return results + + +def CheckApiDepsFileIsUpToDate(input_api, output_api): + """Check that 'include_rules' in api/DEPS is up to date. + + The file api/DEPS must be kept up to date in order to avoid to avoid to + include internal header from WebRTC's api/ headers. + + This check is focused on ensuring that 'include_rules' contains a deny + rule for each root level directory. More focused allow rules can be + added to 'specific_include_rules'. + """ + results = [] + api_deps = os.path.join(input_api.PresubmitLocalPath(), 'api', 'DEPS') + with open(api_deps) as f: + deps_content = _ParseDeps(f.read()) + + include_rules = deps_content.get('include_rules', []) + dirs_to_skip = set(['api', 'docs']) + + # Only check top level directories affected by the current CL. 
+ dirs_to_check = set() + for f in input_api.AffectedFiles(): + path_tokens = [t for t in f.LocalPath().split(os.sep) if t] + if len(path_tokens) > 1: + if (path_tokens[0] not in dirs_to_skip and + os.path.isdir(os.path.join(input_api.PresubmitLocalPath(), + path_tokens[0]))): + dirs_to_check.add(path_tokens[0]) + + missing_include_rules = set() + for p in dirs_to_check: + rule = '-%s' % p + if rule not in include_rules: + missing_include_rules.add(rule) + + if missing_include_rules: + error_msg = [ + 'include_rules = [\n', + ' ...\n', + ] + + for r in sorted(missing_include_rules): + error_msg.append(' "%s",\n' % str(r)) + + error_msg.append(' ...\n') + error_msg.append(']\n') + + results.append(output_api.PresubmitError( + 'New root level directory detected! WebRTC api/ headers should ' + 'not #include headers from \n' + 'the new directory, so please update "include_rules" in file\n' + '"%s". Example:\n%s\n' % (api_deps, ''.join(error_msg)))) + + return results + +def CheckBannedAbslMakeUnique(input_api, output_api, source_file_filter): + file_filter = lambda f: (f.LocalPath().endswith(('.cc', '.h')) + and source_file_filter(f)) + + files = [] + for f in input_api.AffectedFiles( + include_deletes=False, file_filter=file_filter): + for _, line in f.ChangedContents(): + if 'absl::make_unique' in line: + files.append(f) + break + + if len(files): + return [output_api.PresubmitError( + 'Please use std::make_unique instead of absl::make_unique.\n' + 'Affected files:', + files)] + return [] + +def CheckObjcApiSymbols(input_api, output_api, source_file_filter): + rtc_objc_export = re.compile(r'RTC_OBJC_EXPORT(.|\n){26}', + re.MULTILINE | re.DOTALL) + file_filter = lambda f: (f.LocalPath().endswith(('.h')) + and source_file_filter(f)) + + files = [] + file_filter = lambda x: (input_api.FilterSourceFile(x) + and source_file_filter(x)) + for f in input_api.AffectedSourceFiles(file_filter): + if not f.LocalPath().endswith('.h') or not 'sdk/objc' in f.LocalPath(): + continue 
+ contents = input_api.ReadFile(f) + for match in rtc_objc_export.finditer(contents): + export_block = match.group(0) + if 'RTC_OBJC_TYPE' not in export_block: + files.append(f.LocalPath()) + + if len(files): + return [output_api.PresubmitError( + 'RTC_OBJC_EXPORT types must be wrapped into an RTC_OBJC_TYPE() ' + + 'macro.\n\n' + + 'For example:\n' + + 'RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE(RtcFoo)\n\n' + + 'RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE(RtcFoo)\n\n' + + 'Please fix the following files:', + files)] + return [] + +def CheckAbslMemoryInclude(input_api, output_api, source_file_filter): + pattern = input_api.re.compile( + r'^#include\s*"absl/memory/memory.h"', input_api.re.MULTILINE) + file_filter = lambda f: (f.LocalPath().endswith(('.cc', '.h')) + and source_file_filter(f)) + + files = [] + for f in input_api.AffectedFiles( + include_deletes=False, file_filter=file_filter): + contents = input_api.ReadFile(f) + if pattern.search(contents): + continue + for _, line in f.ChangedContents(): + if 'absl::WrapUnique' in line: + files.append(f) + break + + if len(files): + return [output_api.PresubmitError( + 'Please include "absl/memory/memory.h" header for absl::WrapUnique.\n' + 'This header may or may not be included transitively depending on the ' + 'C++ standard version.', + files)] + return [] + +def CheckChangeOnUpload(input_api, output_api): + results = [] + results.extend(CommonChecks(input_api, output_api)) + results.extend(CheckGnGen(input_api, output_api)) + results.extend( + input_api.canned_checks.CheckGNFormatted(input_api, output_api)) + return results + + +def CheckChangeOnCommit(input_api, output_api): + results = [] + results.extend(CommonChecks(input_api, output_api)) + results.extend(VerifyNativeApiHeadersListIsValid(input_api, output_api)) + results.extend(input_api.canned_checks.CheckOwners(input_api, output_api)) + results.extend(input_api.canned_checks.CheckChangeWasUploaded( + input_api, output_api)) + 
results.extend(input_api.canned_checks.CheckChangeHasDescription( + input_api, output_api)) + results.extend(CheckChangeHasBugField(input_api, output_api)) + results.extend(CheckCommitMessageBugEntry(input_api, output_api)) + results.extend(input_api.canned_checks.CheckTreeIsOpen( + input_api, output_api, + json_url='http://webrtc-status.appspot.com/current?format=json')) + return results + + +def CheckOrphanHeaders(input_api, output_api, source_file_filter): + # We need to wait until we have an input_api object and use this + # roundabout construct to import prebubmit_checks_lib because this file is + # eval-ed and thus doesn't have __file__. + error_msg = """{} should be listed in {}.""" + results = [] + exempt_paths = [ + os.path.join('tools_webrtc', 'ios', 'SDK'), + ] + with _AddToPath(input_api.os_path.join( + input_api.PresubmitLocalPath(), 'tools_webrtc', 'presubmit_checks_lib')): + from check_orphan_headers import GetBuildGnPathFromFilePath + from check_orphan_headers import IsHeaderInBuildGn + + file_filter = lambda x: input_api.FilterSourceFile( + x, files_to_skip=exempt_paths) and source_file_filter(x) + for f in input_api.AffectedSourceFiles(file_filter): + if f.LocalPath().endswith('.h'): + file_path = os.path.abspath(f.LocalPath()) + root_dir = os.getcwd() + gn_file_path = GetBuildGnPathFromFilePath(file_path, os.path.exists, + root_dir) + in_build_gn = IsHeaderInBuildGn(file_path, gn_file_path) + if not in_build_gn: + results.append(output_api.PresubmitError(error_msg.format( + f.LocalPath(), os.path.relpath(gn_file_path)))) + return results + + +def CheckNewlineAtTheEndOfProtoFiles(input_api, output_api, source_file_filter): + """Checks that all .proto files are terminated with a newline.""" + error_msg = 'File {} must end with exactly one newline.' 
+ results = [] + file_filter = lambda x: input_api.FilterSourceFile( + x, files_to_check=(r'.+\.proto$',)) and source_file_filter(x) + for f in input_api.AffectedSourceFiles(file_filter): + file_path = f.LocalPath() + with open(file_path) as f: + lines = f.readlines() + if len(lines) > 0 and not lines[-1].endswith('\n'): + results.append(output_api.PresubmitError(error_msg.format(file_path))) + return results + + +def _ExtractAddRulesFromParsedDeps(parsed_deps): + """Extract the rules that add dependencies from a parsed DEPS file. + + Args: + parsed_deps: the locals dictionary from evaluating the DEPS file.""" + add_rules = set() + add_rules.update([ + rule[1:] for rule in parsed_deps.get('include_rules', []) + if rule.startswith('+') or rule.startswith('!') + ]) + for _, rules in parsed_deps.get('specific_include_rules', + {}).iteritems(): + add_rules.update([ + rule[1:] for rule in rules + if rule.startswith('+') or rule.startswith('!') + ]) + return add_rules + + +def _ParseDeps(contents): + """Simple helper for parsing DEPS files.""" + # Stubs for handling special syntax in the root DEPS file. + class VarImpl(object): + + def __init__(self, local_scope): + self._local_scope = local_scope + + def Lookup(self, var_name): + """Implements the Var syntax.""" + try: + return self._local_scope['vars'][var_name] + except KeyError: + raise Exception('Var is not defined: %s' % var_name) + + local_scope = {} + global_scope = { + 'Var': VarImpl(local_scope).Lookup, + } + exec contents in global_scope, local_scope + return local_scope + + +def _CalculateAddedDeps(os_path, old_contents, new_contents): + """Helper method for _CheckAddedDepsHaveTargetApprovals. Returns + a set of DEPS entries that we should look up. + + For a directory (rather than a specific filename) we fake a path to + a specific filename by adding /DEPS. This is chosen as a file that + will seldom or never be subject to per-file include_rules. + """ + # We ignore deps entries on auto-generated directories. 
+ auto_generated_dirs = ['grit', 'jni'] + + old_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(old_contents)) + new_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(new_contents)) + + added_deps = new_deps.difference(old_deps) + + results = set() + for added_dep in added_deps: + if added_dep.split('/')[0] in auto_generated_dirs: + continue + # Assume that a rule that ends in .h is a rule for a specific file. + if added_dep.endswith('.h'): + results.add(added_dep) + else: + results.add(os_path.join(added_dep, 'DEPS')) + return results + + +def CheckAddedDepsHaveTargetApprovals(input_api, output_api): + """When a dependency prefixed with + is added to a DEPS file, we + want to make sure that the change is reviewed by an OWNER of the + target file or directory, to avoid layering violations from being + introduced. This check verifies that this happens. + """ + virtual_depended_on_files = set() + + file_filter = lambda f: not input_api.re.match( + r"^third_party[\\\/](WebKit|blink)[\\\/].*", f.LocalPath()) + for f in input_api.AffectedFiles(include_deletes=False, + file_filter=file_filter): + filename = input_api.os_path.basename(f.LocalPath()) + if filename == 'DEPS': + virtual_depended_on_files.update(_CalculateAddedDeps( + input_api.os_path, + '\n'.join(f.OldContents()), + '\n'.join(f.NewContents()))) + + if not virtual_depended_on_files: + return [] + + if input_api.is_committing: + if input_api.tbr: + return [output_api.PresubmitNotifyResult( + '--tbr was specified, skipping OWNERS check for DEPS additions')] + if input_api.dry_run: + return [output_api.PresubmitNotifyResult( + 'This is a dry run, skipping OWNERS check for DEPS additions')] + if not input_api.change.issue: + return [output_api.PresubmitError( + "DEPS approval by OWNERS check failed: this change has " + "no change number, so we can't check it for approvals.")] + output = output_api.PresubmitError + else: + output = output_api.PresubmitNotifyResult + + owners_db = input_api.owners_db + 
owner_email, reviewers = ( + input_api.canned_checks.GetCodereviewOwnerAndReviewers( + input_api, + owners_db.email_regexp, + approval_needed=input_api.is_committing)) + + owner_email = owner_email or input_api.change.author_email + + reviewers_plus_owner = set(reviewers) + if owner_email: + reviewers_plus_owner.add(owner_email) + missing_files = owners_db.files_not_covered_by(virtual_depended_on_files, + reviewers_plus_owner) + + # We strip the /DEPS part that was added by + # _FilesToCheckForIncomingDeps to fake a path to a file in a + # directory. + def StripDeps(path): + start_deps = path.rfind('/DEPS') + if start_deps != -1: + return path[:start_deps] + else: + return path + unapproved_dependencies = ["'+%s'," % StripDeps(path) + for path in missing_files] + + if unapproved_dependencies: + output_list = [ + output('You need LGTM from owners of depends-on paths in DEPS that were ' + 'modified in this CL:\n %s' % + '\n '.join(sorted(unapproved_dependencies)))] + suggested_owners = owners_db.reviewers_for(missing_files, owner_email) + output_list.append(output( + 'Suggested missing target path OWNERS:\n %s' % + '\n '.join(suggested_owners or []))) + return output_list + + return [] diff --git a/README.chromium b/README.chromium new file mode 100644 index 0000000..58c8da8 --- /dev/null +++ b/README.chromium @@ -0,0 +1,14 @@ +Name: WebRTC +URL: http://www.webrtc.org +Version: 90 +CPEPrefix: cpe:/a:webrtc_project:webrtc:90 +License: BSD +License File: LICENSE + +Description: +WebRTC provides real time voice and video processing +functionality to enable the implementation of +PeerConnection/MediaStream. + +Third party code used in this project is described +in the file LICENSE_THIRD_PARTY. 
diff --git a/README.md b/README.md new file mode 100644 index 0000000..e91fb16 --- /dev/null +++ b/README.md @@ -0,0 +1,30 @@ +**WebRTC is a free, open software project** that provides browsers and mobile +applications with Real-Time Communications (RTC) capabilities via simple APIs. +The WebRTC components have been optimized to best serve this purpose. + +**Our mission:** To enable rich, high-quality RTC applications to be +developed for the browser, mobile platforms, and IoT devices, and allow them +all to communicate via a common set of protocols. + +The WebRTC initiative is a project supported by Google, Mozilla and Opera, +amongst others. + +### Development + +See [here][native-dev] for instructions on how to get started +developing with the native code. + +[Authoritative list](native-api.md) of directories that contain the +native API header files. + +### More info + + * Official web site: http://www.webrtc.org + * Master source code repo: https://webrtc.googlesource.com/src + * Samples and reference apps: https://github.com/webrtc + * Mailing list: http://groups.google.com/group/discuss-webrtc + * Continuous build: http://build.chromium.org/p/client.webrtc + * [Coding style guide](style-guide.md) + * [Code of conduct](CODE_OF_CONDUCT.md) + +[native-dev]: https://webrtc.googlesource.com/src/+/refs/heads/master/docs/native-code/index.md diff --git a/WATCHLISTS b/WATCHLISTS new file mode 100644 index 0000000..cf4ea32 --- /dev/null +++ b/WATCHLISTS @@ -0,0 +1,189 @@ +# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +# Inspired by chromium.org: +# http://dev.chromium.org/developers/contributing-code/watchlists + +{ + 'WATCHLIST_DEFINITIONS': { + 'this_file': { + 'filepath': '^WATCHLISTS$', + }, + 'all_webrtc': { + # NOTE: if you like this you might like webrtc-reviews@webrtc.org! + 'filepath': '^.*', + }, + 'root_files': { + # Non-recursive contents of ./ + 'filepath': '^[^/]*$', + }, + 'build_files': { + 'filepath': '\.gni$|\.gn$', + }, + 'java_files': { + 'filepath': '\.java$|\.xml$', + }, + 'api': { + 'filepath': 'api/.*', + }, + 'audio': { + 'filepath': 'audio/.*', + }, + 'base': { + 'filepath': 'rtc_base/.*', + }, + 'call': { + 'filepath': 'call/.*', + }, + 'media_engine': { + 'filepath': 'media/engine/.*', + }, + 'video': { + 'filepath': 'video/.*', + }, + 'common_audio': { + 'filepath': 'common_audio/.*', + }, + 'common_video': { + 'filepath': 'common_video/.*', + }, + 'video_capture': { + 'filepath': 'modules/video_capture/.*', + }, + 'audio_device': { + 'filepath': 'modules/audio_device/.*', + }, + 'audio_coding': { + 'filepath': 'modules/audio_coding/.*', + }, + 'neteq': { + 'filepath': 'modules/audio_coding/neteq/.*', + }, + 'audio_mixer': { + 'filepath': 'modules/audio_mixer/.*', + }, + 'audio_processing': { + 'filepath': 'modules/audio_processing/.*', + }, + 'video_coding': { + 'filepath': 'modules/video_coding/.*', + }, + 'video_processing': { + 'filepath': 'modules/video_processing/.*', + }, + 'bitrate_controller': { + 'filepath': 'modules/bitrate_controller/.*' + }, + 'congestion_controller': { + 'filepath': 'modules/congestion_controller/.*' + }, + 'remote_bitrate_estimator': { + 'filepath': 'modules/remote_bitrate_estimator/.*' + }, + 'pacing': { + 'filepath': 'modules/pacing/.*' + }, + 'rtp_rtcp': { + 'filepath': 'modules/rtp_rtcp/.*' + }, + 'system_wrappers': { + 'filepath': 'system_wrappers/.*', + }, + 'pc': { + 'filepath': '^pc/.*', + }, + }, + + 'WATCHLISTS': { + 'this_file': [], + 'all_webrtc': [], + 'root_files': ['peah@webrtc.org', + 
'qiang.lu@intel.com', + 'yujie.mao@webrtc.org'], + 'build_files': ['mbonadei@webrtc.org'], + 'common_audio': ['alessiob@webrtc.org', + 'aluebs@webrtc.org', + 'audio-team@agora.io', + 'minyue@webrtc.org', + 'peah@webrtc.org', + 'saza@webrtc.org'], + 'audio': ['peah@webrtc.org'], + 'api': ['kwiberg@webrtc.org','peah@webrtc.org'], + 'base': ['kwiberg@webrtc.org'], + 'call': ['mflodman@webrtc.org', + 'stefan@webrtc.org'], + 'video': ['mflodman@webrtc.org', + 'stefan@webrtc.org', + 'video-team@agora.io', + 'yujie.mao@webrtc.org', + 'zhengzhonghou@agora.io'], + 'video_capture': ['mflodman@webrtc.org', + 'perkj@webrtc.org', + 'sdk-team@agora.io', + 'zhengzhonghou@agora.io'], + 'audio_device': ['audio-team@agora.io', + 'henrika@webrtc.org', + 'peah@webrtc.org', + 'saza@webrtc.org', + 'sdk-team@agora.io'], + 'audio_coding': ['alessiob@webrtc.org', + 'audio-team@agora.io', + 'henrik.lundin@webrtc.org', + 'kwiberg@webrtc.org', + 'minyue@webrtc.org', + 'peah@webrtc.org', + 'saza@webrtc.org'], + 'neteq': ['alessiob@webrtc.org', + 'audio-team@agora.io', + 'henrik.lundin@webrtc.org', + 'minyue@webrtc.org', + 'saza@webrtc.org'], + 'audio_mixer': ['aleloi@webrtc.org', + 'henrik.lundin@webrtc.org', + 'peah@webrtc.org', + 'saza@webrtc.org'], + 'audio_processing': ['alessiob@webrtc.org', + 'aluebs@webrtc.org', + 'audio-team@agora.io', + 'fhernqvist@webrtc.org', + 'henrik.lundin@webrtc.org', + 'kwiberg@webrtc.org', + 'minyue@webrtc.org', + 'peah@webrtc.org', + 'saza@webrtc.org'], + 'video_coding': ['mflodman@webrtc.org', + 'stefan@webrtc.org', + 'video-team@agora.io', + 'zhengzhonghou@agora.io'], + 'video_processing': ['stefan@webrtc.org', + 'video-team@agora.io', + 'zhengzhonghou@agora.io'], + 'bitrate_controller': ['mflodman@webrtc.org', + 'stefan@webrtc.org', + 'srte@webrtc.org', + 'zhuangzesen@agora.io'], + 'congestion_controller': ['srte@webrtc.org'], + 'remote_bitrate_estimator': ['mflodman@webrtc.org', + 'stefan@webrtc.org', + 'zhuangzesen@agora.io'], + 'pacing': 
['mflodman@webrtc.org', + 'srte@webrtc.org', + 'stefan@webrtc.org', + 'zhuangzesen@agora.io'], + 'rtp_rtcp': ['mflodman@webrtc.org', + 'stefan@webrtc.org', + 'danilchap@webrtc.org', + 'zhuangzesen@agora.io'], + 'system_wrappers': ['fengyue@agora.io', + 'henrika@webrtc.org', + 'mflodman@webrtc.org', + 'peah@webrtc.org', + 'zhengzhonghou@agora.io'], + 'pc': ['steveanton+watch@webrtc.org'], + }, +} diff --git a/abseil-in-webrtc.md b/abseil-in-webrtc.md new file mode 100644 index 0000000..da03af0 --- /dev/null +++ b/abseil-in-webrtc.md @@ -0,0 +1,74 @@ +# Using Abseil in WebRTC + +You may use a subset of the utilities provided by the [Abseil][abseil] +library when writing WebRTC C++ code. Below, we list the explicitly +*allowed* and the explicitly *disallowed* subsets of Abseil; if you +find yourself in need of something that isn’t in either subset, +please add it to the *allowed* subset in this doc in the same CL that +adds the first use. + +[abseil]: https://abseil.io/about/ + +## **Allowed** + +* `absl::InlinedVector` +* `absl::WrapUnique` +* `absl::optional` and related stuff from `absl/types/optional.h`. +* `absl::string_view` +* The functions in `absl/strings/ascii.h`, `absl/strings/match.h`, + and `absl/strings/str_replace.h`. +* `absl::is_trivially_copy_constructible`, + `absl::is_trivially_copy_assignable`, and + `absl::is_trivially_destructible` from `absl/meta/type_traits.h`. +* `absl::variant` and related stuff from `absl/types/variant.h`. +* The functions in `absl/algorithm/algorithm.h` and + `absl/algorithm/container.h`. +* `absl/base/const_init.h` for mutex initialization. +* The macros in `absl/base/attributes.h`, `absl/base/config.h` and + `absl/base/macros.h`. + + +## **Disallowed** + +### `absl::make_unique` + +*Use `std::make_unique` instead.* + +### `absl::Mutex` + +*Use `webrtc::Mutex` instead.* + +Chromium has a ban on new static initializers, and `absl::Mutex` uses +one. 
To make `absl::Mutex` available, we would need to nicely ask the +Abseil team to remove that initializer (like they already did for a +spinlock initializer). Additionally, `absl::Mutex` handles time in a +way that may not be compatible with the rest of WebRTC. + +### `absl::Span` + +*Use `rtc::ArrayView` instead.* + +`absl::Span` differs from `rtc::ArrayView` on several points, and both +of them differ from the `std::span` that was voted into +C++20—and `std::span` is likely to undergo further changes +before C++20 is finalized. We should just keep using `rtc::ArrayView` +and avoid `absl::Span` until C++20 is finalized and the Abseil team +has decided if they will change `absl::Span` to match. +[Bug](https://bugs.webrtc.org/9214). + +### `absl::StrCat`, `absl::StrAppend`, `absl::StrJoin`, `absl::StrSplit` + +*Use `rtc::SimpleStringBuilder` to build strings.* + +These are optimized for speed, not binary size. Even `StrCat` calls +with a modest number of arguments can easily add several hundred bytes +to the binary. + +## How to depend on Abseil + +For build targets `rtc_library`, `rtc_source_set` and `rtc_static_library`, +dependencies on Abseil need to be listed in `absl_deps` instead of `deps`. + +This is needed in order to support the Abseil component build in Chromium. In +such a build mode, WebRTC will depend on a unique Abseil build target that will +generate a shared library. diff --git a/api/BUILD.gn b/api/BUILD.gn new file mode 100644 index 0000000..6b4f04d --- /dev/null +++ b/api/BUILD.gn @@ -0,0 +1,1054 @@ +# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../webrtc.gni") +if (is_android) { + import("//build/config/android/config.gni") + import("//build/config/android/rules.gni") +} + +group("api") { + visibility = [ "*" ] + deps = [] + + if (!build_with_mozilla) { + deps += [ ":libjingle_peerconnection_api" ] + } +} + +rtc_source_set("call_api") { + visibility = [ "*" ] + sources = [ "call/audio_sink.h" ] +} + +rtc_source_set("callfactory_api") { + visibility = [ "*" ] + sources = [ "call/call_factory_interface.h" ] + deps = [ "../rtc_base/system:rtc_export" ] +} + +if (!build_with_chromium) { + rtc_library("create_peerconnection_factory") { + visibility = [ "*" ] + allow_poison = [ "default_task_queue" ] + sources = [ + "create_peerconnection_factory.cc", + "create_peerconnection_factory.h", + ] + deps = [ + ":callfactory_api", + ":libjingle_peerconnection_api", + ":scoped_refptr", + "../api/rtc_event_log:rtc_event_log_factory", + "../media:rtc_audio_video", + "../media:rtc_media_base", + "../modules/audio_device:audio_device_api", + "../modules/audio_processing:api", + "../pc:peerconnection", + "../rtc_base", + "../rtc_base:rtc_base_approved", + "audio:audio_mixer_api", + "audio_codecs:audio_codecs_api", + "task_queue:default_task_queue_factory", + "video_codecs:video_codecs_api", + ] + } +} + +rtc_library("rtp_headers") { + visibility = [ "*" ] + sources = [ + "rtp_headers.cc", + "rtp_headers.h", + ] + deps = [ + ":array_view", + "..:webrtc_common", + "units:timestamp", + "video:video_rtp_headers", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("rtp_packet_info") { + visibility = [ "*" ] + sources = [ + "rtp_packet_info.cc", + "rtp_packet_info.h", + "rtp_packet_infos.h", + ] + deps = [ + ":array_view", + ":refcountedbase", + ":rtp_headers", + ":scoped_refptr", + "..:webrtc_common", + "../rtc_base:rtc_base_approved", + "../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("media_stream_interface") { 
+ visibility = [ "*" ] + sources = [ + "media_stream_interface.cc", + "media_stream_interface.h", + "notifier.h", + ] + deps = [ + ":audio_options_api", + ":rtp_parameters", + ":scoped_refptr", + "../modules/audio_processing:audio_processing_statistics", + "../rtc_base:checks", + "../rtc_base:refcount", + "../rtc_base/system:rtc_export", + "video:recordable_encoded_frame", + "video:video_frame", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("libjingle_peerconnection_api") { + visibility = [ "*" ] + cflags = [] + sources = [ + "candidate.cc", + "candidate.h", + "crypto_params.h", + "data_channel_interface.cc", + "data_channel_interface.h", + "dtls_transport_interface.cc", + "dtls_transport_interface.h", + "dtmf_sender_interface.h", + "ice_transport_interface.h", + "jsep.cc", + "jsep.h", + "jsep_ice_candidate.cc", + "jsep_ice_candidate.h", + "jsep_session_description.h", + "media_stream_proxy.h", + "media_stream_track_proxy.h", + "peer_connection_factory_proxy.h", + "peer_connection_interface.cc", + "peer_connection_interface.h", + "peer_connection_proxy.h", + "proxy.cc", + "proxy.h", + "rtp_receiver_interface.cc", + "rtp_receiver_interface.h", + "rtp_sender_interface.cc", + "rtp_sender_interface.h", + "rtp_transceiver_interface.cc", + "rtp_transceiver_interface.h", + "sctp_transport_interface.cc", + "sctp_transport_interface.h", + "set_local_description_observer_interface.h", + "set_remote_description_observer_interface.h", + "stats_types.cc", + "stats_types.h", + "turn_customizer.h", + "uma_metrics.h", + "video_track_source_proxy.h", + ] + deps = [ + ":array_view", + ":audio_options_api", + ":callfactory_api", + ":fec_controller_api", + ":frame_transformer_interface", + ":libjingle_logging_api", + ":media_stream_interface", + ":network_state_predictor_api", + ":packet_socket_factory", + ":priority", + ":rtc_error", + ":rtc_stats_api", + ":rtp_packet_info", + ":rtp_parameters", + ":rtp_transceiver_direction", + 
":scoped_refptr", + "adaptation:resource_adaptation_api", + "audio:audio_mixer_api", + "audio_codecs:audio_codecs_api", + "crypto:frame_decryptor_interface", + "crypto:frame_encryptor_interface", + "crypto:options", + "neteq:neteq_api", + "rtc_event_log", + "task_queue", + "transport:bitrate_settings", + "transport:enums", + "transport:network_control", + "transport:webrtc_key_value_config", + "transport/rtp:rtp_source", + "units:data_rate", + "units:timestamp", + "video:encoded_image", + "video:video_frame", + "video:video_rtp_headers", + + # Basically, don't add stuff here. You might break sensitive downstream + # targets like pnacl. API should not depend on anything outside of this + # file, really. All these should arguably go away in time. + "..:webrtc_common", + "../media:rtc_media_base", + "../media:rtc_media_config", + "../modules/audio_processing:audio_processing_statistics", + "../rtc_base", + "../rtc_base:checks", + "../rtc_base:deprecation", + "../rtc_base:rtc_base_approved", + "../rtc_base/system:rtc_export", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_source_set("frame_transformer_interface") { + visibility = [ "*" ] + sources = [ "frame_transformer_interface.h" ] + deps = [ + ":scoped_refptr", + "../rtc_base:refcount", + "video:encoded_frame", + "video:video_frame_metadata", + ] +} + +rtc_library("rtc_error") { + visibility = [ "*" ] + sources = [ + "rtc_error.cc", + "rtc_error.h", + ] + deps = [ + "../rtc_base:checks", + "../rtc_base:logging", + "../rtc_base:macromagic", + "../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_source_set("packet_socket_factory") { + visibility = [ "*" ] + sources = [ + "async_resolver_factory.h", + "packet_socket_factory.h", + ] + deps = [ + "../rtc_base:rtc_base", + 
"../rtc_base/system:rtc_export", + ] +} + +rtc_source_set("scoped_refptr") { + visibility = [ "*" ] + sources = [ "scoped_refptr.h" ] +} + +rtc_source_set("video_quality_test_fixture_api") { + visibility = [ "*" ] + testonly = true + sources = [ "test/video_quality_test_fixture.h" ] + deps = [ + ":fec_controller_api", + ":libjingle_peerconnection_api", + ":network_state_predictor_api", + ":rtp_parameters", + ":simulated_network_api", + "../call:fake_network", + "../call:rtp_interfaces", + "../test:test_common", + "../test:video_test_common", + "transport:bitrate_settings", + "transport:network_control", + "video_codecs:video_codecs_api", + ] +} + +rtc_source_set("video_quality_analyzer_api") { + visibility = [ "*" ] + testonly = true + sources = [ "test/video_quality_analyzer_interface.h" ] + + deps = [ + ":array_view", + ":stats_observer_interface", + "video:encoded_image", + "video:video_frame", + "video:video_rtp_headers", + "video_codecs:video_codecs_api", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_source_set("track_id_stream_info_map") { + visibility = [ "*" ] + sources = [ "test/track_id_stream_info_map.h" ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] +} + +rtc_source_set("rtp_transceiver_direction") { + visibility = [ "*" ] + sources = [ "rtp_transceiver_direction.h" ] +} + +rtc_source_set("priority") { + sources = [ "priority.h" ] +} + +rtc_library("rtp_parameters") { + visibility = [ "*" ] + sources = [ + "media_types.cc", + "media_types.h", + "rtp_parameters.cc", + "rtp_parameters.h", + ] + deps = [ + ":array_view", + ":priority", + ":rtp_transceiver_direction", + "../rtc_base:checks", + "../rtc_base:stringutils", + "../rtc_base/system:rtc_export", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +if (is_android) { + java_cpp_enum("priority_enums") { + sources = [ "priority.h" ] + } +} + 
+rtc_source_set("audio_quality_analyzer_api") { + visibility = [ "*" ] + testonly = true + sources = [ "test/audio_quality_analyzer_interface.h" ] + + deps = [ + ":stats_observer_interface", + ":track_id_stream_info_map", + ] +} + +rtc_source_set("stats_observer_interface") { + visibility = [ "*" ] + testonly = true + sources = [ "test/stats_observer_interface.h" ] + + deps = [ ":rtc_stats_api" ] + + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] +} + +rtc_source_set("peer_connection_quality_test_fixture_api") { + visibility = [ "*" ] + testonly = true + sources = [ "test/peerconnection_quality_test_fixture.h" ] + + deps = [ + ":audio_quality_analyzer_api", + ":callfactory_api", + ":fec_controller_api", + ":frame_generator_api", + ":function_view", + ":libjingle_peerconnection_api", + ":media_stream_interface", + ":network_state_predictor_api", + ":packet_socket_factory", + ":rtp_parameters", + ":simulated_network_api", + ":stats_observer_interface", + ":track_id_stream_info_map", + ":video_quality_analyzer_api", + "../media:rtc_media_base", + "../rtc_base:rtc_base", + "rtc_event_log", + "task_queue", + "transport:network_control", + "units:time_delta", + "video:video_frame", + "video_codecs:video_codecs_api", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_source_set("frame_generator_api") { + visibility = [ "*" ] + testonly = true + sources = [ "test/frame_generator_interface.h" ] + + deps = [ + ":scoped_refptr", + "video:video_frame", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("test_dependency_factory") { + visibility = [ "*" ] + testonly = true + sources = [ + "test/test_dependency_factory.cc", + "test/test_dependency_factory.h", + ] + deps = [ + ":video_quality_test_fixture_api", + "../rtc_base:checks", + "../rtc_base:platform_thread_types", + ] +} + +if (rtc_include_tests) { + 
rtc_library("create_video_quality_test_fixture_api") { + visibility = [ "*" ] + testonly = true + sources = [ + "test/create_video_quality_test_fixture.cc", + "test/create_video_quality_test_fixture.h", + ] + deps = [ + ":fec_controller_api", + ":network_state_predictor_api", + ":scoped_refptr", + ":video_quality_test_fixture_api", + "../video:video_quality_test", + ] + } + + # TODO(srte): Move to network_emulation sub directory. + rtc_library("create_network_emulation_manager") { + visibility = [ "*" ] + testonly = true + sources = [ + "test/create_network_emulation_manager.cc", + "test/create_network_emulation_manager.h", + ] + deps = [ + ":network_emulation_manager_api", + "../test/network:emulated_network", + ] + } + + rtc_library("create_peerconnection_quality_test_fixture") { + visibility = [ "*" ] + testonly = true + sources = [ + "test/create_peerconnection_quality_test_fixture.cc", + "test/create_peerconnection_quality_test_fixture.h", + ] + + deps = [ + ":audio_quality_analyzer_api", + ":peer_connection_quality_test_fixture_api", + ":time_controller", + ":video_quality_analyzer_api", + "../test/pc/e2e:peerconnection_quality_test", + ] + } +} + +rtc_library("create_frame_generator") { + visibility = [ "*" ] + testonly = true + sources = [ + "test/create_frame_generator.cc", + "test/create_frame_generator.h", + ] + deps = [ + ":frame_generator_api", + "../rtc_base:checks", + "../system_wrappers", + "../test:frame_generator_impl", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("create_peer_connection_quality_test_frame_generator") { + visibility = [ "*" ] + testonly = true + sources = [ + "test/create_peer_connection_quality_test_frame_generator.cc", + "test/create_peer_connection_quality_test_frame_generator.h", + ] + deps = [ + ":create_frame_generator", + ":frame_generator_api", + ":peer_connection_quality_test_fixture_api", + "../rtc_base:checks", + "../test:fileutils", + ] + absl_deps = [ 
"//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_source_set("libjingle_logging_api") { + visibility = [ "*" ] + sources = [ "rtc_event_log_output.h" ] +} + +rtc_library("rtc_event_log_output_file") { + visibility = [ "*" ] + sources = [ + "rtc_event_log_output_file.cc", + "rtc_event_log_output_file.h", + ] + + deps = [ + ":libjingle_logging_api", + "../rtc_base:checks", + "../rtc_base:rtc_base_approved", + "../rtc_base/system:file_wrapper", + "rtc_event_log", + ] +} + +rtc_source_set("rtc_stats_api") { + visibility = [ "*" ] + cflags = [] + sources = [ + "stats/rtc_stats.h", + "stats/rtc_stats_collector_callback.h", + "stats/rtc_stats_report.h", + "stats/rtcstats_objects.h", + ] + + deps = [ + ":scoped_refptr", + "../rtc_base:checks", + "../rtc_base:rtc_base_approved", + "../rtc_base/system:rtc_export", + ] +} + +rtc_library("audio_options_api") { + visibility = [ "*" ] + sources = [ + "audio_options.cc", + "audio_options.h", + ] + + deps = [ + ":array_view", + "../rtc_base:stringutils", + "../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("transport_api") { + visibility = [ "*" ] + sources = [ + "call/transport.cc", + "call/transport.h", + ] +} + +rtc_source_set("bitrate_allocation") { + visibility = [ "*" ] + sources = [ "call/bitrate_allocation.h" ] + deps = [ + "units:data_rate", + "units:time_delta", + ] +} + +# TODO(srte): Move to network_emulation sub directory. +rtc_source_set("simulated_network_api") { + visibility = [ "*" ] + sources = [ "test/simulated_network.h" ] + deps = [ "../rtc_base" ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +# TODO(srte): Move to network_emulation sub directory. 
+rtc_source_set("network_emulation_manager_api") { + visibility = [ "*" ] + sources = [ + "test/network_emulation_manager.cc", + "test/network_emulation_manager.h", + ] + deps = [ + ":array_view", + ":simulated_network_api", + ":time_controller", + "../call:simulated_network", + "../rtc_base", + "test/network_emulation", + "units:data_rate", + "units:data_size", + "units:timestamp", + ] +} + +rtc_source_set("time_controller") { + visibility = [ "*" ] + sources = [ + "test/time_controller.cc", + "test/time_controller.h", + ] + + deps = [ + "../modules/utility", + "../rtc_base", + "../rtc_base/synchronization:yield_policy", + "../system_wrappers", + "task_queue", + "units:time_delta", + "units:timestamp", + ] +} + +rtc_source_set("fec_controller_api") { + visibility = [ "*" ] + sources = [ + "fec_controller.h", + "fec_controller_override.h", + ] + + deps = [ + "../modules:module_fec_api", + "video:video_frame_type", + ] +} + +rtc_source_set("network_state_predictor_api") { + visibility = [ "*" ] + sources = [ "network_state_predictor.h" ] +} + +rtc_source_set("array_view") { + visibility = [ "*" ] + sources = [ "array_view.h" ] + deps = [ + "../rtc_base:checks", + "../rtc_base:type_traits", + ] +} + +rtc_source_set("refcountedbase") { + visibility = [ "*" ] + sources = [ "ref_counted_base.h" ] + deps = [ "../rtc_base:rtc_base_approved" ] +} + +rtc_library("ice_transport_factory") { + visibility = [ "*" ] + sources = [ + "ice_transport_factory.cc", + "ice_transport_factory.h", + ] + deps = [ + ":libjingle_peerconnection_api", + ":packet_socket_factory", + ":scoped_refptr", + "../p2p:rtc_p2p", + "../rtc_base", + "../rtc_base/system:rtc_export", + "rtc_event_log:rtc_event_log", + ] +} + +rtc_library("neteq_simulator_api") { + visibility = [ "*" ] + sources = [ + "test/neteq_simulator.cc", + "test/neteq_simulator.h", + ] +} + +rtc_source_set("function_view") { + visibility = [ "*" ] + sources = [ "function_view.h" ] + deps = [ "../rtc_base:checks" ] +} + +if 
(rtc_include_tests) { + if (rtc_enable_protobuf) { + rtc_library("audioproc_f_api") { + visibility = [ "*" ] + testonly = true + sources = [ + "test/audioproc_float.cc", + "test/audioproc_float.h", + ] + + deps = [ + "../modules/audio_processing", + "../modules/audio_processing:api", + "../modules/audio_processing:audioproc_f_impl", + ] + } + + rtc_library("neteq_simulator_factory") { + visibility = [ "*" ] + testonly = true + sources = [ + "test/neteq_simulator_factory.cc", + "test/neteq_simulator_factory.h", + ] + deps = [ + ":neteq_simulator_api", + "../modules/audio_coding:neteq_test_factory", + "../rtc_base:checks", + "neteq:neteq_api", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/flags:flag", + "//third_party/abseil-cpp/absl/flags:parse", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] + } + } + + rtc_source_set("simulcast_test_fixture_api") { + visibility = [ "*" ] + testonly = true + sources = [ "test/simulcast_test_fixture.h" ] + } + + rtc_library("create_simulcast_test_fixture_api") { + visibility = [ "*" ] + testonly = true + sources = [ + "test/create_simulcast_test_fixture.cc", + "test/create_simulcast_test_fixture.h", + ] + deps = [ + ":simulcast_test_fixture_api", + "../modules/video_coding:simulcast_test_fixture_impl", + "../rtc_base:rtc_base_approved", + "video_codecs:video_codecs_api", + ] + } + + rtc_library("videocodec_test_fixture_api") { + visibility = [ "*" ] + testonly = true + sources = [ + "test/videocodec_test_fixture.h", + "test/videocodec_test_stats.cc", + "test/videocodec_test_stats.h", + ] + deps = [ + "../media:rtc_h264_profile_id", + "../modules/video_coding:video_codec_interface", + "../rtc_base:stringutils", + "video:video_frame_type", + "video_codecs:video_codecs_api", + ] + } + + rtc_library("create_videocodec_test_fixture_api") { + visibility = [ "*" ] + testonly = true + sources = [ + "test/create_videocodec_test_fixture.cc", + "test/create_videocodec_test_fixture.h", + 
] + deps = [ + ":videocodec_test_fixture_api", + "../modules/video_coding:video_codecs_test_framework", + "../modules/video_coding:videocodec_test_impl", + "../rtc_base:rtc_base_approved", + "video_codecs:video_codecs_api", + ] + } + + rtc_source_set("mock_audio_mixer") { + testonly = true + sources = [ "test/mock_audio_mixer.h" ] + + deps = [ + "../test:test_support", + "audio:audio_mixer_api", + ] + } + + rtc_source_set("mock_fec_controller_override") { + testonly = true + sources = [ "test/mock_fec_controller_override.h" ] + deps = [ + ":fec_controller_api", + "../test:test_support", + ] + } + + rtc_library("mock_frame_encryptor") { + testonly = true + sources = [ "test/mock_frame_encryptor.h" ] + deps = [ + # For api/crypto/frame_encryptor_interface.h + ":libjingle_peerconnection_api", + "../test:test_support", + "crypto:frame_encryptor_interface", + ] + } + + rtc_library("mock_frame_decryptor") { + testonly = true + sources = [ "test/mock_frame_decryptor.h" ] + deps = [ + ":libjingle_peerconnection_api", + "../test:test_support", + "crypto:frame_decryptor_interface", + ] + } + + rtc_library("fake_frame_encryptor") { + testonly = true + sources = [ + "test/fake_frame_encryptor.cc", + "test/fake_frame_encryptor.h", + ] + deps = [ + ":array_view", + ":libjingle_peerconnection_api", + ":rtp_parameters", + "..:webrtc_common", + "../rtc_base:checks", + "../rtc_base:rtc_base_approved", + "crypto:frame_encryptor_interface", + ] + } + + rtc_library("fake_frame_decryptor") { + testonly = true + sources = [ + "test/fake_frame_decryptor.cc", + "test/fake_frame_decryptor.h", + ] + deps = [ + ":array_view", + ":libjingle_peerconnection_api", + ":rtp_parameters", + "..:webrtc_common", + "../rtc_base:checks", + "../rtc_base:rtc_base_approved", + "crypto:frame_decryptor_interface", + ] + } + + rtc_source_set("dummy_peer_connection") { + visibility = [ "*" ] + testonly = true + sources = [ "test/dummy_peer_connection.h" ] + + deps = [ + ":libjingle_peerconnection_api", + 
":rtc_error", + "../rtc_base:checks", + "../rtc_base:refcount", + ] + } + + rtc_source_set("mock_peerconnectioninterface") { + visibility = [ "*" ] + testonly = true + sources = [ "test/mock_peerconnectioninterface.h" ] + + deps = [ + ":libjingle_peerconnection_api", + "../test:test_support", + ] + } + + rtc_source_set("mock_peer_connection_factory_interface") { + visibility = [ "*" ] + testonly = true + sources = [ "test/mock_peer_connection_factory_interface.h" ] + + deps = [ + ":libjingle_peerconnection_api", + "../test:test_support", + ] + } + + rtc_source_set("mock_rtp") { + testonly = true + sources = [ + "test/mock_rtpreceiver.h", + "test/mock_rtpsender.h", + ] + + deps = [ + ":libjingle_peerconnection_api", + "../test:test_support", + ] + } + + rtc_source_set("mock_transformable_video_frame") { + testonly = true + sources = [ "test/mock_transformable_video_frame.h" ] + + deps = [ + ":frame_transformer_interface", + "../test:test_support", + ] + } + + rtc_source_set("mock_video_bitrate_allocator") { + testonly = true + sources = [ "test/mock_video_bitrate_allocator.h" ] + + deps = [ + "../api/video:video_bitrate_allocator", + "../test:test_support", + ] + } + + rtc_source_set("mock_video_bitrate_allocator_factory") { + testonly = true + sources = [ "test/mock_video_bitrate_allocator_factory.h" ] + + deps = [ + "../api/video:video_bitrate_allocator_factory", + "../test:test_support", + ] + } + + rtc_source_set("mock_video_codec_factory") { + testonly = true + sources = [ + "test/mock_video_decoder_factory.h", + "test/mock_video_encoder_factory.h", + ] + + deps = [ + "../api/video_codecs:video_codecs_api", + "../test:test_support", + ] + } + + rtc_library("mock_video_decoder") { + visibility = [ "*" ] + + testonly = true + sources = [ "test/mock_video_decoder.h" ] + + deps = [ + "../api/video_codecs:video_codecs_api", + "../test:test_support", + ] + } + + rtc_library("mock_video_encoder") { + visibility = [ "*" ] + + testonly = true + sources = [ 
"test/mock_video_encoder.h" ] + + deps = [ + "../api/video_codecs:video_codecs_api", + "../test:test_support", + ] + } + + rtc_library("create_time_controller") { + visibility = [ "*" ] + testonly = true + sources = [ + "test/create_time_controller.cc", + "test/create_time_controller.h", + ] + + deps = [ + ":callfactory_api", + ":time_controller", + "../call", + "../call:call_interfaces", + "../test/time_controller", + ] + } + + rtc_library("rtc_api_unittests") { + testonly = true + + sources = [ + "array_view_unittest.cc", + "function_view_unittest.cc", + "rtc_error_unittest.cc", + "rtc_event_log_output_file_unittest.cc", + "rtp_packet_info_unittest.cc", + "rtp_packet_infos_unittest.cc", + "rtp_parameters_unittest.cc", + "scoped_refptr_unittest.cc", + "test/create_time_controller_unittest.cc", + ] + + deps = [ + ":array_view", + ":create_time_controller", + ":function_view", + ":libjingle_peerconnection_api", + ":rtc_error", + ":rtc_event_log_output_file", + ":rtp_packet_info", + ":rtp_parameters", + ":scoped_refptr", + ":time_controller", + "../rtc_base:checks", + "../rtc_base:gunit_helpers", + "../rtc_base:rtc_base_approved", + "../rtc_base:rtc_task_queue", + "../rtc_base/task_utils:repeating_task", + "../test:fileutils", + "../test:test_support", + "task_queue:task_queue_default_factory_unittests", + "units:time_delta", + "units:timestamp", + "units:units_unittests", + "video:video_unittests", + ] + } + + rtc_library("compile_all_headers") { + testonly = true + + sources = [ "test/compile_all_headers.cc" ] + + deps = [ + ":dummy_peer_connection", + ":fake_frame_decryptor", + ":fake_frame_encryptor", + ":mock_audio_mixer", + ":mock_frame_decryptor", + ":mock_frame_encryptor", + ":mock_peer_connection_factory_interface", + ":mock_peerconnectioninterface", + ":mock_rtp", + ":mock_transformable_video_frame", + ":mock_video_bitrate_allocator", + ":mock_video_bitrate_allocator_factory", + ":mock_video_codec_factory", + ":mock_video_decoder", + ":mock_video_encoder", 
+ ":rtc_api_unittests", + "units:units_unittests", + ] + } +} diff --git a/api/DEPS b/api/DEPS new file mode 100644 index 0000000..4b93438 --- /dev/null +++ b/api/DEPS @@ -0,0 +1,310 @@ +# This is supposed to be a complete list of top-level directories, +# excepting only api/ itself. +include_rules = [ + "-audio", + "-base", + "-build", + "-buildtools", + "-build_overrides", + "-call", + "-common_audio", + "-common_video", + "-data", + "-examples", + "-ios", + "-infra", + "-logging", + "-media", + "-modules", + "-out", + "-p2p", + "-pc", + "-resources", + "-rtc_base", + "-rtc_tools", + "-sdk", + "-stats", + "-style-guide", + "-system_wrappers", + "-test", + "-testing", + "-third_party", + "-tools", + "-tools_webrtc", + "-video", + "-external/webrtc/webrtc", # Android platform build. + "-libyuv", + "-common_types.h", + "-WebRTC", +] + +specific_include_rules = { + # Some internal headers are allowed even in API headers: + ".*\.h": [ + "+rtc_base/checks.h", + "+rtc_base/system/rtc_export.h", + "+rtc_base/system/rtc_export_template.h", + "+rtc_base/units/unit_base.h", + "+rtc_base/deprecation.h", + ], + + "array_view\.h": [ + "+rtc_base/type_traits.h", + ], + + # Needed because AudioEncoderOpus is in the wrong place for + # backwards compatibilty reasons. 
See + # https://bugs.chromium.org/p/webrtc/issues/detail?id=7847 + "audio_encoder_opus\.h": [ + "+modules/audio_coding/codecs/opus/audio_encoder_opus.h", + ], + + "async_resolver_factory\.h": [ + "+rtc_base/async_resolver_interface.h", + ], + + "candidate\.h": [ + "+rtc_base/network_constants.h", + "+rtc_base/socket_address.h", + ], + + "data_channel_interface\.h": [ + "+rtc_base/copy_on_write_buffer.h", + "+rtc_base/ref_count.h", + ], + + "data_channel_transport_interface\.h": [ + "+rtc_base/copy_on_write_buffer.h", + ], + + "dtls_transport_interface\.h": [ + "+rtc_base/ref_count.h", + "+rtc_base/ssl_certificate.h", + ], + + "dtmf_sender_interface\.h": [ + "+rtc_base/ref_count.h", + ], + + "fec_controller\.h": [ + "+modules/include/module_fec_types.h", + ], + + "frame_transformer_interface\.h": [ + "+rtc_base/ref_count.h", + ], + + "ice_transport_interface\.h": [ + "+rtc_base/ref_count.h", + ], + + "jsep\.h": [ + "+rtc_base/ref_count.h", + ], + + "jsep_ice_candidate\.h": [ + "+rtc_base/constructor_magic.h", + ], + + "jsep_session_description\.h": [ + "+rtc_base/constructor_magic.h", + ], + + "media_stream_interface\.h": [ + "+modules/audio_processing/include/audio_processing_statistics.h", + "+rtc_base/ref_count.h", + ], + + "packet_socket_factory\.h": [ + "+rtc_base/proxy_info.h", + "+rtc_base/async_packet_socket.h", + ], + + "peer_connection_factory_proxy\.h": [ + "+rtc_base/bind.h", + ], + + "peer_connection_interface\.h": [ + "+media/base/media_config.h", + "+media/base/media_engine.h", + "+p2p/base/port_allocator.h", + "+rtc_base/network_monitor_factory.h", + "+rtc_base/rtc_certificate.h", + "+rtc_base/rtc_certificate_generator.h", + "+rtc_base/socket_address.h", + "+rtc_base/ssl_certificate.h", + "+rtc_base/ssl_stream_adapter.h", + ], + + "proxy\.h": [ + "+rtc_base/event.h", + "+rtc_base/message_handler.h", # Inherits from it. 
+ "+rtc_base/ref_counted_object.h", + "+rtc_base/thread.h", + ], + + "ref_counted_base\.h": [ + "+rtc_base/constructor_magic.h", + "+rtc_base/ref_count.h", + "+rtc_base/ref_counter.h", + ], + + "rtc_error\.h": [ + "+rtc_base/logging.h", + ], + "rtc_event_log_output_file.h": [ + # For private member and constructor. + "+rtc_base/system/file_wrapper.h", + ], + "rtp_receiver_interface\.h": [ + "+rtc_base/ref_count.h", + ], + + "rtp_sender_interface\.h": [ + "+rtc_base/ref_count.h", + ], + + "rtp_transceiver_interface\.h": [ + "+rtc_base/ref_count.h", + ], + + "sctp_transport_interface\.h": [ + "+rtc_base/ref_count.h", + ], + + "set_local_description_observer_interface\.h": [ + "+rtc_base/ref_count.h", + ], + "set_remote_description_observer_interface\.h": [ + "+rtc_base/ref_count.h", + ], + + "stats_types\.h": [ + "+rtc_base/constructor_magic.h", + "+rtc_base/ref_count.h", + "+rtc_base/string_encode.h", + "+rtc_base/thread_checker.h", + ], + + "uma_metrics\.h": [ + "+rtc_base/ref_count.h", + ], + + "audio_frame\.h": [ + "+rtc_base/constructor_magic.h", + ], + + "audio_mixer\.h": [ + "+rtc_base/ref_count.h", + ], + + "audio_decoder\.h": [ + "+rtc_base/buffer.h", + "+rtc_base/constructor_magic.h", + ], + + "audio_decoder_factory\.h": [ + "+rtc_base/ref_count.h", + ], + + "audio_decoder_factory_template\.h": [ + "+rtc_base/ref_counted_object.h", + ], + + "audio_encoder\.h": [ + "+rtc_base/buffer.h", + ], + + "audio_encoder_factory\.h": [ + "+rtc_base/ref_count.h", + ], + + "audio_encoder_factory_template\.h": [ + "+rtc_base/ref_counted_object.h", + ], + + "frame_decryptor_interface\.h": [ + "+rtc_base/ref_count.h", + ], + + "frame_encryptor_interface\.h": [ + "+rtc_base/ref_count.h", + ], + + "rtc_stats_collector_callback\.h": [ + "+rtc_base/ref_count.h", + ], + + "rtc_stats_report\.h": [ + "+rtc_base/ref_count.h", + "+rtc_base/ref_counted_object.h", + ], + + "audioproc_float\.h": [ + "+modules/audio_processing/include/audio_processing.h", + ], + + 
"echo_detector_creator\.h": [ + "+modules/audio_processing/include/audio_processing.h", + ], + + "fake_frame_decryptor\.h": [ + "+rtc_base/ref_counted_object.h", + ], + + "fake_frame_encryptor\.h": [ + "+rtc_base/ref_counted_object.h", + ], + + "mock.*\.h": [ + "+test/gmock.h", + ], + + "simulated_network\.h": [ + "+rtc_base/random.h", + "+rtc_base/thread_annotations.h", + ], + + "test_dependency_factory\.h": [ + "+rtc_base/thread_checker.h", + ], + + "time_controller\.h": [ + "+rtc_base/thread.h", + ], + + "videocodec_test_fixture\.h": [ + "+modules/video_coding/include/video_codec_interface.h" + ], + + "video_encoder_config\.h": [ + "+rtc_base/ref_count.h", + ], + + # .cc files in api/ should not be restricted in what they can #include, + # so we re-add all the top-level directories here. (That's because .h + # files leak their #includes to whoever's #including them, but .cc files + # do not since no one #includes them.) + ".*\.cc": [ + "+audio", + "+call", + "+common_audio", + "+common_video", + "+examples", + "+logging", + "+media", + "+modules", + "+p2p", + "+pc", + "+rtc_base", + "+rtc_tools", + "+sdk", + "+stats", + "+system_wrappers", + "+test", + "+tools", + "+tools_webrtc", + "+video", + "+third_party", + ], +} diff --git a/api/DESIGN.md b/api/DESIGN.md new file mode 100644 index 0000000..0a2f36e --- /dev/null +++ b/api/DESIGN.md @@ -0,0 +1,71 @@ +# Design considerations + +The header files in this directory form the API to the WebRTC library +that is intended for client applications' use. + +This API is designed to be used on top of a multithreaded runtime. + +The public API functions are designed to be called from a single thread* +(the "client thread"), and can do internal dispatching to the thread +where activity needs to happen. Those threads can be passed in by the +client, typically as arguments to factory constructors, or they can be +created by the library if factory constructors that don't take threads +are used. 
+ +Many of the functions are designed to be used in an asynchronous manner, +where a function is called to initiate an activity, and a callback will +be called when the activity is completed, or a handler function will +be called on an observer object when interesting events happen. + +Note: Often, even functions that look like simple functions (such as +information query functions) will need to jump between threads to perform +their function - which means that things may happen on other threads +between calls; writing "increment(x); increment(x)" is not a safe +way to increment X by exactly two, since the increment function may have +jumped to another thread that already had a queue of things to handle, +causing large amounts of other activity to have intervened between +the two calls. + +(*) The term "thread" is used here to denote any construct that guarantees +sequential execution - other names for such constructs are task runners +and sequenced task queues. + +# Client threads and callbacks + +At the moment, the API does not give any guarantee on which thread* the +callbacks and events are called on. So it's best to write all callback +and event handlers like this (pseudocode): +
+void ObserverClass::Handler(event) {
+  if (!called_on_client_thread()) {
+    dispatch_to_client_thread(bind(handler(event)));
+    return;
+  }
+  // Process event, we're now on the right thread
+}
+
+In the future, the implementation may change to always call the callbacks +and event handlers on the client thread. + +# Implementation considerations + +The C++ classes that are part of the public API are also used to derive +classes that form part of the implementation. + +This should not directly concern users of the API, but may matter if one +wants to look at how the WebRTC library is implemented, or for legacy code +that directly accesses internal APIs. + +Many APIs are defined in terms of a "proxy object", which will do a blocking +dispatch of the function to another thread, and an "implementation object" +which will do the actual +work, but can only be created, invoked and destroyed on its "home thread". + +Usually, the classes are named "xxxInterface" (in api/), "xxxProxy" and +"xxx" (not in api/). WebRTC users should only need to depend on the files +in api/. In many cases, the "xxxProxy" and "xxx" classes are subclasses +of "xxxInterface", but this property is an implementation feature only, +and should not be relied upon. + +The threading properties of these internal APIs are NOT documented in +this note, and need to be understood by inspecting those classes. 
diff --git a/api/OWNERS b/api/OWNERS new file mode 100644 index 0000000..4cf3915 --- /dev/null +++ b/api/OWNERS @@ -0,0 +1,14 @@ +crodbro@webrtc.org +deadbeef@webrtc.org +hta@webrtc.org +juberti@webrtc.org +kwiberg@webrtc.org +magjed@webrtc.org +perkj@webrtc.org +tkchin@webrtc.org +tommi@webrtc.org + +per-file peer_connection*=hbos@webrtc.org + +per-file DEPS=mbonadei@webrtc.org +per-file DEPS=kwiberg@webrtc.org diff --git a/api/README.md b/api/README.md new file mode 100644 index 0000000..4cc7993 --- /dev/null +++ b/api/README.md @@ -0,0 +1,24 @@ +# How to write code in the `api/` directory + +Mostly, just follow the regular [style guide](../style-guide.md), but: + +* Note that `api/` code is not exempt from the “`.h` and `.cc` files come in + pairs†rule, so if you declare something in `api/path/to/foo.h`, it should be + defined in `api/path/to/foo.cc`. +* Headers in `api/` should, if possible, not `#include` headers outside `api/`. + It’s not always possible to avoid this, but be aware that it adds to a small + mountain of technical debt that we’re trying to shrink. +* `.cc` files in `api/`, on the other hand, are free to `#include` headers + outside `api/`. + +That is, the preferred way for `api/` code to access non-`api/` code is to call +it from a `.cc` file, so that users of our API headers won’t transitively +`#include` non-public headers. + +For headers in `api/` that need to refer to non-public types, forward +declarations are often a lesser evil than including non-public header files. The +usual [rules](../style-guide.md#forward-declarations) still apply, though. + +`.cc` files in `api/` should preferably be kept reasonably small. If a +substantial implementation is needed, consider putting it with our non-public +code, and just call it from the `api/` `.cc` file. 
diff --git a/api/adaptation/BUILD.gn b/api/adaptation/BUILD.gn new file mode 100644 index 0000000..dc4c737 --- /dev/null +++ b/api/adaptation/BUILD.gn @@ -0,0 +1,23 @@ +# Copyright(c) 2020 The WebRTC project authors.All Rights Reserved. +# +# Use of this source code is governed by a BSD - style license +# that can be found in the LICENSE file in the root of the source +# tree.An additional intellectual property rights grant can be found +# in the file PATENTS.All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("../../webrtc.gni") + +rtc_source_set("resource_adaptation_api") { + visibility = [ "*" ] + sources = [ + "resource.cc", + "resource.h", + ] + deps = [ + "../../api:scoped_refptr", + "../../rtc_base:refcount", + "../../rtc_base:rtc_base_approved", + "../../rtc_base/system:rtc_export", + ] +} diff --git a/api/adaptation/DEPS b/api/adaptation/DEPS new file mode 100644 index 0000000..cab7fb8 --- /dev/null +++ b/api/adaptation/DEPS @@ -0,0 +1,7 @@ +specific_include_rules = { + "resource\.h": [ + # ref_count.h is a public_deps of rtc_base_approved. Necessary because of + # rtc::RefCountInterface. + "+rtc_base/ref_count.h", + ], +} \ No newline at end of file diff --git a/api/adaptation/resource.cc b/api/adaptation/resource.cc new file mode 100644 index 0000000..0a9c83a --- /dev/null +++ b/api/adaptation/resource.cc @@ -0,0 +1,30 @@ +/* + * Copyright 2019 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/adaptation/resource.h" + +namespace webrtc { + +const char* ResourceUsageStateToString(ResourceUsageState usage_state) { + switch (usage_state) { + case ResourceUsageState::kOveruse: + return "kOveruse"; + case ResourceUsageState::kUnderuse: + return "kUnderuse"; + } +} + +ResourceListener::~ResourceListener() {} + +Resource::Resource() {} + +Resource::~Resource() {} + +} // namespace webrtc diff --git a/api/adaptation/resource.h b/api/adaptation/resource.h new file mode 100644 index 0000000..9b39680 --- /dev/null +++ b/api/adaptation/resource.h @@ -0,0 +1,67 @@ +/* + * Copyright 2019 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_ADAPTATION_RESOURCE_H_ +#define API_ADAPTATION_RESOURCE_H_ + +#include + +#include "api/scoped_refptr.h" +#include "rtc_base/ref_count.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +class Resource; + +enum class ResourceUsageState { + // Action is needed to minimze the load on this resource. + kOveruse, + // Increasing the load on this resource is desired, if possible. + kUnderuse, +}; + +RTC_EXPORT const char* ResourceUsageStateToString( + ResourceUsageState usage_state); + +class RTC_EXPORT ResourceListener { + public: + virtual ~ResourceListener(); + + virtual void OnResourceUsageStateMeasured( + rtc::scoped_refptr resource, + ResourceUsageState usage_state) = 0; +}; + +// A Resource monitors an implementation-specific resource. It may report +// kOveruse or kUnderuse when resource usage is high or low enough that we +// should perform some sort of mitigation to fulfil the resource's constraints. 
+// +// The methods on this interface are invoked on the adaptation task queue. +// Resource usage measurements may be performed on any task queue. +// +// The Resource is reference counted to prevent use-after-free when posting +// between task queues. As such, the implementation MUST NOT make any +// assumptions about which task queue Resource is destructed on. +class RTC_EXPORT Resource : public rtc::RefCountInterface { + public: + Resource(); + // Destruction may happen on any task queue. + ~Resource() override; + + virtual std::string Name() const = 0; + // The |listener| may be informed of resource usage measurements on any task + // queue, but not after this method is invoked with the null argument. + virtual void SetResourceListener(ResourceListener* listener) = 0; +}; + +} // namespace webrtc + +#endif  // API_ADAPTATION_RESOURCE_H_ diff --git a/api/array_view.h b/api/array_view.h new file mode 100644 index 0000000..a66369a --- /dev/null +++ b/api/array_view.h @@ -0,0 +1,315 @@ +/* + * Copyright 2015 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_ARRAY_VIEW_H_ +#define API_ARRAY_VIEW_H_ + +#include +#include +#include + +#include "rtc_base/checks.h" +#include "rtc_base/type_traits.h" + +namespace rtc { + +// tl;dr: rtc::ArrayView is the same thing as gsl::span from the Guideline +// Support Library. +// +// Many functions read from or write to arrays.
The obvious way to do this is +// to use two arguments, a pointer to the first element and an element count: +// +// bool Contains17(const int* arr, size_t size) { +// for (size_t i = 0; i < size; ++i) { +// if (arr[i] == 17) +// return true; +// } +// return false; +// } +// +// This is flexible, since it doesn't matter how the array is stored (C array, +// std::vector, rtc::Buffer, ...), but it's error-prone because the caller has +// to correctly specify the array length: +// +// Contains17(arr, arraysize(arr)); // C array +// Contains17(arr.data(), arr.size()); // std::vector +// Contains17(arr, size); // pointer + size +// ... +// +// It's also kind of messy to have two separate arguments for what is +// conceptually a single thing. +// +// Enter rtc::ArrayView. It contains a T pointer (to an array it doesn't +// own) and a count, and supports the basic things you'd expect, such as +// indexing and iteration. It allows us to write our function like this: +// +// bool Contains17(rtc::ArrayView arr) { +// for (auto e : arr) { +// if (e == 17) +// return true; +// } +// return false; +// } +// +// And even better, because a bunch of things will implicitly convert to +// ArrayView, we can call it like this: +// +// Contains17(arr); // C array +// Contains17(arr); // std::vector +// Contains17(rtc::ArrayView(arr, size)); // pointer + size +// Contains17(nullptr); // nullptr -> empty ArrayView +// ... +// +// ArrayView stores both a pointer and a size, but you may also use +// ArrayView, which has a size that's fixed at compile time (which means +// it only has to store the pointer). +// +// One important point is that ArrayView and ArrayView are +// different types, which allow and don't allow mutation of the array elements, +// respectively. The implicit conversions work just like you'd hope, so that +// e.g. vector will convert to either ArrayView or ArrayView, but const vector will convert only to ArrayView. 
+// (ArrayView itself can be the source type in such conversions, so +// ArrayView will convert to ArrayView.) +// +// Note: ArrayView is tiny (just a pointer and a count if variable-sized, just +// a pointer if fix-sized) and trivially copyable, so it's probably cheaper to +// pass it by value than by const reference. + +namespace impl { + +// Magic constant for indicating that the size of an ArrayView is variable +// instead of fixed. +enum : std::ptrdiff_t { kArrayViewVarSize = -4711 }; + +// Base class for ArrayViews of fixed nonzero size. +template +class ArrayViewBase { + static_assert(Size > 0, "ArrayView size must be variable or non-negative"); + + public: + ArrayViewBase(T* data, size_t size) : data_(data) {} + + static constexpr size_t size() { return Size; } + static constexpr bool empty() { return false; } + T* data() const { return data_; } + + protected: + static constexpr bool fixed_size() { return true; } + + private: + T* data_; +}; + +// Specialized base class for ArrayViews of fixed zero size. +template +class ArrayViewBase { + public: + explicit ArrayViewBase(T* data, size_t size) {} + + static constexpr size_t size() { return 0; } + static constexpr bool empty() { return true; } + T* data() const { return nullptr; } + + protected: + static constexpr bool fixed_size() { return true; } +}; + +// Specialized base class for ArrayViews of variable size. +template +class ArrayViewBase { + public: + ArrayViewBase(T* data, size_t size) + : data_(size == 0 ? nullptr : data), size_(size) {} + + size_t size() const { return size_; } + bool empty() const { return size_ == 0; } + T* data() const { return data_; } + + protected: + static constexpr bool fixed_size() { return false; } + + private: + T* data_; + size_t size_; +}; + +} // namespace impl + +template +class ArrayView final : public impl::ArrayViewBase { + public: + using value_type = T; + using const_iterator = const T*; + + // Construct an ArrayView from a pointer and a length. 
+ template + ArrayView(U* data, size_t size) + : impl::ArrayViewBase::ArrayViewBase(data, size) { + RTC_DCHECK_EQ(size == 0 ? nullptr : data, this->data()); + RTC_DCHECK_EQ(size, this->size()); + RTC_DCHECK_EQ(!this->data(), + this->size() == 0); // data is null iff size == 0. + } + + // Construct an empty ArrayView. Note that fixed-size ArrayViews of size > 0 + // cannot be empty. + ArrayView() : ArrayView(nullptr, 0) {} + ArrayView(std::nullptr_t) // NOLINT + : ArrayView() {} + ArrayView(std::nullptr_t, size_t size) + : ArrayView(static_cast(nullptr), size) { + static_assert(Size == 0 || Size == impl::kArrayViewVarSize, ""); + RTC_DCHECK_EQ(0, size); + } + + // Construct an ArrayView from a C-style array. + template + ArrayView(U (&array)[N]) // NOLINT + : ArrayView(array, N) { + static_assert(Size == N || Size == impl::kArrayViewVarSize, + "Array size must match ArrayView size"); + } + + // (Only if size is fixed.) Construct a fixed size ArrayView from a + // non-const std::array instance. For an ArrayView with variable size, the + // used ctor is ArrayView(U& u) instead. + template (N)>::type* = nullptr> + ArrayView(std::array& u) // NOLINT + : ArrayView(u.data(), u.size()) {} + + // (Only if size is fixed.) Construct a fixed size ArrayView where T is + // const from a const(expr) std::array instance. For an ArrayView with + // variable size, the used ctor is ArrayView(U& u) instead. + template (N)>::type* = nullptr> + ArrayView(const std::array& u) // NOLINT + : ArrayView(u.data(), u.size()) {} + + // (Only if size is fixed.) Construct an ArrayView from any type U that has a + // static constexpr size() method whose return value is equal to Size, and a + // data() method whose return value converts implicitly to T*. In particular, + // this means we allow conversion from ArrayView to ArrayView, but not the other way around. We also don't allow conversion from + // ArrayView to ArrayView, or from ArrayView to ArrayView when M != N. 
+ template < + typename U, + typename std::enable_if::value>::type* = nullptr> + ArrayView(U& u) // NOLINT + : ArrayView(u.data(), u.size()) { + static_assert(U::size() == Size, "Sizes must match exactly"); + } + template < + typename U, + typename std::enable_if::value>::type* = nullptr> + ArrayView(const U& u) // NOLINT(runtime/explicit) + : ArrayView(u.data(), u.size()) { + static_assert(U::size() == Size, "Sizes must match exactly"); + } + + // (Only if size is variable.) Construct an ArrayView from any type U that + // has a size() method whose return value converts implicitly to size_t, and + // a data() method whose return value converts implicitly to T*. In + // particular, this means we allow conversion from ArrayView to + // ArrayView, but not the other way around. Other allowed + // conversions include + // ArrayView to ArrayView or ArrayView, + // std::vector to ArrayView or ArrayView, + // const std::vector to ArrayView, + // rtc::Buffer to ArrayView or ArrayView, and + // const rtc::Buffer to ArrayView. + template < + typename U, + typename std::enable_if::value>::type* = nullptr> + ArrayView(U& u) // NOLINT + : ArrayView(u.data(), u.size()) {} + template < + typename U, + typename std::enable_if::value>::type* = nullptr> + ArrayView(const U& u) // NOLINT(runtime/explicit) + : ArrayView(u.data(), u.size()) {} + + // Indexing and iteration. These allow mutation even if the ArrayView is + // const, because the ArrayView doesn't own the array. (To prevent mutation, + // use a const element type.) + T& operator[](size_t idx) const { + RTC_DCHECK_LT(idx, this->size()); + RTC_DCHECK(this->data()); + return this->data()[idx]; + } + T* begin() const { return this->data(); } + T* end() const { return this->data() + this->size(); } + const T* cbegin() const { return this->data(); } + const T* cend() const { return this->data() + this->size(); } + + ArrayView subview(size_t offset, size_t size) const { + return offset < this->size() + ? 
ArrayView(this->data() + offset, + std::min(size, this->size() - offset)) + : ArrayView(); + } + ArrayView subview(size_t offset) const { + return subview(offset, this->size()); + } +}; + +// Comparing two ArrayViews compares their (pointer,size) pairs; it does *not* +// dereference the pointers. +template +bool operator==(const ArrayView& a, const ArrayView& b) { + return a.data() == b.data() && a.size() == b.size(); +} +template +bool operator!=(const ArrayView& a, const ArrayView& b) { + return !(a == b); +} + +// Variable-size ArrayViews are the size of two pointers; fixed-size ArrayViews +// are the size of one pointer. (And as a special case, fixed-size ArrayViews +// of size 0 require no storage.) +static_assert(sizeof(ArrayView) == 2 * sizeof(int*), ""); +static_assert(sizeof(ArrayView) == sizeof(int*), ""); +static_assert(std::is_empty>::value, ""); + +template +inline ArrayView MakeArrayView(T* data, size_t size) { + return ArrayView(data, size); +} + +// Only for primitive types that have the same size and alignment. +// Allow reinterpret cast of the array view to another primitive type of the +// same size. +// Template arguments order is (U, T, Size) to allow deduction of the template +// arguments in client calls: reinterpret_array_view(array_view).
+template +inline ArrayView reinterpret_array_view(ArrayView view) { + static_assert(sizeof(U) == sizeof(T) && alignof(U) == alignof(T), + "ArrayView reinterpret_cast is only supported for casting " + "between views that represent the same chunk of memory."); + static_assert( + std::is_fundamental::value && std::is_fundamental::value, + "ArrayView reinterpret_cast is only supported for casting between " + "fundamental types."); + return ArrayView(reinterpret_cast(view.data()), view.size()); +} + +} // namespace rtc + +#endif // API_ARRAY_VIEW_H_ diff --git a/api/array_view_unittest.cc b/api/array_view_unittest.cc new file mode 100644 index 0000000..0357f68 --- /dev/null +++ b/api/array_view_unittest.cc @@ -0,0 +1,578 @@ +/* + * Copyright 2015 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/array_view.h" + +#include +#include +#include +#include +#include + +#include "rtc_base/buffer.h" +#include "rtc_base/checks.h" +#include "rtc_base/gunit.h" +#include "test/gmock.h" + +namespace rtc { + +namespace { + +using ::testing::ElementsAre; +using ::testing::IsEmpty; + +template +size_t Call(ArrayView av) { + return av.size(); +} + +template +void CallFixed(ArrayView av) {} + +} // namespace + +TEST(ArrayViewDeathTest, TestConstructFromPtrAndArray) { + char arr[] = "Arrr!"; + const char carr[] = "Carrr!"; + EXPECT_EQ(6u, Call(arr)); + EXPECT_EQ(7u, Call(carr)); + EXPECT_EQ(6u, Call(arr)); + // Call(carr); // Compile error, because can't drop const. + // Call(arr); // Compile error, because incompatible types. 
+ ArrayView x; + EXPECT_EQ(0u, x.size()); + EXPECT_EQ(nullptr, x.data()); + ArrayView y = arr; + EXPECT_EQ(6u, y.size()); + EXPECT_EQ(arr, y.data()); + ArrayView yf = arr; + static_assert(yf.size() == 6, ""); + EXPECT_EQ(arr, yf.data()); + ArrayView z(arr + 1, 3); + EXPECT_EQ(3u, z.size()); + EXPECT_EQ(arr + 1, z.data()); + ArrayView zf(arr + 1, 3); + static_assert(zf.size() == 3, ""); + EXPECT_EQ(arr + 1, zf.data()); + ArrayView w(arr, 2); + EXPECT_EQ(2u, w.size()); + EXPECT_EQ(arr, w.data()); + ArrayView wf(arr, 2); + static_assert(wf.size() == 2, ""); + EXPECT_EQ(arr, wf.data()); + ArrayView q(arr, 0); + EXPECT_EQ(0u, q.size()); + EXPECT_EQ(nullptr, q.data()); + ArrayView qf(arr, 0); + static_assert(qf.size() == 0, ""); + EXPECT_EQ(nullptr, qf.data()); +#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) + // DCHECK error (nullptr with nonzero size). + EXPECT_DEATH(ArrayView(static_cast(nullptr), 5), ""); +#endif + // These are compile errors, because incompatible types. + // ArrayView m = arr; + // ArrayView n(arr + 2, 2); +} + +TEST(ArrayViewTest, TestCopyConstructorVariableLvalue) { + char arr[] = "Arrr!"; + ArrayView x = arr; + EXPECT_EQ(6u, x.size()); + EXPECT_EQ(arr, x.data()); + ArrayView y = x; // Copy non-const -> non-const. + EXPECT_EQ(6u, y.size()); + EXPECT_EQ(arr, y.data()); + ArrayView z = x; // Copy non-const -> const. + EXPECT_EQ(6u, z.size()); + EXPECT_EQ(arr, z.data()); + ArrayView w = z; // Copy const -> const. + EXPECT_EQ(6u, w.size()); + EXPECT_EQ(arr, w.data()); + // ArrayView v = z; // Compile error, because can't drop const. +} + +TEST(ArrayViewTest, TestCopyConstructorVariableRvalue) { + char arr[] = "Arrr!"; + ArrayView x = arr; + EXPECT_EQ(6u, x.size()); + EXPECT_EQ(arr, x.data()); + ArrayView y = std::move(x); // Copy non-const -> non-const. + EXPECT_EQ(6u, y.size()); + EXPECT_EQ(arr, y.data()); + ArrayView z = std::move(x); // Copy non-const -> const. 
+ EXPECT_EQ(6u, z.size()); + EXPECT_EQ(arr, z.data()); + ArrayView w = std::move(z); // Copy const -> const. + EXPECT_EQ(6u, w.size()); + EXPECT_EQ(arr, w.data()); + // ArrayView v = std::move(z); // Error, because can't drop const. +} + +TEST(ArrayViewTest, TestCopyConstructorFixedLvalue) { + char arr[] = "Arrr!"; + ArrayView x = arr; + static_assert(x.size() == 6, ""); + EXPECT_EQ(arr, x.data()); + + // Copy fixed -> fixed. + ArrayView y = x; // Copy non-const -> non-const. + static_assert(y.size() == 6, ""); + EXPECT_EQ(arr, y.data()); + ArrayView z = x; // Copy non-const -> const. + static_assert(z.size() == 6, ""); + EXPECT_EQ(arr, z.data()); + ArrayView w = z; // Copy const -> const. + static_assert(w.size() == 6, ""); + EXPECT_EQ(arr, w.data()); + // ArrayView v = z; // Compile error, because can't drop const. + + // Copy fixed -> variable. + ArrayView yv = x; // Copy non-const -> non-const. + EXPECT_EQ(6u, yv.size()); + EXPECT_EQ(arr, yv.data()); + ArrayView zv = x; // Copy non-const -> const. + EXPECT_EQ(6u, zv.size()); + EXPECT_EQ(arr, zv.data()); + ArrayView wv = z; // Copy const -> const. + EXPECT_EQ(6u, wv.size()); + EXPECT_EQ(arr, wv.data()); + // ArrayView vv = z; // Compile error, because can't drop const. +} + +TEST(ArrayViewTest, TestCopyConstructorFixedRvalue) { + char arr[] = "Arrr!"; + ArrayView x = arr; + static_assert(x.size() == 6, ""); + EXPECT_EQ(arr, x.data()); + + // Copy fixed -> fixed. + ArrayView y = std::move(x); // Copy non-const -> non-const. + static_assert(y.size() == 6, ""); + EXPECT_EQ(arr, y.data()); + ArrayView z = std::move(x); // Copy non-const -> const. + static_assert(z.size() == 6, ""); + EXPECT_EQ(arr, z.data()); + ArrayView w = std::move(z); // Copy const -> const. + static_assert(w.size() == 6, ""); + EXPECT_EQ(arr, w.data()); + // ArrayView v = std::move(z); // Error, because can't drop const. + + // Copy fixed -> variable. + ArrayView yv = std::move(x); // Copy non-const -> non-const. 
+ EXPECT_EQ(6u, yv.size()); + EXPECT_EQ(arr, yv.data()); + ArrayView zv = std::move(x); // Copy non-const -> const. + EXPECT_EQ(6u, zv.size()); + EXPECT_EQ(arr, zv.data()); + ArrayView wv = std::move(z); // Copy const -> const. + EXPECT_EQ(6u, wv.size()); + EXPECT_EQ(arr, wv.data()); + // ArrayView vv = std::move(z); // Error, because can't drop const. +} + +TEST(ArrayViewTest, TestCopyAssignmentVariableLvalue) { + char arr[] = "Arrr!"; + ArrayView x(arr); + EXPECT_EQ(6u, x.size()); + EXPECT_EQ(arr, x.data()); + ArrayView y; + y = x; // Copy non-const -> non-const. + EXPECT_EQ(6u, y.size()); + EXPECT_EQ(arr, y.data()); + ArrayView z; + z = x; // Copy non-const -> const. + EXPECT_EQ(6u, z.size()); + EXPECT_EQ(arr, z.data()); + ArrayView w; + w = z; // Copy const -> const. + EXPECT_EQ(6u, w.size()); + EXPECT_EQ(arr, w.data()); + // ArrayView v; + // v = z; // Compile error, because can't drop const. +} + +TEST(ArrayViewTest, TestCopyAssignmentVariableRvalue) { + char arr[] = "Arrr!"; + ArrayView x(arr); + EXPECT_EQ(6u, x.size()); + EXPECT_EQ(arr, x.data()); + ArrayView y; + y = std::move(x); // Copy non-const -> non-const. + EXPECT_EQ(6u, y.size()); + EXPECT_EQ(arr, y.data()); + ArrayView z; + z = std::move(x); // Copy non-const -> const. + EXPECT_EQ(6u, z.size()); + EXPECT_EQ(arr, z.data()); + ArrayView w; + w = std::move(z); // Copy const -> const. + EXPECT_EQ(6u, w.size()); + EXPECT_EQ(arr, w.data()); + // ArrayView v; + // v = std::move(z); // Compile error, because can't drop const. +} + +TEST(ArrayViewTest, TestCopyAssignmentFixedLvalue) { + char arr[] = "Arrr!"; + char init[] = "Init!"; + ArrayView x(arr); + EXPECT_EQ(arr, x.data()); + + // Copy fixed -> fixed. + ArrayView y(init); + y = x; // Copy non-const -> non-const. + EXPECT_EQ(arr, y.data()); + ArrayView z(init); + z = x; // Copy non-const -> const. + EXPECT_EQ(arr, z.data()); + ArrayView w(init); + w = z; // Copy const -> const. 
+ EXPECT_EQ(arr, w.data()); + // ArrayView v(init); + // v = z; // Compile error, because can't drop const. + + // Copy fixed -> variable. + ArrayView yv; + yv = x; // Copy non-const -> non-const. + EXPECT_EQ(6u, yv.size()); + EXPECT_EQ(arr, yv.data()); + ArrayView zv; + zv = x; // Copy non-const -> const. + EXPECT_EQ(6u, zv.size()); + EXPECT_EQ(arr, zv.data()); + ArrayView wv; + wv = z; // Copy const -> const. + EXPECT_EQ(6u, wv.size()); + EXPECT_EQ(arr, wv.data()); + // ArrayView v; + // v = z; // Compile error, because can't drop const. +} + +TEST(ArrayViewTest, TestCopyAssignmentFixedRvalue) { + char arr[] = "Arrr!"; + char init[] = "Init!"; + ArrayView x(arr); + EXPECT_EQ(arr, x.data()); + + // Copy fixed -> fixed. + ArrayView y(init); + y = std::move(x); // Copy non-const -> non-const. + EXPECT_EQ(arr, y.data()); + ArrayView z(init); + z = std::move(x); // Copy non-const -> const. + EXPECT_EQ(arr, z.data()); + ArrayView w(init); + w = std::move(z); // Copy const -> const. + EXPECT_EQ(arr, w.data()); + // ArrayView v(init); + // v = std::move(z); // Compile error, because can't drop const. + + // Copy fixed -> variable. + ArrayView yv; + yv = std::move(x); // Copy non-const -> non-const. + EXPECT_EQ(6u, yv.size()); + EXPECT_EQ(arr, yv.data()); + ArrayView zv; + zv = std::move(x); // Copy non-const -> const. + EXPECT_EQ(6u, zv.size()); + EXPECT_EQ(arr, zv.data()); + ArrayView wv; + wv = std::move(z); // Copy const -> const. + EXPECT_EQ(6u, wv.size()); + EXPECT_EQ(arr, wv.data()); + // ArrayView v; + // v = std::move(z); // Compile error, because can't drop const. +} + +TEST(ArrayViewTest, TestStdArray) { + EXPECT_EQ(4u, Call(std::array{1, 2, 3, 4})); + CallFixed(std::array{2, 3, 4}); + constexpr size_t size = 5; + std::array arr{}; + // Fixed size view. + rtc::ArrayView arr_view_fixed(arr); + EXPECT_EQ(arr.data(), arr_view_fixed.data()); + static_assert(size == arr_view_fixed.size(), ""); + // Variable size view. 
+ rtc::ArrayView arr_view(arr); + EXPECT_EQ(arr.data(), arr_view.data()); + EXPECT_EQ(size, arr_view.size()); +} + +TEST(ArrayViewTest, TestConstStdArray) { + constexpr size_t size = 5; + + constexpr std::array constexpr_arr{}; + rtc::ArrayView constexpr_arr_view(constexpr_arr); + EXPECT_EQ(constexpr_arr.data(), constexpr_arr_view.data()); + static_assert(constexpr_arr.size() == constexpr_arr_view.size(), ""); + + const std::array const_arr{}; + rtc::ArrayView const_arr_view(const_arr); + EXPECT_EQ(const_arr.data(), const_arr_view.data()); + static_assert(const_arr.size() == const_arr_view.size(), ""); + + std::array non_const_arr{}; + rtc::ArrayView non_const_arr_view(non_const_arr); + EXPECT_EQ(non_const_arr.data(), non_const_arr_view.data()); + static_assert(non_const_arr.size() == non_const_arr_view.size(), ""); +} + +TEST(ArrayViewTest, TestStdVector) { + EXPECT_EQ(3u, Call(std::vector{4, 5, 6})); + std::vector v; + v.push_back(3); + v.push_back(11); + EXPECT_EQ(2u, Call(v)); + EXPECT_EQ(2u, Call(v)); + // Call(v); // Compile error, because incompatible types. + ArrayView x = v; + EXPECT_EQ(2u, x.size()); + EXPECT_EQ(v.data(), x.data()); + ArrayView y; + y = v; + EXPECT_EQ(2u, y.size()); + EXPECT_EQ(v.data(), y.data()); + // ArrayView d = v; // Compile error, because incompatible types. + const std::vector cv; + EXPECT_EQ(0u, Call(cv)); + // Call(cv); // Compile error, because can't drop const. + ArrayView z = cv; + EXPECT_EQ(0u, z.size()); + EXPECT_EQ(nullptr, z.data()); + // ArrayView w = cv; // Compile error, because can't drop const. +} + +TEST(ArrayViewTest, TestRtcBuffer) { + rtc::Buffer b = "so buffer"; + EXPECT_EQ(10u, Call(b)); + EXPECT_EQ(10u, Call(b)); + // Call(b); // Compile error, because incompatible types. + ArrayView x = b; + EXPECT_EQ(10u, x.size()); + EXPECT_EQ(b.data(), x.data()); + ArrayView y; + y = b; + EXPECT_EQ(10u, y.size()); + EXPECT_EQ(b.data(), y.data()); + // ArrayView d = b; // Compile error, because incompatible types. 
+ const rtc::Buffer cb = "very const"; + EXPECT_EQ(11u, Call(cb)); + // Call(cb); // Compile error, because can't drop const. + ArrayView z = cb; + EXPECT_EQ(11u, z.size()); + EXPECT_EQ(cb.data(), z.data()); + // ArrayView w = cb; // Compile error, because can't drop const. +} + +TEST(ArrayViewTest, TestSwapVariable) { + const char arr[] = "Arrr!"; + const char aye[] = "Aye, Cap'n!"; + ArrayView x(arr); + EXPECT_EQ(6u, x.size()); + EXPECT_EQ(arr, x.data()); + ArrayView y(aye); + EXPECT_EQ(12u, y.size()); + EXPECT_EQ(aye, y.data()); + using std::swap; + swap(x, y); + EXPECT_EQ(12u, x.size()); + EXPECT_EQ(aye, x.data()); + EXPECT_EQ(6u, y.size()); + EXPECT_EQ(arr, y.data()); + // ArrayView z; + // swap(x, z); // Compile error, because can't drop const. +} + +TEST(FixArrayViewTest, TestSwapFixed) { + const char arr[] = "Arr!"; + char aye[] = "Aye!"; + ArrayView x(arr); + EXPECT_EQ(arr, x.data()); + ArrayView y(aye); + EXPECT_EQ(aye, y.data()); + using std::swap; + swap(x, y); + EXPECT_EQ(aye, x.data()); + EXPECT_EQ(arr, y.data()); + // ArrayView z(aye); + // swap(x, z); // Compile error, because can't drop const. + // ArrayView w(aye, 4); + // swap(x, w); // Compile error, because different sizes. +} + +TEST(ArrayViewDeathTest, TestIndexing) { + char arr[] = "abcdefg"; + ArrayView x(arr); + const ArrayView y(arr); + ArrayView z(arr); + EXPECT_EQ(8u, x.size()); + EXPECT_EQ(8u, y.size()); + EXPECT_EQ(8u, z.size()); + EXPECT_EQ('b', x[1]); + EXPECT_EQ('c', y[2]); + EXPECT_EQ('d', z[3]); + x[3] = 'X'; + y[2] = 'Y'; + // z[1] = 'Z'; // Compile error, because z's element type is const char. + EXPECT_EQ('b', x[1]); + EXPECT_EQ('Y', y[2]); + EXPECT_EQ('X', z[3]); +#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) + EXPECT_DEATH(z[8], ""); // DCHECK error (index out of bounds). +#endif +} + +TEST(ArrayViewTest, TestIterationEmpty) { + // Variable-size. 
+ ArrayView>>> av; + EXPECT_EQ(av.begin(), av.end()); + EXPECT_EQ(av.cbegin(), av.cend()); + for (auto& e : av) { + EXPECT_TRUE(false); + EXPECT_EQ(42u, e.size()); // Dummy use of e to prevent unused var warning. + } + + // Fixed-size. + ArrayView>>, 0> af; + EXPECT_EQ(af.begin(), af.end()); + EXPECT_EQ(af.cbegin(), af.cend()); + for (auto& e : af) { + EXPECT_TRUE(false); + EXPECT_EQ(42u, e.size()); // Dummy use of e to prevent unused var warning. + } +} + +TEST(ArrayViewTest, TestIterationVariable) { + char arr[] = "Arrr!"; + ArrayView av(arr); + EXPECT_EQ('A', *av.begin()); + EXPECT_EQ('A', *av.cbegin()); + EXPECT_EQ('\0', *(av.end() - 1)); + EXPECT_EQ('\0', *(av.cend() - 1)); + char i = 0; + for (auto& e : av) { + EXPECT_EQ(arr + i, &e); + e = 's' + i; + ++i; + } + i = 0; + for (auto& e : ArrayView(av)) { + EXPECT_EQ(arr + i, &e); + // e = 'q' + i; // Compile error, because e is a const char&. + ++i; + } +} + +TEST(ArrayViewTest, TestIterationFixed) { + char arr[] = "Arrr!"; + ArrayView av(arr); + EXPECT_EQ('A', *av.begin()); + EXPECT_EQ('A', *av.cbegin()); + EXPECT_EQ('\0', *(av.end() - 1)); + EXPECT_EQ('\0', *(av.cend() - 1)); + char i = 0; + for (auto& e : av) { + EXPECT_EQ(arr + i, &e); + e = 's' + i; + ++i; + } + i = 0; + for (auto& e : ArrayView(av)) { + EXPECT_EQ(arr + i, &e); + // e = 'q' + i; // Compile error, because e is a const char&. 
+ ++i; + } +} + +TEST(ArrayViewTest, TestEmpty) { + EXPECT_TRUE(ArrayView().empty()); + const int a[] = {1, 2, 3}; + EXPECT_FALSE(ArrayView(a).empty()); + + static_assert(ArrayView::empty(), ""); + static_assert(!ArrayView::empty(), ""); +} + +TEST(ArrayViewTest, TestCompare) { + int a[] = {1, 2, 3}; + int b[] = {1, 2, 3}; + + EXPECT_EQ(ArrayView(a), ArrayView(a)); + EXPECT_EQ((ArrayView(a)), (ArrayView(a))); + EXPECT_EQ(ArrayView(a), (ArrayView(a))); + EXPECT_EQ(ArrayView(), ArrayView()); + EXPECT_EQ(ArrayView(), ArrayView(a, 0)); + EXPECT_EQ(ArrayView(a, 0), ArrayView(b, 0)); + EXPECT_EQ((ArrayView(a, 0)), ArrayView()); + + EXPECT_NE(ArrayView(a), ArrayView(b)); + EXPECT_NE((ArrayView(a)), (ArrayView(b))); + EXPECT_NE((ArrayView(a)), ArrayView(b)); + EXPECT_NE(ArrayView(a), ArrayView()); + EXPECT_NE(ArrayView(a), ArrayView(a, 2)); + EXPECT_NE((ArrayView(a)), (ArrayView(a, 2))); +} + +TEST(ArrayViewTest, TestSubViewVariable) { + int a[] = {1, 2, 3}; + ArrayView av(a); + + EXPECT_EQ(av.subview(0), av); + + EXPECT_THAT(av.subview(1), ElementsAre(2, 3)); + EXPECT_THAT(av.subview(2), ElementsAre(3)); + EXPECT_THAT(av.subview(3), IsEmpty()); + EXPECT_THAT(av.subview(4), IsEmpty()); + + EXPECT_THAT(av.subview(1, 0), IsEmpty()); + EXPECT_THAT(av.subview(1, 1), ElementsAre(2)); + EXPECT_THAT(av.subview(1, 2), ElementsAre(2, 3)); + EXPECT_THAT(av.subview(1, 3), ElementsAre(2, 3)); +} + +TEST(ArrayViewTest, TestSubViewFixed) { + int a[] = {1, 2, 3}; + ArrayView av(a); + + EXPECT_EQ(av.subview(0), av); + + EXPECT_THAT(av.subview(1), ElementsAre(2, 3)); + EXPECT_THAT(av.subview(2), ElementsAre(3)); + EXPECT_THAT(av.subview(3), IsEmpty()); + EXPECT_THAT(av.subview(4), IsEmpty()); + + EXPECT_THAT(av.subview(1, 0), IsEmpty()); + EXPECT_THAT(av.subview(1, 1), ElementsAre(2)); + EXPECT_THAT(av.subview(1, 2), ElementsAre(2, 3)); + EXPECT_THAT(av.subview(1, 3), ElementsAre(2, 3)); +} + +TEST(ArrayViewTest, TestReinterpretCastFixedSize) { + uint8_t bytes[] = {1, 2, 3}; + ArrayView 
uint8_av(bytes);
+  ArrayView<int8_t, 3> int8_av = reinterpret_array_view<int8_t>(uint8_av);
+  EXPECT_EQ(int8_av.size(), uint8_av.size());
+  EXPECT_EQ(int8_av[0], 1);
+  EXPECT_EQ(int8_av[1], 2);
+  EXPECT_EQ(int8_av[2], 3);
+}
+
+TEST(ArrayViewTest, TestReinterpretCastVariableSize) {
+  std::vector<int8_t> v = {1, 2, 3};
+  ArrayView<int8_t> int8_av(v);
+  ArrayView<uint8_t> uint8_av = reinterpret_array_view<uint8_t>(int8_av);
+  EXPECT_EQ(int8_av.size(), uint8_av.size());
+  EXPECT_EQ(uint8_av[0], 1);
+  EXPECT_EQ(uint8_av[1], 2);
+  EXPECT_EQ(uint8_av[2], 3);
+}
+}  // namespace rtc
diff --git a/api/async_resolver_factory.h b/api/async_resolver_factory.h
new file mode 100644
index 0000000..93d3f79
--- /dev/null
+++ b/api/async_resolver_factory.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_ASYNC_RESOLVER_FACTORY_H_
+#define API_ASYNC_RESOLVER_FACTORY_H_
+
+#include "rtc_base/async_resolver_interface.h"
+
+namespace webrtc {
+
+// An abstract factory for creating AsyncResolverInterfaces. This allows
+// client applications to provide WebRTC with their own mechanism for
+// performing DNS resolution.
+class AsyncResolverFactory {
+ public:
+  AsyncResolverFactory() = default;
+  virtual ~AsyncResolverFactory() = default;
+
+  // The caller should call Destroy on the returned object to delete it.
+  virtual rtc::AsyncResolverInterface* Create() = 0;
+};
+
+}  // namespace webrtc
+
+#endif  // API_ASYNC_RESOLVER_FACTORY_H_
diff --git a/api/audio/BUILD.gn b/api/audio/BUILD.gn
new file mode 100644
index 0000000..117e5cc
--- /dev/null
+++ b/api/audio/BUILD.gn
@@ -0,0 +1,103 @@
+# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("../../webrtc.gni") + +rtc_library("audio_frame_api") { + visibility = [ "*" ] + sources = [ + "audio_frame.cc", + "audio_frame.h", + "channel_layout.cc", + "channel_layout.h", + ] + + deps = [ + "..:rtp_packet_info", + "../../rtc_base:checks", + "../../rtc_base:rtc_base_approved", + ] +} + +rtc_source_set("audio_mixer_api") { + visibility = [ "*" ] + sources = [ "audio_mixer.h" ] + + deps = [ + ":audio_frame_api", + "../../rtc_base:rtc_base_approved", + ] +} + +rtc_library("aec3_config") { + visibility = [ "*" ] + sources = [ + "echo_canceller3_config.cc", + "echo_canceller3_config.h", + ] + deps = [ + "../../rtc_base:checks", + "../../rtc_base:rtc_base_approved", + "../../rtc_base:safe_minmax", + "../../rtc_base/system:rtc_export", + ] +} + +rtc_library("aec3_config_json") { + visibility = [ "*" ] + allow_poison = [ "rtc_json" ] + sources = [ + "echo_canceller3_config_json.cc", + "echo_canceller3_config_json.h", + ] + deps = [ + ":aec3_config", + "../../rtc_base:checks", + "../../rtc_base:rtc_base_approved", + "../../rtc_base:rtc_json", + "../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] +} + +rtc_library("aec3_factory") { + visibility = [ "*" ] + configs += [ "../../modules/audio_processing:apm_debug_dump" ] + sources = [ + "echo_canceller3_factory.cc", + "echo_canceller3_factory.h", + ] + + deps = [ + ":aec3_config", + ":echo_control", + "../../modules/audio_processing/aec3", + "../../rtc_base:rtc_base_approved", + "../../rtc_base/system:rtc_export", + ] +} + +rtc_source_set("echo_control") { + visibility = [ "*" ] + sources = [ "echo_control.h" ] + deps = [ "../../rtc_base:checks" 
] +} + +rtc_source_set("echo_detector_creator") { + visibility = [ "*" ] + sources = [ + "echo_detector_creator.cc", + "echo_detector_creator.h", + ] + deps = [ + "../../api:scoped_refptr", + "../../modules/audio_processing:api", + "../../modules/audio_processing:audio_processing", + "../../rtc_base:refcount", + ] +} diff --git a/api/audio/OWNERS b/api/audio/OWNERS new file mode 100644 index 0000000..bb499b4 --- /dev/null +++ b/api/audio/OWNERS @@ -0,0 +1,2 @@ +gustaf@webrtc.org +peah@webrtc.org diff --git a/api/audio/audio_frame.cc b/api/audio/audio_frame.cc new file mode 100644 index 0000000..c6e5cf4 --- /dev/null +++ b/api/audio/audio_frame.cc @@ -0,0 +1,164 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio/audio_frame.h" + +#include +#include +#include + +#include "rtc_base/checks.h" +#include "rtc_base/time_utils.h" + +namespace webrtc { + +AudioFrame::AudioFrame() { + // Visual Studio doesn't like this in the class definition. 
+ static_assert(sizeof(data_) == kMaxDataSizeBytes, "kMaxDataSizeBytes"); +} + +void swap(AudioFrame& a, AudioFrame& b) { + using std::swap; + swap(a.timestamp_, b.timestamp_); + swap(a.elapsed_time_ms_, b.elapsed_time_ms_); + swap(a.ntp_time_ms_, b.ntp_time_ms_); + swap(a.samples_per_channel_, b.samples_per_channel_); + swap(a.sample_rate_hz_, b.sample_rate_hz_); + swap(a.num_channels_, b.num_channels_); + swap(a.channel_layout_, b.channel_layout_); + swap(a.speech_type_, b.speech_type_); + swap(a.vad_activity_, b.vad_activity_); + swap(a.profile_timestamp_ms_, b.profile_timestamp_ms_); + swap(a.packet_infos_, b.packet_infos_); + const size_t length_a = a.samples_per_channel_ * a.num_channels_; + const size_t length_b = b.samples_per_channel_ * b.num_channels_; + RTC_DCHECK_LE(length_a, AudioFrame::kMaxDataSizeSamples); + RTC_DCHECK_LE(length_b, AudioFrame::kMaxDataSizeSamples); + std::swap_ranges(a.data_, a.data_ + std::max(length_a, length_b), b.data_); + swap(a.muted_, b.muted_); + swap(a.absolute_capture_timestamp_ms_, b.absolute_capture_timestamp_ms_); +} + +void AudioFrame::Reset() { + ResetWithoutMuting(); + muted_ = true; +} + +void AudioFrame::ResetWithoutMuting() { + // TODO(wu): Zero is a valid value for |timestamp_|. We should initialize + // to an invalid value, or add a new member to indicate invalidity. 
+ timestamp_ = 0; + elapsed_time_ms_ = -1; + ntp_time_ms_ = -1; + samples_per_channel_ = 0; + sample_rate_hz_ = 0; + num_channels_ = 0; + channel_layout_ = CHANNEL_LAYOUT_NONE; + speech_type_ = kUndefined; + vad_activity_ = kVadUnknown; + profile_timestamp_ms_ = 0; + packet_infos_ = RtpPacketInfos(); + absolute_capture_timestamp_ms_ = absl::nullopt; +} + +void AudioFrame::UpdateFrame(uint32_t timestamp, + const int16_t* data, + size_t samples_per_channel, + int sample_rate_hz, + SpeechType speech_type, + VADActivity vad_activity, + size_t num_channels) { + timestamp_ = timestamp; + samples_per_channel_ = samples_per_channel; + sample_rate_hz_ = sample_rate_hz; + speech_type_ = speech_type; + vad_activity_ = vad_activity; + num_channels_ = num_channels; + channel_layout_ = GuessChannelLayout(num_channels); + if (channel_layout_ != CHANNEL_LAYOUT_UNSUPPORTED) { + RTC_DCHECK_EQ(num_channels, ChannelLayoutToChannelCount(channel_layout_)); + } + + const size_t length = samples_per_channel * num_channels; + RTC_CHECK_LE(length, kMaxDataSizeSamples); + if (data != nullptr) { + memcpy(data_, data, sizeof(int16_t) * length); + muted_ = false; + } else { + muted_ = true; + } +} + +void AudioFrame::CopyFrom(const AudioFrame& src) { + if (this == &src) + return; + + timestamp_ = src.timestamp_; + elapsed_time_ms_ = src.elapsed_time_ms_; + ntp_time_ms_ = src.ntp_time_ms_; + packet_infos_ = src.packet_infos_; + muted_ = src.muted(); + samples_per_channel_ = src.samples_per_channel_; + sample_rate_hz_ = src.sample_rate_hz_; + speech_type_ = src.speech_type_; + vad_activity_ = src.vad_activity_; + num_channels_ = src.num_channels_; + channel_layout_ = src.channel_layout_; + absolute_capture_timestamp_ms_ = src.absolute_capture_timestamp_ms(); + + const size_t length = samples_per_channel_ * num_channels_; + RTC_CHECK_LE(length, kMaxDataSizeSamples); + if (!src.muted()) { + memcpy(data_, src.data(), sizeof(int16_t) * length); + muted_ = false; + } +} + +void 
AudioFrame::UpdateProfileTimeStamp() { + profile_timestamp_ms_ = rtc::TimeMillis(); +} + +int64_t AudioFrame::ElapsedProfileTimeMs() const { + if (profile_timestamp_ms_ == 0) { + // Profiling has not been activated. + return -1; + } + return rtc::TimeSince(profile_timestamp_ms_); +} + +const int16_t* AudioFrame::data() const { + return muted_ ? empty_data() : data_; +} + +// TODO(henrik.lundin) Can we skip zeroing the buffer? +// See https://bugs.chromium.org/p/webrtc/issues/detail?id=5647. +int16_t* AudioFrame::mutable_data() { + if (muted_) { + memset(data_, 0, kMaxDataSizeBytes); + muted_ = false; + } + return data_; +} + +void AudioFrame::Mute() { + muted_ = true; +} + +bool AudioFrame::muted() const { + return muted_; +} + +// static +const int16_t* AudioFrame::empty_data() { + static int16_t* null_data = new int16_t[kMaxDataSizeSamples](); + return &null_data[0]; +} + +} // namespace webrtc diff --git a/api/audio/audio_frame.h b/api/audio/audio_frame.h new file mode 100644 index 0000000..78539f5 --- /dev/null +++ b/api/audio/audio_frame.h @@ -0,0 +1,177 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_AUDIO_FRAME_H_ +#define API_AUDIO_AUDIO_FRAME_H_ + +#include +#include + +#include + +#include "api/audio/channel_layout.h" +#include "api/rtp_packet_infos.h" +#include "rtc_base/constructor_magic.h" + +namespace webrtc { + +/* This class holds up to 120 ms of super-wideband (32 kHz) stereo audio. It + * allows for adding and subtracting frames while keeping track of the resulting + * states. + * + * Notes + * - This is a de-facto api, not designed for external use. 
The AudioFrame class + * is in need of overhaul or even replacement, and anyone depending on it + * should be prepared for that. + * - The total number of samples is samples_per_channel_ * num_channels_. + * - Stereo data is interleaved starting with the left channel. + */ +class AudioFrame { + public: + // Using constexpr here causes linker errors unless the variable also has an + // out-of-class definition, which is impractical in this header-only class. + // (This makes no sense because it compiles as an enum value, which we most + // certainly cannot take the address of, just fine.) C++17 introduces inline + // variables which should allow us to switch to constexpr and keep this a + // header-only class. + enum : size_t { + // Stereo, 32 kHz, 120 ms (2 * 32 * 120) + // Stereo, 192 kHz, 20 ms (2 * 192 * 20) + kMaxDataSizeSamples = 7680, + kMaxDataSizeBytes = kMaxDataSizeSamples * sizeof(int16_t), + }; + + enum VADActivity { kVadActive = 0, kVadPassive = 1, kVadUnknown = 2 }; + enum SpeechType { + kNormalSpeech = 0, + kPLC = 1, + kCNG = 2, + kPLCCNG = 3, + kCodecPLC = 5, + kUndefined = 4 + }; + + AudioFrame(); + + friend void swap(AudioFrame& a, AudioFrame& b); + + // Resets all members to their default state. + void Reset(); + // Same as Reset(), but leaves mute state unchanged. Muting a frame requires + // the buffer to be zeroed on the next call to mutable_data(). Callers + // intending to write to the buffer immediately after Reset() can instead use + // ResetWithoutMuting() to skip this wasteful zeroing. + void ResetWithoutMuting(); + + void UpdateFrame(uint32_t timestamp, + const int16_t* data, + size_t samples_per_channel, + int sample_rate_hz, + SpeechType speech_type, + VADActivity vad_activity, + size_t num_channels = 1); + + void CopyFrom(const AudioFrame& src); + + // Sets a wall-time clock timestamp in milliseconds to be used for profiling + // of time between two points in the audio chain. 
+ // Example: + // t0: UpdateProfileTimeStamp() + // t1: ElapsedProfileTimeMs() => t1 - t0 [msec] + void UpdateProfileTimeStamp(); + // Returns the time difference between now and when UpdateProfileTimeStamp() + // was last called. Returns -1 if UpdateProfileTimeStamp() has not yet been + // called. + int64_t ElapsedProfileTimeMs() const; + + // data() returns a zeroed static buffer if the frame is muted. + // mutable_frame() always returns a non-static buffer; the first call to + // mutable_frame() zeros the non-static buffer and marks the frame unmuted. + const int16_t* data() const; + int16_t* mutable_data(); + + // Prefer to mute frames using AudioFrameOperations::Mute. + void Mute(); + // Frame is muted by default. + bool muted() const; + + size_t max_16bit_samples() const { return kMaxDataSizeSamples; } + size_t samples_per_channel() const { return samples_per_channel_; } + size_t num_channels() const { return num_channels_; } + ChannelLayout channel_layout() const { return channel_layout_; } + int sample_rate_hz() const { return sample_rate_hz_; } + + void set_absolute_capture_timestamp_ms( + int64_t absolute_capture_time_stamp_ms) { + absolute_capture_timestamp_ms_ = absolute_capture_time_stamp_ms; + } + + absl::optional absolute_capture_timestamp_ms() const { + return absolute_capture_timestamp_ms_; + } + + // RTP timestamp of the first sample in the AudioFrame. + uint32_t timestamp_ = 0; + // Time since the first frame in milliseconds. + // -1 represents an uninitialized value. + int64_t elapsed_time_ms_ = -1; + // NTP time of the estimated capture time in local timebase in milliseconds. + // -1 represents an uninitialized value. 
+ int64_t ntp_time_ms_ = -1; + size_t samples_per_channel_ = 0; + int sample_rate_hz_ = 0; + size_t num_channels_ = 0; + ChannelLayout channel_layout_ = CHANNEL_LAYOUT_NONE; + SpeechType speech_type_ = kUndefined; + VADActivity vad_activity_ = kVadUnknown; + // Monotonically increasing timestamp intended for profiling of audio frames. + // Typically used for measuring elapsed time between two different points in + // the audio path. No lock is used to save resources and we are thread safe + // by design. + // TODO(nisse@webrtc.org): consider using absl::optional. + int64_t profile_timestamp_ms_ = 0; + + // Information about packets used to assemble this audio frame. This is needed + // by |SourceTracker| when the frame is delivered to the RTCRtpReceiver's + // MediaStreamTrack, in order to implement getContributingSources(). See: + // https://w3c.github.io/webrtc-pc/#dom-rtcrtpreceiver-getcontributingsources + // + // TODO(bugs.webrtc.org/10757): + // Note that this information might not be fully accurate since we currently + // don't have a proper way to track it across the audio sync buffer. The + // sync buffer is the small sample-holding buffer located after the audio + // decoder and before where samples are assembled into output frames. + // + // |RtpPacketInfos| may also be empty if the audio samples did not come from + // RTP packets. E.g. if the audio were locally generated by packet loss + // concealment, comfort noise generation, etc. + RtpPacketInfos packet_infos_; + + private: + // A permanently zeroed out buffer to represent muted frames. This is a + // header-only class, so the only way to avoid creating a separate empty + // buffer per translation unit is to wrap a static in an inline function. + static const int16_t* empty_data(); + + int16_t data_[kMaxDataSizeSamples]; + bool muted_ = true; + + // Absolute capture timestamp when this audio frame was originally captured. + // This is only valid for audio frames captured on this machine. 
The absolute + // capture timestamp of a received frame is found in |packet_infos_|. + // This timestamp MUST be based on the same clock as rtc::TimeMillis(). + absl::optional absolute_capture_timestamp_ms_; + + RTC_DISALLOW_COPY_AND_ASSIGN(AudioFrame); +}; + +} // namespace webrtc + +#endif // API_AUDIO_AUDIO_FRAME_H_ diff --git a/api/audio/audio_mixer.h b/api/audio/audio_mixer.h new file mode 100644 index 0000000..b290cfa --- /dev/null +++ b/api/audio/audio_mixer.h @@ -0,0 +1,80 @@ +/* + * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_AUDIO_MIXER_H_ +#define API_AUDIO_AUDIO_MIXER_H_ + +#include + +#include "api/audio/audio_frame.h" +#include "rtc_base/ref_count.h" + +namespace webrtc { + +// WORK IN PROGRESS +// This class is under development and is not yet intended for for use outside +// of WebRtc/Libjingle. +class AudioMixer : public rtc::RefCountInterface { + public: + // A callback class that all mixer participants must inherit from/implement. + class Source { + public: + enum class AudioFrameInfo { + kNormal, // The samples in audio_frame are valid and should be used. + kMuted, // The samples in audio_frame should not be used, but + // should be implicitly interpreted as zero. Other + // fields in audio_frame may be read and should + // contain meaningful values. + kError, // The audio_frame will not be used. + }; + + // Overwrites |audio_frame|. The data_ field is overwritten with + // 10 ms of new audio (either 1 or 2 interleaved channels) at + // |sample_rate_hz|. All fields in |audio_frame| must be updated. 
+ virtual AudioFrameInfo GetAudioFrameWithInfo(int sample_rate_hz, + AudioFrame* audio_frame) = 0; + + // A way for a mixer implementation to distinguish participants. + virtual int Ssrc() const = 0; + + // A way for this source to say that GetAudioFrameWithInfo called + // with this sample rate or higher will not cause quality loss. + virtual int PreferredSampleRate() const = 0; + + virtual ~Source() {} + }; + + // Returns true if adding was successful. A source is never added + // twice. Addition and removal can happen on different threads. + virtual bool AddSource(Source* audio_source) = 0; + + // Removal is never attempted if a source has not been successfully + // added to the mixer. + virtual void RemoveSource(Source* audio_source) = 0; + + // Performs mixing by asking registered audio sources for audio. The + // mixed result is placed in the provided AudioFrame. This method + // will only be called from a single thread. The channels argument + // specifies the number of channels of the mix result. The mixer + // should mix at a rate that doesn't cause quality loss of the + // sources' audio. The mixing rate is one of the rates listed in + // AudioProcessing::NativeRate. All fields in + // |audio_frame_for_mixing| must be updated. + virtual void Mix(size_t number_of_channels, + AudioFrame* audio_frame_for_mixing) = 0; + + protected: + // Since the mixer is reference counted, the destructor may be + // called from any thread. + ~AudioMixer() override {} +}; +} // namespace webrtc + +#endif // API_AUDIO_AUDIO_MIXER_H_ diff --git a/api/audio/channel_layout.cc b/api/audio/channel_layout.cc new file mode 100644 index 0000000..567f4d9 --- /dev/null +++ b/api/audio/channel_layout.cc @@ -0,0 +1,282 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio/channel_layout.h" + +#include + +#include "rtc_base/arraysize.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +static const int kLayoutToChannels[] = { + 0, // CHANNEL_LAYOUT_NONE + 0, // CHANNEL_LAYOUT_UNSUPPORTED + 1, // CHANNEL_LAYOUT_MONO + 2, // CHANNEL_LAYOUT_STEREO + 3, // CHANNEL_LAYOUT_2_1 + 3, // CHANNEL_LAYOUT_SURROUND + 4, // CHANNEL_LAYOUT_4_0 + 4, // CHANNEL_LAYOUT_2_2 + 4, // CHANNEL_LAYOUT_QUAD + 5, // CHANNEL_LAYOUT_5_0 + 6, // CHANNEL_LAYOUT_5_1 + 5, // CHANNEL_LAYOUT_5_0_BACK + 6, // CHANNEL_LAYOUT_5_1_BACK + 7, // CHANNEL_LAYOUT_7_0 + 8, // CHANNEL_LAYOUT_7_1 + 8, // CHANNEL_LAYOUT_7_1_WIDE + 2, // CHANNEL_LAYOUT_STEREO_DOWNMIX + 3, // CHANNEL_LAYOUT_2POINT1 + 4, // CHANNEL_LAYOUT_3_1 + 5, // CHANNEL_LAYOUT_4_1 + 6, // CHANNEL_LAYOUT_6_0 + 6, // CHANNEL_LAYOUT_6_0_FRONT + 6, // CHANNEL_LAYOUT_HEXAGONAL + 7, // CHANNEL_LAYOUT_6_1 + 7, // CHANNEL_LAYOUT_6_1_BACK + 7, // CHANNEL_LAYOUT_6_1_FRONT + 7, // CHANNEL_LAYOUT_7_0_FRONT + 8, // CHANNEL_LAYOUT_7_1_WIDE_BACK + 8, // CHANNEL_LAYOUT_OCTAGONAL + 0, // CHANNEL_LAYOUT_DISCRETE + 3, // CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC + 5, // CHANNEL_LAYOUT_4_1_QUAD_SIDE + 0, // CHANNEL_LAYOUT_BITSTREAM +}; + +// The channel orderings for each layout as specified by FFmpeg. Each value +// represents the index of each channel in each layout. Values of -1 mean the +// channel at that index is not used for that layout. 
For example, the left side +// surround sound channel in FFmpeg's 5.1 layout is in the 5th position (because +// the order is L, R, C, LFE, LS, RS), so +// kChannelOrderings[CHANNEL_LAYOUT_5_1][SIDE_LEFT] = 4; +static const int kChannelOrderings[CHANNEL_LAYOUT_MAX + 1][CHANNELS_MAX + 1] = { + // FL | FR | FC | LFE | BL | BR | FLofC | FRofC | BC | SL | SR + + // CHANNEL_LAYOUT_NONE + {-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, + + // CHANNEL_LAYOUT_UNSUPPORTED + {-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, + + // CHANNEL_LAYOUT_MONO + {-1, -1, 0, -1, -1, -1, -1, -1, -1, -1, -1}, + + // CHANNEL_LAYOUT_STEREO + {0, 1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, + + // CHANNEL_LAYOUT_2_1 + {0, 1, -1, -1, -1, -1, -1, -1, 2, -1, -1}, + + // CHANNEL_LAYOUT_SURROUND + {0, 1, 2, -1, -1, -1, -1, -1, -1, -1, -1}, + + // CHANNEL_LAYOUT_4_0 + {0, 1, 2, -1, -1, -1, -1, -1, 3, -1, -1}, + + // CHANNEL_LAYOUT_2_2 + {0, 1, -1, -1, -1, -1, -1, -1, -1, 2, 3}, + + // CHANNEL_LAYOUT_QUAD + {0, 1, -1, -1, 2, 3, -1, -1, -1, -1, -1}, + + // CHANNEL_LAYOUT_5_0 + {0, 1, 2, -1, -1, -1, -1, -1, -1, 3, 4}, + + // CHANNEL_LAYOUT_5_1 + {0, 1, 2, 3, -1, -1, -1, -1, -1, 4, 5}, + + // FL | FR | FC | LFE | BL | BR | FLofC | FRofC | BC | SL | SR + + // CHANNEL_LAYOUT_5_0_BACK + {0, 1, 2, -1, 3, 4, -1, -1, -1, -1, -1}, + + // CHANNEL_LAYOUT_5_1_BACK + {0, 1, 2, 3, 4, 5, -1, -1, -1, -1, -1}, + + // CHANNEL_LAYOUT_7_0 + {0, 1, 2, -1, 5, 6, -1, -1, -1, 3, 4}, + + // CHANNEL_LAYOUT_7_1 + {0, 1, 2, 3, 6, 7, -1, -1, -1, 4, 5}, + + // CHANNEL_LAYOUT_7_1_WIDE + {0, 1, 2, 3, -1, -1, 6, 7, -1, 4, 5}, + + // CHANNEL_LAYOUT_STEREO_DOWNMIX + {0, 1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, + + // CHANNEL_LAYOUT_2POINT1 + {0, 1, -1, 2, -1, -1, -1, -1, -1, -1, -1}, + + // CHANNEL_LAYOUT_3_1 + {0, 1, 2, 3, -1, -1, -1, -1, -1, -1, -1}, + + // CHANNEL_LAYOUT_4_1 + {0, 1, 2, 4, -1, -1, -1, -1, 3, -1, -1}, + + // CHANNEL_LAYOUT_6_0 + {0, 1, 2, -1, -1, -1, -1, -1, 5, 3, 4}, + + // CHANNEL_LAYOUT_6_0_FRONT + {0, 1, -1, -1, -1, -1, 
4, 5, -1, 2, 3}, + + // FL | FR | FC | LFE | BL | BR | FLofC | FRofC | BC | SL | SR + + // CHANNEL_LAYOUT_HEXAGONAL + {0, 1, 2, -1, 3, 4, -1, -1, 5, -1, -1}, + + // CHANNEL_LAYOUT_6_1 + {0, 1, 2, 3, -1, -1, -1, -1, 6, 4, 5}, + + // CHANNEL_LAYOUT_6_1_BACK + {0, 1, 2, 3, 4, 5, -1, -1, 6, -1, -1}, + + // CHANNEL_LAYOUT_6_1_FRONT + {0, 1, -1, 6, -1, -1, 4, 5, -1, 2, 3}, + + // CHANNEL_LAYOUT_7_0_FRONT + {0, 1, 2, -1, -1, -1, 5, 6, -1, 3, 4}, + + // CHANNEL_LAYOUT_7_1_WIDE_BACK + {0, 1, 2, 3, 4, 5, 6, 7, -1, -1, -1}, + + // CHANNEL_LAYOUT_OCTAGONAL + {0, 1, 2, -1, 5, 6, -1, -1, 7, 3, 4}, + + // CHANNEL_LAYOUT_DISCRETE + {-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, + + // CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC + {0, 1, 2, -1, -1, -1, -1, -1, -1, -1, -1}, + + // CHANNEL_LAYOUT_4_1_QUAD_SIDE + {0, 1, -1, 4, -1, -1, -1, -1, -1, 2, 3}, + + // CHANNEL_LAYOUT_BITSTREAM + {-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1}, + + // FL | FR | FC | LFE | BL | BR | FLofC | FRofC | BC | SL | SR +}; + +int ChannelLayoutToChannelCount(ChannelLayout layout) { + RTC_DCHECK_LT(static_cast(layout), arraysize(kLayoutToChannels)); + RTC_DCHECK_LE(kLayoutToChannels[layout], kMaxConcurrentChannels); + return kLayoutToChannels[layout]; +} + +// Converts a channel count into a channel layout. 
+ChannelLayout GuessChannelLayout(int channels) { + switch (channels) { + case 1: + return CHANNEL_LAYOUT_MONO; + case 2: + return CHANNEL_LAYOUT_STEREO; + case 3: + return CHANNEL_LAYOUT_SURROUND; + case 4: + return CHANNEL_LAYOUT_QUAD; + case 5: + return CHANNEL_LAYOUT_5_0; + case 6: + return CHANNEL_LAYOUT_5_1; + case 7: + return CHANNEL_LAYOUT_6_1; + case 8: + return CHANNEL_LAYOUT_7_1; + default: + RTC_DLOG(LS_WARNING) << "Unsupported channel count: " << channels; + } + return CHANNEL_LAYOUT_UNSUPPORTED; +} + +int ChannelOrder(ChannelLayout layout, Channels channel) { + RTC_DCHECK_LT(static_cast(layout), arraysize(kChannelOrderings)); + RTC_DCHECK_LT(static_cast(channel), arraysize(kChannelOrderings[0])); + return kChannelOrderings[layout][channel]; +} + +const char* ChannelLayoutToString(ChannelLayout layout) { + switch (layout) { + case CHANNEL_LAYOUT_NONE: + return "NONE"; + case CHANNEL_LAYOUT_UNSUPPORTED: + return "UNSUPPORTED"; + case CHANNEL_LAYOUT_MONO: + return "MONO"; + case CHANNEL_LAYOUT_STEREO: + return "STEREO"; + case CHANNEL_LAYOUT_2_1: + return "2.1"; + case CHANNEL_LAYOUT_SURROUND: + return "SURROUND"; + case CHANNEL_LAYOUT_4_0: + return "4.0"; + case CHANNEL_LAYOUT_2_2: + return "QUAD_SIDE"; + case CHANNEL_LAYOUT_QUAD: + return "QUAD"; + case CHANNEL_LAYOUT_5_0: + return "5.0"; + case CHANNEL_LAYOUT_5_1: + return "5.1"; + case CHANNEL_LAYOUT_5_0_BACK: + return "5.0_BACK"; + case CHANNEL_LAYOUT_5_1_BACK: + return "5.1_BACK"; + case CHANNEL_LAYOUT_7_0: + return "7.0"; + case CHANNEL_LAYOUT_7_1: + return "7.1"; + case CHANNEL_LAYOUT_7_1_WIDE: + return "7.1_WIDE"; + case CHANNEL_LAYOUT_STEREO_DOWNMIX: + return "STEREO_DOWNMIX"; + case CHANNEL_LAYOUT_2POINT1: + return "2POINT1"; + case CHANNEL_LAYOUT_3_1: + return "3.1"; + case CHANNEL_LAYOUT_4_1: + return "4.1"; + case CHANNEL_LAYOUT_6_0: + return "6.0"; + case CHANNEL_LAYOUT_6_0_FRONT: + return "6.0_FRONT"; + case CHANNEL_LAYOUT_HEXAGONAL: + return "HEXAGONAL"; + case CHANNEL_LAYOUT_6_1: + 
return "6.1"; + case CHANNEL_LAYOUT_6_1_BACK: + return "6.1_BACK"; + case CHANNEL_LAYOUT_6_1_FRONT: + return "6.1_FRONT"; + case CHANNEL_LAYOUT_7_0_FRONT: + return "7.0_FRONT"; + case CHANNEL_LAYOUT_7_1_WIDE_BACK: + return "7.1_WIDE_BACK"; + case CHANNEL_LAYOUT_OCTAGONAL: + return "OCTAGONAL"; + case CHANNEL_LAYOUT_DISCRETE: + return "DISCRETE"; + case CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC: + return "STEREO_AND_KEYBOARD_MIC"; + case CHANNEL_LAYOUT_4_1_QUAD_SIDE: + return "4.1_QUAD_SIDE"; + case CHANNEL_LAYOUT_BITSTREAM: + return "BITSTREAM"; + } + RTC_NOTREACHED() << "Invalid channel layout provided: " << layout; + return ""; +} + +} // namespace webrtc diff --git a/api/audio/channel_layout.h b/api/audio/channel_layout.h new file mode 100644 index 0000000..175aee7 --- /dev/null +++ b/api/audio/channel_layout.h @@ -0,0 +1,165 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CHANNEL_LAYOUT_H_ +#define API_AUDIO_CHANNEL_LAYOUT_H_ + +namespace webrtc { + +// This file is derived from Chromium's base/channel_layout.h. + +// Enumerates the various representations of the ordering of audio channels. +// Logged to UMA, so never reuse a value, always add new/greater ones! 
+enum ChannelLayout { + CHANNEL_LAYOUT_NONE = 0, + CHANNEL_LAYOUT_UNSUPPORTED = 1, + + // Front C + CHANNEL_LAYOUT_MONO = 2, + + // Front L, Front R + CHANNEL_LAYOUT_STEREO = 3, + + // Front L, Front R, Back C + CHANNEL_LAYOUT_2_1 = 4, + + // Front L, Front R, Front C + CHANNEL_LAYOUT_SURROUND = 5, + + // Front L, Front R, Front C, Back C + CHANNEL_LAYOUT_4_0 = 6, + + // Front L, Front R, Side L, Side R + CHANNEL_LAYOUT_2_2 = 7, + + // Front L, Front R, Back L, Back R + CHANNEL_LAYOUT_QUAD = 8, + + // Front L, Front R, Front C, Side L, Side R + CHANNEL_LAYOUT_5_0 = 9, + + // Front L, Front R, Front C, LFE, Side L, Side R + CHANNEL_LAYOUT_5_1 = 10, + + // Front L, Front R, Front C, Back L, Back R + CHANNEL_LAYOUT_5_0_BACK = 11, + + // Front L, Front R, Front C, LFE, Back L, Back R + CHANNEL_LAYOUT_5_1_BACK = 12, + + // Front L, Front R, Front C, Side L, Side R, Back L, Back R + CHANNEL_LAYOUT_7_0 = 13, + + // Front L, Front R, Front C, LFE, Side L, Side R, Back L, Back R + CHANNEL_LAYOUT_7_1 = 14, + + // Front L, Front R, Front C, LFE, Side L, Side R, Front LofC, Front RofC + CHANNEL_LAYOUT_7_1_WIDE = 15, + + // Stereo L, Stereo R + CHANNEL_LAYOUT_STEREO_DOWNMIX = 16, + + // Stereo L, Stereo R, LFE + CHANNEL_LAYOUT_2POINT1 = 17, + + // Stereo L, Stereo R, Front C, LFE + CHANNEL_LAYOUT_3_1 = 18, + + // Stereo L, Stereo R, Front C, Rear C, LFE + CHANNEL_LAYOUT_4_1 = 19, + + // Stereo L, Stereo R, Front C, Side L, Side R, Back C + CHANNEL_LAYOUT_6_0 = 20, + + // Stereo L, Stereo R, Side L, Side R, Front LofC, Front RofC + CHANNEL_LAYOUT_6_0_FRONT = 21, + + // Stereo L, Stereo R, Front C, Rear L, Rear R, Rear C + CHANNEL_LAYOUT_HEXAGONAL = 22, + + // Stereo L, Stereo R, Front C, LFE, Side L, Side R, Rear Center + CHANNEL_LAYOUT_6_1 = 23, + + // Stereo L, Stereo R, Front C, LFE, Back L, Back R, Rear Center + CHANNEL_LAYOUT_6_1_BACK = 24, + + // Stereo L, Stereo R, Side L, Side R, Front LofC, Front RofC, LFE + CHANNEL_LAYOUT_6_1_FRONT = 25, + + // Front L, Front R, Front 
C, Side L, Side R, Front LofC, Front RofC + CHANNEL_LAYOUT_7_0_FRONT = 26, + + // Front L, Front R, Front C, LFE, Back L, Back R, Front LofC, Front RofC + CHANNEL_LAYOUT_7_1_WIDE_BACK = 27, + + // Front L, Front R, Front C, Side L, Side R, Rear L, Back R, Back C. + CHANNEL_LAYOUT_OCTAGONAL = 28, + + // Channels are not explicitly mapped to speakers. + CHANNEL_LAYOUT_DISCRETE = 29, + + // Front L, Front R, Front C. Front C contains the keyboard mic audio. This + // layout is only intended for input for WebRTC. The Front C channel + // is stripped away in the WebRTC audio input pipeline and never seen outside + // of that. + CHANNEL_LAYOUT_STEREO_AND_KEYBOARD_MIC = 30, + + // Front L, Front R, Side L, Side R, LFE + CHANNEL_LAYOUT_4_1_QUAD_SIDE = 31, + + // Actual channel layout is specified in the bitstream and the actual channel + // count is unknown at Chromium media pipeline level (useful for audio + // pass-through mode). + CHANNEL_LAYOUT_BITSTREAM = 32, + + // Max value, must always equal the largest entry ever logged. + CHANNEL_LAYOUT_MAX = CHANNEL_LAYOUT_BITSTREAM +}; + +// Note: Do not reorder or reassign these values; other code depends on their +// ordering to operate correctly. E.g., CoreAudio channel layout computations. +enum Channels { + LEFT = 0, + RIGHT, + CENTER, + LFE, + BACK_LEFT, + BACK_RIGHT, + LEFT_OF_CENTER, + RIGHT_OF_CENTER, + BACK_CENTER, + SIDE_LEFT, + SIDE_RIGHT, + CHANNELS_MAX = + SIDE_RIGHT, // Must always equal the largest value ever logged. +}; + +// The maximum number of concurrently active channels for all possible layouts. +// ChannelLayoutToChannelCount() will never return a value higher than this. +constexpr int kMaxConcurrentChannels = 8; + +// Returns the expected channel position in an interleaved stream. Values of -1 +// mean the channel at that index is not used for that layout. Values range +// from 0 to ChannelLayoutToChannelCount(layout) - 1. 
+int ChannelOrder(ChannelLayout layout, Channels channel); + +// Returns the number of channels in a given ChannelLayout. +int ChannelLayoutToChannelCount(ChannelLayout layout); + +// Given the number of channels, return the best layout, +// or return CHANNEL_LAYOUT_UNSUPPORTED if there is no good match. +ChannelLayout GuessChannelLayout(int channels); + +// Returns a string representation of the channel layout. +const char* ChannelLayoutToString(ChannelLayout layout); + +} // namespace webrtc + +#endif // API_AUDIO_CHANNEL_LAYOUT_H_ diff --git a/api/audio/echo_canceller3_config.cc b/api/audio/echo_canceller3_config.cc new file mode 100644 index 0000000..aeb809e --- /dev/null +++ b/api/audio/echo_canceller3_config.cc @@ -0,0 +1,270 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/audio/echo_canceller3_config.h" + +#include +#include + +#include "rtc_base/checks.h" +#include "rtc_base/numerics/safe_minmax.h" + +namespace webrtc { +namespace { +bool Limit(float* value, float min, float max) { + float clamped = rtc::SafeClamp(*value, min, max); + clamped = std::isfinite(clamped) ? clamped : min; + bool res = *value == clamped; + *value = clamped; + return res; +} + +bool Limit(size_t* value, size_t min, size_t max) { + size_t clamped = rtc::SafeClamp(*value, min, max); + bool res = *value == clamped; + *value = clamped; + return res; +} + +bool Limit(int* value, int min, int max) { + int clamped = rtc::SafeClamp(*value, min, max); + bool res = *value == clamped; + *value = clamped; + return res; +} + +bool FloorLimit(size_t* value, size_t min) { + size_t clamped = *value >= min ? 
*value : min; + bool res = *value == clamped; + *value = clamped; + return res; +} + +} // namespace + +EchoCanceller3Config::EchoCanceller3Config() = default; +EchoCanceller3Config::EchoCanceller3Config(const EchoCanceller3Config& e) = + default; +EchoCanceller3Config& EchoCanceller3Config::operator=( + const EchoCanceller3Config& e) = default; +EchoCanceller3Config::Delay::Delay() = default; +EchoCanceller3Config::Delay::Delay(const EchoCanceller3Config::Delay& e) = + default; +EchoCanceller3Config::Delay& EchoCanceller3Config::Delay::operator=( + const Delay& e) = default; + +EchoCanceller3Config::EchoModel::EchoModel() = default; +EchoCanceller3Config::EchoModel::EchoModel( + const EchoCanceller3Config::EchoModel& e) = default; +EchoCanceller3Config::EchoModel& EchoCanceller3Config::EchoModel::operator=( + const EchoModel& e) = default; + +EchoCanceller3Config::Suppressor::Suppressor() = default; +EchoCanceller3Config::Suppressor::Suppressor( + const EchoCanceller3Config::Suppressor& e) = default; +EchoCanceller3Config::Suppressor& EchoCanceller3Config::Suppressor::operator=( + const Suppressor& e) = default; + +EchoCanceller3Config::Suppressor::MaskingThresholds::MaskingThresholds( + float enr_transparent, + float enr_suppress, + float emr_transparent) + : enr_transparent(enr_transparent), + enr_suppress(enr_suppress), + emr_transparent(emr_transparent) {} +EchoCanceller3Config::Suppressor::MaskingThresholds::MaskingThresholds( + const EchoCanceller3Config::Suppressor::MaskingThresholds& e) = default; +EchoCanceller3Config::Suppressor::MaskingThresholds& +EchoCanceller3Config::Suppressor::MaskingThresholds::operator=( + const MaskingThresholds& e) = default; + +EchoCanceller3Config::Suppressor::Tuning::Tuning(MaskingThresholds mask_lf, + MaskingThresholds mask_hf, + float max_inc_factor, + float max_dec_factor_lf) + : mask_lf(mask_lf), + mask_hf(mask_hf), + max_inc_factor(max_inc_factor), + max_dec_factor_lf(max_dec_factor_lf) {} 
+EchoCanceller3Config::Suppressor::Tuning::Tuning( + const EchoCanceller3Config::Suppressor::Tuning& e) = default; +EchoCanceller3Config::Suppressor::Tuning& +EchoCanceller3Config::Suppressor::Tuning::operator=(const Tuning& e) = default; + +bool EchoCanceller3Config::Validate(EchoCanceller3Config* config) { + RTC_DCHECK(config); + EchoCanceller3Config* c = config; + bool res = true; + + if (c->delay.down_sampling_factor != 4 && + c->delay.down_sampling_factor != 8) { + c->delay.down_sampling_factor = 4; + res = false; + } + + res = res & Limit(&c->delay.default_delay, 0, 5000); + res = res & Limit(&c->delay.num_filters, 0, 5000); + res = res & Limit(&c->delay.delay_headroom_samples, 0, 5000); + res = res & Limit(&c->delay.hysteresis_limit_blocks, 0, 5000); + res = res & Limit(&c->delay.fixed_capture_delay_samples, 0, 5000); + res = res & Limit(&c->delay.delay_estimate_smoothing, 0.f, 1.f); + res = res & Limit(&c->delay.delay_candidate_detection_threshold, 0.f, 1.f); + res = res & Limit(&c->delay.delay_selection_thresholds.initial, 1, 250); + res = res & Limit(&c->delay.delay_selection_thresholds.converged, 1, 250); + + res = res & FloorLimit(&c->filter.refined.length_blocks, 1); + res = res & Limit(&c->filter.refined.leakage_converged, 0.f, 1000.f); + res = res & Limit(&c->filter.refined.leakage_diverged, 0.f, 1000.f); + res = res & Limit(&c->filter.refined.error_floor, 0.f, 1000.f); + res = res & Limit(&c->filter.refined.error_ceil, 0.f, 100000000.f); + res = res & Limit(&c->filter.refined.noise_gate, 0.f, 100000000.f); + + res = res & FloorLimit(&c->filter.refined_initial.length_blocks, 1); + res = res & Limit(&c->filter.refined_initial.leakage_converged, 0.f, 1000.f); + res = res & Limit(&c->filter.refined_initial.leakage_diverged, 0.f, 1000.f); + res = res & Limit(&c->filter.refined_initial.error_floor, 0.f, 1000.f); + res = res & Limit(&c->filter.refined_initial.error_ceil, 0.f, 100000000.f); + res = res & Limit(&c->filter.refined_initial.noise_gate, 0.f, 
100000000.f); + + if (c->filter.refined.length_blocks < + c->filter.refined_initial.length_blocks) { + c->filter.refined_initial.length_blocks = c->filter.refined.length_blocks; + res = false; + } + + res = res & FloorLimit(&c->filter.coarse.length_blocks, 1); + res = res & Limit(&c->filter.coarse.rate, 0.f, 1.f); + res = res & Limit(&c->filter.coarse.noise_gate, 0.f, 100000000.f); + + res = res & FloorLimit(&c->filter.coarse_initial.length_blocks, 1); + res = res & Limit(&c->filter.coarse_initial.rate, 0.f, 1.f); + res = res & Limit(&c->filter.coarse_initial.noise_gate, 0.f, 100000000.f); + + if (c->filter.coarse.length_blocks < c->filter.coarse_initial.length_blocks) { + c->filter.coarse_initial.length_blocks = c->filter.coarse.length_blocks; + res = false; + } + + res = res & Limit(&c->filter.config_change_duration_blocks, 0, 100000); + res = res & Limit(&c->filter.initial_state_seconds, 0.f, 100.f); + + res = res & Limit(&c->erle.min, 1.f, 100000.f); + res = res & Limit(&c->erle.max_l, 1.f, 100000.f); + res = res & Limit(&c->erle.max_h, 1.f, 100000.f); + if (c->erle.min > c->erle.max_l || c->erle.min > c->erle.max_h) { + c->erle.min = std::min(c->erle.max_l, c->erle.max_h); + res = false; + } + res = res & Limit(&c->erle.num_sections, 1, c->filter.refined.length_blocks); + + res = res & Limit(&c->ep_strength.default_gain, 0.f, 1000000.f); + res = res & Limit(&c->ep_strength.default_len, -1.f, 1.f); + + res = + res & Limit(&c->echo_audibility.low_render_limit, 0.f, 32768.f * 32768.f); + res = res & + Limit(&c->echo_audibility.normal_render_limit, 0.f, 32768.f * 32768.f); + res = res & Limit(&c->echo_audibility.floor_power, 0.f, 32768.f * 32768.f); + res = res & Limit(&c->echo_audibility.audibility_threshold_lf, 0.f, + 32768.f * 32768.f); + res = res & Limit(&c->echo_audibility.audibility_threshold_mf, 0.f, + 32768.f * 32768.f); + res = res & Limit(&c->echo_audibility.audibility_threshold_hf, 0.f, + 32768.f * 32768.f); + + res = res & + 
Limit(&c->render_levels.active_render_limit, 0.f, 32768.f * 32768.f); + res = res & Limit(&c->render_levels.poor_excitation_render_limit, 0.f, + 32768.f * 32768.f); + res = res & Limit(&c->render_levels.poor_excitation_render_limit_ds8, 0.f, + 32768.f * 32768.f); + + res = res & Limit(&c->echo_model.noise_floor_hold, 0, 1000); + res = res & Limit(&c->echo_model.min_noise_floor_power, 0, 2000000.f); + res = res & Limit(&c->echo_model.stationary_gate_slope, 0, 1000000.f); + res = res & Limit(&c->echo_model.noise_gate_power, 0, 1000000.f); + res = res & Limit(&c->echo_model.noise_gate_slope, 0, 1000000.f); + res = res & Limit(&c->echo_model.render_pre_window_size, 0, 100); + res = res & Limit(&c->echo_model.render_post_window_size, 0, 100); + + res = res & Limit(&c->comfort_noise.noise_floor_dbfs, -200.f, 0.f); + + res = res & Limit(&c->suppressor.nearend_average_blocks, 1, 5000); + + res = res & + Limit(&c->suppressor.normal_tuning.mask_lf.enr_transparent, 0.f, 100.f); + res = res & + Limit(&c->suppressor.normal_tuning.mask_lf.enr_suppress, 0.f, 100.f); + res = res & + Limit(&c->suppressor.normal_tuning.mask_lf.emr_transparent, 0.f, 100.f); + res = res & + Limit(&c->suppressor.normal_tuning.mask_hf.enr_transparent, 0.f, 100.f); + res = res & + Limit(&c->suppressor.normal_tuning.mask_hf.enr_suppress, 0.f, 100.f); + res = res & + Limit(&c->suppressor.normal_tuning.mask_hf.emr_transparent, 0.f, 100.f); + res = res & Limit(&c->suppressor.normal_tuning.max_inc_factor, 0.f, 100.f); + res = res & Limit(&c->suppressor.normal_tuning.max_dec_factor_lf, 0.f, 100.f); + + res = res & Limit(&c->suppressor.nearend_tuning.mask_lf.enr_transparent, 0.f, + 100.f); + res = res & + Limit(&c->suppressor.nearend_tuning.mask_lf.enr_suppress, 0.f, 100.f); + res = res & Limit(&c->suppressor.nearend_tuning.mask_lf.emr_transparent, 0.f, + 100.f); + res = res & Limit(&c->suppressor.nearend_tuning.mask_hf.enr_transparent, 0.f, + 100.f); + res = res & + 
Limit(&c->suppressor.nearend_tuning.mask_hf.enr_suppress, 0.f, 100.f); + res = res & Limit(&c->suppressor.nearend_tuning.mask_hf.emr_transparent, 0.f, + 100.f); + res = res & Limit(&c->suppressor.nearend_tuning.max_inc_factor, 0.f, 100.f); + res = + res & Limit(&c->suppressor.nearend_tuning.max_dec_factor_lf, 0.f, 100.f); + + res = res & Limit(&c->suppressor.dominant_nearend_detection.enr_threshold, + 0.f, 1000000.f); + res = res & Limit(&c->suppressor.dominant_nearend_detection.snr_threshold, + 0.f, 1000000.f); + res = res & Limit(&c->suppressor.dominant_nearend_detection.hold_duration, 0, + 10000); + res = res & Limit(&c->suppressor.dominant_nearend_detection.trigger_threshold, + 0, 10000); + + res = res & + Limit(&c->suppressor.subband_nearend_detection.nearend_average_blocks, + 1, 1024); + res = + res & Limit(&c->suppressor.subband_nearend_detection.subband1.low, 0, 65); + res = res & Limit(&c->suppressor.subband_nearend_detection.subband1.high, + c->suppressor.subband_nearend_detection.subband1.low, 65); + res = + res & Limit(&c->suppressor.subband_nearend_detection.subband2.low, 0, 65); + res = res & Limit(&c->suppressor.subband_nearend_detection.subband2.high, + c->suppressor.subband_nearend_detection.subband2.low, 65); + res = res & Limit(&c->suppressor.subband_nearend_detection.nearend_threshold, + 0.f, 1.e24f); + res = res & Limit(&c->suppressor.subband_nearend_detection.snr_threshold, 0.f, + 1.e24f); + + res = res & Limit(&c->suppressor.high_bands_suppression.enr_threshold, 0.f, + 1000000.f); + res = res & Limit(&c->suppressor.high_bands_suppression.max_gain_during_echo, + 0.f, 1.f); + res = res & Limit(&c->suppressor.high_bands_suppression + .anti_howling_activation_threshold, + 0.f, 32768.f * 32768.f); + res = res & Limit(&c->suppressor.high_bands_suppression.anti_howling_gain, + 0.f, 1.f); + + res = res & Limit(&c->suppressor.floor_first_increase, 0.f, 1000000.f); + + return res; +} +} // namespace webrtc diff --git 
a/api/audio/echo_canceller3_config.h b/api/audio/echo_canceller3_config.h new file mode 100644 index 0000000..a505625 --- /dev/null +++ b/api/audio/echo_canceller3_config.h @@ -0,0 +1,227 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_ECHO_CANCELLER3_CONFIG_H_ +#define API_AUDIO_ECHO_CANCELLER3_CONFIG_H_ + +#include <stddef.h> // size_t + +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// Configuration struct for EchoCanceller3 +struct RTC_EXPORT EchoCanceller3Config { + // Checks and updates the config parameters to lie within (mostly) reasonable + // ranges. Returns true if and only if the config did not need to be changed.
+ static bool Validate(EchoCanceller3Config* config); + + EchoCanceller3Config(); + EchoCanceller3Config(const EchoCanceller3Config& e); + EchoCanceller3Config& operator=(const EchoCanceller3Config& other); + + struct Buffering { + size_t excess_render_detection_interval_blocks = 250; + size_t max_allowed_excess_render_blocks = 8; + } buffering; + + struct Delay { + Delay(); + Delay(const Delay& e); + Delay& operator=(const Delay& e); + size_t default_delay = 5; + size_t down_sampling_factor = 4; + size_t num_filters = 5; + size_t delay_headroom_samples = 32; + size_t hysteresis_limit_blocks = 1; + size_t fixed_capture_delay_samples = 0; + float delay_estimate_smoothing = 0.7f; + float delay_candidate_detection_threshold = 0.2f; + struct DelaySelectionThresholds { + int initial; + int converged; + } delay_selection_thresholds = {5, 20}; + bool use_external_delay_estimator = false; + bool log_warning_on_delay_changes = false; + struct AlignmentMixing { + bool downmix; + bool adaptive_selection; + float activity_power_threshold; + bool prefer_first_two_channels; + }; + AlignmentMixing render_alignment_mixing = {false, true, 10000.f, true}; + AlignmentMixing capture_alignment_mixing = {false, true, 10000.f, false}; + } delay; + + struct Filter { + struct RefinedConfiguration { + size_t length_blocks; + float leakage_converged; + float leakage_diverged; + float error_floor; + float error_ceil; + float noise_gate; + }; + + struct CoarseConfiguration { + size_t length_blocks; + float rate; + float noise_gate; + }; + + RefinedConfiguration refined = {13, 0.00005f, 0.05f, + 0.001f, 2.f, 20075344.f}; + CoarseConfiguration coarse = {13, 0.7f, 20075344.f}; + + RefinedConfiguration refined_initial = {12, 0.005f, 0.5f, + 0.001f, 2.f, 20075344.f}; + CoarseConfiguration coarse_initial = {12, 0.9f, 20075344.f}; + + size_t config_change_duration_blocks = 250; + float initial_state_seconds = 2.5f; + bool conservative_initial_phase = false; + bool enable_coarse_filter_output_usage = 
true; + bool use_linear_filter = true; + bool export_linear_aec_output = false; + } filter; + + struct Erle { + float min = 1.f; + float max_l = 4.f; + float max_h = 1.5f; + bool onset_detection = true; + size_t num_sections = 1; + bool clamp_quality_estimate_to_zero = true; + bool clamp_quality_estimate_to_one = true; + } erle; + + struct EpStrength { + float default_gain = 1.f; + float default_len = 0.83f; + bool echo_can_saturate = true; + bool bounded_erl = false; + } ep_strength; + + struct EchoAudibility { + float low_render_limit = 4 * 64.f; + float normal_render_limit = 64.f; + float floor_power = 2 * 64.f; + float audibility_threshold_lf = 10; + float audibility_threshold_mf = 10; + float audibility_threshold_hf = 10; + bool use_stationarity_properties = false; + bool use_stationarity_properties_at_init = false; + } echo_audibility; + + struct RenderLevels { + float active_render_limit = 100.f; + float poor_excitation_render_limit = 150.f; + float poor_excitation_render_limit_ds8 = 20.f; + float render_power_gain_db = 0.f; + } render_levels; + + struct EchoRemovalControl { + bool has_clock_drift = false; + bool linear_and_stable_echo_path = false; + } echo_removal_control; + + struct EchoModel { + EchoModel(); + EchoModel(const EchoModel& e); + EchoModel& operator=(const EchoModel& e); + size_t noise_floor_hold = 50; + float min_noise_floor_power = 1638400.f; + float stationary_gate_slope = 10.f; + float noise_gate_power = 27509.42f; + float noise_gate_slope = 0.3f; + size_t render_pre_window_size = 1; + size_t render_post_window_size = 1; + } echo_model; + + struct ComfortNoise { + float noise_floor_dbfs = -96.03406f; + } comfort_noise; + + struct Suppressor { + Suppressor(); + Suppressor(const Suppressor& e); + Suppressor& operator=(const Suppressor& e); + + size_t nearend_average_blocks = 4; + + struct MaskingThresholds { + MaskingThresholds(float enr_transparent, + float enr_suppress, + float emr_transparent); + MaskingThresholds(const 
MaskingThresholds& e); + MaskingThresholds& operator=(const MaskingThresholds& e); + float enr_transparent; + float enr_suppress; + float emr_transparent; + }; + + struct Tuning { + Tuning(MaskingThresholds mask_lf, + MaskingThresholds mask_hf, + float max_inc_factor, + float max_dec_factor_lf); + Tuning(const Tuning& e); + Tuning& operator=(const Tuning& e); + MaskingThresholds mask_lf; + MaskingThresholds mask_hf; + float max_inc_factor; + float max_dec_factor_lf; + }; + + Tuning normal_tuning = Tuning(MaskingThresholds(.3f, .4f, .3f), + MaskingThresholds(.07f, .1f, .3f), + 2.0f, + 0.25f); + Tuning nearend_tuning = Tuning(MaskingThresholds(1.09f, 1.1f, .3f), + MaskingThresholds(.1f, .3f, .3f), + 2.0f, + 0.25f); + + struct DominantNearendDetection { + float enr_threshold = .25f; + float enr_exit_threshold = 10.f; + float snr_threshold = 30.f; + int hold_duration = 50; + int trigger_threshold = 12; + bool use_during_initial_phase = true; + } dominant_nearend_detection; + + struct SubbandNearendDetection { + size_t nearend_average_blocks = 1; + struct SubbandRegion { + size_t low; + size_t high; + }; + SubbandRegion subband1 = {1, 1}; + SubbandRegion subband2 = {1, 1}; + float nearend_threshold = 1.f; + float snr_threshold = 1.f; + } subband_nearend_detection; + + bool use_subband_nearend_detection = false; + + struct HighBandsSuppression { + float enr_threshold = 1.f; + float max_gain_during_echo = 1.f; + float anti_howling_activation_threshold = 25.f; + float anti_howling_gain = 0.01f; + } high_bands_suppression; + + float floor_first_increase = 0.00001f; + } suppressor; +}; +} // namespace webrtc + +#endif // API_AUDIO_ECHO_CANCELLER3_CONFIG_H_ diff --git a/api/audio/echo_canceller3_config_json.cc b/api/audio/echo_canceller3_config_json.cc new file mode 100644 index 0000000..f5c1249 --- /dev/null +++ b/api/audio/echo_canceller3_config_json.cc @@ -0,0 +1,682 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/audio/echo_canceller3_config_json.h" + +#include + +#include +#include + +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/strings/json.h" +#include "rtc_base/strings/string_builder.h" + +namespace webrtc { +namespace { +void ReadParam(const Json::Value& root, std::string param_name, bool* param) { + RTC_DCHECK(param); + bool v; + if (rtc::GetBoolFromJsonObject(root, param_name, &v)) { + *param = v; + } +} + +void ReadParam(const Json::Value& root, std::string param_name, size_t* param) { + RTC_DCHECK(param); + int v; + if (rtc::GetIntFromJsonObject(root, param_name, &v) && v >= 0) { + *param = v; + } +} + +void ReadParam(const Json::Value& root, std::string param_name, int* param) { + RTC_DCHECK(param); + int v; + if (rtc::GetIntFromJsonObject(root, param_name, &v)) { + *param = v; + } +} + +void ReadParam(const Json::Value& root, std::string param_name, float* param) { + RTC_DCHECK(param); + double v; + if (rtc::GetDoubleFromJsonObject(root, param_name, &v)) { + *param = static_cast(v); + } +} + +void ReadParam(const Json::Value& root, + std::string param_name, + EchoCanceller3Config::Filter::RefinedConfiguration* param) { + RTC_DCHECK(param); + Json::Value json_array; + if (rtc::GetValueFromJsonObject(root, param_name, &json_array)) { + std::vector v; + rtc::JsonArrayToDoubleVector(json_array, &v); + if (v.size() != 6) { + RTC_LOG(LS_ERROR) << "Incorrect array size for " << param_name; + return; + } + param->length_blocks = static_cast(v[0]); + param->leakage_converged = static_cast(v[1]); + param->leakage_diverged = static_cast(v[2]); + param->error_floor = static_cast(v[3]); + 
param->error_ceil = static_cast(v[4]); + param->noise_gate = static_cast(v[5]); + } +} + +void ReadParam(const Json::Value& root, + std::string param_name, + EchoCanceller3Config::Filter::CoarseConfiguration* param) { + RTC_DCHECK(param); + Json::Value json_array; + if (rtc::GetValueFromJsonObject(root, param_name, &json_array)) { + std::vector v; + rtc::JsonArrayToDoubleVector(json_array, &v); + if (v.size() != 3) { + RTC_LOG(LS_ERROR) << "Incorrect array size for " << param_name; + return; + } + param->length_blocks = static_cast(v[0]); + param->rate = static_cast(v[1]); + param->noise_gate = static_cast(v[2]); + } +} + +void ReadParam(const Json::Value& root, + std::string param_name, + EchoCanceller3Config::Delay::AlignmentMixing* param) { + RTC_DCHECK(param); + + Json::Value subsection; + if (rtc::GetValueFromJsonObject(root, param_name, &subsection)) { + ReadParam(subsection, "downmix", ¶m->downmix); + ReadParam(subsection, "adaptive_selection", ¶m->adaptive_selection); + ReadParam(subsection, "activity_power_threshold", + ¶m->activity_power_threshold); + ReadParam(subsection, "prefer_first_two_channels", + ¶m->prefer_first_two_channels); + } +} + +void ReadParam( + const Json::Value& root, + std::string param_name, + EchoCanceller3Config::Suppressor::SubbandNearendDetection::SubbandRegion* + param) { + RTC_DCHECK(param); + Json::Value json_array; + if (rtc::GetValueFromJsonObject(root, param_name, &json_array)) { + std::vector v; + rtc::JsonArrayToIntVector(json_array, &v); + if (v.size() != 2) { + RTC_LOG(LS_ERROR) << "Incorrect array size for " << param_name; + return; + } + param->low = static_cast(v[0]); + param->high = static_cast(v[1]); + } +} + +void ReadParam(const Json::Value& root, + std::string param_name, + EchoCanceller3Config::Suppressor::MaskingThresholds* param) { + RTC_DCHECK(param); + Json::Value json_array; + if (rtc::GetValueFromJsonObject(root, param_name, &json_array)) { + std::vector v; + rtc::JsonArrayToDoubleVector(json_array, &v); + 
if (v.size() != 3) { + RTC_LOG(LS_ERROR) << "Incorrect array size for " << param_name; + return; + } + param->enr_transparent = static_cast(v[0]); + param->enr_suppress = static_cast(v[1]); + param->emr_transparent = static_cast(v[2]); + } +} +} // namespace + +void Aec3ConfigFromJsonString(absl::string_view json_string, + EchoCanceller3Config* config, + bool* parsing_successful) { + RTC_DCHECK(config); + RTC_DCHECK(parsing_successful); + EchoCanceller3Config& cfg = *config; + cfg = EchoCanceller3Config(); + *parsing_successful = true; + + Json::Value root; + bool success = Json::Reader().parse(std::string(json_string), root); + if (!success) { + RTC_LOG(LS_ERROR) << "Incorrect JSON format: " << json_string; + *parsing_successful = false; + return; + } + + Json::Value aec3_root; + success = rtc::GetValueFromJsonObject(root, "aec3", &aec3_root); + if (!success) { + RTC_LOG(LS_ERROR) << "Missing AEC3 config field: " << json_string; + *parsing_successful = false; + return; + } + + Json::Value section; + if (rtc::GetValueFromJsonObject(aec3_root, "buffering", §ion)) { + ReadParam(section, "excess_render_detection_interval_blocks", + &cfg.buffering.excess_render_detection_interval_blocks); + ReadParam(section, "max_allowed_excess_render_blocks", + &cfg.buffering.max_allowed_excess_render_blocks); + } + + if (rtc::GetValueFromJsonObject(aec3_root, "delay", §ion)) { + ReadParam(section, "default_delay", &cfg.delay.default_delay); + ReadParam(section, "down_sampling_factor", &cfg.delay.down_sampling_factor); + ReadParam(section, "num_filters", &cfg.delay.num_filters); + ReadParam(section, "delay_headroom_samples", + &cfg.delay.delay_headroom_samples); + ReadParam(section, "hysteresis_limit_blocks", + &cfg.delay.hysteresis_limit_blocks); + ReadParam(section, "fixed_capture_delay_samples", + &cfg.delay.fixed_capture_delay_samples); + ReadParam(section, "delay_estimate_smoothing", + &cfg.delay.delay_estimate_smoothing); + ReadParam(section, 
"delay_candidate_detection_threshold", + &cfg.delay.delay_candidate_detection_threshold); + + Json::Value subsection; + if (rtc::GetValueFromJsonObject(section, "delay_selection_thresholds", + &subsection)) { + ReadParam(subsection, "initial", + &cfg.delay.delay_selection_thresholds.initial); + ReadParam(subsection, "converged", + &cfg.delay.delay_selection_thresholds.converged); + } + + ReadParam(section, "use_external_delay_estimator", + &cfg.delay.use_external_delay_estimator); + ReadParam(section, "log_warning_on_delay_changes", + &cfg.delay.log_warning_on_delay_changes); + + ReadParam(section, "render_alignment_mixing", + &cfg.delay.render_alignment_mixing); + ReadParam(section, "capture_alignment_mixing", + &cfg.delay.capture_alignment_mixing); + } + + if (rtc::GetValueFromJsonObject(aec3_root, "filter", §ion)) { + ReadParam(section, "refined", &cfg.filter.refined); + ReadParam(section, "coarse", &cfg.filter.coarse); + ReadParam(section, "refined_initial", &cfg.filter.refined_initial); + ReadParam(section, "coarse_initial", &cfg.filter.coarse_initial); + ReadParam(section, "config_change_duration_blocks", + &cfg.filter.config_change_duration_blocks); + ReadParam(section, "initial_state_seconds", + &cfg.filter.initial_state_seconds); + ReadParam(section, "conservative_initial_phase", + &cfg.filter.conservative_initial_phase); + ReadParam(section, "enable_coarse_filter_output_usage", + &cfg.filter.enable_coarse_filter_output_usage); + ReadParam(section, "use_linear_filter", &cfg.filter.use_linear_filter); + ReadParam(section, "export_linear_aec_output", + &cfg.filter.export_linear_aec_output); + } + + if (rtc::GetValueFromJsonObject(aec3_root, "erle", §ion)) { + ReadParam(section, "min", &cfg.erle.min); + ReadParam(section, "max_l", &cfg.erle.max_l); + ReadParam(section, "max_h", &cfg.erle.max_h); + ReadParam(section, "onset_detection", &cfg.erle.onset_detection); + ReadParam(section, "num_sections", &cfg.erle.num_sections); + ReadParam(section, 
"clamp_quality_estimate_to_zero", + &cfg.erle.clamp_quality_estimate_to_zero); + ReadParam(section, "clamp_quality_estimate_to_one", + &cfg.erle.clamp_quality_estimate_to_one); + } + + if (rtc::GetValueFromJsonObject(aec3_root, "ep_strength", §ion)) { + ReadParam(section, "default_gain", &cfg.ep_strength.default_gain); + ReadParam(section, "default_len", &cfg.ep_strength.default_len); + ReadParam(section, "echo_can_saturate", &cfg.ep_strength.echo_can_saturate); + ReadParam(section, "bounded_erl", &cfg.ep_strength.bounded_erl); + } + + if (rtc::GetValueFromJsonObject(aec3_root, "echo_audibility", §ion)) { + ReadParam(section, "low_render_limit", + &cfg.echo_audibility.low_render_limit); + ReadParam(section, "normal_render_limit", + &cfg.echo_audibility.normal_render_limit); + + ReadParam(section, "floor_power", &cfg.echo_audibility.floor_power); + ReadParam(section, "audibility_threshold_lf", + &cfg.echo_audibility.audibility_threshold_lf); + ReadParam(section, "audibility_threshold_mf", + &cfg.echo_audibility.audibility_threshold_mf); + ReadParam(section, "audibility_threshold_hf", + &cfg.echo_audibility.audibility_threshold_hf); + ReadParam(section, "use_stationarity_properties", + &cfg.echo_audibility.use_stationarity_properties); + ReadParam(section, "use_stationarity_properties_at_init", + &cfg.echo_audibility.use_stationarity_properties_at_init); + } + + if (rtc::GetValueFromJsonObject(aec3_root, "render_levels", §ion)) { + ReadParam(section, "active_render_limit", + &cfg.render_levels.active_render_limit); + ReadParam(section, "poor_excitation_render_limit", + &cfg.render_levels.poor_excitation_render_limit); + ReadParam(section, "poor_excitation_render_limit_ds8", + &cfg.render_levels.poor_excitation_render_limit_ds8); + ReadParam(section, "render_power_gain_db", + &cfg.render_levels.render_power_gain_db); + } + + if (rtc::GetValueFromJsonObject(aec3_root, "echo_removal_control", + §ion)) { + ReadParam(section, "has_clock_drift", + 
&cfg.echo_removal_control.has_clock_drift); + ReadParam(section, "linear_and_stable_echo_path", + &cfg.echo_removal_control.linear_and_stable_echo_path); + } + + if (rtc::GetValueFromJsonObject(aec3_root, "echo_model", §ion)) { + Json::Value subsection; + ReadParam(section, "noise_floor_hold", &cfg.echo_model.noise_floor_hold); + ReadParam(section, "min_noise_floor_power", + &cfg.echo_model.min_noise_floor_power); + ReadParam(section, "stationary_gate_slope", + &cfg.echo_model.stationary_gate_slope); + ReadParam(section, "noise_gate_power", &cfg.echo_model.noise_gate_power); + ReadParam(section, "noise_gate_slope", &cfg.echo_model.noise_gate_slope); + ReadParam(section, "render_pre_window_size", + &cfg.echo_model.render_pre_window_size); + ReadParam(section, "render_post_window_size", + &cfg.echo_model.render_post_window_size); + } + + if (rtc::GetValueFromJsonObject(aec3_root, "comfort_noise", §ion)) { + ReadParam(section, "noise_floor_dbfs", &cfg.comfort_noise.noise_floor_dbfs); + } + + Json::Value subsection; + if (rtc::GetValueFromJsonObject(aec3_root, "suppressor", §ion)) { + ReadParam(section, "nearend_average_blocks", + &cfg.suppressor.nearend_average_blocks); + + if (rtc::GetValueFromJsonObject(section, "normal_tuning", &subsection)) { + ReadParam(subsection, "mask_lf", &cfg.suppressor.normal_tuning.mask_lf); + ReadParam(subsection, "mask_hf", &cfg.suppressor.normal_tuning.mask_hf); + ReadParam(subsection, "max_inc_factor", + &cfg.suppressor.normal_tuning.max_inc_factor); + ReadParam(subsection, "max_dec_factor_lf", + &cfg.suppressor.normal_tuning.max_dec_factor_lf); + } + + if (rtc::GetValueFromJsonObject(section, "nearend_tuning", &subsection)) { + ReadParam(subsection, "mask_lf", &cfg.suppressor.nearend_tuning.mask_lf); + ReadParam(subsection, "mask_hf", &cfg.suppressor.nearend_tuning.mask_hf); + ReadParam(subsection, "max_inc_factor", + &cfg.suppressor.nearend_tuning.max_inc_factor); + ReadParam(subsection, "max_dec_factor_lf", + 
&cfg.suppressor.nearend_tuning.max_dec_factor_lf); + } + + if (rtc::GetValueFromJsonObject(section, "dominant_nearend_detection", + &subsection)) { + ReadParam(subsection, "enr_threshold", + &cfg.suppressor.dominant_nearend_detection.enr_threshold); + ReadParam(subsection, "enr_exit_threshold", + &cfg.suppressor.dominant_nearend_detection.enr_exit_threshold); + ReadParam(subsection, "snr_threshold", + &cfg.suppressor.dominant_nearend_detection.snr_threshold); + ReadParam(subsection, "hold_duration", + &cfg.suppressor.dominant_nearend_detection.hold_duration); + ReadParam(subsection, "trigger_threshold", + &cfg.suppressor.dominant_nearend_detection.trigger_threshold); + ReadParam( + subsection, "use_during_initial_phase", + &cfg.suppressor.dominant_nearend_detection.use_during_initial_phase); + } + + if (rtc::GetValueFromJsonObject(section, "subband_nearend_detection", + &subsection)) { + ReadParam( + subsection, "nearend_average_blocks", + &cfg.suppressor.subband_nearend_detection.nearend_average_blocks); + ReadParam(subsection, "subband1", + &cfg.suppressor.subband_nearend_detection.subband1); + ReadParam(subsection, "subband2", + &cfg.suppressor.subband_nearend_detection.subband2); + ReadParam(subsection, "nearend_threshold", + &cfg.suppressor.subband_nearend_detection.nearend_threshold); + ReadParam(subsection, "snr_threshold", + &cfg.suppressor.subband_nearend_detection.snr_threshold); + } + + ReadParam(section, "use_subband_nearend_detection", + &cfg.suppressor.use_subband_nearend_detection); + + if (rtc::GetValueFromJsonObject(section, "high_bands_suppression", + &subsection)) { + ReadParam(subsection, "enr_threshold", + &cfg.suppressor.high_bands_suppression.enr_threshold); + ReadParam(subsection, "max_gain_during_echo", + &cfg.suppressor.high_bands_suppression.max_gain_during_echo); + ReadParam(subsection, "anti_howling_activation_threshold", + &cfg.suppressor.high_bands_suppression + .anti_howling_activation_threshold); + ReadParam(subsection, 
"anti_howling_gain", + &cfg.suppressor.high_bands_suppression.anti_howling_gain); + } + + ReadParam(section, "floor_first_increase", + &cfg.suppressor.floor_first_increase); + } +} + +EchoCanceller3Config Aec3ConfigFromJsonString(absl::string_view json_string) { + EchoCanceller3Config cfg; + bool not_used; + Aec3ConfigFromJsonString(json_string, &cfg, ¬_used); + return cfg; +} + +std::string Aec3ConfigToJsonString(const EchoCanceller3Config& config) { + rtc::StringBuilder ost; + ost << "{"; + ost << "\"aec3\": {"; + ost << "\"buffering\": {"; + ost << "\"excess_render_detection_interval_blocks\": " + << config.buffering.excess_render_detection_interval_blocks << ","; + ost << "\"max_allowed_excess_render_blocks\": " + << config.buffering.max_allowed_excess_render_blocks; + ost << "},"; + + ost << "\"delay\": {"; + ost << "\"default_delay\": " << config.delay.default_delay << ","; + ost << "\"down_sampling_factor\": " << config.delay.down_sampling_factor + << ","; + ost << "\"num_filters\": " << config.delay.num_filters << ","; + ost << "\"delay_headroom_samples\": " << config.delay.delay_headroom_samples + << ","; + ost << "\"hysteresis_limit_blocks\": " << config.delay.hysteresis_limit_blocks + << ","; + ost << "\"fixed_capture_delay_samples\": " + << config.delay.fixed_capture_delay_samples << ","; + ost << "\"delay_estimate_smoothing\": " + << config.delay.delay_estimate_smoothing << ","; + ost << "\"delay_candidate_detection_threshold\": " + << config.delay.delay_candidate_detection_threshold << ","; + + ost << "\"delay_selection_thresholds\": {"; + ost << "\"initial\": " << config.delay.delay_selection_thresholds.initial + << ","; + ost << "\"converged\": " << config.delay.delay_selection_thresholds.converged; + ost << "},"; + + ost << "\"use_external_delay_estimator\": " + << (config.delay.use_external_delay_estimator ? "true" : "false") << ","; + ost << "\"log_warning_on_delay_changes\": " + << (config.delay.log_warning_on_delay_changes ? 
"true" : "false") << ","; + + ost << "\"render_alignment_mixing\": {"; + ost << "\"downmix\": " + << (config.delay.render_alignment_mixing.downmix ? "true" : "false") + << ","; + ost << "\"adaptive_selection\": " + << (config.delay.render_alignment_mixing.adaptive_selection ? "true" + : "false") + << ","; + ost << "\"activity_power_threshold\": " + << config.delay.render_alignment_mixing.activity_power_threshold << ","; + ost << "\"prefer_first_two_channels\": " + << (config.delay.render_alignment_mixing.prefer_first_two_channels + ? "true" + : "false"); + ost << "},"; + + ost << "\"capture_alignment_mixing\": {"; + ost << "\"downmix\": " + << (config.delay.capture_alignment_mixing.downmix ? "true" : "false") + << ","; + ost << "\"adaptive_selection\": " + << (config.delay.capture_alignment_mixing.adaptive_selection ? "true" + : "false") + << ","; + ost << "\"activity_power_threshold\": " + << config.delay.capture_alignment_mixing.activity_power_threshold << ","; + ost << "\"prefer_first_two_channels\": " + << (config.delay.capture_alignment_mixing.prefer_first_two_channels + ? 
"true" + : "false"); + ost << "}"; + ost << "},"; + + ost << "\"filter\": {"; + + ost << "\"refined\": ["; + ost << config.filter.refined.length_blocks << ","; + ost << config.filter.refined.leakage_converged << ","; + ost << config.filter.refined.leakage_diverged << ","; + ost << config.filter.refined.error_floor << ","; + ost << config.filter.refined.error_ceil << ","; + ost << config.filter.refined.noise_gate; + ost << "],"; + + ost << "\"coarse\": ["; + ost << config.filter.coarse.length_blocks << ","; + ost << config.filter.coarse.rate << ","; + ost << config.filter.coarse.noise_gate; + ost << "],"; + + ost << "\"refined_initial\": ["; + ost << config.filter.refined_initial.length_blocks << ","; + ost << config.filter.refined_initial.leakage_converged << ","; + ost << config.filter.refined_initial.leakage_diverged << ","; + ost << config.filter.refined_initial.error_floor << ","; + ost << config.filter.refined_initial.error_ceil << ","; + ost << config.filter.refined_initial.noise_gate; + ost << "],"; + + ost << "\"coarse_initial\": ["; + ost << config.filter.coarse_initial.length_blocks << ","; + ost << config.filter.coarse_initial.rate << ","; + ost << config.filter.coarse_initial.noise_gate; + ost << "],"; + + ost << "\"config_change_duration_blocks\": " + << config.filter.config_change_duration_blocks << ","; + ost << "\"initial_state_seconds\": " << config.filter.initial_state_seconds + << ","; + ost << "\"conservative_initial_phase\": " + << (config.filter.conservative_initial_phase ? "true" : "false") << ","; + ost << "\"enable_coarse_filter_output_usage\": " + << (config.filter.enable_coarse_filter_output_usage ? "true" : "false") + << ","; + ost << "\"use_linear_filter\": " + << (config.filter.use_linear_filter ? "true" : "false") << ","; + ost << "\"export_linear_aec_output\": " + << (config.filter.export_linear_aec_output ? 
"true" : "false"); + + ost << "},"; + + ost << "\"erle\": {"; + ost << "\"min\": " << config.erle.min << ","; + ost << "\"max_l\": " << config.erle.max_l << ","; + ost << "\"max_h\": " << config.erle.max_h << ","; + ost << "\"onset_detection\": " + << (config.erle.onset_detection ? "true" : "false") << ","; + ost << "\"num_sections\": " << config.erle.num_sections << ","; + ost << "\"clamp_quality_estimate_to_zero\": " + << (config.erle.clamp_quality_estimate_to_zero ? "true" : "false") << ","; + ost << "\"clamp_quality_estimate_to_one\": " + << (config.erle.clamp_quality_estimate_to_one ? "true" : "false"); + ost << "},"; + + ost << "\"ep_strength\": {"; + ost << "\"default_gain\": " << config.ep_strength.default_gain << ","; + ost << "\"default_len\": " << config.ep_strength.default_len << ","; + ost << "\"echo_can_saturate\": " + << (config.ep_strength.echo_can_saturate ? "true" : "false") << ","; + ost << "\"bounded_erl\": " + << (config.ep_strength.bounded_erl ? "true" : "false"); + + ost << "},"; + + ost << "\"echo_audibility\": {"; + ost << "\"low_render_limit\": " << config.echo_audibility.low_render_limit + << ","; + ost << "\"normal_render_limit\": " + << config.echo_audibility.normal_render_limit << ","; + ost << "\"floor_power\": " << config.echo_audibility.floor_power << ","; + ost << "\"audibility_threshold_lf\": " + << config.echo_audibility.audibility_threshold_lf << ","; + ost << "\"audibility_threshold_mf\": " + << config.echo_audibility.audibility_threshold_mf << ","; + ost << "\"audibility_threshold_hf\": " + << config.echo_audibility.audibility_threshold_hf << ","; + ost << "\"use_stationarity_properties\": " + << (config.echo_audibility.use_stationarity_properties ? "true" : "false") + << ","; + ost << "\"use_stationarity_properties_at_init\": " + << (config.echo_audibility.use_stationarity_properties_at_init ? 
"true" + : "false"); + ost << "},"; + + ost << "\"render_levels\": {"; + ost << "\"active_render_limit\": " << config.render_levels.active_render_limit + << ","; + ost << "\"poor_excitation_render_limit\": " + << config.render_levels.poor_excitation_render_limit << ","; + ost << "\"poor_excitation_render_limit_ds8\": " + << config.render_levels.poor_excitation_render_limit_ds8 << ","; + ost << "\"render_power_gain_db\": " + << config.render_levels.render_power_gain_db; + ost << "},"; + + ost << "\"echo_removal_control\": {"; + ost << "\"has_clock_drift\": " + << (config.echo_removal_control.has_clock_drift ? "true" : "false") + << ","; + ost << "\"linear_and_stable_echo_path\": " + << (config.echo_removal_control.linear_and_stable_echo_path ? "true" + : "false"); + + ost << "},"; + + ost << "\"echo_model\": {"; + ost << "\"noise_floor_hold\": " << config.echo_model.noise_floor_hold << ","; + ost << "\"min_noise_floor_power\": " + << config.echo_model.min_noise_floor_power << ","; + ost << "\"stationary_gate_slope\": " + << config.echo_model.stationary_gate_slope << ","; + ost << "\"noise_gate_power\": " << config.echo_model.noise_gate_power << ","; + ost << "\"noise_gate_slope\": " << config.echo_model.noise_gate_slope << ","; + ost << "\"render_pre_window_size\": " + << config.echo_model.render_pre_window_size << ","; + ost << "\"render_post_window_size\": " + << config.echo_model.render_post_window_size; + ost << "},"; + + ost << "\"comfort_noise\": {"; + ost << "\"noise_floor_dbfs\": " << config.comfort_noise.noise_floor_dbfs; + ost << "},"; + + ost << "\"suppressor\": {"; + ost << "\"nearend_average_blocks\": " + << config.suppressor.nearend_average_blocks << ","; + ost << "\"normal_tuning\": {"; + ost << "\"mask_lf\": ["; + ost << config.suppressor.normal_tuning.mask_lf.enr_transparent << ","; + ost << config.suppressor.normal_tuning.mask_lf.enr_suppress << ","; + ost << config.suppressor.normal_tuning.mask_lf.emr_transparent; + ost << "],"; + ost << 
"\"mask_hf\": ["; + ost << config.suppressor.normal_tuning.mask_hf.enr_transparent << ","; + ost << config.suppressor.normal_tuning.mask_hf.enr_suppress << ","; + ost << config.suppressor.normal_tuning.mask_hf.emr_transparent; + ost << "],"; + ost << "\"max_inc_factor\": " + << config.suppressor.normal_tuning.max_inc_factor << ","; + ost << "\"max_dec_factor_lf\": " + << config.suppressor.normal_tuning.max_dec_factor_lf; + ost << "},"; + ost << "\"nearend_tuning\": {"; + ost << "\"mask_lf\": ["; + ost << config.suppressor.nearend_tuning.mask_lf.enr_transparent << ","; + ost << config.suppressor.nearend_tuning.mask_lf.enr_suppress << ","; + ost << config.suppressor.nearend_tuning.mask_lf.emr_transparent; + ost << "],"; + ost << "\"mask_hf\": ["; + ost << config.suppressor.nearend_tuning.mask_hf.enr_transparent << ","; + ost << config.suppressor.nearend_tuning.mask_hf.enr_suppress << ","; + ost << config.suppressor.nearend_tuning.mask_hf.emr_transparent; + ost << "],"; + ost << "\"max_inc_factor\": " + << config.suppressor.nearend_tuning.max_inc_factor << ","; + ost << "\"max_dec_factor_lf\": " + << config.suppressor.nearend_tuning.max_dec_factor_lf; + ost << "},"; + ost << "\"dominant_nearend_detection\": {"; + ost << "\"enr_threshold\": " + << config.suppressor.dominant_nearend_detection.enr_threshold << ","; + ost << "\"enr_exit_threshold\": " + << config.suppressor.dominant_nearend_detection.enr_exit_threshold << ","; + ost << "\"snr_threshold\": " + << config.suppressor.dominant_nearend_detection.snr_threshold << ","; + ost << "\"hold_duration\": " + << config.suppressor.dominant_nearend_detection.hold_duration << ","; + ost << "\"trigger_threshold\": " + << config.suppressor.dominant_nearend_detection.trigger_threshold << ","; + ost << "\"use_during_initial_phase\": " + << config.suppressor.dominant_nearend_detection.use_during_initial_phase; + ost << "},"; + ost << "\"subband_nearend_detection\": {"; + ost << "\"nearend_average_blocks\": " + << 
config.suppressor.subband_nearend_detection.nearend_average_blocks + << ","; + ost << "\"subband1\": ["; + ost << config.suppressor.subband_nearend_detection.subband1.low << ","; + ost << config.suppressor.subband_nearend_detection.subband1.high; + ost << "],"; + ost << "\"subband2\": ["; + ost << config.suppressor.subband_nearend_detection.subband2.low << ","; + ost << config.suppressor.subband_nearend_detection.subband2.high; + ost << "],"; + ost << "\"nearend_threshold\": " + << config.suppressor.subband_nearend_detection.nearend_threshold << ","; + ost << "\"snr_threshold\": " + << config.suppressor.subband_nearend_detection.snr_threshold; + ost << "},"; + ost << "\"use_subband_nearend_detection\": " + << config.suppressor.use_subband_nearend_detection << ","; + ost << "\"high_bands_suppression\": {"; + ost << "\"enr_threshold\": " + << config.suppressor.high_bands_suppression.enr_threshold << ","; + ost << "\"max_gain_during_echo\": " + << config.suppressor.high_bands_suppression.max_gain_during_echo << ","; + ost << "\"anti_howling_activation_threshold\": " + << config.suppressor.high_bands_suppression + .anti_howling_activation_threshold + << ","; + ost << "\"anti_howling_gain\": " + << config.suppressor.high_bands_suppression.anti_howling_gain; + ost << "},"; + ost << "\"floor_first_increase\": " << config.suppressor.floor_first_increase; + ost << "}"; + ost << "}"; + ost << "}"; + + return ost.Release(); +} +} // namespace webrtc diff --git a/api/audio/echo_canceller3_config_json.h b/api/audio/echo_canceller3_config_json.h new file mode 100644 index 0000000..ecee954 --- /dev/null +++ b/api/audio/echo_canceller3_config_json.h @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_ECHO_CANCELLER3_CONFIG_JSON_H_ +#define API_AUDIO_ECHO_CANCELLER3_CONFIG_JSON_H_ + +#include + +#include "absl/strings/string_view.h" +#include "api/audio/echo_canceller3_config.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { +// Parses a JSON-encoded string into an Aec3 config. Fields corresponds to +// substruct names, with the addition that there must be a top-level node +// "aec3". Produces default config values for anything that cannot be parsed +// from the string. If any error was found in the parsing, parsing_successful is +// set to false. +RTC_EXPORT void Aec3ConfigFromJsonString(absl::string_view json_string, + EchoCanceller3Config* config, + bool* parsing_successful); + +// To be deprecated. +// Parses a JSON-encoded string into an Aec3 config. Fields corresponds to +// substruct names, with the addition that there must be a top-level node +// "aec3". Returns default config values for anything that cannot be parsed from +// the string. +RTC_EXPORT EchoCanceller3Config +Aec3ConfigFromJsonString(absl::string_view json_string); + +// Encodes an Aec3 config in JSON format. Fields corresponds to substruct names, +// with the addition that the top-level node is named "aec3". +RTC_EXPORT std::string Aec3ConfigToJsonString( + const EchoCanceller3Config& config); + +} // namespace webrtc + +#endif // API_AUDIO_ECHO_CANCELLER3_CONFIG_JSON_H_ diff --git a/api/audio/echo_canceller3_factory.cc b/api/audio/echo_canceller3_factory.cc new file mode 100644 index 0000000..d65a726 --- /dev/null +++ b/api/audio/echo_canceller3_factory.cc @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/audio/echo_canceller3_factory.h" + +#include + +#include "modules/audio_processing/aec3/echo_canceller3.h" + +namespace webrtc { + +EchoCanceller3Factory::EchoCanceller3Factory() {} + +EchoCanceller3Factory::EchoCanceller3Factory(const EchoCanceller3Config& config) + : config_(config) {} + +std::unique_ptr EchoCanceller3Factory::Create( + int sample_rate_hz, + int num_render_channels, + int num_capture_channels) { + return std::make_unique( + config_, sample_rate_hz, num_render_channels, num_capture_channels); +} + +} // namespace webrtc diff --git a/api/audio/echo_canceller3_factory.h b/api/audio/echo_canceller3_factory.h new file mode 100644 index 0000000..8b53800 --- /dev/null +++ b/api/audio/echo_canceller3_factory.h @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_ECHO_CANCELLER3_FACTORY_H_ +#define API_AUDIO_ECHO_CANCELLER3_FACTORY_H_ + +#include + +#include "api/audio/echo_canceller3_config.h" +#include "api/audio/echo_control.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +class RTC_EXPORT EchoCanceller3Factory : public EchoControlFactory { + public: + // Factory producing EchoCanceller3 instances with the default configuration. + EchoCanceller3Factory(); + + // Factory producing EchoCanceller3 instances with the specified + // configuration. 
+ explicit EchoCanceller3Factory(const EchoCanceller3Config& config); + + // Creates an EchoCanceller3 with a specified channel count and sampling rate. + std::unique_ptr Create(int sample_rate_hz, + int num_render_channels, + int num_capture_channels) override; + + private: + const EchoCanceller3Config config_; +}; +} // namespace webrtc + +#endif // API_AUDIO_ECHO_CANCELLER3_FACTORY_H_ diff --git a/api/audio/echo_control.h b/api/audio/echo_control.h new file mode 100644 index 0000000..8d567bf --- /dev/null +++ b/api/audio/echo_control.h @@ -0,0 +1,68 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_ECHO_CONTROL_H_ +#define API_AUDIO_ECHO_CONTROL_H_ + +#include + +#include "rtc_base/checks.h" + +namespace webrtc { + +class AudioBuffer; + +// Interface for an acoustic echo cancellation (AEC) submodule. +class EchoControl { + public: + // Analysis (not changing) of the render signal. + virtual void AnalyzeRender(AudioBuffer* render) = 0; + + // Analysis (not changing) of the capture signal. + virtual void AnalyzeCapture(AudioBuffer* capture) = 0; + + // Processes the capture signal in order to remove the echo. + virtual void ProcessCapture(AudioBuffer* capture, bool level_change) = 0; + + // As above, but also returns the linear filter output. + virtual void ProcessCapture(AudioBuffer* capture, + AudioBuffer* linear_output, + bool level_change) = 0; + + struct Metrics { + double echo_return_loss; + double echo_return_loss_enhancement; + int delay_ms; + }; + + // Collect current metrics from the echo controller. 
+  virtual Metrics GetMetrics() const = 0;
+
+  // Provides an optional external estimate of the audio buffer delay.
+  virtual void SetAudioBufferDelay(int delay_ms) = 0;
+
+  // Returns whether the signal is altered.
+  virtual bool ActiveProcessing() const = 0;
+
+  virtual ~EchoControl() {}
+};
+
+// Interface for a factory that creates EchoControllers.
+class EchoControlFactory {
+ public:
+  virtual std::unique_ptr<EchoControl> Create(int sample_rate_hz,
+                                              int num_render_channels,
+                                              int num_capture_channels) = 0;
+
+  virtual ~EchoControlFactory() = default;
+};
+}  // namespace webrtc
+
+#endif  // API_AUDIO_ECHO_CONTROL_H_
diff --git a/api/audio/echo_detector_creator.cc b/api/audio/echo_detector_creator.cc
new file mode 100644
index 0000000..4c3d9e6
--- /dev/null
+++ b/api/audio/echo_detector_creator.cc
@@ -0,0 +1,21 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "api/audio/echo_detector_creator.h"
+
+#include "modules/audio_processing/residual_echo_detector.h"
+#include "rtc_base/ref_counted_object.h"
+
+namespace webrtc {
+
+rtc::scoped_refptr<EchoDetector> CreateEchoDetector() {
+  return new rtc::RefCountedObject<ResidualEchoDetector>();
+}
+
+}  // namespace webrtc
diff --git a/api/audio/echo_detector_creator.h b/api/audio/echo_detector_creator.h
new file mode 100644
index 0000000..5ba171d
--- /dev/null
+++ b/api/audio/echo_detector_creator.h
@@ -0,0 +1,26 @@
+/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree.
An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_AUDIO_ECHO_DETECTOR_CREATOR_H_
+#define API_AUDIO_ECHO_DETECTOR_CREATOR_H_
+
+#include "api/scoped_refptr.h"
+#include "modules/audio_processing/include/audio_processing.h"
+
+namespace webrtc {
+
+// Returns an instance of the WebRTC implementation of a residual echo detector.
+// It can be provided to the webrtc::AudioProcessingBuilder to obtain the
+// usual residual echo metrics.
+rtc::scoped_refptr<EchoDetector> CreateEchoDetector();
+
+}  // namespace webrtc
+
+#endif  // API_AUDIO_ECHO_DETECTOR_CREATOR_H_
diff --git a/api/audio/test/BUILD.gn b/api/audio/test/BUILD.gn
new file mode 100644
index 0000000..d62baf1
--- /dev/null
+++ b/api/audio/test/BUILD.gn
@@ -0,0 +1,31 @@
+# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+ +import("../../../webrtc.gni") +if (is_android) { + import("//build/config/android/config.gni") + import("//build/config/android/rules.gni") +} + +if (rtc_include_tests) { + rtc_library("audio_api_unittests") { + testonly = true + sources = [ + "audio_frame_unittest.cc", + "echo_canceller3_config_json_unittest.cc", + "echo_canceller3_config_unittest.cc", + ] + deps = [ + "..:aec3_config", + "..:aec3_config_json", + "..:audio_frame_api", + "../../../rtc_base:rtc_base_approved", + "../../../test:test_support", + ] + } +} diff --git a/api/audio/test/audio_frame_unittest.cc b/api/audio/test/audio_frame_unittest.cc new file mode 100644 index 0000000..f8d3318 --- /dev/null +++ b/api/audio/test/audio_frame_unittest.cc @@ -0,0 +1,186 @@ +/* + * Copyright 2018 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/audio/audio_frame.h" + +#include +#include // memcmp + +#include "test/gtest.h" + +namespace webrtc { + +namespace { + +bool AllSamplesAre(int16_t sample, const AudioFrame& frame) { + const int16_t* frame_data = frame.data(); + for (size_t i = 0; i < frame.max_16bit_samples(); i++) { + if (frame_data[i] != sample) { + return false; + } + } + return true; +} + +constexpr uint32_t kTimestamp = 27; +constexpr int kSampleRateHz = 16000; +constexpr size_t kNumChannelsMono = 1; +constexpr size_t kNumChannelsStereo = 2; +constexpr size_t kNumChannels5_1 = 6; +constexpr size_t kSamplesPerChannel = kSampleRateHz / 100; + +} // namespace + +TEST(AudioFrameTest, FrameStartsMuted) { + AudioFrame frame; + EXPECT_TRUE(frame.muted()); + EXPECT_TRUE(AllSamplesAre(0, frame)); +} + +TEST(AudioFrameTest, UnmutedFrameIsInitiallyZeroed) { + AudioFrame frame; + frame.mutable_data(); + EXPECT_FALSE(frame.muted()); + EXPECT_TRUE(AllSamplesAre(0, frame)); +} + +TEST(AudioFrameTest, MutedFrameBufferIsZeroed) { + AudioFrame frame; + int16_t* frame_data = frame.mutable_data(); + for (size_t i = 0; i < frame.max_16bit_samples(); i++) { + frame_data[i] = 17; + } + ASSERT_TRUE(AllSamplesAre(17, frame)); + frame.Mute(); + EXPECT_TRUE(frame.muted()); + EXPECT_TRUE(AllSamplesAre(0, frame)); +} + +TEST(AudioFrameTest, UpdateFrameMono) { + AudioFrame frame; + int16_t samples[kNumChannelsMono * kSamplesPerChannel] = {17}; + frame.UpdateFrame(kTimestamp, samples, kSamplesPerChannel, kSampleRateHz, + AudioFrame::kPLC, AudioFrame::kVadActive, kNumChannelsMono); + + EXPECT_EQ(kTimestamp, frame.timestamp_); + EXPECT_EQ(kSamplesPerChannel, frame.samples_per_channel()); + EXPECT_EQ(kSampleRateHz, frame.sample_rate_hz()); + EXPECT_EQ(AudioFrame::kPLC, frame.speech_type_); + EXPECT_EQ(AudioFrame::kVadActive, frame.vad_activity_); + EXPECT_EQ(kNumChannelsMono, frame.num_channels()); + EXPECT_EQ(CHANNEL_LAYOUT_MONO, frame.channel_layout()); + + EXPECT_FALSE(frame.muted()); + EXPECT_EQ(0, 
memcmp(samples, frame.data(), sizeof(samples))); + + frame.UpdateFrame(kTimestamp, nullptr /* data*/, kSamplesPerChannel, + kSampleRateHz, AudioFrame::kPLC, AudioFrame::kVadActive, + kNumChannelsMono); + EXPECT_TRUE(frame.muted()); + EXPECT_TRUE(AllSamplesAre(0, frame)); +} + +TEST(AudioFrameTest, UpdateFrameMultiChannel) { + AudioFrame frame; + frame.UpdateFrame(kTimestamp, nullptr /* data */, kSamplesPerChannel, + kSampleRateHz, AudioFrame::kPLC, AudioFrame::kVadActive, + kNumChannelsStereo); + EXPECT_EQ(kSamplesPerChannel, frame.samples_per_channel()); + EXPECT_EQ(kNumChannelsStereo, frame.num_channels()); + EXPECT_EQ(CHANNEL_LAYOUT_STEREO, frame.channel_layout()); + + frame.UpdateFrame(kTimestamp, nullptr /* data */, kSamplesPerChannel, + kSampleRateHz, AudioFrame::kPLC, AudioFrame::kVadActive, + kNumChannels5_1); + EXPECT_EQ(kSamplesPerChannel, frame.samples_per_channel()); + EXPECT_EQ(kNumChannels5_1, frame.num_channels()); + EXPECT_EQ(CHANNEL_LAYOUT_5_1, frame.channel_layout()); +} + +TEST(AudioFrameTest, CopyFrom) { + AudioFrame frame1; + AudioFrame frame2; + + int16_t samples[kNumChannelsMono * kSamplesPerChannel] = {17}; + frame2.UpdateFrame(kTimestamp, samples, kSamplesPerChannel, kSampleRateHz, + AudioFrame::kPLC, AudioFrame::kVadActive, + kNumChannelsMono); + frame1.CopyFrom(frame2); + + EXPECT_EQ(frame2.timestamp_, frame1.timestamp_); + EXPECT_EQ(frame2.samples_per_channel_, frame1.samples_per_channel_); + EXPECT_EQ(frame2.sample_rate_hz_, frame1.sample_rate_hz_); + EXPECT_EQ(frame2.speech_type_, frame1.speech_type_); + EXPECT_EQ(frame2.vad_activity_, frame1.vad_activity_); + EXPECT_EQ(frame2.num_channels_, frame1.num_channels_); + + EXPECT_EQ(frame2.muted(), frame1.muted()); + EXPECT_EQ(0, memcmp(frame2.data(), frame1.data(), sizeof(samples))); + + frame2.UpdateFrame(kTimestamp, nullptr /* data */, kSamplesPerChannel, + kSampleRateHz, AudioFrame::kPLC, AudioFrame::kVadActive, + kNumChannelsMono); + frame1.CopyFrom(frame2); + + 
EXPECT_EQ(frame2.muted(), frame1.muted()); + EXPECT_EQ(0, memcmp(frame2.data(), frame1.data(), sizeof(samples))); +} + +TEST(AudioFrameTest, SwapFrames) { + AudioFrame frame1, frame2; + int16_t samples1[kNumChannelsMono * kSamplesPerChannel]; + for (size_t i = 0; i < kNumChannelsMono * kSamplesPerChannel; ++i) { + samples1[i] = i; + } + frame1.UpdateFrame(kTimestamp, samples1, kSamplesPerChannel, kSampleRateHz, + AudioFrame::kPLC, AudioFrame::kVadActive, + kNumChannelsMono); + frame1.set_absolute_capture_timestamp_ms(12345678); + const auto frame1_channel_layout = frame1.channel_layout(); + + int16_t samples2[(kNumChannelsMono + 1) * (kSamplesPerChannel + 1)]; + for (size_t i = 0; i < (kNumChannelsMono + 1) * (kSamplesPerChannel + 1); + ++i) { + samples2[i] = 1000 + i; + } + frame2.UpdateFrame(kTimestamp + 1, samples2, kSamplesPerChannel + 1, + kSampleRateHz + 1, AudioFrame::kNormalSpeech, + AudioFrame::kVadPassive, kNumChannelsMono + 1); + const auto frame2_channel_layout = frame2.channel_layout(); + + swap(frame1, frame2); + + EXPECT_EQ(kTimestamp + 1, frame1.timestamp_); + ASSERT_EQ(kSamplesPerChannel + 1, frame1.samples_per_channel_); + EXPECT_EQ(kSampleRateHz + 1, frame1.sample_rate_hz_); + EXPECT_EQ(AudioFrame::kNormalSpeech, frame1.speech_type_); + EXPECT_EQ(AudioFrame::kVadPassive, frame1.vad_activity_); + ASSERT_EQ(kNumChannelsMono + 1, frame1.num_channels_); + for (size_t i = 0; i < (kNumChannelsMono + 1) * (kSamplesPerChannel + 1); + ++i) { + EXPECT_EQ(samples2[i], frame1.data()[i]); + } + EXPECT_FALSE(frame1.absolute_capture_timestamp_ms()); + EXPECT_EQ(frame2_channel_layout, frame1.channel_layout()); + + EXPECT_EQ(kTimestamp, frame2.timestamp_); + ASSERT_EQ(kSamplesPerChannel, frame2.samples_per_channel_); + EXPECT_EQ(kSampleRateHz, frame2.sample_rate_hz_); + EXPECT_EQ(AudioFrame::kPLC, frame2.speech_type_); + EXPECT_EQ(AudioFrame::kVadActive, frame2.vad_activity_); + ASSERT_EQ(kNumChannelsMono, frame2.num_channels_); + for (size_t i = 0; i < 
kNumChannelsMono * kSamplesPerChannel; ++i) {
+    EXPECT_EQ(samples1[i], frame2.data()[i]);
+  }
+  EXPECT_EQ(12345678, frame2.absolute_capture_timestamp_ms());
+  EXPECT_EQ(frame1_channel_layout, frame2.channel_layout());
+}
+
+}  // namespace webrtc
diff --git a/api/audio/test/echo_canceller3_config_json_unittest.cc b/api/audio/test/echo_canceller3_config_json_unittest.cc
new file mode 100644
index 0000000..a149c17
--- /dev/null
+++ b/api/audio/test/echo_canceller3_config_json_unittest.cc
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2018 The WebRTC Project Authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/audio/echo_canceller3_config_json.h"
+
+#include "api/audio/echo_canceller3_config.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+TEST(EchoCanceller3JsonHelpers, ToStringAndParseJson) {
+  EchoCanceller3Config cfg;
+  cfg.delay.down_sampling_factor = 1u;
+  cfg.delay.log_warning_on_delay_changes = true;
+  cfg.filter.refined.error_floor = 2.f;
+  cfg.filter.coarse_initial.length_blocks = 3u;
+  cfg.comfort_noise.noise_floor_dbfs = 100.f;
+  cfg.suppressor.normal_tuning.mask_hf.enr_suppress = .5f;
+  cfg.suppressor.subband_nearend_detection.nearend_average_blocks = 3;
+  cfg.suppressor.subband_nearend_detection.subband1 = {1, 3};
+  cfg.suppressor.subband_nearend_detection.subband2 = {4, 5};
+  cfg.suppressor.subband_nearend_detection.nearend_threshold = 2.f;
+  cfg.suppressor.subband_nearend_detection.snr_threshold = 100.f;
+  std::string json_string = Aec3ConfigToJsonString(cfg);
+  EchoCanceller3Config cfg_transformed = Aec3ConfigFromJsonString(json_string);
+
+  // Expect unchanged values to remain default.
+ EXPECT_EQ(cfg.ep_strength.default_len, + cfg_transformed.ep_strength.default_len); + EXPECT_EQ(cfg.suppressor.normal_tuning.mask_lf.enr_suppress, + cfg_transformed.suppressor.normal_tuning.mask_lf.enr_suppress); + + // Expect changed values to carry through the transformation. + EXPECT_EQ(cfg.delay.down_sampling_factor, + cfg_transformed.delay.down_sampling_factor); + EXPECT_EQ(cfg.delay.log_warning_on_delay_changes, + cfg_transformed.delay.log_warning_on_delay_changes); + EXPECT_EQ(cfg.filter.coarse_initial.length_blocks, + cfg_transformed.filter.coarse_initial.length_blocks); + EXPECT_EQ(cfg.filter.refined.error_floor, + cfg_transformed.filter.refined.error_floor); + EXPECT_EQ(cfg.comfort_noise.noise_floor_dbfs, + cfg_transformed.comfort_noise.noise_floor_dbfs); + EXPECT_EQ(cfg.suppressor.normal_tuning.mask_hf.enr_suppress, + cfg_transformed.suppressor.normal_tuning.mask_hf.enr_suppress); + EXPECT_EQ(cfg.suppressor.subband_nearend_detection.nearend_average_blocks, + cfg_transformed.suppressor.subband_nearend_detection + .nearend_average_blocks); + EXPECT_EQ(cfg.suppressor.subband_nearend_detection.subband1.low, + cfg_transformed.suppressor.subband_nearend_detection.subband1.low); + EXPECT_EQ(cfg.suppressor.subband_nearend_detection.subband1.high, + cfg_transformed.suppressor.subband_nearend_detection.subband1.high); + EXPECT_EQ(cfg.suppressor.subband_nearend_detection.subband2.low, + cfg_transformed.suppressor.subband_nearend_detection.subband2.low); + EXPECT_EQ(cfg.suppressor.subband_nearend_detection.subband2.high, + cfg_transformed.suppressor.subband_nearend_detection.subband2.high); + EXPECT_EQ( + cfg.suppressor.subband_nearend_detection.nearend_threshold, + cfg_transformed.suppressor.subband_nearend_detection.nearend_threshold); + EXPECT_EQ(cfg.suppressor.subband_nearend_detection.snr_threshold, + cfg_transformed.suppressor.subband_nearend_detection.snr_threshold); +} +} // namespace webrtc diff --git a/api/audio/test/echo_canceller3_config_unittest.cc 
b/api/audio/test/echo_canceller3_config_unittest.cc new file mode 100644 index 0000000..91312a0 --- /dev/null +++ b/api/audio/test/echo_canceller3_config_unittest.cc @@ -0,0 +1,46 @@ +/* + * Copyright 2018 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio/echo_canceller3_config.h" + +#include "api/audio/echo_canceller3_config_json.h" +#include "test/gtest.h" + +namespace webrtc { + +TEST(EchoCanceller3Config, ValidConfigIsNotModified) { + EchoCanceller3Config config; + EXPECT_TRUE(EchoCanceller3Config::Validate(&config)); + EchoCanceller3Config default_config; + EXPECT_EQ(Aec3ConfigToJsonString(config), + Aec3ConfigToJsonString(default_config)); +} + +TEST(EchoCanceller3Config, InvalidConfigIsCorrected) { + // Change a parameter and validate. + EchoCanceller3Config config; + config.echo_model.min_noise_floor_power = -1600000.f; + EXPECT_FALSE(EchoCanceller3Config::Validate(&config)); + EXPECT_GE(config.echo_model.min_noise_floor_power, 0.f); + // Verify remaining parameters are unchanged. 
+ EchoCanceller3Config default_config; + config.echo_model.min_noise_floor_power = + default_config.echo_model.min_noise_floor_power; + EXPECT_EQ(Aec3ConfigToJsonString(config), + Aec3ConfigToJsonString(default_config)); +} + +TEST(EchoCanceller3Config, ValidatedConfigsAreValid) { + EchoCanceller3Config config; + config.delay.down_sampling_factor = 983; + EXPECT_FALSE(EchoCanceller3Config::Validate(&config)); + EXPECT_TRUE(EchoCanceller3Config::Validate(&config)); +} +} // namespace webrtc diff --git a/api/audio_codecs/BUILD.gn b/api/audio_codecs/BUILD.gn new file mode 100644 index 0000000..b6292de --- /dev/null +++ b/api/audio_codecs/BUILD.gn @@ -0,0 +1,146 @@ +# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../webrtc.gni") +if (is_android) { + import("//build/config/android/config.gni") + import("//build/config/android/rules.gni") +} + +rtc_library("audio_codecs_api") { + visibility = [ "*" ] + sources = [ + "audio_codec_pair_id.cc", + "audio_codec_pair_id.h", + "audio_decoder.cc", + "audio_decoder.h", + "audio_decoder_factory.h", + "audio_decoder_factory_template.h", + "audio_encoder.cc", + "audio_encoder.h", + "audio_encoder_factory.h", + "audio_encoder_factory_template.h", + "audio_format.cc", + "audio_format.h", + ] + deps = [ + "..:array_view", + "..:bitrate_allocation", + "..:scoped_refptr", + "../../rtc_base:checks", + "../../rtc_base:deprecation", + "../../rtc_base:rtc_base_approved", + "../../rtc_base:sanitizer", + "../../rtc_base/system:rtc_export", + "../units:time_delta", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("builtin_audio_decoder_factory") { + visibility = [ "*" ] + allow_poison = [ "audio_codecs" ] + sources = [ + "builtin_audio_decoder_factory.cc", + "builtin_audio_decoder_factory.h", + ] + deps = [ + ":audio_codecs_api", + "..:scoped_refptr", + "../../rtc_base:rtc_base_approved", + "L16:audio_decoder_L16", + "g711:audio_decoder_g711", + "g722:audio_decoder_g722", + "isac:audio_decoder_isac", + ] + defines = [] + if (rtc_include_ilbc) { + deps += [ "ilbc:audio_decoder_ilbc" ] + defines += [ "WEBRTC_USE_BUILTIN_ILBC=1" ] + } else { + defines += [ "WEBRTC_USE_BUILTIN_ILBC=0" ] + } + if (rtc_include_opus) { + deps += [ + "opus:audio_decoder_multiopus", + "opus:audio_decoder_opus", + ] + defines += [ "WEBRTC_USE_BUILTIN_OPUS=1" ] + } else { + defines += [ "WEBRTC_USE_BUILTIN_OPUS=0" ] + } +} + +rtc_library("builtin_audio_encoder_factory") { + visibility = [ "*" ] + allow_poison = [ "audio_codecs" ] + sources = [ + "builtin_audio_encoder_factory.cc", + "builtin_audio_encoder_factory.h", + ] + deps = [ + ":audio_codecs_api", + "..:scoped_refptr", + 
"../../rtc_base:rtc_base_approved", + "L16:audio_encoder_L16", + "g711:audio_encoder_g711", + "g722:audio_encoder_g722", + "isac:audio_encoder_isac", + ] + defines = [] + if (rtc_include_ilbc) { + deps += [ "ilbc:audio_encoder_ilbc" ] + defines += [ "WEBRTC_USE_BUILTIN_ILBC=1" ] + } else { + defines += [ "WEBRTC_USE_BUILTIN_ILBC=0" ] + } + if (rtc_include_opus) { + deps += [ + "opus:audio_encoder_multiopus", + "opus:audio_encoder_opus", + ] + defines += [ "WEBRTC_USE_BUILTIN_OPUS=1" ] + } else { + defines += [ "WEBRTC_USE_BUILTIN_OPUS=0" ] + } +} + +rtc_library("opus_audio_decoder_factory") { + visibility = [ "*" ] + allow_poison = [ "audio_codecs" ] + sources = [ + "opus_audio_decoder_factory.cc", + "opus_audio_decoder_factory.h", + ] + deps = [ + ":audio_codecs_api", + "..:scoped_refptr", + "../../rtc_base:rtc_base_approved", + "opus:audio_decoder_multiopus", + "opus:audio_decoder_opus", + ] +} + +rtc_library("opus_audio_encoder_factory") { + visibility = [ "*" ] + allow_poison = [ "audio_codecs" ] + sources = [ + "opus_audio_encoder_factory.cc", + "opus_audio_encoder_factory.h", + ] + deps = [ + ":audio_codecs_api", + "..:scoped_refptr", + "../../rtc_base:rtc_base_approved", + "opus:audio_encoder_multiopus", + "opus:audio_encoder_opus", + ] +} diff --git a/api/audio_codecs/L16/BUILD.gn b/api/audio_codecs/L16/BUILD.gn new file mode 100644 index 0000000..1f7a1e5 --- /dev/null +++ b/api/audio_codecs/L16/BUILD.gn @@ -0,0 +1,52 @@ +# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../../webrtc.gni") +if (is_android) { + import("//build/config/android/config.gni") + import("//build/config/android/rules.gni") +} + +rtc_library("audio_encoder_L16") { + visibility = [ "*" ] + poisonous = [ "audio_codecs" ] + sources = [ + "audio_encoder_L16.cc", + "audio_encoder_L16.h", + ] + deps = [ + "..:audio_codecs_api", + "../../../modules/audio_coding:pcm16b", + "../../../rtc_base:rtc_base_approved", + "../../../rtc_base:safe_minmax", + "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("audio_decoder_L16") { + visibility = [ "*" ] + poisonous = [ "audio_codecs" ] + sources = [ + "audio_decoder_L16.cc", + "audio_decoder_L16.h", + ] + deps = [ + "..:audio_codecs_api", + "../../../modules/audio_coding:pcm16b", + "../../../rtc_base:rtc_base_approved", + "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} diff --git a/api/audio_codecs/L16/audio_decoder_L16.cc b/api/audio_codecs/L16/audio_decoder_L16.cc new file mode 100644 index 0000000..57c9e76 --- /dev/null +++ b/api/audio_codecs/L16/audio_decoder_L16.cc @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/audio_codecs/L16/audio_decoder_L16.h" + +#include + +#include "absl/strings/match.h" +#include "modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h" +#include "modules/audio_coding/codecs/pcm16b/pcm16b_common.h" +#include "rtc_base/numerics/safe_conversions.h" + +namespace webrtc { + +absl::optional AudioDecoderL16::SdpToConfig( + const SdpAudioFormat& format) { + Config config; + config.sample_rate_hz = format.clockrate_hz; + config.num_channels = rtc::checked_cast(format.num_channels); + return absl::EqualsIgnoreCase(format.name, "L16") && config.IsOk() + ? absl::optional(config) + : absl::nullopt; +} + +void AudioDecoderL16::AppendSupportedDecoders( + std::vector* specs) { + Pcm16BAppendSupportedCodecSpecs(specs); +} + +std::unique_ptr AudioDecoderL16::MakeAudioDecoder( + const Config& config, + absl::optional /*codec_pair_id*/) { + return config.IsOk() ? std::make_unique( + config.sample_rate_hz, config.num_channels) + : nullptr; +} + +} // namespace webrtc diff --git a/api/audio_codecs/L16/audio_decoder_L16.h b/api/audio_codecs/L16/audio_decoder_L16.h new file mode 100644 index 0000000..f0be036 --- /dev/null +++ b/api/audio_codecs/L16/audio_decoder_L16.h @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_AUDIO_CODECS_L16_AUDIO_DECODER_L16_H_ +#define API_AUDIO_CODECS_L16_AUDIO_DECODER_L16_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_format.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// L16 decoder API for use as a template parameter to +// CreateAudioDecoderFactory<...>(). +struct RTC_EXPORT AudioDecoderL16 { + struct Config { + bool IsOk() const { + return (sample_rate_hz == 8000 || sample_rate_hz == 16000 || + sample_rate_hz == 32000 || sample_rate_hz == 48000) && + num_channels >= 1; + } + int sample_rate_hz = 8000; + int num_channels = 1; + }; + static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); + static void AppendSupportedDecoders(std::vector* specs); + static std::unique_ptr MakeAudioDecoder( + const Config& config, + absl::optional codec_pair_id = absl::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_L16_AUDIO_DECODER_L16_H_ diff --git a/api/audio_codecs/L16/audio_encoder_L16.cc b/api/audio_codecs/L16/audio_encoder_L16.cc new file mode 100644 index 0000000..507c8d7 --- /dev/null +++ b/api/audio_codecs/L16/audio_encoder_L16.cc @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/audio_codecs/L16/audio_encoder_L16.h" + +#include + +#include "absl/strings/match.h" +#include "modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h" +#include "modules/audio_coding/codecs/pcm16b/pcm16b_common.h" +#include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/numerics/safe_minmax.h" +#include "rtc_base/string_to_number.h" + +namespace webrtc { + +absl::optional AudioEncoderL16::SdpToConfig( + const SdpAudioFormat& format) { + if (!rtc::IsValueInRangeForNumericType(format.num_channels)) { + return absl::nullopt; + } + Config config; + config.sample_rate_hz = format.clockrate_hz; + config.num_channels = rtc::dchecked_cast(format.num_channels); + auto ptime_iter = format.parameters.find("ptime"); + if (ptime_iter != format.parameters.end()) { + const auto ptime = rtc::StringToNumber(ptime_iter->second); + if (ptime && *ptime > 0) { + config.frame_size_ms = rtc::SafeClamp(10 * (*ptime / 10), 10, 60); + } + } + return absl::EqualsIgnoreCase(format.name, "L16") && config.IsOk() + ? 
absl::optional(config) + : absl::nullopt; +} + +void AudioEncoderL16::AppendSupportedEncoders( + std::vector* specs) { + Pcm16BAppendSupportedCodecSpecs(specs); +} + +AudioCodecInfo AudioEncoderL16::QueryAudioEncoder( + const AudioEncoderL16::Config& config) { + RTC_DCHECK(config.IsOk()); + return {config.sample_rate_hz, + rtc::dchecked_cast(config.num_channels), + config.sample_rate_hz * config.num_channels * 16}; +} + +std::unique_ptr AudioEncoderL16::MakeAudioEncoder( + const AudioEncoderL16::Config& config, + int payload_type, + absl::optional /*codec_pair_id*/) { + RTC_DCHECK(config.IsOk()); + AudioEncoderPcm16B::Config c; + c.sample_rate_hz = config.sample_rate_hz; + c.num_channels = config.num_channels; + c.frame_size_ms = config.frame_size_ms; + c.payload_type = payload_type; + return std::make_unique(c); +} + +} // namespace webrtc diff --git a/api/audio_codecs/L16/audio_encoder_L16.h b/api/audio_codecs/L16/audio_encoder_L16.h new file mode 100644 index 0000000..b410286 --- /dev/null +++ b/api/audio_codecs/L16/audio_encoder_L16.h @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_L16_AUDIO_ENCODER_L16_H_ +#define API_AUDIO_CODECS_L16_AUDIO_ENCODER_L16_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// L16 encoder API for use as a template parameter to +// CreateAudioEncoderFactory<...>(). 
+struct RTC_EXPORT AudioEncoderL16 { + struct Config { + bool IsOk() const { + return (sample_rate_hz == 8000 || sample_rate_hz == 16000 || + sample_rate_hz == 32000 || sample_rate_hz == 48000) && + num_channels >= 1 && frame_size_ms > 0 && frame_size_ms <= 120 && + frame_size_ms % 10 == 0; + } + int sample_rate_hz = 8000; + int num_channels = 1; + int frame_size_ms = 10; + }; + static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); + static void AppendSupportedEncoders(std::vector* specs); + static AudioCodecInfo QueryAudioEncoder(const Config& config); + static std::unique_ptr MakeAudioEncoder( + const Config& config, + int payload_type, + absl::optional codec_pair_id = absl::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_L16_AUDIO_ENCODER_L16_H_ diff --git a/api/audio_codecs/OWNERS b/api/audio_codecs/OWNERS new file mode 100644 index 0000000..fe41785 --- /dev/null +++ b/api/audio_codecs/OWNERS @@ -0,0 +1 @@ +kwiberg@webrtc.org diff --git a/api/audio_codecs/audio_codec_pair_id.cc b/api/audio_codecs/audio_codec_pair_id.cc new file mode 100644 index 0000000..6cb51ed --- /dev/null +++ b/api/audio_codecs/audio_codec_pair_id.cc @@ -0,0 +1,91 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio_codecs/audio_codec_pair_id.h" + +#include +#include + +#include "rtc_base/checks.h" + +namespace webrtc { + +namespace { + +// Returns a new value that it has never returned before. You may call it at +// most 2^63 times in the lifetime of the program. Note: The returned values +// may be easily predictable. 
+uint64_t GetNextId() { + static std::atomic next_id(0); + + // Atomically increment `next_id`, and return the previous value. Relaxed + // memory order is sufficient, since all we care about is that different + // callers return different values. + const uint64_t new_id = next_id.fetch_add(1, std::memory_order_relaxed); + + // This check isn't atomic with the increment, so if we start 2^63 + 1 + // invocations of GetNextId() in parallel, the last one to do the atomic + // increment could return the ID 0 before any of the others had time to + // trigger this DCHECK. We blithely assume that this won't happen. + RTC_DCHECK_LT(new_id, uint64_t{1} << 63) << "Used up all ID values"; + + return new_id; +} + +// Make an integer ID more unpredictable. This is a 1:1 mapping, so you can +// feed it any value, but the idea is that you can feed it a sequence such as +// 0, 1, 2, ... and get a new sequence that isn't as trivially predictable, so +// that users won't rely on it being consecutive or increasing or anything like +// that. +constexpr uint64_t ObfuscateId(uint64_t id) { + // Any nonzero coefficient that's relatively prime to 2^64 (that is, any odd + // number) and any constant will give a 1:1 mapping. These high-entropy + // values will prevent the sequence from being trivially predictable. + // + // Both the multiplication and the addition going to overflow almost always, + // but that's fine---we *want* arithmetic mod 2^64. + return uint64_t{0x85fdb20e1294309a} + uint64_t{0xc516ef5c37462469} * id; +} + +// The first ten values. Verified against the Python function +// +// def f(n): +// return (0x85fdb20e1294309a + 0xc516ef5c37462469 * n) % 2**64 +// +// Callers should obviously not depend on these exact values... +// +// (On Visual C++, we have to disable warning C4307 (integral constant +// overflow), even though unsigned integers have perfectly well-defined +// overflow behavior.) 
+#ifdef _MSC_VER +#pragma warning(push) +#pragma warning(disable : 4307) +#endif +static_assert(ObfuscateId(0) == uint64_t{0x85fdb20e1294309a}, ""); +static_assert(ObfuscateId(1) == uint64_t{0x4b14a16a49da5503}, ""); +static_assert(ObfuscateId(2) == uint64_t{0x102b90c68120796c}, ""); +static_assert(ObfuscateId(3) == uint64_t{0xd5428022b8669dd5}, ""); +static_assert(ObfuscateId(4) == uint64_t{0x9a596f7eefacc23e}, ""); +static_assert(ObfuscateId(5) == uint64_t{0x5f705edb26f2e6a7}, ""); +static_assert(ObfuscateId(6) == uint64_t{0x24874e375e390b10}, ""); +static_assert(ObfuscateId(7) == uint64_t{0xe99e3d93957f2f79}, ""); +static_assert(ObfuscateId(8) == uint64_t{0xaeb52cefccc553e2}, ""); +static_assert(ObfuscateId(9) == uint64_t{0x73cc1c4c040b784b}, ""); +#ifdef _MSC_VER +#pragma warning(pop) +#endif + +} // namespace + +AudioCodecPairId AudioCodecPairId::Create() { + return AudioCodecPairId(ObfuscateId(GetNextId())); +} + +} // namespace webrtc diff --git a/api/audio_codecs/audio_codec_pair_id.h b/api/audio_codecs/audio_codec_pair_id.h new file mode 100644 index 0000000..b10f14e --- /dev/null +++ b/api/audio_codecs/audio_codec_pair_id.h @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_AUDIO_CODEC_PAIR_ID_H_ +#define API_AUDIO_CODECS_AUDIO_CODEC_PAIR_ID_H_ + +#include + +#include + +namespace webrtc { + +class AudioCodecPairId final { + public: + // Copyable, but not default constructible. 
+ AudioCodecPairId() = delete; + AudioCodecPairId(const AudioCodecPairId&) = default; + AudioCodecPairId(AudioCodecPairId&&) = default; + AudioCodecPairId& operator=(const AudioCodecPairId&) = default; + AudioCodecPairId& operator=(AudioCodecPairId&&) = default; + + friend void swap(AudioCodecPairId& a, AudioCodecPairId& b) { + using std::swap; + swap(a.id_, b.id_); + } + + // Creates a new ID, unequal to any previously created ID. + static AudioCodecPairId Create(); + + // IDs can be tested for equality. + friend bool operator==(AudioCodecPairId a, AudioCodecPairId b) { + return a.id_ == b.id_; + } + friend bool operator!=(AudioCodecPairId a, AudioCodecPairId b) { + return a.id_ != b.id_; + } + + // Comparisons. The ordering of ID values is completely arbitrary, but + // stable, so it's useful e.g. if you want to use IDs as keys in an ordered + // map. + friend bool operator<(AudioCodecPairId a, AudioCodecPairId b) { + return a.id_ < b.id_; + } + friend bool operator<=(AudioCodecPairId a, AudioCodecPairId b) { + return a.id_ <= b.id_; + } + friend bool operator>=(AudioCodecPairId a, AudioCodecPairId b) { + return a.id_ >= b.id_; + } + friend bool operator>(AudioCodecPairId a, AudioCodecPairId b) { + return a.id_ > b.id_; + } + + // Returns a numeric representation of the ID. The numeric values are + // completely arbitrary, but stable, collision-free, and reasonably evenly + // distributed, so they are e.g. useful as hash values in unordered maps. + uint64_t NumericRepresentation() const { return id_; } + + private: + explicit AudioCodecPairId(uint64_t id) : id_(id) {} + + uint64_t id_; +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_AUDIO_CODEC_PAIR_ID_H_ diff --git a/api/audio_codecs/audio_decoder.cc b/api/audio_codecs/audio_decoder.cc new file mode 100644 index 0000000..97cda27 --- /dev/null +++ b/api/audio_codecs/audio_decoder.cc @@ -0,0 +1,170 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio_codecs/audio_decoder.h" + +#include + +#include +#include + +#include "api/array_view.h" +#include "rtc_base/checks.h" +#include "rtc_base/sanitizer.h" +#include "rtc_base/trace_event.h" + +namespace webrtc { + +namespace { + +class OldStyleEncodedFrame final : public AudioDecoder::EncodedAudioFrame { + public: + OldStyleEncodedFrame(AudioDecoder* decoder, rtc::Buffer&& payload) + : decoder_(decoder), payload_(std::move(payload)) {} + + size_t Duration() const override { + const int ret = decoder_->PacketDuration(payload_.data(), payload_.size()); + return ret < 0 ? 0 : static_cast(ret); + } + + absl::optional Decode( + rtc::ArrayView decoded) const override { + auto speech_type = AudioDecoder::kSpeech; + const int ret = decoder_->Decode( + payload_.data(), payload_.size(), decoder_->SampleRateHz(), + decoded.size() * sizeof(int16_t), decoded.data(), &speech_type); + return ret < 0 ? 
absl::nullopt + : absl::optional( + {static_cast(ret), speech_type}); + } + + private: + AudioDecoder* const decoder_; + const rtc::Buffer payload_; +}; + +} // namespace + +bool AudioDecoder::EncodedAudioFrame::IsDtxPacket() const { + return false; +} + +AudioDecoder::ParseResult::ParseResult() = default; +AudioDecoder::ParseResult::ParseResult(ParseResult&& b) = default; +AudioDecoder::ParseResult::ParseResult(uint32_t timestamp, + int priority, + std::unique_ptr frame) + : timestamp(timestamp), priority(priority), frame(std::move(frame)) { + RTC_DCHECK_GE(priority, 0); +} + +AudioDecoder::ParseResult::~ParseResult() = default; + +AudioDecoder::ParseResult& AudioDecoder::ParseResult::operator=( + ParseResult&& b) = default; + +std::vector AudioDecoder::ParsePayload( + rtc::Buffer&& payload, + uint32_t timestamp) { + std::vector results; + std::unique_ptr frame( + new OldStyleEncodedFrame(this, std::move(payload))); + results.emplace_back(timestamp, 0, std::move(frame)); + return results; +} + +int AudioDecoder::Decode(const uint8_t* encoded, + size_t encoded_len, + int sample_rate_hz, + size_t max_decoded_bytes, + int16_t* decoded, + SpeechType* speech_type) { + TRACE_EVENT0("webrtc", "AudioDecoder::Decode"); + rtc::MsanCheckInitialized(rtc::MakeArrayView(encoded, encoded_len)); + int duration = PacketDuration(encoded, encoded_len); + if (duration >= 0 && + duration * Channels() * sizeof(int16_t) > max_decoded_bytes) { + return -1; + } + return DecodeInternal(encoded, encoded_len, sample_rate_hz, decoded, + speech_type); +} + +int AudioDecoder::DecodeRedundant(const uint8_t* encoded, + size_t encoded_len, + int sample_rate_hz, + size_t max_decoded_bytes, + int16_t* decoded, + SpeechType* speech_type) { + TRACE_EVENT0("webrtc", "AudioDecoder::DecodeRedundant"); + rtc::MsanCheckInitialized(rtc::MakeArrayView(encoded, encoded_len)); + int duration = PacketDurationRedundant(encoded, encoded_len); + if (duration >= 0 && + duration * Channels() * sizeof(int16_t) > 
max_decoded_bytes) { + return -1; + } + return DecodeRedundantInternal(encoded, encoded_len, sample_rate_hz, decoded, + speech_type); +} + +int AudioDecoder::DecodeRedundantInternal(const uint8_t* encoded, + size_t encoded_len, + int sample_rate_hz, + int16_t* decoded, + SpeechType* speech_type) { + return DecodeInternal(encoded, encoded_len, sample_rate_hz, decoded, + speech_type); +} + +bool AudioDecoder::HasDecodePlc() const { + return false; +} + +size_t AudioDecoder::DecodePlc(size_t num_frames, int16_t* decoded) { + return 0; +} + +// TODO(bugs.webrtc.org/9676): Remove default implementation. +void AudioDecoder::GeneratePlc(size_t /*requested_samples_per_channel*/, + rtc::BufferT* /*concealment_audio*/) {} + +int AudioDecoder::ErrorCode() { + return 0; +} + +int AudioDecoder::PacketDuration(const uint8_t* encoded, + size_t encoded_len) const { + return kNotImplemented; +} + +int AudioDecoder::PacketDurationRedundant(const uint8_t* encoded, + size_t encoded_len) const { + return kNotImplemented; +} + +bool AudioDecoder::PacketHasFec(const uint8_t* encoded, + size_t encoded_len) const { + return false; +} + +AudioDecoder::SpeechType AudioDecoder::ConvertSpeechType(int16_t type) { + switch (type) { + case 0: // TODO(hlundin): Both iSAC and Opus return 0 for speech. + case 1: + return kSpeech; + case 2: + return kComfortNoise; + default: + assert(false); + return kSpeech; + } +} + +} // namespace webrtc diff --git a/api/audio_codecs/audio_decoder.h b/api/audio_codecs/audio_decoder.h new file mode 100644 index 0000000..557ffe2 --- /dev/null +++ b/api/audio_codecs/audio_decoder.h @@ -0,0 +1,193 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_AUDIO_DECODER_H_ +#define API_AUDIO_CODECS_AUDIO_DECODER_H_ + +#include +#include + +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "rtc_base/buffer.h" +#include "rtc_base/constructor_magic.h" + +namespace webrtc { + +class AudioDecoder { + public: + enum SpeechType { + kSpeech = 1, + kComfortNoise = 2, + }; + + // Used by PacketDuration below. Save the value -1 for errors. + enum { kNotImplemented = -2 }; + + AudioDecoder() = default; + virtual ~AudioDecoder() = default; + + class EncodedAudioFrame { + public: + struct DecodeResult { + size_t num_decoded_samples; + SpeechType speech_type; + }; + + virtual ~EncodedAudioFrame() = default; + + // Returns the duration in samples-per-channel of this audio frame. + // If no duration can be ascertained, returns zero. + virtual size_t Duration() const = 0; + + // Returns true if this packet contains DTX. + virtual bool IsDtxPacket() const; + + // Decodes this frame of audio and writes the result in |decoded|. + // |decoded| must be large enough to store as many samples as indicated by a + // call to Duration() . On success, returns an absl::optional containing the + // total number of samples across all channels, as well as whether the + // decoder produced comfort noise or speech. On failure, returns an empty + // absl::optional. Decode may be called at most once per frame object. + virtual absl::optional Decode( + rtc::ArrayView decoded) const = 0; + }; + + struct ParseResult { + ParseResult(); + ParseResult(uint32_t timestamp, + int priority, + std::unique_ptr frame); + ParseResult(ParseResult&& b); + ~ParseResult(); + + ParseResult& operator=(ParseResult&& b); + + // The timestamp of the frame is in samples per channel. 
+ uint32_t timestamp; + // The relative priority of the frame compared to other frames of the same + // payload and the same timeframe. A higher value means a lower priority. + // The highest priority is zero - negative values are not allowed. + int priority; + std::unique_ptr frame; + }; + + // Let the decoder parse this payload and prepare zero or more decodable + // frames. Each frame must be between 10 ms and 120 ms long. The caller must + // ensure that the AudioDecoder object outlives any frame objects returned by + // this call. The decoder is free to swap or move the data from the |payload| + // buffer. |timestamp| is the input timestamp, in samples, corresponding to + // the start of the payload. + virtual std::vector ParsePayload(rtc::Buffer&& payload, + uint32_t timestamp); + + // TODO(bugs.webrtc.org/10098): The Decode and DecodeRedundant methods are + // obsolete; callers should call ParsePayload instead. For now, subclasses + // must still implement DecodeInternal. + + // Decodes |encode_len| bytes from |encoded| and writes the result in + // |decoded|. The maximum bytes allowed to be written into |decoded| is + // |max_decoded_bytes|. Returns the total number of samples across all + // channels. If the decoder produced comfort noise, |speech_type| + // is set to kComfortNoise, otherwise it is kSpeech. The desired output + // sample rate is provided in |sample_rate_hz|, which must be valid for the + // codec at hand. + int Decode(const uint8_t* encoded, + size_t encoded_len, + int sample_rate_hz, + size_t max_decoded_bytes, + int16_t* decoded, + SpeechType* speech_type); + + // Same as Decode(), but interfaces to the decoders redundant decode function. + // The default implementation simply calls the regular Decode() method. + int DecodeRedundant(const uint8_t* encoded, + size_t encoded_len, + int sample_rate_hz, + size_t max_decoded_bytes, + int16_t* decoded, + SpeechType* speech_type); + + // Indicates if the decoder implements the DecodePlc method. 
+ virtual bool HasDecodePlc() const; + + // Calls the packet-loss concealment of the decoder to update the state after + // one or several lost packets. The caller has to make sure that the + // memory allocated in |decoded| should accommodate |num_frames| frames. + virtual size_t DecodePlc(size_t num_frames, int16_t* decoded); + + // Asks the decoder to generate packet-loss concealment and append it to the + // end of |concealment_audio|. The concealment audio should be in + // channel-interleaved format, with as many channels as the last decoded + // packet produced. The implementation must produce at least + // requested_samples_per_channel, or nothing at all. This is a signal to the + // caller to conceal the loss with other means. If the implementation provides + // concealment samples, it is also responsible for "stitching" it together + // with the decoded audio on either side of the concealment. + // Note: The default implementation of GeneratePlc will be deleted soon. All + // implementations must provide their own, which can be a simple as a no-op. + // TODO(bugs.webrtc.org/9676): Remove default impementation. + virtual void GeneratePlc(size_t requested_samples_per_channel, + rtc::BufferT* concealment_audio); + + // Resets the decoder state (empty buffers etc.). + virtual void Reset() = 0; + + // Returns the last error code from the decoder. + virtual int ErrorCode(); + + // Returns the duration in samples-per-channel of the payload in |encoded| + // which is |encoded_len| bytes long. Returns kNotImplemented if no duration + // estimate is available, or -1 in case of an error. + virtual int PacketDuration(const uint8_t* encoded, size_t encoded_len) const; + + // Returns the duration in samples-per-channel of the redandant payload in + // |encoded| which is |encoded_len| bytes long. Returns kNotImplemented if no + // duration estimate is available, or -1 in case of an error. 
+ virtual int PacketDurationRedundant(const uint8_t* encoded, + size_t encoded_len) const; + + // Detects whether a packet has forward error correction. The packet is + // comprised of the samples in |encoded| which is |encoded_len| bytes long. + // Returns true if the packet has FEC and false otherwise. + virtual bool PacketHasFec(const uint8_t* encoded, size_t encoded_len) const; + + // Returns the actual sample rate of the decoder's output. This value may not + // change during the lifetime of the decoder. + virtual int SampleRateHz() const = 0; + + // The number of channels in the decoder's output. This value may not change + // during the lifetime of the decoder. + virtual size_t Channels() const = 0; + + protected: + static SpeechType ConvertSpeechType(int16_t type); + + virtual int DecodeInternal(const uint8_t* encoded, + size_t encoded_len, + int sample_rate_hz, + int16_t* decoded, + SpeechType* speech_type) = 0; + + virtual int DecodeRedundantInternal(const uint8_t* encoded, + size_t encoded_len, + int sample_rate_hz, + int16_t* decoded, + SpeechType* speech_type); + + private: + RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoder); +}; + +} // namespace webrtc +#endif // API_AUDIO_CODECS_AUDIO_DECODER_H_ diff --git a/api/audio_codecs/audio_decoder_factory.h b/api/audio_codecs/audio_decoder_factory.h new file mode 100644 index 0000000..c36a0e1 --- /dev/null +++ b/api/audio_codecs/audio_decoder_factory.h @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_AUDIO_CODECS_AUDIO_DECODER_FACTORY_H_ +#define API_AUDIO_CODECS_AUDIO_DECODER_FACTORY_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_format.h" +#include "rtc_base/ref_count.h" + +namespace webrtc { + +// A factory that creates AudioDecoders. +class AudioDecoderFactory : public rtc::RefCountInterface { + public: + virtual std::vector GetSupportedDecoders() = 0; + + virtual bool IsSupportedDecoder(const SdpAudioFormat& format) = 0; + + // Create a new decoder instance. The `codec_pair_id` argument is used to link + // encoders and decoders that talk to the same remote entity: if a + // AudioEncoderFactory::MakeAudioEncoder() and a + // AudioDecoderFactory::MakeAudioDecoder() call receive non-null IDs that + // compare equal, the factory implementations may assume that the encoder and + // decoder form a pair. (The intended use case for this is to set up + // communication between the AudioEncoder and AudioDecoder instances, which is + // needed for some codecs with built-in bandwidth adaptation.) + // + // Note: Implementations need to be robust against combinations other than + // one encoder, one decoder getting the same ID; such decoders must still + // work. + virtual std::unique_ptr MakeAudioDecoder( + const SdpAudioFormat& format, + absl::optional codec_pair_id) = 0; +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_AUDIO_DECODER_FACTORY_H_ diff --git a/api/audio_codecs/audio_decoder_factory_template.h b/api/audio_codecs/audio_decoder_factory_template.h new file mode 100644 index 0000000..e628cb6 --- /dev/null +++ b/api/audio_codecs/audio_decoder_factory_template.h @@ -0,0 +1,133 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_AUDIO_DECODER_FACTORY_TEMPLATE_H_ +#define API_AUDIO_CODECS_AUDIO_DECODER_FACTORY_TEMPLATE_H_ + +#include +#include + +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/scoped_refptr.h" +#include "rtc_base/ref_counted_object.h" + +namespace webrtc { + +namespace audio_decoder_factory_template_impl { + +template +struct Helper; + +// Base case: 0 template parameters. +template <> +struct Helper<> { + static void AppendSupportedDecoders(std::vector* specs) {} + static bool IsSupportedDecoder(const SdpAudioFormat& format) { return false; } + static std::unique_ptr MakeAudioDecoder( + const SdpAudioFormat& format, + absl::optional codec_pair_id) { + return nullptr; + } +}; + +// Inductive case: Called with n + 1 template parameters; calls subroutines +// with n template parameters. +template +struct Helper { + static void AppendSupportedDecoders(std::vector* specs) { + T::AppendSupportedDecoders(specs); + Helper::AppendSupportedDecoders(specs); + } + static bool IsSupportedDecoder(const SdpAudioFormat& format) { + auto opt_config = T::SdpToConfig(format); + static_assert(std::is_same>::value, + "T::SdpToConfig() must return a value of type " + "absl::optional"); + return opt_config ? true : Helper::IsSupportedDecoder(format); + } + static std::unique_ptr MakeAudioDecoder( + const SdpAudioFormat& format, + absl::optional codec_pair_id) { + auto opt_config = T::SdpToConfig(format); + return opt_config ? 
T::MakeAudioDecoder(*opt_config, codec_pair_id) + : Helper::MakeAudioDecoder(format, codec_pair_id); + } +}; + +template +class AudioDecoderFactoryT : public AudioDecoderFactory { + public: + std::vector GetSupportedDecoders() override { + std::vector specs; + Helper::AppendSupportedDecoders(&specs); + return specs; + } + + bool IsSupportedDecoder(const SdpAudioFormat& format) override { + return Helper::IsSupportedDecoder(format); + } + + std::unique_ptr MakeAudioDecoder( + const SdpAudioFormat& format, + absl::optional codec_pair_id) override { + return Helper::MakeAudioDecoder(format, codec_pair_id); + } +}; + +} // namespace audio_decoder_factory_template_impl + +// Make an AudioDecoderFactory that can create instances of the given decoders. +// +// Each decoder type is given as a template argument to the function; it should +// be a struct with the following static member functions: +// +// // Converts |audio_format| to a ConfigType instance. Returns an empty +// // optional if |audio_format| doesn't correctly specify a decoder of our +// // type. +// absl::optional SdpToConfig(const SdpAudioFormat& audio_format); +// +// // Appends zero or more AudioCodecSpecs to the list that will be returned +// // by AudioDecoderFactory::GetSupportedDecoders(). +// void AppendSupportedDecoders(std::vector* specs); +// +// // Creates an AudioDecoder for the specified format. Used to implement +// // AudioDecoderFactory::MakeAudioDecoder(). +// std::unique_ptr MakeAudioDecoder( +// const ConfigType& config, +// absl::optional codec_pair_id); +// +// ConfigType should be a type that encapsulates all the settings needed to +// create an AudioDecoder. T::Config (where T is the decoder struct) should +// either be the config type, or an alias for it. +// +// Whenever it tries to do something, the new factory will try each of the +// decoder types in the order they were specified in the template argument +// list, stopping at the first one that claims to be able to do the job. 
+// +// TODO(kwiberg): Point at CreateBuiltinAudioDecoderFactory() for an example of +// how it is used. +template +rtc::scoped_refptr CreateAudioDecoderFactory() { + // There's no technical reason we couldn't allow zero template parameters, + // but such a factory couldn't create any decoders, and callers can do this + // by mistake by simply forgetting the <> altogether. So we forbid it in + // order to prevent caller foot-shooting. + static_assert(sizeof...(Ts) >= 1, + "Caller must give at least one template parameter"); + + return rtc::scoped_refptr( + new rtc::RefCountedObject< + audio_decoder_factory_template_impl::AudioDecoderFactoryT>()); +} + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_AUDIO_DECODER_FACTORY_TEMPLATE_H_ diff --git a/api/audio_codecs/audio_encoder.cc b/api/audio_codecs/audio_encoder.cc new file mode 100644 index 0000000..cd4d200 --- /dev/null +++ b/api/audio_codecs/audio_encoder.cc @@ -0,0 +1,113 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/audio_codecs/audio_encoder.h" + +#include "rtc_base/checks.h" +#include "rtc_base/trace_event.h" + +namespace webrtc { + +ANAStats::ANAStats() = default; +ANAStats::~ANAStats() = default; +ANAStats::ANAStats(const ANAStats&) = default; + +AudioEncoder::EncodedInfo::EncodedInfo() = default; +AudioEncoder::EncodedInfo::EncodedInfo(const EncodedInfo&) = default; +AudioEncoder::EncodedInfo::EncodedInfo(EncodedInfo&&) = default; +AudioEncoder::EncodedInfo::~EncodedInfo() = default; +AudioEncoder::EncodedInfo& AudioEncoder::EncodedInfo::operator=( + const EncodedInfo&) = default; +AudioEncoder::EncodedInfo& AudioEncoder::EncodedInfo::operator=(EncodedInfo&&) = + default; + +int AudioEncoder::RtpTimestampRateHz() const { + return SampleRateHz(); +} + +AudioEncoder::EncodedInfo AudioEncoder::Encode( + uint32_t rtp_timestamp, + rtc::ArrayView audio, + rtc::Buffer* encoded) { + TRACE_EVENT0("webrtc", "AudioEncoder::Encode"); + RTC_CHECK_EQ(audio.size(), + static_cast(NumChannels() * SampleRateHz() / 100)); + + const size_t old_size = encoded->size(); + EncodedInfo info = EncodeImpl(rtp_timestamp, audio, encoded); + RTC_CHECK_EQ(encoded->size() - old_size, info.encoded_bytes); + return info; +} + +bool AudioEncoder::SetFec(bool enable) { + return !enable; +} + +bool AudioEncoder::SetDtx(bool enable) { + return !enable; +} + +bool AudioEncoder::GetDtx() const { + return false; +} + +bool AudioEncoder::SetApplication(Application application) { + return false; +} + +void AudioEncoder::SetMaxPlaybackRate(int frequency_hz) {} + +void AudioEncoder::SetTargetBitrate(int target_bps) {} + +rtc::ArrayView> +AudioEncoder::ReclaimContainedEncoders() { + return nullptr; +} + +bool AudioEncoder::EnableAudioNetworkAdaptor(const std::string& config_string, + RtcEventLog* event_log) { + return false; +} + +void AudioEncoder::DisableAudioNetworkAdaptor() {} + +void AudioEncoder::OnReceivedUplinkPacketLossFraction( + float uplink_packet_loss_fraction) {} + +void 
AudioEncoder::OnReceivedUplinkRecoverablePacketLossFraction( + float uplink_recoverable_packet_loss_fraction) { + RTC_NOTREACHED(); +} + +void AudioEncoder::OnReceivedTargetAudioBitrate(int target_audio_bitrate_bps) { + OnReceivedUplinkBandwidth(target_audio_bitrate_bps, absl::nullopt); +} + +void AudioEncoder::OnReceivedUplinkBandwidth( + int target_audio_bitrate_bps, + absl::optional bwe_period_ms) {} + +void AudioEncoder::OnReceivedUplinkAllocation(BitrateAllocationUpdate update) { + OnReceivedUplinkBandwidth(update.target_bitrate.bps(), + update.bwe_period.ms()); +} + +void AudioEncoder::OnReceivedRtt(int rtt_ms) {} + +void AudioEncoder::OnReceivedOverhead(size_t overhead_bytes_per_packet) {} + +void AudioEncoder::SetReceiverFrameLengthRange(int min_frame_length_ms, + int max_frame_length_ms) {} + +ANAStats AudioEncoder::GetANAStats() const { + return ANAStats(); +} + +} // namespace webrtc diff --git a/api/audio_codecs/audio_encoder.h b/api/audio_codecs/audio_encoder.h new file mode 100644 index 0000000..fd2d948 --- /dev/null +++ b/api/audio_codecs/audio_encoder.h @@ -0,0 +1,257 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_AUDIO_ENCODER_H_ +#define API_AUDIO_CODECS_AUDIO_ENCODER_H_ + +#include +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/call/bitrate_allocation.h" +#include "api/units/time_delta.h" +#include "rtc_base/buffer.h" +#include "rtc_base/deprecation.h" + +namespace webrtc { + +class RtcEventLog; + +// Statistics related to Audio Network Adaptation. 
+struct ANAStats { + ANAStats(); + ANAStats(const ANAStats&); + ~ANAStats(); + // Number of actions taken by the ANA bitrate controller since the start of + // the call. If this value is not set, it indicates that the bitrate + // controller is disabled. + absl::optional bitrate_action_counter; + // Number of actions taken by the ANA channel controller since the start of + // the call. If this value is not set, it indicates that the channel + // controller is disabled. + absl::optional channel_action_counter; + // Number of actions taken by the ANA DTX controller since the start of the + // call. If this value is not set, it indicates that the DTX controller is + // disabled. + absl::optional dtx_action_counter; + // Number of actions taken by the ANA FEC controller since the start of the + // call. If this value is not set, it indicates that the FEC controller is + // disabled. + absl::optional fec_action_counter; + // Number of times the ANA frame length controller decided to increase the + // frame length since the start of the call. If this value is not set, it + // indicates that the frame length controller is disabled. + absl::optional frame_length_increase_counter; + // Number of times the ANA frame length controller decided to decrease the + // frame length since the start of the call. If this value is not set, it + // indicates that the frame length controller is disabled. + absl::optional frame_length_decrease_counter; + // The uplink packet loss fractions as set by the ANA FEC controller. If this + // value is not set, it indicates that the ANA FEC controller is not active. + absl::optional uplink_packet_loss_fraction; +}; + +// This is the interface class for encoders in AudioCoding module. Each codec +// type must have an implementation of this class. +class AudioEncoder { + public: + // Used for UMA logging of codec usage. 
The same codecs, with the + // same values, must be listed in + // src/tools/metrics/histograms/histograms.xml in chromium to log + // correct values. + enum class CodecType { + kOther = 0, // Codec not specified, and/or not listed in this enum + kOpus = 1, + kIsac = 2, + kPcmA = 3, + kPcmU = 4, + kG722 = 5, + kIlbc = 6, + + // Number of histogram bins in the UMA logging of codec types. The + // total number of different codecs that are logged cannot exceed this + // number. + kMaxLoggedAudioCodecTypes + }; + + struct EncodedInfoLeaf { + size_t encoded_bytes = 0; + uint32_t encoded_timestamp = 0; + int payload_type = 0; + bool send_even_if_empty = false; + bool speech = true; + CodecType encoder_type = CodecType::kOther; + }; + + // This is the main struct for auxiliary encoding information. Each encoded + // packet should be accompanied by one EncodedInfo struct, containing the + // total number of |encoded_bytes|, the |encoded_timestamp| and the + // |payload_type|. If the packet contains redundant encodings, the |redundant| + // vector will be populated with EncodedInfoLeaf structs. Each struct in the + // vector represents one encoding; the order of structs in the vector is the + // same as the order in which the actual payloads are written to the byte + // stream. When EncoderInfoLeaf structs are present in the vector, the main + // struct's |encoded_bytes| will be the sum of all the |encoded_bytes| in the + // vector. + struct EncodedInfo : public EncodedInfoLeaf { + EncodedInfo(); + EncodedInfo(const EncodedInfo&); + EncodedInfo(EncodedInfo&&); + ~EncodedInfo(); + EncodedInfo& operator=(const EncodedInfo&); + EncodedInfo& operator=(EncodedInfo&&); + + std::vector redundant; + }; + + virtual ~AudioEncoder() = default; + + // Returns the input sample rate in Hz and the number of input channels. + // These are constants set at instantiation time. 
+ virtual int SampleRateHz() const = 0; + virtual size_t NumChannels() const = 0; + + // Returns the rate at which the RTP timestamps are updated. The default + // implementation returns SampleRateHz(). + virtual int RtpTimestampRateHz() const; + + // Returns the number of 10 ms frames the encoder will put in the next + // packet. This value may only change when Encode() outputs a packet; i.e., + // the encoder may vary the number of 10 ms frames from packet to packet, but + // it must decide the length of the next packet no later than when outputting + // the preceding packet. + virtual size_t Num10MsFramesInNextPacket() const = 0; + + // Returns the maximum value that can be returned by + // Num10MsFramesInNextPacket(). + virtual size_t Max10MsFramesInAPacket() const = 0; + + // Returns the current target bitrate in bits/s. The value -1 means that the + // codec adapts the target automatically, and a current target cannot be + // provided. + virtual int GetTargetBitrate() const = 0; + + // Accepts one 10 ms block of input audio (i.e., SampleRateHz() / 100 * + // NumChannels() samples). Multi-channel audio must be sample-interleaved. + // The encoder appends zero or more bytes of output to |encoded| and returns + // additional encoding information. Encode() checks some preconditions, calls + // EncodeImpl() which does the actual work, and then checks some + // postconditions. + EncodedInfo Encode(uint32_t rtp_timestamp, + rtc::ArrayView audio, + rtc::Buffer* encoded); + + // Resets the encoder to its starting state, discarding any input that has + // been fed to the encoder but not yet emitted in a packet. + virtual void Reset() = 0; + + // Enables or disables codec-internal FEC (forward error correction). Returns + // true if the codec was able to comply. The default implementation returns + // true when asked to disable FEC and false when asked to enable it (meaning + // that FEC isn't supported). 
+ virtual bool SetFec(bool enable); + + // Enables or disables codec-internal VAD/DTX. Returns true if the codec was + // able to comply. The default implementation returns true when asked to + // disable DTX and false when asked to enable it (meaning that DTX isn't + // supported). + virtual bool SetDtx(bool enable); + + // Returns the status of codec-internal DTX. The default implementation always + // returns false. + virtual bool GetDtx() const; + + // Sets the application mode. Returns true if the codec was able to comply. + // The default implementation just returns false. + enum class Application { kSpeech, kAudio }; + virtual bool SetApplication(Application application); + + // Tells the encoder about the highest sample rate the decoder is expected to + // use when decoding the bitstream. The encoder would typically use this + // information to adjust the quality of the encoding. The default + // implementation does nothing. + virtual void SetMaxPlaybackRate(int frequency_hz); + + // This is to be deprecated. Please use |OnReceivedTargetAudioBitrate| + // instead. + // Tells the encoder what average bitrate we'd like it to produce. The + // encoder is free to adjust or disregard the given bitrate (the default + // implementation does the latter). + RTC_DEPRECATED virtual void SetTargetBitrate(int target_bps); + + // Causes this encoder to let go of any other encoders it contains, and + // returns a pointer to an array where they are stored (which is required to + // live as long as this encoder). Unless the returned array is empty, you may + // not call any methods on this encoder afterwards, except for the + // destructor. The default implementation just returns an empty array. + // NOTE: This method is subject to change. Do not call or override it. + virtual rtc::ArrayView> + ReclaimContainedEncoders(); + + // Enables audio network adaptor. Returns true if successful. 
+ virtual bool EnableAudioNetworkAdaptor(const std::string& config_string, + RtcEventLog* event_log); + + // Disables audio network adaptor. + virtual void DisableAudioNetworkAdaptor(); + + // Provides uplink packet loss fraction to this encoder to allow it to adapt. + // |uplink_packet_loss_fraction| is in the range [0.0, 1.0]. + virtual void OnReceivedUplinkPacketLossFraction( + float uplink_packet_loss_fraction); + + RTC_DEPRECATED virtual void OnReceivedUplinkRecoverablePacketLossFraction( + float uplink_recoverable_packet_loss_fraction); + + // Provides target audio bitrate to this encoder to allow it to adapt. + virtual void OnReceivedTargetAudioBitrate(int target_bps); + + // Provides target audio bitrate and corresponding probing interval of + // the bandwidth estimator to this encoder to allow it to adapt. + virtual void OnReceivedUplinkBandwidth(int target_audio_bitrate_bps, + absl::optional bwe_period_ms); + + // Provides target audio bitrate and corresponding probing interval of + // the bandwidth estimator to this encoder to allow it to adapt. + virtual void OnReceivedUplinkAllocation(BitrateAllocationUpdate update); + + // Provides RTT to this encoder to allow it to adapt. + virtual void OnReceivedRtt(int rtt_ms); + + // Provides overhead to this encoder to adapt. The overhead is the number of + // bytes that will be added to each packet the encoder generates. + virtual void OnReceivedOverhead(size_t overhead_bytes_per_packet); + + // To allow encoder to adapt its frame length, it must be provided the frame + // length range that receivers can accept. + virtual void SetReceiverFrameLengthRange(int min_frame_length_ms, + int max_frame_length_ms); + + // Get statistics related to audio network adaptation. + virtual ANAStats GetANAStats() const; + + // The range of frame lengths that are supported or nullopt if there's no sch + // information. This is used to calculated the full bitrate range, including + // overhead. 
+  virtual absl::optional<std::pair<TimeDelta, TimeDelta>> GetFrameLengthRange()
+      const = 0;
+
+ protected:
+  // Subclasses implement this to perform the actual encoding. Called by
+  // Encode().
+  virtual EncodedInfo EncodeImpl(uint32_t rtp_timestamp,
+                                 rtc::ArrayView<const int16_t> audio,
+                                 rtc::Buffer* encoded) = 0;
+};
+}  // namespace webrtc
+#endif  // API_AUDIO_CODECS_AUDIO_ENCODER_H_
diff --git a/api/audio_codecs/audio_encoder_factory.h b/api/audio_codecs/audio_encoder_factory.h
new file mode 100644
index 0000000..48995a8
--- /dev/null
+++ b/api/audio_codecs/audio_encoder_factory.h
@@ -0,0 +1,60 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_AUDIO_CODECS_AUDIO_ENCODER_FACTORY_H_
+#define API_AUDIO_CODECS_AUDIO_ENCODER_FACTORY_H_
+
+#include <memory>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/audio_codecs/audio_codec_pair_id.h"
+#include "api/audio_codecs/audio_encoder.h"
+#include "api/audio_codecs/audio_format.h"
+#include "rtc_base/ref_count.h"
+
+namespace webrtc {
+
+// A factory that creates AudioEncoders.
+class AudioEncoderFactory : public rtc::RefCountInterface {
+ public:
+  // Returns a prioritized list of audio codecs, to use for signaling etc.
+  virtual std::vector<AudioCodecSpec> GetSupportedEncoders() = 0;
+
+  // Returns information about how this format would be encoded, provided it's
+  // supported. More format and format variations may be supported than those
+  // returned by GetSupportedEncoders().
+  virtual absl::optional<AudioCodecInfo> QueryAudioEncoder(
+      const SdpAudioFormat& format) = 0;
+
+  // Creates an AudioEncoder for the specified format. The encoder will tag its
+  // payloads with the specified payload type.
The `codec_pair_id` argument is + // used to link encoders and decoders that talk to the same remote entity: if + // a AudioEncoderFactory::MakeAudioEncoder() and a + // AudioDecoderFactory::MakeAudioDecoder() call receive non-null IDs that + // compare equal, the factory implementations may assume that the encoder and + // decoder form a pair. (The intended use case for this is to set up + // communication between the AudioEncoder and AudioDecoder instances, which is + // needed for some codecs with built-in bandwidth adaptation.) + // + // Note: Implementations need to be robust against combinations other than + // one encoder, one decoder getting the same ID; such encoders must still + // work. + // + // TODO(ossu): Try to avoid audio encoders having to know their payload type. + virtual std::unique_ptr MakeAudioEncoder( + int payload_type, + const SdpAudioFormat& format, + absl::optional codec_pair_id) = 0; +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_AUDIO_ENCODER_FACTORY_H_ diff --git a/api/audio_codecs/audio_encoder_factory_template.h b/api/audio_codecs/audio_encoder_factory_template.h new file mode 100644 index 0000000..74cb053 --- /dev/null +++ b/api/audio_codecs/audio_encoder_factory_template.h @@ -0,0 +1,152 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_AUDIO_CODECS_AUDIO_ENCODER_FACTORY_TEMPLATE_H_ +#define API_AUDIO_CODECS_AUDIO_ENCODER_FACTORY_TEMPLATE_H_ + +#include +#include + +#include "api/audio_codecs/audio_encoder_factory.h" +#include "api/scoped_refptr.h" +#include "rtc_base/ref_counted_object.h" + +namespace webrtc { + +namespace audio_encoder_factory_template_impl { + +template +struct Helper; + +// Base case: 0 template parameters. +template <> +struct Helper<> { + static void AppendSupportedEncoders(std::vector* specs) {} + static absl::optional QueryAudioEncoder( + const SdpAudioFormat& format) { + return absl::nullopt; + } + static std::unique_ptr MakeAudioEncoder( + int payload_type, + const SdpAudioFormat& format, + absl::optional codec_pair_id) { + return nullptr; + } +}; + +// Inductive case: Called with n + 1 template parameters; calls subroutines +// with n template parameters. +template +struct Helper { + static void AppendSupportedEncoders(std::vector* specs) { + T::AppendSupportedEncoders(specs); + Helper::AppendSupportedEncoders(specs); + } + static absl::optional QueryAudioEncoder( + const SdpAudioFormat& format) { + auto opt_config = T::SdpToConfig(format); + static_assert(std::is_same>::value, + "T::SdpToConfig() must return a value of type " + "absl::optional"); + return opt_config ? 
absl::optional( + T::QueryAudioEncoder(*opt_config)) + : Helper::QueryAudioEncoder(format); + } + static std::unique_ptr MakeAudioEncoder( + int payload_type, + const SdpAudioFormat& format, + absl::optional codec_pair_id) { + auto opt_config = T::SdpToConfig(format); + if (opt_config) { + return T::MakeAudioEncoder(*opt_config, payload_type, codec_pair_id); + } else { + return Helper::MakeAudioEncoder(payload_type, format, + codec_pair_id); + } + } +}; + +template +class AudioEncoderFactoryT : public AudioEncoderFactory { + public: + std::vector GetSupportedEncoders() override { + std::vector specs; + Helper::AppendSupportedEncoders(&specs); + return specs; + } + + absl::optional QueryAudioEncoder( + const SdpAudioFormat& format) override { + return Helper::QueryAudioEncoder(format); + } + + std::unique_ptr MakeAudioEncoder( + int payload_type, + const SdpAudioFormat& format, + absl::optional codec_pair_id) override { + return Helper::MakeAudioEncoder(payload_type, format, codec_pair_id); + } +}; + +} // namespace audio_encoder_factory_template_impl + +// Make an AudioEncoderFactory that can create instances of the given encoders. +// +// Each encoder type is given as a template argument to the function; it should +// be a struct with the following static member functions: +// +// // Converts |audio_format| to a ConfigType instance. Returns an empty +// // optional if |audio_format| doesn't correctly specify an encoder of our +// // type. +// absl::optional SdpToConfig(const SdpAudioFormat& audio_format); +// +// // Appends zero or more AudioCodecSpecs to the list that will be returned +// // by AudioEncoderFactory::GetSupportedEncoders(). +// void AppendSupportedEncoders(std::vector* specs); +// +// // Returns information about how this format would be encoded. Used to +// // implement AudioEncoderFactory::QueryAudioEncoder(). +// AudioCodecInfo QueryAudioEncoder(const ConfigType& config); +// +// // Creates an AudioEncoder for the specified format. 
Used to implement +// // AudioEncoderFactory::MakeAudioEncoder(). +// std::unique_ptr MakeAudioEncoder( +// const ConfigType& config, +// int payload_type, +// absl::optional codec_pair_id); +// +// ConfigType should be a type that encapsulates all the settings needed to +// create an AudioEncoder. T::Config (where T is the encoder struct) should +// either be the config type, or an alias for it. +// +// Whenever it tries to do something, the new factory will try each of the +// encoders in the order they were specified in the template argument list, +// stopping at the first one that claims to be able to do the job. +// +// TODO(kwiberg): Point at CreateBuiltinAudioEncoderFactory() for an example of +// how it is used. +template +rtc::scoped_refptr CreateAudioEncoderFactory() { + // There's no technical reason we couldn't allow zero template parameters, + // but such a factory couldn't create any encoders, and callers can do this + // by mistake by simply forgetting the <> altogether. So we forbid it in + // order to prevent caller foot-shooting. + static_assert(sizeof...(Ts) >= 1, + "Caller must give at least one template parameter"); + + return rtc::scoped_refptr( + new rtc::RefCountedObject< + audio_encoder_factory_template_impl::AudioEncoderFactoryT>()); +} + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_AUDIO_ENCODER_FACTORY_TEMPLATE_H_ diff --git a/api/audio_codecs/audio_format.cc b/api/audio_codecs/audio_format.cc new file mode 100644 index 0000000..2a529a4 --- /dev/null +++ b/api/audio_codecs/audio_format.cc @@ -0,0 +1,86 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/audio_codecs/audio_format.h" + +#include + +#include "absl/strings/match.h" + +namespace webrtc { + +SdpAudioFormat::SdpAudioFormat(const SdpAudioFormat&) = default; +SdpAudioFormat::SdpAudioFormat(SdpAudioFormat&&) = default; + +SdpAudioFormat::SdpAudioFormat(absl::string_view name, + int clockrate_hz, + size_t num_channels) + : name(name), clockrate_hz(clockrate_hz), num_channels(num_channels) {} + +SdpAudioFormat::SdpAudioFormat(absl::string_view name, + int clockrate_hz, + size_t num_channels, + const Parameters& param) + : name(name), + clockrate_hz(clockrate_hz), + num_channels(num_channels), + parameters(param) {} + +SdpAudioFormat::SdpAudioFormat(absl::string_view name, + int clockrate_hz, + size_t num_channels, + Parameters&& param) + : name(name), + clockrate_hz(clockrate_hz), + num_channels(num_channels), + parameters(std::move(param)) {} + +bool SdpAudioFormat::Matches(const SdpAudioFormat& o) const { + return absl::EqualsIgnoreCase(name, o.name) && + clockrate_hz == o.clockrate_hz && num_channels == o.num_channels; +} + +SdpAudioFormat::~SdpAudioFormat() = default; +SdpAudioFormat& SdpAudioFormat::operator=(const SdpAudioFormat&) = default; +SdpAudioFormat& SdpAudioFormat::operator=(SdpAudioFormat&&) = default; + +bool operator==(const SdpAudioFormat& a, const SdpAudioFormat& b) { + return absl::EqualsIgnoreCase(a.name, b.name) && + a.clockrate_hz == b.clockrate_hz && a.num_channels == b.num_channels && + a.parameters == b.parameters; +} + +AudioCodecInfo::AudioCodecInfo(int sample_rate_hz, + size_t num_channels, + int bitrate_bps) + : AudioCodecInfo(sample_rate_hz, + num_channels, + bitrate_bps, + bitrate_bps, + bitrate_bps) {} + +AudioCodecInfo::AudioCodecInfo(int sample_rate_hz, + size_t num_channels, + int default_bitrate_bps, + int min_bitrate_bps, + int max_bitrate_bps) + : sample_rate_hz(sample_rate_hz), + num_channels(num_channels), + default_bitrate_bps(default_bitrate_bps), + min_bitrate_bps(min_bitrate_bps), + 
max_bitrate_bps(max_bitrate_bps) { + RTC_DCHECK_GT(sample_rate_hz, 0); + RTC_DCHECK_GT(num_channels, 0); + RTC_DCHECK_GE(min_bitrate_bps, 0); + RTC_DCHECK_LE(min_bitrate_bps, default_bitrate_bps); + RTC_DCHECK_GE(max_bitrate_bps, default_bitrate_bps); +} + +} // namespace webrtc diff --git a/api/audio_codecs/audio_format.h b/api/audio_codecs/audio_format.h new file mode 100644 index 0000000..9f61729 --- /dev/null +++ b/api/audio_codecs/audio_format.h @@ -0,0 +1,133 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_AUDIO_FORMAT_H_ +#define API_AUDIO_CODECS_AUDIO_FORMAT_H_ + +#include + +#include +#include + +#include "absl/strings/string_view.h" +#include "rtc_base/checks.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// SDP specification for a single audio codec. +struct RTC_EXPORT SdpAudioFormat { + using Parameters = std::map; + + SdpAudioFormat(const SdpAudioFormat&); + SdpAudioFormat(SdpAudioFormat&&); + SdpAudioFormat(absl::string_view name, int clockrate_hz, size_t num_channels); + SdpAudioFormat(absl::string_view name, + int clockrate_hz, + size_t num_channels, + const Parameters& param); + SdpAudioFormat(absl::string_view name, + int clockrate_hz, + size_t num_channels, + Parameters&& param); + ~SdpAudioFormat(); + + // Returns true if this format is compatible with |o|. In SDP terminology: + // would it represent the same codec between an offer and an answer? As + // opposed to operator==, this method disregards codec parameters. 
+ bool Matches(const SdpAudioFormat& o) const; + + SdpAudioFormat& operator=(const SdpAudioFormat&); + SdpAudioFormat& operator=(SdpAudioFormat&&); + + friend bool operator==(const SdpAudioFormat& a, const SdpAudioFormat& b); + friend bool operator!=(const SdpAudioFormat& a, const SdpAudioFormat& b) { + return !(a == b); + } + + std::string name; + int clockrate_hz; + size_t num_channels; + Parameters parameters; +}; + +// Information about how an audio format is treated by the codec implementation. +// Contains basic information, such as sample rate and number of channels, which +// isn't uniformly presented by SDP. Also contains flags indicating support for +// integrating with other parts of WebRTC, like external VAD and comfort noise +// level calculation. +// +// To avoid API breakage, and make the code clearer, AudioCodecInfo should not +// be directly initializable with any flags indicating optional support. If it +// were, these initializers would break any time a new flag was added. 
It's also +// more difficult to understand: +// AudioCodecInfo info{16000, 1, 32000, true, false, false, true, true}; +// than +// AudioCodecInfo info(16000, 1, 32000); +// info.allow_comfort_noise = true; +// info.future_flag_b = true; +// info.future_flag_c = true; +struct AudioCodecInfo { + AudioCodecInfo(int sample_rate_hz, size_t num_channels, int bitrate_bps); + AudioCodecInfo(int sample_rate_hz, + size_t num_channels, + int default_bitrate_bps, + int min_bitrate_bps, + int max_bitrate_bps); + AudioCodecInfo(const AudioCodecInfo& b) = default; + ~AudioCodecInfo() = default; + + bool operator==(const AudioCodecInfo& b) const { + return sample_rate_hz == b.sample_rate_hz && + num_channels == b.num_channels && + default_bitrate_bps == b.default_bitrate_bps && + min_bitrate_bps == b.min_bitrate_bps && + max_bitrate_bps == b.max_bitrate_bps && + allow_comfort_noise == b.allow_comfort_noise && + supports_network_adaption == b.supports_network_adaption; + } + + bool operator!=(const AudioCodecInfo& b) const { return !(*this == b); } + + bool HasFixedBitrate() const { + RTC_DCHECK_GE(min_bitrate_bps, 0); + RTC_DCHECK_LE(min_bitrate_bps, default_bitrate_bps); + RTC_DCHECK_GE(max_bitrate_bps, default_bitrate_bps); + return min_bitrate_bps == max_bitrate_bps; + } + + int sample_rate_hz; + size_t num_channels; + int default_bitrate_bps; + int min_bitrate_bps; + int max_bitrate_bps; + + bool allow_comfort_noise = true; // This codec can be used with an external + // comfort noise generator. + bool supports_network_adaption = false; // This codec can adapt to varying + // network conditions. +}; + +// AudioCodecSpec ties an audio format to specific information about the codec +// and its implementation. 
+struct AudioCodecSpec { + bool operator==(const AudioCodecSpec& b) const { + return format == b.format && info == b.info; + } + + bool operator!=(const AudioCodecSpec& b) const { return !(*this == b); } + + SdpAudioFormat format; + AudioCodecInfo info; +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_AUDIO_FORMAT_H_ diff --git a/api/audio_codecs/builtin_audio_decoder_factory.cc b/api/audio_codecs/builtin_audio_decoder_factory.cc new file mode 100644 index 0000000..963cfe5 --- /dev/null +++ b/api/audio_codecs/builtin_audio_decoder_factory.cc @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio_codecs/builtin_audio_decoder_factory.h" + +#include +#include + +#include "api/audio_codecs/L16/audio_decoder_L16.h" +#include "api/audio_codecs/audio_decoder_factory_template.h" +#include "api/audio_codecs/g711/audio_decoder_g711.h" +#include "api/audio_codecs/g722/audio_decoder_g722.h" +#if WEBRTC_USE_BUILTIN_ILBC +#include "api/audio_codecs/ilbc/audio_decoder_ilbc.h" // nogncheck +#endif +#include "api/audio_codecs/isac/audio_decoder_isac.h" +#if WEBRTC_USE_BUILTIN_OPUS +#include "api/audio_codecs/opus/audio_decoder_multi_channel_opus.h" +#include "api/audio_codecs/opus/audio_decoder_opus.h" // nogncheck +#endif + +namespace webrtc { + +namespace { + +// Modify an audio decoder to not advertise support for anything. 
+template <typename T>
+struct NotAdvertised {
+  using Config = typename T::Config;
+  static absl::optional<Config> SdpToConfig(
+      const SdpAudioFormat& audio_format) {
+    return T::SdpToConfig(audio_format);
+  }
+  static void AppendSupportedDecoders(std::vector<AudioCodecSpec>* specs) {
+    // Don't advertise support for anything.
+  }
+  static std::unique_ptr<AudioDecoder> MakeAudioDecoder(
+      const Config& config,
+      absl::optional<AudioCodecPairId> codec_pair_id = absl::nullopt) {
+    return T::MakeAudioDecoder(config, codec_pair_id);
+  }
+};
+
+}  // namespace
+
+rtc::scoped_refptr<AudioDecoderFactory> CreateBuiltinAudioDecoderFactory() {
+  return CreateAudioDecoderFactory<
+
+#if WEBRTC_USE_BUILTIN_OPUS
+      AudioDecoderOpus, NotAdvertised<AudioDecoderMultiChannelOpus>,
+#endif
+
+      AudioDecoderIsac, AudioDecoderG722,
+
+#if WEBRTC_USE_BUILTIN_ILBC
+      AudioDecoderIlbc,
+#endif
+
+      AudioDecoderG711, NotAdvertised<AudioDecoderL16>>();
+}
+
+}  // namespace webrtc
diff --git a/api/audio_codecs/builtin_audio_decoder_factory.h b/api/audio_codecs/builtin_audio_decoder_factory.h
new file mode 100644
index 0000000..72e1e3d
--- /dev/null
+++ b/api/audio_codecs/builtin_audio_decoder_factory.h
@@ -0,0 +1,28 @@
+/*
+ *  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_AUDIO_CODECS_BUILTIN_AUDIO_DECODER_FACTORY_H_
+#define API_AUDIO_CODECS_BUILTIN_AUDIO_DECODER_FACTORY_H_
+
+#include "api/audio_codecs/audio_decoder_factory.h"
+#include "api/scoped_refptr.h"
+
+namespace webrtc {
+
+// Creates a new factory that can create the built-in types of audio decoders.
+// Note: This will link with all the code implementing those codecs, so if you +// only need a subset of the codecs, consider using +// CreateAudioDecoderFactory<...codecs listed here...>() or +// CreateOpusAudioDecoderFactory() instead. +rtc::scoped_refptr CreateBuiltinAudioDecoderFactory(); + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_BUILTIN_AUDIO_DECODER_FACTORY_H_ diff --git a/api/audio_codecs/builtin_audio_encoder_factory.cc b/api/audio_codecs/builtin_audio_encoder_factory.cc new file mode 100644 index 0000000..99fac09 --- /dev/null +++ b/api/audio_codecs/builtin_audio_encoder_factory.cc @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio_codecs/builtin_audio_encoder_factory.h" + +#include +#include + +#include "api/audio_codecs/L16/audio_encoder_L16.h" +#include "api/audio_codecs/audio_encoder_factory_template.h" +#include "api/audio_codecs/g711/audio_encoder_g711.h" +#include "api/audio_codecs/g722/audio_encoder_g722.h" +#if WEBRTC_USE_BUILTIN_ILBC +#include "api/audio_codecs/ilbc/audio_encoder_ilbc.h" // nogncheck +#endif +#include "api/audio_codecs/isac/audio_encoder_isac.h" +#if WEBRTC_USE_BUILTIN_OPUS +#include "api/audio_codecs/opus/audio_encoder_multi_channel_opus.h" +#include "api/audio_codecs/opus/audio_encoder_opus.h" // nogncheck +#endif + +namespace webrtc { + +namespace { + +// Modify an audio encoder to not advertise support for anything. 
+template +struct NotAdvertised { + using Config = typename T::Config; + static absl::optional SdpToConfig( + const SdpAudioFormat& audio_format) { + return T::SdpToConfig(audio_format); + } + static void AppendSupportedEncoders(std::vector* specs) { + // Don't advertise support for anything. + } + static AudioCodecInfo QueryAudioEncoder(const Config& config) { + return T::QueryAudioEncoder(config); + } + static std::unique_ptr MakeAudioEncoder( + const Config& config, + int payload_type, + absl::optional codec_pair_id = absl::nullopt) { + return T::MakeAudioEncoder(config, payload_type, codec_pair_id); + } +}; + +} // namespace + +rtc::scoped_refptr CreateBuiltinAudioEncoderFactory() { + return CreateAudioEncoderFactory< + +#if WEBRTC_USE_BUILTIN_OPUS + AudioEncoderOpus, NotAdvertised, +#endif + + AudioEncoderIsac, AudioEncoderG722, + +#if WEBRTC_USE_BUILTIN_ILBC + AudioEncoderIlbc, +#endif + + AudioEncoderG711, NotAdvertised>(); +} + +} // namespace webrtc diff --git a/api/audio_codecs/builtin_audio_encoder_factory.h b/api/audio_codecs/builtin_audio_encoder_factory.h new file mode 100644 index 0000000..f833de1 --- /dev/null +++ b/api/audio_codecs/builtin_audio_encoder_factory.h @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_BUILTIN_AUDIO_ENCODER_FACTORY_H_ +#define API_AUDIO_CODECS_BUILTIN_AUDIO_ENCODER_FACTORY_H_ + +#include "api/audio_codecs/audio_encoder_factory.h" +#include "api/scoped_refptr.h" + +namespace webrtc { + +// Creates a new factory that can create the built-in types of audio encoders. 
+// Note: This will link with all the code implementing those codecs, so if you +// only need a subset of the codecs, consider using +// CreateAudioEncoderFactory<...codecs listed here...>() or +// CreateOpusAudioEncoderFactory() instead. +rtc::scoped_refptr CreateBuiltinAudioEncoderFactory(); + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_BUILTIN_AUDIO_ENCODER_FACTORY_H_ diff --git a/api/audio_codecs/g711/BUILD.gn b/api/audio_codecs/g711/BUILD.gn new file mode 100644 index 0000000..92d77be --- /dev/null +++ b/api/audio_codecs/g711/BUILD.gn @@ -0,0 +1,52 @@ +# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("../../../webrtc.gni") +if (is_android) { + import("//build/config/android/config.gni") + import("//build/config/android/rules.gni") +} + +rtc_library("audio_encoder_g711") { + visibility = [ "*" ] + poisonous = [ "audio_codecs" ] + sources = [ + "audio_encoder_g711.cc", + "audio_encoder_g711.h", + ] + deps = [ + "..:audio_codecs_api", + "../../../modules/audio_coding:g711", + "../../../rtc_base:rtc_base_approved", + "../../../rtc_base:safe_minmax", + "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("audio_decoder_g711") { + visibility = [ "*" ] + poisonous = [ "audio_codecs" ] + sources = [ + "audio_decoder_g711.cc", + "audio_decoder_g711.h", + ] + deps = [ + "..:audio_codecs_api", + "../../../modules/audio_coding:g711", + "../../../rtc_base:rtc_base_approved", + "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + 
"//third_party/abseil-cpp/absl/types:optional", + ] +} diff --git a/api/audio_codecs/g711/audio_decoder_g711.cc b/api/audio_codecs/g711/audio_decoder_g711.cc new file mode 100644 index 0000000..57e3741 --- /dev/null +++ b/api/audio_codecs/g711/audio_decoder_g711.cc @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio_codecs/g711/audio_decoder_g711.h" + +#include +#include + +#include "absl/strings/match.h" +#include "modules/audio_coding/codecs/g711/audio_decoder_pcm.h" +#include "rtc_base/numerics/safe_conversions.h" + +namespace webrtc { + +absl::optional AudioDecoderG711::SdpToConfig( + const SdpAudioFormat& format) { + const bool is_pcmu = absl::EqualsIgnoreCase(format.name, "PCMU"); + const bool is_pcma = absl::EqualsIgnoreCase(format.name, "PCMA"); + if (format.clockrate_hz == 8000 && format.num_channels >= 1 && + (is_pcmu || is_pcma)) { + Config config; + config.type = is_pcmu ? 
Config::Type::kPcmU : Config::Type::kPcmA;
+    config.num_channels = rtc::dchecked_cast<int>(format.num_channels);
+    RTC_DCHECK(config.IsOk());
+    return config;
+  } else {
+    return absl::nullopt;
+  }
+}
+
+void AudioDecoderG711::AppendSupportedDecoders(
+    std::vector<AudioCodecSpec>* specs) {
+  for (const char* type : {"PCMU", "PCMA"}) {
+    specs->push_back({{type, 8000, 1}, {8000, 1, 64000}});
+  }
+}
+
+std::unique_ptr<AudioDecoder> AudioDecoderG711::MakeAudioDecoder(
+    const Config& config,
+    absl::optional<AudioCodecPairId> /*codec_pair_id*/) {
+  RTC_DCHECK(config.IsOk());
+  switch (config.type) {
+    case Config::Type::kPcmU:
+      return std::make_unique<AudioDecoderPcmU>(config.num_channels);
+    case Config::Type::kPcmA:
+      return std::make_unique<AudioDecoderPcmA>(config.num_channels);
+    default:
+      return nullptr;
+  }
+}
+
+}  // namespace webrtc
diff --git a/api/audio_codecs/g711/audio_decoder_g711.h b/api/audio_codecs/g711/audio_decoder_g711.h
new file mode 100644
index 0000000..ccd1ee0
--- /dev/null
+++ b/api/audio_codecs/g711/audio_decoder_g711.h
@@ -0,0 +1,45 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_AUDIO_CODECS_G711_AUDIO_DECODER_G711_H_
+#define API_AUDIO_CODECS_G711_AUDIO_DECODER_G711_H_
+
+#include <memory>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/audio_codecs/audio_codec_pair_id.h"
+#include "api/audio_codecs/audio_decoder.h"
+#include "api/audio_codecs/audio_format.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+// G711 decoder API for use as a template parameter to
+// CreateAudioDecoderFactory<...>().
+struct RTC_EXPORT AudioDecoderG711 { + struct Config { + enum class Type { kPcmU, kPcmA }; + bool IsOk() const { + return (type == Type::kPcmU || type == Type::kPcmA) && num_channels >= 1; + } + Type type; + int num_channels; + }; + static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); + static void AppendSupportedDecoders(std::vector* specs); + static std::unique_ptr MakeAudioDecoder( + const Config& config, + absl::optional codec_pair_id = absl::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_G711_AUDIO_DECODER_G711_H_ diff --git a/api/audio_codecs/g711/audio_encoder_g711.cc b/api/audio_codecs/g711/audio_encoder_g711.cc new file mode 100644 index 0000000..ab95ad4 --- /dev/null +++ b/api/audio_codecs/g711/audio_encoder_g711.cc @@ -0,0 +1,87 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio_codecs/g711/audio_encoder_g711.h" + +#include +#include + +#include "absl/strings/match.h" +#include "modules/audio_coding/codecs/g711/audio_encoder_pcm.h" +#include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/numerics/safe_minmax.h" +#include "rtc_base/string_to_number.h" + +namespace webrtc { + +absl::optional AudioEncoderG711::SdpToConfig( + const SdpAudioFormat& format) { + const bool is_pcmu = absl::EqualsIgnoreCase(format.name, "PCMU"); + const bool is_pcma = absl::EqualsIgnoreCase(format.name, "PCMA"); + if (format.clockrate_hz == 8000 && format.num_channels >= 1 && + (is_pcmu || is_pcma)) { + Config config; + config.type = is_pcmu ? 
Config::Type::kPcmU : Config::Type::kPcmA; + config.num_channels = rtc::dchecked_cast(format.num_channels); + config.frame_size_ms = 20; + auto ptime_iter = format.parameters.find("ptime"); + if (ptime_iter != format.parameters.end()) { + const auto ptime = rtc::StringToNumber(ptime_iter->second); + if (ptime && *ptime > 0) { + config.frame_size_ms = rtc::SafeClamp(10 * (*ptime / 10), 10, 60); + } + } + RTC_DCHECK(config.IsOk()); + return config; + } else { + return absl::nullopt; + } +} + +void AudioEncoderG711::AppendSupportedEncoders( + std::vector* specs) { + for (const char* type : {"PCMU", "PCMA"}) { + specs->push_back({{type, 8000, 1}, {8000, 1, 64000}}); + } +} + +AudioCodecInfo AudioEncoderG711::QueryAudioEncoder(const Config& config) { + RTC_DCHECK(config.IsOk()); + return {8000, rtc::dchecked_cast(config.num_channels), + 64000 * config.num_channels}; +} + +std::unique_ptr AudioEncoderG711::MakeAudioEncoder( + const Config& config, + int payload_type, + absl::optional /*codec_pair_id*/) { + RTC_DCHECK(config.IsOk()); + switch (config.type) { + case Config::Type::kPcmU: { + AudioEncoderPcmU::Config impl_config; + impl_config.num_channels = config.num_channels; + impl_config.frame_size_ms = config.frame_size_ms; + impl_config.payload_type = payload_type; + return std::make_unique(impl_config); + } + case Config::Type::kPcmA: { + AudioEncoderPcmA::Config impl_config; + impl_config.num_channels = config.num_channels; + impl_config.frame_size_ms = config.frame_size_ms; + impl_config.payload_type = payload_type; + return std::make_unique(impl_config); + } + default: { + return nullptr; + } + } +} + +} // namespace webrtc diff --git a/api/audio_codecs/g711/audio_encoder_g711.h b/api/audio_codecs/g711/audio_encoder_g711.h new file mode 100644 index 0000000..23ae18b --- /dev/null +++ b/api/audio_codecs/g711/audio_encoder_g711.h @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_G711_AUDIO_ENCODER_G711_H_ +#define API_AUDIO_CODECS_G711_AUDIO_ENCODER_G711_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// G711 encoder API for use as a template parameter to +// CreateAudioEncoderFactory<...>(). +struct RTC_EXPORT AudioEncoderG711 { + struct Config { + enum class Type { kPcmU, kPcmA }; + bool IsOk() const { + return (type == Type::kPcmU || type == Type::kPcmA) && + frame_size_ms > 0 && frame_size_ms % 10 == 0 && num_channels >= 1; + } + Type type = Type::kPcmU; + int num_channels = 1; + int frame_size_ms = 20; + }; + static absl::optional SdpToConfig( + const SdpAudioFormat& audio_format); + static void AppendSupportedEncoders(std::vector* specs); + static AudioCodecInfo QueryAudioEncoder(const Config& config); + static std::unique_ptr MakeAudioEncoder( + const Config& config, + int payload_type, + absl::optional codec_pair_id = absl::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_G711_AUDIO_ENCODER_G711_H_ diff --git a/api/audio_codecs/g722/BUILD.gn b/api/audio_codecs/g722/BUILD.gn new file mode 100644 index 0000000..a186eab --- /dev/null +++ b/api/audio_codecs/g722/BUILD.gn @@ -0,0 +1,58 @@ +# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. 
An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("../../../webrtc.gni") +if (is_android) { + import("//build/config/android/config.gni") + import("//build/config/android/rules.gni") +} + +rtc_source_set("audio_encoder_g722_config") { + visibility = [ "*" ] + sources = [ "audio_encoder_g722_config.h" ] +} + +rtc_library("audio_encoder_g722") { + visibility = [ "*" ] + poisonous = [ "audio_codecs" ] + sources = [ + "audio_encoder_g722.cc", + "audio_encoder_g722.h", + ] + deps = [ + ":audio_encoder_g722_config", + "..:audio_codecs_api", + "../../../modules/audio_coding:g722", + "../../../rtc_base:rtc_base_approved", + "../../../rtc_base:safe_minmax", + "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("audio_decoder_g722") { + visibility = [ "*" ] + poisonous = [ "audio_codecs" ] + sources = [ + "audio_decoder_g722.cc", + "audio_decoder_g722.h", + ] + deps = [ + "..:audio_codecs_api", + "../../../modules/audio_coding:g722", + "../../../rtc_base:rtc_base_approved", + "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} diff --git a/api/audio_codecs/g722/audio_decoder_g722.cc b/api/audio_codecs/g722/audio_decoder_g722.cc new file mode 100644 index 0000000..29b6d5d --- /dev/null +++ b/api/audio_codecs/g722/audio_decoder_g722.cc @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio_codecs/g722/audio_decoder_g722.h" + +#include +#include + +#include "absl/strings/match.h" +#include "modules/audio_coding/codecs/g722/audio_decoder_g722.h" +#include "rtc_base/numerics/safe_conversions.h" + +namespace webrtc { + +absl::optional AudioDecoderG722::SdpToConfig( + const SdpAudioFormat& format) { + return absl::EqualsIgnoreCase(format.name, "G722") && + format.clockrate_hz == 8000 && + (format.num_channels == 1 || format.num_channels == 2) + ? absl::optional( + Config{rtc::dchecked_cast(format.num_channels)}) + : absl::nullopt; +} + +void AudioDecoderG722::AppendSupportedDecoders( + std::vector* specs) { + specs->push_back({{"G722", 8000, 1}, {16000, 1, 64000}}); +} + +std::unique_ptr AudioDecoderG722::MakeAudioDecoder( + Config config, + absl::optional /*codec_pair_id*/) { + switch (config.num_channels) { + case 1: + return std::make_unique(); + case 2: + return std::make_unique(); + default: + return nullptr; + } +} + +} // namespace webrtc diff --git a/api/audio_codecs/g722/audio_decoder_g722.h b/api/audio_codecs/g722/audio_decoder_g722.h new file mode 100644 index 0000000..2a67492 --- /dev/null +++ b/api/audio_codecs/g722/audio_decoder_g722.h @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_AUDIO_CODECS_G722_AUDIO_DECODER_G722_H_ +#define API_AUDIO_CODECS_G722_AUDIO_DECODER_G722_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_format.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// G722 decoder API for use as a template parameter to +// CreateAudioDecoderFactory<...>(). +struct RTC_EXPORT AudioDecoderG722 { + struct Config { + bool IsOk() const { return num_channels == 1 || num_channels == 2; } + int num_channels; + }; + static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); + static void AppendSupportedDecoders(std::vector* specs); + static std::unique_ptr MakeAudioDecoder( + Config config, + absl::optional codec_pair_id = absl::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_G722_AUDIO_DECODER_G722_H_ diff --git a/api/audio_codecs/g722/audio_encoder_g722.cc b/api/audio_codecs/g722/audio_encoder_g722.cc new file mode 100644 index 0000000..12c1746 --- /dev/null +++ b/api/audio_codecs/g722/audio_encoder_g722.cc @@ -0,0 +1,67 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/audio_codecs/g722/audio_encoder_g722.h" + +#include +#include + +#include "absl/strings/match.h" +#include "modules/audio_coding/codecs/g722/audio_encoder_g722.h" +#include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/numerics/safe_minmax.h" +#include "rtc_base/string_to_number.h" + +namespace webrtc { + +absl::optional AudioEncoderG722::SdpToConfig( + const SdpAudioFormat& format) { + if (!absl::EqualsIgnoreCase(format.name, "g722") || + format.clockrate_hz != 8000) { + return absl::nullopt; + } + + AudioEncoderG722Config config; + config.num_channels = rtc::checked_cast(format.num_channels); + auto ptime_iter = format.parameters.find("ptime"); + if (ptime_iter != format.parameters.end()) { + auto ptime = rtc::StringToNumber(ptime_iter->second); + if (ptime && *ptime > 0) { + const int whole_packets = *ptime / 10; + config.frame_size_ms = rtc::SafeClamp(whole_packets * 10, 10, 60); + } + } + return config.IsOk() ? absl::optional(config) + : absl::nullopt; +} + +void AudioEncoderG722::AppendSupportedEncoders( + std::vector* specs) { + const SdpAudioFormat fmt = {"G722", 8000, 1}; + const AudioCodecInfo info = QueryAudioEncoder(*SdpToConfig(fmt)); + specs->push_back({fmt, info}); +} + +AudioCodecInfo AudioEncoderG722::QueryAudioEncoder( + const AudioEncoderG722Config& config) { + RTC_DCHECK(config.IsOk()); + return {16000, rtc::dchecked_cast(config.num_channels), + 64000 * config.num_channels}; +} + +std::unique_ptr AudioEncoderG722::MakeAudioEncoder( + const AudioEncoderG722Config& config, + int payload_type, + absl::optional /*codec_pair_id*/) { + RTC_DCHECK(config.IsOk()); + return std::make_unique(config, payload_type); +} + +} // namespace webrtc diff --git a/api/audio_codecs/g722/audio_encoder_g722.h b/api/audio_codecs/g722/audio_encoder_g722.h new file mode 100644 index 0000000..327c0af --- /dev/null +++ b/api/audio_codecs/g722/audio_encoder_g722.h @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. 
All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_G722_AUDIO_ENCODER_G722_H_ +#define API_AUDIO_CODECS_G722_AUDIO_ENCODER_G722_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/audio_codecs/g722/audio_encoder_g722_config.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// G722 encoder API for use as a template parameter to +// CreateAudioEncoderFactory<...>(). +struct RTC_EXPORT AudioEncoderG722 { + using Config = AudioEncoderG722Config; + static absl::optional SdpToConfig( + const SdpAudioFormat& audio_format); + static void AppendSupportedEncoders(std::vector* specs); + static AudioCodecInfo QueryAudioEncoder(const AudioEncoderG722Config& config); + static std::unique_ptr MakeAudioEncoder( + const AudioEncoderG722Config& config, + int payload_type, + absl::optional codec_pair_id = absl::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_G722_AUDIO_ENCODER_G722_H_ diff --git a/api/audio_codecs/g722/audio_encoder_g722_config.h b/api/audio_codecs/g722/audio_encoder_g722_config.h new file mode 100644 index 0000000..2878985 --- /dev/null +++ b/api/audio_codecs/g722/audio_encoder_g722_config.h @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_G722_AUDIO_ENCODER_G722_CONFIG_H_ +#define API_AUDIO_CODECS_G722_AUDIO_ENCODER_G722_CONFIG_H_ + +namespace webrtc { + +struct AudioEncoderG722Config { + bool IsOk() const { + return frame_size_ms > 0 && frame_size_ms % 10 == 0 && num_channels >= 1; + } + int frame_size_ms = 20; + int num_channels = 1; +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_G722_AUDIO_ENCODER_G722_CONFIG_H_ diff --git a/api/audio_codecs/ilbc/BUILD.gn b/api/audio_codecs/ilbc/BUILD.gn new file mode 100644 index 0000000..b6a5045 --- /dev/null +++ b/api/audio_codecs/ilbc/BUILD.gn @@ -0,0 +1,56 @@ +# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../../webrtc.gni") +if (is_android) { + import("//build/config/android/config.gni") + import("//build/config/android/rules.gni") +} + +rtc_source_set("audio_encoder_ilbc_config") { + visibility = [ "*" ] + sources = [ "audio_encoder_ilbc_config.h" ] +} + +rtc_library("audio_encoder_ilbc") { + visibility = [ "*" ] + poisonous = [ "audio_codecs" ] + sources = [ + "audio_encoder_ilbc.cc", + "audio_encoder_ilbc.h", + ] + deps = [ + ":audio_encoder_ilbc_config", + "..:audio_codecs_api", + "../../../modules/audio_coding:ilbc", + "../../../rtc_base:rtc_base_approved", + "../../../rtc_base:safe_minmax", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("audio_decoder_ilbc") { + visibility = [ "*" ] + poisonous = [ "audio_codecs" ] + sources = [ + "audio_decoder_ilbc.cc", + "audio_decoder_ilbc.h", + ] + deps = [ + "..:audio_codecs_api", + "../../../modules/audio_coding:ilbc", + "../../../rtc_base:rtc_base_approved", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} diff --git a/api/audio_codecs/ilbc/audio_decoder_ilbc.cc b/api/audio_codecs/ilbc/audio_decoder_ilbc.cc new file mode 100644 index 0000000..d0aae90 --- /dev/null +++ b/api/audio_codecs/ilbc/audio_decoder_ilbc.cc @@ -0,0 +1,40 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/audio_codecs/ilbc/audio_decoder_ilbc.h" + +#include +#include + +#include "absl/strings/match.h" +#include "modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h" + +namespace webrtc { + +absl::optional AudioDecoderIlbc::SdpToConfig( + const SdpAudioFormat& format) { + return absl::EqualsIgnoreCase(format.name, "ILBC") && + format.clockrate_hz == 8000 && format.num_channels == 1 + ? absl::optional(Config()) + : absl::nullopt; +} + +void AudioDecoderIlbc::AppendSupportedDecoders( + std::vector* specs) { + specs->push_back({{"ILBC", 8000, 1}, {8000, 1, 13300}}); +} + +std::unique_ptr AudioDecoderIlbc::MakeAudioDecoder( + Config config, + absl::optional /*codec_pair_id*/) { + return std::make_unique(); +} + +} // namespace webrtc diff --git a/api/audio_codecs/ilbc/audio_decoder_ilbc.h b/api/audio_codecs/ilbc/audio_decoder_ilbc.h new file mode 100644 index 0000000..9ab8479 --- /dev/null +++ b/api/audio_codecs/ilbc/audio_decoder_ilbc.h @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_ILBC_AUDIO_DECODER_ILBC_H_ +#define API_AUDIO_CODECS_ILBC_AUDIO_DECODER_ILBC_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_format.h" + +namespace webrtc { + +// ILBC decoder API for use as a template parameter to +// CreateAudioDecoderFactory<...>(). +struct AudioDecoderIlbc { + struct Config {}; // Empty---no config values needed! 
+ static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); + static void AppendSupportedDecoders(std::vector* specs); + static std::unique_ptr MakeAudioDecoder( + Config config, + absl::optional codec_pair_id = absl::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_ILBC_AUDIO_DECODER_ILBC_H_ diff --git a/api/audio_codecs/ilbc/audio_encoder_ilbc.cc b/api/audio_codecs/ilbc/audio_encoder_ilbc.cc new file mode 100644 index 0000000..bd653b7 --- /dev/null +++ b/api/audio_codecs/ilbc/audio_encoder_ilbc.cc @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio_codecs/ilbc/audio_encoder_ilbc.h" + +#include +#include + +#include "absl/strings/match.h" +#include "modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h" +#include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/numerics/safe_minmax.h" +#include "rtc_base/string_to_number.h" + +namespace webrtc { +namespace { +int GetIlbcBitrate(int ptime) { + switch (ptime) { + case 20: + case 40: + // 38 bytes per frame of 20 ms => 15200 bits/s. + return 15200; + case 30: + case 60: + // 50 bytes per frame of 30 ms => (approx) 13333 bits/s. 
+ return 13333; + default: + FATAL(); + } +} +} // namespace + +absl::optional AudioEncoderIlbc::SdpToConfig( + const SdpAudioFormat& format) { + if (!absl::EqualsIgnoreCase(format.name.c_str(), "ILBC") || + format.clockrate_hz != 8000 || format.num_channels != 1) { + return absl::nullopt; + } + + AudioEncoderIlbcConfig config; + auto ptime_iter = format.parameters.find("ptime"); + if (ptime_iter != format.parameters.end()) { + auto ptime = rtc::StringToNumber(ptime_iter->second); + if (ptime && *ptime > 0) { + const int whole_packets = *ptime / 10; + config.frame_size_ms = rtc::SafeClamp(whole_packets * 10, 20, 60); + } + } + return config.IsOk() ? absl::optional(config) + : absl::nullopt; +} + +void AudioEncoderIlbc::AppendSupportedEncoders( + std::vector* specs) { + const SdpAudioFormat fmt = {"ILBC", 8000, 1}; + const AudioCodecInfo info = QueryAudioEncoder(*SdpToConfig(fmt)); + specs->push_back({fmt, info}); +} + +AudioCodecInfo AudioEncoderIlbc::QueryAudioEncoder( + const AudioEncoderIlbcConfig& config) { + RTC_DCHECK(config.IsOk()); + return {8000, 1, GetIlbcBitrate(config.frame_size_ms)}; +} + +std::unique_ptr AudioEncoderIlbc::MakeAudioEncoder( + const AudioEncoderIlbcConfig& config, + int payload_type, + absl::optional /*codec_pair_id*/) { + RTC_DCHECK(config.IsOk()); + return std::make_unique(config, payload_type); +} + +} // namespace webrtc diff --git a/api/audio_codecs/ilbc/audio_encoder_ilbc.h b/api/audio_codecs/ilbc/audio_encoder_ilbc.h new file mode 100644 index 0000000..e4aeca7 --- /dev/null +++ b/api/audio_codecs/ilbc/audio_encoder_ilbc.h @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_ILBC_AUDIO_ENCODER_ILBC_H_ +#define API_AUDIO_CODECS_ILBC_AUDIO_ENCODER_ILBC_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/audio_codecs/ilbc/audio_encoder_ilbc_config.h" + +namespace webrtc { + +// ILBC encoder API for use as a template parameter to +// CreateAudioEncoderFactory<...>(). +struct AudioEncoderIlbc { + using Config = AudioEncoderIlbcConfig; + static absl::optional SdpToConfig( + const SdpAudioFormat& audio_format); + static void AppendSupportedEncoders(std::vector* specs); + static AudioCodecInfo QueryAudioEncoder(const AudioEncoderIlbcConfig& config); + static std::unique_ptr MakeAudioEncoder( + const AudioEncoderIlbcConfig& config, + int payload_type, + absl::optional codec_pair_id = absl::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_ILBC_AUDIO_ENCODER_ILBC_H_ diff --git a/api/audio_codecs/ilbc/audio_encoder_ilbc_config.h b/api/audio_codecs/ilbc/audio_encoder_ilbc_config.h new file mode 100644 index 0000000..4d82f99 --- /dev/null +++ b/api/audio_codecs/ilbc/audio_encoder_ilbc_config.h @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_AUDIO_CODECS_ILBC_AUDIO_ENCODER_ILBC_CONFIG_H_ +#define API_AUDIO_CODECS_ILBC_AUDIO_ENCODER_ILBC_CONFIG_H_ + +namespace webrtc { + +struct AudioEncoderIlbcConfig { + bool IsOk() const { + return (frame_size_ms == 20 || frame_size_ms == 30 || frame_size_ms == 40 || + frame_size_ms == 60); + } + int frame_size_ms = 30; // Valid values are 20, 30, 40, and 60 ms. + // Note that frame size 40 ms produces encodings with two 20 ms frames in + // them, and frame size 60 ms consists of two 30 ms frames. +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_ILBC_AUDIO_ENCODER_ILBC_CONFIG_H_ diff --git a/api/audio_codecs/isac/BUILD.gn b/api/audio_codecs/isac/BUILD.gn new file mode 100644 index 0000000..6ff6e5f --- /dev/null +++ b/api/audio_codecs/isac/BUILD.gn @@ -0,0 +1,133 @@ +# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("../../../webrtc.gni") +if (is_android) { + import("//build/config/android/config.gni") + import("//build/config/android/rules.gni") +} + +# The targets with _fix and _float suffixes unconditionally use the +# fixed-point and floating-point iSAC implementations, respectively. +# The targets without suffixes pick one of the implementations based +# on cleverly chosen criteria. 
+ +rtc_source_set("audio_encoder_isac") { + visibility = [ "*" ] + poisonous = [ "audio_codecs" ] + public = [ "audio_encoder_isac.h" ] + public_configs = [ ":isac_config" ] + if (current_cpu == "arm") { + deps = [ ":audio_encoder_isac_fix" ] + } else { + deps = [ ":audio_encoder_isac_float" ] + } +} + +rtc_source_set("audio_decoder_isac") { + visibility = [ "*" ] + poisonous = [ "audio_codecs" ] + public = [ "audio_decoder_isac.h" ] + public_configs = [ ":isac_config" ] + if (current_cpu == "arm") { + deps = [ ":audio_decoder_isac_fix" ] + } else { + deps = [ ":audio_decoder_isac_float" ] + } +} + +config("isac_config") { + visibility = [ ":*" ] + if (current_cpu == "arm") { + defines = [ + "WEBRTC_USE_BUILTIN_ISAC_FIX=1", + "WEBRTC_USE_BUILTIN_ISAC_FLOAT=0", + ] + } else { + defines = [ + "WEBRTC_USE_BUILTIN_ISAC_FIX=0", + "WEBRTC_USE_BUILTIN_ISAC_FLOAT=1", + ] + } +} + +rtc_library("audio_encoder_isac_fix") { + visibility = [ "*" ] + poisonous = [ "audio_codecs" ] + sources = [ + "audio_encoder_isac_fix.cc", + "audio_encoder_isac_fix.h", + ] + deps = [ + "..:audio_codecs_api", + "../../../modules/audio_coding:isac_fix", + "../../../rtc_base:rtc_base_approved", + "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("audio_decoder_isac_fix") { + visibility = [ "*" ] + poisonous = [ "audio_codecs" ] + sources = [ + "audio_decoder_isac_fix.cc", + "audio_decoder_isac_fix.h", + ] + deps = [ + "..:audio_codecs_api", + "../../../modules/audio_coding:isac_fix", + "../../../rtc_base:rtc_base_approved", + "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("audio_encoder_isac_float") { + visibility = [ "*" ] + poisonous = [ "audio_codecs" ] + sources = [ + "audio_encoder_isac_float.cc", + "audio_encoder_isac_float.h", + ] + deps = [ + 
"..:audio_codecs_api", + "../../../modules/audio_coding:isac", + "../../../rtc_base:rtc_base_approved", + "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("audio_decoder_isac_float") { + visibility = [ "*" ] + poisonous = [ "audio_codecs" ] + sources = [ + "audio_decoder_isac_float.cc", + "audio_decoder_isac_float.h", + ] + deps = [ + "..:audio_codecs_api", + "../../../modules/audio_coding:isac", + "../../../rtc_base:rtc_base_approved", + "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} diff --git a/api/audio_codecs/isac/audio_decoder_isac.h b/api/audio_codecs/isac/audio_decoder_isac.h new file mode 100644 index 0000000..f4e9331 --- /dev/null +++ b/api/audio_codecs/isac/audio_decoder_isac.h @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_H_ +#define API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_H_ + +#if WEBRTC_USE_BUILTIN_ISAC_FIX && !WEBRTC_USE_BUILTIN_ISAC_FLOAT +#include "api/audio_codecs/isac/audio_decoder_isac_fix.h" // nogncheck +#elif WEBRTC_USE_BUILTIN_ISAC_FLOAT && !WEBRTC_USE_BUILTIN_ISAC_FIX +#include "api/audio_codecs/isac/audio_decoder_isac_float.h" // nogncheck +#else +#error "Must choose either fix or float" +#endif + +namespace webrtc { + +#if WEBRTC_USE_BUILTIN_ISAC_FIX +using AudioDecoderIsac = AudioDecoderIsacFix; +#elif WEBRTC_USE_BUILTIN_ISAC_FLOAT +using AudioDecoderIsac = AudioDecoderIsacFloat; +#endif + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_H_ diff --git a/api/audio_codecs/isac/audio_decoder_isac_fix.cc b/api/audio_codecs/isac/audio_decoder_isac_fix.cc new file mode 100644 index 0000000..21d0da3 --- /dev/null +++ b/api/audio_codecs/isac/audio_decoder_isac_fix.cc @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio_codecs/isac/audio_decoder_isac_fix.h" + +#include + +#include "absl/strings/match.h" +#include "modules/audio_coding/codecs/isac/fix/include/audio_decoder_isacfix.h" + +namespace webrtc { + +absl::optional AudioDecoderIsacFix::SdpToConfig( + const SdpAudioFormat& format) { + return absl::EqualsIgnoreCase(format.name, "ISAC") && + format.clockrate_hz == 16000 && format.num_channels == 1 + ? 
absl::optional(Config()) + : absl::nullopt; +} + +void AudioDecoderIsacFix::AppendSupportedDecoders( + std::vector* specs) { + specs->push_back({{"ISAC", 16000, 1}, {16000, 1, 32000, 10000, 32000}}); +} + +std::unique_ptr AudioDecoderIsacFix::MakeAudioDecoder( + Config config, + absl::optional /*codec_pair_id*/) { + AudioDecoderIsacFixImpl::Config c; + c.sample_rate_hz = 16000; + return std::make_unique(c); +} + +} // namespace webrtc diff --git a/api/audio_codecs/isac/audio_decoder_isac_fix.h b/api/audio_codecs/isac/audio_decoder_isac_fix.h new file mode 100644 index 0000000..200914a --- /dev/null +++ b/api/audio_codecs/isac/audio_decoder_isac_fix.h @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FIX_H_ +#define API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FIX_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_format.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// iSAC decoder API (fixed-point implementation) for use as a template +// parameter to CreateAudioDecoderFactory<...>(). +struct RTC_EXPORT AudioDecoderIsacFix { + struct Config {}; // Empty---no config values needed! 
+ static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); + static void AppendSupportedDecoders(std::vector* specs); + static std::unique_ptr MakeAudioDecoder( + Config config, + absl::optional codec_pair_id = absl::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FIX_H_ diff --git a/api/audio_codecs/isac/audio_decoder_isac_float.cc b/api/audio_codecs/isac/audio_decoder_isac_float.cc new file mode 100644 index 0000000..4efc2ea --- /dev/null +++ b/api/audio_codecs/isac/audio_decoder_isac_float.cc @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio_codecs/isac/audio_decoder_isac_float.h" + +#include + +#include "absl/strings/match.h" +#include "modules/audio_coding/codecs/isac/main/include/audio_decoder_isac.h" + +namespace webrtc { + +absl::optional +AudioDecoderIsacFloat::SdpToConfig(const SdpAudioFormat& format) { + if (absl::EqualsIgnoreCase(format.name, "ISAC") && + (format.clockrate_hz == 16000 || format.clockrate_hz == 32000) && + format.num_channels == 1) { + Config config; + config.sample_rate_hz = format.clockrate_hz; + return config; + } else { + return absl::nullopt; + } +} + +void AudioDecoderIsacFloat::AppendSupportedDecoders( + std::vector* specs) { + specs->push_back({{"ISAC", 16000, 1}, {16000, 1, 32000, 10000, 32000}}); + specs->push_back({{"ISAC", 32000, 1}, {32000, 1, 56000, 10000, 56000}}); +} + +std::unique_ptr AudioDecoderIsacFloat::MakeAudioDecoder( + Config config, + absl::optional /*codec_pair_id*/) { + RTC_DCHECK(config.IsOk()); + AudioDecoderIsacFloatImpl::Config c; + c.sample_rate_hz = 
config.sample_rate_hz; + return std::make_unique(c); +} + +} // namespace webrtc diff --git a/api/audio_codecs/isac/audio_decoder_isac_float.h b/api/audio_codecs/isac/audio_decoder_isac_float.h new file mode 100644 index 0000000..e78f8b8 --- /dev/null +++ b/api/audio_codecs/isac/audio_decoder_isac_float.h @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FLOAT_H_ +#define API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FLOAT_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_format.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// iSAC decoder API (floating-point implementation) for use as a template +// parameter to CreateAudioDecoderFactory<...>(). 
+struct RTC_EXPORT AudioDecoderIsacFloat { + struct Config { + bool IsOk() const { + return sample_rate_hz == 16000 || sample_rate_hz == 32000; + } + int sample_rate_hz = 16000; + }; + static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); + static void AppendSupportedDecoders(std::vector* specs); + static std::unique_ptr MakeAudioDecoder( + Config config, + absl::optional codec_pair_id = absl::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_ISAC_AUDIO_DECODER_ISAC_FLOAT_H_ diff --git a/api/audio_codecs/isac/audio_encoder_isac.h b/api/audio_codecs/isac/audio_encoder_isac.h new file mode 100644 index 0000000..3cb0a1f --- /dev/null +++ b/api/audio_codecs/isac/audio_encoder_isac.h @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_H_ +#define API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_H_ + +#if WEBRTC_USE_BUILTIN_ISAC_FIX && !WEBRTC_USE_BUILTIN_ISAC_FLOAT +#include "api/audio_codecs/isac/audio_encoder_isac_fix.h" // nogncheck +#elif WEBRTC_USE_BUILTIN_ISAC_FLOAT && !WEBRTC_USE_BUILTIN_ISAC_FIX +#include "api/audio_codecs/isac/audio_encoder_isac_float.h" // nogncheck +#else +#error "Must choose either fix or float" +#endif + +namespace webrtc { + +#if WEBRTC_USE_BUILTIN_ISAC_FIX +using AudioEncoderIsac = AudioEncoderIsacFix; +#elif WEBRTC_USE_BUILTIN_ISAC_FLOAT +using AudioEncoderIsac = AudioEncoderIsacFloat; +#endif + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_H_ diff --git a/api/audio_codecs/isac/audio_encoder_isac_fix.cc b/api/audio_codecs/isac/audio_encoder_isac_fix.cc new file mode 100644 index 0000000..7cf55b9 --- /dev/null +++ b/api/audio_codecs/isac/audio_encoder_isac_fix.cc @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/audio_codecs/isac/audio_encoder_isac_fix.h" + +#include + +#include "absl/strings/match.h" +#include "modules/audio_coding/codecs/isac/fix/include/audio_encoder_isacfix.h" +#include "rtc_base/string_to_number.h" + +namespace webrtc { + +absl::optional AudioEncoderIsacFix::SdpToConfig( + const SdpAudioFormat& format) { + if (absl::EqualsIgnoreCase(format.name, "ISAC") && + format.clockrate_hz == 16000 && format.num_channels == 1) { + Config config; + const auto ptime_iter = format.parameters.find("ptime"); + if (ptime_iter != format.parameters.end()) { + const auto ptime = rtc::StringToNumber(ptime_iter->second); + if (ptime && *ptime >= 60) { + config.frame_size_ms = 60; + } + } + return config; + } else { + return absl::nullopt; + } +} + +void AudioEncoderIsacFix::AppendSupportedEncoders( + std::vector* specs) { + const SdpAudioFormat fmt = {"ISAC", 16000, 1}; + const AudioCodecInfo info = QueryAudioEncoder(*SdpToConfig(fmt)); + specs->push_back({fmt, info}); +} + +AudioCodecInfo AudioEncoderIsacFix::QueryAudioEncoder( + AudioEncoderIsacFix::Config config) { + RTC_DCHECK(config.IsOk()); + return {16000, 1, 32000, 10000, 32000}; +} + +std::unique_ptr AudioEncoderIsacFix::MakeAudioEncoder( + AudioEncoderIsacFix::Config config, + int payload_type, + absl::optional /*codec_pair_id*/) { + RTC_DCHECK(config.IsOk()); + AudioEncoderIsacFixImpl::Config c; + c.frame_size_ms = config.frame_size_ms; + c.bit_rate = config.bit_rate; + c.payload_type = payload_type; + return std::make_unique(c); +} + +} // namespace webrtc diff --git a/api/audio_codecs/isac/audio_encoder_isac_fix.h b/api/audio_codecs/isac/audio_encoder_isac_fix.h new file mode 100644 index 0000000..e50d9f5 --- /dev/null +++ b/api/audio_codecs/isac/audio_encoder_isac_fix.h @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FIX_H_ +#define API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FIX_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// iSAC encoder API (fixed-point implementation) for use as a template +// parameter to CreateAudioEncoderFactory<...>(). +struct RTC_EXPORT AudioEncoderIsacFix { + struct Config { + bool IsOk() const { + if (frame_size_ms != 30 && frame_size_ms != 60) { + return false; + } + if (bit_rate < 10000 || bit_rate > 32000) { + return false; + } + return true; + } + int frame_size_ms = 30; + int bit_rate = 32000; // Limit on short-term average bit rate, in bits/s. + }; + static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); + static void AppendSupportedEncoders(std::vector* specs); + static AudioCodecInfo QueryAudioEncoder(Config config); + static std::unique_ptr MakeAudioEncoder( + Config config, + int payload_type, + absl::optional codec_pair_id = absl::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FIX_H_ diff --git a/api/audio_codecs/isac/audio_encoder_isac_float.cc b/api/audio_codecs/isac/audio_encoder_isac_float.cc new file mode 100644 index 0000000..6f684c8 --- /dev/null +++ b/api/audio_codecs/isac/audio_encoder_isac_float.cc @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio_codecs/isac/audio_encoder_isac_float.h" + +#include + +#include "absl/strings/match.h" +#include "modules/audio_coding/codecs/isac/main/include/audio_encoder_isac.h" +#include "rtc_base/string_to_number.h" + +namespace webrtc { + +absl::optional +AudioEncoderIsacFloat::SdpToConfig(const SdpAudioFormat& format) { + if (absl::EqualsIgnoreCase(format.name, "ISAC") && + (format.clockrate_hz == 16000 || format.clockrate_hz == 32000) && + format.num_channels == 1) { + Config config; + config.sample_rate_hz = format.clockrate_hz; + config.bit_rate = format.clockrate_hz == 16000 ? 32000 : 56000; + if (config.sample_rate_hz == 16000) { + // For sample rate 16 kHz, optionally use 60 ms frames, instead of the + // default 30 ms. + const auto ptime_iter = format.parameters.find("ptime"); + if (ptime_iter != format.parameters.end()) { + const auto ptime = rtc::StringToNumber(ptime_iter->second); + if (ptime && *ptime >= 60) { + config.frame_size_ms = 60; + } + } + } + return config; + } else { + return absl::nullopt; + } +} + +void AudioEncoderIsacFloat::AppendSupportedEncoders( + std::vector* specs) { + for (int sample_rate_hz : {16000, 32000}) { + const SdpAudioFormat fmt = {"ISAC", sample_rate_hz, 1}; + const AudioCodecInfo info = QueryAudioEncoder(*SdpToConfig(fmt)); + specs->push_back({fmt, info}); + } +} + +AudioCodecInfo AudioEncoderIsacFloat::QueryAudioEncoder( + const AudioEncoderIsacFloat::Config& config) { + RTC_DCHECK(config.IsOk()); + constexpr int min_bitrate = 10000; + const int max_bitrate = config.sample_rate_hz == 16000 ? 
32000 : 56000; + const int default_bitrate = max_bitrate; + return {config.sample_rate_hz, 1, default_bitrate, min_bitrate, max_bitrate}; +} + +std::unique_ptr AudioEncoderIsacFloat::MakeAudioEncoder( + const AudioEncoderIsacFloat::Config& config, + int payload_type, + absl::optional /*codec_pair_id*/) { + RTC_DCHECK(config.IsOk()); + AudioEncoderIsacFloatImpl::Config c; + c.payload_type = payload_type; + c.sample_rate_hz = config.sample_rate_hz; + c.frame_size_ms = config.frame_size_ms; + c.bit_rate = config.bit_rate; + return std::make_unique(c); +} + +} // namespace webrtc diff --git a/api/audio_codecs/isac/audio_encoder_isac_float.h b/api/audio_codecs/isac/audio_encoder_isac_float.h new file mode 100644 index 0000000..0cb9c17 --- /dev/null +++ b/api/audio_codecs/isac/audio_encoder_isac_float.h @@ -0,0 +1,66 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FLOAT_H_ +#define API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FLOAT_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// iSAC encoder API (floating-point implementation) for use as a template +// parameter to CreateAudioEncoderFactory<...>(). 
+struct RTC_EXPORT AudioEncoderIsacFloat { + struct Config { + bool IsOk() const { + switch (sample_rate_hz) { + case 16000: + if (frame_size_ms != 30 && frame_size_ms != 60) { + return false; + } + if (bit_rate < 10000 || bit_rate > 32000) { + return false; + } + return true; + case 32000: + if (frame_size_ms != 30) { + return false; + } + if (bit_rate < 10000 || bit_rate > 56000) { + return false; + } + return true; + default: + return false; + } + } + int sample_rate_hz = 16000; + int frame_size_ms = 30; + int bit_rate = 32000; // Limit on short-term average bit rate, in bits/s. + }; + static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); + static void AppendSupportedEncoders(std::vector* specs); + static AudioCodecInfo QueryAudioEncoder(const Config& config); + static std::unique_ptr MakeAudioEncoder( + const Config& config, + int payload_type, + absl::optional codec_pair_id = absl::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_ISAC_AUDIO_ENCODER_ISAC_FLOAT_H_ diff --git a/api/audio_codecs/opus/BUILD.gn b/api/audio_codecs/opus/BUILD.gn new file mode 100644 index 0000000..586e9b3 --- /dev/null +++ b/api/audio_codecs/opus/BUILD.gn @@ -0,0 +1,112 @@ +# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../../webrtc.gni") +if (is_android) { + import("//build/config/android/config.gni") + import("//build/config/android/rules.gni") +} + +rtc_library("audio_encoder_opus_config") { + visibility = [ "*" ] + sources = [ + "audio_encoder_multi_channel_opus_config.cc", + "audio_encoder_multi_channel_opus_config.h", + "audio_encoder_opus_config.cc", + "audio_encoder_opus_config.h", + ] + deps = [ + "../../../rtc_base:rtc_base_approved", + "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + defines = [] + if (rtc_opus_variable_complexity) { + defines += [ "WEBRTC_OPUS_VARIABLE_COMPLEXITY=1" ] + } else { + defines += [ "WEBRTC_OPUS_VARIABLE_COMPLEXITY=0" ] + } +} + +rtc_source_set("audio_decoder_opus_config") { + visibility = [ "*" ] + sources = [ "audio_decoder_multi_channel_opus_config.h" ] +} + +rtc_library("audio_encoder_opus") { + visibility = [ "*" ] + poisonous = [ "audio_codecs" ] + public = [ "audio_encoder_opus.h" ] + sources = [ "audio_encoder_opus.cc" ] + deps = [ + ":audio_encoder_opus_config", + "..:audio_codecs_api", + "../../../modules/audio_coding:webrtc_opus", + "../../../rtc_base:rtc_base_approved", + "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("audio_decoder_opus") { + visibility = [ "*" ] + poisonous = [ "audio_codecs" ] + sources = [ + "audio_decoder_opus.cc", + "audio_decoder_opus.h", + ] + deps = [ + "..:audio_codecs_api", + "../../../modules/audio_coding:webrtc_opus", + "../../../rtc_base:rtc_base_approved", + "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("audio_encoder_multiopus") { + visibility = [ "*" ] + poisonous = [ "audio_codecs" ] + public = [ "audio_encoder_multi_channel_opus.h" ] + sources = [ 
"audio_encoder_multi_channel_opus.cc" ] + deps = [ + "..:audio_codecs_api", + "../../../modules/audio_coding:webrtc_multiopus", + "../../../rtc_base:rtc_base_approved", + "../../../rtc_base/system:rtc_export", + "../opus:audio_encoder_opus_config", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("audio_decoder_multiopus") { + visibility = [ "*" ] + poisonous = [ "audio_codecs" ] + sources = [ + "audio_decoder_multi_channel_opus.cc", + "audio_decoder_multi_channel_opus.h", + ] + deps = [ + ":audio_decoder_opus_config", + "..:audio_codecs_api", + "../../../modules/audio_coding:webrtc_multiopus", + "../../../rtc_base:rtc_base_approved", + "../../../rtc_base/system:rtc_export", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} diff --git a/api/audio_codecs/opus/audio_decoder_multi_channel_opus.cc b/api/audio_codecs/opus/audio_decoder_multi_channel_opus.cc new file mode 100644 index 0000000..6ba2b6d --- /dev/null +++ b/api/audio_codecs/opus/audio_decoder_multi_channel_opus.cc @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/audio_codecs/opus/audio_decoder_multi_channel_opus.h" + +#include +#include +#include + +#include "absl/memory/memory.h" +#include "absl/strings/match.h" +#include "modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_impl.h" + +namespace webrtc { + +absl::optional +AudioDecoderMultiChannelOpus::SdpToConfig(const SdpAudioFormat& format) { + return AudioDecoderMultiChannelOpusImpl::SdpToConfig(format); +} + +void AudioDecoderMultiChannelOpus::AppendSupportedDecoders( + std::vector* specs) { + // To get full utilization of the surround support of the Opus lib, we can + // mark which channel is the low frequency effects (LFE). But that is not done + // ATM. + { + AudioCodecInfo surround_5_1_opus_info{48000, 6, + /* default_bitrate_bps= */ 128000}; + surround_5_1_opus_info.allow_comfort_noise = false; + surround_5_1_opus_info.supports_network_adaption = false; + SdpAudioFormat opus_format({"multiopus", + 48000, + 6, + {{"minptime", "10"}, + {"useinbandfec", "1"}, + {"channel_mapping", "0,4,1,2,3,5"}, + {"num_streams", "4"}, + {"coupled_streams", "2"}}}); + specs->push_back({std::move(opus_format), surround_5_1_opus_info}); + } + { + AudioCodecInfo surround_7_1_opus_info{48000, 8, + /* default_bitrate_bps= */ 200000}; + surround_7_1_opus_info.allow_comfort_noise = false; + surround_7_1_opus_info.supports_network_adaption = false; + SdpAudioFormat opus_format({"multiopus", + 48000, + 8, + {{"minptime", "10"}, + {"useinbandfec", "1"}, + {"channel_mapping", "0,6,1,2,3,4,5,7"}, + {"num_streams", "5"}, + {"coupled_streams", "3"}}}); + specs->push_back({std::move(opus_format), surround_7_1_opus_info}); + } +} + +std::unique_ptr AudioDecoderMultiChannelOpus::MakeAudioDecoder( + AudioDecoderMultiChannelOpusConfig config, + absl::optional /*codec_pair_id*/) { + return AudioDecoderMultiChannelOpusImpl::MakeAudioDecoder(config); +} +} // namespace webrtc diff --git a/api/audio_codecs/opus/audio_decoder_multi_channel_opus.h 
b/api/audio_codecs/opus/audio_decoder_multi_channel_opus.h new file mode 100644 index 0000000..b5ca0fe --- /dev/null +++ b/api/audio_codecs/opus/audio_decoder_multi_channel_opus.h @@ -0,0 +1,40 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_OPUS_AUDIO_DECODER_MULTI_CHANNEL_OPUS_H_ +#define API_AUDIO_CODECS_OPUS_AUDIO_DECODER_MULTI_CHANNEL_OPUS_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/audio_codecs/opus/audio_decoder_multi_channel_opus_config.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// Opus decoder API for use as a template parameter to +// CreateAudioDecoderFactory<...>(). +struct RTC_EXPORT AudioDecoderMultiChannelOpus { + using Config = AudioDecoderMultiChannelOpusConfig; + static absl::optional SdpToConfig( + const SdpAudioFormat& audio_format); + static void AppendSupportedDecoders(std::vector* specs); + static std::unique_ptr MakeAudioDecoder( + AudioDecoderMultiChannelOpusConfig config, + absl::optional codec_pair_id = absl::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_OPUS_AUDIO_DECODER_MULTI_CHANNEL_OPUS_H_ diff --git a/api/audio_codecs/opus/audio_decoder_multi_channel_opus_config.h b/api/audio_codecs/opus/audio_decoder_multi_channel_opus_config.h new file mode 100644 index 0000000..30bc76e --- /dev/null +++ b/api/audio_codecs/opus/audio_decoder_multi_channel_opus_config.h @@ -0,0 +1,63 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. 
All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_OPUS_AUDIO_DECODER_MULTI_CHANNEL_OPUS_CONFIG_H_ +#define API_AUDIO_CODECS_OPUS_AUDIO_DECODER_MULTI_CHANNEL_OPUS_CONFIG_H_ + +#include + +namespace webrtc { +struct AudioDecoderMultiChannelOpusConfig { + // The number of channels that the decoder will output. + int num_channels; + + // Number of mono or stereo encoded Opus streams. + int num_streams; + + // Number of channel pairs coupled together, see RFC 7845 section + // 5.1.1. Has to be less than the number of streams. + int coupled_streams; + + // Channel mapping table, defines the mapping from encoded streams to output + // channels. See RFC 7845 section 5.1.1. + std::vector channel_mapping; + + bool IsOk() const { + if (num_channels < 0 || num_streams < 0 || coupled_streams < 0) { + return false; + } + if (num_streams < coupled_streams) { + return false; + } + if (channel_mapping.size() != static_cast(num_channels)) { + return false; + } + + // Every mono stream codes one channel, every coupled stream codes two. This + // is the total coded channel count: + const int max_coded_channel = num_streams + coupled_streams; + for (const auto& x : channel_mapping) { + // Coded channels >= max_coded_channel don't exist. Except for 255, which + // tells Opus to put silence in output channel x. 
+ if (x >= max_coded_channel && x != 255) { + return false; + } + } + + if (num_channels > 255 || max_coded_channel >= 255) { + return false; + } + return true; + } +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_OPUS_AUDIO_DECODER_MULTI_CHANNEL_OPUS_CONFIG_H_ diff --git a/api/audio_codecs/opus/audio_decoder_opus.cc b/api/audio_codecs/opus/audio_decoder_opus.cc new file mode 100644 index 0000000..6b4e0d3 --- /dev/null +++ b/api/audio_codecs/opus/audio_decoder_opus.cc @@ -0,0 +1,79 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio_codecs/opus/audio_decoder_opus.h" + +#include +#include +#include + +#include "absl/strings/match.h" +#include "modules/audio_coding/codecs/opus/audio_decoder_opus.h" + +namespace webrtc { + +bool AudioDecoderOpus::Config::IsOk() const { + if (sample_rate_hz != 16000 && sample_rate_hz != 48000) { + // Unsupported sample rate. (libopus supports a few other rates as + // well; we can add support for them when needed.) + return false; + } + if (num_channels != 1 && num_channels != 2) { + return false; + } + return true; +} + +absl::optional AudioDecoderOpus::SdpToConfig( + const SdpAudioFormat& format) { + const auto num_channels = [&]() -> absl::optional { + auto stereo = format.parameters.find("stereo"); + if (stereo != format.parameters.end()) { + if (stereo->second == "0") { + return 1; + } else if (stereo->second == "1") { + return 2; + } else { + return absl::nullopt; // Bad stereo parameter. + } + } + return 1; // Default to mono. 
+ }(); + if (absl::EqualsIgnoreCase(format.name, "opus") && + format.clockrate_hz == 48000 && format.num_channels == 2 && + num_channels) { + Config config; + config.num_channels = *num_channels; + RTC_DCHECK(config.IsOk()); + return config; + } else { + return absl::nullopt; + } +} + +void AudioDecoderOpus::AppendSupportedDecoders( + std::vector* specs) { + AudioCodecInfo opus_info{48000, 1, 64000, 6000, 510000}; + opus_info.allow_comfort_noise = false; + opus_info.supports_network_adaption = true; + SdpAudioFormat opus_format( + {"opus", 48000, 2, {{"minptime", "10"}, {"useinbandfec", "1"}}}); + specs->push_back({std::move(opus_format), opus_info}); +} + +std::unique_ptr AudioDecoderOpus::MakeAudioDecoder( + Config config, + absl::optional /*codec_pair_id*/) { + RTC_DCHECK(config.IsOk()); + return std::make_unique(config.num_channels, + config.sample_rate_hz); +} + +} // namespace webrtc diff --git a/api/audio_codecs/opus/audio_decoder_opus.h b/api/audio_codecs/opus/audio_decoder_opus.h new file mode 100644 index 0000000..ec0f61d --- /dev/null +++ b/api/audio_codecs/opus/audio_decoder_opus.h @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_AUDIO_CODECS_OPUS_AUDIO_DECODER_OPUS_H_ +#define API_AUDIO_CODECS_OPUS_AUDIO_DECODER_OPUS_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_format.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// Opus decoder API for use as a template parameter to +// CreateAudioDecoderFactory<...>(). +struct RTC_EXPORT AudioDecoderOpus { + struct Config { + bool IsOk() const; // Checks if the values are currently OK. + int sample_rate_hz = 48000; + int num_channels = 1; + }; + static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); + static void AppendSupportedDecoders(std::vector* specs); + static std::unique_ptr MakeAudioDecoder( + Config config, + absl::optional codec_pair_id = absl::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_OPUS_AUDIO_DECODER_OPUS_H_ diff --git a/api/audio_codecs/opus/audio_encoder_multi_channel_opus.cc b/api/audio_codecs/opus/audio_encoder_multi_channel_opus.cc new file mode 100644 index 0000000..758eaae --- /dev/null +++ b/api/audio_codecs/opus/audio_encoder_multi_channel_opus.cc @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/audio_codecs/opus/audio_encoder_multi_channel_opus.h" + +#include + +#include "modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_impl.h" + +namespace webrtc { + +absl::optional +AudioEncoderMultiChannelOpus::SdpToConfig(const SdpAudioFormat& format) { + return AudioEncoderMultiChannelOpusImpl::SdpToConfig(format); +} + +void AudioEncoderMultiChannelOpus::AppendSupportedEncoders( + std::vector* specs) { + // To get full utilization of the surround support of the Opus lib, we can + // mark which channel is the low frequency effects (LFE). But that is not done + // ATM. + { + AudioCodecInfo surround_5_1_opus_info{48000, 6, + /* default_bitrate_bps= */ 128000}; + surround_5_1_opus_info.allow_comfort_noise = false; + surround_5_1_opus_info.supports_network_adaption = false; + SdpAudioFormat opus_format({"multiopus", + 48000, + 6, + {{"minptime", "10"}, + {"useinbandfec", "1"}, + {"channel_mapping", "0,4,1,2,3,5"}, + {"num_streams", "4"}, + {"coupled_streams", "2"}}}); + specs->push_back({std::move(opus_format), surround_5_1_opus_info}); + } + { + AudioCodecInfo surround_7_1_opus_info{48000, 8, + /* default_bitrate_bps= */ 200000}; + surround_7_1_opus_info.allow_comfort_noise = false; + surround_7_1_opus_info.supports_network_adaption = false; + SdpAudioFormat opus_format({"multiopus", + 48000, + 8, + {{"minptime", "10"}, + {"useinbandfec", "1"}, + {"channel_mapping", "0,6,1,2,3,4,5,7"}, + {"num_streams", "5"}, + {"coupled_streams", "3"}}}); + specs->push_back({std::move(opus_format), surround_7_1_opus_info}); + } +} + +AudioCodecInfo AudioEncoderMultiChannelOpus::QueryAudioEncoder( + const AudioEncoderMultiChannelOpusConfig& config) { + return AudioEncoderMultiChannelOpusImpl::QueryAudioEncoder(config); +} + +std::unique_ptr AudioEncoderMultiChannelOpus::MakeAudioEncoder( + const AudioEncoderMultiChannelOpusConfig& config, + int payload_type, + absl::optional /*codec_pair_id*/) { + return 
AudioEncoderMultiChannelOpusImpl::MakeAudioEncoder(config, + payload_type); +} + +} // namespace webrtc diff --git a/api/audio_codecs/opus/audio_encoder_multi_channel_opus.h b/api/audio_codecs/opus/audio_encoder_multi_channel_opus.h new file mode 100644 index 0000000..977a3a4 --- /dev/null +++ b/api/audio_codecs/opus/audio_encoder_multi_channel_opus.h @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_OPUS_AUDIO_ENCODER_MULTI_CHANNEL_OPUS_H_ +#define API_AUDIO_CODECS_OPUS_AUDIO_ENCODER_MULTI_CHANNEL_OPUS_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// Opus encoder API for use as a template parameter to +// CreateAudioEncoderFactory<...>(). 
+struct RTC_EXPORT AudioEncoderMultiChannelOpus { + using Config = AudioEncoderMultiChannelOpusConfig; + static absl::optional SdpToConfig(const SdpAudioFormat& audio_format); + static void AppendSupportedEncoders(std::vector* specs); + static AudioCodecInfo QueryAudioEncoder(const Config& config); + static std::unique_ptr MakeAudioEncoder( + const Config& config, + int payload_type, + absl::optional codec_pair_id = absl::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_OPUS_AUDIO_ENCODER_MULTI_CHANNEL_OPUS_H_ diff --git a/api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.cc b/api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.cc new file mode 100644 index 0000000..f01caf1 --- /dev/null +++ b/api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.cc @@ -0,0 +1,106 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.h" + +namespace webrtc { + +namespace { +constexpr int kDefaultComplexity = 9; +} // namespace + +AudioEncoderMultiChannelOpusConfig::AudioEncoderMultiChannelOpusConfig() + : frame_size_ms(kDefaultFrameSizeMs), + num_channels(1), + application(ApplicationMode::kVoip), + bitrate_bps(32000), + fec_enabled(false), + cbr_enabled(false), + dtx_enabled(false), + max_playback_rate_hz(48000), + complexity(kDefaultComplexity), + num_streams(-1), + coupled_streams(-1) {} +AudioEncoderMultiChannelOpusConfig::AudioEncoderMultiChannelOpusConfig( + const AudioEncoderMultiChannelOpusConfig&) = default; +AudioEncoderMultiChannelOpusConfig::~AudioEncoderMultiChannelOpusConfig() = + default; +AudioEncoderMultiChannelOpusConfig& AudioEncoderMultiChannelOpusConfig:: +operator=(const AudioEncoderMultiChannelOpusConfig&) = default; + +bool AudioEncoderMultiChannelOpusConfig::IsOk() const { + if (frame_size_ms <= 0 || frame_size_ms % 10 != 0) + return false; + if (num_channels < 0 || num_channels >= 255) { + return false; + } + if (bitrate_bps < kMinBitrateBps || bitrate_bps > kMaxBitrateBps) + return false; + if (complexity < 0 || complexity > 10) + return false; + + // Check the lengths: + if (num_channels < 0 || num_streams < 0 || coupled_streams < 0) { + return false; + } + if (num_streams < coupled_streams) { + return false; + } + if (channel_mapping.size() != static_cast(num_channels)) { + return false; + } + + // Every mono stream codes one channel, every coupled stream codes two. This + // is the total coded channel count: + const int max_coded_channel = num_streams + coupled_streams; + for (const auto& x : channel_mapping) { + // Coded channels >= max_coded_channel don't exist. Except for 255, which + // tells Opus to ignore input channel x. + if (x >= max_coded_channel && x != 255) { + return false; + } + } + + // Inverse mapping. 
+ constexpr int kNotSet = -1; + std::vector coded_channels_to_input_channels(max_coded_channel, kNotSet); + for (size_t i = 0; i < num_channels; ++i) { + if (channel_mapping[i] == 255) { + continue; + } + + // If it's not ignored, put it in the inverted mapping. But first check if + // we've told Opus to use another input channel for this coded channel: + const int coded_channel = channel_mapping[i]; + if (coded_channels_to_input_channels[coded_channel] != kNotSet) { + // Coded channel `coded_channel` comes from both input channels + // `coded_channels_to_input_channels[coded_channel]` and `i`. + return false; + } + + coded_channels_to_input_channels[coded_channel] = i; + } + + // Check that we specified what input the encoder should use to produce + // every coded channel. + for (int i = 0; i < max_coded_channel; ++i) { + if (coded_channels_to_input_channels[i] == kNotSet) { + // Coded channel `i` has unspecified input channel. + return false; + } + } + + if (num_channels > 255 || max_coded_channel >= 255) { + return false; + } + return true; +} + +} // namespace webrtc diff --git a/api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.h b/api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.h new file mode 100644 index 0000000..9b51246 --- /dev/null +++ b/api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.h @@ -0,0 +1,66 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_AUDIO_CODECS_OPUS_AUDIO_ENCODER_MULTI_CHANNEL_OPUS_CONFIG_H_ +#define API_AUDIO_CODECS_OPUS_AUDIO_ENCODER_MULTI_CHANNEL_OPUS_CONFIG_H_ + +#include + +#include + +#include "absl/types/optional.h" +#include "api/audio_codecs/opus/audio_encoder_opus_config.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +struct RTC_EXPORT AudioEncoderMultiChannelOpusConfig { + static constexpr int kDefaultFrameSizeMs = 20; + + // Opus API allows a min bitrate of 500bps, but Opus documentation suggests + // bitrate should be in the range of 6000 to 510000, inclusive. + static constexpr int kMinBitrateBps = 6000; + static constexpr int kMaxBitrateBps = 510000; + + AudioEncoderMultiChannelOpusConfig(); + AudioEncoderMultiChannelOpusConfig(const AudioEncoderMultiChannelOpusConfig&); + ~AudioEncoderMultiChannelOpusConfig(); + AudioEncoderMultiChannelOpusConfig& operator=( + const AudioEncoderMultiChannelOpusConfig&); + + int frame_size_ms; + size_t num_channels; + enum class ApplicationMode { kVoip, kAudio }; + ApplicationMode application; + int bitrate_bps; + bool fec_enabled; + bool cbr_enabled; + bool dtx_enabled; + int max_playback_rate_hz; + std::vector supported_frame_lengths_ms; + + int complexity; + + // Number of mono/stereo Opus streams. + int num_streams; + + // Number of channel pairs coupled together, see RFC 7845 section + // 5.1.1. Has to be less than the number of streams + int coupled_streams; + + // Channel mapping table, defines the mapping from encoded streams to input + // channels. See RFC 7845 section 5.1.1. 
+ std::vector channel_mapping; + + bool IsOk() const; +}; + +} // namespace webrtc +#endif // API_AUDIO_CODECS_OPUS_AUDIO_ENCODER_MULTI_CHANNEL_OPUS_CONFIG_H_ diff --git a/api/audio_codecs/opus/audio_encoder_opus.cc b/api/audio_codecs/opus/audio_encoder_opus.cc new file mode 100644 index 0000000..36d82b3 --- /dev/null +++ b/api/audio_codecs/opus/audio_encoder_opus.cc @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio_codecs/opus/audio_encoder_opus.h" + +#include "modules/audio_coding/codecs/opus/audio_encoder_opus.h" + +namespace webrtc { + +absl::optional AudioEncoderOpus::SdpToConfig( + const SdpAudioFormat& format) { + return AudioEncoderOpusImpl::SdpToConfig(format); +} + +void AudioEncoderOpus::AppendSupportedEncoders( + std::vector* specs) { + AudioEncoderOpusImpl::AppendSupportedEncoders(specs); +} + +AudioCodecInfo AudioEncoderOpus::QueryAudioEncoder( + const AudioEncoderOpusConfig& config) { + return AudioEncoderOpusImpl::QueryAudioEncoder(config); +} + +std::unique_ptr AudioEncoderOpus::MakeAudioEncoder( + const AudioEncoderOpusConfig& config, + int payload_type, + absl::optional /*codec_pair_id*/) { + return AudioEncoderOpusImpl::MakeAudioEncoder(config, payload_type); +} + +} // namespace webrtc diff --git a/api/audio_codecs/opus/audio_encoder_opus.h b/api/audio_codecs/opus/audio_encoder_opus.h new file mode 100644 index 0000000..03cb0d6 --- /dev/null +++ b/api/audio_codecs/opus/audio_encoder_opus.h @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_OPUS_AUDIO_ENCODER_OPUS_H_ +#define API_AUDIO_CODECS_OPUS_AUDIO_ENCODER_OPUS_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_encoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/audio_codecs/opus/audio_encoder_opus_config.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// Opus encoder API for use as a template parameter to +// CreateAudioEncoderFactory<...>(). +struct RTC_EXPORT AudioEncoderOpus { + using Config = AudioEncoderOpusConfig; + static absl::optional SdpToConfig( + const SdpAudioFormat& audio_format); + static void AppendSupportedEncoders(std::vector* specs); + static AudioCodecInfo QueryAudioEncoder(const AudioEncoderOpusConfig& config); + static std::unique_ptr MakeAudioEncoder( + const AudioEncoderOpusConfig& config, + int payload_type, + absl::optional codec_pair_id = absl::nullopt); +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_OPUS_AUDIO_ENCODER_OPUS_H_ diff --git a/api/audio_codecs/opus/audio_encoder_opus_config.cc b/api/audio_codecs/opus/audio_encoder_opus_config.cc new file mode 100644 index 0000000..2f36d02 --- /dev/null +++ b/api/audio_codecs/opus/audio_encoder_opus_config.cc @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio_codecs/opus/audio_encoder_opus_config.h" + +namespace webrtc { + +namespace { + +#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS) || defined(WEBRTC_ARCH_ARM) +// If we are on Android, iOS and/or ARM, use a lower complexity setting by +// default, to save encoder complexity. +constexpr int kDefaultComplexity = 5; +#else +constexpr int kDefaultComplexity = 9; +#endif + +constexpr int kDefaultLowRateComplexity = + WEBRTC_OPUS_VARIABLE_COMPLEXITY ? 9 : kDefaultComplexity; + +} // namespace + +constexpr int AudioEncoderOpusConfig::kDefaultFrameSizeMs; +constexpr int AudioEncoderOpusConfig::kMinBitrateBps; +constexpr int AudioEncoderOpusConfig::kMaxBitrateBps; + +AudioEncoderOpusConfig::AudioEncoderOpusConfig() + : frame_size_ms(kDefaultFrameSizeMs), + sample_rate_hz(48000), + num_channels(1), + application(ApplicationMode::kVoip), + bitrate_bps(32000), + fec_enabled(false), + cbr_enabled(false), + max_playback_rate_hz(48000), + complexity(kDefaultComplexity), + low_rate_complexity(kDefaultLowRateComplexity), + complexity_threshold_bps(12500), + complexity_threshold_window_bps(1500), + dtx_enabled(false), + uplink_bandwidth_update_interval_ms(200), + payload_type(-1) {} +AudioEncoderOpusConfig::AudioEncoderOpusConfig(const AudioEncoderOpusConfig&) = + default; +AudioEncoderOpusConfig::~AudioEncoderOpusConfig() = default; +AudioEncoderOpusConfig& AudioEncoderOpusConfig::operator=( + const AudioEncoderOpusConfig&) = default; + +bool AudioEncoderOpusConfig::IsOk() const { + if (frame_size_ms <= 0 || frame_size_ms % 10 != 0) + return false; + if (sample_rate_hz != 16000 && sample_rate_hz != 48000) { + // Unsupported input sample rate. (libopus supports a few other rates as + // well; we can add support for them when needed.) 
+ return false; + } + if (num_channels < 0 || num_channels >= 255) { + return false; + } + if (!bitrate_bps) + return false; + if (*bitrate_bps < kMinBitrateBps || *bitrate_bps > kMaxBitrateBps) + return false; + if (complexity < 0 || complexity > 10) + return false; + if (low_rate_complexity < 0 || low_rate_complexity > 10) + return false; + return true; +} +} // namespace webrtc diff --git a/api/audio_codecs/opus/audio_encoder_opus_config.h b/api/audio_codecs/opus/audio_encoder_opus_config.h new file mode 100644 index 0000000..3c412b7 --- /dev/null +++ b/api/audio_codecs/opus/audio_encoder_opus_config.h @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_OPUS_AUDIO_ENCODER_OPUS_CONFIG_H_ +#define API_AUDIO_CODECS_OPUS_AUDIO_ENCODER_OPUS_CONFIG_H_ + +#include + +#include + +#include "absl/types/optional.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +struct RTC_EXPORT AudioEncoderOpusConfig { + static constexpr int kDefaultFrameSizeMs = 20; + + // Opus API allows a min bitrate of 500bps, but Opus documentation suggests + // bitrate should be in the range of 6000 to 510000, inclusive. + static constexpr int kMinBitrateBps = 6000; + static constexpr int kMaxBitrateBps = 510000; + + AudioEncoderOpusConfig(); + AudioEncoderOpusConfig(const AudioEncoderOpusConfig&); + ~AudioEncoderOpusConfig(); + AudioEncoderOpusConfig& operator=(const AudioEncoderOpusConfig&); + + bool IsOk() const; // Checks if the values are currently OK. 
+ + int frame_size_ms; + int sample_rate_hz; + size_t num_channels; + enum class ApplicationMode { kVoip, kAudio }; + ApplicationMode application; + + // NOTE: This member must always be set. + // TODO(kwiberg): Turn it into just an int. + absl::optional bitrate_bps; + + bool fec_enabled; + bool cbr_enabled; + int max_playback_rate_hz; + + // |complexity| is used when the bitrate goes above + // |complexity_threshold_bps| + |complexity_threshold_window_bps|; + // |low_rate_complexity| is used when the bitrate falls below + // |complexity_threshold_bps| - |complexity_threshold_window_bps|. In the + // interval in the middle, we keep using the most recent of the two + // complexity settings. + int complexity; + int low_rate_complexity; + int complexity_threshold_bps; + int complexity_threshold_window_bps; + + bool dtx_enabled; + std::vector supported_frame_lengths_ms; + int uplink_bandwidth_update_interval_ms; + + // NOTE: This member isn't necessary, and will soon go away. See + // https://bugs.chromium.org/p/webrtc/issues/detail?id=7847 + int payload_type; +}; + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_OPUS_AUDIO_ENCODER_OPUS_CONFIG_H_ diff --git a/api/audio_codecs/opus_audio_decoder_factory.cc b/api/audio_codecs/opus_audio_decoder_factory.cc new file mode 100644 index 0000000..ed68f25 --- /dev/null +++ b/api/audio_codecs/opus_audio_decoder_factory.cc @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/audio_codecs/opus_audio_decoder_factory.h" + +#include +#include + +#include "api/audio_codecs/audio_decoder_factory_template.h" +#include "api/audio_codecs/opus/audio_decoder_multi_channel_opus.h" +#include "api/audio_codecs/opus/audio_decoder_opus.h" + +namespace webrtc { + +namespace { + +// Modify an audio decoder to not advertise support for anything. +template +struct NotAdvertised { + using Config = typename T::Config; + static absl::optional SdpToConfig( + const SdpAudioFormat& audio_format) { + return T::SdpToConfig(audio_format); + } + static void AppendSupportedDecoders(std::vector* specs) { + // Don't advertise support for anything. + } + static std::unique_ptr MakeAudioDecoder( + const Config& config, + absl::optional codec_pair_id = absl::nullopt) { + return T::MakeAudioDecoder(config, codec_pair_id); + } +}; + +} // namespace + +rtc::scoped_refptr CreateOpusAudioDecoderFactory() { + return CreateAudioDecoderFactory< + AudioDecoderOpus, NotAdvertised>(); +} + +} // namespace webrtc diff --git a/api/audio_codecs/opus_audio_decoder_factory.h b/api/audio_codecs/opus_audio_decoder_factory.h new file mode 100644 index 0000000..b4f497f --- /dev/null +++ b/api/audio_codecs/opus_audio_decoder_factory.h @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_OPUS_AUDIO_DECODER_FACTORY_H_ +#define API_AUDIO_CODECS_OPUS_AUDIO_DECODER_FACTORY_H_ + +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/scoped_refptr.h" + +namespace webrtc { + +// Creates a new factory that can create only Opus audio decoders. 
Works like +// CreateAudioDecoderFactory(), but is easier to use and is +// not inline because it isn't a template. +rtc::scoped_refptr CreateOpusAudioDecoderFactory(); + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_OPUS_AUDIO_DECODER_FACTORY_H_ diff --git a/api/audio_codecs/opus_audio_encoder_factory.cc b/api/audio_codecs/opus_audio_encoder_factory.cc new file mode 100644 index 0000000..5f0c714 --- /dev/null +++ b/api/audio_codecs/opus_audio_encoder_factory.cc @@ -0,0 +1,52 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio_codecs/opus_audio_encoder_factory.h" + +#include +#include + +#include "api/audio_codecs/audio_encoder_factory_template.h" +#include "api/audio_codecs/opus/audio_encoder_multi_channel_opus.h" +#include "api/audio_codecs/opus/audio_encoder_opus.h" + +namespace webrtc { +namespace { + +// Modify an audio encoder to not advertise support for anything. +template +struct NotAdvertised { + using Config = typename T::Config; + static absl::optional SdpToConfig( + const SdpAudioFormat& audio_format) { + return T::SdpToConfig(audio_format); + } + static void AppendSupportedEncoders(std::vector* specs) { + // Don't advertise support for anything. 
+ } + static AudioCodecInfo QueryAudioEncoder(const Config& config) { + return T::QueryAudioEncoder(config); + } + static std::unique_ptr MakeAudioEncoder( + const Config& config, + int payload_type, + absl::optional codec_pair_id = absl::nullopt) { + return T::MakeAudioEncoder(config, payload_type, codec_pair_id); + } +}; + +} // namespace + +rtc::scoped_refptr CreateOpusAudioEncoderFactory() { + return CreateAudioEncoderFactory< + AudioEncoderOpus, NotAdvertised>(); +} + +} // namespace webrtc diff --git a/api/audio_codecs/opus_audio_encoder_factory.h b/api/audio_codecs/opus_audio_encoder_factory.h new file mode 100644 index 0000000..8c1683b --- /dev/null +++ b/api/audio_codecs/opus_audio_encoder_factory.h @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_CODECS_OPUS_AUDIO_ENCODER_FACTORY_H_ +#define API_AUDIO_CODECS_OPUS_AUDIO_ENCODER_FACTORY_H_ + +#include "api/audio_codecs/audio_encoder_factory.h" +#include "api/scoped_refptr.h" + +namespace webrtc { + +// Creates a new factory that can create only Opus audio encoders. Works like +// CreateAudioEncoderFactory(), but is easier to use and is +// not inline because it isn't a template. +rtc::scoped_refptr CreateOpusAudioEncoderFactory(); + +} // namespace webrtc + +#endif // API_AUDIO_CODECS_OPUS_AUDIO_ENCODER_FACTORY_H_ diff --git a/api/audio_codecs/test/BUILD.gn b/api/audio_codecs/test/BUILD.gn new file mode 100644 index 0000000..575f062 --- /dev/null +++ b/api/audio_codecs/test/BUILD.gn @@ -0,0 +1,43 @@ +# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. 
+# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("../../../webrtc.gni") +if (is_android) { + import("//build/config/android/config.gni") + import("//build/config/android/rules.gni") +} + +if (rtc_include_tests) { + rtc_library("audio_codecs_api_unittests") { + testonly = true + sources = [ + "audio_decoder_factory_template_unittest.cc", + "audio_encoder_factory_template_unittest.cc", + ] + deps = [ + "..:audio_codecs_api", + "../../../rtc_base:rtc_base_approved", + "../../../test:audio_codec_mocks", + "../../../test:test_support", + "../L16:audio_decoder_L16", + "../L16:audio_encoder_L16", + "../g711:audio_decoder_g711", + "../g711:audio_encoder_g711", + "../g722:audio_decoder_g722", + "../g722:audio_encoder_g722", + "../ilbc:audio_decoder_ilbc", + "../ilbc:audio_encoder_ilbc", + "../isac:audio_decoder_isac_fix", + "../isac:audio_decoder_isac_float", + "../isac:audio_encoder_isac_fix", + "../isac:audio_encoder_isac_float", + "../opus:audio_decoder_opus", + "../opus:audio_encoder_opus", + ] + } +} diff --git a/api/audio_codecs/test/audio_decoder_factory_template_unittest.cc b/api/audio_codecs/test/audio_decoder_factory_template_unittest.cc new file mode 100644 index 0000000..0e2e8c2 --- /dev/null +++ b/api/audio_codecs/test/audio_decoder_factory_template_unittest.cc @@ -0,0 +1,256 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio_codecs/audio_decoder_factory_template.h" + +#include + +#include "api/audio_codecs/L16/audio_decoder_L16.h" +#include "api/audio_codecs/g711/audio_decoder_g711.h" +#include "api/audio_codecs/g722/audio_decoder_g722.h" +#include "api/audio_codecs/ilbc/audio_decoder_ilbc.h" +#include "api/audio_codecs/isac/audio_decoder_isac_fix.h" +#include "api/audio_codecs/isac/audio_decoder_isac_float.h" +#include "api/audio_codecs/opus/audio_decoder_opus.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/mock_audio_decoder.h" + +namespace webrtc { + +namespace { + +struct BogusParams { + static SdpAudioFormat AudioFormat() { return {"bogus", 8000, 1}; } + static AudioCodecInfo CodecInfo() { return {8000, 1, 12345}; } +}; + +struct ShamParams { + static SdpAudioFormat AudioFormat() { + return {"sham", 16000, 2, {{"param", "value"}}}; + } + static AudioCodecInfo CodecInfo() { return {16000, 2, 23456}; } +}; + +template +struct AudioDecoderFakeApi { + struct Config { + SdpAudioFormat audio_format; + }; + + static absl::optional SdpToConfig( + const SdpAudioFormat& audio_format) { + if (Params::AudioFormat() == audio_format) { + Config config = {audio_format}; + return config; + } else { + return absl::nullopt; + } + } + + static void AppendSupportedDecoders(std::vector* specs) { + specs->push_back({Params::AudioFormat(), Params::CodecInfo()}); + } + + static AudioCodecInfo QueryAudioDecoder(const Config&) { + return Params::CodecInfo(); + } + + static std::unique_ptr MakeAudioDecoder( + const Config&, + absl::optional /*codec_pair_id*/ = absl::nullopt) { + auto dec = std::make_unique>(); + EXPECT_CALL(*dec, SampleRateHz()) + .WillOnce(::testing::Return(Params::CodecInfo().sample_rate_hz)); + EXPECT_CALL(*dec, Die()); + return std::move(dec); + } +}; + +} // namespace + +TEST(AudioDecoderFactoryTemplateTest, NoDecoderTypes) { + 
rtc::scoped_refptr factory( + new rtc::RefCountedObject< + audio_decoder_factory_template_impl::AudioDecoderFactoryT<>>()); + EXPECT_THAT(factory->GetSupportedDecoders(), ::testing::IsEmpty()); + EXPECT_FALSE(factory->IsSupportedDecoder({"foo", 8000, 1})); + EXPECT_EQ(nullptr, + factory->MakeAudioDecoder({"bar", 16000, 1}, absl::nullopt)); +} + +TEST(AudioDecoderFactoryTemplateTest, OneDecoderType) { + auto factory = CreateAudioDecoderFactory>(); + EXPECT_THAT(factory->GetSupportedDecoders(), + ::testing::ElementsAre( + AudioCodecSpec{{"bogus", 8000, 1}, {8000, 1, 12345}})); + EXPECT_FALSE(factory->IsSupportedDecoder({"foo", 8000, 1})); + EXPECT_TRUE(factory->IsSupportedDecoder({"bogus", 8000, 1})); + EXPECT_EQ(nullptr, + factory->MakeAudioDecoder({"bar", 16000, 1}, absl::nullopt)); + auto dec = factory->MakeAudioDecoder({"bogus", 8000, 1}, absl::nullopt); + ASSERT_NE(nullptr, dec); + EXPECT_EQ(8000, dec->SampleRateHz()); +} + +TEST(AudioDecoderFactoryTemplateTest, TwoDecoderTypes) { + auto factory = CreateAudioDecoderFactory, + AudioDecoderFakeApi>(); + EXPECT_THAT(factory->GetSupportedDecoders(), + ::testing::ElementsAre( + AudioCodecSpec{{"bogus", 8000, 1}, {8000, 1, 12345}}, + AudioCodecSpec{{"sham", 16000, 2, {{"param", "value"}}}, + {16000, 2, 23456}})); + EXPECT_FALSE(factory->IsSupportedDecoder({"foo", 8000, 1})); + EXPECT_TRUE(factory->IsSupportedDecoder({"bogus", 8000, 1})); + EXPECT_TRUE( + factory->IsSupportedDecoder({"sham", 16000, 2, {{"param", "value"}}})); + EXPECT_EQ(nullptr, + factory->MakeAudioDecoder({"bar", 16000, 1}, absl::nullopt)); + auto dec1 = factory->MakeAudioDecoder({"bogus", 8000, 1}, absl::nullopt); + ASSERT_NE(nullptr, dec1); + EXPECT_EQ(8000, dec1->SampleRateHz()); + EXPECT_EQ(nullptr, + factory->MakeAudioDecoder({"sham", 16000, 2}, absl::nullopt)); + auto dec2 = factory->MakeAudioDecoder( + {"sham", 16000, 2, {{"param", "value"}}}, absl::nullopt); + ASSERT_NE(nullptr, dec2); + EXPECT_EQ(16000, dec2->SampleRateHz()); +} + 
+TEST(AudioDecoderFactoryTemplateTest, G711) { + auto factory = CreateAudioDecoderFactory(); + EXPECT_THAT(factory->GetSupportedDecoders(), + ::testing::ElementsAre( + AudioCodecSpec{{"PCMU", 8000, 1}, {8000, 1, 64000}}, + AudioCodecSpec{{"PCMA", 8000, 1}, {8000, 1, 64000}})); + EXPECT_FALSE(factory->IsSupportedDecoder({"G711", 8000, 1})); + EXPECT_TRUE(factory->IsSupportedDecoder({"PCMU", 8000, 1})); + EXPECT_TRUE(factory->IsSupportedDecoder({"pcma", 8000, 1})); + EXPECT_EQ(nullptr, + factory->MakeAudioDecoder({"pcmu", 16000, 1}, absl::nullopt)); + auto dec1 = factory->MakeAudioDecoder({"pcmu", 8000, 1}, absl::nullopt); + ASSERT_NE(nullptr, dec1); + EXPECT_EQ(8000, dec1->SampleRateHz()); + auto dec2 = factory->MakeAudioDecoder({"PCMA", 8000, 1}, absl::nullopt); + ASSERT_NE(nullptr, dec2); + EXPECT_EQ(8000, dec2->SampleRateHz()); +} + +TEST(AudioDecoderFactoryTemplateTest, G722) { + auto factory = CreateAudioDecoderFactory(); + EXPECT_THAT(factory->GetSupportedDecoders(), + ::testing::ElementsAre( + AudioCodecSpec{{"G722", 8000, 1}, {16000, 1, 64000}})); + EXPECT_FALSE(factory->IsSupportedDecoder({"foo", 8000, 1})); + EXPECT_TRUE(factory->IsSupportedDecoder({"G722", 8000, 1})); + EXPECT_EQ(nullptr, + factory->MakeAudioDecoder({"bar", 16000, 1}, absl::nullopt)); + auto dec1 = factory->MakeAudioDecoder({"G722", 8000, 1}, absl::nullopt); + ASSERT_NE(nullptr, dec1); + EXPECT_EQ(16000, dec1->SampleRateHz()); + EXPECT_EQ(1u, dec1->Channels()); + auto dec2 = factory->MakeAudioDecoder({"G722", 8000, 2}, absl::nullopt); + ASSERT_NE(nullptr, dec2); + EXPECT_EQ(16000, dec2->SampleRateHz()); + EXPECT_EQ(2u, dec2->Channels()); + auto dec3 = factory->MakeAudioDecoder({"G722", 8000, 3}, absl::nullopt); + ASSERT_EQ(nullptr, dec3); +} + +TEST(AudioDecoderFactoryTemplateTest, Ilbc) { + auto factory = CreateAudioDecoderFactory(); + EXPECT_THAT(factory->GetSupportedDecoders(), + ::testing::ElementsAre( + AudioCodecSpec{{"ILBC", 8000, 1}, {8000, 1, 13300}})); + 
EXPECT_FALSE(factory->IsSupportedDecoder({"foo", 8000, 1})); + EXPECT_TRUE(factory->IsSupportedDecoder({"ilbc", 8000, 1})); + EXPECT_EQ(nullptr, + factory->MakeAudioDecoder({"bar", 8000, 1}, absl::nullopt)); + auto dec = factory->MakeAudioDecoder({"ilbc", 8000, 1}, absl::nullopt); + ASSERT_NE(nullptr, dec); + EXPECT_EQ(8000, dec->SampleRateHz()); +} + +TEST(AudioDecoderFactoryTemplateTest, IsacFix) { + auto factory = CreateAudioDecoderFactory(); + EXPECT_THAT(factory->GetSupportedDecoders(), + ::testing::ElementsAre(AudioCodecSpec{ + {"ISAC", 16000, 1}, {16000, 1, 32000, 10000, 32000}})); + EXPECT_FALSE(factory->IsSupportedDecoder({"isac", 16000, 2})); + EXPECT_TRUE(factory->IsSupportedDecoder({"isac", 16000, 1})); + EXPECT_FALSE(factory->IsSupportedDecoder({"isac", 32000, 1})); + EXPECT_EQ(nullptr, + factory->MakeAudioDecoder({"isac", 8000, 1}, absl::nullopt)); + auto dec = factory->MakeAudioDecoder({"isac", 16000, 1}, absl::nullopt); + ASSERT_NE(nullptr, dec); + EXPECT_EQ(16000, dec->SampleRateHz()); +} + +TEST(AudioDecoderFactoryTemplateTest, IsacFloat) { + auto factory = CreateAudioDecoderFactory(); + EXPECT_THAT( + factory->GetSupportedDecoders(), + ::testing::ElementsAre( + AudioCodecSpec{{"ISAC", 16000, 1}, {16000, 1, 32000, 10000, 32000}}, + AudioCodecSpec{{"ISAC", 32000, 1}, {32000, 1, 56000, 10000, 56000}})); + EXPECT_FALSE(factory->IsSupportedDecoder({"isac", 16000, 2})); + EXPECT_TRUE(factory->IsSupportedDecoder({"isac", 16000, 1})); + EXPECT_TRUE(factory->IsSupportedDecoder({"isac", 32000, 1})); + EXPECT_EQ(nullptr, + factory->MakeAudioDecoder({"isac", 8000, 1}, absl::nullopt)); + auto dec1 = factory->MakeAudioDecoder({"isac", 16000, 1}, absl::nullopt); + ASSERT_NE(nullptr, dec1); + EXPECT_EQ(16000, dec1->SampleRateHz()); + auto dec2 = factory->MakeAudioDecoder({"isac", 32000, 1}, absl::nullopt); + ASSERT_NE(nullptr, dec2); + EXPECT_EQ(32000, dec2->SampleRateHz()); +} + +TEST(AudioDecoderFactoryTemplateTest, L16) { + auto factory = 
CreateAudioDecoderFactory(); + EXPECT_THAT( + factory->GetSupportedDecoders(), + ::testing::ElementsAre( + AudioCodecSpec{{"L16", 8000, 1}, {8000, 1, 8000 * 16}}, + AudioCodecSpec{{"L16", 16000, 1}, {16000, 1, 16000 * 16}}, + AudioCodecSpec{{"L16", 32000, 1}, {32000, 1, 32000 * 16}}, + AudioCodecSpec{{"L16", 8000, 2}, {8000, 2, 8000 * 16 * 2}}, + AudioCodecSpec{{"L16", 16000, 2}, {16000, 2, 16000 * 16 * 2}}, + AudioCodecSpec{{"L16", 32000, 2}, {32000, 2, 32000 * 16 * 2}})); + EXPECT_FALSE(factory->IsSupportedDecoder({"foo", 8000, 1})); + EXPECT_TRUE(factory->IsSupportedDecoder({"L16", 48000, 1})); + EXPECT_FALSE(factory->IsSupportedDecoder({"L16", 96000, 1})); + EXPECT_EQ(nullptr, + factory->MakeAudioDecoder({"L16", 8000, 0}, absl::nullopt)); + auto dec = factory->MakeAudioDecoder({"L16", 48000, 2}, absl::nullopt); + ASSERT_NE(nullptr, dec); + EXPECT_EQ(48000, dec->SampleRateHz()); +} + +TEST(AudioDecoderFactoryTemplateTest, Opus) { + auto factory = CreateAudioDecoderFactory(); + AudioCodecInfo opus_info{48000, 1, 64000, 6000, 510000}; + opus_info.allow_comfort_noise = false; + opus_info.supports_network_adaption = true; + const SdpAudioFormat opus_format( + {"opus", 48000, 2, {{"minptime", "10"}, {"useinbandfec", "1"}}}); + EXPECT_THAT(factory->GetSupportedDecoders(), + ::testing::ElementsAre(AudioCodecSpec{opus_format, opus_info})); + EXPECT_FALSE(factory->IsSupportedDecoder({"opus", 48000, 1})); + EXPECT_TRUE(factory->IsSupportedDecoder({"opus", 48000, 2})); + EXPECT_EQ(nullptr, + factory->MakeAudioDecoder({"bar", 16000, 1}, absl::nullopt)); + auto dec = factory->MakeAudioDecoder({"opus", 48000, 2}, absl::nullopt); + ASSERT_NE(nullptr, dec); + EXPECT_EQ(48000, dec->SampleRateHz()); +} + +} // namespace webrtc diff --git a/api/audio_codecs/test/audio_encoder_factory_template_unittest.cc b/api/audio_codecs/test/audio_encoder_factory_template_unittest.cc new file mode 100644 index 0000000..95ea855 --- /dev/null +++ 
b/api/audio_codecs/test/audio_encoder_factory_template_unittest.cc @@ -0,0 +1,266 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio_codecs/audio_encoder_factory_template.h" + +#include + +#include "api/audio_codecs/L16/audio_encoder_L16.h" +#include "api/audio_codecs/g711/audio_encoder_g711.h" +#include "api/audio_codecs/g722/audio_encoder_g722.h" +#include "api/audio_codecs/ilbc/audio_encoder_ilbc.h" +#include "api/audio_codecs/isac/audio_encoder_isac_fix.h" +#include "api/audio_codecs/isac/audio_encoder_isac_float.h" +#include "api/audio_codecs/opus/audio_encoder_opus.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/mock_audio_encoder.h" + +namespace webrtc { + +namespace { + +struct BogusParams { + static SdpAudioFormat AudioFormat() { return {"bogus", 8000, 1}; } + static AudioCodecInfo CodecInfo() { return {8000, 1, 12345}; } +}; + +struct ShamParams { + static SdpAudioFormat AudioFormat() { + return {"sham", 16000, 2, {{"param", "value"}}}; + } + static AudioCodecInfo CodecInfo() { return {16000, 2, 23456}; } +}; + +template +struct AudioEncoderFakeApi { + struct Config { + SdpAudioFormat audio_format; + }; + + static absl::optional SdpToConfig( + const SdpAudioFormat& audio_format) { + if (Params::AudioFormat() == audio_format) { + Config config = {audio_format}; + return config; + } else { + return absl::nullopt; + } + } + + static void AppendSupportedEncoders(std::vector* specs) { + specs->push_back({Params::AudioFormat(), Params::CodecInfo()}); + } + + static AudioCodecInfo QueryAudioEncoder(const Config&) { + return Params::CodecInfo(); + } + + static 
std::unique_ptr MakeAudioEncoder( + const Config&, + int payload_type, + absl::optional /*codec_pair_id*/ = absl::nullopt) { + auto enc = std::make_unique>(); + EXPECT_CALL(*enc, SampleRateHz()) + .WillOnce(::testing::Return(Params::CodecInfo().sample_rate_hz)); + return std::move(enc); + } +}; + +} // namespace + +TEST(AudioEncoderFactoryTemplateTest, NoEncoderTypes) { + rtc::scoped_refptr factory( + new rtc::RefCountedObject< + audio_encoder_factory_template_impl::AudioEncoderFactoryT<>>()); + EXPECT_THAT(factory->GetSupportedEncoders(), ::testing::IsEmpty()); + EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"foo", 8000, 1})); + EXPECT_EQ(nullptr, + factory->MakeAudioEncoder(17, {"bar", 16000, 1}, absl::nullopt)); +} + +TEST(AudioEncoderFactoryTemplateTest, OneEncoderType) { + auto factory = CreateAudioEncoderFactory>(); + EXPECT_THAT(factory->GetSupportedEncoders(), + ::testing::ElementsAre( + AudioCodecSpec{{"bogus", 8000, 1}, {8000, 1, 12345}})); + EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"foo", 8000, 1})); + EXPECT_EQ(AudioCodecInfo(8000, 1, 12345), + factory->QueryAudioEncoder({"bogus", 8000, 1})); + EXPECT_EQ(nullptr, + factory->MakeAudioEncoder(17, {"bar", 16000, 1}, absl::nullopt)); + auto enc = factory->MakeAudioEncoder(17, {"bogus", 8000, 1}, absl::nullopt); + ASSERT_NE(nullptr, enc); + EXPECT_EQ(8000, enc->SampleRateHz()); +} + +TEST(AudioEncoderFactoryTemplateTest, TwoEncoderTypes) { + auto factory = CreateAudioEncoderFactory, + AudioEncoderFakeApi>(); + EXPECT_THAT(factory->GetSupportedEncoders(), + ::testing::ElementsAre( + AudioCodecSpec{{"bogus", 8000, 1}, {8000, 1, 12345}}, + AudioCodecSpec{{"sham", 16000, 2, {{"param", "value"}}}, + {16000, 2, 23456}})); + EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"foo", 8000, 1})); + EXPECT_EQ(AudioCodecInfo(8000, 1, 12345), + factory->QueryAudioEncoder({"bogus", 8000, 1})); + EXPECT_EQ( + AudioCodecInfo(16000, 2, 23456), + factory->QueryAudioEncoder({"sham", 16000, 2, {{"param", 
"value"}}})); + EXPECT_EQ(nullptr, + factory->MakeAudioEncoder(17, {"bar", 16000, 1}, absl::nullopt)); + auto enc1 = factory->MakeAudioEncoder(17, {"bogus", 8000, 1}, absl::nullopt); + ASSERT_NE(nullptr, enc1); + EXPECT_EQ(8000, enc1->SampleRateHz()); + EXPECT_EQ(nullptr, + factory->MakeAudioEncoder(17, {"sham", 16000, 2}, absl::nullopt)); + auto enc2 = factory->MakeAudioEncoder( + 17, {"sham", 16000, 2, {{"param", "value"}}}, absl::nullopt); + ASSERT_NE(nullptr, enc2); + EXPECT_EQ(16000, enc2->SampleRateHz()); +} + +TEST(AudioEncoderFactoryTemplateTest, G711) { + auto factory = CreateAudioEncoderFactory(); + EXPECT_THAT(factory->GetSupportedEncoders(), + ::testing::ElementsAre( + AudioCodecSpec{{"PCMU", 8000, 1}, {8000, 1, 64000}}, + AudioCodecSpec{{"PCMA", 8000, 1}, {8000, 1, 64000}})); + EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"PCMA", 16000, 1})); + EXPECT_EQ(AudioCodecInfo(8000, 1, 64000), + factory->QueryAudioEncoder({"PCMA", 8000, 1})); + EXPECT_EQ(nullptr, + factory->MakeAudioEncoder(17, {"PCMU", 16000, 1}, absl::nullopt)); + auto enc1 = factory->MakeAudioEncoder(17, {"PCMU", 8000, 1}, absl::nullopt); + ASSERT_NE(nullptr, enc1); + EXPECT_EQ(8000, enc1->SampleRateHz()); + auto enc2 = factory->MakeAudioEncoder(17, {"PCMA", 8000, 1}, absl::nullopt); + ASSERT_NE(nullptr, enc2); + EXPECT_EQ(8000, enc2->SampleRateHz()); +} + +TEST(AudioEncoderFactoryTemplateTest, G722) { + auto factory = CreateAudioEncoderFactory(); + EXPECT_THAT(factory->GetSupportedEncoders(), + ::testing::ElementsAre( + AudioCodecSpec{{"G722", 8000, 1}, {16000, 1, 64000}})); + EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"foo", 8000, 1})); + EXPECT_EQ(AudioCodecInfo(16000, 1, 64000), + factory->QueryAudioEncoder({"G722", 8000, 1})); + EXPECT_EQ(nullptr, + factory->MakeAudioEncoder(17, {"bar", 16000, 1}, absl::nullopt)); + auto enc = factory->MakeAudioEncoder(17, {"G722", 8000, 1}, absl::nullopt); + ASSERT_NE(nullptr, enc); + EXPECT_EQ(16000, enc->SampleRateHz()); +} + 
+TEST(AudioEncoderFactoryTemplateTest, Ilbc) { + auto factory = CreateAudioEncoderFactory(); + EXPECT_THAT(factory->GetSupportedEncoders(), + ::testing::ElementsAre( + AudioCodecSpec{{"ILBC", 8000, 1}, {8000, 1, 13333}})); + EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"foo", 8000, 1})); + EXPECT_EQ(AudioCodecInfo(8000, 1, 13333), + factory->QueryAudioEncoder({"ilbc", 8000, 1})); + EXPECT_EQ(nullptr, + factory->MakeAudioEncoder(17, {"bar", 8000, 1}, absl::nullopt)); + auto enc = factory->MakeAudioEncoder(17, {"ilbc", 8000, 1}, absl::nullopt); + ASSERT_NE(nullptr, enc); + EXPECT_EQ(8000, enc->SampleRateHz()); +} + +TEST(AudioEncoderFactoryTemplateTest, IsacFix) { + auto factory = CreateAudioEncoderFactory(); + EXPECT_THAT(factory->GetSupportedEncoders(), + ::testing::ElementsAre(AudioCodecSpec{ + {"ISAC", 16000, 1}, {16000, 1, 32000, 10000, 32000}})); + EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"isac", 16000, 2})); + EXPECT_EQ(AudioCodecInfo(16000, 1, 32000, 10000, 32000), + factory->QueryAudioEncoder({"isac", 16000, 1})); + EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"isac", 32000, 1})); + EXPECT_EQ(nullptr, + factory->MakeAudioEncoder(17, {"isac", 8000, 1}, absl::nullopt)); + auto enc1 = factory->MakeAudioEncoder(17, {"isac", 16000, 1}, absl::nullopt); + ASSERT_NE(nullptr, enc1); + EXPECT_EQ(16000, enc1->SampleRateHz()); + EXPECT_EQ(3u, enc1->Num10MsFramesInNextPacket()); + auto enc2 = factory->MakeAudioEncoder( + 17, {"isac", 16000, 1, {{"ptime", "60"}}}, absl::nullopt); + ASSERT_NE(nullptr, enc2); + EXPECT_EQ(6u, enc2->Num10MsFramesInNextPacket()); +} + +TEST(AudioEncoderFactoryTemplateTest, IsacFloat) { + auto factory = CreateAudioEncoderFactory(); + EXPECT_THAT( + factory->GetSupportedEncoders(), + ::testing::ElementsAre( + AudioCodecSpec{{"ISAC", 16000, 1}, {16000, 1, 32000, 10000, 32000}}, + AudioCodecSpec{{"ISAC", 32000, 1}, {32000, 1, 56000, 10000, 56000}})); + EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"isac", 16000, 
2})); + EXPECT_EQ(AudioCodecInfo(16000, 1, 32000, 10000, 32000), + factory->QueryAudioEncoder({"isac", 16000, 1})); + EXPECT_EQ(AudioCodecInfo(32000, 1, 56000, 10000, 56000), + factory->QueryAudioEncoder({"isac", 32000, 1})); + EXPECT_EQ(nullptr, + factory->MakeAudioEncoder(17, {"isac", 8000, 1}, absl::nullopt)); + auto enc1 = factory->MakeAudioEncoder(17, {"isac", 16000, 1}, absl::nullopt); + ASSERT_NE(nullptr, enc1); + EXPECT_EQ(16000, enc1->SampleRateHz()); + auto enc2 = factory->MakeAudioEncoder(17, {"isac", 32000, 1}, absl::nullopt); + ASSERT_NE(nullptr, enc2); + EXPECT_EQ(32000, enc2->SampleRateHz()); +} + +TEST(AudioEncoderFactoryTemplateTest, L16) { + auto factory = CreateAudioEncoderFactory(); + EXPECT_THAT( + factory->GetSupportedEncoders(), + ::testing::ElementsAre( + AudioCodecSpec{{"L16", 8000, 1}, {8000, 1, 8000 * 16}}, + AudioCodecSpec{{"L16", 16000, 1}, {16000, 1, 16000 * 16}}, + AudioCodecSpec{{"L16", 32000, 1}, {32000, 1, 32000 * 16}}, + AudioCodecSpec{{"L16", 8000, 2}, {8000, 2, 8000 * 16 * 2}}, + AudioCodecSpec{{"L16", 16000, 2}, {16000, 2, 16000 * 16 * 2}}, + AudioCodecSpec{{"L16", 32000, 2}, {32000, 2, 32000 * 16 * 2}})); + EXPECT_EQ(absl::nullopt, factory->QueryAudioEncoder({"L16", 8000, 0})); + EXPECT_EQ(AudioCodecInfo(48000, 1, 48000 * 16), + factory->QueryAudioEncoder({"L16", 48000, 1})); + EXPECT_EQ(nullptr, + factory->MakeAudioEncoder(17, {"L16", 8000, 0}, absl::nullopt)); + auto enc = factory->MakeAudioEncoder(17, {"L16", 48000, 2}, absl::nullopt); + ASSERT_NE(nullptr, enc); + EXPECT_EQ(48000, enc->SampleRateHz()); +} + +TEST(AudioEncoderFactoryTemplateTest, Opus) { + auto factory = CreateAudioEncoderFactory(); + AudioCodecInfo info = {48000, 1, 32000, 6000, 510000}; + info.allow_comfort_noise = false; + info.supports_network_adaption = true; + EXPECT_THAT( + factory->GetSupportedEncoders(), + ::testing::ElementsAre(AudioCodecSpec{ + {"opus", 48000, 2, {{"minptime", "10"}, {"useinbandfec", "1"}}}, + info})); + EXPECT_EQ(absl::nullopt, 
factory->QueryAudioEncoder({"foo", 8000, 1})); + EXPECT_EQ( + info, + factory->QueryAudioEncoder( + {"opus", 48000, 2, {{"minptime", "10"}, {"useinbandfec", "1"}}})); + EXPECT_EQ(nullptr, + factory->MakeAudioEncoder(17, {"bar", 16000, 1}, absl::nullopt)); + auto enc = factory->MakeAudioEncoder(17, {"opus", 48000, 2}, absl::nullopt); + ASSERT_NE(nullptr, enc); + EXPECT_EQ(48000, enc->SampleRateHz()); +} + +} // namespace webrtc diff --git a/api/audio_options.cc b/api/audio_options.cc new file mode 100644 index 0000000..6832bbe --- /dev/null +++ b/api/audio_options.cc @@ -0,0 +1,133 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/audio_options.h" + +#include "api/array_view.h" +#include "rtc_base/strings/string_builder.h" + +namespace cricket { +namespace { + +template +void ToStringIfSet(rtc::SimpleStringBuilder* result, + const char* key, + const absl::optional& val) { + if (val) { + (*result) << key << ": " << *val << ", "; + } +} + +template +void SetFrom(absl::optional* s, const absl::optional& o) { + if (o) { + *s = o; + } +} + +} // namespace + +AudioOptions::AudioOptions() = default; +AudioOptions::~AudioOptions() = default; + +void AudioOptions::SetAll(const AudioOptions& change) { + SetFrom(&echo_cancellation, change.echo_cancellation); +#if defined(WEBRTC_IOS) + SetFrom(&ios_force_software_aec_HACK, change.ios_force_software_aec_HACK); +#endif + SetFrom(&auto_gain_control, change.auto_gain_control); + SetFrom(&noise_suppression, change.noise_suppression); + SetFrom(&highpass_filter, change.highpass_filter); + SetFrom(&stereo_swapping, change.stereo_swapping); + 
SetFrom(&audio_jitter_buffer_max_packets, + change.audio_jitter_buffer_max_packets); + SetFrom(&audio_jitter_buffer_fast_accelerate, + change.audio_jitter_buffer_fast_accelerate); + SetFrom(&audio_jitter_buffer_min_delay_ms, + change.audio_jitter_buffer_min_delay_ms); + SetFrom(&audio_jitter_buffer_enable_rtx_handling, + change.audio_jitter_buffer_enable_rtx_handling); + SetFrom(&typing_detection, change.typing_detection); + SetFrom(&experimental_agc, change.experimental_agc); + SetFrom(&experimental_ns, change.experimental_ns); + SetFrom(&residual_echo_detector, change.residual_echo_detector); + SetFrom(&tx_agc_target_dbov, change.tx_agc_target_dbov); + SetFrom(&tx_agc_digital_compression_gain, + change.tx_agc_digital_compression_gain); + SetFrom(&tx_agc_limiter, change.tx_agc_limiter); + SetFrom(&combined_audio_video_bwe, change.combined_audio_video_bwe); + SetFrom(&audio_network_adaptor, change.audio_network_adaptor); + SetFrom(&audio_network_adaptor_config, change.audio_network_adaptor_config); +} + +bool AudioOptions::operator==(const AudioOptions& o) const { + return echo_cancellation == o.echo_cancellation && +#if defined(WEBRTC_IOS) + ios_force_software_aec_HACK == o.ios_force_software_aec_HACK && +#endif + auto_gain_control == o.auto_gain_control && + noise_suppression == o.noise_suppression && + highpass_filter == o.highpass_filter && + stereo_swapping == o.stereo_swapping && + audio_jitter_buffer_max_packets == o.audio_jitter_buffer_max_packets && + audio_jitter_buffer_fast_accelerate == + o.audio_jitter_buffer_fast_accelerate && + audio_jitter_buffer_min_delay_ms == + o.audio_jitter_buffer_min_delay_ms && + audio_jitter_buffer_enable_rtx_handling == + o.audio_jitter_buffer_enable_rtx_handling && + typing_detection == o.typing_detection && + experimental_agc == o.experimental_agc && + experimental_ns == o.experimental_ns && + residual_echo_detector == o.residual_echo_detector && + tx_agc_target_dbov == o.tx_agc_target_dbov && + 
tx_agc_digital_compression_gain == o.tx_agc_digital_compression_gain && + tx_agc_limiter == o.tx_agc_limiter && + combined_audio_video_bwe == o.combined_audio_video_bwe && + audio_network_adaptor == o.audio_network_adaptor && + audio_network_adaptor_config == o.audio_network_adaptor_config; +} + +std::string AudioOptions::ToString() const { + char buffer[1024]; + rtc::SimpleStringBuilder result(buffer); + result << "AudioOptions {"; + ToStringIfSet(&result, "aec", echo_cancellation); +#if defined(WEBRTC_IOS) + ToStringIfSet(&result, "ios_force_software_aec_HACK", + ios_force_software_aec_HACK); +#endif + ToStringIfSet(&result, "agc", auto_gain_control); + ToStringIfSet(&result, "ns", noise_suppression); + ToStringIfSet(&result, "hf", highpass_filter); + ToStringIfSet(&result, "swap", stereo_swapping); + ToStringIfSet(&result, "audio_jitter_buffer_max_packets", + audio_jitter_buffer_max_packets); + ToStringIfSet(&result, "audio_jitter_buffer_fast_accelerate", + audio_jitter_buffer_fast_accelerate); + ToStringIfSet(&result, "audio_jitter_buffer_min_delay_ms", + audio_jitter_buffer_min_delay_ms); + ToStringIfSet(&result, "audio_jitter_buffer_enable_rtx_handling", + audio_jitter_buffer_enable_rtx_handling); + ToStringIfSet(&result, "typing", typing_detection); + ToStringIfSet(&result, "experimental_agc", experimental_agc); + ToStringIfSet(&result, "experimental_ns", experimental_ns); + ToStringIfSet(&result, "residual_echo_detector", residual_echo_detector); + ToStringIfSet(&result, "tx_agc_target_dbov", tx_agc_target_dbov); + ToStringIfSet(&result, "tx_agc_digital_compression_gain", + tx_agc_digital_compression_gain); + ToStringIfSet(&result, "tx_agc_limiter", tx_agc_limiter); + ToStringIfSet(&result, "combined_audio_video_bwe", combined_audio_video_bwe); + ToStringIfSet(&result, "audio_network_adaptor", audio_network_adaptor); + result << "}"; + return result.str(); +} + +} // namespace cricket diff --git a/api/audio_options.h b/api/audio_options.h new file mode 
100644 index 0000000..1b0d1ad --- /dev/null +++ b/api/audio_options.h @@ -0,0 +1,87 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_AUDIO_OPTIONS_H_ +#define API_AUDIO_OPTIONS_H_ + +#include + +#include + +#include "absl/types/optional.h" +#include "rtc_base/system/rtc_export.h" + +namespace cricket { + +// Options that can be applied to a VoiceMediaChannel or a VoiceMediaEngine. +// Used to be flags, but that makes it hard to selectively apply options. +// We are moving all of the setting of options to structs like this, +// but some things currently still use flags. +struct RTC_EXPORT AudioOptions { + AudioOptions(); + ~AudioOptions(); + void SetAll(const AudioOptions& change); + + bool operator==(const AudioOptions& o) const; + bool operator!=(const AudioOptions& o) const { return !(*this == o); } + + std::string ToString() const; + + // Audio processing that attempts to filter away the output signal from + // later inbound pickup. + absl::optional echo_cancellation; +#if defined(WEBRTC_IOS) + // Forces software echo cancellation on iOS. This is a temporary workaround + // (until Apple fixes the bug) for a device with non-functioning AEC. May + // improve performance on that particular device, but will cause unpredictable + // behavior in all other cases. See http://bugs.webrtc.org/8682. + absl::optional ios_force_software_aec_HACK; +#endif + // Audio processing to adjust the sensitivity of the local mic dynamically. + absl::optional auto_gain_control; + // Audio processing to filter out background noise. 
+ absl::optional noise_suppression; + // Audio processing to remove background noise of lower frequencies. + absl::optional highpass_filter; + // Audio processing to swap the left and right channels. + absl::optional stereo_swapping; + // Audio receiver jitter buffer (NetEq) max capacity in number of packets. + absl::optional audio_jitter_buffer_max_packets; + // Audio receiver jitter buffer (NetEq) fast accelerate mode. + absl::optional audio_jitter_buffer_fast_accelerate; + // Audio receiver jitter buffer (NetEq) minimum target delay in milliseconds. + absl::optional audio_jitter_buffer_min_delay_ms; + // Audio receiver jitter buffer (NetEq) should handle retransmitted packets. + absl::optional audio_jitter_buffer_enable_rtx_handling; + // Audio processing to detect typing. + absl::optional typing_detection; + absl::optional experimental_agc; + absl::optional experimental_ns; + // Note that tx_agc_* only applies to non-experimental AGC. + absl::optional residual_echo_detector; + absl::optional tx_agc_target_dbov; + absl::optional tx_agc_digital_compression_gain; + absl::optional tx_agc_limiter; + // Enable combined audio+bandwidth BWE. + // TODO(pthatcher): This flag is set from the + // "googCombinedAudioVideoBwe", but not used anywhere. So delete it, + // and check if any other AudioOptions members are unused. + absl::optional combined_audio_video_bwe; + // Enable audio network adaptor. + // TODO(webrtc:11717): Remove this API in favor of adaptivePtime in + // RtpEncodingParameters. + absl::optional audio_network_adaptor; + // Config string for audio network adaptor. + absl::optional audio_network_adaptor_config; +}; + +} // namespace cricket + +#endif // API_AUDIO_OPTIONS_H_ diff --git a/api/call/audio_sink.h b/api/call/audio_sink.h new file mode 100644 index 0000000..fa4c3f6 --- /dev/null +++ b/api/call/audio_sink.h @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_CALL_AUDIO_SINK_H_ +#define API_CALL_AUDIO_SINK_H_ + +#if defined(WEBRTC_POSIX) && !defined(__STDC_FORMAT_MACROS) +// Avoid conflict with format_macros.h. +#define __STDC_FORMAT_MACROS +#endif + +#include +#include + +namespace webrtc { + +// Represents a simple push audio sink. +class AudioSinkInterface { + public: + virtual ~AudioSinkInterface() {} + + struct Data { + Data(const int16_t* data, + size_t samples_per_channel, + int sample_rate, + size_t channels, + uint32_t timestamp) + : data(data), + samples_per_channel(samples_per_channel), + sample_rate(sample_rate), + channels(channels), + timestamp(timestamp) {} + + const int16_t* data; // The actual 16bit audio data. + size_t samples_per_channel; // Number of frames in the buffer. + int sample_rate; // Sample rate in Hz. + size_t channels; // Number of channels in the audio data. + uint32_t timestamp; // The RTP timestamp of the first sample. + }; + + virtual void OnData(const Data& audio) = 0; +}; + +} // namespace webrtc + +#endif // API_CALL_AUDIO_SINK_H_ diff --git a/api/call/bitrate_allocation.h b/api/call/bitrate_allocation.h new file mode 100644 index 0000000..13c7f74 --- /dev/null +++ b/api/call/bitrate_allocation.h @@ -0,0 +1,45 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef API_CALL_BITRATE_ALLOCATION_H_ +#define API_CALL_BITRATE_ALLOCATION_H_ + +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" + +namespace webrtc { + +// BitrateAllocationUpdate provides information to allocated streams about their +// bitrate allocation. It originates from the BitrateAllocater class and is +// propagated from there. +struct BitrateAllocationUpdate { + // The allocated target bitrate. Media streams should produce this amount of + // data. (Note that this may include packet overhead depending on + // configuration.) + DataRate target_bitrate = DataRate::Zero(); + // The allocated part of the estimated link capacity. This is more stable than + // the target as it is based on the underlying link capacity estimate. This + // should be used to change encoder configuration when the cost of change is + // high. + DataRate stable_target_bitrate = DataRate::Zero(); + // Predicted packet loss ratio. + double packet_loss_ratio = 0; + // Predicted round trip time. + TimeDelta round_trip_time = TimeDelta::PlusInfinity(); + // |bwe_period| is deprecated, use |stable_target_bitrate| allocation instead. + TimeDelta bwe_period = TimeDelta::PlusInfinity(); + // Congestion window pushback bitrate reduction fraction. Used in + // VideoStreamEncoder to reduce the bitrate by the given fraction + // by dropping frames. + double cwnd_reduce_ratio = 0; +}; + +} // namespace webrtc + +#endif // API_CALL_BITRATE_ALLOCATION_H_ diff --git a/api/call/call_factory_interface.h b/api/call/call_factory_interface.h new file mode 100644 index 0000000..6051409 --- /dev/null +++ b/api/call/call_factory_interface.h @@ -0,0 +1,38 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_CALL_CALL_FACTORY_INTERFACE_H_ +#define API_CALL_CALL_FACTORY_INTERFACE_H_ + +#include + +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// These classes are not part of the API, and are treated as opaque pointers. +class Call; +struct CallConfig; + +// This interface exists to allow webrtc to be optionally built without media +// support (i.e., if only being used for data channels). PeerConnectionFactory +// is constructed with a CallFactoryInterface, which may or may not be null. +class CallFactoryInterface { + public: + virtual ~CallFactoryInterface() {} + + virtual Call* CreateCall(const CallConfig& config) = 0; +}; + +RTC_EXPORT std::unique_ptr CreateCallFactory(); + +} // namespace webrtc + +#endif // API_CALL_CALL_FACTORY_INTERFACE_H_ diff --git a/api/call/transport.cc b/api/call/transport.cc new file mode 100644 index 0000000..bcadc76 --- /dev/null +++ b/api/call/transport.cc @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/call/transport.h" + +#include + +namespace webrtc { + +PacketOptions::PacketOptions() = default; + +PacketOptions::PacketOptions(const PacketOptions&) = default; + +PacketOptions::~PacketOptions() = default; + +} // namespace webrtc diff --git a/api/call/transport.h b/api/call/transport.h new file mode 100644 index 0000000..2a2a87a --- /dev/null +++ b/api/call/transport.h @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_CALL_TRANSPORT_H_ +#define API_CALL_TRANSPORT_H_ + +#include +#include + +#include + +namespace webrtc { + +// TODO(holmer): Look into unifying this with the PacketOptions in +// asyncpacketsocket.h. +struct PacketOptions { + PacketOptions(); + PacketOptions(const PacketOptions&); + ~PacketOptions(); + + // A 16 bits positive id. Negative ids are invalid and should be interpreted + // as packet_id not being set. + int packet_id = -1; + // Additional data bound to the RTP packet for use in application code, + // outside of WebRTC. + std::vector application_data; + // Whether this is a retransmission of an earlier packet. + bool is_retransmit = false; + bool included_in_feedback = false; + bool included_in_allocation = false; +}; + +class Transport { + public: + virtual bool SendRtp(const uint8_t* packet, + size_t length, + const PacketOptions& options) = 0; + virtual bool SendRtcp(const uint8_t* packet, size_t length) = 0; + + protected: + virtual ~Transport() {} +}; + +} // namespace webrtc + +#endif // API_CALL_TRANSPORT_H_ diff --git a/api/candidate.cc b/api/candidate.cc new file mode 100644 index 0000000..c857f89 --- /dev/null +++ b/api/candidate.cc @@ -0,0 +1,143 @@ +/* + * Copyright 2017 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/candidate.h" + +#include "rtc_base/helpers.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/strings/string_builder.h" + +namespace cricket { + +Candidate::Candidate() + : id_(rtc::CreateRandomString(8)), + component_(0), + priority_(0), + network_type_(rtc::ADAPTER_TYPE_UNKNOWN), + generation_(0), + network_id_(0), + network_cost_(0) {} + +Candidate::Candidate(int component, + const std::string& protocol, + const rtc::SocketAddress& address, + uint32_t priority, + const std::string& username, + const std::string& password, + const std::string& type, + uint32_t generation, + const std::string& foundation, + uint16_t network_id, + uint16_t network_cost) + : id_(rtc::CreateRandomString(8)), + component_(component), + protocol_(protocol), + address_(address), + priority_(priority), + username_(username), + password_(password), + type_(type), + network_type_(rtc::ADAPTER_TYPE_UNKNOWN), + generation_(generation), + foundation_(foundation), + network_id_(network_id), + network_cost_(network_cost) {} + +Candidate::Candidate(const Candidate&) = default; + +Candidate::~Candidate() = default; + +bool Candidate::IsEquivalent(const Candidate& c) const { + // We ignore the network name, since that is just debug information, and + // the priority and the network cost, since they should be the same if the + // rest are. + return (component_ == c.component_) && (protocol_ == c.protocol_) && + (address_ == c.address_) && (username_ == c.username_) && + (password_ == c.password_) && (type_ == c.type_) && + (generation_ == c.generation_) && (foundation_ == c.foundation_) && + (related_address_ == c.related_address_) && + (network_id_ == c.network_id_); +} + +bool Candidate::MatchesForRemoval(const Candidate& c) const { + return component_ == c.component_ && protocol_ == c.protocol_ && + address_ == c.address_; +} + +std::string Candidate::ToStringInternal(bool sensitive) const { + rtc::StringBuilder ost; + std::string address = + sensitive ? 
address_.ToSensitiveString() : address_.ToString(); + ost << "Cand[" << transport_name_ << ":" << foundation_ << ":" << component_ + << ":" << protocol_ << ":" << priority_ << ":" << address << ":" << type_ + << ":" << related_address_.ToString() << ":" << username_ << ":" + << password_ << ":" << network_id_ << ":" << network_cost_ << ":" + << generation_ << "]"; + return ost.Release(); +} + +uint32_t Candidate::GetPriority(uint32_t type_preference, + int network_adapter_preference, + int relay_preference) const { + // RFC 5245 - 4.1.2.1. + // priority = (2^24)*(type preference) + + // (2^8)*(local preference) + + // (2^0)*(256 - component ID) + + // |local_preference| length is 2 bytes, 0-65535 inclusive. + // In our implemenation we will partion local_preference into + // 0 1 + // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // | NIC Pref | Addr Pref | + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // NIC Type - Type of the network adapter e.g. 3G/Wifi/Wired. + // Addr Pref - Address preference value as per RFC 3484. + // local preference = (NIC Type << 8 | Addr_Pref) - relay preference. 
+ + int addr_pref = IPAddressPrecedence(address_.ipaddr()); + int local_preference = + ((network_adapter_preference << 8) | addr_pref) + relay_preference; + + return (type_preference << 24) | (local_preference << 8) | (256 - component_); +} + +bool Candidate::operator==(const Candidate& o) const { + return id_ == o.id_ && component_ == o.component_ && + protocol_ == o.protocol_ && relay_protocol_ == o.relay_protocol_ && + address_ == o.address_ && priority_ == o.priority_ && + username_ == o.username_ && password_ == o.password_ && + type_ == o.type_ && network_name_ == o.network_name_ && + network_type_ == o.network_type_ && generation_ == o.generation_ && + foundation_ == o.foundation_ && + related_address_ == o.related_address_ && tcptype_ == o.tcptype_ && + transport_name_ == o.transport_name_ && network_id_ == o.network_id_; +} + +bool Candidate::operator!=(const Candidate& o) const { + return !(*this == o); +} + +Candidate Candidate::ToSanitizedCopy(bool use_hostname_address, + bool filter_related_address) const { + Candidate copy(*this); + if (use_hostname_address) { + rtc::SocketAddress hostname_only_addr(address().hostname(), + address().port()); + copy.set_address(hostname_only_addr); + } + if (filter_related_address) { + copy.set_related_address( + rtc::EmptySocketAddressWithFamily(copy.address().family())); + } + return copy; +} + +} // namespace cricket diff --git a/api/candidate.h b/api/candidate.h new file mode 100644 index 0000000..7452055 --- /dev/null +++ b/api/candidate.h @@ -0,0 +1,205 @@ +/* + * Copyright 2004 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_CANDIDATE_H_ +#define API_CANDIDATE_H_ + +#include +#include + +#include +#include + +#include "rtc_base/checks.h" +#include "rtc_base/network_constants.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/system/rtc_export.h" + +namespace cricket { + +// Candidate for ICE based connection discovery. +// TODO(phoglund): remove things in here that are not needed in the public API. + +class RTC_EXPORT Candidate { + public: + Candidate(); + // TODO(pthatcher): Match the ordering and param list as per RFC 5245 + // candidate-attribute syntax. http://tools.ietf.org/html/rfc5245#section-15.1 + Candidate(int component, + const std::string& protocol, + const rtc::SocketAddress& address, + uint32_t priority, + const std::string& username, + const std::string& password, + const std::string& type, + uint32_t generation, + const std::string& foundation, + uint16_t network_id = 0, + uint16_t network_cost = 0); + Candidate(const Candidate&); + ~Candidate(); + + const std::string& id() const { return id_; } + void set_id(const std::string& id) { id_ = id; } + + int component() const { return component_; } + void set_component(int component) { component_ = component; } + + const std::string& protocol() const { return protocol_; } + void set_protocol(const std::string& protocol) { protocol_ = protocol; } + + // The protocol used to talk to relay. + const std::string& relay_protocol() const { return relay_protocol_; } + void set_relay_protocol(const std::string& protocol) { + relay_protocol_ = protocol; + } + + const rtc::SocketAddress& address() const { return address_; } + void set_address(const rtc::SocketAddress& address) { address_ = address; } + + uint32_t priority() const { return priority_; } + void set_priority(const uint32_t priority) { priority_ = priority; } + + // TODO(pthatcher): Remove once Chromium's jingle/glue/utils.cc + // doesn't use it. 
+ // Maps old preference (which was 0.0-1.0) to match priority (which + // is 0-2^32-1) to to match RFC 5245, section 4.1.2.1. Also see + // https://docs.google.com/a/google.com/document/d/ + // 1iNQDiwDKMh0NQOrCqbj3DKKRT0Dn5_5UJYhmZO-t7Uc/edit + float preference() const { + // The preference value is clamped to two decimal precision. + return static_cast(((priority_ >> 24) * 100 / 127) / 100.0); + } + + // TODO(pthatcher): Remove once Chromium's jingle/glue/utils.cc + // doesn't use it. + void set_preference(float preference) { + // Limiting priority to UINT_MAX when value exceeds uint32_t max. + // This can happen for e.g. when preference = 3. + uint64_t prio_val = static_cast(preference * 127) << 24; + priority_ = static_cast( + std::min(prio_val, static_cast(UINT_MAX))); + } + + // TODO(honghaiz): Change to usernameFragment or ufrag. + const std::string& username() const { return username_; } + void set_username(const std::string& username) { username_ = username; } + + const std::string& password() const { return password_; } + void set_password(const std::string& password) { password_ = password; } + + const std::string& type() const { return type_; } + void set_type(const std::string& type) { type_ = type; } + + const std::string& network_name() const { return network_name_; } + void set_network_name(const std::string& network_name) { + network_name_ = network_name; + } + + rtc::AdapterType network_type() const { return network_type_; } + void set_network_type(rtc::AdapterType network_type) { + network_type_ = network_type; + } + + // Candidates in a new generation replace those in the old generation. + uint32_t generation() const { return generation_; } + void set_generation(uint32_t generation) { generation_ = generation; } + + // |network_cost| measures the cost/penalty of using this candidate. A network + // cost of 0 indicates this candidate can be used freely. A value of + // rtc::kNetworkCostMax indicates it should be used only as the last resort. 
+ void set_network_cost(uint16_t network_cost) { + RTC_DCHECK_LE(network_cost, rtc::kNetworkCostMax); + network_cost_ = network_cost; + } + uint16_t network_cost() const { return network_cost_; } + + // An ID assigned to the network hosting the candidate. + uint16_t network_id() const { return network_id_; } + void set_network_id(uint16_t network_id) { network_id_ = network_id; } + + const std::string& foundation() const { return foundation_; } + void set_foundation(const std::string& foundation) { + foundation_ = foundation; + } + + const rtc::SocketAddress& related_address() const { return related_address_; } + void set_related_address(const rtc::SocketAddress& related_address) { + related_address_ = related_address; + } + const std::string& tcptype() const { return tcptype_; } + void set_tcptype(const std::string& tcptype) { tcptype_ = tcptype; } + + // The name of the transport channel of this candidate. + // TODO(phoglund): remove. + const std::string& transport_name() const { return transport_name_; } + void set_transport_name(const std::string& transport_name) { + transport_name_ = transport_name; + } + + // The URL of the ICE server which this candidate is gathered from. + const std::string& url() const { return url_; } + void set_url(const std::string& url) { url_ = url; } + + // Determines whether this candidate is equivalent to the given one. + bool IsEquivalent(const Candidate& c) const; + + // Determines whether this candidate can be considered equivalent to the + // given one when looking for a matching candidate to remove. 
+ bool MatchesForRemoval(const Candidate& c) const; + + std::string ToString() const { return ToStringInternal(false); } + + std::string ToSensitiveString() const { return ToStringInternal(true); } + + uint32_t GetPriority(uint32_t type_preference, + int network_adapter_preference, + int relay_preference) const; + + bool operator==(const Candidate& o) const; + bool operator!=(const Candidate& o) const; + + // Returns a sanitized copy configured by the given booleans. If + // |use_host_address| is true, the returned copy has its IP removed from + // |address()|, which leads |address()| to be a hostname address. If + // |filter_related_address|, the returned copy has its related address reset + // to the wildcard address (i.e. 0.0.0.0 for IPv4 and :: for IPv6). Note that + // setting both booleans to false returns an identical copy to the original + // candidate. + Candidate ToSanitizedCopy(bool use_hostname_address, + bool filter_related_address) const; + + private: + std::string ToStringInternal(bool sensitive) const; + + std::string id_; + int component_; + std::string protocol_; + std::string relay_protocol_; + rtc::SocketAddress address_; + uint32_t priority_; + std::string username_; + std::string password_; + std::string type_; + std::string network_name_; + rtc::AdapterType network_type_; + uint32_t generation_; + std::string foundation_; + rtc::SocketAddress related_address_; + std::string tcptype_; + std::string transport_name_; + uint16_t network_id_; + uint16_t network_cost_; + std::string url_; +}; + +} // namespace cricket + +#endif // API_CANDIDATE_H_ diff --git a/api/create_peerconnection_factory.cc b/api/create_peerconnection_factory.cc new file mode 100644 index 0000000..6223150 --- /dev/null +++ b/api/create_peerconnection_factory.cc @@ -0,0 +1,68 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/create_peerconnection_factory.h" + +#include +#include + +#include "api/call/call_factory_interface.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_event_log/rtc_event_log_factory.h" +#include "api/scoped_refptr.h" +#include "api/task_queue/default_task_queue_factory.h" +#include "media/base/media_engine.h" +#include "media/engine/webrtc_media_engine.h" +#include "modules/audio_device/include/audio_device.h" +#include "modules/audio_processing/include/audio_processing.h" +#include "rtc_base/thread.h" + +namespace webrtc { + +rtc::scoped_refptr CreatePeerConnectionFactory( + rtc::Thread* network_thread, + rtc::Thread* worker_thread, + rtc::Thread* signaling_thread, + rtc::scoped_refptr default_adm, + rtc::scoped_refptr audio_encoder_factory, + rtc::scoped_refptr audio_decoder_factory, + std::unique_ptr video_encoder_factory, + std::unique_ptr video_decoder_factory, + rtc::scoped_refptr audio_mixer, + rtc::scoped_refptr audio_processing) { + PeerConnectionFactoryDependencies dependencies; + dependencies.network_thread = network_thread; + dependencies.worker_thread = worker_thread; + dependencies.signaling_thread = signaling_thread; + dependencies.task_queue_factory = CreateDefaultTaskQueueFactory(); + dependencies.call_factory = CreateCallFactory(); + dependencies.event_log_factory = std::make_unique( + dependencies.task_queue_factory.get()); + + cricket::MediaEngineDependencies media_dependencies; + media_dependencies.task_queue_factory = dependencies.task_queue_factory.get(); + media_dependencies.adm = std::move(default_adm); + media_dependencies.audio_encoder_factory = 
std::move(audio_encoder_factory); + media_dependencies.audio_decoder_factory = std::move(audio_decoder_factory); + if (audio_processing) { + media_dependencies.audio_processing = std::move(audio_processing); + } else { + media_dependencies.audio_processing = AudioProcessingBuilder().Create(); + } + media_dependencies.audio_mixer = std::move(audio_mixer); + media_dependencies.video_encoder_factory = std::move(video_encoder_factory); + media_dependencies.video_decoder_factory = std::move(video_decoder_factory); + dependencies.media_engine = + cricket::CreateMediaEngine(std::move(media_dependencies)); + + return CreateModularPeerConnectionFactory(std::move(dependencies)); +} + +} // namespace webrtc diff --git a/api/create_peerconnection_factory.h b/api/create_peerconnection_factory.h new file mode 100644 index 0000000..ac50736 --- /dev/null +++ b/api/create_peerconnection_factory.h @@ -0,0 +1,54 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_CREATE_PEERCONNECTION_FACTORY_H_ +#define API_CREATE_PEERCONNECTION_FACTORY_H_ + +#include + +#include "api/audio/audio_mixer.h" +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/audio_codecs/audio_encoder_factory.h" +#include "api/peer_connection_interface.h" +#include "api/scoped_refptr.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" + +namespace rtc { +// TODO(bugs.webrtc.org/9987): Move rtc::Thread to api/ or expose a better +// type. At the moment, rtc::Thread is not part of api/ so it cannot be +// included in order to avoid to leak internal types. 
+class Thread; +} // namespace rtc + +namespace webrtc { + +class AudioDeviceModule; +class AudioProcessing; + +// Create a new instance of PeerConnectionFactoryInterface with optional video +// codec factories. These video factories represents all video codecs, i.e. no +// extra internal video codecs will be added. +RTC_EXPORT rtc::scoped_refptr +CreatePeerConnectionFactory( + rtc::Thread* network_thread, + rtc::Thread* worker_thread, + rtc::Thread* signaling_thread, + rtc::scoped_refptr default_adm, + rtc::scoped_refptr audio_encoder_factory, + rtc::scoped_refptr audio_decoder_factory, + std::unique_ptr video_encoder_factory, + std::unique_ptr video_decoder_factory, + rtc::scoped_refptr audio_mixer, + rtc::scoped_refptr audio_processing); + +} // namespace webrtc + +#endif // API_CREATE_PEERCONNECTION_FACTORY_H_ diff --git a/api/crypto/BUILD.gn b/api/crypto/BUILD.gn new file mode 100644 index 0000000..70626f6 --- /dev/null +++ b/api/crypto/BUILD.gn @@ -0,0 +1,49 @@ +# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../webrtc.gni") + +group("crypto") { + deps = [ + ":frame_decryptor_interface", + ":frame_encryptor_interface", + ":options", + ] +} + +rtc_library("options") { + visibility = [ "*" ] + sources = [ + "crypto_options.cc", + "crypto_options.h", + ] + deps = [ + "../../rtc_base:rtc_base", + "../../rtc_base/system:rtc_export", + ] +} + +rtc_source_set("frame_decryptor_interface") { + visibility = [ "*" ] + sources = [ "frame_decryptor_interface.h" ] + deps = [ + "..:array_view", + "..:rtp_parameters", + "../../rtc_base:refcount", + ] +} + +rtc_source_set("frame_encryptor_interface") { + visibility = [ "*" ] + sources = [ "frame_encryptor_interface.h" ] + deps = [ + "..:array_view", + "..:rtp_parameters", + "../../rtc_base:refcount", + ] +} diff --git a/api/crypto/crypto_options.cc b/api/crypto/crypto_options.cc new file mode 100644 index 0000000..f47e844 --- /dev/null +++ b/api/crypto/crypto_options.cc @@ -0,0 +1,89 @@ +/* + * Copyright 2018 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/crypto/crypto_options.h" + +#include "rtc_base/ssl_stream_adapter.h" + +namespace webrtc { + +CryptoOptions::CryptoOptions() {} + +CryptoOptions::CryptoOptions(const CryptoOptions& other) { + srtp = other.srtp; + sframe = other.sframe; +} + +CryptoOptions::~CryptoOptions() {} + +// static +CryptoOptions CryptoOptions::NoGcm() { + CryptoOptions options; + options.srtp.enable_gcm_crypto_suites = false; + return options; +} + +std::vector CryptoOptions::GetSupportedDtlsSrtpCryptoSuites() const { + std::vector crypto_suites; + // Note: SRTP_AES128_CM_SHA1_80 is what is required to be supported (by + // draft-ietf-rtcweb-security-arch), but SRTP_AES128_CM_SHA1_32 is allowed as + // well, and saves a few bytes per packet if it ends up selected. + // As the cipher suite is potentially insecure, it will only be used if + // enabled by both peers. + if (srtp.enable_aes128_sha1_32_crypto_cipher) { + crypto_suites.push_back(rtc::SRTP_AES128_CM_SHA1_32); + } + if (srtp.enable_aes128_sha1_80_crypto_cipher) { + crypto_suites.push_back(rtc::SRTP_AES128_CM_SHA1_80); + } + + // Note: GCM cipher suites are not the top choice since they increase the + // packet size. In order to negotiate them the other side must not support + // SRTP_AES128_CM_SHA1_80. 
+ if (srtp.enable_gcm_crypto_suites) { + crypto_suites.push_back(rtc::SRTP_AEAD_AES_256_GCM); + crypto_suites.push_back(rtc::SRTP_AEAD_AES_128_GCM); + } + RTC_CHECK(!crypto_suites.empty()); + return crypto_suites; +} + +bool CryptoOptions::operator==(const CryptoOptions& other) const { + struct data_being_tested_for_equality { + struct Srtp { + bool enable_gcm_crypto_suites; + bool enable_aes128_sha1_32_crypto_cipher; + bool enable_aes128_sha1_80_crypto_cipher; + bool enable_encrypted_rtp_header_extensions; + } srtp; + struct SFrame { + bool require_frame_encryption; + } sframe; + }; + static_assert(sizeof(data_being_tested_for_equality) == sizeof(*this), + "Did you add something to CryptoOptions and forget to " + "update operator==?"); + + return srtp.enable_gcm_crypto_suites == other.srtp.enable_gcm_crypto_suites && + srtp.enable_aes128_sha1_32_crypto_cipher == + other.srtp.enable_aes128_sha1_32_crypto_cipher && + srtp.enable_aes128_sha1_80_crypto_cipher == + other.srtp.enable_aes128_sha1_80_crypto_cipher && + srtp.enable_encrypted_rtp_header_extensions == + other.srtp.enable_encrypted_rtp_header_extensions && + sframe.require_frame_encryption == + other.sframe.require_frame_encryption; +} + +bool CryptoOptions::operator!=(const CryptoOptions& other) const { + return !(*this == other); +} + +} // namespace webrtc diff --git a/api/crypto/crypto_options.h b/api/crypto/crypto_options.h new file mode 100644 index 0000000..5f6cea6 --- /dev/null +++ b/api/crypto/crypto_options.h @@ -0,0 +1,72 @@ +/* + * Copyright 2018 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_CRYPTO_CRYPTO_OPTIONS_H_ +#define API_CRYPTO_CRYPTO_OPTIONS_H_ + +#include + +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// CryptoOptions defines advanced cryptographic settings for native WebRTC. +// These settings must be passed into PeerConnectionFactoryInterface::Options +// and are only applicable to native use cases of WebRTC. +struct RTC_EXPORT CryptoOptions { + CryptoOptions(); + CryptoOptions(const CryptoOptions& other); + ~CryptoOptions(); + + // Helper method to return an instance of the CryptoOptions with GCM crypto + // suites disabled. This method should be used instead of depending on current + // default values set by the constructor. + static CryptoOptions NoGcm(); + + // Returns a list of the supported DTLS-SRTP Crypto suites based on this set + // of crypto options. + std::vector GetSupportedDtlsSrtpCryptoSuites() const; + + bool operator==(const CryptoOptions& other) const; + bool operator!=(const CryptoOptions& other) const; + + // SRTP Related Peer Connection options. + struct Srtp { + // Enable GCM crypto suites from RFC 7714 for SRTP. GCM will only be used + // if both sides enable it. + bool enable_gcm_crypto_suites = false; + + // If set to true, the (potentially insecure) crypto cipher + // SRTP_AES128_CM_SHA1_32 will be included in the list of supported ciphers + // during negotiation. It will only be used if both peers support it and no + // other ciphers get preferred. + bool enable_aes128_sha1_32_crypto_cipher = false; + + // The most commonly used cipher. Can be disabled, mostly for testing + // purposes. + bool enable_aes128_sha1_80_crypto_cipher = true; + + // If set to true, encrypted RTP header extensions as defined in RFC 6904 + // will be negotiated. They will only be used if both peers support them. + bool enable_encrypted_rtp_header_extensions = false; + } srtp; + + // Options to be used when the FrameEncryptor / FrameDecryptor APIs are used. 
+ struct SFrame { + // If set all RtpSenders must have an FrameEncryptor attached to them before + // they are allowed to send packets. All RtpReceivers must have a + // FrameDecryptor attached to them before they are able to receive packets. + bool require_frame_encryption = false; + } sframe; +}; + +} // namespace webrtc + +#endif // API_CRYPTO_CRYPTO_OPTIONS_H_ diff --git a/api/crypto/frame_decryptor_interface.h b/api/crypto/frame_decryptor_interface.h new file mode 100644 index 0000000..2f6bdac --- /dev/null +++ b/api/crypto/frame_decryptor_interface.h @@ -0,0 +1,76 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_CRYPTO_FRAME_DECRYPTOR_INTERFACE_H_ +#define API_CRYPTO_FRAME_DECRYPTOR_INTERFACE_H_ + +#include + +#include "api/array_view.h" +#include "api/media_types.h" +#include "rtc_base/ref_count.h" + +namespace webrtc { + +// FrameDecryptorInterface allows users to provide a custom decryption +// implementation for all incoming audio and video frames. The user must also +// provide a FrameEncryptorInterface to be able to encrypt the frames being +// sent out of the device. Note this is an additional layer of encyrption in +// addition to the standard SRTP mechanism and is not intended to be used +// without it. You may assume that this interface will have the same lifetime +// as the RTPReceiver it is attached to. It must only be attached to one +// RTPReceiver. Additional data may be null. +class FrameDecryptorInterface : public rtc::RefCountInterface { + public: + // The Status enum represents all possible states that can be + // returned when attempting to decrypt a frame. 
kRecoverable indicates that + // there was an error with the given frame and so it should not be passed to + // the decoder, however it hints that the receive stream is still decryptable + // which is important for determining when to send key frame requests + // kUnknown should never be returned by the implementor. + enum class Status { kOk, kRecoverable, kFailedToDecrypt, kUnknown }; + + struct Result { + Result(Status status, size_t bytes_written) + : status(status), bytes_written(bytes_written) {} + + bool IsOk() const { return status == Status::kOk; } + + const Status status; + const size_t bytes_written; + }; + + ~FrameDecryptorInterface() override {} + + // Attempts to decrypt the encrypted frame. You may assume the frame size will + // be allocated to the size returned from GetMaxPlaintextSize. You may assume + // that the frames are in order if SRTP is enabled. The stream is not provided + // here and it is up to the implementor to transport this information to the + // receiver if they care about it. You must set bytes_written to how many + // bytes you wrote to in the frame buffer. kOk must be returned if successful, + // kRecoverable should be returned if the failure was due to something other + // than a decryption failure. kFailedToDecrypt should be returned in all other + // cases. + virtual Result Decrypt(cricket::MediaType media_type, + const std::vector& csrcs, + rtc::ArrayView additional_data, + rtc::ArrayView encrypted_frame, + rtc::ArrayView frame) = 0; + + // Returns the total required length in bytes for the output of the + // decryption. This can be larger than the actual number of bytes you need but + // must never be smaller as it informs the size of the frame buffer. 
+ virtual size_t GetMaxPlaintextByteSize(cricket::MediaType media_type, + size_t encrypted_frame_size) = 0; +}; + +} // namespace webrtc + +#endif // API_CRYPTO_FRAME_DECRYPTOR_INTERFACE_H_ diff --git a/api/crypto/frame_encryptor_interface.h b/api/crypto/frame_encryptor_interface.h new file mode 100644 index 0000000..1452b80 --- /dev/null +++ b/api/crypto/frame_encryptor_interface.h @@ -0,0 +1,54 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_CRYPTO_FRAME_ENCRYPTOR_INTERFACE_H_ +#define API_CRYPTO_FRAME_ENCRYPTOR_INTERFACE_H_ + +#include "api/array_view.h" +#include "api/media_types.h" +#include "rtc_base/ref_count.h" + +namespace webrtc { + +// FrameEncryptorInterface allows users to provide a custom encryption +// implementation to encrypt all outgoing audio and video frames. The user must +// also provide a FrameDecryptorInterface to be able to decrypt the frames on +// the receiving device. Note this is an additional layer of encryption in +// addition to the standard SRTP mechanism and is not intended to be used +// without it. Implementations of this interface will have the same lifetime as +// the RTPSenders it is attached to. Additional data may be null. +class FrameEncryptorInterface : public rtc::RefCountInterface { + public: + ~FrameEncryptorInterface() override {} + + // Attempts to encrypt the provided frame. You may assume the encrypted_frame + // will match the size returned by GetMaxCiphertextByteSize for a give frame. + // You may assume that the frames will arrive in order if SRTP is enabled. + // The ssrc will simply identify which stream the frame is travelling on. 
You + // must set bytes_written to the number of bytes you wrote in the + // encrypted_frame. 0 must be returned if successful all other numbers can be + // selected by the implementer to represent error codes. + virtual int Encrypt(cricket::MediaType media_type, + uint32_t ssrc, + rtc::ArrayView additional_data, + rtc::ArrayView frame, + rtc::ArrayView encrypted_frame, + size_t* bytes_written) = 0; + + // Returns the total required length in bytes for the output of the + // encryption. This can be larger than the actual number of bytes you need but + // must never be smaller as it informs the size of the encrypted_frame buffer. + virtual size_t GetMaxCiphertextByteSize(cricket::MediaType media_type, + size_t frame_size) = 0; +}; + +} // namespace webrtc + +#endif // API_CRYPTO_FRAME_ENCRYPTOR_INTERFACE_H_ diff --git a/api/crypto_params.h b/api/crypto_params.h new file mode 100644 index 0000000..5da352c --- /dev/null +++ b/api/crypto_params.h @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_CRYPTO_PARAMS_H_ +#define API_CRYPTO_PARAMS_H_ + +#include + +namespace cricket { + +// Parameters for SRTP negotiation, as described in RFC 4568. +// TODO(benwright) - Rename to SrtpCryptoParams as these only apply to SRTP and +// not generic crypto parameters for WebRTC. 
+struct CryptoParams { + CryptoParams() : tag(0) {} + CryptoParams(int t, + const std::string& cs, + const std::string& kp, + const std::string& sp) + : tag(t), cipher_suite(cs), key_params(kp), session_params(sp) {} + + bool Matches(const CryptoParams& params) const { + return (tag == params.tag && cipher_suite == params.cipher_suite); + } + + int tag; + std::string cipher_suite; + std::string key_params; + std::string session_params; +}; + +} // namespace cricket + +#endif // API_CRYPTO_PARAMS_H_ diff --git a/api/data_channel_interface.cc b/api/data_channel_interface.cc new file mode 100644 index 0000000..d299ced --- /dev/null +++ b/api/data_channel_interface.cc @@ -0,0 +1,43 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/data_channel_interface.h" + +namespace webrtc { + +bool DataChannelInterface::ordered() const { + return false; +} + +uint16_t DataChannelInterface::maxRetransmitTime() const { + return 0; +} + +uint16_t DataChannelInterface::maxRetransmits() const { + return 0; +} + +absl::optional DataChannelInterface::maxRetransmitsOpt() const { + return absl::nullopt; +} + +absl::optional DataChannelInterface::maxPacketLifeTime() const { + return absl::nullopt; +} + +std::string DataChannelInterface::protocol() const { + return std::string(); +} + +bool DataChannelInterface::negotiated() const { + return false; +} + +} // namespace webrtc diff --git a/api/data_channel_interface.h b/api/data_channel_interface.h new file mode 100644 index 0000000..5b2b126 --- /dev/null +++ b/api/data_channel_interface.h @@ -0,0 +1,197 @@ +/* + * Copyright 2012 The WebRTC project authors. 
All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file contains interfaces for DataChannels +// http://dev.w3.org/2011/webrtc/editor/webrtc.html#rtcdatachannel + +#ifndef API_DATA_CHANNEL_INTERFACE_H_ +#define API_DATA_CHANNEL_INTERFACE_H_ + +#include +#include + +#include + +#include "absl/types/optional.h" +#include "api/priority.h" +#include "api/rtc_error.h" +#include "rtc_base/checks.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/ref_count.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// C++ version of: https://www.w3.org/TR/webrtc/#idl-def-rtcdatachannelinit +// TODO(deadbeef): Use absl::optional for the "-1 if unset" things. +struct DataChannelInit { + // Deprecated. Reliability is assumed, and channel will be unreliable if + // maxRetransmitTime or MaxRetransmits is set. + bool reliable = false; + + // True if ordered delivery is required. + bool ordered = true; + + // The max period of time in milliseconds in which retransmissions will be + // sent. After this time, no more retransmissions will be sent. + // + // Cannot be set along with |maxRetransmits|. + // This is called |maxPacketLifeTime| in the WebRTC JS API. + absl::optional maxRetransmitTime; + + // The max number of retransmissions. + // + // Cannot be set along with |maxRetransmitTime|. + absl::optional maxRetransmits; + + // This is set by the application and opaque to the WebRTC implementation. + std::string protocol; + + // True if the channel has been externally negotiated and we do not send an + // in-band signalling in the form of an "open" message. 
If this is true, |id| + // below must be set; otherwise it should be unset and will be negotiated + // in-band. + bool negotiated = false; + + // The stream id, or SID, for SCTP data channels. -1 if unset (see above). + int id = -1; + + // https://w3c.github.io/webrtc-priority/#new-rtcdatachannelinit-member + absl::optional priority; +}; + +// At the JavaScript level, data can be passed in as a string or a blob, so +// this structure's |binary| flag tells whether the data should be interpreted +// as binary or text. +struct DataBuffer { + DataBuffer(const rtc::CopyOnWriteBuffer& data, bool binary) + : data(data), binary(binary) {} + // For convenience for unit tests. + explicit DataBuffer(const std::string& text) + : data(text.data(), text.length()), binary(false) {} + size_t size() const { return data.size(); } + + rtc::CopyOnWriteBuffer data; + // Indicates if the received data contains UTF-8 or binary data. + // Note that the upper layers are left to verify the UTF-8 encoding. + // TODO(jiayl): prefer to use an enum instead of a bool. + bool binary; +}; + +// Used to implement RTCDataChannel events. +// +// The code responding to these callbacks should unwind the stack before +// using any other webrtc APIs; re-entrancy is not supported. +class DataChannelObserver { + public: + // The data channel state have changed. + virtual void OnStateChange() = 0; + // A data buffer was successfully received. + virtual void OnMessage(const DataBuffer& buffer) = 0; + // The data channel's buffered_amount has changed. + virtual void OnBufferedAmountChange(uint64_t sent_data_size) {} + + protected: + virtual ~DataChannelObserver() = default; +}; + +class RTC_EXPORT DataChannelInterface : public rtc::RefCountInterface { + public: + // C++ version of: https://www.w3.org/TR/webrtc/#idl-def-rtcdatachannelstate + // Unlikely to change, but keep in sync with DataChannel.java:State and + // RTCDataChannel.h:RTCDataChannelState. 
+ enum DataState { + kConnecting, + kOpen, // The DataChannel is ready to send data. + kClosing, + kClosed + }; + + static const char* DataStateString(DataState state) { + switch (state) { + case kConnecting: + return "connecting"; + case kOpen: + return "open"; + case kClosing: + return "closing"; + case kClosed: + return "closed"; + } + RTC_CHECK(false) << "Unknown DataChannel state: " << state; + return ""; + } + + // Used to receive events from the data channel. Only one observer can be + // registered at a time. UnregisterObserver should be called before the + // observer object is destroyed. + virtual void RegisterObserver(DataChannelObserver* observer) = 0; + virtual void UnregisterObserver() = 0; + + // The label attribute represents a label that can be used to distinguish this + // DataChannel object from other DataChannel objects. + virtual std::string label() const = 0; + + // The accessors below simply return the properties from the DataChannelInit + // the data channel was constructed with. + virtual bool reliable() const = 0; + // TODO(deadbeef): Remove these dummy implementations when all classes have + // implemented these APIs. They should all just return the values the + // DataChannel was created with. + virtual bool ordered() const; + // TODO(hta): Deprecate and remove the following two functions. + virtual uint16_t maxRetransmitTime() const; + virtual uint16_t maxRetransmits() const; + virtual absl::optional maxRetransmitsOpt() const; + virtual absl::optional maxPacketLifeTime() const; + virtual std::string protocol() const; + virtual bool negotiated() const; + + // Returns the ID from the DataChannelInit, if it was negotiated out-of-band. + // If negotiated in-band, this ID will be populated once the DTLS role is + // determined, and until then this will return -1. 
+ virtual int id() const = 0; + virtual Priority priority() const { return Priority::kLow; } + virtual DataState state() const = 0; + // When state is kClosed, and the DataChannel was not closed using + // the closing procedure, returns the error information about the closing. + // The default implementation returns "no error". + virtual RTCError error() const { return RTCError(); } + virtual uint32_t messages_sent() const = 0; + virtual uint64_t bytes_sent() const = 0; + virtual uint32_t messages_received() const = 0; + virtual uint64_t bytes_received() const = 0; + + // Returns the number of bytes of application data (UTF-8 text and binary + // data) that have been queued using Send but have not yet been processed at + // the SCTP level. See comment above Send below. + virtual uint64_t buffered_amount() const = 0; + + // Begins the graceful data channel closing procedure. See: + // https://tools.ietf.org/html/draft-ietf-rtcweb-data-channel-13#section-6.7 + virtual void Close() = 0; + + // Sends |data| to the remote peer. If the data can't be sent at the SCTP + // level (due to congestion control), it's buffered at the data channel level, + // up to a maximum of 16MB. If Send is called while this buffer is full, the + // data channel will be closed abruptly. + // + // So, it's important to use buffered_amount() and OnBufferedAmountChange to + // ensure the data channel is used efficiently but without filling this + // buffer. + virtual bool Send(const DataBuffer& buffer) = 0; + + protected: + ~DataChannelInterface() override = default; +}; + +} // namespace webrtc + +#endif // API_DATA_CHANNEL_INTERFACE_H_ diff --git a/api/dtls_transport_interface.cc b/api/dtls_transport_interface.cc new file mode 100644 index 0000000..a68ff8f --- /dev/null +++ b/api/dtls_transport_interface.cc @@ -0,0 +1,55 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/dtls_transport_interface.h" + +namespace webrtc { + +DtlsTransportInformation::DtlsTransportInformation() + : state_(DtlsTransportState::kNew) {} + +DtlsTransportInformation::DtlsTransportInformation(DtlsTransportState state) + : state_(state) {} + +DtlsTransportInformation::DtlsTransportInformation( + DtlsTransportState state, + absl::optional tls_version, + absl::optional ssl_cipher_suite, + absl::optional srtp_cipher_suite, + std::unique_ptr remote_ssl_certificates) + : state_(state), + tls_version_(tls_version), + ssl_cipher_suite_(ssl_cipher_suite), + srtp_cipher_suite_(srtp_cipher_suite), + remote_ssl_certificates_(std::move(remote_ssl_certificates)) {} + +DtlsTransportInformation::DtlsTransportInformation( + const DtlsTransportInformation& c) + : state_(c.state()), + tls_version_(c.tls_version_), + ssl_cipher_suite_(c.ssl_cipher_suite_), + srtp_cipher_suite_(c.srtp_cipher_suite_), + remote_ssl_certificates_(c.remote_ssl_certificates() + ? c.remote_ssl_certificates()->Clone() + : nullptr) {} + +DtlsTransportInformation& DtlsTransportInformation::operator=( + const DtlsTransportInformation& c) { + state_ = c.state(); + tls_version_ = c.tls_version_; + ssl_cipher_suite_ = c.ssl_cipher_suite_; + srtp_cipher_suite_ = c.srtp_cipher_suite_; + remote_ssl_certificates_ = c.remote_ssl_certificates() + ? 
c.remote_ssl_certificates()->Clone() + : nullptr; + return *this; +} + +} // namespace webrtc diff --git a/api/dtls_transport_interface.h b/api/dtls_transport_interface.h new file mode 100644 index 0000000..86715b0 --- /dev/null +++ b/api/dtls_transport_interface.h @@ -0,0 +1,108 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_DTLS_TRANSPORT_INTERFACE_H_ +#define API_DTLS_TRANSPORT_INTERFACE_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/ice_transport_interface.h" +#include "api/rtc_error.h" +#include "api/scoped_refptr.h" +#include "rtc_base/ref_count.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// States of a DTLS transport, corresponding to the JS API specification. +// http://w3c.github.io/webrtc-pc/#dom-rtcdtlstransportstate +enum class DtlsTransportState { + kNew, // Has not started negotiating yet. + kConnecting, // In the process of negotiating a secure connection. + kConnected, // Completed negotiation and verified fingerprints. + kClosed, // Intentionally closed. + kFailed, // Failure due to an error or failing to verify a remote + // fingerprint. + kNumValues +}; + +// This object gives snapshot information about the changeable state of a +// DTLSTransport. 
+class RTC_EXPORT DtlsTransportInformation { + public: + DtlsTransportInformation(); + explicit DtlsTransportInformation(DtlsTransportState state); + DtlsTransportInformation( + DtlsTransportState state, + absl::optional tls_version, + absl::optional ssl_cipher_suite, + absl::optional srtp_cipher_suite, + std::unique_ptr remote_ssl_certificates); + // Copy and assign + DtlsTransportInformation(const DtlsTransportInformation& c); + DtlsTransportInformation& operator=(const DtlsTransportInformation& c); + // Move + DtlsTransportInformation(DtlsTransportInformation&& other) = default; + DtlsTransportInformation& operator=(DtlsTransportInformation&& other) = + default; + + DtlsTransportState state() const { return state_; } + absl::optional tls_version() const { return tls_version_; } + absl::optional ssl_cipher_suite() const { return ssl_cipher_suite_; } + absl::optional srtp_cipher_suite() const { return srtp_cipher_suite_; } + // The accessor returns a temporary pointer, it does not release ownership. + const rtc::SSLCertChain* remote_ssl_certificates() const { + return remote_ssl_certificates_.get(); + } + + private: + DtlsTransportState state_; + absl::optional tls_version_; + absl::optional ssl_cipher_suite_; + absl::optional srtp_cipher_suite_; + std::unique_ptr remote_ssl_certificates_; +}; + +class DtlsTransportObserverInterface { + public: + // This callback carries information about the state of the transport. + // The argument is a pass-by-value snapshot of the state. + virtual void OnStateChange(DtlsTransportInformation info) = 0; + // This callback is called when an error occurs, causing the transport + // to go to the kFailed state. + virtual void OnError(RTCError error) = 0; + + protected: + virtual ~DtlsTransportObserverInterface() = default; +}; + +// A DTLS transport, as represented to the outside world. +// This object is created on the network thread, and can only be +// accessed on that thread, except for functions explicitly marked otherwise. 
+// References can be held by other threads, and destruction can therefore +// be initiated by other threads. +class DtlsTransportInterface : public rtc::RefCountInterface { + public: + // Returns a pointer to the ICE transport that is owned by the DTLS transport. + virtual rtc::scoped_refptr ice_transport() = 0; + // Returns information on the state of the DtlsTransport. + // This function can be called from other threads. + virtual DtlsTransportInformation Information() = 0; + // Observer management. + virtual void RegisterObserver(DtlsTransportObserverInterface* observer) = 0; + virtual void UnregisterObserver() = 0; +}; + +} // namespace webrtc + +#endif // API_DTLS_TRANSPORT_INTERFACE_H_ diff --git a/api/dtmf_sender_interface.h b/api/dtmf_sender_interface.h new file mode 100644 index 0000000..7c0e2ce --- /dev/null +++ b/api/dtmf_sender_interface.h @@ -0,0 +1,125 @@ +/* + * Copyright 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_DTMF_SENDER_INTERFACE_H_ +#define API_DTMF_SENDER_INTERFACE_H_ + +#include + +#include "api/media_stream_interface.h" +#include "rtc_base/ref_count.h" + +namespace webrtc { + +// DtmfSender callback interface, used to implement RTCDtmfSender events. +// Applications should implement this interface to get notifications from the +// DtmfSender. +class DtmfSenderObserverInterface { + public: + // Triggered when DTMF |tone| is sent. + // If |tone| is empty that means the DtmfSender has sent out all the given + // tones. + // The callback includes the state of the tone buffer at the time when + // the tone finished playing. 
+ virtual void OnToneChange(const std::string& tone, + const std::string& tone_buffer) {} + // DEPRECATED: Older API without tone buffer. + // TODO(bugs.webrtc.org/9725): Remove old API and default implementation + // when old callers are gone. + virtual void OnToneChange(const std::string& tone) {} + + protected: + virtual ~DtmfSenderObserverInterface() = default; +}; + +// The interface of native implementation of the RTCDTMFSender defined by the +// WebRTC W3C Editor's Draft. +// See: https://www.w3.org/TR/webrtc/#peer-to-peer-dtmf +class DtmfSenderInterface : public rtc::RefCountInterface { + public: + // Provides the spec compliant default 2 second delay for the ',' character. + static const int kDtmfDefaultCommaDelayMs = 2000; + + // Used to receive events from the DTMF sender. Only one observer can be + // registered at a time. UnregisterObserver should be called before the + // observer object is destroyed. + virtual void RegisterObserver(DtmfSenderObserverInterface* observer) = 0; + virtual void UnregisterObserver() = 0; + + // Returns true if this DtmfSender is capable of sending DTMF. Otherwise + // returns false. To be able to send DTMF, the associated RtpSender must be + // able to send packets, and a "telephone-event" codec must be negotiated. + virtual bool CanInsertDtmf() = 0; + + // Queues a task that sends the DTMF |tones|. The |tones| parameter is treated + // as a series of characters. The characters 0 through 9, A through D, #, and + // * generate the associated DTMF tones. The characters a to d are equivalent + // to A to D. The character ',' indicates a delay of 2 seconds before + // processing the next character in the tones parameter. + // + // Unrecognized characters are ignored. + // + // The |duration| parameter indicates the duration in ms to use for each + // character passed in the |tones| parameter. The duration cannot be more + // than 6000 or less than 70. 
+ // + // The |inter_tone_gap| parameter indicates the gap between tones in ms. The + // |inter_tone_gap| must be at least 50 ms but should be as short as + // possible. + // + // The |comma_delay| parameter indicates the delay after the ',' + // character. InsertDtmf specifies |comma_delay| as an argument + // with a default value of 2 seconds as per the WebRTC spec. This parameter + // allows users to comply with legacy WebRTC clients. The |comma_delay| + // must be at least 50 ms. + // + // If InsertDtmf is called on the same object while an existing task for this + // object to generate DTMF is still running, the previous task is canceled. + // Returns true on success and false on failure. + virtual bool InsertDtmf(const std::string& tones, + int duration, + int inter_tone_gap) { + return InsertDtmf(tones, duration, inter_tone_gap, + kDtmfDefaultCommaDelayMs); + } + virtual bool InsertDtmf(const std::string& tones, + int duration, + int inter_tone_gap, + int comma_delay) { + // TODO(bugs.webrtc.org/165700): Remove once downstream implementations + // override this signature rather than the 3-parameter one. + return InsertDtmf(tones, duration, inter_tone_gap); + } + + // Returns the tones remaining to be played out. + virtual std::string tones() const = 0; + + // Returns the current tone duration value in ms. + // This value will be the value last set via the InsertDtmf() method, or the + // default value of 100 ms if InsertDtmf() was never called. + virtual int duration() const = 0; + + // Returns the current value of the between-tone gap in ms. + // This value will be the value last set via the InsertDtmf() method, or the + // default value of 50 ms if InsertDtmf() was never called. + virtual int inter_tone_gap() const = 0; + + // Returns the current value of the "," character delay in ms. + // This value will be the value last set via the InsertDtmf() method, or the + // default value of 2000 ms if InsertDtmf() was never called. 
+ virtual int comma_delay() const { return kDtmfDefaultCommaDelayMs; } + + protected: + ~DtmfSenderInterface() override = default; +}; + +} // namespace webrtc + +#endif // API_DTMF_SENDER_INTERFACE_H_ diff --git a/api/fec_controller.h b/api/fec_controller.h new file mode 100644 index 0000000..3e5f7bb --- /dev/null +++ b/api/fec_controller.h @@ -0,0 +1,92 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_FEC_CONTROLLER_H_ +#define API_FEC_CONTROLLER_H_ + +#include +#include + +#include "api/video/video_frame_type.h" +#include "modules/include/module_fec_types.h" + +namespace webrtc { +// TODO(yinwa): work in progress. API in class FecController should not be +// used by other users until this comment is removed. + +// Callback class used for telling the user about how to configure the FEC, +// and the rates sent the last second is returned to the VCM. +class VCMProtectionCallback { + public: + virtual int ProtectionRequest(const FecProtectionParams* delta_params, + const FecProtectionParams* key_params, + uint32_t* sent_video_rate_bps, + uint32_t* sent_nack_rate_bps, + uint32_t* sent_fec_rate_bps) = 0; + + protected: + virtual ~VCMProtectionCallback() {} +}; + +// FecController calculates how much of the allocated network +// capacity that can be used by an encoder and how much that +// is needed for redundant packets such as FEC and NACK. It uses an +// implementation of |VCMProtectionCallback| to set new FEC parameters and get +// the bitrate currently used for FEC and NACK. +// Usage: +// Setup by calling SetProtectionMethod and SetEncodingData. 
+// For each encoded image, call UpdateWithEncodedData.
+// Each time the bandwidth estimate changes, call UpdateFecRates. UpdateFecRates
+// will return the bitrate that can be used by an encoder.
+// A lock is used to protect internal states, so methods can be called on an
+// arbitrary thread.
+class FecController {
+ public:
+  virtual ~FecController() {}
+
+  virtual void SetProtectionCallback(
+      VCMProtectionCallback* protection_callback) = 0;
+  virtual void SetProtectionMethod(bool enable_fec, bool enable_nack) = 0;
+
+  // Informs loss protection logic of initial encoding state.
+  virtual void SetEncodingData(size_t width,
+                               size_t height,
+                               size_t num_temporal_layers,
+                               size_t max_payload_size) = 0;
+
+  // Returns target rate for the encoder given the channel parameters.
+  // Inputs:  estimated_bitrate_bps - the estimated network bitrate in bits/s.
+  //          actual_framerate - encoder frame rate.
+  //          fraction_lost - packet loss rate in % in the network.
+  //          loss_mask_vector - packet loss mask since last time this method
+  //          was called. round_trip_time_ms - round trip time in milliseconds.
+  virtual uint32_t UpdateFecRates(uint32_t estimated_bitrate_bps,
+                                  int actual_framerate,
+                                  uint8_t fraction_lost,
+                                  std::vector loss_mask_vector,
+                                  int64_t round_trip_time_ms) = 0;
+
+  // Informs of encoded output.
+  virtual void UpdateWithEncodedData(
+      size_t encoded_image_length,
+      VideoFrameType encoded_image_frametype) = 0;
+
+  // Returns whether this FEC Controller needs Loss Vector Mask as input.
+ virtual bool UseLossVectorMask() = 0; +}; + +class FecControllerFactoryInterface { + public: + virtual std::unique_ptr CreateFecController() = 0; + virtual ~FecControllerFactoryInterface() = default; +}; + +} // namespace webrtc +#endif // API_FEC_CONTROLLER_H_ diff --git a/api/fec_controller_override.h b/api/fec_controller_override.h new file mode 100644 index 0000000..233812f --- /dev/null +++ b/api/fec_controller_override.h @@ -0,0 +1,28 @@ +/* Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This is an EXPERIMENTAL interface. + +#ifndef API_FEC_CONTROLLER_OVERRIDE_H_ +#define API_FEC_CONTROLLER_OVERRIDE_H_ + +namespace webrtc { + +// Interface for temporarily overriding FecController's bitrate allocation. +class FecControllerOverride { + public: + virtual void SetFecAllowed(bool fec_allowed) = 0; + + protected: + virtual ~FecControllerOverride() = default; +}; + +} // namespace webrtc + +#endif // API_FEC_CONTROLLER_OVERRIDE_H_ diff --git a/api/frame_transformer_interface.h b/api/frame_transformer_interface.h new file mode 100644 index 0000000..2cfe6ed --- /dev/null +++ b/api/frame_transformer_interface.h @@ -0,0 +1,99 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_FRAME_TRANSFORMER_INTERFACE_H_ +#define API_FRAME_TRANSFORMER_INTERFACE_H_ + +#include +#include + +#include "api/scoped_refptr.h" +#include "api/video/encoded_frame.h" +#include "api/video/video_frame_metadata.h" +#include "rtc_base/ref_count.h" + +namespace webrtc { + +// Owns the frame payload data. +class TransformableFrameInterface { + public: + virtual ~TransformableFrameInterface() = default; + + // Returns the frame payload data. The data is valid until the next non-const + // method call. + virtual rtc::ArrayView GetData() const = 0; + + // Copies |data| into the owned frame payload data. + virtual void SetData(rtc::ArrayView data) = 0; + + virtual uint32_t GetTimestamp() const = 0; + virtual uint32_t GetSsrc() const = 0; +}; + +class TransformableVideoFrameInterface : public TransformableFrameInterface { + public: + virtual ~TransformableVideoFrameInterface() = default; + virtual bool IsKeyFrame() const = 0; + + // Returns data needed in the frame transformation logic; for example, + // when the transformation applied to the frame is encryption/decryption, the + // additional data holds the serialized generic frame descriptor extension + // calculated in webrtc::RtpDescriptorAuthentication. + // TODO(bugs.webrtc.org/11380) remove from interface once + // webrtc::RtpDescriptorAuthentication is exposed in api/. + virtual std::vector GetAdditionalData() const = 0; + + virtual const VideoFrameMetadata& GetMetadata() const = 0; +}; + +// Extends the TransformableFrameInterface to expose audio-specific information. +class TransformableAudioFrameInterface : public TransformableFrameInterface { + public: + virtual ~TransformableAudioFrameInterface() = default; + + // Exposes the frame header, enabling the interface clients to use the + // information in the header as needed, for example to compile the list of + // csrcs. 
+ virtual const RTPHeader& GetHeader() const = 0; +}; + +// Objects implement this interface to be notified with the transformed frame. +class TransformedFrameCallback : public rtc::RefCountInterface { + public: + virtual void OnTransformedFrame( + std::unique_ptr frame) = 0; + + protected: + ~TransformedFrameCallback() override = default; +}; + +// Transforms encoded frames. The transformed frame is sent in a callback using +// the TransformedFrameCallback interface (see above). +class FrameTransformerInterface : public rtc::RefCountInterface { + public: + // Transforms |frame| using the implementing class' processing logic. + virtual void Transform( + std::unique_ptr transformable_frame) = 0; + + virtual void RegisterTransformedFrameCallback( + rtc::scoped_refptr) {} + virtual void RegisterTransformedFrameSinkCallback( + rtc::scoped_refptr, + uint32_t ssrc) {} + virtual void UnregisterTransformedFrameCallback() {} + virtual void UnregisterTransformedFrameSinkCallback(uint32_t ssrc) {} + + protected: + ~FrameTransformerInterface() override = default; +}; + +} // namespace webrtc + +#endif // API_FRAME_TRANSFORMER_INTERFACE_H_ diff --git a/api/function_view.h b/api/function_view.h new file mode 100644 index 0000000..5ae1bd6 --- /dev/null +++ b/api/function_view.h @@ -0,0 +1,130 @@ +/* + * Copyright 2016 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_FUNCTION_VIEW_H_ +#define API_FUNCTION_VIEW_H_ + +#include +#include + +#include "rtc_base/checks.h" + +// Just like std::function, FunctionView will wrap any callable and hide its +// actual type, exposing only its signature. 
But unlike std::function, +// FunctionView doesn't own its callable---it just points to it. Thus, it's a +// good choice mainly as a function argument when the callable argument will +// not be called again once the function has returned. +// +// Its constructors are implicit, so that callers won't have to convert lambdas +// and other callables to FunctionView explicitly. This is +// safe because FunctionView is only a reference to the real callable. +// +// Example use: +// +// void SomeFunction(rtc::FunctionView index_transform); +// ... +// SomeFunction([](int i) { return 2 * i + 1; }); +// +// Note: FunctionView is tiny (essentially just two pointers) and trivially +// copyable, so it's probably cheaper to pass it by value than by const +// reference. + +namespace rtc { + +template +class FunctionView; // Undefined. + +template +class FunctionView final { + public: + // Constructor for lambdas and other callables; it accepts every type of + // argument except those noted in its enable_if call. + template < + typename F, + typename std::enable_if< + // Not for function pointers; we have another constructor for that + // below. + !std::is_function::type>::type>::value && + + // Not for nullptr; we have another constructor for that below. + !std::is_same::type>::value && + + // Not for FunctionView objects; we have another constructor for that + // (the implicitly declared copy constructor). + !std::is_same::type>::type>::value>::type* = nullptr> + FunctionView(F&& f) + : call_(CallVoidPtr::type>) { + f_.void_ptr = &f; + } + + // Constructor that accepts function pointers. If the argument is null, the + // result is an empty FunctionView. + template < + typename F, + typename std::enable_if::type>::type>::value>::type* = + nullptr> + FunctionView(F&& f) + : call_(f ? CallFunPtr::type> : nullptr) { + f_.fun_ptr = reinterpret_cast(f); + } + + // Constructor that accepts nullptr. It creates an empty FunctionView. 
+ template ::type>::value>::type* = nullptr> + FunctionView(F&& f) : call_(nullptr) {} + + // Default constructor. Creates an empty FunctionView. + FunctionView() : call_(nullptr) {} + + RetT operator()(ArgT... args) const { + RTC_DCHECK(call_); + return call_(f_, std::forward(args)...); + } + + // Returns true if we have a function, false if we don't (i.e., we're null). + explicit operator bool() const { return !!call_; } + + private: + union VoidUnion { + void* void_ptr; + void (*fun_ptr)(); + }; + + template + static RetT CallVoidPtr(VoidUnion vu, ArgT... args) { + return (*static_cast(vu.void_ptr))(std::forward(args)...); + } + template + static RetT CallFunPtr(VoidUnion vu, ArgT... args) { + return (reinterpret_cast::type>(vu.fun_ptr))( + std::forward(args)...); + } + + // A pointer to the callable thing, with type information erased. It's a + // union because we have to use separate types depending on if the callable + // thing is a function pointer or something else. + VoidUnion f_; + + // Pointer to a dispatch function that knows the type of the callable thing + // that's stored in f_, and how to call it. A FunctionView object is empty + // (null) iff call_ is null. + RetT (*call_)(VoidUnion, ArgT...); +}; + +} // namespace rtc + +#endif // API_FUNCTION_VIEW_H_ diff --git a/api/function_view_unittest.cc b/api/function_view_unittest.cc new file mode 100644 index 0000000..156ea5c --- /dev/null +++ b/api/function_view_unittest.cc @@ -0,0 +1,176 @@ +/* + * Copyright 2016 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/function_view.h" + +#include +#include + +#include "test/gtest.h" + +namespace rtc { + +namespace { + +int CallWith33(rtc::FunctionView fv) { + return fv ? fv(33) : -1; +} + +int Add33(int x) { + return x + 33; +} + +} // namespace + +// Test the main use case of FunctionView: implicitly converting a callable +// argument. +TEST(FunctionViewTest, ImplicitConversion) { + EXPECT_EQ(38, CallWith33([](int x) { return x + 5; })); + EXPECT_EQ(66, CallWith33(Add33)); + EXPECT_EQ(-1, CallWith33(nullptr)); +} + +TEST(FunctionViewTest, IntIntLambdaWithoutState) { + auto f = [](int x) { return x + 1; }; + EXPECT_EQ(18, f(17)); + rtc::FunctionView fv(f); + EXPECT_TRUE(fv); + EXPECT_EQ(18, fv(17)); +} + +TEST(FunctionViewTest, IntVoidLambdaWithState) { + int x = 13; + auto f = [x]() mutable { return ++x; }; + rtc::FunctionView fv(f); + EXPECT_TRUE(fv); + EXPECT_EQ(14, f()); + EXPECT_EQ(15, fv()); + EXPECT_EQ(16, f()); + EXPECT_EQ(17, fv()); +} + +TEST(FunctionViewTest, IntIntFunction) { + rtc::FunctionView fv(Add33); + EXPECT_TRUE(fv); + EXPECT_EQ(50, fv(17)); +} + +TEST(FunctionViewTest, IntIntFunctionPointer) { + rtc::FunctionView fv(&Add33); + EXPECT_TRUE(fv); + EXPECT_EQ(50, fv(17)); +} + +TEST(FunctionViewTest, Null) { + // These two call constructors that statically construct null FunctionViews. + EXPECT_FALSE(rtc::FunctionView()); + EXPECT_FALSE(rtc::FunctionView(nullptr)); + + // This calls the constructor for function pointers. + EXPECT_FALSE(rtc::FunctionView(reinterpret_cast(0))); +} + +// Ensure that FunctionView handles move-only arguments and return values. 
+TEST(FunctionViewTest, UniquePtrPassthrough) { + auto f = [](std::unique_ptr x) { return x; }; + rtc::FunctionView(std::unique_ptr)> fv(f); + std::unique_ptr x(new int); + int* x_addr = x.get(); + auto y = fv(std::move(x)); + EXPECT_EQ(x_addr, y.get()); +} + +TEST(FunctionViewTest, CopyConstructor) { + auto f17 = [] { return 17; }; + rtc::FunctionView fv1(f17); + rtc::FunctionView fv2(fv1); + EXPECT_EQ(17, fv1()); + EXPECT_EQ(17, fv2()); +} + +TEST(FunctionViewTest, MoveConstructorIsCopy) { + auto f17 = [] { return 17; }; + rtc::FunctionView fv1(f17); + rtc::FunctionView fv2(std::move(fv1)); // NOLINT + EXPECT_EQ(17, fv1()); + EXPECT_EQ(17, fv2()); +} + +TEST(FunctionViewTest, CopyAssignment) { + auto f17 = [] { return 17; }; + rtc::FunctionView fv1(f17); + auto f23 = [] { return 23; }; + rtc::FunctionView fv2(f23); + EXPECT_EQ(17, fv1()); + EXPECT_EQ(23, fv2()); + fv2 = fv1; + EXPECT_EQ(17, fv1()); + EXPECT_EQ(17, fv2()); +} + +TEST(FunctionViewTest, MoveAssignmentIsCopy) { + auto f17 = [] { return 17; }; + rtc::FunctionView fv1(f17); + auto f23 = [] { return 23; }; + rtc::FunctionView fv2(f23); + EXPECT_EQ(17, fv1()); + EXPECT_EQ(23, fv2()); + fv2 = std::move(fv1); // NOLINT + EXPECT_EQ(17, fv1()); + EXPECT_EQ(17, fv2()); +} + +TEST(FunctionViewTest, Swap) { + auto f17 = [] { return 17; }; + rtc::FunctionView fv1(f17); + auto f23 = [] { return 23; }; + rtc::FunctionView fv2(f23); + EXPECT_EQ(17, fv1()); + EXPECT_EQ(23, fv2()); + using std::swap; + swap(fv1, fv2); + EXPECT_EQ(23, fv1()); + EXPECT_EQ(17, fv2()); +} + +// Ensure that when you copy-construct a FunctionView, the new object points to +// the same function as the old one (as opposed to the new object pointing to +// the old one). 
+TEST(FunctionViewTest, CopyConstructorChaining) { + auto f17 = [] { return 17; }; + rtc::FunctionView fv1(f17); + rtc::FunctionView fv2(fv1); + EXPECT_EQ(17, fv1()); + EXPECT_EQ(17, fv2()); + auto f23 = [] { return 23; }; + fv1 = f23; + EXPECT_EQ(23, fv1()); + EXPECT_EQ(17, fv2()); +} + +// Ensure that when you assign one FunctionView to another, we actually make a +// copy (as opposed to making the second FunctionView point to the first one). +TEST(FunctionViewTest, CopyAssignmentChaining) { + auto f17 = [] { return 17; }; + rtc::FunctionView fv1(f17); + rtc::FunctionView fv2; + EXPECT_TRUE(fv1); + EXPECT_EQ(17, fv1()); + EXPECT_FALSE(fv2); + fv2 = fv1; + EXPECT_EQ(17, fv1()); + EXPECT_EQ(17, fv2()); + auto f23 = [] { return 23; }; + fv1 = f23; + EXPECT_EQ(23, fv1()); + EXPECT_EQ(17, fv2()); +} + +} // namespace rtc diff --git a/api/ice_transport_factory.cc b/api/ice_transport_factory.cc new file mode 100644 index 0000000..c32d7d2 --- /dev/null +++ b/api/ice_transport_factory.cc @@ -0,0 +1,66 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/ice_transport_factory.h" + +#include +#include + +#include "p2p/base/ice_transport_internal.h" +#include "p2p/base/p2p_transport_channel.h" +#include "p2p/base/port_allocator.h" +#include "rtc_base/thread.h" + +namespace webrtc { + +namespace { + +// This implementation of IceTransportInterface is used in cases where +// the only reference to the P2PTransport will be through this class. +// It must be constructed, accessed and destroyed on the signaling thread. 
+class IceTransportWithTransportChannel : public IceTransportInterface { + public: + IceTransportWithTransportChannel( + std::unique_ptr internal) + : internal_(std::move(internal)) {} + + ~IceTransportWithTransportChannel() override { + RTC_DCHECK_RUN_ON(&thread_checker_); + } + + cricket::IceTransportInternal* internal() override { + RTC_DCHECK_RUN_ON(&thread_checker_); + return internal_.get(); + } + + private: + const rtc::ThreadChecker thread_checker_{}; + const std::unique_ptr internal_ + RTC_GUARDED_BY(thread_checker_); +}; + +} // namespace + +rtc::scoped_refptr CreateIceTransport( + cricket::PortAllocator* port_allocator) { + IceTransportInit init; + init.set_port_allocator(port_allocator); + return CreateIceTransport(std::move(init)); +} + +rtc::scoped_refptr CreateIceTransport( + IceTransportInit init) { + return new rtc::RefCountedObject( + std::make_unique( + "", 0, init.port_allocator(), init.async_resolver_factory(), + init.event_log())); +} + +} // namespace webrtc diff --git a/api/ice_transport_factory.h b/api/ice_transport_factory.h new file mode 100644 index 0000000..a9fd04e --- /dev/null +++ b/api/ice_transport_factory.h @@ -0,0 +1,47 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_ICE_TRANSPORT_FACTORY_H_ +#define API_ICE_TRANSPORT_FACTORY_H_ + +#include "api/ice_transport_interface.h" +#include "api/scoped_refptr.h" +#include "rtc_base/system/rtc_export.h" + +namespace cricket { +class PortAllocator; +} // namespace cricket + +namespace webrtc { + +// Static factory for an IceTransport object that can be created +// without using a webrtc::PeerConnection. 
+// The returned object must be accessed and destroyed on the thread that +// created it. +// The PortAllocator must outlive the created IceTransportInterface object. +// TODO(steveanton): Remove in favor of the overload that takes +// IceTransportInit. +RTC_EXPORT rtc::scoped_refptr CreateIceTransport( + cricket::PortAllocator* port_allocator); + +// Static factory for an IceTransport object that can be created +// without using a webrtc::PeerConnection. +// The returned object must be accessed and destroyed on the thread that +// created it. +// |init.port_allocator()| is required and must outlive the created +// IceTransportInterface object. +// |init.async_resolver_factory()| and |init.event_log()| are optional, but if +// provided must outlive the created IceTransportInterface object. +RTC_EXPORT rtc::scoped_refptr CreateIceTransport( + IceTransportInit); + +} // namespace webrtc + +#endif // API_ICE_TRANSPORT_FACTORY_H_ diff --git a/api/ice_transport_interface.h b/api/ice_transport_interface.h new file mode 100644 index 0000000..d2f1edc --- /dev/null +++ b/api/ice_transport_interface.h @@ -0,0 +1,95 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_ICE_TRANSPORT_INTERFACE_H_ +#define API_ICE_TRANSPORT_INTERFACE_H_ + +#include + +#include "api/async_resolver_factory.h" +#include "api/rtc_error.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/scoped_refptr.h" +#include "rtc_base/ref_count.h" + +namespace cricket { +class IceTransportInternal; +class PortAllocator; +} // namespace cricket + +namespace webrtc { + +// An ICE transport, as represented to the outside world. 
+// This object is refcounted, and is therefore alive until the +// last holder has released it. +class IceTransportInterface : public rtc::RefCountInterface { + public: + // Accessor for the internal representation of an ICE transport. + // The returned object can only be safely used on the signalling thread. + // TODO(crbug.com/907849): Add API calls for the functions that have to + // be exposed to clients, and stop allowing access to the + // cricket::IceTransportInternal API. + virtual cricket::IceTransportInternal* internal() = 0; +}; + +struct IceTransportInit final { + public: + IceTransportInit() = default; + IceTransportInit(const IceTransportInit&) = delete; + IceTransportInit(IceTransportInit&&) = default; + IceTransportInit& operator=(const IceTransportInit&) = delete; + IceTransportInit& operator=(IceTransportInit&&) = default; + + cricket::PortAllocator* port_allocator() { return port_allocator_; } + void set_port_allocator(cricket::PortAllocator* port_allocator) { + port_allocator_ = port_allocator; + } + + AsyncResolverFactory* async_resolver_factory() { + return async_resolver_factory_; + } + void set_async_resolver_factory( + AsyncResolverFactory* async_resolver_factory) { + async_resolver_factory_ = async_resolver_factory; + } + + RtcEventLog* event_log() { return event_log_; } + void set_event_log(RtcEventLog* event_log) { event_log_ = event_log; } + + private: + cricket::PortAllocator* port_allocator_ = nullptr; + AsyncResolverFactory* async_resolver_factory_ = nullptr; + RtcEventLog* event_log_ = nullptr; +}; + +// TODO(qingsi): The factory interface is defined in this file instead of its +// namesake file ice_transport_factory.h to avoid the extra dependency on p2p/ +// introduced there by the p2p/-dependent factory methods. Move the factory +// methods to a different file or rename it. 
+class IceTransportFactory { + public: + virtual ~IceTransportFactory() = default; + // As a refcounted object, the returned ICE transport may outlive the host + // construct into which its reference is given, e.g. a peer connection. As a + // result, the returned ICE transport should not hold references to any object + // that the transport does not own and that has a lifetime bound to the host + // construct. Also, assumptions on the thread safety of the returned transport + // should be clarified by implementations. For example, a peer connection + // requires the returned transport to be constructed and destroyed on the + // network thread and an ICE transport factory that intends to work with a + // peer connection should offer transports compatible with these assumptions. + virtual rtc::scoped_refptr CreateIceTransport( + const std::string& transport_name, + int component, + IceTransportInit init) = 0; +}; + +} // namespace webrtc +#endif // API_ICE_TRANSPORT_INTERFACE_H_ diff --git a/api/jsep.cc b/api/jsep.cc new file mode 100644 index 0000000..5fdc890 --- /dev/null +++ b/api/jsep.cc @@ -0,0 +1,57 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/jsep.h" + +namespace webrtc { + +std::string IceCandidateInterface::server_url() const { + return ""; +} + +size_t SessionDescriptionInterface::RemoveCandidates( + const std::vector& candidates) { + return 0; +} + +const char SessionDescriptionInterface::kOffer[] = "offer"; +const char SessionDescriptionInterface::kPrAnswer[] = "pranswer"; +const char SessionDescriptionInterface::kAnswer[] = "answer"; +const char SessionDescriptionInterface::kRollback[] = "rollback"; + +const char* SdpTypeToString(SdpType type) { + switch (type) { + case SdpType::kOffer: + return SessionDescriptionInterface::kOffer; + case SdpType::kPrAnswer: + return SessionDescriptionInterface::kPrAnswer; + case SdpType::kAnswer: + return SessionDescriptionInterface::kAnswer; + case SdpType::kRollback: + return SessionDescriptionInterface::kRollback; + } + return ""; +} + +absl::optional SdpTypeFromString(const std::string& type_str) { + if (type_str == SessionDescriptionInterface::kOffer) { + return SdpType::kOffer; + } else if (type_str == SessionDescriptionInterface::kPrAnswer) { + return SdpType::kPrAnswer; + } else if (type_str == SessionDescriptionInterface::kAnswer) { + return SdpType::kAnswer; + } else if (type_str == SessionDescriptionInterface::kRollback) { + return SdpType::kRollback; + } else { + return absl::nullopt; + } +} + +} // namespace webrtc diff --git a/api/jsep.h b/api/jsep.h new file mode 100644 index 0000000..cf8aeb0 --- /dev/null +++ b/api/jsep.h @@ -0,0 +1,247 @@ +/* + * Copyright 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +// This file contains declarations of interfaces that wrap SDP-related +// constructs; session descriptions and ICE candidates. The inner "cricket::" +// objects shouldn't be accessed directly; the intention is that an application +// using the PeerConnection API only creates these objects from strings, and +// then passes them into the PeerConnection. +// +// Though in the future, we're planning to provide an SDP parsing API, with a +// structure more friendly than cricket::SessionDescription. + +#ifndef API_JSEP_H_ +#define API_JSEP_H_ + +#include + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/rtc_error.h" +#include "rtc_base/deprecation.h" +#include "rtc_base/ref_count.h" +#include "rtc_base/system/rtc_export.h" + +namespace cricket { +class Candidate; +class SessionDescription; +} // namespace cricket + +namespace webrtc { + +struct SdpParseError { + public: + // The sdp line that causes the error. + std::string line; + // Explains the error. + std::string description; +}; + +// Class representation of an ICE candidate. +// +// An instance of this interface is supposed to be owned by one class at +// a time and is therefore not expected to be thread safe. +// +// An instance can be created by CreateIceCandidate. +class RTC_EXPORT IceCandidateInterface { + public: + virtual ~IceCandidateInterface() {} + // If present, this is the value of the "a=mid" attribute of the candidate's + // m= section in SDP, which identifies the m= section. + virtual std::string sdp_mid() const = 0; + // This indicates the index (starting at zero) of m= section this candidate + // is associated with. Needed when an endpoint doesn't support MIDs. + virtual int sdp_mline_index() const = 0; + // Only for use internally. + virtual const cricket::Candidate& candidate() const = 0; + // The URL of the ICE server which this candidate was gathered from.
+ // TODO(zhihuang): Remove the default implementation once the subclasses + // implement this method. + virtual std::string server_url() const; + // Creates an SDP-ized form of this candidate. + virtual bool ToString(std::string* out) const = 0; +}; + +// Creates an IceCandidateInterface based on SDP string. +// Returns null if the sdp string can't be parsed. +// |error| may be null. +RTC_EXPORT IceCandidateInterface* CreateIceCandidate(const std::string& sdp_mid, + int sdp_mline_index, + const std::string& sdp, + SdpParseError* error); + +// Creates an IceCandidateInterface based on a parsed candidate structure. +RTC_EXPORT std::unique_ptr CreateIceCandidate( + const std::string& sdp_mid, + int sdp_mline_index, + const cricket::Candidate& candidate); + +// This class represents a collection of candidates for a specific m= section. +// Used in SessionDescriptionInterface. +class IceCandidateCollection { + public: + virtual ~IceCandidateCollection() {} + virtual size_t count() const = 0; + // Returns true if an equivalent |candidate| exists in the collection. + virtual bool HasCandidate(const IceCandidateInterface* candidate) const = 0; + virtual const IceCandidateInterface* at(size_t index) const = 0; +}; + +// Enum that describes the type of the SessionDescriptionInterface. +// Corresponds to RTCSdpType in the WebRTC specification. +// https://w3c.github.io/webrtc-pc/#dom-rtcsdptype +enum class SdpType { + kOffer, // Description must be treated as an SDP offer. + kPrAnswer, // Description must be treated as an SDP answer, but not a final + // answer. + kAnswer, // Description must be treated as an SDP final answer, and the + // offer-answer exchange must be considered complete after + // receiving this. + kRollback // Resets any pending offers and sets signaling state back to + // stable. +}; + +// Returns the string form of the given SDP type. String forms are defined in +// SessionDescriptionInterface.
+RTC_EXPORT const char* SdpTypeToString(SdpType type); + +// Returns the SdpType from its string form. The string form can be one of the +// constants defined in SessionDescriptionInterface. Passing in any other string +// results in nullopt. +absl::optional SdpTypeFromString(const std::string& type_str); + +// Class representation of an SDP session description. +// +// An instance of this interface is supposed to be owned by one class at a time +// and is therefore not expected to be thread safe. +// +// An instance can be created by CreateSessionDescription. +class RTC_EXPORT SessionDescriptionInterface { + public: + // String representations of the supported SDP types. + static const char kOffer[]; + static const char kPrAnswer[]; + static const char kAnswer[]; + static const char kRollback[]; + + virtual ~SessionDescriptionInterface() {} + + // Only for use internally. + virtual cricket::SessionDescription* description() = 0; + virtual const cricket::SessionDescription* description() const = 0; + + // Get the session id and session version, which are defined based on + // RFC 4566 for the SDP o= line. + virtual std::string session_id() const = 0; + virtual std::string session_version() const = 0; + + // Returns the type of this session description as an SdpType. Descriptions of + // the various types are found in the SdpType documentation. + // TODO(steveanton): Remove default implementation once Chromium has been + // updated. + virtual SdpType GetType() const; + + // kOffer/kPrAnswer/kAnswer + // TODO(steveanton): Remove this in favor of |GetType| that returns SdpType. + virtual std::string type() const = 0; + + // Adds the specified candidate to the description. + // + // Ownership is not transferred. + // + // Returns false if the session description does not have a media section + // that corresponds to |candidate.sdp_mid()| or + // |candidate.sdp_mline_index()|. 
+ virtual bool AddCandidate(const IceCandidateInterface* candidate) = 0; + + // Removes the candidates from the description, if found. + // + // Returns the number of candidates removed. + virtual size_t RemoveCandidates( + const std::vector& candidates); + + // Returns the number of m= sections in the session description. + virtual size_t number_of_mediasections() const = 0; + + // Returns a collection of all candidates that belong to a certain m= + // section. + virtual const IceCandidateCollection* candidates( + size_t mediasection_index) const = 0; + + // Serializes the description to SDP. + virtual bool ToString(std::string* out) const = 0; +}; + +// Creates a SessionDescriptionInterface based on the SDP string and the type. +// Returns null if the sdp string can't be parsed or the type is unsupported. +// |error| may be null. +// TODO(steveanton): This function is deprecated. Please use the functions below +// which take an SdpType enum instead. Remove this once it is no longer used. +RTC_EXPORT SessionDescriptionInterface* CreateSessionDescription( + const std::string& type, + const std::string& sdp, + SdpParseError* error); + +// Creates a SessionDescriptionInterface based on the SDP string and the type. +// Returns null if the SDP string cannot be parsed. +// If using the signature with |error_out|, details of the parsing error may be +// written to |error_out| if it is not null. +RTC_EXPORT std::unique_ptr +CreateSessionDescription(SdpType type, const std::string& sdp); +RTC_EXPORT std::unique_ptr +CreateSessionDescription(SdpType type, + const std::string& sdp, + SdpParseError* error_out); + +// Creates a SessionDescriptionInterface based on a parsed SDP structure and the +// given type, ID and version. +std::unique_ptr CreateSessionDescription( + SdpType type, + const std::string& session_id, + const std::string& session_version, + std::unique_ptr description); + +// CreateOffer and CreateAnswer callback interface. 
+class RTC_EXPORT CreateSessionDescriptionObserver + : public rtc::RefCountInterface { + public: + // This callback transfers the ownership of the |desc|. + // TODO(deadbeef): Make this take an std::unique_ptr<> to avoid confusion + // around ownership. + virtual void OnSuccess(SessionDescriptionInterface* desc) = 0; + // The OnFailure callback takes an RTCError, which consists of an + // error code and a string. + // RTCError is non-copyable, so it must be passed using std::move. + // Earlier versions of the API used a string argument. This version + // is removed; its functionality was the same as passing + // error.message. + virtual void OnFailure(RTCError error) = 0; + + protected: + ~CreateSessionDescriptionObserver() override = default; +}; + +// SetLocalDescription and SetRemoteDescription callback interface. +class RTC_EXPORT SetSessionDescriptionObserver : public rtc::RefCountInterface { + public: + virtual void OnSuccess() = 0; + // See description in CreateSessionDescriptionObserver for OnFailure. + virtual void OnFailure(RTCError error) = 0; + + protected: + ~SetSessionDescriptionObserver() override = default; +}; + +} // namespace webrtc + +#endif // API_JSEP_H_ diff --git a/api/jsep_ice_candidate.cc b/api/jsep_ice_candidate.cc new file mode 100644 index 0000000..e18e6e2 --- /dev/null +++ b/api/jsep_ice_candidate.cc @@ -0,0 +1,76 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/jsep_ice_candidate.h" + +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/memory/memory.h" + +namespace webrtc { + +std::string JsepIceCandidate::sdp_mid() const { + return sdp_mid_; +} + +int JsepIceCandidate::sdp_mline_index() const { + return sdp_mline_index_; +} + +const cricket::Candidate& JsepIceCandidate::candidate() const { + return candidate_; +} + +std::string JsepIceCandidate::server_url() const { + return candidate_.url(); +} + +JsepCandidateCollection::JsepCandidateCollection() = default; + +JsepCandidateCollection::JsepCandidateCollection(JsepCandidateCollection&& o) + : candidates_(std::move(o.candidates_)) {} + +size_t JsepCandidateCollection::count() const { + return candidates_.size(); +} + +void JsepCandidateCollection::add(JsepIceCandidate* candidate) { + candidates_.push_back(absl::WrapUnique(candidate)); +} + +const IceCandidateInterface* JsepCandidateCollection::at(size_t index) const { + return candidates_[index].get(); +} + +bool JsepCandidateCollection::HasCandidate( + const IceCandidateInterface* candidate) const { + return absl::c_any_of( + candidates_, [&](const std::unique_ptr& entry) { + return entry->sdp_mid() == candidate->sdp_mid() && + entry->sdp_mline_index() == candidate->sdp_mline_index() && + entry->candidate().IsEquivalent(candidate->candidate()); + }); +} + +size_t JsepCandidateCollection::remove(const cricket::Candidate& candidate) { + auto iter = absl::c_find_if( + candidates_, [&](const std::unique_ptr& c) { + return candidate.MatchesForRemoval(c->candidate()); + }); + if (iter != candidates_.end()) { + candidates_.erase(iter); + return 1; + } + return 0; +} + +} // namespace webrtc diff --git a/api/jsep_ice_candidate.h b/api/jsep_ice_candidate.h new file mode 100644 index 0000000..4ee84cf --- /dev/null +++ b/api/jsep_ice_candidate.h @@ -0,0 +1,87 @@ +/* + * Copyright 2012 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// TODO(deadbeef): Move this out of api/; it's an implementation detail and +// shouldn't be used externally. + +#ifndef API_JSEP_ICE_CANDIDATE_H_ +#define API_JSEP_ICE_CANDIDATE_H_ + +#include + +#include +#include +#include + +#include "api/candidate.h" +#include "api/jsep.h" +#include "rtc_base/constructor_magic.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// Implementation of IceCandidateInterface. +class RTC_EXPORT JsepIceCandidate : public IceCandidateInterface { + public: + JsepIceCandidate(const std::string& sdp_mid, int sdp_mline_index); + JsepIceCandidate(const std::string& sdp_mid, + int sdp_mline_index, + const cricket::Candidate& candidate); + ~JsepIceCandidate() override; + // |err| may be null. + bool Initialize(const std::string& sdp, SdpParseError* err); + void SetCandidate(const cricket::Candidate& candidate) { + candidate_ = candidate; + } + + std::string sdp_mid() const override; + int sdp_mline_index() const override; + const cricket::Candidate& candidate() const override; + + std::string server_url() const override; + + bool ToString(std::string* out) const override; + + private: + std::string sdp_mid_; + int sdp_mline_index_; + cricket::Candidate candidate_; + + RTC_DISALLOW_COPY_AND_ASSIGN(JsepIceCandidate); +}; + +// Implementation of IceCandidateCollection which stores JsepIceCandidates. +class JsepCandidateCollection : public IceCandidateCollection { + public: + JsepCandidateCollection(); + // Move constructor is defined so that a vector of JsepCandidateCollections + // can be resized. 
+ JsepCandidateCollection(JsepCandidateCollection&& o); + size_t count() const override; + bool HasCandidate(const IceCandidateInterface* candidate) const override; + // Adds and takes ownership of the JsepIceCandidate. + // TODO(deadbeef): Make this use an std::unique_ptr<>, so ownership logic is + // more clear. + virtual void add(JsepIceCandidate* candidate); + const IceCandidateInterface* at(size_t index) const override; + // Removes the candidate that has a matching address and protocol. + // + // Returns the number of candidates that were removed. + size_t remove(const cricket::Candidate& candidate); + + private: + std::vector> candidates_; + + RTC_DISALLOW_COPY_AND_ASSIGN(JsepCandidateCollection); +}; + +} // namespace webrtc + +#endif // API_JSEP_ICE_CANDIDATE_H_ diff --git a/api/jsep_session_description.h b/api/jsep_session_description.h new file mode 100644 index 0000000..79e15e2 --- /dev/null +++ b/api/jsep_session_description.h @@ -0,0 +1,90 @@ +/* + * Copyright 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// TODO(deadbeef): Move this out of api/; it's an implementation detail and +// shouldn't be used externally. + +#ifndef API_JSEP_SESSION_DESCRIPTION_H_ +#define API_JSEP_SESSION_DESCRIPTION_H_ + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "api/candidate.h" +#include "api/jsep.h" +#include "api/jsep_ice_candidate.h" +#include "rtc_base/constructor_magic.h" +#include "rtc_base/deprecation.h" + +namespace cricket { +class SessionDescription; +} + +namespace webrtc { + +// Implementation of SessionDescriptionInterface. 
+class JsepSessionDescription : public SessionDescriptionInterface { + public: + explicit JsepSessionDescription(SdpType type); + // TODO(steveanton): Remove this once callers have switched to SdpType. + explicit JsepSessionDescription(const std::string& type); + JsepSessionDescription( + SdpType type, + std::unique_ptr description, + absl::string_view session_id, + absl::string_view session_version); + virtual ~JsepSessionDescription(); + + // Takes ownership of |description|. + bool Initialize(std::unique_ptr description, + const std::string& session_id, + const std::string& session_version); + + virtual cricket::SessionDescription* description() { + return description_.get(); + } + virtual const cricket::SessionDescription* description() const { + return description_.get(); + } + virtual std::string session_id() const { return session_id_; } + virtual std::string session_version() const { return session_version_; } + virtual SdpType GetType() const { return type_; } + virtual std::string type() const { return SdpTypeToString(type_); } + // Allows changing the type. Used for testing. 
+ virtual bool AddCandidate(const IceCandidateInterface* candidate); + virtual size_t RemoveCandidates( + const std::vector& candidates); + virtual size_t number_of_mediasections() const; + virtual const IceCandidateCollection* candidates( + size_t mediasection_index) const; + virtual bool ToString(std::string* out) const; + + static const int kDefaultVideoCodecId; + static const char kDefaultVideoCodecName[]; + + private: + std::unique_ptr description_; + std::string session_id_; + std::string session_version_; + SdpType type_; + std::vector candidate_collection_; + + bool GetMediasectionIndex(const IceCandidateInterface* candidate, + size_t* index); + int GetMediasectionIndex(const cricket::Candidate& candidate); + + RTC_DISALLOW_COPY_AND_ASSIGN(JsepSessionDescription); +}; + +} // namespace webrtc + +#endif // API_JSEP_SESSION_DESCRIPTION_H_ diff --git a/api/media_stream_interface.cc b/api/media_stream_interface.cc new file mode 100644 index 0000000..e079079 --- /dev/null +++ b/api/media_stream_interface.cc @@ -0,0 +1,38 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/media_stream_interface.h" +#include "api/media_types.h" + +namespace webrtc { + +const char* const MediaStreamTrackInterface::kVideoKind = + cricket::kMediaTypeVideo; +const char* const MediaStreamTrackInterface::kAudioKind = + cricket::kMediaTypeAudio; + +VideoTrackInterface::ContentHint VideoTrackInterface::content_hint() const { + return ContentHint::kNone; +} + +bool AudioTrackInterface::GetSignalLevel(int* level) { + return false; +} + +rtc::scoped_refptr +AudioTrackInterface::GetAudioProcessor() { + return nullptr; +} + +const cricket::AudioOptions AudioSourceInterface::options() const { + return {}; +} + +} // namespace webrtc diff --git a/api/media_stream_interface.h b/api/media_stream_interface.h new file mode 100644 index 0000000..bd4a2c0 --- /dev/null +++ b/api/media_stream_interface.h @@ -0,0 +1,336 @@ +/* + * Copyright 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file contains interfaces for MediaStream, MediaTrack and MediaSource. +// These interfaces are used for implementing MediaStream and MediaTrack as +// defined in http://dev.w3.org/2011/webrtc/editor/webrtc.html#stream-api. These +// interfaces must be used only with PeerConnection. 
+ +#ifndef API_MEDIA_STREAM_INTERFACE_H_ +#define API_MEDIA_STREAM_INTERFACE_H_ + +#include + +#include +#include + +#include "absl/types/optional.h" +#include "api/audio_options.h" +#include "api/scoped_refptr.h" +#include "api/video/recordable_encoded_frame.h" +#include "api/video/video_frame.h" +#include "api/video/video_sink_interface.h" +#include "api/video/video_source_interface.h" +#include "modules/audio_processing/include/audio_processing_statistics.h" +#include "rtc_base/ref_count.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// Generic observer interface. +class ObserverInterface { + public: + virtual void OnChanged() = 0; + + protected: + virtual ~ObserverInterface() {} +}; + +class NotifierInterface { + public: + virtual void RegisterObserver(ObserverInterface* observer) = 0; + virtual void UnregisterObserver(ObserverInterface* observer) = 0; + + virtual ~NotifierInterface() {} +}; + +// Base class for sources. A MediaStreamTrack has an underlying source that +// provides media. A source can be shared by multiple tracks. +class RTC_EXPORT MediaSourceInterface : public rtc::RefCountInterface, + public NotifierInterface { + public: + enum SourceState { kInitializing, kLive, kEnded, kMuted }; + + virtual SourceState state() const = 0; + + virtual bool remote() const = 0; + + protected: + ~MediaSourceInterface() override = default; +}; + +// C++ version of MediaStreamTrack. +// See: https://www.w3.org/TR/mediacapture-streams/#mediastreamtrack +class RTC_EXPORT MediaStreamTrackInterface : public rtc::RefCountInterface, + public NotifierInterface { + public: + enum TrackState { + kLive, + kEnded, + }; + + static const char* const kAudioKind; + static const char* const kVideoKind; + + // The kind() method must return kAudioKind only if the object is a + // subclass of AudioTrackInterface, and kVideoKind only if the + // object is a subclass of VideoTrackInterface. 
It is typically used + // to protect a static_cast<> to the corresponding subclass. + virtual std::string kind() const = 0; + + // Track identifier. + virtual std::string id() const = 0; + + // A disabled track will produce silence (if audio) or black frames (if + // video). Can be disabled and re-enabled. + virtual bool enabled() const = 0; + virtual bool set_enabled(bool enable) = 0; + + // Live or ended. A track will never be live again after becoming ended. + virtual TrackState state() const = 0; + + protected: + ~MediaStreamTrackInterface() override = default; +}; + +// VideoTrackSourceInterface is a reference counted source used for +// VideoTracks. The same source can be used by multiple VideoTracks. +// VideoTrackSourceInterface is designed to be invoked on the signaling thread +// except for rtc::VideoSourceInterface methods that will be invoked +// on the worker thread via a VideoTrack. A custom implementation of a source +// can inherit AdaptedVideoTrackSource instead of directly implementing this +// interface. +class VideoTrackSourceInterface : public MediaSourceInterface, + public rtc::VideoSourceInterface { + public: + struct Stats { + // Original size of captured frame, before video adaptation. + int input_width; + int input_height; + }; + + // Indicates that parameters suitable for screencasts should be automatically + // applied to RtpSenders. + // TODO(perkj): Remove these once all known applications have moved to + // explicitly setting suitable parameters for screencasts and don't need this + // implicit behavior. + virtual bool is_screencast() const = 0; + + // Indicates that the encoder should denoise video before encoding it. + // If it is not set, the default configuration is used which is different + // depending on video codec. + // TODO(perkj): Remove this once denoising is done by the source, and not by + // the encoder. 
+ virtual absl::optional needs_denoising() const = 0; + + // Returns false if no stats are available, e.g, for a remote source, or a + // source which has not seen its first frame yet. + // + // Implementation should avoid blocking. + virtual bool GetStats(Stats* stats) = 0; + + // Returns true if encoded output can be enabled in the source. + virtual bool SupportsEncodedOutput() const = 0; + + // Reliably cause a key frame to be generated in encoded output. + // TODO(bugs.webrtc.org/11115): find optimal naming. + virtual void GenerateKeyFrame() = 0; + + // Add an encoded video sink to the source and additionally cause + // a key frame to be generated from the source. The sink will be + // invoked from a decoder queue. + // TODO(bugs.webrtc.org/11114): make pure virtual once downstream project + // adapts. + virtual void AddEncodedSink( + rtc::VideoSinkInterface* sink) = 0; + + // Removes an encoded video sink from the source. + virtual void RemoveEncodedSink( + rtc::VideoSinkInterface* sink) = 0; + + protected: + ~VideoTrackSourceInterface() override = default; +}; + +// VideoTrackInterface is designed to be invoked on the signaling thread except +// for rtc::VideoSourceInterface methods that must be invoked +// on the worker thread. +// PeerConnectionFactory::CreateVideoTrack can be used for creating a VideoTrack +// that ensures thread safety and that all methods are called on the right +// thread. +class RTC_EXPORT VideoTrackInterface + : public MediaStreamTrackInterface, + public rtc::VideoSourceInterface { + public: + // Video track content hint, used to override the source is_screencast + // property. + // See https://crbug.com/653531 and https://w3c.github.io/mst-content-hint. + enum class ContentHint { kNone, kFluid, kDetailed, kText }; + + // Register a video sink for this track. Used to connect the track to the + // underlying video engine. 
+ void AddOrUpdateSink(rtc::VideoSinkInterface* sink, + const rtc::VideoSinkWants& wants) override {} + void RemoveSink(rtc::VideoSinkInterface* sink) override {} + + virtual VideoTrackSourceInterface* GetSource() const = 0; + + virtual ContentHint content_hint() const; + virtual void set_content_hint(ContentHint hint) {} + + protected: + ~VideoTrackInterface() override = default; +}; + +// Interface for receiving audio data from a AudioTrack. +class AudioTrackSinkInterface { + public: + virtual void OnData(const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames) { + RTC_NOTREACHED() << "This method must be overridden, or not used."; + } + + // In this method, |absolute_capture_timestamp_ms|, when available, is + // supposed to deliver the timestamp when this audio frame was originally + // captured. This timestamp MUST be based on the same clock as + // rtc::TimeMillis(). + virtual void OnData(const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames, + absl::optional absolute_capture_timestamp_ms) { + // TODO(bugs.webrtc.org/10739): Deprecate the old OnData and make this one + // pure virtual. + return OnData(audio_data, bits_per_sample, sample_rate, number_of_channels, + number_of_frames); + } + + protected: + virtual ~AudioTrackSinkInterface() {} +}; + +// AudioSourceInterface is a reference counted source used for AudioTracks. +// The same source can be used by multiple AudioTracks. +class RTC_EXPORT AudioSourceInterface : public MediaSourceInterface { + public: + class AudioObserver { + public: + virtual void OnSetVolume(double volume) = 0; + + protected: + virtual ~AudioObserver() {} + }; + + // TODO(deadbeef): Makes all the interfaces pure virtual after they're + // implemented in chromium. + + // Sets the volume of the source. |volume| is in the range of [0, 10]. 
+ // TODO(tommi): This method should be on the track and ideally volume should + // be applied in the track in a way that does not affect clones of the track. + virtual void SetVolume(double volume) {} + + // Registers/unregisters observers to the audio source. + virtual void RegisterAudioObserver(AudioObserver* observer) {} + virtual void UnregisterAudioObserver(AudioObserver* observer) {} + + // TODO(tommi): Make pure virtual. + virtual void AddSink(AudioTrackSinkInterface* sink) {} + virtual void RemoveSink(AudioTrackSinkInterface* sink) {} + + // Returns options for the AudioSource. + // (for some of the settings this approach is broken, e.g. setting + // audio network adaptation on the source is the wrong layer of abstraction). + virtual const cricket::AudioOptions options() const; +}; + +// Interface of the audio processor used by the audio track to collect +// statistics. +class AudioProcessorInterface : public rtc::RefCountInterface { + public: + struct AudioProcessorStatistics { + bool typing_noise_detected = false; + AudioProcessingStats apm_statistics; + }; + + // Get audio processor statistics. The |has_remote_tracks| argument should be + // set if there are active remote tracks (this would usually be true during + // a call). If there are no remote tracks some of the stats will not be set by + // the AudioProcessor, because they only make sense if there is at least one + // remote track. + virtual AudioProcessorStatistics GetStats(bool has_remote_tracks) = 0; + + protected: + ~AudioProcessorInterface() override = default; +}; + +class RTC_EXPORT AudioTrackInterface : public MediaStreamTrackInterface { + public: + // TODO(deadbeef): Figure out if the following interface should be const or + // not. + virtual AudioSourceInterface* GetSource() const = 0; + + // Add/Remove a sink that will receive the audio data from the track. 
+ virtual void AddSink(AudioTrackSinkInterface* sink) = 0; + virtual void RemoveSink(AudioTrackSinkInterface* sink) = 0; + + // Get the signal level from the audio track. + // Return true on success, otherwise false. + // TODO(deadbeef): Change the interface to int GetSignalLevel() and pure + // virtual after it's implemented in chromium. + virtual bool GetSignalLevel(int* level); + + // Get the audio processor used by the audio track. Return null if the track + // does not have any processor. + // TODO(deadbeef): Make the interface pure virtual. + virtual rtc::scoped_refptr GetAudioProcessor(); + + protected: + ~AudioTrackInterface() override = default; +}; + +typedef std::vector > AudioTrackVector; +typedef std::vector > VideoTrackVector; + +// C++ version of https://www.w3.org/TR/mediacapture-streams/#mediastream. +// +// A major difference is that remote audio/video tracks (received by a +// PeerConnection/RtpReceiver) are not synchronized simply by adding them to +// the same stream; a session description with the correct "a=msid" attributes +// must be pushed down. +// +// Thus, this interface acts as simply a container for tracks. 
+class MediaStreamInterface : public rtc::RefCountInterface, + public NotifierInterface { + public: + virtual std::string id() const = 0; + + virtual AudioTrackVector GetAudioTracks() = 0; + virtual VideoTrackVector GetVideoTracks() = 0; + virtual rtc::scoped_refptr FindAudioTrack( + const std::string& track_id) = 0; + virtual rtc::scoped_refptr FindVideoTrack( + const std::string& track_id) = 0; + + virtual bool AddTrack(AudioTrackInterface* track) = 0; + virtual bool AddTrack(VideoTrackInterface* track) = 0; + virtual bool RemoveTrack(AudioTrackInterface* track) = 0; + virtual bool RemoveTrack(VideoTrackInterface* track) = 0; + + protected: + ~MediaStreamInterface() override = default; +}; + +} // namespace webrtc + +#endif // API_MEDIA_STREAM_INTERFACE_H_ diff --git a/api/media_stream_proxy.h b/api/media_stream_proxy.h new file mode 100644 index 0000000..5169679 --- /dev/null +++ b/api/media_stream_proxy.h @@ -0,0 +1,44 @@ +/* + * Copyright 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_MEDIA_STREAM_PROXY_H_ +#define API_MEDIA_STREAM_PROXY_H_ + +#include + +#include "api/media_stream_interface.h" +#include "api/proxy.h" + +namespace webrtc { + +// TODO(deadbeef): Move this to .cc file and out of api/. What threads methods +// are called on is an implementation detail. 
+BEGIN_SIGNALING_PROXY_MAP(MediaStream) +PROXY_SIGNALING_THREAD_DESTRUCTOR() +PROXY_CONSTMETHOD0(std::string, id) +PROXY_METHOD0(AudioTrackVector, GetAudioTracks) +PROXY_METHOD0(VideoTrackVector, GetVideoTracks) +PROXY_METHOD1(rtc::scoped_refptr, + FindAudioTrack, + const std::string&) +PROXY_METHOD1(rtc::scoped_refptr, + FindVideoTrack, + const std::string&) +PROXY_METHOD1(bool, AddTrack, AudioTrackInterface*) +PROXY_METHOD1(bool, AddTrack, VideoTrackInterface*) +PROXY_METHOD1(bool, RemoveTrack, AudioTrackInterface*) +PROXY_METHOD1(bool, RemoveTrack, VideoTrackInterface*) +PROXY_METHOD1(void, RegisterObserver, ObserverInterface*) +PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*) +END_PROXY_MAP() + +} // namespace webrtc + +#endif // API_MEDIA_STREAM_PROXY_H_ diff --git a/api/media_stream_track_proxy.h b/api/media_stream_track_proxy.h new file mode 100644 index 0000000..d3dc255 --- /dev/null +++ b/api/media_stream_track_proxy.h @@ -0,0 +1,65 @@ +/* + * Copyright 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file includes proxy classes for tracks. The purpose is +// to make sure tracks are only accessed from the signaling thread. + +#ifndef API_MEDIA_STREAM_TRACK_PROXY_H_ +#define API_MEDIA_STREAM_TRACK_PROXY_H_ + +#include + +#include "api/media_stream_interface.h" +#include "api/proxy.h" + +namespace webrtc { + +// TODO(deadbeef): Move this to .cc file and out of api/. What threads methods +// are called on is an implementation detail. 
+ +BEGIN_SIGNALING_PROXY_MAP(AudioTrack) +PROXY_SIGNALING_THREAD_DESTRUCTOR() +PROXY_CONSTMETHOD0(std::string, kind) +PROXY_CONSTMETHOD0(std::string, id) +PROXY_CONSTMETHOD0(TrackState, state) +PROXY_CONSTMETHOD0(bool, enabled) +PROXY_CONSTMETHOD0(AudioSourceInterface*, GetSource) +PROXY_METHOD1(void, AddSink, AudioTrackSinkInterface*) +PROXY_METHOD1(void, RemoveSink, AudioTrackSinkInterface*) +PROXY_METHOD1(bool, GetSignalLevel, int*) +PROXY_METHOD0(rtc::scoped_refptr, GetAudioProcessor) +PROXY_METHOD1(bool, set_enabled, bool) +PROXY_METHOD1(void, RegisterObserver, ObserverInterface*) +PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*) +END_PROXY_MAP() + +BEGIN_PROXY_MAP(VideoTrack) +PROXY_SIGNALING_THREAD_DESTRUCTOR() +PROXY_CONSTMETHOD0(std::string, kind) +PROXY_CONSTMETHOD0(std::string, id) +PROXY_CONSTMETHOD0(TrackState, state) +PROXY_CONSTMETHOD0(bool, enabled) +PROXY_METHOD1(bool, set_enabled, bool) +PROXY_CONSTMETHOD0(ContentHint, content_hint) +PROXY_METHOD1(void, set_content_hint, ContentHint) +PROXY_WORKER_METHOD2(void, + AddOrUpdateSink, + rtc::VideoSinkInterface*, + const rtc::VideoSinkWants&) +PROXY_WORKER_METHOD1(void, RemoveSink, rtc::VideoSinkInterface*) +PROXY_CONSTMETHOD0(VideoTrackSourceInterface*, GetSource) + +PROXY_METHOD1(void, RegisterObserver, ObserverInterface*) +PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*) +END_PROXY_MAP() + +} // namespace webrtc + +#endif // API_MEDIA_STREAM_TRACK_PROXY_H_ diff --git a/api/media_types.cc b/api/media_types.cc new file mode 100644 index 0000000..6bc6938 --- /dev/null +++ b/api/media_types.cc @@ -0,0 +1,35 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/media_types.h" + +#include "rtc_base/checks.h" + +namespace cricket { + +const char kMediaTypeVideo[] = "video"; +const char kMediaTypeAudio[] = "audio"; +const char kMediaTypeData[] = "data"; + +std::string MediaTypeToString(MediaType type) { + switch (type) { + case MEDIA_TYPE_AUDIO: + return kMediaTypeAudio; + case MEDIA_TYPE_VIDEO: + return kMediaTypeVideo; + case MEDIA_TYPE_DATA: + return kMediaTypeData; + } + FATAL(); + // Not reachable; avoids compile warning. + return ""; +} + +} // namespace cricket diff --git a/api/media_types.h b/api/media_types.h new file mode 100644 index 0000000..8c6ba3d --- /dev/null +++ b/api/media_types.h @@ -0,0 +1,39 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_MEDIA_TYPES_H_ +#define API_MEDIA_TYPES_H_ + +#include + +#include "rtc_base/system/rtc_export.h" + +// The cricket and webrtc have separate definitions for what a media type is. +// They're not compatible. Watch out for this. 
+ +namespace cricket { + +enum MediaType { MEDIA_TYPE_AUDIO, MEDIA_TYPE_VIDEO, MEDIA_TYPE_DATA }; + +extern const char kMediaTypeAudio[]; +extern const char kMediaTypeVideo[]; +extern const char kMediaTypeData[]; + +RTC_EXPORT std::string MediaTypeToString(MediaType type); + +} // namespace cricket + +namespace webrtc { + +enum class MediaType { ANY, AUDIO, VIDEO, DATA }; + +} // namespace webrtc + +#endif // API_MEDIA_TYPES_H_ diff --git a/api/neteq/BUILD.gn b/api/neteq/BUILD.gn new file mode 100644 index 0000000..4e85c4d --- /dev/null +++ b/api/neteq/BUILD.gn @@ -0,0 +1,97 @@ +# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("../../webrtc.gni") + +rtc_source_set("neteq_api") { + visibility = [ "*" ] + sources = [ + "neteq.cc", + "neteq.h", + "neteq_factory.h", + ] + + deps = [ + "..:rtp_headers", + "..:rtp_packet_info", + "..:scoped_refptr", + "../../rtc_base:rtc_base_approved", + "../../system_wrappers:system_wrappers", + "../audio_codecs:audio_codecs_api", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_source_set("custom_neteq_factory") { + visibility = [ "*" ] + sources = [ + "custom_neteq_factory.cc", + "custom_neteq_factory.h", + ] + + deps = [ + ":neteq_api", + ":neteq_controller_api", + "..:scoped_refptr", + "../../modules/audio_coding:neteq", + "../../system_wrappers:system_wrappers", + "../audio_codecs:audio_codecs_api", + ] +} + +rtc_source_set("neteq_controller_api") { + visibility = [ "*" ] + sources = [ + "neteq_controller.h", + "neteq_controller_factory.h", + ] + + deps = [ + ":neteq_api", + ":tick_timer", + "../../rtc_base:rtc_base_approved", + 
"../../system_wrappers:system_wrappers", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_source_set("default_neteq_controller_factory") { + visibility = [ "*" ] + sources = [ + "default_neteq_controller_factory.cc", + "default_neteq_controller_factory.h", + ] + + deps = [ + ":neteq_controller_api", + "../../modules/audio_coding:neteq", + ] +} + +rtc_source_set("tick_timer") { + visibility = [ "*" ] + sources = [ + "tick_timer.cc", + "tick_timer.h", + ] + deps = [ + "../../rtc_base:checks", + "../../rtc_base:rtc_base_approved", + ] +} + +rtc_source_set("tick_timer_unittest") { + visibility = [ "*" ] + testonly = true + sources = [ "tick_timer_unittest.cc" ] + deps = [ + ":tick_timer", + "../../test:test_support", + "//testing/gtest", + ] +} diff --git a/api/neteq/DEPS b/api/neteq/DEPS new file mode 100644 index 0000000..6c1c602 --- /dev/null +++ b/api/neteq/DEPS @@ -0,0 +1,14 @@ +specific_include_rules = { + "custom_neteq_factory\.h": [ + "+system_wrappers/include/clock.h", + ], + "default_neteq_factory\.h": [ + "+system_wrappers/include/clock.h", + ], + "neteq_controller\.h": [ + "+system_wrappers/include/clock.h", + ], + "neteq_factory\.h": [ + "+system_wrappers/include/clock.h", + ], +} diff --git a/api/neteq/OWNERS b/api/neteq/OWNERS new file mode 100644 index 0000000..da88798 --- /dev/null +++ b/api/neteq/OWNERS @@ -0,0 +1,2 @@ +ivoc@webrtc.org +henrik.lundin@webrtc.org diff --git a/api/neteq/custom_neteq_factory.cc b/api/neteq/custom_neteq_factory.cc new file mode 100644 index 0000000..b2df5df --- /dev/null +++ b/api/neteq/custom_neteq_factory.cc @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/neteq/custom_neteq_factory.h" + +#include + +#include "modules/audio_coding/neteq/neteq_impl.h" + +namespace webrtc { + +CustomNetEqFactory::CustomNetEqFactory( + std::unique_ptr controller_factory) + : controller_factory_(std::move(controller_factory)) {} + +CustomNetEqFactory::~CustomNetEqFactory() = default; + +std::unique_ptr CustomNetEqFactory::CreateNetEq( + const NetEq::Config& config, + const rtc::scoped_refptr& decoder_factory, + Clock* clock) const { + return std::make_unique( + config, NetEqImpl::Dependencies(config, clock, decoder_factory, + *controller_factory_)); +} + +} // namespace webrtc diff --git a/api/neteq/custom_neteq_factory.h b/api/neteq/custom_neteq_factory.h new file mode 100644 index 0000000..d080f68 --- /dev/null +++ b/api/neteq/custom_neteq_factory.h @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_NETEQ_CUSTOM_NETEQ_FACTORY_H_ +#define API_NETEQ_CUSTOM_NETEQ_FACTORY_H_ + +#include + +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/neteq/neteq_controller_factory.h" +#include "api/neteq/neteq_factory.h" +#include "api/scoped_refptr.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { + +// This factory can be used to generate NetEq instances that make use of a +// custom NetEqControllerFactory. 
+class CustomNetEqFactory : public NetEqFactory { + public: + explicit CustomNetEqFactory( + std::unique_ptr controller_factory); + ~CustomNetEqFactory() override; + CustomNetEqFactory(const CustomNetEqFactory&) = delete; + CustomNetEqFactory& operator=(const CustomNetEqFactory&) = delete; + + std::unique_ptr CreateNetEq( + const NetEq::Config& config, + const rtc::scoped_refptr& decoder_factory, + Clock* clock) const override; + + private: + std::unique_ptr controller_factory_; +}; + +} // namespace webrtc +#endif // API_NETEQ_CUSTOM_NETEQ_FACTORY_H_ diff --git a/api/neteq/default_neteq_controller_factory.cc b/api/neteq/default_neteq_controller_factory.cc new file mode 100644 index 0000000..22274dc --- /dev/null +++ b/api/neteq/default_neteq_controller_factory.cc @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/neteq/default_neteq_controller_factory.h" +#include "modules/audio_coding/neteq/decision_logic.h" + +namespace webrtc { + +DefaultNetEqControllerFactory::DefaultNetEqControllerFactory() = default; +DefaultNetEqControllerFactory::~DefaultNetEqControllerFactory() = default; + +std::unique_ptr +DefaultNetEqControllerFactory::CreateNetEqController( + const NetEqController::Config& config) const { + return std::make_unique(config); +} + +} // namespace webrtc diff --git a/api/neteq/default_neteq_controller_factory.h b/api/neteq/default_neteq_controller_factory.h new file mode 100644 index 0000000..611afc2 --- /dev/null +++ b/api/neteq/default_neteq_controller_factory.h @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_NETEQ_DEFAULT_NETEQ_CONTROLLER_FACTORY_H_ +#define API_NETEQ_DEFAULT_NETEQ_CONTROLLER_FACTORY_H_ + +#include + +#include "api/neteq/neteq_controller_factory.h" + +namespace webrtc { + +// This NetEqControllerFactory will use WebRTC's built-in controller logic. +class DefaultNetEqControllerFactory : public NetEqControllerFactory { + public: + DefaultNetEqControllerFactory(); + ~DefaultNetEqControllerFactory() override; + DefaultNetEqControllerFactory(const DefaultNetEqControllerFactory&) = delete; + DefaultNetEqControllerFactory& operator=( + const DefaultNetEqControllerFactory&) = delete; + + std::unique_ptr CreateNetEqController( + const NetEqController::Config& config) const override; +}; + +} // namespace webrtc +#endif // API_NETEQ_DEFAULT_NETEQ_CONTROLLER_FACTORY_H_ diff --git a/api/neteq/neteq.cc b/api/neteq/neteq.cc new file mode 100644 index 0000000..e8ef4db --- /dev/null +++ b/api/neteq/neteq.cc @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/neteq/neteq.h" + +#include "rtc_base/strings/string_builder.h" + +namespace webrtc { + +NetEq::Config::Config() = default; +NetEq::Config::Config(const Config&) = default; +NetEq::Config::Config(Config&&) = default; +NetEq::Config::~Config() = default; +NetEq::Config& NetEq::Config::operator=(const Config&) = default; +NetEq::Config& NetEq::Config::operator=(Config&&) = default; + +std::string NetEq::Config::ToString() const { + char buf[1024]; + rtc::SimpleStringBuilder ss(buf); + ss << "sample_rate_hz=" << sample_rate_hz << ", enable_post_decode_vad=" + << (enable_post_decode_vad ? "true" : "false") + << ", max_packets_in_buffer=" << max_packets_in_buffer + << ", min_delay_ms=" << min_delay_ms << ", enable_fast_accelerate=" + << (enable_fast_accelerate ? "true" : "false") + << ", enable_muted_state=" << (enable_muted_state ? "true" : "false") + << ", enable_rtx_handling=" << (enable_rtx_handling ? "true" : "false") + << ", extra_output_delay_ms=" << extra_output_delay_ms; + return ss.str(); +} + +} // namespace webrtc diff --git a/api/neteq/neteq.h b/api/neteq/neteq.h new file mode 100644 index 0000000..15ad3aa --- /dev/null +++ b/api/neteq/neteq.h @@ -0,0 +1,333 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_NETEQ_NETEQ_H_ +#define API_NETEQ_NETEQ_H_ + +#include // Provide access to size_t. 
+ +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/audio_codecs/audio_codec_pair_id.h" +#include "api/audio_codecs/audio_decoder.h" +#include "api/audio_codecs/audio_format.h" +#include "api/rtp_headers.h" +#include "api/scoped_refptr.h" + +namespace webrtc { + +// Forward declarations. +class AudioFrame; +class AudioDecoderFactory; +class Clock; + +struct NetEqNetworkStatistics { + uint16_t current_buffer_size_ms; // Current jitter buffer size in ms. + uint16_t preferred_buffer_size_ms; // Target buffer size in ms. + uint16_t jitter_peaks_found; // 1 if adding extra delay due to peaky + // jitter; 0 otherwise. + uint16_t packet_loss_rate; // Loss rate (network + late) in Q14. + uint16_t expand_rate; // Fraction (of original stream) of synthesized + // audio inserted through expansion (in Q14). + uint16_t speech_expand_rate; // Fraction (of original stream) of synthesized + // speech inserted through expansion (in Q14). + uint16_t preemptive_rate; // Fraction of data inserted through pre-emptive + // expansion (in Q14). + uint16_t accelerate_rate; // Fraction of data removed through acceleration + // (in Q14). + uint16_t secondary_decoded_rate; // Fraction of data coming from FEC/RED + // decoding (in Q14). + uint16_t secondary_discarded_rate; // Fraction of discarded FEC/RED data (in + // Q14). + size_t added_zero_samples; // Number of zero samples added in "off" mode. + // Statistics for packet waiting times, i.e., the time between a packet + // arrives until it is decoded. + int mean_waiting_time_ms; + int median_waiting_time_ms; + int min_waiting_time_ms; + int max_waiting_time_ms; +}; + +// NetEq statistics that persist over the lifetime of the class. +// These metrics are never reset. +struct NetEqLifetimeStatistics { + // Stats below correspond to similarly-named fields in the WebRTC stats spec. 
+ // https://w3c.github.io/webrtc-stats/#dom-rtcmediastreamtrackstats + uint64_t total_samples_received = 0; + uint64_t concealed_samples = 0; + uint64_t concealment_events = 0; + uint64_t jitter_buffer_delay_ms = 0; + uint64_t jitter_buffer_emitted_count = 0; + uint64_t jitter_buffer_target_delay_ms = 0; + uint64_t inserted_samples_for_deceleration = 0; + uint64_t removed_samples_for_acceleration = 0; + uint64_t silent_concealed_samples = 0; + uint64_t fec_packets_received = 0; + uint64_t fec_packets_discarded = 0; + // Below stats are not part of the spec. + uint64_t delayed_packet_outage_samples = 0; + // This is sum of relative packet arrival delays of received packets so far. + // Since end-to-end delay of a packet is difficult to measure and is not + // necessarily useful for measuring jitter buffer performance, we report a + // relative packet arrival delay. The relative packet arrival delay of a + // packet is defined as the arrival delay compared to the first packet + // received, given that it had zero delay. To avoid clock drift, the "first" + // packet can be made dynamic. + uint64_t relative_packet_arrival_delay_ms = 0; + uint64_t jitter_buffer_packets_received = 0; + // An interruption is a loss-concealment event lasting at least 150 ms. The + // two stats below count the number os such events and the total duration of + // these events. + int32_t interruption_count = 0; + int32_t total_interruption_duration_ms = 0; +}; + +// Metrics that describe the operations performed in NetEq, and the internal +// state. +struct NetEqOperationsAndState { + // These sample counters are cumulative, and don't reset. As a reference, the + // total number of output samples can be found in + // NetEqLifetimeStatistics::total_samples_received. + uint64_t preemptive_samples = 0; + uint64_t accelerate_samples = 0; + // Count of the number of buffer flushes. + uint64_t packet_buffer_flushes = 0; + // The number of primary packets that were discarded. 
+ uint64_t discarded_primary_packets = 0; + // The statistics below are not cumulative. + // The waiting time of the last decoded packet. + uint64_t last_waiting_time_ms = 0; + // The sum of the packet and jitter buffer size in ms. + uint64_t current_buffer_size_ms = 0; + // The current frame size in ms. + uint64_t current_frame_size_ms = 0; + // Flag to indicate that the next packet is available. + bool next_packet_available = false; +}; + +// This is the interface class for NetEq. +class NetEq { + public: + struct Config { + Config(); + Config(const Config&); + Config(Config&&); + ~Config(); + Config& operator=(const Config&); + Config& operator=(Config&&); + + std::string ToString() const; + + int sample_rate_hz = 16000; // Initial value. Will change with input data. + bool enable_post_decode_vad = false; + size_t max_packets_in_buffer = 200; + int max_delay_ms = 0; + int min_delay_ms = 0; + bool enable_fast_accelerate = false; + bool enable_muted_state = false; + bool enable_rtx_handling = false; + absl::optional codec_pair_id; + bool for_test_no_time_stretching = false; // Use only for testing. + // Adds extra delay to the output of NetEq, without affecting jitter or + // loss behavior. This is mainly for testing. Value must be a non-negative + // multiple of 10 ms. + int extra_output_delay_ms = 0; + }; + + enum ReturnCodes { kOK = 0, kFail = -1 }; + + enum class Operation { + kNormal, + kMerge, + kExpand, + kAccelerate, + kFastAccelerate, + kPreemptiveExpand, + kRfc3389Cng, + kRfc3389CngNoPacket, + kCodecInternalCng, + kDtmf, + kUndefined, + }; + + enum class Mode { + kNormal, + kExpand, + kMerge, + kAccelerateSuccess, + kAccelerateLowEnergy, + kAccelerateFail, + kPreemptiveExpandSuccess, + kPreemptiveExpandLowEnergy, + kPreemptiveExpandFail, + kRfc3389Cng, + kCodecInternalCng, + kCodecPlc, + kDtmf, + kError, + kUndefined, + }; + + // Return type for GetDecoderFormat. 
+ struct DecoderFormat { + int sample_rate_hz; + int num_channels; + SdpAudioFormat sdp_format; + }; + + // Creates a new NetEq object, with parameters set in |config|. The |config| + // object will only have to be valid for the duration of the call to this + // method. + static NetEq* Create( + const NetEq::Config& config, + Clock* clock, + const rtc::scoped_refptr& decoder_factory); + + virtual ~NetEq() {} + + // Inserts a new packet into NetEq. + // Returns 0 on success, -1 on failure. + virtual int InsertPacket(const RTPHeader& rtp_header, + rtc::ArrayView payload) = 0; + + // Lets NetEq know that a packet arrived with an empty payload. This typically + // happens when empty packets are used for probing the network channel, and + // these packets use RTP sequence numbers from the same series as the actual + // audio packets. + virtual void InsertEmptyPacket(const RTPHeader& rtp_header) = 0; + + // Instructs NetEq to deliver 10 ms of audio data. The data is written to + // |audio_frame|. All data in |audio_frame| is wiped; |data_|, |speech_type_|, + // |num_channels_|, |sample_rate_hz_|, |samples_per_channel_|, and + // |vad_activity_| are updated upon success. If an error is returned, some + // fields may not have been updated, or may contain inconsistent values. + // If muted state is enabled (through Config::enable_muted_state), |muted| + // may be set to true after a prolonged expand period. When this happens, the + // |data_| in |audio_frame| is not written, but should be interpreted as being + // all zeros. For testing purposes, an override can be supplied in the + // |action_override| argument, which will cause NetEq to take this action + // next, instead of the action it would normally choose. + // Returns kOK on success, or kFail in case of an error. + virtual int GetAudio( + AudioFrame* audio_frame, + bool* muted, + absl::optional action_override = absl::nullopt) = 0; + + // Replaces the current set of decoders with the given one. 
+ virtual void SetCodecs(const std::map& codecs) = 0; + + // Associates |rtp_payload_type| with the given codec, which NetEq will + // instantiate when it needs it. Returns true iff successful. + virtual bool RegisterPayloadType(int rtp_payload_type, + const SdpAudioFormat& audio_format) = 0; + + // Removes |rtp_payload_type| from the codec database. Returns 0 on success, + // -1 on failure. Removing a payload type that is not registered is ok and + // will not result in an error. + virtual int RemovePayloadType(uint8_t rtp_payload_type) = 0; + + // Removes all payload types from the codec database. + virtual void RemoveAllPayloadTypes() = 0; + + // Sets a minimum delay in millisecond for packet buffer. The minimum is + // maintained unless a higher latency is dictated by channel condition. + // Returns true if the minimum is successfully applied, otherwise false is + // returned. + virtual bool SetMinimumDelay(int delay_ms) = 0; + + // Sets a maximum delay in milliseconds for packet buffer. The latency will + // not exceed the given value, even required delay (given the channel + // conditions) is higher. Calling this method has the same effect as setting + // the |max_delay_ms| value in the NetEq::Config struct. + virtual bool SetMaximumDelay(int delay_ms) = 0; + + // Sets a base minimum delay in milliseconds for packet buffer. The minimum + // delay which is set via |SetMinimumDelay| can't be lower than base minimum + // delay. Calling this method is similar to setting the |min_delay_ms| value + // in the NetEq::Config struct. Returns true if the base minimum is + // successfully applied, otherwise false is returned. + virtual bool SetBaseMinimumDelayMs(int delay_ms) = 0; + + // Returns current value of base minimum delay in milliseconds. + virtual int GetBaseMinimumDelayMs() const = 0; + + // Returns the current target delay in ms. This includes any extra delay + // requested through SetMinimumDelay. 
+ virtual int TargetDelayMs() const = 0; + + // Returns the current total delay (packet buffer and sync buffer) in ms, + // with smoothing applied to even out short-time fluctuations due to jitter. + // The packet buffer part of the delay is not updated during DTX/CNG periods. + virtual int FilteredCurrentDelayMs() const = 0; + + // Writes the current network statistics to |stats|. The statistics are reset + // after the call. + virtual int NetworkStatistics(NetEqNetworkStatistics* stats) = 0; + + // Returns a copy of this class's lifetime statistics. These statistics are + // never reset. + virtual NetEqLifetimeStatistics GetLifetimeStatistics() const = 0; + + // Returns statistics about the performed operations and internal state. These + // statistics are never reset. + virtual NetEqOperationsAndState GetOperationsAndState() const = 0; + + // Enables post-decode VAD. When enabled, GetAudio() will return + // kOutputVADPassive when the signal contains no speech. + virtual void EnableVad() = 0; + + // Disables post-decode VAD. + virtual void DisableVad() = 0; + + // Returns the RTP timestamp for the last sample delivered by GetAudio(). + // The return value will be empty if no valid timestamp is available. + virtual absl::optional GetPlayoutTimestamp() const = 0; + + // Returns the sample rate in Hz of the audio produced in the last GetAudio + // call. If GetAudio has not been called yet, the configured sample rate + // (Config::sample_rate_hz) is returned. + virtual int last_output_sample_rate_hz() const = 0; + + // Returns the decoder info for the given payload type. Returns empty if no + // such payload type was registered. + virtual absl::optional GetDecoderFormat( + int payload_type) const = 0; + + // Flushes both the packet buffer and the sync buffer. + virtual void FlushBuffers() = 0; + + // Enables NACK and sets the maximum size of the NACK list, which should be + // positive and no larger than Nack::kNackListSizeLimit. 
If NACK is already + // enabled then the maximum NACK list size is modified accordingly. + virtual void EnableNack(size_t max_nack_list_size) = 0; + + virtual void DisableNack() = 0; + + // Returns a list of RTP sequence numbers corresponding to packets to be + // retransmitted, given an estimate of the round-trip time in milliseconds. + virtual std::vector GetNackList( + int64_t round_trip_time_ms) const = 0; + + // Returns a vector containing the timestamps of the packets that were decoded + // in the last GetAudio call. If no packets were decoded in the last call, the + // vector is empty. + // Mainly intended for testing. + virtual std::vector LastDecodedTimestamps() const = 0; + + // Returns the length of the audio yet to play in the sync buffer. + // Mainly intended for testing. + virtual int SyncBufferSizeMs() const = 0; +}; + +} // namespace webrtc +#endif // API_NETEQ_NETEQ_H_ diff --git a/api/neteq/neteq_controller.h b/api/neteq/neteq_controller.h new file mode 100644 index 0000000..1d47eac --- /dev/null +++ b/api/neteq/neteq_controller.h @@ -0,0 +1,181 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_NETEQ_NETEQ_CONTROLLER_H_ +#define API_NETEQ_NETEQ_CONTROLLER_H_ + +#include +#include + +#include +#include + +#include "absl/types/optional.h" +#include "api/neteq/neteq.h" +#include "api/neteq/tick_timer.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { + +// Decides the actions that NetEq should take. This affects the behavior of the +// jitter buffer, and how it reacts to network conditions. 
+// This class will undergo substantial refactoring in the near future, and the +// API is expected to undergo significant changes. A target API is given below: +// +// class NetEqController { +// public: +// // Resets object to a clean state. +// void Reset(); +// // Given NetEq status, make a decision. +// Operation GetDecision(NetEqStatus neteq_status); +// // Register every packet received. +// void RegisterPacket(PacketInfo packet_info); +// // Register empty packet. +// void RegisterEmptyPacket(); +// // Register a codec switching. +// void CodecSwithed(); +// // Sets the sample rate. +// void SetSampleRate(int fs_hz); +// // Sets the packet length in samples. +// void SetPacketLengthSamples(); +// // Sets maximum delay. +// void SetMaximumDelay(int delay_ms); +// // Sets mininum delay. +// void SetMinimumDelay(int delay_ms); +// // Sets base mininum delay. +// void SetBaseMinimumDelay(int delay_ms); +// // Gets target buffer level. +// int GetTargetBufferLevelMs() const; +// // Gets filtered buffer level. +// int GetFilteredBufferLevel() const; +// // Gets base minimum delay. +// int GetBaseMinimumDelay() const; +// } + +class NetEqController { + public: + // This struct is used to create a NetEqController. 
+ struct Config { + bool allow_time_stretching; + bool enable_rtx_handling; + int max_packets_in_buffer; + int base_min_delay_ms; + TickTimer* tick_timer; + webrtc::Clock* clock = nullptr; + }; + + struct PacketInfo { + uint32_t timestamp; + bool is_dtx; + bool is_cng; + }; + + struct PacketBufferInfo { + bool dtx_or_cng; + size_t num_samples; + size_t span_samples; + size_t span_samples_no_dtx; + size_t num_packets; + }; + + struct NetEqStatus { + uint32_t target_timestamp; + int16_t expand_mutefactor; + size_t last_packet_samples; + absl::optional next_packet; + NetEq::Mode last_mode; + bool play_dtmf; + size_t generated_noise_samples; + PacketBufferInfo packet_buffer_info; + size_t sync_buffer_samples; + }; + + virtual ~NetEqController() = default; + + // Resets object to a clean state. + virtual void Reset() = 0; + + // Resets parts of the state. Typically done when switching codecs. + virtual void SoftReset() = 0; + + // Given info about the latest received packet, and current jitter buffer + // status, returns the operation. |target_timestamp| and |expand_mutefactor| + // are provided for reference. |last_packet_samples| is the number of samples + // obtained from the last decoded frame. If there is a packet available, it + // should be supplied in |packet|. The mode resulting from the last call to + // NetEqImpl::GetAudio is supplied in |last_mode|. If there is a DTMF event to + // play, |play_dtmf| should be set to true. The output variable + // |reset_decoder| will be set to true if a reset is required; otherwise it is + // left unchanged (i.e., it can remain true if it was true before the call). + virtual NetEq::Operation GetDecision(const NetEqStatus& status, + bool* reset_decoder) = 0; + + // Inform NetEqController that an empty packet has arrived. + virtual void RegisterEmptyPacket() = 0; + + // Sets the sample rate and the output block size. 
+ virtual void SetSampleRate(int fs_hz, size_t output_size_samples) = 0; + + // Sets a minimum or maximum delay in millisecond. + // Returns true if the delay bound is successfully applied, otherwise false. + virtual bool SetMaximumDelay(int delay_ms) = 0; + virtual bool SetMinimumDelay(int delay_ms) = 0; + + // Sets a base minimum delay in milliseconds for packet buffer. The effective + // minimum delay can't be lower than base minimum delay, even if a lower value + // is set using SetMinimumDelay. + // Returns true if the base minimum is successfully applied, otherwise false. + virtual bool SetBaseMinimumDelay(int delay_ms) = 0; + virtual int GetBaseMinimumDelay() const = 0; + + // These methods test the |cng_state_| for different conditions. + virtual bool CngRfc3389On() const = 0; + virtual bool CngOff() const = 0; + + // Resets the |cng_state_| to kCngOff. + virtual void SetCngOff() = 0; + + // Reports back to DecisionLogic whether the decision to do expand remains or + // not. Note that this is necessary, since an expand decision can be changed + // to kNormal in NetEqImpl::GetDecision if there is still enough data in the + // sync buffer. + virtual void ExpandDecision(NetEq::Operation operation) = 0; + + // Adds |value| to |sample_memory_|. + virtual void AddSampleMemory(int32_t value) = 0; + + // Returns the target buffer level in ms. + virtual int TargetLevelMs() = 0; + + // Notify the NetEqController that a packet has arrived. Returns the relative + // arrival delay, if it can be computed. + virtual absl::optional PacketArrived(bool last_cng_or_dtmf, + size_t packet_length_samples, + bool should_update_stats, + uint16_t main_sequence_number, + uint32_t main_timestamp, + int fs_hz) = 0; + + // Returns true if a peak was found. + virtual bool PeakFound() const = 0; + + // Get the filtered buffer level in samples. + virtual int GetFilteredBufferLevel() const = 0; + + // Accessors and mutators. 
+ virtual void set_sample_memory(int32_t value) = 0; + virtual size_t noise_fast_forward() const = 0; + virtual size_t packet_length_samples() const = 0; + virtual void set_packet_length_samples(size_t value) = 0; + virtual void set_prev_time_scale(bool value) = 0; +}; + +} // namespace webrtc +#endif // API_NETEQ_NETEQ_CONTROLLER_H_ diff --git a/api/neteq/neteq_controller_factory.h b/api/neteq/neteq_controller_factory.h new file mode 100644 index 0000000..6478fce --- /dev/null +++ b/api/neteq/neteq_controller_factory.h @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_NETEQ_NETEQ_CONTROLLER_FACTORY_H_ +#define API_NETEQ_NETEQ_CONTROLLER_FACTORY_H_ + +#include + +#include "api/neteq/neteq_controller.h" + +namespace webrtc { + +// Creates NetEqController instances using the settings provided in the config +// struct. +class NetEqControllerFactory { + public: + virtual ~NetEqControllerFactory() = default; + + // Creates a new NetEqController object, with parameters set in |config|. + virtual std::unique_ptr CreateNetEqController( + const NetEqController::Config& config) const = 0; +}; + +} // namespace webrtc +#endif // API_NETEQ_NETEQ_CONTROLLER_FACTORY_H_ diff --git a/api/neteq/neteq_factory.h b/api/neteq/neteq_factory.h new file mode 100644 index 0000000..65cf9eb --- /dev/null +++ b/api/neteq/neteq_factory.h @@ -0,0 +1,37 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_NETEQ_NETEQ_FACTORY_H_ +#define API_NETEQ_NETEQ_FACTORY_H_ + +#include + +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/neteq/neteq.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { + +// Creates NetEq instances using the settings provided in the config struct. +class NetEqFactory { + public: + virtual ~NetEqFactory() = default; + + // Creates a new NetEq object, with parameters set in |config|. The |config| + // object will only have to be valid for the duration of the call to this + // method. + virtual std::unique_ptr CreateNetEq( + const NetEq::Config& config, + const rtc::scoped_refptr& decoder_factory, + Clock* clock) const = 0; +}; + +} // namespace webrtc +#endif // API_NETEQ_NETEQ_FACTORY_H_ diff --git a/api/neteq/tick_timer.cc b/api/neteq/tick_timer.cc new file mode 100644 index 0000000..8f60bf4 --- /dev/null +++ b/api/neteq/tick_timer.cc @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/neteq/tick_timer.h" + +namespace webrtc { + +TickTimer::Stopwatch::Stopwatch(const TickTimer& ticktimer) + : ticktimer_(ticktimer), starttick_(ticktimer.ticks()) {} + +TickTimer::Countdown::Countdown(const TickTimer& ticktimer, + uint64_t ticks_to_count) + : stopwatch_(ticktimer.GetNewStopwatch()), + ticks_to_count_(ticks_to_count) {} + +TickTimer::Countdown::~Countdown() = default; + +} // namespace webrtc diff --git a/api/neteq/tick_timer.h b/api/neteq/tick_timer.h new file mode 100644 index 0000000..e3f54a4 --- /dev/null +++ b/api/neteq/tick_timer.h @@ -0,0 +1,112 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_NETEQ_TICK_TIMER_H_ +#define API_NETEQ_TICK_TIMER_H_ + +#include + +#include + +#include "rtc_base/checks.h" + +namespace webrtc { + +// Implements a time counter. The counter is advanced with the Increment() +// methods, and is queried with the ticks() accessor. It is assumed that one +// "tick" of the counter corresponds to 10 ms. +// A TickTimer object can provide two types of associated time-measuring +// objects: Stopwatch and Countdown. +class TickTimer { + public: + // Stopwatch measures time elapsed since it was started, by querying the + // associated TickTimer for the current time. The intended use is to request a + // new Stopwatch object from a TickTimer object with the GetNewStopwatch() + // method. Note: since the Stopwatch object contains a reference to the + // TickTimer it is associated with, it cannot outlive the TickTimer. 
+ class Stopwatch { + public: + explicit Stopwatch(const TickTimer& ticktimer); + + uint64_t ElapsedTicks() const { return ticktimer_.ticks() - starttick_; } + + uint64_t ElapsedMs() const { + const uint64_t elapsed_ticks = ticktimer_.ticks() - starttick_; + const int ms_per_tick = ticktimer_.ms_per_tick(); + return elapsed_ticks < UINT64_MAX / ms_per_tick + ? elapsed_ticks * ms_per_tick + : UINT64_MAX; + } + + private: + const TickTimer& ticktimer_; + const uint64_t starttick_; + }; + + // Countdown counts down from a given start value with each tick of the + // associated TickTimer, until zero is reached. The Finished() method will + // return true if zero has been reached, false otherwise. The intended use is + // to request a new Countdown object from a TickTimer object with the + // GetNewCountdown() method. Note: since the Countdown object contains a + // reference to the TickTimer it is associated with, it cannot outlive the + // TickTimer. + class Countdown { + public: + Countdown(const TickTimer& ticktimer, uint64_t ticks_to_count); + + ~Countdown(); + + bool Finished() const { + return stopwatch_->ElapsedTicks() >= ticks_to_count_; + } + + private: + const std::unique_ptr stopwatch_; + const uint64_t ticks_to_count_; + }; + + TickTimer() : TickTimer(10) {} + explicit TickTimer(int ms_per_tick) : ms_per_tick_(ms_per_tick) { + RTC_DCHECK_GT(ms_per_tick_, 0); + } + + TickTimer(const TickTimer&) = delete; + TickTimer& operator=(const TickTimer&) = delete; + + void Increment() { ++ticks_; } + + // Mainly intended for testing. + void Increment(uint64_t x) { ticks_ += x; } + + uint64_t ticks() const { return ticks_; } + + int ms_per_tick() const { return ms_per_tick_; } + + // Returns a new Stopwatch object, based on the current TickTimer. Note that + // the new Stopwatch object contains a reference to the current TickTimer, + // and must therefore not outlive the TickTimer. 
+ std::unique_ptr GetNewStopwatch() const { + return std::unique_ptr(new Stopwatch(*this)); + } + + // Returns a new Countdown object, based on the current TickTimer. Note that + // the new Countdown object contains a reference to the current TickTimer, + // and must therefore not outlive the TickTimer. + std::unique_ptr GetNewCountdown(uint64_t ticks_to_count) const { + return std::unique_ptr(new Countdown(*this, ticks_to_count)); + } + + private: + uint64_t ticks_ = 0; + const int ms_per_tick_; +}; + +} // namespace webrtc +#endif // API_NETEQ_TICK_TIMER_H_ diff --git a/api/neteq/tick_timer_unittest.cc b/api/neteq/tick_timer_unittest.cc new file mode 100644 index 0000000..863c011 --- /dev/null +++ b/api/neteq/tick_timer_unittest.cc @@ -0,0 +1,135 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/neteq/tick_timer.h" + +#include + +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { + +// Verify that the default value for ms_per_tick is 10. 
+TEST(TickTimer, DefaultMsPerTick) { + TickTimer tt; + EXPECT_EQ(10, tt.ms_per_tick()); +} + +TEST(TickTimer, CustomMsPerTick) { + TickTimer tt(17); + EXPECT_EQ(17, tt.ms_per_tick()); +} + +TEST(TickTimer, Increment) { + TickTimer tt; + EXPECT_EQ(0u, tt.ticks()); + tt.Increment(); + EXPECT_EQ(1u, tt.ticks()); + + for (int i = 0; i < 17; ++i) { + tt.Increment(); + } + EXPECT_EQ(18u, tt.ticks()); + + tt.Increment(17); + EXPECT_EQ(35u, tt.ticks()); +} + +TEST(TickTimer, WrapAround) { + TickTimer tt; + tt.Increment(UINT64_MAX); + EXPECT_EQ(UINT64_MAX, tt.ticks()); + tt.Increment(); + EXPECT_EQ(0u, tt.ticks()); +} + +TEST(TickTimer, Stopwatch) { + TickTimer tt; + // Increment it a "random" number of steps. + tt.Increment(17); + + std::unique_ptr sw = tt.GetNewStopwatch(); + ASSERT_TRUE(sw); + + EXPECT_EQ(0u, sw->ElapsedTicks()); // Starts at zero. + EXPECT_EQ(0u, sw->ElapsedMs()); + tt.Increment(); + EXPECT_EQ(1u, sw->ElapsedTicks()); // Increases with the TickTimer. + EXPECT_EQ(10u, sw->ElapsedMs()); +} + +TEST(TickTimer, StopwatchWrapAround) { + TickTimer tt; + tt.Increment(UINT64_MAX); + + std::unique_ptr sw = tt.GetNewStopwatch(); + ASSERT_TRUE(sw); + + tt.Increment(); + EXPECT_EQ(0u, tt.ticks()); + EXPECT_EQ(1u, sw->ElapsedTicks()); + EXPECT_EQ(10u, sw->ElapsedMs()); + + tt.Increment(); + EXPECT_EQ(1u, tt.ticks()); + EXPECT_EQ(2u, sw->ElapsedTicks()); + EXPECT_EQ(20u, sw->ElapsedMs()); +} + +TEST(TickTimer, StopwatchMsOverflow) { + TickTimer tt; + std::unique_ptr sw = tt.GetNewStopwatch(); + ASSERT_TRUE(sw); + + tt.Increment(UINT64_MAX / 10); + EXPECT_EQ(UINT64_MAX, sw->ElapsedMs()); + + tt.Increment(); + EXPECT_EQ(UINT64_MAX, sw->ElapsedMs()); + + tt.Increment(UINT64_MAX - tt.ticks()); + EXPECT_EQ(UINT64_MAX, tt.ticks()); + EXPECT_EQ(UINT64_MAX, sw->ElapsedMs()); +} + +TEST(TickTimer, StopwatchWithCustomTicktime) { + const int kMsPerTick = 17; + TickTimer tt(kMsPerTick); + std::unique_ptr sw = tt.GetNewStopwatch(); + ASSERT_TRUE(sw); + + EXPECT_EQ(0u, 
sw->ElapsedMs()); + tt.Increment(); + EXPECT_EQ(static_cast(kMsPerTick), sw->ElapsedMs()); +} + +TEST(TickTimer, Countdown) { + TickTimer tt; + // Increment it a "random" number of steps. + tt.Increment(4711); + + std::unique_ptr cd = tt.GetNewCountdown(17); + ASSERT_TRUE(cd); + + EXPECT_FALSE(cd->Finished()); + tt.Increment(); + EXPECT_FALSE(cd->Finished()); + + tt.Increment(16); // Total increment is now 17. + EXPECT_TRUE(cd->Finished()); + + // Further increments do not change the state. + tt.Increment(); + EXPECT_TRUE(cd->Finished()); + tt.Increment(1234); + EXPECT_TRUE(cd->Finished()); +} +} // namespace webrtc diff --git a/api/network_state_predictor.h b/api/network_state_predictor.h new file mode 100644 index 0000000..9cf5ab6 --- /dev/null +++ b/api/network_state_predictor.h @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_NETWORK_STATE_PREDICTOR_H_ +#define API_NETWORK_STATE_PREDICTOR_H_ + +#include +#include + +namespace webrtc { + +enum class BandwidthUsage { + kBwNormal = 0, + kBwUnderusing = 1, + kBwOverusing = 2, + kLast +}; + +// TODO(yinwa): work in progress. API in class NetworkStatePredictor should not +// be used by other users until this comment is removed. + +// NetworkStatePredictor predict network state based on current network metrics. +// Usage: +// Setup by calling Initialize. +// For each update, call Update. Update returns network state +// prediction. +class NetworkStatePredictor { + public: + virtual ~NetworkStatePredictor() {} + + // Returns current network state prediction. + // Inputs: send_time_ms - packet send time. 
+ // arrival_time_ms - packet arrival time. + // network_state - computed network state. + virtual BandwidthUsage Update(int64_t send_time_ms, + int64_t arrival_time_ms, + BandwidthUsage network_state) = 0; +}; + +class NetworkStatePredictorFactoryInterface { + public: + virtual std::unique_ptr + CreateNetworkStatePredictor() = 0; + virtual ~NetworkStatePredictorFactoryInterface() = default; +}; + +} // namespace webrtc + +#endif // API_NETWORK_STATE_PREDICTOR_H_ diff --git a/api/notifier.h b/api/notifier.h new file mode 100644 index 0000000..c03b104 --- /dev/null +++ b/api/notifier.h @@ -0,0 +1,60 @@ +/* + * Copyright 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_NOTIFIER_H_ +#define API_NOTIFIER_H_ + +#include + +#include "api/media_stream_interface.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +// Implements a template version of a notifier. +// TODO(deadbeef): This is an implementation detail; move out of api/. +template +class Notifier : public T { + public: + Notifier() {} + + virtual void RegisterObserver(ObserverInterface* observer) { + RTC_DCHECK(observer != nullptr); + observers_.push_back(observer); + } + + virtual void UnregisterObserver(ObserverInterface* observer) { + for (std::list::iterator it = observers_.begin(); + it != observers_.end(); it++) { + if (*it == observer) { + observers_.erase(it); + break; + } + } + } + + void FireOnChanged() { + // Copy the list of observers to avoid a crash if the observer object + // unregisters as a result of the OnChanged() call. If the same list is used + // UnregisterObserver will affect the list make the iterator invalid. 
+ std::list observers = observers_; + for (std::list::iterator it = observers.begin(); + it != observers.end(); ++it) { + (*it)->OnChanged(); + } + } + + protected: + std::list observers_; +}; + +} // namespace webrtc + +#endif // API_NOTIFIER_H_ diff --git a/api/packet_socket_factory.h b/api/packet_socket_factory.h new file mode 100644 index 0000000..1e9f470 --- /dev/null +++ b/api/packet_socket_factory.h @@ -0,0 +1,81 @@ +/* + * Copyright 2019 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_PACKET_SOCKET_FACTORY_H_ +#define API_PACKET_SOCKET_FACTORY_H_ + +#include +#include + +#include "rtc_base/async_packet_socket.h" +#include "rtc_base/proxy_info.h" +#include "rtc_base/system/rtc_export.h" + +namespace rtc { + +class SSLCertificateVerifier; +class AsyncResolverInterface; + +struct PacketSocketTcpOptions { + PacketSocketTcpOptions() = default; + ~PacketSocketTcpOptions() = default; + + int opts = 0; + std::vector tls_alpn_protocols; + std::vector tls_elliptic_curves; + // An optional custom SSL certificate verifier that an API user can provide to + // inject their own certificate verification logic (not available to users + // outside of the WebRTC repo). + SSLCertificateVerifier* tls_cert_verifier = nullptr; +}; + +class RTC_EXPORT PacketSocketFactory { + public: + enum Options { + OPT_STUN = 0x04, + + // The TLS options below are mutually exclusive. + OPT_TLS = 0x02, // Real and secure TLS. + OPT_TLS_FAKE = 0x01, // Fake TLS with a dummy SSL handshake. + OPT_TLS_INSECURE = 0x08, // Insecure TLS without certificate validation. + + // Deprecated, use OPT_TLS_FAKE. 
+ OPT_SSLTCP = OPT_TLS_FAKE, + }; + + PacketSocketFactory() = default; + virtual ~PacketSocketFactory() = default; + + virtual AsyncPacketSocket* CreateUdpSocket(const SocketAddress& address, + uint16_t min_port, + uint16_t max_port) = 0; + virtual AsyncPacketSocket* CreateServerTcpSocket( + const SocketAddress& local_address, + uint16_t min_port, + uint16_t max_port, + int opts) = 0; + + virtual AsyncPacketSocket* CreateClientTcpSocket( + const SocketAddress& local_address, + const SocketAddress& remote_address, + const ProxyInfo& proxy_info, + const std::string& user_agent, + const PacketSocketTcpOptions& tcp_options) = 0; + + virtual AsyncResolverInterface* CreateAsyncResolver() = 0; + + private: + PacketSocketFactory(const PacketSocketFactory&) = delete; + PacketSocketFactory& operator=(const PacketSocketFactory&) = delete; +}; + +} // namespace rtc + +#endif // API_PACKET_SOCKET_FACTORY_H_ diff --git a/api/peer_connection_factory_proxy.h b/api/peer_connection_factory_proxy.h new file mode 100644 index 0000000..e33fb45 --- /dev/null +++ b/api/peer_connection_factory_proxy.h @@ -0,0 +1,65 @@ +/* + * Copyright 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_PEER_CONNECTION_FACTORY_PROXY_H_ +#define API_PEER_CONNECTION_FACTORY_PROXY_H_ + +#include +#include +#include + +#include "api/peer_connection_interface.h" +#include "api/proxy.h" +#include "rtc_base/bind.h" + +namespace webrtc { + +// TODO(deadbeef): Move this to .cc file and out of api/. What threads methods +// are called on is an implementation detail. 
+BEGIN_SIGNALING_PROXY_MAP(PeerConnectionFactory) +PROXY_SIGNALING_THREAD_DESTRUCTOR() +PROXY_METHOD1(void, SetOptions, const Options&) +PROXY_METHOD4(rtc::scoped_refptr, + CreatePeerConnection, + const PeerConnectionInterface::RTCConfiguration&, + std::unique_ptr, + std::unique_ptr, + PeerConnectionObserver*) +PROXY_METHOD2(rtc::scoped_refptr, + CreatePeerConnection, + const PeerConnectionInterface::RTCConfiguration&, + PeerConnectionDependencies) +PROXY_CONSTMETHOD1(webrtc::RtpCapabilities, + GetRtpSenderCapabilities, + cricket::MediaType) +PROXY_CONSTMETHOD1(webrtc::RtpCapabilities, + GetRtpReceiverCapabilities, + cricket::MediaType) +PROXY_METHOD1(rtc::scoped_refptr, + CreateLocalMediaStream, + const std::string&) +PROXY_METHOD1(rtc::scoped_refptr, + CreateAudioSource, + const cricket::AudioOptions&) +PROXY_METHOD2(rtc::scoped_refptr, + CreateVideoTrack, + const std::string&, + VideoTrackSourceInterface*) +PROXY_METHOD2(rtc::scoped_refptr, + CreateAudioTrack, + const std::string&, + AudioSourceInterface*) +PROXY_METHOD2(bool, StartAecDump, FILE*, int64_t) +PROXY_METHOD0(void, StopAecDump) +END_PROXY_MAP() + +} // namespace webrtc + +#endif // API_PEER_CONNECTION_FACTORY_PROXY_H_ diff --git a/api/peer_connection_interface.cc b/api/peer_connection_interface.cc new file mode 100644 index 0000000..f82e84b --- /dev/null +++ b/api/peer_connection_interface.cc @@ -0,0 +1,100 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/peer_connection_interface.h" + +#include "api/dtls_transport_interface.h" +#include "api/sctp_transport_interface.h" + +namespace webrtc { + +PeerConnectionInterface::IceServer::IceServer() = default; +PeerConnectionInterface::IceServer::IceServer(const IceServer& rhs) = default; +PeerConnectionInterface::IceServer::~IceServer() = default; + +PeerConnectionInterface::RTCConfiguration::RTCConfiguration() = default; + +PeerConnectionInterface::RTCConfiguration::RTCConfiguration( + const RTCConfiguration& rhs) = default; + +PeerConnectionInterface::RTCConfiguration::RTCConfiguration( + RTCConfigurationType type) { + if (type == RTCConfigurationType::kAggressive) { + // These parameters are also defined in Java and IOS configurations, + // so their values may be overwritten by the Java or IOS configuration. + bundle_policy = kBundlePolicyMaxBundle; + rtcp_mux_policy = kRtcpMuxPolicyRequire; + ice_connection_receiving_timeout = kAggressiveIceConnectionReceivingTimeout; + + // These parameters are not defined in Java or IOS configuration, + // so their values will not be overwritten. + enable_ice_renomination = true; + redetermine_role_on_ice_restart = false; + } +} + +PeerConnectionInterface::RTCConfiguration::~RTCConfiguration() = default; + +RTCError PeerConnectionInterface::RemoveTrackNew( + rtc::scoped_refptr sender) { + return RTCError(RemoveTrack(sender) ? 
RTCErrorType::NONE + : RTCErrorType::INTERNAL_ERROR); +} + +RTCError PeerConnectionInterface::SetConfiguration( + const PeerConnectionInterface::RTCConfiguration& config) { + return RTCError(); +} + +PeerConnectionDependencies::PeerConnectionDependencies( + PeerConnectionObserver* observer_in) + : observer(observer_in) {} + +PeerConnectionDependencies::PeerConnectionDependencies( + PeerConnectionDependencies&&) = default; + +PeerConnectionDependencies::~PeerConnectionDependencies() = default; + +PeerConnectionFactoryDependencies::PeerConnectionFactoryDependencies() = + default; + +PeerConnectionFactoryDependencies::PeerConnectionFactoryDependencies( + PeerConnectionFactoryDependencies&&) = default; + +PeerConnectionFactoryDependencies::~PeerConnectionFactoryDependencies() = + default; + +rtc::scoped_refptr +PeerConnectionFactoryInterface::CreatePeerConnection( + const PeerConnectionInterface::RTCConfiguration& configuration, + std::unique_ptr allocator, + std::unique_ptr cert_generator, + PeerConnectionObserver* observer) { + return nullptr; +} + +rtc::scoped_refptr +PeerConnectionFactoryInterface::CreatePeerConnection( + const PeerConnectionInterface::RTCConfiguration& configuration, + PeerConnectionDependencies dependencies) { + return nullptr; +} + +RtpCapabilities PeerConnectionFactoryInterface::GetRtpSenderCapabilities( + cricket::MediaType kind) const { + return {}; +} + +RtpCapabilities PeerConnectionFactoryInterface::GetRtpReceiverCapabilities( + cricket::MediaType kind) const { + return {}; +} + +} // namespace webrtc diff --git a/api/peer_connection_interface.h b/api/peer_connection_interface.h new file mode 100644 index 0000000..09317b8 --- /dev/null +++ b/api/peer_connection_interface.h @@ -0,0 +1,1501 @@ +/* + * Copyright 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file contains the PeerConnection interface as defined in +// https://w3c.github.io/webrtc-pc/#peer-to-peer-connections +// +// The PeerConnectionFactory class provides factory methods to create +// PeerConnection, MediaStream and MediaStreamTrack objects. +// +// The following steps are needed to setup a typical call using WebRTC: +// +// 1. Create a PeerConnectionFactoryInterface. Check constructors for more +// information about input parameters. +// +// 2. Create a PeerConnection object. Provide a configuration struct which +// points to STUN and/or TURN servers used to generate ICE candidates, and +// provide an object that implements the PeerConnectionObserver interface, +// which is used to receive callbacks from the PeerConnection. +// +// 3. Create local MediaStreamTracks using the PeerConnectionFactory and add +// them to PeerConnection by calling AddTrack (or legacy method, AddStream). +// +// 4. Create an offer, call SetLocalDescription with it, serialize it, and send +// it to the remote peer +// +// 5. Once an ICE candidate has been gathered, the PeerConnection will call the +// observer function OnIceCandidate. The candidates must also be serialized and +// sent to the remote peer. +// +// 6. Once an answer is received from the remote peer, call +// SetRemoteDescription with the remote answer. +// +// 7. Once a remote candidate is received from the remote peer, provide it to +// the PeerConnection by calling AddIceCandidate. +// +// The receiver of a call (assuming the application is "call"-based) can decide +// to accept or reject the call; this decision will be taken by the application, +// not the PeerConnection. +// +// If the application decides to accept the call, it should: +// +// 1. Create PeerConnectionFactoryInterface if it doesn't exist. 
+// +// 2. Create a new PeerConnection. +// +// 3. Provide the remote offer to the new PeerConnection object by calling +// SetRemoteDescription. +// +// 4. Generate an answer to the remote offer by calling CreateAnswer and send it +// back to the remote peer. +// +// 5. Provide the local answer to the new PeerConnection by calling +// SetLocalDescription with the answer. +// +// 6. Provide the remote ICE candidates by calling AddIceCandidate. +// +// 7. Once a candidate has been gathered, the PeerConnection will call the +// observer function OnIceCandidate. Send these candidates to the remote peer. + +#ifndef API_PEER_CONNECTION_INTERFACE_H_ +#define API_PEER_CONNECTION_INTERFACE_H_ + +#include + +#include +#include +#include + +#include "api/adaptation/resource.h" +#include "api/async_resolver_factory.h" +#include "api/audio/audio_mixer.h" +#include "api/audio_codecs/audio_decoder_factory.h" +#include "api/audio_codecs/audio_encoder_factory.h" +#include "api/audio_options.h" +#include "api/call/call_factory_interface.h" +#include "api/crypto/crypto_options.h" +#include "api/data_channel_interface.h" +#include "api/dtls_transport_interface.h" +#include "api/fec_controller.h" +#include "api/ice_transport_interface.h" +#include "api/jsep.h" +#include "api/media_stream_interface.h" +#include "api/neteq/neteq_factory.h" +#include "api/network_state_predictor.h" +#include "api/packet_socket_factory.h" +#include "api/rtc_error.h" +#include "api/rtc_event_log/rtc_event_log_factory_interface.h" +#include "api/rtc_event_log_output.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" +#include "api/rtp_transceiver_interface.h" +#include "api/sctp_transport_interface.h" +#include "api/set_local_description_observer_interface.h" +#include "api/set_remote_description_observer_interface.h" +#include "api/stats/rtc_stats_collector_callback.h" +#include "api/stats_types.h" +#include "api/task_queue/task_queue_factory.h" +#include 
"api/transport/bitrate_settings.h" +#include "api/transport/enums.h" +#include "api/transport/network_control.h" +#include "api/transport/webrtc_key_value_config.h" +#include "api/turn_customizer.h" +#include "media/base/media_config.h" +#include "media/base/media_engine.h" +// TODO(bugs.webrtc.org/7447): We plan to provide a way to let applications +// inject a PacketSocketFactory and/or NetworkManager, and not expose +// PortAllocator in the PeerConnection api. +#include "p2p/base/port_allocator.h" // nogncheck +#include "rtc_base/network_monitor_factory.h" +#include "rtc_base/rtc_certificate.h" +#include "rtc_base/rtc_certificate_generator.h" +#include "rtc_base/socket_address.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/system/rtc_export.h" + +namespace rtc { +class Thread; +} // namespace rtc + +namespace webrtc { + +// MediaStream container interface. +class StreamCollectionInterface : public rtc::RefCountInterface { + public: + // TODO(ronghuawu): Update the function names to c++ style, e.g. find -> Find. + virtual size_t count() = 0; + virtual MediaStreamInterface* at(size_t index) = 0; + virtual MediaStreamInterface* find(const std::string& label) = 0; + virtual MediaStreamTrackInterface* FindAudioTrack(const std::string& id) = 0; + virtual MediaStreamTrackInterface* FindVideoTrack(const std::string& id) = 0; + + protected: + // Dtor protected as objects shouldn't be deleted via this interface. 
+ ~StreamCollectionInterface() override = default; +}; + +class StatsObserver : public rtc::RefCountInterface { + public: + virtual void OnComplete(const StatsReports& reports) = 0; + + protected: + ~StatsObserver() override = default; +}; + +enum class SdpSemantics { kPlanB, kUnifiedPlan }; + +class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { + public: + // See https://w3c.github.io/webrtc-pc/#dom-rtcsignalingstate + enum SignalingState { + kStable, + kHaveLocalOffer, + kHaveLocalPrAnswer, + kHaveRemoteOffer, + kHaveRemotePrAnswer, + kClosed, + }; + + // See https://w3c.github.io/webrtc-pc/#dom-rtcicegatheringstate + enum IceGatheringState { + kIceGatheringNew, + kIceGatheringGathering, + kIceGatheringComplete + }; + + // See https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnectionstate + enum class PeerConnectionState { + kNew, + kConnecting, + kConnected, + kDisconnected, + kFailed, + kClosed, + }; + + // See https://w3c.github.io/webrtc-pc/#dom-rtciceconnectionstate + enum IceConnectionState { + kIceConnectionNew, + kIceConnectionChecking, + kIceConnectionConnected, + kIceConnectionCompleted, + kIceConnectionFailed, + kIceConnectionDisconnected, + kIceConnectionClosed, + kIceConnectionMax, + }; + + // TLS certificate policy. + enum TlsCertPolicy { + // For TLS based protocols, ensure the connection is secure by not + // circumventing certificate validation. + kTlsCertPolicySecure, + // For TLS based protocols, disregard security completely by skipping + // certificate validation. This is insecure and should never be used unless + // security is irrelevant in that particular context. + kTlsCertPolicyInsecureNoCheck, + }; + + struct RTC_EXPORT IceServer { + IceServer(); + IceServer(const IceServer&); + ~IceServer(); + + // TODO(jbauch): Remove uri when all code using it has switched to urls. + // List of URIs associated with this server. Valid formats are described + // in RFC7064 and RFC7065, and more may be added in the future. 
The "host" + // part of the URI may contain either an IP address or a hostname. + std::string uri; + std::vector urls; + std::string username; + std::string password; + TlsCertPolicy tls_cert_policy = kTlsCertPolicySecure; + // If the URIs in |urls| only contain IP addresses, this field can be used + // to indicate the hostname, which may be necessary for TLS (using the SNI + // extension). If |urls| itself contains the hostname, this isn't + // necessary. + std::string hostname; + // List of protocols to be used in the TLS ALPN extension. + std::vector tls_alpn_protocols; + // List of elliptic curves to be used in the TLS elliptic curves extension. + std::vector tls_elliptic_curves; + + bool operator==(const IceServer& o) const { + return uri == o.uri && urls == o.urls && username == o.username && + password == o.password && tls_cert_policy == o.tls_cert_policy && + hostname == o.hostname && + tls_alpn_protocols == o.tls_alpn_protocols && + tls_elliptic_curves == o.tls_elliptic_curves; + } + bool operator!=(const IceServer& o) const { return !(*this == o); } + }; + typedef std::vector IceServers; + + enum IceTransportsType { + // TODO(pthatcher): Rename these kTransporTypeXXX, but update + // Chromium at the same time. 
+ kNone, + kRelay, + kNoHost, + kAll + }; + + // https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-24#section-4.1.1 + enum BundlePolicy { + kBundlePolicyBalanced, + kBundlePolicyMaxBundle, + kBundlePolicyMaxCompat + }; + + // https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-24#section-4.1.1 + enum RtcpMuxPolicy { + kRtcpMuxPolicyNegotiate, + kRtcpMuxPolicyRequire, + }; + + enum TcpCandidatePolicy { + kTcpCandidatePolicyEnabled, + kTcpCandidatePolicyDisabled + }; + + enum CandidateNetworkPolicy { + kCandidateNetworkPolicyAll, + kCandidateNetworkPolicyLowCost + }; + + enum ContinualGatheringPolicy { GATHER_ONCE, GATHER_CONTINUALLY }; + + enum class RTCConfigurationType { + // A configuration that is safer to use, despite not having the best + // performance. Currently this is the default configuration. + kSafe, + // An aggressive configuration that has better performance, although it + // may be riskier and may need extra support in the application. + kAggressive + }; + + // TODO(hbos): Change into class with private data and public getters. + // TODO(nisse): In particular, accessing fields directly from an + // application is brittle, since the organization mirrors the + // organization of the implementation, which isn't stable. So we + // need getters and setters at least for fields which applications + // are interested in. + struct RTC_EXPORT RTCConfiguration { + // This struct is subject to reorganization, both for naming + // consistency, and to group settings to match where they are used + // in the implementation. To do that, we need getter and setter + // methods for all settings which are of interest to applications, + // Chrome in particular. 
+ + RTCConfiguration(); + RTCConfiguration(const RTCConfiguration&); + explicit RTCConfiguration(RTCConfigurationType type); + ~RTCConfiguration(); + + bool operator==(const RTCConfiguration& o) const; + bool operator!=(const RTCConfiguration& o) const; + + bool dscp() const { return media_config.enable_dscp; } + void set_dscp(bool enable) { media_config.enable_dscp = enable; } + + bool cpu_adaptation() const { + return media_config.video.enable_cpu_adaptation; + } + void set_cpu_adaptation(bool enable) { + media_config.video.enable_cpu_adaptation = enable; + } + + bool suspend_below_min_bitrate() const { + return media_config.video.suspend_below_min_bitrate; + } + void set_suspend_below_min_bitrate(bool enable) { + media_config.video.suspend_below_min_bitrate = enable; + } + + bool prerenderer_smoothing() const { + return media_config.video.enable_prerenderer_smoothing; + } + void set_prerenderer_smoothing(bool enable) { + media_config.video.enable_prerenderer_smoothing = enable; + } + + bool experiment_cpu_load_estimator() const { + return media_config.video.experiment_cpu_load_estimator; + } + void set_experiment_cpu_load_estimator(bool enable) { + media_config.video.experiment_cpu_load_estimator = enable; + } + + int audio_rtcp_report_interval_ms() const { + return media_config.audio.rtcp_report_interval_ms; + } + void set_audio_rtcp_report_interval_ms(int audio_rtcp_report_interval_ms) { + media_config.audio.rtcp_report_interval_ms = + audio_rtcp_report_interval_ms; + } + + int video_rtcp_report_interval_ms() const { + return media_config.video.rtcp_report_interval_ms; + } + void set_video_rtcp_report_interval_ms(int video_rtcp_report_interval_ms) { + media_config.video.rtcp_report_interval_ms = + video_rtcp_report_interval_ms; + } + + static const int kUndefined = -1; + // Default maximum number of packets in the audio jitter buffer. + static const int kAudioJitterBufferMaxPackets = 200; + // ICE connection receiving timeout for aggressive configuration. 
+ static const int kAggressiveIceConnectionReceivingTimeout = 1000; + + //////////////////////////////////////////////////////////////////////// + // The below few fields mirror the standard RTCConfiguration dictionary: + // https://w3c.github.io/webrtc-pc/#rtcconfiguration-dictionary + //////////////////////////////////////////////////////////////////////// + + // TODO(pthatcher): Rename this ice_servers, but update Chromium + // at the same time. + IceServers servers; + // TODO(pthatcher): Rename this ice_transport_type, but update + // Chromium at the same time. + IceTransportsType type = kAll; + BundlePolicy bundle_policy = kBundlePolicyBalanced; + RtcpMuxPolicy rtcp_mux_policy = kRtcpMuxPolicyRequire; + std::vector> certificates; + int ice_candidate_pool_size = 0; + + ////////////////////////////////////////////////////////////////////////// + // The below fields correspond to constraints from the deprecated + // constraints interface for constructing a PeerConnection. + // + // absl::optional fields can be "missing", in which case the implementation + // default will be used. + ////////////////////////////////////////////////////////////////////////// + + // If set to true, don't gather IPv6 ICE candidates. + // TODO(deadbeef): Remove this? IPv6 support has long stopped being + // experimental + bool disable_ipv6 = false; + + // If set to true, don't gather IPv6 ICE candidates on Wi-Fi. + // Only intended to be used on specific devices. Certain phones disable IPv6 + // when the screen is turned off and it would be better to just disable the + // IPv6 ICE candidates on Wi-Fi in those cases. + bool disable_ipv6_on_wifi = false; + + // By default, the PeerConnection will use a limited number of IPv6 network + // interfaces, in order to avoid too many ICE candidate pairs being created + // and delaying ICE completion. + // + // Can be set to INT_MAX to effectively disable the limit. 
+ int max_ipv6_networks = cricket::kDefaultMaxIPv6Networks; + + // Exclude link-local network interfaces + // from consideration for gathering ICE candidates. + bool disable_link_local_networks = false; + + // If set to true, use RTP data channels instead of SCTP. + // TODO(deadbeef): Remove this. We no longer commit to supporting RTP data + // channels, though some applications are still working on moving off of + // them. + bool enable_rtp_data_channel = false; + + // Minimum bitrate at which screencast video tracks will be encoded at. + // This means adding padding bits up to this bitrate, which can help + // when switching from a static scene to one with motion. + absl::optional screencast_min_bitrate; + + // Use new combined audio/video bandwidth estimation? + absl::optional combined_audio_video_bwe; + + // TODO(bugs.webrtc.org/9891) - Move to crypto_options + // Can be used to disable DTLS-SRTP. This should never be done, but can be + // useful for testing purposes, for example in setting up a loopback call + // with a single PeerConnection. + absl::optional enable_dtls_srtp; + + ///////////////////////////////////////////////// + // The below fields are not part of the standard. + ///////////////////////////////////////////////// + + // Can be used to disable TCP candidate generation. + TcpCandidatePolicy tcp_candidate_policy = kTcpCandidatePolicyEnabled; + + // Can be used to avoid gathering candidates for a "higher cost" network, + // if a lower cost one exists. For example, if both Wi-Fi and cellular + // interfaces are available, this could be used to avoid using the cellular + // interface. + CandidateNetworkPolicy candidate_network_policy = + kCandidateNetworkPolicyAll; + + // The maximum number of packets that can be stored in the NetEq audio + // jitter buffer. Can be reduced to lower tolerated audio latency. 
+ int audio_jitter_buffer_max_packets = kAudioJitterBufferMaxPackets; + + // Whether to use the NetEq "fast mode" which will accelerate audio quicker + // if it falls behind. + bool audio_jitter_buffer_fast_accelerate = false; + + // The minimum delay in milliseconds for the audio jitter buffer. + int audio_jitter_buffer_min_delay_ms = 0; + + // Whether the audio jitter buffer adapts the delay to retransmitted + // packets. + bool audio_jitter_buffer_enable_rtx_handling = false; + + // Timeout in milliseconds before an ICE candidate pair is considered to be + // "not receiving", after which a lower priority candidate pair may be + // selected. + int ice_connection_receiving_timeout = kUndefined; + + // Interval in milliseconds at which an ICE "backup" candidate pair will be + // pinged. This is a candidate pair which is not actively in use, but may + // be switched to if the active candidate pair becomes unusable. + // + // This is relevant mainly to Wi-Fi/cell handoff; the application may not + // want this backup cellular candidate pair pinged frequently, since it + // consumes data/battery. + int ice_backup_candidate_pair_ping_interval = kUndefined; + + // Can be used to enable continual gathering, which means new candidates + // will be gathered as network interfaces change. Note that if continual + // gathering is used, the candidate removal API should also be used, to + // avoid an ever-growing list of candidates. + ContinualGatheringPolicy continual_gathering_policy = GATHER_ONCE; + + // If set to true, candidate pairs will be pinged in order of most likely + // to work (which means using a TURN server, generally), rather than in + // standard priority order. + bool prioritize_most_likely_ice_candidate_pairs = false; + + // Implementation defined settings. A public member only for the benefit of + // the implementation. Applications must not access it directly, and should + // instead use provided accessor methods, e.g., set_cpu_adaptation. 
+ struct cricket::MediaConfig media_config; + + // If set to true, only one preferred TURN allocation will be used per + // network interface. UDP is preferred over TCP and IPv6 over IPv4. This + // can be used to cut down on the number of candidate pairings. + // Deprecated. TODO(webrtc:11026) Remove this flag once the downstream + // dependency is removed. + bool prune_turn_ports = false; + + // The policy used to prune turn port. + PortPrunePolicy turn_port_prune_policy = NO_PRUNE; + + PortPrunePolicy GetTurnPortPrunePolicy() const { + return prune_turn_ports ? PRUNE_BASED_ON_PRIORITY + : turn_port_prune_policy; + } + + // If set to true, this means the ICE transport should presume TURN-to-TURN + // candidate pairs will succeed, even before a binding response is received. + // This can be used to optimize the initial connection time, since the DTLS + // handshake can begin immediately. + bool presume_writable_when_fully_relayed = false; + + // If true, "renomination" will be added to the ice options in the transport + // description. + // See: https://tools.ietf.org/html/draft-thatcher-ice-renomination-00 + bool enable_ice_renomination = false; + + // If true, the ICE role is re-determined when the PeerConnection sets a + // local transport description that indicates an ICE restart. + // + // This is standard RFC5245 ICE behavior, but causes unnecessary role + // thrashing, so an application may wish to avoid it. This role + // re-determining was removed in ICEbis (ICE v2). + bool redetermine_role_on_ice_restart = true; + + // This flag is only effective when |continual_gathering_policy| is + // GATHER_CONTINUALLY. + // + // If true, after the ICE transport type is changed such that new types of + // ICE candidates are allowed by the new transport type, e.g. 
from + // IceTransportsType::kRelay to IceTransportsType::kAll, candidates that + // have been gathered by the ICE transport but not matching the previous + // transport type and as a result not observed by PeerConnectionObserver, + // will be surfaced to the observer. + bool surface_ice_candidates_on_ice_transport_type_changed = false; + + // The following fields define intervals in milliseconds at which ICE + // connectivity checks are sent. + // + // We consider ICE is "strongly connected" for an agent when there is at + // least one candidate pair that currently succeeds in connectivity check + // from its direction i.e. sending a STUN ping and receives a STUN ping + // response, AND all candidate pairs have sent a minimum number of pings for + // connectivity (this number is implementation-specific). Otherwise, ICE is + // considered in "weak connectivity". + // + // Note that the above notion of strong and weak connectivity is not defined + // in RFC 5245, and they apply to our current ICE implementation only. + // + // 1) ice_check_interval_strong_connectivity defines the interval applied to + // ALL candidate pairs when ICE is strongly connected, and it overrides the + // default value of this interval in the ICE implementation; + // 2) ice_check_interval_weak_connectivity defines the counterpart for ALL + // pairs when ICE is weakly connected, and it overrides the default value of + // this interval in the ICE implementation; + // 3) ice_check_min_interval defines the minimal interval (equivalently the + // maximum rate) that overrides the above two intervals when either of them + // is less. + absl::optional ice_check_interval_strong_connectivity; + absl::optional ice_check_interval_weak_connectivity; + absl::optional ice_check_min_interval; + + // The min time period for which a candidate pair must wait for response to + // connectivity checks before it becomes unwritable. This parameter + // overrides the default value in the ICE implementation if set. 
+ absl::optional ice_unwritable_timeout; + + // The min number of connectivity checks that a candidate pair must sent + // without receiving response before it becomes unwritable. This parameter + // overrides the default value in the ICE implementation if set. + absl::optional ice_unwritable_min_checks; + + // The min time period for which a candidate pair must wait for response to + // connectivity checks it becomes inactive. This parameter overrides the + // default value in the ICE implementation if set. + absl::optional ice_inactive_timeout; + + // The interval in milliseconds at which STUN candidates will resend STUN + // binding requests to keep NAT bindings open. + absl::optional stun_candidate_keepalive_interval; + + // Optional TurnCustomizer. + // With this class one can modify outgoing TURN messages. + // The object passed in must remain valid until PeerConnection::Close() is + // called. + webrtc::TurnCustomizer* turn_customizer = nullptr; + + // Preferred network interface. + // A candidate pair on a preferred network has a higher precedence in ICE + // than one on an un-preferred network, regardless of priority or network + // cost. + absl::optional network_preference; + + // Configure the SDP semantics used by this PeerConnection. Note that the + // WebRTC 1.0 specification requires kUnifiedPlan semantics. The + // RtpTransceiver API is only available with kUnifiedPlan semantics. + // + // kPlanB will cause PeerConnection to create offers and answers with at + // most one audio and one video m= section with multiple RtpSenders and + // RtpReceivers specified as multiple a=ssrc lines within the section. This + // will also cause PeerConnection to ignore all but the first m= section of + // the same media type. + // + // kUnifiedPlan will cause PeerConnection to create offers and answers with + // multiple m= sections where each m= section maps to one RtpSender and one + // RtpReceiver (an RtpTransceiver), either both audio or both video. 
This + // will also cause PeerConnection to ignore all but the first a=ssrc lines + // that form a Plan B stream. + // + // For users who wish to send multiple audio/video streams and need to stay + // interoperable with legacy WebRTC implementations or use legacy APIs, + // specify kPlanB. + // + // For all other users, specify kUnifiedPlan. + SdpSemantics sdp_semantics = SdpSemantics::kPlanB; + + // TODO(bugs.webrtc.org/9891) - Move to crypto_options or remove. + // Actively reset the SRTP parameters whenever the DTLS transports + // underneath are reset for every offer/answer negotiation. + // This is only intended to be a workaround for crbug.com/835958 + // WARNING: This would cause RTP/RTCP packets decryption failure if not used + // correctly. This flag will be deprecated soon. Do not rely on it. + bool active_reset_srtp_params = false; + + // Defines advanced optional cryptographic settings related to SRTP and + // frame encryption for native WebRTC. Setting this will overwrite any + // settings set in PeerConnectionFactory (which is deprecated). + absl::optional crypto_options; + + // Configure if we should include the SDP attribute extmap-allow-mixed in + // our offer. Although we currently do support this, it's not included in + // our offer by default due to a previous bug that caused the SDP parser to + // abort parsing if this attribute was present. This is fixed in Chrome 71. + // TODO(webrtc:9985): Change default to true once sufficient time has + // passed. + bool offer_extmap_allow_mixed = false; + + // TURN logging identifier. + // This identifier is added to a TURN allocation + // and it intended to be used to be able to match client side + // logs with TURN server logs. It will not be added if it's an empty string. + std::string turn_logging_id; + + // Added to be able to control rollout of this feature. + bool enable_implicit_rollback = false; + + // Whether network condition based codec switching is allowed. 
+ absl::optional allow_codec_switching; + + // + // Don't forget to update operator== if adding something. + // + }; + + // See: https://www.w3.org/TR/webrtc/#idl-def-rtcofferansweroptions + struct RTCOfferAnswerOptions { + static const int kUndefined = -1; + static const int kMaxOfferToReceiveMedia = 1; + + // The default value for constraint offerToReceiveX:true. + static const int kOfferToReceiveMediaTrue = 1; + + // These options are left as backwards compatibility for clients who need + // "Plan B" semantics. Clients who have switched to "Unified Plan" semantics + // should use the RtpTransceiver API (AddTransceiver) instead. + // + // offer_to_receive_X set to 1 will cause a media description to be + // generated in the offer, even if no tracks of that type have been added. + // Values greater than 1 are treated the same. + // + // If set to 0, the generated directional attribute will not include the + // "recv" direction (meaning it will be "sendonly" or "inactive". + int offer_to_receive_video = kUndefined; + int offer_to_receive_audio = kUndefined; + + bool voice_activity_detection = true; + bool ice_restart = false; + + // If true, will offer to BUNDLE audio/video/data together. Not to be + // confused with RTCP mux (multiplexing RTP and RTCP together). + bool use_rtp_mux = true; + + // If true, "a=packetization: raw" attribute will be offered + // in the SDP for all video payload and accepted in the answer if offered. + bool raw_packetization_for_video = false; + + // This will apply to all video tracks with a Plan B SDP offer/answer. 
+ int num_simulcast_layers = 1; + + // If true: Use SDP format from draft-ietf-mmusic-scdp-sdp-03 + // If false: Use SDP format from draft-ietf-mmusic-sdp-sdp-26 or later + bool use_obsolete_sctp_sdp = false; + + RTCOfferAnswerOptions() = default; + + RTCOfferAnswerOptions(int offer_to_receive_video, + int offer_to_receive_audio, + bool voice_activity_detection, + bool ice_restart, + bool use_rtp_mux) + : offer_to_receive_video(offer_to_receive_video), + offer_to_receive_audio(offer_to_receive_audio), + voice_activity_detection(voice_activity_detection), + ice_restart(ice_restart), + use_rtp_mux(use_rtp_mux) {} + }; + + // Used by GetStats to decide which stats to include in the stats reports. + // |kStatsOutputLevelStandard| includes the standard stats for Javascript API; + // |kStatsOutputLevelDebug| includes both the standard stats and additional + // stats for debugging purposes. + enum StatsOutputLevel { + kStatsOutputLevelStandard, + kStatsOutputLevelDebug, + }; + + // Accessor methods to active local streams. + // This method is not supported with kUnifiedPlan semantics. Please use + // GetSenders() instead. + virtual rtc::scoped_refptr local_streams() = 0; + + // Accessor methods to remote streams. + // This method is not supported with kUnifiedPlan semantics. Please use + // GetReceivers() instead. + virtual rtc::scoped_refptr remote_streams() = 0; + + // Add a new MediaStream to be sent on this PeerConnection. + // Note that a SessionDescription negotiation is needed before the + // remote peer can receive the stream. + // + // This has been removed from the standard in favor of a track-based API. So, + // this is equivalent to simply calling AddTrack for each track within the + // stream, with the one difference that if "stream->AddTrack(...)" is called + // later, the PeerConnection will automatically pick up the new track. Though + // this functionality will be deprecated in the future. 
+ // + // This method is not supported with kUnifiedPlan semantics. Please use + // AddTrack instead. + virtual bool AddStream(MediaStreamInterface* stream) = 0; + + // Remove a MediaStream from this PeerConnection. + // Note that a SessionDescription negotiation is needed before the + // remote peer is notified. + // + // This method is not supported with kUnifiedPlan semantics. Please use + // RemoveTrack instead. + virtual void RemoveStream(MediaStreamInterface* stream) = 0; + + // Add a new MediaStreamTrack to be sent on this PeerConnection, and return + // the newly created RtpSender. The RtpSender will be associated with the + // streams specified in the |stream_ids| list. + // + // Errors: + // - INVALID_PARAMETER: |track| is null, has a kind other than audio or video, + // or a sender already exists for the track. + // - INVALID_STATE: The PeerConnection is closed. + virtual RTCErrorOr> AddTrack( + rtc::scoped_refptr track, + const std::vector& stream_ids) = 0; + + // Remove an RtpSender from this PeerConnection. + // Returns true on success. + // TODO(steveanton): Replace with signature that returns RTCError. + virtual bool RemoveTrack(RtpSenderInterface* sender) = 0; + + // Plan B semantics: Removes the RtpSender from this PeerConnection. + // Unified Plan semantics: Stop sending on the RtpSender and mark the + // corresponding RtpTransceiver direction as no longer sending. + // + // Errors: + // - INVALID_PARAMETER: |sender| is null or (Plan B only) the sender is not + // associated with this PeerConnection. + // - INVALID_STATE: PeerConnection is closed. + // TODO(bugs.webrtc.org/9534): Rename to RemoveTrack once the other signature + // is removed. + virtual RTCError RemoveTrackNew( + rtc::scoped_refptr sender); + + // AddTransceiver creates a new RtpTransceiver and adds it to the set of + // transceivers. Adding a transceiver will cause future calls to CreateOffer + // to add a media description for the corresponding transceiver. 
+ // + // The initial value of |mid| in the returned transceiver is null. Setting a + // new session description may change it to a non-null value. + // + // https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-addtransceiver + // + // Optionally, an RtpTransceiverInit structure can be specified to configure + // the transceiver from construction. If not specified, the transceiver will + // default to having a direction of kSendRecv and not be part of any streams. + // + // These methods are only available when Unified Plan is enabled (see + // RTCConfiguration). + // + // Common errors: + // - INTERNAL_ERROR: The configuration does not have Unified Plan enabled. + + // Adds a transceiver with a sender set to transmit the given track. The kind + // of the transceiver (and sender/receiver) will be derived from the kind of + // the track. + // Errors: + // - INVALID_PARAMETER: |track| is null. + virtual RTCErrorOr> + AddTransceiver(rtc::scoped_refptr track) = 0; + virtual RTCErrorOr> + AddTransceiver(rtc::scoped_refptr track, + const RtpTransceiverInit& init) = 0; + + // Adds a transceiver with the given kind. Can either be MEDIA_TYPE_AUDIO or + // MEDIA_TYPE_VIDEO. + // Errors: + // - INVALID_PARAMETER: |media_type| is not MEDIA_TYPE_AUDIO or + // MEDIA_TYPE_VIDEO. + virtual RTCErrorOr> + AddTransceiver(cricket::MediaType media_type) = 0; + virtual RTCErrorOr> + AddTransceiver(cricket::MediaType media_type, + const RtpTransceiverInit& init) = 0; + + // Creates a sender without a track. Can be used for "early media"/"warmup" + // use cases, where the application may want to negotiate video attributes + // before a track is available to send. + // + // The standard way to do this would be through "addTransceiver", but we + // don't support that API yet. + // + // |kind| must be "audio" or "video". + // + // |stream_id| is used to populate the msid attribute; if empty, one will + // be generated automatically. 
+ // + // This method is not supported with kUnifiedPlan semantics. Please use + // AddTransceiver instead. + virtual rtc::scoped_refptr CreateSender( + const std::string& kind, + const std::string& stream_id) = 0; + + // If Plan B semantics are specified, gets all RtpSenders, created either + // through AddStream, AddTrack, or CreateSender. All senders of a specific + // media type share the same media description. + // + // If Unified Plan semantics are specified, gets the RtpSender for each + // RtpTransceiver. + virtual std::vector> GetSenders() + const = 0; + + // If Plan B semantics are specified, gets all RtpReceivers created when a + // remote description is applied. All receivers of a specific media type share + // the same media description. It is also possible to have a media description + // with no associated RtpReceivers, if the directional attribute does not + // indicate that the remote peer is sending any media. + // + // If Unified Plan semantics are specified, gets the RtpReceiver for each + // RtpTransceiver. + virtual std::vector> GetReceivers() + const = 0; + + // Get all RtpTransceivers, created either through AddTransceiver, AddTrack or + // by a remote description applied with SetRemoteDescription. + // + // Note: This method is only available when Unified Plan is enabled (see + // RTCConfiguration). + virtual std::vector> + GetTransceivers() const = 0; + + // The legacy non-compliant GetStats() API. This correspond to the + // callback-based version of getStats() in JavaScript. The returned metrics + // are UNDOCUMENTED and many of them rely on implementation-specific details. + // The goal is to DELETE THIS VERSION but we can't today because it is heavily + // relied upon by third parties. See https://crbug.com/822696. + // + // This version is wired up into Chrome. Any stats implemented are + // automatically exposed to the Web Platform. 
This has BYPASSED the Chrome + // release processes for years and lead to cross-browser incompatibility + // issues and web application reliance on Chrome-only behavior. + // + // This API is in "maintenance mode", serious regressions should be fixed but + // adding new stats is highly discouraged. + // + // TODO(hbos): Deprecate and remove this when third parties have migrated to + // the spec-compliant GetStats() API. https://crbug.com/822696 + virtual bool GetStats(StatsObserver* observer, + MediaStreamTrackInterface* track, // Optional + StatsOutputLevel level) = 0; + // The spec-compliant GetStats() API. This correspond to the promise-based + // version of getStats() in JavaScript. Implementation status is described in + // api/stats/rtcstats_objects.h. For more details on stats, see spec: + // https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-getstats + // TODO(hbos): Takes shared ownership, use rtc::scoped_refptr<> instead. This + // requires stop overriding the current version in third party or making third + // party calls explicit to avoid ambiguity during switch. Make the future + // version abstract as soon as third party projects implement it. + virtual void GetStats(RTCStatsCollectorCallback* callback) = 0; + // Spec-compliant getStats() performing the stats selection algorithm with the + // sender. https://w3c.github.io/webrtc-pc/#dom-rtcrtpsender-getstats + virtual void GetStats( + rtc::scoped_refptr selector, + rtc::scoped_refptr callback) = 0; + // Spec-compliant getStats() performing the stats selection algorithm with the + // receiver. https://w3c.github.io/webrtc-pc/#dom-rtcrtpreceiver-getstats + virtual void GetStats( + rtc::scoped_refptr selector, + rtc::scoped_refptr callback) = 0; + // Clear cached stats in the RTCStatsCollector. + // Exposed for testing while waiting for automatic cache clear to work. 
+ // https://bugs.webrtc.org/8693 + virtual void ClearStatsCache() {} + + // Create a data channel with the provided config, or default config if none + // is provided. Note that an offer/answer negotiation is still necessary + // before the data channel can be used. + // + // Also, calling CreateDataChannel is the only way to get a data "m=" section + // in SDP, so it should be done before CreateOffer is called, if the + // application plans to use data channels. + virtual rtc::scoped_refptr CreateDataChannel( + const std::string& label, + const DataChannelInit* config) = 0; + + // NOTE: For the following 6 methods, it's only safe to dereference the + // SessionDescriptionInterface on signaling_thread() (for example, calling + // ToString). + + // Returns the more recently applied description; "pending" if it exists, and + // otherwise "current". See below. + virtual const SessionDescriptionInterface* local_description() const = 0; + virtual const SessionDescriptionInterface* remote_description() const = 0; + + // A "current" description the one currently negotiated from a complete + // offer/answer exchange. + virtual const SessionDescriptionInterface* current_local_description() + const = 0; + virtual const SessionDescriptionInterface* current_remote_description() + const = 0; + + // A "pending" description is one that's part of an incomplete offer/answer + // exchange (thus, either an offer or a pranswer). Once the offer/answer + // exchange is finished, the "pending" description will become "current". + virtual const SessionDescriptionInterface* pending_local_description() + const = 0; + virtual const SessionDescriptionInterface* pending_remote_description() + const = 0; + + // Tells the PeerConnection that ICE should be restarted. This triggers a need + // for negotiation and subsequent CreateOffer() calls will act as if + // RTCOfferAnswerOptions::ice_restart is true. 
+ // https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-restartice + // TODO(hbos): Remove default implementation when downstream projects + // implement this. + virtual void RestartIce() = 0; + + // Create a new offer. + // The CreateSessionDescriptionObserver callback will be called when done. + virtual void CreateOffer(CreateSessionDescriptionObserver* observer, + const RTCOfferAnswerOptions& options) = 0; + + // Create an answer to an offer. + // The CreateSessionDescriptionObserver callback will be called when done. + virtual void CreateAnswer(CreateSessionDescriptionObserver* observer, + const RTCOfferAnswerOptions& options) = 0; + + // Sets the local session description. + // + // According to spec, the local session description MUST be the same as was + // returned by CreateOffer() or CreateAnswer() or else the operation should + // fail. Our implementation however allows some amount of "SDP munging", but + // please note that this is HIGHLY DISCOURAGED. If you do not intent to munge + // SDP, the method below that doesn't take |desc| as an argument will create + // the offer or answer for you. + // + // The observer is invoked as soon as the operation completes, which could be + // before or after the SetLocalDescription() method has exited. + virtual void SetLocalDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer) {} + // Creates an offer or answer (depending on current signaling state) and sets + // it as the local session description. + // + // The observer is invoked as soon as the operation completes, which could be + // before or after the SetLocalDescription() method has exited. + virtual void SetLocalDescription( + rtc::scoped_refptr observer) {} + // Like SetLocalDescription() above, but the observer is invoked with a delay + // after the operation completes. 
This helps avoid recursive calls by the + // observer but also makes it possible for states to change in-between the + // operation completing and the observer getting called. This makes them racy + // for synchronizing peer connection states to the application. + // TODO(https://crbug.com/webrtc/11798): Delete these methods in favor of the + // ones taking SetLocalDescriptionObserverInterface as argument. + virtual void SetLocalDescription(SetSessionDescriptionObserver* observer, + SessionDescriptionInterface* desc) = 0; + virtual void SetLocalDescription(SetSessionDescriptionObserver* observer) {} + + // Sets the remote session description. + // + // (Unlike "SDP munging" before SetLocalDescription(), modifying a remote + // offer or answer is allowed by the spec.) + // + // The observer is invoked as soon as the operation completes, which could be + // before or after the SetRemoteDescription() method has exited. + virtual void SetRemoteDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer) = 0; + // Like SetRemoteDescription() above, but the observer is invoked with a delay + // after the operation completes. This helps avoid recursive calls by the + // observer but also makes it possible for states to change in-between the + // operation completing and the observer getting called. This makes them racy + // for synchronizing peer connection states to the application. + // TODO(https://crbug.com/webrtc/11798): Delete this method in favor of the + // ones taking SetRemoteDescriptionObserverInterface as argument. + virtual void SetRemoteDescription(SetSessionDescriptionObserver* observer, + SessionDescriptionInterface* desc) {} + + virtual PeerConnectionInterface::RTCConfiguration GetConfiguration() = 0; + + // Sets the PeerConnection's global configuration to |config|. 
+ // + // The members of |config| that may be changed are |type|, |servers|, + // |ice_candidate_pool_size| and |prune_turn_ports| (though the candidate + // pool size can't be changed after the first call to SetLocalDescription). + // Note that this means the BUNDLE and RTCP-multiplexing policies cannot be + // changed with this method. + // + // Any changes to STUN/TURN servers or ICE candidate policy will affect the + // next gathering phase, and cause the next call to createOffer to generate + // new ICE credentials, as described in JSEP. This also occurs when + // |prune_turn_ports| changes, for the same reasoning. + // + // If an error occurs, returns false and populates |error| if non-null: + // - INVALID_MODIFICATION if |config| contains a modified parameter other + // than one of the parameters listed above. + // - INVALID_RANGE if |ice_candidate_pool_size| is out of range. + // - SYNTAX_ERROR if parsing an ICE server URL failed. + // - INVALID_PARAMETER if a TURN server is missing |username| or |password|. + // - INTERNAL_ERROR if an unexpected error occurred. + // + // TODO(nisse): Make this pure virtual once all Chrome subclasses of + // PeerConnectionInterface implement it. + virtual RTCError SetConfiguration( + const PeerConnectionInterface::RTCConfiguration& config); + + // Provides a remote candidate to the ICE Agent. + // A copy of the |candidate| will be created and added to the remote + // description. So the caller of this method still has the ownership of the + // |candidate|. + // TODO(hbos): The spec mandates chaining this operation onto the operations + // chain; deprecate and remove this version in favor of the callback-based + // signature. + virtual bool AddIceCandidate(const IceCandidateInterface* candidate) = 0; + // TODO(hbos): Remove default implementation once implemented by downstream + // projects. 
+ virtual void AddIceCandidate(std::unique_ptr candidate, + std::function callback) {} + + // Removes a group of remote candidates from the ICE agent. Needed mainly for + // continual gathering, to avoid an ever-growing list of candidates as + // networks come and go. + virtual bool RemoveIceCandidates( + const std::vector& candidates) = 0; + + // SetBitrate limits the bandwidth allocated for all RTP streams sent by + // this PeerConnection. Other limitations might affect these limits and + // are respected (for example "b=AS" in SDP). + // + // Setting |current_bitrate_bps| will reset the current bitrate estimate + // to the provided value. + virtual RTCError SetBitrate(const BitrateSettings& bitrate) = 0; + + // Enable/disable playout of received audio streams. Enabled by default. Note + // that even if playout is enabled, streams will only be played out if the + // appropriate SDP is also applied. Setting |playout| to false will stop + // playout of the underlying audio device but starts a task which will poll + // for audio data every 10ms to ensure that audio processing happens and the + // audio statistics are updated. + // TODO(henrika): deprecate and remove this. + virtual void SetAudioPlayout(bool playout) {} + + // Enable/disable recording of transmitted audio streams. Enabled by default. + // Note that even if recording is enabled, streams will only be recorded if + // the appropriate SDP is also applied. + // TODO(henrika): deprecate and remove this. + virtual void SetAudioRecording(bool recording) {} + + // Looks up the DtlsTransport associated with a MID value. + // In the Javascript API, DtlsTransport is a property of a sender, but + // because the PeerConnection owns the DtlsTransport in this implementation, + // it is better to look them up on the PeerConnection. + virtual rtc::scoped_refptr LookupDtlsTransportByMid( + const std::string& mid) = 0; + + // Returns the SCTP transport, if any. 
+ virtual rtc::scoped_refptr GetSctpTransport() + const = 0; + + // Returns the current SignalingState. + virtual SignalingState signaling_state() = 0; + + // Returns an aggregate state of all ICE *and* DTLS transports. + // This is left in place to avoid breaking native clients who expect our old, + // nonstandard behavior. + // TODO(jonasolsson): deprecate and remove this. + virtual IceConnectionState ice_connection_state() = 0; + + // Returns an aggregated state of all ICE transports. + virtual IceConnectionState standardized_ice_connection_state() = 0; + + // Returns an aggregated state of all ICE and DTLS transports. + virtual PeerConnectionState peer_connection_state() = 0; + + virtual IceGatheringState ice_gathering_state() = 0; + + // Returns the current state of canTrickleIceCandidates per + // https://w3c.github.io/webrtc-pc/#attributes-1 + virtual absl::optional can_trickle_ice_candidates() { + // TODO(crbug.com/708484): Remove default implementation. + return absl::nullopt; + } + + // When a resource is overused, the PeerConnection will try to reduce the load + // on the sysem, for example by reducing the resolution or frame rate of + // encoded streams. The Resource API allows injecting platform-specific usage + // measurements. The conditions to trigger kOveruse or kUnderuse are up to the + // implementation. + // TODO(hbos): Make pure virtual when implemented by downstream projects. + virtual void AddAdaptationResource(rtc::scoped_refptr resource) {} + + // Start RtcEventLog using an existing output-sink. Takes ownership of + // |output| and passes it on to Call, which will take the ownership. If the + // operation fails the output will be closed and deallocated. The event log + // will send serialized events to the output object every |output_period_ms|. + // Applications using the event log should generally make their own trade-off + // regarding the output period. 
A long period is generally more efficient, + // with potential drawbacks being more bursty thread usage, and more events + // lost in case the application crashes. If the |output_period_ms| argument is + // omitted, webrtc selects a default deemed to be workable in most cases. + virtual bool StartRtcEventLog(std::unique_ptr output, + int64_t output_period_ms) = 0; + virtual bool StartRtcEventLog(std::unique_ptr output) = 0; + + // Stops logging the RtcEventLog. + virtual void StopRtcEventLog() = 0; + + // Terminates all media, closes the transports, and in general releases any + // resources used by the PeerConnection. This is an irreversible operation. + // + // Note that after this method completes, the PeerConnection will no longer + // use the PeerConnectionObserver interface passed in on construction, and + // thus the observer object can be safely destroyed. + virtual void Close() = 0; + + // The thread on which all PeerConnectionObserver callbacks will be invoked, + // as well as callbacks for other classes such as DataChannelObserver. + // + // Also the only thread on which it's safe to use SessionDescriptionInterface + // pointers. + // TODO(deadbeef): Make pure virtual when all subclasses implement it. + virtual rtc::Thread* signaling_thread() const { return nullptr; } + + protected: + // Dtor protected as objects shouldn't be deleted via this interface. + ~PeerConnectionInterface() override = default; +}; + +// PeerConnection callback interface, used for RTCPeerConnection events. +// Application should implement these methods. +class PeerConnectionObserver { + public: + virtual ~PeerConnectionObserver() = default; + + // Triggered when the SignalingState changed. + virtual void OnSignalingChange( + PeerConnectionInterface::SignalingState new_state) = 0; + + // Triggered when media is received on a new stream from remote peer. + virtual void OnAddStream(rtc::scoped_refptr stream) {} + + // Triggered when a remote peer closes a stream. 
+ virtual void OnRemoveStream(rtc::scoped_refptr stream) { + } + + // Triggered when a remote peer opens a data channel. + virtual void OnDataChannel( + rtc::scoped_refptr data_channel) = 0; + + // Triggered when renegotiation is needed. For example, an ICE restart + // has begun. + virtual void OnRenegotiationNeeded() = 0; + + // Called any time the legacy IceConnectionState changes. + // + // Note that our ICE states lag behind the standard slightly. The most + // notable differences include the fact that "failed" occurs after 15 + // seconds, not 30, and this actually represents a combination ICE + DTLS + // state, so it may be "failed" if DTLS fails while ICE succeeds. + // + // TODO(jonasolsson): deprecate and remove this. + virtual void OnIceConnectionChange( + PeerConnectionInterface::IceConnectionState new_state) {} + + // Called any time the standards-compliant IceConnectionState changes. + virtual void OnStandardizedIceConnectionChange( + PeerConnectionInterface::IceConnectionState new_state) {} + + // Called any time the PeerConnectionState changes. + virtual void OnConnectionChange( + PeerConnectionInterface::PeerConnectionState new_state) {} + + // Called any time the IceGatheringState changes. + virtual void OnIceGatheringChange( + PeerConnectionInterface::IceGatheringState new_state) = 0; + + // A new ICE candidate has been gathered. + virtual void OnIceCandidate(const IceCandidateInterface* candidate) = 0; + + // Gathering of an ICE candidate failed. + // See https://w3c.github.io/webrtc-pc/#event-icecandidateerror + // |host_candidate| is a stringified socket address. + virtual void OnIceCandidateError(const std::string& host_candidate, + const std::string& url, + int error_code, + const std::string& error_text) {} + + // Gathering of an ICE candidate failed. 
+ // See https://w3c.github.io/webrtc-pc/#event-icecandidateerror + virtual void OnIceCandidateError(const std::string& address, + int port, + const std::string& url, + int error_code, + const std::string& error_text) {} + + // Ice candidates have been removed. + // TODO(honghaiz): Make this a pure virtual method when all its subclasses + // implement it. + virtual void OnIceCandidatesRemoved( + const std::vector& candidates) {} + + // Called when the ICE connection receiving status changes. + virtual void OnIceConnectionReceivingChange(bool receiving) {} + + // Called when the selected candidate pair for the ICE connection changes. + virtual void OnIceSelectedCandidatePairChanged( + const cricket::CandidatePairChangeEvent& event) {} + + // This is called when a receiver and its track are created. + // TODO(zhihuang): Make this pure virtual when all subclasses implement it. + // Note: This is called with both Plan B and Unified Plan semantics. Unified + // Plan users should prefer OnTrack, OnAddTrack is only called as backwards + // compatibility (and is called in the exact same situations as OnTrack). + virtual void OnAddTrack( + rtc::scoped_refptr receiver, + const std::vector>& streams) {} + + // This is called when signaling indicates a transceiver will be receiving + // media from the remote endpoint. This is fired during a call to + // SetRemoteDescription. The receiving track can be accessed by: + // |transceiver->receiver()->track()| and its associated streams by + // |transceiver->receiver()->streams()|. + // Note: This will only be called if Unified Plan semantics are specified. + // This behavior is specified in section 2.2.8.2.5 of the "Set the + // RTCSessionDescription" algorithm: + // https://w3c.github.io/webrtc-pc/#set-description + virtual void OnTrack( + rtc::scoped_refptr transceiver) {} + + // Called when signaling indicates that media will no longer be received on a + // track. 
+ // With Plan B semantics, the given receiver will have been removed from the + // PeerConnection and the track muted. + // With Unified Plan semantics, the receiver will remain but the transceiver + // will have changed direction to either sendonly or inactive. + // https://w3c.github.io/webrtc-pc/#process-remote-track-removal + // TODO(hbos,deadbeef): Make pure virtual when all subclasses implement it. + virtual void OnRemoveTrack( + rtc::scoped_refptr receiver) {} + + // Called when an interesting usage is detected by WebRTC. + // An appropriate action is to add information about the context of the + // PeerConnection and write the event to some kind of "interesting events" + // log function. + // The heuristics for defining what constitutes "interesting" are + // implementation-defined. + virtual void OnInterestingUsage(int usage_pattern) {} +}; + +// PeerConnectionDependencies holds all of PeerConnections dependencies. +// A dependency is distinct from a configuration as it defines significant +// executable code that can be provided by a user of the API. +// +// All new dependencies should be added as a unique_ptr to allow the +// PeerConnection object to be the definitive owner of the dependencies +// lifetime making injection safer. +struct RTC_EXPORT PeerConnectionDependencies final { + explicit PeerConnectionDependencies(PeerConnectionObserver* observer_in); + // This object is not copyable or assignable. + PeerConnectionDependencies(const PeerConnectionDependencies&) = delete; + PeerConnectionDependencies& operator=(const PeerConnectionDependencies&) = + delete; + // This object is only moveable. + PeerConnectionDependencies(PeerConnectionDependencies&&); + PeerConnectionDependencies& operator=(PeerConnectionDependencies&&) = default; + ~PeerConnectionDependencies(); + // Mandatory dependencies + PeerConnectionObserver* observer = nullptr; + // Optional dependencies + // TODO(bugs.webrtc.org/7447): remove port allocator once downstream is + // updated. 
For now, you can only set one of allocator and + // packet_socket_factory, not both. + std::unique_ptr allocator; + std::unique_ptr packet_socket_factory; + std::unique_ptr async_resolver_factory; + std::unique_ptr ice_transport_factory; + std::unique_ptr cert_generator; + std::unique_ptr tls_cert_verifier; + std::unique_ptr + video_bitrate_allocator_factory; +}; + +// PeerConnectionFactoryDependencies holds all of the PeerConnectionFactory +// dependencies. All new dependencies should be added here instead of +// overloading the function. This simplifies dependency injection and makes it +// clear which are mandatory and optional. If possible please allow the peer +// connection factory to take ownership of the dependency by adding a unique_ptr +// to this structure. +struct RTC_EXPORT PeerConnectionFactoryDependencies final { + PeerConnectionFactoryDependencies(); + // This object is not copyable or assignable. + PeerConnectionFactoryDependencies(const PeerConnectionFactoryDependencies&) = + delete; + PeerConnectionFactoryDependencies& operator=( + const PeerConnectionFactoryDependencies&) = delete; + // This object is only moveable. + PeerConnectionFactoryDependencies(PeerConnectionFactoryDependencies&&); + PeerConnectionFactoryDependencies& operator=( + PeerConnectionFactoryDependencies&&) = default; + ~PeerConnectionFactoryDependencies(); + + // Optional dependencies + rtc::Thread* network_thread = nullptr; + rtc::Thread* worker_thread = nullptr; + rtc::Thread* signaling_thread = nullptr; + std::unique_ptr task_queue_factory; + std::unique_ptr media_engine; + std::unique_ptr call_factory; + std::unique_ptr event_log_factory; + std::unique_ptr fec_controller_factory; + std::unique_ptr + network_state_predictor_factory; + std::unique_ptr network_controller_factory; + // This will only be used if CreatePeerConnection is called without a + // |port_allocator|, causing the default allocator and network manager to be + // used. 
+ std::unique_ptr network_monitor_factory; + std::unique_ptr neteq_factory; + std::unique_ptr trials; +}; + +// PeerConnectionFactoryInterface is the factory interface used for creating +// PeerConnection, MediaStream and MediaStreamTrack objects. +// +// The simplest method for obtaiing one, CreatePeerConnectionFactory will +// create the required libjingle threads, socket and network manager factory +// classes for networking if none are provided, though it requires that the +// application runs a message loop on the thread that called the method (see +// explanation below) +// +// If an application decides to provide its own threads and/or implementation +// of networking classes, it should use the alternate +// CreatePeerConnectionFactory method which accepts threads as input, and use +// the CreatePeerConnection version that takes a PortAllocator as an argument. +class RTC_EXPORT PeerConnectionFactoryInterface + : public rtc::RefCountInterface { + public: + class Options { + public: + Options() {} + + // If set to true, created PeerConnections won't enforce any SRTP + // requirement, allowing unsecured media. Should only be used for + // testing/debugging. + bool disable_encryption = false; + + // Deprecated. The only effect of setting this to true is that + // CreateDataChannel will fail, which is not that useful. + bool disable_sctp_data_channels = false; + + // If set to true, any platform-supported network monitoring capability + // won't be used, and instead networks will only be updated via polling. + // + // This only has an effect if a PeerConnection is created with the default + // PortAllocator implementation. + bool disable_network_monitor = false; + + // Sets the network types to ignore. For instance, calling this with + // ADAPTER_TYPE_ETHERNET | ADAPTER_TYPE_LOOPBACK will ignore Ethernet and + // loopback interfaces. + int network_ignore_mask = rtc::kDefaultNetworkIgnoreMask; + + // Sets the maximum supported protocol version. 
The highest version + // supported by both ends will be used for the connection, i.e. if one + // party supports DTLS 1.0 and the other DTLS 1.2, DTLS 1.0 will be used. + rtc::SSLProtocolVersion ssl_max_version = rtc::SSL_PROTOCOL_DTLS_12; + + // Sets crypto related options, e.g. enabled cipher suites. + CryptoOptions crypto_options = CryptoOptions::NoGcm(); + }; + + // Set the options to be used for subsequently created PeerConnections. + virtual void SetOptions(const Options& options) = 0; + + // The preferred way to create a new peer connection. Simply provide the + // configuration and a PeerConnectionDependencies structure. + // TODO(benwright): Make pure virtual once downstream mock PC factory classes + // are updated. + virtual rtc::scoped_refptr CreatePeerConnection( + const PeerConnectionInterface::RTCConfiguration& configuration, + PeerConnectionDependencies dependencies); + + // Deprecated; |allocator| and |cert_generator| may be null, in which case + // default implementations will be used. + // + // |observer| must not be null. + // + // Note that this method does not take ownership of |observer|; it's the + // responsibility of the caller to delete it. It can be safely deleted after + // Close has been called on the returned PeerConnection, which ensures no + // more observer callbacks will be invoked. + virtual rtc::scoped_refptr CreatePeerConnection( + const PeerConnectionInterface::RTCConfiguration& configuration, + std::unique_ptr allocator, + std::unique_ptr cert_generator, + PeerConnectionObserver* observer); + + // Returns the capabilities of an RTP sender of type |kind|. + // If for some reason you pass in MEDIA_TYPE_DATA, returns an empty structure. + // TODO(orphis): Make pure virtual when all subclasses implement it. + virtual RtpCapabilities GetRtpSenderCapabilities( + cricket::MediaType kind) const; + + // Returns the capabilities of an RTP receiver of type |kind|. 
+ // If for some reason you pass in MEDIA_TYPE_DATA, returns an empty structure. + // TODO(orphis): Make pure virtual when all subclasses implement it. + virtual RtpCapabilities GetRtpReceiverCapabilities( + cricket::MediaType kind) const; + + virtual rtc::scoped_refptr CreateLocalMediaStream( + const std::string& stream_id) = 0; + + // Creates an AudioSourceInterface. + // |options| decides audio processing settings. + virtual rtc::scoped_refptr CreateAudioSource( + const cricket::AudioOptions& options) = 0; + + // Creates a new local VideoTrack. The same |source| can be used in several + // tracks. + virtual rtc::scoped_refptr CreateVideoTrack( + const std::string& label, + VideoTrackSourceInterface* source) = 0; + + // Creates an new AudioTrack. At the moment |source| can be null. + virtual rtc::scoped_refptr CreateAudioTrack( + const std::string& label, + AudioSourceInterface* source) = 0; + + // Starts AEC dump using existing file. Takes ownership of |file| and passes + // it on to VoiceEngine (via other objects) immediately, which will take + // the ownerhip. If the operation fails, the file will be closed. + // A maximum file size in bytes can be specified. When the file size limit is + // reached, logging is stopped automatically. If max_size_bytes is set to a + // value <= 0, no limit will be used, and logging will continue until the + // StopAecDump function is called. + // TODO(webrtc:6463): Delete default implementation when downstream mocks + // classes are updated. + virtual bool StartAecDump(FILE* file, int64_t max_size_bytes) { + return false; + } + + // Stops logging the AEC dump. + virtual void StopAecDump() = 0; + + protected: + // Dtor and ctor protected as objects shouldn't be created or deleted via + // this interface. 
+ PeerConnectionFactoryInterface() {} + ~PeerConnectionFactoryInterface() override = default; +}; + +// CreateModularPeerConnectionFactory is implemented in the "peerconnection" +// build target, which doesn't pull in the implementations of every module +// webrtc may use. +// +// If an application knows it will only require certain modules, it can reduce +// webrtc's impact on its binary size by depending only on the "peerconnection" +// target and the modules the application requires, using +// CreateModularPeerConnectionFactory. For example, if an application +// only uses WebRTC for audio, it can pass in null pointers for the +// video-specific interfaces, and omit the corresponding modules from its +// build. +// +// If |network_thread| or |worker_thread| are null, the PeerConnectionFactory +// will create the necessary thread internally. If |signaling_thread| is null, +// the PeerConnectionFactory will use the thread on which this method is called +// as the signaling thread, wrapping it in an rtc::Thread object if needed. +RTC_EXPORT rtc::scoped_refptr +CreateModularPeerConnectionFactory( + PeerConnectionFactoryDependencies dependencies); + +} // namespace webrtc + +#endif // API_PEER_CONNECTION_INTERFACE_H_ diff --git a/api/peer_connection_proxy.h b/api/peer_connection_proxy.h new file mode 100644 index 0000000..0cc3b3b --- /dev/null +++ b/api/peer_connection_proxy.h @@ -0,0 +1,155 @@ +/* + * Copyright 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_PEER_CONNECTION_PROXY_H_ +#define API_PEER_CONNECTION_PROXY_H_ + +#include +#include +#include + +#include "api/peer_connection_interface.h" +#include "api/proxy.h" + +namespace webrtc { + +// TODO(deadbeef): Move this to .cc file and out of api/. What threads methods +// are called on is an implementation detail. +BEGIN_SIGNALING_PROXY_MAP(PeerConnection) +PROXY_SIGNALING_THREAD_DESTRUCTOR() +PROXY_METHOD0(rtc::scoped_refptr, local_streams) +PROXY_METHOD0(rtc::scoped_refptr, remote_streams) +PROXY_METHOD1(bool, AddStream, MediaStreamInterface*) +PROXY_METHOD1(void, RemoveStream, MediaStreamInterface*) +PROXY_METHOD2(RTCErrorOr>, + AddTrack, + rtc::scoped_refptr, + const std::vector&) +PROXY_METHOD1(bool, RemoveTrack, RtpSenderInterface*) +PROXY_METHOD1(RTCError, RemoveTrackNew, rtc::scoped_refptr) +PROXY_METHOD1(RTCErrorOr>, + AddTransceiver, + rtc::scoped_refptr) +PROXY_METHOD2(RTCErrorOr>, + AddTransceiver, + rtc::scoped_refptr, + const RtpTransceiverInit&) +PROXY_METHOD1(RTCErrorOr>, + AddTransceiver, + cricket::MediaType) +PROXY_METHOD2(RTCErrorOr>, + AddTransceiver, + cricket::MediaType, + const RtpTransceiverInit&) +PROXY_METHOD2(rtc::scoped_refptr, + CreateSender, + const std::string&, + const std::string&) +PROXY_CONSTMETHOD0(std::vector>, + GetSenders) +PROXY_CONSTMETHOD0(std::vector>, + GetReceivers) +PROXY_CONSTMETHOD0(std::vector>, + GetTransceivers) +PROXY_METHOD3(bool, + GetStats, + StatsObserver*, + MediaStreamTrackInterface*, + StatsOutputLevel) +PROXY_METHOD1(void, GetStats, RTCStatsCollectorCallback*) +PROXY_METHOD2(void, + GetStats, + rtc::scoped_refptr, + rtc::scoped_refptr) +PROXY_METHOD2(void, + GetStats, + rtc::scoped_refptr, + rtc::scoped_refptr) +PROXY_METHOD0(void, ClearStatsCache) +PROXY_METHOD2(rtc::scoped_refptr, + CreateDataChannel, + const std::string&, + const DataChannelInit*) +PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, local_description) +PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, 
remote_description) +PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, + current_local_description) +PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, + current_remote_description) +PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, + pending_local_description) +PROXY_CONSTMETHOD0(const SessionDescriptionInterface*, + pending_remote_description) +PROXY_METHOD0(void, RestartIce) +PROXY_METHOD2(void, + CreateOffer, + CreateSessionDescriptionObserver*, + const RTCOfferAnswerOptions&) +PROXY_METHOD2(void, + CreateAnswer, + CreateSessionDescriptionObserver*, + const RTCOfferAnswerOptions&) +PROXY_METHOD2(void, + SetLocalDescription, + std::unique_ptr, + rtc::scoped_refptr) +PROXY_METHOD1(void, + SetLocalDescription, + rtc::scoped_refptr) +PROXY_METHOD2(void, + SetLocalDescription, + SetSessionDescriptionObserver*, + SessionDescriptionInterface*) +PROXY_METHOD1(void, SetLocalDescription, SetSessionDescriptionObserver*) +PROXY_METHOD2(void, + SetRemoteDescription, + std::unique_ptr, + rtc::scoped_refptr) +PROXY_METHOD2(void, + SetRemoteDescription, + SetSessionDescriptionObserver*, + SessionDescriptionInterface*) +PROXY_METHOD0(PeerConnectionInterface::RTCConfiguration, GetConfiguration) +PROXY_METHOD1(RTCError, + SetConfiguration, + const PeerConnectionInterface::RTCConfiguration&) +PROXY_METHOD1(bool, AddIceCandidate, const IceCandidateInterface*) +PROXY_METHOD2(void, + AddIceCandidate, + std::unique_ptr, + std::function) +PROXY_METHOD1(bool, RemoveIceCandidates, const std::vector&) +PROXY_METHOD1(RTCError, SetBitrate, const BitrateSettings&) +PROXY_METHOD1(void, SetAudioPlayout, bool) +PROXY_METHOD1(void, SetAudioRecording, bool) +PROXY_METHOD1(rtc::scoped_refptr, + LookupDtlsTransportByMid, + const std::string&) +PROXY_CONSTMETHOD0(rtc::scoped_refptr, GetSctpTransport) +PROXY_METHOD0(SignalingState, signaling_state) +PROXY_METHOD0(IceConnectionState, ice_connection_state) +PROXY_METHOD0(IceConnectionState, standardized_ice_connection_state) 
+PROXY_METHOD0(PeerConnectionState, peer_connection_state) +PROXY_METHOD0(IceGatheringState, ice_gathering_state) +PROXY_METHOD0(absl::optional, can_trickle_ice_candidates) +PROXY_METHOD1(void, AddAdaptationResource, rtc::scoped_refptr) +PROXY_METHOD2(bool, + StartRtcEventLog, + std::unique_ptr, + int64_t) +PROXY_METHOD1(bool, StartRtcEventLog, std::unique_ptr) +PROXY_METHOD0(void, StopRtcEventLog) +PROXY_METHOD0(void, Close) +BYPASS_PROXY_CONSTMETHOD0(rtc::Thread*, signaling_thread) +END_PROXY_MAP() + +} // namespace webrtc + +#endif // API_PEER_CONNECTION_PROXY_H_ diff --git a/api/priority.h b/api/priority.h new file mode 100644 index 0000000..4953e45 --- /dev/null +++ b/api/priority.h @@ -0,0 +1,26 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_PRIORITY_H_ +#define API_PRIORITY_H_ + +namespace webrtc { + +// GENERATED_JAVA_ENUM_PACKAGE: org.webrtc +enum class Priority { + kVeryLow, + kLow, + kMedium, + kHigh, +}; + +} // namespace webrtc + +#endif // API_PRIORITY_H_ diff --git a/api/proxy.cc b/api/proxy.cc new file mode 100644 index 0000000..e668285 --- /dev/null +++ b/api/proxy.cc @@ -0,0 +1,37 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/proxy.h" + +namespace webrtc { +namespace internal { + +SynchronousMethodCall::SynchronousMethodCall(rtc::MessageHandler* proxy) + : proxy_(proxy) {} + +SynchronousMethodCall::~SynchronousMethodCall() = default; + +void SynchronousMethodCall::Invoke(const rtc::Location& posted_from, + rtc::Thread* t) { + if (t->IsCurrent()) { + proxy_->OnMessage(nullptr); + } else { + t->Post(posted_from, this, 0); + e_.Wait(rtc::Event::kForever); + } +} + +void SynchronousMethodCall::OnMessage(rtc::Message*) { + proxy_->OnMessage(nullptr); + e_.Set(); +} + +} // namespace internal +} // namespace webrtc diff --git a/api/proxy.h b/api/proxy.h new file mode 100644 index 0000000..0e5d622 --- /dev/null +++ b/api/proxy.h @@ -0,0 +1,414 @@ +/* + * Copyright 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file contains Macros for creating proxies for webrtc MediaStream and +// PeerConnection classes. +// TODO(deadbeef): Move this to pc/; this is part of the implementation. + +// +// Example usage: +// +// class TestInterface : public rtc::RefCountInterface { +// public: +// std::string FooA() = 0; +// std::string FooB(bool arg1) const = 0; +// std::string FooC(bool arg1) = 0; +// }; +// +// Note that return types can not be a const reference. +// +// class Test : public TestInterface { +// ... implementation of the interface. 
+// }; +// +// BEGIN_PROXY_MAP(Test) +// PROXY_SIGNALING_THREAD_DESTRUCTOR() +// PROXY_METHOD0(std::string, FooA) +// PROXY_CONSTMETHOD1(std::string, FooB, arg1) +// PROXY_WORKER_METHOD1(std::string, FooC, arg1) +// END_PROXY_MAP() +// +// Where the destructor and first two methods are invoked on the signaling +// thread, and the third is invoked on the worker thread. +// +// The proxy can be created using +// +// TestProxy::Create(Thread* signaling_thread, Thread* worker_thread, +// TestInterface*). +// +// The variant defined with BEGIN_SIGNALING_PROXY_MAP is unaware of +// the worker thread, and invokes all methods on the signaling thread. +// +// The variant defined with BEGIN_OWNED_PROXY_MAP does not use +// refcounting, and instead just takes ownership of the object being proxied. + +#ifndef API_PROXY_H_ +#define API_PROXY_H_ + +#include +#include +#include +#include +#include + +#include "api/scoped_refptr.h" +#include "rtc_base/event.h" +#include "rtc_base/message_handler.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/system/rtc_export.h" +#include "rtc_base/thread.h" + +namespace rtc { +class Location; +} + +namespace webrtc { + +template +class ReturnType { + public: + template + void Invoke(C* c, M m, Args&&... args) { + r_ = (c->*m)(std::forward(args)...); + } + + R moved_result() { return std::move(r_); } + + private: + R r_; +}; + +template <> +class ReturnType { + public: + template + void Invoke(C* c, M m, Args&&... 
args) { + (c->*m)(std::forward(args)...); + } + + void moved_result() {} +}; + +namespace internal { + +class RTC_EXPORT SynchronousMethodCall : public rtc::MessageData, + public rtc::MessageHandler { + public: + explicit SynchronousMethodCall(rtc::MessageHandler* proxy); + ~SynchronousMethodCall() override; + + void Invoke(const rtc::Location& posted_from, rtc::Thread* t); + + private: + void OnMessage(rtc::Message*) override; + + rtc::Event e_; + rtc::MessageHandler* proxy_; +}; + +} // namespace internal + +template +class MethodCall : public rtc::Message, public rtc::MessageHandler { + public: + typedef R (C::*Method)(Args...); + MethodCall(C* c, Method m, Args&&... args) + : c_(c), + m_(m), + args_(std::forward_as_tuple(std::forward(args)...)) {} + + R Marshal(const rtc::Location& posted_from, rtc::Thread* t) { + internal::SynchronousMethodCall(this).Invoke(posted_from, t); + return r_.moved_result(); + } + + private: + void OnMessage(rtc::Message*) { Invoke(std::index_sequence_for()); } + + template + void Invoke(std::index_sequence) { + r_.Invoke(c_, m_, std::move(std::get(args_))...); + } + + C* c_; + Method m_; + ReturnType r_; + std::tuple args_; +}; + +template +class ConstMethodCall : public rtc::Message, public rtc::MessageHandler { + public: + typedef R (C::*Method)(Args...) const; + ConstMethodCall(const C* c, Method m, Args&&... args) + : c_(c), + m_(m), + args_(std::forward_as_tuple(std::forward(args)...)) {} + + R Marshal(const rtc::Location& posted_from, rtc::Thread* t) { + internal::SynchronousMethodCall(this).Invoke(posted_from, t); + return r_.moved_result(); + } + + private: + void OnMessage(rtc::Message*) { Invoke(std::index_sequence_for()); } + + template + void Invoke(std::index_sequence) { + r_.Invoke(c_, m_, std::move(std::get(args_))...); + } + + const C* c_; + Method m_; + ReturnType r_; + std::tuple args_; +}; + +// Helper macros to reduce code duplication. 
+#define PROXY_MAP_BOILERPLATE(c) \ + template \ + class c##ProxyWithInternal; \ + typedef c##ProxyWithInternal c##Proxy; \ + template \ + class c##ProxyWithInternal : public c##Interface { \ + protected: \ + typedef c##Interface C; \ + \ + public: \ + const INTERNAL_CLASS* internal() const { return c_; } \ + INTERNAL_CLASS* internal() { return c_; } + +// clang-format off +// clang-format would put the semicolon alone, +// leading to a presubmit error (cpplint.py) +#define END_PROXY_MAP() \ + }; +// clang-format on + +#define SIGNALING_PROXY_MAP_BOILERPLATE(c) \ + protected: \ + c##ProxyWithInternal(rtc::Thread* signaling_thread, INTERNAL_CLASS* c) \ + : signaling_thread_(signaling_thread), c_(c) {} \ + \ + private: \ + mutable rtc::Thread* signaling_thread_; + +#define WORKER_PROXY_MAP_BOILERPLATE(c) \ + protected: \ + c##ProxyWithInternal(rtc::Thread* signaling_thread, \ + rtc::Thread* worker_thread, INTERNAL_CLASS* c) \ + : signaling_thread_(signaling_thread), \ + worker_thread_(worker_thread), \ + c_(c) {} \ + \ + private: \ + mutable rtc::Thread* signaling_thread_; \ + mutable rtc::Thread* worker_thread_; + +// Note that the destructor is protected so that the proxy can only be +// destroyed via RefCountInterface. +#define REFCOUNTED_PROXY_MAP_BOILERPLATE(c) \ + protected: \ + ~c##ProxyWithInternal() { \ + MethodCall call( \ + this, &c##ProxyWithInternal::DestroyInternal); \ + call.Marshal(RTC_FROM_HERE, destructor_thread()); \ + } \ + \ + private: \ + void DestroyInternal() { c_ = nullptr; } \ + rtc::scoped_refptr c_; + +// Note: This doesn't use a unique_ptr, because it intends to handle a corner +// case where an object's deletion triggers a callback that calls back into +// this proxy object. If relying on a unique_ptr to delete the object, its +// inner pointer would be set to null before this reentrant callback would have +// a chance to run, resulting in a segfault. 
+#define OWNED_PROXY_MAP_BOILERPLATE(c) \ + public: \ + ~c##ProxyWithInternal() { \ + MethodCall call( \ + this, &c##ProxyWithInternal::DestroyInternal); \ + call.Marshal(RTC_FROM_HERE, destructor_thread()); \ + } \ + \ + private: \ + void DestroyInternal() { delete c_; } \ + INTERNAL_CLASS* c_; + +#define BEGIN_SIGNALING_PROXY_MAP(c) \ + PROXY_MAP_BOILERPLATE(c) \ + SIGNALING_PROXY_MAP_BOILERPLATE(c) \ + REFCOUNTED_PROXY_MAP_BOILERPLATE(c) \ + public: \ + static rtc::scoped_refptr Create( \ + rtc::Thread* signaling_thread, INTERNAL_CLASS* c) { \ + return new rtc::RefCountedObject(signaling_thread, \ + c); \ + } + +#define BEGIN_PROXY_MAP(c) \ + PROXY_MAP_BOILERPLATE(c) \ + WORKER_PROXY_MAP_BOILERPLATE(c) \ + REFCOUNTED_PROXY_MAP_BOILERPLATE(c) \ + public: \ + static rtc::scoped_refptr Create( \ + rtc::Thread* signaling_thread, rtc::Thread* worker_thread, \ + INTERNAL_CLASS* c) { \ + return new rtc::RefCountedObject(signaling_thread, \ + worker_thread, c); \ + } + +#define BEGIN_OWNED_PROXY_MAP(c) \ + PROXY_MAP_BOILERPLATE(c) \ + WORKER_PROXY_MAP_BOILERPLATE(c) \ + OWNED_PROXY_MAP_BOILERPLATE(c) \ + public: \ + static std::unique_ptr Create( \ + rtc::Thread* signaling_thread, rtc::Thread* worker_thread, \ + std::unique_ptr c) { \ + return std::unique_ptr(new c##ProxyWithInternal( \ + signaling_thread, worker_thread, c.release())); \ + } + +#define PROXY_SIGNALING_THREAD_DESTRUCTOR() \ + private: \ + rtc::Thread* destructor_thread() const { return signaling_thread_; } \ + \ + public: // NOLINTNEXTLINE + +#define PROXY_WORKER_THREAD_DESTRUCTOR() \ + private: \ + rtc::Thread* destructor_thread() const { return worker_thread_; } \ + \ + public: // NOLINTNEXTLINE + +#define PROXY_METHOD0(r, method) \ + r method() override { \ + MethodCall call(c_, &C::method); \ + return call.Marshal(RTC_FROM_HERE, signaling_thread_); \ + } + +#define PROXY_CONSTMETHOD0(r, method) \ + r method() const override { \ + ConstMethodCall call(c_, &C::method); \ + return 
call.Marshal(RTC_FROM_HERE, signaling_thread_); \ + } + +#define PROXY_METHOD1(r, method, t1) \ + r method(t1 a1) override { \ + MethodCall call(c_, &C::method, std::move(a1)); \ + return call.Marshal(RTC_FROM_HERE, signaling_thread_); \ + } + +#define PROXY_CONSTMETHOD1(r, method, t1) \ + r method(t1 a1) const override { \ + ConstMethodCall call(c_, &C::method, std::move(a1)); \ + return call.Marshal(RTC_FROM_HERE, signaling_thread_); \ + } + +#define PROXY_METHOD2(r, method, t1, t2) \ + r method(t1 a1, t2 a2) override { \ + MethodCall call(c_, &C::method, std::move(a1), \ + std::move(a2)); \ + return call.Marshal(RTC_FROM_HERE, signaling_thread_); \ + } + +#define PROXY_METHOD3(r, method, t1, t2, t3) \ + r method(t1 a1, t2 a2, t3 a3) override { \ + MethodCall call(c_, &C::method, std::move(a1), \ + std::move(a2), std::move(a3)); \ + return call.Marshal(RTC_FROM_HERE, signaling_thread_); \ + } + +#define PROXY_METHOD4(r, method, t1, t2, t3, t4) \ + r method(t1 a1, t2 a2, t3 a3, t4 a4) override { \ + MethodCall call(c_, &C::method, std::move(a1), \ + std::move(a2), std::move(a3), \ + std::move(a4)); \ + return call.Marshal(RTC_FROM_HERE, signaling_thread_); \ + } + +#define PROXY_METHOD5(r, method, t1, t2, t3, t4, t5) \ + r method(t1 a1, t2 a2, t3 a3, t4 a4, t5 a5) override { \ + MethodCall call(c_, &C::method, std::move(a1), \ + std::move(a2), std::move(a3), \ + std::move(a4), std::move(a5)); \ + return call.Marshal(RTC_FROM_HERE, signaling_thread_); \ + } + +// Define methods which should be invoked on the worker thread. 
+#define PROXY_WORKER_METHOD0(r, method) \ + r method() override { \ + MethodCall call(c_, &C::method); \ + return call.Marshal(RTC_FROM_HERE, worker_thread_); \ + } + +#define PROXY_WORKER_CONSTMETHOD0(r, method) \ + r method() const override { \ + ConstMethodCall call(c_, &C::method); \ + return call.Marshal(RTC_FROM_HERE, worker_thread_); \ + } + +#define PROXY_WORKER_METHOD1(r, method, t1) \ + r method(t1 a1) override { \ + MethodCall call(c_, &C::method, std::move(a1)); \ + return call.Marshal(RTC_FROM_HERE, worker_thread_); \ + } + +#define PROXY_WORKER_CONSTMETHOD1(r, method, t1) \ + r method(t1 a1) const override { \ + ConstMethodCall call(c_, &C::method, std::move(a1)); \ + return call.Marshal(RTC_FROM_HERE, worker_thread_); \ + } + +#define PROXY_WORKER_METHOD2(r, method, t1, t2) \ + r method(t1 a1, t2 a2) override { \ + MethodCall call(c_, &C::method, std::move(a1), \ + std::move(a2)); \ + return call.Marshal(RTC_FROM_HERE, worker_thread_); \ + } + +#define PROXY_WORKER_CONSTMETHOD2(r, method, t1, t2) \ + r method(t1 a1, t2 a2) const override { \ + ConstMethodCall call(c_, &C::method, std::move(a1), \ + std::move(a2)); \ + return call.Marshal(RTC_FROM_HERE, worker_thread_); \ + } + +#define PROXY_WORKER_METHOD3(r, method, t1, t2, t3) \ + r method(t1 a1, t2 a2, t3 a3) override { \ + MethodCall call(c_, &C::method, std::move(a1), \ + std::move(a2), std::move(a3)); \ + return call.Marshal(RTC_FROM_HERE, worker_thread_); \ + } + +#define PROXY_WORKER_CONSTMETHOD3(r, method, t1, t2) \ + r method(t1 a1, t2 a2, t3 a3) const override { \ + ConstMethodCall call(c_, &C::method, std::move(a1), \ + std::move(a2), std::move(a3)); \ + return call.Marshal(RTC_FROM_HERE, worker_thread_); \ + } + +// For use when returning purely const state (set during construction). +// Use with caution. This method should only be used when the return value will +// always be the same. 
+#define BYPASS_PROXY_CONSTMETHOD0(r, method) \ + r method() const override { \ + static_assert( \ + std::is_same::value || !std::is_pointer::value, \ + "Type is a pointer"); \ + static_assert(!std::is_reference::value, "Type is a reference"); \ + return c_->method(); \ + } + +} // namespace webrtc + +#endif // API_PROXY_H_ diff --git a/api/ref_counted_base.h b/api/ref_counted_base.h new file mode 100644 index 0000000..a1761db --- /dev/null +++ b/api/ref_counted_base.h @@ -0,0 +1,43 @@ +/* + * Copyright 2017 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef API_REF_COUNTED_BASE_H_ +#define API_REF_COUNTED_BASE_H_ + +#include "rtc_base/constructor_magic.h" +#include "rtc_base/ref_count.h" +#include "rtc_base/ref_counter.h" + +namespace rtc { + +class RefCountedBase { + public: + RefCountedBase() = default; + + void AddRef() const { ref_count_.IncRef(); } + RefCountReleaseStatus Release() const { + const auto status = ref_count_.DecRef(); + if (status == RefCountReleaseStatus::kDroppedLastRef) { + delete this; + } + return status; + } + + protected: + virtual ~RefCountedBase() = default; + + private: + mutable webrtc::webrtc_impl::RefCounter ref_count_{0}; + + RTC_DISALLOW_COPY_AND_ASSIGN(RefCountedBase); +}; + +} // namespace rtc + +#endif // API_REF_COUNTED_BASE_H_ diff --git a/api/rtc_error.cc b/api/rtc_error.cc new file mode 100644 index 0000000..4d3033b --- /dev/null +++ b/api/rtc_error.cc @@ -0,0 +1,80 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/rtc_error.h" + +#include "rtc_base/arraysize.h" + +namespace { + +const char* kRTCErrorTypeNames[] = { + "NONE", + "UNSUPPORTED_OPERATION", + "UNSUPPORTED_PARAMETER", + "INVALID_PARAMETER", + "INVALID_RANGE", + "SYNTAX_ERROR", + "INVALID_STATE", + "INVALID_MODIFICATION", + "NETWORK_ERROR", + "RESOURCE_EXHAUSTED", + "INTERNAL_ERROR", + "OPERATION_ERROR_WITH_DATA", +}; +static_assert( + static_cast(webrtc::RTCErrorType::OPERATION_ERROR_WITH_DATA) == + (arraysize(kRTCErrorTypeNames) - 1), + "kRTCErrorTypeNames must have as many strings as RTCErrorType " + "has values."); + +const char* kRTCErrorDetailTypeNames[] = { + "NONE", + "DATA_CHANNEL_FAILURE", + "DTLS_FAILURE", + "FINGERPRINT_FAILURE", + "SCTP_FAILURE", + "SDP_SYNTAX_ERROR", + "HARDWARE_ENCODER_NOT_AVAILABLE", + "HARDWARE_ENCODER_ERROR", +}; +static_assert( + static_cast(webrtc::RTCErrorDetailType::HARDWARE_ENCODER_ERROR) == + (arraysize(kRTCErrorDetailTypeNames) - 1), + "kRTCErrorDetailTypeNames must have as many strings as " + "RTCErrorDetailType has values."); + +} // namespace + +namespace webrtc { + +// static +RTCError RTCError::OK() { + return RTCError(); +} + +const char* RTCError::message() const { + return message_.c_str(); +} + +void RTCError::set_message(std::string message) { + message_ = std::move(message); +} + +const char* ToString(RTCErrorType error) { + int index = static_cast(error); + return kRTCErrorTypeNames[index]; +} + +const char* ToString(RTCErrorDetailType error) { + int index = static_cast(error); + return kRTCErrorDetailTypeNames[index]; +} + +} // namespace webrtc diff --git a/api/rtc_error.h b/api/rtc_error.h new file mode 100644 index 0000000..b8cb7f0 --- /dev/null +++ b/api/rtc_error.h @@ -0,0 +1,331 @@ +/* + * Copyright 2017 The WebRTC project authors. 
All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_RTC_ERROR_H_ +#define API_RTC_ERROR_H_ + +#ifdef UNIT_TEST +#include +#endif // UNIT_TEST +#include +#include // For std::move. + +#include "absl/types/optional.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// Enumeration to represent distinct classes of errors that an application +// may wish to act upon differently. These roughly map to DOMExceptions or +// RTCError "errorDetailEnum" values in the web API, as described in the +// comments below. +enum class RTCErrorType { + // No error. + NONE, + + // An operation is valid, but currently unsupported. + // Maps to OperationError DOMException. + UNSUPPORTED_OPERATION, + + // A supplied parameter is valid, but currently unsupported. + // Maps to OperationError DOMException. + UNSUPPORTED_PARAMETER, + + // General error indicating that a supplied parameter is invalid. + // Maps to InvalidAccessError or TypeError DOMException depending on context. + INVALID_PARAMETER, + + // Slightly more specific than INVALID_PARAMETER; a parameter's value was + // outside the allowed range. + // Maps to RangeError DOMException. + INVALID_RANGE, + + // Slightly more specific than INVALID_PARAMETER; an error occurred while + // parsing string input. + // Maps to SyntaxError DOMException. + SYNTAX_ERROR, + + // The object does not support this operation in its current state. + // Maps to InvalidStateError DOMException. + INVALID_STATE, + + // An attempt was made to modify the object in an invalid way. + // Maps to InvalidModificationError DOMException. 
+  INVALID_MODIFICATION, + +  // An error occurred within an underlying network protocol. +  // Maps to NetworkError DOMException. +  NETWORK_ERROR, + +  // Some resource has been exhausted; file handles, hardware resources, ports, +  // etc. +  // Maps to OperationError DOMException. +  RESOURCE_EXHAUSTED, + +  // The operation failed due to an internal error. +  // Maps to OperationError DOMException. +  INTERNAL_ERROR, + +  // An error occurred that has additional data. +  // The additional data is specified in +  // https://w3c.github.io/webrtc-pc/#rtcerror-interface +  // Maps to RTCError DOMException. +  OPERATION_ERROR_WITH_DATA, +}; + +// Detail information, showing what further information should be present. +// https://w3c.github.io/webrtc-pc/#rtcerrordetailtype-enum +enum class RTCErrorDetailType { +  NONE, +  DATA_CHANNEL_FAILURE, +  DTLS_FAILURE, +  FINGERPRINT_FAILURE, +  SCTP_FAILURE, +  SDP_SYNTAX_ERROR, +  HARDWARE_ENCODER_NOT_AVAILABLE, +  HARDWARE_ENCODER_ERROR, +}; + +// Roughly corresponds to RTCError in the web api. Holds an error type, a +// message, and possibly additional information specific to that error. +// +// Doesn't contain anything beyond a type and message now, but will in the +// future as more errors are implemented. +class RTC_EXPORT RTCError { + public: +  // Constructors. + +  // Creates a "no error" error. +  RTCError() {} +  explicit RTCError(RTCErrorType type) : type_(type) {} + +  RTCError(RTCErrorType type, std::string message) +      : type_(type), message_(std::move(message)) {} + +  // In many use cases, it is better to use move than copy, +  // but copy and assignment are provided for those cases that need it. +  // Note that this has extra overhead because it copies strings. +  RTCError(const RTCError& other) = default; +  RTCError(RTCError&&) = default; +  RTCError& operator=(const RTCError& other) = default; +  RTCError& operator=(RTCError&&) = default; + +  // Identical to default constructed error. 
+ // + // Preferred over the default constructor for code readability. + static RTCError OK(); + + // Error type. + RTCErrorType type() const { return type_; } + void set_type(RTCErrorType type) { type_ = type; } + + // Human-readable message describing the error. Shouldn't be used for + // anything but logging/diagnostics, since messages are not guaranteed to be + // stable. + const char* message() const; + + void set_message(std::string message); + + RTCErrorDetailType error_detail() const { return error_detail_; } + void set_error_detail(RTCErrorDetailType detail) { error_detail_ = detail; } + absl::optional sctp_cause_code() { return sctp_cause_code_; } + void set_sctp_cause_code(uint16_t cause_code) { + sctp_cause_code_ = cause_code; + } + + // Convenience method for situations where you only care whether or not an + // error occurred. + bool ok() const { return type_ == RTCErrorType::NONE; } + + private: + RTCErrorType type_ = RTCErrorType::NONE; + std::string message_; + RTCErrorDetailType error_detail_ = RTCErrorDetailType::NONE; + absl::optional sctp_cause_code_; +}; + +// Outputs the error as a friendly string. Update this method when adding a new +// error type. +// +// Only intended to be used for logging/diagnostics. The returned char* points +// to literal string that lives for the whole duration of the program. 
+RTC_EXPORT const char* ToString(RTCErrorType error); +RTC_EXPORT const char* ToString(RTCErrorDetailType error); + +#ifdef UNIT_TEST +inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) + std::ostream& stream, // no-presubmit-check TODO(webrtc:8982) + RTCErrorType error) { + return stream << ToString(error); +} + +inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) + std::ostream& stream, // no-presubmit-check TODO(webrtc:8982) + RTCErrorDetailType error) { + return stream << ToString(error); +} +#endif // UNIT_TEST + +// Helper macro that can be used by implementations to create an error with a +// message and log it. |message| should be a string literal or movable +// std::string. +#define LOG_AND_RETURN_ERROR_EX(type, message, severity) \ + { \ + RTC_DCHECK(type != RTCErrorType::NONE); \ + RTC_LOG(severity) << message << " (" << ToString(type) << ")"; \ + return webrtc::RTCError(type, message); \ + } + +#define LOG_AND_RETURN_ERROR(type, message) \ + LOG_AND_RETURN_ERROR_EX(type, message, LS_ERROR) + +// RTCErrorOr is the union of an RTCError object and a T object. RTCErrorOr +// models the concept of an object that is either a usable value, or an error +// Status explaining why such a value is not present. To this end RTCErrorOr +// does not allow its RTCErrorType value to be RTCErrorType::NONE. This is +// enforced by a debug check in most cases. +// +// The primary use-case for RTCErrorOr is as the return value of a function +// which may fail. For example, CreateRtpSender will fail if the parameters +// could not be successfully applied at the media engine level, but if +// successful will return a unique_ptr to an RtpSender. 
+// +// Example client usage for a RTCErrorOr>: +// +// RTCErrorOr> result = FooFactory::MakeNewFoo(arg); +// if (result.ok()) { +// std::unique_ptr foo = result.ConsumeValue(); +// foo->DoSomethingCool(); +// } else { +// RTC_LOG(LS_ERROR) << result.error(); +// } +// +// Example factory implementation returning RTCErrorOr>: +// +// RTCErrorOr> FooFactory::MakeNewFoo(int arg) { +// if (arg <= 0) { +// return RTCError(RTCErrorType::INVALID_RANGE, "Arg must be positive"); +// } else { +// return std::unique_ptr(new Foo(arg)); +// } +// } +// +template +class RTCErrorOr { + // Used to convert between RTCErrorOr/RtcErrorOr, when an implicit + // conversion from Foo to Bar exists. + template + friend class RTCErrorOr; + + public: + typedef T element_type; + + // Constructs a new RTCErrorOr with RTCErrorType::INTERNAL_ERROR error. This + // is marked 'explicit' to try to catch cases like 'return {};', where people + // think RTCErrorOr> will be initialized with an empty + // vector, instead of a RTCErrorType::INTERNAL_ERROR error. + RTCErrorOr() : error_(RTCErrorType::INTERNAL_ERROR) {} + + // Constructs a new RTCErrorOr with the given non-ok error. After calling + // this constructor, calls to value() will DCHECK-fail. + // + // NOTE: Not explicit - we want to use RTCErrorOr as a return + // value, so it is convenient and sensible to be able to do 'return + // RTCError(...)' when the return type is RTCErrorOr. + // + // REQUIRES: !error.ok(). This requirement is DCHECKed. + RTCErrorOr(RTCError&& error) : error_(std::move(error)) { // NOLINT + RTC_DCHECK(!error.ok()); + } + + // Constructs a new RTCErrorOr with the given value. After calling this + // constructor, calls to value() will succeed, and calls to error() will + // return a default-constructed RTCError. + // + // NOTE: Not explicit - we want to use RTCErrorOr as a return type + // so it is convenient and sensible to be able to do 'return T()' + // when the return type is RTCErrorOr. 
+ RTCErrorOr(const T& value) : value_(value) {} // NOLINT + RTCErrorOr(T&& value) : value_(std::move(value)) {} // NOLINT + + // Delete the copy constructor and assignment operator; there aren't any use + // cases where you should need to copy an RTCErrorOr, as opposed to moving + // it. Can revisit this decision if use cases arise in the future. + RTCErrorOr(const RTCErrorOr& other) = delete; + RTCErrorOr& operator=(const RTCErrorOr& other) = delete; + + // Move constructor and move-assignment operator. + // + // Visual Studio doesn't support "= default" with move constructors or + // assignment operators (even though they compile, they segfault), so define + // them explicitly. + RTCErrorOr(RTCErrorOr&& other) + : error_(std::move(other.error_)), value_(std::move(other.value_)) {} + RTCErrorOr& operator=(RTCErrorOr&& other) { + error_ = std::move(other.error_); + value_ = std::move(other.value_); + return *this; + } + + // Conversion constructor and assignment operator; T must be copy or move + // constructible from U. + template + RTCErrorOr(RTCErrorOr other) // NOLINT + : error_(std::move(other.error_)), value_(std::move(other.value_)) {} + template + RTCErrorOr& operator=(RTCErrorOr other) { + error_ = std::move(other.error_); + value_ = std::move(other.value_); + return *this; + } + + // Returns a reference to our error. If this contains a T, then returns + // default-constructed RTCError. + const RTCError& error() const { return error_; } + + // Moves the error. Can be useful if, say "CreateFoo" returns an + // RTCErrorOr, and internally calls "CreateBar" which returns an + // RTCErrorOr, and wants to forward the error up the stack. + RTCError MoveError() { return std::move(error_); } + + // Returns this->error().ok() + bool ok() const { return error_.ok(); } + + // Returns a reference to our current value, or DCHECK-fails if !this->ok(). 
+ // + // Can be convenient for the implementation; for example, a method may want + // to access the value in some way before returning it to the next method on + // the stack. + const T& value() const { + RTC_DCHECK(ok()); + return value_; + } + T& value() { + RTC_DCHECK(ok()); + return value_; + } + + // Moves our current value out of this object and returns it, or DCHECK-fails + // if !this->ok(). + T MoveValue() { + RTC_DCHECK(ok()); + return std::move(value_); + } + + private: + RTCError error_; + T value_; +}; + +} // namespace webrtc + +#endif // API_RTC_ERROR_H_ diff --git a/api/rtc_error_unittest.cc b/api/rtc_error_unittest.cc new file mode 100644 index 0000000..ba307d8 --- /dev/null +++ b/api/rtc_error_unittest.cc @@ -0,0 +1,242 @@ +/* + * Copyright 2017 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/rtc_error.h" + +#include + +#include "test/gtest.h" + +namespace { + +const int kDefaultMoveOnlyIntValue = 0xbadf00d; + +// Class that has no copy constructor, ensuring that RTCErrorOr can +struct MoveOnlyInt { + MoveOnlyInt() {} + explicit MoveOnlyInt(int value) : value(value) {} + MoveOnlyInt(const MoveOnlyInt& other) = delete; + MoveOnlyInt& operator=(const MoveOnlyInt& other) = delete; + MoveOnlyInt(MoveOnlyInt&& other) : value(other.value) {} + MoveOnlyInt& operator=(MoveOnlyInt&& other) { + value = other.value; + return *this; + } + + int value = kDefaultMoveOnlyIntValue; +}; + +// Same as above. Used to test conversion from RTCErrorOr to RTCErrorOr +// when A can be converted to B. 
+struct MoveOnlyInt2 { + MoveOnlyInt2() {} + explicit MoveOnlyInt2(int value) : value(value) {} + MoveOnlyInt2(const MoveOnlyInt2& other) = delete; + MoveOnlyInt2& operator=(const MoveOnlyInt2& other) = delete; + MoveOnlyInt2(MoveOnlyInt2&& other) : value(other.value) {} + MoveOnlyInt2& operator=(MoveOnlyInt2&& other) { + value = other.value; + return *this; + } + + explicit MoveOnlyInt2(MoveOnlyInt&& other) : value(other.value) {} + MoveOnlyInt2& operator=(MoveOnlyInt&& other) { + value = other.value; + return *this; + } + + int value = kDefaultMoveOnlyIntValue; +}; + +} // namespace + +namespace webrtc { + +// Test that the default constructor creates a "no error" error. +TEST(RTCErrorTest, DefaultConstructor) { + RTCError e; + EXPECT_EQ(RTCErrorType::NONE, e.type()); + EXPECT_EQ(std::string(), e.message()); + EXPECT_TRUE(e.ok()); +} + +TEST(RTCErrorTest, NormalConstructors) { + RTCError a(RTCErrorType::INVALID_PARAMETER); + EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, a.type()); + EXPECT_EQ(std::string(), a.message()); + + // Constructor that takes const char* message. + RTCError b(RTCErrorType::UNSUPPORTED_PARAMETER, "foobar"); + EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER, b.type()); + EXPECT_EQ(std::string("foobar"), b.message()); + + // Constructor that takes std::string message. + RTCError c(RTCErrorType::INVALID_RANGE, std::string("new")); + EXPECT_EQ(RTCErrorType::INVALID_RANGE, c.type()); + EXPECT_EQ(std::string("new"), c.message()); +} + +TEST(RTCErrorTest, MoveConstructor) { + // Static string. + RTCError a(RTCErrorType::INVALID_PARAMETER, "foo"); + RTCError b(std::move(a)); + EXPECT_EQ(RTCErrorType::INVALID_PARAMETER, b.type()); + EXPECT_EQ(std::string("foo"), b.message()); + + // Non-static string. 
+ RTCError c(RTCErrorType::UNSUPPORTED_PARAMETER, std::string("bar")); + RTCError d(std::move(c)); + EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER, d.type()); + EXPECT_EQ(std::string("bar"), d.message()); +} + +TEST(RTCErrorTest, MoveAssignment) { + // Try all combinations of "is static string"/"is non-static string" moves. + RTCError e(RTCErrorType::INVALID_PARAMETER, "foo"); + + e = RTCError(RTCErrorType::UNSUPPORTED_PARAMETER, "bar"); + EXPECT_EQ(RTCErrorType::UNSUPPORTED_PARAMETER, e.type()); + EXPECT_EQ(std::string("bar"), e.message()); + + e = RTCError(RTCErrorType::SYNTAX_ERROR, std::string("baz")); + EXPECT_EQ(std::string("baz"), e.message()); + + e = RTCError(RTCErrorType::SYNTAX_ERROR, std::string("another")); + EXPECT_EQ(std::string("another"), e.message()); + + e = RTCError(RTCErrorType::SYNTAX_ERROR, "last"); + EXPECT_EQ(std::string("last"), e.message()); +} + +// Test that the error returned by RTCError::OK() is a "no error" error. +TEST(RTCErrorTest, OKConstant) { + RTCError ok = RTCError::OK(); + EXPECT_EQ(RTCErrorType::NONE, ok.type()); + EXPECT_EQ(std::string(), ok.message()); + EXPECT_TRUE(ok.ok()); +} + +// Test that "error.ok()" behaves as expected. +TEST(RTCErrorTest, OkMethod) { + RTCError success; + RTCError failure(RTCErrorType::INTERNAL_ERROR); + EXPECT_TRUE(success.ok()); + EXPECT_FALSE(failure.ok()); +} + +// Test that a message can be set using either static const strings or +// std::strings. +TEST(RTCErrorTest, SetMessage) { + RTCError e; + // Try all combinations of "is static string"/"is non-static string" calls. 
+ e.set_message("foo"); + EXPECT_EQ(std::string("foo"), e.message()); + + e.set_message("bar"); + EXPECT_EQ(std::string("bar"), e.message()); + + e.set_message(std::string("string")); + EXPECT_EQ(std::string("string"), e.message()); + + e.set_message(std::string("more")); + EXPECT_EQ(std::string("more"), e.message()); + + e.set_message("love to test"); + EXPECT_EQ(std::string("love to test"), e.message()); +} + +// Test that the default constructor creates an "INTERNAL_ERROR". +TEST(RTCErrorOrTest, DefaultConstructor) { + RTCErrorOr e; + EXPECT_EQ(RTCErrorType::INTERNAL_ERROR, e.error().type()); +} + +// Test that an RTCErrorOr can be implicitly constructed from a value. +TEST(RTCErrorOrTest, ImplicitValueConstructor) { + RTCErrorOr e = [] { return MoveOnlyInt(100); }(); + EXPECT_EQ(100, e.value().value); +} + +// Test that an RTCErrorOr can be implicitly constructed from an RTCError. +TEST(RTCErrorOrTest, ImplicitErrorConstructor) { + RTCErrorOr e = [] { + return RTCError(RTCErrorType::SYNTAX_ERROR); + }(); + EXPECT_EQ(RTCErrorType::SYNTAX_ERROR, e.error().type()); +} + +TEST(RTCErrorOrTest, MoveConstructor) { + RTCErrorOr a(MoveOnlyInt(5)); + RTCErrorOr b(std::move(a)); + EXPECT_EQ(5, b.value().value); +} + +TEST(RTCErrorOrTest, MoveAssignment) { + RTCErrorOr a(MoveOnlyInt(5)); + RTCErrorOr b(MoveOnlyInt(10)); + a = std::move(b); + EXPECT_EQ(10, a.value().value); +} + +TEST(RTCErrorOrTest, ConversionConstructor) { + RTCErrorOr a(MoveOnlyInt(1)); + RTCErrorOr b(std::move(a)); +} + +TEST(RTCErrorOrTest, ConversionAssignment) { + RTCErrorOr a(MoveOnlyInt(5)); + RTCErrorOr b(MoveOnlyInt2(10)); + b = std::move(a); + EXPECT_EQ(5, b.value().value); +} + +TEST(RTCErrorOrTest, OkMethod) { + RTCErrorOr success(1337); + RTCErrorOr error = RTCError(RTCErrorType::INTERNAL_ERROR); + EXPECT_TRUE(success.ok()); + EXPECT_FALSE(error.ok()); +} + +TEST(RTCErrorOrTest, MoveError) { + RTCErrorOr e({RTCErrorType::SYNTAX_ERROR, "message"}); + RTCError err = e.MoveError(); + 
EXPECT_EQ(RTCErrorType::SYNTAX_ERROR, err.type()); + EXPECT_EQ(std::string("message"), err.message()); +} + +TEST(RTCErrorOrTest, MoveValue) { + RTCErrorOr e(MoveOnlyInt(88)); + MoveOnlyInt value = e.MoveValue(); + EXPECT_EQ(88, value.value); +} + +// Death tests. +// Disabled on Android because death tests misbehave on Android, see +// base/test/gtest_util.h. +#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) + +TEST(RTCErrorOrDeathTest, ConstructWithOkError) { + RTCErrorOr err; + EXPECT_DEATH(err = RTCError::OK(), ""); +} + +TEST(RTCErrorOrDeathTest, DereferenceErrorValue) { + RTCErrorOr error = RTCError(RTCErrorType::INTERNAL_ERROR); + EXPECT_DEATH(error.value(), ""); +} + +TEST(RTCErrorOrDeathTest, MoveErrorValue) { + RTCErrorOr error = RTCError(RTCErrorType::INTERNAL_ERROR); + EXPECT_DEATH(error.MoveValue(), ""); +} + +#endif // RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) + +} // namespace webrtc diff --git a/api/rtc_event_log/BUILD.gn b/api/rtc_event_log/BUILD.gn new file mode 100644 index 0000000..e853058 --- /dev/null +++ b/api/rtc_event_log/BUILD.gn @@ -0,0 +1,47 @@ +# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../webrtc.gni") + +rtc_library("rtc_event_log") { + visibility = [ "*" ] + sources = [ + "rtc_event.cc", + "rtc_event.h", + "rtc_event_log.cc", + "rtc_event_log.h", + "rtc_event_log_factory_interface.h", + ] + + deps = [ + "..:libjingle_logging_api", + "../../rtc_base:checks", + "../../rtc_base:timeutils", + "../task_queue", + ] +} + +rtc_library("rtc_event_log_factory") { + visibility = [ "*" ] + sources = [ + "rtc_event_log_factory.cc", + "rtc_event_log_factory.h", + ] + + deps = [ + ":rtc_event_log", + "../../rtc_base:checks", + "../../rtc_base/system:rtc_export", + "../task_queue", + ] + + if (rtc_enable_protobuf) { + defines = [ "WEBRTC_ENABLE_RTC_EVENT_LOG" ] + deps += [ "../../logging:rtc_event_log_impl" ] + } +} diff --git a/api/rtc_event_log/rtc_event.cc b/api/rtc_event_log/rtc_event.cc new file mode 100644 index 0000000..81e6a4e --- /dev/null +++ b/api/rtc_event_log/rtc_event.cc @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/rtc_event_log/rtc_event.h" + +#include "rtc_base/time_utils.h" + +namespace webrtc { + +RtcEvent::RtcEvent() : timestamp_us_(rtc::TimeMicros()) {} + +} // namespace webrtc diff --git a/api/rtc_event_log/rtc_event.h b/api/rtc_event_log/rtc_event.h new file mode 100644 index 0000000..51db8f0 --- /dev/null +++ b/api/rtc_event_log/rtc_event.h @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_RTC_EVENT_LOG_RTC_EVENT_H_
+#define API_RTC_EVENT_LOG_RTC_EVENT_H_
+
+#include <cstdint>
+
+namespace webrtc {
+
+// This class allows us to store unencoded RTC events. Subclasses of this class
+// store the actual information. This allows us to keep all unencoded events,
+// even when their type and associated information differ, in the same buffer.
+// Additionally, it prevents dependency leaking - a module that only logs
+// events of type RtcEvent_A doesn't need to know about anything associated
+// with events of type RtcEvent_B.
+class RtcEvent {
+ public:
+  // Subclasses of this class have to associate themselves with a unique value
+  // of Type. This leaks the information of existing subclasses into the
+  // superclass, but the *actual* information - rtclog::StreamConfig, etc. -
+  // is kept separate.
+ enum class Type { + AlrStateEvent, + RouteChangeEvent, + RemoteEstimateEvent, + AudioNetworkAdaptation, + AudioPlayout, + AudioReceiveStreamConfig, + AudioSendStreamConfig, + BweUpdateDelayBased, + BweUpdateLossBased, + DtlsTransportState, + DtlsWritableState, + IceCandidatePairConfig, + IceCandidatePairEvent, + ProbeClusterCreated, + ProbeResultFailure, + ProbeResultSuccess, + RtcpPacketIncoming, + RtcpPacketOutgoing, + RtpPacketIncoming, + RtpPacketOutgoing, + VideoReceiveStreamConfig, + VideoSendStreamConfig, + GenericPacketSent, + GenericPacketReceived, + GenericAckReceived, + FrameDecoded + }; + + RtcEvent(); + virtual ~RtcEvent() = default; + + virtual Type GetType() const = 0; + + virtual bool IsConfigEvent() const = 0; + + int64_t timestamp_ms() const { return timestamp_us_ / 1000; } + int64_t timestamp_us() const { return timestamp_us_; } + + protected: + explicit RtcEvent(int64_t timestamp_us) : timestamp_us_(timestamp_us) {} + + const int64_t timestamp_us_; +}; + +} // namespace webrtc + +#endif // API_RTC_EVENT_LOG_RTC_EVENT_H_ diff --git a/api/rtc_event_log/rtc_event_log.cc b/api/rtc_event_log/rtc_event_log.cc new file mode 100644 index 0000000..56189c0 --- /dev/null +++ b/api/rtc_event_log/rtc_event_log.cc @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/rtc_event_log/rtc_event_log.h" + +namespace webrtc { + +bool RtcEventLogNull::StartLogging( + std::unique_ptr /*output*/, + int64_t /*output_period_ms*/) { + return false; +} + +} // namespace webrtc diff --git a/api/rtc_event_log/rtc_event_log.h b/api/rtc_event_log/rtc_event_log.h new file mode 100644 index 0000000..cebaf84 --- /dev/null +++ b/api/rtc_event_log/rtc_event_log.h @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_RTC_EVENT_LOG_RTC_EVENT_LOG_H_ +#define API_RTC_EVENT_LOG_RTC_EVENT_LOG_H_ + +#include +#include +#include +#include + +#include "api/rtc_event_log/rtc_event.h" +#include "api/rtc_event_log_output.h" +#include "api/task_queue/task_queue_factory.h" + +namespace webrtc { + +class RtcEventLog { + public: + enum : size_t { kUnlimitedOutput = 0 }; + enum : int64_t { kImmediateOutput = 0 }; + + // TODO(eladalon): Get rid of the legacy encoding and this enum once all + // clients have migrated to the new format. + enum class EncodingType { Legacy, NewFormat }; + + virtual ~RtcEventLog() = default; + + // Starts logging to a given output. The output might be limited in size, + // and may close itself once it has reached the maximum size. + virtual bool StartLogging(std::unique_ptr output, + int64_t output_period_ms) = 0; + + // Stops logging to file and waits until the file has been closed, after + // which it would be permissible to read and/or modify it. + virtual void StopLogging() = 0; + + // Stops logging to file and calls |callback| when the file has been closed. 
+ // Note that it is not safe to call any other members, including the + // destructor, until the callback has been called. + // TODO(srte): Remove default implementation when it's safe to do so. + virtual void StopLogging(std::function callback) { + StopLogging(); + callback(); + } + + // Log an RTC event (the type of event is determined by the subclass). + virtual void Log(std::unique_ptr event) = 0; +}; + +// No-op implementation is used if flag is not set, or in tests. +class RtcEventLogNull final : public RtcEventLog { + public: + bool StartLogging(std::unique_ptr output, + int64_t output_period_ms) override; + void StopLogging() override {} + void Log(std::unique_ptr event) override {} +}; + +} // namespace webrtc + +#endif // API_RTC_EVENT_LOG_RTC_EVENT_LOG_H_ diff --git a/api/rtc_event_log/rtc_event_log_factory.cc b/api/rtc_event_log/rtc_event_log_factory.cc new file mode 100644 index 0000000..2013584 --- /dev/null +++ b/api/rtc_event_log/rtc_event_log_factory.cc @@ -0,0 +1,38 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/rtc_event_log/rtc_event_log_factory.h" + +#include +#include + +#include "rtc_base/checks.h" + +#ifdef WEBRTC_ENABLE_RTC_EVENT_LOG +#include "logging/rtc_event_log/rtc_event_log_impl.h" +#endif + +namespace webrtc { + +RtcEventLogFactory::RtcEventLogFactory(TaskQueueFactory* task_queue_factory) + : task_queue_factory_(task_queue_factory) { + RTC_DCHECK(task_queue_factory_); +} + +std::unique_ptr RtcEventLogFactory::CreateRtcEventLog( + RtcEventLog::EncodingType encoding_type) { +#ifdef WEBRTC_ENABLE_RTC_EVENT_LOG + return std::make_unique(encoding_type, task_queue_factory_); +#else + return std::make_unique(); +#endif +} + +} // namespace webrtc diff --git a/api/rtc_event_log/rtc_event_log_factory.h b/api/rtc_event_log/rtc_event_log_factory.h new file mode 100644 index 0000000..06cc074 --- /dev/null +++ b/api/rtc_event_log/rtc_event_log_factory.h @@ -0,0 +1,37 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#ifndef API_RTC_EVENT_LOG_RTC_EVENT_LOG_FACTORY_H_
+#define API_RTC_EVENT_LOG_RTC_EVENT_LOG_FACTORY_H_
+
+#include <memory>
+
+#include "api/rtc_event_log/rtc_event_log.h"
+#include "api/rtc_event_log/rtc_event_log_factory_interface.h"
+#include "api/task_queue/task_queue_factory.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+class RTC_EXPORT RtcEventLogFactory : public RtcEventLogFactoryInterface {
+ public:
+  explicit RtcEventLogFactory(TaskQueueFactory* task_queue_factory);
+  ~RtcEventLogFactory() override {}
+
+  std::unique_ptr<RtcEventLog> CreateRtcEventLog(
+      RtcEventLog::EncodingType encoding_type) override;
+
+ private:
+  TaskQueueFactory* const task_queue_factory_;
+};
+
+}  // namespace webrtc
+
+#endif  // API_RTC_EVENT_LOG_RTC_EVENT_LOG_FACTORY_H_
diff --git a/api/rtc_event_log/rtc_event_log_factory_interface.h b/api/rtc_event_log/rtc_event_log_factory_interface.h
new file mode 100644
index 0000000..acc5bcb
--- /dev/null
+++ b/api/rtc_event_log/rtc_event_log_factory_interface.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_RTC_EVENT_LOG_RTC_EVENT_LOG_FACTORY_INTERFACE_H_
+#define API_RTC_EVENT_LOG_RTC_EVENT_LOG_FACTORY_INTERFACE_H_
+
+#include <memory>
+
+#include "api/rtc_event_log/rtc_event_log.h"
+
+namespace webrtc {
+
+// This interface exists to allow webrtc to be optionally built without
+// RtcEventLog support. A PeerConnectionFactory is constructed with an
+// RtcEventLogFactoryInterface, which may or may not be null.
+class RtcEventLogFactoryInterface {
+ public:
+  virtual ~RtcEventLogFactoryInterface() = default;
+
+  virtual std::unique_ptr<RtcEventLog> CreateRtcEventLog(
+      RtcEventLog::EncodingType encoding_type) = 0;
+};
+
+}  // namespace webrtc
+
+#endif  // API_RTC_EVENT_LOG_RTC_EVENT_LOG_FACTORY_INTERFACE_H_
diff --git a/api/rtc_event_log_output.h b/api/rtc_event_log_output.h
new file mode 100644
index 0000000..92fb9e8
--- /dev/null
+++ b/api/rtc_event_log_output.h
@@ -0,0 +1,42 @@
+/*
+ *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_RTC_EVENT_LOG_OUTPUT_H_
+#define API_RTC_EVENT_LOG_OUTPUT_H_
+
+#include <string>
+
+namespace webrtc {
+
+// NOTE: This class is still under development and may change without notice.
+class RtcEventLogOutput {
+ public:
+  virtual ~RtcEventLogOutput() = default;
+
+  // An output normally starts out active, though that might not always be
+  // the case (e.g. failed to open a file for writing).
+  // Once an output has become inactive (e.g. maximum file size reached), it can
+  // never become active again.
+  virtual bool IsActive() const = 0;
+
+  // Write encoded events to an output. Returns true if the output was
+  // successfully written in its entirety. Otherwise, no guarantee is given
+  // about how much data was written, if any. The output sink becomes inactive
+  // after the first time |false| is returned. Write() may not be called on
+  // an inactive output sink.
+  virtual bool Write(const std::string& output) = 0;
+
+  // Indicates that buffers should be written to disk if applicable.
+ virtual void Flush() {} +}; + +} // namespace webrtc + +#endif // API_RTC_EVENT_LOG_OUTPUT_H_ diff --git a/api/rtc_event_log_output_file.cc b/api/rtc_event_log_output_file.cc new file mode 100644 index 0000000..2e31c2d --- /dev/null +++ b/api/rtc_event_log_output_file.cc @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/rtc_event_log_output_file.h" + +#include +#include + +#include "api/rtc_event_log/rtc_event_log.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +// Together with the assumption of no single Write() would ever be called on +// an input with length greater-than-or-equal-to (max(size_t) / 2), this +// guarantees no overflow of the check for remaining file capacity in Write(). +// This does *not* apply to files with unlimited size. +const size_t RtcEventLogOutputFile::kMaxReasonableFileSize = + std::numeric_limits::max() / 2; + +RtcEventLogOutputFile::RtcEventLogOutputFile(const std::string& file_name) + : RtcEventLogOutputFile(FileWrapper::OpenWriteOnly(file_name), + RtcEventLog::kUnlimitedOutput) {} + +RtcEventLogOutputFile::RtcEventLogOutputFile(const std::string& file_name, + size_t max_size_bytes) + + // Unlike plain fopen, FileWrapper takes care of filename utf8 -> + // wchar conversion on Windows. 
+ : RtcEventLogOutputFile(FileWrapper::OpenWriteOnly(file_name), + max_size_bytes) {} + +RtcEventLogOutputFile::RtcEventLogOutputFile(FILE* file, size_t max_size_bytes) + : RtcEventLogOutputFile(FileWrapper(file), max_size_bytes) {} + +RtcEventLogOutputFile::RtcEventLogOutputFile(FileWrapper file, + size_t max_size_bytes) + : max_size_bytes_(max_size_bytes), file_(std::move(file)) { + RTC_CHECK_LE(max_size_bytes_, kMaxReasonableFileSize); + if (!file_.is_open()) { + RTC_LOG(LS_ERROR) << "Invalid file. WebRTC event log not started."; + } +} + +bool RtcEventLogOutputFile::IsActive() const { + return IsActiveInternal(); +} + +bool RtcEventLogOutputFile::Write(const std::string& output) { + RTC_DCHECK(IsActiveInternal()); + // No single write may be so big, that it would risk overflowing the + // calculation of (written_bytes_ + output.length()). + RTC_DCHECK_LT(output.length(), kMaxReasonableFileSize); + + if (max_size_bytes_ == RtcEventLog::kUnlimitedOutput || + written_bytes_ + output.length() <= max_size_bytes_) { + if (file_.Write(output.c_str(), output.size())) { + written_bytes_ += output.size(); + return true; + } else { + RTC_LOG(LS_ERROR) << "Write to WebRtcEventLog file failed."; + } + } else { + RTC_LOG(LS_VERBOSE) << "Max file size reached."; + } + + // Failed, for one of above reasons. Close output file. + file_.Close(); + return false; +} + +// Internal non-virtual method. +bool RtcEventLogOutputFile::IsActiveInternal() const { + return file_.is_open(); +} + +} // namespace webrtc diff --git a/api/rtc_event_log_output_file.h b/api/rtc_event_log_output_file.h new file mode 100644 index 0000000..d2901be --- /dev/null +++ b/api/rtc_event_log_output_file.h @@ -0,0 +1,58 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_RTC_EVENT_LOG_OUTPUT_FILE_H_ +#define API_RTC_EVENT_LOG_OUTPUT_FILE_H_ + +#include +#include + +#include + +#include "api/rtc_event_log_output.h" +#include "rtc_base/system/file_wrapper.h" + +namespace webrtc { + +class RtcEventLogOutputFile final : public RtcEventLogOutput { + public: + static const size_t kMaxReasonableFileSize; // Explanation at declaration. + + // Unlimited/limited-size output file (by filename). + explicit RtcEventLogOutputFile(const std::string& file_name); + RtcEventLogOutputFile(const std::string& file_name, size_t max_size_bytes); + + // Limited-size output file (by FILE*). This class takes ownership + // of the FILE*, and closes it on destruction. + RtcEventLogOutputFile(FILE* file, size_t max_size_bytes); + + ~RtcEventLogOutputFile() override = default; + + bool IsActive() const override; + + bool Write(const std::string& output) override; + + private: + RtcEventLogOutputFile(FileWrapper file, size_t max_size_bytes); + + // IsActive() can be called either from outside or from inside, but we don't + // want to incur the overhead of a virtual function call if called from inside + // some other function of this class. + inline bool IsActiveInternal() const; + + // Maximum size, or zero for no limit. + const size_t max_size_bytes_; + size_t written_bytes_{0}; + FileWrapper file_; +}; + +} // namespace webrtc + +#endif // API_RTC_EVENT_LOG_OUTPUT_FILE_H_ diff --git a/api/rtc_event_log_output_file_unittest.cc b/api/rtc_event_log_output_file_unittest.cc new file mode 100644 index 0000000..4274215 --- /dev/null +++ b/api/rtc_event_log_output_file_unittest.cc @@ -0,0 +1,167 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/rtc_event_log_output_file.h" + +#include +#include +#include +#include + +#include "rtc_base/checks.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" + +namespace webrtc { + +class RtcEventLogOutputFileTest : public ::testing::Test { + public: + RtcEventLogOutputFileTest() : output_file_name_(GetOutputFilePath()) { + // Ensure no leftovers from previous runs, which might not have terminated + // in an orderly fashion. + remove(output_file_name_.c_str()); + } + + ~RtcEventLogOutputFileTest() override { remove(output_file_name_.c_str()); } + + protected: + std::string GetOutputFilePath() const { + auto test_info = ::testing::UnitTest::GetInstance()->current_test_info(); + return test::OutputPath() + test_info->test_case_name() + test_info->name(); + } + + std::string GetOutputFileContents() const { + std::ifstream file(output_file_name_, + std::ios_base::in | std::ios_base::binary); + RTC_CHECK(file.is_open()); + RTC_CHECK(file.good()); + std::string file_str((std::istreambuf_iterator(file)), + std::istreambuf_iterator()); + return file_str; + } + + const std::string output_file_name_; +}; + +TEST_F(RtcEventLogOutputFileTest, NonDefectiveOutputsStartOutActive) { + auto output_file = std::make_unique(output_file_name_); + EXPECT_TRUE(output_file->IsActive()); +} + +TEST_F(RtcEventLogOutputFileTest, DefectiveOutputsStartOutInactive) { + const std::string illegal_filename = "/////////"; + auto output_file = std::make_unique(illegal_filename); + EXPECT_FALSE(output_file->IsActive()); +} + +// Sanity over opening a file (by filename) with an unlimited size. 
+TEST_F(RtcEventLogOutputFileTest, UnlimitedOutputFile) { + const std::string output_str = "one two three"; + + auto output_file = std::make_unique(output_file_name_); + output_file->Write(output_str); + output_file.reset(); // Closing the file flushes the buffer to disk. + + EXPECT_EQ(GetOutputFileContents(), output_str); +} + +// Do not allow writing more bytes to the file than +TEST_F(RtcEventLogOutputFileTest, LimitedOutputFileCappedToCapacity) { + // Fit two bytes, then the third should be rejected. + auto output_file = + std::make_unique(output_file_name_, 2); + + output_file->Write("1"); + output_file->Write("2"); + output_file->Write("3"); + // Unsuccessful writes close the file; no need to delete the output to flush. + + EXPECT_EQ(GetOutputFileContents(), "12"); +} + +// Make sure that calls to Write() either write everything to the file, or +// nothing (short of underlying issues in the module that handles the file, +// which would be beyond our control). +TEST_F(RtcEventLogOutputFileTest, DoNotWritePartialLines) { + const std::string output_str_1 = "0123456789"; + const std::string output_str_2 = "abcdefghij"; + + // Set a file size limit just shy of fitting the entire second line. + const size_t size_limit = output_str_1.length() + output_str_2.length() - 1; + auto output_file = + std::make_unique(output_file_name_, size_limit); + + output_file->Write(output_str_1); + output_file->Write(output_str_2); + // Unsuccessful writes close the file; no need to delete the output to flush. 
+ + EXPECT_EQ(GetOutputFileContents(), output_str_1); +} + +TEST_F(RtcEventLogOutputFileTest, UnsuccessfulWriteReturnsFalse) { + auto output_file = + std::make_unique(output_file_name_, 2); + EXPECT_FALSE(output_file->Write("abc")); +} + +TEST_F(RtcEventLogOutputFileTest, SuccessfulWriteReturnsTrue) { + auto output_file = + std::make_unique(output_file_name_, 3); + EXPECT_TRUE(output_file->Write("abc")); +} + +// Even if capacity is reached, a successful write leaves the output active. +TEST_F(RtcEventLogOutputFileTest, FileStillActiveAfterSuccessfulWrite) { + auto output_file = + std::make_unique(output_file_name_, 3); + ASSERT_TRUE(output_file->Write("abc")); + EXPECT_TRUE(output_file->IsActive()); +} + +// Unsuccessful writes switch the output to inactive, even if capacity has +// not yet been reached. +TEST_F(RtcEventLogOutputFileTest, FileInactiveAfterUnsuccessfulWrite) { + auto output_file = + std::make_unique(output_file_name_, 2); + ASSERT_FALSE(output_file->Write("abc")); + EXPECT_FALSE(output_file->IsActive()); +} + +TEST_F(RtcEventLogOutputFileTest, AllowReasonableFileSizeLimits) { + auto output_file = std::make_unique( + output_file_name_, RtcEventLogOutputFile::kMaxReasonableFileSize); + EXPECT_TRUE(output_file->IsActive()); +} + +#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) +class RtcEventLogOutputFileDeathTest : public RtcEventLogOutputFileTest {}; + +TEST_F(RtcEventLogOutputFileDeathTest, WritingToInactiveFileForbidden) { + RtcEventLogOutputFile output_file(output_file_name_, 2); + ASSERT_FALSE(output_file.Write("abc")); + ASSERT_FALSE(output_file.IsActive()); + EXPECT_DEATH(output_file.Write("abc"), ""); +} + +TEST_F(RtcEventLogOutputFileDeathTest, DisallowUnreasonableFileSizeLimits) { + // Keeping in a temporary unique_ptr to make it clearer that the death is + // triggered by construction, not destruction. 
+ std::unique_ptr output_file; + auto create_output_file = [&] { + const size_t unreasonable_size = + RtcEventLogOutputFile::kMaxReasonableFileSize + 1; + output_file = std::make_unique(output_file_name_, + unreasonable_size); + }; + EXPECT_DEATH(create_output_file(), ""); +} +#endif + +} // namespace webrtc diff --git a/api/rtp_headers.cc b/api/rtp_headers.cc new file mode 100644 index 0000000..e0ad9eb --- /dev/null +++ b/api/rtp_headers.cc @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/rtp_headers.h" + +namespace webrtc { + +RTPHeaderExtension::RTPHeaderExtension() + : hasTransmissionTimeOffset(false), + transmissionTimeOffset(0), + hasAbsoluteSendTime(false), + absoluteSendTime(0), + hasTransportSequenceNumber(false), + transportSequenceNumber(0), + hasAudioLevel(false), + voiceActivity(false), + audioLevel(0), + hasVideoRotation(false), + videoRotation(kVideoRotation_0), + hasVideoContentType(false), + videoContentType(VideoContentType::UNSPECIFIED), + has_video_timing(false) {} + +RTPHeaderExtension::RTPHeaderExtension(const RTPHeaderExtension& other) = + default; + +RTPHeaderExtension& RTPHeaderExtension::operator=( + const RTPHeaderExtension& other) = default; + +RTPHeader::RTPHeader() + : markerBit(false), + payloadType(0), + sequenceNumber(0), + timestamp(0), + ssrc(0), + numCSRCs(0), + arrOfCSRCs(), + paddingLength(0), + headerLength(0), + payload_type_frequency(0), + extension() {} + +RTPHeader::RTPHeader(const RTPHeader& other) = default; + +RTPHeader& RTPHeader::operator=(const RTPHeader& other) = default; + +} // namespace webrtc diff --git 
a/api/rtp_headers.h b/api/rtp_headers.h new file mode 100644 index 0000000..454149c --- /dev/null +++ b/api/rtp_headers.h @@ -0,0 +1,191 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_RTP_HEADERS_H_ +#define API_RTP_HEADERS_H_ + +#include +#include + +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/units/timestamp.h" +#include "api/video/color_space.h" +#include "api/video/video_content_type.h" +#include "api/video/video_rotation.h" +#include "api/video/video_timing.h" +#include "common_types.h" // NOLINT (build/include) + +namespace webrtc { + +struct FeedbackRequest { + // Determines whether the recv delta as specified in + // https://tools.ietf.org/html/draft-holmer-rmcat-transport-wide-cc-extensions-01 + // should be included. + bool include_timestamps; + // Include feedback of received packets in the range [sequence_number - + // sequence_count + 1, sequence_number]. That is, no feedback will be sent if + // sequence_count is zero. + int sequence_count; +}; + +// The Absolute Capture Time extension is used to stamp RTP packets with a NTP +// timestamp showing when the first audio or video frame in a packet was +// originally captured. The intent of this extension is to provide a way to +// accomplish audio-to-video synchronization when RTCP-terminating intermediate +// systems (e.g. mixers) are involved. See: +// http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time +struct AbsoluteCaptureTime { + // Absolute capture timestamp is the NTP timestamp of when the first frame in + // a packet was originally captured. 
This timestamp MUST be based on the same + // clock as the clock used to generate NTP timestamps for RTCP sender reports + // on the capture system. + // + // It’s not always possible to do an NTP clock readout at the exact moment of + // when a media frame is captured. A capture system MAY postpone the readout + // until a more convenient time. A capture system SHOULD have known delays + // (e.g. from hardware buffers) subtracted from the readout to make the final + // timestamp as close to the actual capture time as possible. + // + // This field is encoded as a 64-bit unsigned fixed-point number with the high + // 32 bits for the timestamp in seconds and low 32 bits for the fractional + // part. This is also known as the UQ32.32 format and is what the RTP + // specification defines as the canonical format to represent NTP timestamps. + uint64_t absolute_capture_timestamp; + + // Estimated capture clock offset is the sender’s estimate of the offset + // between its own NTP clock and the capture system’s NTP clock. The sender is + // here defined as the system that owns the NTP clock used to generate the NTP + // timestamps for the RTCP sender reports on this stream. The sender system is + // typically either the capture system or a mixer. + // + // This field is encoded as a 64-bit two’s complement signed fixed-point + // number with the high 32 bits for the seconds and low 32 bits for the + // fractional part. 
It’s intended to make it easy for a receiver, that knows + // how to estimate the sender system’s NTP clock, to also estimate the capture + // system’s NTP clock: + // + // Capture NTP Clock = Sender NTP Clock + Capture Clock Offset + absl::optional estimated_capture_clock_offset; +}; + +inline bool operator==(const AbsoluteCaptureTime& lhs, + const AbsoluteCaptureTime& rhs) { + return (lhs.absolute_capture_timestamp == rhs.absolute_capture_timestamp) && + (lhs.estimated_capture_clock_offset == + rhs.estimated_capture_clock_offset); +} + +inline bool operator!=(const AbsoluteCaptureTime& lhs, + const AbsoluteCaptureTime& rhs) { + return !(lhs == rhs); +} + +struct RTPHeaderExtension { + RTPHeaderExtension(); + RTPHeaderExtension(const RTPHeaderExtension& other); + RTPHeaderExtension& operator=(const RTPHeaderExtension& other); + + static constexpr int kAbsSendTimeFraction = 18; + + Timestamp GetAbsoluteSendTimestamp() const { + RTC_DCHECK(hasAbsoluteSendTime); + RTC_DCHECK(absoluteSendTime < (1ul << 24)); + return Timestamp::Micros((absoluteSendTime * 1000000ll) / + (1 << kAbsSendTimeFraction)); + } + + TimeDelta GetAbsoluteSendTimeDelta(uint32_t previous_sendtime) const { + RTC_DCHECK(hasAbsoluteSendTime); + RTC_DCHECK(absoluteSendTime < (1ul << 24)); + RTC_DCHECK(previous_sendtime < (1ul << 24)); + int32_t delta = + static_cast((absoluteSendTime - previous_sendtime) << 8) >> 8; + return TimeDelta::Micros((delta * 1000000ll) / (1 << kAbsSendTimeFraction)); + } + + bool hasTransmissionTimeOffset; + int32_t transmissionTimeOffset; + bool hasAbsoluteSendTime; + uint32_t absoluteSendTime; + absl::optional absolute_capture_time; + bool hasTransportSequenceNumber; + uint16_t transportSequenceNumber; + absl::optional feedback_request; + + // Audio Level includes both level in dBov and voiced/unvoiced bit. See: + // https://tools.ietf.org/html/rfc6464#section-3 + bool hasAudioLevel; + bool voiceActivity; + uint8_t audioLevel; + + // For Coordination of Video Orientation. 
See + // http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/12.07.00_60/ + // ts_126114v120700p.pdf + bool hasVideoRotation; + VideoRotation videoRotation; + + // TODO(ilnik): Refactor this and one above to be absl::optional() and remove + // a corresponding bool flag. + bool hasVideoContentType; + VideoContentType videoContentType; + + bool has_video_timing; + VideoSendTiming video_timing; + + PlayoutDelay playout_delay = {-1, -1}; + + // For identification of a stream when ssrc is not signaled. See + // https://tools.ietf.org/html/draft-ietf-avtext-rid-09 + // TODO(danilchap): Update url from draft to release version. + std::string stream_id; + std::string repaired_stream_id; + + // For identifying the media section used to interpret this RTP packet. See + // https://tools.ietf.org/html/draft-ietf-mmusic-sdp-bundle-negotiation-38 + std::string mid; + + absl::optional color_space; +}; + +enum { kRtpCsrcSize = 15 }; // RFC 3550 page 13 + +struct RTPHeader { + RTPHeader(); + RTPHeader(const RTPHeader& other); + RTPHeader& operator=(const RTPHeader& other); + + bool markerBit; + uint8_t payloadType; + uint16_t sequenceNumber; + uint32_t timestamp; + uint32_t ssrc; + uint8_t numCSRCs; + uint32_t arrOfCSRCs[kRtpCsrcSize]; + size_t paddingLength; + size_t headerLength; + int payload_type_frequency; + RTPHeaderExtension extension; +}; + +// RTCP mode to use. Compound mode is described by RFC 4585 and reduced-size +// RTCP mode is described by RFC 5506. +enum class RtcpMode { kOff, kCompound, kReducedSize }; + +enum NetworkState { + kNetworkUp, + kNetworkDown, +}; + +} // namespace webrtc + +#endif // API_RTP_HEADERS_H_ diff --git a/api/rtp_packet_info.cc b/api/rtp_packet_info.cc new file mode 100644 index 0000000..a9ebd9d --- /dev/null +++ b/api/rtp_packet_info.cc @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/rtp_packet_info.h" + +#include +#include + +namespace webrtc { + +RtpPacketInfo::RtpPacketInfo() + : ssrc_(0), rtp_timestamp_(0), receive_time_ms_(-1) {} + +RtpPacketInfo::RtpPacketInfo( + uint32_t ssrc, + std::vector csrcs, + uint32_t rtp_timestamp, + absl::optional audio_level, + absl::optional absolute_capture_time, + int64_t receive_time_ms) + : ssrc_(ssrc), + csrcs_(std::move(csrcs)), + rtp_timestamp_(rtp_timestamp), + audio_level_(audio_level), + absolute_capture_time_(absolute_capture_time), + receive_time_ms_(receive_time_ms) {} + +RtpPacketInfo::RtpPacketInfo(const RTPHeader& rtp_header, + int64_t receive_time_ms) + : ssrc_(rtp_header.ssrc), + rtp_timestamp_(rtp_header.timestamp), + receive_time_ms_(receive_time_ms) { + const auto& extension = rtp_header.extension; + const auto csrcs_count = std::min(rtp_header.numCSRCs, kRtpCsrcSize); + + csrcs_.assign(&rtp_header.arrOfCSRCs[0], &rtp_header.arrOfCSRCs[csrcs_count]); + + if (extension.hasAudioLevel) { + audio_level_ = extension.audioLevel; + } + + absolute_capture_time_ = extension.absolute_capture_time; +} + +bool operator==(const RtpPacketInfo& lhs, const RtpPacketInfo& rhs) { + return (lhs.ssrc() == rhs.ssrc()) && (lhs.csrcs() == rhs.csrcs()) && + (lhs.rtp_timestamp() == rhs.rtp_timestamp()) && + (lhs.audio_level() == rhs.audio_level()) && + (lhs.absolute_capture_time() == rhs.absolute_capture_time()) && + (lhs.receive_time_ms() == rhs.receive_time_ms()); +} + +} // namespace webrtc diff --git a/api/rtp_packet_info.h b/api/rtp_packet_info.h new file mode 100644 index 0000000..639ba32 --- /dev/null +++ b/api/rtp_packet_info.h @@ -0,0 +1,97 @@ +/* + * 
Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_RTP_PACKET_INFO_H_ +#define API_RTP_PACKET_INFO_H_ + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/rtp_headers.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// +// Structure to hold information about a received |RtpPacket|. It is primarily +// used to carry per-packet information from when a packet is received until +// the information is passed to |SourceTracker|. +// +class RTC_EXPORT RtpPacketInfo { + public: + RtpPacketInfo(); + + RtpPacketInfo(uint32_t ssrc, + std::vector csrcs, + uint32_t rtp_timestamp, + absl::optional audio_level, + absl::optional absolute_capture_time, + int64_t receive_time_ms); + + RtpPacketInfo(const RTPHeader& rtp_header, int64_t receive_time_ms); + + RtpPacketInfo(const RtpPacketInfo& other) = default; + RtpPacketInfo(RtpPacketInfo&& other) = default; + RtpPacketInfo& operator=(const RtpPacketInfo& other) = default; + RtpPacketInfo& operator=(RtpPacketInfo&& other) = default; + + uint32_t ssrc() const { return ssrc_; } + void set_ssrc(uint32_t value) { ssrc_ = value; } + + const std::vector& csrcs() const { return csrcs_; } + void set_csrcs(std::vector value) { csrcs_ = std::move(value); } + + uint32_t rtp_timestamp() const { return rtp_timestamp_; } + void set_rtp_timestamp(uint32_t value) { rtp_timestamp_ = value; } + + absl::optional audio_level() const { return audio_level_; } + void set_audio_level(absl::optional value) { audio_level_ = value; } + + const absl::optional& absolute_capture_time() const { + return absolute_capture_time_; + } + void 
set_absolute_capture_time( + const absl::optional& value) { + absolute_capture_time_ = value; + } + + int64_t receive_time_ms() const { return receive_time_ms_; } + void set_receive_time_ms(int64_t value) { receive_time_ms_ = value; } + + private: + // Fields from the RTP header: + // https://tools.ietf.org/html/rfc3550#section-5.1 + uint32_t ssrc_; + std::vector csrcs_; + uint32_t rtp_timestamp_; + + // Fields from the Audio Level header extension: + // https://tools.ietf.org/html/rfc6464#section-3 + absl::optional audio_level_; + + // Fields from the Absolute Capture Time header extension: + // http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time + absl::optional absolute_capture_time_; + + // Local |webrtc::Clock|-based timestamp of when the packet was received. + int64_t receive_time_ms_; +}; + +bool operator==(const RtpPacketInfo& lhs, const RtpPacketInfo& rhs); + +inline bool operator!=(const RtpPacketInfo& lhs, const RtpPacketInfo& rhs) { + return !(lhs == rhs); +} + +} // namespace webrtc + +#endif // API_RTP_PACKET_INFO_H_ diff --git a/api/rtp_packet_info_unittest.cc b/api/rtp_packet_info_unittest.cc new file mode 100644 index 0000000..fe79f6d --- /dev/null +++ b/api/rtp_packet_info_unittest.cc @@ -0,0 +1,179 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/rtp_packet_infos.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { + +TEST(RtpPacketInfoTest, Ssrc) { + const uint32_t value = 4038189233; + + RtpPacketInfo lhs; + RtpPacketInfo rhs; + + EXPECT_TRUE(lhs == rhs); + EXPECT_FALSE(lhs != rhs); + + rhs.set_ssrc(value); + EXPECT_EQ(rhs.ssrc(), value); + + EXPECT_FALSE(lhs == rhs); + EXPECT_TRUE(lhs != rhs); + + lhs = rhs; + + EXPECT_TRUE(lhs == rhs); + EXPECT_FALSE(lhs != rhs); + + rhs = RtpPacketInfo(); + EXPECT_NE(rhs.ssrc(), value); + + rhs = RtpPacketInfo(value, {}, {}, {}, {}, {}); + EXPECT_EQ(rhs.ssrc(), value); +} + +TEST(RtpPacketInfoTest, Csrcs) { + const std::vector value = {4038189233, 3016333617, 1207992985}; + + RtpPacketInfo lhs; + RtpPacketInfo rhs; + + EXPECT_TRUE(lhs == rhs); + EXPECT_FALSE(lhs != rhs); + + rhs.set_csrcs(value); + EXPECT_EQ(rhs.csrcs(), value); + + EXPECT_FALSE(lhs == rhs); + EXPECT_TRUE(lhs != rhs); + + lhs = rhs; + + EXPECT_TRUE(lhs == rhs); + EXPECT_FALSE(lhs != rhs); + + rhs = RtpPacketInfo(); + EXPECT_NE(rhs.csrcs(), value); + + rhs = RtpPacketInfo({}, value, {}, {}, {}, {}); + EXPECT_EQ(rhs.csrcs(), value); +} + +TEST(RtpPacketInfoTest, RtpTimestamp) { + const uint32_t value = 4038189233; + + RtpPacketInfo lhs; + RtpPacketInfo rhs; + + EXPECT_TRUE(lhs == rhs); + EXPECT_FALSE(lhs != rhs); + + rhs.set_rtp_timestamp(value); + EXPECT_EQ(rhs.rtp_timestamp(), value); + + EXPECT_FALSE(lhs == rhs); + EXPECT_TRUE(lhs != rhs); + + lhs = rhs; + + EXPECT_TRUE(lhs == rhs); + EXPECT_FALSE(lhs != rhs); + + rhs = RtpPacketInfo(); + EXPECT_NE(rhs.rtp_timestamp(), value); + + rhs = RtpPacketInfo({}, {}, value, {}, {}, {}); + EXPECT_EQ(rhs.rtp_timestamp(), value); +} + +TEST(RtpPacketInfoTest, AudioLevel) { + const absl::optional value = 31; + + RtpPacketInfo lhs; + RtpPacketInfo rhs; + + EXPECT_TRUE(lhs == rhs); + EXPECT_FALSE(lhs != rhs); + + rhs.set_audio_level(value); + EXPECT_EQ(rhs.audio_level(), value); + + EXPECT_FALSE(lhs == rhs); + 
EXPECT_TRUE(lhs != rhs); + + lhs = rhs; + + EXPECT_TRUE(lhs == rhs); + EXPECT_FALSE(lhs != rhs); + + rhs = RtpPacketInfo(); + EXPECT_NE(rhs.audio_level(), value); + + rhs = RtpPacketInfo({}, {}, {}, value, {}, {}); + EXPECT_EQ(rhs.audio_level(), value); +} + +TEST(RtpPacketInfoTest, AbsoluteCaptureTime) { + const absl::optional value = AbsoluteCaptureTime{12, 34}; + + RtpPacketInfo lhs; + RtpPacketInfo rhs; + + EXPECT_TRUE(lhs == rhs); + EXPECT_FALSE(lhs != rhs); + + rhs.set_absolute_capture_time(value); + EXPECT_EQ(rhs.absolute_capture_time(), value); + + EXPECT_FALSE(lhs == rhs); + EXPECT_TRUE(lhs != rhs); + + lhs = rhs; + + EXPECT_TRUE(lhs == rhs); + EXPECT_FALSE(lhs != rhs); + + rhs = RtpPacketInfo(); + EXPECT_NE(rhs.absolute_capture_time(), value); + + rhs = RtpPacketInfo({}, {}, {}, {}, value, {}); + EXPECT_EQ(rhs.absolute_capture_time(), value); +} + +TEST(RtpPacketInfoTest, ReceiveTimeMs) { + const int64_t value = 8868963877546349045LL; + + RtpPacketInfo lhs; + RtpPacketInfo rhs; + + EXPECT_TRUE(lhs == rhs); + EXPECT_FALSE(lhs != rhs); + + rhs.set_receive_time_ms(value); + EXPECT_EQ(rhs.receive_time_ms(), value); + + EXPECT_FALSE(lhs == rhs); + EXPECT_TRUE(lhs != rhs); + + lhs = rhs; + + EXPECT_TRUE(lhs == rhs); + EXPECT_FALSE(lhs != rhs); + + rhs = RtpPacketInfo(); + EXPECT_NE(rhs.receive_time_ms(), value); + + rhs = RtpPacketInfo({}, {}, {}, {}, {}, value); + EXPECT_EQ(rhs.receive_time_ms(), value); +} + +} // namespace webrtc diff --git a/api/rtp_packet_infos.h b/api/rtp_packet_infos.h new file mode 100644 index 0000000..d636464 --- /dev/null +++ b/api/rtp_packet_infos.h @@ -0,0 +1,130 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_RTP_PACKET_INFOS_H_ +#define API_RTP_PACKET_INFOS_H_ + +#include +#include +#include + +#include "api/ref_counted_base.h" +#include "api/rtp_packet_info.h" +#include "api/scoped_refptr.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// Semi-immutable structure to hold information about packets used to assemble +// an audio or video frame. Uses internal reference counting to make it very +// cheap to copy. +// +// We should ideally just use |std::vector| and have it +// |std::move()|-ed as the per-packet information is transferred from one object +// to another. But moving the info, instead of copying it, is not easily done +// for the current video code. +class RTC_EXPORT RtpPacketInfos { + public: + using vector_type = std::vector; + + using value_type = vector_type::value_type; + using size_type = vector_type::size_type; + using difference_type = vector_type::difference_type; + using const_reference = vector_type::const_reference; + using const_pointer = vector_type::const_pointer; + using const_iterator = vector_type::const_iterator; + using const_reverse_iterator = vector_type::const_reverse_iterator; + + using reference = const_reference; + using pointer = const_pointer; + using iterator = const_iterator; + using reverse_iterator = const_reverse_iterator; + + RtpPacketInfos() {} + explicit RtpPacketInfos(const vector_type& entries) + : data_(Data::Create(entries)) {} + + explicit RtpPacketInfos(vector_type&& entries) + : data_(Data::Create(std::move(entries))) {} + + RtpPacketInfos(const RtpPacketInfos& other) = default; + RtpPacketInfos(RtpPacketInfos&& other) = default; + RtpPacketInfos& operator=(const RtpPacketInfos& other) = default; + RtpPacketInfos& operator=(RtpPacketInfos&& other) = default; + + const_reference operator[](size_type pos) const { return entries()[pos]; } + + const_reference at(size_type pos) 
const { return entries().at(pos); } + const_reference front() const { return entries().front(); } + const_reference back() const { return entries().back(); } + + const_iterator begin() const { return entries().begin(); } + const_iterator end() const { return entries().end(); } + const_reverse_iterator rbegin() const { return entries().rbegin(); } + const_reverse_iterator rend() const { return entries().rend(); } + + const_iterator cbegin() const { return entries().cbegin(); } + const_iterator cend() const { return entries().cend(); } + const_reverse_iterator crbegin() const { return entries().crbegin(); } + const_reverse_iterator crend() const { return entries().crend(); } + + bool empty() const { return entries().empty(); } + size_type size() const { return entries().size(); } + + private: + class Data : public rtc::RefCountedBase { + public: + static rtc::scoped_refptr Create(const vector_type& entries) { + // Performance optimization for the empty case. + if (entries.empty()) { + return nullptr; + } + + return new Data(entries); + } + + static rtc::scoped_refptr Create(vector_type&& entries) { + // Performance optimization for the empty case. 
+ if (entries.empty()) { + return nullptr; + } + + return new Data(std::move(entries)); + } + + const vector_type& entries() const { return entries_; } + + private: + explicit Data(const vector_type& entries) : entries_(entries) {} + explicit Data(vector_type&& entries) : entries_(std::move(entries)) {} + ~Data() override {} + + const vector_type entries_; + }; + + static const vector_type& empty_entries() { + static const vector_type& value = *new vector_type(); + return value; + } + + const vector_type& entries() const { + if (data_ != nullptr) { + return data_->entries(); + } else { + return empty_entries(); + } + } + + rtc::scoped_refptr data_; +}; + +} // namespace webrtc + +#endif // API_RTP_PACKET_INFOS_H_ diff --git a/api/rtp_packet_infos_unittest.cc b/api/rtp_packet_infos_unittest.cc new file mode 100644 index 0000000..ce502ac --- /dev/null +++ b/api/rtp_packet_infos_unittest.cc @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/rtp_packet_infos.h" + +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::ElementsAre; +using ::testing::SizeIs; + +template +RtpPacketInfos::vector_type ToVector(Iterator begin, Iterator end) { + return RtpPacketInfos::vector_type(begin, end); +} + +} // namespace + +TEST(RtpPacketInfosTest, BasicFunctionality) { + RtpPacketInfo p0(123, {1, 2}, 89, 5, AbsoluteCaptureTime{45, 78}, 7); + RtpPacketInfo p1(456, {3, 4}, 89, 4, AbsoluteCaptureTime{13, 21}, 1); + RtpPacketInfo p2(789, {5, 6}, 88, 1, AbsoluteCaptureTime{99, 78}, 7); + + RtpPacketInfos x({p0, p1, p2}); + + ASSERT_THAT(x, SizeIs(3)); + + EXPECT_EQ(x[0], p0); + EXPECT_EQ(x[1], p1); + EXPECT_EQ(x[2], p2); + + EXPECT_EQ(x.front(), p0); + EXPECT_EQ(x.back(), p2); + + EXPECT_THAT(ToVector(x.begin(), x.end()), ElementsAre(p0, p1, p2)); + EXPECT_THAT(ToVector(x.rbegin(), x.rend()), ElementsAre(p2, p1, p0)); + + EXPECT_THAT(ToVector(x.cbegin(), x.cend()), ElementsAre(p0, p1, p2)); + EXPECT_THAT(ToVector(x.crbegin(), x.crend()), ElementsAre(p2, p1, p0)); + + EXPECT_FALSE(x.empty()); +} + +TEST(RtpPacketInfosTest, CopyShareData) { + RtpPacketInfo p0(123, {1, 2}, 89, 5, AbsoluteCaptureTime{45, 78}, 7); + RtpPacketInfo p1(456, {3, 4}, 89, 4, AbsoluteCaptureTime{13, 21}, 1); + RtpPacketInfo p2(789, {5, 6}, 88, 1, AbsoluteCaptureTime{99, 78}, 7); + + RtpPacketInfos lhs({p0, p1, p2}); + RtpPacketInfos rhs = lhs; + + ASSERT_THAT(lhs, SizeIs(3)); + ASSERT_THAT(rhs, SizeIs(3)); + + for (size_t i = 0; i < lhs.size(); ++i) { + EXPECT_EQ(lhs[i], rhs[i]); + } + + EXPECT_EQ(lhs.front(), rhs.front()); + EXPECT_EQ(lhs.back(), rhs.back()); + + EXPECT_EQ(lhs.begin(), rhs.begin()); + EXPECT_EQ(lhs.end(), rhs.end()); + EXPECT_EQ(lhs.rbegin(), rhs.rbegin()); + EXPECT_EQ(lhs.rend(), rhs.rend()); + + EXPECT_EQ(lhs.cbegin(), rhs.cbegin()); + EXPECT_EQ(lhs.cend(), rhs.cend()); + EXPECT_EQ(lhs.crbegin(), rhs.crbegin()); + EXPECT_EQ(lhs.crend(), rhs.crend()); + + 
EXPECT_EQ(lhs.empty(), rhs.empty()); +} + +} // namespace webrtc diff --git a/api/rtp_parameters.cc b/api/rtp_parameters.cc new file mode 100644 index 0000000..28acb68 --- /dev/null +++ b/api/rtp_parameters.cc @@ -0,0 +1,226 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/rtp_parameters.h" + +#include +#include +#include + +#include "api/array_view.h" +#include "rtc_base/strings/string_builder.h" + +namespace webrtc { + +const char* DegradationPreferenceToString( + DegradationPreference degradation_preference) { + switch (degradation_preference) { + case DegradationPreference::DISABLED: + return "disabled"; + case DegradationPreference::MAINTAIN_FRAMERATE: + return "maintain-framerate"; + case DegradationPreference::MAINTAIN_RESOLUTION: + return "maintain-resolution"; + case DegradationPreference::BALANCED: + return "balanced"; + } +} + +const double kDefaultBitratePriority = 1.0; + +RtcpFeedback::RtcpFeedback() = default; +RtcpFeedback::RtcpFeedback(RtcpFeedbackType type) : type(type) {} +RtcpFeedback::RtcpFeedback(RtcpFeedbackType type, + RtcpFeedbackMessageType message_type) + : type(type), message_type(message_type) {} +RtcpFeedback::RtcpFeedback(const RtcpFeedback& rhs) = default; +RtcpFeedback::~RtcpFeedback() = default; + +RtpCodecCapability::RtpCodecCapability() = default; +RtpCodecCapability::~RtpCodecCapability() = default; + +RtpHeaderExtensionCapability::RtpHeaderExtensionCapability() = default; +RtpHeaderExtensionCapability::RtpHeaderExtensionCapability( + absl::string_view uri) + : uri(uri) {} +RtpHeaderExtensionCapability::RtpHeaderExtensionCapability( + absl::string_view 
uri, + int preferred_id) + : uri(uri), preferred_id(preferred_id) {} +RtpHeaderExtensionCapability::RtpHeaderExtensionCapability( + absl::string_view uri, + int preferred_id, + RtpTransceiverDirection direction) + : uri(uri), preferred_id(preferred_id), direction(direction) {} +RtpHeaderExtensionCapability::~RtpHeaderExtensionCapability() = default; + +RtpExtension::RtpExtension() = default; +RtpExtension::RtpExtension(absl::string_view uri, int id) : uri(uri), id(id) {} +RtpExtension::RtpExtension(absl::string_view uri, int id, bool encrypt) + : uri(uri), id(id), encrypt(encrypt) {} +RtpExtension::~RtpExtension() = default; + +RtpFecParameters::RtpFecParameters() = default; +RtpFecParameters::RtpFecParameters(FecMechanism mechanism) + : mechanism(mechanism) {} +RtpFecParameters::RtpFecParameters(FecMechanism mechanism, uint32_t ssrc) + : ssrc(ssrc), mechanism(mechanism) {} +RtpFecParameters::RtpFecParameters(const RtpFecParameters& rhs) = default; +RtpFecParameters::~RtpFecParameters() = default; + +RtpRtxParameters::RtpRtxParameters() = default; +RtpRtxParameters::RtpRtxParameters(uint32_t ssrc) : ssrc(ssrc) {} +RtpRtxParameters::RtpRtxParameters(const RtpRtxParameters& rhs) = default; +RtpRtxParameters::~RtpRtxParameters() = default; + +RtpEncodingParameters::RtpEncodingParameters() = default; +RtpEncodingParameters::RtpEncodingParameters(const RtpEncodingParameters& rhs) = + default; +RtpEncodingParameters::~RtpEncodingParameters() = default; + +RtpCodecParameters::RtpCodecParameters() = default; +RtpCodecParameters::RtpCodecParameters(const RtpCodecParameters& rhs) = default; +RtpCodecParameters::~RtpCodecParameters() = default; + +RtpCapabilities::RtpCapabilities() = default; +RtpCapabilities::~RtpCapabilities() = default; + +RtcpParameters::RtcpParameters() = default; +RtcpParameters::RtcpParameters(const RtcpParameters& rhs) = default; +RtcpParameters::~RtcpParameters() = default; + +RtpParameters::RtpParameters() = default; 
+RtpParameters::RtpParameters(const RtpParameters& rhs) = default; +RtpParameters::~RtpParameters() = default; + +std::string RtpExtension::ToString() const { + char buf[256]; + rtc::SimpleStringBuilder sb(buf); + sb << "{uri: " << uri; + sb << ", id: " << id; + if (encrypt) { + sb << ", encrypt"; + } + sb << '}'; + return sb.str(); +} + +constexpr char RtpExtension::kEncryptHeaderExtensionsUri[]; +constexpr char RtpExtension::kAudioLevelUri[]; +constexpr char RtpExtension::kTimestampOffsetUri[]; +constexpr char RtpExtension::kAbsSendTimeUri[]; +constexpr char RtpExtension::kAbsoluteCaptureTimeUri[]; +constexpr char RtpExtension::kVideoRotationUri[]; +constexpr char RtpExtension::kVideoContentTypeUri[]; +constexpr char RtpExtension::kVideoTimingUri[]; +constexpr char RtpExtension::kGenericFrameDescriptorUri00[]; +constexpr char RtpExtension::kDependencyDescriptorUri[]; +constexpr char RtpExtension::kTransportSequenceNumberUri[]; +constexpr char RtpExtension::kTransportSequenceNumberV2Uri[]; +constexpr char RtpExtension::kPlayoutDelayUri[]; +constexpr char RtpExtension::kColorSpaceUri[]; +constexpr char RtpExtension::kMidUri[]; +constexpr char RtpExtension::kRidUri[]; +constexpr char RtpExtension::kRepairedRidUri[]; + +constexpr int RtpExtension::kMinId; +constexpr int RtpExtension::kMaxId; +constexpr int RtpExtension::kMaxValueSize; +constexpr int RtpExtension::kOneByteHeaderExtensionMaxId; +constexpr int RtpExtension::kOneByteHeaderExtensionMaxValueSize; + +bool RtpExtension::IsSupportedForAudio(absl::string_view uri) { + return uri == webrtc::RtpExtension::kAudioLevelUri || + uri == webrtc::RtpExtension::kAbsSendTimeUri || + uri == webrtc::RtpExtension::kAbsoluteCaptureTimeUri || + uri == webrtc::RtpExtension::kTransportSequenceNumberUri || + uri == webrtc::RtpExtension::kTransportSequenceNumberV2Uri || + uri == webrtc::RtpExtension::kMidUri || + uri == webrtc::RtpExtension::kRidUri || + uri == webrtc::RtpExtension::kRepairedRidUri; +} + +bool 
RtpExtension::IsSupportedForVideo(absl::string_view uri) { + return uri == webrtc::RtpExtension::kTimestampOffsetUri || + uri == webrtc::RtpExtension::kAbsSendTimeUri || + uri == webrtc::RtpExtension::kAbsoluteCaptureTimeUri || + uri == webrtc::RtpExtension::kVideoRotationUri || + uri == webrtc::RtpExtension::kTransportSequenceNumberUri || + uri == webrtc::RtpExtension::kTransportSequenceNumberV2Uri || + uri == webrtc::RtpExtension::kPlayoutDelayUri || + uri == webrtc::RtpExtension::kVideoContentTypeUri || + uri == webrtc::RtpExtension::kVideoTimingUri || + uri == webrtc::RtpExtension::kMidUri || + uri == webrtc::RtpExtension::kGenericFrameDescriptorUri00 || + uri == webrtc::RtpExtension::kDependencyDescriptorUri || + uri == webrtc::RtpExtension::kColorSpaceUri || + uri == webrtc::RtpExtension::kRidUri || + uri == webrtc::RtpExtension::kRepairedRidUri; +} + +bool RtpExtension::IsEncryptionSupported(absl::string_view uri) { + return uri == webrtc::RtpExtension::kAudioLevelUri || + uri == webrtc::RtpExtension::kTimestampOffsetUri || +#if !defined(ENABLE_EXTERNAL_AUTH) + // TODO(jbauch): Figure out a way to always allow "kAbsSendTimeUri" + // here and filter out later if external auth is really used in + // srtpfilter. External auth is used by Chromium and replaces the + // extension header value of "kAbsSendTimeUri", so it must not be + // encrypted (which can't be done by Chromium). 
+ uri == webrtc::RtpExtension::kAbsSendTimeUri || +#endif + uri == webrtc::RtpExtension::kAbsoluteCaptureTimeUri || + uri == webrtc::RtpExtension::kVideoRotationUri || + uri == webrtc::RtpExtension::kTransportSequenceNumberUri || + uri == webrtc::RtpExtension::kTransportSequenceNumberV2Uri || + uri == webrtc::RtpExtension::kPlayoutDelayUri || + uri == webrtc::RtpExtension::kVideoContentTypeUri || + uri == webrtc::RtpExtension::kMidUri || + uri == webrtc::RtpExtension::kRidUri || + uri == webrtc::RtpExtension::kRepairedRidUri; +} + +const RtpExtension* RtpExtension::FindHeaderExtensionByUri( + const std::vector& extensions, + absl::string_view uri) { + for (const auto& extension : extensions) { + if (extension.uri == uri) { + return &extension; + } + } + return nullptr; +} + +std::vector RtpExtension::FilterDuplicateNonEncrypted( + const std::vector& extensions) { + std::vector filtered; + for (auto extension = extensions.begin(); extension != extensions.end(); + ++extension) { + if (extension->encrypt) { + filtered.push_back(*extension); + continue; + } + + // Only add non-encrypted extension if no encrypted with the same URI + // is also present... + if (std::any_of(extension + 1, extensions.end(), + [&](const RtpExtension& check) { + return extension->uri == check.uri; + })) { + continue; + } + + // ...and has not been added before. + if (!FindHeaderExtensionByUri(filtered, extension->uri)) { + filtered.push_back(*extension); + } + } + return filtered; +} +} // namespace webrtc diff --git a/api/rtp_parameters.h b/api/rtp_parameters.h new file mode 100644 index 0000000..b667bf8 --- /dev/null +++ b/api/rtp_parameters.h @@ -0,0 +1,658 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_RTP_PARAMETERS_H_ +#define API_RTP_PARAMETERS_H_ + +#include + +#include +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/media_types.h" +#include "api/priority.h" +#include "api/rtp_transceiver_direction.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// These structures are intended to mirror those defined by: +// http://draft.ortc.org/#rtcrtpdictionaries* +// Contains everything specified as of 2017 Jan 24. +// +// They are used when retrieving or modifying the parameters of an +// RtpSender/RtpReceiver, or retrieving capabilities. +// +// Note on conventions: Where ORTC may use "octet", "short" and "unsigned" +// types, we typically use "int", in keeping with our style guidelines. The +// parameter's actual valid range will be enforced when the parameters are set, +// rather than when the parameters struct is built. An exception is made for +// SSRCs, since they use the full unsigned 32-bit range, and aren't expected to +// be used for any numeric comparisons/operations. +// +// Additionally, where ORTC uses strings, we may use enums for things that have +// a fixed number of supported values. However, for things that can be extended +// (such as codecs, by providing an external encoder factory), a string +// identifier is used. + +enum class FecMechanism { + RED, + RED_AND_ULPFEC, + FLEXFEC, +}; + +// Used in RtcpFeedback struct. +enum class RtcpFeedbackType { + CCM, + LNTF, // "goog-lntf" + NACK, + REMB, // "goog-remb" + TRANSPORT_CC, +}; + +// Used in RtcpFeedback struct when type is NACK or CCM. +enum class RtcpFeedbackMessageType { + // Equivalent to {type: "nack", parameter: undefined} in ORTC. + GENERIC_NACK, + PLI, // Usable with NACK. + FIR, // Usable with CCM. 
+}; + +enum class DtxStatus { + DISABLED, + ENABLED, +}; + +// Based on the spec in +// https://w3c.github.io/webrtc-pc/#idl-def-rtcdegradationpreference. +// These options are enforced on a best-effort basis. For instance, all of +// these options may suffer some frame drops in order to avoid queuing. +// TODO(sprang): Look into possibility of more strictly enforcing the +// maintain-framerate option. +// TODO(deadbeef): Default to "balanced", as the spec indicates? +enum class DegradationPreference { + // Don't take any actions based on over-utilization signals. Not part of the + // web API. + DISABLED, + // On over-use, request lower resolution, possibly causing down-scaling. + MAINTAIN_FRAMERATE, + // On over-use, request lower frame rate, possibly causing frame drops. + MAINTAIN_RESOLUTION, + // Try to strike a "pleasing" balance between frame rate or resolution. + BALANCED, +}; + +RTC_EXPORT const char* DegradationPreferenceToString( + DegradationPreference degradation_preference); + +RTC_EXPORT extern const double kDefaultBitratePriority; + +struct RTC_EXPORT RtcpFeedback { + RtcpFeedbackType type = RtcpFeedbackType::CCM; + + // Equivalent to ORTC "parameter" field with slight differences: + // 1. It's an enum instead of a string. + // 2. Generic NACK feedback is represented by a GENERIC_NACK message type, + // rather than an unset "parameter" value. + absl::optional message_type; + + // Constructors for convenience. + RtcpFeedback(); + explicit RtcpFeedback(RtcpFeedbackType type); + RtcpFeedback(RtcpFeedbackType type, RtcpFeedbackMessageType message_type); + RtcpFeedback(const RtcpFeedback&); + ~RtcpFeedback(); + + bool operator==(const RtcpFeedback& o) const { + return type == o.type && message_type == o.message_type; + } + bool operator!=(const RtcpFeedback& o) const { return !(*this == o); } +}; + +// RtpCodecCapability is to RtpCodecParameters as RtpCapabilities is to +// RtpParameters. 
This represents the static capabilities of an endpoint's +// implementation of a codec. +struct RTC_EXPORT RtpCodecCapability { + RtpCodecCapability(); + ~RtpCodecCapability(); + + // Build MIME "type/subtype" string from |name| and |kind|. + std::string mime_type() const { return MediaTypeToString(kind) + "/" + name; } + + // Used to identify the codec. Equivalent to MIME subtype. + std::string name; + + // The media type of this codec. Equivalent to MIME top-level type. + cricket::MediaType kind = cricket::MEDIA_TYPE_AUDIO; + + // Clock rate in Hertz. If unset, the codec is applicable to any clock rate. + absl::optional clock_rate; + + // Default payload type for this codec. Mainly needed for codecs that use + // that have statically assigned payload types. + absl::optional preferred_payload_type; + + // Maximum packetization time supported by an RtpReceiver for this codec. + // TODO(deadbeef): Not implemented. + absl::optional max_ptime; + + // Preferred packetization time for an RtpReceiver or RtpSender of this codec. + // TODO(deadbeef): Not implemented. + absl::optional ptime; + + // The number of audio channels supported. Unused for video codecs. + absl::optional num_channels; + + // Feedback mechanisms supported for this codec. + std::vector rtcp_feedback; + + // Codec-specific parameters that must be signaled to the remote party. + // + // Corresponds to "a=fmtp" parameters in SDP. + // + // Contrary to ORTC, these parameters are named using all lowercase strings. + // This helps make the mapping to SDP simpler, if an application is using SDP. + // Boolean values are represented by the string "1". + std::map parameters; + + // Codec-specific parameters that may optionally be signaled to the remote + // party. + // TODO(deadbeef): Not implemented. + std::map options; + + // Maximum number of temporal layer extensions supported by this codec. + // For example, a value of 1 indicates that 2 total layers are supported. + // TODO(deadbeef): Not implemented. 
+ int max_temporal_layer_extensions = 0; + + // Maximum number of spatial layer extensions supported by this codec. + // For example, a value of 1 indicates that 2 total layers are supported. + // TODO(deadbeef): Not implemented. + int max_spatial_layer_extensions = 0; + + // Whether the implementation can send/receive SVC layers with distinct SSRCs. + // Always false for audio codecs. True for video codecs that support scalable + // video coding with MRST. + // TODO(deadbeef): Not implemented. + bool svc_multi_stream_support = false; + + bool operator==(const RtpCodecCapability& o) const { + return name == o.name && kind == o.kind && clock_rate == o.clock_rate && + preferred_payload_type == o.preferred_payload_type && + max_ptime == o.max_ptime && ptime == o.ptime && + num_channels == o.num_channels && rtcp_feedback == o.rtcp_feedback && + parameters == o.parameters && options == o.options && + max_temporal_layer_extensions == o.max_temporal_layer_extensions && + max_spatial_layer_extensions == o.max_spatial_layer_extensions && + svc_multi_stream_support == o.svc_multi_stream_support; + } + bool operator!=(const RtpCodecCapability& o) const { return !(*this == o); } +}; + +// Used in RtpCapabilities and RtpTransceiverInterface's header extensions query +// and setup methods; represents the capabilities/preferences of an +// implementation for a header extension. +// +// Just called "RtpHeaderExtension" in ORTC, but the "Capability" suffix was +// added here for consistency and to avoid confusion with +// RtpHeaderExtensionParameters. +// +// Note that ORTC includes a "kind" field, but we omit this because it's +// redundant; if you call "RtpReceiver::GetCapabilities(MEDIA_TYPE_AUDIO)", +// you know you're getting audio capabilities. +struct RTC_EXPORT RtpHeaderExtensionCapability { + // URI of this extension, as defined in RFC8285. + std::string uri; + + // Preferred value of ID that goes in the packet. 
+ absl::optional preferred_id; + + // If true, it's preferred that the value in the header is encrypted. + // TODO(deadbeef): Not implemented. + bool preferred_encrypt = false; + + // The direction of the extension. The kStopped value is only used with + // RtpTransceiverInterface::HeaderExtensionsToOffer() and + // SetOfferedRtpHeaderExtensions(). + RtpTransceiverDirection direction = RtpTransceiverDirection::kSendRecv; + + // Constructors for convenience. + RtpHeaderExtensionCapability(); + explicit RtpHeaderExtensionCapability(absl::string_view uri); + RtpHeaderExtensionCapability(absl::string_view uri, int preferred_id); + RtpHeaderExtensionCapability(absl::string_view uri, + int preferred_id, + RtpTransceiverDirection direction); + ~RtpHeaderExtensionCapability(); + + bool operator==(const RtpHeaderExtensionCapability& o) const { + return uri == o.uri && preferred_id == o.preferred_id && + preferred_encrypt == o.preferred_encrypt && direction == o.direction; + } + bool operator!=(const RtpHeaderExtensionCapability& o) const { + return !(*this == o); + } +}; + +// RTP header extension, see RFC8285. +struct RTC_EXPORT RtpExtension { + RtpExtension(); + RtpExtension(absl::string_view uri, int id); + RtpExtension(absl::string_view uri, int id, bool encrypt); + ~RtpExtension(); + + std::string ToString() const; + bool operator==(const RtpExtension& rhs) const { + return uri == rhs.uri && id == rhs.id && encrypt == rhs.encrypt; + } + static bool IsSupportedForAudio(absl::string_view uri); + static bool IsSupportedForVideo(absl::string_view uri); + // Return "true" if the given RTP header extension URI may be encrypted. + static bool IsEncryptionSupported(absl::string_view uri); + + // Returns the named header extension if found among all extensions, + // nullptr otherwise. 
+ static const RtpExtension* FindHeaderExtensionByUri( + const std::vector& extensions, + absl::string_view uri); + + // Return a list of RTP header extensions with the non-encrypted extensions + // removed if both the encrypted and non-encrypted extension is present for + // the same URI. + static std::vector FilterDuplicateNonEncrypted( + const std::vector& extensions); + + // Encryption of Header Extensions, see RFC 6904 for details: + // https://tools.ietf.org/html/rfc6904 + static constexpr char kEncryptHeaderExtensionsUri[] = + "urn:ietf:params:rtp-hdrext:encrypt"; + + // Header extension for audio levels, as defined in: + // https://tools.ietf.org/html/rfc6464 + static constexpr char kAudioLevelUri[] = + "urn:ietf:params:rtp-hdrext:ssrc-audio-level"; + + // Header extension for RTP timestamp offset, see RFC 5450 for details: + // http://tools.ietf.org/html/rfc5450 + static constexpr char kTimestampOffsetUri[] = + "urn:ietf:params:rtp-hdrext:toffset"; + + // Header extension for absolute send time, see url for details: + // http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time + static constexpr char kAbsSendTimeUri[] = + "http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time"; + + // Header extension for absolute capture time, see url for details: + // http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time + static constexpr char kAbsoluteCaptureTimeUri[] = + "http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time"; + + // Header extension for coordination of video orientation, see url for + // details: + // http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/12.07.00_60/ts_126114v120700p.pdf + static constexpr char kVideoRotationUri[] = "urn:3gpp:video-orientation"; + + // Header extension for video content type. E.g. default or screenshare. + static constexpr char kVideoContentTypeUri[] = + "http://www.webrtc.org/experiments/rtp-hdrext/video-content-type"; + + // Header extension for video timing. 
+ static constexpr char kVideoTimingUri[] = + "http://www.webrtc.org/experiments/rtp-hdrext/video-timing"; + + // Experimental codec agnostic frame descriptor. + static constexpr char kGenericFrameDescriptorUri00[] = + "http://www.webrtc.org/experiments/rtp-hdrext/" + "generic-frame-descriptor-00"; + static constexpr char kDependencyDescriptorUri[] = + "https://aomediacodec.github.io/av1-rtp-spec/" + "#dependency-descriptor-rtp-header-extension"; + + // Header extension for transport sequence number, see url for details: + // http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions + static constexpr char kTransportSequenceNumberUri[] = + "http://www.ietf.org/id/" + "draft-holmer-rmcat-transport-wide-cc-extensions-01"; + static constexpr char kTransportSequenceNumberV2Uri[] = + "http://www.webrtc.org/experiments/rtp-hdrext/transport-wide-cc-02"; + + // This extension allows applications to adaptively limit the playout delay + // on frames as per the current needs. For example, a gaming application + // has very different needs on end-to-end delay compared to a video-conference + // application. + static constexpr char kPlayoutDelayUri[] = + "http://www.webrtc.org/experiments/rtp-hdrext/playout-delay"; + + // Header extension for color space information. + static constexpr char kColorSpaceUri[] = + "http://www.webrtc.org/experiments/rtp-hdrext/color-space"; + + // Header extension for identifying media section within a transport. 
+ // https://tools.ietf.org/html/draft-ietf-mmusic-sdp-bundle-negotiation-49#section-15 + static constexpr char kMidUri[] = "urn:ietf:params:rtp-hdrext:sdes:mid"; + + // Header extension for RIDs and Repaired RIDs + // https://tools.ietf.org/html/draft-ietf-avtext-rid-09 + // https://tools.ietf.org/html/draft-ietf-mmusic-rid-15 + static constexpr char kRidUri[] = + "urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id"; + static constexpr char kRepairedRidUri[] = + "urn:ietf:params:rtp-hdrext:sdes:repaired-rtp-stream-id"; + + // Inclusive min and max IDs for two-byte header extensions and one-byte + // header extensions, per RFC8285 Section 4.2-4.3. + static constexpr int kMinId = 1; + static constexpr int kMaxId = 255; + static constexpr int kMaxValueSize = 255; + static constexpr int kOneByteHeaderExtensionMaxId = 14; + static constexpr int kOneByteHeaderExtensionMaxValueSize = 16; + + std::string uri; + int id = 0; + bool encrypt = false; +}; + +struct RTC_EXPORT RtpFecParameters { + // If unset, a value is chosen by the implementation. + // Works just like RtpEncodingParameters::ssrc. + absl::optional ssrc; + + FecMechanism mechanism = FecMechanism::RED; + + // Constructors for convenience. + RtpFecParameters(); + explicit RtpFecParameters(FecMechanism mechanism); + RtpFecParameters(FecMechanism mechanism, uint32_t ssrc); + RtpFecParameters(const RtpFecParameters&); + ~RtpFecParameters(); + + bool operator==(const RtpFecParameters& o) const { + return ssrc == o.ssrc && mechanism == o.mechanism; + } + bool operator!=(const RtpFecParameters& o) const { return !(*this == o); } +}; + +struct RTC_EXPORT RtpRtxParameters { + // If unset, a value is chosen by the implementation. + // Works just like RtpEncodingParameters::ssrc. + absl::optional ssrc; + + // Constructors for convenience. 
+ RtpRtxParameters(); + explicit RtpRtxParameters(uint32_t ssrc); + RtpRtxParameters(const RtpRtxParameters&); + ~RtpRtxParameters(); + + bool operator==(const RtpRtxParameters& o) const { return ssrc == o.ssrc; } + bool operator!=(const RtpRtxParameters& o) const { return !(*this == o); } +}; + +struct RTC_EXPORT RtpEncodingParameters { + RtpEncodingParameters(); + RtpEncodingParameters(const RtpEncodingParameters&); + ~RtpEncodingParameters(); + + // If unset, a value is chosen by the implementation. + // + // Note that the chosen value is NOT returned by GetParameters, because it + // may change due to an SSRC conflict, in which case the conflict is handled + // internally without any event. Another way of looking at this is that an + // unset SSRC acts as a "wildcard" SSRC. + absl::optional ssrc; + + // The relative bitrate priority of this encoding. Currently this is + // implemented for the entire rtp sender by using the value of the first + // encoding parameter. + // See: https://w3c.github.io/webrtc-priority/#enumdef-rtcprioritytype + // "very-low" = 0.5 + // "low" = 1.0 + // "medium" = 2.0 + // "high" = 4.0 + // TODO(webrtc.bugs.org/8630): Implement this per encoding parameter. + // Currently there is logic for how bitrate is distributed per simulcast layer + // in the VideoBitrateAllocator. This must be updated to incorporate relative + // bitrate priority. + double bitrate_priority = kDefaultBitratePriority; + + // The relative DiffServ Code Point priority for this encoding, allowing + // packets to be marked relatively higher or lower without affecting + // bandwidth allocations. See https://w3c.github.io/webrtc-dscp-exp/ . + // TODO(http://crbug.com/webrtc/8630): Implement this per encoding parameter. + // TODO(http://crbug.com/webrtc/11379): TCP connections should use a single + // DSCP value even if shared by multiple senders; this is not implemented. 
+ Priority network_priority = Priority::kLow; + + // If set, this represents the Transport Independent Application Specific + // maximum bandwidth defined in RFC3890. If unset, there is no maximum + // bitrate. Currently this is implemented for the entire rtp sender by using + // the value of the first encoding parameter. + // + // Just called "maxBitrate" in ORTC spec. + // + // TODO(deadbeef): With ORTC RtpSenders, this currently sets the total + // bandwidth for the entire bandwidth estimator (audio and video). This is + // just always how "b=AS" was handled, but it's not correct and should be + // fixed. + absl::optional max_bitrate_bps; + + // Specifies the minimum bitrate in bps for video. + absl::optional min_bitrate_bps; + + // Specifies the maximum framerate in fps for video. + absl::optional max_framerate; + + // Specifies the number of temporal layers for video (if the feature is + // supported by the codec implementation). + // TODO(asapersson): Different number of temporal layers are not supported + // per simulcast layer. + // Screencast support is experimental. + absl::optional num_temporal_layers; + + // For video, scale the resolution down by this factor. + absl::optional scale_resolution_down_by; + + // For an RtpSender, set to true to cause this encoding to be encoded and + // sent, and false for it not to be encoded and sent. This allows control + // across multiple encodings of a sender for turning simulcast layers on and + // off. + // TODO(webrtc.bugs.org/8807): Updating this parameter will trigger an encoder + // reset, but this isn't necessarily required. + bool active = true; + + // Value to use for RID RTP header extension. + // Called "encodingId" in ORTC. 
+ std::string rid; + + // Allow dynamic frame length changes for audio: + // https://w3c.github.io/webrtc-extensions/#dom-rtcrtpencodingparameters-adaptiveptime + bool adaptive_ptime = false; + + bool operator==(const RtpEncodingParameters& o) const { + return ssrc == o.ssrc && bitrate_priority == o.bitrate_priority && + network_priority == o.network_priority && + max_bitrate_bps == o.max_bitrate_bps && + min_bitrate_bps == o.min_bitrate_bps && + max_framerate == o.max_framerate && + num_temporal_layers == o.num_temporal_layers && + scale_resolution_down_by == o.scale_resolution_down_by && + active == o.active && rid == o.rid && + adaptive_ptime == o.adaptive_ptime; + } + bool operator!=(const RtpEncodingParameters& o) const { + return !(*this == o); + } +}; + +struct RTC_EXPORT RtpCodecParameters { + RtpCodecParameters(); + RtpCodecParameters(const RtpCodecParameters&); + ~RtpCodecParameters(); + + // Build MIME "type/subtype" string from |name| and |kind|. + std::string mime_type() const { return MediaTypeToString(kind) + "/" + name; } + + // Used to identify the codec. Equivalent to MIME subtype. + std::string name; + + // The media type of this codec. Equivalent to MIME top-level type. + cricket::MediaType kind = cricket::MEDIA_TYPE_AUDIO; + + // Payload type used to identify this codec in RTP packets. + // This must always be present, and must be unique across all codecs using + // the same transport. + int payload_type = 0; + + // If unset, the implementation default is used. + absl::optional clock_rate; + + // The number of audio channels used. Unset for video codecs. If unset for + // audio, the implementation default is used. + // TODO(deadbeef): The "implementation default" part isn't fully implemented. + // Only defaults to 1, even though some codecs (such as opus) should really + // default to 2. + absl::optional num_channels; + + // The maximum packetization time to be used by an RtpSender. + // If |ptime| is also set, this will be ignored. 
+ // TODO(deadbeef): Not implemented. + absl::optional max_ptime; + + // The packetization time to be used by an RtpSender. + // If unset, will use any time up to max_ptime. + // TODO(deadbeef): Not implemented. + absl::optional ptime; + + // Feedback mechanisms to be used for this codec. + // TODO(deadbeef): Not implemented with PeerConnection senders/receivers. + std::vector rtcp_feedback; + + // Codec-specific parameters that must be signaled to the remote party. + // + // Corresponds to "a=fmtp" parameters in SDP. + // + // Contrary to ORTC, these parameters are named using all lowercase strings. + // This helps make the mapping to SDP simpler, if an application is using SDP. + // Boolean values are represented by the string "1". + std::map parameters; + + bool operator==(const RtpCodecParameters& o) const { + return name == o.name && kind == o.kind && payload_type == o.payload_type && + clock_rate == o.clock_rate && num_channels == o.num_channels && + max_ptime == o.max_ptime && ptime == o.ptime && + rtcp_feedback == o.rtcp_feedback && parameters == o.parameters; + } + bool operator!=(const RtpCodecParameters& o) const { return !(*this == o); } +}; + +// RtpCapabilities is used to represent the static capabilities of an endpoint. +// An application can use these capabilities to construct an RtpParameters. +struct RTC_EXPORT RtpCapabilities { + RtpCapabilities(); + ~RtpCapabilities(); + + // Supported codecs. + std::vector codecs; + + // Supported RTP header extensions. + std::vector header_extensions; + + // Supported Forward Error Correction (FEC) mechanisms. Note that the RED, + // ulpfec and flexfec codecs used by these mechanisms will still appear in + // |codecs|. 
+ std::vector fec; + + bool operator==(const RtpCapabilities& o) const { + return codecs == o.codecs && header_extensions == o.header_extensions && + fec == o.fec; + } + bool operator!=(const RtpCapabilities& o) const { return !(*this == o); } +}; + +struct RtcpParameters final { + RtcpParameters(); + RtcpParameters(const RtcpParameters&); + ~RtcpParameters(); + + // The SSRC to be used in the "SSRC of packet sender" field. If not set, one + // will be chosen by the implementation. + // TODO(deadbeef): Not implemented. + absl::optional ssrc; + + // The Canonical Name (CNAME) used by RTCP (e.g. in SDES messages). + // + // If empty in the construction of the RtpTransport, one will be generated by + // the implementation, and returned in GetRtcpParameters. Multiple + // RtpTransports created by the same OrtcFactory will use the same generated + // CNAME. + // + // If empty when passed into SetParameters, the CNAME simply won't be + // modified. + std::string cname; + + // Send reduced-size RTCP? + bool reduced_size = false; + + // Send RTCP multiplexed on the RTP transport? + // Not used with PeerConnection senders/receivers + bool mux = true; + + bool operator==(const RtcpParameters& o) const { + return ssrc == o.ssrc && cname == o.cname && + reduced_size == o.reduced_size && mux == o.mux; + } + bool operator!=(const RtcpParameters& o) const { return !(*this == o); } +}; + +struct RTC_EXPORT RtpParameters { + RtpParameters(); + RtpParameters(const RtpParameters&); + ~RtpParameters(); + + // Used when calling getParameters/setParameters with a PeerConnection + // RtpSender, to ensure that outdated parameters are not unintentionally + // applied successfully. + std::string transaction_id; + + // Value to use for MID RTP header extension. + // Called "muxId" in ORTC. + // TODO(deadbeef): Not implemented. + std::string mid; + + std::vector codecs; + + std::vector header_extensions; + + std::vector encodings; + + // Only available with a Peerconnection RtpSender. 
+ // In ORTC, our API includes an additional "RtpTransport" + // abstraction on which RTCP parameters are set. + RtcpParameters rtcp; + + // When bandwidth is constrained and the RtpSender needs to choose between + // degrading resolution or degrading framerate, degradationPreference + // indicates which is preferred. Only for video tracks. + absl::optional degradation_preference; + + bool operator==(const RtpParameters& o) const { + return mid == o.mid && codecs == o.codecs && + header_extensions == o.header_extensions && + encodings == o.encodings && rtcp == o.rtcp && + degradation_preference == o.degradation_preference; + } + bool operator!=(const RtpParameters& o) const { return !(*this == o); } +}; + +} // namespace webrtc + +#endif // API_RTP_PARAMETERS_H_ diff --git a/api/rtp_parameters_unittest.cc b/api/rtp_parameters_unittest.cc new file mode 100644 index 0000000..5928cbd --- /dev/null +++ b/api/rtp_parameters_unittest.cc @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/rtp_parameters.h" + +#include "test/gtest.h" + +namespace webrtc { + +using webrtc::RtpExtension; + +static const char kExtensionUri1[] = "extension-uri1"; +static const char kExtensionUri2[] = "extension-uri2"; + +static const RtpExtension kExtension1(kExtensionUri1, 1); +static const RtpExtension kExtension1Encrypted(kExtensionUri1, 10, true); +static const RtpExtension kExtension2(kExtensionUri2, 2); + +TEST(RtpExtensionTest, FilterDuplicateNonEncrypted) { + std::vector extensions; + std::vector filtered; + + extensions.push_back(kExtension1); + extensions.push_back(kExtension1Encrypted); + filtered = RtpExtension::FilterDuplicateNonEncrypted(extensions); + EXPECT_EQ(1u, filtered.size()); + EXPECT_EQ(std::vector{kExtension1Encrypted}, filtered); + + extensions.clear(); + extensions.push_back(kExtension1Encrypted); + extensions.push_back(kExtension1); + filtered = RtpExtension::FilterDuplicateNonEncrypted(extensions); + EXPECT_EQ(1u, filtered.size()); + EXPECT_EQ(std::vector{kExtension1Encrypted}, filtered); + + extensions.clear(); + extensions.push_back(kExtension1); + extensions.push_back(kExtension2); + filtered = RtpExtension::FilterDuplicateNonEncrypted(extensions); + EXPECT_EQ(2u, filtered.size()); + EXPECT_EQ(extensions, filtered); +} +} // namespace webrtc diff --git a/api/rtp_receiver_interface.cc b/api/rtp_receiver_interface.cc new file mode 100644 index 0000000..bc9aef5 --- /dev/null +++ b/api/rtp_receiver_interface.cc @@ -0,0 +1,44 @@ +/* + * Copyright 2018 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/rtp_receiver_interface.h" + +namespace webrtc { + +std::vector RtpReceiverInterface::stream_ids() const { + return {}; +} + +std::vector> +RtpReceiverInterface::streams() const { + return {}; +} + +std::vector RtpReceiverInterface::GetSources() const { + return {}; +} + +void RtpReceiverInterface::SetFrameDecryptor( + rtc::scoped_refptr frame_decryptor) {} + +rtc::scoped_refptr +RtpReceiverInterface::GetFrameDecryptor() const { + return nullptr; +} + +rtc::scoped_refptr +RtpReceiverInterface::dtls_transport() const { + return nullptr; +} + +void RtpReceiverInterface::SetDepacketizerToDecoderFrameTransformer( + rtc::scoped_refptr frame_transformer) {} + +} // namespace webrtc diff --git a/api/rtp_receiver_interface.h b/api/rtp_receiver_interface.h new file mode 100644 index 0000000..a15864e --- /dev/null +++ b/api/rtp_receiver_interface.h @@ -0,0 +1,149 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +// This file contains interfaces for RtpReceivers +// http://w3c.github.io/webrtc-pc/#rtcrtpreceiver-interface + +#ifndef API_RTP_RECEIVER_INTERFACE_H_ +#define API_RTP_RECEIVER_INTERFACE_H_ + +#include +#include + +#include "api/crypto/frame_decryptor_interface.h" +#include "api/dtls_transport_interface.h" +#include "api/frame_transformer_interface.h" +#include "api/media_stream_interface.h" +#include "api/media_types.h" +#include "api/proxy.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" +#include "api/transport/rtp/rtp_source.h" +#include "rtc_base/deprecation.h" +#include "rtc_base/ref_count.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +class RtpReceiverObserverInterface { + public: + // Note: Currently if there are multiple RtpReceivers of the same media type, + // they will all call OnFirstPacketReceived at once. + // + // In the future, it's likely that an RtpReceiver will only call + // OnFirstPacketReceived when a packet is received specifically for its + // SSRC/mid. + virtual void OnFirstPacketReceived(cricket::MediaType media_type) = 0; + + protected: + virtual ~RtpReceiverObserverInterface() {} +}; + +class RTC_EXPORT RtpReceiverInterface : public rtc::RefCountInterface { + public: + virtual rtc::scoped_refptr track() const = 0; + + // The dtlsTransport attribute exposes the DTLS transport on which the + // media is received. It may be null. + // https://w3c.github.io/webrtc-pc/#dom-rtcrtpreceiver-transport + // TODO(https://bugs.webrtc.org/907849) remove default implementation + virtual rtc::scoped_refptr dtls_transport() const; + + // The list of streams that |track| is associated with. This is the same as + // the [[AssociatedRemoteMediaStreams]] internal slot in the spec. + // https://w3c.github.io/webrtc-pc/#dfn-associatedremotemediastreams + // TODO(hbos): Make pure virtual as soon as Chromium's mock implements this. 
+ // TODO(https://crbug.com/webrtc/9480): Remove streams() in favor of + // stream_ids() as soon as downstream projects are no longer dependent on + // stream objects. + virtual std::vector stream_ids() const; + virtual std::vector> streams() const; + + // Audio or video receiver? + virtual cricket::MediaType media_type() const = 0; + + // Not to be confused with "mid", this is a field we can temporarily use + // to uniquely identify a receiver until we implement Unified Plan SDP. + virtual std::string id() const = 0; + + // The WebRTC specification only defines RTCRtpParameters in terms of senders, + // but this API also applies them to receivers, similar to ORTC: + // http://ortc.org/wp-content/uploads/2016/03/ortc.html#rtcrtpparameters*. + virtual RtpParameters GetParameters() const = 0; + // TODO(dinosaurav): Delete SetParameters entirely after rolling to Chromium. + // Currently, doesn't support changing any parameters. + virtual bool SetParameters(const RtpParameters& parameters) { return false; } + + // Does not take ownership of observer. + // Must call SetObserver(nullptr) before the observer is destroyed. + virtual void SetObserver(RtpReceiverObserverInterface* observer) = 0; + + // Sets the jitter buffer minimum delay until media playout. Actual observed + // delay may differ depending on the congestion control. |delay_seconds| is a + // positive value including 0.0 measured in seconds. |nullopt| means default + // value must be used. + virtual void SetJitterBufferMinimumDelay( + absl::optional delay_seconds) = 0; + + // TODO(zhihuang): Remove the default implementation once the subclasses + // implement this. Currently, the only relevant subclass is the + // content::FakeRtpReceiver in Chromium. + virtual std::vector GetSources() const; + + // Sets a user defined frame decryptor that will decrypt the entire frame + // before it is sent across the network. 
This will decrypt the entire frame + // using the user provided decryption mechanism regardless of whether SRTP is + // enabled or not. + virtual void SetFrameDecryptor( + rtc::scoped_refptr frame_decryptor); + + // Returns a pointer to the frame decryptor set previously by the + // user. This can be used to update the state of the object. + virtual rtc::scoped_refptr GetFrameDecryptor() const; + + // Sets a frame transformer between the depacketizer and the decoder to enable + // client code to transform received frames according to their own processing + // logic. + virtual void SetDepacketizerToDecoderFrameTransformer( + rtc::scoped_refptr frame_transformer); + + protected: + ~RtpReceiverInterface() override = default; +}; + +// Define proxy for RtpReceiverInterface. +// TODO(deadbeef): Move this to .cc file and out of api/. What threads methods +// are called on is an implementation detail. +BEGIN_SIGNALING_PROXY_MAP(RtpReceiver) +PROXY_SIGNALING_THREAD_DESTRUCTOR() +PROXY_CONSTMETHOD0(rtc::scoped_refptr, track) +PROXY_CONSTMETHOD0(rtc::scoped_refptr, dtls_transport) +PROXY_CONSTMETHOD0(std::vector, stream_ids) +PROXY_CONSTMETHOD0(std::vector>, + streams) +PROXY_CONSTMETHOD0(cricket::MediaType, media_type) +PROXY_CONSTMETHOD0(std::string, id) +PROXY_CONSTMETHOD0(RtpParameters, GetParameters) +PROXY_METHOD1(void, SetObserver, RtpReceiverObserverInterface*) +PROXY_METHOD1(void, SetJitterBufferMinimumDelay, absl::optional) +PROXY_CONSTMETHOD0(std::vector, GetSources) +PROXY_METHOD1(void, + SetFrameDecryptor, + rtc::scoped_refptr) +PROXY_CONSTMETHOD0(rtc::scoped_refptr, + GetFrameDecryptor) +PROXY_METHOD1(void, + SetDepacketizerToDecoderFrameTransformer, + rtc::scoped_refptr) +END_PROXY_MAP() + +} // namespace webrtc + +#endif // API_RTP_RECEIVER_INTERFACE_H_ diff --git a/api/rtp_sender_interface.cc b/api/rtp_sender_interface.cc new file mode 100644 index 0000000..57a5a10 --- /dev/null +++ b/api/rtp_sender_interface.cc @@ -0,0 +1,36 @@ +/* + * Copyright 2018 The 
+ WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/rtp_sender_interface.h" + +namespace webrtc { + +void RtpSenderInterface::SetFrameEncryptor( + rtc::scoped_refptr<FrameEncryptorInterface> frame_encryptor) {} + +rtc::scoped_refptr<FrameEncryptorInterface> +RtpSenderInterface::GetFrameEncryptor() const { + return nullptr; +} + +std::vector<RtpEncodingParameters> RtpSenderInterface::init_send_encodings() + const { + return {}; +} + +rtc::scoped_refptr<DtlsTransportInterface> RtpSenderInterface::dtls_transport() + const { + return nullptr; +} + +void RtpSenderInterface::SetEncoderToPacketizerFrameTransformer( + rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) {} + +} // namespace webrtc diff --git a/api/rtp_sender_interface.h b/api/rtp_sender_interface.h new file mode 100644 index 0000000..bdbd6dc --- /dev/null +++ b/api/rtp_sender_interface.h @@ -0,0 +1,133 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +// This file contains interfaces for RtpSenders +// http://w3c.github.io/webrtc-pc/#rtcrtpsender-interface + +#ifndef API_RTP_SENDER_INTERFACE_H_ +#define API_RTP_SENDER_INTERFACE_H_ + +#include +#include + +#include "api/crypto/frame_encryptor_interface.h" +#include "api/dtls_transport_interface.h" +#include "api/dtmf_sender_interface.h" +#include "api/frame_transformer_interface.h" +#include "api/media_stream_interface.h" +#include "api/media_types.h" +#include "api/proxy.h" +#include "api/rtc_error.h" +#include "api/rtp_parameters.h" +#include "api/scoped_refptr.h" +#include "rtc_base/ref_count.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +class RTC_EXPORT RtpSenderInterface : public rtc::RefCountInterface { + public: + // Returns true if successful in setting the track. + // Fails if an audio track is set on a video RtpSender, or vice-versa. + virtual bool SetTrack(MediaStreamTrackInterface* track) = 0; + virtual rtc::scoped_refptr track() const = 0; + + // The dtlsTransport attribute exposes the DTLS transport on which the + // media is sent. It may be null. + // https://w3c.github.io/webrtc-pc/#dom-rtcrtpsender-transport + // TODO(https://bugs.webrtc.org/907849) remove default implementation + virtual rtc::scoped_refptr dtls_transport() const; + + // Returns primary SSRC used by this sender for sending media. + // Returns 0 if not yet determined. + // TODO(deadbeef): Change to absl::optional. + // TODO(deadbeef): Remove? With GetParameters this should be redundant. + virtual uint32_t ssrc() const = 0; + + // Audio or video sender? + virtual cricket::MediaType media_type() const = 0; + + // Not to be confused with "mid", this is a field we can temporarily use + // to uniquely identify a receiver until we implement Unified Plan SDP. + virtual std::string id() const = 0; + + // Returns a list of media stream ids associated with this sender's track. 
+ // These are signalled in the SDP so that the remote side can associate + // tracks. + virtual std::vector stream_ids() const = 0; + + // Sets the IDs of the media streams associated with this sender's track. + // These are signalled in the SDP so that the remote side can associate + // tracks. + virtual void SetStreams(const std::vector& stream_ids) {} + + // Returns the list of encoding parameters that will be applied when the SDP + // local description is set. These initial encoding parameters can be set by + // PeerConnection::AddTransceiver, and later updated with Get/SetParameters. + // TODO(orphis): Make it pure virtual once Chrome has updated + virtual std::vector init_send_encodings() const; + + virtual RtpParameters GetParameters() const = 0; + // Note that only a subset of the parameters can currently be changed. See + // rtpparameters.h + // The encodings are in increasing quality order for simulcast. + virtual RTCError SetParameters(const RtpParameters& parameters) = 0; + + // Returns null for a video sender. + virtual rtc::scoped_refptr GetDtmfSender() const = 0; + + // Sets a user defined frame encryptor that will encrypt the entire frame + // before it is sent across the network. This will encrypt the entire frame + // using the user provided encryption mechanism regardless of whether SRTP is + // enabled or not. + virtual void SetFrameEncryptor( + rtc::scoped_refptr frame_encryptor); + + // Returns a pointer to the frame encryptor set previously by the + // user. This can be used to update the state of the object. + virtual rtc::scoped_refptr GetFrameEncryptor() const; + + virtual void SetEncoderToPacketizerFrameTransformer( + rtc::scoped_refptr frame_transformer); + + protected: + ~RtpSenderInterface() override = default; +}; + +// Define proxy for RtpSenderInterface. +// TODO(deadbeef): Move this to .cc file and out of api/. What threads methods +// are called on is an implementation detail. 
+BEGIN_SIGNALING_PROXY_MAP(RtpSender) +PROXY_SIGNALING_THREAD_DESTRUCTOR() +PROXY_METHOD1(bool, SetTrack, MediaStreamTrackInterface*) +PROXY_CONSTMETHOD0(rtc::scoped_refptr, track) +PROXY_CONSTMETHOD0(rtc::scoped_refptr, dtls_transport) +PROXY_CONSTMETHOD0(uint32_t, ssrc) +PROXY_CONSTMETHOD0(cricket::MediaType, media_type) +PROXY_CONSTMETHOD0(std::string, id) +PROXY_CONSTMETHOD0(std::vector, stream_ids) +PROXY_CONSTMETHOD0(std::vector, init_send_encodings) +PROXY_CONSTMETHOD0(RtpParameters, GetParameters) +PROXY_METHOD1(RTCError, SetParameters, const RtpParameters&) +PROXY_CONSTMETHOD0(rtc::scoped_refptr, GetDtmfSender) +PROXY_METHOD1(void, + SetFrameEncryptor, + rtc::scoped_refptr) +PROXY_CONSTMETHOD0(rtc::scoped_refptr, + GetFrameEncryptor) +PROXY_METHOD1(void, SetStreams, const std::vector&) +PROXY_METHOD1(void, + SetEncoderToPacketizerFrameTransformer, + rtc::scoped_refptr) +END_PROXY_MAP() + +} // namespace webrtc + +#endif // API_RTP_SENDER_INTERFACE_H_ diff --git a/api/rtp_transceiver_direction.h b/api/rtp_transceiver_direction.h new file mode 100644 index 0000000..3c7d4cb --- /dev/null +++ b/api/rtp_transceiver_direction.h @@ -0,0 +1,27 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_RTP_TRANSCEIVER_DIRECTION_H_ +#define API_RTP_TRANSCEIVER_DIRECTION_H_ + +namespace webrtc { + +// https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiverdirection +enum class RtpTransceiverDirection { + kSendRecv, + kSendOnly, + kRecvOnly, + kInactive, + kStopped, +}; + +} // namespace webrtc + +#endif // API_RTP_TRANSCEIVER_DIRECTION_H_ diff --git a/api/rtp_transceiver_interface.cc b/api/rtp_transceiver_interface.cc new file mode 100644 index 0000000..1dc0fcc --- /dev/null +++ b/api/rtp_transceiver_interface.cc @@ -0,0 +1,80 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/rtp_transceiver_interface.h" + +#include "rtc_base/checks.h" + +namespace webrtc { + +RtpTransceiverInit::RtpTransceiverInit() = default; + +RtpTransceiverInit::RtpTransceiverInit(const RtpTransceiverInit& rhs) = default; + +RtpTransceiverInit::~RtpTransceiverInit() = default; + +absl::optional<RtpTransceiverDirection> +RtpTransceiverInterface::fired_direction() const { + return absl::nullopt; +} + +bool RtpTransceiverInterface::stopping() const { + return false; +} + +void RtpTransceiverInterface::Stop() { + StopInternal(); +} + +RTCError RtpTransceiverInterface::StopStandard() { + RTC_NOTREACHED() << "DEBUG: RtpTransceiverInterface::StopStandard called"; + return RTCError::OK(); +} + +void RtpTransceiverInterface::StopInternal() { + RTC_NOTREACHED() << "DEBUG: RtpTransceiverInterface::StopInternal called"; +} + +RTCError RtpTransceiverInterface::SetCodecPreferences( + rtc::ArrayView<RtpCodecCapability>) { + RTC_NOTREACHED() << "Not implemented"; + return {}; +} + +std::vector<RtpCodecCapability> RtpTransceiverInterface::codec_preferences() + const { + return {}; 
+} + +std::vector<RtpHeaderExtensionCapability> +RtpTransceiverInterface::HeaderExtensionsToOffer() const { + return {}; +} + +webrtc::RTCError RtpTransceiverInterface::SetOfferedRtpHeaderExtensions( + rtc::ArrayView<const RtpHeaderExtensionCapability> + header_extensions_to_offer) { + return webrtc::RTCError(webrtc::RTCErrorType::UNSUPPORTED_OPERATION); +} + +// TODO(bugs.webrtc.org/11839) Remove default implementations when clients +// are updated. +void RtpTransceiverInterface::SetDirection( + RtpTransceiverDirection new_direction) { + SetDirectionWithError(new_direction); +} + +RTCError RtpTransceiverInterface::SetDirectionWithError( + RtpTransceiverDirection new_direction) { + RTC_NOTREACHED() << "Default implementation called"; + return RTCError::OK(); +} + +} // namespace webrtc diff --git a/api/rtp_transceiver_interface.h b/api/rtp_transceiver_interface.h new file mode 100644 index 0000000..cdda34b --- /dev/null +++ b/api/rtp_transceiver_interface.h @@ -0,0 +1,171 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_RTP_TRANSCEIVER_INTERFACE_H_ +#define API_RTP_TRANSCEIVER_INTERFACE_H_ + +#include <string> +#include <vector> + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/media_types.h" +#include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" +#include "api/rtp_transceiver_direction.h" +#include "api/scoped_refptr.h" +#include "rtc_base/ref_count.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// Structure for initializing an RtpTransceiver in a call to +// PeerConnectionInterface::AddTransceiver. 
+// https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiverinit +struct RTC_EXPORT RtpTransceiverInit final { + RtpTransceiverInit(); + RtpTransceiverInit(const RtpTransceiverInit&); + ~RtpTransceiverInit(); + // Direction of the RtpTransceiver. See RtpTransceiverInterface::direction(). + RtpTransceiverDirection direction = RtpTransceiverDirection::kSendRecv; + + // The added RtpTransceiver will be added to these streams. + std::vector stream_ids; + + // TODO(bugs.webrtc.org/7600): Not implemented. + std::vector send_encodings; +}; + +// The RtpTransceiverInterface maps to the RTCRtpTransceiver defined by the +// WebRTC specification. A transceiver represents a combination of an RtpSender +// and an RtpReceiver than share a common mid. As defined in JSEP, an +// RtpTransceiver is said to be associated with a media description if its mid +// property is non-null; otherwise, it is said to be disassociated. +// JSEP: https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-24 +// +// Note that RtpTransceivers are only supported when using PeerConnection with +// Unified Plan SDP. +// +// This class is thread-safe. +// +// WebRTC specification for RTCRtpTransceiver, the JavaScript analog: +// https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver +class RTC_EXPORT RtpTransceiverInterface : public rtc::RefCountInterface { + public: + // Media type of the transceiver. Any sender(s)/receiver(s) will have this + // type as well. + virtual cricket::MediaType media_type() const = 0; + + // The mid attribute is the mid negotiated and present in the local and + // remote descriptions. Before negotiation is complete, the mid value may be + // null. After rollbacks, the value may change from a non-null value to null. + // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-mid + virtual absl::optional mid() const = 0; + + // The sender attribute exposes the RtpSender corresponding to the RTP media + // that may be sent with the transceiver's mid. 
The sender is always present, + // regardless of the direction of media. + // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-sender + virtual rtc::scoped_refptr sender() const = 0; + + // The receiver attribute exposes the RtpReceiver corresponding to the RTP + // media that may be received with the transceiver's mid. The receiver is + // always present, regardless of the direction of media. + // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-receiver + virtual rtc::scoped_refptr receiver() const = 0; + + // The stopped attribute indicates that the sender of this transceiver will no + // longer send, and that the receiver will no longer receive. It is true if + // either stop has been called or if setting the local or remote description + // has caused the RtpTransceiver to be stopped. + // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-stopped + virtual bool stopped() const = 0; + + // The stopping attribute indicates that the user has indicated that the + // sender of this transceiver will stop sending, and that the receiver will + // no longer receive. It is always true if stopped() is true. + // If stopping() is true and stopped() is false, it means that the + // transceiver's stop() method has been called, but the negotiation with + // the other end for shutting down the transceiver is not yet done. + // https://w3c.github.io/webrtc-pc/#dfn-stopping-0 + // TODO(hta): Remove default implementation. + virtual bool stopping() const; + + // The direction attribute indicates the preferred direction of this + // transceiver, which will be used in calls to CreateOffer and CreateAnswer. + // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction + virtual RtpTransceiverDirection direction() const = 0; + + // Sets the preferred direction of this transceiver. An update of + // directionality does not take effect immediately. 
Instead, future calls to + // CreateOffer and CreateAnswer mark the corresponding media descriptions as + // sendrecv, sendonly, recvonly, or inactive. + // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction + // TODO(hta): Deprecate SetDirection without error and rename + // SetDirectionWithError to SetDirection, remove default implementations. + virtual void SetDirection(RtpTransceiverDirection new_direction); + virtual RTCError SetDirectionWithError(RtpTransceiverDirection new_direction); + + // The current_direction attribute indicates the current direction negotiated + // for this transceiver. If this transceiver has never been represented in an + // offer/answer exchange, or if the transceiver is stopped, the value is null. + // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-currentdirection + virtual absl::optional current_direction() const = 0; + + // An internal slot designating for which direction the relevant + // PeerConnection events have been fired. This is to ensure that events like + // OnAddTrack only get fired once even if the same session description is + // applied again. + // Exposed in the public interface for use by Chromium. + virtual absl::optional fired_direction() const; + + // Initiates a stop of the transceiver. + // The stop is complete when stopped() returns true. + // A stopped transceiver can be reused for a different track. + // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-stop + // TODO(hta): Rename to Stop() when users of the non-standard Stop() are + // updated. + virtual RTCError StopStandard(); + + // Stops a transceiver immediately, without waiting for signalling. + // This is an internal function, and is exposed for historical reasons. + // https://w3c.github.io/webrtc-pc/#dfn-stop-the-rtcrtptransceiver + virtual void StopInternal(); + RTC_DEPRECATED virtual void Stop(); + + // The SetCodecPreferences method overrides the default codec preferences used + // by WebRTC for this transceiver. 
+ // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-setcodecpreferences + virtual RTCError SetCodecPreferences( + rtc::ArrayView codecs); + virtual std::vector codec_preferences() const; + + // Readonly attribute which contains the set of header extensions that was set + // with SetOfferedRtpHeaderExtensions, or a default set if it has not been + // called. + // https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface + virtual std::vector HeaderExtensionsToOffer() + const; + + // The SetOfferedRtpHeaderExtensions method modifies the next SDP negotiation + // so that it negotiates use of header extensions which are not kStopped. + // https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface + virtual webrtc::RTCError SetOfferedRtpHeaderExtensions( + rtc::ArrayView + header_extensions_to_offer); + + protected: + ~RtpTransceiverInterface() override = default; +}; + +} // namespace webrtc + +#endif // API_RTP_TRANSCEIVER_INTERFACE_H_ diff --git a/api/scoped_refptr.h b/api/scoped_refptr.h new file mode 100644 index 0000000..fa4e83d --- /dev/null +++ b/api/scoped_refptr.h @@ -0,0 +1,164 @@ +/* + * Copyright 2011 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// Originally these classes are from Chromium. +// http://src.chromium.org/viewvc/chrome/trunk/src/base/memory/ref_counted.h?view=markup + +// +// A smart pointer class for reference counted objects. Use this class instead +// of calling AddRef and Release manually on a reference counted object to +// avoid common memory leaks caused by forgetting to Release an object +// reference. Sample usage: +// +// class MyFoo : public RefCounted { +// ... 
+// }; +// +// void some_function() { +// scoped_refptr foo = new MyFoo(); +// foo->Method(param); +// // |foo| is released when this function returns +// } +// +// void some_other_function() { +// scoped_refptr foo = new MyFoo(); +// ... +// foo = nullptr; // explicitly releases |foo| +// ... +// if (foo) +// foo->Method(param); +// } +// +// The above examples show how scoped_refptr acts like a pointer to T. +// Given two scoped_refptr classes, it is also possible to exchange +// references between the two objects, like so: +// +// { +// scoped_refptr a = new MyFoo(); +// scoped_refptr b; +// +// b.swap(a); +// // now, |b| references the MyFoo object, and |a| references null. +// } +// +// To make both |a| and |b| in the above example reference the same MyFoo +// object, simply use the assignment operator: +// +// { +// scoped_refptr a = new MyFoo(); +// scoped_refptr b; +// +// b = a; +// // now, |a| and |b| each own a reference to the same MyFoo object. +// } +// + +#ifndef API_SCOPED_REFPTR_H_ +#define API_SCOPED_REFPTR_H_ + +#include +#include + +namespace rtc { + +template +class scoped_refptr { + public: + typedef T element_type; + + scoped_refptr() : ptr_(nullptr) {} + + scoped_refptr(T* p) : ptr_(p) { // NOLINT(runtime/explicit) + if (ptr_) + ptr_->AddRef(); + } + + scoped_refptr(const scoped_refptr& r) : ptr_(r.ptr_) { + if (ptr_) + ptr_->AddRef(); + } + + template + scoped_refptr(const scoped_refptr& r) : ptr_(r.get()) { + if (ptr_) + ptr_->AddRef(); + } + + // Move constructors. 
+ scoped_refptr(scoped_refptr&& r) noexcept : ptr_(r.release()) {} + + template + scoped_refptr(scoped_refptr&& r) noexcept : ptr_(r.release()) {} + + ~scoped_refptr() { + if (ptr_) + ptr_->Release(); + } + + T* get() const { return ptr_; } + operator T*() const { return ptr_; } + T* operator->() const { return ptr_; } + + // Returns the (possibly null) raw pointer, and makes the scoped_refptr hold a + // null pointer, all without touching the reference count of the underlying + // pointed-to object. The object is still reference counted, and the caller of + // release() is now the proud owner of one reference, so it is responsible for + // calling Release() once on the object when no longer using it. + T* release() { + T* retVal = ptr_; + ptr_ = nullptr; + return retVal; + } + + scoped_refptr& operator=(T* p) { + // AddRef first so that self assignment should work + if (p) + p->AddRef(); + if (ptr_) + ptr_->Release(); + ptr_ = p; + return *this; + } + + scoped_refptr& operator=(const scoped_refptr& r) { + return *this = r.ptr_; + } + + template + scoped_refptr& operator=(const scoped_refptr& r) { + return *this = r.get(); + } + + scoped_refptr& operator=(scoped_refptr&& r) noexcept { + scoped_refptr(std::move(r)).swap(*this); + return *this; + } + + template + scoped_refptr& operator=(scoped_refptr&& r) noexcept { + scoped_refptr(std::move(r)).swap(*this); + return *this; + } + + void swap(T** pp) noexcept { + T* p = ptr_; + ptr_ = *pp; + *pp = p; + } + + void swap(scoped_refptr& r) noexcept { swap(&r.ptr_); } + + protected: + T* ptr_; +}; + +} // namespace rtc + +#endif // API_SCOPED_REFPTR_H_ diff --git a/api/scoped_refptr_unittest.cc b/api/scoped_refptr_unittest.cc new file mode 100644 index 0000000..75a202b --- /dev/null +++ b/api/scoped_refptr_unittest.cc @@ -0,0 +1,111 @@ +/* + * Copyright 2019 The WebRTC Project Authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/scoped_refptr.h" + +#include +#include + +#include "test/gtest.h" + +namespace rtc { +namespace { + +struct FunctionsCalled { + int addref = 0; + int release = 0; +}; + +class ScopedRefCounted { + public: + explicit ScopedRefCounted(FunctionsCalled* called) : called_(*called) {} + ScopedRefCounted(const ScopedRefCounted&) = delete; + ScopedRefCounted& operator=(const ScopedRefCounted&) = delete; + + void AddRef() { + ++called_.addref; + ++ref_count_; + } + void Release() { + ++called_.release; + if (0 == --ref_count_) + delete this; + } + + private: + ~ScopedRefCounted() = default; + + FunctionsCalled& called_; + int ref_count_ = 0; +}; + +TEST(ScopedRefptrTest, IsCopyConstructable) { + FunctionsCalled called; + scoped_refptr ptr = new ScopedRefCounted(&called); + scoped_refptr another_ptr = ptr; + + EXPECT_TRUE(ptr); + EXPECT_TRUE(another_ptr); + EXPECT_EQ(called.addref, 2); +} + +TEST(ScopedRefptrTest, IsCopyAssignable) { + FunctionsCalled called; + scoped_refptr another_ptr; + scoped_refptr ptr = new ScopedRefCounted(&called); + another_ptr = ptr; + + EXPECT_TRUE(ptr); + EXPECT_TRUE(another_ptr); + EXPECT_EQ(called.addref, 2); +} + +TEST(ScopedRefptrTest, IsMoveConstructableWithoutExtraAddRefRelease) { + FunctionsCalled called; + scoped_refptr ptr = new ScopedRefCounted(&called); + scoped_refptr another_ptr = std::move(ptr); + + EXPECT_FALSE(ptr); + EXPECT_TRUE(another_ptr); + EXPECT_EQ(called.addref, 1); + EXPECT_EQ(called.release, 0); +} + +TEST(ScopedRefptrTest, IsMoveAssignableWithoutExtraAddRefRelease) { + FunctionsCalled called; + scoped_refptr another_ptr; + scoped_refptr ptr = new 
ScopedRefCounted(&called); + another_ptr = std::move(ptr); + + EXPECT_FALSE(ptr); + EXPECT_TRUE(another_ptr); + EXPECT_EQ(called.addref, 1); + EXPECT_EQ(called.release, 0); +} + +TEST(ScopedRefptrTest, MovableDuringVectorReallocation) { + static_assert( + std::is_nothrow_move_constructible>(), + ""); + // Test below describes a scenario where it is helpful for move constructor + // to be noexcept. + FunctionsCalled called; + std::vector> ptrs; + ptrs.reserve(1); + // Insert more elements than reserved to provoke reallocation. + ptrs.push_back(new ScopedRefCounted(&called)); + ptrs.push_back(new ScopedRefCounted(&called)); + + EXPECT_EQ(called.addref, 2); + EXPECT_EQ(called.release, 0); +} + +} // namespace +} // namespace rtc diff --git a/api/sctp_transport_interface.cc b/api/sctp_transport_interface.cc new file mode 100644 index 0000000..8a0a866 --- /dev/null +++ b/api/sctp_transport_interface.cc @@ -0,0 +1,32 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/sctp_transport_interface.h" + +#include <utility> + +namespace webrtc { + +SctpTransportInformation::SctpTransportInformation(SctpTransportState state) + : state_(state) {} + +SctpTransportInformation::SctpTransportInformation( + SctpTransportState state, + rtc::scoped_refptr<DtlsTransportInterface> dtls_transport, + absl::optional<double> max_message_size, + absl::optional<int> max_channels) + : state_(state), + dtls_transport_(std::move(dtls_transport)), + max_message_size_(max_message_size), + max_channels_(max_channels) {} + +SctpTransportInformation::~SctpTransportInformation() {} + +} // namespace webrtc diff --git a/api/sctp_transport_interface.h b/api/sctp_transport_interface.h new file mode 100644 index 0000000..6af0bfc --- /dev/null +++ b/api/sctp_transport_interface.h @@ -0,0 +1,90 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_SCTP_TRANSPORT_INTERFACE_H_ +#define API_SCTP_TRANSPORT_INTERFACE_H_ + +#include "absl/types/optional.h" +#include "api/dtls_transport_interface.h" +#include "api/rtc_error.h" +#include "api/scoped_refptr.h" +#include "rtc_base/ref_count.h" + +namespace webrtc { + +// States of a SCTP transport, corresponding to the JS API specification. +// http://w3c.github.io/webrtc-pc/#dom-rtcsctptransportstate +enum class SctpTransportState { + kNew, // Has not started negotiating yet. Non-standard state. + kConnecting, // In the process of negotiating an association. + kConnected, // Completed negotiation of an association. + kClosed, // Closed by local or remote party. + kNumValues +}; + +// This object gives snapshot information about the changeable state of a +// SctpTransport. 
+// It reflects the readonly attributes of the object in the specification. +// http://w3c.github.io/webrtc-pc/#rtcsctptransport-interface +class RTC_EXPORT SctpTransportInformation { + public: + explicit SctpTransportInformation(SctpTransportState state); + SctpTransportInformation( + SctpTransportState state, + rtc::scoped_refptr dtls_transport, + absl::optional max_message_size, + absl::optional max_channels); + ~SctpTransportInformation(); + // The DTLS transport that supports this SCTP transport. + rtc::scoped_refptr dtls_transport() const { + return dtls_transport_; + } + SctpTransportState state() const { return state_; } + absl::optional MaxMessageSize() const { return max_message_size_; } + absl::optional MaxChannels() const { return max_channels_; } + + private: + SctpTransportState state_; + rtc::scoped_refptr dtls_transport_; + absl::optional max_message_size_; + absl::optional max_channels_; +}; + +class SctpTransportObserverInterface { + public: + // This callback carries information about the state of the transport. + // The argument is a pass-by-value snapshot of the state. + // The callback will be called on the network thread. + virtual void OnStateChange(SctpTransportInformation info) = 0; + + protected: + virtual ~SctpTransportObserverInterface() = default; +}; + +// A SCTP transport, as represented to the outside world. +// This object is created on the network thread, and can only be +// accessed on that thread, except for functions explicitly marked otherwise. +// References can be held by other threads, and destruction can therefore +// be initiated by other threads. +class SctpTransportInterface : public rtc::RefCountInterface { + public: + // This function can be called from other threads. + virtual rtc::scoped_refptr dtls_transport() const = 0; + // Returns information on the state of the SctpTransport. + // This function can be called from other threads. 
+ virtual SctpTransportInformation Information() const = 0; + // Observer management. + virtual void RegisterObserver(SctpTransportObserverInterface* observer) = 0; + virtual void UnregisterObserver() = 0; +}; + +} // namespace webrtc + +#endif // API_SCTP_TRANSPORT_INTERFACE_H_ diff --git a/api/set_local_description_observer_interface.h b/api/set_local_description_observer_interface.h new file mode 100644 index 0000000..90d000c --- /dev/null +++ b/api/set_local_description_observer_interface.h @@ -0,0 +1,30 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_SET_LOCAL_DESCRIPTION_OBSERVER_INTERFACE_H_ +#define API_SET_LOCAL_DESCRIPTION_OBSERVER_INTERFACE_H_ + +#include "api/rtc_error.h" +#include "rtc_base/ref_count.h" + +namespace webrtc { + +// OnSetLocalDescriptionComplete() invokes as soon as +// PeerConnectionInterface::SetLocalDescription() operation completes, allowing +// the observer to examine the effects of the operation without delay. +class SetLocalDescriptionObserverInterface : public rtc::RefCountInterface { + public: + // On success, |error.ok()| is true. + virtual void OnSetLocalDescriptionComplete(RTCError error) = 0; +}; + +} // namespace webrtc + +#endif // API_SET_LOCAL_DESCRIPTION_OBSERVER_INTERFACE_H_ diff --git a/api/set_remote_description_observer_interface.h b/api/set_remote_description_observer_interface.h new file mode 100644 index 0000000..1782555 --- /dev/null +++ b/api/set_remote_description_observer_interface.h @@ -0,0 +1,31 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_SET_REMOTE_DESCRIPTION_OBSERVER_INTERFACE_H_ +#define API_SET_REMOTE_DESCRIPTION_OBSERVER_INTERFACE_H_ + +#include "api/rtc_error.h" +#include "rtc_base/ref_count.h" + +namespace webrtc { + +// An observer for PeerConnectionInterface::SetRemoteDescription(). The +// callback is invoked such that the state of the peer connection can be +// examined to accurately reflect the effects of the SetRemoteDescription +// operation. +class SetRemoteDescriptionObserverInterface : public rtc::RefCountInterface { + public: + // On success, |error.ok()| is true. + virtual void OnSetRemoteDescriptionComplete(RTCError error) = 0; +}; + +} // namespace webrtc + +#endif // API_SET_REMOTE_DESCRIPTION_OBSERVER_INTERFACE_H_ diff --git a/api/stats/OWNERS b/api/stats/OWNERS new file mode 100644 index 0000000..7e98070 --- /dev/null +++ b/api/stats/OWNERS @@ -0,0 +1,2 @@ +hbos@webrtc.org +hta@webrtc.org diff --git a/api/stats/rtc_stats.h b/api/stats/rtc_stats.h new file mode 100644 index 0000000..5de5b7f --- /dev/null +++ b/api/stats/rtc_stats.h @@ -0,0 +1,461 @@ +/* + * Copyright 2016 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_STATS_RTC_STATS_H_ +#define API_STATS_RTC_STATS_H_ + +#include +#include + +#include +#include +#include +#include + +#include "rtc_base/checks.h" +#include "rtc_base/system/rtc_export.h" +#include "rtc_base/system/rtc_export_template.h" + +namespace webrtc { + +class RTCStatsMemberInterface; + +// Abstract base class for RTCStats-derived dictionaries, see +// https://w3c.github.io/webrtc-stats/. +// +// All derived classes must have the following static variable defined: +// static const char kType[]; +// It is used as a unique class identifier and a string representation of the +// class type, see https://w3c.github.io/webrtc-stats/#rtcstatstype-str*. +// Use the |WEBRTC_RTCSTATS_IMPL| macro when implementing subclasses, see macro +// for details. +// +// Derived classes list their dictionary members, RTCStatsMember, as public +// fields, allowing the following: +// +// RTCFooStats foo("fooId", GetCurrentTime()); +// foo.bar = 42; +// foo.baz = std::vector(); +// foo.baz->push_back("hello world"); +// uint32_t x = *foo.bar; +// +// Pointers to all the members are available with |Members|, allowing iteration: +// +// for (const RTCStatsMemberInterface* member : foo.Members()) { +// printf("%s = %s\n", member->name(), member->ValueToString().c_str()); +// } +class RTC_EXPORT RTCStats { + public: + RTCStats(const std::string& id, int64_t timestamp_us) + : id_(id), timestamp_us_(timestamp_us) {} + RTCStats(std::string&& id, int64_t timestamp_us) + : id_(std::move(id)), timestamp_us_(timestamp_us) {} + virtual ~RTCStats() {} + + virtual std::unique_ptr copy() const = 0; + + const std::string& id() const { return id_; } + // Time relative to the UNIX epoch (Jan 1, 1970, UTC), in microseconds. + int64_t timestamp_us() const { return timestamp_us_; } + // Returns the static member variable |kType| of the implementing class. 
+ virtual const char* type() const = 0; + // Returns a vector of pointers to all the |RTCStatsMemberInterface| members + // of this class. This allows for iteration of members. For a given class, + // |Members| always returns the same members in the same order. + std::vector Members() const; + // Checks if the two stats objects are of the same type and have the same + // member values. Timestamps are not compared. These operators are exposed for + // testing. + bool operator==(const RTCStats& other) const; + bool operator!=(const RTCStats& other) const; + + // Creates a JSON readable string representation of the stats + // object, listing all of its members (names and values). + std::string ToJson() const; + + // Downcasts the stats object to an |RTCStats| subclass |T|. DCHECKs that the + // object is of type |T|. + template + const T& cast_to() const { + RTC_DCHECK_EQ(type(), T::kType); + return static_cast(*this); + } + + protected: + // Gets a vector of all members of this |RTCStats| object, including members + // derived from parent classes. |additional_capacity| is how many more members + // shall be reserved in the vector (so that subclasses can allocate a vector + // with room for both parent and child members without it having to resize). + virtual std::vector + MembersOfThisObjectAndAncestors(size_t additional_capacity) const; + + std::string const id_; + int64_t timestamp_us_; +}; + +// All |RTCStats| classes should use these macros. +// |WEBRTC_RTCSTATS_DECL| is placed in a public section of the class definition. +// |WEBRTC_RTCSTATS_IMPL| is placed outside the class definition (in a .cc). +// +// These macros declare (in _DECL) and define (in _IMPL) the static |kType| and +// overrides methods as required by subclasses of |RTCStats|: |copy|, |type| and +// |MembersOfThisObjectAndAncestors|. The |...| argument is a list of addresses +// to each member defined in the implementing class. The list must have at least +// one member. 
+// +// (Since class names need to be known to implement these methods this cannot be +// part of the base |RTCStats|. While these methods could be implemented using +// templates, that would only work for immediate subclasses. Subclasses of +// subclasses also have to override these methods, resulting in boilerplate +// code. Using a macro avoids this and works for any |RTCStats| class, including +// grandchildren.) +// +// Sample usage: +// +// rtcfoostats.h: +// class RTCFooStats : public RTCStats { +// public: +// WEBRTC_RTCSTATS_DECL(); +// +// RTCFooStats(const std::string& id, int64_t timestamp_us); +// +// RTCStatsMember foo; +// RTCStatsMember bar; +// }; +// +// rtcfoostats.cc: +// WEBRTC_RTCSTATS_IMPL(RTCFooStats, RTCStats, "foo-stats" +// &foo, +// &bar); +// +// RTCFooStats::RTCFooStats(const std::string& id, int64_t timestamp_us) +// : RTCStats(id, timestamp_us), +// foo("foo"), +// bar("bar") { +// } +// +#define WEBRTC_RTCSTATS_DECL() \ + protected: \ + std::vector \ + MembersOfThisObjectAndAncestors(size_t local_var_additional_capacity) \ + const override; \ + \ + public: \ + static const char kType[]; \ + \ + std::unique_ptr copy() const override; \ + const char* type() const override + +#define WEBRTC_RTCSTATS_IMPL(this_class, parent_class, type_str, ...) 
\ + const char this_class::kType[] = type_str; \ + \ + std::unique_ptr this_class::copy() const { \ + return std::unique_ptr(new this_class(*this)); \ + } \ + \ + const char* this_class::type() const { return this_class::kType; } \ + \ + std::vector \ + this_class::MembersOfThisObjectAndAncestors( \ + size_t local_var_additional_capacity) const { \ + const webrtc::RTCStatsMemberInterface* local_var_members[] = { \ + __VA_ARGS__}; \ + size_t local_var_members_count = \ + sizeof(local_var_members) / sizeof(local_var_members[0]); \ + std::vector \ + local_var_members_vec = parent_class::MembersOfThisObjectAndAncestors( \ + local_var_members_count + local_var_additional_capacity); \ + RTC_DCHECK_GE( \ + local_var_members_vec.capacity() - local_var_members_vec.size(), \ + local_var_members_count + local_var_additional_capacity); \ + local_var_members_vec.insert(local_var_members_vec.end(), \ + &local_var_members[0], \ + &local_var_members[local_var_members_count]); \ + return local_var_members_vec; \ + } + +// A version of WEBRTC_RTCSTATS_IMPL() where "..." is omitted, used to avoid a +// compile error on windows. This is used if the stats dictionary does not +// declare any members of its own (but perhaps its parent dictionary does). +#define WEBRTC_RTCSTATS_IMPL_NO_MEMBERS(this_class, parent_class, type_str) \ + const char this_class::kType[] = type_str; \ + \ + std::unique_ptr this_class::copy() const { \ + return std::unique_ptr(new this_class(*this)); \ + } \ + \ + const char* this_class::type() const { return this_class::kType; } \ + \ + std::vector \ + this_class::MembersOfThisObjectAndAncestors( \ + size_t local_var_additional_capacity) const { \ + return parent_class::MembersOfThisObjectAndAncestors(0); \ + } + +// Non-standard stats members can be exposed to the JavaScript API in Chrome +// e.g. through origin trials. The group ID can be used by the blink layer to +// determine if a stats member should be exposed or not. 
Multiple non-standard +// stats members can share the same group ID so that they are exposed together. +enum class NonStandardGroupId { + // Group ID used for testing purposes only. + kGroupIdForTesting, + // I2E: + // https://groups.google.com/a/chromium.org/forum/#!topic/blink-dev/hE2B1iItPDk + kRtcAudioJitterBufferMaxPackets, + // I2E: + // https://groups.google.com/a/chromium.org/forum/#!topic/blink-dev/YbhMyqLXXXo + kRtcStatsRelativePacketArrivalDelay, +}; + +// Interface for |RTCStats| members, which have a name and a value of a type +// defined in a subclass. Only the types listed in |Type| are supported, these +// are implemented by |RTCStatsMember|. The value of a member may be +// undefined, the value can only be read if |is_defined|. +class RTCStatsMemberInterface { + public: + // Member value types. + enum Type { + kBool, // bool + kInt32, // int32_t + kUint32, // uint32_t + kInt64, // int64_t + kUint64, // uint64_t + kDouble, // double + kString, // std::string + + kSequenceBool, // std::vector + kSequenceInt32, // std::vector + kSequenceUint32, // std::vector + kSequenceInt64, // std::vector + kSequenceUint64, // std::vector + kSequenceDouble, // std::vector + kSequenceString, // std::vector + }; + + virtual ~RTCStatsMemberInterface() {} + + const char* name() const { return name_; } + virtual Type type() const = 0; + virtual bool is_sequence() const = 0; + virtual bool is_string() const = 0; + bool is_defined() const { return is_defined_; } + // Is this part of the stats spec? Used so that chromium can easily filter + // out anything unstandardized. + virtual bool is_standardized() const = 0; + // Non-standard stats members can have group IDs in order to be exposed in + // JavaScript through experiments. Standardized stats have no group IDs. + virtual std::vector group_ids() const { return {}; } + // Type and value comparator. The names are not compared. These operators are + // exposed for testing. 
+ virtual bool operator==(const RTCStatsMemberInterface& other) const = 0; + bool operator!=(const RTCStatsMemberInterface& other) const { + return !(*this == other); + } + virtual std::string ValueToString() const = 0; + // This is the same as ValueToString except for kInt64 and kUint64 types, + // where the value is represented as a double instead of as an integer. + // Since JSON stores numbers as floating point numbers, very large integers + // cannot be accurately represented, so we prefer to display them as doubles + // instead. + virtual std::string ValueToJson() const = 0; + + template + const T& cast_to() const { + RTC_DCHECK_EQ(type(), T::StaticType()); + return static_cast(*this); + } + + protected: + RTCStatsMemberInterface(const char* name, bool is_defined) + : name_(name), is_defined_(is_defined) {} + + const char* const name_; + bool is_defined_; +}; + +// Template implementation of |RTCStatsMemberInterface|. +// The supported types are the ones described by +// |RTCStatsMemberInterface::Type|. 
+template +class RTCStatsMember : public RTCStatsMemberInterface { + public: + explicit RTCStatsMember(const char* name) + : RTCStatsMemberInterface(name, /*is_defined=*/false), value_() {} + RTCStatsMember(const char* name, const T& value) + : RTCStatsMemberInterface(name, /*is_defined=*/true), value_(value) {} + RTCStatsMember(const char* name, T&& value) + : RTCStatsMemberInterface(name, /*is_defined=*/true), + value_(std::move(value)) {} + explicit RTCStatsMember(const RTCStatsMember& other) + : RTCStatsMemberInterface(other.name_, other.is_defined_), + value_(other.value_) {} + explicit RTCStatsMember(RTCStatsMember&& other) + : RTCStatsMemberInterface(other.name_, other.is_defined_), + value_(std::move(other.value_)) {} + + static Type StaticType(); + Type type() const override { return StaticType(); } + bool is_sequence() const override; + bool is_string() const override; + bool is_standardized() const override { return true; } + bool operator==(const RTCStatsMemberInterface& other) const override { + if (type() != other.type() || is_standardized() != other.is_standardized()) + return false; + const RTCStatsMember& other_t = + static_cast&>(other); + if (!is_defined_) + return !other_t.is_defined(); + if (!other.is_defined()) + return false; + return value_ == other_t.value_; + } + std::string ValueToString() const override; + std::string ValueToJson() const override; + + template + inline T ValueOrDefault(U default_value) const { + if (is_defined()) { + return *(*this); + } + return default_value; + } + + // Assignment operators. + T& operator=(const T& value) { + value_ = value; + is_defined_ = true; + return value_; + } + T& operator=(const T&& value) { + value_ = std::move(value); + is_defined_ = true; + return value_; + } + + // Value getters. + T& operator*() { + RTC_DCHECK(is_defined_); + return value_; + } + const T& operator*() const { + RTC_DCHECK(is_defined_); + return value_; + } + + // Value getters, arrow operator. 
+ T* operator->() { + RTC_DCHECK(is_defined_); + return &value_; + } + const T* operator->() const { + RTC_DCHECK(is_defined_); + return &value_; + } + + private: + T value_; +}; + +#define WEBRTC_DECLARE_RTCSTATSMEMBER(T) \ + template <> \ + RTC_EXPORT RTCStatsMemberInterface::Type RTCStatsMember::StaticType(); \ + template <> \ + RTC_EXPORT bool RTCStatsMember::is_sequence() const; \ + template <> \ + RTC_EXPORT bool RTCStatsMember::is_string() const; \ + template <> \ + RTC_EXPORT std::string RTCStatsMember::ValueToString() const; \ + template <> \ + RTC_EXPORT std::string RTCStatsMember::ValueToJson() const; \ + extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) \ + RTCStatsMember + +WEBRTC_DECLARE_RTCSTATSMEMBER(bool); +WEBRTC_DECLARE_RTCSTATSMEMBER(int32_t); +WEBRTC_DECLARE_RTCSTATSMEMBER(uint32_t); +WEBRTC_DECLARE_RTCSTATSMEMBER(int64_t); +WEBRTC_DECLARE_RTCSTATSMEMBER(uint64_t); +WEBRTC_DECLARE_RTCSTATSMEMBER(double); +WEBRTC_DECLARE_RTCSTATSMEMBER(std::string); +WEBRTC_DECLARE_RTCSTATSMEMBER(std::vector); +WEBRTC_DECLARE_RTCSTATSMEMBER(std::vector); +WEBRTC_DECLARE_RTCSTATSMEMBER(std::vector); +WEBRTC_DECLARE_RTCSTATSMEMBER(std::vector); +WEBRTC_DECLARE_RTCSTATSMEMBER(std::vector); +WEBRTC_DECLARE_RTCSTATSMEMBER(std::vector); +WEBRTC_DECLARE_RTCSTATSMEMBER(std::vector); + +// Using inheritance just so that it's obvious from the member's declaration +// whether it's standardized or not. 
+template +class RTCNonStandardStatsMember : public RTCStatsMember { + public: + explicit RTCNonStandardStatsMember(const char* name) + : RTCStatsMember(name) {} + RTCNonStandardStatsMember(const char* name, + std::initializer_list group_ids) + : RTCStatsMember(name), group_ids_(group_ids) {} + RTCNonStandardStatsMember(const char* name, const T& value) + : RTCStatsMember(name, value) {} + RTCNonStandardStatsMember(const char* name, T&& value) + : RTCStatsMember(name, std::move(value)) {} + explicit RTCNonStandardStatsMember(const RTCNonStandardStatsMember& other) + : RTCStatsMember(other), group_ids_(other.group_ids_) {} + explicit RTCNonStandardStatsMember(RTCNonStandardStatsMember&& other) + : RTCStatsMember(std::move(other)), + group_ids_(std::move(other.group_ids_)) {} + + bool is_standardized() const override { return false; } + + std::vector group_ids() const override { + return group_ids_; + } + + T& operator=(const T& value) { return RTCStatsMember::operator=(value); } + T& operator=(const T&& value) { + return RTCStatsMember::operator=(std::move(value)); + } + + private: + std::vector group_ids_; +}; + +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCNonStandardStatsMember; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCNonStandardStatsMember; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCNonStandardStatsMember; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCNonStandardStatsMember; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCNonStandardStatsMember; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCNonStandardStatsMember; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCNonStandardStatsMember; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCNonStandardStatsMember>; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCNonStandardStatsMember>; +extern template class 
RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCNonStandardStatsMember>; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCNonStandardStatsMember>; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCNonStandardStatsMember>; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCNonStandardStatsMember>; +extern template class RTC_EXPORT_TEMPLATE_DECLARE(RTC_EXPORT) + RTCNonStandardStatsMember>; + +} // namespace webrtc + +#endif // API_STATS_RTC_STATS_H_ diff --git a/api/stats/rtc_stats_collector_callback.h b/api/stats/rtc_stats_collector_callback.h new file mode 100644 index 0000000..c3e0824 --- /dev/null +++ b/api/stats/rtc_stats_collector_callback.h @@ -0,0 +1,30 @@ +/* + * Copyright 2016 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_STATS_RTC_STATS_COLLECTOR_CALLBACK_H_ +#define API_STATS_RTC_STATS_COLLECTOR_CALLBACK_H_ + +#include "api/scoped_refptr.h" +#include "api/stats/rtc_stats_report.h" +#include "rtc_base/ref_count.h" + +namespace webrtc { + +class RTCStatsCollectorCallback : public virtual rtc::RefCountInterface { + public: + ~RTCStatsCollectorCallback() override = default; + + virtual void OnStatsDelivered( + const rtc::scoped_refptr& report) = 0; +}; + +} // namespace webrtc + +#endif // API_STATS_RTC_STATS_COLLECTOR_CALLBACK_H_ diff --git a/api/stats/rtc_stats_report.h b/api/stats/rtc_stats_report.h new file mode 100644 index 0000000..dc15937 --- /dev/null +++ b/api/stats/rtc_stats_report.h @@ -0,0 +1,121 @@ +/* + * Copyright 2016 The WebRTC Project Authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_STATS_RTC_STATS_REPORT_H_ +#define API_STATS_RTC_STATS_REPORT_H_ + +#include +#include + +#include +#include +#include +#include + +#include "api/scoped_refptr.h" +#include "api/stats/rtc_stats.h" +#include "rtc_base/ref_count.h" +#include "rtc_base/ref_counted_object.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// A collection of stats. +// This is accessible as a map from |RTCStats::id| to |RTCStats|. +class RTC_EXPORT RTCStatsReport : public rtc::RefCountInterface { + public: + typedef std::map> StatsMap; + + class RTC_EXPORT ConstIterator { + public: + ConstIterator(ConstIterator&& other); + ~ConstIterator(); + + ConstIterator& operator++(); + ConstIterator& operator++(int); + const RTCStats& operator*() const; + const RTCStats* operator->() const; + bool operator==(const ConstIterator& other) const; + bool operator!=(const ConstIterator& other) const; + + private: + friend class RTCStatsReport; + ConstIterator(const rtc::scoped_refptr& report, + StatsMap::const_iterator it); + + // Reference report to make sure it is kept alive. + rtc::scoped_refptr report_; + StatsMap::const_iterator it_; + }; + + // TODO(hbos): Remove "= 0" once Chromium unittest has been updated to call + // with a parameter. 
crbug.com/627816 + static rtc::scoped_refptr Create(int64_t timestamp_us = 0); + + explicit RTCStatsReport(int64_t timestamp_us); + RTCStatsReport(const RTCStatsReport& other) = delete; + rtc::scoped_refptr Copy() const; + + int64_t timestamp_us() const { return timestamp_us_; } + void AddStats(std::unique_ptr stats); + const RTCStats* Get(const std::string& id) const; + size_t size() const { return stats_.size(); } + + // Gets the stat object of type |T| by ID, where |T| is any class descending + // from |RTCStats|. + // Returns null if there is no stats object for the given ID or it is the + // wrong type. + template + const T* GetAs(const std::string& id) const { + const RTCStats* stats = Get(id); + if (!stats || stats->type() != T::kType) { + return nullptr; + } + return &stats->cast_to(); + } + + // Removes the stats object from the report, returning ownership of it or null + // if there is no object with |id|. + std::unique_ptr Take(const std::string& id); + // Takes ownership of all the stats in |victim|, leaving it empty. + void TakeMembersFrom(rtc::scoped_refptr victim); + + // Stats iterators. Stats are ordered lexicographically on |RTCStats::id|. + ConstIterator begin() const; + ConstIterator end() const; + + // Gets the subset of stats that are of type |T|, where |T| is any class + // descending from |RTCStats|. + template + std::vector GetStatsOfType() const { + std::vector stats_of_type; + for (const RTCStats& stats : *this) { + if (stats.type() == T::kType) + stats_of_type.push_back(&stats.cast_to()); + } + return stats_of_type; + } + + // Creates a JSON readable string representation of the report, + // listing all of its stats objects. 
+ std::string ToJson() const; + + friend class rtc::RefCountedObject; + + private: + ~RTCStatsReport() override; + + int64_t timestamp_us_; + StatsMap stats_; +}; + +} // namespace webrtc + +#endif // API_STATS_RTC_STATS_REPORT_H_ diff --git a/api/stats/rtcstats_objects.h b/api/stats/rtcstats_objects.h new file mode 100644 index 0000000..7d8f5f5 --- /dev/null +++ b/api/stats/rtcstats_objects.h @@ -0,0 +1,639 @@ +/* + * Copyright 2016 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_STATS_RTCSTATS_OBJECTS_H_ +#define API_STATS_RTCSTATS_OBJECTS_H_ + +#include + +#include +#include +#include + +#include "api/stats/rtc_stats.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// https://w3c.github.io/webrtc-pc/#idl-def-rtcdatachannelstate +struct RTCDataChannelState { + static const char* const kConnecting; + static const char* const kOpen; + static const char* const kClosing; + static const char* const kClosed; +}; + +// https://w3c.github.io/webrtc-stats/#dom-rtcstatsicecandidatepairstate +struct RTCStatsIceCandidatePairState { + static const char* const kFrozen; + static const char* const kWaiting; + static const char* const kInProgress; + static const char* const kFailed; + static const char* const kSucceeded; +}; + +// https://w3c.github.io/webrtc-pc/#rtcicecandidatetype-enum +struct RTCIceCandidateType { + static const char* const kHost; + static const char* const kSrflx; + static const char* const kPrflx; + static const char* const kRelay; +}; + +// https://w3c.github.io/webrtc-pc/#idl-def-rtcdtlstransportstate +struct RTCDtlsTransportState { + static const char* const kNew; + static const char* 
const kConnecting; + static const char* const kConnected; + static const char* const kClosed; + static const char* const kFailed; +}; + +// |RTCMediaStreamTrackStats::kind| is not an enum in the spec but the only +// valid values are "audio" and "video". +// https://w3c.github.io/webrtc-stats/#dom-rtcmediastreamtrackstats-kind +struct RTCMediaStreamTrackKind { + static const char* const kAudio; + static const char* const kVideo; +}; + +// https://w3c.github.io/webrtc-stats/#dom-rtcnetworktype +struct RTCNetworkType { + static const char* const kBluetooth; + static const char* const kCellular; + static const char* const kEthernet; + static const char* const kWifi; + static const char* const kWimax; + static const char* const kVpn; + static const char* const kUnknown; +}; + +// https://w3c.github.io/webrtc-stats/#dom-rtcqualitylimitationreason +struct RTCQualityLimitationReason { + static const char* const kNone; + static const char* const kCpu; + static const char* const kBandwidth; + static const char* const kOther; +}; + +// https://webrtc.org/experiments/rtp-hdrext/video-content-type/ +struct RTCContentType { + static const char* const kUnspecified; + static const char* const kScreenshare; +}; + +// https://w3c.github.io/webrtc-stats/#certificatestats-dict* +class RTC_EXPORT RTCCertificateStats final : public RTCStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCCertificateStats(const std::string& id, int64_t timestamp_us); + RTCCertificateStats(std::string&& id, int64_t timestamp_us); + RTCCertificateStats(const RTCCertificateStats& other); + ~RTCCertificateStats() override; + + RTCStatsMember fingerprint; + RTCStatsMember fingerprint_algorithm; + RTCStatsMember base64_certificate; + RTCStatsMember issuer_certificate_id; +}; + +// https://w3c.github.io/webrtc-stats/#codec-dict* +class RTC_EXPORT RTCCodecStats final : public RTCStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCCodecStats(const std::string& id, int64_t timestamp_us); + RTCCodecStats(std::string&& 
id, int64_t timestamp_us); + RTCCodecStats(const RTCCodecStats& other); + ~RTCCodecStats() override; + + RTCStatsMember payload_type; + RTCStatsMember mime_type; + RTCStatsMember clock_rate; + RTCStatsMember channels; + RTCStatsMember sdp_fmtp_line; +}; + +// https://w3c.github.io/webrtc-stats/#dcstats-dict* +class RTC_EXPORT RTCDataChannelStats final : public RTCStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCDataChannelStats(const std::string& id, int64_t timestamp_us); + RTCDataChannelStats(std::string&& id, int64_t timestamp_us); + RTCDataChannelStats(const RTCDataChannelStats& other); + ~RTCDataChannelStats() override; + + RTCStatsMember label; + RTCStatsMember protocol; + RTCStatsMember data_channel_identifier; + // TODO(hbos): Support enum types? "RTCStatsMember"? + RTCStatsMember state; + RTCStatsMember messages_sent; + RTCStatsMember bytes_sent; + RTCStatsMember messages_received; + RTCStatsMember bytes_received; +}; + +// https://w3c.github.io/webrtc-stats/#candidatepair-dict* +// TODO(hbos): Tracking bug https://bugs.webrtc.org/7062 +class RTC_EXPORT RTCIceCandidatePairStats final : public RTCStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCIceCandidatePairStats(const std::string& id, int64_t timestamp_us); + RTCIceCandidatePairStats(std::string&& id, int64_t timestamp_us); + RTCIceCandidatePairStats(const RTCIceCandidatePairStats& other); + ~RTCIceCandidatePairStats() override; + + RTCStatsMember transport_id; + RTCStatsMember local_candidate_id; + RTCStatsMember remote_candidate_id; + // TODO(hbos): Support enum types? + // "RTCStatsMember"? + RTCStatsMember state; + RTCStatsMember priority; + RTCStatsMember nominated; + // TODO(hbos): Collect this the way the spec describes it. We have a value for + // it but it is not spec-compliant. https://bugs.webrtc.org/7062 + RTCStatsMember writable; + // TODO(hbos): Collect and populate this value. 
https://bugs.webrtc.org/7062 + RTCStatsMember readable; + RTCStatsMember bytes_sent; + RTCStatsMember bytes_received; + RTCStatsMember total_round_trip_time; + RTCStatsMember current_round_trip_time; + RTCStatsMember available_outgoing_bitrate; + // TODO(hbos): Populate this value. It is wired up and collected the same way + // "VideoBwe.googAvailableReceiveBandwidth" is, but that value is always + // undefined. https://bugs.webrtc.org/7062 + RTCStatsMember available_incoming_bitrate; + RTCStatsMember requests_received; + RTCStatsMember requests_sent; + RTCStatsMember responses_received; + RTCStatsMember responses_sent; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7062 + RTCStatsMember retransmissions_received; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7062 + RTCStatsMember retransmissions_sent; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7062 + RTCStatsMember consent_requests_received; + RTCStatsMember consent_requests_sent; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7062 + RTCStatsMember consent_responses_received; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7062 + RTCStatsMember consent_responses_sent; +}; + +// https://w3c.github.io/webrtc-stats/#icecandidate-dict* +// TODO(hbos): |RTCStatsCollector| only collects candidates that are part of +// ice candidate pairs, but there could be candidates not paired with anything. +// crbug.com/632723 +// TODO(qingsi): Add the stats of STUN binding requests (keepalives) and collect +// them in the new PeerConnection::GetStats. 
+class RTC_EXPORT RTCIceCandidateStats : public RTCStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCIceCandidateStats(const RTCIceCandidateStats& other); + ~RTCIceCandidateStats() override; + + RTCStatsMember transport_id; + RTCStatsMember is_remote; + RTCStatsMember network_type; + RTCStatsMember ip; + RTCStatsMember port; + RTCStatsMember protocol; + RTCStatsMember relay_protocol; + // TODO(hbos): Support enum types? "RTCStatsMember"? + RTCStatsMember candidate_type; + RTCStatsMember priority; + // TODO(hbos): Not collected by |RTCStatsCollector|. crbug.com/632723 + RTCStatsMember url; + // TODO(hbos): |deleted = true| case is not supported by |RTCStatsCollector|. + // crbug.com/632723 + RTCStatsMember deleted; // = false + + protected: + RTCIceCandidateStats(const std::string& id, + int64_t timestamp_us, + bool is_remote); + RTCIceCandidateStats(std::string&& id, int64_t timestamp_us, bool is_remote); +}; + +// In the spec both local and remote varieties are of type RTCIceCandidateStats. +// But here we define them as subclasses of |RTCIceCandidateStats| because the +// |kType| need to be different ("RTCStatsType type") in the local/remote case. +// https://w3c.github.io/webrtc-stats/#rtcstatstype-str* +// This forces us to have to override copy() and type(). 
+class RTC_EXPORT RTCLocalIceCandidateStats final : public RTCIceCandidateStats { + public: + static const char kType[]; + RTCLocalIceCandidateStats(const std::string& id, int64_t timestamp_us); + RTCLocalIceCandidateStats(std::string&& id, int64_t timestamp_us); + std::unique_ptr copy() const override; + const char* type() const override; +}; + +class RTC_EXPORT RTCRemoteIceCandidateStats final + : public RTCIceCandidateStats { + public: + static const char kType[]; + RTCRemoteIceCandidateStats(const std::string& id, int64_t timestamp_us); + RTCRemoteIceCandidateStats(std::string&& id, int64_t timestamp_us); + std::unique_ptr copy() const override; + const char* type() const override; +}; + +// https://w3c.github.io/webrtc-stats/#msstats-dict* +// TODO(hbos): Tracking bug crbug.com/660827 +class RTC_EXPORT RTCMediaStreamStats final : public RTCStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCMediaStreamStats(const std::string& id, int64_t timestamp_us); + RTCMediaStreamStats(std::string&& id, int64_t timestamp_us); + RTCMediaStreamStats(const RTCMediaStreamStats& other); + ~RTCMediaStreamStats() override; + + RTCStatsMember stream_identifier; + RTCStatsMember> track_ids; +}; + +// https://w3c.github.io/webrtc-stats/#mststats-dict* +// TODO(hbos): Tracking bug crbug.com/659137 +class RTC_EXPORT RTCMediaStreamTrackStats final : public RTCStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCMediaStreamTrackStats(const std::string& id, + int64_t timestamp_us, + const char* kind); + RTCMediaStreamTrackStats(std::string&& id, + int64_t timestamp_us, + const char* kind); + RTCMediaStreamTrackStats(const RTCMediaStreamTrackStats& other); + ~RTCMediaStreamTrackStats() override; + + RTCStatsMember track_identifier; + RTCStatsMember media_source_id; + RTCStatsMember remote_source; + RTCStatsMember ended; + // TODO(hbos): |RTCStatsCollector| does not return stats for detached tracks. 
+ // crbug.com/659137 + RTCStatsMember detached; + // See |RTCMediaStreamTrackKind| for valid values. + RTCStatsMember kind; + RTCStatsMember jitter_buffer_delay; + RTCStatsMember jitter_buffer_emitted_count; + // Video-only members + RTCStatsMember frame_width; + RTCStatsMember frame_height; + // TODO(hbos): Not collected by |RTCStatsCollector|. crbug.com/659137 + RTCStatsMember frames_per_second; + RTCStatsMember frames_sent; + RTCStatsMember huge_frames_sent; + RTCStatsMember frames_received; + RTCStatsMember frames_decoded; + RTCStatsMember frames_dropped; + // TODO(hbos): Not collected by |RTCStatsCollector|. crbug.com/659137 + RTCStatsMember frames_corrupted; + // TODO(hbos): Not collected by |RTCStatsCollector|. crbug.com/659137 + RTCStatsMember partial_frames_lost; + // TODO(hbos): Not collected by |RTCStatsCollector|. crbug.com/659137 + RTCStatsMember full_frames_lost; + // Audio-only members + RTCStatsMember audio_level; // Receive-only + RTCStatsMember total_audio_energy; // Receive-only + RTCStatsMember echo_return_loss; + RTCStatsMember echo_return_loss_enhancement; + RTCStatsMember total_samples_received; + RTCStatsMember total_samples_duration; // Receive-only + RTCStatsMember concealed_samples; + RTCStatsMember silent_concealed_samples; + RTCStatsMember concealment_events; + RTCStatsMember inserted_samples_for_deceleration; + RTCStatsMember removed_samples_for_acceleration; + // Non-standard audio-only member + // TODO(kuddai): Add description to standard. crbug.com/webrtc/10042 + RTCNonStandardStatsMember jitter_buffer_flushes; + RTCNonStandardStatsMember delayed_packet_outage_samples; + RTCNonStandardStatsMember relative_packet_arrival_delay; + // Non-standard metric showing target delay of jitter buffer. + // This value is increased by the target jitter buffer delay every time a + // sample is emitted by the jitter buffer. The added target is the target + // delay, in seconds, at the time that the sample was emitted from the jitter + // buffer. 
(https://github.com/w3c/webrtc-provisional-stats/pull/20) + // Currently it is implemented only for audio. + // TODO(titovartem) implement for video streams when will be requested. + RTCNonStandardStatsMember jitter_buffer_target_delay; + // TODO(henrik.lundin): Add description of the interruption metrics at + // https://github.com/henbos/webrtc-provisional-stats/issues/17 + RTCNonStandardStatsMember interruption_count; + RTCNonStandardStatsMember total_interruption_duration; + // Non-standard video-only members. + // https://henbos.github.io/webrtc-provisional-stats/#RTCVideoReceiverStats-dict* + RTCNonStandardStatsMember freeze_count; + RTCNonStandardStatsMember pause_count; + RTCNonStandardStatsMember total_freezes_duration; + RTCNonStandardStatsMember total_pauses_duration; + RTCNonStandardStatsMember total_frames_duration; + RTCNonStandardStatsMember sum_squared_frame_durations; +}; + +// https://w3c.github.io/webrtc-stats/#pcstats-dict* +class RTC_EXPORT RTCPeerConnectionStats final : public RTCStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCPeerConnectionStats(const std::string& id, int64_t timestamp_us); + RTCPeerConnectionStats(std::string&& id, int64_t timestamp_us); + RTCPeerConnectionStats(const RTCPeerConnectionStats& other); + ~RTCPeerConnectionStats() override; + + RTCStatsMember data_channels_opened; + RTCStatsMember data_channels_closed; +}; + +// https://w3c.github.io/webrtc-stats/#streamstats-dict* +// TODO(hbos): Tracking bug crbug.com/657854 +class RTC_EXPORT RTCRTPStreamStats : public RTCStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCRTPStreamStats(const RTCRTPStreamStats& other); + ~RTCRTPStreamStats() override; + + RTCStatsMember ssrc; + // TODO(hbos): Remote case not supported by |RTCStatsCollector|. + // crbug.com/657855, 657856 + RTCStatsMember is_remote; // = false + RTCStatsMember media_type; // renamed to kind. 
+ RTCStatsMember kind; + RTCStatsMember track_id; + RTCStatsMember transport_id; + RTCStatsMember codec_id; + // FIR and PLI counts are only defined for |media_type == "video"|. + RTCStatsMember fir_count; + RTCStatsMember pli_count; + // TODO(hbos): NACK count should be collected by |RTCStatsCollector| for both + // audio and video but is only defined in the "video" case. crbug.com/657856 + RTCStatsMember nack_count; + // TODO(hbos): Not collected by |RTCStatsCollector|. crbug.com/657854 + // SLI count is only defined for |media_type == "video"|. + RTCStatsMember sli_count; + RTCStatsMember qp_sum; + + protected: + RTCRTPStreamStats(const std::string& id, int64_t timestamp_us); + RTCRTPStreamStats(std::string&& id, int64_t timestamp_us); +}; + +// https://w3c.github.io/webrtc-stats/#inboundrtpstats-dict* +// TODO(hbos): Support the remote case |is_remote = true|. +// https://bugs.webrtc.org/7065 +class RTC_EXPORT RTCInboundRTPStreamStats final : public RTCRTPStreamStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCInboundRTPStreamStats(const std::string& id, int64_t timestamp_us); + RTCInboundRTPStreamStats(std::string&& id, int64_t timestamp_us); + RTCInboundRTPStreamStats(const RTCInboundRTPStreamStats& other); + ~RTCInboundRTPStreamStats() override; + + RTCStatsMember packets_received; + RTCStatsMember fec_packets_received; + RTCStatsMember fec_packets_discarded; + RTCStatsMember bytes_received; + RTCStatsMember header_bytes_received; + RTCStatsMember packets_lost; // Signed per RFC 3550 + RTCStatsMember last_packet_received_timestamp; + // TODO(hbos): Collect and populate this value for both "audio" and "video", + // currently not collected for "video". 
https://bugs.webrtc.org/7065 + RTCStatsMember jitter; + RTCStatsMember jitter_buffer_delay; + RTCStatsMember jitter_buffer_emitted_count; + RTCStatsMember total_samples_received; + RTCStatsMember concealed_samples; + RTCStatsMember silent_concealed_samples; + RTCStatsMember concealment_events; + RTCStatsMember inserted_samples_for_deceleration; + RTCStatsMember removed_samples_for_acceleration; + RTCStatsMember audio_level; + RTCStatsMember total_audio_energy; + RTCStatsMember total_samples_duration; + RTCStatsMember frames_received; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 + RTCStatsMember round_trip_time; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 + RTCStatsMember packets_discarded; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 + RTCStatsMember packets_repaired; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 + RTCStatsMember burst_packets_lost; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 + RTCStatsMember burst_packets_discarded; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 + RTCStatsMember burst_loss_count; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 + RTCStatsMember burst_discard_count; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 + RTCStatsMember burst_loss_rate; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 + RTCStatsMember burst_discard_rate; + // TODO(hbos): Collect and populate this value. https://bugs.webrtc.org/7065 + RTCStatsMember gap_loss_rate; + // TODO(hbos): Collect and populate this value. 
https://bugs.webrtc.org/7065 + RTCStatsMember gap_discard_rate; + RTCStatsMember frame_width; + RTCStatsMember frame_height; + RTCStatsMember frame_bit_depth; + RTCStatsMember frames_per_second; + RTCStatsMember frames_decoded; + RTCStatsMember key_frames_decoded; + RTCStatsMember frames_dropped; + RTCStatsMember total_decode_time; + RTCStatsMember total_inter_frame_delay; + RTCStatsMember total_squared_inter_frame_delay; + // https://henbos.github.io/webrtc-provisional-stats/#dom-rtcinboundrtpstreamstats-contenttype + RTCStatsMember content_type; + // TODO(asapersson): Currently only populated if audio/video sync is enabled. + RTCStatsMember estimated_playout_timestamp; + // TODO(hbos): This is only implemented for video; implement it for audio as + // well. + RTCStatsMember decoder_implementation; +}; + +// https://w3c.github.io/webrtc-stats/#outboundrtpstats-dict* +// TODO(hbos): Support the remote case |is_remote = true|. +// https://bugs.webrtc.org/7066 +class RTC_EXPORT RTCOutboundRTPStreamStats final : public RTCRTPStreamStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCOutboundRTPStreamStats(const std::string& id, int64_t timestamp_us); + RTCOutboundRTPStreamStats(std::string&& id, int64_t timestamp_us); + RTCOutboundRTPStreamStats(const RTCOutboundRTPStreamStats& other); + ~RTCOutboundRTPStreamStats() override; + + RTCStatsMember media_source_id; + RTCStatsMember remote_id; + RTCStatsMember rid; + RTCStatsMember packets_sent; + RTCStatsMember retransmitted_packets_sent; + RTCStatsMember bytes_sent; + RTCStatsMember header_bytes_sent; + RTCStatsMember retransmitted_bytes_sent; + // TODO(hbos): Collect and populate this value. 
https://bugs.webrtc.org/7066 + RTCStatsMember target_bitrate; + RTCStatsMember frames_encoded; + RTCStatsMember key_frames_encoded; + RTCStatsMember total_encode_time; + RTCStatsMember total_encoded_bytes_target; + RTCStatsMember frame_width; + RTCStatsMember frame_height; + RTCStatsMember frames_per_second; + RTCStatsMember frames_sent; + RTCStatsMember huge_frames_sent; + // TODO(https://crbug.com/webrtc/10635): This is only implemented for video; + // implement it for audio as well. + RTCStatsMember total_packet_send_delay; + // Enum type RTCQualityLimitationReason + // TODO(https://crbug.com/webrtc/10686): Also expose + // qualityLimitationDurations. Requires RTCStatsMember support for + // "record", see https://crbug.com/webrtc/10685. + RTCStatsMember quality_limitation_reason; + // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-qualitylimitationresolutionchanges + RTCStatsMember quality_limitation_resolution_changes; + // https://henbos.github.io/webrtc-provisional-stats/#dom-rtcoutboundrtpstreamstats-contenttype + RTCStatsMember content_type; + // TODO(hbos): This is only implemented for video; implement it for audio as + // well. + RTCStatsMember encoder_implementation; +}; + +// TODO(https://crbug.com/webrtc/10671): Refactor the stats dictionaries to have +// the same hierarchy as in the spec; implement RTCReceivedRtpStreamStats. +// Several metrics are shared between "outbound-rtp", "remote-inbound-rtp", +// "inbound-rtp" and "remote-outbound-rtp". In the spec there is a hierarchy of +// dictionaries that minimizes defining the same metrics in multiple places. +// From JavaScript this hierarchy is not observable and the spec's hierarchy is +// purely editorial. In C++ non-final classes in the hierarchy could be used to +// refer to different stats objects within the hierarchy. 
+// https://w3c.github.io/webrtc-stats/#remoteinboundrtpstats-dict* +class RTC_EXPORT RTCRemoteInboundRtpStreamStats final : public RTCStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCRemoteInboundRtpStreamStats(const std::string& id, int64_t timestamp_us); + RTCRemoteInboundRtpStreamStats(std::string&& id, int64_t timestamp_us); + RTCRemoteInboundRtpStreamStats(const RTCRemoteInboundRtpStreamStats& other); + ~RTCRemoteInboundRtpStreamStats() override; + + // In the spec RTCRemoteInboundRtpStreamStats inherits from RTCRtpStreamStats + // and RTCReceivedRtpStreamStats. The members here are listed based on where + // they are defined in the spec. + // RTCRtpStreamStats + RTCStatsMember ssrc; + RTCStatsMember kind; + RTCStatsMember transport_id; + RTCStatsMember codec_id; + // RTCReceivedRtpStreamStats + RTCStatsMember packets_lost; + RTCStatsMember jitter; + // TODO(hbos): The following RTCReceivedRtpStreamStats metrics should also be + // implemented: packetsReceived, packetsDiscarded, packetsRepaired, + // burstPacketsLost, burstPacketsDiscarded, burstLossCount, burstDiscardCount, + // burstLossRate, burstDiscardRate, gapLossRate and gapDiscardRate. + // RTCRemoteInboundRtpStreamStats + RTCStatsMember local_id; + RTCStatsMember round_trip_time; + // TODO(hbos): The following RTCRemoteInboundRtpStreamStats metric should also + // be implemented: fractionLost. 
+}; + +// https://w3c.github.io/webrtc-stats/#dom-rtcmediasourcestats +class RTC_EXPORT RTCMediaSourceStats : public RTCStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCMediaSourceStats(const RTCMediaSourceStats& other); + ~RTCMediaSourceStats() override; + + RTCStatsMember track_identifier; + RTCStatsMember kind; + + protected: + RTCMediaSourceStats(const std::string& id, int64_t timestamp_us); + RTCMediaSourceStats(std::string&& id, int64_t timestamp_us); +}; + +// https://w3c.github.io/webrtc-stats/#dom-rtcaudiosourcestats +class RTC_EXPORT RTCAudioSourceStats final : public RTCMediaSourceStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCAudioSourceStats(const std::string& id, int64_t timestamp_us); + RTCAudioSourceStats(std::string&& id, int64_t timestamp_us); + RTCAudioSourceStats(const RTCAudioSourceStats& other); + ~RTCAudioSourceStats() override; + + RTCStatsMember audio_level; + RTCStatsMember total_audio_energy; + RTCStatsMember total_samples_duration; +}; + +// https://w3c.github.io/webrtc-stats/#dom-rtcvideosourcestats +class RTC_EXPORT RTCVideoSourceStats final : public RTCMediaSourceStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCVideoSourceStats(const std::string& id, int64_t timestamp_us); + RTCVideoSourceStats(std::string&& id, int64_t timestamp_us); + RTCVideoSourceStats(const RTCVideoSourceStats& other); + ~RTCVideoSourceStats() override; + + RTCStatsMember width; + RTCStatsMember height; + // TODO(hbos): Implement this metric. 
+ RTCStatsMember frames; + RTCStatsMember frames_per_second; +}; + +// https://w3c.github.io/webrtc-stats/#transportstats-dict* +class RTC_EXPORT RTCTransportStats final : public RTCStats { + public: + WEBRTC_RTCSTATS_DECL(); + + RTCTransportStats(const std::string& id, int64_t timestamp_us); + RTCTransportStats(std::string&& id, int64_t timestamp_us); + RTCTransportStats(const RTCTransportStats& other); + ~RTCTransportStats() override; + + RTCStatsMember bytes_sent; + RTCStatsMember packets_sent; + RTCStatsMember bytes_received; + RTCStatsMember packets_received; + RTCStatsMember rtcp_transport_stats_id; + // TODO(hbos): Support enum types? "RTCStatsMember"? + RTCStatsMember dtls_state; + RTCStatsMember selected_candidate_pair_id; + RTCStatsMember local_certificate_id; + RTCStatsMember remote_certificate_id; + RTCStatsMember tls_version; + RTCStatsMember dtls_cipher; + RTCStatsMember srtp_cipher; + RTCStatsMember selected_candidate_pair_changes; +}; + +} // namespace webrtc + +#endif // API_STATS_RTCSTATS_OBJECTS_H_ diff --git a/api/stats_types.cc b/api/stats_types.cc new file mode 100644 index 0000000..7dcbd13 --- /dev/null +++ b/api/stats_types.cc @@ -0,0 +1,847 @@ +/* + * Copyright 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/stats_types.h" + +#include + +#include "absl/algorithm/container.h" +#include "rtc_base/checks.h" +#include "rtc_base/ref_counted_object.h" + +// TODO(tommi): Could we have a static map of value name -> expected type +// and use this to RTC_DCHECK on correct usage (somewhat strongly typed values)? 
+// Alternatively, we could define the names+type in a separate document and +// generate strongly typed inline C++ code that forces the correct type to be +// used for a given name at compile time. + +using rtc::RefCountedObject; + +namespace webrtc { +namespace { + +// The id of StatsReport of type kStatsReportTypeBwe. +const char kStatsReportVideoBweId[] = "bweforvideo"; + +// NOTE: These names need to be consistent with an external +// specification (W3C Stats Identifiers). +const char* InternalTypeToString(StatsReport::StatsType type) { + switch (type) { + case StatsReport::kStatsReportTypeSession: + return "googLibjingleSession"; + case StatsReport::kStatsReportTypeBwe: + return "VideoBwe"; + case StatsReport::kStatsReportTypeRemoteSsrc: + return "remoteSsrc"; + case StatsReport::kStatsReportTypeSsrc: + return "ssrc"; + case StatsReport::kStatsReportTypeTrack: + return "googTrack"; + case StatsReport::kStatsReportTypeIceLocalCandidate: + return "localcandidate"; + case StatsReport::kStatsReportTypeIceRemoteCandidate: + return "remotecandidate"; + case StatsReport::kStatsReportTypeTransport: + return "transport"; + case StatsReport::kStatsReportTypeComponent: + return "googComponent"; + case StatsReport::kStatsReportTypeCandidatePair: + return "googCandidatePair"; + case StatsReport::kStatsReportTypeCertificate: + return "googCertificate"; + case StatsReport::kStatsReportTypeDataChannel: + return "datachannel"; + } + RTC_NOTREACHED(); + return nullptr; +} + +class BandwidthEstimationId : public StatsReport::IdBase { + public: + BandwidthEstimationId() + : StatsReport::IdBase(StatsReport::kStatsReportTypeBwe) {} + std::string ToString() const override { return kStatsReportVideoBweId; } +}; + +class TypedId : public StatsReport::IdBase { + public: + TypedId(StatsReport::StatsType type, const std::string& id) + : StatsReport::IdBase(type), id_(id) {} + + bool Equals(const IdBase& other) const override { + return IdBase::Equals(other) && + static_cast(other).id_ 
== id_; + } + + std::string ToString() const override { + return std::string(InternalTypeToString(type_)) + kSeparator + id_; + } + + protected: + const std::string id_; +}; + +class TypedIntId : public StatsReport::IdBase { + public: + TypedIntId(StatsReport::StatsType type, int id) + : StatsReport::IdBase(type), id_(id) {} + + bool Equals(const IdBase& other) const override { + return IdBase::Equals(other) && + static_cast(other).id_ == id_; + } + + std::string ToString() const override { + return std::string(InternalTypeToString(type_)) + kSeparator + + rtc::ToString(id_); + } + + protected: + const int id_; +}; + +class IdWithDirection : public TypedId { + public: + IdWithDirection(StatsReport::StatsType type, + const std::string& id, + StatsReport::Direction direction) + : TypedId(type, id), direction_(direction) {} + + bool Equals(const IdBase& other) const override { + return TypedId::Equals(other) && + static_cast(other).direction_ == direction_; + } + + std::string ToString() const override { + std::string ret(TypedId::ToString()); + ret += kSeparator; + ret += direction_ == StatsReport::kSend ? "send" : "recv"; + return ret; + } + + private: + const StatsReport::Direction direction_; +}; + +class CandidateId : public TypedId { + public: + CandidateId(bool local, const std::string& id) + : TypedId(local ? 
StatsReport::kStatsReportTypeIceLocalCandidate + : StatsReport::kStatsReportTypeIceRemoteCandidate, + id) {} + + std::string ToString() const override { return "Cand-" + id_; } +}; + +class ComponentId : public StatsReport::IdBase { + public: + ComponentId(const std::string& content_name, int component) + : ComponentId(StatsReport::kStatsReportTypeComponent, + content_name, + component) {} + + bool Equals(const IdBase& other) const override { + return IdBase::Equals(other) && + static_cast(other).component_ == component_ && + static_cast(other).content_name_ == + content_name_; + } + + std::string ToString() const override { return ToString("Channel-"); } + + protected: + ComponentId(StatsReport::StatsType type, + const std::string& content_name, + int component) + : IdBase(type), content_name_(content_name), component_(component) {} + + std::string ToString(const char* prefix) const { + std::string ret(prefix); + ret += content_name_; + ret += '-'; + ret += rtc::ToString(component_); + return ret; + } + + private: + const std::string content_name_; + const int component_; +}; + +class CandidatePairId : public ComponentId { + public: + CandidatePairId(const std::string& content_name, int component, int index) + : ComponentId(StatsReport::kStatsReportTypeCandidatePair, + content_name, + component), + index_(index) {} + + bool Equals(const IdBase& other) const override { + return ComponentId::Equals(other) && + static_cast(other).index_ == index_; + } + + std::string ToString() const override { + std::string ret(ComponentId::ToString("Conn-")); + ret += '-'; + ret += rtc::ToString(index_); + return ret; + } + + private: + const int index_; +}; + +} // namespace + +StatsReport::IdBase::IdBase(StatsType type) : type_(type) {} +StatsReport::IdBase::~IdBase() {} + +StatsReport::StatsType StatsReport::IdBase::type() const { + return type_; +} + +bool StatsReport::IdBase::Equals(const IdBase& other) const { + return other.type_ == type_; +} + 
+StatsReport::Value::Value(StatsValueName name, int64_t value, Type int_type) + : name(name), type_(int_type) { + RTC_DCHECK(type_ == kInt || type_ == kInt64); + type_ == kInt ? value_.int_ = static_cast(value) : value_.int64_ = value; +} + +StatsReport::Value::Value(StatsValueName name, float f) + : name(name), type_(kFloat) { + value_.float_ = f; +} + +StatsReport::Value::Value(StatsValueName name, const std::string& value) + : name(name), type_(kString) { + value_.string_ = new std::string(value); +} + +StatsReport::Value::Value(StatsValueName name, const char* value) + : name(name), type_(kStaticString) { + value_.static_string_ = value; +} + +StatsReport::Value::Value(StatsValueName name, bool b) + : name(name), type_(kBool) { + value_.bool_ = b; +} + +StatsReport::Value::Value(StatsValueName name, const Id& value) + : name(name), type_(kId) { + value_.id_ = new Id(value); +} + +StatsReport::Value::~Value() { + switch (type_) { + case kInt: + case kInt64: + case kFloat: + case kBool: + case kStaticString: + break; + case kString: + delete value_.string_; + break; + case kId: + delete value_.id_; + break; + } +} + +bool StatsReport::Value::Equals(const Value& other) const { + if (name != other.name) + return false; + + // There's a 1:1 relation between a name and a type, so we don't have to + // check that. 
+ RTC_DCHECK_EQ(type_, other.type_); + + switch (type_) { + case kInt: + return value_.int_ == other.value_.int_; + case kInt64: + return value_.int64_ == other.value_.int64_; + case kFloat: + return value_.float_ == other.value_.float_; + case kStaticString: { +#if RTC_DCHECK_IS_ON + if (value_.static_string_ != other.value_.static_string_) { + RTC_DCHECK(strcmp(value_.static_string_, other.value_.static_string_) != + 0) + << "Duplicate global?"; + } +#endif + return value_.static_string_ == other.value_.static_string_; + } + case kString: + return *value_.string_ == *other.value_.string_; + case kBool: + return value_.bool_ == other.value_.bool_; + case kId: + return (*value_.id_)->Equals(*other.value_.id_); + } + RTC_NOTREACHED(); + return false; +} + +bool StatsReport::Value::operator==(const std::string& value) const { + return (type_ == kString && value_.string_->compare(value) == 0) || + (type_ == kStaticString && value.compare(value_.static_string_) == 0); +} + +bool StatsReport::Value::operator==(const char* value) const { + if (type_ == kString) + return value_.string_->compare(value) == 0; + if (type_ != kStaticString) + return false; +#if RTC_DCHECK_IS_ON + if (value_.static_string_ != value) + RTC_DCHECK(strcmp(value_.static_string_, value) != 0) + << "Duplicate global?"; +#endif + return value == value_.static_string_; +} + +bool StatsReport::Value::operator==(int64_t value) const { + return type_ == kInt ? value_.int_ == static_cast(value) + : (type_ == kInt64 ? 
value_.int64_ == value : false); +} + +bool StatsReport::Value::operator==(bool value) const { + return type_ == kBool && value_.bool_ == value; +} + +bool StatsReport::Value::operator==(float value) const { + return type_ == kFloat && value_.float_ == value; +} + +bool StatsReport::Value::operator==(const Id& value) const { + return type_ == kId && (*value_.id_)->Equals(value); +} + +int StatsReport::Value::int_val() const { + RTC_DCHECK_EQ(type_, kInt); + return value_.int_; +} + +int64_t StatsReport::Value::int64_val() const { + RTC_DCHECK_EQ(type_, kInt64); + return value_.int64_; +} + +float StatsReport::Value::float_val() const { + RTC_DCHECK_EQ(type_, kFloat); + return value_.float_; +} + +const char* StatsReport::Value::static_string_val() const { + RTC_DCHECK_EQ(type_, kStaticString); + return value_.static_string_; +} + +const std::string& StatsReport::Value::string_val() const { + RTC_DCHECK_EQ(type_, kString); + return *value_.string_; +} + +bool StatsReport::Value::bool_val() const { + RTC_DCHECK_EQ(type_, kBool); + return value_.bool_; +} + +const char* StatsReport::Value::display_name() const { + switch (name) { + case kStatsValueNameAecDivergentFilterFraction: + return "aecDivergentFilterFraction"; + case kStatsValueNameAudioOutputLevel: + return "audioOutputLevel"; + case kStatsValueNameAudioInputLevel: + return "audioInputLevel"; + case kStatsValueNameBytesSent: + return "bytesSent"; + case kStatsValueNameConcealedSamples: + return "concealedSamples"; + case kStatsValueNameConcealmentEvents: + return "concealmentEvents"; + case kStatsValueNamePacketsSent: + return "packetsSent"; + case kStatsValueNameBytesReceived: + return "bytesReceived"; + case kStatsValueNameLabel: + return "label"; + case kStatsValueNamePacketsReceived: + return "packetsReceived"; + case kStatsValueNamePacketsLost: + return "packetsLost"; + case kStatsValueNameProtocol: + return "protocol"; + case kStatsValueNameTotalSamplesReceived: + return "totalSamplesReceived"; + case 
kStatsValueNameTransportId: + return "transportId"; + case kStatsValueNameSelectedCandidatePairId: + return "selectedCandidatePairId"; + case kStatsValueNameSsrc: + return "ssrc"; + case kStatsValueNameState: + return "state"; + case kStatsValueNameDataChannelId: + return "datachannelid"; + case kStatsValueNameFramesDecoded: + return "framesDecoded"; + case kStatsValueNameFramesEncoded: + return "framesEncoded"; + case kStatsValueNameJitterBufferDelay: + return "jitterBufferDelay"; + case kStatsValueNameCodecImplementationName: + return "codecImplementationName"; + case kStatsValueNameMediaType: + return "mediaType"; + case kStatsValueNameQpSum: + return "qpSum"; + // 'goog' prefixed constants. + case kStatsValueNameAccelerateRate: + return "googAccelerateRate"; + case kStatsValueNameActiveConnection: + return "googActiveConnection"; + case kStatsValueNameActualEncBitrate: + return "googActualEncBitrate"; + case kStatsValueNameAvailableReceiveBandwidth: + return "googAvailableReceiveBandwidth"; + case kStatsValueNameAvailableSendBandwidth: + return "googAvailableSendBandwidth"; + case kStatsValueNameAvgEncodeMs: + return "googAvgEncodeMs"; + case kStatsValueNameBucketDelay: + return "googBucketDelay"; + case kStatsValueNameBandwidthLimitedResolution: + return "googBandwidthLimitedResolution"; + // STUN ping related attributes. + // + // TODO(zhihuang) Rename these stats to follow the standards. + // Connectivity checks. + case kStatsValueNameSentPingRequestsTotal: + return "requestsSent"; + case kStatsValueNameSentPingRequestsBeforeFirstResponse: + return "consentRequestsSent"; + case kStatsValueNameSentPingResponses: + return "responsesSent"; + case kStatsValueNameRecvPingRequests: + return "requestsReceived"; + case kStatsValueNameRecvPingResponses: + return "responsesReceived"; + // STUN Keepalive pings. 
+ case kStatsValueNameSentStunKeepaliveRequests: + return "stunKeepaliveRequestsSent"; + case kStatsValueNameRecvStunKeepaliveResponses: + return "stunKeepaliveResponsesReceived"; + case kStatsValueNameStunKeepaliveRttTotal: + return "stunKeepaliveRttTotal"; + case kStatsValueNameStunKeepaliveRttSquaredTotal: + return "stunKeepaliveRttSquaredTotal"; + + // Candidate related attributes. Values are taken from + // http://w3c.github.io/webrtc-stats/#rtcstatstype-enum*. + case kStatsValueNameCandidateIPAddress: + return "ipAddress"; + case kStatsValueNameCandidateNetworkType: + return "networkType"; + case kStatsValueNameCandidatePortNumber: + return "portNumber"; + case kStatsValueNameCandidatePriority: + return "priority"; + case kStatsValueNameCandidateTransportType: + return "transport"; + case kStatsValueNameCandidateType: + return "candidateType"; + + case kStatsValueNameChannelId: + return "googChannelId"; + case kStatsValueNameCodecName: + return "googCodecName"; + case kStatsValueNameComponent: + return "googComponent"; + case kStatsValueNameContentName: + return "googContentName"; + case kStatsValueNameContentType: + return "googContentType"; + case kStatsValueNameCpuLimitedResolution: + return "googCpuLimitedResolution"; + case kStatsValueNameDecodingCTSG: + return "googDecodingCTSG"; + case kStatsValueNameDecodingCTN: + return "googDecodingCTN"; + case kStatsValueNameDecodingMutedOutput: + return "googDecodingMuted"; + case kStatsValueNameDecodingNormal: + return "googDecodingNormal"; + case kStatsValueNameDecodingPLC: + return "googDecodingPLC"; + case kStatsValueNameDecodingCodecPLC: + return "googDecodingCodecPLC"; + case kStatsValueNameDecodingCNG: + return "googDecodingCNG"; + case kStatsValueNameDecodingPLCCNG: + return "googDecodingPLCCNG"; + case kStatsValueNameDer: + return "googDerBase64"; + case kStatsValueNameDtlsCipher: + return "dtlsCipher"; + case kStatsValueNameEchoDelayMedian: + return "googEchoCancellationEchoDelayMedian"; + case 
kStatsValueNameEchoDelayStdDev: + return "googEchoCancellationEchoDelayStdDev"; + case kStatsValueNameEchoReturnLoss: + return "googEchoCancellationReturnLoss"; + case kStatsValueNameEchoReturnLossEnhancement: + return "googEchoCancellationReturnLossEnhancement"; + case kStatsValueNameEncodeUsagePercent: + return "googEncodeUsagePercent"; + case kStatsValueNameExpandRate: + return "googExpandRate"; + case kStatsValueNameFingerprint: + return "googFingerprint"; + case kStatsValueNameFingerprintAlgorithm: + return "googFingerprintAlgorithm"; + case kStatsValueNameFirsReceived: + return "googFirsReceived"; + case kStatsValueNameFirsSent: + return "googFirsSent"; + case kStatsValueNameFirstFrameReceivedToDecodedMs: + return "googFirstFrameReceivedToDecodedMs"; + case kStatsValueNameFrameHeightInput: + return "googFrameHeightInput"; + case kStatsValueNameFrameHeightReceived: + return "googFrameHeightReceived"; + case kStatsValueNameFrameHeightSent: + return "googFrameHeightSent"; + case kStatsValueNameFrameRateReceived: + return "googFrameRateReceived"; + case kStatsValueNameFrameRateDecoded: + return "googFrameRateDecoded"; + case kStatsValueNameFrameRateOutput: + return "googFrameRateOutput"; + case kStatsValueNameDecodeMs: + return "googDecodeMs"; + case kStatsValueNameMaxDecodeMs: + return "googMaxDecodeMs"; + case kStatsValueNameCurrentDelayMs: + return "googCurrentDelayMs"; + case kStatsValueNameTargetDelayMs: + return "googTargetDelayMs"; + case kStatsValueNameJitterBufferMs: + return "googJitterBufferMs"; + case kStatsValueNameMinPlayoutDelayMs: + return "googMinPlayoutDelayMs"; + case kStatsValueNameRenderDelayMs: + return "googRenderDelayMs"; + case kStatsValueNameCaptureStartNtpTimeMs: + return "googCaptureStartNtpTimeMs"; + case kStatsValueNameFrameRateInput: + return "googFrameRateInput"; + case kStatsValueNameFrameRateSent: + return "googFrameRateSent"; + case kStatsValueNameFrameWidthInput: + return "googFrameWidthInput"; + case 
kStatsValueNameFrameWidthReceived: + return "googFrameWidthReceived"; + case kStatsValueNameFrameWidthSent: + return "googFrameWidthSent"; + case kStatsValueNameHasEnteredLowResolution: + return "googHasEnteredLowResolution"; + case kStatsValueNameHugeFramesSent: + return "hugeFramesSent"; + case kStatsValueNameInitiator: + return "googInitiator"; + case kStatsValueNameInterframeDelayMaxMs: + return "googInterframeDelayMax"; + case kStatsValueNameIssuerId: + return "googIssuerId"; + case kStatsValueNameJitterReceived: + return "googJitterReceived"; + case kStatsValueNameLocalAddress: + return "googLocalAddress"; + case kStatsValueNameLocalCandidateId: + return "localCandidateId"; + case kStatsValueNameLocalCandidateType: + return "googLocalCandidateType"; + case kStatsValueNameLocalCertificateId: + return "localCertificateId"; + case kStatsValueNameAdaptationChanges: + return "googAdaptationChanges"; + case kStatsValueNameNacksReceived: + return "googNacksReceived"; + case kStatsValueNameNacksSent: + return "googNacksSent"; + case kStatsValueNamePreemptiveExpandRate: + return "googPreemptiveExpandRate"; + case kStatsValueNamePlisReceived: + return "googPlisReceived"; + case kStatsValueNamePlisSent: + return "googPlisSent"; + case kStatsValueNamePreferredJitterBufferMs: + return "googPreferredJitterBufferMs"; + case kStatsValueNameReceiving: + return "googReadable"; + case kStatsValueNameRemoteAddress: + return "googRemoteAddress"; + case kStatsValueNameRemoteCandidateId: + return "remoteCandidateId"; + case kStatsValueNameRemoteCandidateType: + return "googRemoteCandidateType"; + case kStatsValueNameRemoteCertificateId: + return "remoteCertificateId"; + case kStatsValueNameResidualEchoLikelihood: + return "googResidualEchoLikelihood"; + case kStatsValueNameResidualEchoLikelihoodRecentMax: + return "googResidualEchoLikelihoodRecentMax"; + case kStatsValueNameAnaBitrateActionCounter: + return "googAnaBitrateActionCounter"; + case 
kStatsValueNameAnaChannelActionCounter: + return "googAnaChannelActionCounter"; + case kStatsValueNameAnaDtxActionCounter: + return "googAnaDtxActionCounter"; + case kStatsValueNameAnaFecActionCounter: + return "googAnaFecActionCounter"; + case kStatsValueNameAnaFrameLengthIncreaseCounter: + return "googAnaFrameLengthIncreaseCounter"; + case kStatsValueNameAnaFrameLengthDecreaseCounter: + return "googAnaFrameLengthDecreaseCounter"; + case kStatsValueNameAnaUplinkPacketLossFraction: + return "googAnaUplinkPacketLossFraction"; + case kStatsValueNameRetransmitBitrate: + return "googRetransmitBitrate"; + case kStatsValueNameRtt: + return "googRtt"; + case kStatsValueNameSecondaryDecodedRate: + return "googSecondaryDecodedRate"; + case kStatsValueNameSecondaryDiscardedRate: + return "googSecondaryDiscardedRate"; + case kStatsValueNameSendPacketsDiscarded: + return "packetsDiscardedOnSend"; + case kStatsValueNameSpeechExpandRate: + return "googSpeechExpandRate"; + case kStatsValueNameSrtpCipher: + return "srtpCipher"; + case kStatsValueNameTargetEncBitrate: + return "googTargetEncBitrate"; + case kStatsValueNameTotalAudioEnergy: + return "totalAudioEnergy"; + case kStatsValueNameTotalSamplesDuration: + return "totalSamplesDuration"; + case kStatsValueNameTransmitBitrate: + return "googTransmitBitrate"; + case kStatsValueNameTransportType: + return "googTransportType"; + case kStatsValueNameTrackId: + return "googTrackId"; + case kStatsValueNameTimingFrameInfo: + return "googTimingFrameInfo"; + case kStatsValueNameTypingNoiseState: + return "googTypingNoiseState"; + case kStatsValueNameWritable: + return "googWritable"; + case kStatsValueNameAudioDeviceUnderrunCounter: + return "googAudioDeviceUnderrunCounter"; + } + + return nullptr; +} + +std::string StatsReport::Value::ToString() const { + switch (type_) { + case kInt: + return rtc::ToString(value_.int_); + case kInt64: + return rtc::ToString(value_.int64_); + case kFloat: + return rtc::ToString(value_.float_); + case 
kStaticString:
+      return std::string(value_.static_string_);
+    case kString:
+      return *value_.string_;
+    case kBool:
+      return value_.bool_ ? "true" : "false";
+    case kId:
+      return (*value_.id_)->ToString();
+  }
+  RTC_NOTREACHED();
+  return std::string();
+}
+
+StatsReport::StatsReport(const Id& id) : id_(id), timestamp_(0.0) {
+  RTC_DCHECK(id_.get());
+}
+
+StatsReport::~StatsReport() = default;
+
+// static
+StatsReport::Id StatsReport::NewBandwidthEstimationId() {
+  return Id(new RefCountedObject<BandwidthEstimationId>());
+}
+
+// static
+StatsReport::Id StatsReport::NewTypedId(StatsType type, const std::string& id) {
+  return Id(new RefCountedObject<TypedId>(type, id));
+}
+
+// static
+StatsReport::Id StatsReport::NewTypedIntId(StatsType type, int id) {
+  return Id(new RefCountedObject<TypedIntId>(type, id));
+}
+
+// static
+StatsReport::Id StatsReport::NewIdWithDirection(
+    StatsType type,
+    const std::string& id,
+    StatsReport::Direction direction) {
+  return Id(new RefCountedObject<IdWithDirection>(type, id, direction));
+}
+
+// static
+StatsReport::Id StatsReport::NewCandidateId(bool local, const std::string& id) {
+  return Id(new RefCountedObject<CandidateId>(local, id));
+}
+
+// static
+StatsReport::Id StatsReport::NewComponentId(const std::string& content_name,
+                                            int component) {
+  return Id(new RefCountedObject<ComponentId>(content_name, component));
+}
+
+// static
+StatsReport::Id StatsReport::NewCandidatePairId(const std::string& content_name,
+                                                int component,
+                                                int index) {
+  return Id(
+      new RefCountedObject<CandidatePairId>(content_name, component, index));
+}
+
+const char* StatsReport::TypeToString() const {
+  return InternalTypeToString(id_->type());
+}
+
+void StatsReport::AddString(StatsReport::StatsValueName name,
+                            const std::string& value) {
+  const Value* found = FindValue(name);
+  if (!found || !(*found == value))
+    values_[name] = ValuePtr(new Value(name, value));
+}
+
+void StatsReport::AddString(StatsReport::StatsValueName name,
+                            const char* value) {
+  const Value* found = FindValue(name);
+  if (!found || !(*found == value))
+    values_[name] = ValuePtr(new Value(name, value));
+}
+
+void StatsReport::AddInt64(StatsReport::StatsValueName name, int64_t value) {
+  const Value* found = FindValue(name);
+  if (!found || !(*found == value))
+    values_[name] = ValuePtr(new Value(name, value, Value::kInt64));
+}
+
+void StatsReport::AddInt(StatsReport::StatsValueName name, int value) {
+  const Value* found = FindValue(name);
+  if (!found || !(*found == static_cast<int64_t>(value)))
+    values_[name] = ValuePtr(new Value(name, value, Value::kInt));
+}
+
+void StatsReport::AddFloat(StatsReport::StatsValueName name, float value) {
+  const Value* found = FindValue(name);
+  if (!found || !(*found == value))
+    values_[name] = ValuePtr(new Value(name, value));
+}
+
+void StatsReport::AddBoolean(StatsReport::StatsValueName name, bool value) {
+  const Value* found = FindValue(name);
+  if (!found || !(*found == value))
+    values_[name] = ValuePtr(new Value(name, value));
+}
+
+void StatsReport::AddId(StatsReport::StatsValueName name, const Id& value) {
+  const Value* found = FindValue(name);
+  if (!found || !(*found == value))
+    values_[name] = ValuePtr(new Value(name, value));
+}
+
+const StatsReport::Value* StatsReport::FindValue(StatsValueName name) const {
+  Values::const_iterator it = values_.find(name);
+  return it == values_.end() ? nullptr : it->second.get();
+}
+
+StatsCollection::StatsCollection() {}
+
+StatsCollection::~StatsCollection() {
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  for (auto* r : list_)
+    delete r;
+}
+
+StatsCollection::const_iterator StatsCollection::begin() const {
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  return list_.begin();
+}
+
+StatsCollection::const_iterator StatsCollection::end() const {
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  return list_.end();
+}
+
+size_t StatsCollection::size() const {
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  return list_.size();
+}
+
+StatsReport* StatsCollection::InsertNew(const StatsReport::Id& id) {
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  RTC_DCHECK(Find(id) == nullptr);
+  StatsReport* report = new StatsReport(id);
+  list_.push_back(report);
+  return report;
+}
+
+StatsReport* StatsCollection::FindOrAddNew(const StatsReport::Id& id) {
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  StatsReport* ret = Find(id);
+  return ret ? ret : InsertNew(id);
+}
+
+StatsReport* StatsCollection::ReplaceOrAddNew(const StatsReport::Id& id) {
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  RTC_DCHECK(id.get());
+  Container::iterator it = absl::c_find_if(
+      list_,
+      [&id](const StatsReport* r) -> bool { return r->id()->Equals(id); });
+  if (it != end()) {
+    StatsReport* report = new StatsReport((*it)->id());
+    delete *it;
+    *it = report;
+    return report;
+  }
+  return InsertNew(id);
+}
+
+// Looks for a report with the given |id|.  If one is not found, null
+// will be returned.
+StatsReport* StatsCollection::Find(const StatsReport::Id& id) {
+  RTC_DCHECK(thread_checker_.IsCurrent());
+  Container::iterator it = absl::c_find_if(
+      list_,
+      [&id](const StatsReport* r) -> bool { return r->id()->Equals(id); });
+  return it == list_.end() ?
nullptr : *it;
+}
+
+}  // namespace webrtc
diff --git a/api/stats_types.h b/api/stats_types.h
new file mode 100644
index 0000000..c1922a8
--- /dev/null
+++ b/api/stats_types.h
+/*
+ * Copyright 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains structures used for retrieving statistics from an ongoing
+// libjingle session.
+
+#ifndef API_STATS_TYPES_H_
+#define API_STATS_TYPES_H_
+
+#include <algorithm>
+#include <list>
+#include <map>
+#include <string>
+#include <vector>
+
+#include "api/scoped_refptr.h"
+#include "rtc_base/constructor_magic.h"
+#include "rtc_base/ref_count.h"
+#include "rtc_base/string_encode.h"
+#include "rtc_base/system/rtc_export.h"
+#include "rtc_base/thread_checker.h"
+
+namespace webrtc {
+
+class RTC_EXPORT StatsReport {
+ public:
+  // Indicates whether a track is for sending or receiving.
+  // Used in reports for audio/video tracks.
+  enum Direction {
+    kSend = 0,
+    kReceive,
+  };
+
+  enum StatsType {
+    // StatsReport types.
+    // A StatsReport of |type| = "googSession" contains overall information
+    // about the thing libjingle calls a session (which may contain one
+    // or more RTP sessions.
+    kStatsReportTypeSession,
+
+    // A StatsReport of |type| = "googTransport" contains information
+    // about a libjingle "transport".
+    kStatsReportTypeTransport,
+
+    // A StatsReport of |type| = "googComponent" contains information
+    // about a libjingle "channel" (typically, RTP or RTCP for a transport).
+    // This is intended to be the same thing as an ICE "Component".
+    kStatsReportTypeComponent,
+
+    // A StatsReport of |type| = "googCandidatePair" contains information
+    // about a libjingle "connection" - a single source/destination port pair.
+    // This is intended to be the same thing as an ICE "candidate pair".
+    kStatsReportTypeCandidatePair,
+
+    // A StatsReport of |type| = "VideoBWE" is statistics for video Bandwidth
+    // Estimation, which is global per-session.  The |id| field is "bweforvideo"
+    // (will probably change in the future).
+    kStatsReportTypeBwe,
+
+    // A StatsReport of |type| = "ssrc" is statistics for a specific rtp stream.
+    // The |id| field is the SSRC in decimal form of the rtp stream.
+    kStatsReportTypeSsrc,
+
+    // A StatsReport of |type| = "remoteSsrc" is statistics for a specific
+    // rtp stream, generated by the remote end of the connection.
+    kStatsReportTypeRemoteSsrc,
+
+    // A StatsReport of |type| = "googTrack" is statistics for a specific media
+    // track. The |id| field is the track id.
+    kStatsReportTypeTrack,
+
+    // A StatsReport of |type| = "localcandidate" or "remotecandidate" is
+    // attributes on a specific ICE Candidate. It links to its connection pair
+    // by candidate id. The string value is taken from
+    // http://w3c.github.io/webrtc-stats/#rtcstatstype-enum*.
+    kStatsReportTypeIceLocalCandidate,
+    kStatsReportTypeIceRemoteCandidate,
+
+    // A StatsReport of |type| = "googCertificate" contains an SSL certificate
+    // transmitted by one of the endpoints of this connection. The |id| is
+    // controlled by the fingerprint, and is used to identify the certificate in
+    // the Channel stats (as "googLocalCertificateId" or
+    // "googRemoteCertificateId") and in any child certificates (as
+    // "googIssuerId").
+    kStatsReportTypeCertificate,
+
+    // A StatsReport of |type| = "datachannel" with statistics for a
+    // particular DataChannel.
+    kStatsReportTypeDataChannel,
+  };
+
+  enum StatsValueName {
+    kStatsValueNameActiveConnection,
+    kStatsValueNameAecDivergentFilterFraction,
+    kStatsValueNameAudioInputLevel,
+    kStatsValueNameAudioOutputLevel,
+    kStatsValueNameBytesReceived,
+    kStatsValueNameBytesSent,
+    kStatsValueNameCodecImplementationName,
+    kStatsValueNameConcealedSamples,
+    kStatsValueNameConcealmentEvents,
+    kStatsValueNameDataChannelId,
+    kStatsValueNameFramesDecoded,
+    kStatsValueNameFramesEncoded,
+    kStatsValueNameJitterBufferDelay,
+    kStatsValueNameMediaType,
+    kStatsValueNamePacketsLost,
+    kStatsValueNamePacketsReceived,
+    kStatsValueNamePacketsSent,
+    kStatsValueNameProtocol,
+    kStatsValueNameQpSum,
+    kStatsValueNameReceiving,
+    kStatsValueNameSelectedCandidatePairId,
+    kStatsValueNameSsrc,
+    kStatsValueNameState,
+    kStatsValueNameTotalAudioEnergy,
+    kStatsValueNameTotalSamplesDuration,
+    kStatsValueNameTotalSamplesReceived,
+    kStatsValueNameTransportId,
+    kStatsValueNameSentPingRequestsTotal,
+    kStatsValueNameSentPingRequestsBeforeFirstResponse,
+    kStatsValueNameSentPingResponses,
+    kStatsValueNameRecvPingRequests,
+    kStatsValueNameRecvPingResponses,
+    kStatsValueNameSentStunKeepaliveRequests,
+    kStatsValueNameRecvStunKeepaliveResponses,
+    kStatsValueNameStunKeepaliveRttTotal,
+    kStatsValueNameStunKeepaliveRttSquaredTotal,
+
+    // Internal StatsValue names.
+    kStatsValueNameAccelerateRate,
+    kStatsValueNameActualEncBitrate,
+    kStatsValueNameAdaptationChanges,
+    kStatsValueNameAvailableReceiveBandwidth,
+    kStatsValueNameAvailableSendBandwidth,
+    kStatsValueNameAvgEncodeMs,
+    kStatsValueNameBandwidthLimitedResolution,
+    kStatsValueNameBucketDelay,
+    kStatsValueNameCaptureStartNtpTimeMs,
+    kStatsValueNameCandidateIPAddress,
+    kStatsValueNameCandidateNetworkType,
+    kStatsValueNameCandidatePortNumber,
+    kStatsValueNameCandidatePriority,
+    kStatsValueNameCandidateTransportType,
+    kStatsValueNameCandidateType,
+    kStatsValueNameChannelId,
+    kStatsValueNameCodecName,
+    kStatsValueNameComponent,
+    kStatsValueNameContentName,
+    kStatsValueNameContentType,
+    kStatsValueNameCpuLimitedResolution,
+    kStatsValueNameCurrentDelayMs,
+    kStatsValueNameDecodeMs,
+    kStatsValueNameDecodingCNG,
+    kStatsValueNameDecodingCTN,
+    kStatsValueNameDecodingCTSG,
+    kStatsValueNameDecodingMutedOutput,
+    kStatsValueNameDecodingNormal,
+    kStatsValueNameDecodingPLC,
+    kStatsValueNameDecodingCodecPLC,
+    kStatsValueNameDecodingPLCCNG,
+    kStatsValueNameDer,
+    kStatsValueNameDtlsCipher,
+    kStatsValueNameEchoDelayMedian,
+    kStatsValueNameEchoDelayStdDev,
+    kStatsValueNameEchoReturnLoss,
+    kStatsValueNameEchoReturnLossEnhancement,
+    kStatsValueNameEncodeUsagePercent,
+    kStatsValueNameExpandRate,
+    kStatsValueNameFingerprint,
+    kStatsValueNameFingerprintAlgorithm,
+    kStatsValueNameFirsReceived,
+    kStatsValueNameFirsSent,
+    kStatsValueNameFirstFrameReceivedToDecodedMs,
+    kStatsValueNameFrameHeightInput,
+    kStatsValueNameFrameHeightReceived,
+    kStatsValueNameFrameHeightSent,
+    kStatsValueNameFrameRateDecoded,
+    kStatsValueNameFrameRateInput,
+    kStatsValueNameFrameRateOutput,
+    kStatsValueNameFrameRateReceived,
+    kStatsValueNameFrameRateSent,
+    kStatsValueNameFrameWidthInput,
+    kStatsValueNameFrameWidthReceived,
+    kStatsValueNameFrameWidthSent,
+    kStatsValueNameHasEnteredLowResolution,
+    kStatsValueNameHugeFramesSent,
+    kStatsValueNameInitiator,
+    kStatsValueNameInterframeDelayMaxMs,  // Max over last 10 seconds.
+    kStatsValueNameIssuerId,
+    kStatsValueNameJitterBufferMs,
+    kStatsValueNameJitterReceived,
+    kStatsValueNameLabel,
+    kStatsValueNameLocalAddress,
+    kStatsValueNameLocalCandidateId,
+    kStatsValueNameLocalCandidateType,
+    kStatsValueNameLocalCertificateId,
+    kStatsValueNameMaxDecodeMs,
+    kStatsValueNameMinPlayoutDelayMs,
+    kStatsValueNameNacksReceived,
+    kStatsValueNameNacksSent,
+    kStatsValueNamePlisReceived,
+    kStatsValueNamePlisSent,
+    kStatsValueNamePreemptiveExpandRate,
+    kStatsValueNamePreferredJitterBufferMs,
+    kStatsValueNameRemoteAddress,
+    kStatsValueNameRemoteCandidateId,
+    kStatsValueNameRemoteCandidateType,
+    kStatsValueNameRemoteCertificateId,
+    kStatsValueNameRenderDelayMs,
+    kStatsValueNameResidualEchoLikelihood,
+    kStatsValueNameResidualEchoLikelihoodRecentMax,
+    kStatsValueNameAnaBitrateActionCounter,
+    kStatsValueNameAnaChannelActionCounter,
+    kStatsValueNameAnaDtxActionCounter,
+    kStatsValueNameAnaFecActionCounter,
+    kStatsValueNameAnaFrameLengthIncreaseCounter,
+    kStatsValueNameAnaFrameLengthDecreaseCounter,
+    kStatsValueNameAnaUplinkPacketLossFraction,
+    kStatsValueNameRetransmitBitrate,
+    kStatsValueNameRtt,
+    kStatsValueNameSecondaryDecodedRate,
+    kStatsValueNameSecondaryDiscardedRate,
+    kStatsValueNameSendPacketsDiscarded,
+    kStatsValueNameSpeechExpandRate,
+    kStatsValueNameSrtpCipher,
+    kStatsValueNameTargetDelayMs,
+    kStatsValueNameTargetEncBitrate,
+    kStatsValueNameTimingFrameInfo,  // Result of |TimingFrameInfo::ToString|
+    kStatsValueNameTrackId,
+    kStatsValueNameTransmitBitrate,
+    kStatsValueNameTransportType,
+    kStatsValueNameTypingNoiseState,
+    kStatsValueNameWritable,
+    kStatsValueNameAudioDeviceUnderrunCounter,
+  };
+
+  class RTC_EXPORT IdBase : public rtc::RefCountInterface {
+   public:
+    ~IdBase() override;
+    StatsType type() const;
+
+    // Users of IdBase will be using the Id typedef, which is compatible with
+    // this Equals() function. It simply calls the protected (and overridden)
+    // Equals() method.
+    bool Equals(const rtc::scoped_refptr<IdBase>& other) const {
+      return Equals(*other.get());
+    }
+
+    virtual std::string ToString() const = 0;
+
+   protected:
+    // Protected since users of the IdBase type will be using the Id typedef.
+    virtual bool Equals(const IdBase& other) const;
+
+    explicit IdBase(StatsType type);  // Only meant for derived classes.
+    const StatsType type_;
+
+    static const char kSeparator = '_';
+  };
+
+  typedef rtc::scoped_refptr<IdBase> Id;
+
+  struct RTC_EXPORT Value {
+    enum Type {
+      kInt,           // int.
+      kInt64,         // int64_t.
+      kFloat,         // float.
+      kString,        // std::string
+      kStaticString,  // const char*.
+      kBool,          // bool.
+      kId,            // Id.
+    };
+
+    Value(StatsValueName name, int64_t value, Type int_type);
+    Value(StatsValueName name, float f);
+    Value(StatsValueName name, const std::string& value);
+    Value(StatsValueName name, const char* value);
+    Value(StatsValueName name, bool b);
+    Value(StatsValueName name, const Id& value);
+
+    ~Value();
+
+    // Support ref counting. Note that for performance reasons, we
+    // don't use thread safe operations. Therefore, all operations
+    // affecting the ref count (in practice, creation and copying of
+    // the Values mapping) must occur on webrtc's signalling thread.
+    int AddRef() const {
+      RTC_DCHECK_RUN_ON(&thread_checker_);
+      return ++ref_count_;
+    }
+    int Release() const {
+      RTC_DCHECK_RUN_ON(&thread_checker_);
+      int count = --ref_count_;
+      if (!count)
+        delete this;
+      return count;
+    }
+
+    // TODO(tommi): This compares name as well as value...
+    // I think we should only need to compare the value part and
+    // move the name part into a hash map.
+    bool Equals(const Value& other) const;
+
+    // Comparison operators. Return true iff the current instance is of the
+    // correct type and holds the same value. No conversion is performed so
+    // a string value of "123" is not equal to an int value of 123 and an int
+    // value of 123 is not equal to a float value of 123.0f.
+    // One exception to this is that types kInt and kInt64 can be compared and
+    // kString and kStaticString too.
+    bool operator==(const std::string& value) const;
+    bool operator==(const char* value) const;
+    bool operator==(int64_t value) const;
+    bool operator==(bool value) const;
+    bool operator==(float value) const;
+    bool operator==(const Id& value) const;
+
+    // Getters that allow getting the native value directly.
+    // The caller must know the type beforehand or else hit a check.
+    int int_val() const;
+    int64_t int64_val() const;
+    float float_val() const;
+    const char* static_string_val() const;
+    const std::string& string_val() const;
+    bool bool_val() const;
+    const Id& id_val() const;
+
+    // Returns the string representation of |name|.
+    const char* display_name() const;
+
+    // Converts the native value to a string representation of the value.
+    std::string ToString() const;
+
+    Type type() const { return type_; }
+
+    // TODO(tommi): Move |name| and |display_name| out of the Value struct.
+    const StatsValueName name;
+
+   private:
+    rtc::ThreadChecker thread_checker_;
+    mutable int ref_count_ RTC_GUARDED_BY(thread_checker_) = 0;
+
+    const Type type_;
+    // TODO(tommi): Use C++ 11 union and make value_ const.
+    union InternalType {
+      int int_;
+      int64_t int64_;
+      float float_;
+      bool bool_;
+      std::string* string_;
+      const char* static_string_;
+      Id* id_;
+    } value_;
+
+    RTC_DISALLOW_COPY_AND_ASSIGN(Value);
+  };
+
+  typedef rtc::scoped_refptr<Value> ValuePtr;
+  typedef std::map<StatsValueName, ValuePtr> Values;
+
+  // Ownership of |id| is passed to |this|.
+  explicit StatsReport(const Id& id);
+  ~StatsReport();
+
+  // Factory functions for various types of stats IDs.
+  static Id NewBandwidthEstimationId();
+  static Id NewTypedId(StatsType type, const std::string& id);
+  static Id NewTypedIntId(StatsType type, int id);
+  static Id NewIdWithDirection(StatsType type,
+                               const std::string& id,
+                               Direction direction);
+  static Id NewCandidateId(bool local, const std::string& id);
+  static Id NewComponentId(const std::string& content_name, int component);
+  static Id NewCandidatePairId(const std::string& content_name,
+                               int component,
+                               int index);
+
+  const Id& id() const { return id_; }
+  StatsType type() const { return id_->type(); }
+  double timestamp() const { return timestamp_; }
+  void set_timestamp(double t) { timestamp_ = t; }
+  bool empty() const { return values_.empty(); }
+  const Values& values() const { return values_; }
+
+  const char* TypeToString() const;
+
+  void AddString(StatsValueName name, const std::string& value);
+  void AddString(StatsValueName name, const char* value);
+  void AddInt64(StatsValueName name, int64_t value);
+  void AddInt(StatsValueName name, int value);
+  void AddFloat(StatsValueName name, float value);
+  void AddBoolean(StatsValueName name, bool value);
+  void AddId(StatsValueName name, const Id& value);
+
+  const Value* FindValue(StatsValueName name) const;
+
+ private:
+  // The unique identifier for this object.
+  // This is used as a key for this report in ordered containers,
+  // so it must never be changed.
+  const Id id_;
+  double timestamp_;  // Time since 1970-01-01T00:00:00Z in milliseconds.
+  Values values_;
+
+  RTC_DISALLOW_COPY_AND_ASSIGN(StatsReport);
+};
+
+// Typedef for an array of const StatsReport pointers.
+// Ownership of the pointers held by this implementation is assumed to lie
+// elsewhere and lifetime guarantees are made by the implementation that uses
+// this type. In the StatsCollector, object ownership lies with the
+// StatsCollection class.
+typedef std::vector<const StatsReport*> StatsReports;
+
+// A map from the report id to the report.
+// This class wraps an STL container and provides a limited set of
+// functionality in order to keep things simple.
+class StatsCollection {
+ public:
+  StatsCollection();
+  ~StatsCollection();
+
+  typedef std::list<StatsReport*> Container;
+  typedef Container::iterator iterator;
+  typedef Container::const_iterator const_iterator;
+
+  const_iterator begin() const;
+  const_iterator end() const;
+  size_t size() const;
+
+  // Creates a new report object with |id| that does not already
+  // exist in the list of reports.
+  StatsReport* InsertNew(const StatsReport::Id& id);
+  StatsReport* FindOrAddNew(const StatsReport::Id& id);
+  StatsReport* ReplaceOrAddNew(const StatsReport::Id& id);
+
+  // Looks for a report with the given |id|. If one is not found, null
+  // will be returned.
+  StatsReport* Find(const StatsReport::Id& id);
+
+ private:
+  Container list_;
+  rtc::ThreadChecker thread_checker_;
+};
+
+}  // namespace webrtc
+
+#endif  // API_STATS_TYPES_H_
diff --git a/api/task_queue/BUILD.gn b/api/task_queue/BUILD.gn
new file mode 100644
index 0000000..1072057
--- /dev/null
+++ b/api/task_queue/BUILD.gn
+# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import("../../webrtc.gni")
+
+rtc_library("task_queue") {
+  visibility = [ "*" ]
+  public = [
+    "queued_task.h",
+    "task_queue_base.h",
+    "task_queue_factory.h",
+  ]
+  sources = [ "task_queue_base.cc" ]
+
+  deps = [
+    "../../rtc_base:checks",
+    "../../rtc_base:macromagic",
+    "../../rtc_base/system:rtc_export",
+  ]
+  absl_deps = [
+    "//third_party/abseil-cpp/absl/base:config",
+    "//third_party/abseil-cpp/absl/base:core_headers",
+    "//third_party/abseil-cpp/absl/strings",
+  ]
+}
+
+rtc_library("task_queue_test") {
+  visibility = [ "*" ]
+  testonly = true
+  sources = [
+    "task_queue_test.cc",
+    "task_queue_test.h",
+  ]
+
+  check_includes = false  # no-presubmit-check TODO(bugs.webrtc.org/9419)
+  if (build_with_chromium) {
+    visibility = []
+    visibility = webrtc_default_visibility
+    visibility += [
+      # This is the only Chromium target that can depend on this. The reason
+      # behind this is the fact that this is a 'testonly' target and as such
+      # it cannot be part of the WebRTC component.
+      "//third_party/blink/renderer/platform:blink_platform_unittests_sources",
+    ]
+
+    # Don't depend on WebRTC code outside of webrtc_overrides:webrtc_component
+    # because this will break the WebRTC component build in Chromium.
+    deps = [
+      "../../../webrtc_overrides:webrtc_component",
+      "../../test:test_support",
+    ]
+    absl_deps = [
+      "//third_party/abseil-cpp/absl/memory",
+      "//third_party/abseil-cpp/absl/strings",
+    ]
+  } else {
+    deps = [
+      ":task_queue",
+      "../../rtc_base:refcount",
+      "../../rtc_base:rtc_event",
+      "../../rtc_base:timeutils",
+      "../../rtc_base/task_utils:to_queued_task",
+      "../../test:test_support",
+    ]
+    absl_deps = [
+      "//third_party/abseil-cpp/absl/memory",
+      "//third_party/abseil-cpp/absl/strings",
+    ]
+  }
+}
+
+rtc_library("default_task_queue_factory") {
+  visibility = [ "*" ]
+  if (!is_ios && !is_android) {
+    poisonous = [ "default_task_queue" ]
+  }
+  sources = [ "default_task_queue_factory.h" ]
+  deps = [ ":task_queue" ]
+
+  if (rtc_enable_libevent) {
+    sources += [ "default_task_queue_factory_libevent.cc" ]
+    deps += [ "../../rtc_base:rtc_task_queue_libevent" ]
+  } else if (is_mac || is_ios) {
+    sources += [ "default_task_queue_factory_gcd.cc" ]
+    deps += [ "../../rtc_base:rtc_task_queue_gcd" ]
+  } else if (is_win && current_os != "winuwp") {
+    sources += [ "default_task_queue_factory_win.cc" ]
+    deps += [ "../../rtc_base:rtc_task_queue_win" ]
+  } else {
+    sources += [ "default_task_queue_factory_stdlib.cc" ]
+    deps += [ "../../rtc_base:rtc_task_queue_stdlib" ]
+  }
+}
+
+if (rtc_include_tests) {
+  rtc_library("task_queue_default_factory_unittests") {
+    testonly = true
+    sources = [ "default_task_queue_factory_unittest.cc" ]
+    deps = [
+      ":default_task_queue_factory",
+      ":task_queue_test",
+      "../../test:test_support",
+    ]
+  }
+}
diff --git a/api/task_queue/DEPS b/api/task_queue/DEPS
new file mode 100644
index 0000000..fab6056
--- /dev/null
+++ b/api/task_queue/DEPS
+specific_include_rules = {
+  "task_queue_base\.h": [
+    # Make TaskQueueBase RTC_LOCKABLE to allow annotate variables are only
+    # accessed on specific task queue.
+ "+rtc_base/thread_annotations.h", + ], + "task_queue_test\.h": [ + "+test/gtest.h", + ], +} diff --git a/api/task_queue/default_task_queue_factory.h b/api/task_queue/default_task_queue_factory.h new file mode 100644 index 0000000..ccdd1eb --- /dev/null +++ b/api/task_queue/default_task_queue_factory.h @@ -0,0 +1,23 @@ +/* + * Copyright 2019 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef API_TASK_QUEUE_DEFAULT_TASK_QUEUE_FACTORY_H_ +#define API_TASK_QUEUE_DEFAULT_TASK_QUEUE_FACTORY_H_ + +#include + +#include "api/task_queue/task_queue_factory.h" + +namespace webrtc { + +std::unique_ptr CreateDefaultTaskQueueFactory(); + +} // namespace webrtc + +#endif // API_TASK_QUEUE_DEFAULT_TASK_QUEUE_FACTORY_H_ diff --git a/api/task_queue/default_task_queue_factory_gcd.cc b/api/task_queue/default_task_queue_factory_gcd.cc new file mode 100644 index 0000000..7e17b48 --- /dev/null +++ b/api/task_queue/default_task_queue_factory_gcd.cc @@ -0,0 +1,21 @@ +/* + * Copyright 2019 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include + +#include "api/task_queue/task_queue_factory.h" +#include "rtc_base/task_queue_gcd.h" + +namespace webrtc { + +std::unique_ptr CreateDefaultTaskQueueFactory() { + return CreateTaskQueueGcdFactory(); +} + +} // namespace webrtc diff --git a/api/task_queue/default_task_queue_factory_libevent.cc b/api/task_queue/default_task_queue_factory_libevent.cc new file mode 100644 index 0000000..f2fb418 --- /dev/null +++ b/api/task_queue/default_task_queue_factory_libevent.cc @@ -0,0 +1,21 @@ +/* + * Copyright 2019 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include + +#include "api/task_queue/task_queue_factory.h" +#include "rtc_base/task_queue_libevent.h" + +namespace webrtc { + +std::unique_ptr CreateDefaultTaskQueueFactory() { + return CreateTaskQueueLibeventFactory(); +} + +} // namespace webrtc diff --git a/api/task_queue/default_task_queue_factory_stdlib.cc b/api/task_queue/default_task_queue_factory_stdlib.cc new file mode 100644 index 0000000..ca7d720 --- /dev/null +++ b/api/task_queue/default_task_queue_factory_stdlib.cc @@ -0,0 +1,21 @@ +/* + * Copyright 2019 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include + +#include "api/task_queue/task_queue_factory.h" +#include "rtc_base/task_queue_stdlib.h" + +namespace webrtc { + +std::unique_ptr CreateDefaultTaskQueueFactory() { + return CreateTaskQueueStdlibFactory(); +} + +} // namespace webrtc diff --git a/api/task_queue/default_task_queue_factory_unittest.cc b/api/task_queue/default_task_queue_factory_unittest.cc new file mode 100644 index 0000000..92c17d8 --- /dev/null +++ b/api/task_queue/default_task_queue_factory_unittest.cc @@ -0,0 +1,24 @@ +/* + * Copyright 2019 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/task_queue/default_task_queue_factory.h" + +#include "api/task_queue/task_queue_test.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +INSTANTIATE_TEST_SUITE_P(Default, + TaskQueueTest, + ::testing::Values(CreateDefaultTaskQueueFactory)); + +} // namespace +} // namespace webrtc diff --git a/api/task_queue/default_task_queue_factory_win.cc b/api/task_queue/default_task_queue_factory_win.cc new file mode 100644 index 0000000..493ea66 --- /dev/null +++ b/api/task_queue/default_task_queue_factory_win.cc @@ -0,0 +1,21 @@ +/* + * Copyright 2019 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include + +#include "api/task_queue/task_queue_factory.h" +#include "rtc_base/task_queue_win.h" + +namespace webrtc { + +std::unique_ptr CreateDefaultTaskQueueFactory() { + return CreateTaskQueueWinFactory(); +} + +} // namespace webrtc diff --git a/api/task_queue/queued_task.h b/api/task_queue/queued_task.h new file mode 100644 index 0000000..5748628 --- /dev/null +++ b/api/task_queue/queued_task.h @@ -0,0 +1,32 @@ +/* + * Copyright 2018 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef API_TASK_QUEUE_QUEUED_TASK_H_ +#define API_TASK_QUEUE_QUEUED_TASK_H_ + +namespace webrtc { + +// Base interface for asynchronously executed tasks. +// The interface basically consists of a single function, Run(), that executes +// on the target queue. For more details see the Run() method and TaskQueue. +class QueuedTask { + public: + virtual ~QueuedTask() = default; + + // Main routine that will run when the task is executed on the desired queue. + // The task should return |true| to indicate that it should be deleted or + // |false| to indicate that the queue should consider ownership of the task + // having been transferred. Returning |false| can be useful if a task has + // re-posted itself to a different queue or is otherwise being re-used. + virtual bool Run() = 0; +}; + +} // namespace webrtc + +#endif // API_TASK_QUEUE_QUEUED_TASK_H_ diff --git a/api/task_queue/task_queue_base.cc b/api/task_queue/task_queue_base.cc new file mode 100644 index 0000000..7d3539a --- /dev/null +++ b/api/task_queue/task_queue_base.cc @@ -0,0 +1,79 @@ +/* + * Copyright 2019 The WebRTC Project Authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/task_queue/task_queue_base.h" + +#include "absl/base/attributes.h" +#include "absl/base/config.h" +#include "rtc_base/checks.h" + +#if defined(ABSL_HAVE_THREAD_LOCAL) + +namespace webrtc { +namespace { + +ABSL_CONST_INIT thread_local TaskQueueBase* current = nullptr; + +} // namespace + +TaskQueueBase* TaskQueueBase::Current() { + return current; +} + +TaskQueueBase::CurrentTaskQueueSetter::CurrentTaskQueueSetter( + TaskQueueBase* task_queue) + : previous_(current) { + current = task_queue; +} + +TaskQueueBase::CurrentTaskQueueSetter::~CurrentTaskQueueSetter() { + current = previous_; +} +} // namespace webrtc + +#elif defined(WEBRTC_POSIX) + +#include + +namespace webrtc { +namespace { + +ABSL_CONST_INIT pthread_key_t g_queue_ptr_tls = 0; + +void InitializeTls() { + RTC_CHECK(pthread_key_create(&g_queue_ptr_tls, nullptr) == 0); +} + +pthread_key_t GetQueuePtrTls() { + static pthread_once_t init_once = PTHREAD_ONCE_INIT; + RTC_CHECK(pthread_once(&init_once, &InitializeTls) == 0); + return g_queue_ptr_tls; +} + +} // namespace + +TaskQueueBase* TaskQueueBase::Current() { + return static_cast(pthread_getspecific(GetQueuePtrTls())); +} + +TaskQueueBase::CurrentTaskQueueSetter::CurrentTaskQueueSetter( + TaskQueueBase* task_queue) + : previous_(TaskQueueBase::Current()) { + pthread_setspecific(GetQueuePtrTls(), task_queue); +} + +TaskQueueBase::CurrentTaskQueueSetter::~CurrentTaskQueueSetter() { + pthread_setspecific(GetQueuePtrTls(), previous_); +} + +} // namespace webrtc + +#else +#error Unsupported platform +#endif diff --git a/api/task_queue/task_queue_base.h b/api/task_queue/task_queue_base.h new file mode 100644 
index 0000000..90b1efd --- /dev/null +++ b/api/task_queue/task_queue_base.h @@ -0,0 +1,83 @@ +/* + * Copyright 2019 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef API_TASK_QUEUE_TASK_QUEUE_BASE_H_ +#define API_TASK_QUEUE_TASK_QUEUE_BASE_H_ + +#include + +#include "api/task_queue/queued_task.h" +#include "rtc_base/system/rtc_export.h" +#include "rtc_base/thread_annotations.h" + +namespace webrtc { + +// Asynchronously executes tasks in a way that guarantees that they're executed +// in FIFO order and that tasks never overlap. Tasks may always execute on the +// same worker thread and they may not. To DCHECK that tasks are executing on a +// known task queue, use IsCurrent(). +class RTC_LOCKABLE RTC_EXPORT TaskQueueBase { + public: + // Starts destruction of the task queue. + // On return ensures no task are running and no new tasks are able to start + // on the task queue. + // Responsible for deallocation. Deallocation may happen syncrhoniously during + // Delete or asynchronously after Delete returns. + // Code not running on the TaskQueue should not make any assumption when + // TaskQueue is deallocated and thus should not call any methods after Delete. + // Code running on the TaskQueue should not call Delete, but can assume + // TaskQueue still exists and may call other methods, e.g. PostTask. + virtual void Delete() = 0; + + // Schedules a task to execute. Tasks are executed in FIFO order. + // If |task->Run()| returns true, task is deleted on the task queue + // before next QueuedTask starts executing. + // When a TaskQueue is deleted, pending tasks will not be executed but they + // will be deleted. 
The deletion of tasks may happen synchronously on the + // TaskQueue or it may happen asynchronously after TaskQueue is deleted. + // This may vary from one implementation to the next so assumptions about + // lifetimes of pending tasks should not be made. + virtual void PostTask(std::unique_ptr task) = 0; + + // Schedules a task to execute a specified number of milliseconds from when + // the call is made. The precision should be considered as "best effort" + // and in some cases, such as on Windows when all high precision timers have + // been used up, can be off by as much as 15 millseconds. + virtual void PostDelayedTask(std::unique_ptr task, + uint32_t milliseconds) = 0; + + // Returns the task queue that is running the current thread. + // Returns nullptr if this thread is not associated with any task queue. + static TaskQueueBase* Current(); + bool IsCurrent() const { return Current() == this; } + + protected: + class CurrentTaskQueueSetter { + public: + explicit CurrentTaskQueueSetter(TaskQueueBase* task_queue); + CurrentTaskQueueSetter(const CurrentTaskQueueSetter&) = delete; + CurrentTaskQueueSetter& operator=(const CurrentTaskQueueSetter&) = delete; + ~CurrentTaskQueueSetter(); + + private: + TaskQueueBase* const previous_; + }; + + // Users of the TaskQueue should call Delete instead of directly deleting + // this object. + virtual ~TaskQueueBase() = default; +}; + +struct TaskQueueDeleter { + void operator()(TaskQueueBase* task_queue) const { task_queue->Delete(); } +}; + +} // namespace webrtc + +#endif // API_TASK_QUEUE_TASK_QUEUE_BASE_H_ diff --git a/api/task_queue/task_queue_factory.h b/api/task_queue/task_queue_factory.h new file mode 100644 index 0000000..b68ab33 --- /dev/null +++ b/api/task_queue/task_queue_factory.h @@ -0,0 +1,35 @@ +/* + * Copyright 2019 The WebRTC Project Authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef API_TASK_QUEUE_TASK_QUEUE_FACTORY_H_ +#define API_TASK_QUEUE_TASK_QUEUE_FACTORY_H_ + +#include + +#include "absl/strings/string_view.h" +#include "api/task_queue/task_queue_base.h" + +namespace webrtc { + +// The implementation of this interface must be thread-safe. +class TaskQueueFactory { + public: + // TaskQueue priority levels. On some platforms these will map to thread + // priorities, on others such as Mac and iOS, GCD queue priorities. + enum class Priority { NORMAL = 0, HIGH, LOW }; + + virtual ~TaskQueueFactory() = default; + virtual std::unique_ptr CreateTaskQueue( + absl::string_view name, + Priority priority) const = 0; +}; + +} // namespace webrtc + +#endif // API_TASK_QUEUE_TASK_QUEUE_FACTORY_H_ diff --git a/api/task_queue/task_queue_test.cc b/api/task_queue/task_queue_test.cc new file mode 100644 index 0000000..0d411d2 --- /dev/null +++ b/api/task_queue/task_queue_test.cc @@ -0,0 +1,280 @@ +/* + * Copyright 2019 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "api/task_queue/task_queue_test.h" + +#include "absl/memory/memory.h" +#include "absl/strings/string_view.h" +#include "rtc_base/event.h" +#include "rtc_base/ref_counter.h" +#include "rtc_base/task_utils/to_queued_task.h" +#include "rtc_base/time_utils.h" + +namespace webrtc { +namespace { + +std::unique_ptr CreateTaskQueue( + const std::unique_ptr& factory, + absl::string_view task_queue_name, + TaskQueueFactory::Priority priority = TaskQueueFactory::Priority::NORMAL) { + return factory->CreateTaskQueue(task_queue_name, priority); +} + +TEST_P(TaskQueueTest, Construct) { + std::unique_ptr factory = GetParam()(); + auto queue = CreateTaskQueue(factory, "Construct"); + EXPECT_FALSE(queue->IsCurrent()); +} + +TEST_P(TaskQueueTest, PostAndCheckCurrent) { + std::unique_ptr factory = GetParam()(); + rtc::Event event; + auto queue = CreateTaskQueue(factory, "PostAndCheckCurrent"); + + // We're not running a task, so |queue| shouldn't be current. + // Note that because rtc::Thread also supports the TQ interface and + // TestMainImpl::Init wraps the main test thread (bugs.webrtc.org/9714), that + // means that TaskQueueBase::Current() will still return a valid value. + EXPECT_FALSE(queue->IsCurrent()); + + queue->PostTask(ToQueuedTask([&event, &queue] { + EXPECT_TRUE(queue->IsCurrent()); + event.Set(); + })); + EXPECT_TRUE(event.Wait(1000)); +} + +TEST_P(TaskQueueTest, PostCustomTask) { + std::unique_ptr factory = GetParam()(); + rtc::Event ran; + auto queue = CreateTaskQueue(factory, "PostCustomImplementation"); + + class CustomTask : public QueuedTask { + public: + explicit CustomTask(rtc::Event* ran) : ran_(ran) {} + + private: + bool Run() override { + ran_->Set(); + return false; // Do not allow the task to be deleted by the queue. 
+ } + + rtc::Event* const ran_; + } my_task(&ran); + + queue->PostTask(absl::WrapUnique(&my_task)); + EXPECT_TRUE(ran.Wait(1000)); +} + +TEST_P(TaskQueueTest, PostDelayedZero) { + std::unique_ptr factory = GetParam()(); + rtc::Event event; + auto queue = CreateTaskQueue(factory, "PostDelayedZero"); + + queue->PostDelayedTask(ToQueuedTask([&event] { event.Set(); }), 0); + EXPECT_TRUE(event.Wait(1000)); +} + +TEST_P(TaskQueueTest, PostFromQueue) { + std::unique_ptr factory = GetParam()(); + rtc::Event event; + auto queue = CreateTaskQueue(factory, "PostFromQueue"); + + queue->PostTask(ToQueuedTask([&event, &queue] { + queue->PostTask(ToQueuedTask([&event] { event.Set(); })); + })); + EXPECT_TRUE(event.Wait(1000)); +} + +TEST_P(TaskQueueTest, PostDelayed) { + std::unique_ptr factory = GetParam()(); + rtc::Event event; + auto queue = + CreateTaskQueue(factory, "PostDelayed", TaskQueueFactory::Priority::HIGH); + + int64_t start = rtc::TimeMillis(); + queue->PostDelayedTask(ToQueuedTask([&event, &queue] { + EXPECT_TRUE(queue->IsCurrent()); + event.Set(); + }), + 100); + EXPECT_TRUE(event.Wait(1000)); + int64_t end = rtc::TimeMillis(); + // These tests are a little relaxed due to how "powerful" our test bots can + // be. Most recently we've seen windows bots fire the callback after 94-99ms, + // which is why we have a little bit of leeway backwards as well. + EXPECT_GE(end - start, 90u); + EXPECT_NEAR(end - start, 190u, 100u); // Accept 90-290. 
+} + +TEST_P(TaskQueueTest, PostMultipleDelayed) { + std::unique_ptr factory = GetParam()(); + auto queue = CreateTaskQueue(factory, "PostMultipleDelayed"); + + std::vector events(100); + for (int i = 0; i < 100; ++i) { + rtc::Event* event = &events[i]; + queue->PostDelayedTask(ToQueuedTask([event, &queue] { + EXPECT_TRUE(queue->IsCurrent()); + event->Set(); + }), + i); + } + + for (rtc::Event& e : events) + EXPECT_TRUE(e.Wait(1000)); +} + +TEST_P(TaskQueueTest, PostDelayedAfterDestruct) { + std::unique_ptr factory = GetParam()(); + rtc::Event run; + rtc::Event deleted; + auto queue = CreateTaskQueue(factory, "PostDelayedAfterDestruct"); + queue->PostDelayedTask( + ToQueuedTask([&run] { run.Set(); }, [&deleted] { deleted.Set(); }), 100); + // Destroy the queue. + queue = nullptr; + // Task might outlive the TaskQueue, but still should be deleted. + EXPECT_TRUE(deleted.Wait(1000)); + EXPECT_FALSE(run.Wait(0)); // and should not run. +} + +TEST_P(TaskQueueTest, PostAndReuse) { + std::unique_ptr factory = GetParam()(); + rtc::Event event; + auto post_queue = CreateTaskQueue(factory, "PostQueue"); + auto reply_queue = CreateTaskQueue(factory, "ReplyQueue"); + + int call_count = 0; + + class ReusedTask : public QueuedTask { + public: + ReusedTask(int* counter, TaskQueueBase* reply_queue, rtc::Event* event) + : counter_(*counter), reply_queue_(reply_queue), event_(*event) { + EXPECT_EQ(counter_, 0); + } + + private: + bool Run() override { + if (++counter_ == 1) { + reply_queue_->PostTask(absl::WrapUnique(this)); + // At this point, the object is owned by reply_queue_ and it's + // theoratically possible that the object has been deleted (e.g. if + // posting wasn't possible). So, don't touch any member variables here. + + // Indicate to the current queue that ownership has been transferred. + return false; + } else { + EXPECT_EQ(counter_, 2); + EXPECT_TRUE(reply_queue_->IsCurrent()); + event_.Set(); + return true; // Indicate that the object should be deleted. 
+ } + } + + int& counter_; + TaskQueueBase* const reply_queue_; + rtc::Event& event_; + }; + + auto task = + std::make_unique(&call_count, reply_queue.get(), &event); + post_queue->PostTask(std::move(task)); + EXPECT_TRUE(event.Wait(1000)); +} + +TEST_P(TaskQueueTest, PostALot) { + // Waits until DecrementCount called |count| times. Thread safe. + class BlockingCounter { + public: + explicit BlockingCounter(int initial_count) : count_(initial_count) {} + + void DecrementCount() { + if (count_.DecRef() == rtc::RefCountReleaseStatus::kDroppedLastRef) { + event_.Set(); + } + } + bool Wait(int give_up_after_ms) { return event_.Wait(give_up_after_ms); } + + private: + webrtc_impl::RefCounter count_; + rtc::Event event_; + }; + + std::unique_ptr factory = GetParam()(); + static constexpr int kTaskCount = 0xffff; + rtc::Event posting_done; + BlockingCounter all_destroyed(kTaskCount); + + int tasks_executed = 0; + auto task_queue = CreateTaskQueue(factory, "PostALot"); + + task_queue->PostTask(ToQueuedTask([&] { + // Post tasks from the queue to guarantee that the 1st task won't be + // executed before the last one is posted. + for (int i = 0; i < kTaskCount; ++i) { + task_queue->PostTask(ToQueuedTask( + [&] { ++tasks_executed; }, [&] { all_destroyed.DecrementCount(); })); + } + + posting_done.Set(); + })); + + // Before destroying the task queue wait until all child tasks are posted. + posting_done.Wait(rtc::Event::kForever); + // Destroy the task queue. + task_queue = nullptr; + + // Expect all tasks are destroyed eventually. In some task queue + // implementations that might happen on a different thread after task queue is + // destroyed. + EXPECT_TRUE(all_destroyed.Wait(60000)); + EXPECT_LE(tasks_executed, kTaskCount); +} + +// Test posting two tasks that have shared state not protected by a +// lock. 
The TaskQueue should guarantee memory read-write order and +// FIFO task execution order, so the second task should always see the +// changes that were made by the first task. +// +// If the TaskQueue doesn't properly synchronize the execution of +// tasks, there will be a data race, which is undefined behavior. The +// EXPECT calls may randomly catch this, but to make the most of this +// unit test, run it under TSan or some other tool that is able to +// directly detect data races. +TEST_P(TaskQueueTest, PostTwoWithSharedUnprotectedState) { + std::unique_ptr factory = GetParam()(); + struct SharedState { + // First task will set this value to 1 and second will assert it. + int state = 0; + } state; + + auto queue = CreateTaskQueue(factory, "PostTwoWithSharedUnprotectedState"); + rtc::Event done; + queue->PostTask(ToQueuedTask([&state, &queue, &done] { + // Post tasks from queue to guarantee, that 1st task won't be + // executed before the second one will be posted. + queue->PostTask(ToQueuedTask([&state] { state.state = 1; })); + queue->PostTask(ToQueuedTask([&state, &done] { + EXPECT_EQ(state.state, 1); + done.Set(); + })); + // Check, that state changing tasks didn't start yet. + EXPECT_EQ(state.state, 0); + })); + EXPECT_TRUE(done.Wait(1000)); +} + +// TaskQueueTest is a set of tests for any implementation of the TaskQueueBase. +// Tests are instantiated next to the concrete implementation(s). +// https://github.com/google/googletest/blob/master/googletest/docs/advanced.md#creating-value-parameterized-abstract-tests +GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(TaskQueueTest); + +} // namespace +} // namespace webrtc diff --git a/api/task_queue/task_queue_test.h b/api/task_queue/task_queue_test.h new file mode 100644 index 0000000..e2e4730 --- /dev/null +++ b/api/task_queue/task_queue_test.h @@ -0,0 +1,40 @@ +/* + * Copyright 2019 The WebRTC Project Authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef API_TASK_QUEUE_TASK_QUEUE_TEST_H_ +#define API_TASK_QUEUE_TASK_QUEUE_TEST_H_ + +#include +#include + +#include "api/task_queue/task_queue_factory.h" +#include "test/gtest.h" + +namespace webrtc { + +// Suite of tests to verify TaskQueue implementation with. +// Example usage: +// +// namespace { +// +// using ::testing::Values; +// using ::webrtc::TaskQueueTest; +// +// std::unique_ptr CreateMyFactory(); +// +// INSTANTIATE_TEST_SUITE_P(My, TaskQueueTest, Values(CreateMyFactory)); +// +// } // namespace +class TaskQueueTest : public ::testing::TestWithParam< + std::function()>> { +}; + +} // namespace webrtc + +#endif // API_TASK_QUEUE_TASK_QUEUE_TEST_H_ diff --git a/api/test/DEPS b/api/test/DEPS new file mode 100644 index 0000000..33bf4f4 --- /dev/null +++ b/api/test/DEPS @@ -0,0 +1,46 @@ +specific_include_rules = { + ".*": [ + "+modules/video_coding", + ], + ".*": [ + "+video" + ], + "dummy_peer_connection\.h": [ + "+rtc_base/ref_counted_object.h", + ], + "fake_constraints\.h": [ + "+rtc_base/string_encode.h", + ], + "loopback_media_transport\.h": [ + "+rtc_base/async_invoker.h", + "+rtc_base/thread.h", + "+rtc_base/thread_checker.h", + ], + "neteq_factory_with_codecs\.h": [ + "+system_wrappers/include/clock.h", + ], + "network_emulation_manager\.h": [ + "+rtc_base/thread.h", + "+rtc_base/network.h", + "+rtc_base/network_constants.h", + ], + "peerconnection_quality_test_fixture\.h": [ + "+logging/rtc_event_log/rtc_event_log_factory_interface.h", + "+rtc_base/network.h", + "+rtc_base/rtc_certificate_generator.h", + "+rtc_base/ssl_certificate.h", + "+rtc_base/thread.h", + "+media/base/media_constants.h", + ], + 
"time_controller\.h": [ + "+modules/utility/include/process_thread.h", + "+rtc_base/synchronization/yield_policy.h", + "+system_wrappers/include/clock.h", + ], + "create_frame_generator\.h": [ + "+system_wrappers/include/clock.h", + ], + "videocodec_test_fixture\.h": [ + "+media/base/h264_profile_level_id.h" + ], +} diff --git a/api/test/OWNERS b/api/test/OWNERS new file mode 100644 index 0000000..770c299 --- /dev/null +++ b/api/test/OWNERS @@ -0,0 +1,2 @@ +srte@webrtc.org + diff --git a/api/test/audio_quality_analyzer_interface.h b/api/test/audio_quality_analyzer_interface.h new file mode 100644 index 0000000..c104479 --- /dev/null +++ b/api/test/audio_quality_analyzer_interface.h @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_AUDIO_QUALITY_ANALYZER_INTERFACE_H_ +#define API_TEST_AUDIO_QUALITY_ANALYZER_INTERFACE_H_ + +#include + +#include "api/test/stats_observer_interface.h" +#include "api/test/track_id_stream_info_map.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +// API is in development. Can be changed/removed without notice. +class AudioQualityAnalyzerInterface : public StatsObserverInterface { + public: + ~AudioQualityAnalyzerInterface() override = default; + + // Will be called by the framework before the test. + // |test_case_name| is name of test case, that should be used to report all + // audio metrics. + // |analyzer_helper| is a pointer to a class that will allow track_id to + // stream_id matching. The caller is responsible for ensuring the + // AnalyzerHelper outlives the instance of the AudioQualityAnalyzerInterface. 
+ virtual void Start(std::string test_case_name, + TrackIdStreamInfoMap* analyzer_helper) = 0; + + // Will be called by the framework at the end of the test. The analyzer + // has to finalize all its stats and it should report them. + virtual void Stop() = 0; +}; + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // API_TEST_AUDIO_QUALITY_ANALYZER_INTERFACE_H_ diff --git a/api/test/audioproc_float.cc b/api/test/audioproc_float.cc new file mode 100644 index 0000000..c8d7ff7 --- /dev/null +++ b/api/test/audioproc_float.cc @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/test/audioproc_float.h" + +#include + +#include "modules/audio_processing/test/audioproc_float_impl.h" + +namespace webrtc { +namespace test { + +int AudioprocFloat(rtc::scoped_refptr audio_processing, + int argc, + char* argv[]) { + return AudioprocFloatImpl(std::move(audio_processing), argc, argv); +} + +int AudioprocFloat(std::unique_ptr ap_builder, + int argc, + char* argv[]) { + return AudioprocFloatImpl(std::move(ap_builder), argc, argv, + /*input_aecdump=*/"", + /*processed_capture_samples=*/nullptr); +} + +int AudioprocFloat(std::unique_ptr ap_builder, + int argc, + char* argv[], + absl::string_view input_aecdump, + std::vector* processed_capture_samples) { + return AudioprocFloatImpl(std::move(ap_builder), argc, argv, input_aecdump, + processed_capture_samples); +} + +} // namespace test +} // namespace webrtc diff --git a/api/test/audioproc_float.h b/api/test/audioproc_float.h new file mode 100644 index 0000000..fec2ad1 --- /dev/null +++ b/api/test/audioproc_float.h @@ -0,0 +1,71 @@ 
+/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_AUDIOPROC_FLOAT_H_ +#define API_TEST_AUDIOPROC_FLOAT_H_ + +#include +#include + +#include "modules/audio_processing/include/audio_processing.h" + +namespace webrtc { +namespace test { + +// This is an interface for the audio processing simulation utility. This +// utility can be used to simulate the audioprocessing module using a recording +// (either an AEC dump or wav files), and generate the output as a wav file. +// Any audio_processing object specified in the input is used for the +// simulation. The optional |audio_processing| object provides the +// AudioProcessing instance that is used during the simulation. Note that when +// the audio_processing object is specified all functionality that relies on +// using the AudioProcessingBuilder is deactivated, since the AudioProcessing +// object is already created and the builder is not used in the simulation. It +// is needed to pass the command line flags as |argc| and |argv|, so these can +// be interpreted properly by the utility. To see a list of all supported +// command line flags, run the executable with the '--help' flag. +int AudioprocFloat(rtc::scoped_refptr audio_processing, + int argc, + char* argv[]); + +// This is an interface for the audio processing simulation utility. This +// utility can be used to simulate the audioprocessing module using a recording +// (either an AEC dump or wav files), and generate the output as a wav file. +// The |ap_builder| object will be used to create the AudioProcessing instance +// that is used during the simulation. 
The |ap_builder| supports setting of +// injectable components, which will be passed on to the created AudioProcessing +// instance. It is needed to pass the command line flags as |argc| and |argv|, +// so these can be interpreted properly by the utility. +// To get a fully-working audioproc_f utility, all that is needed is to write a +// main function, create an AudioProcessingBuilder, optionally set custom +// processing components on it, and pass the builder together with the command +// line arguments into this function. +// To see a list of all supported command line flags, run the executable with +// the '--help' flag. +int AudioprocFloat(std::unique_ptr ap_builder, + int argc, + char* argv[]); + +// Interface for the audio processing simulation utility, which is similar to +// the one above, but which adds the option of receiving the input as a string +// and returning the output as an array. The first three arguments fulfill the +// same purpose as above. Pass the |input_aecdump| to provide the content of an +// AEC dump file as a string. After the simulation is completed, +// |processed_capture_samples| will contain the the samples processed on the +// capture side. +int AudioprocFloat(std::unique_ptr ap_builder, + int argc, + char* argv[], + absl::string_view input_aecdump, + std::vector* processed_capture_samples); +} // namespace test +} // namespace webrtc + +#endif // API_TEST_AUDIOPROC_FLOAT_H_ diff --git a/api/test/compile_all_headers.cc b/api/test/compile_all_headers.cc new file mode 100644 index 0000000..4cece5b --- /dev/null +++ b/api/test/compile_all_headers.cc @@ -0,0 +1,46 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file verifies that all include files in this directory can be +// compiled without errors or other required includes. + +// Note: The following header files are not not tested here, as their +// associated targets are not included in all configurations. +// "api/test/audioproc_float.h" +// "api/test/create_video_quality_test_fixture.h" +// "api/test/neteq_simulator_factory.h" +// "api/test/video_quality_test_fixture.h" +// The following header files are also not tested: +// "api/test/create_simulcast_test_fixture.h" +// "api/test/create_videocodec_test_fixture.h" +// "api/test/neteq_simulator.h" +// "api/test/simulated_network.h" +// "api/test/simulcast_test_fixture.h" +// "api/test/test_dependency_factory.h" +// "api/test/videocodec_test_fixture.h" +// "api/test/videocodec_test_stats.h" + +#include "api/test/dummy_peer_connection.h" +#include "api/test/fake_frame_decryptor.h" +#include "api/test/fake_frame_encryptor.h" +#include "api/test/mock_audio_mixer.h" +#include "api/test/mock_frame_decryptor.h" +#include "api/test/mock_frame_encryptor.h" +#include "api/test/mock_peer_connection_factory_interface.h" +#include "api/test/mock_peerconnectioninterface.h" +#include "api/test/mock_rtpreceiver.h" +#include "api/test/mock_rtpsender.h" +#include "api/test/mock_transformable_video_frame.h" +#include "api/test/mock_video_bitrate_allocator.h" +#include "api/test/mock_video_bitrate_allocator_factory.h" +#include "api/test/mock_video_decoder.h" +#include "api/test/mock_video_decoder_factory.h" +#include "api/test/mock_video_encoder.h" +#include "api/test/mock_video_encoder_factory.h" diff --git a/api/test/create_frame_generator.cc b/api/test/create_frame_generator.cc new file mode 100644 index 0000000..7ed0647 --- /dev/null +++ b/api/test/create_frame_generator.cc @@ -0,0 +1,84 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/test/create_frame_generator.h" + +#include +#include + +#include "rtc_base/checks.h" +#include "test/frame_generator.h" +#include "test/testsupport/ivf_video_frame_generator.h" + +namespace webrtc { +namespace test { + +std::unique_ptr CreateSquareFrameGenerator( + int width, + int height, + absl::optional type, + absl::optional num_squares) { + return std::make_unique( + width, height, type.value_or(FrameGeneratorInterface::OutputType::kI420), + num_squares.value_or(10)); +} + +std::unique_ptr CreateFromYuvFileFrameGenerator( + std::vector filenames, + size_t width, + size_t height, + int frame_repeat_count) { + RTC_DCHECK(!filenames.empty()); + std::vector files; + for (const std::string& filename : filenames) { + FILE* file = fopen(filename.c_str(), "rb"); + RTC_DCHECK(file != nullptr) << "Failed to open: '" << filename << "'\n"; + files.push_back(file); + } + + return std::make_unique(files, width, height, + frame_repeat_count); +} + +std::unique_ptr CreateFromIvfFileFrameGenerator( + std::string filename) { + return std::make_unique(std::move(filename)); +} + +std::unique_ptr +CreateScrollingInputFromYuvFilesFrameGenerator( + Clock* clock, + std::vector filenames, + size_t source_width, + size_t source_height, + size_t target_width, + size_t target_height, + int64_t scroll_time_ms, + int64_t pause_time_ms) { + RTC_DCHECK(!filenames.empty()); + std::vector files; + for (const std::string& filename : filenames) { + FILE* file = fopen(filename.c_str(), "rb"); + RTC_DCHECK(file != nullptr); + files.push_back(file); + } + + return std::make_unique( + clock, files, source_width, source_height, target_width, 
target_height, + scroll_time_ms, pause_time_ms); +} + +std::unique_ptr +CreateSlideFrameGenerator(int width, int height, int frame_repeat_count) { + return std::make_unique(width, height, frame_repeat_count); +} + +} // namespace test +} // namespace webrtc diff --git a/api/test/create_frame_generator.h b/api/test/create_frame_generator.h new file mode 100644 index 0000000..1514145 --- /dev/null +++ b/api/test/create_frame_generator.h @@ -0,0 +1,76 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_CREATE_FRAME_GENERATOR_H_ +#define API_TEST_CREATE_FRAME_GENERATOR_H_ + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/test/frame_generator_interface.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { +namespace test { + +// Creates a frame generator that produces frames with small squares that +// move randomly towards the lower right corner. +// |type| has the default value FrameGeneratorInterface::OutputType::I420. +// |num_squares| has the default value 10. +std::unique_ptr CreateSquareFrameGenerator( + int width, + int height, + absl::optional type, + absl::optional num_squares); + +// Creates a frame generator that repeatedly plays a set of yuv files. +// The frame_repeat_count determines how many times each frame is shown, +// with 1 = show each frame once, etc. +std::unique_ptr CreateFromYuvFileFrameGenerator( + std::vector filenames, + size_t width, + size_t height, + int frame_repeat_count); + +// Creates a frame generator that repeatedly plays an ivf file. 
+std::unique_ptr CreateFromIvfFileFrameGenerator( + std::string filename); + +// Creates a frame generator which takes a set of yuv files (wrapping a +// frame generator created by CreateFromYuvFile() above), but outputs frames +// that have been cropped to specified resolution: source_width/source_height +// is the size of the source images, target_width/target_height is the size of +// the cropped output. For each source image read, the cropped viewport will +// be scrolled top to bottom/left to right for scroll_tim_ms milliseconds. +// After that the image will stay in place for pause_time_ms milliseconds, +// and then this will be repeated with the next file from the input set. +std::unique_ptr +CreateScrollingInputFromYuvFilesFrameGenerator( + Clock* clock, + std::vector filenames, + size_t source_width, + size_t source_height, + size_t target_width, + size_t target_height, + int64_t scroll_time_ms, + int64_t pause_time_ms); + +// Creates a frame generator that produces randomly generated slides. It fills +// the frames with randomly sized and colored squares. +// |frame_repeat_count| determines how many times each slide is shown. +std::unique_ptr +CreateSlideFrameGenerator(int width, int height, int frame_repeat_count); + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_CREATE_FRAME_GENERATOR_H_ diff --git a/api/test/create_network_emulation_manager.cc b/api/test/create_network_emulation_manager.cc new file mode 100644 index 0000000..089a2f8 --- /dev/null +++ b/api/test/create_network_emulation_manager.cc @@ -0,0 +1,25 @@ + +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/test/create_network_emulation_manager.h" + +#include + +#include "test/network/network_emulation_manager.h" + +namespace webrtc { + +std::unique_ptr CreateNetworkEmulationManager( + TimeMode mode) { + return std::make_unique(mode); +} + +} // namespace webrtc diff --git a/api/test/create_network_emulation_manager.h b/api/test/create_network_emulation_manager.h new file mode 100644 index 0000000..f444743 --- /dev/null +++ b/api/test/create_network_emulation_manager.h @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_CREATE_NETWORK_EMULATION_MANAGER_H_ +#define API_TEST_CREATE_NETWORK_EMULATION_MANAGER_H_ + +#include + +#include "api/test/network_emulation_manager.h" + +namespace webrtc { + +// Returns a non-null NetworkEmulationManager instance. +std::unique_ptr CreateNetworkEmulationManager( + TimeMode mode = TimeMode::kRealTime); + +} // namespace webrtc + +#endif // API_TEST_CREATE_NETWORK_EMULATION_MANAGER_H_ diff --git a/api/test/create_peer_connection_quality_test_frame_generator.cc b/api/test/create_peer_connection_quality_test_frame_generator.cc new file mode 100644 index 0000000..7f0ba20 --- /dev/null +++ b/api/test/create_peer_connection_quality_test_frame_generator.cc @@ -0,0 +1,108 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/test/create_peer_connection_quality_test_frame_generator.h" + +#include +#include + +#include "api/test/create_frame_generator.h" +#include "api/test/peerconnection_quality_test_fixture.h" +#include "rtc_base/checks.h" +#include "test/testsupport/file_utils.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +using VideoConfig = + ::webrtc::webrtc_pc_e2e::PeerConnectionE2EQualityTestFixture::VideoConfig; +using ScreenShareConfig = ::webrtc::webrtc_pc_e2e:: + PeerConnectionE2EQualityTestFixture::ScreenShareConfig; + +void ValidateScreenShareConfig(const VideoConfig& video_config, + const ScreenShareConfig& screen_share_config) { + if (screen_share_config.slides_yuv_file_names.empty()) { + if (screen_share_config.scrolling_params) { + // If we have scrolling params, then its |source_width| and |source_heigh| + // will be used as width and height of video input, so we have to validate + // it against width and height of default input. 
+ RTC_CHECK_EQ(screen_share_config.scrolling_params->source_width, + kDefaultSlidesWidth); + RTC_CHECK_EQ(screen_share_config.scrolling_params->source_height, + kDefaultSlidesHeight); + } else { + RTC_CHECK_EQ(video_config.width, kDefaultSlidesWidth); + RTC_CHECK_EQ(video_config.height, kDefaultSlidesHeight); + } + } + if (screen_share_config.scrolling_params) { + RTC_CHECK_LE(screen_share_config.scrolling_params->duration, + screen_share_config.slide_change_interval); + RTC_CHECK_GE(screen_share_config.scrolling_params->source_width, + video_config.width); + RTC_CHECK_GE(screen_share_config.scrolling_params->source_height, + video_config.height); + } +} + +std::unique_ptr CreateSquareFrameGenerator( + const VideoConfig& video_config, + absl::optional type) { + return test::CreateSquareFrameGenerator( + video_config.width, video_config.height, std::move(type), absl::nullopt); +} + +std::unique_ptr CreateFromYuvFileFrameGenerator( + const VideoConfig& video_config, + std::string filename) { + return test::CreateFromYuvFileFrameGenerator( + {std::move(filename)}, video_config.width, video_config.height, + /*frame_repeat_count=*/1); +} + +std::unique_ptr CreateScreenShareFrameGenerator( + const VideoConfig& video_config, + const ScreenShareConfig& screen_share_config) { + ValidateScreenShareConfig(video_config, screen_share_config); + if (screen_share_config.generate_slides) { + return test::CreateSlideFrameGenerator( + video_config.width, video_config.height, + screen_share_config.slide_change_interval.seconds() * video_config.fps); + } + std::vector slides = screen_share_config.slides_yuv_file_names; + if (slides.empty()) { + // If slides is empty we need to add default slides as source. In such case + // video width and height is validated to be equal to kDefaultSlidesWidth + // and kDefaultSlidesHeight. 
+ slides.push_back(test::ResourcePath("web_screenshot_1850_1110", "yuv")); + slides.push_back(test::ResourcePath("presentation_1850_1110", "yuv")); + slides.push_back(test::ResourcePath("photo_1850_1110", "yuv")); + slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv")); + } + if (!screen_share_config.scrolling_params) { + // Cycle image every slide_change_interval seconds. + return test::CreateFromYuvFileFrameGenerator( + slides, video_config.width, video_config.height, + screen_share_config.slide_change_interval.seconds() * video_config.fps); + } + + TimeDelta pause_duration = screen_share_config.slide_change_interval - + screen_share_config.scrolling_params->duration; + RTC_DCHECK(pause_duration >= TimeDelta::Zero()); + return test::CreateScrollingInputFromYuvFilesFrameGenerator( + Clock::GetRealTimeClock(), slides, + screen_share_config.scrolling_params->source_width, + screen_share_config.scrolling_params->source_height, video_config.width, + video_config.height, screen_share_config.scrolling_params->duration.ms(), + pause_duration.ms()); +} + +} // namespace webrtc_pc_e2e +} // namespace webrtc diff --git a/api/test/create_peer_connection_quality_test_frame_generator.h b/api/test/create_peer_connection_quality_test_frame_generator.h new file mode 100644 index 0000000..ff87331 --- /dev/null +++ b/api/test/create_peer_connection_quality_test_frame_generator.h @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef API_TEST_CREATE_PEER_CONNECTION_QUALITY_TEST_FRAME_GENERATOR_H_ +#define API_TEST_CREATE_PEER_CONNECTION_QUALITY_TEST_FRAME_GENERATOR_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/test/frame_generator_interface.h" +#include "api/test/peerconnection_quality_test_fixture.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +// Creates a frame generator that produces frames with small squares that move +// randomly towards the lower right corner. |type| has the default value +// FrameGeneratorInterface::OutputType::I420. video_config specifies frame +// weight and height. +std::unique_ptr CreateSquareFrameGenerator( + const PeerConnectionE2EQualityTestFixture::VideoConfig& video_config, + absl::optional type); + +// Creates a frame generator that plays frames from the yuv file. +std::unique_ptr CreateFromYuvFileFrameGenerator( + const PeerConnectionE2EQualityTestFixture::VideoConfig& video_config, + std::string filename); + +// Creates a proper frame generator for testing screen sharing. +std::unique_ptr CreateScreenShareFrameGenerator( + const PeerConnectionE2EQualityTestFixture::VideoConfig& video_config, + const PeerConnectionE2EQualityTestFixture::ScreenShareConfig& + screen_share_config); + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // API_TEST_CREATE_PEER_CONNECTION_QUALITY_TEST_FRAME_GENERATOR_H_ diff --git a/api/test/create_peerconnection_quality_test_fixture.cc b/api/test/create_peerconnection_quality_test_fixture.cc new file mode 100644 index 0000000..2d9d082 --- /dev/null +++ b/api/test/create_peerconnection_quality_test_fixture.cc @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/test/create_peerconnection_quality_test_fixture.h" + +#include +#include + +#include "api/test/time_controller.h" +#include "test/pc/e2e/peer_connection_quality_test.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +std::unique_ptr +CreatePeerConnectionE2EQualityTestFixture( + std::string test_case_name, + TimeController& time_controller, + std::unique_ptr audio_quality_analyzer, + std::unique_ptr video_quality_analyzer) { + return std::make_unique( + std::move(test_case_name), time_controller, + std::move(audio_quality_analyzer), std::move(video_quality_analyzer)); +} + +} // namespace webrtc_pc_e2e +} // namespace webrtc diff --git a/api/test/create_peerconnection_quality_test_fixture.h b/api/test/create_peerconnection_quality_test_fixture.h new file mode 100644 index 0000000..95b9ced --- /dev/null +++ b/api/test/create_peerconnection_quality_test_fixture.h @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef API_TEST_CREATE_PEERCONNECTION_QUALITY_TEST_FIXTURE_H_ +#define API_TEST_CREATE_PEERCONNECTION_QUALITY_TEST_FIXTURE_H_ + +#include +#include + +#include "api/test/audio_quality_analyzer_interface.h" +#include "api/test/peerconnection_quality_test_fixture.h" +#include "api/test/time_controller.h" +#include "api/test/video_quality_analyzer_interface.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +// API is in development. Can be changed/removed without notice. + +// Create test fixture to establish test call between Alice and Bob. 
+// During the test Alice will be caller and Bob will answer the call. +// |test_case_name| is a name of test case, that will be used for all metrics +// reporting. +// |time_controller| is used to manage all rtc::Thread's and TaskQueue +// instances. Instance of |time_controller| have to outlive created fixture. +// Returns a non-null PeerConnectionE2EQualityTestFixture instance. +std::unique_ptr +CreatePeerConnectionE2EQualityTestFixture( + std::string test_case_name, + TimeController& time_controller, + std::unique_ptr audio_quality_analyzer, + std::unique_ptr video_quality_analyzer); + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // API_TEST_CREATE_PEERCONNECTION_QUALITY_TEST_FIXTURE_H_ diff --git a/api/test/create_simulcast_test_fixture.cc b/api/test/create_simulcast_test_fixture.cc new file mode 100644 index 0000000..024145d --- /dev/null +++ b/api/test/create_simulcast_test_fixture.cc @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/test/create_simulcast_test_fixture.h" + +#include +#include + +#include "api/test/simulcast_test_fixture.h" +#include "modules/video_coding/utility/simulcast_test_fixture_impl.h" + +namespace webrtc { +namespace test { + +std::unique_ptr CreateSimulcastTestFixture( + std::unique_ptr encoder_factory, + std::unique_ptr decoder_factory, + SdpVideoFormat video_format) { + return std::make_unique( + std::move(encoder_factory), std::move(decoder_factory), video_format); +} + +} // namespace test +} // namespace webrtc diff --git a/api/test/create_simulcast_test_fixture.h b/api/test/create_simulcast_test_fixture.h new file mode 100644 index 0000000..87f229c --- /dev/null +++ b/api/test/create_simulcast_test_fixture.h @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_CREATE_SIMULCAST_TEST_FIXTURE_H_ +#define API_TEST_CREATE_SIMULCAST_TEST_FIXTURE_H_ + +#include + +#include "api/test/simulcast_test_fixture.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" + +namespace webrtc { +namespace test { + +std::unique_ptr CreateSimulcastTestFixture( + std::unique_ptr encoder_factory, + std::unique_ptr decoder_factory, + SdpVideoFormat video_format); + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_CREATE_SIMULCAST_TEST_FIXTURE_H_ diff --git a/api/test/create_time_controller.cc b/api/test/create_time_controller.cc new file mode 100644 index 0000000..a2c0cb7 --- /dev/null +++ b/api/test/create_time_controller.cc @@ -0,0 +1,54 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/test/create_time_controller.h" + +#include + +#include "call/call.h" +#include "test/time_controller/external_time_controller.h" +#include "test/time_controller/simulated_time_controller.h" + +namespace webrtc { + +std::unique_ptr CreateTimeController( + ControlledAlarmClock* alarm) { + return std::make_unique(alarm); +} + +std::unique_ptr CreateSimulatedTimeController() { + return std::make_unique( + Timestamp::Seconds(10000)); +} + +std::unique_ptr CreateTimeControllerBasedCallFactory( + TimeController* time_controller) { + class TimeControllerBasedCallFactory : public CallFactoryInterface { + public: + explicit TimeControllerBasedCallFactory(TimeController* time_controller) + : time_controller_(time_controller) {} + Call* CreateCall(const Call::Config& config) override { + if (!module_thread_) { + module_thread_ = SharedModuleThread::Create( + time_controller_->CreateProcessThread("CallModules"), + [this]() { module_thread_ = nullptr; }); + } + return Call::Create(config, time_controller_->GetClock(), module_thread_, + time_controller_->CreateProcessThread("Pacer")); + } + + private: + TimeController* time_controller_; + rtc::scoped_refptr module_thread_; + }; + return std::make_unique(time_controller); +} + +} // namespace webrtc diff --git a/api/test/create_time_controller.h b/api/test/create_time_controller.h new file mode 100644 index 0000000..1b6896f --- /dev/null +++ b/api/test/create_time_controller.h @@ -0,0 +1,34 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef API_TEST_CREATE_TIME_CONTROLLER_H_ +#define API_TEST_CREATE_TIME_CONTROLLER_H_ + +#include + +#include "api/call/call_factory_interface.h" +#include "api/test/time_controller.h" + +namespace webrtc { + +// Creates a time coltroller that wraps |alarm|. +std::unique_ptr CreateTimeController( + ControlledAlarmClock* alarm); + +// Creates a time controller that runs in simulated time. +std::unique_ptr CreateSimulatedTimeController(); + +// This is creates a call factory that creates Call instances that are backed by +// a time controller. +std::unique_ptr CreateTimeControllerBasedCallFactory( + TimeController* time_controller); + +} // namespace webrtc + +#endif // API_TEST_CREATE_TIME_CONTROLLER_H_ diff --git a/api/test/create_time_controller_unittest.cc b/api/test/create_time_controller_unittest.cc new file mode 100644 index 0000000..0ea868c --- /dev/null +++ b/api/test/create_time_controller_unittest.cc @@ -0,0 +1,76 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/test/create_time_controller.h" + +#include "api/test/time_controller.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +class FakeAlarm : public ControlledAlarmClock { + public: + explicit FakeAlarm(Timestamp start_time); + + Clock* GetClock() override; + bool ScheduleAlarmAt(Timestamp deadline) override; + void SetCallback(std::function callback) override; + void Sleep(TimeDelta duration) override; + + private: + SimulatedClock clock_; + Timestamp deadline_; + std::function callback_; +}; + +FakeAlarm::FakeAlarm(Timestamp start_time) + : clock_(start_time), + deadline_(Timestamp::PlusInfinity()), + callback_([] {}) {} + +Clock* FakeAlarm::GetClock() { + return &clock_; +} + +bool FakeAlarm::ScheduleAlarmAt(Timestamp deadline) { + if (deadline < deadline_) { + deadline_ = deadline; + return true; + } + return false; +} + +void FakeAlarm::SetCallback(std::function callback) { + callback_ = callback; +} + +void FakeAlarm::Sleep(TimeDelta duration) { + Timestamp end_time = clock_.CurrentTime() + duration; + + while (deadline_ <= end_time) { + clock_.AdvanceTime(deadline_ - clock_.CurrentTime()); + deadline_ = Timestamp::PlusInfinity(); + callback_(); + } + + clock_.AdvanceTime(end_time - clock_.CurrentTime()); +} + +TEST(CreateTimeControllerTest, CreatesNonNullController) { + FakeAlarm alarm(Timestamp::Millis(100)); + EXPECT_NE(CreateTimeController(&alarm), nullptr); +} + +} // namespace +} // namespace webrtc diff --git a/api/test/create_video_quality_test_fixture.cc b/api/test/create_video_quality_test_fixture.cc new file mode 100644 index 0000000..1fa7d24 --- /dev/null +++ b/api/test/create_video_quality_test_fixture.cc @@ -0,0 +1,40 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/test/create_video_quality_test_fixture.h" + +#include +#include + +#include "video/video_quality_test.h" + +namespace webrtc { + +std::unique_ptr +CreateVideoQualityTestFixture() { + // By default, we don't override the FEC module, so pass an empty factory. + return std::make_unique(nullptr); +} + +std::unique_ptr CreateVideoQualityTestFixture( + std::unique_ptr fec_controller_factory) { + auto components = + std::make_unique(); + components->fec_controller_factory = std::move(fec_controller_factory); + return std::make_unique(std::move(components)); +} + +std::unique_ptr CreateVideoQualityTestFixture( + std::unique_ptr + components) { + return std::make_unique(std::move(components)); +} + +} // namespace webrtc diff --git a/api/test/create_video_quality_test_fixture.h b/api/test/create_video_quality_test_fixture.h new file mode 100644 index 0000000..ed618fe --- /dev/null +++ b/api/test/create_video_quality_test_fixture.h @@ -0,0 +1,31 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef API_TEST_CREATE_VIDEO_QUALITY_TEST_FIXTURE_H_ +#define API_TEST_CREATE_VIDEO_QUALITY_TEST_FIXTURE_H_ + +#include + +#include "api/fec_controller.h" +#include "api/test/video_quality_test_fixture.h" + +namespace webrtc { + +std::unique_ptr +CreateVideoQualityTestFixture(); + +std::unique_ptr CreateVideoQualityTestFixture( + std::unique_ptr fec_controller_factory); + +std::unique_ptr CreateVideoQualityTestFixture( + std::unique_ptr + components); +} // namespace webrtc + +#endif // API_TEST_CREATE_VIDEO_QUALITY_TEST_FIXTURE_H_ diff --git a/api/test/create_videocodec_test_fixture.cc b/api/test/create_videocodec_test_fixture.cc new file mode 100644 index 0000000..1f618e5 --- /dev/null +++ b/api/test/create_videocodec_test_fixture.cc @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/test/create_videocodec_test_fixture.h" + +#include +#include + +#include "api/test/videocodec_test_fixture.h" +#include "modules/video_coding/codecs/test/videocodec_test_fixture_impl.h" + +namespace webrtc { +namespace test { + +using Config = VideoCodecTestFixture::Config; + +std::unique_ptr CreateVideoCodecTestFixture( + const Config& config) { + return std::make_unique(config); +} + +std::unique_ptr CreateVideoCodecTestFixture( + const Config& config, + std::unique_ptr decoder_factory, + std::unique_ptr encoder_factory) { + return std::make_unique( + config, std::move(decoder_factory), std::move(encoder_factory)); +} + +} // namespace test +} // namespace webrtc diff --git a/api/test/create_videocodec_test_fixture.h b/api/test/create_videocodec_test_fixture.h new file mode 100644 index 0000000..7a44f6b --- /dev/null +++ b/api/test/create_videocodec_test_fixture.h @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_CREATE_VIDEOCODEC_TEST_FIXTURE_H_ +#define API_TEST_CREATE_VIDEOCODEC_TEST_FIXTURE_H_ + +#include + +#include "api/test/videocodec_test_fixture.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" + +namespace webrtc { +namespace test { + +std::unique_ptr CreateVideoCodecTestFixture( + const VideoCodecTestFixture::Config& config); + +std::unique_ptr CreateVideoCodecTestFixture( + const VideoCodecTestFixture::Config& config, + std::unique_ptr decoder_factory, + std::unique_ptr encoder_factory); + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_CREATE_VIDEOCODEC_TEST_FIXTURE_H_ diff --git a/api/test/dummy_peer_connection.h b/api/test/dummy_peer_connection.h new file mode 100644 index 0000000..0ca7d3f --- /dev/null +++ b/api/test/dummy_peer_connection.h @@ -0,0 +1,253 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_DUMMY_PEER_CONNECTION_H_ +#define API_TEST_DUMMY_PEER_CONNECTION_H_ + +#include +#include +#include + +#include "api/peer_connection_interface.h" +#include "api/rtc_error.h" +#include "rtc_base/checks.h" +#include "rtc_base/ref_counted_object.h" + +namespace webrtc { + +// This class includes dummy implementations of all methods on the +// PeerconnectionInterface. Accessor/getter methods return empty or default +// values. State-changing methods with a return value return failure. Remaining +// methods (except Close())) will crash with FATAL if called. 
+class DummyPeerConnection : public PeerConnectionInterface { + rtc::scoped_refptr local_streams() override { + return nullptr; + } + rtc::scoped_refptr remote_streams() override { + return nullptr; + } + + bool AddStream(MediaStreamInterface* stream) override { return false; } + void RemoveStream(MediaStreamInterface* stream) override { + FATAL() << "Not implemented"; + } + + RTCErrorOr> AddTrack( + rtc::scoped_refptr track, + const std::vector& stream_ids) override { + return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); + } + + bool RemoveTrack(RtpSenderInterface* sender) override { return false; } + + RTCError RemoveTrackNew( + rtc::scoped_refptr sender) override { + return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); + } + + RTCErrorOr> AddTransceiver( + rtc::scoped_refptr track) override { + return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); + } + RTCErrorOr> AddTransceiver( + rtc::scoped_refptr track, + const RtpTransceiverInit& init) override { + return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); + } + + RTCErrorOr> AddTransceiver( + cricket::MediaType media_type) override { + return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); + } + RTCErrorOr> AddTransceiver( + cricket::MediaType media_type, + const RtpTransceiverInit& init) override { + return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); + } + + rtc::scoped_refptr CreateSender( + const std::string& kind, + const std::string& stream_id) override { + return nullptr; + } + + std::vector> GetSenders() + const override { + return {}; + } + + std::vector> GetReceivers() + const override { + return {}; + } + + std::vector> GetTransceivers() + const override { + return {}; + } + + bool GetStats(StatsObserver* observer, + MediaStreamTrackInterface* track, // Optional + StatsOutputLevel level) override { + return false; + } + + void GetStats(RTCStatsCollectorCallback* callback) override { 
+ FATAL() << "Not implemented"; + } + void GetStats( + rtc::scoped_refptr selector, + rtc::scoped_refptr callback) override { + FATAL() << "Not implemented"; + } + void GetStats( + rtc::scoped_refptr selector, + rtc::scoped_refptr callback) override { + FATAL() << "Not implemented"; + } + void ClearStatsCache() override {} + + rtc::scoped_refptr CreateDataChannel( + const std::string& label, + const DataChannelInit* config) override { + return nullptr; + } + + const SessionDescriptionInterface* local_description() const override { + return nullptr; + } + const SessionDescriptionInterface* remote_description() const override { + return nullptr; + } + + const SessionDescriptionInterface* current_local_description() + const override { + return nullptr; + } + const SessionDescriptionInterface* current_remote_description() + const override { + return nullptr; + } + + const SessionDescriptionInterface* pending_local_description() + const override { + return nullptr; + } + const SessionDescriptionInterface* pending_remote_description() + const override { + return nullptr; + } + + void RestartIce() override { FATAL() << "Not implemented"; } + + // Create a new offer. + // The CreateSessionDescriptionObserver callback will be called when done. 
+ void CreateOffer(CreateSessionDescriptionObserver* observer, + const RTCOfferAnswerOptions& options) override { + FATAL() << "Not implemented"; + } + + void CreateAnswer(CreateSessionDescriptionObserver* observer, + const RTCOfferAnswerOptions& options) override { + FATAL() << "Not implemented"; + } + + void SetLocalDescription(SetSessionDescriptionObserver* observer, + SessionDescriptionInterface* desc) override { + FATAL() << "Not implemented"; + } + void SetRemoteDescription(SetSessionDescriptionObserver* observer, + SessionDescriptionInterface* desc) override { + FATAL() << "Not implemented"; + } + void SetRemoteDescription( + std::unique_ptr desc, + rtc::scoped_refptr observer) + override { + FATAL() << "Not implemented"; + } + + PeerConnectionInterface::RTCConfiguration GetConfiguration() override { + return RTCConfiguration(); + } + RTCError SetConfiguration( + const PeerConnectionInterface::RTCConfiguration& config) override { + return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); + } + + bool AddIceCandidate(const IceCandidateInterface* candidate) override { + return false; + } + bool RemoveIceCandidates( + const std::vector& candidates) override { + return false; + } + + RTCError SetBitrate(const BitrateSettings& bitrate) override { + return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); + } + + void SetAudioPlayout(bool playout) override { FATAL() << "Not implemented"; } + void SetAudioRecording(bool recording) override { + FATAL() << "Not implemented"; + } + + rtc::scoped_refptr LookupDtlsTransportByMid( + const std::string& mid) override { + return nullptr; + } + rtc::scoped_refptr GetSctpTransport() const override { + return nullptr; + } + + SignalingState signaling_state() override { return SignalingState(); } + + IceConnectionState ice_connection_state() override { + return IceConnectionState(); + } + + IceConnectionState standardized_ice_connection_state() override { + return IceConnectionState(); + } + + 
PeerConnectionState peer_connection_state() override { + return PeerConnectionState(); + } + + IceGatheringState ice_gathering_state() override { + return IceGatheringState(); + } + + absl::optional can_trickle_ice_candidates() { return absl::nullopt; } + + bool StartRtcEventLog(std::unique_ptr output, + int64_t output_period_ms) override { + return false; + } + bool StartRtcEventLog(std::unique_ptr output) override { + return false; + } + + void StopRtcEventLog() { FATAL() << "Not implemented"; } + + void Close() override {} + + rtc::Thread* signaling_thread() const override { + return rtc::Thread::Current(); + } +}; + +static_assert( + !std::is_abstract>::value, + ""); + +} // namespace webrtc + +#endif // API_TEST_DUMMY_PEER_CONNECTION_H_ diff --git a/api/test/fake_frame_decryptor.cc b/api/test/fake_frame_decryptor.cc new file mode 100644 index 0000000..16cb1bd --- /dev/null +++ b/api/test/fake_frame_decryptor.cc @@ -0,0 +1,71 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/test/fake_frame_decryptor.h" + +#include + +#include "rtc_base/checks.h" + +namespace webrtc { + +FakeFrameDecryptor::FakeFrameDecryptor(uint8_t fake_key, + uint8_t expected_postfix_byte) + : fake_key_(fake_key), expected_postfix_byte_(expected_postfix_byte) {} + +FakeFrameDecryptor::Result FakeFrameDecryptor::Decrypt( + cricket::MediaType media_type, + const std::vector& csrcs, + rtc::ArrayView additional_data, + rtc::ArrayView encrypted_frame, + rtc::ArrayView frame) { + if (fail_decryption_) { + return Result(Status::kFailedToDecrypt, 0); + } + + RTC_CHECK_EQ(frame.size() + 1, encrypted_frame.size()); + for (size_t i = 0; i < frame.size(); i++) { + frame[i] = encrypted_frame[i] ^ fake_key_; + } + + if (encrypted_frame[frame.size()] != expected_postfix_byte_) { + return Result(Status::kFailedToDecrypt, 0); + } + + return Result(Status::kOk, frame.size()); +} + +size_t FakeFrameDecryptor::GetMaxPlaintextByteSize( + cricket::MediaType media_type, + size_t encrypted_frame_size) { + return encrypted_frame_size - 1; +} + +void FakeFrameDecryptor::SetFakeKey(uint8_t fake_key) { + fake_key_ = fake_key; +} + +uint8_t FakeFrameDecryptor::GetFakeKey() const { + return fake_key_; +} + +void FakeFrameDecryptor::SetExpectedPostfixByte(uint8_t expected_postfix_byte) { + expected_postfix_byte_ = expected_postfix_byte; +} + +uint8_t FakeFrameDecryptor::GetExpectedPostfixByte() const { + return expected_postfix_byte_; +} + +void FakeFrameDecryptor::SetFailDecryption(bool fail_decryption) { + fail_decryption_ = fail_decryption; +} + +} // namespace webrtc diff --git a/api/test/fake_frame_decryptor.h b/api/test/fake_frame_decryptor.h new file mode 100644 index 0000000..bfd0e69 --- /dev/null +++ b/api/test/fake_frame_decryptor.h @@ -0,0 +1,72 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_FAKE_FRAME_DECRYPTOR_H_ +#define API_TEST_FAKE_FRAME_DECRYPTOR_H_ + +#include +#include + +#include + +#include "api/array_view.h" +#include "api/crypto/frame_decryptor_interface.h" +#include "api/media_types.h" +#include "rtc_base/ref_counted_object.h" + +namespace webrtc { + +// The FakeFrameDecryptor is a TEST ONLY fake implementation of the +// FrameDecryptorInterface. It is constructed with a simple single digit key and +// a fixed postfix byte. This is just to validate that the core code works +// as expected. +class FakeFrameDecryptor final + : public rtc::RefCountedObject { + public: + // Provide a key (0,255) and some postfix byte (0,255) this should match the + // byte you expect from the FakeFrameEncryptor. + explicit FakeFrameDecryptor(uint8_t fake_key = 0xAA, + uint8_t expected_postfix_byte = 255); + // Fake decryption that just xors the payload with the 1 byte key and checks + // the postfix byte. This will always fail if fail_decryption_ is set to true. + Result Decrypt(cricket::MediaType media_type, + const std::vector& csrcs, + rtc::ArrayView additional_data, + rtc::ArrayView encrypted_frame, + rtc::ArrayView frame) override; + // Always returns 1 less than the size of the encrypted frame. + size_t GetMaxPlaintextByteSize(cricket::MediaType media_type, + size_t encrypted_frame_size) override; + // Sets the fake key to use for encryption. + void SetFakeKey(uint8_t fake_key); + // Returns the fake key used for encryption. + uint8_t GetFakeKey() const; + // Set the Postfix byte that is expected in the encrypted payload. + void SetExpectedPostfixByte(uint8_t expected_postfix_byte); + // Returns the postfix byte that will be checked for in the encrypted payload. 
+ uint8_t GetExpectedPostfixByte() const; + // If set to true will force all encryption to fail. + void SetFailDecryption(bool fail_decryption); + // Simple error codes for tests to validate against. + enum class FakeDecryptStatus : int { + OK = 0, + FORCED_FAILURE = 1, + INVALID_POSTFIX = 2 + }; + + private: + uint8_t fake_key_ = 0; + uint8_t expected_postfix_byte_ = 0; + bool fail_decryption_ = false; +}; + +} // namespace webrtc + +#endif // API_TEST_FAKE_FRAME_DECRYPTOR_H_ diff --git a/api/test/fake_frame_encryptor.cc b/api/test/fake_frame_encryptor.cc new file mode 100644 index 0000000..89d14aa --- /dev/null +++ b/api/test/fake_frame_encryptor.cc @@ -0,0 +1,66 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/test/fake_frame_encryptor.h" + +#include "rtc_base/checks.h" + +namespace webrtc { +FakeFrameEncryptor::FakeFrameEncryptor(uint8_t fake_key, uint8_t postfix_byte) + : fake_key_(fake_key), postfix_byte_(postfix_byte) {} + +// FrameEncryptorInterface implementation +int FakeFrameEncryptor::Encrypt(cricket::MediaType media_type, + uint32_t ssrc, + rtc::ArrayView additional_data, + rtc::ArrayView frame, + rtc::ArrayView encrypted_frame, + size_t* bytes_written) { + if (fail_encryption_) { + return static_cast(FakeEncryptionStatus::FORCED_FAILURE); + } + + RTC_CHECK_EQ(frame.size() + 1, encrypted_frame.size()); + for (size_t i = 0; i < frame.size(); i++) { + encrypted_frame[i] = frame[i] ^ fake_key_; + } + + encrypted_frame[frame.size()] = postfix_byte_; + *bytes_written = encrypted_frame.size(); + return static_cast(FakeEncryptionStatus::OK); +} + +size_t FakeFrameEncryptor::GetMaxCiphertextByteSize( + cricket::MediaType media_type, + size_t frame_size) { + return frame_size + 1; +} + +void FakeFrameEncryptor::SetFakeKey(uint8_t fake_key) { + fake_key_ = fake_key; +} + +uint8_t FakeFrameEncryptor::GetFakeKey() const { + return fake_key_; +} + +void FakeFrameEncryptor::SetPostfixByte(uint8_t postfix_byte) { + postfix_byte_ = postfix_byte; +} + +uint8_t FakeFrameEncryptor::GetPostfixByte() const { + return postfix_byte_; +} + +void FakeFrameEncryptor::SetFailEncryption(bool fail_encryption) { + fail_encryption_ = fail_encryption; +} + +} // namespace webrtc diff --git a/api/test/fake_frame_encryptor.h b/api/test/fake_frame_encryptor.h new file mode 100644 index 0000000..074981b --- /dev/null +++ b/api/test/fake_frame_encryptor.h @@ -0,0 +1,69 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_FAKE_FRAME_ENCRYPTOR_H_ +#define API_TEST_FAKE_FRAME_ENCRYPTOR_H_ + +#include +#include + +#include "api/array_view.h" +#include "api/crypto/frame_encryptor_interface.h" +#include "api/media_types.h" +#include "rtc_base/ref_counted_object.h" + +namespace webrtc { + +// The FakeFrameEncryptor is a TEST ONLY fake implementation of the +// FrameEncryptorInterface. It is constructed with a simple single digit key and +// a fixed postfix byte. This is just to validate that the core code works +// as expected. +class FakeFrameEncryptor + : public rtc::RefCountedObject { + public: + // Provide a key (0,255) and some postfix byte (0,255). + explicit FakeFrameEncryptor(uint8_t fake_key = 0xAA, + uint8_t postfix_byte = 255); + // Simply xors each payload with the provided fake key and adds the postfix + // bit to the end. This will always fail if fail_encryption_ is set to true. + int Encrypt(cricket::MediaType media_type, + uint32_t ssrc, + rtc::ArrayView additional_data, + rtc::ArrayView frame, + rtc::ArrayView encrypted_frame, + size_t* bytes_written) override; + // Always returns 1 more than the size of the frame. + size_t GetMaxCiphertextByteSize(cricket::MediaType media_type, + size_t frame_size) override; + // Sets the fake key to use during encryption. + void SetFakeKey(uint8_t fake_key); + // Returns the fake key used during encryption. + uint8_t GetFakeKey() const; + // Set the postfix byte to use. + void SetPostfixByte(uint8_t expected_postfix_byte); + // Return a postfix byte added to each outgoing payload. + uint8_t GetPostfixByte() const; + // Force all encryptions to fail. 
+ void SetFailEncryption(bool fail_encryption); + + enum class FakeEncryptionStatus : int { + OK = 0, + FORCED_FAILURE = 1, + }; + + private: + uint8_t fake_key_ = 0; + uint8_t postfix_byte_ = 0; + bool fail_encryption_ = false; +}; + +} // namespace webrtc + +#endif // API_TEST_FAKE_FRAME_ENCRYPTOR_H_ diff --git a/api/test/frame_generator_interface.h b/api/test/frame_generator_interface.h new file mode 100644 index 0000000..691b6ee --- /dev/null +++ b/api/test/frame_generator_interface.h @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_FRAME_GENERATOR_INTERFACE_H_ +#define API_TEST_FRAME_GENERATOR_INTERFACE_H_ + +#include + +#include "absl/types/optional.h" +#include "api/scoped_refptr.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" + +namespace webrtc { +namespace test { + +class FrameGeneratorInterface { + public: + struct VideoFrameData { + VideoFrameData(rtc::scoped_refptr buffer, + absl::optional update_rect) + : buffer(std::move(buffer)), update_rect(update_rect) {} + + rtc::scoped_refptr buffer; + absl::optional update_rect; + }; + + enum class OutputType { kI420, kI420A, kI010 }; + + virtual ~FrameGeneratorInterface() = default; + + // Returns VideoFrameBuffer and area where most of update was done to set them + // on the VideoFrame object. + virtual VideoFrameData NextFrame() = 0; + + // Change the capture resolution. 
+ virtual void ChangeResolution(size_t width, size_t height) = 0; +}; + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_FRAME_GENERATOR_INTERFACE_H_ diff --git a/api/test/mock_audio_mixer.h b/api/test/mock_audio_mixer.h new file mode 100644 index 0000000..88dc108 --- /dev/null +++ b/api/test/mock_audio_mixer.h @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_MOCK_AUDIO_MIXER_H_ +#define API_TEST_MOCK_AUDIO_MIXER_H_ + +#include "api/audio/audio_mixer.h" +#include "test/gmock.h" + +namespace webrtc { +namespace test { + +class MockAudioMixer : public AudioMixer { + public: + MOCK_METHOD(bool, AddSource, (Source*), (override)); + MOCK_METHOD(void, RemoveSource, (Source*), (override)); + MOCK_METHOD(void, Mix, (size_t number_of_channels, AudioFrame*), (override)); +}; +} // namespace test +} // namespace webrtc + +#endif // API_TEST_MOCK_AUDIO_MIXER_H_ diff --git a/api/test/mock_fec_controller_override.h b/api/test/mock_fec_controller_override.h new file mode 100644 index 0000000..8f3accb --- /dev/null +++ b/api/test/mock_fec_controller_override.h @@ -0,0 +1,26 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_MOCK_FEC_CONTROLLER_OVERRIDE_H_ +#define API_TEST_MOCK_FEC_CONTROLLER_OVERRIDE_H_ + +#include "api/fec_controller_override.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockFecControllerOverride : public FecControllerOverride { + public: + MOCK_METHOD(void, SetFecAllowed, (bool fec_allowed), (override)); +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_FEC_CONTROLLER_OVERRIDE_H_ diff --git a/api/test/mock_frame_decryptor.h b/api/test/mock_frame_decryptor.h new file mode 100644 index 0000000..9604b96 --- /dev/null +++ b/api/test/mock_frame_decryptor.h @@ -0,0 +1,40 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_MOCK_FRAME_DECRYPTOR_H_ +#define API_TEST_MOCK_FRAME_DECRYPTOR_H_ + +#include + +#include "api/crypto/frame_decryptor_interface.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockFrameDecryptor : public FrameDecryptorInterface { + public: + MOCK_METHOD(Result, + Decrypt, + (cricket::MediaType, + const std::vector&, + rtc::ArrayView, + rtc::ArrayView, + rtc::ArrayView), + (override)); + + MOCK_METHOD(size_t, + GetMaxPlaintextByteSize, + (cricket::MediaType, size_t encrypted_frame_size), + (override)); +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_FRAME_DECRYPTOR_H_ diff --git a/api/test/mock_frame_encryptor.h b/api/test/mock_frame_encryptor.h new file mode 100644 index 0000000..e47321f --- /dev/null +++ b/api/test/mock_frame_encryptor.h @@ -0,0 +1,39 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_MOCK_FRAME_ENCRYPTOR_H_ +#define API_TEST_MOCK_FRAME_ENCRYPTOR_H_ + +#include "api/crypto/frame_encryptor_interface.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockFrameEncryptor : public FrameEncryptorInterface { + public: + MOCK_METHOD(int, + Encrypt, + (cricket::MediaType, + uint32_t, + rtc::ArrayView, + rtc::ArrayView, + rtc::ArrayView, + size_t*), + (override)); + + MOCK_METHOD(size_t, + GetMaxCiphertextByteSize, + (cricket::MediaType media_type, size_t frame_size), + (override)); +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_FRAME_ENCRYPTOR_H_ diff --git a/api/test/mock_peer_connection_factory_interface.h b/api/test/mock_peer_connection_factory_interface.h new file mode 100644 index 0000000..19c3f40 --- /dev/null +++ b/api/test/mock_peer_connection_factory_interface.h @@ -0,0 +1,75 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_MOCK_PEER_CONNECTION_FACTORY_INTERFACE_H_ +#define API_TEST_MOCK_PEER_CONNECTION_FACTORY_INTERFACE_H_ + +#include +#include + +#include "api/peer_connection_interface.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockPeerConnectionFactoryInterface final + : public rtc::RefCountedObject { + public: + rtc::scoped_refptr Create() { + return new MockPeerConnectionFactoryInterface(); + } + + MOCK_METHOD(void, SetOptions, (const Options&), (override)); + MOCK_METHOD(rtc::scoped_refptr, + CreatePeerConnection, + (const PeerConnectionInterface::RTCConfiguration&, + PeerConnectionDependencies), + (override)); + MOCK_METHOD(rtc::scoped_refptr, + CreatePeerConnection, + (const PeerConnectionInterface::RTCConfiguration&, + std::unique_ptr, + std::unique_ptr, + PeerConnectionObserver*), + (override)); + MOCK_METHOD(RtpCapabilities, + GetRtpSenderCapabilities, + (cricket::MediaType), + (const override)); + MOCK_METHOD(RtpCapabilities, + GetRtpReceiverCapabilities, + (cricket::MediaType), + (const override)); + MOCK_METHOD(rtc::scoped_refptr, + CreateLocalMediaStream, + (const std::string&), + (override)); + MOCK_METHOD(rtc::scoped_refptr, + CreateAudioSource, + (const cricket::AudioOptions&), + (override)); + MOCK_METHOD(rtc::scoped_refptr, + CreateVideoTrack, + (const std::string&, VideoTrackSourceInterface*), + (override)); + MOCK_METHOD(rtc::scoped_refptr, + CreateAudioTrack, + (const std::string&, AudioSourceInterface*), + (override)); + MOCK_METHOD(bool, StartAecDump, (FILE*, int64_t), (override)); + MOCK_METHOD(void, StopAecDump, (), (override)); + + protected: + MockPeerConnectionFactoryInterface() = default; +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_PEER_CONNECTION_FACTORY_INTERFACE_H_ diff --git a/api/test/mock_peerconnectioninterface.h b/api/test/mock_peerconnectioninterface.h new file mode 100644 index 0000000..be34df0 --- /dev/null +++ b/api/test/mock_peerconnectioninterface.h @@ -0,0 +1,201 @@ +/* + * Copyright 
2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_MOCK_PEERCONNECTIONINTERFACE_H_ +#define API_TEST_MOCK_PEERCONNECTIONINTERFACE_H_ + +#include +#include +#include +#include +#include + +#include "api/peer_connection_interface.h" +#include "api/sctp_transport_interface.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockPeerConnectionInterface + : public rtc::RefCountedObject { + public: + // PeerConnectionInterface + MOCK_METHOD(rtc::scoped_refptr, + local_streams, + (), + (override)); + MOCK_METHOD(rtc::scoped_refptr, + remote_streams, + (), + (override)); + MOCK_METHOD(bool, AddStream, (MediaStreamInterface*), (override)); + MOCK_METHOD(void, RemoveStream, (MediaStreamInterface*), (override)); + MOCK_METHOD(RTCErrorOr>, + AddTrack, + (rtc::scoped_refptr, + const std::vector&), + (override)); + MOCK_METHOD(bool, RemoveTrack, (RtpSenderInterface*), (override)); + MOCK_METHOD(RTCError, + RemoveTrackNew, + (rtc::scoped_refptr), + (override)); + MOCK_METHOD(RTCErrorOr>, + AddTransceiver, + (rtc::scoped_refptr), + (override)); + MOCK_METHOD(RTCErrorOr>, + AddTransceiver, + (rtc::scoped_refptr, + const RtpTransceiverInit&), + (override)); + MOCK_METHOD(RTCErrorOr>, + AddTransceiver, + (cricket::MediaType), + (override)); + MOCK_METHOD(RTCErrorOr>, + AddTransceiver, + (cricket::MediaType, const RtpTransceiverInit&), + (override)); + MOCK_METHOD(rtc::scoped_refptr, + CreateSender, + (const std::string&, const std::string&), + (override)); + MOCK_METHOD(std::vector>, + GetSenders, + (), + (const override)); + MOCK_METHOD(std::vector>, + GetReceivers, + (), + (const override)); + MOCK_METHOD(std::vector>, 
+ GetTransceivers, + (), + (const override)); + MOCK_METHOD(bool, + GetStats, + (StatsObserver*, MediaStreamTrackInterface*, StatsOutputLevel), + (override)); + MOCK_METHOD(void, GetStats, (RTCStatsCollectorCallback*), (override)); + MOCK_METHOD(void, + GetStats, + (rtc::scoped_refptr, + rtc::scoped_refptr), + (override)); + MOCK_METHOD(void, + GetStats, + (rtc::scoped_refptr, + rtc::scoped_refptr), + (override)); + MOCK_METHOD(void, ClearStatsCache, (), (override)); + MOCK_METHOD(rtc::scoped_refptr, + GetSctpTransport, + (), + (const override)); + MOCK_METHOD(rtc::scoped_refptr, + CreateDataChannel, + (const std::string&, const DataChannelInit*), + (override)); + MOCK_METHOD(const SessionDescriptionInterface*, + local_description, + (), + (const override)); + MOCK_METHOD(const SessionDescriptionInterface*, + remote_description, + (), + (const override)); + MOCK_METHOD(const SessionDescriptionInterface*, + current_local_description, + (), + (const override)); + MOCK_METHOD(const SessionDescriptionInterface*, + current_remote_description, + (), + (const override)); + MOCK_METHOD(const SessionDescriptionInterface*, + pending_local_description, + (), + (const override)); + MOCK_METHOD(const SessionDescriptionInterface*, + pending_remote_description, + (), + (const override)); + MOCK_METHOD(void, RestartIce, (), (override)); + MOCK_METHOD(void, + CreateOffer, + (CreateSessionDescriptionObserver*, const RTCOfferAnswerOptions&), + (override)); + MOCK_METHOD(void, + CreateAnswer, + (CreateSessionDescriptionObserver*, const RTCOfferAnswerOptions&), + (override)); + MOCK_METHOD(void, + SetLocalDescription, + (SetSessionDescriptionObserver*, SessionDescriptionInterface*), + (override)); + MOCK_METHOD(void, + SetRemoteDescription, + (SetSessionDescriptionObserver*, SessionDescriptionInterface*), + (override)); + MOCK_METHOD(void, + SetRemoteDescription, + (std::unique_ptr, + rtc::scoped_refptr), + (override)); + MOCK_METHOD(PeerConnectionInterface::RTCConfiguration, + 
GetConfiguration, + (), + (override)); + MOCK_METHOD(RTCError, + SetConfiguration, + (const PeerConnectionInterface::RTCConfiguration&), + (override)); + MOCK_METHOD(bool, + AddIceCandidate, + (const IceCandidateInterface*), + (override)); + MOCK_METHOD(bool, + RemoveIceCandidates, + (const std::vector&), + (override)); + MOCK_METHOD(RTCError, SetBitrate, (const BitrateSettings&), (override)); + MOCK_METHOD(void, SetAudioPlayout, (bool), (override)); + MOCK_METHOD(void, SetAudioRecording, (bool), (override)); + MOCK_METHOD(rtc::scoped_refptr, + LookupDtlsTransportByMid, + (const std::string&), + (override)); + MOCK_METHOD(SignalingState, signaling_state, (), (override)); + MOCK_METHOD(IceConnectionState, ice_connection_state, (), (override)); + MOCK_METHOD(IceConnectionState, + standardized_ice_connection_state, + (), + (override)); + MOCK_METHOD(PeerConnectionState, peer_connection_state, (), (override)); + MOCK_METHOD(IceGatheringState, ice_gathering_state, (), (override)); + MOCK_METHOD(absl::optional, can_trickle_ice_candidates, (), (override)); + MOCK_METHOD(bool, + StartRtcEventLog, + (std::unique_ptr, int64_t), + (override)); + MOCK_METHOD(bool, + StartRtcEventLog, + (std::unique_ptr), + (override)); + MOCK_METHOD(void, StopRtcEventLog, (), (override)); + MOCK_METHOD(void, Close, (), (override)); +}; + +static_assert(!std::is_abstract::value, ""); + +} // namespace webrtc + +#endif // API_TEST_MOCK_PEERCONNECTIONINTERFACE_H_ diff --git a/api/test/mock_rtpreceiver.h b/api/test/mock_rtpreceiver.h new file mode 100644 index 0000000..a0b79e0 --- /dev/null +++ b/api/test/mock_rtpreceiver.h @@ -0,0 +1,45 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_MOCK_RTPRECEIVER_H_ +#define API_TEST_MOCK_RTPRECEIVER_H_ + +#include +#include + +#include "api/rtp_receiver_interface.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockRtpReceiver : public rtc::RefCountedObject { + public: + MOCK_METHOD(rtc::scoped_refptr, + track, + (), + (const override)); + MOCK_METHOD(std::vector>, + streams, + (), + (const override)); + MOCK_METHOD(cricket::MediaType, media_type, (), (const override)); + MOCK_METHOD(std::string, id, (), (const override)); + MOCK_METHOD(RtpParameters, GetParameters, (), (const override)); + MOCK_METHOD(void, SetObserver, (RtpReceiverObserverInterface*), (override)); + MOCK_METHOD(void, + SetJitterBufferMinimumDelay, + (absl::optional), + (override)); + MOCK_METHOD(std::vector, GetSources, (), (const override)); +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_RTPRECEIVER_H_ diff --git a/api/test/mock_rtpsender.h b/api/test/mock_rtpsender.h new file mode 100644 index 0000000..f12a618 --- /dev/null +++ b/api/test/mock_rtpsender.h @@ -0,0 +1,47 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_MOCK_RTPSENDER_H_ +#define API_TEST_MOCK_RTPSENDER_H_ + +#include +#include + +#include "api/rtp_sender_interface.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockRtpSender : public rtc::RefCountedObject { + public: + MOCK_METHOD(bool, SetTrack, (MediaStreamTrackInterface*), (override)); + MOCK_METHOD(rtc::scoped_refptr, + track, + (), + (const override)); + MOCK_METHOD(uint32_t, ssrc, (), (const override)); + MOCK_METHOD(cricket::MediaType, media_type, (), (const override)); + MOCK_METHOD(std::string, id, (), (const override)); + MOCK_METHOD(std::vector, stream_ids, (), (const override)); + MOCK_METHOD(std::vector, + init_send_encodings, + (), + (const override)); + MOCK_METHOD(RtpParameters, GetParameters, (), (const override)); + MOCK_METHOD(RTCError, SetParameters, (const RtpParameters&), (override)); + MOCK_METHOD(rtc::scoped_refptr, + GetDtmfSender, + (), + (const override)); +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_RTPSENDER_H_ diff --git a/api/test/mock_transformable_video_frame.h b/api/test/mock_transformable_video_frame.h new file mode 100644 index 0000000..36798b5 --- /dev/null +++ b/api/test/mock_transformable_video_frame.h @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_MOCK_TRANSFORMABLE_VIDEO_FRAME_H_ +#define API_TEST_MOCK_TRANSFORMABLE_VIDEO_FRAME_H_ + +#include + +#include "api/frame_transformer_interface.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockTransformableVideoFrame + : public webrtc::TransformableVideoFrameInterface { + public: + MOCK_METHOD(rtc::ArrayView, GetData, (), (const override)); + MOCK_METHOD(void, SetData, (rtc::ArrayView data), (override)); + MOCK_METHOD(uint32_t, GetTimestamp, (), (const override)); + MOCK_METHOD(uint32_t, GetSsrc, (), (const, override)); + MOCK_METHOD(bool, IsKeyFrame, (), (const, override)); + MOCK_METHOD(std::vector, GetAdditionalData, (), (const, override)); + MOCK_METHOD(const webrtc::VideoFrameMetadata&, + GetMetadata, + (), + (const, override)); +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_TRANSFORMABLE_VIDEO_FRAME_H_ diff --git a/api/test/mock_video_bitrate_allocator.h b/api/test/mock_video_bitrate_allocator.h new file mode 100644 index 0000000..76cf49e --- /dev/null +++ b/api/test/mock_video_bitrate_allocator.h @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_MOCK_VIDEO_BITRATE_ALLOCATOR_H_ +#define API_TEST_MOCK_VIDEO_BITRATE_ALLOCATOR_H_ + +#include "api/video/video_bitrate_allocator.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockVideoBitrateAllocator : public webrtc::VideoBitrateAllocator { + MOCK_METHOD(VideoBitrateAllocation, + Allocate, + (VideoBitrateAllocationParameters parameters), + (override)); +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_VIDEO_BITRATE_ALLOCATOR_H_ diff --git a/api/test/mock_video_bitrate_allocator_factory.h b/api/test/mock_video_bitrate_allocator_factory.h new file mode 100644 index 0000000..16af191 --- /dev/null +++ b/api/test/mock_video_bitrate_allocator_factory.h @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_MOCK_VIDEO_BITRATE_ALLOCATOR_FACTORY_H_ +#define API_TEST_MOCK_VIDEO_BITRATE_ALLOCATOR_FACTORY_H_ + +#include + +#include "api/video/video_bitrate_allocator_factory.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockVideoBitrateAllocatorFactory + : public webrtc::VideoBitrateAllocatorFactory { + public: + ~MockVideoBitrateAllocatorFactory() override { Die(); } + MOCK_METHOD(std::unique_ptr, + CreateVideoBitrateAllocator, + (const VideoCodec&), + (override)); + MOCK_METHOD(void, Die, ()); +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_VIDEO_BITRATE_ALLOCATOR_FACTORY_H_ diff --git a/api/test/mock_video_decoder.h b/api/test/mock_video_decoder.h new file mode 100644 index 0000000..faadabc --- /dev/null +++ b/api/test/mock_video_decoder.h @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. 
All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_MOCK_VIDEO_DECODER_H_ +#define API_TEST_MOCK_VIDEO_DECODER_H_ + +#include "api/video_codecs/video_decoder.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockDecodedImageCallback : public DecodedImageCallback { + public: + MOCK_METHOD(int32_t, + Decoded, + (VideoFrame & decoded_image), // NOLINT + (override)); + MOCK_METHOD(int32_t, + Decoded, + (VideoFrame & decoded_image, // NOLINT + int64_t decode_time_ms), + (override)); + MOCK_METHOD(void, + Decoded, + (VideoFrame & decoded_image, // NOLINT + absl::optional decode_time_ms, + absl::optional qp), + (override)); +}; + +class MockVideoDecoder : public VideoDecoder { + public: + MOCK_METHOD(int32_t, + InitDecode, + (const VideoCodec* codec_settings, int32_t number_of_cores), + (override)); + MOCK_METHOD(int32_t, + Decode, + (const EncodedImage& input_image, + bool missing_frames, + int64_t render_time_ms), + (override)); + MOCK_METHOD(int32_t, + RegisterDecodeCompleteCallback, + (DecodedImageCallback * callback), + (override)); + MOCK_METHOD(int32_t, Release, (), (override)); +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_VIDEO_DECODER_H_ diff --git a/api/test/mock_video_decoder_factory.h b/api/test/mock_video_decoder_factory.h new file mode 100644 index 0000000..98a5d40 --- /dev/null +++ b/api/test/mock_video_decoder_factory.h @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_MOCK_VIDEO_DECODER_FACTORY_H_ +#define API_TEST_MOCK_VIDEO_DECODER_FACTORY_H_ + +#include +#include + +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockVideoDecoderFactory : public webrtc::VideoDecoderFactory { + public: + ~MockVideoDecoderFactory() override { Die(); } + + MOCK_METHOD(std::vector, + GetSupportedFormats, + (), + (const, override)); + MOCK_METHOD(std::unique_ptr, + CreateVideoDecoder, + (const webrtc::SdpVideoFormat&), + (override)); + MOCK_METHOD(void, Die, ()); +}; +} // namespace webrtc + +#endif // API_TEST_MOCK_VIDEO_DECODER_FACTORY_H_ diff --git a/api/test/mock_video_encoder.h b/api/test/mock_video_encoder.h new file mode 100644 index 0000000..11e0f64 --- /dev/null +++ b/api/test/mock_video_encoder.h @@ -0,0 +1,73 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_MOCK_VIDEO_ENCODER_H_ +#define API_TEST_MOCK_VIDEO_ENCODER_H_ + +#include + +#include "api/video_codecs/video_encoder.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockEncodedImageCallback : public EncodedImageCallback { + public: + MOCK_METHOD(Result, + OnEncodedImage, + (const EncodedImage&, const CodecSpecificInfo*), + (override)); + MOCK_METHOD(void, OnDroppedFrame, (DropReason reason), (override)); +}; + +class MockVideoEncoder : public VideoEncoder { + public: + MOCK_METHOD(void, + SetFecControllerOverride, + (FecControllerOverride*), + (override)); + MOCK_METHOD(int32_t, + InitEncode, + (const VideoCodec*, int32_t numberOfCores, size_t maxPayloadSize), + (override)); + MOCK_METHOD(int32_t, + InitEncode, + (const VideoCodec*, const VideoEncoder::Settings& settings), + (override)); + + MOCK_METHOD(int32_t, + Encode, + (const VideoFrame& inputImage, + const std::vector*), + (override)); + MOCK_METHOD(int32_t, + RegisterEncodeCompleteCallback, + (EncodedImageCallback*), + (override)); + MOCK_METHOD(int32_t, Release, (), (override)); + MOCK_METHOD(void, + SetRates, + (const RateControlParameters& parameters), + (override)); + MOCK_METHOD(void, + OnPacketLossRateUpdate, + (float packet_loss_rate), + (override)); + MOCK_METHOD(void, OnRttUpdate, (int64_t rtt_ms), (override)); + MOCK_METHOD(void, + OnLossNotification, + (const LossNotification& loss_notification), + (override)); + MOCK_METHOD(EncoderInfo, GetEncoderInfo, (), (const, override)); +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_VIDEO_ENCODER_H_ diff --git a/api/test/mock_video_encoder_factory.h b/api/test/mock_video_encoder_factory.h new file mode 100644 index 0000000..1aa1463 --- /dev/null +++ b/api/test/mock_video_encoder_factory.h @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_MOCK_VIDEO_ENCODER_FACTORY_H_ +#define API_TEST_MOCK_VIDEO_ENCODER_FACTORY_H_ + +#include +#include + +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockVideoEncoderFactory : public webrtc::VideoEncoderFactory { + public: + ~MockVideoEncoderFactory() override { Die(); } + + MOCK_METHOD(std::vector, + GetSupportedFormats, + (), + (const, override)); + MOCK_METHOD(CodecInfo, + QueryVideoEncoder, + (const SdpVideoFormat&), + (const, override)); + MOCK_METHOD(std::unique_ptr, + CreateVideoEncoder, + (const SdpVideoFormat&), + (override)); + + MOCK_METHOD(void, Die, ()); +}; + +} // namespace webrtc + +#endif // API_TEST_MOCK_VIDEO_ENCODER_FACTORY_H_ diff --git a/api/test/neteq_simulator.cc b/api/test/neteq_simulator.cc new file mode 100644 index 0000000..980db96 --- /dev/null +++ b/api/test/neteq_simulator.cc @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/test/neteq_simulator.h" + +namespace webrtc { +namespace test { + +NetEqSimulator::SimulationStepResult::SimulationStepResult() = default; +NetEqSimulator::SimulationStepResult::SimulationStepResult( + const NetEqSimulator::SimulationStepResult& other) = default; +NetEqSimulator::SimulationStepResult::~SimulationStepResult() = default; + +NetEqSimulator::NetEqState::NetEqState() = default; +NetEqSimulator::NetEqState::NetEqState(const NetEqState& other) = default; +NetEqSimulator::NetEqState::~NetEqState() = default; + +} // namespace test +} // namespace webrtc diff --git a/api/test/neteq_simulator.h b/api/test/neteq_simulator.h new file mode 100644 index 0000000..88c7ffa --- /dev/null +++ b/api/test/neteq_simulator.h @@ -0,0 +1,82 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_NETEQ_SIMULATOR_H_ +#define API_TEST_NETEQ_SIMULATOR_H_ + +#include + +#include +#include + +namespace webrtc { +namespace test { + +class NetEqSimulator { + public: + virtual ~NetEqSimulator() = default; + + enum class Action { kNormal, kExpand, kAccelerate, kPreemptiveExpand }; + + // The results of one simulation step. + struct SimulationStepResult { + SimulationStepResult(); + SimulationStepResult(const SimulationStepResult& other); + ~SimulationStepResult(); + + bool is_simulation_finished = false; + // The amount of audio produced (in ms) with the actions in this time step. + std::map action_times_ms; + // The amount of wall clock time (in ms) that elapsed since the previous + // event. This is not necessarily equal to the sum of the values in + // action_times_ms. 
+ int64_t simulation_step_ms = 0; + }; + + struct NetEqState { + NetEqState(); + NetEqState(const NetEqState& other); + ~NetEqState(); + // The sum of the packet buffer and sync buffer delay. + int current_delay_ms = 0; + // An indicator that packet loss occurred since the last GetAudio event. + bool packet_loss_occurred = false; + // An indicator that the packet buffer has been flushed since the last + // GetAudio event. + bool packet_buffer_flushed = false; + // Indicates if the next needed packet is available in the buffer. + bool next_packet_available = false; + // The inter-arrival times in ms of the packets that have arrived since the + // last GetAudio event. + std::vector packet_iat_ms; + // The current packet size in ms. + int packet_size_ms = 0; + }; + + // Runs the simulation until the end. Returns the duration of the produced + // audio in ms. + virtual int64_t Run() = 0; + // Runs the simulation until we hit the next GetAudio event. If the simulation + // is finished, is_simulation_finished will be set to true in the returned + // SimulationStepResult. + virtual SimulationStepResult RunToNextGetAudio() = 0; + + // Set the next action to be taken by NetEq. This will override any action + // that NetEq would normally decide to take. + virtual void SetNextAction(Action next_operation) = 0; + + // Get the current state of NetEq. + virtual NetEqState GetNetEqState() = 0; +}; + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_NETEQ_SIMULATOR_H_ diff --git a/api/test/neteq_simulator_factory.cc b/api/test/neteq_simulator_factory.cc new file mode 100644 index 0000000..ea5be85 --- /dev/null +++ b/api/test/neteq_simulator_factory.cc @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/test/neteq_simulator_factory.h" + +#include +#include +#include + +#include "absl/flags/flag.h" +#include "absl/flags/parse.h" +#include "modules/audio_coding/neteq/tools/neteq_test_factory.h" +#include "rtc_base/checks.h" + +namespace webrtc { +namespace test { + +NetEqSimulatorFactory::NetEqSimulatorFactory() + : factory_(std::make_unique()) {} + +NetEqSimulatorFactory::~NetEqSimulatorFactory() = default; + +std::unique_ptr NetEqSimulatorFactory::CreateSimulatorFromFile( + absl::string_view event_log_filename, + absl::string_view replacement_audio_filename, + Config simulation_config) { + NetEqTestFactory::Config config; + config.replacement_audio_file = std::string(replacement_audio_filename); + config.max_nr_packets_in_buffer = simulation_config.max_nr_packets_in_buffer; + config.initial_dummy_packets = simulation_config.initial_dummy_packets; + config.skip_get_audio_events = simulation_config.skip_get_audio_events; + config.field_trial_string = simulation_config.field_trial_string; + config.output_audio_filename = simulation_config.output_audio_filename; + return factory_->InitializeTestFromFile( + std::string(event_log_filename), simulation_config.neteq_factory, config); +} + +std::unique_ptr +NetEqSimulatorFactory::CreateSimulatorFromString( + absl::string_view event_log_file_contents, + absl::string_view replacement_audio_filename, + Config simulation_config) { + NetEqTestFactory::Config config; + config.replacement_audio_file = std::string(replacement_audio_filename); + config.max_nr_packets_in_buffer = simulation_config.max_nr_packets_in_buffer; + config.initial_dummy_packets = simulation_config.initial_dummy_packets; + config.skip_get_audio_events = simulation_config.skip_get_audio_events; + config.field_trial_string = 
simulation_config.field_trial_string; + return factory_->InitializeTestFromString( + std::string(event_log_file_contents), simulation_config.neteq_factory, + config); +} + +} // namespace test +} // namespace webrtc diff --git a/api/test/neteq_simulator_factory.h b/api/test/neteq_simulator_factory.h new file mode 100644 index 0000000..b3c77b1 --- /dev/null +++ b/api/test/neteq_simulator_factory.h @@ -0,0 +1,67 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_NETEQ_SIMULATOR_FACTORY_H_ +#define API_TEST_NETEQ_SIMULATOR_FACTORY_H_ + +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/neteq/neteq_factory.h" +#include "api/test/neteq_simulator.h" + +namespace webrtc { +namespace test { + +class NetEqTestFactory; + +class NetEqSimulatorFactory { + public: + NetEqSimulatorFactory(); + ~NetEqSimulatorFactory(); + struct Config { + // The maximum allowed number of packets in the jitter buffer. + int max_nr_packets_in_buffer = 0; + // The number of audio packets to insert at the start of the simulation. + // Since the simulation is done with a replacement audio file, these + // artificial packets will take a small piece of that replacement audio. + int initial_dummy_packets = 0; + // The number of simulation steps to skip at the start of the simulation. + // This removes incoming packets and GetAudio events from the start of the + // simulation, until the requested number of GetAudio events has been + // removed. + int skip_get_audio_events = 0; + // A WebRTC field trial string to be used during the simulation. 
+ std::string field_trial_string; + // A filename for the generated output audio file. + absl::optional output_audio_filename; + // A custom NetEqFactory can be used. + NetEqFactory* neteq_factory = nullptr; + }; + std::unique_ptr CreateSimulatorFromFile( + absl::string_view event_log_filename, + absl::string_view replacement_audio_filename, + Config simulation_config); + // The same as above, but pass the file contents as a string. + std::unique_ptr CreateSimulatorFromString( + absl::string_view event_log_file_contents, + absl::string_view replacement_audio_file, + Config simulation_config); + + private: + std::unique_ptr factory_; +}; + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_NETEQ_SIMULATOR_FACTORY_H_ diff --git a/api/test/network_emulation/BUILD.gn b/api/test/network_emulation/BUILD.gn new file mode 100644 index 0000000..4780da2 --- /dev/null +++ b/api/test/network_emulation/BUILD.gn @@ -0,0 +1,29 @@ +# Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../../webrtc.gni") + +rtc_library("network_emulation") { + visibility = [ "*" ] + + sources = [ + "network_emulation_interfaces.cc", + "network_emulation_interfaces.h", + ] + + deps = [ + "../..:array_view", + "../../../rtc_base", + "../../../rtc_base:checks", + "../../../rtc_base:rtc_base_approved", + "../../units:data_rate", + "../../units:data_size", + "../../units:timestamp", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} diff --git a/api/test/network_emulation/DEPS b/api/test/network_emulation/DEPS new file mode 100644 index 0000000..0cf1288 --- /dev/null +++ b/api/test/network_emulation/DEPS @@ -0,0 +1,7 @@ +specific_include_rules = { + ".*": [ + "+rtc_base/socket_address.h", + "+rtc_base/ip_address.h", + "+rtc_base/copy_on_write_buffer.h", + ], +} diff --git a/api/test/network_emulation/network_emulation_interfaces.cc b/api/test/network_emulation/network_emulation_interfaces.cc new file mode 100644 index 0000000..ac2eb1d --- /dev/null +++ b/api/test/network_emulation/network_emulation_interfaces.cc @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "api/test/network_emulation/network_emulation_interfaces.h" + +#include "rtc_base/net_helper.h" + +namespace webrtc { +EmulatedIpPacket::EmulatedIpPacket(const rtc::SocketAddress& from, + const rtc::SocketAddress& to, + rtc::CopyOnWriteBuffer data, + Timestamp arrival_time, + uint16_t application_overhead) + : from(from), + to(to), + data(data), + headers_size(to.ipaddr().overhead() + application_overhead + + cricket::kUdpHeaderSize), + arrival_time(arrival_time) { + RTC_DCHECK(to.family() == AF_INET || to.family() == AF_INET6); +} + +} // namespace webrtc diff --git a/api/test/network_emulation/network_emulation_interfaces.h b/api/test/network_emulation/network_emulation_interfaces.h new file mode 100644 index 0000000..db1f9ad --- /dev/null +++ b/api/test/network_emulation/network_emulation_interfaces.h @@ -0,0 +1,215 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef API_TEST_NETWORK_EMULATION_NETWORK_EMULATION_INTERFACES_H_ +#define API_TEST_NETWORK_EMULATION_NETWORK_EMULATION_INTERFACES_H_ + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/timestamp.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/socket_address.h" + +namespace webrtc { + +struct EmulatedIpPacket { + public: + EmulatedIpPacket(const rtc::SocketAddress& from, + const rtc::SocketAddress& to, + rtc::CopyOnWriteBuffer data, + Timestamp arrival_time, + uint16_t application_overhead = 0); + ~EmulatedIpPacket() = default; + // This object is not copyable or assignable. + EmulatedIpPacket(const EmulatedIpPacket&) = delete; + EmulatedIpPacket& operator=(const EmulatedIpPacket&) = delete; + // This object is only moveable. + EmulatedIpPacket(EmulatedIpPacket&&) = default; + EmulatedIpPacket& operator=(EmulatedIpPacket&&) = default; + + size_t size() const { return data.size(); } + const uint8_t* cdata() const { return data.cdata(); } + + size_t ip_packet_size() const { return size() + headers_size; } + rtc::SocketAddress from; + rtc::SocketAddress to; + // Holds the UDP payload. + rtc::CopyOnWriteBuffer data; + uint16_t headers_size; + Timestamp arrival_time; +}; + +// Interface for handling IP packets from an emulated network. This is used with +// EmulatedEndpoint to receive packets on a specific port. 
+class EmulatedNetworkReceiverInterface { + public: + virtual ~EmulatedNetworkReceiverInterface() = default; + + virtual void OnPacketReceived(EmulatedIpPacket packet) = 0; +}; + +class EmulatedNetworkOutgoingStats { + public: + virtual ~EmulatedNetworkOutgoingStats() = default; + + virtual int64_t PacketsSent() const = 0; + + virtual DataSize BytesSent() const = 0; + + virtual DataSize FirstSentPacketSize() const = 0; + + // Returns time of the first packet sent or infinite value if no packets were + // sent. + virtual Timestamp FirstPacketSentTime() const = 0; + + // Returns time of the last packet sent or infinite value if no packets were + // sent. + virtual Timestamp LastPacketSentTime() const = 0; + + // Returns average send rate. Requires that at least 2 packets were sent. + virtual DataRate AverageSendRate() const = 0; +}; + +class EmulatedNetworkIncomingStats { + public: + virtual ~EmulatedNetworkIncomingStats() = default; + + // Total amount of packets received with or without destination. + virtual int64_t PacketsReceived() const = 0; + // Total amount of bytes in received packets. + virtual DataSize BytesReceived() const = 0; + // Total amount of packets that were received, but no destination was found. + virtual int64_t PacketsDropped() const = 0; + // Total amount of bytes in dropped packets. + virtual DataSize BytesDropped() const = 0; + + virtual DataSize FirstReceivedPacketSize() const = 0; + + // Returns time of the first packet received or infinite value if no packets + // were received. + virtual Timestamp FirstPacketReceivedTime() const = 0; + + // Returns time of the last packet received or infinite value if no packets + // were received. + virtual Timestamp LastPacketReceivedTime() const = 0; + + virtual DataRate AverageReceiveRate() const = 0; +}; + +class EmulatedNetworkStats { + public: + virtual ~EmulatedNetworkStats() = default; + + // List of IP addresses that were used to send data considered in this stats + // object. 
+ virtual std::vector LocalAddresses() const = 0; + + virtual int64_t PacketsSent() const = 0; + + virtual DataSize BytesSent() const = 0; + + virtual DataSize FirstSentPacketSize() const = 0; + // Returns time of the first packet sent or infinite value if no packets were + // sent. + virtual Timestamp FirstPacketSentTime() const = 0; + // Returns time of the last packet sent or infinite value if no packets were + // sent. + virtual Timestamp LastPacketSentTime() const = 0; + + virtual DataRate AverageSendRate() const = 0; + // Total amount of packets received regardless of the destination address. + virtual int64_t PacketsReceived() const = 0; + // Total amount of bytes in received packets. + virtual DataSize BytesReceived() const = 0; + // Total amount of packets that were received, but no destination was found. + virtual int64_t PacketsDropped() const = 0; + // Total amount of bytes in dropped packets. + virtual DataSize BytesDropped() const = 0; + + virtual DataSize FirstReceivedPacketSize() const = 0; + // Returns time of the first packet received or infinite value if no packets + // were received. + virtual Timestamp FirstPacketReceivedTime() const = 0; + // Returns time of the last packet received or infinite value if no packets + // were received. + virtual Timestamp LastPacketReceivedTime() const = 0; + + virtual DataRate AverageReceiveRate() const = 0; + + virtual std::map> + OutgoingStatsPerDestination() const = 0; + + virtual std::map> + IncomingStatsPerSource() const = 0; +}; + +// EmulatedEndpoint is an abstraction for network interface on device. Instances +// of this are created by NetworkEmulationManager::CreateEndpoint. +class EmulatedEndpoint : public EmulatedNetworkReceiverInterface { + public: + // Send packet into network. + // |from| will be used to set source address for the packet in destination + // socket. + // |to| will be used for routing verification and picking right socket by port + // on destination endpoint. 
+ virtual void SendPacket(const rtc::SocketAddress& from, + const rtc::SocketAddress& to, + rtc::CopyOnWriteBuffer packet_data, + uint16_t application_overhead = 0) = 0; + + // Binds receiver to this endpoint to send and receive data. + // |desired_port| is a port that should be used. If it is equal to 0, + // endpoint will pick the first available port starting from + // |kFirstEphemeralPort|. + // + // Returns the port, that should be used (it will be equals to desired, if + // |desired_port| != 0 and is free or will be the one, selected by endpoint) + // or absl::nullopt if desired_port in used. Also fails if there are no more + // free ports to bind to. + virtual absl::optional BindReceiver( + uint16_t desired_port, + EmulatedNetworkReceiverInterface* receiver) = 0; + virtual void UnbindReceiver(uint16_t port) = 0; + virtual rtc::IPAddress GetPeerLocalAddress() const = 0; + + private: + // Ensure that there can be no other subclass than EmulatedEndpointImpl. This + // means that it's always safe to downcast EmulatedEndpoint instances to + // EmulatedEndpointImpl. + friend class EmulatedEndpointImpl; + EmulatedEndpoint() = default; +}; + +// Simulates a TCP connection, this roughly implements the Reno algorithm. In +// difference from TCP this only support sending messages with a fixed length, +// no streaming. This is useful to simulate signaling and cross traffic using +// message based protocols such as HTTP. It differs from UDP messages in that +// they are guranteed to be delivered eventually, even on lossy networks. +class TcpMessageRoute { + public: + // Sends a TCP message of the given |size| over the route, |on_received| is + // called when the message has been delivered. Note that the connection + // parameters are reset iff there's no currently pending message on the route. 
+ virtual void SendMessage(size_t size, std::function on_received) = 0; + + protected: + ~TcpMessageRoute() = default; +}; +} // namespace webrtc + +#endif // API_TEST_NETWORK_EMULATION_NETWORK_EMULATION_INTERFACES_H_ diff --git a/api/test/network_emulation_manager.cc b/api/test/network_emulation_manager.cc new file mode 100644 index 0000000..602c90a --- /dev/null +++ b/api/test/network_emulation_manager.cc @@ -0,0 +1,75 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include + +#include "api/test/network_emulation_manager.h" +#include "call/simulated_network.h" + +namespace webrtc { + +NetworkEmulationManager::SimulatedNetworkNode::Builder& +NetworkEmulationManager::SimulatedNetworkNode::Builder::config( + BuiltInNetworkBehaviorConfig config) { + config_ = config; + return *this; +} + +NetworkEmulationManager::SimulatedNetworkNode::Builder& +NetworkEmulationManager::SimulatedNetworkNode::Builder::delay_ms( + int queue_delay_ms) { + config_.queue_delay_ms = queue_delay_ms; + return *this; +} + +NetworkEmulationManager::SimulatedNetworkNode::Builder& +NetworkEmulationManager::SimulatedNetworkNode::Builder::capacity_kbps( + int link_capacity_kbps) { + config_.link_capacity_kbps = link_capacity_kbps; + return *this; +} + +NetworkEmulationManager::SimulatedNetworkNode::Builder& +NetworkEmulationManager::SimulatedNetworkNode::Builder::capacity_Mbps( + int link_capacity_Mbps) { + config_.link_capacity_kbps = link_capacity_Mbps * 1000; + return *this; +} + +NetworkEmulationManager::SimulatedNetworkNode::Builder& +NetworkEmulationManager::SimulatedNetworkNode::Builder::loss(double loss_rate) { + 
config_.loss_percent = std::round(loss_rate * 100); + return *this; +} + +NetworkEmulationManager::SimulatedNetworkNode::Builder& +NetworkEmulationManager::SimulatedNetworkNode::Builder::packet_queue_length( + int max_queue_length_in_packets) { + config_.queue_length_packets = max_queue_length_in_packets; + return *this; +} + +NetworkEmulationManager::SimulatedNetworkNode +NetworkEmulationManager::SimulatedNetworkNode::Builder::Build() const { + RTC_CHECK(net_); + return Build(net_); +} + +NetworkEmulationManager::SimulatedNetworkNode +NetworkEmulationManager::SimulatedNetworkNode::Builder::Build( + NetworkEmulationManager* net) const { + RTC_CHECK(net); + RTC_CHECK(net_ == nullptr || net_ == net); + SimulatedNetworkNode res; + auto behavior = std::make_unique(config_); + res.simulation = behavior.get(); + res.node = net->CreateEmulatedNode(std::move(behavior)); + return res; +} +} // namespace webrtc diff --git a/api/test/network_emulation_manager.h b/api/test/network_emulation_manager.h new file mode 100644 index 0000000..90441e4 --- /dev/null +++ b/api/test/network_emulation_manager.h @@ -0,0 +1,207 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_NETWORK_EMULATION_MANAGER_H_ +#define API_TEST_NETWORK_EMULATION_MANAGER_H_ + +#include +#include +#include + +#include "api/array_view.h" +#include "api/test/network_emulation/network_emulation_interfaces.h" +#include "api/test/simulated_network.h" +#include "api/test/time_controller.h" +#include "api/units/timestamp.h" +#include "rtc_base/network.h" +#include "rtc_base/network_constants.h" +#include "rtc_base/thread.h" + +namespace webrtc { + +// This API is still in development and can be changed without prior notice. + +// These classes are forward declared here, because they used as handles, to +// make it possible for client code to operate with these abstractions and build +// required network configuration. With forward declaration here implementation +// is more readable, than with interfaces approach and cause user needn't any +// API methods on these abstractions it is acceptable here. + +// EmulatedNetworkNode is an abstraction for some network in the real world, +// like 3G network between peers, or Wi-Fi for one peer and LTE for another. +// Multiple networks can be joined into chain emulating a network path from +// one peer to another. +class EmulatedNetworkNode; + +// EmulatedRoute is handle for single route from one network interface on one +// peer device to another network interface on another peer device. +class EmulatedRoute; + +struct EmulatedEndpointConfig { + enum class IpAddressFamily { kIpv4, kIpv6 }; + + IpAddressFamily generated_ip_family = IpAddressFamily::kIpv4; + // If specified will be used as IP address for endpoint node. Must be unique + // among all created nodes. + absl::optional ip; + // Should endpoint be enabled or not, when it will be created. + // Enabled endpoints will be available for webrtc to send packets. + bool start_as_enabled = true; + // Network type which will be used to represent endpoint to WebRTC. 
+ rtc::AdapterType type = rtc::AdapterType::ADAPTER_TYPE_UNKNOWN; +}; + + +// Provide interface to obtain all required objects to inject network emulation +// layer into PeerConnection. Also contains information about network interfaces +// accessible by PeerConnection. +class EmulatedNetworkManagerInterface { + public: + virtual ~EmulatedNetworkManagerInterface() = default; + + // Returns non-null pointer to thread that have to be used as network thread + // for WebRTC to properly setup network emulation. Returned thread is owned + // by EmulatedNetworkManagerInterface implementation. + virtual rtc::Thread* network_thread() = 0; + // Returns non-null pointer to network manager that have to be injected into + // WebRTC to properly setup network emulation. Returned manager is owned by + // EmulatedNetworkManagerInterface implementation. + virtual rtc::NetworkManager* network_manager() = 0; + // Returns list of endpoints that are associated with this instance. Pointers + // are guaranteed to be non-null and are owned by NetworkEmulationManager. + virtual std::vector endpoints() const = 0; + + // Passes summarized network stats for endpoints for this manager into + // specified |stats_callback|. + virtual void GetStats( + std::function)> stats_callback) + const = 0; +}; + +enum class TimeMode { kRealTime, kSimulated }; + +// Provides an API for creating and configuring emulated network layer. +// All objects returned by this API are owned by NetworkEmulationManager itself +// and will be deleted when manager will be deleted. +class NetworkEmulationManager { + public: + // Helper struct to simplify creation of simulated network behaviors. Contains + // non-owning pointers as the underlying instances are owned by the manager. 
+ struct SimulatedNetworkNode { + SimulatedNetworkInterface* simulation; + EmulatedNetworkNode* node; + + class Builder { + public: + explicit Builder(NetworkEmulationManager* net) : net_(net) {} + Builder() : net_(nullptr) {} + Builder(const Builder&) = default; + // Sets the config state, note that this will replace any previously set + // values. + Builder& config(BuiltInNetworkBehaviorConfig config); + Builder& delay_ms(int queue_delay_ms); + Builder& capacity_kbps(int link_capacity_kbps); + Builder& capacity_Mbps(int link_capacity_Mbps); + Builder& loss(double loss_rate); + Builder& packet_queue_length(int max_queue_length_in_packets); + SimulatedNetworkNode Build() const; + SimulatedNetworkNode Build(NetworkEmulationManager* net) const; + + private: + NetworkEmulationManager* const net_; + BuiltInNetworkBehaviorConfig config_; + }; + }; + virtual ~NetworkEmulationManager() = default; + + virtual TimeController* time_controller() = 0; + + // Creates an emulated network node, which represents single network in + // the emulated network layer. + virtual EmulatedNetworkNode* CreateEmulatedNode( + BuiltInNetworkBehaviorConfig config) = 0; + virtual EmulatedNetworkNode* CreateEmulatedNode( + std::unique_ptr network_behavior) = 0; + + virtual SimulatedNetworkNode::Builder NodeBuilder() = 0; + + // Creates an emulated endpoint, which represents single network interface on + // the peer's device. + virtual EmulatedEndpoint* CreateEndpoint(EmulatedEndpointConfig config) = 0; + // Enable emulated endpoint to make it available for webrtc. + // Caller mustn't enable currently enabled endpoint. + virtual void EnableEndpoint(EmulatedEndpoint* endpoint) = 0; + // Disable emulated endpoint to make it unavailable for webrtc. + // Caller mustn't disable currently disabled endpoint. + virtual void DisableEndpoint(EmulatedEndpoint* endpoint) = 0; + + // Creates a route between endpoints going through specified network nodes. 
+ // This route is single direction only and describe how traffic that was + // sent by network interface |from| have to be delivered to the network + // interface |to|. Return object can be used to remove created route. The + // route must contains at least one network node inside it. + // + // Assume that E{0-9} are endpoints and N{0-9} are network nodes, then + // creation of the route have to follow these rules: + // 1. A route consists of a source endpoint, an ordered list of one or + // more network nodes, and a destination endpoint. + // 2. If (E1, ..., E2) is a route, then E1 != E2. + // In other words, the source and the destination may not be the same. + // 3. Given two simultaneously existing routes (E1, ..., E2) and + // (E3, ..., E4), either E1 != E3 or E2 != E4. + // In other words, there may be at most one route from any given source + // endpoint to any given destination endpoint. + // 4. Given two simultaneously existing routes (E1, ..., N1, ..., E2) + // and (E3, ..., N2, ..., E4), either N1 != N2 or E2 != E4. + // In other words, a network node may not belong to two routes that lead + // to the same destination endpoint. + virtual EmulatedRoute* CreateRoute( + EmulatedEndpoint* from, + const std::vector& via_nodes, + EmulatedEndpoint* to) = 0; + + // Creates a route over the given |via_nodes| creating the required endpoints + // in the process. The returned EmulatedRoute pointer can be used in other + // calls as a transport route for message or cross traffic. + virtual EmulatedRoute* CreateRoute( + const std::vector& via_nodes) = 0; + + // Removes route previously created by CreateRoute(...). + // Caller mustn't call this function with route, that have been already + // removed earlier. + virtual void ClearRoute(EmulatedRoute* route) = 0; + + // Creates a simulated TCP connection using |send_route| for traffic and + // |ret_route| for feedback. 
This can be used to emulate HTTP cross traffic + // and to implement realistic reliable signaling over lossy networks. + // TODO(srte): Handle clearing of the routes involved. + virtual TcpMessageRoute* CreateTcpRoute(EmulatedRoute* send_route, + EmulatedRoute* ret_route) = 0; + + // Creates EmulatedNetworkManagerInterface which can be used then to inject + // network emulation layer into PeerConnection. |endpoints| - are available + // network interfaces for PeerConnection. If endpoint is enabled, it will be + // immediately available for PeerConnection, otherwise user will be able to + // enable endpoint later to make it available for PeerConnection. + virtual EmulatedNetworkManagerInterface* + CreateEmulatedNetworkManagerInterface( + const std::vector& endpoints) = 0; + + // Passes summarized network stats for specified |endpoints| into specifield + // |stats_callback|. + virtual void GetStats( + rtc::ArrayView endpoints, + std::function)> + stats_callback) = 0; +}; + +} // namespace webrtc + +#endif // API_TEST_NETWORK_EMULATION_MANAGER_H_ diff --git a/api/test/peerconnection_quality_test_fixture.h b/api/test/peerconnection_quality_test_fixture.h new file mode 100644 index 0000000..f370478 --- /dev/null +++ b/api/test/peerconnection_quality_test_fixture.h @@ -0,0 +1,473 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef API_TEST_PEERCONNECTION_QUALITY_TEST_FIXTURE_H_ +#define API_TEST_PEERCONNECTION_QUALITY_TEST_FIXTURE_H_ + +#include +#include +#include +#include +#include + +#include "absl/memory/memory.h" +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/async_resolver_factory.h" +#include "api/call/call_factory_interface.h" +#include "api/fec_controller.h" +#include "api/function_view.h" +#include "api/media_stream_interface.h" +#include "api/peer_connection_interface.h" +#include "api/rtc_event_log/rtc_event_log_factory_interface.h" +#include "api/rtp_parameters.h" +#include "api/task_queue/task_queue_factory.h" +#include "api/test/audio_quality_analyzer_interface.h" +#include "api/test/frame_generator_interface.h" +#include "api/test/simulated_network.h" +#include "api/test/stats_observer_interface.h" +#include "api/test/track_id_stream_info_map.h" +#include "api/test/video_quality_analyzer_interface.h" +#include "api/transport/network_control.h" +#include "api/units/time_delta.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "media/base/media_constants.h" +#include "rtc_base/network.h" +#include "rtc_base/rtc_certificate_generator.h" +#include "rtc_base/ssl_certificate.h" +#include "rtc_base/thread.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +constexpr size_t kDefaultSlidesWidth = 1850; +constexpr size_t kDefaultSlidesHeight = 1110; + +// API is in development. Can be changed/removed without notice. +class PeerConnectionE2EQualityTestFixture { + public: + // The index of required capturing device in OS provided list of video + // devices. On Linux and Windows the list will be obtained via + // webrtc::VideoCaptureModule::DeviceInfo, on Mac OS via + // [RTCCameraVideoCapturer captureDevices]. + enum class CapturingDeviceIndex : size_t {}; + + // Contains parameters for screen share scrolling. 
+ // + // If scrolling is enabled, then it will be done by putting sliding window + // on source video and moving this window from top left corner to the + // bottom right corner of the picture. + // + // In such case source dimensions must be greater or equal to the sliding + // window dimensions. So |source_width| and |source_height| are the dimensions + // of the source frame, while |VideoConfig::width| and |VideoConfig::height| + // are the dimensions of the sliding window. + // + // Because |source_width| and |source_height| are dimensions of the source + // frame, they have to be width and height of videos from + // |ScreenShareConfig::slides_yuv_file_names|. + // + // Because scrolling have to be done on single slide it also requires, that + // |duration| must be less or equal to + // |ScreenShareConfig::slide_change_interval|. + struct ScrollingParams { + ScrollingParams(TimeDelta duration, + size_t source_width, + size_t source_height) + : duration(duration), + source_width(source_width), + source_height(source_height) { + RTC_CHECK_GT(duration.ms(), 0); + } + + // Duration of scrolling. + TimeDelta duration; + // Width of source slides video. + size_t source_width; + // Height of source slides video. + size_t source_height; + }; + + // Contains screen share video stream properties. + struct ScreenShareConfig { + explicit ScreenShareConfig(TimeDelta slide_change_interval) + : slide_change_interval(slide_change_interval) { + RTC_CHECK_GT(slide_change_interval.ms(), 0); + } + + // Shows how long one slide should be presented on the screen during + // slide generation. + TimeDelta slide_change_interval; + // If true, slides will be generated programmatically. No scrolling params + // will be applied in such case. + bool generate_slides = false; + // If present scrolling will be applied. Please read extra requirement on + // |slides_yuv_file_names| for scrolling. + absl::optional scrolling_params; + // Contains list of yuv files with slides. 
+ // + // If empty, default set of slides will be used. In such case + // |VideoConfig::width| must be equal to |kDefaultSlidesWidth| and + // |VideoConfig::height| must be equal to |kDefaultSlidesHeight| or if + // |scrolling_params| are specified, then |ScrollingParams::source_width| + // must be equal to |kDefaultSlidesWidth| and + // |ScrollingParams::source_height| must be equal to |kDefaultSlidesHeight|. + std::vector slides_yuv_file_names; + }; + + // Config for Vp8 simulcast or Vp9 SVC testing. + // + // SVC support is limited: + // During SVC testing there is no SFU, so framework will try to emulate SFU + // behavior in regular p2p call. Because of it there are such limitations: + // * if |target_spatial_index| is not equal to the highest spatial layer + // then no packet/frame drops are allowed. + // + // If there will be any drops, that will affect requested layer, then + // WebRTC SVC implementation will continue decoding only the highest + // available layer and won't restore lower layers, so analyzer won't + // receive required data which will cause wrong results or test failures. + struct VideoSimulcastConfig { + explicit VideoSimulcastConfig(int simulcast_streams_count) + : simulcast_streams_count(simulcast_streams_count) { + RTC_CHECK_GT(simulcast_streams_count, 1); + } + VideoSimulcastConfig(int simulcast_streams_count, int target_spatial_index) + : simulcast_streams_count(simulcast_streams_count), + target_spatial_index(target_spatial_index) { + RTC_CHECK_GT(simulcast_streams_count, 1); + RTC_CHECK_GE(target_spatial_index, 0); + RTC_CHECK_LT(target_spatial_index, simulcast_streams_count); + } + + // Specified amount of simulcast streams/SVC layers, depending on which + // encoder is used. + int simulcast_streams_count; + // Specifies spatial index of the video stream to analyze. + // There are 2 cases: + // 1. 
simulcast encoder is used: + // in such case |target_spatial_index| will specify the index of + // simulcast stream, that should be analyzed. Other streams will be + // dropped. + // 2. SVC encoder is used: + // in such case |target_spatial_index| will specify the top interesting + // spatial layer and all layers below, including target one will be + // processed. All layers above target one will be dropped. + // If not specified than whatever stream will be received will be analyzed. + // It requires Selective Forwarding Unit (SFU) to be configured in the + // network. + absl::optional target_spatial_index; + + // Encoding parameters per simulcast layer. If not empty, |encoding_params| + // size have to be equal to |simulcast_streams_count|. Will be used to set + // transceiver send encoding params for simulcast layers. Applicable only + // for codecs that support simulcast (ex. Vp8) and will be ignored + // otherwise. RtpEncodingParameters::rid may be changed by fixture + // implementation to ensure signaling correctness. + std::vector encoding_params; + }; + + // Contains properties of single video stream. + struct VideoConfig { + VideoConfig(size_t width, size_t height, int32_t fps) + : width(width), height(height), fps(fps) {} + + // Video stream width. + const size_t width; + // Video stream height. + const size_t height; + const int32_t fps; + // Have to be unique among all specified configs for all peers in the call. + // Will be auto generated if omitted. + absl::optional stream_label; + // Will be set for current video track. If equals to kText or kDetailed - + // screencast in on. + absl::optional content_hint; + // If presented video will be transfered in simulcast/SVC mode depending on + // which encoder is used. + // + // Simulcast is supported only from 1st added peer. For VP8 simulcast only + // without RTX is supported so it will be automatically disabled for all + // simulcast tracks. 
For VP9 simulcast enables VP9 SVC mode and support RTX, + // but only on non-lossy networks. See more in documentation to + // VideoSimulcastConfig. + absl::optional simulcast_config; + // Count of temporal layers for video stream. This value will be set into + // each RtpEncodingParameters of RtpParameters of corresponding + // RtpSenderInterface for this video stream. + absl::optional temporal_layers_count; + // Sets the maximum encode bitrate in bps. If this value is not set, the + // encoder will be capped at an internal maximum value around 2 Mbps + // depending on the resolution. This means that it will never be able to + // utilize a high bandwidth link. + absl::optional max_encode_bitrate_bps; + // Sets the minimum encode bitrate in bps. If this value is not set, the + // encoder will use an internal minimum value. Please note that if this + // value is set higher than the bandwidth of the link, the encoder will + // generate more data than the link can handle regardless of the bandwidth + // estimation. + absl::optional min_encode_bitrate_bps; + // If specified the input stream will be also copied to specified file. + // It is actually one of the test's output file, which contains copy of what + // was captured during the test for this video stream on sender side. + // It is useful when generator is used as input. + absl::optional input_dump_file_name; + // If specified this file will be used as output on the receiver side for + // this stream. If multiple streams will be produced by input stream, + // output files will be appended with indexes. The produced files contains + // what was rendered for this video stream on receiver side. + absl::optional output_dump_file_name; + // If true will display input and output video on the user's screen. + bool show_on_screen = false; + // If specified, determines a sync group to which this video stream belongs. 
+ // According to bugs.webrtc.org/4762 WebRTC supports synchronization only + // for pair of single audio and single video stream. + absl::optional sync_group; + }; + + // Contains properties for audio in the call. + struct AudioConfig { + enum Mode { + kGenerated, + kFile, + }; + // Have to be unique among all specified configs for all peers in the call. + // Will be auto generated if omitted. + absl::optional stream_label; + Mode mode = kGenerated; + // Have to be specified only if mode = kFile + absl::optional input_file_name; + // If specified the input stream will be also copied to specified file. + absl::optional input_dump_file_name; + // If specified the output stream will be copied to specified file. + absl::optional output_dump_file_name; + + // Audio options to use. + cricket::AudioOptions audio_options; + // Sampling frequency of input audio data (from file or generated). + int sampling_frequency_in_hz = 48000; + // If specified, determines a sync group to which this audio stream belongs. + // According to bugs.webrtc.org/4762 WebRTC supports synchronization only + // for pair of single audio and single video stream. + absl::optional sync_group; + }; + + // This class is used to fully configure one peer inside the call. + class PeerConfigurer { + public: + virtual ~PeerConfigurer() = default; + + // Sets peer name that will be used to report metrics related to this peer. + // If not set, some default name will be assigned. All names have to be + // unique. + virtual PeerConfigurer* SetName(absl::string_view name) = 0; + + // The parameters of the following 9 methods will be passed to the + // PeerConnectionFactoryInterface implementation that will be created for + // this peer. 
+ virtual PeerConfigurer* SetTaskQueueFactory( + std::unique_ptr task_queue_factory) = 0; + virtual PeerConfigurer* SetCallFactory( + std::unique_ptr call_factory) = 0; + virtual PeerConfigurer* SetEventLogFactory( + std::unique_ptr event_log_factory) = 0; + virtual PeerConfigurer* SetFecControllerFactory( + std::unique_ptr + fec_controller_factory) = 0; + virtual PeerConfigurer* SetNetworkControllerFactory( + std::unique_ptr + network_controller_factory) = 0; + virtual PeerConfigurer* SetVideoEncoderFactory( + std::unique_ptr video_encoder_factory) = 0; + virtual PeerConfigurer* SetVideoDecoderFactory( + std::unique_ptr video_decoder_factory) = 0; + // Set a custom NetEqFactory to be used in the call. + virtual PeerConfigurer* SetNetEqFactory( + std::unique_ptr neteq_factory) = 0; + + // The parameters of the following 4 methods will be passed to the + // PeerConnectionInterface implementation that will be created for this + // peer. + virtual PeerConfigurer* SetAsyncResolverFactory( + std::unique_ptr + async_resolver_factory) = 0; + virtual PeerConfigurer* SetRTCCertificateGenerator( + std::unique_ptr + cert_generator) = 0; + virtual PeerConfigurer* SetSSLCertificateVerifier( + std::unique_ptr tls_cert_verifier) = 0; + virtual PeerConfigurer* SetIceTransportFactory( + std::unique_ptr factory) = 0; + + // Add new video stream to the call that will be sent from this peer. + // Default implementation of video frames generator will be used. + virtual PeerConfigurer* AddVideoConfig(VideoConfig config) = 0; + // Add new video stream to the call that will be sent from this peer with + // provided own implementation of video frames generator. + virtual PeerConfigurer* AddVideoConfig( + VideoConfig config, + std::unique_ptr generator) = 0; + // Add new video stream to the call that will be sent from this peer. + // Capturing device with specified index will be used to get input video. 
+ virtual PeerConfigurer* AddVideoConfig( + VideoConfig config, + CapturingDeviceIndex capturing_device_index) = 0; + // Set the audio stream for the call from this peer. If this method won't + // be invoked, this peer will send no audio. + virtual PeerConfigurer* SetAudioConfig(AudioConfig config) = 0; + // If is set, an RTCEventLog will be saved in that location and it will be + // available for further analysis. + virtual PeerConfigurer* SetRtcEventLogPath(std::string path) = 0; + // If is set, an AEC dump will be saved in that location and it will be + // available for further analysis. + virtual PeerConfigurer* SetAecDumpPath(std::string path) = 0; + virtual PeerConfigurer* SetRTCConfiguration( + PeerConnectionInterface::RTCConfiguration configuration) = 0; + // Set bitrate parameters on PeerConnection. This constraints will be + // applied to all summed RTP streams for this peer. + virtual PeerConfigurer* SetBitrateSettings( + BitrateSettings bitrate_settings) = 0; + }; + + // Contains configuration for echo emulator. + struct EchoEmulationConfig { + // Delay which represents the echo path delay, i.e. how soon rendered signal + // should reach capturer. + TimeDelta echo_delay = TimeDelta::Millis(50); + }; + + struct VideoCodecConfig { + explicit VideoCodecConfig(std::string name) + : name(std::move(name)), required_params() {} + VideoCodecConfig(std::string name, + std::map required_params) + : name(std::move(name)), required_params(std::move(required_params)) {} + // Next two fields are used to specify concrete video codec, that should be + // used in the test. Video code will be negotiated in SDP during offer/ + // answer exchange. + // Video codec name. You can find valid names in + // media/base/media_constants.h + std::string name = cricket::kVp8CodecName; + // Map of parameters, that have to be specified on SDP codec. Each parameter + // is described by key and value. 
Codec parameters will match the specified + // map if and only if for each key from |required_params| there will be + // a parameter with name equal to this key and parameter value will be equal + // to the value from |required_params| for this key. + // If empty then only name will be used to match the codec. + std::map required_params; + }; + + // Contains parameters, that describe how long framework should run quality + // test. + struct RunParams { + explicit RunParams(TimeDelta run_duration) : run_duration(run_duration) {} + + // Specifies how long the test should be run. This time shows how long + // the media should flow after connection was established and before + // it will be shut downed. + TimeDelta run_duration; + + // List of video codecs to use during the test. These codecs will be + // negotiated in SDP during offer/answer exchange. The order of these codecs + // during negotiation will be the same as in |video_codecs|. Codecs have + // to be available in codecs list provided by peer connection to be + // negotiated. If some of specified codecs won't be found, the test will + // crash. + // If list is empty Vp8 with no required_params will be used. + std::vector video_codecs; + bool use_ulp_fec = false; + bool use_flex_fec = false; + // Specifies how much video encoder target bitrate should be different than + // target bitrate, provided by WebRTC stack. Must be greater then 0. Can be + // used to emulate overshooting of video encoders. This multiplier will + // be applied for all video encoder on both sides for all layers. Bitrate + // estimated by WebRTC stack will be multiplied on this multiplier and then + // provided into VideoEncoder::SetRates(...). + double video_encoder_bitrate_multiplier = 1.0; + // If true will set conference mode in SDP media section for all video + // tracks for all peers. + bool use_conference_mode = false; + // If specified echo emulation will be done, by mixing the render audio into + // the capture signal. 
In such case input signal will be reduced by half to + // avoid saturation or compression in the echo path simulation. + absl::optional echo_emulation_config; + }; + + // Represent an entity that will report quality metrics after test. + class QualityMetricsReporter : public StatsObserverInterface { + public: + virtual ~QualityMetricsReporter() = default; + + // Invoked by framework after peer connection factory and peer connection + // itself will be created but before offer/answer exchange will be started. + // |test_case_name| is name of test case, that should be used to report all + // metrics. + // |reporter_helper| is a pointer to a class that will allow track_id to + // stream_id matching. The caller is responsible for ensuring the + // TrackIdStreamInfoMap will be valid from Start() to + // StopAndReportResults(). + virtual void Start(absl::string_view test_case_name, + const TrackIdStreamInfoMap* reporter_helper) = 0; + + // Invoked by framework after call is ended and peer connection factory and + // peer connection are destroyed. + virtual void StopAndReportResults() = 0; + }; + + virtual ~PeerConnectionE2EQualityTestFixture() = default; + + // Add activity that will be executed on the best effort at least after + // |target_time_since_start| after call will be set up (after offer/answer + // exchange, ICE gathering will be done and ICE candidates will passed to + // remote side). |func| param is amount of time spent from the call set up. + virtual void ExecuteAt(TimeDelta target_time_since_start, + std::function func) = 0; + // Add activity that will be executed every |interval| with first execution + // on the best effort at least after |initial_delay_since_start| after call + // will be set up (after all participants will be connected). |func| param is + // amount of time spent from the call set up. 
+ virtual void ExecuteEvery(TimeDelta initial_delay_since_start, + TimeDelta interval, + std::function func) = 0; + + // Add stats reporter entity to observe the test. + virtual void AddQualityMetricsReporter( + std::unique_ptr quality_metrics_reporter) = 0; + + // Add a new peer to the call and return an object through which caller + // can configure peer's behavior. + // |network_thread| will be used as network thread for peer's peer connection + // |network_manager| will be used to provide network interfaces for peer's + // peer connection. + // |configurer| function will be used to configure peer in the call. + virtual void AddPeer(rtc::Thread* network_thread, + rtc::NetworkManager* network_manager, + rtc::FunctionView configurer) = 0; + // Runs the media quality test, which includes setting up the call with + // configured participants, running it according to provided |run_params| and + // terminating it properly at the end. During call duration media quality + // metrics are gathered, which are then reported to stdout and (if configured) + // to the json/protobuf output file through the WebRTC perf test results + // reporting system. + virtual void Run(RunParams run_params) = 0; + + // Returns real test duration - the time of test execution measured during + // test. Client must call this method only after test is finished (after + // Run(...) method returned). Test execution time is time from end of call + // setup (offer/answer, ICE candidates exchange done and ICE connected) to + // start of call tear down (PeerConnection closed). + virtual TimeDelta GetRealTestDuration() const = 0; +}; + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // API_TEST_PEERCONNECTION_QUALITY_TEST_FIXTURE_H_ diff --git a/api/test/simulated_network.h b/api/test/simulated_network.h new file mode 100644 index 0000000..3fba61f --- /dev/null +++ b/api/test/simulated_network.h @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_SIMULATED_NETWORK_H_ +#define API_TEST_SIMULATED_NETWORK_H_ + +#include +#include + +#include +#include +#include + +#include "absl/types/optional.h" +#include "rtc_base/random.h" +#include "rtc_base/thread_annotations.h" + +namespace webrtc { + +struct PacketInFlightInfo { + PacketInFlightInfo(size_t size, int64_t send_time_us, uint64_t packet_id) + : size(size), send_time_us(send_time_us), packet_id(packet_id) {} + + size_t size; + int64_t send_time_us; + // Unique identifier for the packet in relation to other packets in flight. + uint64_t packet_id; +}; + +struct PacketDeliveryInfo { + static constexpr int kNotReceived = -1; + PacketDeliveryInfo(PacketInFlightInfo source, int64_t receive_time_us) + : receive_time_us(receive_time_us), packet_id(source.packet_id) {} + int64_t receive_time_us; + uint64_t packet_id; +}; + +// BuiltInNetworkBehaviorConfig is a built-in network behavior configuration +// for built-in network behavior that will be used by WebRTC if no custom +// NetworkBehaviorInterface is provided. +struct BuiltInNetworkBehaviorConfig { + BuiltInNetworkBehaviorConfig() {} + // Queue length in number of packets. + size_t queue_length_packets = 0; + // Delay in addition to capacity induced delay. + int queue_delay_ms = 0; + // Standard deviation of the extra delay. + int delay_standard_deviation_ms = 0; + // Link capacity in kbps. + int link_capacity_kbps = 0; + // Random packet loss. + int loss_percent = 0; + // If packets are allowed to be reordered. + bool allow_reordering = false; + // The average length of a burst of lost packets. 
+ int avg_burst_loss_length = -1; + // Additional bytes to add to packet size. + int packet_overhead = 0; + // Enable CoDel active queue management. + bool codel_active_queue_management = false; +}; + +class NetworkBehaviorInterface { + public: + virtual bool EnqueuePacket(PacketInFlightInfo packet_info) = 0; + // Retrieves all packets that should be delivered by the given receive time. + virtual std::vector DequeueDeliverablePackets( + int64_t receive_time_us) = 0; + // Returns time in microseconds when caller should call + // DequeueDeliverablePackets to get next set of packets to deliver. + virtual absl::optional NextDeliveryTimeUs() const = 0; + virtual ~NetworkBehaviorInterface() = default; +}; + +// Class simulating a network link. This is a simple and naive solution just +// faking capacity and adding an extra transport delay in addition to the +// capacity introduced delay. +class SimulatedNetworkInterface : public NetworkBehaviorInterface { + public: + // Sets a new configuration. This won't affect packets already in the pipe. + virtual void SetConfig(const BuiltInNetworkBehaviorConfig& config) = 0; + virtual void UpdateConfig( + std::function config_modifier) = 0; + virtual void PauseTransmissionUntil(int64_t until_us) = 0; +}; + +} // namespace webrtc + +#endif // API_TEST_SIMULATED_NETWORK_H_ diff --git a/api/test/simulcast_test_fixture.h b/api/test/simulcast_test_fixture.h new file mode 100644 index 0000000..5270d13 --- /dev/null +++ b/api/test/simulcast_test_fixture.h @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_SIMULCAST_TEST_FIXTURE_H_ +#define API_TEST_SIMULCAST_TEST_FIXTURE_H_ + +namespace webrtc { +namespace test { + +class SimulcastTestFixture { + public: + virtual ~SimulcastTestFixture() = default; + + virtual void TestKeyFrameRequestsOnAllStreams() = 0; + virtual void TestPaddingAllStreams() = 0; + virtual void TestPaddingTwoStreams() = 0; + virtual void TestPaddingTwoStreamsOneMaxedOut() = 0; + virtual void TestPaddingOneStream() = 0; + virtual void TestPaddingOneStreamTwoMaxedOut() = 0; + virtual void TestSendAllStreams() = 0; + virtual void TestDisablingStreams() = 0; + virtual void TestActiveStreams() = 0; + virtual void TestSwitchingToOneStream() = 0; + virtual void TestSwitchingToOneOddStream() = 0; + virtual void TestSwitchingToOneSmallStream() = 0; + virtual void TestSpatioTemporalLayers333PatternEncoder() = 0; + virtual void TestSpatioTemporalLayers321PatternEncoder() = 0; + virtual void TestStrideEncodeDecode() = 0; + virtual void TestDecodeWidthHeightSet() = 0; +}; + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_SIMULCAST_TEST_FIXTURE_H_ diff --git a/api/test/stats_observer_interface.h b/api/test/stats_observer_interface.h new file mode 100644 index 0000000..ea4d6c2 --- /dev/null +++ b/api/test/stats_observer_interface.h @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_STATS_OBSERVER_INTERFACE_H_ +#define API_TEST_STATS_OBSERVER_INTERFACE_H_ + +#include "absl/strings/string_view.h" +#include "api/stats/rtc_stats_report.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +// API is in development and can be changed without notice. +class StatsObserverInterface { + public: + virtual ~StatsObserverInterface() = default; + + // Method called when stats reports are available for the PeerConnection + // identified by |pc_label|. + virtual void OnStatsReports( + absl::string_view pc_label, + const rtc::scoped_refptr& report) = 0; +}; + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // API_TEST_STATS_OBSERVER_INTERFACE_H_ diff --git a/api/test/test_dependency_factory.cc b/api/test/test_dependency_factory.cc new file mode 100644 index 0000000..41ad70c --- /dev/null +++ b/api/test/test_dependency_factory.cc @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/test/test_dependency_factory.h" + +#include +#include + +#include "rtc_base/checks.h" +#include "rtc_base/platform_thread_types.h" + +namespace webrtc { + +namespace { +// This checks everything in this file gets called on the same thread. It's +// static because it needs to look at the static methods too. 
+bool IsValidTestDependencyFactoryThread() { + const rtc::PlatformThreadRef main_thread = rtc::CurrentThreadRef(); + return rtc::IsThreadRefEqual(main_thread, rtc::CurrentThreadRef()); +} +} // namespace + +std::unique_ptr TestDependencyFactory::instance_ = + nullptr; + +const TestDependencyFactory& TestDependencyFactory::GetInstance() { + RTC_DCHECK(IsValidTestDependencyFactoryThread()); + if (instance_ == nullptr) { + instance_ = std::make_unique(); + } + return *instance_; +} + +void TestDependencyFactory::SetInstance( + std::unique_ptr instance) { + RTC_DCHECK(IsValidTestDependencyFactoryThread()); + RTC_CHECK(instance_ == nullptr); + instance_ = std::move(instance); +} + +std::unique_ptr +TestDependencyFactory::CreateComponents() const { + RTC_DCHECK(IsValidTestDependencyFactoryThread()); + return nullptr; +} + +} // namespace webrtc diff --git a/api/test/test_dependency_factory.h b/api/test/test_dependency_factory.h new file mode 100644 index 0000000..29f00b8 --- /dev/null +++ b/api/test/test_dependency_factory.h @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_TEST_DEPENDENCY_FACTORY_H_ +#define API_TEST_TEST_DEPENDENCY_FACTORY_H_ + +#include + +#include "api/test/video_quality_test_fixture.h" + +namespace webrtc { + +// Override this class to inject custom components into WebRTC tests. +// Not all WebRTC tests get their components from here, so you need to make +// sure the tests you want actually use this class. +// +// This class is not thread safe and you need to make all calls from the same +// (test main) thread.
+class TestDependencyFactory { + public: + virtual ~TestDependencyFactory() = default; + + // The singleton MUST be stateless since tests execute in any order. It must + // be set before tests start executing. + static const TestDependencyFactory& GetInstance(); + static void SetInstance(std::unique_ptr instance); + + // Returns the component a test should use. Returning nullptr means that the + // test is free to use whatever defaults it wants. The injection components + // themselves can be mutable, but we need to make new ones for every test that + // executes so state doesn't spread between tests. + virtual std::unique_ptr + CreateComponents() const; + + private: + static std::unique_ptr instance_; +}; + +} // namespace webrtc + +#endif // API_TEST_TEST_DEPENDENCY_FACTORY_H_ diff --git a/api/test/time_controller.cc b/api/test/time_controller.cc new file mode 100644 index 0000000..364dbc2 --- /dev/null +++ b/api/test/time_controller.cc @@ -0,0 +1,43 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "api/test/time_controller.h" + +namespace webrtc { +std::unique_ptr TimeController::CreateTaskQueueFactory() { + class FactoryWrapper final : public TaskQueueFactory { + public: + explicit FactoryWrapper(TaskQueueFactory* inner_factory) + : inner_(inner_factory) {} + std::unique_ptr CreateTaskQueue( + absl::string_view name, + Priority priority) const override { + return inner_->CreateTaskQueue(name, priority); + } + + private: + TaskQueueFactory* const inner_; + }; + return std::make_unique(GetTaskQueueFactory()); +} +bool TimeController::Wait(const std::function& condition, + TimeDelta max_duration) { + // Step size is chosen to be short enough to not significantly affect latency + // in real time tests while being long enough to avoid adding too much load to + // the system. + const auto kStep = TimeDelta::Millis(5); + for (auto elapsed = TimeDelta::Zero(); elapsed < max_duration; + elapsed += kStep) { + if (condition()) + return true; + AdvanceTime(kStep); + } + return condition(); +} +} // namespace webrtc diff --git a/api/test/time_controller.h b/api/test/time_controller.h new file mode 100644 index 0000000..bd3192d --- /dev/null +++ b/api/test/time_controller.h @@ -0,0 +1,93 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef API_TEST_TIME_CONTROLLER_H_ +#define API_TEST_TIME_CONTROLLER_H_ + +#include +#include +#include + +#include "api/task_queue/task_queue_factory.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/utility/include/process_thread.h" +#include "rtc_base/synchronization/yield_policy.h" +#include "rtc_base/thread.h" +#include "system_wrappers/include/clock.h" + +namespace webrtc { +// Interface for controlling time progress. This allows us to execute test code +// in either real time or simulated time by using different implementations of +// this interface. +class TimeController { + public: + virtual ~TimeController() = default; + // Provides a clock instance that follows implementation defined time + // progress. + virtual Clock* GetClock() = 0; + // The returned factory will create task queues that run in implementation + // defined time domain. + virtual TaskQueueFactory* GetTaskQueueFactory() = 0; + // Simple helper to create an owned factory that can be used as a parameter + // for PeerConnectionFactory. Note that this might depend on the underlying + // time controller and therefore must be destroyed before the time controller + // is destroyed. + std::unique_ptr CreateTaskQueueFactory(); + + // Creates a process thread. + virtual std::unique_ptr CreateProcessThread( + const char* thread_name) = 0; + // Creates an rtc::Thread instance. If |socket_server| is nullptr, a default + // noop socket server is created. + // Returned thread is not null and started. + virtual std::unique_ptr CreateThread( + const std::string& name, + std::unique_ptr socket_server = nullptr) = 0; + + // Creates an rtc::Thread instance that ensures that it's set as the current + // thread. + virtual rtc::Thread* GetMainThread() = 0; + // Allow task queues and process threads created by this instance to execute + // for the given |duration|.
+ virtual void AdvanceTime(TimeDelta duration) = 0; + + // Waits until condition() == true, polling condition() in small time + // intervals. + // Returns true if condition() was evaluated to true before |max_duration| + // elapsed and false otherwise. + bool Wait(const std::function& condition, + TimeDelta max_duration = TimeDelta::Seconds(5)); +}; + +// Interface for telling time, scheduling an event to fire at a particular time, +// and waiting for time to pass. +class ControlledAlarmClock { + public: + virtual ~ControlledAlarmClock() = default; + + // Gets a clock that tells the alarm clock's notion of time. + virtual Clock* GetClock() = 0; + + // Schedules the alarm to fire at |deadline|. + // An alarm clock only supports one deadline. Calls to |ScheduleAlarmAt| with + // an earlier deadline will reset the alarm to fire earlier. Calls to + // |ScheduleAlarmAt| with a later deadline are ignored. Returns true if the + // deadline changed, false otherwise. + virtual bool ScheduleAlarmAt(Timestamp deadline) = 0; + + // Sets the callback that should be run when the alarm fires. + virtual void SetCallback(std::function callback) = 0; + + // Waits for |duration| to pass, according to the alarm clock. + virtual void Sleep(TimeDelta duration) = 0; +}; + +} // namespace webrtc +#endif // API_TEST_TIME_CONTROLLER_H_ diff --git a/api/test/track_id_stream_info_map.h b/api/test/track_id_stream_info_map.h new file mode 100644 index 0000000..bb73cfd --- /dev/null +++ b/api/test/track_id_stream_info_map.h @@ -0,0 +1,42 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree.
+ */ + +#ifndef API_TEST_TRACK_ID_STREAM_INFO_MAP_H_ +#define API_TEST_TRACK_ID_STREAM_INFO_MAP_H_ + +#include "absl/strings/string_view.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +// Instances of |TrackIdStreamInfoMap| provide bookkeeping capabilities that +// are useful to associate stats reports track_ids to the remote stream info. +class TrackIdStreamInfoMap { + public: + virtual ~TrackIdStreamInfoMap() = default; + + // These methods must be called on the same thread where + // StatsObserverInterface::OnStatsReports is invoked. + + // Returns a reference to a stream label owned by the TrackIdStreamInfoMap. + // Precondition: |track_id| must be already mapped to stream label. + virtual absl::string_view GetStreamLabelFromTrackId( + absl::string_view track_id) const = 0; + + // Returns a reference to a sync group name owned by the TrackIdStreamInfoMap. + // Precondition: |track_id| must be already mapped to sync group. + virtual absl::string_view GetSyncGroupLabelFromTrackId( + absl::string_view track_id) const = 0; +}; + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // API_TEST_TRACK_ID_STREAM_INFO_MAP_H_ diff --git a/api/test/video/BUILD.gn b/api/test/video/BUILD.gn new file mode 100644 index 0000000..4ebb0c9 --- /dev/null +++ b/api/test/video/BUILD.gn @@ -0,0 +1,23 @@ +# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../../webrtc.gni") + +rtc_library("function_video_factory") { + visibility = [ "*" ] + testonly = true + public = [ + "function_video_decoder_factory.h", + "function_video_encoder_factory.h", + ] + + deps = [ + "../../../rtc_base:checks", + "../../video_codecs:video_codecs_api", + ] +} diff --git a/api/test/video/function_video_decoder_factory.h b/api/test/video/function_video_decoder_factory.h new file mode 100644 index 0000000..86abdd0 --- /dev/null +++ b/api/test/video/function_video_decoder_factory.h @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_VIDEO_FUNCTION_VIDEO_DECODER_FACTORY_H_ +#define API_TEST_VIDEO_FUNCTION_VIDEO_DECODER_FACTORY_H_ + +#include +#include +#include +#include + +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "rtc_base/checks.h" + +namespace webrtc { +namespace test { + +// A decoder factory producing decoders by calling a supplied create function. 
+class FunctionVideoDecoderFactory final : public VideoDecoderFactory { + public: + explicit FunctionVideoDecoderFactory( + std::function()> create) + : create_([create = std::move(create)](const SdpVideoFormat&) { + return create(); + }) {} + explicit FunctionVideoDecoderFactory( + std::function(const SdpVideoFormat&)> + create) + : create_(std::move(create)) {} + FunctionVideoDecoderFactory( + std::function()> create, + std::vector sdp_video_formats) + : create_([create = std::move(create)](const SdpVideoFormat&) { + return create(); + }), + sdp_video_formats_(std::move(sdp_video_formats)) {} + + std::vector GetSupportedFormats() const override { + return sdp_video_formats_; + } + + std::unique_ptr CreateVideoDecoder( + const SdpVideoFormat& format) override { + return create_(format); + } + + private: + const std::function(const SdpVideoFormat&)> + create_; + const std::vector sdp_video_formats_; +}; + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_VIDEO_FUNCTION_VIDEO_DECODER_FACTORY_H_ diff --git a/api/test/video/function_video_encoder_factory.h b/api/test/video/function_video_encoder_factory.h new file mode 100644 index 0000000..a452eee --- /dev/null +++ b/api/test/video/function_video_encoder_factory.h @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_VIDEO_FUNCTION_VIDEO_ENCODER_FACTORY_H_ +#define API_TEST_VIDEO_FUNCTION_VIDEO_ENCODER_FACTORY_H_ + +#include +#include +#include +#include + +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "rtc_base/checks.h" + +namespace webrtc { +namespace test { + +// An encoder factory producing encoders by calling a supplied create +// function. +class FunctionVideoEncoderFactory final : public VideoEncoderFactory { + public: + explicit FunctionVideoEncoderFactory( + std::function()> create) + : create_([create = std::move(create)](const SdpVideoFormat&) { + return create(); + }) {} + explicit FunctionVideoEncoderFactory( + std::function(const SdpVideoFormat&)> + create) + : create_(std::move(create)) {} + + // Unused by tests. + std::vector GetSupportedFormats() const override { + RTC_NOTREACHED(); + return {}; + } + + std::unique_ptr CreateVideoEncoder( + const SdpVideoFormat& format) override { + return create_(format); + } + + private: + const std::function(const SdpVideoFormat&)> + create_; +}; + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_VIDEO_FUNCTION_VIDEO_ENCODER_FACTORY_H_ diff --git a/api/test/video_quality_analyzer_interface.h b/api/test/video_quality_analyzer_interface.h new file mode 100644 index 0000000..c5370a7 --- /dev/null +++ b/api/test/video_quality_analyzer_interface.h @@ -0,0 +1,151 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_VIDEO_QUALITY_ANALYZER_INTERFACE_H_ +#define API_TEST_VIDEO_QUALITY_ANALYZER_INTERFACE_H_ + +#include +#include + +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/test/stats_observer_interface.h" +#include "api/video/encoded_image.h" +#include "api/video/video_frame.h" +#include "api/video_codecs/video_encoder.h" + +namespace webrtc { +namespace webrtc_pc_e2e { + +// API is in development and can be changed without notice. + +// Base interface for video quality analyzer for peer connection level end-2-end +// tests. Interface has only one abstract method, which has to return frame id. +// Other methods have empty implementation by default, so user can override only +// required parts. +// +// VideoQualityAnalyzerInterface will be injected into WebRTC pipeline on both +// sides of the call. Here is video data flow in WebRTC pipeline +// +// Alice: +// ___________ ________ _________ +// | | | | | | +// | Frame |-(A)→| WebRTC |-(B)→| Video |-(C)┐ +// | Generator | | Stack | | Encoder | | +// ¯¯¯¯¯¯¯¯¯¯¯ ¯¯¯¯¯¯¯¯ ¯¯¯¯¯¯¯¯¯ | +// __↓________ +// | Transport | +// | & | +// | Network | +// ¯¯|¯¯¯¯¯¯¯¯ +// Bob: | +// _______ ________ _________ | +// | | | | | | | +// | Video |←(F)-| WebRTC |←(E)-| Video |←(D)----┘ +// | Sink | | Stack | | Decoder | +// ¯¯¯¯¯¯¯ ¯¯¯¯¯¯¯¯ ¯¯¯¯¯¯¯¯¯ +// The analyzer will be injected in all points from A to F. +class VideoQualityAnalyzerInterface : public StatsObserverInterface { + public: + // Contains extra statistic provided by video encoder. + struct EncoderStats { + // TODO(hbos) https://crbug.com/webrtc/9547, + // https://crbug.com/webrtc/11443: improve stats API to make it available + // there. + uint32_t target_encode_bitrate; + }; + // Contains extra statistic provided by video decoder. + struct DecoderStats { + // Decode time provided by decoder itself. If the decoder doesn’t produce + // such information, it can be omitted.
+ absl::optional decode_time_ms; + }; + + ~VideoQualityAnalyzerInterface() override = default; + + // Will be called by framework before test. + // |test_case_name| is name of test case, that should be used to report all + // video metrics. + // |threads_count| is number of threads that analyzer can use for heavy + // calculations. Analyzer can perform simple calculations on the calling + // thread in each method, but should remember, that it is the same thread, + // that is used in video pipeline. + virtual void Start(std::string test_case_name, + rtc::ArrayView peer_names, + int max_threads_count) {} + + // Will be called when frame was generated from the input stream. + // |peer_name| is name of the peer on which side frame was captured. + // Returns frame id, that will be set by framework to the frame. + virtual uint16_t OnFrameCaptured(absl::string_view peer_name, + const std::string& stream_label, + const VideoFrame& frame) = 0; + // Will be called before calling the encoder. + // |peer_name| is name of the peer on which side frame came to encoder. + virtual void OnFramePreEncode(absl::string_view peer_name, + const VideoFrame& frame) {} + // Will be called for each EncodedImage received from encoder. Single + // VideoFrame can produce multiple EncodedImages. Each encoded image will + // have id from VideoFrame. + // |peer_name| is name of the peer on which side frame was encoded. + virtual void OnFrameEncoded(absl::string_view peer_name, + uint16_t frame_id, + const EncodedImage& encoded_image, + const EncoderStats& stats) {} + // Will be called for each frame dropped by encoder. + // |peer_name| is name of the peer on which side frame drop was detected. + virtual void OnFrameDropped(absl::string_view peer_name, + EncodedImageCallback::DropReason reason) {} + // Will be called before calling the decoder. + // |peer_name| is name of the peer on which side frame was received. 
+ virtual void OnFramePreDecode(absl::string_view peer_name, + uint16_t frame_id, + const EncodedImage& encoded_image) {} + // Will be called after decoding the frame. + // |peer_name| is name of the peer on which side frame was decoded. + virtual void OnFrameDecoded(absl::string_view peer_name, + const VideoFrame& frame, + const DecoderStats& stats) {} + // Will be called when frame will be obtained from PeerConnection stack. + // |peer_name| is name of the peer on which side frame was rendered. + virtual void OnFrameRendered(absl::string_view peer_name, + const VideoFrame& frame) {} + // Will be called if encoder return not WEBRTC_VIDEO_CODEC_OK. + // All available codes are listed in + // modules/video_coding/include/video_error_codes.h + // |peer_name| is name of the peer on which side error acquired. + virtual void OnEncoderError(absl::string_view peer_name, + const VideoFrame& frame, + int32_t error_code) {} + // Will be called if decoder return not WEBRTC_VIDEO_CODEC_OK. + // All available codes are listed in + // modules/video_coding/include/video_error_codes.h + // |peer_name| is name of the peer on which side error acquired. + virtual void OnDecoderError(absl::string_view peer_name, + uint16_t frame_id, + int32_t error_code) {} + // Will be called every time new stats reports are available for the + // Peer Connection identified by |pc_label|. + void OnStatsReports( + absl::string_view pc_label, + const rtc::scoped_refptr& report) override {} + + // Tells analyzer that analysis complete and it should calculate final + // statistics. 
+ virtual void Stop() {} + + virtual std::string GetStreamLabel(uint16_t frame_id) = 0; +}; + +} // namespace webrtc_pc_e2e +} // namespace webrtc + +#endif // API_TEST_VIDEO_QUALITY_ANALYZER_INTERFACE_H_ diff --git a/api/test/video_quality_test_fixture.h b/api/test/video_quality_test_fixture.h new file mode 100644 index 0000000..92c398a --- /dev/null +++ b/api/test/video_quality_test_fixture.h @@ -0,0 +1,142 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_VIDEO_QUALITY_TEST_FIXTURE_H_ +#define API_TEST_VIDEO_QUALITY_TEST_FIXTURE_H_ + +#include +#include +#include +#include + +#include "api/fec_controller.h" +#include "api/media_types.h" +#include "api/network_state_predictor.h" +#include "api/test/simulated_network.h" +#include "api/transport/bitrate_settings.h" +#include "api/transport/network_control.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_config.h" +#include "api/video_codecs/video_encoder_factory.h" + +namespace webrtc { + +class VideoQualityTestFixtureInterface { + public: + // Parameters are grouped into smaller structs to make it easier to set + // the desired elements and skip unused. + struct Params { + struct CallConfig { + bool send_side_bwe = false; + bool generic_descriptor = false; + BitrateConstraints call_bitrate_config; + int num_thumbnails = 0; + // Indicates if secondary_(video|ss|screenshare) structures are used. 
+ bool dual_video = false; + } call; + struct Video { + bool enabled = false; + size_t width = 640; + size_t height = 480; + int32_t fps = 30; + int min_bitrate_bps = 50; + int target_bitrate_bps = 800; + int max_bitrate_bps = 800; + bool suspend_below_min_bitrate = false; + std::string codec = "VP8"; + int num_temporal_layers = 1; + int selected_tl = -1; + int min_transmit_bps = 0; + bool ulpfec = false; + bool flexfec = false; + bool automatic_scaling = false; + std::string clip_path; // "Generator" to generate frames instead. + size_t capture_device_index = 0; + SdpVideoFormat::Parameters sdp_params; + double encoder_overshoot_factor = 0.0; + } video[2]; + struct Audio { + bool enabled = false; + bool sync_video = false; + bool dtx = false; + bool use_real_adm = false; + absl::optional ana_config; + } audio; + struct Screenshare { + bool enabled = false; + bool generate_slides = false; + int32_t slide_change_interval = 10; + int32_t scroll_duration = 0; + std::vector slides; + } screenshare[2]; + struct Analyzer { + std::string test_label; + double avg_psnr_threshold = 0.0; // (*) + double avg_ssim_threshold = 0.0; // (*) + int test_durations_secs = 0; + std::string graph_data_output_filename; + std::string graph_title; + } analyzer; + // Config for default simulation implementation. Must be nullopt if + // `sender_network` and `receiver_network` in InjectionComponents are + // non-null. May be nullopt even if `sender_network` and `receiver_network` + // are null; in that case, a default config will be used. + absl::optional config; + struct SS { // Spatial scalability. + std::vector streams; // If empty, one stream is assumed. + size_t selected_stream = 0; + int num_spatial_layers = 0; + int selected_sl = -1; + InterLayerPredMode inter_layer_pred = InterLayerPredMode::kOn; + // If empty, bitrates are generated in VP9Impl automatically. + std::vector spatial_layers; + // If set, default parameters will be used instead of |streams|. 
+ bool infer_streams = false; + } ss[2]; + struct Logging { + std::string rtc_event_log_name; + std::string rtp_dump_name; + std::string encoded_frame_base_path; + } logging; + }; + + // Contains objects, that will be injected on different layers of test + // framework to override the behavior of system parts. + struct InjectionComponents { + InjectionComponents(); + ~InjectionComponents(); + + // Simulations of sender and receiver networks. They must either both be + // null (in which case `config` from Params is used), or both be non-null + // (in which case `config` from Params must be nullopt). + std::unique_ptr sender_network; + std::unique_ptr receiver_network; + + std::unique_ptr fec_controller_factory; + std::unique_ptr video_encoder_factory; + std::unique_ptr video_decoder_factory; + std::unique_ptr + network_state_predictor_factory; + std::unique_ptr + network_controller_factory; + }; + + virtual ~VideoQualityTestFixtureInterface() = default; + + virtual void RunWithAnalyzer(const Params& params) = 0; + virtual void RunWithRenderers(const Params& params) = 0; + + virtual const std::map& payload_type_map() = 0; +}; + +} // namespace webrtc + +#endif // API_TEST_VIDEO_QUALITY_TEST_FIXTURE_H_ diff --git a/api/test/videocodec_test_fixture.h b/api/test/videocodec_test_fixture.h new file mode 100644 index 0000000..2ed4091 --- /dev/null +++ b/api/test/videocodec_test_fixture.h @@ -0,0 +1,155 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TEST_VIDEOCODEC_TEST_FIXTURE_H_ +#define API_TEST_VIDEOCODEC_TEST_FIXTURE_H_ + +#include +#include + +#include "api/test/videocodec_test_stats.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder_factory.h" +#include "media/base/h264_profile_level_id.h" +#include "modules/video_coding/include/video_codec_interface.h" + +namespace webrtc { +namespace test { + +// Rates for the encoder and the frame number when to apply profile. +struct RateProfile { + size_t target_kbps; + double input_fps; + size_t frame_num; +}; + +struct RateControlThresholds { + double max_avg_bitrate_mismatch_percent; + double max_time_to_reach_target_bitrate_sec; + // TODO(ssilkin): Use absolute threshold for framerate. + double max_avg_framerate_mismatch_percent; + double max_avg_buffer_level_sec; + double max_max_key_frame_delay_sec; + double max_max_delta_frame_delay_sec; + size_t max_num_spatial_resizes; + size_t max_num_key_frames; +}; + +struct QualityThresholds { + double min_avg_psnr; + double min_min_psnr; + double min_avg_ssim; + double min_min_ssim; +}; + +struct BitstreamThresholds { + size_t max_max_nalu_size_bytes; +}; + +// NOTE: This class is still under development and may change without notice. 
+class VideoCodecTestFixture { + public: + class EncodedFrameChecker { + public: + virtual ~EncodedFrameChecker() = default; + virtual void CheckEncodedFrame(webrtc::VideoCodecType codec, + const EncodedImage& encoded_frame) const = 0; + }; + + struct Config { + Config(); + void SetCodecSettings(std::string codec_name, + size_t num_simulcast_streams, + size_t num_spatial_layers, + size_t num_temporal_layers, + bool denoising_on, + bool frame_dropper_on, + bool spatial_resize_on, + size_t width, + size_t height); + + size_t NumberOfCores() const; + size_t NumberOfTemporalLayers() const; + size_t NumberOfSpatialLayers() const; + size_t NumberOfSimulcastStreams() const; + + std::string ToString() const; + std::string CodecName() const; + + // Name of this config, to be used for accounting by the test runner. + std::string test_name; + + // Plain name of YUV file to process without file extension. + std::string filename; + + // File to process. This must be a video file in the YUV format. + std::string filepath; + + // Number of frames to process. + size_t num_frames = 0; + + // Bitstream constraints. + size_t max_payload_size_bytes = 1440; + + // Should we decode the encoded frames? + bool decode = true; + + // Force the encoder and decoder to use a single core for processing. + bool use_single_core = false; + + // Should cpu usage be measured? + // If set to true, the encoding will run in real-time. + bool measure_cpu = false; + + // Simulate frames arriving in real-time by adding delays between frames. + bool encode_in_real_time = false; + + // Codec settings to use. + webrtc::VideoCodec codec_settings; + + // Name of the codec being tested. + std::string codec_name; + + // H.264 specific settings. + struct H264CodecSettings { + H264::Profile profile = H264::kProfileConstrainedBaseline; + H264PacketizationMode packetization_mode = + webrtc::H264PacketizationMode::NonInterleaved; + } h264_codec_settings; + + // Custom checker that will be called for each frame. 
+ const EncodedFrameChecker* encoded_frame_checker = nullptr; + + // Print out frame level stats. + bool print_frame_level_stats = false; + + // Path to a directory where encoded or/and decoded video should be saved. + std::string output_path; + + // Should video be saved persistently to disk for post-run visualization? + struct VisualizationParams { + bool save_encoded_ivf = false; + bool save_decoded_y4m = false; + } visualization_params; + }; + + virtual ~VideoCodecTestFixture() = default; + + virtual void RunTest(const std::vector& rate_profiles, + const std::vector* rc_thresholds, + const std::vector* quality_thresholds, + const BitstreamThresholds* bs_thresholds) = 0; + virtual VideoCodecTestStats& GetStats() = 0; +}; + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_VIDEOCODEC_TEST_FIXTURE_H_ diff --git a/api/test/videocodec_test_stats.cc b/api/test/videocodec_test_stats.cc new file mode 100644 index 0000000..b2f88a4 --- /dev/null +++ b/api/test/videocodec_test_stats.cc @@ -0,0 +1,94 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/test/videocodec_test_stats.h" + +#include "rtc_base/strings/string_builder.h" + +namespace webrtc { +namespace test { + +VideoCodecTestStats::FrameStatistics::FrameStatistics(size_t frame_number, + size_t rtp_timestamp, + size_t spatial_idx) + : frame_number(frame_number), + rtp_timestamp(rtp_timestamp), + spatial_idx(spatial_idx) {} + +std::string VideoCodecTestStats::FrameStatistics::ToString() const { + rtc::StringBuilder ss; + ss << "frame_number " << frame_number; + ss << " decoded_width " << decoded_width; + ss << " decoded_height " << decoded_height; + ss << " spatial_idx " << spatial_idx; + ss << " temporal_idx " << temporal_idx; + ss << " inter_layer_predicted " << inter_layer_predicted; + ss << " non_ref_for_inter_layer_pred " << non_ref_for_inter_layer_pred; + ss << " frame_type " << static_cast(frame_type); + ss << " length_bytes " << length_bytes; + ss << " qp " << qp; + ss << " psnr " << psnr; + ss << " psnr_y " << psnr_y; + ss << " psnr_u " << psnr_u; + ss << " psnr_v " << psnr_v; + ss << " ssim " << ssim; + ss << " encode_time_us " << encode_time_us; + ss << " decode_time_us " << decode_time_us; + ss << " rtp_timestamp " << rtp_timestamp; + ss << " target_bitrate_kbps " << target_bitrate_kbps; + ss << " target_framerate_fps " << target_framerate_fps; + return ss.Release(); +} + +std::string VideoCodecTestStats::VideoStatistics::ToString( + std::string prefix) const { + rtc::StringBuilder ss; + ss << prefix << "target_bitrate_kbps: " << target_bitrate_kbps; + ss << "\n" << prefix << "input_framerate_fps: " << input_framerate_fps; + ss << "\n" << prefix << "spatial_idx: " << spatial_idx; + ss << "\n" << prefix << "temporal_idx: " << temporal_idx; + ss << "\n" << prefix << "width: " << width; + ss << "\n" << prefix << "height: " << height; + ss << "\n" << prefix << "length_bytes: " << length_bytes; + ss << "\n" << prefix << "bitrate_kbps: " << bitrate_kbps; + ss << "\n" << prefix << "framerate_fps: " << framerate_fps; + ss << "\n" 
<< prefix << "enc_speed_fps: " << enc_speed_fps; + ss << "\n" << prefix << "dec_speed_fps: " << dec_speed_fps; + ss << "\n" << prefix << "avg_delay_sec: " << avg_delay_sec; + ss << "\n" + << prefix << "max_key_frame_delay_sec: " << max_key_frame_delay_sec; + ss << "\n" + << prefix << "max_delta_frame_delay_sec: " << max_delta_frame_delay_sec; + ss << "\n" + << prefix << "time_to_reach_target_bitrate_sec: " + << time_to_reach_target_bitrate_sec; + ss << "\n" + << prefix << "avg_key_frame_size_bytes: " << avg_key_frame_size_bytes; + ss << "\n" + << prefix << "avg_delta_frame_size_bytes: " << avg_delta_frame_size_bytes; + ss << "\n" << prefix << "avg_qp: " << avg_qp; + ss << "\n" << prefix << "avg_psnr: " << avg_psnr; + ss << "\n" << prefix << "min_psnr: " << min_psnr; + ss << "\n" << prefix << "avg_ssim: " << avg_ssim; + ss << "\n" << prefix << "min_ssim: " << min_ssim; + ss << "\n" << prefix << "num_input_frames: " << num_input_frames; + ss << "\n" << prefix << "num_encoded_frames: " << num_encoded_frames; + ss << "\n" << prefix << "num_decoded_frames: " << num_decoded_frames; + ss << "\n" + << prefix + << "num_dropped_frames: " << num_input_frames - num_encoded_frames; + ss << "\n" << prefix << "num_key_frames: " << num_key_frames; + ss << "\n" << prefix << "num_spatial_resizes: " << num_spatial_resizes; + ss << "\n" << prefix << "max_nalu_size_bytes: " << max_nalu_size_bytes; + return ss.Release(); +} + +} // namespace test +} // namespace webrtc diff --git a/api/test/videocodec_test_stats.h b/api/test/videocodec_test_stats.h new file mode 100644 index 0000000..63e1576 --- /dev/null +++ b/api/test/videocodec_test_stats.h @@ -0,0 +1,133 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TEST_VIDEOCODEC_TEST_STATS_H_ +#define API_TEST_VIDEOCODEC_TEST_STATS_H_ + +#include +#include + +#include +#include + +#include "api/video/video_frame_type.h" + +namespace webrtc { +namespace test { + +// Statistics for a sequence of processed frames. This class is not thread safe. +class VideoCodecTestStats { + public: + // Statistics for one processed frame. + struct FrameStatistics { + FrameStatistics(size_t frame_number, + size_t rtp_timestamp, + size_t spatial_idx); + + std::string ToString() const; + + size_t frame_number = 0; + size_t rtp_timestamp = 0; + + // Encoding. + int64_t encode_start_ns = 0; + int encode_return_code = 0; + bool encoding_successful = false; + size_t encode_time_us = 0; + size_t target_bitrate_kbps = 0; + double target_framerate_fps = 0.0; + size_t length_bytes = 0; + VideoFrameType frame_type = VideoFrameType::kVideoFrameDelta; + + // Layering. + size_t spatial_idx = 0; + size_t temporal_idx = 0; + bool inter_layer_predicted = false; + bool non_ref_for_inter_layer_pred = true; + + // H264 specific. + size_t max_nalu_size_bytes = 0; + + // Decoding. + int64_t decode_start_ns = 0; + int decode_return_code = 0; + bool decoding_successful = false; + size_t decode_time_us = 0; + size_t decoded_width = 0; + size_t decoded_height = 0; + + // Quantization. + int qp = -1; + + // Quality. + float psnr_y = 0.0f; + float psnr_u = 0.0f; + float psnr_v = 0.0f; + float psnr = 0.0f; // 10 * log10(255^2 / (mse_y + mse_u + mse_v)). + float ssim = 0.0f; // 0.8 * ssim_y + 0.1 * (ssim_u + ssim_v). 
+ }; + + struct VideoStatistics { + std::string ToString(std::string prefix) const; + + size_t target_bitrate_kbps = 0; + float input_framerate_fps = 0.0f; + + size_t spatial_idx = 0; + size_t temporal_idx = 0; + + size_t width = 0; + size_t height = 0; + + size_t length_bytes = 0; + size_t bitrate_kbps = 0; + float framerate_fps = 0; + + float enc_speed_fps = 0.0f; + float dec_speed_fps = 0.0f; + + float avg_delay_sec = 0.0f; + float max_key_frame_delay_sec = 0.0f; + float max_delta_frame_delay_sec = 0.0f; + float time_to_reach_target_bitrate_sec = 0.0f; + + float avg_key_frame_size_bytes = 0.0f; + float avg_delta_frame_size_bytes = 0.0f; + float avg_qp = 0.0f; + + float avg_psnr_y = 0.0f; + float avg_psnr_u = 0.0f; + float avg_psnr_v = 0.0f; + float avg_psnr = 0.0f; + float min_psnr = 0.0f; + float avg_ssim = 0.0f; + float min_ssim = 0.0f; + + size_t num_input_frames = 0; + size_t num_encoded_frames = 0; + size_t num_decoded_frames = 0; + size_t num_key_frames = 0; + size_t num_spatial_resizes = 0; + size_t max_nalu_size_bytes = 0; + }; + + virtual ~VideoCodecTestStats() = default; + + virtual std::vector GetFrameStatistics() = 0; + + virtual std::vector SliceAndCalcLayerVideoStatistic( + size_t first_frame_num, + size_t last_frame_num) = 0; +}; + +} // namespace test +} // namespace webrtc + +#endif // API_TEST_VIDEOCODEC_TEST_STATS_H_ diff --git a/api/transport/BUILD.gn b/api/transport/BUILD.gn new file mode 100644 index 0000000..a4ada07 --- /dev/null +++ b/api/transport/BUILD.gn @@ -0,0 +1,159 @@ +# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../webrtc.gni") + +rtc_library("bitrate_settings") { + visibility = [ "*" ] + sources = [ + "bitrate_settings.cc", + "bitrate_settings.h", + ] + deps = [ "../../rtc_base/system:rtc_export" ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_source_set("enums") { + visibility = [ "*" ] + sources = [ "enums.h" ] +} + +rtc_library("network_control") { + visibility = [ "*" ] + sources = [ + "network_control.h", + "network_types.cc", + "network_types.h", + ] + + deps = [ + ":webrtc_key_value_config", + "../../rtc_base:deprecation", + "../rtc_event_log", + "../units:data_rate", + "../units:data_size", + "../units:time_delta", + "../units:timestamp", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/base:core_headers", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_source_set("webrtc_key_value_config") { + visibility = [ "*" ] + sources = [ "webrtc_key_value_config.h" ] + deps = [ "../../rtc_base/system:rtc_export" ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] +} + +rtc_library("field_trial_based_config") { + visibility = [ "*" ] + sources = [ + "field_trial_based_config.cc", + "field_trial_based_config.h", + ] + deps = [ + ":webrtc_key_value_config", + "../../system_wrappers:field_trial", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] +} + +# TODO(nisse): Rename? 
+rtc_source_set("datagram_transport_interface") { + visibility = [ "*" ] + sources = [ "data_channel_transport_interface.h" ] + deps = [ + "..:array_view", + "..:rtc_error", + "../../rtc_base:rtc_base_approved", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("goog_cc") { + visibility = [ "*" ] + sources = [ + "goog_cc_factory.cc", + "goog_cc_factory.h", + ] + deps = [ + ":network_control", + ":webrtc_key_value_config", + "..:network_state_predictor_api", + "../../modules/congestion_controller/goog_cc", + "../../rtc_base:deprecation", + ] +} + +rtc_source_set("stun_types") { + visibility = [ "*" ] + sources = [ + "stun.cc", + "stun.h", + ] + + deps = [ + "../../rtc_base:checks", + "../../rtc_base:rtc_base", + "../../rtc_base:rtc_base_approved", + ] +} + +if (rtc_include_tests) { + rtc_source_set("test_feedback_generator_interface") { + testonly = true + sources = [ "test/feedback_generator_interface.h" ] + deps = [ + ":network_control", + "..:simulated_network_api", + ] + } + rtc_library("test_feedback_generator") { + testonly = true + sources = [ + "test/create_feedback_generator.cc", + "test/create_feedback_generator.h", + ] + visibility = [ "*" ] + deps = [ + ":network_control", + ":test_feedback_generator_interface", + "../../test/network:feedback_generator", + ] + } +} + +if (rtc_include_tests) { + rtc_source_set("stun_unittest") { + visibility = [ "*" ] + testonly = true + sources = [ "stun_unittest.cc" ] + deps = [ + ":stun_types", + "../../rtc_base", + "../../rtc_base:macromagic", + "../../rtc_base:rtc_base_approved", + "../../test:test_support", + "//testing/gtest", + ] + } +} + +if (rtc_include_tests) { + rtc_source_set("mock_network_control") { + testonly = true + sources = [ "test/mock_network_control.h" ] + deps = [ + ":network_control", + "../../test:test_support", + ] + } +} diff --git a/api/transport/DEPS b/api/transport/DEPS new file mode 100644 index 0000000..53a68e0 --- /dev/null +++ b/api/transport/DEPS @@ 
-0,0 +1,7 @@ +specific_include_rules = { + "stun\.h": [ + "+rtc_base/byte_buffer.h", + "+rtc_base/ip_address.h", + "+rtc_base/socket_address.h", + ], +} diff --git a/api/transport/OWNERS b/api/transport/OWNERS new file mode 100644 index 0000000..5991f6f --- /dev/null +++ b/api/transport/OWNERS @@ -0,0 +1,2 @@ +srte@webrtc.org +terelius@webrtc.org diff --git a/api/transport/bitrate_settings.cc b/api/transport/bitrate_settings.cc new file mode 100644 index 0000000..c72bd82 --- /dev/null +++ b/api/transport/bitrate_settings.cc @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/transport/bitrate_settings.h" + +namespace webrtc { + +BitrateSettings::BitrateSettings() = default; +BitrateSettings::~BitrateSettings() = default; +BitrateSettings::BitrateSettings(const BitrateSettings&) = default; + +} // namespace webrtc diff --git a/api/transport/bitrate_settings.h b/api/transport/bitrate_settings.h new file mode 100644 index 0000000..b6c022d --- /dev/null +++ b/api/transport/bitrate_settings.h @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TRANSPORT_BITRATE_SETTINGS_H_ +#define API_TRANSPORT_BITRATE_SETTINGS_H_ + +#include + +#include "absl/types/optional.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// Configuration of send bitrate. The |start_bitrate_bps| value is +// used for multiple purposes, both as a prior in the bandwidth +// estimator, and for initial configuration of the encoder. We may +// want to create separate apis for those, and use a smaller struct +// with only the min and max constraints. +struct RTC_EXPORT BitrateSettings { + BitrateSettings(); + ~BitrateSettings(); + BitrateSettings(const BitrateSettings&); + // 0 <= min <= start <= max should hold for set parameters. + absl::optional min_bitrate_bps; + absl::optional start_bitrate_bps; + absl::optional max_bitrate_bps; +}; + +// TODO(srte): BitrateConstraints and BitrateSettings should be merged. +// Both represent the same kind data, but are using different default +// initializer and representation of unset values. +struct BitrateConstraints { + int min_bitrate_bps = 0; + int start_bitrate_bps = kDefaultStartBitrateBps; + int max_bitrate_bps = -1; + + private: + static constexpr int kDefaultStartBitrateBps = 300000; +}; + +} // namespace webrtc + +#endif // API_TRANSPORT_BITRATE_SETTINGS_H_ diff --git a/api/transport/data_channel_transport_interface.h b/api/transport/data_channel_transport_interface.h new file mode 100644 index 0000000..7b8c653 --- /dev/null +++ b/api/transport/data_channel_transport_interface.h @@ -0,0 +1,128 @@ +/* Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +// This is an experimental interface and is subject to change without notice. + +#ifndef API_TRANSPORT_DATA_CHANNEL_TRANSPORT_INTERFACE_H_ +#define API_TRANSPORT_DATA_CHANNEL_TRANSPORT_INTERFACE_H_ + +#include "absl/types/optional.h" +#include "api/rtc_error.h" +#include "rtc_base/copy_on_write_buffer.h" + +namespace webrtc { + +// Supported types of application data messages. +enum class DataMessageType { + // Application data buffer with the binary bit unset. + kText, + + // Application data buffer with the binary bit set. + kBinary, + + // Transport-agnostic control messages, such as open or open-ack messages. + kControl, +}; + +// Parameters for sending data. The parameters may change from message to +// message, even within a single channel. For example, control messages may be +// sent reliably and in-order, even if the data channel is configured for +// unreliable delivery. +struct SendDataParams { + SendDataParams() = default; + SendDataParams(const SendDataParams&) = default; + + DataMessageType type = DataMessageType::kText; + + // Whether to deliver the message in order with respect to other ordered + // messages with the same channel_id. + bool ordered = false; + + // If set, the maximum number of times this message may be + // retransmitted by the transport before it is dropped. + // Setting this value to zero disables retransmission. + // Must be non-negative. |max_rtx_count| and |max_rtx_ms| may not be set + // simultaneously. + absl::optional max_rtx_count; + + // If set, the maximum number of milliseconds for which the transport + // may retransmit this message before it is dropped. + // Setting this value to zero disables retransmission. + // Must be non-negative. |max_rtx_count| and |max_rtx_ms| may not be set + // simultaneously. + absl::optional max_rtx_ms; +}; + +// Sink for callbacks related to a data channel. 
+class DataChannelSink { + public: + virtual ~DataChannelSink() = default; + + // Callback issued when data is received by the transport. + virtual void OnDataReceived(int channel_id, + DataMessageType type, + const rtc::CopyOnWriteBuffer& buffer) = 0; + + // Callback issued when a remote data channel begins the closing procedure. + // Messages sent after the closing procedure begins will not be transmitted. + virtual void OnChannelClosing(int channel_id) = 0; + + // Callback issued when a (remote or local) data channel completes the closing + // procedure. Closing channels become closed after all pending data has been + // transmitted. + virtual void OnChannelClosed(int channel_id) = 0; + + // Callback issued when the data channel becomes ready to send. + // This callback will be issued immediately when the data channel sink is + // registered if the transport is ready at that time. This callback may be + // invoked again following send errors (eg. due to the transport being + // temporarily blocked or unavailable). + virtual void OnReadyToSend() = 0; + + // Callback issued when the data channel becomes unusable (closed). + // TODO(https://crbug.com/webrtc/10360): Make pure virtual when all + // consumers updated. + virtual void OnTransportClosed() {} +}; + +// Transport for data channels. +class DataChannelTransportInterface { + public: + virtual ~DataChannelTransportInterface() = default; + + // Opens a data |channel_id| for sending. May return an error if the + // specified |channel_id| is unusable. Must be called before |SendData|. + virtual RTCError OpenChannel(int channel_id) = 0; + + // Sends a data buffer to the remote endpoint using the given send parameters. + // |buffer| may not be larger than 256 KiB. Returns an error if the send + // fails. + virtual RTCError SendData(int channel_id, + const SendDataParams& params, + const rtc::CopyOnWriteBuffer& buffer) = 0; + + // Closes |channel_id| gracefully. Returns an error if |channel_id| is not + // open. 
Data sent after the closing procedure begins will not be + // transmitted. The channel becomes closed after pending data is transmitted. + virtual RTCError CloseChannel(int channel_id) = 0; + + // Sets a sink for data messages and channel state callbacks. Before media + // transport is destroyed, the sink must be unregistered by setting it to + // nullptr. + virtual void SetDataSink(DataChannelSink* sink) = 0; + + // Returns whether this data channel transport is ready to send. + // Note: the default implementation always returns false (as it assumes no one + // has implemented the interface). This default implementation is temporary. + virtual bool IsReadyToSend() const = 0; +}; + +} // namespace webrtc + +#endif // API_TRANSPORT_DATA_CHANNEL_TRANSPORT_INTERFACE_H_ diff --git a/api/transport/enums.h b/api/transport/enums.h new file mode 100644 index 0000000..eb33e91 --- /dev/null +++ b/api/transport/enums.h @@ -0,0 +1,39 @@ +/* + * Copyright 2018 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TRANSPORT_ENUMS_H_ +#define API_TRANSPORT_ENUMS_H_ + +namespace webrtc { + +// See https://w3c.github.io/webrtc-pc/#rtcicetransportstate +// Note that kFailed is currently not a terminal state, and a transport might +// incorrectly be marked as failed while gathering candidates, see +// bugs.webrtc.org/8833 +enum class IceTransportState { + kNew, + kChecking, + kConnected, + kCompleted, + kFailed, + kDisconnected, + kClosed, +}; + +enum PortPrunePolicy { + NO_PRUNE, // Do not prune. + PRUNE_BASED_ON_PRIORITY, // Prune lower-priority ports on the same network. 
+ KEEP_FIRST_READY // Keep the first ready port and prune the rest + // on the same network. +}; + +} // namespace webrtc + +#endif // API_TRANSPORT_ENUMS_H_ diff --git a/api/transport/field_trial_based_config.cc b/api/transport/field_trial_based_config.cc new file mode 100644 index 0000000..4a3a179 --- /dev/null +++ b/api/transport/field_trial_based_config.cc @@ -0,0 +1,18 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "api/transport/field_trial_based_config.h" + +#include "system_wrappers/include/field_trial.h" + +namespace webrtc { +std::string FieldTrialBasedConfig::Lookup(absl::string_view key) const { + return webrtc::field_trial::FindFullName(std::string(key)); +} +} // namespace webrtc diff --git a/api/transport/field_trial_based_config.h b/api/transport/field_trial_based_config.h new file mode 100644 index 0000000..0754570 --- /dev/null +++ b/api/transport/field_trial_based_config.h @@ -0,0 +1,26 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef API_TRANSPORT_FIELD_TRIAL_BASED_CONFIG_H_ +#define API_TRANSPORT_FIELD_TRIAL_BASED_CONFIG_H_ + +#include + +#include "absl/strings/string_view.h" +#include "api/transport/webrtc_key_value_config.h" + +namespace webrtc { +// Implementation using the field trial API fo the key value lookup. +class FieldTrialBasedConfig : public WebRtcKeyValueConfig { + public: + std::string Lookup(absl::string_view key) const override; +}; +} // namespace webrtc + +#endif // API_TRANSPORT_FIELD_TRIAL_BASED_CONFIG_H_ diff --git a/api/transport/goog_cc_factory.cc b/api/transport/goog_cc_factory.cc new file mode 100644 index 0000000..fd11899 --- /dev/null +++ b/api/transport/goog_cc_factory.cc @@ -0,0 +1,65 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/transport/goog_cc_factory.h" + +#include +#include + +#include "modules/congestion_controller/goog_cc/goog_cc_network_control.h" + +namespace webrtc { +GoogCcNetworkControllerFactory::GoogCcNetworkControllerFactory( + RtcEventLog* event_log) + : event_log_(event_log) {} + +GoogCcNetworkControllerFactory::GoogCcNetworkControllerFactory( + NetworkStatePredictorFactoryInterface* network_state_predictor_factory) { + factory_config_.network_state_predictor_factory = + network_state_predictor_factory; +} + +GoogCcNetworkControllerFactory::GoogCcNetworkControllerFactory( + GoogCcFactoryConfig config) + : factory_config_(std::move(config)) {} + +std::unique_ptr +GoogCcNetworkControllerFactory::Create(NetworkControllerConfig config) { + if (event_log_) + config.event_log = event_log_; + GoogCcConfig goog_cc_config; + goog_cc_config.feedback_only = factory_config_.feedback_only; + if (factory_config_.network_state_estimator_factory) { + RTC_DCHECK(config.key_value_config); + goog_cc_config.network_state_estimator = + factory_config_.network_state_estimator_factory->Create( + config.key_value_config); + } + if (factory_config_.network_state_predictor_factory) { + goog_cc_config.network_state_predictor = + factory_config_.network_state_predictor_factory + ->CreateNetworkStatePredictor(); + } + return std::make_unique(config, + std::move(goog_cc_config)); +} + +TimeDelta GoogCcNetworkControllerFactory::GetProcessInterval() const { + const int64_t kUpdateIntervalMs = 25; + return TimeDelta::Millis(kUpdateIntervalMs); +} + +GoogCcFeedbackNetworkControllerFactory::GoogCcFeedbackNetworkControllerFactory( + RtcEventLog* event_log) + : GoogCcNetworkControllerFactory(event_log) { + factory_config_.feedback_only = true; +} + +} // namespace webrtc diff --git a/api/transport/goog_cc_factory.h b/api/transport/goog_cc_factory.h new file mode 100644 index 0000000..b14d6dc --- /dev/null +++ b/api/transport/goog_cc_factory.h @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2018 
The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TRANSPORT_GOOG_CC_FACTORY_H_ +#define API_TRANSPORT_GOOG_CC_FACTORY_H_ +#include + +#include "api/network_state_predictor.h" +#include "api/transport/network_control.h" +#include "rtc_base/deprecation.h" + +namespace webrtc { +class RtcEventLog; + +struct GoogCcFactoryConfig { + std::unique_ptr + network_state_estimator_factory = nullptr; + NetworkStatePredictorFactoryInterface* network_state_predictor_factory = + nullptr; + bool feedback_only = false; +}; + +class GoogCcNetworkControllerFactory + : public NetworkControllerFactoryInterface { + public: + GoogCcNetworkControllerFactory() = default; + explicit RTC_DEPRECATED GoogCcNetworkControllerFactory( + RtcEventLog* event_log); + explicit GoogCcNetworkControllerFactory( + NetworkStatePredictorFactoryInterface* network_state_predictor_factory); + + explicit GoogCcNetworkControllerFactory(GoogCcFactoryConfig config); + std::unique_ptr Create( + NetworkControllerConfig config) override; + TimeDelta GetProcessInterval() const override; + + protected: + RtcEventLog* const event_log_ = nullptr; + GoogCcFactoryConfig factory_config_; +}; + +// Deprecated, use GoogCcFactoryConfig to enable feedback only mode instead. +// Factory to create packet feedback only GoogCC, this can be used for +// connections providing packet receive time feedback but no other reports. 
+class RTC_DEPRECATED GoogCcFeedbackNetworkControllerFactory + : public GoogCcNetworkControllerFactory { + public: + explicit GoogCcFeedbackNetworkControllerFactory(RtcEventLog* event_log); +}; + +} // namespace webrtc + +#endif // API_TRANSPORT_GOOG_CC_FACTORY_H_ diff --git a/api/transport/network_control.h b/api/transport/network_control.h new file mode 100644 index 0000000..6fc1f7c --- /dev/null +++ b/api/transport/network_control.h @@ -0,0 +1,140 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TRANSPORT_NETWORK_CONTROL_H_ +#define API_TRANSPORT_NETWORK_CONTROL_H_ +#include + +#include + +#include "absl/base/attributes.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/transport/network_types.h" +#include "api/transport/webrtc_key_value_config.h" + +namespace webrtc { + +class TargetTransferRateObserver { + public: + virtual ~TargetTransferRateObserver() = default; + // Called to indicate target transfer rate as well as giving information about + // the current estimate of network parameters. + virtual void OnTargetTransferRate(TargetTransferRate) = 0; + // Called to provide updates to the expected target rate in case it changes + // before the first call to OnTargetTransferRate. + virtual void OnStartRateUpdate(DataRate) {} +}; + +// Configuration sent to factory create function. The parameters here are +// optional to use for a network controller implementation. +struct NetworkControllerConfig { + // The initial constraints to start with, these can be changed at any later + // time by calls to OnTargetRateConstraints. 
Note that the starting rate + // has to be set initially to provide a starting state for the network + // controller, even though the field is marked as optional. + TargetRateConstraints constraints; + // Initial stream specific configuration, these are changed at any later time + // by calls to OnStreamsConfig. + StreamsConfig stream_based_config; + + // Optional override of configuration of WebRTC internals. Using nullptr here + // indicates that the field trial API will be used. + const WebRtcKeyValueConfig* key_value_config = nullptr; + // Optional override of event log. + RtcEventLog* event_log = nullptr; +}; + +// NetworkControllerInterface is implemented by network controllers. A network +// controller is a class that uses information about network state and traffic +// to estimate network parameters such as round trip time and bandwidth. Network +// controllers does not guarantee thread safety, the interface must be used in a +// non-concurrent fashion. +class NetworkControllerInterface { + public: + virtual ~NetworkControllerInterface() = default; + + // Called when network availabilty changes. + virtual NetworkControlUpdate OnNetworkAvailability(NetworkAvailability) + ABSL_MUST_USE_RESULT = 0; + // Called when the receiving or sending endpoint changes address. + virtual NetworkControlUpdate OnNetworkRouteChange(NetworkRouteChange) + ABSL_MUST_USE_RESULT = 0; + // Called periodically with a periodicy as specified by + // NetworkControllerFactoryInterface::GetProcessInterval. + virtual NetworkControlUpdate OnProcessInterval(ProcessInterval) + ABSL_MUST_USE_RESULT = 0; + // Called when remotely calculated bitrate is received. + virtual NetworkControlUpdate OnRemoteBitrateReport(RemoteBitrateReport) + ABSL_MUST_USE_RESULT = 0; + // Called round trip time has been calculated by protocol specific mechanisms. + virtual NetworkControlUpdate OnRoundTripTimeUpdate(RoundTripTimeUpdate) + ABSL_MUST_USE_RESULT = 0; + // Called when a packet is sent on the network. 
+ virtual NetworkControlUpdate OnSentPacket(SentPacket) + ABSL_MUST_USE_RESULT = 0; + // Called when a packet is received from the remote client. + virtual NetworkControlUpdate OnReceivedPacket(ReceivedPacket) + ABSL_MUST_USE_RESULT = 0; + // Called when the stream specific configuration has been updated. + virtual NetworkControlUpdate OnStreamsConfig(StreamsConfig) + ABSL_MUST_USE_RESULT = 0; + // Called when target transfer rate constraints has been changed. + virtual NetworkControlUpdate OnTargetRateConstraints(TargetRateConstraints) + ABSL_MUST_USE_RESULT = 0; + // Called when a protocol specific calculation of packet loss has been made. + virtual NetworkControlUpdate OnTransportLossReport(TransportLossReport) + ABSL_MUST_USE_RESULT = 0; + // Called with per packet feedback regarding receive time. + virtual NetworkControlUpdate OnTransportPacketsFeedback( + TransportPacketsFeedback) ABSL_MUST_USE_RESULT = 0; + // Called with network state estimate updates. + virtual NetworkControlUpdate OnNetworkStateEstimate(NetworkStateEstimate) + ABSL_MUST_USE_RESULT = 0; +}; + +// NetworkControllerFactoryInterface is an interface for creating a network +// controller. +class NetworkControllerFactoryInterface { + public: + virtual ~NetworkControllerFactoryInterface() = default; + + // Used to create a new network controller, requires an observer to be + // provided to handle callbacks. + virtual std::unique_ptr Create( + NetworkControllerConfig config) = 0; + // Returns the interval by which the network controller expects + // OnProcessInterval calls. + virtual TimeDelta GetProcessInterval() const = 0; +}; + +// Under development, subject to change without notice. +class NetworkStateEstimator { + public: + // Gets the current best estimate according to the estimator. + virtual absl::optional GetCurrentEstimate() = 0; + // Called with per packet feedback regarding receive time. + // Used when the NetworkStateEstimator runs in the sending endpoint. 
+ virtual void OnTransportPacketsFeedback(const TransportPacketsFeedback&) = 0; + // Called with per packet feedback regarding receive time. + // Used when the NetworkStateEstimator runs in the receiving endpoint. + virtual void OnReceivedPacket(const PacketResult&) {} + // Called when the receiving or sending endpoint changes address. + virtual void OnRouteChange(const NetworkRouteChange&) = 0; + virtual ~NetworkStateEstimator() = default; +}; +class NetworkStateEstimatorFactory { + public: + virtual std::unique_ptr Create( + const WebRtcKeyValueConfig* key_value_config) = 0; + virtual ~NetworkStateEstimatorFactory() = default; +}; +} // namespace webrtc + +#endif // API_TRANSPORT_NETWORK_CONTROL_H_ diff --git a/api/transport/network_types.cc b/api/transport/network_types.cc new file mode 100644 index 0000000..88b67b3 --- /dev/null +++ b/api/transport/network_types.cc @@ -0,0 +1,110 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/transport/network_types.h" + +#include + +namespace webrtc { +StreamsConfig::StreamsConfig() = default; +StreamsConfig::StreamsConfig(const StreamsConfig&) = default; +StreamsConfig::~StreamsConfig() = default; + +TargetRateConstraints::TargetRateConstraints() = default; +TargetRateConstraints::TargetRateConstraints(const TargetRateConstraints&) = + default; +TargetRateConstraints::~TargetRateConstraints() = default; + +NetworkRouteChange::NetworkRouteChange() = default; +NetworkRouteChange::NetworkRouteChange(const NetworkRouteChange&) = default; +NetworkRouteChange::~NetworkRouteChange() = default; + +PacketResult::PacketResult() = default; +PacketResult::PacketResult(const PacketResult& other) = default; +PacketResult::~PacketResult() = default; + +bool PacketResult::ReceiveTimeOrder::operator()(const PacketResult& lhs, + const PacketResult& rhs) { + if (lhs.receive_time != rhs.receive_time) + return lhs.receive_time < rhs.receive_time; + if (lhs.sent_packet.send_time != rhs.sent_packet.send_time) + return lhs.sent_packet.send_time < rhs.sent_packet.send_time; + return lhs.sent_packet.sequence_number < rhs.sent_packet.sequence_number; +} + +TransportPacketsFeedback::TransportPacketsFeedback() = default; +TransportPacketsFeedback::TransportPacketsFeedback( + const TransportPacketsFeedback& other) = default; +TransportPacketsFeedback::~TransportPacketsFeedback() = default; + +std::vector TransportPacketsFeedback::ReceivedWithSendInfo() + const { + std::vector res; + for (const PacketResult& fb : packet_feedbacks) { + if (fb.receive_time.IsFinite()) { + res.push_back(fb); + } + } + return res; +} + +std::vector TransportPacketsFeedback::LostWithSendInfo() const { + std::vector res; + for (const PacketResult& fb : packet_feedbacks) { + if (fb.receive_time.IsPlusInfinity()) { + res.push_back(fb); + } + } + return res; +} + +std::vector TransportPacketsFeedback::PacketsWithFeedback() + const { + return packet_feedbacks; +} + +std::vector 
TransportPacketsFeedback::SortedByReceiveTime() + const { + std::vector res; + for (const PacketResult& fb : packet_feedbacks) { + if (fb.receive_time.IsFinite()) { + res.push_back(fb); + } + } + std::sort(res.begin(), res.end(), PacketResult::ReceiveTimeOrder()); + return res; +} + +NetworkControlUpdate::NetworkControlUpdate() = default; +NetworkControlUpdate::NetworkControlUpdate(const NetworkControlUpdate&) = + default; +NetworkControlUpdate::~NetworkControlUpdate() = default; + +PacedPacketInfo::PacedPacketInfo() = default; + +PacedPacketInfo::PacedPacketInfo(int probe_cluster_id, + int probe_cluster_min_probes, + int probe_cluster_min_bytes) + : probe_cluster_id(probe_cluster_id), + probe_cluster_min_probes(probe_cluster_min_probes), + probe_cluster_min_bytes(probe_cluster_min_bytes) {} + +bool PacedPacketInfo::operator==(const PacedPacketInfo& rhs) const { + return send_bitrate_bps == rhs.send_bitrate_bps && + probe_cluster_id == rhs.probe_cluster_id && + probe_cluster_min_probes == rhs.probe_cluster_min_probes && + probe_cluster_min_bytes == rhs.probe_cluster_min_bytes; +} + +ProcessInterval::ProcessInterval() = default; +ProcessInterval::ProcessInterval(const ProcessInterval&) = default; +ProcessInterval::~ProcessInterval() = default; + +} // namespace webrtc diff --git a/api/transport/network_types.h b/api/transport/network_types.h new file mode 100644 index 0000000..10fc0be --- /dev/null +++ b/api/transport/network_types.h @@ -0,0 +1,280 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TRANSPORT_NETWORK_TYPES_H_ +#define API_TRANSPORT_NETWORK_TYPES_H_ +#include + +#include + +#include "absl/types/optional.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/deprecation.h" + +namespace webrtc { + +// Configuration + +// Represents constraints and rates related to the currently enabled streams. +// This is used as input to the congestion controller via the StreamsConfig +// struct. +struct BitrateAllocationLimits { + // The total minimum send bitrate required by all sending streams. + DataRate min_allocatable_rate = DataRate::Zero(); + // The total maximum allocatable bitrate for all currently available streams. + DataRate max_allocatable_rate = DataRate::Zero(); + // The max bitrate to use for padding. The sum of the per-stream max padding + // rate. + DataRate max_padding_rate = DataRate::Zero(); +}; + +// Use StreamsConfig for information about streams that is required for specific +// adjustments to the algorithms in network controllers. Especially useful +// for experiments. +struct StreamsConfig { + StreamsConfig(); + StreamsConfig(const StreamsConfig&); + ~StreamsConfig(); + Timestamp at_time = Timestamp::PlusInfinity(); + absl::optional requests_alr_probing; + absl::optional pacing_factor; + + // TODO(srte): Use BitrateAllocationLimits here. + absl::optional min_total_allocated_bitrate; + absl::optional max_padding_rate; + absl::optional max_total_allocated_bitrate; +}; + +struct TargetRateConstraints { + TargetRateConstraints(); + TargetRateConstraints(const TargetRateConstraints&); + ~TargetRateConstraints(); + Timestamp at_time = Timestamp::PlusInfinity(); + absl::optional min_data_rate; + absl::optional max_data_rate; + // The initial bandwidth estimate to base target rate on. This should be used + // as the basis for initial OnTargetTransferRate and OnPacerConfig callbacks. 
+ absl::optional starting_rate; +}; + +// Send side information + +struct NetworkAvailability { + Timestamp at_time = Timestamp::PlusInfinity(); + bool network_available = false; +}; + +struct NetworkRouteChange { + NetworkRouteChange(); + NetworkRouteChange(const NetworkRouteChange&); + ~NetworkRouteChange(); + Timestamp at_time = Timestamp::PlusInfinity(); + // The TargetRateConstraints are set here so they can be changed synchronously + // when network route changes. + TargetRateConstraints constraints; +}; + +struct PacedPacketInfo { + PacedPacketInfo(); + PacedPacketInfo(int probe_cluster_id, + int probe_cluster_min_probes, + int probe_cluster_min_bytes); + + bool operator==(const PacedPacketInfo& rhs) const; + + // TODO(srte): Move probing info to a separate, optional struct. + static constexpr int kNotAProbe = -1; + int send_bitrate_bps = -1; + int probe_cluster_id = kNotAProbe; + int probe_cluster_min_probes = -1; + int probe_cluster_min_bytes = -1; + int probe_cluster_bytes_sent = 0; +}; + +struct SentPacket { + Timestamp send_time = Timestamp::PlusInfinity(); + // Size of packet with overhead up to IP layer. + DataSize size = DataSize::Zero(); + // Size of preceeding packets that are not part of feedback. + DataSize prior_unacked_data = DataSize::Zero(); + // Probe cluster id and parameters including bitrate, number of packets and + // number of bytes. + PacedPacketInfo pacing_info; + // True if the packet is an audio packet, false for video, padding, RTX etc. + bool audio = false; + // Transport independent sequence number, any tracked packet should have a + // sequence number that is unique over the whole call and increasing by 1 for + // each packet. + int64_t sequence_number; + // Tracked data in flight when the packet was sent, excluding unacked data. 
+ DataSize data_in_flight = DataSize::Zero(); +}; + +struct ReceivedPacket { + Timestamp send_time = Timestamp::MinusInfinity(); + Timestamp receive_time = Timestamp::PlusInfinity(); + DataSize size = DataSize::Zero(); +}; + +// Transport level feedback + +struct RemoteBitrateReport { + Timestamp receive_time = Timestamp::PlusInfinity(); + DataRate bandwidth = DataRate::Infinity(); +}; + +struct RoundTripTimeUpdate { + Timestamp receive_time = Timestamp::PlusInfinity(); + TimeDelta round_trip_time = TimeDelta::PlusInfinity(); + bool smoothed = false; +}; + +struct TransportLossReport { + Timestamp receive_time = Timestamp::PlusInfinity(); + Timestamp start_time = Timestamp::PlusInfinity(); + Timestamp end_time = Timestamp::PlusInfinity(); + uint64_t packets_lost_delta = 0; + uint64_t packets_received_delta = 0; +}; + +// Packet level feedback + +struct PacketResult { + class ReceiveTimeOrder { + public: + bool operator()(const PacketResult& lhs, const PacketResult& rhs); + }; + + PacketResult(); + PacketResult(const PacketResult&); + ~PacketResult(); + + SentPacket sent_packet; + Timestamp receive_time = Timestamp::PlusInfinity(); +}; + +struct TransportPacketsFeedback { + TransportPacketsFeedback(); + TransportPacketsFeedback(const TransportPacketsFeedback& other); + ~TransportPacketsFeedback(); + + Timestamp feedback_time = Timestamp::PlusInfinity(); + Timestamp first_unacked_send_time = Timestamp::PlusInfinity(); + DataSize data_in_flight = DataSize::Zero(); + DataSize prior_in_flight = DataSize::Zero(); + std::vector packet_feedbacks; + + // Arrival times for messages without send time information. 
+ std::vector sendless_arrival_times; + + std::vector ReceivedWithSendInfo() const; + std::vector LostWithSendInfo() const; + std::vector PacketsWithFeedback() const; + std::vector SortedByReceiveTime() const; +}; + +// Network estimation + +struct NetworkEstimate { + Timestamp at_time = Timestamp::PlusInfinity(); + // Deprecated, use TargetTransferRate::target_rate instead. + DataRate bandwidth = DataRate::Infinity(); + TimeDelta round_trip_time = TimeDelta::PlusInfinity(); + TimeDelta bwe_period = TimeDelta::PlusInfinity(); + + float loss_rate_ratio = 0; +}; + +// Network control + +struct PacerConfig { + Timestamp at_time = Timestamp::PlusInfinity(); + // Pacer should send at most data_window data over time_window duration. + DataSize data_window = DataSize::Infinity(); + TimeDelta time_window = TimeDelta::PlusInfinity(); + // Pacer should send at least pad_window data over time_window duration. + DataSize pad_window = DataSize::Zero(); + DataRate data_rate() const { return data_window / time_window; } + DataRate pad_rate() const { return pad_window / time_window; } +}; + +struct ProbeClusterConfig { + Timestamp at_time = Timestamp::PlusInfinity(); + DataRate target_data_rate = DataRate::Zero(); + TimeDelta target_duration = TimeDelta::Zero(); + int32_t target_probe_count = 0; + int32_t id = 0; +}; + +struct TargetTransferRate { + Timestamp at_time = Timestamp::PlusInfinity(); + // The estimate on which the target rate is based on. + NetworkEstimate network_estimate; + DataRate target_rate = DataRate::Zero(); + DataRate stable_target_rate = DataRate::Zero(); + double cwnd_reduce_ratio = 0; +}; + +// Contains updates of network controller comand state. Using optionals to +// indicate whether a member has been updated. The array of probe clusters +// should be used to send out probes if not empty. 
+struct NetworkControlUpdate { + NetworkControlUpdate(); + NetworkControlUpdate(const NetworkControlUpdate&); + ~NetworkControlUpdate(); + absl::optional congestion_window; + absl::optional pacer_config; + std::vector probe_cluster_configs; + absl::optional target_rate; +}; + +// Process control +struct ProcessInterval { + ProcessInterval(); + ProcessInterval(const ProcessInterval&); + ~ProcessInterval(); + Timestamp at_time = Timestamp::PlusInfinity(); + absl::optional pacer_queue; +}; + +// Under development, subject to change without notice. +struct NetworkStateEstimate { + double confidence = NAN; + // The time the estimate was received/calculated. + Timestamp update_time = Timestamp::MinusInfinity(); + Timestamp last_receive_time = Timestamp::MinusInfinity(); + Timestamp last_send_time = Timestamp::MinusInfinity(); + + // Total estimated link capacity. + DataRate link_capacity = DataRate::MinusInfinity(); + // Used as a safe measure of available capacity. + DataRate link_capacity_lower = DataRate::MinusInfinity(); + // Used as limit for increasing bitrate. + DataRate link_capacity_upper = DataRate::MinusInfinity(); + + TimeDelta pre_link_buffer_delay = TimeDelta::MinusInfinity(); + TimeDelta post_link_buffer_delay = TimeDelta::MinusInfinity(); + TimeDelta propagation_delay = TimeDelta::MinusInfinity(); + + // Only for debugging + TimeDelta time_delta = TimeDelta::MinusInfinity(); + Timestamp last_feed_time = Timestamp::MinusInfinity(); + double cross_delay_rate = NAN; + double spike_delay_rate = NAN; + DataRate link_capacity_std_dev = DataRate::MinusInfinity(); + DataRate link_capacity_min = DataRate::MinusInfinity(); + double cross_traffic_ratio = NAN; +}; +} // namespace webrtc + +#endif // API_TRANSPORT_NETWORK_TYPES_H_ diff --git a/api/transport/rtp/BUILD.gn b/api/transport/rtp/BUILD.gn new file mode 100644 index 0000000..7b01169 --- /dev/null +++ b/api/transport/rtp/BUILD.gn @@ -0,0 +1,33 @@ +# Copyright (c) 2019 The WebRTC project authors. 
All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.

import("../../../webrtc.gni")

# Header-only target exposing the RtpSource class.
rtc_source_set("rtp_source") {
  visibility = [ "*" ]
  sources = [ "rtp_source.h" ]
  deps = [
    "../../../api:rtp_headers",
    "../../../rtc_base:checks",
  ]
  absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ]
}

# AV1 dependency descriptor structures (header + implementation).
rtc_source_set("dependency_descriptor") {
  visibility = [ "*" ]
  sources = [
    "dependency_descriptor.cc",
    "dependency_descriptor.h",
  ]
  deps = [ "../../../rtc_base:checks" ]
  absl_deps = [
    "//third_party/abseil-cpp/absl/container:inlined_vector",
    "//third_party/abseil-cpp/absl/strings",
    "//third_party/abseil-cpp/absl/types:optional",
  ]
}

# ---- file boundary (reconstructed from collapsed diff):
# ---- api/transport/rtp/dependency_descriptor.cc ----
/*
 * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
+ */ + +#include "api/transport/rtp/dependency_descriptor.h" + +#include "absl/container/inlined_vector.h" +#include "absl/strings/string_view.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +constexpr int DependencyDescriptor::kMaxSpatialIds; +constexpr int DependencyDescriptor::kMaxTemporalIds; +constexpr int DependencyDescriptor::kMaxTemplates; +constexpr int DependencyDescriptor::kMaxDecodeTargets; + +namespace webrtc_impl { + +absl::InlinedVector StringToDecodeTargetIndications( + absl::string_view symbols) { + absl::InlinedVector dtis; + dtis.reserve(symbols.size()); + for (char symbol : symbols) { + DecodeTargetIndication indication; + switch (symbol) { + case '-': + indication = DecodeTargetIndication::kNotPresent; + break; + case 'D': + indication = DecodeTargetIndication::kDiscardable; + break; + case 'R': + indication = DecodeTargetIndication::kRequired; + break; + case 'S': + indication = DecodeTargetIndication::kSwitch; + break; + default: + RTC_NOTREACHED(); + } + dtis.push_back(indication); + } + return dtis; +} + +} // namespace webrtc_impl +} // namespace webrtc diff --git a/api/transport/rtp/dependency_descriptor.h b/api/transport/rtp/dependency_descriptor.h new file mode 100644 index 0000000..6967c83 --- /dev/null +++ b/api/transport/rtp/dependency_descriptor.h @@ -0,0 +1,148 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TRANSPORT_RTP_DEPENDENCY_DESCRIPTOR_H_ +#define API_TRANSPORT_RTP_DEPENDENCY_DESCRIPTOR_H_ + +#include + +#include +#include +#include + +#include "absl/container/inlined_vector.h" +#include "absl/strings/string_view.h" +#include "absl/types/optional.h" + +namespace webrtc { +// Structures to build and parse dependency descriptor as described in +// https://aomediacodec.github.io/av1-rtp-spec/#dependency-descriptor-rtp-header-extension +class RenderResolution { + public: + constexpr RenderResolution() = default; + constexpr RenderResolution(int width, int height) + : width_(width), height_(height) {} + RenderResolution(const RenderResolution&) = default; + RenderResolution& operator=(const RenderResolution&) = default; + + friend bool operator==(const RenderResolution& lhs, + const RenderResolution& rhs) { + return lhs.width_ == rhs.width_ && lhs.height_ == rhs.height_; + } + + constexpr int Width() const { return width_; } + constexpr int Height() const { return height_; } + + private: + int width_ = 0; + int height_ = 0; +}; + +// Relationship of a frame to a Decode target. +enum class DecodeTargetIndication { + kNotPresent = 0, // DecodeTargetInfo symbol '-' + kDiscardable = 1, // DecodeTargetInfo symbol 'D' + kSwitch = 2, // DecodeTargetInfo symbol 'S' + kRequired = 3 // DecodeTargetInfo symbol 'R' +}; + +struct FrameDependencyTemplate { + // Setters are named briefly to chain them when building the template. 
+ FrameDependencyTemplate& S(int spatial_layer); + FrameDependencyTemplate& T(int temporal_layer); + FrameDependencyTemplate& Dtis(absl::string_view dtis); + FrameDependencyTemplate& FrameDiffs(std::initializer_list diffs); + FrameDependencyTemplate& ChainDiffs(std::initializer_list diffs); + + friend bool operator==(const FrameDependencyTemplate& lhs, + const FrameDependencyTemplate& rhs) { + return lhs.spatial_id == rhs.spatial_id && + lhs.temporal_id == rhs.temporal_id && + lhs.decode_target_indications == rhs.decode_target_indications && + lhs.frame_diffs == rhs.frame_diffs && + lhs.chain_diffs == rhs.chain_diffs; + } + + int spatial_id = 0; + int temporal_id = 0; + absl::InlinedVector decode_target_indications; + absl::InlinedVector frame_diffs; + absl::InlinedVector chain_diffs; +}; + +struct FrameDependencyStructure { + friend bool operator==(const FrameDependencyStructure& lhs, + const FrameDependencyStructure& rhs) { + return lhs.num_decode_targets == rhs.num_decode_targets && + lhs.num_chains == rhs.num_chains && + lhs.decode_target_protected_by_chain == + rhs.decode_target_protected_by_chain && + lhs.resolutions == rhs.resolutions && lhs.templates == rhs.templates; + } + + int structure_id = 0; + int num_decode_targets = 0; + int num_chains = 0; + // If chains are used (num_chains > 0), maps decode target index into index of + // the chain protecting that target. 
+ absl::InlinedVector decode_target_protected_by_chain; + absl::InlinedVector resolutions; + std::vector templates; +}; + +struct DependencyDescriptor { + static constexpr int kMaxSpatialIds = 4; + static constexpr int kMaxTemporalIds = 8; + static constexpr int kMaxDecodeTargets = 32; + static constexpr int kMaxTemplates = 64; + + bool first_packet_in_frame = true; + bool last_packet_in_frame = true; + int frame_number = 0; + FrameDependencyTemplate frame_dependencies; + absl::optional resolution; + absl::optional active_decode_targets_bitmask; + std::unique_ptr attached_structure; +}; + +// Below are implementation details. +namespace webrtc_impl { +absl::InlinedVector StringToDecodeTargetIndications( + absl::string_view indication_symbols); +} // namespace webrtc_impl + +inline FrameDependencyTemplate& FrameDependencyTemplate::S(int spatial_layer) { + this->spatial_id = spatial_layer; + return *this; +} +inline FrameDependencyTemplate& FrameDependencyTemplate::T(int temporal_layer) { + this->temporal_id = temporal_layer; + return *this; +} +inline FrameDependencyTemplate& FrameDependencyTemplate::Dtis( + absl::string_view dtis) { + this->decode_target_indications = + webrtc_impl::StringToDecodeTargetIndications(dtis); + return *this; +} +inline FrameDependencyTemplate& FrameDependencyTemplate::FrameDiffs( + std::initializer_list diffs) { + this->frame_diffs.assign(diffs.begin(), diffs.end()); + return *this; +} +inline FrameDependencyTemplate& FrameDependencyTemplate::ChainDiffs( + std::initializer_list diffs) { + this->chain_diffs.assign(diffs.begin(), diffs.end()); + return *this; +} + +} // namespace webrtc + +#endif // API_TRANSPORT_RTP_DEPENDENCY_DESCRIPTOR_H_ diff --git a/api/transport/rtp/rtp_source.h b/api/transport/rtp/rtp_source.h new file mode 100644 index 0000000..8c543ca --- /dev/null +++ b/api/transport/rtp/rtp_source.h @@ -0,0 +1,109 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TRANSPORT_RTP_RTP_SOURCE_H_ +#define API_TRANSPORT_RTP_RTP_SOURCE_H_ + +#include + +#include "absl/types/optional.h" +#include "api/rtp_headers.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +enum class RtpSourceType { + SSRC, + CSRC, +}; + +class RtpSource { + public: + struct Extensions { + absl::optional audio_level; + absl::optional absolute_capture_time; + }; + + RtpSource() = delete; + + // TODO(bugs.webrtc.org/10739): Remove this constructor once all clients + // migrate to the version with absolute capture time. + RtpSource(int64_t timestamp_ms, + uint32_t source_id, + RtpSourceType source_type, + absl::optional audio_level, + uint32_t rtp_timestamp) + : RtpSource(timestamp_ms, + source_id, + source_type, + rtp_timestamp, + {audio_level, absl::nullopt}) {} + + RtpSource(int64_t timestamp_ms, + uint32_t source_id, + RtpSourceType source_type, + uint32_t rtp_timestamp, + const RtpSource::Extensions& extensions) + : timestamp_ms_(timestamp_ms), + source_id_(source_id), + source_type_(source_type), + extensions_(extensions), + rtp_timestamp_(rtp_timestamp) {} + + RtpSource(const RtpSource&) = default; + RtpSource& operator=(const RtpSource&) = default; + ~RtpSource() = default; + + int64_t timestamp_ms() const { return timestamp_ms_; } + void update_timestamp_ms(int64_t timestamp_ms) { + RTC_DCHECK_LE(timestamp_ms_, timestamp_ms); + timestamp_ms_ = timestamp_ms; + } + + // The identifier of the source can be the CSRC or the SSRC. + uint32_t source_id() const { return source_id_; } + + // The source can be either a contributing source or a synchronization source. 
+ RtpSourceType source_type() const { return source_type_; } + + absl::optional audio_level() const { + return extensions_.audio_level; + } + + void set_audio_level(const absl::optional& level) { + extensions_.audio_level = level; + } + + uint32_t rtp_timestamp() const { return rtp_timestamp_; } + + absl::optional absolute_capture_time() const { + return extensions_.absolute_capture_time; + } + + bool operator==(const RtpSource& o) const { + return timestamp_ms_ == o.timestamp_ms() && source_id_ == o.source_id() && + source_type_ == o.source_type() && + extensions_.audio_level == o.extensions_.audio_level && + extensions_.absolute_capture_time == + o.extensions_.absolute_capture_time && + rtp_timestamp_ == o.rtp_timestamp(); + } + + private: + int64_t timestamp_ms_; + uint32_t source_id_; + RtpSourceType source_type_; + RtpSource::Extensions extensions_; + uint32_t rtp_timestamp_; +}; + +} // namespace webrtc + +#endif // API_TRANSPORT_RTP_RTP_SOURCE_H_ diff --git a/api/transport/stun.cc b/api/transport/stun.cc new file mode 100644 index 0000000..b083f15 --- /dev/null +++ b/api/transport/stun.cc @@ -0,0 +1,1346 @@ +/* + * Copyright 2004 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/transport/stun.h" + +#include + +#include +#include +#include + +#include "rtc_base/byte_order.h" +#include "rtc_base/checks.h" +#include "rtc_base/crc32.h" +#include "rtc_base/logging.h" +#include "rtc_base/message_digest.h" + +using rtc::ByteBufferReader; +using rtc::ByteBufferWriter; + +namespace { + +uint32_t ReduceTransactionId(const std::string& transaction_id) { + RTC_DCHECK(transaction_id.length() == cricket::kStunTransactionIdLength || + transaction_id.length() == + cricket::kStunLegacyTransactionIdLength); + ByteBufferReader reader(transaction_id.c_str(), transaction_id.length()); + uint32_t result = 0; + uint32_t next; + while (reader.ReadUInt32(&next)) { + result ^= next; + } + return result; +} + +} // namespace + +namespace cricket { + +const char STUN_ERROR_REASON_TRY_ALTERNATE_SERVER[] = "Try Alternate Server"; +const char STUN_ERROR_REASON_BAD_REQUEST[] = "Bad Request"; +const char STUN_ERROR_REASON_UNAUTHORIZED[] = "Unauthorized"; +const char STUN_ERROR_REASON_UNKNOWN_ATTRIBUTE[] = "Unknown Attribute"; +const char STUN_ERROR_REASON_FORBIDDEN[] = "Forbidden"; +const char STUN_ERROR_REASON_STALE_CREDENTIALS[] = "Stale Credentials"; +const char STUN_ERROR_REASON_ALLOCATION_MISMATCH[] = "Allocation Mismatch"; +const char STUN_ERROR_REASON_STALE_NONCE[] = "Stale Nonce"; +const char STUN_ERROR_REASON_WRONG_CREDENTIALS[] = "Wrong Credentials"; +const char STUN_ERROR_REASON_UNSUPPORTED_PROTOCOL[] = "Unsupported Protocol"; +const char STUN_ERROR_REASON_ROLE_CONFLICT[] = "Role Conflict"; +const char STUN_ERROR_REASON_SERVER_ERROR[] = "Server Error"; + +const char TURN_MAGIC_COOKIE_VALUE[] = {'\x72', '\xC6', '\x4B', '\xC6'}; +const char EMPTY_TRANSACTION_ID[] = "0000000000000000"; +const uint32_t STUN_FINGERPRINT_XOR_VALUE = 0x5354554E; +const int SERVER_NOT_REACHABLE_ERROR = 701; + +// StunMessage + +StunMessage::StunMessage() + : type_(0), + length_(0), + transaction_id_(EMPTY_TRANSACTION_ID), + stun_magic_cookie_(kStunMagicCookie) { 
+ RTC_DCHECK(IsValidTransactionId(transaction_id_)); +} + +StunMessage::~StunMessage() = default; + +bool StunMessage::IsLegacy() const { + if (transaction_id_.size() == kStunLegacyTransactionIdLength) + return true; + RTC_DCHECK(transaction_id_.size() == kStunTransactionIdLength); + return false; +} + +bool StunMessage::SetTransactionID(const std::string& str) { + if (!IsValidTransactionId(str)) { + return false; + } + transaction_id_ = str; + reduced_transaction_id_ = ReduceTransactionId(transaction_id_); + return true; +} + +static bool DesignatedExpertRange(int attr_type) { + return (attr_type >= 0x4000 && attr_type <= 0x7FFF) || + (attr_type >= 0xC000 && attr_type <= 0xFFFF); +} + +void StunMessage::AddAttribute(std::unique_ptr attr) { + // Fail any attributes that aren't valid for this type of message, + // but allow any type for the range that in the RFC is reserved for + // the "designated experts". + if (!DesignatedExpertRange(attr->type())) { + RTC_DCHECK_EQ(attr->value_type(), GetAttributeValueType(attr->type())); + } + + attr->SetOwner(this); + size_t attr_length = attr->length(); + if (attr_length % 4 != 0) { + attr_length += (4 - (attr_length % 4)); + } + length_ += static_cast(attr_length + 4); + + attrs_.push_back(std::move(attr)); +} + +std::unique_ptr StunMessage::RemoveAttribute(int type) { + std::unique_ptr attribute; + for (auto it = attrs_.rbegin(); it != attrs_.rend(); ++it) { + if ((*it)->type() == type) { + attribute = std::move(*it); + attrs_.erase(std::next(it).base()); + break; + } + } + if (attribute) { + attribute->SetOwner(nullptr); + size_t attr_length = attribute->length(); + if (attr_length % 4 != 0) { + attr_length += (4 - (attr_length % 4)); + } + length_ -= static_cast(attr_length + 4); + } + return attribute; +} + +void StunMessage::ClearAttributes() { + for (auto it = attrs_.rbegin(); it != attrs_.rend(); ++it) { + (*it)->SetOwner(nullptr); + } + attrs_.clear(); + length_ = 0; +} + +std::vector 
StunMessage::GetNonComprehendedAttributes() const { + std::vector unknown_attributes; + for (auto& attr : attrs_) { + // "comprehension-required" range is 0x0000-0x7FFF. + if (attr->type() >= 0x0000 && attr->type() <= 0x7FFF && + GetAttributeValueType(attr->type()) == STUN_VALUE_UNKNOWN) { + unknown_attributes.push_back(attr->type()); + } + } + return unknown_attributes; +} + +const StunAddressAttribute* StunMessage::GetAddress(int type) const { + switch (type) { + case STUN_ATTR_MAPPED_ADDRESS: { + // Return XOR-MAPPED-ADDRESS when MAPPED-ADDRESS attribute is + // missing. + const StunAttribute* mapped_address = + GetAttribute(STUN_ATTR_MAPPED_ADDRESS); + if (!mapped_address) + mapped_address = GetAttribute(STUN_ATTR_XOR_MAPPED_ADDRESS); + return reinterpret_cast(mapped_address); + } + + default: + return static_cast(GetAttribute(type)); + } +} + +const StunUInt32Attribute* StunMessage::GetUInt32(int type) const { + return static_cast(GetAttribute(type)); +} + +const StunUInt64Attribute* StunMessage::GetUInt64(int type) const { + return static_cast(GetAttribute(type)); +} + +const StunByteStringAttribute* StunMessage::GetByteString(int type) const { + return static_cast(GetAttribute(type)); +} + +const StunUInt16ListAttribute* StunMessage::GetUInt16List(int type) const { + return static_cast(GetAttribute(type)); +} + +const StunErrorCodeAttribute* StunMessage::GetErrorCode() const { + return static_cast( + GetAttribute(STUN_ATTR_ERROR_CODE)); +} + +int StunMessage::GetErrorCodeValue() const { + const StunErrorCodeAttribute* error_attribute = GetErrorCode(); + return error_attribute ? 
error_attribute->code() : STUN_ERROR_GLOBAL_FAILURE; +} + +const StunUInt16ListAttribute* StunMessage::GetUnknownAttributes() const { + return static_cast( + GetAttribute(STUN_ATTR_UNKNOWN_ATTRIBUTES)); +} + +bool StunMessage::ValidateMessageIntegrity(const char* data, + size_t size, + const std::string& password) { + return ValidateMessageIntegrityOfType(STUN_ATTR_MESSAGE_INTEGRITY, + kStunMessageIntegritySize, data, size, + password); +} + +bool StunMessage::ValidateMessageIntegrity32(const char* data, + size_t size, + const std::string& password) { + return ValidateMessageIntegrityOfType(STUN_ATTR_GOOG_MESSAGE_INTEGRITY_32, + kStunMessageIntegrity32Size, data, size, + password); +} + +// Verifies a STUN message has a valid MESSAGE-INTEGRITY attribute, using the +// procedure outlined in RFC 5389, section 15.4. +bool StunMessage::ValidateMessageIntegrityOfType(int mi_attr_type, + size_t mi_attr_size, + const char* data, + size_t size, + const std::string& password) { + RTC_DCHECK(mi_attr_size <= kStunMessageIntegritySize); + + // Verifying the size of the message. + if ((size % 4) != 0 || size < kStunHeaderSize) { + return false; + } + + // Getting the message length from the STUN header. + uint16_t msg_length = rtc::GetBE16(&data[2]); + if (size != (msg_length + kStunHeaderSize)) { + return false; + } + + // Finding Message Integrity attribute in stun message. + size_t current_pos = kStunHeaderSize; + bool has_message_integrity_attr = false; + while (current_pos + 4 <= size) { + uint16_t attr_type, attr_length; + // Getting attribute type and length. + attr_type = rtc::GetBE16(&data[current_pos]); + attr_length = rtc::GetBE16(&data[current_pos + sizeof(attr_type)]); + + // If M-I, sanity check it, and break out. 
+ if (attr_type == mi_attr_type) { + if (attr_length != mi_attr_size || + current_pos + sizeof(attr_type) + sizeof(attr_length) + attr_length > + size) { + return false; + } + has_message_integrity_attr = true; + break; + } + + // Otherwise, skip to the next attribute. + current_pos += sizeof(attr_type) + sizeof(attr_length) + attr_length; + if ((attr_length % 4) != 0) { + current_pos += (4 - (attr_length % 4)); + } + } + + if (!has_message_integrity_attr) { + return false; + } + + // Getting length of the message to calculate Message Integrity. + size_t mi_pos = current_pos; + std::unique_ptr temp_data(new char[current_pos]); + memcpy(temp_data.get(), data, current_pos); + if (size > mi_pos + kStunAttributeHeaderSize + mi_attr_size) { + // Stun message has other attributes after message integrity. + // Adjust the length parameter in stun message to calculate HMAC. + size_t extra_offset = + size - (mi_pos + kStunAttributeHeaderSize + mi_attr_size); + size_t new_adjusted_len = size - extra_offset - kStunHeaderSize; + + // Writing new length of the STUN message @ Message Length in temp buffer. + // 0 1 2 3 + // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + // |0 0| STUN Message Type | Message Length | + // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + rtc::SetBE16(temp_data.get() + 2, static_cast(new_adjusted_len)); + } + + char hmac[kStunMessageIntegritySize]; + size_t ret = + rtc::ComputeHmac(rtc::DIGEST_SHA_1, password.c_str(), password.size(), + temp_data.get(), mi_pos, hmac, sizeof(hmac)); + RTC_DCHECK(ret == sizeof(hmac)); + if (ret != sizeof(hmac)) { + return false; + } + + // Comparing the calculated HMAC with the one present in the message. 
+ return memcmp(data + current_pos + kStunAttributeHeaderSize, hmac, + mi_attr_size) == 0; +} + +bool StunMessage::AddMessageIntegrity(const std::string& password) { + return AddMessageIntegrityOfType(STUN_ATTR_MESSAGE_INTEGRITY, + kStunMessageIntegritySize, password.c_str(), + password.size()); +} + +bool StunMessage::AddMessageIntegrity(const char* key, size_t keylen) { + return AddMessageIntegrityOfType(STUN_ATTR_MESSAGE_INTEGRITY, + kStunMessageIntegritySize, key, keylen); +} + +bool StunMessage::AddMessageIntegrity32(absl::string_view password) { + return AddMessageIntegrityOfType(STUN_ATTR_GOOG_MESSAGE_INTEGRITY_32, + kStunMessageIntegrity32Size, password.data(), + password.length()); +} + +bool StunMessage::AddMessageIntegrityOfType(int attr_type, + size_t attr_size, + const char* key, + size_t keylen) { + // Add the attribute with a dummy value. Since this is a known attribute, it + // can't fail. + RTC_DCHECK(attr_size <= kStunMessageIntegritySize); + auto msg_integrity_attr_ptr = std::make_unique( + attr_type, std::string(attr_size, '0')); + auto* msg_integrity_attr = msg_integrity_attr_ptr.get(); + AddAttribute(std::move(msg_integrity_attr_ptr)); + + // Calculate the HMAC for the message. + ByteBufferWriter buf; + if (!Write(&buf)) + return false; + + int msg_len_for_hmac = static_cast( + buf.Length() - kStunAttributeHeaderSize - msg_integrity_attr->length()); + char hmac[kStunMessageIntegritySize]; + size_t ret = rtc::ComputeHmac(rtc::DIGEST_SHA_1, key, keylen, buf.Data(), + msg_len_for_hmac, hmac, sizeof(hmac)); + RTC_DCHECK(ret == sizeof(hmac)); + if (ret != sizeof(hmac)) { + RTC_LOG(LS_ERROR) << "HMAC computation failed. Message-Integrity " + "has dummy value."; + return false; + } + + // Insert correct HMAC into the attribute. 
+ msg_integrity_attr->CopyBytes(hmac, attr_size); + return true; +} + +// Verifies a message is in fact a STUN message, by performing the checks +// outlined in RFC 5389, section 7.3, including the FINGERPRINT check detailed +// in section 15.5. +bool StunMessage::ValidateFingerprint(const char* data, size_t size) { + // Check the message length. + size_t fingerprint_attr_size = + kStunAttributeHeaderSize + StunUInt32Attribute::SIZE; + if (size % 4 != 0 || size < kStunHeaderSize + fingerprint_attr_size) + return false; + + // Skip the rest if the magic cookie isn't present. + const char* magic_cookie = + data + kStunTransactionIdOffset - kStunMagicCookieLength; + if (rtc::GetBE32(magic_cookie) != kStunMagicCookie) + return false; + + // Check the fingerprint type and length. + const char* fingerprint_attr_data = data + size - fingerprint_attr_size; + if (rtc::GetBE16(fingerprint_attr_data) != STUN_ATTR_FINGERPRINT || + rtc::GetBE16(fingerprint_attr_data + sizeof(uint16_t)) != + StunUInt32Attribute::SIZE) + return false; + + // Check the fingerprint value. + uint32_t fingerprint = + rtc::GetBE32(fingerprint_attr_data + kStunAttributeHeaderSize); + return ((fingerprint ^ STUN_FINGERPRINT_XOR_VALUE) == + rtc::ComputeCrc32(data, size - fingerprint_attr_size)); +} + +bool StunMessage::IsStunMethod(rtc::ArrayView methods, + const char* data, + size_t size) { + // Check the message length. + if (size % 4 != 0 || size < kStunHeaderSize) + return false; + + // Skip the rest if the magic cookie isn't present. + const char* magic_cookie = + data + kStunTransactionIdOffset - kStunMagicCookieLength; + if (rtc::GetBE32(magic_cookie) != kStunMagicCookie) + return false; + + int method = rtc::GetBE16(data); + for (int m : methods) { + if (m == method) { + return true; + } + } + return false; +} + +bool StunMessage::AddFingerprint() { + // Add the attribute with a dummy value. Since this is a known attribute, + // it can't fail. 
+ auto fingerprint_attr_ptr = + std::make_unique(STUN_ATTR_FINGERPRINT, 0); + auto* fingerprint_attr = fingerprint_attr_ptr.get(); + AddAttribute(std::move(fingerprint_attr_ptr)); + + // Calculate the CRC-32 for the message and insert it. + ByteBufferWriter buf; + if (!Write(&buf)) + return false; + + int msg_len_for_crc32 = static_cast( + buf.Length() - kStunAttributeHeaderSize - fingerprint_attr->length()); + uint32_t c = rtc::ComputeCrc32(buf.Data(), msg_len_for_crc32); + + // Insert the correct CRC-32, XORed with a constant, into the attribute. + fingerprint_attr->SetValue(c ^ STUN_FINGERPRINT_XOR_VALUE); + return true; +} + +bool StunMessage::Read(ByteBufferReader* buf) { + if (!buf->ReadUInt16(&type_)) { + return false; + } + + if (type_ & 0x8000) { + // RTP and RTCP set the MSB of first byte, since first two bits are version, + // and version is always 2 (10). If set, this is not a STUN packet. + return false; + } + + if (!buf->ReadUInt16(&length_)) { + return false; + } + + std::string magic_cookie; + if (!buf->ReadString(&magic_cookie, kStunMagicCookieLength)) { + return false; + } + + std::string transaction_id; + if (!buf->ReadString(&transaction_id, kStunTransactionIdLength)) { + return false; + } + + uint32_t magic_cookie_int; + static_assert(sizeof(magic_cookie_int) == kStunMagicCookieLength, + "Integer size mismatch: magic_cookie_int and kStunMagicCookie"); + std::memcpy(&magic_cookie_int, magic_cookie.data(), sizeof(magic_cookie_int)); + if (rtc::NetworkToHost32(magic_cookie_int) != kStunMagicCookie) { + // If magic cookie is invalid it means that the peer implements + // RFC3489 instead of RFC5389. 
+ transaction_id.insert(0, magic_cookie); + } + RTC_DCHECK(IsValidTransactionId(transaction_id)); + transaction_id_ = transaction_id; + reduced_transaction_id_ = ReduceTransactionId(transaction_id_); + + if (length_ != buf->Length()) { + return false; + } + + attrs_.resize(0); + + size_t rest = buf->Length() - length_; + while (buf->Length() > rest) { + uint16_t attr_type, attr_length; + if (!buf->ReadUInt16(&attr_type)) + return false; + if (!buf->ReadUInt16(&attr_length)) + return false; + + std::unique_ptr attr( + CreateAttribute(attr_type, attr_length)); + if (!attr) { + // Skip any unknown or malformed attributes. + if ((attr_length % 4) != 0) { + attr_length += (4 - (attr_length % 4)); + } + if (!buf->Consume(attr_length)) { + return false; + } + } else { + if (!attr->Read(buf)) { + return false; + } + attrs_.push_back(std::move(attr)); + } + } + + RTC_DCHECK(buf->Length() == rest); + return true; +} + +bool StunMessage::Write(ByteBufferWriter* buf) const { + buf->WriteUInt16(type_); + buf->WriteUInt16(length_); + if (!IsLegacy()) + buf->WriteUInt32(stun_magic_cookie_); + buf->WriteString(transaction_id_); + + for (const auto& attr : attrs_) { + buf->WriteUInt16(attr->type()); + buf->WriteUInt16(static_cast(attr->length())); + if (!attr->Write(buf)) { + return false; + } + } + + return true; +} + +StunMessage* StunMessage::CreateNew() const { + return new StunMessage(); +} + +void StunMessage::SetStunMagicCookie(uint32_t val) { + stun_magic_cookie_ = val; +} + +StunAttributeValueType StunMessage::GetAttributeValueType(int type) const { + switch (type) { + case STUN_ATTR_MAPPED_ADDRESS: + return STUN_VALUE_ADDRESS; + case STUN_ATTR_USERNAME: + return STUN_VALUE_BYTE_STRING; + case STUN_ATTR_MESSAGE_INTEGRITY: + return STUN_VALUE_BYTE_STRING; + case STUN_ATTR_ERROR_CODE: + return STUN_VALUE_ERROR_CODE; + case STUN_ATTR_UNKNOWN_ATTRIBUTES: + return STUN_VALUE_UINT16_LIST; + case STUN_ATTR_REALM: + return STUN_VALUE_BYTE_STRING; + case STUN_ATTR_NONCE: + return 
STUN_VALUE_BYTE_STRING; + case STUN_ATTR_XOR_MAPPED_ADDRESS: + return STUN_VALUE_XOR_ADDRESS; + case STUN_ATTR_SOFTWARE: + return STUN_VALUE_BYTE_STRING; + case STUN_ATTR_ALTERNATE_SERVER: + return STUN_VALUE_ADDRESS; + case STUN_ATTR_FINGERPRINT: + return STUN_VALUE_UINT32; + case STUN_ATTR_ORIGIN: + return STUN_VALUE_BYTE_STRING; + case STUN_ATTR_RETRANSMIT_COUNT: + return STUN_VALUE_UINT32; + case STUN_ATTR_LAST_ICE_CHECK_RECEIVED: + return STUN_VALUE_BYTE_STRING; + case STUN_ATTR_GOOG_MISC_INFO: + return STUN_VALUE_UINT16_LIST; + default: + return STUN_VALUE_UNKNOWN; + } +} + +StunAttribute* StunMessage::CreateAttribute(int type, size_t length) /*const*/ { + StunAttributeValueType value_type = GetAttributeValueType(type); + if (value_type != STUN_VALUE_UNKNOWN) { + return StunAttribute::Create(value_type, type, + static_cast(length), this); + } else if (DesignatedExpertRange(type)) { + // Read unknown attributes as STUN_VALUE_BYTE_STRING + return StunAttribute::Create(STUN_VALUE_BYTE_STRING, type, + static_cast(length), this); + } else { + return NULL; + } +} + +const StunAttribute* StunMessage::GetAttribute(int type) const { + for (const auto& attr : attrs_) { + if (attr->type() == type) { + return attr.get(); + } + } + return NULL; +} + +bool StunMessage::IsValidTransactionId(const std::string& transaction_id) { + return transaction_id.size() == kStunTransactionIdLength || + transaction_id.size() == kStunLegacyTransactionIdLength; +} + +bool StunMessage::EqualAttributes( + const StunMessage* other, + std::function attribute_type_mask) const { + RTC_DCHECK(other != nullptr); + rtc::ByteBufferWriter tmp_buffer_ptr1; + rtc::ByteBufferWriter tmp_buffer_ptr2; + for (const auto& attr : attrs_) { + if (attribute_type_mask(attr->type())) { + const StunAttribute* other_attr = other->GetAttribute(attr->type()); + if (other_attr == nullptr) { + return false; + } + tmp_buffer_ptr1.Clear(); + tmp_buffer_ptr2.Clear(); + attr->Write(&tmp_buffer_ptr1); + 
other_attr->Write(&tmp_buffer_ptr2); + if (tmp_buffer_ptr1.Length() != tmp_buffer_ptr2.Length()) { + return false; + } + if (memcmp(tmp_buffer_ptr1.Data(), tmp_buffer_ptr2.Data(), + tmp_buffer_ptr1.Length()) != 0) { + return false; + } + } + } + + for (const auto& attr : other->attrs_) { + if (attribute_type_mask(attr->type())) { + const StunAttribute* own_attr = GetAttribute(attr->type()); + if (own_attr == nullptr) { + return false; + } + // we have already compared all values... + } + } + return true; +} + +// StunAttribute + +StunAttribute::StunAttribute(uint16_t type, uint16_t length) + : type_(type), length_(length) {} + +void StunAttribute::ConsumePadding(ByteBufferReader* buf) const { + int remainder = length_ % 4; + if (remainder > 0) { + buf->Consume(4 - remainder); + } +} + +void StunAttribute::WritePadding(ByteBufferWriter* buf) const { + int remainder = length_ % 4; + if (remainder > 0) { + char zeroes[4] = {0}; + buf->WriteBytes(zeroes, 4 - remainder); + } +} + +StunAttribute* StunAttribute::Create(StunAttributeValueType value_type, + uint16_t type, + uint16_t length, + StunMessage* owner) { + switch (value_type) { + case STUN_VALUE_ADDRESS: + return new StunAddressAttribute(type, length); + case STUN_VALUE_XOR_ADDRESS: + return new StunXorAddressAttribute(type, length, owner); + case STUN_VALUE_UINT32: + return new StunUInt32Attribute(type); + case STUN_VALUE_UINT64: + return new StunUInt64Attribute(type); + case STUN_VALUE_BYTE_STRING: + return new StunByteStringAttribute(type, length); + case STUN_VALUE_ERROR_CODE: + return new StunErrorCodeAttribute(type, length); + case STUN_VALUE_UINT16_LIST: + return new StunUInt16ListAttribute(type, length); + default: + return NULL; + } +} + +std::unique_ptr StunAttribute::CreateAddress( + uint16_t type) { + return std::make_unique(type, 0); +} + +std::unique_ptr StunAttribute::CreateXorAddress( + uint16_t type) { + return std::make_unique(type, 0, nullptr); +} + +std::unique_ptr StunAttribute::CreateUInt64( 
+ uint16_t type) { + return std::make_unique(type); +} + +std::unique_ptr StunAttribute::CreateUInt32( + uint16_t type) { + return std::make_unique(type); +} + +std::unique_ptr StunAttribute::CreateByteString( + uint16_t type) { + return std::make_unique(type, 0); +} + +std::unique_ptr StunAttribute::CreateErrorCode() { + return std::make_unique( + STUN_ATTR_ERROR_CODE, StunErrorCodeAttribute::MIN_SIZE); +} + +std::unique_ptr +StunAttribute::CreateUInt16ListAttribute(uint16_t type) { + return std::make_unique(type, 0); +} + +std::unique_ptr +StunAttribute::CreateUnknownAttributes() { + return std::make_unique(STUN_ATTR_UNKNOWN_ATTRIBUTES, + 0); +} + +StunAddressAttribute::StunAddressAttribute(uint16_t type, + const rtc::SocketAddress& addr) + : StunAttribute(type, 0) { + SetAddress(addr); +} + +StunAddressAttribute::StunAddressAttribute(uint16_t type, uint16_t length) + : StunAttribute(type, length) {} + +StunAttributeValueType StunAddressAttribute::value_type() const { + return STUN_VALUE_ADDRESS; +} + +bool StunAddressAttribute::Read(ByteBufferReader* buf) { + uint8_t dummy; + if (!buf->ReadUInt8(&dummy)) + return false; + + uint8_t stun_family; + if (!buf->ReadUInt8(&stun_family)) { + return false; + } + uint16_t port; + if (!buf->ReadUInt16(&port)) + return false; + if (stun_family == STUN_ADDRESS_IPV4) { + in_addr v4addr; + if (length() != SIZE_IP4) { + return false; + } + if (!buf->ReadBytes(reinterpret_cast(&v4addr), sizeof(v4addr))) { + return false; + } + rtc::IPAddress ipaddr(v4addr); + SetAddress(rtc::SocketAddress(ipaddr, port)); + } else if (stun_family == STUN_ADDRESS_IPV6) { + in6_addr v6addr; + if (length() != SIZE_IP6) { + return false; + } + if (!buf->ReadBytes(reinterpret_cast(&v6addr), sizeof(v6addr))) { + return false; + } + rtc::IPAddress ipaddr(v6addr); + SetAddress(rtc::SocketAddress(ipaddr, port)); + } else { + return false; + } + return true; +} + +bool StunAddressAttribute::Write(ByteBufferWriter* buf) const { + StunAddressFamily 
address_family = family(); + if (address_family == STUN_ADDRESS_UNDEF) { + RTC_LOG(LS_ERROR) << "Error writing address attribute: unknown family."; + return false; + } + buf->WriteUInt8(0); + buf->WriteUInt8(address_family); + buf->WriteUInt16(address_.port()); + switch (address_.family()) { + case AF_INET: { + in_addr v4addr = address_.ipaddr().ipv4_address(); + buf->WriteBytes(reinterpret_cast(&v4addr), sizeof(v4addr)); + break; + } + case AF_INET6: { + in6_addr v6addr = address_.ipaddr().ipv6_address(); + buf->WriteBytes(reinterpret_cast(&v6addr), sizeof(v6addr)); + break; + } + } + return true; +} + +StunXorAddressAttribute::StunXorAddressAttribute(uint16_t type, + const rtc::SocketAddress& addr) + : StunAddressAttribute(type, addr), owner_(NULL) {} + +StunXorAddressAttribute::StunXorAddressAttribute(uint16_t type, + uint16_t length, + StunMessage* owner) + : StunAddressAttribute(type, length), owner_(owner) {} + +StunAttributeValueType StunXorAddressAttribute::value_type() const { + return STUN_VALUE_XOR_ADDRESS; +} + +void StunXorAddressAttribute::SetOwner(StunMessage* owner) { + owner_ = owner; +} + +rtc::IPAddress StunXorAddressAttribute::GetXoredIP() const { + if (owner_) { + rtc::IPAddress ip = ipaddr(); + switch (ip.family()) { + case AF_INET: { + in_addr v4addr = ip.ipv4_address(); + v4addr.s_addr = + (v4addr.s_addr ^ rtc::HostToNetwork32(kStunMagicCookie)); + return rtc::IPAddress(v4addr); + } + case AF_INET6: { + in6_addr v6addr = ip.ipv6_address(); + const std::string& transaction_id = owner_->transaction_id(); + if (transaction_id.length() == kStunTransactionIdLength) { + uint32_t transactionid_as_ints[3]; + memcpy(&transactionid_as_ints[0], transaction_id.c_str(), + transaction_id.length()); + uint32_t* ip_as_ints = reinterpret_cast(&v6addr.s6_addr); + // Transaction ID is in network byte order, but magic cookie + // is stored in host byte order. 
+ ip_as_ints[0] = + (ip_as_ints[0] ^ rtc::HostToNetwork32(kStunMagicCookie)); + ip_as_ints[1] = (ip_as_ints[1] ^ transactionid_as_ints[0]); + ip_as_ints[2] = (ip_as_ints[2] ^ transactionid_as_ints[1]); + ip_as_ints[3] = (ip_as_ints[3] ^ transactionid_as_ints[2]); + return rtc::IPAddress(v6addr); + } + break; + } + } + } + // Invalid ip family or transaction ID, or missing owner. + // Return an AF_UNSPEC address. + return rtc::IPAddress(); +} + +bool StunXorAddressAttribute::Read(ByteBufferReader* buf) { + if (!StunAddressAttribute::Read(buf)) + return false; + uint16_t xoredport = port() ^ (kStunMagicCookie >> 16); + rtc::IPAddress xored_ip = GetXoredIP(); + SetAddress(rtc::SocketAddress(xored_ip, xoredport)); + return true; +} + +bool StunXorAddressAttribute::Write(ByteBufferWriter* buf) const { + StunAddressFamily address_family = family(); + if (address_family == STUN_ADDRESS_UNDEF) { + RTC_LOG(LS_ERROR) << "Error writing xor-address attribute: unknown family."; + return false; + } + rtc::IPAddress xored_ip = GetXoredIP(); + if (xored_ip.family() == AF_UNSPEC) { + return false; + } + buf->WriteUInt8(0); + buf->WriteUInt8(family()); + buf->WriteUInt16(port() ^ (kStunMagicCookie >> 16)); + switch (xored_ip.family()) { + case AF_INET: { + in_addr v4addr = xored_ip.ipv4_address(); + buf->WriteBytes(reinterpret_cast(&v4addr), sizeof(v4addr)); + break; + } + case AF_INET6: { + in6_addr v6addr = xored_ip.ipv6_address(); + buf->WriteBytes(reinterpret_cast(&v6addr), sizeof(v6addr)); + break; + } + } + return true; +} + +StunUInt32Attribute::StunUInt32Attribute(uint16_t type, uint32_t value) + : StunAttribute(type, SIZE), bits_(value) {} + +StunUInt32Attribute::StunUInt32Attribute(uint16_t type) + : StunAttribute(type, SIZE), bits_(0) {} + +StunAttributeValueType StunUInt32Attribute::value_type() const { + return STUN_VALUE_UINT32; +} + +bool StunUInt32Attribute::GetBit(size_t index) const { + RTC_DCHECK(index < 32); + return static_cast((bits_ >> index) & 0x1); +} + 
+void StunUInt32Attribute::SetBit(size_t index, bool value) { + RTC_DCHECK(index < 32); + bits_ &= ~(1 << index); + bits_ |= value ? (1 << index) : 0; +} + +bool StunUInt32Attribute::Read(ByteBufferReader* buf) { + if (length() != SIZE || !buf->ReadUInt32(&bits_)) + return false; + return true; +} + +bool StunUInt32Attribute::Write(ByteBufferWriter* buf) const { + buf->WriteUInt32(bits_); + return true; +} + +StunUInt64Attribute::StunUInt64Attribute(uint16_t type, uint64_t value) + : StunAttribute(type, SIZE), bits_(value) {} + +StunUInt64Attribute::StunUInt64Attribute(uint16_t type) + : StunAttribute(type, SIZE), bits_(0) {} + +StunAttributeValueType StunUInt64Attribute::value_type() const { + return STUN_VALUE_UINT64; +} + +bool StunUInt64Attribute::Read(ByteBufferReader* buf) { + if (length() != SIZE || !buf->ReadUInt64(&bits_)) + return false; + return true; +} + +bool StunUInt64Attribute::Write(ByteBufferWriter* buf) const { + buf->WriteUInt64(bits_); + return true; +} + +StunByteStringAttribute::StunByteStringAttribute(uint16_t type) + : StunAttribute(type, 0), bytes_(NULL) {} + +StunByteStringAttribute::StunByteStringAttribute(uint16_t type, + const std::string& str) + : StunAttribute(type, 0), bytes_(NULL) { + CopyBytes(str.c_str(), str.size()); +} + +StunByteStringAttribute::StunByteStringAttribute(uint16_t type, + const void* bytes, + size_t length) + : StunAttribute(type, 0), bytes_(NULL) { + CopyBytes(bytes, length); +} + +StunByteStringAttribute::StunByteStringAttribute(uint16_t type, uint16_t length) + : StunAttribute(type, length), bytes_(NULL) {} + +StunByteStringAttribute::~StunByteStringAttribute() { + delete[] bytes_; +} + +StunAttributeValueType StunByteStringAttribute::value_type() const { + return STUN_VALUE_BYTE_STRING; +} + +void StunByteStringAttribute::CopyBytes(const char* bytes) { + CopyBytes(bytes, strlen(bytes)); +} + +void StunByteStringAttribute::CopyBytes(const void* bytes, size_t length) { + char* new_bytes = new char[length]; + 
memcpy(new_bytes, bytes, length); + SetBytes(new_bytes, length); +} + +uint8_t StunByteStringAttribute::GetByte(size_t index) const { + RTC_DCHECK(bytes_ != NULL); + RTC_DCHECK(index < length()); + return static_cast(bytes_[index]); +} + +void StunByteStringAttribute::SetByte(size_t index, uint8_t value) { + RTC_DCHECK(bytes_ != NULL); + RTC_DCHECK(index < length()); + bytes_[index] = value; +} + +bool StunByteStringAttribute::Read(ByteBufferReader* buf) { + bytes_ = new char[length()]; + if (!buf->ReadBytes(bytes_, length())) { + return false; + } + + ConsumePadding(buf); + return true; +} + +bool StunByteStringAttribute::Write(ByteBufferWriter* buf) const { + buf->WriteBytes(bytes_, length()); + WritePadding(buf); + return true; +} + +void StunByteStringAttribute::SetBytes(char* bytes, size_t length) { + delete[] bytes_; + bytes_ = bytes; + SetLength(static_cast(length)); +} + +const uint16_t StunErrorCodeAttribute::MIN_SIZE = 4; + +StunErrorCodeAttribute::StunErrorCodeAttribute(uint16_t type, + int code, + const std::string& reason) + : StunAttribute(type, 0) { + SetCode(code); + SetReason(reason); +} + +StunErrorCodeAttribute::StunErrorCodeAttribute(uint16_t type, uint16_t length) + : StunAttribute(type, length), class_(0), number_(0) {} + +StunErrorCodeAttribute::~StunErrorCodeAttribute() {} + +StunAttributeValueType StunErrorCodeAttribute::value_type() const { + return STUN_VALUE_ERROR_CODE; +} + +int StunErrorCodeAttribute::code() const { + return class_ * 100 + number_; +} + +void StunErrorCodeAttribute::SetCode(int code) { + class_ = static_cast(code / 100); + number_ = static_cast(code % 100); +} + +void StunErrorCodeAttribute::SetReason(const std::string& reason) { + SetLength(MIN_SIZE + static_cast(reason.size())); + reason_ = reason; +} + +bool StunErrorCodeAttribute::Read(ByteBufferReader* buf) { + uint32_t val; + if (length() < MIN_SIZE || !buf->ReadUInt32(&val)) + return false; + + if ((val >> 11) != 0) + RTC_LOG(LS_ERROR) << "error-code bits not 
zero"; + + class_ = ((val >> 8) & 0x7); + number_ = (val & 0xff); + + if (!buf->ReadString(&reason_, length() - 4)) + return false; + + ConsumePadding(buf); + return true; +} + +bool StunErrorCodeAttribute::Write(ByteBufferWriter* buf) const { + buf->WriteUInt32(class_ << 8 | number_); + buf->WriteString(reason_); + WritePadding(buf); + return true; +} + +StunUInt16ListAttribute::StunUInt16ListAttribute(uint16_t type, uint16_t length) + : StunAttribute(type, length) { + attr_types_ = new std::vector(); +} + +StunUInt16ListAttribute::~StunUInt16ListAttribute() { + delete attr_types_; +} + +StunAttributeValueType StunUInt16ListAttribute::value_type() const { + return STUN_VALUE_UINT16_LIST; +} + +size_t StunUInt16ListAttribute::Size() const { + return attr_types_->size(); +} + +uint16_t StunUInt16ListAttribute::GetType(int index) const { + return (*attr_types_)[index]; +} + +void StunUInt16ListAttribute::SetType(int index, uint16_t value) { + (*attr_types_)[index] = value; +} + +void StunUInt16ListAttribute::AddType(uint16_t value) { + attr_types_->push_back(value); + SetLength(static_cast(attr_types_->size() * 2)); +} + +void StunUInt16ListAttribute::AddTypeAtIndex(uint16_t index, uint16_t value) { + if (attr_types_->size() < static_cast(index + 1)) { + attr_types_->resize(index + 1); + } + (*attr_types_)[index] = value; + SetLength(static_cast(attr_types_->size() * 2)); +} + +bool StunUInt16ListAttribute::Read(ByteBufferReader* buf) { + if (length() % 2) { + return false; + } + + for (size_t i = 0; i < length() / 2; i++) { + uint16_t attr; + if (!buf->ReadUInt16(&attr)) + return false; + attr_types_->push_back(attr); + } + // Padding of these attributes is done in RFC 5389 style. This is + // slightly different from RFC3489, but it shouldn't be important. + // RFC3489 pads out to a 32 bit boundary by duplicating one of the + // entries in the list (not necessarily the last one - it's unspecified). + // RFC5389 pads on the end, and the bytes are always ignored. 
+ ConsumePadding(buf); + return true; +} + +bool StunUInt16ListAttribute::Write(ByteBufferWriter* buf) const { + for (size_t i = 0; i < attr_types_->size(); ++i) { + buf->WriteUInt16((*attr_types_)[i]); + } + WritePadding(buf); + return true; +} + +std::string StunMethodToString(int msg_type) { + switch (msg_type) { + case STUN_BINDING_REQUEST: + return "STUN BINDING request"; + case STUN_BINDING_INDICATION: + return "STUN BINDING indication"; + case STUN_BINDING_RESPONSE: + return "STUN BINDING response"; + case STUN_BINDING_ERROR_RESPONSE: + return "STUN BINDING error response"; + case GOOG_PING_REQUEST: + return "GOOG PING request"; + case GOOG_PING_RESPONSE: + return "GOOG PING response"; + case GOOG_PING_ERROR_RESPONSE: + return "GOOG PING error response"; + case STUN_ALLOCATE_REQUEST: + return "TURN ALLOCATE request"; + case STUN_ALLOCATE_RESPONSE: + return "TURN ALLOCATE response"; + case STUN_ALLOCATE_ERROR_RESPONSE: + return "TURN ALLOCATE error response"; + case TURN_REFRESH_REQUEST: + return "TURN REFRESH request"; + case TURN_REFRESH_RESPONSE: + return "TURN REFRESH response"; + case TURN_REFRESH_ERROR_RESPONSE: + return "TURN REFRESH error response"; + case TURN_SEND_INDICATION: + return "TURN SEND INDICATION"; + case TURN_DATA_INDICATION: + return "TURN DATA INDICATION"; + case TURN_CREATE_PERMISSION_REQUEST: + return "TURN CREATE PERMISSION request"; + case TURN_CREATE_PERMISSION_RESPONSE: + return "TURN CREATE PERMISSION response"; + case TURN_CREATE_PERMISSION_ERROR_RESPONSE: + return "TURN CREATE PERMISSION error response"; + case TURN_CHANNEL_BIND_REQUEST: + return "TURN CHANNEL BIND request"; + case TURN_CHANNEL_BIND_RESPONSE: + return "TURN CHANNEL BIND response"; + case TURN_CHANNEL_BIND_ERROR_RESPONSE: + return "TURN CHANNEL BIND error response"; + default: + return "UNKNOWN<" + std::to_string(msg_type) + ">"; + } +} + +int GetStunSuccessResponseType(int req_type) { + return IsStunRequestType(req_type) ? 
(req_type | 0x100) : -1; +} + +int GetStunErrorResponseType(int req_type) { + return IsStunRequestType(req_type) ? (req_type | 0x110) : -1; +} + +bool IsStunRequestType(int msg_type) { + return ((msg_type & kStunTypeMask) == 0x000); +} + +bool IsStunIndicationType(int msg_type) { + return ((msg_type & kStunTypeMask) == 0x010); +} + +bool IsStunSuccessResponseType(int msg_type) { + return ((msg_type & kStunTypeMask) == 0x100); +} + +bool IsStunErrorResponseType(int msg_type) { + return ((msg_type & kStunTypeMask) == 0x110); +} + +bool ComputeStunCredentialHash(const std::string& username, + const std::string& realm, + const std::string& password, + std::string* hash) { + // http://tools.ietf.org/html/rfc5389#section-15.4 + // long-term credentials will be calculated using the key and key is + // key = MD5(username ":" realm ":" SASLprep(password)) + std::string input = username; + input += ':'; + input += realm; + input += ':'; + input += password; + + char digest[rtc::MessageDigest::kMaxSize]; + size_t size = rtc::ComputeDigest(rtc::DIGEST_MD5, input.c_str(), input.size(), + digest, sizeof(digest)); + if (size == 0) { + return false; + } + + *hash = std::string(digest, size); + return true; +} + +std::unique_ptr CopyStunAttribute( + const StunAttribute& attribute, + rtc::ByteBufferWriter* tmp_buffer_ptr) { + ByteBufferWriter tmpBuffer; + if (tmp_buffer_ptr == nullptr) { + tmp_buffer_ptr = &tmpBuffer; + } + + std::unique_ptr copy(StunAttribute::Create( + attribute.value_type(), attribute.type(), + static_cast(attribute.length()), nullptr)); + + if (!copy) { + return nullptr; + } + tmp_buffer_ptr->Clear(); + if (!attribute.Write(tmp_buffer_ptr)) { + return nullptr; + } + rtc::ByteBufferReader reader(*tmp_buffer_ptr); + if (!copy->Read(&reader)) { + return nullptr; + } + + return copy; +} + +StunAttributeValueType RelayMessage::GetAttributeValueType(int type) const { + switch (type) { + case STUN_ATTR_LIFETIME: + return STUN_VALUE_UINT32; + case 
STUN_ATTR_MAGIC_COOKIE: + return STUN_VALUE_BYTE_STRING; + case STUN_ATTR_BANDWIDTH: + return STUN_VALUE_UINT32; + case STUN_ATTR_DESTINATION_ADDRESS: + return STUN_VALUE_ADDRESS; + case STUN_ATTR_SOURCE_ADDRESS2: + return STUN_VALUE_ADDRESS; + case STUN_ATTR_DATA: + return STUN_VALUE_BYTE_STRING; + case STUN_ATTR_OPTIONS: + return STUN_VALUE_UINT32; + default: + return StunMessage::GetAttributeValueType(type); + } +} + +StunMessage* RelayMessage::CreateNew() const { + return new RelayMessage(); +} + +StunAttributeValueType TurnMessage::GetAttributeValueType(int type) const { + switch (type) { + case STUN_ATTR_CHANNEL_NUMBER: + return STUN_VALUE_UINT32; + case STUN_ATTR_TURN_LIFETIME: + return STUN_VALUE_UINT32; + case STUN_ATTR_XOR_PEER_ADDRESS: + return STUN_VALUE_XOR_ADDRESS; + case STUN_ATTR_DATA: + return STUN_VALUE_BYTE_STRING; + case STUN_ATTR_XOR_RELAYED_ADDRESS: + return STUN_VALUE_XOR_ADDRESS; + case STUN_ATTR_EVEN_PORT: + return STUN_VALUE_BYTE_STRING; + case STUN_ATTR_REQUESTED_TRANSPORT: + return STUN_VALUE_UINT32; + case STUN_ATTR_DONT_FRAGMENT: + return STUN_VALUE_BYTE_STRING; + case STUN_ATTR_RESERVATION_TOKEN: + return STUN_VALUE_BYTE_STRING; + default: + return StunMessage::GetAttributeValueType(type); + } +} + +StunMessage* TurnMessage::CreateNew() const { + return new TurnMessage(); +} + +StunAttributeValueType IceMessage::GetAttributeValueType(int type) const { + switch (type) { + case STUN_ATTR_PRIORITY: + case STUN_ATTR_NETWORK_INFO: + case STUN_ATTR_NOMINATION: + return STUN_VALUE_UINT32; + case STUN_ATTR_USE_CANDIDATE: + return STUN_VALUE_BYTE_STRING; + case STUN_ATTR_ICE_CONTROLLED: + return STUN_VALUE_UINT64; + case STUN_ATTR_ICE_CONTROLLING: + return STUN_VALUE_UINT64; + default: + return StunMessage::GetAttributeValueType(type); + } +} + +StunMessage* IceMessage::CreateNew() const { + return new IceMessage(); +} + +std::unique_ptr StunMessage::Clone() const { + std::unique_ptr copy(CreateNew()); + if (!copy) { + return nullptr; + } + 
rtc::ByteBufferWriter buf; + if (!Write(&buf)) { + return nullptr; + } + rtc::ByteBufferReader reader(buf); + if (!copy->Read(&reader)) { + return nullptr; + } + return copy; +} + +} // namespace cricket diff --git a/api/transport/stun.h b/api/transport/stun.h new file mode 100644 index 0000000..51ca306 --- /dev/null +++ b/api/transport/stun.h @@ -0,0 +1,708 @@ +/* + * Copyright 2004 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TRANSPORT_STUN_H_ +#define API_TRANSPORT_STUN_H_ + +// This file contains classes for dealing with the STUN protocol, as specified +// in RFC 5389, and its descendants. + +#include +#include + +#include +#include +#include + +#include "rtc_base/byte_buffer.h" +#include "rtc_base/ip_address.h" +#include "rtc_base/socket_address.h" + +namespace cricket { + +// These are the types of STUN messages defined in RFC 5389. +enum StunMessageType { + STUN_BINDING_REQUEST = 0x0001, + STUN_BINDING_INDICATION = 0x0011, + STUN_BINDING_RESPONSE = 0x0101, + STUN_BINDING_ERROR_RESPONSE = 0x0111, + + // Method 0x80, GOOG-PING is a variant of STUN BINDING + // that is sent instead of a STUN BINDING if the binding + // was identical to the one before. + GOOG_PING_REQUEST = 0x200, + GOOG_PING_RESPONSE = 0x300, + GOOG_PING_ERROR_RESPONSE = 0x310, +}; + +// These are all known STUN attributes, defined in RFC 5389 and elsewhere. +// Next to each is the name of the class (T is StunTAttribute) that implements +// that type. +// RETRANSMIT_COUNT is the number of outstanding pings without a response at +// the time the packet is generated. 
+enum StunAttributeType { + STUN_ATTR_MAPPED_ADDRESS = 0x0001, // Address + STUN_ATTR_USERNAME = 0x0006, // ByteString + STUN_ATTR_MESSAGE_INTEGRITY = 0x0008, // ByteString, 20 bytes + STUN_ATTR_ERROR_CODE = 0x0009, // ErrorCode + STUN_ATTR_UNKNOWN_ATTRIBUTES = 0x000a, // UInt16List + STUN_ATTR_REALM = 0x0014, // ByteString + STUN_ATTR_NONCE = 0x0015, // ByteString + STUN_ATTR_XOR_MAPPED_ADDRESS = 0x0020, // XorAddress + STUN_ATTR_SOFTWARE = 0x8022, // ByteString + STUN_ATTR_ALTERNATE_SERVER = 0x8023, // Address + STUN_ATTR_FINGERPRINT = 0x8028, // UInt32 + STUN_ATTR_ORIGIN = 0x802F, // ByteString + STUN_ATTR_RETRANSMIT_COUNT = 0xFF00 // UInt32 +}; + +// These are the types of the values associated with the attributes above. +// This allows us to perform some basic validation when reading or adding +// attributes. Note that these values are for our own use, and not defined in +// RFC 5389. +enum StunAttributeValueType { + STUN_VALUE_UNKNOWN = 0, + STUN_VALUE_ADDRESS = 1, + STUN_VALUE_XOR_ADDRESS = 2, + STUN_VALUE_UINT32 = 3, + STUN_VALUE_UINT64 = 4, + STUN_VALUE_BYTE_STRING = 5, + STUN_VALUE_ERROR_CODE = 6, + STUN_VALUE_UINT16_LIST = 7 +}; + +// These are the types of STUN addresses defined in RFC 5389. +enum StunAddressFamily { + // NB: UNDEF is not part of the STUN spec. + STUN_ADDRESS_UNDEF = 0, + STUN_ADDRESS_IPV4 = 1, + STUN_ADDRESS_IPV6 = 2 +}; + +// These are the types of STUN error codes defined in RFC 5389. +enum StunErrorCode { + STUN_ERROR_TRY_ALTERNATE = 300, + STUN_ERROR_BAD_REQUEST = 400, + STUN_ERROR_UNAUTHORIZED = 401, + STUN_ERROR_UNKNOWN_ATTRIBUTE = 420, + STUN_ERROR_STALE_CREDENTIALS = 430, // GICE only + STUN_ERROR_STALE_NONCE = 438, + STUN_ERROR_SERVER_ERROR = 500, + STUN_ERROR_GLOBAL_FAILURE = 600 +}; + +// Strings for the error codes above. 
+extern const char STUN_ERROR_REASON_TRY_ALTERNATE_SERVER[]; +extern const char STUN_ERROR_REASON_BAD_REQUEST[]; +extern const char STUN_ERROR_REASON_UNAUTHORIZED[]; +extern const char STUN_ERROR_REASON_UNKNOWN_ATTRIBUTE[]; +extern const char STUN_ERROR_REASON_STALE_CREDENTIALS[]; +extern const char STUN_ERROR_REASON_STALE_NONCE[]; +extern const char STUN_ERROR_REASON_SERVER_ERROR[]; + +// The mask used to determine whether a STUN message is a request/response etc. +const uint32_t kStunTypeMask = 0x0110; + +// STUN Attribute header length. +const size_t kStunAttributeHeaderSize = 4; + +// Following values correspond to RFC5389. +const size_t kStunHeaderSize = 20; +const size_t kStunTransactionIdOffset = 8; +const size_t kStunTransactionIdLength = 12; +const uint32_t kStunMagicCookie = 0x2112A442; +constexpr size_t kStunMagicCookieLength = sizeof(kStunMagicCookie); + +// Following value corresponds to an earlier version of STUN from +// RFC3489. +const size_t kStunLegacyTransactionIdLength = 16; + +// STUN Message Integrity HMAC length. +const size_t kStunMessageIntegritySize = 20; +// Size of STUN_ATTR_MESSAGE_INTEGRITY_32 +const size_t kStunMessageIntegrity32Size = 4; + +class StunAddressAttribute; +class StunAttribute; +class StunByteStringAttribute; +class StunErrorCodeAttribute; + +class StunUInt16ListAttribute; +class StunUInt32Attribute; +class StunUInt64Attribute; +class StunXorAddressAttribute; + +// Records a complete STUN/TURN message. Each message consists of a type and +// any number of attributes. Each attribute is parsed into an instance of an +// appropriate class (see above). The Get* methods will return instances of +// that attribute class. 
+class StunMessage { + public: + StunMessage(); + virtual ~StunMessage(); + + int type() const { return type_; } + size_t length() const { return length_; } + const std::string& transaction_id() const { return transaction_id_; } + uint32_t reduced_transaction_id() const { return reduced_transaction_id_; } + + // Returns true if the message confirms to RFC3489 rather than + // RFC5389. The main difference between two version of the STUN + // protocol is the presence of the magic cookie and different length + // of transaction ID. For outgoing packets version of the protocol + // is determined by the lengths of the transaction ID. + bool IsLegacy() const; + + void SetType(int type) { type_ = static_cast(type); } + bool SetTransactionID(const std::string& str); + + // Get a list of all of the attribute types in the "comprehension required" + // range that were not recognized. + std::vector GetNonComprehendedAttributes() const; + + // Gets the desired attribute value, or NULL if no such attribute type exists. + const StunAddressAttribute* GetAddress(int type) const; + const StunUInt32Attribute* GetUInt32(int type) const; + const StunUInt64Attribute* GetUInt64(int type) const; + const StunByteStringAttribute* GetByteString(int type) const; + const StunUInt16ListAttribute* GetUInt16List(int type) const; + + // Gets these specific attribute values. + const StunErrorCodeAttribute* GetErrorCode() const; + // Returns the code inside the error code attribute, if present, and + // STUN_ERROR_GLOBAL_FAILURE otherwise. + int GetErrorCodeValue() const; + const StunUInt16ListAttribute* GetUnknownAttributes() const; + + // Takes ownership of the specified attribute and adds it to the message. + void AddAttribute(std::unique_ptr attr); + + // Remove the last occurrence of an attribute. + std::unique_ptr RemoveAttribute(int type); + + // Remote all attributes and releases them. + void ClearAttributes(); + + // Validates that a raw STUN message has a correct MESSAGE-INTEGRITY value. 
+ // This can't currently be done on a StunMessage, since it is affected by + // padding data (which we discard when reading a StunMessage). + static bool ValidateMessageIntegrity(const char* data, + size_t size, + const std::string& password); + static bool ValidateMessageIntegrity32(const char* data, + size_t size, + const std::string& password); + + // Adds a MESSAGE-INTEGRITY attribute that is valid for the current message. + bool AddMessageIntegrity(const std::string& password); + bool AddMessageIntegrity(const char* key, size_t keylen); + + // Adds a STUN_ATTR_GOOG_MESSAGE_INTEGRITY_32 attribute that is valid for the + // current message. + bool AddMessageIntegrity32(absl::string_view password); + + // Verify that a buffer has stun magic cookie and one of the specified + // methods. Note that it does not check for the existance of FINGERPRINT. + static bool IsStunMethod(rtc::ArrayView methods, + const char* data, + size_t size); + + // Verifies that a given buffer is STUN by checking for a correct FINGERPRINT. + static bool ValidateFingerprint(const char* data, size_t size); + + // Adds a FINGERPRINT attribute that is valid for the current message. + bool AddFingerprint(); + + // Parses the STUN packet in the given buffer and records it here. The + // return value indicates whether this was successful. + bool Read(rtc::ByteBufferReader* buf); + + // Writes this object into a STUN packet. The return value indicates whether + // this was successful. + bool Write(rtc::ByteBufferWriter* buf) const; + + // Creates an empty message. Overridable by derived classes. + virtual StunMessage* CreateNew() const; + + // Modify the stun magic cookie used for this STUN message. + // This is used for testing. + void SetStunMagicCookie(uint32_t val); + + // Contruct a copy of |this|. 
+ std::unique_ptr Clone() const; + + // Check if the attributes of this StunMessage equals those of |other| + // for all attributes that |attribute_type_mask| return true + bool EqualAttributes(const StunMessage* other, + std::function attribute_type_mask) const; + + protected: + // Verifies that the given attribute is allowed for this message. + virtual StunAttributeValueType GetAttributeValueType(int type) const; + + std::vector> attrs_; + + private: + StunAttribute* CreateAttribute(int type, size_t length) /* const*/; + const StunAttribute* GetAttribute(int type) const; + static bool IsValidTransactionId(const std::string& transaction_id); + bool AddMessageIntegrityOfType(int mi_attr_type, + size_t mi_attr_size, + const char* key, + size_t keylen); + static bool ValidateMessageIntegrityOfType(int mi_attr_type, + size_t mi_attr_size, + const char* data, + size_t size, + const std::string& password); + + uint16_t type_; + uint16_t length_; + std::string transaction_id_; + uint32_t reduced_transaction_id_; + uint32_t stun_magic_cookie_; +}; + +// Base class for all STUN/TURN attributes. +class StunAttribute { + public: + virtual ~StunAttribute() {} + + int type() const { return type_; } + size_t length() const { return length_; } + + // Return the type of this attribute. + virtual StunAttributeValueType value_type() const = 0; + + // Only XorAddressAttribute needs this so far. + virtual void SetOwner(StunMessage* owner) {} + + // Reads the body (not the type or length) for this type of attribute from + // the given buffer. Return value is true if successful. + virtual bool Read(rtc::ByteBufferReader* buf) = 0; + + // Writes the body (not the type or length) to the given buffer. Return + // value is true if successful. + virtual bool Write(rtc::ByteBufferWriter* buf) const = 0; + + // Creates an attribute object with the given type and smallest length. 
+ static StunAttribute* Create(StunAttributeValueType value_type, + uint16_t type, + uint16_t length, + StunMessage* owner); + // TODO(?): Allow these create functions to take parameters, to reduce + // the amount of work callers need to do to initialize attributes. + static std::unique_ptr CreateAddress(uint16_t type); + static std::unique_ptr CreateXorAddress( + uint16_t type); + static std::unique_ptr CreateUInt32(uint16_t type); + static std::unique_ptr CreateUInt64(uint16_t type); + static std::unique_ptr CreateByteString( + uint16_t type); + static std::unique_ptr CreateUInt16ListAttribute( + uint16_t type); + static std::unique_ptr CreateErrorCode(); + static std::unique_ptr CreateUnknownAttributes(); + + protected: + StunAttribute(uint16_t type, uint16_t length); + void SetLength(uint16_t length) { length_ = length; } + void WritePadding(rtc::ByteBufferWriter* buf) const; + void ConsumePadding(rtc::ByteBufferReader* buf) const; + + private: + uint16_t type_; + uint16_t length_; +}; + +// Implements STUN attributes that record an Internet address. 
+class StunAddressAttribute : public StunAttribute { + public: + static const uint16_t SIZE_UNDEF = 0; + static const uint16_t SIZE_IP4 = 8; + static const uint16_t SIZE_IP6 = 20; + StunAddressAttribute(uint16_t type, const rtc::SocketAddress& addr); + StunAddressAttribute(uint16_t type, uint16_t length); + + StunAttributeValueType value_type() const override; + + StunAddressFamily family() const { + switch (address_.ipaddr().family()) { + case AF_INET: + return STUN_ADDRESS_IPV4; + case AF_INET6: + return STUN_ADDRESS_IPV6; + } + return STUN_ADDRESS_UNDEF; + } + + const rtc::SocketAddress& GetAddress() const { return address_; } + const rtc::IPAddress& ipaddr() const { return address_.ipaddr(); } + uint16_t port() const { return address_.port(); } + + void SetAddress(const rtc::SocketAddress& addr) { + address_ = addr; + EnsureAddressLength(); + } + void SetIP(const rtc::IPAddress& ip) { + address_.SetIP(ip); + EnsureAddressLength(); + } + void SetPort(uint16_t port) { address_.SetPort(port); } + + bool Read(rtc::ByteBufferReader* buf) override; + bool Write(rtc::ByteBufferWriter* buf) const override; + + private: + void EnsureAddressLength() { + switch (family()) { + case STUN_ADDRESS_IPV4: { + SetLength(SIZE_IP4); + break; + } + case STUN_ADDRESS_IPV6: { + SetLength(SIZE_IP6); + break; + } + default: { + SetLength(SIZE_UNDEF); + break; + } + } + } + rtc::SocketAddress address_; +}; + +// Implements STUN attributes that record an Internet address. When encoded +// in a STUN message, the address contained in this attribute is XORed with the +// transaction ID of the message. 
+class StunXorAddressAttribute : public StunAddressAttribute { + public: + StunXorAddressAttribute(uint16_t type, const rtc::SocketAddress& addr); + StunXorAddressAttribute(uint16_t type, uint16_t length, StunMessage* owner); + + StunAttributeValueType value_type() const override; + void SetOwner(StunMessage* owner) override; + bool Read(rtc::ByteBufferReader* buf) override; + bool Write(rtc::ByteBufferWriter* buf) const override; + + private: + rtc::IPAddress GetXoredIP() const; + StunMessage* owner_; +}; + +// Implements STUN attributes that record a 32-bit integer. +class StunUInt32Attribute : public StunAttribute { + public: + static const uint16_t SIZE = 4; + StunUInt32Attribute(uint16_t type, uint32_t value); + explicit StunUInt32Attribute(uint16_t type); + + StunAttributeValueType value_type() const override; + + uint32_t value() const { return bits_; } + void SetValue(uint32_t bits) { bits_ = bits; } + + bool GetBit(size_t index) const; + void SetBit(size_t index, bool value); + + bool Read(rtc::ByteBufferReader* buf) override; + bool Write(rtc::ByteBufferWriter* buf) const override; + + private: + uint32_t bits_; +}; + +class StunUInt64Attribute : public StunAttribute { + public: + static const uint16_t SIZE = 8; + StunUInt64Attribute(uint16_t type, uint64_t value); + explicit StunUInt64Attribute(uint16_t type); + + StunAttributeValueType value_type() const override; + + uint64_t value() const { return bits_; } + void SetValue(uint64_t bits) { bits_ = bits; } + + bool Read(rtc::ByteBufferReader* buf) override; + bool Write(rtc::ByteBufferWriter* buf) const override; + + private: + uint64_t bits_; +}; + +// Implements STUN attributes that record an arbitrary byte string. 
+class StunByteStringAttribute : public StunAttribute { + public: + explicit StunByteStringAttribute(uint16_t type); + StunByteStringAttribute(uint16_t type, const std::string& str); + StunByteStringAttribute(uint16_t type, const void* bytes, size_t length); + StunByteStringAttribute(uint16_t type, uint16_t length); + ~StunByteStringAttribute() override; + + StunAttributeValueType value_type() const override; + + const char* bytes() const { return bytes_; } + std::string GetString() const { return std::string(bytes_, length()); } + + void CopyBytes(const char* bytes); // uses strlen + void CopyBytes(const void* bytes, size_t length); + + uint8_t GetByte(size_t index) const; + void SetByte(size_t index, uint8_t value); + + bool Read(rtc::ByteBufferReader* buf) override; + bool Write(rtc::ByteBufferWriter* buf) const override; + + private: + void SetBytes(char* bytes, size_t length); + + char* bytes_; +}; + +// Implements STUN attributes that record an error code. +class StunErrorCodeAttribute : public StunAttribute { + public: + static const uint16_t MIN_SIZE; + StunErrorCodeAttribute(uint16_t type, int code, const std::string& reason); + StunErrorCodeAttribute(uint16_t type, uint16_t length); + ~StunErrorCodeAttribute() override; + + StunAttributeValueType value_type() const override; + + // The combined error and class, e.g. 0x400. + int code() const; + void SetCode(int code); + + // The individual error components. + int eclass() const { return class_; } + int number() const { return number_; } + const std::string& reason() const { return reason_; } + void SetClass(uint8_t eclass) { class_ = eclass; } + void SetNumber(uint8_t number) { number_ = number; } + void SetReason(const std::string& reason); + + bool Read(rtc::ByteBufferReader* buf) override; + bool Write(rtc::ByteBufferWriter* buf) const override; + + private: + uint8_t class_; + uint8_t number_; + std::string reason_; +}; + +// Implements STUN attributes that record a list of attribute names. 
+class StunUInt16ListAttribute : public StunAttribute { + public: + StunUInt16ListAttribute(uint16_t type, uint16_t length); + ~StunUInt16ListAttribute() override; + + StunAttributeValueType value_type() const override; + + size_t Size() const; + uint16_t GetType(int index) const; + void SetType(int index, uint16_t value); + void AddType(uint16_t value); + void AddTypeAtIndex(uint16_t index, uint16_t value); + + bool Read(rtc::ByteBufferReader* buf) override; + bool Write(rtc::ByteBufferWriter* buf) const override; + + private: + std::vector* attr_types_; +}; + +// Return a string e.g "STUN BINDING request". +std::string StunMethodToString(int msg_type); + +// Returns the (successful) response type for the given request type. +// Returns -1 if |request_type| is not a valid request type. +int GetStunSuccessResponseType(int request_type); + +// Returns the error response type for the given request type. +// Returns -1 if |request_type| is not a valid request type. +int GetStunErrorResponseType(int request_type); + +// Returns whether a given message is a request type. +bool IsStunRequestType(int msg_type); + +// Returns whether a given message is an indication type. +bool IsStunIndicationType(int msg_type); + +// Returns whether a given response is a success type. +bool IsStunSuccessResponseType(int msg_type); + +// Returns whether a given response is an error type. +bool IsStunErrorResponseType(int msg_type); + +// Computes the STUN long-term credential hash. +bool ComputeStunCredentialHash(const std::string& username, + const std::string& realm, + const std::string& password, + std::string* hash); + +// Make a copy af |attribute| and return a new StunAttribute. +// This is useful if you don't care about what kind of attribute you +// are handling. +// +// The implementation copies by calling Write() followed by Read(). +// +// If |tmp_buffer| is supplied this buffer will be used, otherwise +// a buffer will created in the method. 
+std::unique_ptr CopyStunAttribute( + const StunAttribute& attribute, + rtc::ByteBufferWriter* tmp_buffer_ptr = 0); + +// TODO(?): Move the TURN/ICE stuff below out to separate files. +extern const char TURN_MAGIC_COOKIE_VALUE[4]; + +// "GTURN" STUN methods. +// TODO(?): Rename these methods to GTURN_ to make it clear they aren't +// part of standard STUN/TURN. +enum RelayMessageType { + // For now, using the same defs from TurnMessageType below. + // STUN_ALLOCATE_REQUEST = 0x0003, + // STUN_ALLOCATE_RESPONSE = 0x0103, + // STUN_ALLOCATE_ERROR_RESPONSE = 0x0113, + STUN_SEND_REQUEST = 0x0004, + STUN_SEND_RESPONSE = 0x0104, + STUN_SEND_ERROR_RESPONSE = 0x0114, + STUN_DATA_INDICATION = 0x0115, +}; + +// "GTURN"-specific STUN attributes. +// TODO(?): Rename these attributes to GTURN_ to avoid conflicts. +enum RelayAttributeType { + STUN_ATTR_LIFETIME = 0x000d, // UInt32 + STUN_ATTR_MAGIC_COOKIE = 0x000f, // ByteString, 4 bytes + STUN_ATTR_BANDWIDTH = 0x0010, // UInt32 + STUN_ATTR_DESTINATION_ADDRESS = 0x0011, // Address + STUN_ATTR_SOURCE_ADDRESS2 = 0x0012, // Address + STUN_ATTR_DATA = 0x0013, // ByteString + STUN_ATTR_OPTIONS = 0x8001, // UInt32 +}; + +// A "GTURN" STUN message. +class RelayMessage : public StunMessage { + protected: + StunAttributeValueType GetAttributeValueType(int type) const override; + StunMessage* CreateNew() const override; +}; + +// Defined in TURN RFC 5766. 
+enum TurnMessageType { + STUN_ALLOCATE_REQUEST = 0x0003, + STUN_ALLOCATE_RESPONSE = 0x0103, + STUN_ALLOCATE_ERROR_RESPONSE = 0x0113, + TURN_REFRESH_REQUEST = 0x0004, + TURN_REFRESH_RESPONSE = 0x0104, + TURN_REFRESH_ERROR_RESPONSE = 0x0114, + TURN_SEND_INDICATION = 0x0016, + TURN_DATA_INDICATION = 0x0017, + TURN_CREATE_PERMISSION_REQUEST = 0x0008, + TURN_CREATE_PERMISSION_RESPONSE = 0x0108, + TURN_CREATE_PERMISSION_ERROR_RESPONSE = 0x0118, + TURN_CHANNEL_BIND_REQUEST = 0x0009, + TURN_CHANNEL_BIND_RESPONSE = 0x0109, + TURN_CHANNEL_BIND_ERROR_RESPONSE = 0x0119, +}; + +enum TurnAttributeType { + STUN_ATTR_CHANNEL_NUMBER = 0x000C, // UInt32 + STUN_ATTR_TURN_LIFETIME = 0x000d, // UInt32 + STUN_ATTR_XOR_PEER_ADDRESS = 0x0012, // XorAddress + // TODO(mallinath) - Uncomment after RelayAttributes are renamed. + // STUN_ATTR_DATA = 0x0013, // ByteString + STUN_ATTR_XOR_RELAYED_ADDRESS = 0x0016, // XorAddress + STUN_ATTR_EVEN_PORT = 0x0018, // ByteString, 1 byte. + STUN_ATTR_REQUESTED_TRANSPORT = 0x0019, // UInt32 + STUN_ATTR_DONT_FRAGMENT = 0x001A, // No content, Length = 0 + STUN_ATTR_RESERVATION_TOKEN = 0x0022, // ByteString, 8 bytes. + // TODO(mallinath) - Rename STUN_ATTR_TURN_LIFETIME to STUN_ATTR_LIFETIME and + // STUN_ATTR_TURN_DATA to STUN_ATTR_DATA. Also rename RelayMessage attributes + // by appending G to attribute name. +}; + +// RFC 5766-defined errors. 
+enum TurnErrorType { + STUN_ERROR_FORBIDDEN = 403, + STUN_ERROR_ALLOCATION_MISMATCH = 437, + STUN_ERROR_WRONG_CREDENTIALS = 441, + STUN_ERROR_UNSUPPORTED_PROTOCOL = 442 +}; + +extern const int SERVER_NOT_REACHABLE_ERROR; + +extern const char STUN_ERROR_REASON_FORBIDDEN[]; +extern const char STUN_ERROR_REASON_ALLOCATION_MISMATCH[]; +extern const char STUN_ERROR_REASON_WRONG_CREDENTIALS[]; +extern const char STUN_ERROR_REASON_UNSUPPORTED_PROTOCOL[]; +class TurnMessage : public StunMessage { + protected: + StunAttributeValueType GetAttributeValueType(int type) const override; + StunMessage* CreateNew() const override; +}; + +enum IceAttributeType { + // RFC 5245 ICE STUN attributes. + STUN_ATTR_PRIORITY = 0x0024, // UInt32 + STUN_ATTR_USE_CANDIDATE = 0x0025, // No content, Length = 0 + STUN_ATTR_ICE_CONTROLLED = 0x8029, // UInt64 + STUN_ATTR_ICE_CONTROLLING = 0x802A, // UInt64 + // The following attributes are in the comprehension-optional range + // (0xC000-0xFFFF) and are not registered with IANA. These STUN attributes are + // intended for ICE and should NOT be used in generic use cases of STUN + // messages. + // + // Note that the value 0xC001 has already been assigned by IANA to + // ENF-FLOW-DESCRIPTION + // (https://www.iana.org/assignments/stun-parameters/stun-parameters.xml). + STUN_ATTR_NOMINATION = 0xC001, // UInt32 + // UInt32. The higher 16 bits are the network ID. The lower 16 bits are the + // network cost. + STUN_ATTR_NETWORK_INFO = 0xC057, + // Experimental: Transaction ID of the last connectivity check received. + STUN_ATTR_LAST_ICE_CHECK_RECEIVED = 0xC058, + // Uint16List. Miscellaneous attributes for future extension. + STUN_ATTR_GOOG_MISC_INFO = 0xC059, + // MESSAGE-INTEGRITY truncated to 32-bit. + STUN_ATTR_GOOG_MESSAGE_INTEGRITY_32 = 0xC060, +}; + +// When adding new attributes to STUN_ATTR_GOOG_MISC_INFO +// (which is a list of uint16_t), append the indices of these attributes below +// and do NOT change the existing indices. 
The indices of attributes must be +// consistent with those used in ConnectionRequest::Prepare when forming a STUN +// message for the ICE connectivity check, and they are used when parsing a +// received STUN message. +enum class IceGoogMiscInfoBindingRequestAttributeIndex { + SUPPORT_GOOG_PING_VERSION = 0, +}; + +enum class IceGoogMiscInfoBindingResponseAttributeIndex { + SUPPORT_GOOG_PING_VERSION = 0, +}; + +// RFC 5245-defined errors. +enum IceErrorCode { + STUN_ERROR_ROLE_CONFLICT = 487, +}; +extern const char STUN_ERROR_REASON_ROLE_CONFLICT[]; + +// A RFC 5245 ICE STUN message. +class IceMessage : public StunMessage { + protected: + StunAttributeValueType GetAttributeValueType(int type) const override; + StunMessage* CreateNew() const override; +}; + +} // namespace cricket + +#endif // API_TRANSPORT_STUN_H_ diff --git a/api/transport/stun_unittest.cc b/api/transport/stun_unittest.cc new file mode 100644 index 0000000..0884b2c --- /dev/null +++ b/api/transport/stun_unittest.cc @@ -0,0 +1,1906 @@ +/* + * Copyright 2004 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/transport/stun.h" + +#include + +#include +#include +#include + +#include "rtc_base/arraysize.h" +#include "rtc_base/byte_buffer.h" +#include "rtc_base/byte_order.h" +#include "rtc_base/socket_address.h" +#include "test/gtest.h" + +namespace cricket { + +class StunTest : public ::testing::Test { + protected: + void CheckStunHeader(const StunMessage& msg, + StunMessageType expected_type, + size_t expected_length) { + ASSERT_EQ(expected_type, msg.type()); + ASSERT_EQ(expected_length, msg.length()); + } + + void CheckStunTransactionID(const StunMessage& msg, + const unsigned char* expectedID, + size_t length) { + ASSERT_EQ(length, msg.transaction_id().size()); + ASSERT_EQ(length == kStunTransactionIdLength + 4, msg.IsLegacy()); + ASSERT_EQ(length == kStunTransactionIdLength, !msg.IsLegacy()); + ASSERT_EQ(0, memcmp(msg.transaction_id().c_str(), expectedID, length)); + } + + void CheckStunAddressAttribute(const StunAddressAttribute* addr, + StunAddressFamily expected_family, + int expected_port, + const rtc::IPAddress& expected_address) { + ASSERT_EQ(expected_family, addr->family()); + ASSERT_EQ(expected_port, addr->port()); + + if (addr->family() == STUN_ADDRESS_IPV4) { + in_addr v4_address = expected_address.ipv4_address(); + in_addr stun_address = addr->ipaddr().ipv4_address(); + ASSERT_EQ(0, memcmp(&v4_address, &stun_address, sizeof(stun_address))); + } else if (addr->family() == STUN_ADDRESS_IPV6) { + in6_addr v6_address = expected_address.ipv6_address(); + in6_addr stun_address = addr->ipaddr().ipv6_address(); + ASSERT_EQ(0, memcmp(&v6_address, &stun_address, sizeof(stun_address))); + } else { + ASSERT_TRUE(addr->family() == STUN_ADDRESS_IPV6 || + addr->family() == STUN_ADDRESS_IPV4); + } + } + + size_t ReadStunMessageTestCase(StunMessage* msg, + const unsigned char* testcase, + size_t size) { + const char* input = reinterpret_cast(testcase); + rtc::ByteBufferReader buf(input, size); + if (msg->Read(&buf)) { + // Returns the size the stun 
message should report itself as being + return (size - 20); + } else { + return 0; + } + } +}; + +// Sample STUN packets with various attributes +// Gathered by wiresharking pjproject's pjnath test programs +// pjproject available at www.pjsip.org + +// clang-format off +// clang formatting doesn't respect inline comments. + +static const unsigned char kStunMessageWithIPv6MappedAddress[] = { + 0x00, 0x01, 0x00, 0x18, // message header + 0x21, 0x12, 0xa4, 0x42, // transaction id + 0x29, 0x1f, 0xcd, 0x7c, + 0xba, 0x58, 0xab, 0xd7, + 0xf2, 0x41, 0x01, 0x00, + 0x00, 0x01, 0x00, 0x14, // Address type (mapped), length + 0x00, 0x02, 0xb8, 0x81, // family (IPv6), port + 0x24, 0x01, 0xfa, 0x00, // an IPv6 address + 0x00, 0x04, 0x10, 0x00, + 0xbe, 0x30, 0x5b, 0xff, + 0xfe, 0xe5, 0x00, 0xc3 +}; + +static const unsigned char kStunMessageWithIPv4MappedAddress[] = { + 0x01, 0x01, 0x00, 0x0c, // binding response, length 12 + 0x21, 0x12, 0xa4, 0x42, // magic cookie + 0x29, 0x1f, 0xcd, 0x7c, // transaction ID + 0xba, 0x58, 0xab, 0xd7, + 0xf2, 0x41, 0x01, 0x00, + 0x00, 0x01, 0x00, 0x08, // Mapped, 8 byte length + 0x00, 0x01, 0x9d, 0xfc, // AF_INET, unxor-ed port + 0xac, 0x17, 0x44, 0xe6 // IPv4 address +}; + +// Test XOR-mapped IP addresses: +static const unsigned char kStunMessageWithIPv6XorMappedAddress[] = { + 0x01, 0x01, 0x00, 0x18, // message header (binding response) + 0x21, 0x12, 0xa4, 0x42, // magic cookie (rfc5389) + 0xe3, 0xa9, 0x46, 0xe1, // transaction ID + 0x7c, 0x00, 0xc2, 0x62, + 0x54, 0x08, 0x01, 0x00, + 0x00, 0x20, 0x00, 0x14, // Address Type (XOR), length + 0x00, 0x02, 0xcb, 0x5b, // family, XOR-ed port + 0x05, 0x13, 0x5e, 0x42, // XOR-ed IPv6 address + 0xe3, 0xad, 0x56, 0xe1, + 0xc2, 0x30, 0x99, 0x9d, + 0xaa, 0xed, 0x01, 0xc3 +}; + +static const unsigned char kStunMessageWithIPv4XorMappedAddress[] = { + 0x01, 0x01, 0x00, 0x0c, // message header (binding response) + 0x21, 0x12, 0xa4, 0x42, // magic cookie + 0x29, 0x1f, 0xcd, 0x7c, // transaction ID + 0xba, 0x58, 
0xab, 0xd7, + 0xf2, 0x41, 0x01, 0x00, + 0x00, 0x20, 0x00, 0x08, // address type (xor), length + 0x00, 0x01, 0xfc, 0xb5, // family (AF_INET), XOR-ed port + 0x8d, 0x05, 0xe0, 0xa4 // IPv4 address +}; + +// ByteString Attribute (username) +static const unsigned char kStunMessageWithByteStringAttribute[] = { + 0x00, 0x01, 0x00, 0x0c, + 0x21, 0x12, 0xa4, 0x42, + 0xe3, 0xa9, 0x46, 0xe1, + 0x7c, 0x00, 0xc2, 0x62, + 0x54, 0x08, 0x01, 0x00, + 0x00, 0x06, 0x00, 0x08, // username attribute (length 8) + 0x61, 0x62, 0x63, 0x64, // abcdefgh + 0x65, 0x66, 0x67, 0x68 +}; + +// Message with an unknown but comprehensible optional attribute. +// Parsing should succeed despite this unknown attribute. +static const unsigned char kStunMessageWithUnknownAttribute[] = { + 0x00, 0x01, 0x00, 0x14, + 0x21, 0x12, 0xa4, 0x42, + 0xe3, 0xa9, 0x46, 0xe1, + 0x7c, 0x00, 0xc2, 0x62, + 0x54, 0x08, 0x01, 0x00, + 0x00, 0xaa, 0x00, 0x07, // Unknown attribute, length 7 (needs padding!) + 0x61, 0x62, 0x63, 0x64, // abcdefg + padding + 0x65, 0x66, 0x67, 0x00, + 0x00, 0x06, 0x00, 0x03, // Followed by a known attribute we can + 0x61, 0x62, 0x63, 0x00 // check for (username of length 3) +}; + +// ByteString Attribute (username) with padding byte +static const unsigned char kStunMessageWithPaddedByteStringAttribute[] = { + 0x00, 0x01, 0x00, 0x08, + 0x21, 0x12, 0xa4, 0x42, + 0xe3, 0xa9, 0x46, 0xe1, + 0x7c, 0x00, 0xc2, 0x62, + 0x54, 0x08, 0x01, 0x00, + 0x00, 0x06, 0x00, 0x03, // username attribute (length 3) + 0x61, 0x62, 0x63, 0xcc // abc +}; + +// Message with an Unknown Attributes (uint16_t list) attribute. 
+static const unsigned char kStunMessageWithUInt16ListAttribute[] = { + 0x00, 0x01, 0x00, 0x0c, + 0x21, 0x12, 0xa4, 0x42, + 0xe3, 0xa9, 0x46, 0xe1, + 0x7c, 0x00, 0xc2, 0x62, + 0x54, 0x08, 0x01, 0x00, + 0x00, 0x0a, 0x00, 0x06, // username attribute (length 6) + 0x00, 0x01, 0x10, 0x00, // three attributes plus padding + 0xAB, 0xCU, 0xBE, 0xEF +}; + +// Error response message (unauthorized) +static const unsigned char kStunMessageWithErrorAttribute[] = { + 0x01, 0x11, 0x00, 0x14, + 0x21, 0x12, 0xa4, 0x42, + 0x29, 0x1f, 0xcd, 0x7c, + 0xba, 0x58, 0xab, 0xd7, + 0xf2, 0x41, 0x01, 0x00, + 0x00, 0x09, 0x00, 0x10, + 0x00, 0x00, 0x04, 0x01, + 0x55, 0x6e, 0x61, 0x75, + 0x74, 0x68, 0x6f, 0x72, + 0x69, 0x7a, 0x65, 0x64 +}; + +static const unsigned char kStunMessageWithOriginAttribute[] = { + 0x00, 0x01, 0x00, 0x18, // message header (binding request), length 24 + 0x21, 0x12, 0xA4, 0x42, // magic cookie + 0x29, 0x1f, 0xcd, 0x7c, // transaction id + 0xba, 0x58, 0xab, 0xd7, + 0xf2, 0x41, 0x01, 0x00, + 0x80, 0x2f, 0x00, 0x12, // origin attribute (length 18) + 0x68, 0x74, 0x74, 0x70, // http://example.com + 0x3A, 0x2F, 0x2F, 0x65, + 0x78, 0x61, 0x6d, 0x70, + 0x6c, 0x65, 0x2e, 0x63, + 0x6f, 0x6d, 0x00, 0x00, +}; + +// Sample messages with an invalid length Field + +// The actual length in bytes of the invalid messages (including STUN header) +static const int kRealLengthOfInvalidLengthTestCases = 32; + +static const unsigned char kStunMessageWithZeroLength[] = { + 0x00, 0x01, 0x00, 0x00, // length of 0 (last 2 bytes) + 0x21, 0x12, 0xA4, 0x42, // magic cookie + '0', '1', '2', '3', // transaction id + '4', '5', '6', '7', + '8', '9', 'a', 'b', + 0x00, 0x20, 0x00, 0x08, // xor mapped address + 0x00, 0x01, 0x21, 0x1F, + 0x21, 0x12, 0xA4, 0x53, +}; + +static const unsigned char kStunMessageWithExcessLength[] = { + 0x00, 0x01, 0x00, 0x55, // length of 85 + 0x21, 0x12, 0xA4, 0x42, // magic cookie + '0', '1', '2', '3', // transaction id + '4', '5', '6', '7', + '8', '9', 'a', 'b', + 0x00, 0x20, 
0x00, 0x08, // xor mapped address + 0x00, 0x01, 0x21, 0x1F, + 0x21, 0x12, 0xA4, 0x53, +}; + +static const unsigned char kStunMessageWithSmallLength[] = { + 0x00, 0x01, 0x00, 0x03, // length of 3 + 0x21, 0x12, 0xA4, 0x42, // magic cookie + '0', '1', '2', '3', // transaction id + '4', '5', '6', '7', + '8', '9', 'a', 'b', + 0x00, 0x20, 0x00, 0x08, // xor mapped address + 0x00, 0x01, 0x21, 0x1F, + 0x21, 0x12, 0xA4, 0x53, +}; + +static const unsigned char kStunMessageWithBadHmacAtEnd[] = { + 0x00, 0x01, 0x00, 0x14, // message length exactly 20 + 0x21, 0x12, 0xA4, 0x42, // magic cookie + '0', '1', '2', '3', // transaction ID + '4', '5', '6', '7', + '8', '9', 'a', 'b', + 0x00, 0x08, 0x00, 0x14, // type=STUN_ATTR_MESSAGE_INTEGRITY, length=20 + '0', '0', '0', '0', // We lied, there are only 16 bytes of HMAC. + '0', '0', '0', '0', + '0', '0', '0', '0', + '0', '0', '0', '0', +}; + +// RTCP packet, for testing we correctly ignore non stun packet types. +// V=2, P=false, RC=0, Type=200, Len=6, Sender-SSRC=85, etc +static const unsigned char kRtcpPacket[] = { + 0x80, 0xc8, 0x00, 0x06, 0x00, 0x00, 0x00, 0x55, + 0xce, 0xa5, 0x18, 0x3a, 0x39, 0xcc, 0x7d, 0x09, + 0x23, 0xed, 0x19, 0x07, 0x00, 0x00, 0x01, 0x56, + 0x00, 0x03, 0x73, 0x50, +}; + + +// RFC5769 Test Vectors +// Software name (request): "STUN test client" (without quotes) +// Software name (response): "test vector" (without quotes) +// Username: "evtj:h6vY" (without quotes) +// Password: "VOkJxbRl1RmTxUk/WvJxBt" (without quotes) +static const unsigned char kRfc5769SampleMsgTransactionId[] = { + 0xb7, 0xe7, 0xa7, 0x01, 0xbc, 0x34, 0xd6, 0x86, 0xfa, 0x87, 0xdf, 0xae +}; +static const char kRfc5769SampleMsgClientSoftware[] = "STUN test client"; +static const char kRfc5769SampleMsgServerSoftware[] = "test vector"; +static const char kRfc5769SampleMsgUsername[] = "evtj:h6vY"; +static const char kRfc5769SampleMsgPassword[] = "VOkJxbRl1RmTxUk/WvJxBt"; +static const rtc::SocketAddress kRfc5769SampleMsgMappedAddress( + "192.0.2.1", 
    32853);
static const rtc::SocketAddress kRfc5769SampleMsgIPv6MappedAddress(
    "2001:db8:1234:5678:11:2233:4455:6677",
    32853);

static const unsigned char kRfc5769SampleMsgWithAuthTransactionId[] = {
    0x78, 0xad, 0x34, 0x33, 0xc6, 0xad, 0x72, 0xc0, 0x29, 0xda, 0x41, 0x2e
};
// UTF-8 encoding of the Japanese word for "Matrix" (see RFC 5769, 2.4).
static const char kRfc5769SampleMsgWithAuthUsername[] =
    "\xe3\x83\x9e\xe3\x83\x88\xe3\x83\xaa\xe3\x83\x83\xe3\x82\xaf\xe3\x82\xb9";
static const char kRfc5769SampleMsgWithAuthPassword[] = "TheMatrIX";
static const char kRfc5769SampleMsgWithAuthNonce[] =
    "f//499k954d6OL34oL9FSTvy64sA";
static const char kRfc5769SampleMsgWithAuthRealm[] = "example.org";

// 2.1. Sample Request
static const unsigned char kRfc5769SampleRequest[] = {
    0x00, 0x01, 0x00, 0x58,  // Request type and message length
    0x21, 0x12, 0xa4, 0x42,  // Magic cookie
    0xb7, 0xe7, 0xa7, 0x01,  // }
    0xbc, 0x34, 0xd6, 0x86,  // }  Transaction ID
    0xfa, 0x87, 0xdf, 0xae,  // }
    0x80, 0x22, 0x00, 0x10,  // SOFTWARE attribute header
    0x53, 0x54, 0x55, 0x4e,  // }
    0x20, 0x74, 0x65, 0x73,  // }  User-agent...
    0x74, 0x20, 0x63, 0x6c,  // }  ...name
    0x69, 0x65, 0x6e, 0x74,  // }
    0x00, 0x24, 0x00, 0x04,  // PRIORITY attribute header
    0x6e, 0x00, 0x01, 0xff,  // ICE priority value
    0x80, 0x29, 0x00, 0x08,  // ICE-CONTROLLED attribute header
    0x93, 0x2f, 0xf9, 0xb1,  // }  Pseudo-random tie breaker...
    0x51, 0x26, 0x3b, 0x36,  // }   ...for ICE control
    0x00, 0x06, 0x00, 0x09,  // USERNAME attribute header
    0x65, 0x76, 0x74, 0x6a,  // }
    0x3a, 0x68, 0x36, 0x76,  // }  Username (9 bytes) and padding (3 bytes)
    0x59, 0x20, 0x20, 0x20,  // }
    0x00, 0x08, 0x00, 0x14,  // MESSAGE-INTEGRITY attribute header
    0x9a, 0xea, 0xa7, 0x0c,  // }
    0xbf, 0xd8, 0xcb, 0x56,  // }
    0x78, 0x1e, 0xf2, 0xb5,  // }  HMAC-SHA1 fingerprint
    0xb2, 0xd3, 0xf2, 0x49,  // }
    0xc1, 0xb5, 0x71, 0xa2,  // }
    0x80, 0x28, 0x00, 0x04,  // FINGERPRINT attribute header
    0xe5, 0x7a, 0x3b, 0xcf   // CRC32 fingerprint
};

// Variant of the 2.1 sample request that replaces the 20-byte
// MESSAGE-INTEGRITY attribute with the non-standard 4-byte
// MESSAGE-INTEGRITY-32 (0xC060) attribute; total length shrinks to 0x48.
static const unsigned char kSampleRequestMI32[] = {
    0x00, 0x01, 0x00, 0x48,  // Request type and message length
    0x21, 0x12, 0xa4, 0x42,  // Magic cookie
    0xb7, 0xe7, 0xa7, 0x01,  // }
    0xbc, 0x34, 0xd6, 0x86,  // }  Transaction ID
    0xfa, 0x87, 0xdf, 0xae,  // }
    0x80, 0x22, 0x00, 0x10,  // SOFTWARE attribute header
    0x53, 0x54, 0x55, 0x4e,  // }
    0x20, 0x74, 0x65, 0x73,  // }  User-agent...
    0x74, 0x20, 0x63, 0x6c,  // }  ...name
    0x69, 0x65, 0x6e, 0x74,  // }
    0x00, 0x24, 0x00, 0x04,  // PRIORITY attribute header
    0x6e, 0x00, 0x01, 0xff,  // ICE priority value
    0x80, 0x29, 0x00, 0x08,  // ICE-CONTROLLED attribute header
    0x93, 0x2f, 0xf9, 0xb1,  // }  Pseudo-random tie breaker...
    0x51, 0x26, 0x3b, 0x36,  // }   ...for ICE control
    0x00, 0x06, 0x00, 0x09,  // USERNAME attribute header
    0x65, 0x76, 0x74, 0x6a,  // }
    0x3a, 0x68, 0x36, 0x76,  // }  Username (9 bytes) and padding (3 bytes)
    0x59, 0x20, 0x20, 0x20,  // }
    0xC0, 0x60, 0x00, 0x04,  // MESSAGE-INTEGRITY-32 attribute header
    0x45, 0x45, 0xce, 0x7c,  // }  HMAC-SHA1 fingerprint (first 32 bit)
    0x80, 0x28, 0x00, 0x04,  // FINGERPRINT attribute header
    0xe5, 0x7a, 0x3b, 0xcf   // CRC32 fingerprint
};

// 2.2. Sample IPv4 Response
static const unsigned char kRfc5769SampleResponse[] = {
    0x01, 0x01, 0x00, 0x3c,  // Response type and message length
    0x21, 0x12, 0xa4, 0x42,  // Magic cookie
    0xb7, 0xe7, 0xa7, 0x01,  // }
    0xbc, 0x34, 0xd6, 0x86,  // }  Transaction ID
    0xfa, 0x87, 0xdf, 0xae,  // }
    0x80, 0x22, 0x00, 0x0b,  // SOFTWARE attribute header
    0x74, 0x65, 0x73, 0x74,  // }
    0x20, 0x76, 0x65, 0x63,  // }  UTF-8 server name
    0x74, 0x6f, 0x72, 0x20,  // }
    0x00, 0x20, 0x00, 0x08,  // XOR-MAPPED-ADDRESS attribute header
    0x00, 0x01, 0xa1, 0x47,  // Address family (IPv4) and xor'd mapped port
    0xe1, 0x12, 0xa6, 0x43,  // Xor'd mapped IPv4 address
    0x00, 0x08, 0x00, 0x14,  // MESSAGE-INTEGRITY attribute header
    0x2b, 0x91, 0xf5, 0x99,  // }
    0xfd, 0x9e, 0x90, 0xc3,  // }
    0x8c, 0x74, 0x89, 0xf9,  // }  HMAC-SHA1 fingerprint
    0x2a, 0xf9, 0xba, 0x53,  // }
    0xf0, 0x6b, 0xe7, 0xd7,  // }
    0x80, 0x28, 0x00, 0x04,  // FINGERPRINT attribute header
    0xc0, 0x7d, 0x4c, 0x96   // CRC32 fingerprint
};

// 2.3. Sample IPv6 Response
static const unsigned char kRfc5769SampleResponseIPv6[] = {
    0x01, 0x01, 0x00, 0x48,  // Response type and message length
    0x21, 0x12, 0xa4, 0x42,  // Magic cookie
    0xb7, 0xe7, 0xa7, 0x01,  // }
    0xbc, 0x34, 0xd6, 0x86,  // }  Transaction ID
    0xfa, 0x87, 0xdf, 0xae,  // }
    0x80, 0x22, 0x00, 0x0b,  // SOFTWARE attribute header
    0x74, 0x65, 0x73, 0x74,  // }
    0x20, 0x76, 0x65, 0x63,  // }  UTF-8 server name
    0x74, 0x6f, 0x72, 0x20,  // }
    0x00, 0x20, 0x00, 0x14,  // XOR-MAPPED-ADDRESS attribute header
    0x00, 0x02, 0xa1, 0x47,  // Address family (IPv6) and xor'd mapped port.
    0x01, 0x13, 0xa9, 0xfa,  // }
    0xa5, 0xd3, 0xf1, 0x79,  // }  Xor'd mapped IPv6 address
    0xbc, 0x25, 0xf4, 0xb5,  // }
    0xbe, 0xd2, 0xb9, 0xd9,  // }
    0x00, 0x08, 0x00, 0x14,  // MESSAGE-INTEGRITY attribute header
    0xa3, 0x82, 0x95, 0x4e,  // }
    0x4b, 0xe6, 0x7b, 0xf1,  // }
    0x17, 0x84, 0xc9, 0x7c,  // }  HMAC-SHA1 fingerprint
    0x82, 0x92, 0xc2, 0x75,  // }
    0xbf, 0xe3, 0xed, 0x41,  // }
    0x80, 0x28, 0x00, 0x04,  // FINGERPRINT attribute header
    0xc8, 0xfb, 0x0b, 0x4c   // CRC32 fingerprint
};

// 2.4. Sample Request with Long-Term Authentication
static const unsigned char kRfc5769SampleRequestLongTermAuth[] = {
    0x00, 0x01, 0x00, 0x60,  // Request type and message length
    0x21, 0x12, 0xa4, 0x42,  // Magic cookie
    0x78, 0xad, 0x34, 0x33,  // }
    0xc6, 0xad, 0x72, 0xc0,  // }  Transaction ID
    0x29, 0xda, 0x41, 0x2e,  // }
    0x00, 0x06, 0x00, 0x12,  // USERNAME attribute header
    0xe3, 0x83, 0x9e, 0xe3,  // }
    0x83, 0x88, 0xe3, 0x83,  // }
    0xaa, 0xe3, 0x83, 0x83,  // }  Username value (18 bytes) and padding (2 bytes)
    0xe3, 0x82, 0xaf, 0xe3,  // }
    0x82, 0xb9, 0x00, 0x00,  // }
    0x00, 0x15, 0x00, 0x1c,  // NONCE attribute header
    0x66, 0x2f, 0x2f, 0x34,  // }
    0x39, 0x39, 0x6b, 0x39,  // }
    0x35, 0x34, 0x64, 0x36,  // }
    0x4f, 0x4c, 0x33, 0x34,  // }  Nonce value
    0x6f, 0x4c, 0x39, 0x46,  // }
    0x53, 0x54, 0x76, 0x79,  // }
    0x36, 0x34, 0x73, 0x41,  // }
    0x00, 0x14, 0x00, 0x0b,  // REALM attribute header
    0x65, 0x78, 0x61, 0x6d,  // }
    0x70, 0x6c, 0x65, 0x2e,  // }  Realm value (11 bytes) and padding (1 byte)
    0x6f, 0x72, 0x67, 0x00,  // }
    0x00, 0x08, 0x00, 0x14,  // MESSAGE-INTEGRITY attribute header
    0xf6, 0x70, 0x24, 0x65,  // }
    0x6d, 0xd6, 0x4a, 0x3e,  // }
    0x02, 0xb8, 0xe0, 0x71,  // }  HMAC-SHA1 fingerprint
    0x2e, 0x85, 0xc9, 0xa2,  // }
    0x8c, 0xa8, 0x96, 0x66   // }
};

// Length parameter is changed to 0x38 from 0x58.
// AddMessageIntegrity will add MI information and update the length param
// accordingly.
static const unsigned char kRfc5769SampleRequestWithoutMI[] = {
    0x00, 0x01, 0x00, 0x38,  // Request type and message length
    0x21, 0x12, 0xa4, 0x42,  // Magic cookie
    0xb7, 0xe7, 0xa7, 0x01,  // }
    0xbc, 0x34, 0xd6, 0x86,  // }  Transaction ID
    0xfa, 0x87, 0xdf, 0xae,  // }
    0x80, 0x22, 0x00, 0x10,  // SOFTWARE attribute header
    0x53, 0x54, 0x55, 0x4e,  // }
    0x20, 0x74, 0x65, 0x73,  // }  User-agent...
    0x74, 0x20, 0x63, 0x6c,  // }  ...name
    0x69, 0x65, 0x6e, 0x74,  // }
    0x00, 0x24, 0x00, 0x04,  // PRIORITY attribute header
    0x6e, 0x00, 0x01, 0xff,  // ICE priority value
    0x80, 0x29, 0x00, 0x08,  // ICE-CONTROLLED attribute header
    0x93, 0x2f, 0xf9, 0xb1,  // }  Pseudo-random tie breaker...
    0x51, 0x26, 0x3b, 0x36,  // }   ...for ICE control
    0x00, 0x06, 0x00, 0x09,  // USERNAME attribute header
    0x65, 0x76, 0x74, 0x6a,  // }
    0x3a, 0x68, 0x36, 0x76,  // }  Username (9 bytes) and padding (3 bytes)
    0x59, 0x20, 0x20, 0x20   // }
};

// This HMAC differs from the RFC 5769 SampleRequest message. This differs
// because spec uses 0x20 for the padding where as our implementation uses 0.
static const unsigned char kCalculatedHmac1[] = {
    0x79, 0x07, 0xc2, 0xd2,  // }
    0xed, 0xbf, 0xea, 0x48,  // }
    0x0e, 0x4c, 0x76, 0xd8,  // }  HMAC-SHA1 fingerprint
    0x29, 0x62, 0xd5, 0xc3,  // }
    0x74, 0x2a, 0xf9, 0xe3   // }
};

// This truncated HMAC differs from kCalculatedHmac1
// above since the sum is computed including header
// and the header is different since the message is shorter
// than when MESSAGE-INTEGRITY is used.
static const unsigned char kCalculatedHmac1_32[] = {
    0xda, 0x39, 0xde, 0x5d,  // }
};

// Length parameter is changed to 0x1c from 0x3c.
// AddMessageIntegrity will add MI information and update the length param
// accordingly.
static const unsigned char kRfc5769SampleResponseWithoutMI[] = {
    0x01, 0x01, 0x00, 0x1c,  // Response type and message length
    0x21, 0x12, 0xa4, 0x42,  // Magic cookie
    0xb7, 0xe7, 0xa7, 0x01,  // }
    0xbc, 0x34, 0xd6, 0x86,  // }  Transaction ID
    0xfa, 0x87, 0xdf, 0xae,  // }
    0x80, 0x22, 0x00, 0x0b,  // SOFTWARE attribute header
    0x74, 0x65, 0x73, 0x74,  // }
    0x20, 0x76, 0x65, 0x63,  // }  UTF-8 server name
    0x74, 0x6f, 0x72, 0x20,  // }
    0x00, 0x20, 0x00, 0x08,  // XOR-MAPPED-ADDRESS attribute header
    0x00, 0x01, 0xa1, 0x47,  // Address family (IPv4) and xor'd mapped port
    0xe1, 0x12, 0xa6, 0x43   // Xor'd mapped IPv4 address
};

// This HMAC differs from the RFC 5769 SampleResponse message. This differs
// because spec uses 0x20 for the padding where as our implementation uses 0.
static const unsigned char kCalculatedHmac2[] = {
    0x5d, 0x6b, 0x58, 0xbe,  // }
    0xad, 0x94, 0xe0, 0x7e,  // }
    0xef, 0x0d, 0xfc, 0x12,  // }  HMAC-SHA1 fingerprint
    0x82, 0xa2, 0xbd, 0x08,  // }
    0x43, 0x14, 0x10, 0x28   // }
};

// This truncated HMAC differs from kCalculatedHmac2
// above since the sum is computed including header
// and the header is different since the message is shorter
// than when MESSAGE-INTEGRITY is used.
static const unsigned char kCalculatedHmac2_32[] = {
    0xe7, 0x5c, 0xd3, 0x16,  // }
};

// clang-format on

// A transaction ID without the 'magic cookie' portion
// pjnat's test programs use this transaction ID a lot.
const unsigned char kTestTransactionId1[] = {0x029, 0x01f, 0x0cd, 0x07c,
                                             0x0ba, 0x058, 0x0ab, 0x0d7,
                                             0x0f2, 0x041, 0x001, 0x000};

// They use this one sometimes too.
const unsigned char kTestTransactionId2[] = {0x0e3, 0x0a9, 0x046, 0x0e1,
                                             0x07c, 0x000, 0x0c2, 0x062,
                                             0x054, 0x008, 0x001, 0x000};

const in6_addr kIPv6TestAddress1 = {
    {{0x24, 0x01, 0xfa, 0x00, 0x00, 0x04, 0x10, 0x00, 0xbe, 0x30, 0x5b, 0xff,
      0xfe, 0xe5, 0x00, 0xc3}}};
const in6_addr kIPv6TestAddress2 = {
    {{0x24, 0x01, 0xfa, 0x00, 0x00, 0x04, 0x10, 0x12, 0x06, 0x0c, 0xce, 0xff,
      0xfe, 0x1f, 0x61, 0xa4}}};

#ifdef WEBRTC_POSIX
const in_addr kIPv4TestAddress1 = {0xe64417ac};
#elif defined WEBRTC_WIN
// Windows in_addr has a union with a uchar[] array first.
const in_addr kIPv4TestAddress1 = {{{0x0ac, 0x017, 0x044, 0x0e6}}};
#endif
const char kTestUserName1[] = "abcdefgh";
const char kTestUserName2[] = "abc";
const char kTestErrorReason[] = "Unauthorized";
const char kTestOrigin[] = "http://example.com";
const int kTestErrorClass = 4;
const int kTestErrorNumber = 1;
const int kTestErrorCode = 401;

const int kTestMessagePort1 = 59977;
const int kTestMessagePort2 = 47233;
const int kTestMessagePort3 = 56743;
const int kTestMessagePort4 = 40444;

// NOTE(review): the trailing semicolon inside the macro expansion means a
// second ';' at call sites becomes an empty statement — harmless here, but
// it prevents use in single-statement if/else bodies.
#define ReadStunMessage(X, Y) ReadStunMessageTestCase(X, Y, sizeof(Y));

// Test that the GetStun*Type and IsStun*Type methods work as expected.
TEST_F(StunTest, MessageTypes) {
  EXPECT_EQ(STUN_BINDING_RESPONSE,
            GetStunSuccessResponseType(STUN_BINDING_REQUEST));
  EXPECT_EQ(STUN_BINDING_ERROR_RESPONSE,
            GetStunErrorResponseType(STUN_BINDING_REQUEST));
  // Only request types map to a response type; everything else yields -1.
  EXPECT_EQ(-1, GetStunSuccessResponseType(STUN_BINDING_INDICATION));
  EXPECT_EQ(-1, GetStunSuccessResponseType(STUN_BINDING_RESPONSE));
  EXPECT_EQ(-1, GetStunSuccessResponseType(STUN_BINDING_ERROR_RESPONSE));
  EXPECT_EQ(-1, GetStunErrorResponseType(STUN_BINDING_INDICATION));
  EXPECT_EQ(-1, GetStunErrorResponseType(STUN_BINDING_RESPONSE));
  EXPECT_EQ(-1, GetStunErrorResponseType(STUN_BINDING_ERROR_RESPONSE));

  // Each type must be classified as exactly one of the four kinds.
  int types[] = {STUN_BINDING_REQUEST, STUN_BINDING_INDICATION,
                 STUN_BINDING_RESPONSE, STUN_BINDING_ERROR_RESPONSE};
  for (size_t i = 0; i < arraysize(types); ++i) {
    EXPECT_EQ(i == 0U, IsStunRequestType(types[i]));
    EXPECT_EQ(i == 1U, IsStunIndicationType(types[i]));
    EXPECT_EQ(i == 2U, IsStunSuccessResponseType(types[i]));
    EXPECT_EQ(i == 3U, IsStunErrorResponseType(types[i]));
    // All binding types share the method bits outside the class-bit mask.
    EXPECT_EQ(1, types[i] & 0xFEEF);
  }
}

TEST_F(StunTest, ReadMessageWithIPv4AddressAttribute) {
  StunMessage msg;
  size_t size = ReadStunMessage(&msg, kStunMessageWithIPv4MappedAddress);
  CheckStunHeader(msg, STUN_BINDING_RESPONSE, size);
  CheckStunTransactionID(msg, kTestTransactionId1, kStunTransactionIdLength);

  const StunAddressAttribute* addr = msg.GetAddress(STUN_ATTR_MAPPED_ADDRESS);
  rtc::IPAddress test_address(kIPv4TestAddress1);
  CheckStunAddressAttribute(addr, STUN_ADDRESS_IPV4, kTestMessagePort4,
                            test_address);
}

TEST_F(StunTest, ReadMessageWithIPv4XorAddressAttribute) {
  StunMessage msg;
  // NOTE(review): msg2 is never used in this test and could be removed.
  StunMessage msg2;
  size_t size = ReadStunMessage(&msg, kStunMessageWithIPv4XorMappedAddress);
  CheckStunHeader(msg, STUN_BINDING_RESPONSE, size);
  CheckStunTransactionID(msg, kTestTransactionId1, kStunTransactionIdLength);

  const StunAddressAttribute* addr =
      msg.GetAddress(STUN_ATTR_XOR_MAPPED_ADDRESS);
  rtc::IPAddress test_address(kIPv4TestAddress1);
  CheckStunAddressAttribute(addr, STUN_ADDRESS_IPV4, kTestMessagePort3,
                            test_address);
}

TEST_F(StunTest, ReadMessageWithIPv6AddressAttribute) {
  StunMessage msg;
  size_t size = ReadStunMessage(&msg, kStunMessageWithIPv6MappedAddress);
  CheckStunHeader(msg, STUN_BINDING_REQUEST, size);
  CheckStunTransactionID(msg, kTestTransactionId1, kStunTransactionIdLength);

  rtc::IPAddress test_address(kIPv6TestAddress1);

  const StunAddressAttribute* addr = msg.GetAddress(STUN_ATTR_MAPPED_ADDRESS);
  CheckStunAddressAttribute(addr, STUN_ADDRESS_IPV6, kTestMessagePort2,
                            test_address);
}

// NOTE(review): despite the name, this is byte-for-byte identical to
// ReadMessageWithIPv6AddressAttribute above and does not exercise an invalid
// address — it should probably parse a fixture with a bad address family.
TEST_F(StunTest, ReadMessageWithInvalidAddressAttribute) {
  StunMessage msg;
  size_t size = ReadStunMessage(&msg, kStunMessageWithIPv6MappedAddress);
  CheckStunHeader(msg, STUN_BINDING_REQUEST, size);
  CheckStunTransactionID(msg, kTestTransactionId1, kStunTransactionIdLength);

  rtc::IPAddress test_address(kIPv6TestAddress1);

  const StunAddressAttribute* addr = msg.GetAddress(STUN_ATTR_MAPPED_ADDRESS);
  CheckStunAddressAttribute(addr, STUN_ADDRESS_IPV6, kTestMessagePort2,
                            test_address);
}

TEST_F(StunTest, ReadMessageWithIPv6XorAddressAttribute) {
  StunMessage msg;
  size_t size = ReadStunMessage(&msg, kStunMessageWithIPv6XorMappedAddress);

  rtc::IPAddress test_address(kIPv6TestAddress1);

  CheckStunHeader(msg, STUN_BINDING_RESPONSE, size);
  CheckStunTransactionID(msg, kTestTransactionId2, kStunTransactionIdLength);

  const StunAddressAttribute* addr =
      msg.GetAddress(STUN_ATTR_XOR_MAPPED_ADDRESS);
  CheckStunAddressAttribute(addr, STUN_ADDRESS_IPV6, kTestMessagePort1,
                            test_address);
}

// Read the RFC5389 fields from the RFC5769 sample STUN request.
TEST_F(StunTest, ReadRfc5769RequestMessage) {
  StunMessage msg;
  size_t size = ReadStunMessage(&msg, kRfc5769SampleRequest);
  CheckStunHeader(msg, STUN_BINDING_REQUEST, size);
  CheckStunTransactionID(msg, kRfc5769SampleMsgTransactionId,
                         kStunTransactionIdLength);

  const StunByteStringAttribute* software =
      msg.GetByteString(STUN_ATTR_SOFTWARE);
  ASSERT_TRUE(software != NULL);
  EXPECT_EQ(kRfc5769SampleMsgClientSoftware, software->GetString());

  const StunByteStringAttribute* username =
      msg.GetByteString(STUN_ATTR_USERNAME);
  ASSERT_TRUE(username != NULL);
  EXPECT_EQ(kRfc5769SampleMsgUsername, username->GetString());

  // Actual M-I value checked in a later test.
  ASSERT_TRUE(msg.GetByteString(STUN_ATTR_MESSAGE_INTEGRITY) != NULL);

  // Fingerprint checked in a later test, but double-check the value here.
  const StunUInt32Attribute* fingerprint = msg.GetUInt32(STUN_ATTR_FINGERPRINT);
  ASSERT_TRUE(fingerprint != NULL);
  EXPECT_EQ(0xe57a3bcf, fingerprint->value());
}

// Read the RFC5389 fields from the RFC5769 sample STUN response.
TEST_F(StunTest, ReadRfc5769ResponseMessage) {
  StunMessage msg;
  size_t size = ReadStunMessage(&msg, kRfc5769SampleResponse);
  CheckStunHeader(msg, STUN_BINDING_RESPONSE, size);
  CheckStunTransactionID(msg, kRfc5769SampleMsgTransactionId,
                         kStunTransactionIdLength);

  const StunByteStringAttribute* software =
      msg.GetByteString(STUN_ATTR_SOFTWARE);
  ASSERT_TRUE(software != NULL);
  EXPECT_EQ(kRfc5769SampleMsgServerSoftware, software->GetString());

  const StunAddressAttribute* mapped_address =
      msg.GetAddress(STUN_ATTR_XOR_MAPPED_ADDRESS);
  ASSERT_TRUE(mapped_address != NULL);
  EXPECT_EQ(kRfc5769SampleMsgMappedAddress, mapped_address->GetAddress());

  // Actual M-I and fingerprint checked in later tests.
  ASSERT_TRUE(msg.GetByteString(STUN_ATTR_MESSAGE_INTEGRITY) != NULL);
  ASSERT_TRUE(msg.GetUInt32(STUN_ATTR_FINGERPRINT) != NULL);
}

// Read the RFC5389 fields from the RFC5769 sample STUN response for IPv6.
TEST_F(StunTest, ReadRfc5769ResponseMessageIPv6) {
  StunMessage msg;
  size_t size = ReadStunMessage(&msg, kRfc5769SampleResponseIPv6);
  CheckStunHeader(msg, STUN_BINDING_RESPONSE, size);
  CheckStunTransactionID(msg, kRfc5769SampleMsgTransactionId,
                         kStunTransactionIdLength);

  const StunByteStringAttribute* software =
      msg.GetByteString(STUN_ATTR_SOFTWARE);
  ASSERT_TRUE(software != NULL);
  EXPECT_EQ(kRfc5769SampleMsgServerSoftware, software->GetString());

  const StunAddressAttribute* mapped_address =
      msg.GetAddress(STUN_ATTR_XOR_MAPPED_ADDRESS);
  ASSERT_TRUE(mapped_address != NULL);
  EXPECT_EQ(kRfc5769SampleMsgIPv6MappedAddress, mapped_address->GetAddress());

  // Actual M-I and fingerprint checked in later tests.
  ASSERT_TRUE(msg.GetByteString(STUN_ATTR_MESSAGE_INTEGRITY) != NULL);
  ASSERT_TRUE(msg.GetUInt32(STUN_ATTR_FINGERPRINT) != NULL);
}

// Read the RFC5389 fields from the RFC5769 sample STUN response with auth.
+TEST_F(StunTest, ReadRfc5769RequestMessageLongTermAuth) { + StunMessage msg; + size_t size = ReadStunMessage(&msg, kRfc5769SampleRequestLongTermAuth); + CheckStunHeader(msg, STUN_BINDING_REQUEST, size); + CheckStunTransactionID(msg, kRfc5769SampleMsgWithAuthTransactionId, + kStunTransactionIdLength); + + const StunByteStringAttribute* username = + msg.GetByteString(STUN_ATTR_USERNAME); + ASSERT_TRUE(username != NULL); + EXPECT_EQ(kRfc5769SampleMsgWithAuthUsername, username->GetString()); + + const StunByteStringAttribute* nonce = msg.GetByteString(STUN_ATTR_NONCE); + ASSERT_TRUE(nonce != NULL); + EXPECT_EQ(kRfc5769SampleMsgWithAuthNonce, nonce->GetString()); + + const StunByteStringAttribute* realm = msg.GetByteString(STUN_ATTR_REALM); + ASSERT_TRUE(realm != NULL); + EXPECT_EQ(kRfc5769SampleMsgWithAuthRealm, realm->GetString()); + + // No fingerprint, actual M-I checked in later tests. + ASSERT_TRUE(msg.GetByteString(STUN_ATTR_MESSAGE_INTEGRITY) != NULL); + ASSERT_TRUE(msg.GetUInt32(STUN_ATTR_FINGERPRINT) == NULL); +} + +// The RFC3489 packet in this test is the same as +// kStunMessageWithIPv4MappedAddress, but with a different value where the +// magic cookie was. +TEST_F(StunTest, ReadLegacyMessage) { + unsigned char rfc3489_packet[sizeof(kStunMessageWithIPv4MappedAddress)]; + memcpy(rfc3489_packet, kStunMessageWithIPv4MappedAddress, + sizeof(kStunMessageWithIPv4MappedAddress)); + // Overwrite the magic cookie here. 
+ memcpy(&rfc3489_packet[4], "ABCD", 4); + + StunMessage msg; + size_t size = ReadStunMessage(&msg, rfc3489_packet); + CheckStunHeader(msg, STUN_BINDING_RESPONSE, size); + CheckStunTransactionID(msg, &rfc3489_packet[4], kStunTransactionIdLength + 4); + + const StunAddressAttribute* addr = msg.GetAddress(STUN_ATTR_MAPPED_ADDRESS); + rtc::IPAddress test_address(kIPv4TestAddress1); + CheckStunAddressAttribute(addr, STUN_ADDRESS_IPV4, kTestMessagePort4, + test_address); +} + +TEST_F(StunTest, SetIPv6XorAddressAttributeOwner) { + StunMessage msg; + StunMessage msg2; + size_t size = ReadStunMessage(&msg, kStunMessageWithIPv6XorMappedAddress); + + rtc::IPAddress test_address(kIPv6TestAddress1); + + CheckStunHeader(msg, STUN_BINDING_RESPONSE, size); + CheckStunTransactionID(msg, kTestTransactionId2, kStunTransactionIdLength); + + const StunAddressAttribute* addr = + msg.GetAddress(STUN_ATTR_XOR_MAPPED_ADDRESS); + CheckStunAddressAttribute(addr, STUN_ADDRESS_IPV6, kTestMessagePort1, + test_address); + + // Owner with a different transaction ID. + msg2.SetTransactionID("ABCDABCDABCD"); + StunXorAddressAttribute addr2(STUN_ATTR_XOR_MAPPED_ADDRESS, 20, NULL); + addr2.SetIP(addr->ipaddr()); + addr2.SetPort(addr->port()); + addr2.SetOwner(&msg2); + // The internal IP address shouldn't change. + ASSERT_EQ(addr2.ipaddr(), addr->ipaddr()); + + rtc::ByteBufferWriter correct_buf; + rtc::ByteBufferWriter wrong_buf; + EXPECT_TRUE(addr->Write(&correct_buf)); + EXPECT_TRUE(addr2.Write(&wrong_buf)); + // But when written out, the buffers should look different. + ASSERT_NE(0, + memcmp(correct_buf.Data(), wrong_buf.Data(), wrong_buf.Length())); + // And when reading a known good value, the address should be wrong. + rtc::ByteBufferReader read_buf(correct_buf); + addr2.Read(&read_buf); + ASSERT_NE(addr->ipaddr(), addr2.ipaddr()); + addr2.SetIP(addr->ipaddr()); + addr2.SetPort(addr->port()); + // Try writing with no owner at all, should fail and write nothing. 
+ addr2.SetOwner(NULL); + ASSERT_EQ(addr2.ipaddr(), addr->ipaddr()); + wrong_buf.Clear(); + EXPECT_FALSE(addr2.Write(&wrong_buf)); + ASSERT_EQ(0U, wrong_buf.Length()); +} + +TEST_F(StunTest, SetIPv4XorAddressAttributeOwner) { + // Unlike the IPv6XorAddressAttributeOwner test, IPv4 XOR address attributes + // should _not_ be affected by a change in owner. IPv4 XOR address uses the + // magic cookie value which is fixed. + StunMessage msg; + StunMessage msg2; + size_t size = ReadStunMessage(&msg, kStunMessageWithIPv4XorMappedAddress); + + rtc::IPAddress test_address(kIPv4TestAddress1); + + CheckStunHeader(msg, STUN_BINDING_RESPONSE, size); + CheckStunTransactionID(msg, kTestTransactionId1, kStunTransactionIdLength); + + const StunAddressAttribute* addr = + msg.GetAddress(STUN_ATTR_XOR_MAPPED_ADDRESS); + CheckStunAddressAttribute(addr, STUN_ADDRESS_IPV4, kTestMessagePort3, + test_address); + + // Owner with a different transaction ID. + msg2.SetTransactionID("ABCDABCDABCD"); + StunXorAddressAttribute addr2(STUN_ATTR_XOR_MAPPED_ADDRESS, 20, NULL); + addr2.SetIP(addr->ipaddr()); + addr2.SetPort(addr->port()); + addr2.SetOwner(&msg2); + // The internal IP address shouldn't change. + ASSERT_EQ(addr2.ipaddr(), addr->ipaddr()); + + rtc::ByteBufferWriter correct_buf; + rtc::ByteBufferWriter wrong_buf; + EXPECT_TRUE(addr->Write(&correct_buf)); + EXPECT_TRUE(addr2.Write(&wrong_buf)); + // The same address data should be written. + ASSERT_EQ(0, + memcmp(correct_buf.Data(), wrong_buf.Data(), wrong_buf.Length())); + // And an attribute should be able to un-XOR an address belonging to a message + // with a different transaction ID. + rtc::ByteBufferReader read_buf(correct_buf); + EXPECT_TRUE(addr2.Read(&read_buf)); + ASSERT_EQ(addr->ipaddr(), addr2.ipaddr()); + + // However, no owner is still an error, should fail and write nothing. 
+ addr2.SetOwner(NULL); + ASSERT_EQ(addr2.ipaddr(), addr->ipaddr()); + wrong_buf.Clear(); + EXPECT_FALSE(addr2.Write(&wrong_buf)); +} + +TEST_F(StunTest, CreateIPv6AddressAttribute) { + rtc::IPAddress test_ip(kIPv6TestAddress2); + + auto addr = StunAttribute::CreateAddress(STUN_ATTR_MAPPED_ADDRESS); + rtc::SocketAddress test_addr(test_ip, kTestMessagePort2); + addr->SetAddress(test_addr); + + CheckStunAddressAttribute(addr.get(), STUN_ADDRESS_IPV6, kTestMessagePort2, + test_ip); +} + +TEST_F(StunTest, CreateIPv4AddressAttribute) { + struct in_addr test_in_addr; + test_in_addr.s_addr = 0xBEB0B0BE; + rtc::IPAddress test_ip(test_in_addr); + + auto addr = StunAttribute::CreateAddress(STUN_ATTR_MAPPED_ADDRESS); + rtc::SocketAddress test_addr(test_ip, kTestMessagePort2); + addr->SetAddress(test_addr); + + CheckStunAddressAttribute(addr.get(), STUN_ADDRESS_IPV4, kTestMessagePort2, + test_ip); +} + +// Test that we don't care what order we set the parts of an address +TEST_F(StunTest, CreateAddressInArbitraryOrder) { + auto addr = StunAttribute::CreateAddress(STUN_ATTR_DESTINATION_ADDRESS); + // Port first + addr->SetPort(kTestMessagePort1); + addr->SetIP(rtc::IPAddress(kIPv4TestAddress1)); + ASSERT_EQ(kTestMessagePort1, addr->port()); + ASSERT_EQ(rtc::IPAddress(kIPv4TestAddress1), addr->ipaddr()); + + auto addr2 = StunAttribute::CreateAddress(STUN_ATTR_DESTINATION_ADDRESS); + // IP first + addr2->SetIP(rtc::IPAddress(kIPv4TestAddress1)); + addr2->SetPort(kTestMessagePort2); + ASSERT_EQ(kTestMessagePort2, addr2->port()); + ASSERT_EQ(rtc::IPAddress(kIPv4TestAddress1), addr2->ipaddr()); +} + +TEST_F(StunTest, WriteMessageWithIPv6AddressAttribute) { + StunMessage msg; + size_t size = sizeof(kStunMessageWithIPv6MappedAddress); + + rtc::IPAddress test_ip(kIPv6TestAddress1); + + msg.SetType(STUN_BINDING_REQUEST); + msg.SetTransactionID( + std::string(reinterpret_cast(kTestTransactionId1), + kStunTransactionIdLength)); + CheckStunTransactionID(msg, kTestTransactionId1, 
kStunTransactionIdLength); + + auto addr = StunAttribute::CreateAddress(STUN_ATTR_MAPPED_ADDRESS); + rtc::SocketAddress test_addr(test_ip, kTestMessagePort2); + addr->SetAddress(test_addr); + msg.AddAttribute(std::move(addr)); + + CheckStunHeader(msg, STUN_BINDING_REQUEST, (size - 20)); + + rtc::ByteBufferWriter out; + EXPECT_TRUE(msg.Write(&out)); + ASSERT_EQ(out.Length(), sizeof(kStunMessageWithIPv6MappedAddress)); + int len1 = static_cast(out.Length()); + rtc::ByteBufferReader read_buf(out); + std::string bytes; + read_buf.ReadString(&bytes, len1); + ASSERT_EQ(0, memcmp(bytes.c_str(), kStunMessageWithIPv6MappedAddress, len1)); +} + +TEST_F(StunTest, WriteMessageWithIPv4AddressAttribute) { + StunMessage msg; + size_t size = sizeof(kStunMessageWithIPv4MappedAddress); + + rtc::IPAddress test_ip(kIPv4TestAddress1); + + msg.SetType(STUN_BINDING_RESPONSE); + msg.SetTransactionID( + std::string(reinterpret_cast(kTestTransactionId1), + kStunTransactionIdLength)); + CheckStunTransactionID(msg, kTestTransactionId1, kStunTransactionIdLength); + + auto addr = StunAttribute::CreateAddress(STUN_ATTR_MAPPED_ADDRESS); + rtc::SocketAddress test_addr(test_ip, kTestMessagePort4); + addr->SetAddress(test_addr); + msg.AddAttribute(std::move(addr)); + + CheckStunHeader(msg, STUN_BINDING_RESPONSE, (size - 20)); + + rtc::ByteBufferWriter out; + EXPECT_TRUE(msg.Write(&out)); + ASSERT_EQ(out.Length(), sizeof(kStunMessageWithIPv4MappedAddress)); + int len1 = static_cast(out.Length()); + rtc::ByteBufferReader read_buf(out); + std::string bytes; + read_buf.ReadString(&bytes, len1); + ASSERT_EQ(0, memcmp(bytes.c_str(), kStunMessageWithIPv4MappedAddress, len1)); +} + +TEST_F(StunTest, WriteMessageWithIPv6XorAddressAttribute) { + StunMessage msg; + size_t size = sizeof(kStunMessageWithIPv6XorMappedAddress); + + rtc::IPAddress test_ip(kIPv6TestAddress1); + + msg.SetType(STUN_BINDING_RESPONSE); + msg.SetTransactionID( + std::string(reinterpret_cast(kTestTransactionId2), + 
kStunTransactionIdLength)); + CheckStunTransactionID(msg, kTestTransactionId2, kStunTransactionIdLength); + + auto addr = StunAttribute::CreateXorAddress(STUN_ATTR_XOR_MAPPED_ADDRESS); + rtc::SocketAddress test_addr(test_ip, kTestMessagePort1); + addr->SetAddress(test_addr); + msg.AddAttribute(std::move(addr)); + + CheckStunHeader(msg, STUN_BINDING_RESPONSE, (size - 20)); + + rtc::ByteBufferWriter out; + EXPECT_TRUE(msg.Write(&out)); + ASSERT_EQ(out.Length(), sizeof(kStunMessageWithIPv6XorMappedAddress)); + int len1 = static_cast(out.Length()); + rtc::ByteBufferReader read_buf(out); + std::string bytes; + read_buf.ReadString(&bytes, len1); + ASSERT_EQ(0, + memcmp(bytes.c_str(), kStunMessageWithIPv6XorMappedAddress, len1)); +} + +TEST_F(StunTest, WriteMessageWithIPv4XoreAddressAttribute) { + StunMessage msg; + size_t size = sizeof(kStunMessageWithIPv4XorMappedAddress); + + rtc::IPAddress test_ip(kIPv4TestAddress1); + + msg.SetType(STUN_BINDING_RESPONSE); + msg.SetTransactionID( + std::string(reinterpret_cast(kTestTransactionId1), + kStunTransactionIdLength)); + CheckStunTransactionID(msg, kTestTransactionId1, kStunTransactionIdLength); + + auto addr = StunAttribute::CreateXorAddress(STUN_ATTR_XOR_MAPPED_ADDRESS); + rtc::SocketAddress test_addr(test_ip, kTestMessagePort3); + addr->SetAddress(test_addr); + msg.AddAttribute(std::move(addr)); + + CheckStunHeader(msg, STUN_BINDING_RESPONSE, (size - 20)); + + rtc::ByteBufferWriter out; + EXPECT_TRUE(msg.Write(&out)); + ASSERT_EQ(out.Length(), sizeof(kStunMessageWithIPv4XorMappedAddress)); + int len1 = static_cast(out.Length()); + rtc::ByteBufferReader read_buf(out); + std::string bytes; + read_buf.ReadString(&bytes, len1); + ASSERT_EQ(0, + memcmp(bytes.c_str(), kStunMessageWithIPv4XorMappedAddress, len1)); +} + +TEST_F(StunTest, ReadByteStringAttribute) { + StunMessage msg; + size_t size = ReadStunMessage(&msg, kStunMessageWithByteStringAttribute); + + CheckStunHeader(msg, STUN_BINDING_REQUEST, size); + 
CheckStunTransactionID(msg, kTestTransactionId2, kStunTransactionIdLength); + const StunByteStringAttribute* username = + msg.GetByteString(STUN_ATTR_USERNAME); + ASSERT_TRUE(username != NULL); + EXPECT_EQ(kTestUserName1, username->GetString()); +} + +TEST_F(StunTest, ReadPaddedByteStringAttribute) { + StunMessage msg; + size_t size = + ReadStunMessage(&msg, kStunMessageWithPaddedByteStringAttribute); + ASSERT_NE(0U, size); + CheckStunHeader(msg, STUN_BINDING_REQUEST, size); + CheckStunTransactionID(msg, kTestTransactionId2, kStunTransactionIdLength); + const StunByteStringAttribute* username = + msg.GetByteString(STUN_ATTR_USERNAME); + ASSERT_TRUE(username != NULL); + EXPECT_EQ(kTestUserName2, username->GetString()); +} + +TEST_F(StunTest, ReadErrorCodeAttribute) { + StunMessage msg; + size_t size = ReadStunMessage(&msg, kStunMessageWithErrorAttribute); + + CheckStunHeader(msg, STUN_BINDING_ERROR_RESPONSE, size); + CheckStunTransactionID(msg, kTestTransactionId1, kStunTransactionIdLength); + const StunErrorCodeAttribute* errorcode = msg.GetErrorCode(); + ASSERT_TRUE(errorcode != NULL); + EXPECT_EQ(kTestErrorClass, errorcode->eclass()); + EXPECT_EQ(kTestErrorNumber, errorcode->number()); + EXPECT_EQ(kTestErrorReason, errorcode->reason()); + EXPECT_EQ(kTestErrorCode, errorcode->code()); + EXPECT_EQ(kTestErrorCode, msg.GetErrorCodeValue()); +} + +// Test that GetErrorCodeValue returns STUN_ERROR_GLOBAL_FAILURE if the message +// in question doesn't have an error code attribute, rather than crashing. 
+TEST_F(StunTest, GetErrorCodeValueWithNoErrorAttribute) { + StunMessage msg; + ReadStunMessage(&msg, kStunMessageWithIPv6MappedAddress); + EXPECT_EQ(STUN_ERROR_GLOBAL_FAILURE, msg.GetErrorCodeValue()); +} + +TEST_F(StunTest, ReadMessageWithAUInt16ListAttribute) { + StunMessage msg; + size_t size = ReadStunMessage(&msg, kStunMessageWithUInt16ListAttribute); + CheckStunHeader(msg, STUN_BINDING_REQUEST, size); + const StunUInt16ListAttribute* types = msg.GetUnknownAttributes(); + ASSERT_TRUE(types != NULL); + EXPECT_EQ(3U, types->Size()); + EXPECT_EQ(0x1U, types->GetType(0)); + EXPECT_EQ(0x1000U, types->GetType(1)); + EXPECT_EQ(0xAB0CU, types->GetType(2)); +} + +TEST_F(StunTest, ReadMessageWithAnUnknownAttribute) { + StunMessage msg; + size_t size = ReadStunMessage(&msg, kStunMessageWithUnknownAttribute); + CheckStunHeader(msg, STUN_BINDING_REQUEST, size); + + // Parsing should have succeeded and there should be a USERNAME attribute + const StunByteStringAttribute* username = + msg.GetByteString(STUN_ATTR_USERNAME); + ASSERT_TRUE(username != NULL); + EXPECT_EQ(kTestUserName2, username->GetString()); +} + +TEST_F(StunTest, ReadMessageWithOriginAttribute) { + StunMessage msg; + size_t size = ReadStunMessage(&msg, kStunMessageWithOriginAttribute); + CheckStunHeader(msg, STUN_BINDING_REQUEST, size); + const StunByteStringAttribute* origin = msg.GetByteString(STUN_ATTR_ORIGIN); + ASSERT_TRUE(origin != NULL); + EXPECT_EQ(kTestOrigin, origin->GetString()); +} + +TEST_F(StunTest, WriteMessageWithAnErrorCodeAttribute) { + StunMessage msg; + size_t size = sizeof(kStunMessageWithErrorAttribute); + + msg.SetType(STUN_BINDING_ERROR_RESPONSE); + msg.SetTransactionID( + std::string(reinterpret_cast(kTestTransactionId1), + kStunTransactionIdLength)); + CheckStunTransactionID(msg, kTestTransactionId1, kStunTransactionIdLength); + auto errorcode = StunAttribute::CreateErrorCode(); + errorcode->SetCode(kTestErrorCode); + errorcode->SetReason(kTestErrorReason); + 
msg.AddAttribute(std::move(errorcode)); + CheckStunHeader(msg, STUN_BINDING_ERROR_RESPONSE, (size - 20)); + + rtc::ByteBufferWriter out; + EXPECT_TRUE(msg.Write(&out)); + ASSERT_EQ(size, out.Length()); + // No padding. + ASSERT_EQ(0, memcmp(out.Data(), kStunMessageWithErrorAttribute, size)); +} + +TEST_F(StunTest, WriteMessageWithAUInt16ListAttribute) { + StunMessage msg; + size_t size = sizeof(kStunMessageWithUInt16ListAttribute); + + msg.SetType(STUN_BINDING_REQUEST); + msg.SetTransactionID( + std::string(reinterpret_cast(kTestTransactionId2), + kStunTransactionIdLength)); + CheckStunTransactionID(msg, kTestTransactionId2, kStunTransactionIdLength); + auto list = StunAttribute::CreateUnknownAttributes(); + list->AddType(0x1U); + list->AddType(0x1000U); + list->AddType(0xAB0CU); + msg.AddAttribute(std::move(list)); + CheckStunHeader(msg, STUN_BINDING_REQUEST, (size - 20)); + + rtc::ByteBufferWriter out; + EXPECT_TRUE(msg.Write(&out)); + ASSERT_EQ(size, out.Length()); + // Check everything up to the padding. + ASSERT_EQ(0, + memcmp(out.Data(), kStunMessageWithUInt16ListAttribute, size - 2)); +} + +TEST_F(StunTest, WriteMessageWithOriginAttribute) { + StunMessage msg; + size_t size = sizeof(kStunMessageWithOriginAttribute); + + msg.SetType(STUN_BINDING_REQUEST); + msg.SetTransactionID( + std::string(reinterpret_cast(kTestTransactionId1), + kStunTransactionIdLength)); + auto origin = + std::make_unique(STUN_ATTR_ORIGIN, kTestOrigin); + msg.AddAttribute(std::move(origin)); + + rtc::ByteBufferWriter out; + EXPECT_TRUE(msg.Write(&out)); + ASSERT_EQ(size, out.Length()); + // Check everything up to the padding + ASSERT_EQ(0, memcmp(out.Data(), kStunMessageWithOriginAttribute, size - 2)); +} + +// Test that we fail to read messages with invalid lengths. 
+void CheckFailureToRead(const unsigned char* testcase, size_t length) { + StunMessage msg; + const char* input = reinterpret_cast(testcase); + rtc::ByteBufferReader buf(input, length); + ASSERT_FALSE(msg.Read(&buf)); +} + +TEST_F(StunTest, FailToReadInvalidMessages) { + CheckFailureToRead(kStunMessageWithZeroLength, + kRealLengthOfInvalidLengthTestCases); + CheckFailureToRead(kStunMessageWithSmallLength, + kRealLengthOfInvalidLengthTestCases); + CheckFailureToRead(kStunMessageWithExcessLength, + kRealLengthOfInvalidLengthTestCases); +} + +// Test that we properly fail to read a non-STUN message. +TEST_F(StunTest, FailToReadRtcpPacket) { + CheckFailureToRead(kRtcpPacket, sizeof(kRtcpPacket)); +} + +// Check our STUN message validation code against the RFC5769 test messages. +TEST_F(StunTest, ValidateMessageIntegrity) { + // Try the messages from RFC 5769. + EXPECT_TRUE(StunMessage::ValidateMessageIntegrity( + reinterpret_cast(kRfc5769SampleRequest), + sizeof(kRfc5769SampleRequest), kRfc5769SampleMsgPassword)); + EXPECT_FALSE(StunMessage::ValidateMessageIntegrity( + reinterpret_cast(kRfc5769SampleRequest), + sizeof(kRfc5769SampleRequest), "InvalidPassword")); + + EXPECT_TRUE(StunMessage::ValidateMessageIntegrity( + reinterpret_cast(kRfc5769SampleResponse), + sizeof(kRfc5769SampleResponse), kRfc5769SampleMsgPassword)); + EXPECT_FALSE(StunMessage::ValidateMessageIntegrity( + reinterpret_cast(kRfc5769SampleResponse), + sizeof(kRfc5769SampleResponse), "InvalidPassword")); + + EXPECT_TRUE(StunMessage::ValidateMessageIntegrity( + reinterpret_cast(kRfc5769SampleResponseIPv6), + sizeof(kRfc5769SampleResponseIPv6), kRfc5769SampleMsgPassword)); + EXPECT_FALSE(StunMessage::ValidateMessageIntegrity( + reinterpret_cast(kRfc5769SampleResponseIPv6), + sizeof(kRfc5769SampleResponseIPv6), "InvalidPassword")); + + // We first need to compute the key for the long-term authentication HMAC. 
+ std::string key; + ComputeStunCredentialHash(kRfc5769SampleMsgWithAuthUsername, + kRfc5769SampleMsgWithAuthRealm, + kRfc5769SampleMsgWithAuthPassword, &key); + EXPECT_TRUE(StunMessage::ValidateMessageIntegrity( + reinterpret_cast(kRfc5769SampleRequestLongTermAuth), + sizeof(kRfc5769SampleRequestLongTermAuth), key)); + EXPECT_FALSE(StunMessage::ValidateMessageIntegrity( + reinterpret_cast(kRfc5769SampleRequestLongTermAuth), + sizeof(kRfc5769SampleRequestLongTermAuth), "InvalidPassword")); + + // Try some edge cases. + EXPECT_FALSE(StunMessage::ValidateMessageIntegrity( + reinterpret_cast(kStunMessageWithZeroLength), + sizeof(kStunMessageWithZeroLength), kRfc5769SampleMsgPassword)); + EXPECT_FALSE(StunMessage::ValidateMessageIntegrity( + reinterpret_cast(kStunMessageWithExcessLength), + sizeof(kStunMessageWithExcessLength), kRfc5769SampleMsgPassword)); + EXPECT_FALSE(StunMessage::ValidateMessageIntegrity( + reinterpret_cast(kStunMessageWithSmallLength), + sizeof(kStunMessageWithSmallLength), kRfc5769SampleMsgPassword)); + + // Again, but with the lengths matching what is claimed in the headers. + EXPECT_FALSE(StunMessage::ValidateMessageIntegrity( + reinterpret_cast(kStunMessageWithZeroLength), + kStunHeaderSize + rtc::GetBE16(&kStunMessageWithZeroLength[2]), + kRfc5769SampleMsgPassword)); + EXPECT_FALSE(StunMessage::ValidateMessageIntegrity( + reinterpret_cast(kStunMessageWithExcessLength), + kStunHeaderSize + rtc::GetBE16(&kStunMessageWithExcessLength[2]), + kRfc5769SampleMsgPassword)); + EXPECT_FALSE(StunMessage::ValidateMessageIntegrity( + reinterpret_cast(kStunMessageWithSmallLength), + kStunHeaderSize + rtc::GetBE16(&kStunMessageWithSmallLength[2]), + kRfc5769SampleMsgPassword)); + + // Check that a too-short HMAC doesn't cause buffer overflow. 
+ EXPECT_FALSE(StunMessage::ValidateMessageIntegrity( + reinterpret_cast(kStunMessageWithBadHmacAtEnd), + sizeof(kStunMessageWithBadHmacAtEnd), kRfc5769SampleMsgPassword)); + + // Test that munging a single bit anywhere in the message causes the + // message-integrity check to fail, unless it is after the M-I attribute. + char buf[sizeof(kRfc5769SampleRequest)]; + memcpy(buf, kRfc5769SampleRequest, sizeof(kRfc5769SampleRequest)); + for (size_t i = 0; i < sizeof(buf); ++i) { + buf[i] ^= 0x01; + if (i > 0) + buf[i - 1] ^= 0x01; + EXPECT_EQ(i >= sizeof(buf) - 8, + StunMessage::ValidateMessageIntegrity(buf, sizeof(buf), + kRfc5769SampleMsgPassword)); + } +} + +// Validate that we generate correct MESSAGE-INTEGRITY attributes. +// Note the use of IceMessage instead of StunMessage; this is necessary because +// the RFC5769 test messages used include attributes not found in basic STUN. +TEST_F(StunTest, AddMessageIntegrity) { + IceMessage msg; + rtc::ByteBufferReader buf( + reinterpret_cast(kRfc5769SampleRequestWithoutMI), + sizeof(kRfc5769SampleRequestWithoutMI)); + EXPECT_TRUE(msg.Read(&buf)); + EXPECT_TRUE(msg.AddMessageIntegrity(kRfc5769SampleMsgPassword)); + const StunByteStringAttribute* mi_attr = + msg.GetByteString(STUN_ATTR_MESSAGE_INTEGRITY); + EXPECT_EQ(20U, mi_attr->length()); + EXPECT_EQ( + 0, memcmp(mi_attr->bytes(), kCalculatedHmac1, sizeof(kCalculatedHmac1))); + + rtc::ByteBufferWriter buf1; + EXPECT_TRUE(msg.Write(&buf1)); + EXPECT_TRUE(StunMessage::ValidateMessageIntegrity( + reinterpret_cast(buf1.Data()), buf1.Length(), + kRfc5769SampleMsgPassword)); + + IceMessage msg2; + rtc::ByteBufferReader buf2( + reinterpret_cast(kRfc5769SampleResponseWithoutMI), + sizeof(kRfc5769SampleResponseWithoutMI)); + EXPECT_TRUE(msg2.Read(&buf2)); + EXPECT_TRUE(msg2.AddMessageIntegrity(kRfc5769SampleMsgPassword)); + const StunByteStringAttribute* mi_attr2 = + msg2.GetByteString(STUN_ATTR_MESSAGE_INTEGRITY); + EXPECT_EQ(20U, mi_attr2->length()); + EXPECT_EQ( + 0, 
memcmp(mi_attr2->bytes(), kCalculatedHmac2, sizeof(kCalculatedHmac2))); + + rtc::ByteBufferWriter buf3; + EXPECT_TRUE(msg2.Write(&buf3)); + EXPECT_TRUE(StunMessage::ValidateMessageIntegrity( + reinterpret_cast(buf3.Data()), buf3.Length(), + kRfc5769SampleMsgPassword)); +} + +// Check our STUN message validation code against the RFC5769 test messages. +TEST_F(StunTest, ValidateMessageIntegrity32) { + // Try the messages from RFC 5769. + EXPECT_TRUE(StunMessage::ValidateMessageIntegrity32( + reinterpret_cast(kSampleRequestMI32), + sizeof(kSampleRequestMI32), kRfc5769SampleMsgPassword)); + EXPECT_FALSE(StunMessage::ValidateMessageIntegrity32( + reinterpret_cast(kSampleRequestMI32), + sizeof(kSampleRequestMI32), "InvalidPassword")); + + // Try some edge cases. + EXPECT_FALSE(StunMessage::ValidateMessageIntegrity32( + reinterpret_cast(kStunMessageWithZeroLength), + sizeof(kStunMessageWithZeroLength), kRfc5769SampleMsgPassword)); + EXPECT_FALSE(StunMessage::ValidateMessageIntegrity32( + reinterpret_cast(kStunMessageWithExcessLength), + sizeof(kStunMessageWithExcessLength), kRfc5769SampleMsgPassword)); + EXPECT_FALSE(StunMessage::ValidateMessageIntegrity32( + reinterpret_cast(kStunMessageWithSmallLength), + sizeof(kStunMessageWithSmallLength), kRfc5769SampleMsgPassword)); + + // Again, but with the lengths matching what is claimed in the headers. 
+ EXPECT_FALSE(StunMessage::ValidateMessageIntegrity32( + reinterpret_cast(kStunMessageWithZeroLength), + kStunHeaderSize + rtc::GetBE16(&kStunMessageWithZeroLength[2]), + kRfc5769SampleMsgPassword)); + EXPECT_FALSE(StunMessage::ValidateMessageIntegrity32( + reinterpret_cast(kStunMessageWithExcessLength), + kStunHeaderSize + rtc::GetBE16(&kStunMessageWithExcessLength[2]), + kRfc5769SampleMsgPassword)); + EXPECT_FALSE(StunMessage::ValidateMessageIntegrity32( + reinterpret_cast(kStunMessageWithSmallLength), + kStunHeaderSize + rtc::GetBE16(&kStunMessageWithSmallLength[2]), + kRfc5769SampleMsgPassword)); + + // Check that a too-short HMAC doesn't cause buffer overflow. + EXPECT_FALSE(StunMessage::ValidateMessageIntegrity32( + reinterpret_cast(kStunMessageWithBadHmacAtEnd), + sizeof(kStunMessageWithBadHmacAtEnd), kRfc5769SampleMsgPassword)); + + // Test that munging a single bit anywhere in the message causes the + // message-integrity check to fail, unless it is after the M-I attribute. + char buf[sizeof(kSampleRequestMI32)]; + memcpy(buf, kSampleRequestMI32, sizeof(kSampleRequestMI32)); + for (size_t i = 0; i < sizeof(buf); ++i) { + buf[i] ^= 0x01; + if (i > 0) + buf[i - 1] ^= 0x01; + EXPECT_EQ(i >= sizeof(buf) - 8, + StunMessage::ValidateMessageIntegrity32( + buf, sizeof(buf), kRfc5769SampleMsgPassword)); + } +} + +// Validate that we generate correct MESSAGE-INTEGRITY-32 attributes. 
+TEST_F(StunTest, AddMessageIntegrity32) { + IceMessage msg; + rtc::ByteBufferReader buf( + reinterpret_cast(kRfc5769SampleRequestWithoutMI), + sizeof(kRfc5769SampleRequestWithoutMI)); + EXPECT_TRUE(msg.Read(&buf)); + EXPECT_TRUE(msg.AddMessageIntegrity32(kRfc5769SampleMsgPassword)); + const StunByteStringAttribute* mi_attr = + msg.GetByteString(STUN_ATTR_GOOG_MESSAGE_INTEGRITY_32); + EXPECT_EQ(4U, mi_attr->length()); + EXPECT_EQ(0, memcmp(mi_attr->bytes(), kCalculatedHmac1_32, + sizeof(kCalculatedHmac1_32))); + + rtc::ByteBufferWriter buf1; + EXPECT_TRUE(msg.Write(&buf1)); + EXPECT_TRUE(StunMessage::ValidateMessageIntegrity32( + reinterpret_cast(buf1.Data()), buf1.Length(), + kRfc5769SampleMsgPassword)); + + IceMessage msg2; + rtc::ByteBufferReader buf2( + reinterpret_cast(kRfc5769SampleResponseWithoutMI), + sizeof(kRfc5769SampleResponseWithoutMI)); + EXPECT_TRUE(msg2.Read(&buf2)); + EXPECT_TRUE(msg2.AddMessageIntegrity32(kRfc5769SampleMsgPassword)); + const StunByteStringAttribute* mi_attr2 = + msg2.GetByteString(STUN_ATTR_GOOG_MESSAGE_INTEGRITY_32); + EXPECT_EQ(4U, mi_attr2->length()); + EXPECT_EQ(0, memcmp(mi_attr2->bytes(), kCalculatedHmac2_32, + sizeof(kCalculatedHmac2_32))); + + rtc::ByteBufferWriter buf3; + EXPECT_TRUE(msg2.Write(&buf3)); + EXPECT_TRUE(StunMessage::ValidateMessageIntegrity32( + reinterpret_cast(buf3.Data()), buf3.Length(), + kRfc5769SampleMsgPassword)); +} + +// Validate that the message validates if both MESSAGE-INTEGRITY-32 and +// MESSAGE-INTEGRITY are present in the message. +// This is not expected to be used, but is not forbidden. 
+TEST_F(StunTest, AddMessageIntegrity32AndMessageIntegrity) { + IceMessage msg; + auto attr = StunAttribute::CreateByteString(STUN_ATTR_USERNAME); + attr->CopyBytes("keso", sizeof("keso")); + msg.AddAttribute(std::move(attr)); + msg.AddMessageIntegrity32("password1"); + msg.AddMessageIntegrity("password2"); + + rtc::ByteBufferWriter buf1; + EXPECT_TRUE(msg.Write(&buf1)); + EXPECT_TRUE(StunMessage::ValidateMessageIntegrity32( + reinterpret_cast(buf1.Data()), buf1.Length(), "password1")); + EXPECT_TRUE(StunMessage::ValidateMessageIntegrity( + reinterpret_cast(buf1.Data()), buf1.Length(), "password2")); + + EXPECT_FALSE(StunMessage::ValidateMessageIntegrity32( + reinterpret_cast(buf1.Data()), buf1.Length(), "password2")); + EXPECT_FALSE(StunMessage::ValidateMessageIntegrity( + reinterpret_cast(buf1.Data()), buf1.Length(), "password1")); +} + +// Check our STUN message validation code against the RFC5769 test messages. +TEST_F(StunTest, ValidateFingerprint) { + EXPECT_TRUE(StunMessage::ValidateFingerprint( + reinterpret_cast(kRfc5769SampleRequest), + sizeof(kRfc5769SampleRequest))); + EXPECT_TRUE(StunMessage::ValidateFingerprint( + reinterpret_cast(kRfc5769SampleResponse), + sizeof(kRfc5769SampleResponse))); + EXPECT_TRUE(StunMessage::ValidateFingerprint( + reinterpret_cast(kRfc5769SampleResponseIPv6), + sizeof(kRfc5769SampleResponseIPv6))); + + EXPECT_FALSE(StunMessage::ValidateFingerprint( + reinterpret_cast(kStunMessageWithZeroLength), + sizeof(kStunMessageWithZeroLength))); + EXPECT_FALSE(StunMessage::ValidateFingerprint( + reinterpret_cast(kStunMessageWithExcessLength), + sizeof(kStunMessageWithExcessLength))); + EXPECT_FALSE(StunMessage::ValidateFingerprint( + reinterpret_cast(kStunMessageWithSmallLength), + sizeof(kStunMessageWithSmallLength))); + + // Test that munging a single bit anywhere in the message causes the + // fingerprint check to fail. 
+ char buf[sizeof(kRfc5769SampleRequest)]; + memcpy(buf, kRfc5769SampleRequest, sizeof(kRfc5769SampleRequest)); + for (size_t i = 0; i < sizeof(buf); ++i) { + buf[i] ^= 0x01; + if (i > 0) + buf[i - 1] ^= 0x01; + EXPECT_FALSE(StunMessage::ValidateFingerprint(buf, sizeof(buf))); + } + // Put them all back to normal and the check should pass again. + buf[sizeof(buf) - 1] ^= 0x01; + EXPECT_TRUE(StunMessage::ValidateFingerprint(buf, sizeof(buf))); +} + +TEST_F(StunTest, AddFingerprint) { + IceMessage msg; + rtc::ByteBufferReader buf( + reinterpret_cast(kRfc5769SampleRequestWithoutMI), + sizeof(kRfc5769SampleRequestWithoutMI)); + EXPECT_TRUE(msg.Read(&buf)); + EXPECT_TRUE(msg.AddFingerprint()); + + rtc::ByteBufferWriter buf1; + EXPECT_TRUE(msg.Write(&buf1)); + EXPECT_TRUE(StunMessage::ValidateFingerprint( + reinterpret_cast(buf1.Data()), buf1.Length())); +} + +// Sample "GTURN" relay message. +// clang-format off +// clang formatting doesn't respect inline comments. +static const unsigned char kRelayMessage[] = { + 0x00, 0x01, 0x00, 88, // message header + 0x21, 0x12, 0xA4, 0x42, // magic cookie + '0', '1', '2', '3', // transaction id + '4', '5', '6', '7', + '8', '9', 'a', 'b', + 0x00, 0x01, 0x00, 8, // mapped address + 0x00, 0x01, 0x00, 13, + 0x00, 0x00, 0x00, 17, + 0x00, 0x06, 0x00, 12, // username + 'a', 'b', 'c', 'd', + 'e', 'f', 'g', 'h', + 'i', 'j', 'k', 'l', + 0x00, 0x0d, 0x00, 4, // lifetime + 0x00, 0x00, 0x00, 11, + 0x00, 0x0f, 0x00, 4, // magic cookie + 0x72, 0xc6, 0x4b, 0xc6, + 0x00, 0x10, 0x00, 4, // bandwidth + 0x00, 0x00, 0x00, 6, + 0x00, 0x11, 0x00, 8, // destination address + 0x00, 0x01, 0x00, 13, + 0x00, 0x00, 0x00, 17, + 0x00, 0x12, 0x00, 8, // source address 2 + 0x00, 0x01, 0x00, 13, + 0x00, 0x00, 0x00, 17, + 0x00, 0x13, 0x00, 7, // data + 'a', 'b', 'c', 'd', + 'e', 'f', 'g', 0 // DATA must be padded per rfc5766. +}; +// clang-format on + +// Test that we can read the GTURN-specific fields. 
+TEST_F(StunTest, ReadRelayMessage) { + RelayMessage msg, msg2; + + const char* input = reinterpret_cast(kRelayMessage); + size_t size = sizeof(kRelayMessage); + rtc::ByteBufferReader buf(input, size); + EXPECT_TRUE(msg.Read(&buf)); + + EXPECT_EQ(STUN_BINDING_REQUEST, msg.type()); + EXPECT_EQ(size - 20, msg.length()); + EXPECT_EQ("0123456789ab", msg.transaction_id()); + + msg2.SetType(STUN_BINDING_REQUEST); + msg2.SetTransactionID("0123456789ab"); + + in_addr legacy_in_addr; + legacy_in_addr.s_addr = htonl(17U); + rtc::IPAddress legacy_ip(legacy_in_addr); + + const StunAddressAttribute* addr = msg.GetAddress(STUN_ATTR_MAPPED_ADDRESS); + ASSERT_TRUE(addr != NULL); + EXPECT_EQ(1, addr->family()); + EXPECT_EQ(13, addr->port()); + EXPECT_EQ(legacy_ip, addr->ipaddr()); + + auto addr2 = StunAttribute::CreateAddress(STUN_ATTR_MAPPED_ADDRESS); + addr2->SetPort(13); + addr2->SetIP(legacy_ip); + msg2.AddAttribute(std::move(addr2)); + + const StunByteStringAttribute* bytes = msg.GetByteString(STUN_ATTR_USERNAME); + ASSERT_TRUE(bytes != NULL); + EXPECT_EQ(12U, bytes->length()); + EXPECT_EQ("abcdefghijkl", bytes->GetString()); + + auto bytes2 = StunAttribute::CreateByteString(STUN_ATTR_USERNAME); + bytes2->CopyBytes("abcdefghijkl"); + msg2.AddAttribute(std::move(bytes2)); + + const StunUInt32Attribute* uval = msg.GetUInt32(STUN_ATTR_LIFETIME); + ASSERT_TRUE(uval != NULL); + EXPECT_EQ(11U, uval->value()); + + auto uval2 = StunAttribute::CreateUInt32(STUN_ATTR_LIFETIME); + uval2->SetValue(11); + msg2.AddAttribute(std::move(uval2)); + + bytes = msg.GetByteString(STUN_ATTR_MAGIC_COOKIE); + ASSERT_TRUE(bytes != NULL); + EXPECT_EQ(4U, bytes->length()); + EXPECT_EQ(0, memcmp(bytes->bytes(), TURN_MAGIC_COOKIE_VALUE, + sizeof(TURN_MAGIC_COOKIE_VALUE))); + + bytes2 = StunAttribute::CreateByteString(STUN_ATTR_MAGIC_COOKIE); + bytes2->CopyBytes(reinterpret_cast(TURN_MAGIC_COOKIE_VALUE), + sizeof(TURN_MAGIC_COOKIE_VALUE)); + msg2.AddAttribute(std::move(bytes2)); + + uval = 
msg.GetUInt32(STUN_ATTR_BANDWIDTH); + ASSERT_TRUE(uval != NULL); + EXPECT_EQ(6U, uval->value()); + + uval2 = StunAttribute::CreateUInt32(STUN_ATTR_BANDWIDTH); + uval2->SetValue(6); + msg2.AddAttribute(std::move(uval2)); + + addr = msg.GetAddress(STUN_ATTR_DESTINATION_ADDRESS); + ASSERT_TRUE(addr != NULL); + EXPECT_EQ(1, addr->family()); + EXPECT_EQ(13, addr->port()); + EXPECT_EQ(legacy_ip, addr->ipaddr()); + + addr2 = StunAttribute::CreateAddress(STUN_ATTR_DESTINATION_ADDRESS); + addr2->SetPort(13); + addr2->SetIP(legacy_ip); + msg2.AddAttribute(std::move(addr2)); + + addr = msg.GetAddress(STUN_ATTR_SOURCE_ADDRESS2); + ASSERT_TRUE(addr != NULL); + EXPECT_EQ(1, addr->family()); + EXPECT_EQ(13, addr->port()); + EXPECT_EQ(legacy_ip, addr->ipaddr()); + + addr2 = StunAttribute::CreateAddress(STUN_ATTR_SOURCE_ADDRESS2); + addr2->SetPort(13); + addr2->SetIP(legacy_ip); + msg2.AddAttribute(std::move(addr2)); + + bytes = msg.GetByteString(STUN_ATTR_DATA); + ASSERT_TRUE(bytes != NULL); + EXPECT_EQ(7U, bytes->length()); + EXPECT_EQ("abcdefg", bytes->GetString()); + + bytes2 = StunAttribute::CreateByteString(STUN_ATTR_DATA); + bytes2->CopyBytes("abcdefg"); + msg2.AddAttribute(std::move(bytes2)); + + rtc::ByteBufferWriter out; + EXPECT_TRUE(msg.Write(&out)); + EXPECT_EQ(size, out.Length()); + size_t len1 = out.Length(); + rtc::ByteBufferReader read_buf(out); + std::string outstring; + read_buf.ReadString(&outstring, len1); + EXPECT_EQ(0, memcmp(outstring.c_str(), input, len1)); + + rtc::ByteBufferWriter out2; + EXPECT_TRUE(msg2.Write(&out2)); + EXPECT_EQ(size, out2.Length()); + size_t len2 = out2.Length(); + rtc::ByteBufferReader read_buf2(out2); + std::string outstring2; + read_buf2.ReadString(&outstring2, len2); + EXPECT_EQ(0, memcmp(outstring2.c_str(), input, len2)); +} + +// Test that we can remove attribute from a message. +TEST_F(StunTest, RemoveAttribute) { + StunMessage msg; + + // Removing something that doesn't exist should return nullptr.
+ EXPECT_EQ(msg.RemoveAttribute(STUN_ATTR_USERNAME), nullptr); + + { + auto attr = StunAttribute::CreateByteString(STUN_ATTR_USERNAME); + attr->CopyBytes("kes", sizeof("kes")); + msg.AddAttribute(std::move(attr)); + } + + size_t len = msg.length(); + { + auto attr = msg.RemoveAttribute(STUN_ATTR_USERNAME); + ASSERT_NE(attr, nullptr); + EXPECT_EQ(attr->type(), STUN_ATTR_USERNAME); + EXPECT_STREQ("kes", + static_cast(attr.get())->bytes()); + EXPECT_LT(msg.length(), len); + } + + // Now add same attribute type twice. + { + auto attr = StunAttribute::CreateByteString(STUN_ATTR_USERNAME); + attr->CopyBytes("kes", sizeof("kes")); + msg.AddAttribute(std::move(attr)); + } + + { + auto attr = StunAttribute::CreateByteString(STUN_ATTR_USERNAME); + attr->CopyBytes("kenta", sizeof("kenta")); + msg.AddAttribute(std::move(attr)); + } + + // Remove should remove the last added occurrence. + { + auto attr = msg.RemoveAttribute(STUN_ATTR_USERNAME); + ASSERT_NE(attr, nullptr); + EXPECT_EQ(attr->type(), STUN_ATTR_USERNAME); + EXPECT_STREQ("kenta", + static_cast(attr.get())->bytes()); + } + + // Remove should remove the last added occurrence. + { + auto attr = msg.RemoveAttribute(STUN_ATTR_USERNAME); + ASSERT_NE(attr, nullptr); + EXPECT_EQ(attr->type(), STUN_ATTR_USERNAME); + EXPECT_STREQ("kes", + static_cast(attr.get())->bytes()); + } + + // Removing something that doesn't exist should return nullptr. + EXPECT_EQ(msg.RemoveAttribute(STUN_ATTR_USERNAME), nullptr); +} + +// Test that we can clear all attributes from a message.
+TEST_F(StunTest, ClearAttributes) { + StunMessage msg; + + auto attr = StunAttribute::CreateByteString(STUN_ATTR_USERNAME); + attr->CopyBytes("kes", sizeof("kes")); + msg.AddAttribute(std::move(attr)); + size_t len = msg.length(); + + msg.ClearAttributes(); + EXPECT_EQ(msg.length(), len - /* 3 + 1 byte padding + header */ 8); + EXPECT_EQ(nullptr, msg.GetByteString(STUN_ATTR_USERNAME)); +} + +// Test CopyStunAttribute +TEST_F(StunTest, CopyAttribute) { + rtc::ByteBufferWriter buf; + rtc::ByteBufferWriter* buffer_ptrs[] = {&buf, nullptr}; + // Test both with and without supplied ByteBufferWriter. + for (auto buffer_ptr : buffer_ptrs) { + { // Test StunByteStringAttribute. + auto attr = StunAttribute::CreateByteString(STUN_ATTR_USERNAME); + attr->CopyBytes("kes", sizeof("kes")); + + auto copy = CopyStunAttribute(*attr.get(), buffer_ptr); + ASSERT_EQ(copy->value_type(), STUN_VALUE_BYTE_STRING); + EXPECT_STREQ("kes", + static_cast(copy.get())->bytes()); + } + + { // Test StunAddressAttribute. + rtc::IPAddress test_ip(kIPv6TestAddress2); + auto addr = StunAttribute::CreateAddress(STUN_ATTR_MAPPED_ADDRESS); + rtc::SocketAddress test_addr(test_ip, kTestMessagePort2); + addr->SetAddress(test_addr); + CheckStunAddressAttribute(addr.get(), STUN_ADDRESS_IPV6, + kTestMessagePort2, test_ip); + + auto copy = CopyStunAttribute(*addr.get(), buffer_ptr); + ASSERT_EQ(copy->value_type(), STUN_VALUE_ADDRESS); + CheckStunAddressAttribute(static_cast(copy.get()), + STUN_ADDRESS_IPV6, kTestMessagePort2, test_ip); + } + + { // Test StunAddressAttribute. 
+ rtc::IPAddress test_ip(kIPv6TestAddress2); + auto addr = StunAttribute::CreateAddress(STUN_ATTR_XOR_MAPPED_ADDRESS); + rtc::SocketAddress test_addr(test_ip, kTestMessagePort2); + addr->SetAddress(test_addr); + CheckStunAddressAttribute(addr.get(), STUN_ADDRESS_IPV6, + kTestMessagePort2, test_ip); + + auto copy = CopyStunAttribute(*addr.get(), buffer_ptr); + ASSERT_EQ(copy->value_type(), STUN_VALUE_ADDRESS); + CheckStunAddressAttribute(static_cast(copy.get()), + STUN_ADDRESS_IPV6, kTestMessagePort2, test_ip); + } + } +} + +// Test Clone +TEST_F(StunTest, Clone) { + IceMessage msg; + { + auto errorcode = StunAttribute::CreateErrorCode(); + errorcode->SetCode(kTestErrorCode); + errorcode->SetReason(kTestErrorReason); + msg.AddAttribute(std::move(errorcode)); + } + { + auto bytes2 = StunAttribute::CreateByteString(STUN_ATTR_USERNAME); + bytes2->CopyBytes("abcdefghijkl"); + msg.AddAttribute(std::move(bytes2)); + } + { + auto uval2 = StunAttribute::CreateUInt32(STUN_ATTR_RETRANSMIT_COUNT); + uval2->SetValue(11); + msg.AddAttribute(std::move(uval2)); + } + { + auto addr = StunAttribute::CreateAddress(STUN_ATTR_MAPPED_ADDRESS); + addr->SetIP(rtc::IPAddress(kIPv6TestAddress1)); + addr->SetPort(kTestMessagePort1); + msg.AddAttribute(std::move(addr)); + } + auto copy = msg.Clone(); + ASSERT_NE(nullptr, copy.get()); + + msg.SetTransactionID("0123456789ab"); + copy->SetTransactionID("0123456789ab"); + + rtc::ByteBufferWriter out1; + EXPECT_TRUE(msg.Write(&out1)); + rtc::ByteBufferWriter out2; + EXPECT_TRUE(copy->Write(&out2)); + + ASSERT_EQ(out1.Length(), out2.Length()); + EXPECT_EQ(0, memcmp(out1.Data(), out2.Data(), out1.Length())); +} + +// Test EqualAttributes +TEST_F(StunTest, EqualAttributes) { + IceMessage msg; + { + auto errorcode = StunAttribute::CreateErrorCode(); + errorcode->SetCode(kTestErrorCode); + errorcode->SetReason(kTestErrorReason); + msg.AddAttribute(std::move(errorcode)); + } + { + auto bytes2 = StunAttribute::CreateByteString(STUN_ATTR_USERNAME); + 
bytes2->CopyBytes("abcdefghijkl"); + msg.AddAttribute(std::move(bytes2)); + } + { + auto uval2 = StunAttribute::CreateUInt32(STUN_ATTR_RETRANSMIT_COUNT); + uval2->SetValue(11); + msg.AddAttribute(std::move(uval2)); + } + { + auto addr = StunAttribute::CreateAddress(STUN_ATTR_MAPPED_ADDRESS); + addr->SetIP(rtc::IPAddress(kIPv6TestAddress1)); + addr->SetPort(kTestMessagePort1); + msg.AddAttribute(std::move(addr)); + } + auto copy = msg.Clone(); + ASSERT_NE(nullptr, copy.get()); + + EXPECT_TRUE(copy->EqualAttributes(&msg, [](int type) { return true; })); + + { + auto attr = StunAttribute::CreateByteString(STUN_ATTR_NONCE); + attr->CopyBytes("keso"); + msg.AddAttribute(std::move(attr)); + EXPECT_FALSE(copy->EqualAttributes(&msg, [](int type) { return true; })); + EXPECT_TRUE(copy->EqualAttributes( + &msg, [](int type) { return type != STUN_ATTR_NONCE; })); + } + + { + auto attr = StunAttribute::CreateByteString(STUN_ATTR_NONCE); + attr->CopyBytes("keso"); + copy->AddAttribute(std::move(attr)); + EXPECT_TRUE(copy->EqualAttributes(&msg, [](int type) { return true; })); + } + { + copy->RemoveAttribute(STUN_ATTR_NONCE); + auto attr = StunAttribute::CreateByteString(STUN_ATTR_NONCE); + attr->CopyBytes("kent"); + copy->AddAttribute(std::move(attr)); + EXPECT_FALSE(copy->EqualAttributes(&msg, [](int type) { return true; })); + EXPECT_TRUE(copy->EqualAttributes( + &msg, [](int type) { return type != STUN_ATTR_NONCE; })); + } + + { + msg.RemoveAttribute(STUN_ATTR_NONCE); + EXPECT_FALSE(copy->EqualAttributes(&msg, [](int type) { return true; })); + EXPECT_TRUE(copy->EqualAttributes( + &msg, [](int type) { return type != STUN_ATTR_NONCE; })); + } +} + +TEST_F(StunTest, ReduceTransactionIdIsHostOrderIndependent) { + std::string transaction_id = "abcdefghijkl"; + StunMessage message; + ASSERT_TRUE(message.SetTransactionID(transaction_id)); + uint32_t reduced_transaction_id = message.reduced_transaction_id(); + EXPECT_EQ(reduced_transaction_id, 1835954016u); +} + +TEST_F(StunTest, 
GoogMiscInfo) { + StunMessage msg; + const size_t size = + /* msg header */ 20 + + /* attr header */ 4 + + /* 3 * 2 rounded to multiple of 4 */ 8; + msg.SetType(STUN_BINDING_REQUEST); + msg.SetTransactionID("ABCDEFGH"); + auto list = + StunAttribute::CreateUInt16ListAttribute(STUN_ATTR_GOOG_MISC_INFO); + list->AddTypeAtIndex(0, 0x1U); + list->AddTypeAtIndex(3, 0x1000U); + list->AddTypeAtIndex(2, 0xAB0CU); + msg.AddAttribute(std::move(list)); + CheckStunHeader(msg, STUN_BINDING_REQUEST, (size - 20)); + + rtc::ByteBufferWriter out; + EXPECT_TRUE(msg.Write(&out)); + ASSERT_EQ(size, out.Length()); + + size_t read_size = ReadStunMessageTestCase( + &msg, reinterpret_cast(out.Data()), out.Length()); + ASSERT_EQ(read_size + 20, size); + CheckStunHeader(msg, STUN_BINDING_REQUEST, read_size); + const StunUInt16ListAttribute* types = + msg.GetUInt16List(STUN_ATTR_GOOG_MISC_INFO); + ASSERT_TRUE(types != NULL); + EXPECT_EQ(4U, types->Size()); + EXPECT_EQ(0x1U, types->GetType(0)); + EXPECT_EQ(0x0U, types->GetType(1)); + EXPECT_EQ(0x1000U, types->GetType(3)); + EXPECT_EQ(0xAB0CU, types->GetType(2)); +} + +TEST_F(StunTest, IsStunMethod) { + int methods[] = {STUN_BINDING_REQUEST}; + EXPECT_TRUE(StunMessage::IsStunMethod( + methods, reinterpret_cast(kRfc5769SampleRequest), + sizeof(kRfc5769SampleRequest))); +} + +} // namespace cricket diff --git a/api/transport/test/create_feedback_generator.cc b/api/transport/test/create_feedback_generator.cc new file mode 100644 index 0000000..7c83823 --- /dev/null +++ b/api/transport/test/create_feedback_generator.cc @@ -0,0 +1,23 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "api/transport/test/create_feedback_generator.h" + +#include + +#include "test/network/feedback_generator.h" + +namespace webrtc { + +std::unique_ptr CreateFeedbackGenerator( + FeedbackGenerator::Config confg) { + return std::make_unique(confg); +} + +} // namespace webrtc diff --git a/api/transport/test/create_feedback_generator.h b/api/transport/test/create_feedback_generator.h new file mode 100644 index 0000000..a1a2226 --- /dev/null +++ b/api/transport/test/create_feedback_generator.h @@ -0,0 +1,21 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef API_TRANSPORT_TEST_CREATE_FEEDBACK_GENERATOR_H_ +#define API_TRANSPORT_TEST_CREATE_FEEDBACK_GENERATOR_H_ + +#include + +#include "api/transport/test/feedback_generator_interface.h" + +namespace webrtc { +std::unique_ptr CreateFeedbackGenerator( + FeedbackGenerator::Config confg); +} // namespace webrtc +#endif // API_TRANSPORT_TEST_CREATE_FEEDBACK_GENERATOR_H_ diff --git a/api/transport/test/feedback_generator_interface.h b/api/transport/test/feedback_generator_interface.h new file mode 100644 index 0000000..6e5118c --- /dev/null +++ b/api/transport/test/feedback_generator_interface.h @@ -0,0 +1,37 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef API_TRANSPORT_TEST_FEEDBACK_GENERATOR_INTERFACE_H_ +#define API_TRANSPORT_TEST_FEEDBACK_GENERATOR_INTERFACE_H_ + +#include + +#include "api/test/simulated_network.h" +#include "api/transport/network_types.h" + +namespace webrtc { +class FeedbackGenerator { + public: + struct Config { + BuiltInNetworkBehaviorConfig send_link; + BuiltInNetworkBehaviorConfig return_link; + TimeDelta feedback_interval = TimeDelta::Millis(50); + DataSize feedback_packet_size = DataSize::Bytes(20); + }; + virtual ~FeedbackGenerator() = default; + virtual Timestamp Now() = 0; + virtual void Sleep(TimeDelta duration) = 0; + virtual void SendPacket(size_t size) = 0; + virtual std::vector PopFeedback() = 0; + virtual void SetSendConfig(BuiltInNetworkBehaviorConfig config) = 0; + virtual void SetReturnConfig(BuiltInNetworkBehaviorConfig config) = 0; + virtual void SetSendLinkCapacity(DataRate capacity) = 0; +}; +} // namespace webrtc +#endif // API_TRANSPORT_TEST_FEEDBACK_GENERATOR_INTERFACE_H_ diff --git a/api/transport/test/mock_network_control.h b/api/transport/test/mock_network_control.h new file mode 100644 index 0000000..f613004 --- /dev/null +++ b/api/transport/test/mock_network_control.h @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_TRANSPORT_TEST_MOCK_NETWORK_CONTROL_H_ +#define API_TRANSPORT_TEST_MOCK_NETWORK_CONTROL_H_ + +#include "api/transport/network_control.h" +#include "test/gmock.h" + +namespace webrtc { + +class MockNetworkStateEstimator : public NetworkStateEstimator { + public: + MOCK_METHOD(absl::optional, + GetCurrentEstimate, + (), + (override)); + MOCK_METHOD(void, + OnTransportPacketsFeedback, + (const TransportPacketsFeedback&), + (override)); + MOCK_METHOD(void, OnReceivedPacket, (const PacketResult&), (override)); + MOCK_METHOD(void, OnRouteChange, (const NetworkRouteChange&), (override)); +}; + +} // namespace webrtc + +#endif  // API_TRANSPORT_TEST_MOCK_NETWORK_CONTROL_H_ diff --git a/api/transport/webrtc_key_value_config.h b/api/transport/webrtc_key_value_config.h new file mode 100644 index 0000000..5666a82 --- /dev/null +++ b/api/transport/webrtc_key_value_config.h @@ -0,0 +1,33 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef API_TRANSPORT_WEBRTC_KEY_VALUE_CONFIG_H_ +#define API_TRANSPORT_WEBRTC_KEY_VALUE_CONFIG_H_ + +#include + +#include "absl/strings/string_view.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// An interface that provides a key-value mapping for configuring internal +// details of WebRTC. Note that there's no guarantees that the meaning of a +// particular key value mapping will be preserved over time and no announcements +// will be made if they are changed. It's up to the library user to ensure that +// the behavior does not break. 
+class RTC_EXPORT WebRtcKeyValueConfig { + public: + virtual ~WebRtcKeyValueConfig() = default; + // The configured value for the given key. Defaults to an empty string. + virtual std::string Lookup(absl::string_view key) const = 0; +}; +} // namespace webrtc + +#endif // API_TRANSPORT_WEBRTC_KEY_VALUE_CONFIG_H_ diff --git a/api/turn_customizer.h b/api/turn_customizer.h new file mode 100644 index 0000000..f0bf0d9 --- /dev/null +++ b/api/turn_customizer.h @@ -0,0 +1,45 @@ +/* + * Copyright 2017 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_TURN_CUSTOMIZER_H_ +#define API_TURN_CUSTOMIZER_H_ + +#include + +namespace cricket { +class PortInterface; +class StunMessage; +} // namespace cricket + +namespace webrtc { + +class TurnCustomizer { + public: + // This is called before a TURN message is sent. + // This could be used to add implementation specific attributes to a request. + virtual void MaybeModifyOutgoingStunMessage( + cricket::PortInterface* port, + cricket::StunMessage* message) = 0; + + // TURN can send data using channel data messages or Send indication. + // This method should return false if |data| should be sent using + // a Send indication instead of a ChannelData message, even if a + // channel is bound. 
+ virtual bool AllowChannelData(cricket::PortInterface* port, + const void* data, + size_t size, + bool payload) = 0; + + virtual ~TurnCustomizer() {} +}; + +} // namespace webrtc + +#endif // API_TURN_CUSTOMIZER_H_ diff --git a/api/uma_metrics.h b/api/uma_metrics.h new file mode 100644 index 0000000..8436d4f --- /dev/null +++ b/api/uma_metrics.h @@ -0,0 +1,159 @@ +/* + * Copyright 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This file contains enums related to IPv4/IPv6 metrics. + +#ifndef API_UMA_METRICS_H_ +#define API_UMA_METRICS_H_ + +#include "rtc_base/ref_count.h" + +namespace webrtc { + +// Currently this contains information related to WebRTC network/transport +// information. + +// The difference between PeerConnectionEnumCounter and +// PeerConnectionMetricsName is that the "EnumCounter" is only counting the +// occurrences of events, while "Name" has a value associated with it which is +// used to form a histogram. + +// This enum is backed by Chromium's histograms.xml, +// chromium/src/tools/metrics/histograms/histograms.xml +// Existing values cannot be re-ordered and new enums must be added +// before kBoundary. +enum PeerConnectionAddressFamilyCounter { + kPeerConnection_IPv4, + kPeerConnection_IPv6, + kBestConnections_IPv4, + kBestConnections_IPv6, + kPeerConnectionAddressFamilyCounter_Max, +}; + +// This enum defines types for UMA samples, which will have a range. +enum PeerConnectionMetricsName { + kNetworkInterfaces_IPv4, // Number of IPv4 interfaces. + kNetworkInterfaces_IPv6, // Number of IPv6 interfaces. + kTimeToConnect, // In milliseconds. 
+ kLocalCandidates_IPv4, // Number of IPv4 local candidates. + kLocalCandidates_IPv6, // Number of IPv6 local candidates. + kPeerConnectionMetricsName_Max +}; + +// The IceCandidatePairType has the format of +// _. It is recorded based on the +// type of candidate pair used when the PeerConnection first goes to a completed +// state. When BUNDLE is enabled, only the first transport gets recorded. +enum IceCandidatePairType { + // HostHost is deprecated. It was replaced with the set of types at the bottom + // to report private or public host IP address. + kIceCandidatePairHostHost, + kIceCandidatePairHostSrflx, + kIceCandidatePairHostRelay, + kIceCandidatePairHostPrflx, + kIceCandidatePairSrflxHost, + kIceCandidatePairSrflxSrflx, + kIceCandidatePairSrflxRelay, + kIceCandidatePairSrflxPrflx, + kIceCandidatePairRelayHost, + kIceCandidatePairRelaySrflx, + kIceCandidatePairRelayRelay, + kIceCandidatePairRelayPrflx, + kIceCandidatePairPrflxHost, + kIceCandidatePairPrflxSrflx, + kIceCandidatePairPrflxRelay, + + // The following 9 types tell whether local and remote hosts have hostname, + // private or public IP addresses. 
+ kIceCandidatePairHostPrivateHostPrivate, + kIceCandidatePairHostPrivateHostPublic, + kIceCandidatePairHostPublicHostPrivate, + kIceCandidatePairHostPublicHostPublic, + kIceCandidatePairHostNameHostName, + kIceCandidatePairHostNameHostPrivate, + kIceCandidatePairHostNameHostPublic, + kIceCandidatePairHostPrivateHostName, + kIceCandidatePairHostPublicHostName, + kIceCandidatePairMax +}; + +enum KeyExchangeProtocolType { + kEnumCounterKeyProtocolDtls, + kEnumCounterKeyProtocolSdes, + kEnumCounterKeyProtocolMax +}; + +enum KeyExchangeProtocolMedia { + kEnumCounterKeyProtocolMediaTypeDtlsAudio, + kEnumCounterKeyProtocolMediaTypeDtlsVideo, + kEnumCounterKeyProtocolMediaTypeDtlsData, + kEnumCounterKeyProtocolMediaTypeSdesAudio, + kEnumCounterKeyProtocolMediaTypeSdesVideo, + kEnumCounterKeyProtocolMediaTypeSdesData, + kEnumCounterKeyProtocolMediaTypeMax +}; + +enum SdpSemanticRequested { + kSdpSemanticRequestDefault, + kSdpSemanticRequestPlanB, + kSdpSemanticRequestUnifiedPlan, + kSdpSemanticRequestMax +}; + +enum SdpSemanticNegotiated { + kSdpSemanticNegotiatedNone, + kSdpSemanticNegotiatedPlanB, + kSdpSemanticNegotiatedUnifiedPlan, + kSdpSemanticNegotiatedMixed, + kSdpSemanticNegotiatedMax +}; + +// Metric which records the format of the received SDP for tracking how much the +// difference between Plan B and Unified Plan affect users. +enum SdpFormatReceived { + // No audio or video tracks. This is worth special casing since it seems to be + // the most common scenario (data-channel only). + kSdpFormatReceivedNoTracks, + // No more than one audio and one video track. Should be compatible with both + // Plan B and Unified Plan endpoints. + kSdpFormatReceivedSimple, + // More than one audio track or more than one video track in the Plan B format + // (e.g., one audio media section with multiple streams). + kSdpFormatReceivedComplexPlanB, + // More than one audio track or more than one video track in the Unified Plan + // format (e.g., two audio media sections). 
+ kSdpFormatReceivedComplexUnifiedPlan, + kSdpFormatReceivedMax +}; + +// Metric for counting the outcome of adding an ICE candidate +enum AddIceCandidateResult { + kAddIceCandidateSuccess, + kAddIceCandidateFailClosed, + kAddIceCandidateFailNoRemoteDescription, + kAddIceCandidateFailNullCandidate, + kAddIceCandidateFailNotValid, + kAddIceCandidateFailNotReady, + kAddIceCandidateFailInAddition, + kAddIceCandidateFailNotUsable, + kAddIceCandidateMax +}; + +// Metric for recording which api surface was used to enable simulcast. +enum SimulcastApiVersion { + kSimulcastApiVersionNone, + kSimulcastApiVersionLegacy, + kSimulcastApiVersionSpecCompliant, + kSimulcastApiVersionMax, +}; + +} // namespace webrtc + +#endif // API_UMA_METRICS_H_ diff --git a/api/units/BUILD.gn b/api/units/BUILD.gn new file mode 100644 index 0000000..45cdcd3 --- /dev/null +++ b/api/units/BUILD.gn @@ -0,0 +1,111 @@ +# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../webrtc.gni") + +rtc_library("data_rate") { + visibility = [ "*" ] + sources = [ + "data_rate.cc", + "data_rate.h", + ] + + deps = [ + ":data_size", + ":frequency", + ":time_delta", + "..:array_view", + "../../rtc_base:checks", + "../../rtc_base:stringutils", + "../../rtc_base/units:unit_base", + ] +} + +rtc_library("data_size") { + visibility = [ "*" ] + sources = [ + "data_size.cc", + "data_size.h", + ] + + deps = [ + "..:array_view", + "../../rtc_base:checks", + "../../rtc_base:stringutils", + "../../rtc_base/units:unit_base", + ] +} + +rtc_library("time_delta") { + visibility = [ "*" ] + sources = [ + "time_delta.cc", + "time_delta.h", + ] + + deps = [ + "..:array_view", + "../../rtc_base:checks", + "../../rtc_base:stringutils", + "../../rtc_base/units:unit_base", + ] +} + +rtc_library("frequency") { + visibility = [ "*" ] + sources = [ + "frequency.cc", + "frequency.h", + ] + + deps = [ + ":time_delta", + "..:array_view", + "../../rtc_base:checks", + "../../rtc_base:stringutils", + "../../rtc_base/units:unit_base", + ] +} + +rtc_library("timestamp") { + visibility = [ "*" ] + sources = [ + "timestamp.cc", + "timestamp.h", + ] + + deps = [ + ":time_delta", + "..:array_view", + "../../rtc_base:checks", + "../../rtc_base:stringutils", + "../../rtc_base/units:unit_base", + ] +} + +if (rtc_include_tests) { + rtc_library("units_unittests") { + testonly = true + sources = [ + "data_rate_unittest.cc", + "data_size_unittest.cc", + "frequency_unittest.cc", + "time_delta_unittest.cc", + "timestamp_unittest.cc", + ] + deps = [ + ":data_rate", + ":data_size", + ":frequency", + ":time_delta", + ":timestamp", + "../../rtc_base:logging", + "../../test:test_support", + ] + } +} diff --git a/api/units/OWNERS b/api/units/OWNERS new file mode 100644 index 0000000..53e076b --- /dev/null +++ b/api/units/OWNERS @@ -0,0 +1 @@ +srte@webrtc.org diff --git a/api/units/data_rate.cc b/api/units/data_rate.cc new file mode 100644 index 0000000..f9586c5 --- /dev/null +++ 
b/api/units/data_rate.cc @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/units/data_rate.h" + +#include "api/array_view.h" +#include "rtc_base/strings/string_builder.h" + +namespace webrtc { + +std::string ToString(DataRate value) { + char buf[64]; + rtc::SimpleStringBuilder sb(buf); + if (value.IsPlusInfinity()) { + sb << "+inf bps"; + } else if (value.IsMinusInfinity()) { + sb << "-inf bps"; + } else { + if (value.bps() == 0 || value.bps() % 1000 != 0) { + sb << value.bps() << " bps"; + } else { + sb << value.kbps() << " kbps"; + } + } + return sb.str(); +} +} // namespace webrtc diff --git a/api/units/data_rate.h b/api/units/data_rate.h new file mode 100644 index 0000000..5c8a61f --- /dev/null +++ b/api/units/data_rate.h @@ -0,0 +1,155 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_UNITS_DATA_RATE_H_ +#define API_UNITS_DATA_RATE_H_ + +#ifdef UNIT_TEST +#include // no-presubmit-check TODO(webrtc:8982) +#endif // UNIT_TEST + +#include +#include +#include + +#include "api/units/data_size.h" +#include "api/units/frequency.h" +#include "api/units/time_delta.h" +#include "rtc_base/checks.h" +#include "rtc_base/units/unit_base.h" + +namespace webrtc { +// DataRate is a class that represents a given data rate. This can be used to +// represent bandwidth, encoding bitrate, etc. The internal storage is bits per +// second (bps). +class DataRate final : public rtc_units_impl::RelativeUnit { + public: + template + static constexpr DataRate BitsPerSec(T value) { + static_assert(std::is_arithmetic::value, ""); + return FromValue(value); + } + template + static constexpr DataRate BytesPerSec(T value) { + static_assert(std::is_arithmetic::value, ""); + return FromFraction(8, value); + } + template + static constexpr DataRate KilobitsPerSec(T value) { + static_assert(std::is_arithmetic::value, ""); + return FromFraction(1000, value); + } + static constexpr DataRate Infinity() { return PlusInfinity(); } + + DataRate() = delete; + + template + constexpr T bps() const { + return ToValue(); + } + template + constexpr T bytes_per_sec() const { + return ToFraction<8, T>(); + } + template + constexpr T kbps() const { + return ToFraction<1000, T>(); + } + constexpr int64_t bps_or(int64_t fallback_value) const { + return ToValueOr(fallback_value); + } + constexpr int64_t kbps_or(int64_t fallback_value) const { + return ToFractionOr<1000>(fallback_value); + } + + private: + // Bits per second used internally to simplify debugging by making the value + // more recognizable. 
+ friend class rtc_units_impl::UnitBase; + using RelativeUnit::RelativeUnit; + static constexpr bool one_sided = true; +}; + +namespace data_rate_impl { +inline constexpr int64_t Microbits(const DataSize& size) { + constexpr int64_t kMaxBeforeConversion = + std::numeric_limits::max() / 8000000; + RTC_DCHECK_LE(size.bytes(), kMaxBeforeConversion) + << "size is too large to be expressed in microbits"; + return size.bytes() * 8000000; +} + +inline constexpr int64_t MillibytePerSec(const DataRate& size) { + constexpr int64_t kMaxBeforeConversion = + std::numeric_limits::max() / (1000 / 8); + RTC_DCHECK_LE(size.bps(), kMaxBeforeConversion) + << "rate is too large to be expressed in microbytes per second"; + return size.bps() * (1000 / 8); +} +} // namespace data_rate_impl + +inline constexpr DataRate operator/(const DataSize size, + const TimeDelta duration) { + return DataRate::BitsPerSec(data_rate_impl::Microbits(size) / duration.us()); +} +inline constexpr TimeDelta operator/(const DataSize size, const DataRate rate) { + return TimeDelta::Micros(data_rate_impl::Microbits(size) / rate.bps()); +} +inline constexpr DataSize operator*(const DataRate rate, + const TimeDelta duration) { + int64_t microbits = rate.bps() * duration.us(); + return DataSize::Bytes((microbits + 4000000) / 8000000); +} +inline constexpr DataSize operator*(const TimeDelta duration, + const DataRate rate) { + return rate * duration; +} + +inline constexpr DataSize operator/(const DataRate rate, + const Frequency frequency) { + int64_t millihertz = frequency.millihertz(); + // Note that the value is truncated here rather than rounded, potentially + // introducing an error of .5 bytes if rounding were expected. 
+ return DataSize::Bytes(data_rate_impl::MillibytePerSec(rate) / millihertz); +} +inline constexpr Frequency operator/(const DataRate rate, const DataSize size) { + return Frequency::MilliHertz(data_rate_impl::MillibytePerSec(rate) / + size.bytes()); +} +inline constexpr DataRate operator*(const DataSize size, + const Frequency frequency) { + RTC_DCHECK(frequency.IsZero() || + size.bytes() <= std::numeric_limits::max() / 8 / + frequency.millihertz()); + int64_t millibits_per_second = + size.bytes() * 8 * frequency.millihertz(); + return DataRate::BitsPerSec((millibits_per_second + 500) / 1000); +} +inline constexpr DataRate operator*(const Frequency frequency, + const DataSize size) { + return size * frequency; +} + +std::string ToString(DataRate value); +inline std::string ToLogString(DataRate value) { + return ToString(value); +} + +#ifdef UNIT_TEST +inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) + std::ostream& stream, // no-presubmit-check TODO(webrtc:8982) + DataRate value) { + return stream << ToString(value); +} +#endif // UNIT_TEST + +} // namespace webrtc + +#endif // API_UNITS_DATA_RATE_H_ diff --git a/api/units/data_rate_unittest.cc b/api/units/data_rate_unittest.cc new file mode 100644 index 0000000..f77b370 --- /dev/null +++ b/api/units/data_rate_unittest.cc @@ -0,0 +1,197 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/units/data_rate.h" + +#include "rtc_base/logging.h" +#include "test/gtest.h" + +namespace webrtc { +namespace test { + +TEST(DataRateTest, CompilesWithChecksAndLogs) { + DataRate a = DataRate::KilobitsPerSec(300); + DataRate b = DataRate::KilobitsPerSec(210); + RTC_CHECK_GT(a, b); + RTC_LOG(LS_INFO) << a; +} + +TEST(DataRateTest, ConstExpr) { + constexpr int64_t kValue = 12345; + constexpr DataRate kDataRateZero = DataRate::Zero(); + constexpr DataRate kDataRateInf = DataRate::Infinity(); + static_assert(kDataRateZero.IsZero(), ""); + static_assert(kDataRateInf.IsInfinite(), ""); + static_assert(kDataRateInf.bps_or(-1) == -1, ""); + static_assert(kDataRateInf > kDataRateZero, ""); + + constexpr DataRate kDataRateBps = DataRate::BitsPerSec(kValue); + constexpr DataRate kDataRateKbps = DataRate::KilobitsPerSec(kValue); + static_assert(kDataRateBps.bps() == kValue, ""); + static_assert(kDataRateBps.bps_or(0) == kValue, ""); + static_assert(kDataRateKbps.kbps_or(0) == kValue, ""); +} + +TEST(DataRateTest, GetBackSameValues) { + const int64_t kValue = 123 * 8; + EXPECT_EQ(DataRate::BitsPerSec(kValue).bps(), kValue); + EXPECT_EQ(DataRate::KilobitsPerSec(kValue).kbps(), kValue); +} + +TEST(DataRateTest, GetDifferentPrefix) { + const int64_t kValue = 123 * 8000; + EXPECT_EQ(DataRate::BitsPerSec(kValue).kbps(), kValue / 1000); +} + +TEST(DataRateTest, IdentityChecks) { + const int64_t kValue = 3000; + EXPECT_TRUE(DataRate::Zero().IsZero()); + EXPECT_FALSE(DataRate::BitsPerSec(kValue).IsZero()); + + EXPECT_TRUE(DataRate::Infinity().IsInfinite()); + EXPECT_FALSE(DataRate::Zero().IsInfinite()); + EXPECT_FALSE(DataRate::BitsPerSec(kValue).IsInfinite()); + + EXPECT_FALSE(DataRate::Infinity().IsFinite()); + EXPECT_TRUE(DataRate::BitsPerSec(kValue).IsFinite()); + EXPECT_TRUE(DataRate::Zero().IsFinite()); +} + +TEST(DataRateTest, ComparisonOperators) { + const int64_t kSmall = 450; + const int64_t kLarge = 451; + const DataRate small = 
DataRate::BitsPerSec(kSmall); + const DataRate large = DataRate::BitsPerSec(kLarge); + + EXPECT_EQ(DataRate::Zero(), DataRate::BitsPerSec(0)); + EXPECT_EQ(DataRate::Infinity(), DataRate::Infinity()); + EXPECT_EQ(small, small); + EXPECT_LE(small, small); + EXPECT_GE(small, small); + EXPECT_NE(small, large); + EXPECT_LE(small, large); + EXPECT_LT(small, large); + EXPECT_GE(large, small); + EXPECT_GT(large, small); + EXPECT_LT(DataRate::Zero(), small); + EXPECT_GT(DataRate::Infinity(), large); +} + +TEST(DataRateTest, ConvertsToAndFromDouble) { + const int64_t kValue = 128; + const double kDoubleValue = static_cast(kValue); + const double kDoubleKbps = kValue * 1e-3; + const double kFloatKbps = static_cast(kDoubleKbps); + + EXPECT_EQ(DataRate::BitsPerSec(kValue).bps(), kDoubleValue); + EXPECT_EQ(DataRate::BitsPerSec(kValue).kbps(), kDoubleKbps); + EXPECT_EQ(DataRate::BitsPerSec(kValue).kbps(), kFloatKbps); + EXPECT_EQ(DataRate::BitsPerSec(kDoubleValue).bps(), kValue); + EXPECT_EQ(DataRate::KilobitsPerSec(kDoubleKbps).bps(), kValue); + + const double kInfinity = std::numeric_limits::infinity(); + EXPECT_EQ(DataRate::Infinity().bps(), kInfinity); + EXPECT_TRUE(DataRate::BitsPerSec(kInfinity).IsInfinite()); + EXPECT_TRUE(DataRate::KilobitsPerSec(kInfinity).IsInfinite()); +} +TEST(DataRateTest, Clamping) { + const DataRate upper = DataRate::KilobitsPerSec(800); + const DataRate lower = DataRate::KilobitsPerSec(100); + const DataRate under = DataRate::KilobitsPerSec(100); + const DataRate inside = DataRate::KilobitsPerSec(500); + const DataRate over = DataRate::KilobitsPerSec(1000); + EXPECT_EQ(under.Clamped(lower, upper), lower); + EXPECT_EQ(inside.Clamped(lower, upper), inside); + EXPECT_EQ(over.Clamped(lower, upper), upper); + + DataRate mutable_rate = lower; + mutable_rate.Clamp(lower, upper); + EXPECT_EQ(mutable_rate, lower); + mutable_rate = inside; + mutable_rate.Clamp(lower, upper); + EXPECT_EQ(mutable_rate, inside); + mutable_rate = over; + 
mutable_rate.Clamp(lower, upper); + EXPECT_EQ(mutable_rate, upper); +} + +TEST(DataRateTest, MathOperations) { + const int64_t kValueA = 450; + const int64_t kValueB = 267; + const DataRate rate_a = DataRate::BitsPerSec(kValueA); + const DataRate rate_b = DataRate::BitsPerSec(kValueB); + const int32_t kInt32Value = 123; + const double kFloatValue = 123.0; + + EXPECT_EQ((rate_a + rate_b).bps(), kValueA + kValueB); + EXPECT_EQ((rate_a - rate_b).bps(), kValueA - kValueB); + + EXPECT_EQ((rate_a * kValueB).bps(), kValueA * kValueB); + EXPECT_EQ((rate_a * kInt32Value).bps(), kValueA * kInt32Value); + EXPECT_EQ((rate_a * kFloatValue).bps(), kValueA * kFloatValue); + + EXPECT_EQ(rate_a / rate_b, static_cast(kValueA) / kValueB); + + EXPECT_EQ((rate_a / 10).bps(), kValueA / 10); + EXPECT_NEAR((rate_a / 0.5).bps(), kValueA * 2, 1); + + DataRate mutable_rate = DataRate::BitsPerSec(kValueA); + mutable_rate += rate_b; + EXPECT_EQ(mutable_rate.bps(), kValueA + kValueB); + mutable_rate -= rate_a; + EXPECT_EQ(mutable_rate.bps(), kValueB); +} + +TEST(UnitConversionTest, DataRateAndDataSizeAndTimeDelta) { + const int64_t kSeconds = 5; + const int64_t kBitsPerSecond = 440; + const int64_t kBytes = 44000; + const TimeDelta delta_a = TimeDelta::Seconds(kSeconds); + const DataRate rate_b = DataRate::BitsPerSec(kBitsPerSecond); + const DataSize size_c = DataSize::Bytes(kBytes); + EXPECT_EQ((delta_a * rate_b).bytes(), kSeconds * kBitsPerSecond / 8); + EXPECT_EQ((rate_b * delta_a).bytes(), kSeconds * kBitsPerSecond / 8); + EXPECT_EQ((size_c / delta_a).bps(), kBytes * 8 / kSeconds); + EXPECT_EQ((size_c / rate_b).seconds(), kBytes * 8 / kBitsPerSecond); +} + +TEST(UnitConversionTest, DataRateAndDataSizeAndFrequency) { + const int64_t kHertz = 30; + const int64_t kBitsPerSecond = 96000; + const int64_t kBytes = 1200; + const Frequency freq_a = Frequency::Hertz(kHertz); + const DataRate rate_b = DataRate::BitsPerSec(kBitsPerSecond); + const DataSize size_c = DataSize::Bytes(kBytes); + 
EXPECT_EQ((freq_a * size_c).bps(), kHertz * kBytes * 8); + EXPECT_EQ((size_c * freq_a).bps(), kHertz * kBytes * 8); + EXPECT_EQ((rate_b / size_c).hertz(), kBitsPerSecond / kBytes / 8); + EXPECT_EQ((rate_b / freq_a).bytes(), kBitsPerSecond / kHertz / 8); +} + +TEST(UnitConversionDeathTest, DivisionFailsOnLargeSize) { + // Note that the failure is expected since the current implementation is + // implemented in a way that does not support division of large sizes. If + // the implementation is changed, this test can safely be removed. + const int64_t kJustSmallEnoughForDivision = + std::numeric_limits::max() / 8000000; + const DataSize large_size = DataSize::Bytes(kJustSmallEnoughForDivision); + const DataRate data_rate = DataRate::KilobitsPerSec(100); + const TimeDelta time_delta = TimeDelta::Millis(100); + EXPECT_TRUE((large_size / data_rate).IsFinite()); + EXPECT_TRUE((large_size / time_delta).IsFinite()); +#if GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) && RTC_DCHECK_IS_ON + const int64_t kToolargeForDivision = kJustSmallEnoughForDivision + 1; + const DataSize too_large_size = DataSize::Bytes(kToolargeForDivision); + EXPECT_DEATH(too_large_size / data_rate, ""); + EXPECT_DEATH(too_large_size / time_delta, ""); +#endif // GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) && RTC_DCHECK_IS_ON +} +} // namespace test +} // namespace webrtc diff --git a/api/units/data_size.cc b/api/units/data_size.cc new file mode 100644 index 0000000..45487df --- /dev/null +++ b/api/units/data_size.cc @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/units/data_size.h" + +#include "api/array_view.h" +#include "rtc_base/strings/string_builder.h" + +namespace webrtc { + +std::string ToString(DataSize value) { + char buf[64]; + rtc::SimpleStringBuilder sb(buf); + if (value.IsPlusInfinity()) { + sb << "+inf bytes"; + } else if (value.IsMinusInfinity()) { + sb << "-inf bytes"; + } else { + sb << value.bytes() << " bytes"; + } + return sb.str(); +} +} // namespace webrtc diff --git a/api/units/data_size.h b/api/units/data_size.h new file mode 100644 index 0000000..27a2a4e --- /dev/null +++ b/api/units/data_size.h @@ -0,0 +1,66 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_UNITS_DATA_SIZE_H_ +#define API_UNITS_DATA_SIZE_H_ + +#ifdef UNIT_TEST +#include // no-presubmit-check TODO(webrtc:8982) +#endif // UNIT_TEST + +#include +#include + +#include "rtc_base/units/unit_base.h" + +namespace webrtc { +// DataSize is a class representing a count of bytes. 
+class DataSize final : public rtc_units_impl::RelativeUnit { + public: + template + static constexpr DataSize Bytes(T value) { + static_assert(std::is_arithmetic::value, ""); + return FromValue(value); + } + static constexpr DataSize Infinity() { return PlusInfinity(); } + + DataSize() = delete; + + template + constexpr T bytes() const { + return ToValue(); + } + + constexpr int64_t bytes_or(int64_t fallback_value) const { + return ToValueOr(fallback_value); + } + + private: + friend class rtc_units_impl::UnitBase; + using RelativeUnit::RelativeUnit; + static constexpr bool one_sided = true; +}; + +std::string ToString(DataSize value); +inline std::string ToLogString(DataSize value) { + return ToString(value); +} + +#ifdef UNIT_TEST +inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) + std::ostream& stream, // no-presubmit-check TODO(webrtc:8982) + DataSize value) { + return stream << ToString(value); +} +#endif // UNIT_TEST + +} // namespace webrtc + +#endif // API_UNITS_DATA_SIZE_H_ diff --git a/api/units/data_size_unittest.cc b/api/units/data_size_unittest.cc new file mode 100644 index 0000000..eb8d98c --- /dev/null +++ b/api/units/data_size_unittest.cc @@ -0,0 +1,110 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/units/data_size.h" + +#include + +#include "test/gtest.h" + +namespace webrtc { +namespace test { + +TEST(DataSizeTest, ConstExpr) { + constexpr int64_t kValue = 12345; + constexpr DataSize kDataSizeZero = DataSize::Zero(); + constexpr DataSize kDataSizeInf = DataSize::Infinity(); + static_assert(kDataSizeZero.IsZero(), ""); + static_assert(kDataSizeInf.IsInfinite(), ""); + static_assert(kDataSizeInf.bytes_or(-1) == -1, ""); + static_assert(kDataSizeInf > kDataSizeZero, ""); + + constexpr DataSize kDataSize = DataSize::Bytes(kValue); + static_assert(kDataSize.bytes_or(-1) == kValue, ""); + + EXPECT_EQ(kDataSize.bytes(), kValue); +} + +TEST(DataSizeTest, GetBackSameValues) { + const int64_t kValue = 123 * 8; + EXPECT_EQ(DataSize::Bytes(kValue).bytes(), kValue); +} + +TEST(DataSizeTest, IdentityChecks) { + const int64_t kValue = 3000; + EXPECT_TRUE(DataSize::Zero().IsZero()); + EXPECT_FALSE(DataSize::Bytes(kValue).IsZero()); + + EXPECT_TRUE(DataSize::Infinity().IsInfinite()); + EXPECT_FALSE(DataSize::Zero().IsInfinite()); + EXPECT_FALSE(DataSize::Bytes(kValue).IsInfinite()); + + EXPECT_FALSE(DataSize::Infinity().IsFinite()); + EXPECT_TRUE(DataSize::Bytes(kValue).IsFinite()); + EXPECT_TRUE(DataSize::Zero().IsFinite()); +} + +TEST(DataSizeTest, ComparisonOperators) { + const int64_t kSmall = 450; + const int64_t kLarge = 451; + const DataSize small = DataSize::Bytes(kSmall); + const DataSize large = DataSize::Bytes(kLarge); + + EXPECT_EQ(DataSize::Zero(), DataSize::Bytes(0)); + EXPECT_EQ(DataSize::Infinity(), DataSize::Infinity()); + EXPECT_EQ(small, small); + EXPECT_LE(small, small); + EXPECT_GE(small, small); + EXPECT_NE(small, large); + EXPECT_LE(small, large); + EXPECT_LT(small, large); + EXPECT_GE(large, small); + EXPECT_GT(large, small); + EXPECT_LT(DataSize::Zero(), small); + EXPECT_GT(DataSize::Infinity(), large); +} + +TEST(DataSizeTest, ConvertsToAndFromDouble) { + const int64_t kValue = 128; + const double kDoubleValue = 
static_cast(kValue); + + EXPECT_EQ(DataSize::Bytes(kValue).bytes(), kDoubleValue); + EXPECT_EQ(DataSize::Bytes(kDoubleValue).bytes(), kValue); + + const double kInfinity = std::numeric_limits::infinity(); + EXPECT_EQ(DataSize::Infinity().bytes(), kInfinity); + EXPECT_TRUE(DataSize::Bytes(kInfinity).IsInfinite()); +} + +TEST(DataSizeTest, MathOperations) { + const int64_t kValueA = 450; + const int64_t kValueB = 267; + const DataSize size_a = DataSize::Bytes(kValueA); + const DataSize size_b = DataSize::Bytes(kValueB); + EXPECT_EQ((size_a + size_b).bytes(), kValueA + kValueB); + EXPECT_EQ((size_a - size_b).bytes(), kValueA - kValueB); + + const int32_t kInt32Value = 123; + const double kFloatValue = 123.0; + EXPECT_EQ((size_a * kValueB).bytes(), kValueA * kValueB); + EXPECT_EQ((size_a * kInt32Value).bytes(), kValueA * kInt32Value); + EXPECT_EQ((size_a * kFloatValue).bytes(), kValueA * kFloatValue); + + EXPECT_EQ((size_a / 10).bytes(), kValueA / 10); + EXPECT_EQ(size_a / size_b, static_cast(kValueA) / kValueB); + + DataSize mutable_size = DataSize::Bytes(kValueA); + mutable_size += size_b; + EXPECT_EQ(mutable_size.bytes(), kValueA + kValueB); + mutable_size -= size_a; + EXPECT_EQ(mutable_size.bytes(), kValueB); +} +} // namespace test +} // namespace webrtc diff --git a/api/units/frequency.cc b/api/units/frequency.cc new file mode 100644 index 0000000..2d938a2 --- /dev/null +++ b/api/units/frequency.cc @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "api/units/frequency.h" + +#include "rtc_base/strings/string_builder.h" + +namespace webrtc { +std::string ToString(Frequency value) { + char buf[64]; + rtc::SimpleStringBuilder sb(buf); + if (value.IsPlusInfinity()) { + sb << "+inf Hz"; + } else if (value.IsMinusInfinity()) { + sb << "-inf Hz"; + } else if (value.millihertz() % 1000 != 0) { + sb.AppendFormat("%.3f Hz", value.hertz()); + } else { + sb << value.hertz() << " Hz"; + } + return sb.str(); +} +} // namespace webrtc diff --git a/api/units/frequency.h b/api/units/frequency.h new file mode 100644 index 0000000..88912c6 --- /dev/null +++ b/api/units/frequency.h @@ -0,0 +1,101 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef API_UNITS_FREQUENCY_H_ +#define API_UNITS_FREQUENCY_H_ + +#ifdef UNIT_TEST +#include // no-presubmit-check TODO(webrtc:8982) +#endif // UNIT_TEST + +#include +#include +#include +#include + +#include "api/units/time_delta.h" +#include "rtc_base/units/unit_base.h" + +namespace webrtc { + +class Frequency final : public rtc_units_impl::RelativeUnit { + public: + template + static constexpr Frequency MilliHertz(T value) { + static_assert(std::is_arithmetic::value, ""); + return FromValue(value); + } + template + static constexpr Frequency Hertz(T value) { + static_assert(std::is_arithmetic::value, ""); + return FromFraction(1'000, value); + } + template + static constexpr Frequency KiloHertz(T value) { + static_assert(std::is_arithmetic::value, ""); + return FromFraction(1'000'000, value); + } + + Frequency() = delete; + + template + constexpr T hertz() const { + return ToFraction<1000, T>(); + } + template + constexpr T millihertz() const { + return ToValue(); + } + + private: + friend class rtc_units_impl::UnitBase; + using RelativeUnit::RelativeUnit; + static constexpr bool one_sided = true; +}; + +inline constexpr Frequency operator/(int64_t nominator, + const TimeDelta& interval) { + constexpr int64_t kKiloPerMicro = 1000 * 1000000; + RTC_DCHECK_LE(nominator, std::numeric_limits::max() / kKiloPerMicro); + RTC_CHECK(interval.IsFinite()); + RTC_CHECK(!interval.IsZero()); + return Frequency::MilliHertz(nominator * kKiloPerMicro / interval.us()); +} + +inline constexpr TimeDelta operator/(int64_t nominator, + const Frequency& frequency) { + constexpr int64_t kMegaPerMilli = 1000000 * 1000; + RTC_DCHECK_LE(nominator, std::numeric_limits::max() / kMegaPerMilli); + RTC_CHECK(frequency.IsFinite()); + RTC_CHECK(!frequency.IsZero()); + return TimeDelta::Micros(nominator * kMegaPerMilli / frequency.millihertz()); +} + +inline constexpr double operator*(Frequency frequency, TimeDelta time_delta) { + return frequency.hertz() * time_delta.seconds(); +} +inline 
constexpr double operator*(TimeDelta time_delta, Frequency frequency) { + return frequency * time_delta; +} + +std::string ToString(Frequency value); +inline std::string ToLogString(Frequency value) { + return ToString(value); +} + +#ifdef UNIT_TEST +inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) + std::ostream& stream, // no-presubmit-check TODO(webrtc:8982) + Frequency value) { + return stream << ToString(value); +} +#endif // UNIT_TEST + +} // namespace webrtc +#endif // API_UNITS_FREQUENCY_H_ diff --git a/api/units/frequency_unittest.cc b/api/units/frequency_unittest.cc new file mode 100644 index 0000000..1260c21 --- /dev/null +++ b/api/units/frequency_unittest.cc @@ -0,0 +1,161 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "api/units/frequency.h" + +#include + +#include "test/gtest.h" + +namespace webrtc { +namespace test { +TEST(FrequencyTest, ConstExpr) { + constexpr Frequency kFrequencyZero = Frequency::Zero(); + constexpr Frequency kFrequencyPlusInf = Frequency::PlusInfinity(); + constexpr Frequency kFrequencyMinusInf = Frequency::MinusInfinity(); + static_assert(kFrequencyZero.IsZero(), ""); + static_assert(kFrequencyPlusInf.IsPlusInfinity(), ""); + static_assert(kFrequencyMinusInf.IsMinusInfinity(), ""); + + static_assert(kFrequencyPlusInf > kFrequencyZero, ""); +} + +TEST(FrequencyTest, GetBackSameValues) { + const int64_t kValue = 31; + EXPECT_EQ(Frequency::Hertz(kValue).hertz(), kValue); + EXPECT_EQ(Frequency::Zero().hertz(), 0); +} + +TEST(FrequencyTest, GetDifferentPrefix) { + const int64_t kValue = 30000; + EXPECT_EQ(Frequency::MilliHertz(kValue).hertz(), kValue / 1000); + EXPECT_EQ(Frequency::Hertz(kValue).millihertz(), kValue * 1000); + EXPECT_EQ(Frequency::KiloHertz(kValue).hertz(), kValue * 1000); +} + +TEST(FrequencyTest, IdentityChecks) { + const int64_t kValue = 31; + EXPECT_TRUE(Frequency::Zero().IsZero()); + EXPECT_FALSE(Frequency::Hertz(kValue).IsZero()); + + EXPECT_TRUE(Frequency::PlusInfinity().IsInfinite()); + EXPECT_TRUE(Frequency::MinusInfinity().IsInfinite()); + EXPECT_FALSE(Frequency::Zero().IsInfinite()); + EXPECT_FALSE(Frequency::Hertz(kValue).IsInfinite()); + + EXPECT_FALSE(Frequency::PlusInfinity().IsFinite()); + EXPECT_FALSE(Frequency::MinusInfinity().IsFinite()); + EXPECT_TRUE(Frequency::Hertz(kValue).IsFinite()); + EXPECT_TRUE(Frequency::Zero().IsFinite()); + + EXPECT_TRUE(Frequency::PlusInfinity().IsPlusInfinity()); + EXPECT_FALSE(Frequency::MinusInfinity().IsPlusInfinity()); + + EXPECT_TRUE(Frequency::MinusInfinity().IsMinusInfinity()); + EXPECT_FALSE(Frequency::PlusInfinity().IsMinusInfinity()); +} + +TEST(FrequencyTest, ComparisonOperators) { + const int64_t kSmall = 42; + const int64_t kLarge = 45; + const Frequency small = 
Frequency::Hertz(kSmall); + const Frequency large = Frequency::Hertz(kLarge); + + EXPECT_EQ(Frequency::Zero(), Frequency::Hertz(0)); + EXPECT_EQ(Frequency::PlusInfinity(), Frequency::PlusInfinity()); + EXPECT_EQ(small, Frequency::Hertz(kSmall)); + EXPECT_LE(small, Frequency::Hertz(kSmall)); + EXPECT_GE(small, Frequency::Hertz(kSmall)); + EXPECT_NE(small, Frequency::Hertz(kLarge)); + EXPECT_LE(small, Frequency::Hertz(kLarge)); + EXPECT_LT(small, Frequency::Hertz(kLarge)); + EXPECT_GE(large, Frequency::Hertz(kSmall)); + EXPECT_GT(large, Frequency::Hertz(kSmall)); + EXPECT_LT(Frequency::Zero(), small); + + EXPECT_GT(Frequency::PlusInfinity(), large); + EXPECT_LT(Frequency::MinusInfinity(), Frequency::Zero()); +} + +TEST(FrequencyTest, Clamping) { + const Frequency upper = Frequency::Hertz(800); + const Frequency lower = Frequency::Hertz(100); + const Frequency under = Frequency::Hertz(100); + const Frequency inside = Frequency::Hertz(500); + const Frequency over = Frequency::Hertz(1000); + EXPECT_EQ(under.Clamped(lower, upper), lower); + EXPECT_EQ(inside.Clamped(lower, upper), inside); + EXPECT_EQ(over.Clamped(lower, upper), upper); + + Frequency mutable_frequency = lower; + mutable_frequency.Clamp(lower, upper); + EXPECT_EQ(mutable_frequency, lower); + mutable_frequency = inside; + mutable_frequency.Clamp(lower, upper); + EXPECT_EQ(mutable_frequency, inside); + mutable_frequency = over; + mutable_frequency.Clamp(lower, upper); + EXPECT_EQ(mutable_frequency, upper); +} + +TEST(FrequencyTest, MathOperations) { + const int64_t kValueA = 457; + const int64_t kValueB = 260; + const Frequency frequency_a = Frequency::Hertz(kValueA); + const Frequency frequency_b = Frequency::Hertz(kValueB); + EXPECT_EQ((frequency_a + frequency_b).hertz(), kValueA + kValueB); + EXPECT_EQ((frequency_a - frequency_b).hertz(), kValueA - kValueB); + + EXPECT_EQ((Frequency::Hertz(kValueA) * kValueB).hertz(), + kValueA * kValueB); + + EXPECT_EQ((frequency_b / 10).hertz(), kValueB / 10); + 
EXPECT_EQ(frequency_b / frequency_a, static_cast(kValueB) / kValueA); + + Frequency mutable_frequency = Frequency::Hertz(kValueA); + mutable_frequency += Frequency::Hertz(kValueB); + EXPECT_EQ(mutable_frequency, Frequency::Hertz(kValueA + kValueB)); + mutable_frequency -= Frequency::Hertz(kValueB); + EXPECT_EQ(mutable_frequency, Frequency::Hertz(kValueA)); +} +TEST(FrequencyTest, Rounding) { + const Frequency freq_high = Frequency::Hertz(23.976); + EXPECT_EQ(freq_high.hertz(), 24); + EXPECT_EQ(freq_high.RoundDownTo(Frequency::Hertz(1)), Frequency::Hertz(23)); + EXPECT_EQ(freq_high.RoundTo(Frequency::Hertz(1)), Frequency::Hertz(24)); + EXPECT_EQ(freq_high.RoundUpTo(Frequency::Hertz(1)), Frequency::Hertz(24)); + + const Frequency freq_low = Frequency::Hertz(23.4); + EXPECT_EQ(freq_low.hertz(), 23); + EXPECT_EQ(freq_low.RoundDownTo(Frequency::Hertz(1)), Frequency::Hertz(23)); + EXPECT_EQ(freq_low.RoundTo(Frequency::Hertz(1)), Frequency::Hertz(23)); + EXPECT_EQ(freq_low.RoundUpTo(Frequency::Hertz(1)), Frequency::Hertz(24)); +} + +TEST(FrequencyTest, InfinityOperations) { + const double kValue = 267; + const Frequency finite = Frequency::Hertz(kValue); + EXPECT_TRUE((Frequency::PlusInfinity() + finite).IsPlusInfinity()); + EXPECT_TRUE((Frequency::PlusInfinity() - finite).IsPlusInfinity()); + EXPECT_TRUE((finite + Frequency::PlusInfinity()).IsPlusInfinity()); + EXPECT_TRUE((finite - Frequency::MinusInfinity()).IsPlusInfinity()); + + EXPECT_TRUE((Frequency::MinusInfinity() + finite).IsMinusInfinity()); + EXPECT_TRUE((Frequency::MinusInfinity() - finite).IsMinusInfinity()); + EXPECT_TRUE((finite + Frequency::MinusInfinity()).IsMinusInfinity()); + EXPECT_TRUE((finite - Frequency::PlusInfinity()).IsMinusInfinity()); +} + +TEST(UnitConversionTest, TimeDeltaAndFrequency) { + EXPECT_EQ(1 / Frequency::Hertz(50), TimeDelta::Millis(20)); + EXPECT_EQ(1 / TimeDelta::Millis(20), Frequency::Hertz(50)); + EXPECT_EQ(Frequency::KiloHertz(200) * TimeDelta::Millis(2), 400.0); +} +} // 
namespace test +} // namespace webrtc diff --git a/api/units/time_delta.cc b/api/units/time_delta.cc new file mode 100644 index 0000000..31bf3e0 --- /dev/null +++ b/api/units/time_delta.cc @@ -0,0 +1,36 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/units/time_delta.h" + +#include "api/array_view.h" +#include "rtc_base/strings/string_builder.h" + +namespace webrtc { + +std::string ToString(TimeDelta value) { + char buf[64]; + rtc::SimpleStringBuilder sb(buf); + if (value.IsPlusInfinity()) { + sb << "+inf ms"; + } else if (value.IsMinusInfinity()) { + sb << "-inf ms"; + } else { + if (value.us() == 0 || (value.us() % 1000) != 0) + sb << value.us() << " us"; + else if (value.ms() % 1000 != 0) + sb << value.ms() << " ms"; + else + sb << value.seconds() << " s"; + } + return sb.str(); +} + +} // namespace webrtc diff --git a/api/units/time_delta.h b/api/units/time_delta.h new file mode 100644 index 0000000..173affc --- /dev/null +++ b/api/units/time_delta.h @@ -0,0 +1,105 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_UNITS_TIME_DELTA_H_ +#define API_UNITS_TIME_DELTA_H_ + +#ifdef UNIT_TEST +#include // no-presubmit-check TODO(webrtc:8982) +#endif // UNIT_TEST + +#include +#include +#include + +#include "rtc_base/units/unit_base.h" + +namespace webrtc { + +// TimeDelta represents the difference between two timestamps. Commonly this can +// be a duration. However since two Timestamps are not guaranteed to have the +// same epoch (they might come from different computers, making exact +// synchronisation infeasible), the duration covered by a TimeDelta can be +// undefined. To simplify usage, it can be constructed and converted to +// different units, specifically seconds (s), milliseconds (ms) and +// microseconds (us). +class TimeDelta final : public rtc_units_impl::RelativeUnit { + public: + template + static constexpr TimeDelta Seconds(T value) { + static_assert(std::is_arithmetic::value, ""); + return FromFraction(1'000'000, value); + } + template + static constexpr TimeDelta Millis(T value) { + static_assert(std::is_arithmetic::value, ""); + return FromFraction(1'000, value); + } + template + static constexpr TimeDelta Micros(T value) { + static_assert(std::is_arithmetic::value, ""); + return FromValue(value); + } + + TimeDelta() = delete; + + template + constexpr T seconds() const { + return ToFraction<1000000, T>(); + } + template + constexpr T ms() const { + return ToFraction<1000, T>(); + } + template + constexpr T us() const { + return ToValue(); + } + template + constexpr T ns() const { + return ToMultiple<1000, T>(); + } + + constexpr int64_t seconds_or(int64_t fallback_value) const { + return ToFractionOr<1000000>(fallback_value); + } + constexpr int64_t ms_or(int64_t fallback_value) const { + return ToFractionOr<1000>(fallback_value); + } + constexpr int64_t us_or(int64_t fallback_value) const { + return ToValueOr(fallback_value); + } + + constexpr TimeDelta Abs() const { + return us() < 0 ? 
TimeDelta::Micros(-us()) : *this; + } + + private: + friend class rtc_units_impl::UnitBase; + using RelativeUnit::RelativeUnit; + static constexpr bool one_sided = false; +}; + +std::string ToString(TimeDelta value); +inline std::string ToLogString(TimeDelta value) { + return ToString(value); +} + +#ifdef UNIT_TEST +inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) + std::ostream& stream, // no-presubmit-check TODO(webrtc:8982) + TimeDelta value) { + return stream << ToString(value); +} +#endif // UNIT_TEST + +} // namespace webrtc + +#endif // API_UNITS_TIME_DELTA_H_ diff --git a/api/units/time_delta_unittest.cc b/api/units/time_delta_unittest.cc new file mode 100644 index 0000000..cb43860 --- /dev/null +++ b/api/units/time_delta_unittest.cc @@ -0,0 +1,221 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/units/time_delta.h" + +#include + +#include "test/gtest.h" + +namespace webrtc { +namespace test { +TEST(TimeDeltaTest, ConstExpr) { + constexpr int64_t kValue = -12345; + constexpr TimeDelta kTimeDeltaZero = TimeDelta::Zero(); + constexpr TimeDelta kTimeDeltaPlusInf = TimeDelta::PlusInfinity(); + constexpr TimeDelta kTimeDeltaMinusInf = TimeDelta::MinusInfinity(); + static_assert(kTimeDeltaZero.IsZero(), ""); + static_assert(kTimeDeltaPlusInf.IsPlusInfinity(), ""); + static_assert(kTimeDeltaMinusInf.IsMinusInfinity(), ""); + static_assert(kTimeDeltaPlusInf.ms_or(-1) == -1, ""); + + static_assert(kTimeDeltaPlusInf > kTimeDeltaZero, ""); + + constexpr TimeDelta kTimeDeltaSeconds = TimeDelta::Seconds(kValue); + constexpr TimeDelta kTimeDeltaMs = TimeDelta::Millis(kValue); + constexpr TimeDelta kTimeDeltaUs = TimeDelta::Micros(kValue); + + static_assert(kTimeDeltaSeconds.seconds_or(0) == kValue, ""); + static_assert(kTimeDeltaMs.ms_or(0) == kValue, ""); + static_assert(kTimeDeltaUs.us_or(0) == kValue, ""); +} + +TEST(TimeDeltaTest, GetBackSameValues) { + const int64_t kValue = 499; + for (int sign = -1; sign <= 1; ++sign) { + int64_t value = kValue * sign; + EXPECT_EQ(TimeDelta::Millis(value).ms(), value); + EXPECT_EQ(TimeDelta::Micros(value).us(), value); + EXPECT_EQ(TimeDelta::Seconds(value).seconds(), value); + EXPECT_EQ(TimeDelta::Seconds(value).seconds(), value); + } + EXPECT_EQ(TimeDelta::Zero().us(), 0); +} + +TEST(TimeDeltaTest, GetDifferentPrefix) { + const int64_t kValue = 3000000; + EXPECT_EQ(TimeDelta::Micros(kValue).seconds(), kValue / 1000000); + EXPECT_EQ(TimeDelta::Millis(kValue).seconds(), kValue / 1000); + EXPECT_EQ(TimeDelta::Micros(kValue).ms(), kValue / 1000); + + EXPECT_EQ(TimeDelta::Millis(kValue).us(), kValue * 1000); + EXPECT_EQ(TimeDelta::Seconds(kValue).ms(), kValue * 1000); + EXPECT_EQ(TimeDelta::Seconds(kValue).us(), kValue * 1000000); +} + +TEST(TimeDeltaTest, IdentityChecks) { + const int64_t kValue = 3000; + 
EXPECT_TRUE(TimeDelta::Zero().IsZero()); + EXPECT_FALSE(TimeDelta::Millis(kValue).IsZero()); + + EXPECT_TRUE(TimeDelta::PlusInfinity().IsInfinite()); + EXPECT_TRUE(TimeDelta::MinusInfinity().IsInfinite()); + EXPECT_FALSE(TimeDelta::Zero().IsInfinite()); + EXPECT_FALSE(TimeDelta::Millis(-kValue).IsInfinite()); + EXPECT_FALSE(TimeDelta::Millis(kValue).IsInfinite()); + + EXPECT_FALSE(TimeDelta::PlusInfinity().IsFinite()); + EXPECT_FALSE(TimeDelta::MinusInfinity().IsFinite()); + EXPECT_TRUE(TimeDelta::Millis(-kValue).IsFinite()); + EXPECT_TRUE(TimeDelta::Millis(kValue).IsFinite()); + EXPECT_TRUE(TimeDelta::Zero().IsFinite()); + + EXPECT_TRUE(TimeDelta::PlusInfinity().IsPlusInfinity()); + EXPECT_FALSE(TimeDelta::MinusInfinity().IsPlusInfinity()); + + EXPECT_TRUE(TimeDelta::MinusInfinity().IsMinusInfinity()); + EXPECT_FALSE(TimeDelta::PlusInfinity().IsMinusInfinity()); +} + +TEST(TimeDeltaTest, ComparisonOperators) { + const int64_t kSmall = 450; + const int64_t kLarge = 451; + const TimeDelta small = TimeDelta::Millis(kSmall); + const TimeDelta large = TimeDelta::Millis(kLarge); + + EXPECT_EQ(TimeDelta::Zero(), TimeDelta::Millis(0)); + EXPECT_EQ(TimeDelta::PlusInfinity(), TimeDelta::PlusInfinity()); + EXPECT_EQ(small, TimeDelta::Millis(kSmall)); + EXPECT_LE(small, TimeDelta::Millis(kSmall)); + EXPECT_GE(small, TimeDelta::Millis(kSmall)); + EXPECT_NE(small, TimeDelta::Millis(kLarge)); + EXPECT_LE(small, TimeDelta::Millis(kLarge)); + EXPECT_LT(small, TimeDelta::Millis(kLarge)); + EXPECT_GE(large, TimeDelta::Millis(kSmall)); + EXPECT_GT(large, TimeDelta::Millis(kSmall)); + EXPECT_LT(TimeDelta::Zero(), small); + EXPECT_GT(TimeDelta::Zero(), TimeDelta::Millis(-kSmall)); + EXPECT_GT(TimeDelta::Zero(), TimeDelta::Millis(-kSmall)); + + EXPECT_GT(TimeDelta::PlusInfinity(), large); + EXPECT_LT(TimeDelta::MinusInfinity(), TimeDelta::Zero()); +} + +TEST(TimeDeltaTest, Clamping) { + const TimeDelta upper = TimeDelta::Millis(800); + const TimeDelta lower = TimeDelta::Millis(100); + 
const TimeDelta under = TimeDelta::Millis(100); + const TimeDelta inside = TimeDelta::Millis(500); + const TimeDelta over = TimeDelta::Millis(1000); + EXPECT_EQ(under.Clamped(lower, upper), lower); + EXPECT_EQ(inside.Clamped(lower, upper), inside); + EXPECT_EQ(over.Clamped(lower, upper), upper); + + TimeDelta mutable_delta = lower; + mutable_delta.Clamp(lower, upper); + EXPECT_EQ(mutable_delta, lower); + mutable_delta = inside; + mutable_delta.Clamp(lower, upper); + EXPECT_EQ(mutable_delta, inside); + mutable_delta = over; + mutable_delta.Clamp(lower, upper); + EXPECT_EQ(mutable_delta, upper); +} + +TEST(TimeDeltaTest, CanBeInititializedFromLargeInt) { + const int kMaxInt = std::numeric_limits::max(); + EXPECT_EQ(TimeDelta::Seconds(kMaxInt).us(), + static_cast(kMaxInt) * 1000000); + EXPECT_EQ(TimeDelta::Millis(kMaxInt).us(), + static_cast(kMaxInt) * 1000); +} + +TEST(TimeDeltaTest, ConvertsToAndFromDouble) { + const int64_t kMicros = 17017; + const double kNanosDouble = kMicros * 1e3; + const double kMicrosDouble = kMicros; + const double kMillisDouble = kMicros * 1e-3; + const double kSecondsDouble = kMillisDouble * 1e-3; + + EXPECT_EQ(TimeDelta::Micros(kMicros).seconds(), kSecondsDouble); + EXPECT_EQ(TimeDelta::Seconds(kSecondsDouble).us(), kMicros); + + EXPECT_EQ(TimeDelta::Micros(kMicros).ms(), kMillisDouble); + EXPECT_EQ(TimeDelta::Millis(kMillisDouble).us(), kMicros); + + EXPECT_EQ(TimeDelta::Micros(kMicros).us(), kMicrosDouble); + EXPECT_EQ(TimeDelta::Micros(kMicrosDouble).us(), kMicros); + + EXPECT_NEAR(TimeDelta::Micros(kMicros).ns(), kNanosDouble, 1); + + const double kPlusInfinity = std::numeric_limits::infinity(); + const double kMinusInfinity = -kPlusInfinity; + + EXPECT_EQ(TimeDelta::PlusInfinity().seconds(), kPlusInfinity); + EXPECT_EQ(TimeDelta::MinusInfinity().seconds(), kMinusInfinity); + EXPECT_EQ(TimeDelta::PlusInfinity().ms(), kPlusInfinity); + EXPECT_EQ(TimeDelta::MinusInfinity().ms(), kMinusInfinity); + 
EXPECT_EQ(TimeDelta::PlusInfinity().us(), kPlusInfinity); + EXPECT_EQ(TimeDelta::MinusInfinity().us(), kMinusInfinity); + EXPECT_EQ(TimeDelta::PlusInfinity().ns(), kPlusInfinity); + EXPECT_EQ(TimeDelta::MinusInfinity().ns(), kMinusInfinity); + + EXPECT_TRUE(TimeDelta::Seconds(kPlusInfinity).IsPlusInfinity()); + EXPECT_TRUE(TimeDelta::Seconds(kMinusInfinity).IsMinusInfinity()); + EXPECT_TRUE(TimeDelta::Millis(kPlusInfinity).IsPlusInfinity()); + EXPECT_TRUE(TimeDelta::Millis(kMinusInfinity).IsMinusInfinity()); + EXPECT_TRUE(TimeDelta::Micros(kPlusInfinity).IsPlusInfinity()); + EXPECT_TRUE(TimeDelta::Micros(kMinusInfinity).IsMinusInfinity()); +} + +TEST(TimeDeltaTest, MathOperations) { + const int64_t kValueA = 267; + const int64_t kValueB = 450; + const TimeDelta delta_a = TimeDelta::Millis(kValueA); + const TimeDelta delta_b = TimeDelta::Millis(kValueB); + EXPECT_EQ((delta_a + delta_b).ms(), kValueA + kValueB); + EXPECT_EQ((delta_a - delta_b).ms(), kValueA - kValueB); + + const int32_t kInt32Value = 123; + const double kFloatValue = 123.0; + EXPECT_EQ((TimeDelta::Micros(kValueA) * kValueB).us(), kValueA * kValueB); + EXPECT_EQ((TimeDelta::Micros(kValueA) * kInt32Value).us(), + kValueA * kInt32Value); + EXPECT_EQ((TimeDelta::Micros(kValueA) * kFloatValue).us(), + kValueA * kFloatValue); + + EXPECT_EQ((delta_b / 10).ms(), kValueB / 10); + EXPECT_EQ(delta_b / delta_a, static_cast(kValueB) / kValueA); + + EXPECT_EQ(TimeDelta::Micros(-kValueA).Abs().us(), kValueA); + EXPECT_EQ(TimeDelta::Micros(kValueA).Abs().us(), kValueA); + + TimeDelta mutable_delta = TimeDelta::Millis(kValueA); + mutable_delta += TimeDelta::Millis(kValueB); + EXPECT_EQ(mutable_delta, TimeDelta::Millis(kValueA + kValueB)); + mutable_delta -= TimeDelta::Millis(kValueB); + EXPECT_EQ(mutable_delta, TimeDelta::Millis(kValueA)); +} + +TEST(TimeDeltaTest, InfinityOperations) { + const int64_t kValue = 267; + const TimeDelta finite = TimeDelta::Millis(kValue); + EXPECT_TRUE((TimeDelta::PlusInfinity() + 
finite).IsPlusInfinity()); + EXPECT_TRUE((TimeDelta::PlusInfinity() - finite).IsPlusInfinity()); + EXPECT_TRUE((finite + TimeDelta::PlusInfinity()).IsPlusInfinity()); + EXPECT_TRUE((finite - TimeDelta::MinusInfinity()).IsPlusInfinity()); + + EXPECT_TRUE((TimeDelta::MinusInfinity() + finite).IsMinusInfinity()); + EXPECT_TRUE((TimeDelta::MinusInfinity() - finite).IsMinusInfinity()); + EXPECT_TRUE((finite + TimeDelta::MinusInfinity()).IsMinusInfinity()); + EXPECT_TRUE((finite - TimeDelta::PlusInfinity()).IsMinusInfinity()); +} +} // namespace test +} // namespace webrtc diff --git a/api/units/timestamp.cc b/api/units/timestamp.cc new file mode 100644 index 0000000..fc4f419 --- /dev/null +++ b/api/units/timestamp.cc @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/units/timestamp.h" + +#include "api/array_view.h" +#include "rtc_base/strings/string_builder.h" + +namespace webrtc { +std::string ToString(Timestamp value) { + char buf[64]; + rtc::SimpleStringBuilder sb(buf); + if (value.IsPlusInfinity()) { + sb << "+inf ms"; + } else if (value.IsMinusInfinity()) { + sb << "-inf ms"; + } else { + if (value.us() == 0 || (value.us() % 1000) != 0) + sb << value.us() << " us"; + else if (value.ms() % 1000 != 0) + sb << value.ms() << " ms"; + else + sb << value.seconds() << " s"; + } + return sb.str(); +} +} // namespace webrtc diff --git a/api/units/timestamp.h b/api/units/timestamp.h new file mode 100644 index 0000000..f83477e --- /dev/null +++ b/api/units/timestamp.h @@ -0,0 +1,138 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_UNITS_TIMESTAMP_H_ +#define API_UNITS_TIMESTAMP_H_ + +#ifdef UNIT_TEST +#include // no-presubmit-check TODO(webrtc:8982) +#endif // UNIT_TEST + +#include +#include + +#include "api/units/time_delta.h" +#include "rtc_base/checks.h" + +namespace webrtc { +// Timestamp represents the time that has passed since some unspecified epoch. +// The epoch is assumed to be before any represented timestamps, this means that +// negative values are not valid. The most notable feature is that the +// difference of two Timestamps results in a TimeDelta. +class Timestamp final : public rtc_units_impl::UnitBase { + public: + template + static constexpr Timestamp Seconds(T value) { + static_assert(std::is_arithmetic::value, ""); + return FromFraction(1'000'000, value); + } + template + static constexpr Timestamp Millis(T value) { + static_assert(std::is_arithmetic::value, ""); + return FromFraction(1'000, value); + } + template + static constexpr Timestamp Micros(T value) { + static_assert(std::is_arithmetic::value, ""); + return FromValue(value); + } + + Timestamp() = delete; + + template + constexpr T seconds() const { + return ToFraction<1000000, T>(); + } + template + constexpr T ms() const { + return ToFraction<1000, T>(); + } + template + constexpr T us() const { + return ToValue(); + } + + constexpr int64_t seconds_or(int64_t fallback_value) const { + return ToFractionOr<1000000>(fallback_value); + } + constexpr int64_t ms_or(int64_t fallback_value) const { + return ToFractionOr<1000>(fallback_value); + } + constexpr int64_t us_or(int64_t fallback_value) const { + return ToValueOr(fallback_value); + } + + constexpr Timestamp 
operator+(const TimeDelta delta) const { + if (IsPlusInfinity() || delta.IsPlusInfinity()) { + RTC_DCHECK(!IsMinusInfinity()); + RTC_DCHECK(!delta.IsMinusInfinity()); + return PlusInfinity(); + } else if (IsMinusInfinity() || delta.IsMinusInfinity()) { + RTC_DCHECK(!IsPlusInfinity()); + RTC_DCHECK(!delta.IsPlusInfinity()); + return MinusInfinity(); + } + return Timestamp::Micros(us() + delta.us()); + } + constexpr Timestamp operator-(const TimeDelta delta) const { + if (IsPlusInfinity() || delta.IsMinusInfinity()) { + RTC_DCHECK(!IsMinusInfinity()); + RTC_DCHECK(!delta.IsPlusInfinity()); + return PlusInfinity(); + } else if (IsMinusInfinity() || delta.IsPlusInfinity()) { + RTC_DCHECK(!IsPlusInfinity()); + RTC_DCHECK(!delta.IsMinusInfinity()); + return MinusInfinity(); + } + return Timestamp::Micros(us() - delta.us()); + } + constexpr TimeDelta operator-(const Timestamp other) const { + if (IsPlusInfinity() || other.IsMinusInfinity()) { + RTC_DCHECK(!IsMinusInfinity()); + RTC_DCHECK(!other.IsPlusInfinity()); + return TimeDelta::PlusInfinity(); + } else if (IsMinusInfinity() || other.IsPlusInfinity()) { + RTC_DCHECK(!IsPlusInfinity()); + RTC_DCHECK(!other.IsMinusInfinity()); + return TimeDelta::MinusInfinity(); + } + return TimeDelta::Micros(us() - other.us()); + } + constexpr Timestamp& operator-=(const TimeDelta delta) { + *this = *this - delta; + return *this; + } + constexpr Timestamp& operator+=(const TimeDelta delta) { + *this = *this + delta; + return *this; + } + + private: + friend class rtc_units_impl::UnitBase; + using UnitBase::UnitBase; + static constexpr bool one_sided = true; +}; + +std::string ToString(Timestamp value); +inline std::string ToLogString(Timestamp value) { + return ToString(value); +} + +#ifdef UNIT_TEST +inline std::ostream& operator<<( // no-presubmit-check TODO(webrtc:8982) + std::ostream& stream, // no-presubmit-check TODO(webrtc:8982) + Timestamp value) { + return stream << ToString(value); +} +#endif // UNIT_TEST + +} // namespace 
webrtc + +#endif // API_UNITS_TIMESTAMP_H_ diff --git a/api/units/timestamp_unittest.cc b/api/units/timestamp_unittest.cc new file mode 100644 index 0000000..43b2985 --- /dev/null +++ b/api/units/timestamp_unittest.cc @@ -0,0 +1,162 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include + +#include "api/units/timestamp.h" +#include "test/gtest.h" + +namespace webrtc { +namespace test { +TEST(TimestampTest, ConstExpr) { + constexpr int64_t kValue = 12345; + constexpr Timestamp kTimestampInf = Timestamp::PlusInfinity(); + static_assert(kTimestampInf.IsInfinite(), ""); + static_assert(kTimestampInf.ms_or(-1) == -1, ""); + + constexpr Timestamp kTimestampSeconds = Timestamp::Seconds(kValue); + constexpr Timestamp kTimestampMs = Timestamp::Millis(kValue); + constexpr Timestamp kTimestampUs = Timestamp::Micros(kValue); + + static_assert(kTimestampSeconds.seconds_or(0) == kValue, ""); + static_assert(kTimestampMs.ms_or(0) == kValue, ""); + static_assert(kTimestampUs.us_or(0) == kValue, ""); + + static_assert(kTimestampMs > kTimestampUs, ""); + + EXPECT_EQ(kTimestampSeconds.seconds(), kValue); + EXPECT_EQ(kTimestampMs.ms(), kValue); + EXPECT_EQ(kTimestampUs.us(), kValue); +} + +TEST(TimestampTest, GetBackSameValues) { + const int64_t kValue = 499; + EXPECT_EQ(Timestamp::Millis(kValue).ms(), kValue); + EXPECT_EQ(Timestamp::Micros(kValue).us(), kValue); + EXPECT_EQ(Timestamp::Seconds(kValue).seconds(), kValue); +} + +TEST(TimestampTest, GetDifferentPrefix) { + const int64_t kValue = 3000000; + EXPECT_EQ(Timestamp::Micros(kValue).seconds(), kValue / 1000000); + EXPECT_EQ(Timestamp::Millis(kValue).seconds(), 
kValue / 1000); + EXPECT_EQ(Timestamp::Micros(kValue).ms(), kValue / 1000); + + EXPECT_EQ(Timestamp::Millis(kValue).us(), kValue * 1000); + EXPECT_EQ(Timestamp::Seconds(kValue).ms(), kValue * 1000); + EXPECT_EQ(Timestamp::Seconds(kValue).us(), kValue * 1000000); +} + +TEST(TimestampTest, IdentityChecks) { + const int64_t kValue = 3000; + + EXPECT_TRUE(Timestamp::PlusInfinity().IsInfinite()); + EXPECT_TRUE(Timestamp::MinusInfinity().IsInfinite()); + EXPECT_FALSE(Timestamp::Millis(kValue).IsInfinite()); + + EXPECT_FALSE(Timestamp::PlusInfinity().IsFinite()); + EXPECT_FALSE(Timestamp::MinusInfinity().IsFinite()); + EXPECT_TRUE(Timestamp::Millis(kValue).IsFinite()); + + EXPECT_TRUE(Timestamp::PlusInfinity().IsPlusInfinity()); + EXPECT_FALSE(Timestamp::MinusInfinity().IsPlusInfinity()); + + EXPECT_TRUE(Timestamp::MinusInfinity().IsMinusInfinity()); + EXPECT_FALSE(Timestamp::PlusInfinity().IsMinusInfinity()); +} + +TEST(TimestampTest, ComparisonOperators) { + const int64_t kSmall = 450; + const int64_t kLarge = 451; + + EXPECT_EQ(Timestamp::PlusInfinity(), Timestamp::PlusInfinity()); + EXPECT_GE(Timestamp::PlusInfinity(), Timestamp::PlusInfinity()); + EXPECT_GT(Timestamp::PlusInfinity(), Timestamp::Millis(kLarge)); + EXPECT_EQ(Timestamp::Millis(kSmall), Timestamp::Millis(kSmall)); + EXPECT_LE(Timestamp::Millis(kSmall), Timestamp::Millis(kSmall)); + EXPECT_GE(Timestamp::Millis(kSmall), Timestamp::Millis(kSmall)); + EXPECT_NE(Timestamp::Millis(kSmall), Timestamp::Millis(kLarge)); + EXPECT_LE(Timestamp::Millis(kSmall), Timestamp::Millis(kLarge)); + EXPECT_LT(Timestamp::Millis(kSmall), Timestamp::Millis(kLarge)); + EXPECT_GE(Timestamp::Millis(kLarge), Timestamp::Millis(kSmall)); + EXPECT_GT(Timestamp::Millis(kLarge), Timestamp::Millis(kSmall)); +} + +TEST(TimestampTest, CanBeInititializedFromLargeInt) { + const int kMaxInt = std::numeric_limits::max(); + EXPECT_EQ(Timestamp::Seconds(kMaxInt).us(), + static_cast(kMaxInt) * 1000000); + 
EXPECT_EQ(Timestamp::Millis(kMaxInt).us(), + static_cast(kMaxInt) * 1000); +} + +TEST(TimestampTest, ConvertsToAndFromDouble) { + const int64_t kMicros = 17017; + const double kMicrosDouble = kMicros; + const double kMillisDouble = kMicros * 1e-3; + const double kSecondsDouble = kMillisDouble * 1e-3; + + EXPECT_EQ(Timestamp::Micros(kMicros).seconds(), kSecondsDouble); + EXPECT_EQ(Timestamp::Seconds(kSecondsDouble).us(), kMicros); + + EXPECT_EQ(Timestamp::Micros(kMicros).ms(), kMillisDouble); + EXPECT_EQ(Timestamp::Millis(kMillisDouble).us(), kMicros); + + EXPECT_EQ(Timestamp::Micros(kMicros).us(), kMicrosDouble); + EXPECT_EQ(Timestamp::Micros(kMicrosDouble).us(), kMicros); + + const double kPlusInfinity = std::numeric_limits::infinity(); + const double kMinusInfinity = -kPlusInfinity; + + EXPECT_EQ(Timestamp::PlusInfinity().seconds(), kPlusInfinity); + EXPECT_EQ(Timestamp::MinusInfinity().seconds(), kMinusInfinity); + EXPECT_EQ(Timestamp::PlusInfinity().ms(), kPlusInfinity); + EXPECT_EQ(Timestamp::MinusInfinity().ms(), kMinusInfinity); + EXPECT_EQ(Timestamp::PlusInfinity().us(), kPlusInfinity); + EXPECT_EQ(Timestamp::MinusInfinity().us(), kMinusInfinity); + + EXPECT_TRUE(Timestamp::Seconds(kPlusInfinity).IsPlusInfinity()); + EXPECT_TRUE(Timestamp::Seconds(kMinusInfinity).IsMinusInfinity()); + EXPECT_TRUE(Timestamp::Millis(kPlusInfinity).IsPlusInfinity()); + EXPECT_TRUE(Timestamp::Millis(kMinusInfinity).IsMinusInfinity()); + EXPECT_TRUE(Timestamp::Micros(kPlusInfinity).IsPlusInfinity()); + EXPECT_TRUE(Timestamp::Micros(kMinusInfinity).IsMinusInfinity()); +} + +TEST(UnitConversionTest, TimestampAndTimeDeltaMath) { + const int64_t kValueA = 267; + const int64_t kValueB = 450; + const Timestamp time_a = Timestamp::Millis(kValueA); + const Timestamp time_b = Timestamp::Millis(kValueB); + const TimeDelta delta_a = TimeDelta::Millis(kValueA); + const TimeDelta delta_b = TimeDelta::Millis(kValueB); + + EXPECT_EQ((time_a - time_b), TimeDelta::Millis(kValueA - kValueB)); + 
EXPECT_EQ((time_b - delta_a), Timestamp::Millis(kValueB - kValueA)); + EXPECT_EQ((time_b + delta_a), Timestamp::Millis(kValueB + kValueA)); + + Timestamp mutable_time = time_a; + mutable_time += delta_b; + EXPECT_EQ(mutable_time, time_a + delta_b); + mutable_time -= delta_b; + EXPECT_EQ(mutable_time, time_a); +} + +TEST(UnitConversionTest, InfinityOperations) { + const int64_t kValue = 267; + const Timestamp finite_time = Timestamp::Millis(kValue); + const TimeDelta finite_delta = TimeDelta::Millis(kValue); + EXPECT_TRUE((Timestamp::PlusInfinity() + finite_delta).IsInfinite()); + EXPECT_TRUE((Timestamp::PlusInfinity() - finite_delta).IsInfinite()); + EXPECT_TRUE((finite_time + TimeDelta::PlusInfinity()).IsInfinite()); + EXPECT_TRUE((finite_time - TimeDelta::MinusInfinity()).IsInfinite()); +} +} // namespace test +} // namespace webrtc diff --git a/api/video/BUILD.gn b/api/video/BUILD.gn new file mode 100644 index 0000000..ee7e51d --- /dev/null +++ b/api/video/BUILD.gn @@ -0,0 +1,343 @@ +# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../webrtc.gni") +if (is_android) { + import("//build/config/android/config.gni") + import("//build/config/android/rules.gni") +} + +rtc_library("video_rtp_headers") { + visibility = [ "*" ] + sources = [ + "color_space.cc", + "color_space.h", + "hdr_metadata.cc", + "hdr_metadata.h", + "video_content_type.cc", + "video_content_type.h", + "video_rotation.h", + "video_timing.cc", + "video_timing.h", + ] + + deps = [ + "..:array_view", + "../../rtc_base:rtc_base_approved", + "../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("video_frame") { + visibility = [ "*" ] + sources = [ + "video_codec_type.h", + "video_frame.cc", + "video_frame.h", + "video_frame_buffer.cc", + "video_frame_buffer.h", + "video_sink_interface.h", + "video_source_interface.cc", + "video_source_interface.h", + ] + + deps = [ + ":video_rtp_headers", + "..:array_view", + "..:rtp_packet_info", + "..:scoped_refptr", + "../../rtc_base:checks", + "../../rtc_base:rtc_base_approved", + "../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +if (is_android) { + java_cpp_enum("video_frame_enums") { + sources = [ "video_codec_type.h" ] + } +} + +rtc_source_set("recordable_encoded_frame") { + visibility = [ "*" ] + sources = [ "recordable_encoded_frame.h" ] + + deps = [ + ":encoded_image", + ":video_frame", + ":video_rtp_headers", + "..:array_view", + "..:scoped_refptr", + "../../rtc_base:refcount", + "../units:timestamp", + ] +} + +rtc_source_set("video_frame_type") { + visibility = [ "*" ] + sources = [ "video_frame_type.h" ] +} + +rtc_library("video_frame_i420") { + visibility = [ "*" ] + sources = [ + "i420_buffer.cc", + "i420_buffer.h", + ] + deps = [ + ":video_frame", + ":video_rtp_headers", + "..:scoped_refptr", + "../../rtc_base", + "../../rtc_base:checks", + "../../rtc_base/memory:aligned_malloc", + "../../rtc_base/system:rtc_export", + "//third_party/libyuv", + 
] +} + +rtc_library("video_frame_i010") { + visibility = [ "*" ] + sources = [ + "i010_buffer.cc", + "i010_buffer.h", + ] + deps = [ + ":video_frame", + ":video_frame_i420", + ":video_rtp_headers", + "..:scoped_refptr", + "../../rtc_base", + "../../rtc_base:checks", + "../../rtc_base/memory:aligned_malloc", + "//third_party/libyuv", + ] +} + +rtc_library("encoded_image") { + visibility = [ "*" ] + sources = [ + "encoded_image.cc", + "encoded_image.h", + ] + deps = [ + ":video_codec_constants", + ":video_frame", + ":video_frame_type", + ":video_rtp_headers", + "..:refcountedbase", + "..:rtp_packet_info", + "..:scoped_refptr", + "../..:webrtc_common", + "../../rtc_base:checks", + "../../rtc_base:deprecation", + "../../rtc_base:rtc_base_approved", + "../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("encoded_frame") { + visibility = [ "*" ] + sources = [ + "encoded_frame.cc", + "encoded_frame.h", + ] + + deps = [ "../../modules/video_coding:encoded_frame" ] +} + +rtc_source_set("video_codec_constants") { + visibility = [ "*" ] + sources = [ "video_codec_constants.h" ] + deps = [] +} + +rtc_library("video_bitrate_allocation") { + visibility = [ "*" ] + sources = [ + "video_bitrate_allocation.cc", + "video_bitrate_allocation.h", + ] + deps = [ + ":video_codec_constants", + "../../rtc_base:checks", + "../../rtc_base:safe_conversions", + "../../rtc_base:stringutils", + "../../rtc_base/system:rtc_export", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("video_bitrate_allocator") { + visibility = [ "*" ] + sources = [ + "video_bitrate_allocator.cc", + "video_bitrate_allocator.h", + ] + deps = [ + ":video_bitrate_allocation", + "../units:data_rate", + ] +} + +rtc_source_set("video_bitrate_allocator_factory") { + visibility = [ "*" ] + sources = [ "video_bitrate_allocator_factory.h" ] + deps = [ + ":video_bitrate_allocator", + "../../rtc_base:rtc_base_approved", + 
"../video_codecs:video_codecs_api", + ] +} + +rtc_source_set("video_stream_decoder") { + visibility = [ "*" ] + sources = [ "video_stream_decoder.h" ] + + deps = [ + ":encoded_frame", + ":video_frame", + "../task_queue", + "../units:time_delta", + "../video_codecs:video_codecs_api", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("video_stream_decoder_create") { + visibility = [ "*" ] + sources = [ + "video_stream_decoder_create.cc", + "video_stream_decoder_create.h", + ] + + deps = [ + ":video_stream_decoder", + "../../rtc_base:rtc_base_approved", + "../../video:video_stream_decoder_impl", + "../task_queue", + "../video_codecs:video_codecs_api", + ] +} + +rtc_library("video_adaptation") { + visibility = [ "*" ] + sources = [ + "video_adaptation_counters.cc", + "video_adaptation_counters.h", + "video_adaptation_reason.h", + ] + + deps = [ + "../../rtc_base:checks", + "../../rtc_base:stringutils", + ] +} + +rtc_source_set("video_stream_encoder") { + visibility = [ "*" ] + sources = [ + "video_stream_encoder_interface.h", + "video_stream_encoder_observer.h", + "video_stream_encoder_settings.h", + ] + + deps = [ + ":video_adaptation", + ":video_bitrate_allocation", + ":video_bitrate_allocator", + ":video_bitrate_allocator_factory", + ":video_codec_constants", + ":video_frame", + "..:rtp_parameters", + "..:scoped_refptr", + "../:fec_controller_api", + "../:rtp_parameters", + "../adaptation:resource_adaptation_api", + "../units:data_rate", + "../video_codecs:video_codecs_api", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_source_set("video_frame_metadata") { + visibility = [ "*" ] + sources = [ + "video_frame_metadata.cc", + "video_frame_metadata.h", + ] + deps = [ + "..:array_view", + "../../modules/rtp_rtcp:rtp_video_header", + "../transport/rtp:dependency_descriptor", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/container:inlined_vector", + "//third_party/abseil-cpp/absl/types:optional", 
+ ] +} + +rtc_library("video_stream_encoder_create") { + visibility = [ "*" ] + sources = [ + "video_stream_encoder_create.cc", + "video_stream_encoder_create.h", + ] + + deps = [ + ":video_frame", + ":video_stream_encoder", + "../../api:scoped_refptr", + "../../video:video_stream_encoder_impl", + "../../video/adaptation:video_adaptation", + "../task_queue", + "../video_codecs:video_codecs_api", + ] +} + +rtc_library("builtin_video_bitrate_allocator_factory") { + visibility = [ "*" ] + sources = [ + "builtin_video_bitrate_allocator_factory.cc", + "builtin_video_bitrate_allocator_factory.h", + ] + + deps = [ + ":video_bitrate_allocation", + ":video_bitrate_allocator", + ":video_bitrate_allocator_factory", + "../../:webrtc_common", + "../../api:scoped_refptr", + "../../media:rtc_media_base", + "../../modules/video_coding:video_coding_utility", + "../../modules/video_coding:webrtc_vp9_helpers", + "../video_codecs:video_codecs_api", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/base:core_headers" ] +} + +if (rtc_include_tests) { + rtc_library("video_unittests") { + testonly = true + sources = [ + "video_frame_metadata_unittest.cc", + "video_stream_decoder_create_unittest.cc", + ] + deps = [ + ":video_frame_metadata", + ":video_stream_decoder_create", + "../../modules/rtp_rtcp:rtp_video_header", + "../../test:test_support", + "../task_queue:default_task_queue_factory", + "../video_codecs:builtin_video_decoder_factory", + ] + } +} diff --git a/api/video/DEPS b/api/video/DEPS new file mode 100644 index 0000000..3af594c --- /dev/null +++ b/api/video/DEPS @@ -0,0 +1,39 @@ +specific_include_rules = { + # Until the new VideoStreamDecoder is implemented the current decoding + # pipeline will be used, and therefore EncodedFrame needs to inherit + # VCMEncodedFrame. 
+ "encoded_frame.h": [ + "+modules/video_coding/encoded_frame.h", + ], + + "encoded_image\.h" : [ + "+rtc_base/ref_count.h", + ], + + "i010_buffer\.h": [ + "+rtc_base/memory/aligned_malloc.h", + ], + + "i420_buffer\.h": [ + "+rtc_base/memory/aligned_malloc.h", + ], + + "recordable_encoded_frame\.h": [ + "+rtc_base/ref_count.h", + ], + + "video_frame\.h": [ + ], + + "video_frame_buffer\.h": [ + "+rtc_base/ref_count.h", + ], + + "video_stream_decoder_create.cc": [ + "+video/video_stream_decoder_impl.h", + ], + + "video_stream_encoder_create.cc": [ + "+video/video_stream_encoder.h", + ], +} diff --git a/api/video/OWNERS b/api/video/OWNERS new file mode 100644 index 0000000..e4a16c3 --- /dev/null +++ b/api/video/OWNERS @@ -0,0 +1,5 @@ +brandtr@webrtc.org +magjed@webrtc.org +nisse@webrtc.org + +per-file video_timing.h=ilnik@webrtc.org diff --git a/api/video/builtin_video_bitrate_allocator_factory.cc b/api/video/builtin_video_bitrate_allocator_factory.cc new file mode 100644 index 0000000..96b2545 --- /dev/null +++ b/api/video/builtin_video_bitrate_allocator_factory.cc @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/video/builtin_video_bitrate_allocator_factory.h" + +#include + +#include "absl/base/macros.h" +#include "api/video/video_bitrate_allocator.h" +#include "api/video_codecs/video_codec.h" +#include "modules/video_coding/codecs/vp9/svc_rate_allocator.h" +#include "modules/video_coding/utility/simulcast_rate_allocator.h" + +namespace webrtc { + +namespace { + +class BuiltinVideoBitrateAllocatorFactory + : public VideoBitrateAllocatorFactory { + public: + BuiltinVideoBitrateAllocatorFactory() = default; + ~BuiltinVideoBitrateAllocatorFactory() override = default; + + std::unique_ptr CreateVideoBitrateAllocator( + const VideoCodec& codec) override { + std::unique_ptr rate_allocator; + switch (codec.codecType) { + case kVideoCodecVP9: + rate_allocator.reset(new SvcRateAllocator(codec)); + break; + // TODO: add an allocator here for H.265 + default: + rate_allocator.reset(new SimulcastRateAllocator(codec)); + } + return rate_allocator; + } +}; + +} // namespace + +std::unique_ptr +CreateBuiltinVideoBitrateAllocatorFactory() { + return std::make_unique(); +} + +} // namespace webrtc diff --git a/api/video/builtin_video_bitrate_allocator_factory.h b/api/video/builtin_video_bitrate_allocator_factory.h new file mode 100644 index 0000000..ac880a0 --- /dev/null +++ b/api/video/builtin_video_bitrate_allocator_factory.h @@ -0,0 +1,25 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#ifndef API_VIDEO_BUILTIN_VIDEO_BITRATE_ALLOCATOR_FACTORY_H_
+#define API_VIDEO_BUILTIN_VIDEO_BITRATE_ALLOCATOR_FACTORY_H_
+
+#include
+
+#include "api/video/video_bitrate_allocator_factory.h"
+
+namespace webrtc {
+
+std::unique_ptr
+CreateBuiltinVideoBitrateAllocatorFactory();
+
+} // namespace webrtc
+
+#endif // API_VIDEO_BUILTIN_VIDEO_BITRATE_ALLOCATOR_FACTORY_H_
diff --git a/api/video/color_space.cc b/api/video/color_space.cc
new file mode 100644
index 0000000..710bb43
--- /dev/null
+++ b/api/video/color_space.cc
@@ -0,0 +1,187 @@
+/*
+ * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/color_space.h"
+
+namespace webrtc {
+namespace {
+// Try to convert |enum_value| into the enum class T. |enum_bitmask| is created
+// by the function below. Returns true if conversion was successful, false
+// otherwise.
+template
+bool SetFromUint8(uint8_t enum_value, uint64_t enum_bitmask, T* out) {
+ if ((enum_value < 64) && ((enum_bitmask >> enum_value) & 1)) {
+ *out = static_cast(enum_value);
+ return true;
+ }
+ return false;
+}
+
+// This function serves as an assert for the constexpr function below. It's on
+// purpose not declared as constexpr so that it causes a build problem if enum
+// values of 64 or above are used. The bitmask and the code generating it would
+// have to be extended if the standard is updated to include enum values >= 64.
+int EnumMustBeLessThan64() {
+ return -1;
+}
+
+template
+constexpr int MakeMask(const int index, const int length, T (&values)[N]) {
+ return length > 1
+ ?
(MakeMask(index, 1, values) + + MakeMask(index + 1, length - 1, values)) + : (static_cast(values[index]) < 64 + ? (uint64_t{1} << static_cast(values[index])) + : EnumMustBeLessThan64()); +} + +// Create a bitmask where each bit corresponds to one potential enum value. +// |values| should be an array listing all possible enum values. The bit is set +// to one if the corresponding enum exists. Only works for enums with values +// less than 64. +template +constexpr uint64_t CreateEnumBitmask(T (&values)[N]) { + return MakeMask(0, N, values); +} + +bool SetChromaSitingFromUint8(uint8_t enum_value, + ColorSpace::ChromaSiting* chroma_siting) { + constexpr ColorSpace::ChromaSiting kChromaSitings[] = { + ColorSpace::ChromaSiting::kUnspecified, + ColorSpace::ChromaSiting::kCollocated, ColorSpace::ChromaSiting::kHalf}; + constexpr uint64_t enum_bitmask = CreateEnumBitmask(kChromaSitings); + + return SetFromUint8(enum_value, enum_bitmask, chroma_siting); +} + +} // namespace + +ColorSpace::ColorSpace() = default; +ColorSpace::ColorSpace(const ColorSpace& other) = default; +ColorSpace::ColorSpace(ColorSpace&& other) = default; +ColorSpace& ColorSpace::operator=(const ColorSpace& other) = default; + +ColorSpace::ColorSpace(PrimaryID primaries, + TransferID transfer, + MatrixID matrix, + RangeID range) + : ColorSpace(primaries, + transfer, + matrix, + range, + ChromaSiting::kUnspecified, + ChromaSiting::kUnspecified, + nullptr) {} + +ColorSpace::ColorSpace(PrimaryID primaries, + TransferID transfer, + MatrixID matrix, + RangeID range, + ChromaSiting chroma_siting_horz, + ChromaSiting chroma_siting_vert, + const HdrMetadata* hdr_metadata) + : primaries_(primaries), + transfer_(transfer), + matrix_(matrix), + range_(range), + chroma_siting_horizontal_(chroma_siting_horz), + chroma_siting_vertical_(chroma_siting_vert), + hdr_metadata_(hdr_metadata ? 
absl::make_optional(*hdr_metadata) + : absl::nullopt) {} + +ColorSpace::PrimaryID ColorSpace::primaries() const { + return primaries_; +} + +ColorSpace::TransferID ColorSpace::transfer() const { + return transfer_; +} + +ColorSpace::MatrixID ColorSpace::matrix() const { + return matrix_; +} + +ColorSpace::RangeID ColorSpace::range() const { + return range_; +} + +ColorSpace::ChromaSiting ColorSpace::chroma_siting_horizontal() const { + return chroma_siting_horizontal_; +} + +ColorSpace::ChromaSiting ColorSpace::chroma_siting_vertical() const { + return chroma_siting_vertical_; +} + +const HdrMetadata* ColorSpace::hdr_metadata() const { + return hdr_metadata_ ? &*hdr_metadata_ : nullptr; +} + +bool ColorSpace::set_primaries_from_uint8(uint8_t enum_value) { + constexpr PrimaryID kPrimaryIds[] = { + PrimaryID::kBT709, PrimaryID::kUnspecified, PrimaryID::kBT470M, + PrimaryID::kBT470BG, PrimaryID::kSMPTE170M, PrimaryID::kSMPTE240M, + PrimaryID::kFILM, PrimaryID::kBT2020, PrimaryID::kSMPTEST428, + PrimaryID::kSMPTEST431, PrimaryID::kSMPTEST432, PrimaryID::kJEDECP22}; + constexpr uint64_t enum_bitmask = CreateEnumBitmask(kPrimaryIds); + + return SetFromUint8(enum_value, enum_bitmask, &primaries_); +} + +bool ColorSpace::set_transfer_from_uint8(uint8_t enum_value) { + constexpr TransferID kTransferIds[] = { + TransferID::kBT709, TransferID::kUnspecified, + TransferID::kGAMMA22, TransferID::kGAMMA28, + TransferID::kSMPTE170M, TransferID::kSMPTE240M, + TransferID::kLINEAR, TransferID::kLOG, + TransferID::kLOG_SQRT, TransferID::kIEC61966_2_4, + TransferID::kBT1361_ECG, TransferID::kIEC61966_2_1, + TransferID::kBT2020_10, TransferID::kBT2020_12, + TransferID::kSMPTEST2084, TransferID::kSMPTEST428, + TransferID::kARIB_STD_B67}; + constexpr uint64_t enum_bitmask = CreateEnumBitmask(kTransferIds); + + return SetFromUint8(enum_value, enum_bitmask, &transfer_); +} + +bool ColorSpace::set_matrix_from_uint8(uint8_t enum_value) { + constexpr MatrixID kMatrixIds[] = { + MatrixID::kRGB, 
MatrixID::kBT709, MatrixID::kUnspecified, + MatrixID::kFCC, MatrixID::kBT470BG, MatrixID::kSMPTE170M, + MatrixID::kSMPTE240M, MatrixID::kYCOCG, MatrixID::kBT2020_NCL, + MatrixID::kBT2020_CL, MatrixID::kSMPTE2085, MatrixID::kCDNCLS, + MatrixID::kCDCLS, MatrixID::kBT2100_ICTCP}; + constexpr uint64_t enum_bitmask = CreateEnumBitmask(kMatrixIds); + + return SetFromUint8(enum_value, enum_bitmask, &matrix_); +} + +bool ColorSpace::set_range_from_uint8(uint8_t enum_value) { + constexpr RangeID kRangeIds[] = {RangeID::kInvalid, RangeID::kLimited, + RangeID::kFull, RangeID::kDerived}; + constexpr uint64_t enum_bitmask = CreateEnumBitmask(kRangeIds); + + return SetFromUint8(enum_value, enum_bitmask, &range_); +} + +bool ColorSpace::set_chroma_siting_horizontal_from_uint8(uint8_t enum_value) { + return SetChromaSitingFromUint8(enum_value, &chroma_siting_horizontal_); +} + +bool ColorSpace::set_chroma_siting_vertical_from_uint8(uint8_t enum_value) { + return SetChromaSitingFromUint8(enum_value, &chroma_siting_vertical_); +} + +void ColorSpace::set_hdr_metadata(const HdrMetadata* hdr_metadata) { + hdr_metadata_ = + hdr_metadata ? absl::make_optional(*hdr_metadata) : absl::nullopt; +} + +} // namespace webrtc diff --git a/api/video/color_space.h b/api/video/color_space.h new file mode 100644 index 0000000..a7ad86b --- /dev/null +++ b/api/video/color_space.h @@ -0,0 +1,178 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_VIDEO_COLOR_SPACE_H_ +#define API_VIDEO_COLOR_SPACE_H_ + +#include + +#include "absl/types/optional.h" +#include "api/video/hdr_metadata.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// This class represents color information as specified in T-REC H.273, +// available from https://www.itu.int/rec/T-REC-H.273. +// +// WebRTC's supported codecs: +// - VP9 supports color profiles, see VP9 Bitstream & Decoding Process +// Specification Version 0.6 Section 7.2.2 "Color config semantics" available +// from https://www.webmproject.org. +// - VP8 only supports BT.601, see +// https://tools.ietf.org/html/rfc6386#section-9.2 +// - H264 uses the exact same representation as T-REC H.273. See T-REC-H.264 +// E.2.1, "VUI parameters semantics", available from +// https://www.itu.int/rec/T-REC-H.264. + +class RTC_EXPORT ColorSpace { + public: + enum class PrimaryID : uint8_t { + // The indices are equal to the values specified in T-REC H.273 Table 2. + kBT709 = 1, + kUnspecified = 2, + kBT470M = 4, + kBT470BG = 5, + kSMPTE170M = 6, // Identical to BT601 + kSMPTE240M = 7, + kFILM = 8, + kBT2020 = 9, + kSMPTEST428 = 10, + kSMPTEST431 = 11, + kSMPTEST432 = 12, + kJEDECP22 = 22, // Identical to EBU3213-E + // When adding/removing entries here, please make sure to do the + // corresponding change to kPrimaryIds. + }; + + enum class TransferID : uint8_t { + // The indices are equal to the values specified in T-REC H.273 Table 3. + kBT709 = 1, + kUnspecified = 2, + kGAMMA22 = 4, + kGAMMA28 = 5, + kSMPTE170M = 6, + kSMPTE240M = 7, + kLINEAR = 8, + kLOG = 9, + kLOG_SQRT = 10, + kIEC61966_2_4 = 11, + kBT1361_ECG = 12, + kIEC61966_2_1 = 13, + kBT2020_10 = 14, + kBT2020_12 = 15, + kSMPTEST2084 = 16, + kSMPTEST428 = 17, + kARIB_STD_B67 = 18, + // When adding/removing entries here, please make sure to do the + // corresponding change to kTransferIds. 
+ };
+
+ enum class MatrixID : uint8_t {
+ // The indices are equal to the values specified in T-REC H.273 Table 4.
+ kRGB = 0,
+ kBT709 = 1,
+ kUnspecified = 2,
+ kFCC = 4,
+ kBT470BG = 5,
+ kSMPTE170M = 6,
+ kSMPTE240M = 7,
+ kYCOCG = 8,
+ kBT2020_NCL = 9,
+ kBT2020_CL = 10,
+ kSMPTE2085 = 11,
+ kCDNCLS = 12,
+ kCDCLS = 13,
+ kBT2100_ICTCP = 14,
+ // When adding/removing entries here, please make sure to do the
+ // corresponding change to kMatrixIds.
+ };
+
+ enum class RangeID {
+ // The indices are equal to the values specified at
+ // https://www.webmproject.org/docs/container/#colour for the element Range.
+ kInvalid = 0,
+ // Limited Rec. 709 color range with RGB values ranging from 16 to 235.
+ kLimited = 1,
+ // Full RGB color range with RGB values from 0 to 255.
+ kFull = 2,
+ // Range is defined by MatrixCoefficients/TransferCharacteristics.
+ kDerived = 3,
+ // When adding/removing entries here, please make sure to do the
+ // corresponding change to kRangeIds.
+ };
+
+ enum class ChromaSiting {
+ // Chroma siting specifies how chroma is subsampled relative to the luma
+ // samples in a YUV video frame.
+ // The indices are equal to the values specified at
+ // https://www.webmproject.org/docs/container/#colour for the element
+ // ChromaSitingVert and ChromaSitingHorz.
+ kUnspecified = 0,
+ kCollocated = 1,
+ kHalf = 2,
+ // When adding/removing entries here, please make sure to do the
+ // corresponding change to kChromaSitings.
+ }; + + ColorSpace(); + ColorSpace(const ColorSpace& other); + ColorSpace(ColorSpace&& other); + ColorSpace& operator=(const ColorSpace& other); + ColorSpace(PrimaryID primaries, + TransferID transfer, + MatrixID matrix, + RangeID range); + ColorSpace(PrimaryID primaries, + TransferID transfer, + MatrixID matrix, + RangeID range, + ChromaSiting chroma_siting_horizontal, + ChromaSiting chroma_siting_vertical, + const HdrMetadata* hdr_metadata); + friend bool operator==(const ColorSpace& lhs, const ColorSpace& rhs) { + return lhs.primaries_ == rhs.primaries_ && lhs.transfer_ == rhs.transfer_ && + lhs.matrix_ == rhs.matrix_ && lhs.range_ == rhs.range_ && + lhs.chroma_siting_horizontal_ == rhs.chroma_siting_horizontal_ && + lhs.chroma_siting_vertical_ == rhs.chroma_siting_vertical_ && + lhs.hdr_metadata_ == rhs.hdr_metadata_; + } + friend bool operator!=(const ColorSpace& lhs, const ColorSpace& rhs) { + return !(lhs == rhs); + } + + PrimaryID primaries() const; + TransferID transfer() const; + MatrixID matrix() const; + RangeID range() const; + ChromaSiting chroma_siting_horizontal() const; + ChromaSiting chroma_siting_vertical() const; + const HdrMetadata* hdr_metadata() const; + + bool set_primaries_from_uint8(uint8_t enum_value); + bool set_transfer_from_uint8(uint8_t enum_value); + bool set_matrix_from_uint8(uint8_t enum_value); + bool set_range_from_uint8(uint8_t enum_value); + bool set_chroma_siting_horizontal_from_uint8(uint8_t enum_value); + bool set_chroma_siting_vertical_from_uint8(uint8_t enum_value); + void set_hdr_metadata(const HdrMetadata* hdr_metadata); + + private: + PrimaryID primaries_ = PrimaryID::kUnspecified; + TransferID transfer_ = TransferID::kUnspecified; + MatrixID matrix_ = MatrixID::kUnspecified; + RangeID range_ = RangeID::kInvalid; + ChromaSiting chroma_siting_horizontal_ = ChromaSiting::kUnspecified; + ChromaSiting chroma_siting_vertical_ = ChromaSiting::kUnspecified; + absl::optional hdr_metadata_; +}; + +} // namespace webrtc +#endif 
// API_VIDEO_COLOR_SPACE_H_
diff --git a/api/video/encoded_frame.cc b/api/video/encoded_frame.cc
new file mode 100644
index 0000000..26a794e
--- /dev/null
+++ b/api/video/encoded_frame.cc
@@ -0,0 +1,21 @@
+/*
+ *  Copyright (c) 2018 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/encoded_frame.h"
+
+namespace webrtc {
+namespace video_coding {
+
+bool EncodedFrame::delayed_by_retransmission() const {
+  return false;
+}
+
+}  // namespace video_coding
+}  // namespace webrtc
diff --git a/api/video/encoded_frame.h b/api/video/encoded_frame.h
new file mode 100644
index 0000000..f0a67a1
--- /dev/null
+++ b/api/video/encoded_frame.h
@@ -0,0 +1,91 @@
+/*
+ *  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_ENCODED_FRAME_H_
+#define API_VIDEO_ENCODED_FRAME_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include "modules/video_coding/encoded_frame.h"
+
+namespace webrtc {
+namespace video_coding {
+
+// NOTE: This class is still under development and may change without notice.
+struct VideoLayerFrameId {
+  // TODO(philipel): The default ctor is currently used internally, but have a
+  // look if we can remove it.
+ VideoLayerFrameId() : picture_id(-1), spatial_layer(0) {} + VideoLayerFrameId(int64_t picture_id, uint8_t spatial_layer) + : picture_id(picture_id), spatial_layer(spatial_layer) {} + + bool operator==(const VideoLayerFrameId& rhs) const { + return picture_id == rhs.picture_id && spatial_layer == rhs.spatial_layer; + } + + bool operator!=(const VideoLayerFrameId& rhs) const { + return !(*this == rhs); + } + + bool operator<(const VideoLayerFrameId& rhs) const { + if (picture_id == rhs.picture_id) + return spatial_layer < rhs.spatial_layer; + return picture_id < rhs.picture_id; + } + + bool operator<=(const VideoLayerFrameId& rhs) const { return !(rhs < *this); } + bool operator>(const VideoLayerFrameId& rhs) const { return rhs < *this; } + bool operator>=(const VideoLayerFrameId& rhs) const { return rhs <= *this; } + + int64_t picture_id; + uint8_t spatial_layer; +}; + +// TODO(philipel): Remove webrtc::VCMEncodedFrame inheritance. +// TODO(philipel): Move transport specific info out of EncodedFrame. +// NOTE: This class is still under development and may change without notice. +class EncodedFrame : public webrtc::VCMEncodedFrame { + public: + static const uint8_t kMaxFrameReferences = 5; + + EncodedFrame() = default; + EncodedFrame(const EncodedFrame&) = default; + virtual ~EncodedFrame() {} + + // When this frame was received. + virtual int64_t ReceivedTime() const = 0; + + // When this frame should be rendered. + virtual int64_t RenderTime() const = 0; + + // This information is currently needed by the timing calculation class. + // TODO(philipel): Remove this function when a new timing class has + // been implemented. + virtual bool delayed_by_retransmission() const; + + bool is_keyframe() const { return num_references == 0; } + + VideoLayerFrameId id; + + // TODO(philipel): Add simple modify/access functions to prevent adding too + // many |references|. 
+  size_t num_references = 0;
+  int64_t references[kMaxFrameReferences];
+  bool inter_layer_predicted = false;
+  // Is this subframe the last one in the superframe (In RTP stream that would
+  // mean that the last packet has a marker bit set).
+  bool is_last_spatial_layer = true;
+};
+
+}  // namespace video_coding
+}  // namespace webrtc
+
+#endif  // API_VIDEO_ENCODED_FRAME_H_
diff --git a/api/video/encoded_image.cc b/api/video/encoded_image.cc
new file mode 100644
index 0000000..13d57ef
--- /dev/null
+++ b/api/video/encoded_image.cc
@@ -0,0 +1,111 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "api/video/encoded_image.h"
+
+#include <stdlib.h>
+#include <string.h>
+
+#include "rtc_base/ref_counted_object.h"
+
+namespace webrtc {
+
+EncodedImageBuffer::EncodedImageBuffer(size_t size) : size_(size) {
+  buffer_ = static_cast<uint8_t*>(malloc(size));
+}
+
+EncodedImageBuffer::EncodedImageBuffer(const uint8_t* data, size_t size)
+    : EncodedImageBuffer(size) {
+  memcpy(buffer_, data, size);
+}
+
+EncodedImageBuffer::~EncodedImageBuffer() {
+  free(buffer_);
+}
+
+// static
+rtc::scoped_refptr<EncodedImageBuffer> EncodedImageBuffer::Create(size_t size) {
+  return new rtc::RefCountedObject<EncodedImageBuffer>(size);
+}
+// static
+rtc::scoped_refptr<EncodedImageBuffer> EncodedImageBuffer::Create(
+    const uint8_t* data,
+    size_t size) {
+  return new rtc::RefCountedObject<EncodedImageBuffer>(data, size);
+}
+
+const uint8_t* EncodedImageBuffer::data() const {
+  return buffer_;
+}
+uint8_t* EncodedImageBuffer::data() {
+  return buffer_;
+}
+size_t EncodedImageBuffer::size() const {
+  return size_;
+}
+
+void EncodedImageBuffer::Realloc(size_t size) {
+  // Calling realloc with size == 0 is equivalent to free, and
returns nullptr.
+  // Which is confusing on systems where malloc(0) doesn't return a nullptr.
+  // More specifically, it breaks expectations of
+  // VCMSessionInfo::UpdateDataPointers.
+  RTC_DCHECK(size > 0);
+  buffer_ = static_cast<uint8_t*>(realloc(buffer_, size));
+  size_ = size;
+}
+
+EncodedImage::EncodedImage() : EncodedImage(nullptr, 0, 0) {}
+
+EncodedImage::EncodedImage(EncodedImage&&) = default;
+EncodedImage::EncodedImage(const EncodedImage&) = default;
+
+EncodedImage::EncodedImage(uint8_t* buffer, size_t size, size_t capacity)
+    : size_(size), buffer_(buffer), capacity_(capacity) {}
+
+EncodedImage::~EncodedImage() = default;
+
+EncodedImage& EncodedImage::operator=(EncodedImage&&) = default;
+EncodedImage& EncodedImage::operator=(const EncodedImage&) = default;
+
+void EncodedImage::Retain() {
+  if (buffer_) {
+    encoded_data_ = EncodedImageBuffer::Create(buffer_, size_);
+    buffer_ = nullptr;
+  }
+}
+
+void EncodedImage::SetEncodeTime(int64_t encode_start_ms,
+                                 int64_t encode_finish_ms) {
+  timing_.encode_start_ms = encode_start_ms;
+  timing_.encode_finish_ms = encode_finish_ms;
+}
+
+absl::optional<size_t> EncodedImage::SpatialLayerFrameSize(
+    int spatial_index) const {
+  RTC_DCHECK_GE(spatial_index, 0);
+  RTC_DCHECK_LE(spatial_index, spatial_index_.value_or(0));
+
+  auto it = spatial_layer_frame_size_bytes_.find(spatial_index);
+  if (it == spatial_layer_frame_size_bytes_.end()) {
+    return absl::nullopt;
+  }
+
+  return it->second;
+}
+
+void EncodedImage::SetSpatialLayerFrameSize(int spatial_index,
+                                            size_t size_bytes) {
+  RTC_DCHECK_GE(spatial_index, 0);
+  RTC_DCHECK_LE(spatial_index, spatial_index_.value_or(0));
+  RTC_DCHECK_GE(size_bytes, 0);
+  spatial_layer_frame_size_bytes_[spatial_index] = size_bytes;
+}
+
+}  // namespace webrtc
diff --git a/api/video/encoded_image.h b/api/video/encoded_image.h
new file mode 100644
index 0000000..35c2584
--- /dev/null
+++ b/api/video/encoded_image.h
@@ -0,0 +1,223 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors.
All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_VIDEO_ENCODED_IMAGE_H_
+#define API_VIDEO_ENCODED_IMAGE_H_
+
+#include <stdint.h>
+
+#include <map>
+#include <utility>
+
+#include "absl/types/optional.h"
+#include "api/rtp_packet_infos.h"
+#include "api/scoped_refptr.h"
+#include "api/video/color_space.h"
+#include "api/video/video_codec_constants.h"
+#include "api/video/video_content_type.h"
+#include "api/video/video_frame_type.h"
+#include "api/video/video_rotation.h"
+#include "api/video/video_timing.h"
+#include "common_types.h"  // NOLINT(build/include_directory)
+#include "rtc_base/checks.h"
+#include "rtc_base/deprecation.h"
+#include "rtc_base/ref_count.h"
+#include "rtc_base/system/rtc_export.h"
+
+namespace webrtc {
+
+// Abstract interface for buffer storage. Intended to support buffers owned by
+// external encoders with special release requirements, e.g, java encoders with
+// releaseOutputBuffer.
+class EncodedImageBufferInterface : public rtc::RefCountInterface {
+ public:
+  virtual const uint8_t* data() const = 0;
+  // TODO(bugs.webrtc.org/9378): Make interface essentially read-only, delete
+  // this non-const data method.
+  virtual uint8_t* data() = 0;
+  virtual size_t size() const = 0;
+};
+
+// Basic implementation of EncodedImageBufferInterface.
+class RTC_EXPORT EncodedImageBuffer : public EncodedImageBufferInterface {
+ public:
+  static rtc::scoped_refptr<EncodedImageBuffer> Create() { return Create(0); }
+  static rtc::scoped_refptr<EncodedImageBuffer> Create(size_t size);
+  static rtc::scoped_refptr<EncodedImageBuffer> Create(const uint8_t* data,
+                                                       size_t size);
+
+  const uint8_t* data() const override;
+  uint8_t* data() override;
+  size_t size() const override;
+  void Realloc(size_t t);
+
+ protected:
+  explicit EncodedImageBuffer(size_t size);
+  EncodedImageBuffer(const uint8_t* data, size_t size);
+  ~EncodedImageBuffer();
+
+  size_t size_;
+  uint8_t* buffer_;
+};
+
+// TODO(bug.webrtc.org/9378): This is a legacy api class, which is slowly being
+// cleaned up. Direct use of its members is strongly discouraged.
+class RTC_EXPORT EncodedImage {
+ public:
+  EncodedImage();
+  EncodedImage(EncodedImage&&);
+  // Discouraged: potentially expensive.
+  EncodedImage(const EncodedImage&);
+  EncodedImage(uint8_t* buffer, size_t length, size_t capacity);
+
+  ~EncodedImage();
+
+  EncodedImage& operator=(EncodedImage&&);
+  // Discouraged: potentially expensive.
+  EncodedImage& operator=(const EncodedImage&);
+
+  // TODO(nisse): Change style to timestamp(), set_timestamp(), for consistency
+  // with the VideoFrame class.
+  // Set frame timestamp (90kHz).
+  void SetTimestamp(uint32_t timestamp) { timestamp_rtp_ = timestamp; }
+
+  // Get frame timestamp (90kHz).
+  uint32_t Timestamp() const { return timestamp_rtp_; }
+
+  void SetEncodeTime(int64_t encode_start_ms, int64_t encode_finish_ms);
+
+  int64_t NtpTimeMs() const { return ntp_time_ms_; }
+
+  absl::optional<int> SpatialIndex() const { return spatial_index_; }
+  void SetSpatialIndex(absl::optional<int> spatial_index) {
+    RTC_DCHECK_GE(spatial_index.value_or(0), 0);
+    RTC_DCHECK_LT(spatial_index.value_or(0), kMaxSpatialLayers);
+    spatial_index_ = spatial_index;
+  }
+
+  // These methods can be used to set/get size of subframe with spatial index
+  // |spatial_index| on encoded frames that consist of multiple spatial layers.
+  absl::optional<size_t> SpatialLayerFrameSize(int spatial_index) const;
+  void SetSpatialLayerFrameSize(int spatial_index, size_t size_bytes);
+
+  const webrtc::ColorSpace* ColorSpace() const {
+    return color_space_ ? &*color_space_ : nullptr;
+  }
+  void SetColorSpace(const absl::optional<webrtc::ColorSpace>& color_space) {
+    color_space_ = color_space;
+  }
+
+  const RtpPacketInfos& PacketInfos() const { return packet_infos_; }
+  void SetPacketInfos(RtpPacketInfos packet_infos) {
+    packet_infos_ = std::move(packet_infos);
+  }
+
+  bool RetransmissionAllowed() const { return retransmission_allowed_; }
+  void SetRetransmissionAllowed(bool retransmission_allowed) {
+    retransmission_allowed_ = retransmission_allowed;
+  }
+
+  size_t size() const { return size_; }
+  void set_size(size_t new_size) {
+    // Allow set_size(0) even if we have no buffer.
+    RTC_DCHECK_LE(new_size, new_size == 0 ? 0 : capacity());
+    size_ = new_size;
+  }
+  // TODO(nisse): Delete, provide only read-only access to the buffer.
+  size_t capacity() const {
+    return buffer_ ? capacity_ : (encoded_data_ ? encoded_data_->size() : 0);
+  }
+
+  void SetEncodedData(
+      rtc::scoped_refptr<EncodedImageBufferInterface> encoded_data) {
+    encoded_data_ = encoded_data;
+    size_ = encoded_data->size();
+    buffer_ = nullptr;
+  }
+
+  void ClearEncodedData() {
+    encoded_data_ = nullptr;
+    size_ = 0;
+    buffer_ = nullptr;
+    capacity_ = 0;
+  }
+
+  rtc::scoped_refptr<EncodedImageBufferInterface> GetEncodedData() const {
+    RTC_DCHECK(buffer_ == nullptr);
+    return encoded_data_;
+  }
+
+  // TODO(nisse): Delete, provide only read-only access to the buffer.
+  uint8_t* data() {
+    return buffer_ ? buffer_
+                   : (encoded_data_ ? encoded_data_->data() : nullptr);
+  }
+  const uint8_t* data() const {
+    return buffer_ ? buffer_
+                   : (encoded_data_ ? encoded_data_->data() : nullptr);
+  }
+
+  // Hack to workaround lack of ownership of the encoded data. If we don't
+  // already own the underlying data, make an owned copy.
+  void Retain();
+
+  uint32_t _encodedWidth = 0;
+  uint32_t _encodedHeight = 0;
+  // NTP time of the capture time in local timebase in milliseconds.
+  // TODO(minyue): make this member private.
+  int64_t ntp_time_ms_ = 0;
+  int64_t capture_time_ms_ = 0;
+  VideoFrameType _frameType = VideoFrameType::kVideoFrameDelta;
+  VideoRotation rotation_ = kVideoRotation_0;
+  VideoContentType content_type_ = VideoContentType::UNSPECIFIED;
+  bool _completeFrame = false;
+  int qp_ = -1;  // Quantizer value.
+
+  // When an application indicates non-zero values here, it is taken as an
+  // indication that all future frames will be constrained with those limits
+  // until the application indicates a change again.
+  PlayoutDelay playout_delay_ = {-1, -1};
+
+  struct Timing {
+    uint8_t flags = VideoSendTiming::kInvalid;
+    int64_t encode_start_ms = 0;
+    int64_t encode_finish_ms = 0;
+    int64_t packetization_finish_ms = 0;
+    int64_t pacer_exit_ms = 0;
+    int64_t network_timestamp_ms = 0;
+    int64_t network2_timestamp_ms = 0;
+    int64_t receive_start_ms = 0;
+    int64_t receive_finish_ms = 0;
+  } timing_;
+
+ private:
+  // TODO(bugs.webrtc.org/9378): We're transitioning to always owning the
+  // encoded data.
+  rtc::scoped_refptr<EncodedImageBufferInterface> encoded_data_;
+  size_t size_;  // Size of encoded frame data.
+  // Non-null when used with an un-owned buffer.
+  uint8_t* buffer_;
+  // Allocated size of _buffer; relevant only if it's non-null.
+  size_t capacity_;
+  uint32_t timestamp_rtp_ = 0;
+  absl::optional<int> spatial_index_;
+  std::map<int, size_t> spatial_layer_frame_size_bytes_;
+  absl::optional<webrtc::ColorSpace> color_space_;
+  // Information about packets used to assemble this video frame. This is needed
+  // by |SourceTracker| when the frame is delivered to the RTCRtpReceiver's
+  // MediaStreamTrack, in order to implement getContributingSources().
See: + // https://w3c.github.io/webrtc-pc/#dom-rtcrtpreceiver-getcontributingsources + RtpPacketInfos packet_infos_; + bool retransmission_allowed_ = true; +}; + +} // namespace webrtc + +#endif // API_VIDEO_ENCODED_IMAGE_H_ diff --git a/api/video/hdr_metadata.cc b/api/video/hdr_metadata.cc new file mode 100644 index 0000000..e2a669c --- /dev/null +++ b/api/video/hdr_metadata.cc @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/video/hdr_metadata.h" + +namespace webrtc { + +HdrMasteringMetadata::Chromaticity::Chromaticity() = default; + +HdrMasteringMetadata::HdrMasteringMetadata() = default; + +HdrMetadata::HdrMetadata() = default; + +} // namespace webrtc diff --git a/api/video/hdr_metadata.h b/api/video/hdr_metadata.h new file mode 100644 index 0000000..e9001a2 --- /dev/null +++ b/api/video/hdr_metadata.h @@ -0,0 +1,105 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_HDR_METADATA_H_ +#define API_VIDEO_HDR_METADATA_H_ + +namespace webrtc { + +// SMPTE ST 2086 mastering metadata, +// see https://ieeexplore.ieee.org/document/8353899. 
+struct HdrMasteringMetadata { + struct Chromaticity { + Chromaticity(); + + bool operator==(const Chromaticity& rhs) const { + return x == rhs.x && y == rhs.y; + } + + bool Validate() const { + return x >= 0.0 && x <= 1.0 && y >= 0.0 && y <= 1.0; + } + + // xy chromaticity coordinates must be calculated as specified in ISO + // 11664-3:2012 Section 7, and must be specified with four decimal places. + // The x coordinate should be in the range [0.0001, 0.7400] and the y + // coordinate should be in the range [0.0001, 0.8400]. Valid range [0.0000, + // 1.0000]. + float x = 0.0f; + float y = 0.0f; + }; + + HdrMasteringMetadata(); + + bool operator==(const HdrMasteringMetadata& rhs) const { + return ((primary_r == rhs.primary_r) && (primary_g == rhs.primary_g) && + (primary_b == rhs.primary_b) && (white_point == rhs.white_point) && + (luminance_max == rhs.luminance_max) && + (luminance_min == rhs.luminance_min)); + } + + bool Validate() const { + return luminance_max >= 0.0 && luminance_max <= 20000.0 && + luminance_min >= 0.0 && luminance_min <= 5.0 && + primary_r.Validate() && primary_g.Validate() && + primary_b.Validate() && white_point.Validate(); + } + + // The nominal primaries of the mastering display. + Chromaticity primary_r; + Chromaticity primary_g; + Chromaticity primary_b; + + // The nominal chromaticity of the white point of the mastering display. + Chromaticity white_point; + + // The nominal maximum display luminance of the mastering display. Specified + // in the unit candela/m2. The value should be in the range [5, 10000] with + // zero decimal places. Valid range [0, 20000]. + float luminance_max = 0.0f; + + // The nominal minimum display luminance of the mastering display. Specified + // in the unit candela/m2. The value should be in the range [0.0001, 5.0000] + // with four decimal places. Valid range [0.0000, 5.0000]. + float luminance_min = 0.0f; +}; + +// High dynamic range (HDR) metadata common for HDR10 and WebM/VP9-based HDR +// formats. 
This struct replicates the HDRMetadata struct defined in +// https://cs.chromium.org/chromium/src/media/base/hdr_metadata.h +struct HdrMetadata { + HdrMetadata(); + + bool operator==(const HdrMetadata& rhs) const { + return ( + (max_content_light_level == rhs.max_content_light_level) && + (max_frame_average_light_level == rhs.max_frame_average_light_level) && + (mastering_metadata == rhs.mastering_metadata)); + } + + bool Validate() const { + return max_content_light_level >= 0 && max_content_light_level <= 20000 && + max_frame_average_light_level >= 0 && + max_frame_average_light_level <= 20000 && + mastering_metadata.Validate(); + } + + HdrMasteringMetadata mastering_metadata; + // Max content light level (CLL), i.e. maximum brightness level present in the + // stream, in nits. 1 nit = 1 candela/m2. Valid range [0, 20000]. + int max_content_light_level = 0; + // Max frame-average light level (FALL), i.e. maximum average brightness of + // the brightest frame in the stream, in nits. Valid range [0, 20000]. + int max_frame_average_light_level = 0; +}; + +} // namespace webrtc + +#endif // API_VIDEO_HDR_METADATA_H_ diff --git a/api/video/i010_buffer.cc b/api/video/i010_buffer.cc new file mode 100644 index 0000000..7286676 --- /dev/null +++ b/api/video/i010_buffer.cc @@ -0,0 +1,266 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+#include "api/video/i010_buffer.h"
+
+#include <utility>
+
+#include "api/video/i420_buffer.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/ref_counted_object.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
+
+// Aligning pointer to 64 bytes for improved performance, e.g. use SIMD.
+static const int kBufferAlignment = 64;
+static const int kBytesPerPixel = 2;
+
+namespace webrtc {
+
+namespace {
+
+int I010DataSize(int height, int stride_y, int stride_u, int stride_v) {
+  return kBytesPerPixel *
+         (stride_y * height + (stride_u + stride_v) * ((height + 1) / 2));
+}
+
+}  // namespace
+
+I010Buffer::I010Buffer(int width,
+                       int height,
+                       int stride_y,
+                       int stride_u,
+                       int stride_v)
+    : width_(width),
+      height_(height),
+      stride_y_(stride_y),
+      stride_u_(stride_u),
+      stride_v_(stride_v),
+      data_(static_cast<uint16_t*>(
+          AlignedMalloc(I010DataSize(height, stride_y, stride_u, stride_v),
+                        kBufferAlignment))) {
+  RTC_DCHECK_GT(width, 0);
+  RTC_DCHECK_GT(height, 0);
+  RTC_DCHECK_GE(stride_y, width);
+  RTC_DCHECK_GE(stride_u, (width + 1) / 2);
+  RTC_DCHECK_GE(stride_v, (width + 1) / 2);
+}
+
+I010Buffer::~I010Buffer() {}
+
+// static
+rtc::scoped_refptr<I010Buffer> I010Buffer::Create(int width, int height) {
+  return new rtc::RefCountedObject<I010Buffer>(
+      width, height, width, (width + 1) / 2, (width + 1) / 2);
+}
+
+// static
+rtc::scoped_refptr<I010Buffer> I010Buffer::Copy(
+    const I010BufferInterface& source) {
+  const int width = source.width();
+  const int height = source.height();
+  rtc::scoped_refptr<I010Buffer> buffer = Create(width, height);
+  RTC_CHECK_EQ(
+      0, libyuv::I010Copy(
+             source.DataY(), source.StrideY(), source.DataU(), source.StrideU(),
+             source.DataV(), source.StrideV(), buffer->MutableDataY(),
+             buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(),
+             buffer->MutableDataV(), buffer->StrideV(), width, height));
+  return buffer;
+}
+
+// static
+rtc::scoped_refptr<I010Buffer> I010Buffer::Copy(
+    const I420BufferInterface& source) {
+  const int
width = source.width(); + const int height = source.height(); + rtc::scoped_refptr buffer = Create(width, height); + RTC_CHECK_EQ( + 0, libyuv::I420ToI010( + source.DataY(), source.StrideY(), source.DataU(), source.StrideU(), + source.DataV(), source.StrideV(), buffer->MutableDataY(), + buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(), + buffer->MutableDataV(), buffer->StrideV(), width, height)); + return buffer; +} + +// static +rtc::scoped_refptr I010Buffer::Rotate( + const I010BufferInterface& src, + VideoRotation rotation) { + if (rotation == webrtc::kVideoRotation_0) + return Copy(src); + + RTC_CHECK(src.DataY()); + RTC_CHECK(src.DataU()); + RTC_CHECK(src.DataV()); + int rotated_width = src.width(); + int rotated_height = src.height(); + if (rotation == webrtc::kVideoRotation_90 || + rotation == webrtc::kVideoRotation_270) { + std::swap(rotated_width, rotated_height); + } + + rtc::scoped_refptr buffer = + Create(rotated_width, rotated_height); + // TODO(emircan): Remove this when there is libyuv::I010Rotate(). + for (int x = 0; x < src.width(); x++) { + for (int y = 0; y < src.height(); y++) { + int dest_x = x; + int dest_y = y; + switch (rotation) { + // This case is covered by the early return. 
+ case webrtc::kVideoRotation_0: + RTC_NOTREACHED(); + break; + case webrtc::kVideoRotation_90: + dest_x = src.height() - y - 1; + dest_y = x; + break; + case webrtc::kVideoRotation_180: + dest_x = src.width() - x - 1; + dest_y = src.height() - y - 1; + break; + case webrtc::kVideoRotation_270: + dest_x = y; + dest_y = src.width() - x - 1; + break; + } + buffer->MutableDataY()[dest_x + buffer->StrideY() * dest_y] = + src.DataY()[x + src.StrideY() * y]; + dest_x /= 2; + dest_y /= 2; + int src_x = x / 2; + int src_y = y / 2; + buffer->MutableDataU()[dest_x + buffer->StrideU() * dest_y] = + src.DataU()[src_x + src.StrideU() * src_y]; + buffer->MutableDataV()[dest_x + buffer->StrideV() * dest_y] = + src.DataV()[src_x + src.StrideV() * src_y]; + } + } + return buffer; +} + +rtc::scoped_refptr I010Buffer::ToI420() { + rtc::scoped_refptr i420_buffer = + I420Buffer::Create(width(), height()); + libyuv::I010ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(), + i420_buffer->MutableDataY(), i420_buffer->StrideY(), + i420_buffer->MutableDataU(), i420_buffer->StrideU(), + i420_buffer->MutableDataV(), i420_buffer->StrideV(), + width(), height()); + return i420_buffer; +} + +int I010Buffer::width() const { + return width_; +} + +int I010Buffer::height() const { + return height_; +} + +const uint16_t* I010Buffer::DataY() const { + return data_.get(); +} +const uint16_t* I010Buffer::DataU() const { + return data_.get() + stride_y_ * height_; +} +const uint16_t* I010Buffer::DataV() const { + return data_.get() + stride_y_ * height_ + stride_u_ * ((height_ + 1) / 2); +} + +int I010Buffer::StrideY() const { + return stride_y_; +} +int I010Buffer::StrideU() const { + return stride_u_; +} +int I010Buffer::StrideV() const { + return stride_v_; +} + +uint16_t* I010Buffer::MutableDataY() { + return const_cast(DataY()); +} +uint16_t* I010Buffer::MutableDataU() { + return const_cast(DataU()); +} +uint16_t* I010Buffer::MutableDataV() { + return const_cast(DataV()); +} + +void 
I010Buffer::CropAndScaleFrom(const I010BufferInterface& src, + int offset_x, + int offset_y, + int crop_width, + int crop_height) { + RTC_CHECK_LE(crop_width, src.width()); + RTC_CHECK_LE(crop_height, src.height()); + RTC_CHECK_LE(crop_width + offset_x, src.width()); + RTC_CHECK_LE(crop_height + offset_y, src.height()); + RTC_CHECK_GE(offset_x, 0); + RTC_CHECK_GE(offset_y, 0); + + // Make sure offset is even so that u/v plane becomes aligned. + const int uv_offset_x = offset_x / 2; + const int uv_offset_y = offset_y / 2; + offset_x = uv_offset_x * 2; + offset_y = uv_offset_y * 2; + + const uint16_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x; + const uint16_t* u_plane = + src.DataU() + src.StrideU() * uv_offset_y + uv_offset_x; + const uint16_t* v_plane = + src.DataV() + src.StrideV() * uv_offset_y + uv_offset_x; + int res = libyuv::I420Scale_16( + y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane, src.StrideV(), + crop_width, crop_height, MutableDataY(), StrideY(), MutableDataU(), + StrideU(), MutableDataV(), StrideV(), width(), height(), + libyuv::kFilterBox); + + RTC_DCHECK_EQ(res, 0); +} + +void I010Buffer::ScaleFrom(const I010BufferInterface& src) { + CropAndScaleFrom(src, 0, 0, src.width(), src.height()); +} + +void I010Buffer::PasteFrom(const I010BufferInterface& picture, + int offset_col, + int offset_row) { + RTC_CHECK_LE(picture.width() + offset_col, width()); + RTC_CHECK_LE(picture.height() + offset_row, height()); + RTC_CHECK_GE(offset_col, 0); + RTC_CHECK_GE(offset_row, 0); + + // Pasted picture has to be aligned so subsumpled UV plane isn't corrupted. 
+ RTC_CHECK(offset_col % 2 == 0); + RTC_CHECK(offset_row % 2 == 0); + RTC_CHECK(picture.width() % 2 == 0 || + picture.width() + offset_col == width()); + RTC_CHECK(picture.height() % 2 == 0 || + picture.height() + offset_row == height()); + + libyuv::CopyPlane_16(picture.DataY(), picture.StrideY(), + MutableDataY() + StrideY() * offset_row + offset_col, + StrideY(), picture.width(), picture.height()); + + libyuv::CopyPlane_16( + picture.DataU(), picture.StrideU(), + MutableDataU() + StrideU() * offset_row / 2 + offset_col / 2, StrideU(), + picture.width() / 2, picture.height() / 2); + + libyuv::CopyPlane_16( + picture.DataV(), picture.StrideV(), + MutableDataV() + StrideV() * offset_row / 2 + offset_col / 2, StrideV(), + picture.width() / 2, picture.height() / 2); +} + +} // namespace webrtc diff --git a/api/video/i010_buffer.h b/api/video/i010_buffer.h new file mode 100644 index 0000000..6299927 --- /dev/null +++ b/api/video/i010_buffer.h @@ -0,0 +1,90 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_I010_BUFFER_H_ +#define API_VIDEO_I010_BUFFER_H_ + +#include + +#include + +#include "api/scoped_refptr.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_rotation.h" +#include "rtc_base/memory/aligned_malloc.h" + +namespace webrtc { + +// Plain I010 buffer in standard memory. +class I010Buffer : public I010BufferInterface { + public: + // Create a new buffer. + static rtc::scoped_refptr Create(int width, int height); + + // Create a new buffer and copy the pixel data. 
+ static rtc::scoped_refptr Copy(const I010BufferInterface& buffer); + + // Convert and put I420 buffer into a new buffer. + static rtc::scoped_refptr Copy(const I420BufferInterface& buffer); + + // Return a rotated copy of |src|. + static rtc::scoped_refptr Rotate(const I010BufferInterface& src, + VideoRotation rotation); + + // VideoFrameBuffer implementation. + rtc::scoped_refptr ToI420() override; + + // PlanarYuv16BBuffer implementation. + int width() const override; + int height() const override; + const uint16_t* DataY() const override; + const uint16_t* DataU() const override; + const uint16_t* DataV() const override; + int StrideY() const override; + int StrideU() const override; + int StrideV() const override; + + uint16_t* MutableDataY(); + uint16_t* MutableDataU(); + uint16_t* MutableDataV(); + + // Scale the cropped area of |src| to the size of |this| buffer, and + // write the result into |this|. + void CropAndScaleFrom(const I010BufferInterface& src, + int offset_x, + int offset_y, + int crop_width, + int crop_height); + + // Scale all of |src| to the size of |this| buffer, with no cropping. + void ScaleFrom(const I010BufferInterface& src); + + // Pastes whole picture to canvas at (offset_row, offset_col). + // Offsets and picture dimensions must be even. + void PasteFrom(const I010BufferInterface& picture, + int offset_col, + int offset_row); + + protected: + I010Buffer(int width, int height, int stride_y, int stride_u, int stride_v); + ~I010Buffer() override; + + private: + const int width_; + const int height_; + const int stride_y_; + const int stride_u_; + const int stride_v_; + const std::unique_ptr data_; +}; + +} // namespace webrtc + +#endif // API_VIDEO_I010_BUFFER_H_ diff --git a/api/video/i420_buffer.cc b/api/video/i420_buffer.cc new file mode 100644 index 0000000..2a52217 --- /dev/null +++ b/api/video/i420_buffer.cc @@ -0,0 +1,263 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. 
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#include "api/video/i420_buffer.h"
+
+#include <string.h>
+
+#include <algorithm>
+#include <utility>
+
+#include "rtc_base/checks.h"
+#include "rtc_base/ref_counted_object.h"
+#include "third_party/libyuv/include/libyuv/convert.h"
+#include "third_party/libyuv/include/libyuv/planar_functions.h"
+#include "third_party/libyuv/include/libyuv/scale.h"
+
+// Aligning pointer to 64 bytes for improved performance, e.g. use SIMD.
+static const int kBufferAlignment = 64;
+
+namespace webrtc {
+
+namespace {
+
+int I420DataSize(int height, int stride_y, int stride_u, int stride_v) {
+  return stride_y * height + (stride_u + stride_v) * ((height + 1) / 2);
+}
+
+}  // namespace
+
+I420Buffer::I420Buffer(int width, int height)
+    : I420Buffer(width, height, width, (width + 1) / 2, (width + 1) / 2) {}
+
+I420Buffer::I420Buffer(int width,
+                       int height,
+                       int stride_y,
+                       int stride_u,
+                       int stride_v)
+    : width_(width),
+      height_(height),
+      stride_y_(stride_y),
+      stride_u_(stride_u),
+      stride_v_(stride_v),
+      data_(static_cast<uint8_t*>(
+          AlignedMalloc(I420DataSize(height, stride_y, stride_u, stride_v),
+                        kBufferAlignment))) {
+  RTC_DCHECK_GT(width, 0);
+  RTC_DCHECK_GT(height, 0);
+  RTC_DCHECK_GE(stride_y, width);
+  RTC_DCHECK_GE(stride_u, (width + 1) / 2);
+  RTC_DCHECK_GE(stride_v, (width + 1) / 2);
+}
+
+I420Buffer::~I420Buffer() {}
+
+// static
+rtc::scoped_refptr<I420Buffer> I420Buffer::Create(int width, int height) {
+  return new rtc::RefCountedObject<I420Buffer>(width, height);
+}
+
+// static
+rtc::scoped_refptr<I420Buffer> I420Buffer::Create(int width,
+                                                  int height,
+                                                  int stride_y,
+                                                  int stride_u,
+                                                  int stride_v) {
+  return new rtc::RefCountedObject<I420Buffer>(width, height, stride_y,
+                                               stride_u, stride_v);
+}
+
+// static
+rtc::scoped_refptr I420Buffer::Copy( + const I420BufferInterface& source) { + return Copy(source.width(), source.height(), source.DataY(), source.StrideY(), + source.DataU(), source.StrideU(), source.DataV(), + source.StrideV()); +} + +// static +rtc::scoped_refptr I420Buffer::Copy(int width, + int height, + const uint8_t* data_y, + int stride_y, + const uint8_t* data_u, + int stride_u, + const uint8_t* data_v, + int stride_v) { + // Note: May use different strides than the input data. + rtc::scoped_refptr buffer = Create(width, height); + RTC_CHECK_EQ(0, libyuv::I420Copy(data_y, stride_y, data_u, stride_u, data_v, + stride_v, buffer->MutableDataY(), + buffer->StrideY(), buffer->MutableDataU(), + buffer->StrideU(), buffer->MutableDataV(), + buffer->StrideV(), width, height)); + return buffer; +} + +// static +rtc::scoped_refptr I420Buffer::Rotate( + const I420BufferInterface& src, + VideoRotation rotation) { + RTC_CHECK(src.DataY()); + RTC_CHECK(src.DataU()); + RTC_CHECK(src.DataV()); + + int rotated_width = src.width(); + int rotated_height = src.height(); + if (rotation == webrtc::kVideoRotation_90 || + rotation == webrtc::kVideoRotation_270) { + std::swap(rotated_width, rotated_height); + } + + rtc::scoped_refptr buffer = + I420Buffer::Create(rotated_width, rotated_height); + + RTC_CHECK_EQ(0, + libyuv::I420Rotate( + src.DataY(), src.StrideY(), src.DataU(), src.StrideU(), + src.DataV(), src.StrideV(), buffer->MutableDataY(), + buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(), + buffer->MutableDataV(), buffer->StrideV(), src.width(), + src.height(), static_cast(rotation))); + + return buffer; +} + +void I420Buffer::InitializeData() { + memset(data_.get(), 0, + I420DataSize(height_, stride_y_, stride_u_, stride_v_)); +} + +int I420Buffer::width() const { + return width_; +} + +int I420Buffer::height() const { + return height_; +} + +const uint8_t* I420Buffer::DataY() const { + return data_.get(); +} +const uint8_t* I420Buffer::DataU() const { + return 
data_.get() + stride_y_ * height_; +} +const uint8_t* I420Buffer::DataV() const { + return data_.get() + stride_y_ * height_ + stride_u_ * ((height_ + 1) / 2); +} + +int I420Buffer::StrideY() const { + return stride_y_; +} +int I420Buffer::StrideU() const { + return stride_u_; +} +int I420Buffer::StrideV() const { + return stride_v_; +} + +uint8_t* I420Buffer::MutableDataY() { + return const_cast(DataY()); +} +uint8_t* I420Buffer::MutableDataU() { + return const_cast(DataU()); +} +uint8_t* I420Buffer::MutableDataV() { + return const_cast(DataV()); +} + +// static +void I420Buffer::SetBlack(I420Buffer* buffer) { + RTC_CHECK(libyuv::I420Rect(buffer->MutableDataY(), buffer->StrideY(), + buffer->MutableDataU(), buffer->StrideU(), + buffer->MutableDataV(), buffer->StrideV(), 0, 0, + buffer->width(), buffer->height(), 0, 128, + 128) == 0); +} + +void I420Buffer::CropAndScaleFrom(const I420BufferInterface& src, + int offset_x, + int offset_y, + int crop_width, + int crop_height) { + RTC_CHECK_LE(crop_width, src.width()); + RTC_CHECK_LE(crop_height, src.height()); + RTC_CHECK_LE(crop_width + offset_x, src.width()); + RTC_CHECK_LE(crop_height + offset_y, src.height()); + RTC_CHECK_GE(offset_x, 0); + RTC_CHECK_GE(offset_y, 0); + + // Make sure offset is even so that u/v plane becomes aligned. 
+ const int uv_offset_x = offset_x / 2;
+ const int uv_offset_y = offset_y / 2;
+ offset_x = uv_offset_x * 2;
+ offset_y = uv_offset_y * 2;
+
+ const uint8_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x;
+ const uint8_t* u_plane =
+ src.DataU() + src.StrideU() * uv_offset_y + uv_offset_x;
+ const uint8_t* v_plane =
+ src.DataV() + src.StrideV() * uv_offset_y + uv_offset_x;
+ int res =
+ libyuv::I420Scale(y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane,
+ src.StrideV(), crop_width, crop_height, MutableDataY(),
+ StrideY(), MutableDataU(), StrideU(), MutableDataV(),
+ StrideV(), width(), height(), libyuv::kFilterBox);
+
+ RTC_DCHECK_EQ(res, 0);
+}
+
+void I420Buffer::CropAndScaleFrom(const I420BufferInterface& src) {
+ const int crop_width =
+ height() > 0 ? std::min(src.width(), width() * src.height() / height())
+ : src.width();
+ const int crop_height =
+ width() > 0 ? std::min(src.height(), height() * src.width() / width())
+ : src.height();
+
+ CropAndScaleFrom(src, (src.width() - crop_width) / 2,
+ (src.height() - crop_height) / 2, crop_width, crop_height);
+}
+
+void I420Buffer::ScaleFrom(const I420BufferInterface& src) {
+ CropAndScaleFrom(src, 0, 0, src.width(), src.height());
+}
+
+void I420Buffer::PasteFrom(const I420BufferInterface& picture,
+ int offset_col,
+ int offset_row) {
+ RTC_CHECK_LE(picture.width() + offset_col, width());
+ RTC_CHECK_LE(picture.height() + offset_row, height());
+ RTC_CHECK_GE(offset_col, 0);
+ RTC_CHECK_GE(offset_row, 0);
+
+ // Pasted picture has to be aligned so subsampled UV plane isn't corrupted. 
+ RTC_CHECK(offset_col % 2 == 0); + RTC_CHECK(offset_row % 2 == 0); + RTC_CHECK(picture.width() % 2 == 0 || + picture.width() + offset_col == width()); + RTC_CHECK(picture.height() % 2 == 0 || + picture.height() + offset_row == height()); + + libyuv::CopyPlane(picture.DataY(), picture.StrideY(), + MutableDataY() + StrideY() * offset_row + offset_col, + StrideY(), picture.width(), picture.height()); + + libyuv::CopyPlane( + picture.DataU(), picture.StrideU(), + MutableDataU() + StrideU() * offset_row / 2 + offset_col / 2, StrideU(), + picture.width() / 2, picture.height() / 2); + + libyuv::CopyPlane( + picture.DataV(), picture.StrideV(), + MutableDataV() + StrideV() * offset_row / 2 + offset_col / 2, StrideV(), + picture.width() / 2, picture.height() / 2); +} + +} // namespace webrtc diff --git a/api/video/i420_buffer.h b/api/video/i420_buffer.h new file mode 100644 index 0000000..251eb93 --- /dev/null +++ b/api/video/i420_buffer.h @@ -0,0 +1,124 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_I420_BUFFER_H_ +#define API_VIDEO_I420_BUFFER_H_ + +#include + +#include + +#include "api/scoped_refptr.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_rotation.h" +#include "rtc_base/memory/aligned_malloc.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// Plain I420 buffer in standard memory. 
+class RTC_EXPORT I420Buffer : public I420BufferInterface {
+ public:
+ static rtc::scoped_refptr Create(int width, int height);
+ static rtc::scoped_refptr Create(int width,
+ int height,
+ int stride_y,
+ int stride_u,
+ int stride_v);
+
+ // Create a new buffer and copy the pixel data.
+ static rtc::scoped_refptr Copy(const I420BufferInterface& buffer);
+ // Deprecated.
+ static rtc::scoped_refptr Copy(const VideoFrameBuffer& buffer) {
+ return Copy(*buffer.GetI420());
+ }
+
+ static rtc::scoped_refptr Copy(int width,
+ int height,
+ const uint8_t* data_y,
+ int stride_y,
+ const uint8_t* data_u,
+ int stride_u,
+ const uint8_t* data_v,
+ int stride_v);
+
+ // Returns a rotated copy of |src|.
+ static rtc::scoped_refptr Rotate(const I420BufferInterface& src,
+ VideoRotation rotation);
+ // Deprecated.
+ static rtc::scoped_refptr Rotate(const VideoFrameBuffer& src,
+ VideoRotation rotation) {
+ return Rotate(*src.GetI420(), rotation);
+ }
+
+ // Sets the buffer to all black.
+ static void SetBlack(I420Buffer* buffer);
+
+ // Sets all three planes to all zeros. Used to work around
+ // quirks in memory checkers
+ // (https://bugs.chromium.org/p/libyuv/issues/detail?id=377) and
+ // ffmpeg (http://crbug.com/390941).
+ // TODO(nisse): Deprecated. Should be deleted if/when those issues
+ // are resolved in a better way. Or in the meantime, use SetBlack.
+ void InitializeData();
+
+ int width() const override;
+ int height() const override;
+ const uint8_t* DataY() const override;
+ const uint8_t* DataU() const override;
+ const uint8_t* DataV() const override;
+
+ int StrideY() const override;
+ int StrideU() const override;
+ int StrideV() const override;
+
+ uint8_t* MutableDataY();
+ uint8_t* MutableDataU();
+ uint8_t* MutableDataV();
+
+ // Scale the cropped area of |src| to the size of |this| buffer, and
+ // write the result into |this|. 
+ void CropAndScaleFrom(const I420BufferInterface& src, + int offset_x, + int offset_y, + int crop_width, + int crop_height); + + // The common case of a center crop, when needed to adjust the + // aspect ratio without distorting the image. + void CropAndScaleFrom(const I420BufferInterface& src); + + // Scale all of |src| to the size of |this| buffer, with no cropping. + void ScaleFrom(const I420BufferInterface& src); + + // Pastes whole picture to canvas at (offset_row, offset_col). + // Offsets and picture dimensions must be even. + void PasteFrom(const I420BufferInterface& picture, + int offset_col, + int offset_row); + + protected: + I420Buffer(int width, int height); + I420Buffer(int width, int height, int stride_y, int stride_u, int stride_v); + + ~I420Buffer() override; + + private: + const int width_; + const int height_; + const int stride_y_; + const int stride_u_; + const int stride_v_; + const std::unique_ptr data_; +}; + +} // namespace webrtc + +#endif // API_VIDEO_I420_BUFFER_H_ diff --git a/api/video/recordable_encoded_frame.h b/api/video/recordable_encoded_frame.h new file mode 100644 index 0000000..db59964 --- /dev/null +++ b/api/video/recordable_encoded_frame.h @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_VIDEO_RECORDABLE_ENCODED_FRAME_H_ +#define API_VIDEO_RECORDABLE_ENCODED_FRAME_H_ + +#include "api/array_view.h" +#include "api/scoped_refptr.h" +#include "api/units/timestamp.h" +#include "api/video/color_space.h" +#include "api/video/encoded_image.h" +#include "api/video/video_codec_type.h" +#include "rtc_base/ref_count.h" + +namespace webrtc { + +// Interface for accessing recordable elements of an encoded frame. +class RecordableEncodedFrame { + public: + // Encoded resolution in pixels + struct EncodedResolution { + unsigned width; + unsigned height; + }; + + virtual ~RecordableEncodedFrame() = default; + + // Provides access to encoded data + virtual rtc::scoped_refptr encoded_buffer() + const = 0; + + // Optionally returns the colorspace of the encoded frame. This can differ + // from the eventually decoded frame's colorspace. + virtual absl::optional color_space() const = 0; + + // Returns the codec of the encoded frame + virtual VideoCodecType codec() const = 0; + + // Returns whether the encoded frame is a key frame + virtual bool is_key_frame() const = 0; + + // Returns the frame's encoded resolution. May be 0x0 if the frame + // doesn't contain resolution information + virtual EncodedResolution resolution() const = 0; + + // Returns the computed render time + virtual Timestamp render_time() const = 0; +}; + +} // namespace webrtc + +#endif // API_VIDEO_RECORDABLE_ENCODED_FRAME_H_ diff --git a/api/video/test/BUILD.gn b/api/video/test/BUILD.gn new file mode 100644 index 0000000..5633371 --- /dev/null +++ b/api/video/test/BUILD.gn @@ -0,0 +1,37 @@ +# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. 
All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("../../../webrtc.gni") + +rtc_library("rtc_api_video_unittests") { + testonly = true + sources = [ + "color_space_unittest.cc", + "video_adaptation_counters_unittest.cc", + "video_bitrate_allocation_unittest.cc", + ] + deps = [ + "..:video_adaptation", + "..:video_bitrate_allocation", + "..:video_frame", + "..:video_rtp_headers", + "../../../test:test_support", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_source_set("mock_recordable_encoded_frame") { + testonly = true + visibility = [ "*" ] + sources = [ "mock_recordable_encoded_frame.h" ] + + deps = [ + "..:recordable_encoded_frame", + "../../../test:test_support", + ] +} diff --git a/api/video/test/color_space_unittest.cc b/api/video/test/color_space_unittest.cc new file mode 100644 index 0000000..1d8b3a8 --- /dev/null +++ b/api/video/test/color_space_unittest.cc @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/video/color_space.h" + +#include + +#include "test/gtest.h" + +namespace webrtc { +TEST(ColorSpace, TestSettingPrimariesFromUint8) { + ColorSpace color_space; + EXPECT_TRUE(color_space.set_primaries_from_uint8( + static_cast(ColorSpace::PrimaryID::kBT470BG))); + EXPECT_EQ(ColorSpace::PrimaryID::kBT470BG, color_space.primaries()); + EXPECT_FALSE(color_space.set_primaries_from_uint8(3)); + EXPECT_FALSE(color_space.set_primaries_from_uint8(23)); + EXPECT_FALSE(color_space.set_primaries_from_uint8(64)); +} + +TEST(ColorSpace, TestSettingTransferFromUint8) { + ColorSpace color_space; + EXPECT_TRUE(color_space.set_transfer_from_uint8( + static_cast(ColorSpace::TransferID::kBT2020_10))); + EXPECT_EQ(ColorSpace::TransferID::kBT2020_10, color_space.transfer()); + EXPECT_FALSE(color_space.set_transfer_from_uint8(3)); + EXPECT_FALSE(color_space.set_transfer_from_uint8(19)); + EXPECT_FALSE(color_space.set_transfer_from_uint8(128)); +} + +TEST(ColorSpace, TestSettingMatrixFromUint8) { + ColorSpace color_space; + EXPECT_TRUE(color_space.set_matrix_from_uint8( + static_cast(ColorSpace::MatrixID::kCDNCLS))); + EXPECT_EQ(ColorSpace::MatrixID::kCDNCLS, color_space.matrix()); + EXPECT_FALSE(color_space.set_matrix_from_uint8(3)); + EXPECT_FALSE(color_space.set_matrix_from_uint8(15)); + EXPECT_FALSE(color_space.set_matrix_from_uint8(255)); +} + +TEST(ColorSpace, TestSettingRangeFromUint8) { + ColorSpace color_space; + EXPECT_TRUE(color_space.set_range_from_uint8( + static_cast(ColorSpace::RangeID::kFull))); + EXPECT_EQ(ColorSpace::RangeID::kFull, color_space.range()); + EXPECT_FALSE(color_space.set_range_from_uint8(4)); +} + +TEST(ColorSpace, TestSettingChromaSitingHorizontalFromUint8) { + ColorSpace color_space; + EXPECT_TRUE(color_space.set_chroma_siting_horizontal_from_uint8( + static_cast(ColorSpace::ChromaSiting::kCollocated))); + EXPECT_EQ(ColorSpace::ChromaSiting::kCollocated, + color_space.chroma_siting_horizontal()); + 
EXPECT_FALSE(color_space.set_chroma_siting_horizontal_from_uint8(3)); +} + +TEST(ColorSpace, TestSettingChromaSitingVerticalFromUint8) { + ColorSpace color_space; + EXPECT_TRUE(color_space.set_chroma_siting_vertical_from_uint8( + static_cast(ColorSpace::ChromaSiting::kHalf))); + EXPECT_EQ(ColorSpace::ChromaSiting::kHalf, + color_space.chroma_siting_vertical()); + EXPECT_FALSE(color_space.set_chroma_siting_vertical_from_uint8(3)); +} + +} // namespace webrtc diff --git a/api/video/test/mock_recordable_encoded_frame.h b/api/video/test/mock_recordable_encoded_frame.h new file mode 100644 index 0000000..2178932 --- /dev/null +++ b/api/video/test/mock_recordable_encoded_frame.h @@ -0,0 +1,34 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_VIDEO_TEST_MOCK_RECORDABLE_ENCODED_FRAME_H_ +#define API_VIDEO_TEST_MOCK_RECORDABLE_ENCODED_FRAME_H_ + +#include "api/video/recordable_encoded_frame.h" +#include "test/gmock.h" + +namespace webrtc { +class MockRecordableEncodedFrame : public RecordableEncodedFrame { + public: + MOCK_METHOD(rtc::scoped_refptr, + encoded_buffer, + (), + (const, override)); + MOCK_METHOD(absl::optional, + color_space, + (), + (const, override)); + MOCK_METHOD(VideoCodecType, codec, (), (const, override)); + MOCK_METHOD(bool, is_key_frame, (), (const, override)); + MOCK_METHOD(EncodedResolution, resolution, (), (const, override)); + MOCK_METHOD(Timestamp, render_time, (), (const, override)); +}; +} // namespace webrtc +#endif // API_VIDEO_TEST_MOCK_RECORDABLE_ENCODED_FRAME_H_ diff --git a/api/video/test/video_adaptation_counters_unittest.cc b/api/video/test/video_adaptation_counters_unittest.cc new file mode 100644 index 0000000..a7d0bda --- /dev/null +++ b/api/video/test/video_adaptation_counters_unittest.cc @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/video/video_adaptation_counters.h" + +#include "test/gtest.h" + +namespace webrtc { + +TEST(AdaptationCountersTest, Addition) { + VideoAdaptationCounters a{0, 0}; + VideoAdaptationCounters b{1, 2}; + VideoAdaptationCounters total = a + b; + EXPECT_EQ(1, total.resolution_adaptations); + EXPECT_EQ(2, total.fps_adaptations); +} + +TEST(AdaptationCountersTest, Equality) { + VideoAdaptationCounters a{1, 2}; + VideoAdaptationCounters b{2, 1}; + EXPECT_EQ(a, a); + EXPECT_NE(a, b); +} + +} // namespace webrtc diff --git a/api/video/test/video_bitrate_allocation_unittest.cc b/api/video/test/video_bitrate_allocation_unittest.cc new file mode 100644 index 0000000..8e66d4b --- /dev/null +++ b/api/video/test/video_bitrate_allocation_unittest.cc @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/video/video_bitrate_allocation.h" + +#include + +#include "absl/types/optional.h" +#include "test/gtest.h" + +namespace webrtc { +TEST(VideoBitrateAllocation, SimulcastTargetBitrate) { + VideoBitrateAllocation bitrate; + bitrate.SetBitrate(0, 0, 10000); + bitrate.SetBitrate(0, 1, 20000); + bitrate.SetBitrate(1, 0, 40000); + bitrate.SetBitrate(1, 1, 80000); + + VideoBitrateAllocation layer0_bitrate; + layer0_bitrate.SetBitrate(0, 0, 10000); + layer0_bitrate.SetBitrate(0, 1, 20000); + + VideoBitrateAllocation layer1_bitrate; + layer1_bitrate.SetBitrate(0, 0, 40000); + layer1_bitrate.SetBitrate(0, 1, 80000); + + std::vector> layer_allocations = + bitrate.GetSimulcastAllocations(); + + EXPECT_EQ(layer0_bitrate, layer_allocations[0]); + EXPECT_EQ(layer1_bitrate, layer_allocations[1]); +} + +TEST(VideoBitrateAllocation, SimulcastTargetBitrateWithInactiveStream) { + // Create bitrate allocation with bitrate only for the first and third stream. + VideoBitrateAllocation bitrate; + bitrate.SetBitrate(0, 0, 10000); + bitrate.SetBitrate(0, 1, 20000); + bitrate.SetBitrate(2, 0, 40000); + bitrate.SetBitrate(2, 1, 80000); + + VideoBitrateAllocation layer0_bitrate; + layer0_bitrate.SetBitrate(0, 0, 10000); + layer0_bitrate.SetBitrate(0, 1, 20000); + + VideoBitrateAllocation layer2_bitrate; + layer2_bitrate.SetBitrate(0, 0, 40000); + layer2_bitrate.SetBitrate(0, 1, 80000); + + std::vector> layer_allocations = + bitrate.GetSimulcastAllocations(); + + EXPECT_EQ(layer0_bitrate, layer_allocations[0]); + EXPECT_FALSE(layer_allocations[1]); + EXPECT_EQ(layer2_bitrate, layer_allocations[2]); +} +} // namespace webrtc diff --git a/api/video/video_adaptation_counters.cc b/api/video/video_adaptation_counters.cc new file mode 100644 index 0000000..df1769d --- /dev/null +++ b/api/video/video_adaptation_counters.cc @@ -0,0 +1,42 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/video/video_adaptation_counters.h" + +#include "rtc_base/strings/string_builder.h" + +namespace webrtc { + +bool VideoAdaptationCounters::operator==( + const VideoAdaptationCounters& rhs) const { + return fps_adaptations == rhs.fps_adaptations && + resolution_adaptations == rhs.resolution_adaptations; +} + +bool VideoAdaptationCounters::operator!=( + const VideoAdaptationCounters& rhs) const { + return !(rhs == *this); +} + +VideoAdaptationCounters VideoAdaptationCounters::operator+( + const VideoAdaptationCounters& other) const { + return VideoAdaptationCounters( + resolution_adaptations + other.resolution_adaptations, + fps_adaptations + other.fps_adaptations); +} + +std::string VideoAdaptationCounters::ToString() const { + rtc::StringBuilder ss; + ss << "{ res=" << resolution_adaptations << " fps=" << fps_adaptations + << " }"; + return ss.Release(); +} + +} // namespace webrtc diff --git a/api/video/video_adaptation_counters.h b/api/video/video_adaptation_counters.h new file mode 100644 index 0000000..2dea902 --- /dev/null +++ b/api/video/video_adaptation_counters.h @@ -0,0 +1,46 @@ +/* + * Copyright 2020 The WebRTC Project Authors. All rights reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#ifndef API_VIDEO_VIDEO_ADAPTATION_COUNTERS_H_
+#define API_VIDEO_VIDEO_ADAPTATION_COUNTERS_H_
+
+#include 
+
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+// Counts the number of adaptations that have occurred due to resource overuse.
+// Today we can adapt resolution and fps.
+struct VideoAdaptationCounters {
+ VideoAdaptationCounters() : resolution_adaptations(0), fps_adaptations(0) {}
+ VideoAdaptationCounters(int resolution_adaptations, int fps_adaptations)
+ : resolution_adaptations(resolution_adaptations),
+ fps_adaptations(fps_adaptations) {
+ RTC_DCHECK_GE(resolution_adaptations, 0);
+ RTC_DCHECK_GE(fps_adaptations, 0);
+ }
+
+ int Total() const { return fps_adaptations + resolution_adaptations; }
+
+ bool operator==(const VideoAdaptationCounters& rhs) const;
+ bool operator!=(const VideoAdaptationCounters& rhs) const;
+
+ VideoAdaptationCounters operator+(const VideoAdaptationCounters& other) const;
+
+ std::string ToString() const;
+
+ int resolution_adaptations;
+ int fps_adaptations;
+};
+
+} // namespace webrtc
+
+#endif // API_VIDEO_VIDEO_ADAPTATION_COUNTERS_H_ diff --git a/api/video/video_adaptation_reason.h b/api/video/video_adaptation_reason.h new file mode 100644 index 0000000..3b7fc36 --- /dev/null +++ b/api/video/video_adaptation_reason.h @@ -0,0 +1,20 @@ +/*
+ * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_VIDEO_VIDEO_ADAPTATION_REASON_H_ +#define API_VIDEO_VIDEO_ADAPTATION_REASON_H_ + +namespace webrtc { + +enum class VideoAdaptationReason { kQuality, kCpu }; + +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_ADAPTATION_REASON_H_ diff --git a/api/video/video_bitrate_allocation.cc b/api/video/video_bitrate_allocation.cc new file mode 100644 index 0000000..e189db1 --- /dev/null +++ b/api/video/video_bitrate_allocation.cc @@ -0,0 +1,185 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/video/video_bitrate_allocation.h" + +#include + +#include "rtc_base/checks.h" +#include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/strings/string_builder.h" + +namespace webrtc { + +VideoBitrateAllocation::VideoBitrateAllocation() + : sum_(0), is_bw_limited_(false) {} + +bool VideoBitrateAllocation::SetBitrate(size_t spatial_index, + size_t temporal_index, + uint32_t bitrate_bps) { + RTC_CHECK_LT(spatial_index, kMaxSpatialLayers); + RTC_CHECK_LT(temporal_index, kMaxTemporalStreams); + int64_t new_bitrate_sum_bps = sum_; + absl::optional& layer_bitrate = + bitrates_[spatial_index][temporal_index]; + if (layer_bitrate) { + RTC_DCHECK_LE(*layer_bitrate, sum_); + new_bitrate_sum_bps -= *layer_bitrate; + } + new_bitrate_sum_bps += bitrate_bps; + if (new_bitrate_sum_bps > kMaxBitrateBps) + return false; + + layer_bitrate = bitrate_bps; + sum_ = rtc::dchecked_cast(new_bitrate_sum_bps); + return true; +} + +bool VideoBitrateAllocation::HasBitrate(size_t spatial_index, + size_t temporal_index) const { + RTC_CHECK_LT(spatial_index, kMaxSpatialLayers); + RTC_CHECK_LT(temporal_index, 
kMaxTemporalStreams); + return bitrates_[spatial_index][temporal_index].has_value(); +} + +uint32_t VideoBitrateAllocation::GetBitrate(size_t spatial_index, + size_t temporal_index) const { + RTC_CHECK_LT(spatial_index, kMaxSpatialLayers); + RTC_CHECK_LT(temporal_index, kMaxTemporalStreams); + return bitrates_[spatial_index][temporal_index].value_or(0); +} + +// Whether the specific spatial layers has the bitrate set in any of its +// temporal layers. +bool VideoBitrateAllocation::IsSpatialLayerUsed(size_t spatial_index) const { + RTC_CHECK_LT(spatial_index, kMaxSpatialLayers); + for (size_t i = 0; i < kMaxTemporalStreams; ++i) { + if (bitrates_[spatial_index][i].has_value()) + return true; + } + return false; +} + +// Get the sum of all the temporal layer for a specific spatial layer. +uint32_t VideoBitrateAllocation::GetSpatialLayerSum( + size_t spatial_index) const { + RTC_CHECK_LT(spatial_index, kMaxSpatialLayers); + return GetTemporalLayerSum(spatial_index, kMaxTemporalStreams - 1); +} + +uint32_t VideoBitrateAllocation::GetTemporalLayerSum( + size_t spatial_index, + size_t temporal_index) const { + RTC_CHECK_LT(spatial_index, kMaxSpatialLayers); + RTC_CHECK_LT(temporal_index, kMaxTemporalStreams); + uint32_t sum = 0; + for (size_t i = 0; i <= temporal_index; ++i) { + sum += bitrates_[spatial_index][i].value_or(0); + } + return sum; +} + +std::vector VideoBitrateAllocation::GetTemporalLayerAllocation( + size_t spatial_index) const { + RTC_CHECK_LT(spatial_index, kMaxSpatialLayers); + std::vector temporal_rates; + + // Find the highest temporal layer with a defined bitrate in order to + // determine the size of the temporal layer allocation. 
+ for (size_t i = kMaxTemporalStreams; i > 0; --i) { + if (bitrates_[spatial_index][i - 1].has_value()) { + temporal_rates.resize(i); + break; + } + } + + for (size_t i = 0; i < temporal_rates.size(); ++i) { + temporal_rates[i] = bitrates_[spatial_index][i].value_or(0); + } + + return temporal_rates; +} + +std::vector> +VideoBitrateAllocation::GetSimulcastAllocations() const { + std::vector> bitrates; + for (size_t si = 0; si < kMaxSpatialLayers; ++si) { + absl::optional layer_bitrate; + if (IsSpatialLayerUsed(si)) { + layer_bitrate = VideoBitrateAllocation(); + for (int tl = 0; tl < kMaxTemporalStreams; ++tl) { + if (HasBitrate(si, tl)) + layer_bitrate->SetBitrate(0, tl, GetBitrate(si, tl)); + } + } + bitrates.push_back(layer_bitrate); + } + return bitrates; +} + +bool VideoBitrateAllocation::operator==( + const VideoBitrateAllocation& other) const { + for (size_t si = 0; si < kMaxSpatialLayers; ++si) { + for (size_t ti = 0; ti < kMaxTemporalStreams; ++ti) { + if (bitrates_[si][ti] != other.bitrates_[si][ti]) + return false; + } + } + return true; +} + +std::string VideoBitrateAllocation::ToString() const { + if (sum_ == 0) + return "VideoBitrateAllocation [ [] ]"; + + // Max string length in practice is 260, but let's have some overhead and + // round up to nearest power of two. 
+ char string_buf[512]; + rtc::SimpleStringBuilder ssb(string_buf); + + ssb << "VideoBitrateAllocation ["; + uint32_t spatial_cumulator = 0; + for (size_t si = 0; si < kMaxSpatialLayers; ++si) { + RTC_DCHECK_LE(spatial_cumulator, sum_); + if (spatial_cumulator == sum_) + break; + + const uint32_t layer_sum = GetSpatialLayerSum(si); + if (layer_sum == sum_ && si == 0) { + ssb << " ["; + } else { + if (si > 0) + ssb << ","; + ssb << '\n' << " ["; + } + spatial_cumulator += layer_sum; + + uint32_t temporal_cumulator = 0; + for (size_t ti = 0; ti < kMaxTemporalStreams; ++ti) { + RTC_DCHECK_LE(temporal_cumulator, layer_sum); + if (temporal_cumulator == layer_sum) + break; + + if (ti > 0) + ssb << ", "; + + uint32_t bitrate = bitrates_[si][ti].value_or(0); + ssb << bitrate; + temporal_cumulator += bitrate; + } + ssb << "]"; + } + + RTC_DCHECK_EQ(spatial_cumulator, sum_); + ssb << " ]"; + return ssb.str(); +} + +} // namespace webrtc diff --git a/api/video/video_bitrate_allocation.h b/api/video/video_bitrate_allocation.h new file mode 100644 index 0000000..56c0f64 --- /dev/null +++ b/api/video/video_bitrate_allocation.h @@ -0,0 +1,96 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_BITRATE_ALLOCATION_H_ +#define API_VIDEO_VIDEO_BITRATE_ALLOCATION_H_ + +#include +#include + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/video/video_codec_constants.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// Class that describes how video bitrate, in bps, is allocated across temporal +// and spatial layers. 
Note that bitrates are NOT cumulative. Depending on if
+// layers are dependent or not, it is up to the user to aggregate.
+// For each index, the bitrate can also be both set and unset. This is used with a
+// set bps = 0 to signal an explicit "turn off" signal.
+class RTC_EXPORT VideoBitrateAllocation {
+ public:
+ static constexpr uint32_t kMaxBitrateBps =
+ std::numeric_limits::max();
+ VideoBitrateAllocation();
+
+ bool SetBitrate(size_t spatial_index,
+ size_t temporal_index,
+ uint32_t bitrate_bps);
+
+ bool HasBitrate(size_t spatial_index, size_t temporal_index) const;
+
+ uint32_t GetBitrate(size_t spatial_index, size_t temporal_index) const;
+
+ // Whether the specific spatial layer has the bitrate set in any of its
+ // temporal layers.
+ bool IsSpatialLayerUsed(size_t spatial_index) const;
+
+ // Get the sum of all the temporal layers for a specific spatial layer.
+ uint32_t GetSpatialLayerSum(size_t spatial_index) const;
+
+ // Sum of bitrates of temporal layers, from layer 0 to |temporal_index|
+ // inclusive, of specified spatial layer |spatial_index|. Bitrates of lower
+ // spatial layers are not included.
+ uint32_t GetTemporalLayerSum(size_t spatial_index,
+ size_t temporal_index) const;
+
+ // Returns a vector of the temporal layer bitrates for the specific spatial
+ // layer. Length of the returned vector is cropped to the highest temporal
+ // layer with a defined bitrate.
+ std::vector GetTemporalLayerAllocation(size_t spatial_index) const;
+
+ // Returns one VideoBitrateAllocation for each spatial layer. This is used to
+ // configure simulcast streams. Note that the length of the returned vector is
+ // always kMaxSpatialLayers, the optional is unset for unused layers.
+ std::vector> GetSimulcastAllocations()
+ const;
+
+ uint32_t get_sum_bps() const { return sum_; } // Sum of all bitrates.
+ uint32_t get_sum_kbps() const {
+ // Round down to not exceed the allocated bitrate. 
+ return sum_ / 1000; + } + + bool operator==(const VideoBitrateAllocation& other) const; + inline bool operator!=(const VideoBitrateAllocation& other) const { + return !(*this == other); + } + + std::string ToString() const; + + // Indicates if the allocation has some layers/streams disabled due to + // low available bandwidth. + void set_bw_limited(bool limited) { is_bw_limited_ = limited; } + bool is_bw_limited() const { return is_bw_limited_; } + + private: + uint32_t sum_; + absl::optional bitrates_[kMaxSpatialLayers][kMaxTemporalStreams]; + bool is_bw_limited_; +}; + +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_BITRATE_ALLOCATION_H_ diff --git a/api/video/video_bitrate_allocator.cc b/api/video/video_bitrate_allocator.cc new file mode 100644 index 0000000..f4e843b --- /dev/null +++ b/api/video/video_bitrate_allocator.cc @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
 */
+
+#include "api/video/video_bitrate_allocator.h"
+
+namespace webrtc {
+
+// Legacy bps/fps constructor. When no separate stable bitrate is provided,
+// the stable bitrate defaults to the total bitrate.
+VideoBitrateAllocationParameters::VideoBitrateAllocationParameters(
+    uint32_t total_bitrate_bps,
+    uint32_t framerate)
+    : total_bitrate(DataRate::BitsPerSec(total_bitrate_bps)),
+      stable_bitrate(DataRate::BitsPerSec(total_bitrate_bps)),
+      framerate(static_cast(framerate)) {}
+
+// As above: stable bitrate defaults to the total bitrate.
+VideoBitrateAllocationParameters::VideoBitrateAllocationParameters(
+    DataRate total_bitrate,
+    double framerate)
+    : total_bitrate(total_bitrate),
+      stable_bitrate(total_bitrate),
+      framerate(framerate) {}
+
+VideoBitrateAllocationParameters::VideoBitrateAllocationParameters(
+    DataRate total_bitrate,
+    DataRate stable_bitrate,
+    double framerate)
+    : total_bitrate(total_bitrate),
+      stable_bitrate(stable_bitrate),
+      framerate(framerate) {}
+
+VideoBitrateAllocationParameters::~VideoBitrateAllocationParameters() = default;
+
+// Legacy entry point: forwards to Allocate(), using the total bitrate as the
+// stable bitrate.
+// NOTE: the two default implementations below call each other, so a concrete
+// subclass must override at least one of GetAllocation()/Allocate(); otherwise
+// the pair recurses indefinitely.
+VideoBitrateAllocation VideoBitrateAllocator::GetAllocation(
+    uint32_t total_bitrate_bps,
+    uint32_t framerate) {
+  return Allocate({DataRate::BitsPerSec(total_bitrate_bps),
+                   DataRate::BitsPerSec(total_bitrate_bps),
+                   static_cast(framerate)});
+}
+
+// Default implementation forwards back to the legacy GetAllocation(),
+// dropping the stable-bitrate distinction. See the NOTE above about
+// overriding at least one of the two.
+VideoBitrateAllocation VideoBitrateAllocator::Allocate(
+    VideoBitrateAllocationParameters parameters) {
+  return GetAllocation(parameters.total_bitrate.bps(), parameters.framerate);
+}
+
+// Intentionally a no-op by default; only legacy conference screenshare
+// allocators need to react to this.
+void VideoBitrateAllocator::SetLegacyConferenceMode(bool enabled) {}
+
+}  // namespace webrtc
diff --git a/api/video/video_bitrate_allocator.h b/api/video/video_bitrate_allocator.h
new file mode 100644
index 0000000..fdc86db
--- /dev/null
+++ b/api/video/video_bitrate_allocator.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS.
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_BITRATE_ALLOCATOR_H_ +#define API_VIDEO_VIDEO_BITRATE_ALLOCATOR_H_ + +#include "api/units/data_rate.h" +#include "api/video/video_bitrate_allocation.h" + +namespace webrtc { + +struct VideoBitrateAllocationParameters { + VideoBitrateAllocationParameters(uint32_t total_bitrate_bps, + uint32_t framerate); + VideoBitrateAllocationParameters(DataRate total_bitrate, double framerate); + VideoBitrateAllocationParameters(DataRate total_bitrate, + DataRate stable_bitrate, + double framerate); + ~VideoBitrateAllocationParameters(); + + DataRate total_bitrate; + DataRate stable_bitrate; + double framerate; +}; + +class VideoBitrateAllocator { + public: + VideoBitrateAllocator() {} + virtual ~VideoBitrateAllocator() {} + + virtual VideoBitrateAllocation GetAllocation(uint32_t total_bitrate_bps, + uint32_t framerate); + + virtual VideoBitrateAllocation Allocate( + VideoBitrateAllocationParameters parameters); + + // Deprecated: Only used to work around issues with the legacy conference + // screenshare mode and shouldn't be needed by any subclasses. + virtual void SetLegacyConferenceMode(bool enabled); +}; + +class VideoBitrateAllocationObserver { + public: + VideoBitrateAllocationObserver() {} + virtual ~VideoBitrateAllocationObserver() {} + + virtual void OnBitrateAllocationUpdated( + const VideoBitrateAllocation& allocation) = 0; +}; + +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_BITRATE_ALLOCATOR_H_ diff --git a/api/video/video_bitrate_allocator_factory.h b/api/video/video_bitrate_allocator_factory.h new file mode 100644 index 0000000..cb34ebb --- /dev/null +++ b/api/video/video_bitrate_allocator_factory.h @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_BITRATE_ALLOCATOR_FACTORY_H_ +#define API_VIDEO_VIDEO_BITRATE_ALLOCATOR_FACTORY_H_ + +#include + +#include "api/video/video_bitrate_allocator.h" +#include "api/video_codecs/video_codec.h" + +namespace webrtc { + +// A factory that creates VideoBitrateAllocator. +// NOTE: This class is still under development and may change without notice. +class VideoBitrateAllocatorFactory { + public: + virtual ~VideoBitrateAllocatorFactory() = default; + // Creates a VideoBitrateAllocator for a specific video codec. + virtual std::unique_ptr CreateVideoBitrateAllocator( + const VideoCodec& codec) = 0; +}; + +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_BITRATE_ALLOCATOR_FACTORY_H_ diff --git a/api/video/video_codec_constants.h b/api/video/video_codec_constants.h new file mode 100644 index 0000000..6b6feee --- /dev/null +++ b/api/video/video_codec_constants.h @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_VIDEO_VIDEO_CODEC_CONSTANTS_H_ +#define API_VIDEO_VIDEO_CODEC_CONSTANTS_H_ + +namespace webrtc { + +enum : int { kMaxEncoderBuffers = 8 }; +enum : int { kMaxSimulcastStreams = 3 }; +enum : int { kMaxSpatialLayers = 5 }; +enum : int { kMaxTemporalStreams = 4 }; + +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_CODEC_CONSTANTS_H_ diff --git a/api/video/video_codec_type.h b/api/video/video_codec_type.h new file mode 100644 index 0000000..04013e3 --- /dev/null +++ b/api/video/video_codec_type.h @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_CODEC_TYPE_H_ +#define API_VIDEO_VIDEO_CODEC_TYPE_H_ + +namespace webrtc { + +// GENERATED_JAVA_ENUM_PACKAGE: org.webrtc +#ifndef DISABLE_H265 +enum VideoCodecType { + // Java_cpp_enum.py does not allow ifdef in enum class, + // so we have to create two version of VideoCodecType here + kVideoCodecGeneric = 0, + kVideoCodecVP8, + kVideoCodecVP9, + kVideoCodecAV1, + kVideoCodecH264, + kVideoCodecH265, + kVideoCodecMultiplex, +}; +#else +enum VideoCodecType { + // There are various memset(..., 0, ...) calls in the code that rely on + // kVideoCodecGeneric being zero. + kVideoCodecGeneric = 0, + kVideoCodecVP8, + kVideoCodecVP9, + kVideoCodecAV1, + kVideoCodecH264, + kVideoCodecMultiplex, +}; +#endif + +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_CODEC_TYPE_H_ diff --git a/api/video/video_content_type.cc b/api/video/video_content_type.cc new file mode 100644 index 0000000..9ba3ece --- /dev/null +++ b/api/video/video_content_type.cc @@ -0,0 +1,93 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. 
All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/video/video_content_type.h" + +// VideoContentType stored as a single byte, which is sent over the network. +// Structure: +// +// 0 1 2 3 4 5 6 7 +// +---------------+ +// |r r e e e s s c| +// +// where: +// r - reserved bits. +// e - 3-bit number of an experiment group counted from 1. 0 means there's no +// experiment ongoing. +// s - 2-bit simulcast stream id or spatial layer, counted from 1. 0 means that +// no simulcast information is set. +// c - content type. 0 means real-time video, 1 means screenshare. +// + +namespace webrtc { +namespace videocontenttypehelpers { + +namespace { +static constexpr uint8_t kScreenshareBitsSize = 1; +static constexpr uint8_t kScreenshareBitsMask = + (1u << kScreenshareBitsSize) - 1; + +static constexpr uint8_t kSimulcastShift = 1; +static constexpr uint8_t kSimulcastBitsSize = 2; +static constexpr uint8_t kSimulcastBitsMask = ((1u << kSimulcastBitsSize) - 1) + << kSimulcastShift; // 0b00000110 + +static constexpr uint8_t kExperimentShift = 3; +static constexpr uint8_t kExperimentBitsSize = 3; +static constexpr uint8_t kExperimentBitsMask = + ((1u << kExperimentBitsSize) - 1) << kExperimentShift; // 0b00111000 + +static constexpr uint8_t kTotalBitsSize = + kScreenshareBitsSize + kSimulcastBitsSize + kExperimentBitsSize; +} // namespace + +bool SetExperimentId(VideoContentType* content_type, uint8_t experiment_id) { + // Store in bits 2-4. 
+ if (experiment_id >= (1 << kExperimentBitsSize)) + return false; + *content_type = static_cast( + (static_cast(*content_type) & ~kExperimentBitsMask) | + ((experiment_id << kExperimentShift) & kExperimentBitsMask)); + return true; +} + +bool SetSimulcastId(VideoContentType* content_type, uint8_t simulcast_id) { + // Store in bits 5-6. + if (simulcast_id >= (1 << kSimulcastBitsSize)) + return false; + *content_type = static_cast( + (static_cast(*content_type) & ~kSimulcastBitsMask) | + ((simulcast_id << kSimulcastShift) & kSimulcastBitsMask)); + return true; +} + +uint8_t GetExperimentId(const VideoContentType& content_type) { + return (static_cast(content_type) & kExperimentBitsMask) >> + kExperimentShift; +} +uint8_t GetSimulcastId(const VideoContentType& content_type) { + return (static_cast(content_type) & kSimulcastBitsMask) >> + kSimulcastShift; +} + +bool IsScreenshare(const VideoContentType& content_type) { + return (static_cast(content_type) & kScreenshareBitsMask) > 0; +} + +bool IsValidContentType(uint8_t value) { + // Any 6-bit value is allowed. + return value < (1 << kTotalBitsSize); +} + +const char* ToString(const VideoContentType& content_type) { + return IsScreenshare(content_type) ? "screen" : "realtime"; +} +} // namespace videocontenttypehelpers +} // namespace webrtc diff --git a/api/video/video_content_type.h b/api/video/video_content_type.h new file mode 100644 index 0000000..2d38a62 --- /dev/null +++ b/api/video/video_content_type.h @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_VIDEO_VIDEO_CONTENT_TYPE_H_ +#define API_VIDEO_VIDEO_CONTENT_TYPE_H_ + +#include + +namespace webrtc { + +enum class VideoContentType : uint8_t { + UNSPECIFIED = 0, + SCREENSHARE = 1, +}; + +namespace videocontenttypehelpers { +bool SetExperimentId(VideoContentType* content_type, uint8_t experiment_id); +bool SetSimulcastId(VideoContentType* content_type, uint8_t simulcast_id); + +uint8_t GetExperimentId(const VideoContentType& content_type); +uint8_t GetSimulcastId(const VideoContentType& content_type); + +bool IsScreenshare(const VideoContentType& content_type); + +bool IsValidContentType(uint8_t value); + +const char* ToString(const VideoContentType& content_type); +} // namespace videocontenttypehelpers + +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_CONTENT_TYPE_H_ diff --git a/api/video/video_frame.cc b/api/video/video_frame.cc new file mode 100644 index 0000000..d97e3aa --- /dev/null +++ b/api/video/video_frame.cc @@ -0,0 +1,315 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/video/video_frame.h" + +#include +#include + +#include "rtc_base/checks.h" +#include "rtc_base/time_utils.h" + +namespace webrtc { + +void VideoFrame::UpdateRect::Union(const UpdateRect& other) { + if (other.IsEmpty()) + return; + if (IsEmpty()) { + *this = other; + return; + } + int right = std::max(offset_x + width, other.offset_x + other.width); + int bottom = std::max(offset_y + height, other.offset_y + other.height); + offset_x = std::min(offset_x, other.offset_x); + offset_y = std::min(offset_y, other.offset_y); + width = right - offset_x; + height = bottom - offset_y; + RTC_DCHECK_GT(width, 0); + RTC_DCHECK_GT(height, 0); +} + +void VideoFrame::UpdateRect::Intersect(const UpdateRect& other) { + if (other.IsEmpty() || IsEmpty()) { + MakeEmptyUpdate(); + return; + } + + int right = std::min(offset_x + width, other.offset_x + other.width); + int bottom = std::min(offset_y + height, other.offset_y + other.height); + offset_x = std::max(offset_x, other.offset_x); + offset_y = std::max(offset_y, other.offset_y); + width = right - offset_x; + height = bottom - offset_y; + if (width <= 0 || height <= 0) { + MakeEmptyUpdate(); + } +} + +void VideoFrame::UpdateRect::MakeEmptyUpdate() { + width = height = offset_x = offset_y = 0; +} + +bool VideoFrame::UpdateRect::IsEmpty() const { + return width == 0 && height == 0; +} + +VideoFrame::UpdateRect VideoFrame::UpdateRect::ScaleWithFrame( + int frame_width, + int frame_height, + int crop_x, + int crop_y, + int crop_width, + int crop_height, + int scaled_width, + int scaled_height) const { + RTC_DCHECK_GT(frame_width, 0); + RTC_DCHECK_GT(frame_height, 0); + + RTC_DCHECK_GT(crop_width, 0); + RTC_DCHECK_GT(crop_height, 0); + + RTC_DCHECK_LE(crop_width + crop_x, frame_width); + RTC_DCHECK_LE(crop_height + crop_y, frame_height); + + RTC_DCHECK_GT(scaled_width, 0); + RTC_DCHECK_GT(scaled_height, 0); + + // Check if update rect is out of the cropped area. 
+  // BUG FIX: the vertical bound must be checked against crop_height, not
+  // crop_width. With the old test, for non-square crops an update rect lying
+  // below the cropped area could be kept (or a valid one discarded).
+  if (offset_x + width < crop_x || offset_x > crop_x + crop_width ||
+      offset_y + height < crop_y || offset_y > crop_y + crop_height) {
+    return {0, 0, 0, 0};
+  }
+
+  // Translate into crop-local coordinates, clipping the rect to the crop's
+  // top-left corner (negative offsets shrink the width/height accordingly).
+  int x = offset_x - crop_x;
+  int w = width;
+  if (x < 0) {
+    w += x;
+    x = 0;
+  }
+  int y = offset_y - crop_y;
+  int h = height;
+  if (y < 0) {
+    h += y;
+    y = 0;
+  }
+
+  // Lower corner is rounded down.
+  x = x * scaled_width / crop_width;
+  y = y * scaled_height / crop_height;
+  // Upper corner is rounded up.
+  w = (w * scaled_width + crop_width - 1) / crop_width;
+  h = (h * scaled_height + crop_height - 1) / crop_height;
+
+  // Round to full 2x2 blocks due to possible subsampling in the pixel data.
+  if (x % 2) {
+    --x;
+    ++w;
+  }
+  if (y % 2) {
+    --y;
+    ++h;
+  }
+  if (w % 2) {
+    ++w;
+  }
+  if (h % 2) {
+    ++h;
+  }
+
+  // Expand the update rect by 2 pixels in each direction to include any
+  // possible scaling artifacts.
+  if (scaled_width != crop_width || scaled_height != crop_height) {
+    if (x > 0) {
+      x -= 2;
+      w += 2;
+    }
+    if (y > 0) {
+      y -= 2;
+      h += 2;
+    }
+    w += 2;
+    h += 2;
+  }
+
+  // Ensure update rect is inside frame dimensions.
+ if (x + w > scaled_width) { + w = scaled_width - x; + } + if (y + h > scaled_height) { + h = scaled_height - y; + } + RTC_DCHECK_GE(w, 0); + RTC_DCHECK_GE(h, 0); + if (w == 0 || h == 0) { + w = 0; + h = 0; + x = 0; + y = 0; + } + + return {x, y, w, h}; +} + +VideoFrame::Builder::Builder() = default; + +VideoFrame::Builder::~Builder() = default; + +VideoFrame VideoFrame::Builder::build() { + RTC_CHECK(video_frame_buffer_ != nullptr); + return VideoFrame(id_, video_frame_buffer_, timestamp_us_, timestamp_rtp_, + ntp_time_ms_, rotation_, color_space_, update_rect_, + packet_infos_); +} + +VideoFrame::Builder& VideoFrame::Builder::set_video_frame_buffer( + const rtc::scoped_refptr& buffer) { + video_frame_buffer_ = buffer; + return *this; +} + +VideoFrame::Builder& VideoFrame::Builder::set_timestamp_ms( + int64_t timestamp_ms) { + timestamp_us_ = timestamp_ms * rtc::kNumMicrosecsPerMillisec; + return *this; +} + +VideoFrame::Builder& VideoFrame::Builder::set_timestamp_us( + int64_t timestamp_us) { + timestamp_us_ = timestamp_us; + return *this; +} + +VideoFrame::Builder& VideoFrame::Builder::set_timestamp_rtp( + uint32_t timestamp_rtp) { + timestamp_rtp_ = timestamp_rtp; + return *this; +} + +VideoFrame::Builder& VideoFrame::Builder::set_ntp_time_ms(int64_t ntp_time_ms) { + ntp_time_ms_ = ntp_time_ms; + return *this; +} + +VideoFrame::Builder& VideoFrame::Builder::set_rotation(VideoRotation rotation) { + rotation_ = rotation; + return *this; +} + +VideoFrame::Builder& VideoFrame::Builder::set_color_space( + const absl::optional& color_space) { + color_space_ = color_space; + return *this; +} + +VideoFrame::Builder& VideoFrame::Builder::set_color_space( + const ColorSpace* color_space) { + color_space_ = + color_space ? 
absl::make_optional(*color_space) : absl::nullopt; + return *this; +} + +VideoFrame::Builder& VideoFrame::Builder::set_id(uint16_t id) { + id_ = id; + return *this; +} + +VideoFrame::Builder& VideoFrame::Builder::set_update_rect( + const absl::optional& update_rect) { + update_rect_ = update_rect; + return *this; +} + +VideoFrame::Builder& VideoFrame::Builder::set_packet_infos( + RtpPacketInfos packet_infos) { + packet_infos_ = std::move(packet_infos); + return *this; +} + +VideoFrame::VideoFrame(const rtc::scoped_refptr& buffer, + webrtc::VideoRotation rotation, + int64_t timestamp_us) + : video_frame_buffer_(buffer), + timestamp_rtp_(0), + ntp_time_ms_(0), + timestamp_us_(timestamp_us), + rotation_(rotation) {} + +VideoFrame::VideoFrame(const rtc::scoped_refptr& buffer, + uint32_t timestamp_rtp, + int64_t render_time_ms, + VideoRotation rotation) + : video_frame_buffer_(buffer), + timestamp_rtp_(timestamp_rtp), + ntp_time_ms_(0), + timestamp_us_(render_time_ms * rtc::kNumMicrosecsPerMillisec), + rotation_(rotation) { + RTC_DCHECK(buffer); +} + +VideoFrame::VideoFrame(uint16_t id, + const rtc::scoped_refptr& buffer, + int64_t timestamp_us, + uint32_t timestamp_rtp, + int64_t ntp_time_ms, + VideoRotation rotation, + const absl::optional& color_space, + const absl::optional& update_rect, + RtpPacketInfos packet_infos) + : id_(id), + video_frame_buffer_(buffer), + timestamp_rtp_(timestamp_rtp), + ntp_time_ms_(ntp_time_ms), + timestamp_us_(timestamp_us), + rotation_(rotation), + color_space_(color_space), + update_rect_(update_rect), + packet_infos_(std::move(packet_infos)) { + if (update_rect_) { + RTC_DCHECK_GE(update_rect_->offset_x, 0); + RTC_DCHECK_GE(update_rect_->offset_y, 0); + RTC_DCHECK_LE(update_rect_->offset_x + update_rect_->width, width()); + RTC_DCHECK_LE(update_rect_->offset_y + update_rect_->height, height()); + } +} + +VideoFrame::~VideoFrame() = default; + +VideoFrame::VideoFrame(const VideoFrame&) = default; +VideoFrame::VideoFrame(VideoFrame&&) = 
default; +VideoFrame& VideoFrame::operator=(const VideoFrame&) = default; +VideoFrame& VideoFrame::operator=(VideoFrame&&) = default; + +int VideoFrame::width() const { + return video_frame_buffer_ ? video_frame_buffer_->width() : 0; +} + +int VideoFrame::height() const { + return video_frame_buffer_ ? video_frame_buffer_->height() : 0; +} + +uint32_t VideoFrame::size() const { + return width() * height(); +} + +rtc::scoped_refptr VideoFrame::video_frame_buffer() const { + return video_frame_buffer_; +} + +void VideoFrame::set_video_frame_buffer( + const rtc::scoped_refptr& buffer) { + RTC_CHECK(buffer); + video_frame_buffer_ = buffer; +} + +int64_t VideoFrame::render_time_ms() const { + return timestamp_us() / rtc::kNumMicrosecsPerMillisec; +} + +} // namespace webrtc diff --git a/api/video/video_frame.h b/api/video/video_frame.h new file mode 100644 index 0000000..08c939d --- /dev/null +++ b/api/video/video_frame.h @@ -0,0 +1,277 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_VIDEO_VIDEO_FRAME_H_ +#define API_VIDEO_VIDEO_FRAME_H_ + +#include + +#include + +#include "absl/types/optional.h" +#include "api/rtp_packet_infos.h" +#include "api/scoped_refptr.h" +#include "api/video/color_space.h" +#include "api/video/hdr_metadata.h" +#include "api/video/video_frame_buffer.h" +#include "api/video/video_rotation.h" +#include "rtc_base/checks.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +class RTC_EXPORT VideoFrame { + public: + struct RTC_EXPORT UpdateRect { + int offset_x; + int offset_y; + int width; + int height; + + // Makes this UpdateRect a bounding box of this and other rect. + void Union(const UpdateRect& other); + + // Makes this UpdateRect an intersection of this and other rect. + void Intersect(const UpdateRect& other); + + // Sets everything to 0, making this UpdateRect a zero-size (empty) update. + void MakeEmptyUpdate(); + + bool IsEmpty() const; + + // Per-member equality check. Empty rectangles with different offsets would + // be considered different. + bool operator==(const UpdateRect& other) const { + return other.offset_x == offset_x && other.offset_y == offset_y && + other.width == width && other.height == height; + } + + bool operator!=(const UpdateRect& other) const { return !(*this == other); } + + // Scales update_rect given original frame dimensions. + // Cropping is applied first, then rect is scaled down. + // Update rect is snapped to 2x2 grid due to possible UV subsampling and + // then expanded by additional 2 pixels in each direction to accommodate any + // possible scaling artifacts. + // Note, close but not equal update_rects on original frame may result in + // the same scaled update rects. 
+ UpdateRect ScaleWithFrame(int frame_width, + int frame_height, + int crop_x, + int crop_y, + int crop_width, + int crop_height, + int scaled_width, + int scaled_height) const; + }; + + struct RTC_EXPORT ProcessingTime { + TimeDelta Elapsed() const { return finish - start; } + Timestamp start; + Timestamp finish; + }; + + // Preferred way of building VideoFrame objects. + class RTC_EXPORT Builder { + public: + Builder(); + ~Builder(); + + VideoFrame build(); + Builder& set_video_frame_buffer( + const rtc::scoped_refptr& buffer); + Builder& set_timestamp_ms(int64_t timestamp_ms); + Builder& set_timestamp_us(int64_t timestamp_us); + Builder& set_timestamp_rtp(uint32_t timestamp_rtp); + Builder& set_ntp_time_ms(int64_t ntp_time_ms); + Builder& set_rotation(VideoRotation rotation); + Builder& set_color_space(const absl::optional& color_space); + Builder& set_color_space(const ColorSpace* color_space); + Builder& set_id(uint16_t id); + Builder& set_update_rect(const absl::optional& update_rect); + Builder& set_packet_infos(RtpPacketInfos packet_infos); + + private: + uint16_t id_ = 0; + rtc::scoped_refptr video_frame_buffer_; + int64_t timestamp_us_ = 0; + uint32_t timestamp_rtp_ = 0; + int64_t ntp_time_ms_ = 0; + VideoRotation rotation_ = kVideoRotation_0; + absl::optional color_space_; + absl::optional update_rect_; + RtpPacketInfos packet_infos_; + }; + + // To be deprecated. Migrate all use to Builder. + VideoFrame(const rtc::scoped_refptr& buffer, + webrtc::VideoRotation rotation, + int64_t timestamp_us); + VideoFrame(const rtc::scoped_refptr& buffer, + uint32_t timestamp_rtp, + int64_t render_time_ms, + VideoRotation rotation); + + ~VideoFrame(); + + // Support move and copy. + VideoFrame(const VideoFrame&); + VideoFrame(VideoFrame&&); + VideoFrame& operator=(const VideoFrame&); + VideoFrame& operator=(VideoFrame&&); + + // Get frame width. + int width() const; + // Get frame height. + int height() const; + // Get frame size in pixels. 
+ uint32_t size() const; + + // Get frame ID. Returns 0 if ID is not set. Not guarantee to be transferred + // from the sender to the receiver, but preserved on single side. The id + // should be propagated between all frame modifications during its lifetime + // from capturing to sending as encoded image. It is intended to be unique + // over a time window of a few minutes for peer connection, to which + // corresponding video stream belongs to. + uint16_t id() const { return id_; } + void set_id(uint16_t id) { id_ = id; } + + // System monotonic clock, same timebase as rtc::TimeMicros(). + int64_t timestamp_us() const { return timestamp_us_; } + void set_timestamp_us(int64_t timestamp_us) { timestamp_us_ = timestamp_us; } + + // TODO(nisse): After the cricket::VideoFrame and webrtc::VideoFrame + // merge, timestamps other than timestamp_us will likely be + // deprecated. + + // Set frame timestamp (90kHz). + void set_timestamp(uint32_t timestamp) { timestamp_rtp_ = timestamp; } + + // Get frame timestamp (90kHz). + uint32_t timestamp() const { return timestamp_rtp_; } + + // For now, transport_frame_id and rtp timestamp are the same. + // TODO(nisse): Must be handled differently for QUIC. + uint32_t transport_frame_id() const { return timestamp(); } + + // Set capture ntp time in milliseconds. + void set_ntp_time_ms(int64_t ntp_time_ms) { ntp_time_ms_ = ntp_time_ms; } + + // Get capture ntp time in milliseconds. + int64_t ntp_time_ms() const { return ntp_time_ms_; } + + // Naming convention for Coordination of Video Orientation. Please see + // http://www.etsi.org/deliver/etsi_ts/126100_126199/126114/12.07.00_60/ts_126114v120700p.pdf + // + // "pending rotation" or "pending" = a frame that has a VideoRotation > 0. + // + // "not pending" = a frame that has a VideoRotation == 0. + // + // "apply rotation" = modify a frame from being "pending" to being "not + // pending" rotation (a no-op for "unrotated"). 
+ // + VideoRotation rotation() const { return rotation_; } + void set_rotation(VideoRotation rotation) { rotation_ = rotation; } + + // Get color space when available. + const absl::optional& color_space() const { return color_space_; } + void set_color_space(const absl::optional& color_space) { + color_space_ = color_space; + } + + // Get render time in milliseconds. + // TODO(nisse): Deprecated. Migrate all users to timestamp_us(). + int64_t render_time_ms() const; + + // Return the underlying buffer. Never nullptr for a properly + // initialized VideoFrame. + rtc::scoped_refptr video_frame_buffer() const; + + void set_video_frame_buffer( + const rtc::scoped_refptr& buffer); + + // TODO(nisse): Deprecated. + // Return true if the frame is stored in a texture. + bool is_texture() const { + return video_frame_buffer()->type() == VideoFrameBuffer::Type::kNative; + } + + bool has_update_rect() const { return update_rect_.has_value(); } + + // Returns update_rect set by the builder or set_update_rect() or whole frame + // rect if no update rect is available. + UpdateRect update_rect() const { + return update_rect_.value_or(UpdateRect{0, 0, width(), height()}); + } + + // Rectangle must be within the frame dimensions. + void set_update_rect(const VideoFrame::UpdateRect& update_rect) { + RTC_DCHECK_GE(update_rect.offset_x, 0); + RTC_DCHECK_GE(update_rect.offset_y, 0); + RTC_DCHECK_LE(update_rect.offset_x + update_rect.width, width()); + RTC_DCHECK_LE(update_rect.offset_y + update_rect.height, height()); + update_rect_ = update_rect; + } + + void clear_update_rect() { update_rect_ = absl::nullopt; } + + // Get information about packets used to assemble this video frame. Might be + // empty if the information isn't available. 
+ const RtpPacketInfos& packet_infos() const { return packet_infos_; } + void set_packet_infos(RtpPacketInfos value) { + packet_infos_ = std::move(value); + } + + const absl::optional processing_time() const { + return processing_time_; + } + void set_processing_time(const ProcessingTime& processing_time) { + processing_time_ = processing_time; + } + + private: + VideoFrame(uint16_t id, + const rtc::scoped_refptr& buffer, + int64_t timestamp_us, + uint32_t timestamp_rtp, + int64_t ntp_time_ms, + VideoRotation rotation, + const absl::optional& color_space, + const absl::optional& update_rect, + RtpPacketInfos packet_infos); + + uint16_t id_; + // An opaque reference counted handle that stores the pixel data. + rtc::scoped_refptr video_frame_buffer_; + uint32_t timestamp_rtp_; + int64_t ntp_time_ms_; + int64_t timestamp_us_; + VideoRotation rotation_; + absl::optional color_space_; + // Updated since the last frame area. If present it means that the bounding + // box of all the changes is within the rectangular area and is close to it. + // If absent, it means that there's no information about the change at all and + // update_rect() will return a rectangle corresponding to the entire frame. + absl::optional update_rect_; + // Information about packets used to assemble this video frame. This is needed + // by |SourceTracker| when the frame is delivered to the RTCRtpReceiver's + // MediaStreamTrack, in order to implement getContributingSources(). See: + // https://w3c.github.io/webrtc-pc/#dom-rtcrtpreceiver-getcontributingsources + RtpPacketInfos packet_infos_; + // Processing timestamps of the frame. For received video frames these are the + // timestamps when the frame is sent to the decoder and the decoded image + // returned from the decoder. + // Currently, not set for locally captured video frames. 
+ absl::optional processing_time_; +}; + +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_FRAME_H_ diff --git a/api/video/video_frame_buffer.cc b/api/video/video_frame_buffer.cc new file mode 100644 index 0000000..b9fd9cd --- /dev/null +++ b/api/video/video_frame_buffer.cc @@ -0,0 +1,86 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/video/video_frame_buffer.h" + +#include "rtc_base/checks.h" + +namespace webrtc { + +const I420BufferInterface* VideoFrameBuffer::GetI420() const { + // Overridden by subclasses that can return an I420 buffer without any + // conversion, in particular, I420BufferInterface. + return nullptr; +} + +const I420ABufferInterface* VideoFrameBuffer::GetI420A() const { + RTC_CHECK(type() == Type::kI420A); + return static_cast(this); +} + +const I444BufferInterface* VideoFrameBuffer::GetI444() const { + RTC_CHECK(type() == Type::kI444); + return static_cast(this); +} + +const I010BufferInterface* VideoFrameBuffer::GetI010() const { + RTC_CHECK(type() == Type::kI010); + return static_cast(this); +} + +VideoFrameBuffer::Type I420BufferInterface::type() const { + return Type::kI420; +} + +int I420BufferInterface::ChromaWidth() const { + return (width() + 1) / 2; +} + +int I420BufferInterface::ChromaHeight() const { + return (height() + 1) / 2; +} + +rtc::scoped_refptr I420BufferInterface::ToI420() { + return this; +} + +const I420BufferInterface* I420BufferInterface::GetI420() const { + return this; +} + +VideoFrameBuffer::Type I420ABufferInterface::type() const { + return Type::kI420A; +} + +VideoFrameBuffer::Type I444BufferInterface::type() const { + return 
Type::kI444; +} + +int I444BufferInterface::ChromaWidth() const { + return width(); +} + +int I444BufferInterface::ChromaHeight() const { + return height(); +} + +VideoFrameBuffer::Type I010BufferInterface::type() const { + return Type::kI010; +} + +int I010BufferInterface::ChromaWidth() const { + return (width() + 1) / 2; +} + +int I010BufferInterface::ChromaHeight() const { + return (height() + 1) / 2; +} + +} // namespace webrtc diff --git a/api/video/video_frame_buffer.h b/api/video/video_frame_buffer.h new file mode 100644 index 0000000..d87a423 --- /dev/null +++ b/api/video/video_frame_buffer.h @@ -0,0 +1,180 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_FRAME_BUFFER_H_ +#define API_VIDEO_VIDEO_FRAME_BUFFER_H_ + +#include + +#include "api/scoped_refptr.h" +#include "rtc_base/ref_count.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +class I420BufferInterface; +class I420ABufferInterface; +class I444BufferInterface; +class I010BufferInterface; + +// Base class for frame buffers of different types of pixel format and storage. +// The tag in type() indicates how the data is represented, and each type is +// implemented as a subclass. To access the pixel data, call the appropriate +// GetXXX() function, where XXX represents the type. There is also a function +// ToI420() that returns a frame buffer in I420 format, converting from the +// underlying representation if necessary. I420 is the most widely accepted +// format and serves as a fallback for video sinks that can only handle I420, +// e.g. the internal WebRTC software encoders. 
A special enum value 'kNative' is +// provided for external clients to implement their own frame buffer +// representations, e.g. as textures. The external client can produce such +// native frame buffers from custom video sources, and then cast it back to the +// correct subclass in custom video sinks. The purpose of this is to improve +// performance by providing an optimized path without intermediate conversions. +// Frame metadata such as rotation and timestamp are stored in +// webrtc::VideoFrame, and not here. +class RTC_EXPORT VideoFrameBuffer : public rtc::RefCountInterface { + public: + // New frame buffer types will be added conservatively when there is an + // opportunity to optimize the path between some pair of video source and + // video sink. + enum class Type { + kNative, + kI420, + kI420A, + kI444, + kI010, + }; + + // This function specifies in what pixel format the data is stored in. + virtual Type type() const = 0; + + // The resolution of the frame in pixels. For formats where some planes are + // subsampled, this is the highest-resolution plane. + virtual int width() const = 0; + virtual int height() const = 0; + + // Returns a memory-backed frame buffer in I420 format. If the pixel data is + // in another format, a conversion will take place. All implementations must + // provide a fallback to I420 for compatibility with e.g. the internal WebRTC + // software encoders. + virtual rtc::scoped_refptr ToI420() = 0; + + // GetI420() methods should return I420 buffer if conversion is trivial, i.e + // no change for binary data is needed. Otherwise these methods should return + // nullptr. One example of buffer with that property is + // WebrtcVideoFrameAdapter in Chrome - it's I420 buffer backed by a shared + // memory buffer. Therefore it must have type kNative. Yet, ToI420() + // doesn't affect binary data at all. Another example is any I420A buffer. 
+ virtual const I420BufferInterface* GetI420() const; + + // These functions should only be called if type() is of the correct type. + // Calling with a different type will result in a crash. + const I420ABufferInterface* GetI420A() const; + const I444BufferInterface* GetI444() const; + const I010BufferInterface* GetI010() const; + + protected: + ~VideoFrameBuffer() override {} +}; + +// This interface represents planar formats. +class PlanarYuvBuffer : public VideoFrameBuffer { + public: + virtual int ChromaWidth() const = 0; + virtual int ChromaHeight() const = 0; + + // Returns the number of steps(in terms of Data*() return type) between + // successive rows for a given plane. + virtual int StrideY() const = 0; + virtual int StrideU() const = 0; + virtual int StrideV() const = 0; + + protected: + ~PlanarYuvBuffer() override {} +}; + +// This interface represents 8-bit color depth formats: Type::kI420, +// Type::kI420A and Type::kI444. +class PlanarYuv8Buffer : public PlanarYuvBuffer { + public: + // Returns pointer to the pixel data for a given plane. The memory is owned by + // the VideoFrameBuffer object and must not be freed by the caller. 
+ virtual const uint8_t* DataY() const = 0; + virtual const uint8_t* DataU() const = 0; + virtual const uint8_t* DataV() const = 0; + + protected: + ~PlanarYuv8Buffer() override {} +}; + +class RTC_EXPORT I420BufferInterface : public PlanarYuv8Buffer { + public: + Type type() const override; + + int ChromaWidth() const final; + int ChromaHeight() const final; + + rtc::scoped_refptr ToI420() final; + const I420BufferInterface* GetI420() const final; + + protected: + ~I420BufferInterface() override {} +}; + +class RTC_EXPORT I420ABufferInterface : public I420BufferInterface { + public: + Type type() const final; + virtual const uint8_t* DataA() const = 0; + virtual int StrideA() const = 0; + + protected: + ~I420ABufferInterface() override {} +}; + +class I444BufferInterface : public PlanarYuv8Buffer { + public: + Type type() const final; + + int ChromaWidth() const final; + int ChromaHeight() const final; + + protected: + ~I444BufferInterface() override {} +}; + +// This interface represents 8-bit to 16-bit color depth formats: Type::kI010. +class PlanarYuv16BBuffer : public PlanarYuvBuffer { + public: + // Returns pointer to the pixel data for a given plane. The memory is owned by + // the VideoFrameBuffer object and must not be freed by the caller. + virtual const uint16_t* DataY() const = 0; + virtual const uint16_t* DataU() const = 0; + virtual const uint16_t* DataV() const = 0; + + protected: + ~PlanarYuv16BBuffer() override {} +}; + +// Represents Type::kI010, allocates 16 bits per pixel and fills 10 least +// significant bits with color information. 
+class I010BufferInterface : public PlanarYuv16BBuffer { + public: + Type type() const override; + + int ChromaWidth() const final; + int ChromaHeight() const final; + + protected: + ~I010BufferInterface() override {} +}; + +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_FRAME_BUFFER_H_ diff --git a/api/video/video_frame_metadata.cc b/api/video/video_frame_metadata.cc new file mode 100644 index 0000000..df82875 --- /dev/null +++ b/api/video/video_frame_metadata.cc @@ -0,0 +1,28 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/video/video_frame_metadata.h" + +#include "modules/rtp_rtcp/source/rtp_video_header.h" + +namespace webrtc { + +VideoFrameMetadata::VideoFrameMetadata(const RTPVideoHeader& header) + : width_(header.width), height_(header.height) { + if (header.generic) { + frame_id_ = header.generic->frame_id; + spatial_index_ = header.generic->spatial_index; + temporal_index_ = header.generic->temporal_index; + frame_dependencies_ = header.generic->dependencies; + decode_target_indications_ = header.generic->decode_target_indications; + } +} + +} // namespace webrtc diff --git a/api/video/video_frame_metadata.h b/api/video/video_frame_metadata.h new file mode 100644 index 0000000..2e93098 --- /dev/null +++ b/api/video/video_frame_metadata.h @@ -0,0 +1,59 @@ +/* + * Copyright 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_FRAME_METADATA_H_ +#define API_VIDEO_VIDEO_FRAME_METADATA_H_ + +#include + +#include "absl/container/inlined_vector.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/transport/rtp/dependency_descriptor.h" + +namespace webrtc { + +struct RTPVideoHeader; + +// A subset of metadata from the RTP video header, exposed in insertable streams +// API. +class VideoFrameMetadata { + public: + explicit VideoFrameMetadata(const RTPVideoHeader& header); + VideoFrameMetadata(const VideoFrameMetadata&) = default; + VideoFrameMetadata& operator=(const VideoFrameMetadata&) = default; + + uint16_t GetWidth() const { return width_; } + uint16_t GetHeight() const { return height_; } + absl::optional GetFrameId() const { return frame_id_; } + int GetSpatialIndex() const { return spatial_index_; } + int GetTemporalIndex() const { return temporal_index_; } + + rtc::ArrayView GetFrameDependencies() const { + return frame_dependencies_; + } + + rtc::ArrayView GetDecodeTargetIndications() + const { + return decode_target_indications_; + } + + private: + int16_t width_; + int16_t height_; + absl::optional frame_id_; + int spatial_index_ = 0; + int temporal_index_ = 0; + absl::InlinedVector frame_dependencies_; + absl::InlinedVector decode_target_indications_; +}; +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_FRAME_METADATA_H_ diff --git a/api/video/video_frame_metadata_unittest.cc b/api/video/video_frame_metadata_unittest.cc new file mode 100644 index 0000000..7a808e1 --- /dev/null +++ b/api/video/video_frame_metadata_unittest.cc @@ -0,0 +1,120 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "api/video/video_frame_metadata.h" + +#include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::ElementsAre; +using ::testing::IsEmpty; + +TEST(VideoFrameMetadata, GetWidthReturnsCorrectValue) { + RTPVideoHeader video_header; + video_header.width = 1280u; + VideoFrameMetadata metadata(video_header); + EXPECT_EQ(metadata.GetWidth(), video_header.width); +} + +TEST(VideoFrameMetadata, GetHeightReturnsCorrectValue) { + RTPVideoHeader video_header; + video_header.height = 720u; + VideoFrameMetadata metadata(video_header); + EXPECT_EQ(metadata.GetHeight(), video_header.height); +} + +TEST(VideoFrameMetadata, GetFrameIdReturnsCorrectValue) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.frame_id = 10; + VideoFrameMetadata metadata(video_header); + EXPECT_EQ(metadata.GetFrameId().value(), 10); +} + +TEST(VideoFrameMetadata, HasNoFrameIdForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata(video_header); + ASSERT_FALSE(video_header.generic); + EXPECT_EQ(metadata.GetFrameId(), absl::nullopt); +} + +TEST(VideoFrameMetadata, GetSpatialIndexReturnsCorrectValue) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.spatial_index = 2; + VideoFrameMetadata metadata(video_header); + EXPECT_EQ(metadata.GetSpatialIndex(), 2); +} + +TEST(VideoFrameMetadata, SpatialIndexIsZeroForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata(video_header); + ASSERT_FALSE(video_header.generic); + EXPECT_EQ(metadata.GetSpatialIndex(), 0); +} + +TEST(VideoFrameMetadata, 
GetTemporalIndexReturnsCorrectValue) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.temporal_index = 3; + VideoFrameMetadata metadata(video_header); + EXPECT_EQ(metadata.GetTemporalIndex(), 3); +} + +TEST(VideoFrameMetadata, TemporalIndexIsZeroForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata(video_header); + ASSERT_FALSE(video_header.generic); + EXPECT_EQ(metadata.GetTemporalIndex(), 0); +} + +TEST(VideoFrameMetadata, GetFrameDependenciesReturnsCorrectValue) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.dependencies = {5, 6, 7}; + VideoFrameMetadata metadata(video_header); + EXPECT_THAT(metadata.GetFrameDependencies(), ElementsAre(5, 6, 7)); +} + +TEST(VideoFrameMetadata, FrameDependencyVectorIsEmptyForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata(video_header); + ASSERT_FALSE(video_header.generic); + EXPECT_THAT(metadata.GetFrameDependencies(), IsEmpty()); +} + +TEST(VideoFrameMetadata, GetDecodeTargetIndicationsReturnsCorrectValue) { + RTPVideoHeader video_header; + RTPVideoHeader::GenericDescriptorInfo& generic = + video_header.generic.emplace(); + generic.decode_target_indications = {DecodeTargetIndication::kSwitch}; + VideoFrameMetadata metadata(video_header); + EXPECT_THAT(metadata.GetDecodeTargetIndications(), + ElementsAre(DecodeTargetIndication::kSwitch)); +} + +TEST(VideoFrameMetadata, + DecodeTargetIndicationsVectorIsEmptyForHeaderWithoutGeneric) { + RTPVideoHeader video_header; + VideoFrameMetadata metadata(video_header); + ASSERT_FALSE(video_header.generic); + EXPECT_THAT(metadata.GetDecodeTargetIndications(), IsEmpty()); +} + +} // namespace +} // namespace webrtc diff --git a/api/video/video_frame_type.h b/api/video/video_frame_type.h new file mode 100644 index 0000000..4a96f1f --- /dev/null +++ 
b/api/video/video_frame_type.h @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_FRAME_TYPE_H_ +#define API_VIDEO_VIDEO_FRAME_TYPE_H_ + +namespace webrtc { + +enum class VideoFrameType { + kEmptyFrame = 0, + // Wire format for MultiplexEncodedImagePacker seems to depend on numerical + // values of these constants. + kVideoFrameKey = 3, + kVideoFrameDelta = 4, +}; + +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_FRAME_TYPE_H_ diff --git a/api/video/video_rotation.h b/api/video/video_rotation.h new file mode 100644 index 0000000..6a29588 --- /dev/null +++ b/api/video/video_rotation.h @@ -0,0 +1,26 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_ROTATION_H_ +#define API_VIDEO_VIDEO_ROTATION_H_ + +namespace webrtc { + +// enum for clockwise rotation. 
+enum VideoRotation { + kVideoRotation_0 = 0, + kVideoRotation_90 = 90, + kVideoRotation_180 = 180, + kVideoRotation_270 = 270 +}; + +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_ROTATION_H_ diff --git a/api/video/video_sink_interface.h b/api/video/video_sink_interface.h new file mode 100644 index 0000000..88cf9d9 --- /dev/null +++ b/api/video/video_sink_interface.h @@ -0,0 +1,32 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_SINK_INTERFACE_H_ +#define API_VIDEO_VIDEO_SINK_INTERFACE_H_ + +#include "rtc_base/checks.h" + +namespace rtc { + +template +class VideoSinkInterface { + public: + virtual ~VideoSinkInterface() = default; + + virtual void OnFrame(const VideoFrameT& frame) = 0; + + // Should be called by the source when it discards the frame due to rate + // limiting. + virtual void OnDiscardedFrame() {} +}; + +} // namespace rtc + +#endif // API_VIDEO_VIDEO_SINK_INTERFACE_H_ diff --git a/api/video/video_source_interface.cc b/api/video/video_source_interface.cc new file mode 100644 index 0000000..70a86c3 --- /dev/null +++ b/api/video/video_source_interface.cc @@ -0,0 +1,19 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/video/video_source_interface.h" + +namespace rtc { + +VideoSinkWants::VideoSinkWants() = default; +VideoSinkWants::VideoSinkWants(const VideoSinkWants&) = default; +VideoSinkWants::~VideoSinkWants() = default; + +} // namespace rtc diff --git a/api/video/video_source_interface.h b/api/video/video_source_interface.h new file mode 100644 index 0000000..b03d7c5 --- /dev/null +++ b/api/video/video_source_interface.h @@ -0,0 +1,67 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_SOURCE_INTERFACE_H_ +#define API_VIDEO_VIDEO_SOURCE_INTERFACE_H_ + +#include + +#include "absl/types/optional.h" +#include "api/video/video_sink_interface.h" +#include "rtc_base/system/rtc_export.h" + +namespace rtc { + +// VideoSinkWants is used for notifying the source of properties a video frame +// should have when it is delivered to a certain sink. +struct RTC_EXPORT VideoSinkWants { + VideoSinkWants(); + VideoSinkWants(const VideoSinkWants&); + ~VideoSinkWants(); + // Tells the source whether the sink wants frames with rotation applied. + // By default, any rotation must be applied by the sink. + bool rotation_applied = false; + + // Tells the source that the sink only wants black frames. + bool black_frames = false; + + // Tells the source the maximum number of pixels the sink wants. + int max_pixel_count = std::numeric_limits::max(); + // Tells the source the desired number of pixels the sinks wants. This will + // typically be used when stepping the resolution up again when conditions + // have improved after an earlier downgrade. 
The source should select the + // closest resolution to this pixel count, but if max_pixel_count is set, it + // still sets the absolute upper bound. + absl::optional target_pixel_count; + // Tells the source the maximum framerate the sink wants. + int max_framerate_fps = std::numeric_limits::max(); + + // Tells the source that the sink wants width and height of the video frames + // to be divisible by |resolution_alignment|. + // For example: With I420, this value would be a multiple of 2. + // Note that this field is unrelated to any horizontal or vertical stride + // requirements the encoder has on the incoming video frame buffers. + int resolution_alignment = 1; +}; + +template +class VideoSourceInterface { + public: + virtual ~VideoSourceInterface() = default; + + virtual void AddOrUpdateSink(VideoSinkInterface* sink, + const VideoSinkWants& wants) = 0; + // RemoveSink must guarantee that at the time the method returns, + // there is no current and no future calls to VideoSinkInterface::OnFrame. + virtual void RemoveSink(VideoSinkInterface* sink) = 0; +}; + +} // namespace rtc +#endif // API_VIDEO_VIDEO_SOURCE_INTERFACE_H_ diff --git a/api/video/video_stream_decoder.h b/api/video/video_stream_decoder.h new file mode 100644 index 0000000..8f27fa4 --- /dev/null +++ b/api/video/video_stream_decoder.h @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef API_VIDEO_VIDEO_STREAM_DECODER_H_ +#define API_VIDEO_VIDEO_STREAM_DECODER_H_ + +#include +#include +#include + +#include "api/units/time_delta.h" +#include "api/video/encoded_frame.h" +#include "api/video/video_frame.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_decoder_factory.h" + +namespace webrtc { +// NOTE: This class is still under development and may change without notice. +class VideoStreamDecoderInterface { + public: + class Callbacks { + public: + virtual ~Callbacks() = default; + + // Called when the VideoStreamDecoder enters a non-decodable state. + virtual void OnNonDecodableState() = 0; + + // Called with the last continuous frame. + virtual void OnContinuousUntil( + const video_coding::VideoLayerFrameId& key) = 0; + + // Called with the decoded frame. + virtual void OnDecodedFrame(VideoFrame decodedImage, + absl::optional decode_time_ms, + absl::optional qp) = 0; + }; + + virtual ~VideoStreamDecoderInterface() = default; + + virtual void OnFrame(std::unique_ptr frame) = 0; + + virtual void SetMinPlayoutDelay(TimeDelta min_delay) = 0; + virtual void SetMaxPlayoutDelay(TimeDelta max_delay) = 0; +}; + +} // namespace webrtc + +#endif // API_VIDEO_VIDEO_STREAM_DECODER_H_ diff --git a/api/video/video_stream_decoder_create.cc b/api/video/video_stream_decoder_create.cc new file mode 100644 index 0000000..8d70556 --- /dev/null +++ b/api/video/video_stream_decoder_create.cc @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "api/video/video_stream_decoder_create.h" + +#include + +#include "video/video_stream_decoder_impl.h" + +namespace webrtc { + +std::unique_ptr CreateVideoStreamDecoder( + VideoStreamDecoderInterface::Callbacks* callbacks, + VideoDecoderFactory* decoder_factory, + TaskQueueFactory* task_queue_factory, + std::map> decoder_settings) { + return std::make_unique(callbacks, decoder_factory, + task_queue_factory, + std::move(decoder_settings)); +} + +} // namespace webrtc diff --git a/api/video/video_stream_decoder_create.h b/api/video/video_stream_decoder_create.h new file mode 100644 index 0000000..4958dc1 --- /dev/null +++ b/api/video/video_stream_decoder_create.h @@ -0,0 +1,35 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef API_VIDEO_VIDEO_STREAM_DECODER_CREATE_H_ +#define API_VIDEO_VIDEO_STREAM_DECODER_CREATE_H_ + +#include +#include +#include + +#include "api/task_queue/task_queue_factory.h" +#include "api/video/video_stream_decoder.h" +#include "api/video_codecs/sdp_video_format.h" + +namespace webrtc { +// The |decoder_settings| parameter is a map between: +// --> <') + + # Add snackbar for notifications. + html.append( + '
' + '
' + '' + '
') + + return self._NEW_LINE.join(html) + + def _BuildScoreTab(self, score_name, anchor_data): + """Builds the content of a tab.""" + # Find unique values. + scores = self._scores_data_frame[ + self._scores_data_frame.eval_score_name == score_name] + apm_configs = sorted(self._FindUniqueTuples(scores, ['apm_config'])) + test_data_gen_configs = sorted(self._FindUniqueTuples( + scores, ['test_data_gen', 'test_data_gen_params'])) + + html = [ + '
', + '
', + '
', + (''), + ] + + # Header. + html.append('') + for test_data_gen_info in test_data_gen_configs: + html.append(''.format( + self._FormatName(test_data_gen_info[0]), test_data_gen_info[1])) + html.append('') + + # Body. + html.append('') + for apm_config in apm_configs: + html.append('') + for test_data_gen_info in test_data_gen_configs: + dialog_id = self._ScoreStatsInspectorDialogId( + score_name, apm_config[0], test_data_gen_info[0], + test_data_gen_info[1]) + html.append( + ''.format( + dialog_id, self._BuildScoreTableCell( + score_name, test_data_gen_info[0], test_data_gen_info[1], + apm_config[0]))) + html.append('') + html.append('') + + html.append('
APM config / Test data generator{} {}
' + self._FormatName(apm_config[0]) + '{}
') + + html.append(self._BuildScoreStatsInspectorDialogs( + score_name, apm_configs, test_data_gen_configs, + anchor_data)) + + return self._NEW_LINE.join(html) + + def _BuildScoreTableCell(self, score_name, test_data_gen, + test_data_gen_params, apm_config): + """Builds the content of a table cell for a score table.""" + scores = self._SliceDataForScoreTableCell( + score_name, apm_config, test_data_gen, test_data_gen_params) + stats = self._ComputeScoreStats(scores) + + html = [] + items_id_prefix = ( + score_name + test_data_gen + test_data_gen_params + apm_config) + if stats['count'] == 1: + # Show the only available score. + item_id = hashlib.md5(items_id_prefix.encode('utf-8')).hexdigest() + html.append('
{1:f}
'.format( + item_id, scores['score'].mean())) + html.append('
{}' + '
'.format(item_id, 'single value')) + else: + # Show stats. + for stat_name in ['min', 'max', 'mean', 'std dev']: + item_id = hashlib.md5( + (items_id_prefix + stat_name).encode('utf-8')).hexdigest() + html.append('
{1:f}
'.format( + item_id, stats[stat_name])) + html.append('
{}' + '
'.format(item_id, stat_name)) + + return self._NEW_LINE.join(html) + + def _BuildScoreStatsInspectorDialogs( + self, score_name, apm_configs, test_data_gen_configs, anchor_data): + """Builds a set of score stats inspector dialogs.""" + html = [] + for apm_config in apm_configs: + for test_data_gen_info in test_data_gen_configs: + dialog_id = self._ScoreStatsInspectorDialogId( + score_name, apm_config[0], + test_data_gen_info[0], test_data_gen_info[1]) + + html.append(''.format(dialog_id)) + + # Content. + html.append('
') + html.append('
APM config preset: {}
' + 'Test data generator: {} ({})
'.format( + self._FormatName(apm_config[0]), + self._FormatName(test_data_gen_info[0]), + test_data_gen_info[1])) + html.append(self._BuildScoreStatsInspectorDialog( + score_name, apm_config[0], test_data_gen_info[0], + test_data_gen_info[1], anchor_data + (dialog_id,))) + html.append('
') + + # Actions. + html.append('
') + html.append('') + html.append('
') + + html.append('
') + + return self._NEW_LINE.join(html) + + def _BuildScoreStatsInspectorDialog( + self, score_name, apm_config, test_data_gen, test_data_gen_params, + anchor_data): + """Builds one score stats inspector dialog.""" + scores = self._SliceDataForScoreTableCell( + score_name, apm_config, test_data_gen, test_data_gen_params) + + capture_render_pairs = sorted(self._FindUniqueTuples( + scores, ['capture', 'render'])) + echo_simulators = sorted(self._FindUniqueTuples(scores, ['echo_simulator'])) + + html = [''] + + # Header. + html.append('') + for echo_simulator in echo_simulators: + html.append('') + html.append('') + + # Body. + html.append('') + for row, (capture, render) in enumerate(capture_render_pairs): + html.append(''.format( + capture, render)) + for col, echo_simulator in enumerate(echo_simulators): + score_tuple = self._SliceDataForScoreStatsTableCell( + scores, capture, render, echo_simulator[0]) + cell_class = 'r{}c{}'.format(row, col) + html.append(''.format( + cell_class, self._BuildScoreStatsInspectorTableCell( + score_tuple, anchor_data + (cell_class,)))) + html.append('') + html.append('') + + html.append('
Capture-Render / Echo simulator' + self._FormatName(echo_simulator[0]) +'
{}
{}
{}
') + + # Placeholder for the audio inspector. + html.append('
') + + return self._NEW_LINE.join(html) + + def _BuildScoreStatsInspectorTableCell(self, score_tuple, anchor_data): + """Builds the content of a cell of a score stats inspector.""" + anchor = '&'.join(anchor_data) + html = [('
{}
' + '').format(score_tuple.score, anchor)] + + # Add all the available file paths as hidden data. + for field_name in score_tuple.keys(): + if field_name.endswith('_filepath'): + html.append(''.format( + field_name, score_tuple[field_name])) + + return self._NEW_LINE.join(html) + + def _SliceDataForScoreTableCell( + self, score_name, apm_config, test_data_gen, test_data_gen_params): + """Slices |self._scores_data_frame| to extract the data for a tab.""" + masks = [] + masks.append(self._scores_data_frame.eval_score_name == score_name) + masks.append(self._scores_data_frame.apm_config == apm_config) + masks.append(self._scores_data_frame.test_data_gen == test_data_gen) + masks.append( + self._scores_data_frame.test_data_gen_params == test_data_gen_params) + mask = functools.reduce((lambda i1, i2: i1 & i2), masks) + del masks + return self._scores_data_frame[mask] + + @classmethod + def _SliceDataForScoreStatsTableCell( + cls, scores, capture, render, echo_simulator): + """Slices |scores| to extract the data for a tab.""" + masks = [] + + masks.append(scores.capture == capture) + masks.append(scores.render == render) + masks.append(scores.echo_simulator == echo_simulator) + mask = functools.reduce((lambda i1, i2: i1 & i2), masks) + del masks + + sliced_data = scores[mask] + assert len(sliced_data) == 1, 'single score is expected' + return sliced_data.iloc[0] + + @classmethod + def _FindUniqueTuples(cls, data_frame, fields): + """Slices |data_frame| to a list of fields and finds unique tuples.""" + return data_frame[fields].drop_duplicates().values.tolist() + + @classmethod + def _ComputeScoreStats(cls, data_frame): + """Computes score stats.""" + scores = data_frame['score'] + return { + 'count': scores.count(), + 'min': scores.min(), + 'max': scores.max(), + 'mean': scores.mean(), + 'std dev': scores.std(), + } + + @classmethod + def _ScoreStatsInspectorDialogId(cls, score_name, apm_config, test_data_gen, + test_data_gen_params): + """Assigns a unique name to a 
dialog.""" + return 'score-stats-dialog-' + hashlib.md5( + 'score-stats-inspector-{}-{}-{}-{}'.format( + score_name, apm_config, test_data_gen, + test_data_gen_params).encode('utf-8')).hexdigest() + + @classmethod + def _Save(cls, output_filepath, html): + """Writes the HTML file. + + Args: + output_filepath: output file path. + html: string with the HTML content. + """ + with open(output_filepath, 'w') as f: + f.write(html) + + @classmethod + def _FormatName(cls, name): + """Formats a name. + + Args: + name: a string. + + Returns: + A copy of name in which underscores and dashes are replaced with a space. + """ + return re.sub(r'[_\-]', ' ', name) diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/export_unittest.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/export_unittest.py new file mode 100644 index 0000000..264af7e --- /dev/null +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/export_unittest.py @@ -0,0 +1,86 @@ +# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +"""Unit tests for the export module. +""" + +import logging +import os +import shutil +import tempfile +import unittest + +import pyquery as pq + +from . import audioproc_wrapper +from . import collect_data +from . import eval_scores_factory +from . import evaluation +from . import export +from . import simulation +from . import test_data_generation_factory + + +class TestExport(unittest.TestCase): + """Unit tests for the export module. 
+ """ + + _CLEAN_TMP_OUTPUT = True + + def setUp(self): + """Creates temporary data to export.""" + self._tmp_path = tempfile.mkdtemp() + + # Run a fake experiment to produce data to export. + simulator = simulation.ApmModuleSimulator( + test_data_generator_factory=( + test_data_generation_factory.TestDataGeneratorFactory( + aechen_ir_database_path='', + noise_tracks_path='', + copy_with_identity=False)), + evaluation_score_factory=( + eval_scores_factory.EvaluationScoreWorkerFactory( + polqa_tool_bin_path=os.path.join( + os.path.dirname(os.path.abspath(__file__)), 'fake_polqa'), + echo_metric_tool_bin_path=None + )), + ap_wrapper=audioproc_wrapper.AudioProcWrapper( + audioproc_wrapper.AudioProcWrapper.DEFAULT_APM_SIMULATOR_BIN_PATH), + evaluator=evaluation.ApmModuleEvaluator()) + simulator.Run( + config_filepaths=['apm_configs/default.json'], + capture_input_filepaths=[ + os.path.join(self._tmp_path, 'pure_tone-440_1000.wav'), + os.path.join(self._tmp_path, 'pure_tone-880_1000.wav'), + ], + test_data_generator_names=['identity', 'white_noise'], + eval_score_names=['audio_level_peak', 'audio_level_mean'], + output_dir=self._tmp_path) + + # Export results. + p = collect_data.InstanceArgumentsParser() + args = p.parse_args(['--output_dir', self._tmp_path]) + src_path = collect_data.ConstructSrcPath(args) + self._data_to_export = collect_data.FindScores(src_path, args) + + def tearDown(self): + """Recursively deletes temporary folders.""" + if self._CLEAN_TMP_OUTPUT: + shutil.rmtree(self._tmp_path) + else: + logging.warning(self.id() + ' did not clean the temporary path ' + ( + self._tmp_path)) + + def testCreateHtmlReport(self): + fn_out = os.path.join(self._tmp_path, 'results.html') + exporter = export.HtmlExport(fn_out) + exporter.Export(self._data_to_export) + + document = pq.PyQuery(filename=fn_out) + self.assertIsInstance(document, pq.PyQuery) + # TODO(alessiob): Use PyQuery API to check the HTML file. 
diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/external_vad.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/external_vad.py new file mode 100644 index 0000000..01418d8 --- /dev/null +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/external_vad.py @@ -0,0 +1,77 @@ +# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +from __future__ import division + +import logging +import os +import subprocess +import shutil +import sys +import tempfile + +try: + import numpy as np +except ImportError: + logging.critical('Cannot import the third-party Python package numpy') + sys.exit(1) + +from . import signal_processing + +class ExternalVad(object): + + def __init__(self, path_to_binary, name): + """Args: + path_to_binary: path to binary that accepts '-i ', '-o + '. There must be one float value per + 10ms audio + name: a name to identify the external VAD. Used for saving + the output as extvad_output-. 
+ """ + self._path_to_binary = path_to_binary + self.name = name + assert os.path.exists(self._path_to_binary), ( + self._path_to_binary) + self._vad_output = None + + def Run(self, wav_file_path): + _signal = signal_processing.SignalProcessingUtils.LoadWav(wav_file_path) + if _signal.channels != 1: + raise NotImplementedError('Multiple-channel' + ' annotations not implemented') + if _signal.frame_rate != 48000: + raise NotImplementedError('Frame rates ' + 'other than 48000 not implemented') + + tmp_path = tempfile.mkdtemp() + try: + output_file_path = os.path.join( + tmp_path, self.name + '_vad.tmp') + subprocess.call([ + self._path_to_binary, + '-i', wav_file_path, + '-o', output_file_path + ]) + self._vad_output = np.fromfile(output_file_path, np.float32) + except Exception as e: + logging.error('Error while running the ' + self.name + + ' VAD (' + e.message + ')') + finally: + if os.path.exists(tmp_path): + shutil.rmtree(tmp_path) + + def GetVadOutput(self): + assert self._vad_output is not None + return self._vad_output + + @classmethod + def ConstructVadDict(cls, vad_paths, vad_names): + external_vads = {} + for path, name in zip(vad_paths, vad_names): + external_vads[name] = ExternalVad(path, name) + return external_vads diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/fake_external_vad.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/fake_external_vad.py new file mode 100755 index 0000000..7c75e8f --- /dev/null +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/fake_external_vad.py @@ -0,0 +1,24 @@ +#!/usr/bin/python +# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. 
All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. +import argparse +import numpy as np + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('-i', required=True) + parser.add_argument('-o', required=True) + + args = parser.parse_args() + + array = np.arange(100, dtype=np.float32) + array.tofile(open(args.o, 'w')) + + +if __name__ == '__main__': + main() diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/fake_polqa.cc b/modules/audio_processing/test/py_quality_assessment/quality_assessment/fake_polqa.cc new file mode 100644 index 0000000..62d8ebb --- /dev/null +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/fake_polqa.cc @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include +#include + +#include "rtc_base/checks.h" + +namespace webrtc { +namespace test { +namespace { + +const char* const kErrorMessage = "-Out /path/to/output/file is mandatory"; + +// Writes fake output intended to be parsed by +// quality_assessment.eval_scores.PolqaScore. +void WriteOutputFile(const std::string& output_file_path) { + RTC_CHECK_NE(output_file_path, ""); + std::ofstream out(output_file_path); + RTC_CHECK(!out.bad()); + out << "* Fake Polqa output" << std::endl; + out << "FakeField1\tPolqaScore\tFakeField2" << std::endl; + out << "FakeValue1\t3.25\tFakeValue2" << std::endl; + out.close(); +} + +} // namespace + +int main(int argc, char* argv[]) { + // Find "-Out" and use its next argument as output file path. 
+ RTC_CHECK_GE(argc, 3) << kErrorMessage; + const std::string kSoughtFlagName = "-Out"; + for (int i = 1; i < argc - 1; ++i) { + if (kSoughtFlagName.compare(argv[i]) == 0) { + WriteOutputFile(argv[i + 1]); + return 0; + } + } + FATAL() << kErrorMessage; +} + +} // namespace test +} // namespace webrtc + +int main(int argc, char* argv[]) { + return webrtc::test::main(argc, argv); +} diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer.py new file mode 100644 index 0000000..b1afe14 --- /dev/null +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer.py @@ -0,0 +1,95 @@ +# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +"""Input mixer module. +""" + +import logging +import os + +from . import exceptions +from . import signal_processing + + +class ApmInputMixer(object): + """Class to mix a set of audio segments down to the APM input.""" + + _HARD_CLIPPING_LOG_MSG = 'hard clipping detected in the mixed signal' + + def __init__(self): + pass + + @classmethod + def HardClippingLogMessage(cls): + """Returns the log message used when hard clipping is detected in the mix. + + This method is mainly intended to be used by the unit tests. + """ + return cls._HARD_CLIPPING_LOG_MSG + + @classmethod + def Mix(cls, output_path, capture_input_filepath, echo_filepath): + """Mixes capture and echo. + + Creates the overall capture input for APM by mixing the "echo-free" capture + signal with the echo signal (e.g., echo simulated via the + echo_path_simulation module). 
+ + The echo signal cannot be shorter than the capture signal and the generated + mix will have the same duration of the capture signal. The latter property + is enforced in order to let the input of APM and the reference signal + created by TestDataGenerator have the same length (required for the + evaluation step). + + Hard-clipping may occur in the mix; a warning is raised when this happens. + + If |echo_filepath| is None, nothing is done and |capture_input_filepath| is + returned. + + Args: + speech: AudioSegment instance. + echo_path: AudioSegment instance or None. + + Returns: + Path to the mix audio track file. + """ + if echo_filepath is None: + return capture_input_filepath + + # Build the mix output file name as a function of the echo file name. + # This ensures that if the internal parameters of the echo path simulator + # change, no erroneous cache hit occurs. + echo_file_name, _ = os.path.splitext(os.path.split(echo_filepath)[1]) + capture_input_file_name, _ = os.path.splitext( + os.path.split(capture_input_filepath)[1]) + mix_filepath = os.path.join(output_path, 'mix_capture_{}_{}.wav'.format( + capture_input_file_name, echo_file_name)) + + # Create the mix if not done yet. + mix = None + if not os.path.exists(mix_filepath): + echo_free_capture = signal_processing.SignalProcessingUtils.LoadWav( + capture_input_filepath) + echo = signal_processing.SignalProcessingUtils.LoadWav(echo_filepath) + + if signal_processing.SignalProcessingUtils.CountSamples(echo) < ( + signal_processing.SignalProcessingUtils.CountSamples( + echo_free_capture)): + raise exceptions.InputMixerException( + 'echo cannot be shorter than capture') + + mix = echo_free_capture.overlay(echo) + signal_processing.SignalProcessingUtils.SaveWav(mix_filepath, mix) + + # Check if hard clipping occurs. 
+ if mix is None: + mix = signal_processing.SignalProcessingUtils.LoadWav(mix_filepath) + if signal_processing.SignalProcessingUtils.DetectHardClipping(mix): + logging.warning(cls._HARD_CLIPPING_LOG_MSG) + + return mix_filepath diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer_unittest.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer_unittest.py new file mode 100644 index 0000000..87257e2 --- /dev/null +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_mixer_unittest.py @@ -0,0 +1,144 @@ +# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +"""Unit tests for the input mixer module. +""" + +import logging +import os +import shutil +import tempfile +import unittest + +import mock + +from . import exceptions +from . import input_mixer +from . import signal_processing + + +class TestApmInputMixer(unittest.TestCase): + """Unit tests for the ApmInputMixer class. + """ + + # Audio track file names created in setUp(). + _FILENAMES = ['capture', 'echo_1', 'echo_2', 'shorter', 'longer'] + + # Target peak power level (dBFS) of each audio track file created in setUp(). + # These values are hand-crafted in order to make saturation happen when + # capture and echo_2 are mixed and the contrary for capture and echo_1. + # None means that the power is not changed. + _MAX_PEAK_POWER_LEVELS = [-10.0, -5.0, 0.0, None, None] + + # Audio track file durations in milliseconds. 
+ _DURATIONS = [1000, 1000, 1000, 800, 1200] + + _SAMPLE_RATE = 48000 + + def setUp(self): + """Creates temporary data.""" + self._tmp_path = tempfile.mkdtemp() + + # Create audio track files. + self._audio_tracks = {} + for filename, peak_power, duration in zip( + self._FILENAMES, self._MAX_PEAK_POWER_LEVELS, self._DURATIONS): + audio_track_filepath = os.path.join(self._tmp_path, '{}.wav'.format( + filename)) + + # Create a pure tone with the target peak power level. + template = signal_processing.SignalProcessingUtils.GenerateSilence( + duration=duration, sample_rate=self._SAMPLE_RATE) + signal = signal_processing.SignalProcessingUtils.GeneratePureTone( + template) + if peak_power is not None: + signal = signal.apply_gain(-signal.max_dBFS + peak_power) + + signal_processing.SignalProcessingUtils.SaveWav( + audio_track_filepath, signal) + self._audio_tracks[filename] = { + 'filepath': audio_track_filepath, + 'num_samples': signal_processing.SignalProcessingUtils.CountSamples( + signal) + } + + def tearDown(self): + """Recursively deletes temporary folders.""" + shutil.rmtree(self._tmp_path) + + def testCheckMixSameDuration(self): + """Checks the duration when mixing capture and echo with same duration.""" + mix_filepath = input_mixer.ApmInputMixer.Mix( + self._tmp_path, + self._audio_tracks['capture']['filepath'], + self._audio_tracks['echo_1']['filepath']) + self.assertTrue(os.path.exists(mix_filepath)) + + mix = signal_processing.SignalProcessingUtils.LoadWav(mix_filepath) + self.assertEqual(self._audio_tracks['capture']['num_samples'], + signal_processing.SignalProcessingUtils.CountSamples(mix)) + + def testRejectShorterEcho(self): + """Rejects echo signals that are shorter than the capture signal.""" + try: + _ = input_mixer.ApmInputMixer.Mix( + self._tmp_path, + self._audio_tracks['capture']['filepath'], + self._audio_tracks['shorter']['filepath']) + self.fail('no exception raised') + except exceptions.InputMixerException: + pass + + def 
testCheckMixDurationWithLongerEcho(self): + """Checks the duration when mixing an echo longer than the capture.""" + mix_filepath = input_mixer.ApmInputMixer.Mix( + self._tmp_path, + self._audio_tracks['capture']['filepath'], + self._audio_tracks['longer']['filepath']) + self.assertTrue(os.path.exists(mix_filepath)) + + mix = signal_processing.SignalProcessingUtils.LoadWav(mix_filepath) + self.assertEqual(self._audio_tracks['capture']['num_samples'], + signal_processing.SignalProcessingUtils.CountSamples(mix)) + + def testCheckOutputFileNamesConflict(self): + """Checks that different echo files lead to different output file names.""" + mix1_filepath = input_mixer.ApmInputMixer.Mix( + self._tmp_path, + self._audio_tracks['capture']['filepath'], + self._audio_tracks['echo_1']['filepath']) + self.assertTrue(os.path.exists(mix1_filepath)) + + mix2_filepath = input_mixer.ApmInputMixer.Mix( + self._tmp_path, + self._audio_tracks['capture']['filepath'], + self._audio_tracks['echo_2']['filepath']) + self.assertTrue(os.path.exists(mix2_filepath)) + + self.assertNotEqual(mix1_filepath, mix2_filepath) + + def testHardClippingLogExpected(self): + """Checks that hard clipping warning is raised when occurring.""" + logging.warning = mock.MagicMock(name='warning') + _ = input_mixer.ApmInputMixer.Mix( + self._tmp_path, + self._audio_tracks['capture']['filepath'], + self._audio_tracks['echo_2']['filepath']) + logging.warning.assert_called_once_with( + input_mixer.ApmInputMixer.HardClippingLogMessage()) + + def testHardClippingLogNotExpected(self): + """Checks that hard clipping warning is not raised when not occurring.""" + logging.warning = mock.MagicMock(name='warning') + _ = input_mixer.ApmInputMixer.Mix( + self._tmp_path, + self._audio_tracks['capture']['filepath'], + self._audio_tracks['echo_1']['filepath']) + self.assertNotIn( + mock.call(input_mixer.ApmInputMixer.HardClippingLogMessage()), + logging.warning.call_args_list) diff --git 
a/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_signal_creator.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_signal_creator.py new file mode 100644 index 0000000..1feec47 --- /dev/null +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/input_signal_creator.py @@ -0,0 +1,67 @@ +# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +"""Input signal creator module. +""" + +from . import exceptions +from . import signal_processing + + +class InputSignalCreator(object): + """Input signal creator class. + """ + + @classmethod + def Create(cls, name, raw_params): + """Creates a input signal and its metadata. + + Args: + name: Input signal creator name. + raw_params: Tuple of parameters to pass to the specific signal creator. + + Returns: + (AudioSegment, dict) tuple. + """ + try: + signal = {} + params = {} + + if name == 'pure_tone': + params['frequency'] = float(raw_params[0]) + params['duration'] = int(raw_params[1]) + signal = cls._CreatePureTone(params['frequency'], params['duration']) + else: + raise exceptions.InputSignalCreatorException( + 'Invalid input signal creator name') + + # Complete metadata. + params['signal'] = name + + return signal, params + except (TypeError, AssertionError) as e: + raise exceptions.InputSignalCreatorException( + 'Invalid signal creator parameters: {}'.format(e)) + + @classmethod + def _CreatePureTone(cls, frequency, duration): + """ + Generates a pure tone at 48000 Hz. + + Args: + frequency: Float in (0-24000] (Hz). + duration: Integer (milliseconds). + + Returns: + AudioSegment instance. 
+ """ + assert 0 < frequency <= 24000 + assert duration > 0 + template = signal_processing.SignalProcessingUtils.GenerateSilence(duration) + return signal_processing.SignalProcessingUtils.GeneratePureTone( + template, frequency) diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/results.css b/modules/audio_processing/test/py_quality_assessment/quality_assessment/results.css new file mode 100644 index 0000000..2f406bb --- /dev/null +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/results.css @@ -0,0 +1,32 @@ +/* Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +td.selected-score { + background-color: #DDD; +} + +td.single-score-cell{ + text-align: center; +} + +.audio-inspector { + text-align: center; +} + +.audio-inspector div{ + margin-bottom: 0; + padding-bottom: 0; + padding-top: 0; +} + +.audio-inspector div div{ + margin-bottom: 0; + padding-bottom: 0; + padding-top: 0; +} diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/results.js b/modules/audio_processing/test/py_quality_assessment/quality_assessment/results.js new file mode 100644 index 0000000..8e47411 --- /dev/null +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/results.js @@ -0,0 +1,376 @@ +// Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. +// +// Use of this source code is governed by a BSD-style license +// that can be found in the LICENSE file in the root of the source +// tree. An additional intellectual property rights grant can be found +// in the file PATENTS. 
All contributing project authors may +// be found in the AUTHORS file in the root of the source tree. + +/** + * Opens the score stats inspector dialog. + * @param {String} dialogId: identifier of the dialog to show. + * @return {DOMElement} The dialog element that has been opened. + */ +function openScoreStatsInspector(dialogId) { + var dialog = document.getElementById(dialogId); + dialog.showModal(); + return dialog; +} + +/** + * Closes the score stats inspector dialog. + */ +function closeScoreStatsInspector() { + var dialog = document.querySelector('dialog[open]'); + if (dialog == null) + return; + dialog.close(); +} + +/** + * Audio inspector class. + * @constructor + */ +function AudioInspector() { + console.debug('Creating an AudioInspector instance.'); + this.audioPlayer_ = new Audio(); + this.metadata_ = {}; + this.currentScore_ = null; + this.audioInspector_ = null; + this.snackbarContainer_ = document.querySelector('#snackbar'); + + // Get base URL without anchors. + this.baseUrl_ = window.location.href; + var index = this.baseUrl_.indexOf('#'); + if (index > 0) + this.baseUrl_ = this.baseUrl_.substr(0, index) + console.info('Base URL set to "' + window.location.href + '".'); + + window.event.stopPropagation(); + this.createTextAreasForCopy_(); + this.createAudioInspector_(); + this.initializeEventHandlers_(); + + // When MDL is ready, parse the anchor (if any) to show the requested + // experiment. + var self = this; + document.querySelectorAll('header a')[0].addEventListener( + 'mdl-componentupgraded', function() { + if (!self.parseWindowAnchor()) { + // If not experiment is requested, open the first section. + console.info('No anchor parsing, opening the first section.'); + document.querySelectorAll('header a > span')[0].click(); + } + }); +} + +/** + * Parse the anchor in the window URL. + * @return {bool} True if the parsing succeeded. 
+ */ +AudioInspector.prototype.parseWindowAnchor = function() { + var index = location.href.indexOf('#'); + if (index == -1) { + console.debug('No # found in the URL.'); + return false; + } + + var anchor = location.href.substr(index - location.href.length + 1); + console.info('Anchor changed: "' + anchor + '".'); + + var parts = anchor.split('&'); + if (parts.length != 3) { + console.info('Ignoring anchor with invalid number of fields.'); + return false; + } + + var openDialog = document.querySelector('dialog[open]'); + try { + // Open the requested dialog if not already open. + if (!openDialog || openDialog.id != parts[1]) { + !openDialog || openDialog.close(); + document.querySelectorAll('header a > span')[ + parseInt(parts[0].substr(1))].click(); + openDialog = openScoreStatsInspector(parts[1]); + } + + // Trigger click on cell. + var cell = openDialog.querySelector('td.' + parts[2]); + cell.focus(); + cell.click(); + + this.showNotification_('Experiment selected.'); + return true; + } catch (e) { + this.showNotification_('Cannot select experiment :('); + console.error('Exception caught while selecting experiment: "' + e + '".'); + } + + return false; +} + +/** + * Set up the inspector for a new score. + * @param {DOMElement} element: Element linked to the selected score. + */ +AudioInspector.prototype.selectedScoreChange = function(element) { + if (this.currentScore_ == element) { return; } + if (this.currentScore_ != null) { + this.currentScore_.classList.remove('selected-score'); + } + this.currentScore_ = element; + this.currentScore_.classList.add('selected-score'); + this.stopAudio(); + + // Read metadata. + var matches = element.querySelectorAll('input[type=hidden]'); + this.metadata_ = {}; + for (var index = 0; index < matches.length; ++index) { + this.metadata_[matches[index].name] = matches[index].value; + } + + // Show the audio inspector interface. 
+ var container = element.parentNode.parentNode.parentNode.parentNode; + var audioInspectorPlaceholder = container.querySelector( + '.audio-inspector-placeholder'); + this.moveInspector_(audioInspectorPlaceholder); +}; + +/** + * Stop playing audio. + */ +AudioInspector.prototype.stopAudio = function() { + console.info('Pausing audio play out.'); + this.audioPlayer_.pause(); +}; + +/** + * Show a text message using the snackbar. + */ +AudioInspector.prototype.showNotification_ = function(text) { + try { + this.snackbarContainer_.MaterialSnackbar.showSnackbar({ + message: text, timeout: 2000}); + } catch (e) { + // Fallback to an alert. + alert(text); + console.warn('Cannot use snackbar: "' + e + '"'); + } +} + +/** + * Move the audio inspector DOM node into the given parent. + * @param {DOMElement} newParentNode: New parent for the inspector. + */ +AudioInspector.prototype.moveInspector_ = function(newParentNode) { + newParentNode.appendChild(this.audioInspector_); +}; + +/** + * Play audio file from url. + * @param {string} metadataFieldName: Metadata field name. + */ +AudioInspector.prototype.playAudio = function(metadataFieldName) { + if (this.metadata_[metadataFieldName] == undefined) { return; } + if (this.metadata_[metadataFieldName] == 'None') { + alert('The selected stream was not used during the experiment.'); + return; + } + this.stopAudio(); + this.audioPlayer_.src = this.metadata_[metadataFieldName]; + console.debug('Audio source URL: "' + this.audioPlayer_.src + '"'); + this.audioPlayer_.play(); + console.info('Playing out audio.'); +}; + +/** + * Create hidden text areas to copy URLs. + * + * For each dialog, one text area is created since it is not possible to select + * text on a text area outside of the active dialog. 
+ */ +AudioInspector.prototype.createTextAreasForCopy_ = function() { + var self = this; + document.querySelectorAll('dialog.mdl-dialog').forEach(function(element) { + var textArea = document.createElement("textarea"); + textArea.classList.add('url-copy'); + textArea.style.position = 'fixed'; + textArea.style.bottom = 0; + textArea.style.left = 0; + textArea.style.width = '2em'; + textArea.style.height = '2em'; + textArea.style.border = 'none'; + textArea.style.outline = 'none'; + textArea.style.boxShadow = 'none'; + textArea.style.background = 'transparent'; + textArea.style.fontSize = '6px'; + element.appendChild(textArea); + }); +} + +/** + * Create audio inspector. + */ +AudioInspector.prototype.createAudioInspector_ = function() { + var buttonIndex = 0; + function getButtonHtml(icon, toolTipText, caption, metadataFieldName) { + var buttonId = 'audioInspectorButton' + buttonIndex++; + html = caption == null ? '' : caption; + html += '' + + return html; + } + + // TODO(alessiob): Add timeline and highlight current track by changing icon + // color. + + this.audioInspector_ = document.createElement('div'); + this.audioInspector_.classList.add('audio-inspector'); + this.audioInspector_.innerHTML = + '
' + + '
' + + '
' + + getButtonHtml('play_arrow', 'Simulated echo', 'Ein', + 'echo_filepath') + + '
' + + '
' + + getButtonHtml('stop', 'Stop playing [S]', null, '__stop__') + + '
' + + '
' + + getButtonHtml('play_arrow', 'Render stream', 'Rin', + 'render_filepath') + + '
' + + '
' + + '
' + + '
' + + '
' + + '
' + + getButtonHtml('play_arrow', 'Capture stream (APM input) [1]', + 'Y\'in', 'capture_filepath') + + '
' + + '
APM
' + + '
' + + getButtonHtml('play_arrow', 'APM output [2]', 'Yout', + 'apm_output_filepath') + + '
' + + '
' + + '
' + + '
' + + '
' + + '
' + + getButtonHtml('play_arrow', 'Echo-free capture stream', + 'Yin', 'echo_free_capture_filepath') + + '
' + + '
' + + getButtonHtml('play_arrow', 'Clean capture stream', + 'Yclean', 'clean_capture_input_filepath') + + '
' + + '
' + + getButtonHtml('play_arrow', 'APM reference [3]', 'Yref', + 'apm_reference_filepath') + + '
' + + '
' + + '
'; + + // Add an invisible node as initial container for the audio inspector. + var parent = document.createElement('div'); + parent.style.display = 'none'; + this.moveInspector_(parent); + document.body.appendChild(parent); +}; + +/** + * Initialize event handlers. + */ +AudioInspector.prototype.initializeEventHandlers_ = function() { + var self = this; + + // Score cells. + document.querySelectorAll('td.single-score-cell').forEach(function(element) { + element.onclick = function() { + self.selectedScoreChange(this); + } + }); + + // Copy anchor URLs icons. + if (document.queryCommandSupported('copy')) { + document.querySelectorAll('td.single-score-cell button').forEach( + function(element) { + element.onclick = function() { + // Find the text area in the dialog. + var textArea = element.closest('dialog').querySelector( + 'textarea.url-copy'); + + // Copy. + textArea.value = self.baseUrl_ + '#' + element.getAttribute( + 'data-anchor'); + textArea.select(); + try { + if (!document.execCommand('copy')) + throw 'Copy returned false'; + self.showNotification_('Experiment URL copied.'); + } catch (e) { + self.showNotification_('Cannot copy experiment URL :('); + console.error(e); + } + } + }); + } else { + self.showNotification_( + 'The copy command is disabled. URL copy is not enabled.'); + } + + // Audio inspector buttons. + this.audioInspector_.querySelectorAll('button').forEach(function(element) { + var target = element.querySelector('input[type=hidden]'); + if (target == null) { return; } + element.onclick = function() { + if (target.value == '__stop__') { + self.stopAudio(); + } else { + self.playAudio(target.value); + } + }; + }); + + // Dialog close handlers. + var dialogs = document.querySelectorAll('dialog').forEach(function(element) { + element.onclose = function() { + self.stopAudio(); + } + }); + + // Keyboard shortcuts. + window.onkeyup = function(e) { + var key = e.keyCode ? e.keyCode : e.which; + switch (key) { + case 49: // 1. 
+ self.playAudio('capture_filepath'); + break; + case 50: // 2. + self.playAudio('apm_output_filepath'); + break; + case 51: // 3. + self.playAudio('apm_reference_filepath'); + break; + case 83: // S. + case 115: // s. + self.stopAudio(); + break; + } + }; + + // Hash change. + window.onhashchange = function(e) { + self.parseWindowAnchor(); + } +}; diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/signal_processing.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/signal_processing.py new file mode 100644 index 0000000..fd731fd --- /dev/null +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/signal_processing.py @@ -0,0 +1,356 @@ +# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +"""Signal processing utility module. +""" + +import array +import logging +import os +import sys +import enum + +try: + import numpy as np +except ImportError: + logging.critical('Cannot import the third-party Python package numpy') + sys.exit(1) + +try: + import pydub + import pydub.generators +except ImportError: + logging.critical('Cannot import the third-party Python package pydub') + sys.exit(1) + +try: + import scipy.signal + import scipy.fftpack +except ImportError: + logging.critical('Cannot import the third-party Python package scipy') + sys.exit(1) + +from . import exceptions + + +class SignalProcessingUtils(object): + """Collection of signal processing utilities. 
class SignalProcessingUtils(object):
  """Collection of signal processing utilities.
  """

  @enum.unique
  class MixPadding(enum.Enum):
    # Padding strategy applied to |noise| by MixSignals() when |signal| is
    # longer than |noise|.
    NO_PADDING = 0
    ZERO_PADDING = 1
    LOOP = 2

  def __init__(self):
    pass

  @classmethod
  def LoadWav(cls, filepath, channels=1):
    """Loads wav file.

    Args:
      filepath: path to the wav audio track file to load.
      channels: number of channels (downmixing to mono by default).

    Returns:
      AudioSegment instance.

    Raises:
      exceptions.FileNotFoundError: if |filepath| does not exist.
    """
    if not os.path.exists(filepath):
      logging.error('cannot find the <%s> audio track file', filepath)
      raise exceptions.FileNotFoundError()
    return pydub.AudioSegment.from_file(
        filepath, format='wav', channels=channels)

  @classmethod
  def SaveWav(cls, output_filepath, signal):
    """Saves wav file.

    Args:
      output_filepath: path to the wav audio track file to save.
      signal: AudioSegment instance.
    """
    return signal.export(output_filepath, format='wav')

  @classmethod
  def CountSamples(cls, signal):
    """Number of samples per channel.

    Args:
      signal: AudioSegment instance.

    Returns:
      An integer.
    """
    number_of_samples = len(signal.get_array_of_samples())
    assert signal.channels > 0
    assert number_of_samples % signal.channels == 0
    # Integer division: with `/` Python 3 would return a float, breaking the
    # documented contract of returning an integer.
    return number_of_samples // signal.channels

  @classmethod
  def GenerateSilence(cls, duration=1000, sample_rate=48000):
    """Generates silence.

    This method can also be used to create a template AudioSegment instance.
    A template can then be used with other Generate*() methods accepting an
    AudioSegment instance as argument.

    Args:
      duration: duration in ms.
      sample_rate: sample rate.

    Returns:
      AudioSegment instance.
    """
    return pydub.AudioSegment.silent(duration, sample_rate)

  @classmethod
  def GeneratePureTone(cls, template, frequency=440.0):
    """Generates a pure tone.

    The pure tone is generated with the same duration and in the same format of
    the given template signal.

    Args:
      template: AudioSegment instance.
      frequency: Frequency of the pure tone in Hz.

    Returns:
      AudioSegment instance.

    Raises:
      exceptions.SignalProcessingException: if |frequency| is above the
        Nyquist frequency of |template|.
    """
    if frequency > template.frame_rate >> 1:
      raise exceptions.SignalProcessingException('Invalid frequency')

    generator = pydub.generators.Sine(
        sample_rate=template.frame_rate,
        bit_depth=template.sample_width * 8,
        freq=frequency)

    return generator.to_audio_segment(
        duration=len(template),
        volume=0.0)

  @classmethod
  def GenerateWhiteNoise(cls, template):
    """Generates white noise.

    The white noise is generated with the same duration and in the same format
    of the given template signal.

    Args:
      template: AudioSegment instance.

    Returns:
      AudioSegment instance.
    """
    generator = pydub.generators.WhiteNoise(
        sample_rate=template.frame_rate,
        bit_depth=template.sample_width * 8)
    return generator.to_audio_segment(
        duration=len(template),
        volume=0.0)

  @classmethod
  def AudioSegmentToRawData(cls, signal):
    """Extracts the samples of |signal| as a numpy int16 array.

    Args:
      signal: AudioSegment instance (16 bit samples expected).

    Returns:
      A numpy array with dtype np.int16.

    Raises:
      exceptions.SignalProcessingException: if the sample type is not 16 bit
        signed ('h').
    """
    samples = signal.get_array_of_samples()
    if samples.typecode != 'h':
      raise exceptions.SignalProcessingException('Unsupported samples type')
    # Reuse |samples| instead of extracting the array a second time.
    return np.array(samples, np.int16)

  @classmethod
  def Fft(cls, signal, normalize=True):
    """Computes the one-sided FFT of a mono signal.

    Args:
      signal: AudioSegment instance (mono).
      normalize: when True, the samples are scaled into [-1.0, 1.0] before
        the transform.

    Returns:
      A numpy array with the first half of the (complex) FFT coefficients.
    """
    if signal.channels != 1:
      raise NotImplementedError('multiple-channel FFT not implemented')
    x = cls.AudioSegmentToRawData(signal).astype(np.float32)
    if normalize:
      # NOTE(review): this scales by |np.max(x)| rather than the peak absolute
      # value (np.max(np.abs(x))); behavior kept as-is — confirm intent.
      x /= max(abs(np.max(x)), 1.0)
    y = scipy.fftpack.fft(x)
    # Integer division: a float slice index raises TypeError in Python 3.
    return y[:len(y) // 2]

  @classmethod
  def DetectHardClipping(cls, signal, threshold=2):
    """Detects hard clipping.

    Hard clipping is simply detected by counting samples that touch either the
    lower or upper bound too many times in a row (according to |threshold|).
    The presence of a single sequence of samples meeting such property is
    enough to label the signal as hard clipped.

    Args:
      signal: AudioSegment instance.
      threshold: minimum number of samples at full-scale in a row.

    Returns:
      True if hard clipping is detected, False otherwise.
    """
    if signal.channels != 1:
      raise NotImplementedError('multiple-channel clipping not implemented')
    if signal.sample_width != 2:  # Note that signal.sample_width is in bytes.
      raise exceptions.SignalProcessingException(
          'hard-clipping detection only supported for 16 bit samples')
    samples = cls.AudioSegmentToRawData(signal)

    # Detect adjacent clipped samples.
    samples_type_info = np.iinfo(samples.dtype)
    mask_min = samples == samples_type_info.min
    mask_max = samples == samples_type_info.max

    def HasLongSequence(vector, min_length=threshold):
      """Returns True if there are one or more long sequences of True flags."""
      seq_length = 0
      for b in vector:
        seq_length = seq_length + 1 if b else 0
        if seq_length >= min_length:
          return True
      return False

    return HasLongSequence(mask_min) or HasLongSequence(mask_max)

  @classmethod
  def ApplyImpulseResponse(cls, signal, impulse_response):
    """Applies an impulse response to a signal.

    Args:
      signal: AudioSegment instance.
      impulse_response: list or numpy vector of float values.

    Returns:
      AudioSegment instance.
    """
    # Get samples.
    assert signal.channels == 1, (
        'multiple-channel recordings not supported')
    samples = signal.get_array_of_samples()

    # Convolve.
    logging.info('applying %d order impulse response to a signal lasting %d ms',
                 len(impulse_response), len(signal))
    convolved_samples = scipy.signal.fftconvolve(
        in1=samples,
        in2=impulse_response,
        mode='full').astype(np.int16)
    logging.info('convolution computed')

    # Cast.
    convolved_samples = array.array(signal.array_type, convolved_samples)

    # Verify.
    logging.debug('signal length: %d samples', len(samples))
    logging.debug('convolved signal length: %d samples', len(convolved_samples))
    assert len(convolved_samples) > len(samples)

    # Generate convolved signal AudioSegment instance.
    convolved_signal = pydub.AudioSegment(
        data=convolved_samples,
        metadata={
            'sample_width': signal.sample_width,
            'frame_rate': signal.frame_rate,
            'frame_width': signal.frame_width,
            'channels': signal.channels,
        })
    assert len(convolved_signal) > len(signal)

    return convolved_signal

  @classmethod
  def Normalize(cls, signal):
    """Normalizes a signal.

    Args:
      signal: AudioSegment instance.

    Returns:
      An AudioSegment instance.
    """
    return signal.apply_gain(-signal.max_dBFS)

  @classmethod
  def Copy(cls, signal):
    """Makes a copy of a signal.

    Args:
      signal: AudioSegment instance.

    Returns:
      An AudioSegment instance.
    """
    return pydub.AudioSegment(
        data=signal.get_array_of_samples(),
        metadata={
            'sample_width': signal.sample_width,
            'frame_rate': signal.frame_rate,
            'frame_width': signal.frame_width,
            'channels': signal.channels,
        })

  @classmethod
  def MixSignals(cls, signal, noise, target_snr=0.0,
                 pad_noise=MixPadding.NO_PADDING):
    """Mixes |signal| and |noise| with a target SNR.

    Mix |signal| and |noise| with a desired SNR by scaling |noise|.
    If the target SNR is +/- infinite, a copy of signal/noise is returned.
    If |signal| is shorter than |noise|, the length of the mix equals that of
    |signal|. Otherwise, the mix length depends on whether padding is applied.
    When padding is not applied, that is |pad_noise| is set to NO_PADDING
    (default), the mix length equals that of |noise| - i.e., |signal| is
    truncated. Otherwise, |noise| is extended and the resulting mix has the
    same length of |signal|.

    Args:
      signal: AudioSegment instance (signal).
      noise: AudioSegment instance (noise).
      target_snr: float, numpy.Inf or -numpy.Inf (dB).
      pad_noise: SignalProcessingUtils.MixPadding, default: NO_PADDING.

    Returns:
      An AudioSegment instance.

    Raises:
      exceptions.SignalProcessingException: if either input has -Inf power or
        the padding type is invalid.
    """
    # Handle infinite target SNR.
    if target_snr == -np.Inf:
      # Return a copy of noise.
      logging.warning('SNR = -Inf, returning noise')
      return cls.Copy(noise)
    elif target_snr == np.Inf:
      # Return a copy of signal.
      logging.warning('SNR = +Inf, returning signal')
      return cls.Copy(signal)

    # Check signal and noise power.
    signal_power = float(signal.dBFS)
    noise_power = float(noise.dBFS)
    if signal_power == -np.Inf:
      logging.error('signal has -Inf power, cannot mix')
      raise exceptions.SignalProcessingException(
          'cannot mix a signal with -Inf power')
    if noise_power == -np.Inf:
      logging.error('noise has -Inf power, cannot mix')
      raise exceptions.SignalProcessingException(
          'cannot mix a signal with -Inf power')

    # Mix.
    gain_db = signal_power - noise_power - target_snr
    signal_duration = len(signal)
    noise_duration = len(noise)
    if signal_duration <= noise_duration:
      # Ignore |pad_noise|, |noise| is truncated if longer that |signal|, the
      # mix will have the same length of |signal|.
      return signal.overlay(noise.apply_gain(gain_db))
    elif pad_noise == cls.MixPadding.NO_PADDING:
      # |signal| is longer than |noise|, but no padding is applied to |noise|.
      # Truncate |signal|.
      return noise.overlay(signal, gain_during_overlay=gain_db)
    elif pad_noise == cls.MixPadding.ZERO_PADDING:
      # TODO(alessiob): Check that this works as expected.
      return signal.overlay(noise.apply_gain(gain_db))
    elif pad_noise == cls.MixPadding.LOOP:
      # |signal| is longer than |noise|, extend |noise| by looping.
      return signal.overlay(noise.apply_gain(gain_db), loop=True)
    else:
      raise exceptions.SignalProcessingException('invalid padding type')
class TestSignalProcessing(unittest.TestCase):
  """Unit tests for the signal_processing module.
  """

  def testMixSignals(self):
    # Generate a template signal with which white noise can be generated.
    silence = pydub.AudioSegment.silent(duration=1000, frame_rate=48000)

    # Generate two distinct AudioSegment instances with 1 second of white
    # noise.
    signal = signal_processing.SignalProcessingUtils.GenerateWhiteNoise(
        silence)
    noise = signal_processing.SignalProcessingUtils.GenerateWhiteNoise(
        silence)

    # Extract samples.
    signal_samples = signal.get_array_of_samples()
    noise_samples = noise.get_array_of_samples()

    # Test target SNR -Inf (noise expected).
    mix_neg_inf = signal_processing.SignalProcessingUtils.MixSignals(
        signal, noise, -np.Inf)
    # NOTE: assertEqual, not assertTrue - with assertTrue the second argument
    # is the failure message, so the check would always pass.
    self.assertEqual(len(noise), len(mix_neg_inf))  # Check duration.
    mix_neg_inf_samples = mix_neg_inf.get_array_of_samples()
    self.assertTrue(  # Check samples.
        all([x == y for x, y in zip(noise_samples, mix_neg_inf_samples)]))

    # Test target SNR 0.0 (different data expected).
    mix_0 = signal_processing.SignalProcessingUtils.MixSignals(
        signal, noise, 0.0)
    self.assertEqual(len(signal), len(mix_0))  # Check duration.
    self.assertEqual(len(noise), len(mix_0))
    mix_0_samples = mix_0.get_array_of_samples()
    self.assertTrue(
        any([x != y for x, y in zip(signal_samples, mix_0_samples)]))
    self.assertTrue(
        any([x != y for x, y in zip(noise_samples, mix_0_samples)]))

    # Test target SNR +Inf (signal expected).
    mix_pos_inf = signal_processing.SignalProcessingUtils.MixSignals(
        signal, noise, np.Inf)
    self.assertEqual(len(signal), len(mix_pos_inf))  # Check duration.
    mix_pos_inf_samples = mix_pos_inf.get_array_of_samples()
    self.assertTrue(  # Check samples.
        all([x == y for x, y in zip(signal_samples, mix_pos_inf_samples)]))

  def testMixSignalsMinInfPower(self):
    # Mixing with a silent (-Inf power) input must raise on either side.
    silence = pydub.AudioSegment.silent(duration=1000, frame_rate=48000)
    signal = signal_processing.SignalProcessingUtils.GenerateWhiteNoise(
        silence)

    with self.assertRaises(exceptions.SignalProcessingException):
      _ = signal_processing.SignalProcessingUtils.MixSignals(
          signal, silence, 0.0)

    with self.assertRaises(exceptions.SignalProcessingException):
      _ = signal_processing.SignalProcessingUtils.MixSignals(
          silence, signal, 0.0)

  def testMixSignalNoiseDifferentLengths(self):
    # Test signals.
    shorter = signal_processing.SignalProcessingUtils.GenerateWhiteNoise(
        pydub.AudioSegment.silent(duration=1000, frame_rate=8000))
    longer = signal_processing.SignalProcessingUtils.GenerateWhiteNoise(
        pydub.AudioSegment.silent(duration=2000, frame_rate=8000))

    # When the signal is shorter than the noise, the mix length always equals
    # that of the signal regardless of whether padding is applied.
    # No noise padding, length of signal less than that of noise.
    mix = signal_processing.SignalProcessingUtils.MixSignals(
        signal=shorter,
        noise=longer,
        pad_noise=signal_processing.SignalProcessingUtils.MixPadding.NO_PADDING)
    self.assertEqual(len(shorter), len(mix))
    # With noise padding, length of signal less than that of noise.
    mix = signal_processing.SignalProcessingUtils.MixSignals(
        signal=shorter,
        noise=longer,
        pad_noise=signal_processing.SignalProcessingUtils.MixPadding.ZERO_PADDING)
    self.assertEqual(len(shorter), len(mix))

    # When the signal is longer than the noise, the mix length depends on
    # whether padding is applied.
    # No noise padding, length of signal greater than that of noise.
    mix = signal_processing.SignalProcessingUtils.MixSignals(
        signal=longer,
        noise=shorter,
        pad_noise=signal_processing.SignalProcessingUtils.MixPadding.NO_PADDING)
    self.assertEqual(len(shorter), len(mix))
    # With noise padding, length of signal greater than that of noise.
    mix = signal_processing.SignalProcessingUtils.MixSignals(
        signal=longer,
        noise=shorter,
        pad_noise=signal_processing.SignalProcessingUtils.MixPadding.ZERO_PADDING)
    self.assertEqual(len(longer), len(mix))

  def testMixSignalNoisePaddingTypes(self):
    # Test signals.
    shorter = signal_processing.SignalProcessingUtils.GenerateWhiteNoise(
        pydub.AudioSegment.silent(duration=1000, frame_rate=8000))
    longer = signal_processing.SignalProcessingUtils.GeneratePureTone(
        pydub.AudioSegment.silent(duration=2000, frame_rate=8000), 440.0)

    # Zero padding: expect pure tone only in 1-2s.
    mix_zero_pad = signal_processing.SignalProcessingUtils.MixSignals(
        signal=longer,
        noise=shorter,
        target_snr=-6,
        pad_noise=signal_processing.SignalProcessingUtils.MixPadding.ZERO_PADDING)

    # Loop: expect pure tone plus noise in 1-2s.
    mix_loop = signal_processing.SignalProcessingUtils.MixSignals(
        signal=longer,
        noise=shorter,
        target_snr=-6,
        pad_noise=signal_processing.SignalProcessingUtils.MixPadding.LOOP)

    def Energy(signal):
      samples = signal_processing.SignalProcessingUtils.AudioSegmentToRawData(
          signal).astype(np.float32)
      return np.sum(samples * samples)

    # The last second of the zero-padded mix contains the tone only, hence its
    # energy must be strictly lower than that of the looped mix.
    e_mix_zero_pad = Energy(mix_zero_pad[-1000:])
    e_mix_loop = Energy(mix_loop[-1000:])
    self.assertLess(0, e_mix_zero_pad)
    self.assertLess(e_mix_zero_pad, e_mix_loop)

  def testMixSignalSnr(self):
    # Test signals.
    tone_low = signal_processing.SignalProcessingUtils.GeneratePureTone(
        pydub.AudioSegment.silent(duration=64, frame_rate=8000), 250.0)
    tone_high = signal_processing.SignalProcessingUtils.GeneratePureTone(
        pydub.AudioSegment.silent(duration=64, frame_rate=8000), 3000.0)

    def ToneAmplitudes(mix):
      """Returns the amplitude of the coefficients #16 and #192, which
         correspond to the tones at 250 and 3k Hz respectively."""
      mix_fft = np.absolute(signal_processing.SignalProcessingUtils.Fft(mix))
      return mix_fft[16], mix_fft[192]

    # Negative target SNR: the noise tone must dominate.
    mix = signal_processing.SignalProcessingUtils.MixSignals(
        signal=tone_low,
        noise=tone_high,
        target_snr=-6)
    ampl_low, ampl_high = ToneAmplitudes(mix)
    self.assertLess(ampl_low, ampl_high)

    mix = signal_processing.SignalProcessingUtils.MixSignals(
        signal=tone_high,
        noise=tone_low,
        target_snr=-6)
    ampl_low, ampl_high = ToneAmplitudes(mix)
    self.assertLess(ampl_high, ampl_low)

    # Positive target SNR: the signal tone must dominate.
    mix = signal_processing.SignalProcessingUtils.MixSignals(
        signal=tone_low,
        noise=tone_high,
        target_snr=6)
    ampl_low, ampl_high = ToneAmplitudes(mix)
    self.assertLess(ampl_high, ampl_low)

    mix = signal_processing.SignalProcessingUtils.MixSignals(
        signal=tone_high,
        noise=tone_low,
        target_snr=6)
    ampl_low, ampl_high = ToneAmplitudes(mix)
    self.assertLess(ampl_low, ampl_high)
class ApmModuleSimulator(object):
  """Audio processing module (APM) simulator class.

  Iterates over combinations of APM configurations, input audio tracks and
  test data generators, runs the APM simulator binary on each combination and
  evaluates the output with the configured score workers.
  """

  _TEST_DATA_GENERATOR_CLASSES = (
      test_data_generation.TestDataGenerator.REGISTERED_CLASSES)
  _EVAL_SCORE_WORKER_CLASSES = eval_scores.EvaluationScore.REGISTERED_CLASSES

  # Prefixes used to build the output directory tree.
  _PREFIX_APM_CONFIG = 'apmcfg-'
  _PREFIX_CAPTURE = 'capture-'
  _PREFIX_RENDER = 'render-'
  _PREFIX_ECHO_SIMULATOR = 'echosim-'
  _PREFIX_TEST_DATA_GEN = 'datagen-'
  _PREFIX_TEST_DATA_GEN_PARAMS = 'datagen_params-'
  _PREFIX_SCORE = 'score-'

  def __init__(self, test_data_generator_factory, evaluation_score_factory,
               ap_wrapper, evaluator, external_vads=None):
    if external_vads is None:
      external_vads = {}
    self._test_data_generator_factory = test_data_generator_factory
    self._evaluation_score_factory = evaluation_score_factory
    self._audioproc_wrapper = ap_wrapper
    self._evaluator = evaluator
    self._annotator = annotations.AudioAnnotationsExtractor(
        annotations.AudioAnnotationsExtractor.VadType.ENERGY_THRESHOLD |
        annotations.AudioAnnotationsExtractor.VadType.WEBRTC_COMMON_AUDIO |
        annotations.AudioAnnotationsExtractor.VadType.WEBRTC_APM,
        external_vads
    )

    # Init.
    self._test_data_generator_factory.SetOutputDirectoryPrefix(
        self._PREFIX_TEST_DATA_GEN_PARAMS)
    self._evaluation_score_factory.SetScoreFilenamePrefix(
        self._PREFIX_SCORE)

    # Properties for each run.
    self._base_output_path = None
    self._output_cache_path = None
    self._test_data_generators = None
    self._evaluation_score_workers = None
    self._config_filepaths = None
    self._capture_input_filepaths = None
    self._render_input_filepaths = None
    self._echo_path_simulator_class = None

  @classmethod
  def GetPrefixApmConfig(cls):
    return cls._PREFIX_APM_CONFIG

  @classmethod
  def GetPrefixCapture(cls):
    return cls._PREFIX_CAPTURE

  @classmethod
  def GetPrefixRender(cls):
    return cls._PREFIX_RENDER

  @classmethod
  def GetPrefixEchoSimulator(cls):
    return cls._PREFIX_ECHO_SIMULATOR

  @classmethod
  def GetPrefixTestDataGenerator(cls):
    return cls._PREFIX_TEST_DATA_GEN

  @classmethod
  def GetPrefixTestDataGeneratorParameters(cls):
    return cls._PREFIX_TEST_DATA_GEN_PARAMS

  @classmethod
  def GetPrefixScore(cls):
    return cls._PREFIX_SCORE

  def Run(self, config_filepaths, capture_input_filepaths,
          test_data_generator_names, eval_score_names, output_dir,
          render_input_filepaths=None, echo_path_simulator_name=(
              echo_path_simulation.NoEchoPathSimulator.NAME)):
    """Runs the APM simulation.

    Initializes paths and required instances, then runs all the simulations.
    The render input can be optionally added. If added, the number of capture
    input audio tracks and the number of render input audio tracks have to be
    equal. The two lists are used to form pairs of capture and render input.

    Args:
      config_filepaths: set of APM configuration files to test.
      capture_input_filepaths: set of capture input audio track files to test.
      test_data_generator_names: set of test data generator names to test.
      eval_score_names: set of evaluation score names to test.
      output_dir: base path to the output directory for wav files and outcomes.
      render_input_filepaths: set of render input audio track files to test.
      echo_path_simulator_name: name of the echo path simulator to use when
                                render input is provided.
    """
    assert render_input_filepaths is None or (
        len(capture_input_filepaths) == len(render_input_filepaths)), (
            'render input set size not matching input set size')
    assert render_input_filepaths is None or echo_path_simulator_name in (
        echo_path_simulation.EchoPathSimulator.REGISTERED_CLASSES), (
            'invalid echo path simulator')
    self._base_output_path = os.path.abspath(output_dir)

    # Output path used to cache the data shared across simulations.
    self._output_cache_path = os.path.join(self._base_output_path, '_cache')

    # Instance test data generators.
    self._test_data_generators = [self._test_data_generator_factory.GetInstance(
        test_data_generators_class=(
            self._TEST_DATA_GENERATOR_CLASSES[name])) for name in (
                test_data_generator_names)]

    # Instance evaluation score workers.
    self._evaluation_score_workers = [
        self._evaluation_score_factory.GetInstance(
            evaluation_score_class=self._EVAL_SCORE_WORKER_CLASSES[name]) for (
                name) in eval_score_names]

    # Set APM configuration file paths.
    self._config_filepaths = self._CreatePathsCollection(config_filepaths)

    # Set probing signal file paths.
    if render_input_filepaths is None:
      # Capture input only.
      self._capture_input_filepaths = self._CreatePathsCollection(
          capture_input_filepaths)
      self._render_input_filepaths = None
    else:
      # Set both capture and render input signals.
      self._SetTestInputSignalFilePaths(
          capture_input_filepaths, render_input_filepaths)

    # Set the echo path simulator class.
    self._echo_path_simulator_class = (
        echo_path_simulation.EchoPathSimulator.REGISTERED_CLASSES[
            echo_path_simulator_name])

    self._SimulateAll()

  def _SimulateAll(self):
    """Runs all the simulations.

    Iterates over the combinations of APM configurations, probing signals, and
    test data generators. This method is mainly responsible for the creation
    of the cache and output directories required in order to call _Simulate().
    """
    without_render_input = self._render_input_filepaths is None

    # Try different APM config files.
    for config_name in self._config_filepaths:
      config_filepath = self._config_filepaths[config_name]

      # Try different capture-render pairs.
      for capture_input_name in self._capture_input_filepaths:
        # Output path for the capture signal annotations.
        capture_annotations_cache_path = os.path.join(
            self._output_cache_path,
            self._PREFIX_CAPTURE + capture_input_name)
        data_access.MakeDirectory(capture_annotations_cache_path)

        # Capture.
        capture_input_filepath = self._capture_input_filepaths[
            capture_input_name]
        if not os.path.exists(capture_input_filepath):
          # If the input signal file does not exist, try to create using the
          # available input signal creators.
          self._CreateInputSignal(capture_input_filepath)
        assert os.path.exists(capture_input_filepath)
        self._ExtractCaptureAnnotations(
            capture_input_filepath, capture_annotations_cache_path)

        # Render and simulated echo path (optional).
        render_input_filepath = None if without_render_input else (
            self._render_input_filepaths[capture_input_name])
        render_input_name = '(none)' if without_render_input else (
            self._ExtractFileName(render_input_filepath))
        echo_path_simulator = (
            echo_path_simulation_factory.EchoPathSimulatorFactory.GetInstance(
                self._echo_path_simulator_class, render_input_filepath))

        # Try different test data generators.
        for test_data_generators in self._test_data_generators:
          # NOTE: trailing space added in the first fragment; the original
          # adjacent-string literal rendered "<%s>,test data" without a space.
          logging.info('APM config preset: <%s>, capture: <%s>, render: <%s>, '
                       'test data generator: <%s>, echo simulator: <%s>',
                       config_name, capture_input_name, render_input_name,
                       test_data_generators.NAME, echo_path_simulator.NAME)

          # Output path for the generated test data.
          test_data_cache_path = os.path.join(
              capture_annotations_cache_path,
              self._PREFIX_TEST_DATA_GEN + test_data_generators.NAME)
          data_access.MakeDirectory(test_data_cache_path)
          logging.debug('test data cache path: <%s>', test_data_cache_path)

          # Output path for the echo simulator and APM input mixer output.
          echo_test_data_cache_path = os.path.join(
              test_data_cache_path, 'echosim-{}'.format(
                  echo_path_simulator.NAME))
          data_access.MakeDirectory(echo_test_data_cache_path)
          logging.debug('echo test data cache path: <%s>',
                        echo_test_data_cache_path)

          # Full output path.
          output_path = os.path.join(
              self._base_output_path,
              self._PREFIX_APM_CONFIG + config_name,
              self._PREFIX_CAPTURE + capture_input_name,
              self._PREFIX_RENDER + render_input_name,
              self._PREFIX_ECHO_SIMULATOR + echo_path_simulator.NAME,
              self._PREFIX_TEST_DATA_GEN + test_data_generators.NAME)
          data_access.MakeDirectory(output_path)
          logging.debug('output path: <%s>', output_path)

          self._Simulate(test_data_generators, capture_input_filepath,
                         render_input_filepath, test_data_cache_path,
                         echo_test_data_cache_path, output_path,
                         config_filepath, echo_path_simulator)

  @staticmethod
  def _CreateInputSignal(input_signal_filepath):
    """Creates a missing input signal file.

    The file name is parsed to extract input signal creator and params. If a
    creator is matched and the parameters are valid, a new signal is generated
    and written in |input_signal_filepath|.

    Args:
      input_signal_filepath: Path to the input signal audio file to write.

    Raises:
      InputSignalCreatorException
    """
    filename = os.path.splitext(os.path.split(input_signal_filepath)[-1])[0]
    filename_parts = filename.split('-')

    if len(filename_parts) < 2:
      raise exceptions.InputSignalCreatorException(
          'Cannot parse input signal file name')

    signal, metadata = input_signal_creator.InputSignalCreator.Create(
        filename_parts[0], filename_parts[1].split('_'))

    signal_processing.SignalProcessingUtils.SaveWav(
        input_signal_filepath, signal)
    data_access.Metadata.SaveFileMetadata(input_signal_filepath, metadata)

  def _ExtractCaptureAnnotations(self, input_filepath, output_path,
                                 annotation_name=""):
    # Extracts VAD/level annotations for |input_filepath| and saves them into
    # |output_path| using |annotation_name| as file name prefix.
    self._annotator.Extract(input_filepath)
    self._annotator.Save(output_path, annotation_name)

  def _Simulate(self, test_data_generators, clean_capture_input_filepath,
                render_input_filepath, test_data_cache_path,
                echo_test_data_cache_path, output_path, config_filepath,
                echo_path_simulator):
    """Runs a single set of simulation.

    Simulates a given combination of APM configuration, probing signal, and
    test data generator. It iterates over the test data generator
    internal configurations.

    Args:
      test_data_generators: TestDataGenerator instance.
      clean_capture_input_filepath: capture input audio track file to be
                                    processed by a test data generator and
                                    not affected by echo.
      render_input_filepath: render input audio track file to test.
      test_data_cache_path: path for the generated test audio track files.
      echo_test_data_cache_path: path for the echo simulator.
      output_path: base output path for the test data generator.
      config_filepath: APM configuration file to test.
      echo_path_simulator: EchoPathSimulator instance.
    """
    # Generate pairs of noisy input and reference signal files.
    test_data_generators.Generate(
        input_signal_filepath=clean_capture_input_filepath,
        test_data_cache_path=test_data_cache_path,
        base_output_path=output_path)

    # Extract metadata linked to the clean input file (if any).
    apm_input_metadata = None
    try:
      apm_input_metadata = data_access.Metadata.LoadFileMetadata(
          clean_capture_input_filepath)
    except IOError:
      # Missing metadata is not an error: fall back to an empty dict.
      apm_input_metadata = {}
    apm_input_metadata['test_data_gen_name'] = test_data_generators.NAME
    apm_input_metadata['test_data_gen_config'] = None

    # For each test data pair, simulate a call and evaluate.
    for config_name in test_data_generators.config_names:
      logging.info(' - test data generator config: <%s>', config_name)
      apm_input_metadata['test_data_gen_config'] = config_name

      # Paths to the test data generator output.
      # Note that the reference signal does not depend on the render input
      # which is optional.
      noisy_capture_input_filepath = (
          test_data_generators.noisy_signal_filepaths[config_name])
      reference_signal_filepath = (
          test_data_generators.reference_signal_filepaths[config_name])

      # Output path for the evaluation (e.g., APM output file).
      evaluation_output_path = test_data_generators.apm_output_paths[
          config_name]

      # Paths to the APM input signals.
      echo_path_filepath = echo_path_simulator.Simulate(
          echo_test_data_cache_path)
      apm_input_filepath = input_mixer.ApmInputMixer.Mix(
          echo_test_data_cache_path, noisy_capture_input_filepath,
          echo_path_filepath)

      # Extract annotations for the APM input mix.
      apm_input_basepath, apm_input_filename = os.path.split(
          apm_input_filepath)
      self._ExtractCaptureAnnotations(
          apm_input_filepath, apm_input_basepath,
          os.path.splitext(apm_input_filename)[0] + '-')

      # Simulate a call using APM.
      self._audioproc_wrapper.Run(
          config_filepath=config_filepath,
          capture_input_filepath=apm_input_filepath,
          render_input_filepath=render_input_filepath,
          output_path=evaluation_output_path)

      try:
        # Evaluate.
        self._evaluator.Run(
            evaluation_score_workers=self._evaluation_score_workers,
            apm_input_metadata=apm_input_metadata,
            apm_output_filepath=self._audioproc_wrapper.output_filepath,
            reference_input_filepath=reference_signal_filepath,
            render_input_filepath=render_input_filepath,
            output_path=evaluation_output_path,
        )

        # Save simulation metadata.
        data_access.Metadata.SaveAudioTestDataPaths(
            output_path=evaluation_output_path,
            clean_capture_input_filepath=clean_capture_input_filepath,
            echo_free_capture_filepath=noisy_capture_input_filepath,
            echo_filepath=echo_path_filepath,
            render_filepath=render_input_filepath,
            capture_filepath=apm_input_filepath,
            apm_output_filepath=self._audioproc_wrapper.output_filepath,
            apm_reference_filepath=reference_signal_filepath,
            apm_config_filepath=config_filepath,
        )
      except exceptions.EvaluationScoreException as e:
        # `e.message` does not exist in Python 3 (would raise AttributeError);
        # use str(e) instead.
        logging.warning('the evaluation failed: %s', str(e))
        continue

  def _SetTestInputSignalFilePaths(self, capture_input_filepaths,
                                   render_input_filepaths):
    """Sets input and render input file paths collections.

    Pairs the input and render input files by storing the file paths into two
    collections. The key is the file name of the input file.

    Args:
      capture_input_filepaths: list of file paths.
      render_input_filepaths: list of file paths.
    """
    self._capture_input_filepaths = {}
    self._render_input_filepaths = {}
    assert len(capture_input_filepaths) == len(render_input_filepaths)
    for capture_input_filepath, render_input_filepath in zip(
        capture_input_filepaths, render_input_filepaths):
      name = self._ExtractFileName(capture_input_filepath)
      self._capture_input_filepaths[name] = os.path.abspath(
          capture_input_filepath)
      self._render_input_filepaths[name] = os.path.abspath(
          render_input_filepath)

  @classmethod
  def _CreatePathsCollection(cls, filepaths):
    """Creates a collection of file paths.

    Given a list of file paths, makes a collection with one item for each file
    path. The value is absolute path, the key is the file name without
    extension.

    Args:
      filepaths: list of file paths.

    Returns:
      A dict.
    """
    filepaths_collection = {}
    for filepath in filepaths:
      name = cls._ExtractFileName(filepath)
      filepaths_collection[name] = os.path.abspath(filepath)
    return filepaths_collection

  @classmethod
  def _ExtractFileName(cls, filepath):
    # File name without directory and without extension.
    return os.path.splitext(os.path.split(filepath)[-1])[0]
+""" + +import logging +import os +import shutil +import tempfile +import unittest + +import mock +import pydub + +from . import audioproc_wrapper +from . import eval_scores_factory +from . import evaluation +from . import external_vad +from . import signal_processing +from . import simulation +from . import test_data_generation_factory + + +class TestApmModuleSimulator(unittest.TestCase): + """Unit tests for the ApmModuleSimulator class. + """ + + def setUp(self): + """Create temporary folders and fake audio track.""" + self._output_path = tempfile.mkdtemp() + self._tmp_path = tempfile.mkdtemp() + + silence = pydub.AudioSegment.silent(duration=1000, frame_rate=48000) + fake_signal = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( + silence) + self._fake_audio_track_path = os.path.join(self._output_path, 'fake.wav') + signal_processing.SignalProcessingUtils.SaveWav( + self._fake_audio_track_path, fake_signal) + + def tearDown(self): + """Recursively delete temporary folders.""" + shutil.rmtree(self._output_path) + shutil.rmtree(self._tmp_path) + + def testSimulation(self): + # Instance dependencies to mock and inject. + ap_wrapper = audioproc_wrapper.AudioProcWrapper( + audioproc_wrapper.AudioProcWrapper.DEFAULT_APM_SIMULATOR_BIN_PATH) + evaluator = evaluation.ApmModuleEvaluator() + ap_wrapper.Run = mock.MagicMock(name='Run') + evaluator.Run = mock.MagicMock(name='Run') + + # Instance non-mocked dependencies. + test_data_generator_factory = ( + test_data_generation_factory.TestDataGeneratorFactory( + aechen_ir_database_path='', + noise_tracks_path='', + copy_with_identity=False)) + evaluation_score_factory = eval_scores_factory.EvaluationScoreWorkerFactory( + polqa_tool_bin_path=os.path.join( + os.path.dirname(__file__), 'fake_polqa'), + echo_metric_tool_bin_path=None + ) + + # Instance simulator. 
+ simulator = simulation.ApmModuleSimulator( + test_data_generator_factory=test_data_generator_factory, + evaluation_score_factory=evaluation_score_factory, + ap_wrapper=ap_wrapper, + evaluator=evaluator, + external_vads={'fake': external_vad.ExternalVad(os.path.join( + os.path.dirname(__file__), 'fake_external_vad.py'), 'fake')} + ) + + # What to simulate. + config_files = ['apm_configs/default.json'] + input_files = [self._fake_audio_track_path] + test_data_generators = ['identity', 'white_noise'] + eval_scores = ['audio_level_mean', 'polqa'] + + # Run all simulations. + simulator.Run( + config_filepaths=config_files, + capture_input_filepaths=input_files, + test_data_generator_names=test_data_generators, + eval_score_names=eval_scores, + output_dir=self._output_path) + + # Check. + # TODO(alessiob): Once the TestDataGenerator classes can be configured by + # the client code (e.g., number of SNR pairs for the white noise test data + # generator), the exact number of calls to ap_wrapper.Run and evaluator.Run + # is known; use that with assertEqual. + min_number_of_simulations = len(config_files) * len(input_files) * len( + test_data_generators) + self.assertGreaterEqual(len(ap_wrapper.Run.call_args_list), + min_number_of_simulations) + self.assertGreaterEqual(len(evaluator.Run.call_args_list), + min_number_of_simulations) + + def testInputSignalCreation(self): + # Instance simulator. 
+ simulator = simulation.ApmModuleSimulator( + test_data_generator_factory=( + test_data_generation_factory.TestDataGeneratorFactory( + aechen_ir_database_path='', + noise_tracks_path='', + copy_with_identity=False)), + evaluation_score_factory=( + eval_scores_factory.EvaluationScoreWorkerFactory( + polqa_tool_bin_path=os.path.join( + os.path.dirname(__file__), 'fake_polqa'), + echo_metric_tool_bin_path=None + )), + ap_wrapper=audioproc_wrapper.AudioProcWrapper( + audioproc_wrapper.AudioProcWrapper.DEFAULT_APM_SIMULATOR_BIN_PATH), + evaluator=evaluation.ApmModuleEvaluator()) + + # Inexistent input files to be silently created. + input_files = [ + os.path.join(self._tmp_path, 'pure_tone-440_1000.wav'), + os.path.join(self._tmp_path, 'pure_tone-1000_500.wav'), + ] + self.assertFalse(any([os.path.exists(input_file) for input_file in ( + input_files)])) + + # The input files are created during the simulation. + simulator.Run( + config_filepaths=['apm_configs/default.json'], + capture_input_filepaths=input_files, + test_data_generator_names=['identity'], + eval_score_names=['audio_level_peak'], + output_dir=self._output_path) + self.assertTrue(all([os.path.exists(input_file) for input_file in ( + input_files)])) + + def testPureToneGenerationWithTotalHarmonicDistorsion(self): + logging.warning = mock.MagicMock(name='warning') + + # Instance simulator. + simulator = simulation.ApmModuleSimulator( + test_data_generator_factory=( + test_data_generation_factory.TestDataGeneratorFactory( + aechen_ir_database_path='', + noise_tracks_path='', + copy_with_identity=False)), + evaluation_score_factory=( + eval_scores_factory.EvaluationScoreWorkerFactory( + polqa_tool_bin_path=os.path.join( + os.path.dirname(__file__), 'fake_polqa'), + echo_metric_tool_bin_path=None + )), + ap_wrapper=audioproc_wrapper.AudioProcWrapper( + audioproc_wrapper.AudioProcWrapper.DEFAULT_APM_SIMULATOR_BIN_PATH), + evaluator=evaluation.ApmModuleEvaluator()) + + # What to simulate. 
+ config_files = ['apm_configs/default.json'] + input_files = [os.path.join(self._tmp_path, 'pure_tone-440_1000.wav')] + eval_scores = ['thd'] + + # Should work. + simulator.Run( + config_filepaths=config_files, + capture_input_filepaths=input_files, + test_data_generator_names=['identity'], + eval_score_names=eval_scores, + output_dir=self._output_path) + self.assertFalse(logging.warning.called) + + # Warning expected. + simulator.Run( + config_filepaths=config_files, + capture_input_filepaths=input_files, + test_data_generator_names=['white_noise'], # Not allowed with THD. + eval_score_names=eval_scores, + output_dir=self._output_path) + logging.warning.assert_called_with('the evaluation failed: %s', ( + 'The THD score cannot be used with any test data generator other than ' + '"identity"')) + + # # Init. + # generator = test_data_generation.IdentityTestDataGenerator('tmp') + # input_signal_filepath = os.path.join( + # self._test_data_cache_path, 'pure_tone-440_1000.wav') + + # # Check that the input signal is generated. + # self.assertFalse(os.path.exists(input_signal_filepath)) + # generator.Generate( + # input_signal_filepath=input_signal_filepath, + # test_data_cache_path=self._test_data_cache_path, + # base_output_path=self._base_output_path) + # self.assertTrue(os.path.exists(input_signal_filepath)) + + # # Check input signal properties. + # input_signal = signal_processing.SignalProcessingUtils.LoadWav( + # input_signal_filepath) + # self.assertEqual(1000, len(input_signal)) diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/sound_level.cc b/modules/audio_processing/test/py_quality_assessment/quality_assessment/sound_level.cc new file mode 100644 index 0000000..1f24d9d --- /dev/null +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/sound_level.cc @@ -0,0 +1,127 @@ +// Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. 
+//
+// Use of this source code is governed by a BSD-style license
+// that can be found in the LICENSE file in the root of the source
+// tree. An additional intellectual property rights grant can be found
+// in the file PATENTS. All contributing project authors may
+// be found in the AUTHORS file in the root of the source tree.
+
+#include <algorithm>
+#include <array>
+#include <cmath>
+#include <fstream>
+
+#include "absl/flags/flag.h"
+#include "absl/flags/parse.h"
+#include "common_audio/include/audio_util.h"
+#include "common_audio/wav_file.h"
+#include "rtc_base/logging.h"
+
+ABSL_FLAG(std::string, i, "", "Input wav file");
+ABSL_FLAG(std::string, oc, "", "Config output file");
+ABSL_FLAG(std::string, ol, "", "Levels output file");
+ABSL_FLAG(float, a, 5.f, "Attack (ms)");
+ABSL_FLAG(float, d, 20.f, "Decay (ms)");
+ABSL_FLAG(int, f, 10, "Frame length (ms)");
+
+namespace webrtc {
+namespace test {
+namespace {
+
+constexpr int kMaxSampleRate = 48000;
+constexpr uint8_t kMaxFrameLenMs = 30;
+constexpr size_t kMaxFrameLen = kMaxFrameLenMs * kMaxSampleRate / 1000;
+
+const double kOneDbReduction = DbToRatio(-1.0);
+
+int main(int argc, char* argv[]) {
+  absl::ParseCommandLine(argc, argv);
+  // Check parameters.
+  if (absl::GetFlag(FLAGS_f) < 1 || absl::GetFlag(FLAGS_f) > kMaxFrameLenMs) {
+    RTC_LOG(LS_ERROR) << "Invalid frame length (min: 1, max: " << kMaxFrameLenMs
+                      << ")";
+    return 1;
+  }
+  if (absl::GetFlag(FLAGS_a) < 0 || absl::GetFlag(FLAGS_d) < 0) {
+    RTC_LOG(LS_ERROR) << "Attack and decay must be non-negative";
+    return 1;
+  }
+
+  // Open wav input file and check properties.
+  const std::string input_file = absl::GetFlag(FLAGS_i);
+  const std::string config_output_file = absl::GetFlag(FLAGS_oc);
+  const std::string levels_output_file = absl::GetFlag(FLAGS_ol);
+  WavReader wav_reader(input_file);
+  if (wav_reader.num_channels() != 1) {
+    RTC_LOG(LS_ERROR) << "Only mono wav files supported";
+    return 1;
+  }
+  if (wav_reader.sample_rate() > kMaxSampleRate) {
+    RTC_LOG(LS_ERROR) << "Beyond maximum sample rate (" << kMaxSampleRate
+                      << ")";
+    return 1;
+  }
+
+  // Map from milliseconds to samples.
+  const size_t audio_frame_length = rtc::CheckedDivExact(
+      absl::GetFlag(FLAGS_f) * wav_reader.sample_rate(), 1000);
+  auto time_const = [](double c) {
+    return std::pow(kOneDbReduction, absl::GetFlag(FLAGS_f) / c);
+  };
+  const float attack =
+      absl::GetFlag(FLAGS_a) == 0.0 ? 0.0 : time_const(absl::GetFlag(FLAGS_a));
+  const float decay =
+      absl::GetFlag(FLAGS_d) == 0.0 ? 0.0 : time_const(absl::GetFlag(FLAGS_d));
+
+  // Write config to file.
+  std::ofstream out_config(config_output_file);
+  out_config << "{"
+                "'frame_len_ms': "
+             << absl::GetFlag(FLAGS_f)
+             << ", "
+                "'attack_ms': "
+             << absl::GetFlag(FLAGS_a)
+             << ", "
+                "'decay_ms': "
+             << absl::GetFlag(FLAGS_d) << "}\n";
+  out_config.close();
+
+  // Measure level frame-by-frame.
+  std::ofstream out_levels(levels_output_file, std::ofstream::binary);
+  std::array<int16_t, kMaxFrameLen> samples;
+  float level_prev = 0.f;
+  while (true) {
+    // Process frame.
+    const auto read_samples =
+        wav_reader.ReadSamples(audio_frame_length, samples.data());
+    if (read_samples < audio_frame_length)
+      break;  // EOF.
+
+    // Frame peak level.
+    std::transform(samples.begin(), samples.begin() + audio_frame_length,
+                   samples.begin(), [](int16_t s) { return std::abs(s); });
+    const int16_t peak_level = *std::max_element(
+        samples.cbegin(), samples.cbegin() + audio_frame_length);
+    const float level_curr = static_cast<float>(peak_level) / 32768.f;
+
+    // Temporal smoothing.
+    auto smooth = [&level_prev, &level_curr](float c) {
+      return (1.0 - c) * level_curr + c * level_prev;
+    };
+    level_prev = smooth(level_curr > level_prev ? attack : decay);
+
+    // Write output.
+    out_levels.write(reinterpret_cast<const char*>(&level_prev),
+                     sizeof(float));
+  }
+  out_levels.close();
+
+  return 0;
+}
+
+}  // namespace
+}  // namespace test
+}  // namespace webrtc
+
+int main(int argc, char* argv[]) {
+  return webrtc::test::main(argc, argv);
+}
diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation.py
new file mode 100644
index 0000000..dac4328
--- /dev/null
+++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation.py
@@ -0,0 +1,511 @@
+# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS. All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+"""Test data generators producing signal pairs intended to be used to
+test the APM module. Each pair consists of a noisy input and a reference signal.
+The former is used as APM input and it is generated by adding noise to a
+clean audio track. The reference is the expected APM output.
+
+Throughout this file, the following naming convention is used:
+  - input signal: the clean signal (e.g., speech),
+  - noise signal: the noise to be summed up to the input signal (e.g., white
+    noise, Gaussian noise),
+  - noisy signal: input + noise.
+The noise signal may or may not be a function of the clean signal. For
+instance, white noise is independently generated, whereas reverberation is
+obtained by convolving the input signal with an impulse response.
+""" + +import logging +import os +import shutil +import sys + +try: + import scipy.io +except ImportError: + logging.critical('Cannot import the third-party Python package scipy') + sys.exit(1) + +from . import data_access +from . import exceptions +from . import signal_processing + + +class TestDataGenerator(object): + """Abstract class responsible for the generation of noisy signals. + + Given a clean signal, it generates two streams named noisy signal and + reference. The former is the clean signal deteriorated by the noise source, + the latter goes through the same deterioration process, but more "gently". + Noisy signal and reference are produced so that the reference is the signal + expected at the output of the APM module when the latter is fed with the noisy + signal. + + An test data generator generates one or more pairs. + """ + + NAME = None + REGISTERED_CLASSES = {} + + def __init__(self, output_directory_prefix): + self._output_directory_prefix = output_directory_prefix + # Init dictionaries with one entry for each test data generator + # configuration (e.g., different SNRs). + # Noisy audio track files (stored separately in a cache folder). + self._noisy_signal_filepaths = None + # Path to be used for the APM simulation output files. + self._apm_output_paths = None + # Reference audio track files (stored separately in a cache folder). + self._reference_signal_filepaths = None + self.Clear() + + @classmethod + def RegisterClass(cls, class_to_register): + """Registers a TestDataGenerator implementation. + + Decorator to automatically register the classes that extend + TestDataGenerator. 
+ Example usage: + + @TestDataGenerator.RegisterClass + class IdentityGenerator(TestDataGenerator): + pass + """ + cls.REGISTERED_CLASSES[class_to_register.NAME] = class_to_register + return class_to_register + + @property + def config_names(self): + return self._noisy_signal_filepaths.keys() + + @property + def noisy_signal_filepaths(self): + return self._noisy_signal_filepaths + + @property + def apm_output_paths(self): + return self._apm_output_paths + + @property + def reference_signal_filepaths(self): + return self._reference_signal_filepaths + + def Generate( + self, input_signal_filepath, test_data_cache_path, base_output_path): + """Generates a set of noisy input and reference audiotrack file pairs. + + This method initializes an empty set of pairs and calls the _Generate() + method implemented in a concrete class. + + Args: + input_signal_filepath: path to the clean input audio track file. + test_data_cache_path: path to the cache of the generated audio track + files. + base_output_path: base path where output is written. + """ + self.Clear() + self._Generate( + input_signal_filepath, test_data_cache_path, base_output_path) + + def Clear(self): + """Clears the generated output path dictionaries. + """ + self._noisy_signal_filepaths = {} + self._apm_output_paths = {} + self._reference_signal_filepaths = {} + + def _Generate( + self, input_signal_filepath, test_data_cache_path, base_output_path): + """Abstract method to be implemented in each concrete class. + """ + raise NotImplementedError() + + def _AddNoiseSnrPairs(self, base_output_path, noisy_mix_filepaths, + snr_value_pairs): + """Adds noisy-reference signal pairs. + + Args: + base_output_path: noisy tracks base output path. + noisy_mix_filepaths: nested dictionary of noisy signal paths organized + by noisy track name and SNR level. + snr_value_pairs: list of SNR pairs. 
+ """ + for noise_track_name in noisy_mix_filepaths: + for snr_noisy, snr_refence in snr_value_pairs: + config_name = '{0}_{1:d}_{2:d}_SNR'.format( + noise_track_name, snr_noisy, snr_refence) + output_path = self._MakeDir(base_output_path, config_name) + self._AddNoiseReferenceFilesPair( + config_name=config_name, + noisy_signal_filepath=noisy_mix_filepaths[ + noise_track_name][snr_noisy], + reference_signal_filepath=noisy_mix_filepaths[ + noise_track_name][snr_refence], + output_path=output_path) + + def _AddNoiseReferenceFilesPair(self, config_name, noisy_signal_filepath, + reference_signal_filepath, output_path): + """Adds one noisy-reference signal pair. + + Args: + config_name: name of the APM configuration. + noisy_signal_filepath: path to noisy audio track file. + reference_signal_filepath: path to reference audio track file. + output_path: APM output path. + """ + assert config_name not in self._noisy_signal_filepaths + self._noisy_signal_filepaths[config_name] = os.path.abspath( + noisy_signal_filepath) + self._apm_output_paths[config_name] = os.path.abspath(output_path) + self._reference_signal_filepaths[config_name] = os.path.abspath( + reference_signal_filepath) + + def _MakeDir(self, base_output_path, test_data_generator_config_name): + output_path = os.path.join( + base_output_path, + self._output_directory_prefix + test_data_generator_config_name) + data_access.MakeDirectory(output_path) + return output_path + + +@TestDataGenerator.RegisterClass +class IdentityTestDataGenerator(TestDataGenerator): + """Generator that adds no noise. + + Both the noisy and the reference signals are the input signal. 
+ """ + + NAME = 'identity' + + def __init__(self, output_directory_prefix, copy_with_identity): + TestDataGenerator.__init__(self, output_directory_prefix) + self._copy_with_identity = copy_with_identity + + @property + def copy_with_identity(self): + return self._copy_with_identity + + def _Generate( + self, input_signal_filepath, test_data_cache_path, base_output_path): + config_name = 'default' + output_path = self._MakeDir(base_output_path, config_name) + + if self._copy_with_identity: + input_signal_filepath_new = os.path.join( + test_data_cache_path, os.path.split(input_signal_filepath)[1]) + logging.info('copying ' + input_signal_filepath + ' to ' + ( + input_signal_filepath_new)) + shutil.copy(input_signal_filepath, input_signal_filepath_new) + input_signal_filepath = input_signal_filepath_new + + self._AddNoiseReferenceFilesPair( + config_name=config_name, + noisy_signal_filepath=input_signal_filepath, + reference_signal_filepath=input_signal_filepath, + output_path=output_path) + + +@TestDataGenerator.RegisterClass +class WhiteNoiseTestDataGenerator(TestDataGenerator): + """Generator that adds white noise. + """ + + NAME = 'white_noise' + + # Each pair indicates the clean vs. noisy and reference vs. noisy SNRs. + # The reference (second value of each pair) always has a lower amount of noise + # - i.e., the SNR is 10 dB higher. + _SNR_VALUE_PAIRS = [ + [20, 30], # Smallest noise. + [10, 20], + [5, 15], + [0, 10], # Largest noise. + ] + + _NOISY_SIGNAL_FILENAME_TEMPLATE = 'noise_{0:d}_SNR.wav' + + def __init__(self, output_directory_prefix): + TestDataGenerator.__init__(self, output_directory_prefix) + + def _Generate( + self, input_signal_filepath, test_data_cache_path, base_output_path): + # Load the input signal. + input_signal = signal_processing.SignalProcessingUtils.LoadWav( + input_signal_filepath) + + # Create the noise track. 
+ noise_signal = signal_processing.SignalProcessingUtils.GenerateWhiteNoise( + input_signal) + + # Create the noisy mixes (once for each unique SNR value). + noisy_mix_filepaths = {} + snr_values = set([snr for pair in self._SNR_VALUE_PAIRS for snr in pair]) + for snr in snr_values: + noisy_signal_filepath = os.path.join( + test_data_cache_path, + self._NOISY_SIGNAL_FILENAME_TEMPLATE.format(snr)) + + # Create and save if not done. + if not os.path.exists(noisy_signal_filepath): + # Create noisy signal. + noisy_signal = signal_processing.SignalProcessingUtils.MixSignals( + input_signal, noise_signal, snr) + + # Save. + signal_processing.SignalProcessingUtils.SaveWav( + noisy_signal_filepath, noisy_signal) + + # Add file to the collection of mixes. + noisy_mix_filepaths[snr] = noisy_signal_filepath + + # Add all the noisy-reference signal pairs. + for snr_noisy, snr_refence in self._SNR_VALUE_PAIRS: + config_name = '{0:d}_{1:d}_SNR'.format(snr_noisy, snr_refence) + output_path = self._MakeDir(base_output_path, config_name) + self._AddNoiseReferenceFilesPair( + config_name=config_name, + noisy_signal_filepath=noisy_mix_filepaths[snr_noisy], + reference_signal_filepath=noisy_mix_filepaths[snr_refence], + output_path=output_path) + + +# TODO(alessiob): remove comment when class implemented. +# @TestDataGenerator.RegisterClass +class NarrowBandNoiseTestDataGenerator(TestDataGenerator): + """Generator that adds narrow-band noise. + """ + + NAME = 'narrow_band_noise' + + def __init__(self, output_directory_prefix): + TestDataGenerator.__init__(self, output_directory_prefix) + + def _Generate( + self, input_signal_filepath, test_data_cache_path, base_output_path): + # TODO(alessiob): implement. + pass + + +@TestDataGenerator.RegisterClass +class AdditiveNoiseTestDataGenerator(TestDataGenerator): + """Generator that adds noise loops. 
+ + This generator uses all the wav files in a given path (default: noise_tracks/) + and mixes them to the clean speech with different target SNRs (hard-coded). + """ + + NAME = 'additive_noise' + _NOISY_SIGNAL_FILENAME_TEMPLATE = '{0}_{1:d}_SNR.wav' + + DEFAULT_NOISE_TRACKS_PATH = os.path.join( + os.path.dirname(__file__), os.pardir, 'noise_tracks') + + # TODO(alessiob): Make the list of SNR pairs customizable. + # Each pair indicates the clean vs. noisy and reference vs. noisy SNRs. + # The reference (second value of each pair) always has a lower amount of noise + # - i.e., the SNR is 10 dB higher. + _SNR_VALUE_PAIRS = [ + [20, 30], # Smallest noise. + [10, 20], + [5, 15], + [0, 10], # Largest noise. + ] + + def __init__(self, output_directory_prefix, noise_tracks_path): + TestDataGenerator.__init__(self, output_directory_prefix) + self._noise_tracks_path = noise_tracks_path + self._noise_tracks_file_names = [n for n in os.listdir( + self._noise_tracks_path) if n.lower().endswith('.wav')] + if len(self._noise_tracks_file_names) == 0: + raise exceptions.InitializationException( + 'No wav files found in the noise tracks path %s' % ( + self._noise_tracks_path)) + + def _Generate( + self, input_signal_filepath, test_data_cache_path, base_output_path): + """Generates test data pairs using environmental noise. + + For each noise track and pair of SNR values, the following two audio tracks + are created: the noisy signal and the reference signal. The former is + obtained by mixing the (clean) input signal to the corresponding noise + track enforcing the target SNR. + """ + # Init. + snr_values = set([snr for pair in self._SNR_VALUE_PAIRS for snr in pair]) + + # Load the input signal. + input_signal = signal_processing.SignalProcessingUtils.LoadWav( + input_signal_filepath) + + noisy_mix_filepaths = {} + for noise_track_filename in self._noise_tracks_file_names: + # Load the noise track. 
+ noise_track_name, _ = os.path.splitext(noise_track_filename) + noise_track_filepath = os.path.join( + self._noise_tracks_path, noise_track_filename) + if not os.path.exists(noise_track_filepath): + logging.error('cannot find the <%s> noise track', noise_track_filename) + raise exceptions.FileNotFoundError() + + noise_signal = signal_processing.SignalProcessingUtils.LoadWav( + noise_track_filepath) + + # Create the noisy mixes (once for each unique SNR value). + noisy_mix_filepaths[noise_track_name] = {} + for snr in snr_values: + noisy_signal_filepath = os.path.join( + test_data_cache_path, + self._NOISY_SIGNAL_FILENAME_TEMPLATE.format(noise_track_name, snr)) + + # Create and save if not done. + if not os.path.exists(noisy_signal_filepath): + # Create noisy signal. + noisy_signal = signal_processing.SignalProcessingUtils.MixSignals( + input_signal, noise_signal, snr, + pad_noise=signal_processing.SignalProcessingUtils.MixPadding.LOOP) + + # Save. + signal_processing.SignalProcessingUtils.SaveWav( + noisy_signal_filepath, noisy_signal) + + # Add file to the collection of mixes. + noisy_mix_filepaths[noise_track_name][snr] = noisy_signal_filepath + + # Add all the noise-SNR pairs. + self._AddNoiseSnrPairs( + base_output_path, noisy_mix_filepaths, self._SNR_VALUE_PAIRS) + + +@TestDataGenerator.RegisterClass +class ReverberationTestDataGenerator(TestDataGenerator): + """Generator that adds reverberation noise. + + TODO(alessiob): Make this class more generic since the impulse response can be + anything (not just reverberation); call it e.g., + ConvolutionalNoiseTestDataGenerator. + """ + + NAME = 'reverberation' + + _IMPULSE_RESPONSES = { + 'lecture': 'air_binaural_lecture_0_0_1.mat', # Long echo. + 'booth': 'air_binaural_booth_0_0_1.mat', # Short echo. + } + _MAX_IMPULSE_RESPONSE_LENGTH = None + + # Each pair indicates the clean vs. noisy and reference vs. noisy SNRs. 
+ # The reference (second value of each pair) always has a lower amount of noise + # - i.e., the SNR is 5 dB higher. + _SNR_VALUE_PAIRS = [ + [3, 8], # Smallest noise. + [-3, 2], # Largest noise. + ] + + _NOISE_TRACK_FILENAME_TEMPLATE = '{0}.wav' + _NOISY_SIGNAL_FILENAME_TEMPLATE = '{0}_{1:d}_SNR.wav' + + def __init__(self, output_directory_prefix, aechen_ir_database_path): + TestDataGenerator.__init__(self, output_directory_prefix) + self._aechen_ir_database_path = aechen_ir_database_path + + def _Generate( + self, input_signal_filepath, test_data_cache_path, base_output_path): + """Generates test data pairs using reverberation noise. + + For each impulse response, one noise track is created. For each impulse + response and pair of SNR values, the following 2 audio tracks are + created: the noisy signal and the reference signal. The former is + obtained by mixing the (clean) input signal to the corresponding noise + track enforcing the target SNR. + """ + # Init. + snr_values = set([snr for pair in self._SNR_VALUE_PAIRS for snr in pair]) + + # Load the input signal. + input_signal = signal_processing.SignalProcessingUtils.LoadWav( + input_signal_filepath) + + noisy_mix_filepaths = {} + for impulse_response_name in self._IMPULSE_RESPONSES: + noise_track_filename = self._NOISE_TRACK_FILENAME_TEMPLATE.format( + impulse_response_name) + noise_track_filepath = os.path.join( + test_data_cache_path, noise_track_filename) + noise_signal = None + try: + # Load noise track. + noise_signal = signal_processing.SignalProcessingUtils.LoadWav( + noise_track_filepath) + except exceptions.FileNotFoundError: + # Generate noise track by applying the impulse response. 
+ impulse_response_filepath = os.path.join( + self._aechen_ir_database_path, + self._IMPULSE_RESPONSES[impulse_response_name]) + noise_signal = self._GenerateNoiseTrack( + noise_track_filepath, input_signal, impulse_response_filepath) + assert noise_signal is not None + + # Create the noisy mixes (once for each unique SNR value). + noisy_mix_filepaths[impulse_response_name] = {} + for snr in snr_values: + noisy_signal_filepath = os.path.join( + test_data_cache_path, + self._NOISY_SIGNAL_FILENAME_TEMPLATE.format( + impulse_response_name, snr)) + + # Create and save if not done. + if not os.path.exists(noisy_signal_filepath): + # Create noisy signal. + noisy_signal = signal_processing.SignalProcessingUtils.MixSignals( + input_signal, noise_signal, snr) + + # Save. + signal_processing.SignalProcessingUtils.SaveWav( + noisy_signal_filepath, noisy_signal) + + # Add file to the collection of mixes. + noisy_mix_filepaths[impulse_response_name][snr] = noisy_signal_filepath + + # Add all the noise-SNR pairs. + self._AddNoiseSnrPairs(base_output_path, noisy_mix_filepaths, + self._SNR_VALUE_PAIRS) + + def _GenerateNoiseTrack(self, noise_track_filepath, input_signal, + impulse_response_filepath): + """Generates noise track. + + Generate a signal by convolving input_signal with the impulse response in + impulse_response_filepath; then save to noise_track_filepath. + + Args: + noise_track_filepath: output file path for the noise track. + input_signal: (clean) input signal samples. + impulse_response_filepath: impulse response file path. + + Returns: + AudioSegment instance. + """ + # Load impulse response. 
+ data = scipy.io.loadmat(impulse_response_filepath) + impulse_response = data['h_air'].flatten() + if self._MAX_IMPULSE_RESPONSE_LENGTH is not None: + logging.info('truncating impulse response from %d to %d samples', + len(impulse_response), self._MAX_IMPULSE_RESPONSE_LENGTH) + impulse_response = impulse_response[:self._MAX_IMPULSE_RESPONSE_LENGTH] + + # Apply impulse response. + processed_signal = ( + signal_processing.SignalProcessingUtils.ApplyImpulseResponse( + input_signal, impulse_response)) + + # Save. + signal_processing.SignalProcessingUtils.SaveWav( + noise_track_filepath, processed_signal) + + return processed_signal diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation_factory.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation_factory.py new file mode 100644 index 0000000..c80d150 --- /dev/null +++ b/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation_factory.py @@ -0,0 +1,71 @@ +# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +"""TestDataGenerator factory class. +""" + +import logging + +from . import exceptions +from . import test_data_generation + + +class TestDataGeneratorFactory(object): + """Factory class used to create test data generators. + + Usage: Create a factory passing parameters to the ctor with which the + generators will be produced. + """ + + def __init__(self, aechen_ir_database_path, noise_tracks_path, + copy_with_identity): + """Ctor. + + Args: + aechen_ir_database_path: Path to the Aechen Impulse Response database. 
+ noise_tracks_path: Path to the noise tracks to add. + copy_with_identity: Flag indicating whether the identity generator has to + make copies of the clean speech input files. + """ + self._output_directory_prefix = None + self._aechen_ir_database_path = aechen_ir_database_path + self._noise_tracks_path = noise_tracks_path + self._copy_with_identity = copy_with_identity + + def SetOutputDirectoryPrefix(self, prefix): + self._output_directory_prefix = prefix + + def GetInstance(self, test_data_generators_class): + """Creates an TestDataGenerator instance given a class object. + + Args: + test_data_generators_class: TestDataGenerator class object (not an + instance). + + Returns: + TestDataGenerator instance. + """ + if self._output_directory_prefix is None: + raise exceptions.InitializationException( + 'The output directory prefix for test data generators is not set') + logging.debug('factory producing %s', test_data_generators_class) + + if test_data_generators_class == ( + test_data_generation.IdentityTestDataGenerator): + return test_data_generation.IdentityTestDataGenerator( + self._output_directory_prefix, self._copy_with_identity) + elif test_data_generators_class == ( + test_data_generation.ReverberationTestDataGenerator): + return test_data_generation.ReverberationTestDataGenerator( + self._output_directory_prefix, self._aechen_ir_database_path) + elif test_data_generators_class == ( + test_data_generation.AdditiveNoiseTestDataGenerator): + return test_data_generation.AdditiveNoiseTestDataGenerator( + self._output_directory_prefix, self._noise_tracks_path) + else: + return test_data_generators_class(self._output_directory_prefix) diff --git a/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation_unittest.py b/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation_unittest.py new file mode 100644 index 0000000..b0d003d --- /dev/null +++ 
b/modules/audio_processing/test/py_quality_assessment/quality_assessment/test_data_generation_unittest.py @@ -0,0 +1,206 @@ +# Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +"""Unit tests for the test_data_generation module. +""" + +import os +import shutil +import tempfile +import unittest + +import numpy as np +import scipy.io + +from . import test_data_generation +from . import test_data_generation_factory +from . import signal_processing + + +class TestTestDataGenerators(unittest.TestCase): + """Unit tests for the test_data_generation module. + """ + + def setUp(self): + """Create temporary folders.""" + self._base_output_path = tempfile.mkdtemp() + self._test_data_cache_path = tempfile.mkdtemp() + self._fake_air_db_path = tempfile.mkdtemp() + + # Fake AIR DB impulse responses. + # TODO(alessiob): ReverberationTestDataGenerator will change to allow custom + # impulse responses. When changed, the coupling below between + # impulse_response_mat_file_names and + # ReverberationTestDataGenerator._IMPULSE_RESPONSES can be removed. + impulse_response_mat_file_names = [ + 'air_binaural_lecture_0_0_1.mat', + 'air_binaural_booth_0_0_1.mat', + ] + for impulse_response_mat_file_name in impulse_response_mat_file_names: + data = {'h_air': np.random.rand(1, 1000).astype(' +#include +#include + +#include "absl/flags/flag.h" +#include "absl/flags/parse.h" +#include "common_audio/wav_file.h" +#include "rtc_base/logging.h" + +ABSL_FLAG(std::string, i, "", "Input wav file"); +ABSL_FLAG(std::string, o, "", "VAD output file"); + +namespace webrtc { +namespace test { +namespace { + +// The allowed values are 10, 20 or 30 ms. 
+constexpr uint8_t kAudioFrameLengthMilliseconds = 30; +constexpr int kMaxSampleRate = 48000; +constexpr size_t kMaxFrameLen = + kAudioFrameLengthMilliseconds * kMaxSampleRate / 1000; + +constexpr uint8_t kBitmaskBuffSize = 8; + +int main(int argc, char* argv[]) { + absl::ParseCommandLine(argc, argv); + const std::string input_file = absl::GetFlag(FLAGS_i); + const std::string output_file = absl::GetFlag(FLAGS_o); + // Open wav input file and check properties. + WavReader wav_reader(input_file); + if (wav_reader.num_channels() != 1) { + RTC_LOG(LS_ERROR) << "Only mono wav files supported"; + return 1; + } + if (wav_reader.sample_rate() > kMaxSampleRate) { + RTC_LOG(LS_ERROR) << "Beyond maximum sample rate (" << kMaxSampleRate + << ")"; + return 1; + } + const size_t audio_frame_length = rtc::CheckedDivExact( + kAudioFrameLengthMilliseconds * wav_reader.sample_rate(), 1000); + if (audio_frame_length > kMaxFrameLen) { + RTC_LOG(LS_ERROR) << "The frame size and/or the sample rate are too large."; + return 1; + } + + // Create output file and write header. + std::ofstream out_file(output_file, std::ofstream::binary); + const char audio_frame_length_ms = kAudioFrameLengthMilliseconds; + out_file.write(&audio_frame_length_ms, 1); // Header. + + // Run VAD and write decisions. + std::unique_ptr vad = CreateVad(Vad::Aggressiveness::kVadNormal); + std::array samples; + char buff = 0; // Buffer to write one bit per frame. + uint8_t next = 0; // Points to the next bit to write in |buff|. + while (true) { + // Process frame. + const auto read_samples = + wav_reader.ReadSamples(audio_frame_length, samples.data()); + if (read_samples < audio_frame_length) + break; + const auto is_speech = vad->VoiceActivity( + samples.data(), audio_frame_length, wav_reader.sample_rate()); + + // Write output. + buff = is_speech ? buff | (1 << next) : buff & ~(1 << next); + if (++next == kBitmaskBuffSize) { + out_file.write(&buff, 1); // Flush. + buff = 0; // Reset. 
+ next = 0; + } + } + + // Finalize. + char extra_bits = 0; + if (next > 0) { + extra_bits = kBitmaskBuffSize - next; + out_file.write(&buff, 1); // Flush. + } + out_file.write(&extra_bits, 1); + out_file.close(); + + return 0; +} + +} // namespace +} // namespace test +} // namespace webrtc + +int main(int argc, char* argv[]) { + return webrtc::test::main(argc, argv); +} diff --git a/modules/audio_processing/test/runtime_setting_util.cc b/modules/audio_processing/test/runtime_setting_util.cc new file mode 100644 index 0000000..8876187 --- /dev/null +++ b/modules/audio_processing/test/runtime_setting_util.cc @@ -0,0 +1,46 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/test/runtime_setting_util.h" + +#include "rtc_base/checks.h" + +namespace webrtc { + +void ReplayRuntimeSetting(AudioProcessing* apm, + const webrtc::audioproc::RuntimeSetting& setting) { + RTC_CHECK(apm); + // TODO(bugs.webrtc.org/9138): Add ability to handle different types + // of settings. Currently CapturePreGain, CaptureFixedPostGain and + // PlayoutVolumeChange are supported. 
+ RTC_CHECK(setting.has_capture_pre_gain() || + setting.has_capture_fixed_post_gain() || + setting.has_playout_volume_change()); + + if (setting.has_capture_pre_gain()) { + apm->SetRuntimeSetting( + AudioProcessing::RuntimeSetting::CreateCapturePreGain( + setting.capture_pre_gain())); + } else if (setting.has_capture_fixed_post_gain()) { + apm->SetRuntimeSetting( + AudioProcessing::RuntimeSetting::CreateCaptureFixedPostGain( + setting.capture_fixed_post_gain())); + } else if (setting.has_playout_volume_change()) { + apm->SetRuntimeSetting( + AudioProcessing::RuntimeSetting::CreatePlayoutVolumeChange( + setting.playout_volume_change())); + } else if (setting.has_playout_audio_device_change()) { + apm->SetRuntimeSetting( + AudioProcessing::RuntimeSetting::CreatePlayoutAudioDeviceChange( + {setting.playout_audio_device_change().id(), + setting.playout_audio_device_change().max_volume()})); + } +} +} // namespace webrtc diff --git a/modules/audio_processing/test/runtime_setting_util.h b/modules/audio_processing/test/runtime_setting_util.h new file mode 100644 index 0000000..d8cbe82 --- /dev/null +++ b/modules/audio_processing/test/runtime_setting_util.h @@ -0,0 +1,23 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_AUDIO_PROCESSING_TEST_RUNTIME_SETTING_UTIL_H_ +#define MODULES_AUDIO_PROCESSING_TEST_RUNTIME_SETTING_UTIL_H_ + +#include "modules/audio_processing/include/audio_processing.h" +#include "modules/audio_processing/test/protobuf_utils.h" + +namespace webrtc { + +void ReplayRuntimeSetting(AudioProcessing* apm, + const webrtc::audioproc::RuntimeSetting& setting); +} + +#endif // MODULES_AUDIO_PROCESSING_TEST_RUNTIME_SETTING_UTIL_H_ diff --git a/modules/audio_processing/test/simulator_buffers.cc b/modules/audio_processing/test/simulator_buffers.cc new file mode 100644 index 0000000..e6bd6c1 --- /dev/null +++ b/modules/audio_processing/test/simulator_buffers.cc @@ -0,0 +1,86 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/audio_processing/test/simulator_buffers.h" + +#include "modules/audio_processing/test/audio_buffer_tools.h" +#include "rtc_base/checks.h" + +namespace webrtc { +namespace test { + +SimulatorBuffers::SimulatorBuffers(int render_input_sample_rate_hz, + int capture_input_sample_rate_hz, + int render_output_sample_rate_hz, + int capture_output_sample_rate_hz, + size_t num_render_input_channels, + size_t num_capture_input_channels, + size_t num_render_output_channels, + size_t num_capture_output_channels) { + Random rand_gen(42); + CreateConfigAndBuffer(render_input_sample_rate_hz, num_render_input_channels, + &rand_gen, &render_input_buffer, &render_input_config, + &render_input, &render_input_samples); + + CreateConfigAndBuffer(render_output_sample_rate_hz, + num_render_output_channels, &rand_gen, + &render_output_buffer, &render_output_config, + &render_output, &render_output_samples); + + CreateConfigAndBuffer(capture_input_sample_rate_hz, + num_capture_input_channels, &rand_gen, + &capture_input_buffer, &capture_input_config, + &capture_input, &capture_input_samples); + + CreateConfigAndBuffer(capture_output_sample_rate_hz, + num_capture_output_channels, &rand_gen, + &capture_output_buffer, &capture_output_config, + &capture_output, &capture_output_samples); + + UpdateInputBuffers(); +} + +SimulatorBuffers::~SimulatorBuffers() = default; + +void SimulatorBuffers::CreateConfigAndBuffer( + int sample_rate_hz, + size_t num_channels, + Random* rand_gen, + std::unique_ptr* buffer, + StreamConfig* config, + std::vector* buffer_data, + std::vector* buffer_data_samples) { + int samples_per_channel = rtc::CheckedDivExact(sample_rate_hz, 100); + *config = StreamConfig(sample_rate_hz, num_channels, false); + buffer->reset( + new AudioBuffer(config->sample_rate_hz(), config->num_channels(), + config->sample_rate_hz(), config->num_channels(), + config->sample_rate_hz(), config->num_channels())); + + buffer_data_samples->resize(samples_per_channel * 
num_channels); + for (auto& v : *buffer_data_samples) { + v = rand_gen->Rand(); + } + + buffer_data->resize(num_channels); + for (size_t ch = 0; ch < num_channels; ++ch) { + (*buffer_data)[ch] = &(*buffer_data_samples)[ch * samples_per_channel]; + } +} + +void SimulatorBuffers::UpdateInputBuffers() { + test::CopyVectorToAudioBuffer(capture_input_config, capture_input_samples, + capture_input_buffer.get()); + test::CopyVectorToAudioBuffer(render_input_config, render_input_samples, + render_input_buffer.get()); +} + +} // namespace test +} // namespace webrtc diff --git a/modules/audio_processing/test/simulator_buffers.h b/modules/audio_processing/test/simulator_buffers.h new file mode 100644 index 0000000..36dcf30 --- /dev/null +++ b/modules/audio_processing/test/simulator_buffers.h @@ -0,0 +1,66 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_AUDIO_PROCESSING_TEST_SIMULATOR_BUFFERS_H_ +#define MODULES_AUDIO_PROCESSING_TEST_SIMULATOR_BUFFERS_H_ + +#include +#include + +#include "modules/audio_processing/audio_buffer.h" +#include "modules/audio_processing/include/audio_processing.h" +#include "rtc_base/random.h" + +namespace webrtc { +namespace test { + +struct SimulatorBuffers { + SimulatorBuffers(int render_input_sample_rate_hz, + int capture_input_sample_rate_hz, + int render_output_sample_rate_hz, + int capture_output_sample_rate_hz, + size_t num_render_input_channels, + size_t num_capture_input_channels, + size_t num_render_output_channels, + size_t num_capture_output_channels); + ~SimulatorBuffers(); + + void CreateConfigAndBuffer(int sample_rate_hz, + size_t num_channels, + Random* rand_gen, + std::unique_ptr* buffer, + StreamConfig* config, + std::vector* buffer_data, + std::vector* buffer_data_samples); + + void UpdateInputBuffers(); + + std::unique_ptr render_input_buffer; + std::unique_ptr capture_input_buffer; + std::unique_ptr render_output_buffer; + std::unique_ptr capture_output_buffer; + StreamConfig render_input_config; + StreamConfig capture_input_config; + StreamConfig render_output_config; + StreamConfig capture_output_config; + std::vector render_input; + std::vector render_input_samples; + std::vector capture_input; + std::vector capture_input_samples; + std::vector render_output; + std::vector render_output_samples; + std::vector capture_output; + std::vector capture_output_samples; +}; + +} // namespace test +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_TEST_SIMULATOR_BUFFERS_H_ diff --git a/modules/audio_processing/test/test_utils.cc b/modules/audio_processing/test/test_utils.cc new file mode 100644 index 0000000..37a20ce --- /dev/null +++ b/modules/audio_processing/test/test_utils.cc @@ -0,0 +1,154 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/test/test_utils.h" + +#include + +#include "rtc_base/checks.h" +#include "rtc_base/system/arch.h" + +namespace webrtc { + +RawFile::RawFile(const std::string& filename) + : file_handle_(fopen(filename.c_str(), "wb")) {} + +RawFile::~RawFile() { + fclose(file_handle_); +} + +void RawFile::WriteSamples(const int16_t* samples, size_t num_samples) { +#ifndef WEBRTC_ARCH_LITTLE_ENDIAN +#error "Need to convert samples to little-endian when writing to PCM file" +#endif + fwrite(samples, sizeof(*samples), num_samples, file_handle_); +} + +void RawFile::WriteSamples(const float* samples, size_t num_samples) { + fwrite(samples, sizeof(*samples), num_samples, file_handle_); +} + +ChannelBufferWavReader::ChannelBufferWavReader(std::unique_ptr file) + : file_(std::move(file)) {} + +ChannelBufferWavReader::~ChannelBufferWavReader() = default; + +bool ChannelBufferWavReader::Read(ChannelBuffer* buffer) { + RTC_CHECK_EQ(file_->num_channels(), buffer->num_channels()); + interleaved_.resize(buffer->size()); + if (file_->ReadSamples(interleaved_.size(), &interleaved_[0]) != + interleaved_.size()) { + return false; + } + + FloatS16ToFloat(&interleaved_[0], interleaved_.size(), &interleaved_[0]); + Deinterleave(&interleaved_[0], buffer->num_frames(), buffer->num_channels(), + buffer->channels()); + return true; +} + +ChannelBufferWavWriter::ChannelBufferWavWriter(std::unique_ptr file) + : file_(std::move(file)) {} + +ChannelBufferWavWriter::~ChannelBufferWavWriter() = default; + +void ChannelBufferWavWriter::Write(const ChannelBuffer& buffer) { + RTC_CHECK_EQ(file_->num_channels(), buffer.num_channels()); + 
interleaved_.resize(buffer.size()); + Interleave(buffer.channels(), buffer.num_frames(), buffer.num_channels(), + &interleaved_[0]); + FloatToFloatS16(&interleaved_[0], interleaved_.size(), &interleaved_[0]); + file_->WriteSamples(&interleaved_[0], interleaved_.size()); +} + +ChannelBufferVectorWriter::ChannelBufferVectorWriter(std::vector* output) + : output_(output) { + RTC_DCHECK(output_); +} + +ChannelBufferVectorWriter::~ChannelBufferVectorWriter() = default; + +void ChannelBufferVectorWriter::Write(const ChannelBuffer& buffer) { + // Account for sample rate changes throughout a simulation. + interleaved_buffer_.resize(buffer.size()); + Interleave(buffer.channels(), buffer.num_frames(), buffer.num_channels(), + interleaved_buffer_.data()); + size_t old_size = output_->size(); + output_->resize(old_size + interleaved_buffer_.size()); + FloatToFloatS16(interleaved_buffer_.data(), interleaved_buffer_.size(), + output_->data() + old_size); +} + +void WriteIntData(const int16_t* data, + size_t length, + WavWriter* wav_file, + RawFile* raw_file) { + if (wav_file) { + wav_file->WriteSamples(data, length); + } + if (raw_file) { + raw_file->WriteSamples(data, length); + } +} + +void WriteFloatData(const float* const* data, + size_t samples_per_channel, + size_t num_channels, + WavWriter* wav_file, + RawFile* raw_file) { + size_t length = num_channels * samples_per_channel; + std::unique_ptr buffer(new float[length]); + Interleave(data, samples_per_channel, num_channels, buffer.get()); + if (raw_file) { + raw_file->WriteSamples(buffer.get(), length); + } + // TODO(aluebs): Use ScaleToInt16Range() from audio_util + for (size_t i = 0; i < length; ++i) { + buffer[i] = buffer[i] > 0 + ? 
buffer[i] * std::numeric_limits::max() + : -buffer[i] * std::numeric_limits::min(); + } + if (wav_file) { + wav_file->WriteSamples(buffer.get(), length); + } +} + +FILE* OpenFile(const std::string& filename, const char* mode) { + FILE* file = fopen(filename.c_str(), mode); + if (!file) { + printf("Unable to open file %s\n", filename.c_str()); + exit(1); + } + return file; +} + +size_t SamplesFromRate(int rate) { + return static_cast(AudioProcessing::kChunkSizeMs * rate / 1000); +} + +void SetFrameSampleRate(Int16FrameData* frame, int sample_rate_hz) { + frame->sample_rate_hz = sample_rate_hz; + frame->samples_per_channel = + AudioProcessing::kChunkSizeMs * sample_rate_hz / 1000; +} + +AudioProcessing::ChannelLayout LayoutFromChannels(size_t num_channels) { + switch (num_channels) { + case 1: + return AudioProcessing::kMono; + case 2: + return AudioProcessing::kStereo; + default: + RTC_CHECK(false); + return AudioProcessing::kMono; + } +} + +} // namespace webrtc diff --git a/modules/audio_processing/test/test_utils.h b/modules/audio_processing/test/test_utils.h new file mode 100644 index 0000000..e2d243e --- /dev/null +++ b/modules/audio_processing/test/test_utils.h @@ -0,0 +1,196 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_AUDIO_PROCESSING_TEST_TEST_UTILS_H_ +#define MODULES_AUDIO_PROCESSING_TEST_TEST_UTILS_H_ + +#include + +#include +#include +#include +#include // no-presubmit-check TODO(webrtc:8982) +#include +#include + +#include "common_audio/channel_buffer.h" +#include "common_audio/wav_file.h" +#include "modules/audio_processing/include/audio_processing.h" +#include "rtc_base/constructor_magic.h" + +namespace webrtc { + +static const AudioProcessing::Error kNoErr = AudioProcessing::kNoError; +#define EXPECT_NOERR(expr) EXPECT_EQ(kNoErr, (expr)) + +class RawFile final { + public: + explicit RawFile(const std::string& filename); + ~RawFile(); + + void WriteSamples(const int16_t* samples, size_t num_samples); + void WriteSamples(const float* samples, size_t num_samples); + + private: + FILE* file_handle_; + + RTC_DISALLOW_COPY_AND_ASSIGN(RawFile); +}; + +// Encapsulates samples and metadata for an integer frame. +struct Int16FrameData { + // Max data size that matches the data size of the AudioFrame class, providing + // storage for 8 channels of 96 kHz data. + static const int kMaxDataSizeSamples = 7680; + + Int16FrameData() { + sample_rate_hz = 0; + num_channels = 0; + samples_per_channel = 0; + data.fill(0); + } + + void CopyFrom(const Int16FrameData& src) { + samples_per_channel = src.samples_per_channel; + sample_rate_hz = src.sample_rate_hz; + num_channels = src.num_channels; + + const size_t length = samples_per_channel * num_channels; + RTC_CHECK_LE(length, kMaxDataSizeSamples); + memcpy(data.data(), src.data.data(), sizeof(int16_t) * length); + } + std::array data; + int32_t sample_rate_hz; + size_t num_channels; + size_t samples_per_channel; +}; + +// Reads ChannelBuffers from a provided WavReader. +class ChannelBufferWavReader final { + public: + explicit ChannelBufferWavReader(std::unique_ptr file); + ~ChannelBufferWavReader(); + + // Reads data from the file according to the |buffer| format. 
Returns false if + // a full buffer can't be read from the file. + bool Read(ChannelBuffer* buffer); + + private: + std::unique_ptr file_; + std::vector interleaved_; + + RTC_DISALLOW_COPY_AND_ASSIGN(ChannelBufferWavReader); +}; + +// Writes ChannelBuffers to a provided WavWriter. +class ChannelBufferWavWriter final { + public: + explicit ChannelBufferWavWriter(std::unique_ptr file); + ~ChannelBufferWavWriter(); + + void Write(const ChannelBuffer& buffer); + + private: + std::unique_ptr file_; + std::vector interleaved_; + + RTC_DISALLOW_COPY_AND_ASSIGN(ChannelBufferWavWriter); +}; + +// Takes a pointer to a vector. Allows appending the samples of channel buffers +// to the given vector, by interleaving the samples and converting them to float +// S16. +class ChannelBufferVectorWriter final { + public: + explicit ChannelBufferVectorWriter(std::vector* output); + ChannelBufferVectorWriter(const ChannelBufferVectorWriter&) = delete; + ChannelBufferVectorWriter& operator=(const ChannelBufferVectorWriter&) = + delete; + ~ChannelBufferVectorWriter(); + + // Creates an interleaved copy of |buffer|, converts the samples to float S16 + // and appends the result to output_. + void Write(const ChannelBuffer& buffer); + + private: + std::vector interleaved_buffer_; + std::vector* output_; +}; + +void WriteIntData(const int16_t* data, + size_t length, + WavWriter* wav_file, + RawFile* raw_file); + +void WriteFloatData(const float* const* data, + size_t samples_per_channel, + size_t num_channels, + WavWriter* wav_file, + RawFile* raw_file); + +// Exits on failure; do not use in unit tests. 
+FILE* OpenFile(const std::string& filename, const char* mode); + +size_t SamplesFromRate(int rate); + +void SetFrameSampleRate(Int16FrameData* frame, int sample_rate_hz); + +template +void SetContainerFormat(int sample_rate_hz, + size_t num_channels, + Int16FrameData* frame, + std::unique_ptr >* cb) { + SetFrameSampleRate(frame, sample_rate_hz); + frame->num_channels = num_channels; + cb->reset(new ChannelBuffer(frame->samples_per_channel, num_channels)); +} + +AudioProcessing::ChannelLayout LayoutFromChannels(size_t num_channels); + +template +float ComputeSNR(const T* ref, const T* test, size_t length, float* variance) { + float mse = 0; + float mean = 0; + *variance = 0; + for (size_t i = 0; i < length; ++i) { + T error = ref[i] - test[i]; + mse += error * error; + *variance += ref[i] * ref[i]; + mean += ref[i]; + } + mse /= length; + *variance /= length; + mean /= length; + *variance -= mean * mean; + + float snr = 100; // We assign 100 dB to the zero-error case. + if (mse > 0) + snr = 10 * log10(*variance / mse); + return snr; +} + +// Returns a vector parsed from whitespace delimited values in to_parse, +// or an empty vector if the string could not be parsed. 
+template +std::vector ParseList(const std::string& to_parse) { + std::vector values; + + std::istringstream str(to_parse); + std::copy( + std::istream_iterator(str), // no-presubmit-check TODO(webrtc:8982) + std::istream_iterator(), // no-presubmit-check TODO(webrtc:8982) + std::back_inserter(values)); + + return values; +} + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_TEST_TEST_UTILS_H_ diff --git a/modules/audio_processing/test/unittest.proto b/modules/audio_processing/test/unittest.proto new file mode 100644 index 0000000..07d1cda --- /dev/null +++ b/modules/audio_processing/test/unittest.proto @@ -0,0 +1,48 @@ +syntax = "proto2"; +option optimize_for = LITE_RUNTIME; +package webrtc.audioproc; + +message Test { + optional int32 num_reverse_channels = 1; + optional int32 num_input_channels = 2; + optional int32 num_output_channels = 3; + optional int32 sample_rate = 4; + + message Frame { + } + + repeated Frame frame = 5; + + optional int32 analog_level_average = 6; + optional int32 max_output_average = 7; + optional int32 has_voice_count = 9; + optional int32 is_saturated_count = 10; + + message EchoMetrics { + optional float echo_return_loss = 1; + optional float echo_return_loss_enhancement = 2; + optional float divergent_filter_fraction = 3; + optional float residual_echo_likelihood = 4; + optional float residual_echo_likelihood_recent_max = 5; + } + + repeated EchoMetrics echo_metrics = 11; + + message DelayMetrics { + optional int32 median = 1; + optional int32 std = 2; + } + + repeated DelayMetrics delay_metrics = 12; + + optional float rms_dbfs_average = 13; + + optional float ns_speech_probability_average = 14; + + optional bool use_aec_extended_filter = 15; +} + +message OutputData { + repeated Test test = 1; +} + diff --git a/modules/audio_processing/test/wav_based_simulator.cc b/modules/audio_processing/test/wav_based_simulator.cc new file mode 100644 index 0000000..75946fb --- /dev/null +++ 
b/modules/audio_processing/test/wav_based_simulator.cc @@ -0,0 +1,191 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/test/wav_based_simulator.h" + +#include + +#include + +#include "modules/audio_processing/test/test_utils.h" +#include "rtc_base/checks.h" +#include "rtc_base/system/file_wrapper.h" + +namespace webrtc { +namespace test { + +std::vector +WavBasedSimulator::GetCustomEventChain(const std::string& filename) { + std::vector call_chain; + FileWrapper file_wrapper = FileWrapper::OpenReadOnly(filename.c_str()); + + RTC_CHECK(file_wrapper.is_open()) + << "Could not open the custom call order file, reverting " + "to using the default call order"; + + char c; + size_t num_read = file_wrapper.Read(&c, sizeof(char)); + while (num_read > 0) { + switch (c) { + case 'r': + call_chain.push_back(SimulationEventType::kProcessReverseStream); + break; + case 'c': + call_chain.push_back(SimulationEventType::kProcessStream); + break; + case '\n': + break; + default: + FATAL() << "Incorrect custom call order file, reverting to using the " + "default call order"; + return WavBasedSimulator::GetDefaultEventChain(); + } + + num_read = file_wrapper.Read(&c, sizeof(char)); + } + + return call_chain; +} + +WavBasedSimulator::WavBasedSimulator( + const SimulationSettings& settings, + rtc::scoped_refptr audio_processing, + std::unique_ptr ap_builder) + : AudioProcessingSimulator(settings, + std::move(audio_processing), + std::move(ap_builder)) { + if (settings_.call_order_input_filename) { + call_chain_ = WavBasedSimulator::GetCustomEventChain( + 
*settings_.call_order_input_filename); + } else { + call_chain_ = WavBasedSimulator::GetDefaultEventChain(); + } +} + +WavBasedSimulator::~WavBasedSimulator() = default; + +std::vector +WavBasedSimulator::GetDefaultEventChain() { + std::vector call_chain(2); + call_chain[0] = SimulationEventType::kProcessStream; + call_chain[1] = SimulationEventType::kProcessReverseStream; + return call_chain; +} + +void WavBasedSimulator::PrepareProcessStreamCall() { + if (settings_.fixed_interface) { + fwd_frame_.CopyFrom(*in_buf_); + } + ap_->set_stream_key_pressed(settings_.use_ts && (*settings_.use_ts)); + + if (!settings_.use_stream_delay || *settings_.use_stream_delay) { + RTC_CHECK_EQ(AudioProcessing::kNoError, + ap_->set_stream_delay_ms( + settings_.stream_delay ? *settings_.stream_delay : 0)); + } +} + +void WavBasedSimulator::PrepareReverseProcessStreamCall() { + if (settings_.fixed_interface) { + rev_frame_.CopyFrom(*reverse_in_buf_); + } +} + +void WavBasedSimulator::Process() { + ConfigureAudioProcessor(); + + Initialize(); + + bool samples_left_to_process = true; + int call_chain_index = 0; + int num_forward_chunks_processed = 0; + while (samples_left_to_process) { + switch (call_chain_[call_chain_index]) { + case SimulationEventType::kProcessStream: + samples_left_to_process = HandleProcessStreamCall(); + ++num_forward_chunks_processed; + break; + case SimulationEventType::kProcessReverseStream: + if (settings_.reverse_input_filename) { + samples_left_to_process = HandleProcessReverseStreamCall(); + } + break; + default: + RTC_CHECK(false); + } + + call_chain_index = (call_chain_index + 1) % call_chain_.size(); + } + + DetachAecDump(); +} + +bool WavBasedSimulator::HandleProcessStreamCall() { + bool samples_left_to_process = buffer_reader_->Read(in_buf_.get()); + if (samples_left_to_process) { + PrepareProcessStreamCall(); + ProcessStream(settings_.fixed_interface); + } + return samples_left_to_process; +} + +bool WavBasedSimulator::HandleProcessReverseStreamCall() 
{ + bool samples_left_to_process = + reverse_buffer_reader_->Read(reverse_in_buf_.get()); + if (samples_left_to_process) { + PrepareReverseProcessStreamCall(); + ProcessReverseStream(settings_.fixed_interface); + } + return samples_left_to_process; +} + +void WavBasedSimulator::Initialize() { + std::unique_ptr in_file( + new WavReader(settings_.input_filename->c_str())); + int input_sample_rate_hz = in_file->sample_rate(); + int input_num_channels = in_file->num_channels(); + buffer_reader_.reset(new ChannelBufferWavReader(std::move(in_file))); + + int output_sample_rate_hz = settings_.output_sample_rate_hz + ? *settings_.output_sample_rate_hz + : input_sample_rate_hz; + int output_num_channels = settings_.output_num_channels + ? *settings_.output_num_channels + : input_num_channels; + + int reverse_sample_rate_hz = 48000; + int reverse_num_channels = 1; + int reverse_output_sample_rate_hz = 48000; + int reverse_output_num_channels = 1; + if (settings_.reverse_input_filename) { + std::unique_ptr reverse_in_file( + new WavReader(settings_.reverse_input_filename->c_str())); + reverse_sample_rate_hz = reverse_in_file->sample_rate(); + reverse_num_channels = reverse_in_file->num_channels(); + reverse_buffer_reader_.reset( + new ChannelBufferWavReader(std::move(reverse_in_file))); + + reverse_output_sample_rate_hz = + settings_.reverse_output_sample_rate_hz + ? *settings_.reverse_output_sample_rate_hz + : reverse_sample_rate_hz; + reverse_output_num_channels = settings_.reverse_output_num_channels + ? 
*settings_.reverse_output_num_channels + : reverse_num_channels; + } + + SetupBuffersConfigsOutputs( + input_sample_rate_hz, output_sample_rate_hz, reverse_sample_rate_hz, + reverse_output_sample_rate_hz, input_num_channels, output_num_channels, + reverse_num_channels, reverse_output_num_channels); +} + +} // namespace test +} // namespace webrtc diff --git a/modules/audio_processing/test/wav_based_simulator.h b/modules/audio_processing/test/wav_based_simulator.h new file mode 100644 index 0000000..3adbe70 --- /dev/null +++ b/modules/audio_processing/test/wav_based_simulator.h @@ -0,0 +1,56 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_TEST_WAV_BASED_SIMULATOR_H_ +#define MODULES_AUDIO_PROCESSING_TEST_WAV_BASED_SIMULATOR_H_ + +#include + +#include "modules/audio_processing/test/audio_processing_simulator.h" +#include "rtc_base/constructor_magic.h" + +namespace webrtc { +namespace test { + +// Used to perform an audio processing simulation from wav files. +class WavBasedSimulator final : public AudioProcessingSimulator { + public: + WavBasedSimulator(const SimulationSettings& settings, + rtc::scoped_refptr audio_processing, + std::unique_ptr ap_builder); + ~WavBasedSimulator() override; + + // Processes the WAV input. 
+ void Process() override; + + private: + enum SimulationEventType { + kProcessStream, + kProcessReverseStream, + }; + + void Initialize(); + bool HandleProcessStreamCall(); + bool HandleProcessReverseStreamCall(); + void PrepareProcessStreamCall(); + void PrepareReverseProcessStreamCall(); + static std::vector GetDefaultEventChain(); + static std::vector GetCustomEventChain( + const std::string& filename); + + std::vector call_chain_; + + RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WavBasedSimulator); +}; + +} // namespace test +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_TEST_WAV_BASED_SIMULATOR_H_ diff --git a/modules/audio_processing/three_band_filter_bank.cc b/modules/audio_processing/three_band_filter_bank.cc new file mode 100644 index 0000000..2a7d272 --- /dev/null +++ b/modules/audio_processing/three_band_filter_bank.cc @@ -0,0 +1,276 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// An implementation of a 3-band FIR filter-bank with DCT modulation, similar to +// the proposed in "Multirate Signal Processing for Communication Systems" by +// Fredric J Harris. +// +// The idea is to take a heterodyne system and change the order of the +// components to get something which is efficient to implement digitally. +// +// It is possible to separate the filter using the noble identity as follows: +// +// H(z) = H0(z^3) + z^-1 * H1(z^3) + z^-2 * H2(z^3) +// +// This is used in the analysis stage to first downsample serial to parallel +// and then filter each branch with one of these polyphase decompositions of the +// lowpass prototype. 
Because each filter is only a modulation of the prototype, +// it is enough to multiply each coefficient by the respective cosine value to +// shift it to the desired band. But because the cosine period is 12 samples, +// it requires separating the prototype even further using the noble identity. +// After filtering and modulating for each band, the output of all filters is +// accumulated to get the downsampled bands. +// +// A similar logic can be applied to the synthesis stage. + +#include "modules/audio_processing/three_band_filter_bank.h" + +#include + +#include "rtc_base/checks.h" + +namespace webrtc { +namespace { + +// Factors to take into account when choosing |kFilterSize|: +// 1. Higher |kFilterSize|, means faster transition, which ensures less +// aliasing. This is especially important when there is non-linear +// processing between the splitting and merging. +// 2. The delay that this filter bank introduces is +// |kNumBands| * |kSparsity| * |kFilterSize| / 2, so it increases linearly +// with |kFilterSize|. +// 3. The computation complexity also increases linearly with |kFilterSize|. + +// The Matlab code to generate these |kFilterCoeffs| is: +// +// N = kNumBands * kSparsity * kFilterSize - 1; +// h = fir1(N, 1 / (2 * kNumBands), kaiser(N + 1, 3.5)); +// reshape(h, kNumBands * kSparsity, kFilterSize); +// +// The code below uses the values of kFilterSize, kNumBands and kSparsity +// specified in the header. + +// Because the total bandwidth of the lower and higher band is double the middle +// one (because of the spectrum parity), the low-pass prototype is half the +// bandwidth of 1 / (2 * |kNumBands|) and is then shifted with cosine modulation +// to the right places. +// A Kaiser window is used because of its flexibility and the alpha is set to +// 3.5, since that sets a stop band attenuation of 40dB ensuring a fast +// transition. 
// Number of serial-to-parallel branches the full-band signal is split over;
// equal to the number of bands produced by the filter bank.
constexpr int kSubSampling = ThreeBandFilterBank::kNumBands;

// Size of the DCT used to modulate the low-pass prototype to each band.
constexpr int kDctSize = ThreeBandFilterBank::kNumBands;

static_assert(ThreeBandFilterBank::kNumBands *
                      ThreeBandFilterBank::kSplitBandSize ==
                  ThreeBandFilterBank::kFullBandSize,
              "The full band must be split in equally sized subbands");

// Polyphase decomposition of the low-pass prototype filter: one row per
// non-zero polyphase branch, kFilterSize taps per row (see the Matlab
// generation snippet in the comment above).
const float
    kFilterCoeffs[ThreeBandFilterBank::kNumNonZeroFilters][kFilterSize] = {
        {-0.00047749f, -0.00496888f, +0.16547118f, +0.00425496f},
        {-0.00173287f, -0.01585778f, +0.14989004f, +0.00994113f},
        {-0.00304815f, -0.02536082f, +0.12154542f, +0.01157993f},
        {-0.00346946f, -0.02587886f, +0.04760441f, +0.00607594f},
        {-0.00154717f, -0.01136076f, +0.01387458f, +0.00186353f},
        {+0.00186353f, +0.01387458f, -0.01136076f, -0.00154717f},
        {+0.00607594f, +0.04760441f, -0.02587886f, -0.00346946f},
        {+0.00983212f, +0.08543175f, -0.02982767f, -0.00383509f},
        {+0.00994113f, +0.14989004f, -0.01585778f, -0.00173287f},
        {+0.00425496f, +0.16547118f, -0.00496888f, -0.00047749f}};

// Indices (in the full kSparsity * kNumBands polyphase enumeration) of the
// two branches that are skipped; Analysis()/Synthesis() step over these
// ("skip zero filters") and remap the remaining indices into kFilterCoeffs.
constexpr int kZeroFilterIndex1 = 3;
constexpr int kZeroFilterIndex2 = 9;

// Cosine (DCT) modulation factors that shift each polyphase branch output to
// the three band center frequencies; one row per non-zero filter, one column
// per band.
const float kDctModulation[ThreeBandFilterBank::kNumNonZeroFilters][kDctSize] =
    {{2.f, 2.f, 2.f},
     {1.73205077f, 0.f, -1.73205077f},
     {1.f, -2.f, 1.f},
     {-1.f, 2.f, -1.f},
     {-1.73205077f, 0.f, 1.73205077f},
     {-2.f, -2.f, -2.f},
     {-1.73205077f, 0.f, 1.73205077f},
     {-1.f, 2.f, -1.f},
     {1.f, -2.f, 1.f},
     {1.73205077f, 0.f, -1.73205077f}};

// Filters the input signal |in| with the filter |filter| using a shift by
// |in_shift|, taking into account the previous state.
+void FilterCore( + rtc::ArrayView filter, + rtc::ArrayView in, + const int in_shift, + rtc::ArrayView out, + rtc::ArrayView state) { + constexpr int kMaxInShift = (kStride - 1); + RTC_DCHECK_GE(in_shift, 0); + RTC_DCHECK_LE(in_shift, kMaxInShift); + std::fill(out.begin(), out.end(), 0.f); + + for (int k = 0; k < in_shift; ++k) { + for (int i = 0, j = kMemorySize + k - in_shift; i < kFilterSize; + ++i, j -= kStride) { + out[k] += state[j] * filter[i]; + } + } + + for (int k = in_shift, shift = 0; k < kFilterSize * kStride; ++k, ++shift) { + RTC_DCHECK_GE(shift, 0); + const int loop_limit = std::min(kFilterSize, 1 + (shift >> kStrideLog2)); + for (int i = 0, j = shift; i < loop_limit; ++i, j -= kStride) { + out[k] += in[j] * filter[i]; + } + for (int i = loop_limit, j = kMemorySize + shift - loop_limit * kStride; + i < kFilterSize; ++i, j -= kStride) { + out[k] += state[j] * filter[i]; + } + } + + for (int k = kFilterSize * kStride, shift = kFilterSize * kStride - in_shift; + k < ThreeBandFilterBank::kSplitBandSize; ++k, ++shift) { + for (int i = 0, j = shift; i < kFilterSize; ++i, j -= kStride) { + out[k] += in[j] * filter[i]; + } + } + + // Update current state. + std::copy(in.begin() + ThreeBandFilterBank::kSplitBandSize - kMemorySize, + in.end(), state.begin()); +} + +} // namespace + +// Because the low-pass filter prototype has half bandwidth it is possible to +// use a DCT to shift it in both directions at the same time, to the center +// frequencies [1 / 12, 3 / 12, 5 / 12]. 
+ThreeBandFilterBank::ThreeBandFilterBank() { + RTC_DCHECK_EQ(state_analysis_.size(), kNumNonZeroFilters); + RTC_DCHECK_EQ(state_synthesis_.size(), kNumNonZeroFilters); + for (int k = 0; k < kNumNonZeroFilters; ++k) { + RTC_DCHECK_EQ(state_analysis_[k].size(), kMemorySize); + RTC_DCHECK_EQ(state_synthesis_[k].size(), kMemorySize); + + state_analysis_[k].fill(0.f); + state_synthesis_[k].fill(0.f); + } +} + +ThreeBandFilterBank::~ThreeBandFilterBank() = default; + +// The analysis can be separated in these steps: +// 1. Serial to parallel downsampling by a factor of |kNumBands|. +// 2. Filtering of |kSparsity| different delayed signals with polyphase +// decomposition of the low-pass prototype filter and upsampled by a factor +// of |kSparsity|. +// 3. Modulating with cosines and accumulating to get the desired band. +void ThreeBandFilterBank::Analysis( + rtc::ArrayView in, + rtc::ArrayView, ThreeBandFilterBank::kNumBands> + out) { + // Initialize the output to zero. + for (int band = 0; band < ThreeBandFilterBank::kNumBands; ++band) { + RTC_DCHECK_EQ(out[band].size(), kSplitBandSize); + std::fill(out[band].begin(), out[band].end(), 0); + } + + for (int downsampling_index = 0; downsampling_index < kSubSampling; + ++downsampling_index) { + // Downsample to form the filter input. + std::array in_subsampled; + for (int k = 0; k < kSplitBandSize; ++k) { + in_subsampled[k] = + in[(kSubSampling - 1) - downsampling_index + kSubSampling * k]; + } + + for (int in_shift = 0; in_shift < kStride; ++in_shift) { + // Choose filter, skip zero filters. + const int index = downsampling_index + in_shift * kSubSampling; + if (index == kZeroFilterIndex1 || index == kZeroFilterIndex2) { + continue; + } + const int filter_index = + index < kZeroFilterIndex1 + ? index + : (index < kZeroFilterIndex2 ? 
index - 1 : index - 2); + + rtc::ArrayView filter( + kFilterCoeffs[filter_index]); + rtc::ArrayView dct_modulation( + kDctModulation[filter_index]); + rtc::ArrayView state(state_analysis_[filter_index]); + + // Filter. + std::array out_subsampled; + FilterCore(filter, in_subsampled, in_shift, out_subsampled, state); + + // Band and modulate the output. + for (int band = 0; band < ThreeBandFilterBank::kNumBands; ++band) { + for (int n = 0; n < kSplitBandSize; ++n) { + out[band][n] += dct_modulation[band] * out_subsampled[n]; + } + } + } + } +} + +// The synthesis can be separated in these steps: +// 1. Modulating with cosines. +// 2. Filtering each one with a polyphase decomposition of the low-pass +// prototype filter upsampled by a factor of |kSparsity| and accumulating +// |kSparsity| signals with different delays. +// 3. Parallel to serial upsampling by a factor of |kNumBands|. +void ThreeBandFilterBank::Synthesis( + rtc::ArrayView, ThreeBandFilterBank::kNumBands> + in, + rtc::ArrayView out) { + std::fill(out.begin(), out.end(), 0); + for (int upsampling_index = 0; upsampling_index < kSubSampling; + ++upsampling_index) { + for (int in_shift = 0; in_shift < kStride; ++in_shift) { + // Choose filter, skip zero filters. + const int index = upsampling_index + in_shift * kSubSampling; + if (index == kZeroFilterIndex1 || index == kZeroFilterIndex2) { + continue; + } + const int filter_index = + index < kZeroFilterIndex1 + ? index + : (index < kZeroFilterIndex2 ? index - 1 : index - 2); + + rtc::ArrayView filter( + kFilterCoeffs[filter_index]); + rtc::ArrayView dct_modulation( + kDctModulation[filter_index]); + rtc::ArrayView state(state_synthesis_[filter_index]); + + // Prepare filter input by modulating the banded input. 
+ std::array in_subsampled; + std::fill(in_subsampled.begin(), in_subsampled.end(), 0.f); + for (int band = 0; band < ThreeBandFilterBank::kNumBands; ++band) { + RTC_DCHECK_EQ(in[band].size(), kSplitBandSize); + for (int n = 0; n < kSplitBandSize; ++n) { + in_subsampled[n] += dct_modulation[band] * in[band][n]; + } + } + + // Filter. + std::array out_subsampled; + FilterCore(filter, in_subsampled, in_shift, out_subsampled, state); + + // Upsample. + constexpr float kUpsamplingScaling = kSubSampling; + for (int k = 0; k < kSplitBandSize; ++k) { + out[upsampling_index + kSubSampling * k] += + kUpsamplingScaling * out_subsampled[k]; + } + } + } +} + +} // namespace webrtc diff --git a/modules/audio_processing/three_band_filter_bank.h b/modules/audio_processing/three_band_filter_bank.h new file mode 100644 index 0000000..e6346de --- /dev/null +++ b/modules/audio_processing/three_band_filter_bank.h @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_AUDIO_PROCESSING_THREE_BAND_FILTER_BANK_H_ +#define MODULES_AUDIO_PROCESSING_THREE_BAND_FILTER_BANK_H_ + +#include +#include +#include +#include + +#include "api/array_view.h" + +namespace webrtc { + +constexpr int kSparsity = 4; +constexpr int kStrideLog2 = 2; +constexpr int kStride = 1 << kStrideLog2; +constexpr int kNumZeroFilters = 2; +constexpr int kFilterSize = 4; +constexpr int kMemorySize = kFilterSize * kStride - 1; +static_assert(kMemorySize == 15, + "The memory size must be sufficient to provide memory for the " + "shifted filters"); + +// An implementation of a 3-band FIR filter-bank with DCT modulation, similar to +// the proposed in "Multirate Signal Processing for Communication Systems" by +// Fredric J Harris. +// The low-pass filter prototype has these characteristics: +// * Pass-band ripple = 0.3dB +// * Pass-band frequency = 0.147 (7kHz at 48kHz) +// * Stop-band attenuation = 40dB +// * Stop-band frequency = 0.192 (9.2kHz at 48kHz) +// * Delay = 24 samples (500us at 48kHz) +// * Linear phase +// This filter bank does not satisfy perfect reconstruction. The SNR after +// analysis and synthesis (with no processing in between) is approximately 9.5dB +// depending on the input signal after compensating for the delay. +class ThreeBandFilterBank final { + public: + static const int kNumBands = 3; + static const int kFullBandSize = 480; + static const int kSplitBandSize = + ThreeBandFilterBank::kFullBandSize / ThreeBandFilterBank::kNumBands; + static const int kNumNonZeroFilters = + kSparsity * ThreeBandFilterBank::kNumBands - kNumZeroFilters; + + ThreeBandFilterBank(); + ~ThreeBandFilterBank(); + + // Splits |in| of size kFullBandSize into 3 downsampled frequency bands in + // |out|, each of size 160. + void Analysis(rtc::ArrayView in, + rtc::ArrayView, kNumBands> out); + + // Merges the 3 downsampled frequency bands in |in|, each of size 160, into + // |out|, which is of size kFullBandSize. 
+ void Synthesis(rtc::ArrayView, kNumBands> in, + rtc::ArrayView out); + + private: + std::array, kNumNonZeroFilters> + state_analysis_; + std::array, kNumNonZeroFilters> + state_synthesis_; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_THREE_BAND_FILTER_BANK_H_ diff --git a/modules/audio_processing/transient/BUILD.gn b/modules/audio_processing/transient/BUILD.gn new file mode 100644 index 0000000..13e319f --- /dev/null +++ b/modules/audio_processing/transient/BUILD.gn @@ -0,0 +1,112 @@ +# Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("../../../webrtc.gni") + +rtc_source_set("transient_suppressor_api") { + sources = [ "transient_suppressor.h" ] +} + +rtc_library("transient_suppressor_impl") { + visibility = [ + "..:optionally_built_submodule_creators", + ":transient_suppression_test", + ":transient_suppression_unittests", + ":click_annotate", + ] + sources = [ + "common.h", + "daubechies_8_wavelet_coeffs.h", + "dyadic_decimator.h", + "moving_moments.cc", + "moving_moments.h", + "transient_detector.cc", + "transient_detector.h", + "transient_suppressor_impl.cc", + "transient_suppressor_impl.h", + "windows_private.h", + "wpd_node.cc", + "wpd_node.h", + "wpd_tree.cc", + "wpd_tree.h", + ] + deps = [ + ":transient_suppressor_api", + "../../../common_audio:common_audio", + "../../../common_audio:common_audio_c", + "../../../common_audio:fir_filter", + "../../../common_audio:fir_filter_factory", + "../../../common_audio/third_party/ooura:fft_size_256", + "../../../rtc_base:checks", + "../../../rtc_base:gtest_prod", + "../../../rtc_base:logging", + ] +} + +if (rtc_include_tests) { + 
rtc_executable("click_annotate") { + testonly = true + sources = [ + "click_annotate.cc", + "file_utils.cc", + "file_utils.h", + ] + deps = [ + ":transient_suppressor_impl", + "..:audio_processing", + "../../../rtc_base/system:file_wrapper", + "../../../system_wrappers", + ] + } + + rtc_executable("transient_suppression_test") { + testonly = true + sources = [ + "file_utils.cc", + "file_utils.h", + "transient_suppression_test.cc", + ] + deps = [ + ":transient_suppressor_impl", + "..:audio_processing", + "../../../common_audio", + "../../../rtc_base:rtc_base_approved", + "../../../rtc_base/system:file_wrapper", + "../../../system_wrappers", + "../../../test:fileutils", + "../../../test:test_support", + "../agc:level_estimation", + "//testing/gtest", + "//third_party/abseil-cpp/absl/flags:flag", + "//third_party/abseil-cpp/absl/flags:parse", + ] + } + + rtc_library("transient_suppression_unittests") { + testonly = true + sources = [ + "dyadic_decimator_unittest.cc", + "file_utils.cc", + "file_utils.h", + "file_utils_unittest.cc", + "moving_moments_unittest.cc", + "transient_detector_unittest.cc", + "transient_suppressor_unittest.cc", + "wpd_node_unittest.cc", + "wpd_tree_unittest.cc", + ] + deps = [ + ":transient_suppressor_impl", + "../../../rtc_base:stringutils", + "../../../rtc_base/system:file_wrapper", + "../../../test:fileutils", + "../../../test:test_support", + "//testing/gtest", + ] + } +} diff --git a/modules/audio_processing/transient/click_annotate.cc b/modules/audio_processing/transient/click_annotate.cc new file mode 100644 index 0000000..21641f8 --- /dev/null +++ b/modules/audio_processing/transient/click_annotate.cc @@ -0,0 +1,107 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include +#include +#include +#include +#include + +#include "modules/audio_processing/transient/file_utils.h" +#include "modules/audio_processing/transient/transient_detector.h" +#include "rtc_base/system/file_wrapper.h" + +using webrtc::FileWrapper; +using webrtc::TransientDetector; + +// Application to generate a RTP timing file. +// Opens the PCM file and divides the signal in frames. +// Creates a send times array, one for each step. +// Each block that contains a transient, has an infinite send time. +// The resultant array is written to a DAT file +// Returns -1 on error or |lost_packets| otherwise. +int main(int argc, char* argv[]) { + if (argc != 5) { + printf("\n%s - Application to generate a RTP timing file.\n\n", argv[0]); + printf("%s PCMfile DATfile chunkSize sampleRate\n\n", argv[0]); + printf("Opens the PCMfile with sampleRate in Hertz.\n"); + printf("Creates a send times array, one for each chunkSize "); + printf("milliseconds step.\n"); + printf("Each block that contains a transient, has an infinite send time. 
"); + printf("The resultant array is written to a DATfile.\n\n"); + return 0; + } + + FileWrapper pcm_file = FileWrapper::OpenReadOnly(argv[1]); + if (!pcm_file.is_open()) { + printf("\nThe %s could not be opened.\n\n", argv[1]); + return -1; + } + + FileWrapper dat_file = FileWrapper::OpenWriteOnly(argv[2]); + if (!dat_file.is_open()) { + printf("\nThe %s could not be opened.\n\n", argv[2]); + return -1; + } + + int chunk_size_ms = atoi(argv[3]); + if (chunk_size_ms <= 0) { + printf("\nThe chunkSize must be a positive integer\n\n"); + return -1; + } + + int sample_rate_hz = atoi(argv[4]); + if (sample_rate_hz <= 0) { + printf("\nThe sampleRate must be a positive integer\n\n"); + return -1; + } + + TransientDetector detector(sample_rate_hz); + int lost_packets = 0; + size_t audio_buffer_length = chunk_size_ms * sample_rate_hz / 1000; + std::unique_ptr audio_buffer(new float[audio_buffer_length]); + std::vector send_times; + + // Read first buffer from the PCM test file. + size_t file_samples_read = ReadInt16FromFileToFloatBuffer( + &pcm_file, audio_buffer_length, audio_buffer.get()); + for (int time = 0; file_samples_read > 0; time += chunk_size_ms) { + // Pad the rest of the buffer with zeros. + for (size_t i = file_samples_read; i < audio_buffer_length; ++i) { + audio_buffer[i] = 0.0; + } + float value = + detector.Detect(audio_buffer.get(), audio_buffer_length, NULL, 0); + if (value < 0.5f) { + value = time; + } else { + value = FLT_MAX; + ++lost_packets; + } + send_times.push_back(value); + + // Read next buffer from the PCM test file. 
+ file_samples_read = ReadInt16FromFileToFloatBuffer( + &pcm_file, audio_buffer_length, audio_buffer.get()); + } + + size_t floats_written = + WriteFloatBufferToFile(&dat_file, send_times.size(), &send_times[0]); + + if (floats_written == 0) { + printf("\nThe send times could not be written to DAT file\n\n"); + return -1; + } + + pcm_file.Close(); + dat_file.Close(); + + return lost_packets; +} diff --git a/modules/audio_processing/transient/common.h b/modules/audio_processing/transient/common.h new file mode 100644 index 0000000..63c9a7b --- /dev/null +++ b/modules/audio_processing/transient/common.h @@ -0,0 +1,27 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_COMMON_H_ +#define MODULES_AUDIO_PROCESSING_TRANSIENT_COMMON_H_ +namespace webrtc { +namespace ts { + +static const float kPi = 3.14159265358979323846f; +static const int kChunkSizeMs = 10; +enum { + kSampleRate8kHz = 8000, + kSampleRate16kHz = 16000, + kSampleRate32kHz = 32000, + kSampleRate48kHz = 48000 +}; + +} // namespace ts +} // namespace webrtc +#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_COMMON_H_ diff --git a/modules/audio_processing/transient/daubechies_8_wavelet_coeffs.h b/modules/audio_processing/transient/daubechies_8_wavelet_coeffs.h new file mode 100644 index 0000000..92233bf --- /dev/null +++ b/modules/audio_processing/transient/daubechies_8_wavelet_coeffs.h @@ -0,0 +1,44 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// This header file defines the coefficients of the FIR based approximation of +// the Meyer Wavelet +#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_DAUBECHIES_8_WAVELET_COEFFS_H_ +#define MODULES_AUDIO_PROCESSING_TRANSIENT_DAUBECHIES_8_WAVELET_COEFFS_H_ + +// Decomposition coefficients Daubechies 8. + +namespace webrtc { + +const int kDaubechies8CoefficientsLength = 16; + +const float kDaubechies8HighPassCoefficients[kDaubechies8CoefficientsLength] = { + -5.44158422430816093862e-02f, 3.12871590914465924627e-01f, + -6.75630736298012846142e-01f, 5.85354683654869090148e-01f, + 1.58291052560238926228e-02f, -2.84015542962428091389e-01f, + -4.72484573997972536787e-04f, 1.28747426620186011803e-01f, + 1.73693010020221083600e-02f, -4.40882539310647192377e-02f, + -1.39810279170155156436e-02f, 8.74609404701565465445e-03f, + 4.87035299301066034600e-03f, -3.91740372995977108837e-04f, + -6.75449405998556772109e-04f, -1.17476784002281916305e-04f}; + +const float kDaubechies8LowPassCoefficients[kDaubechies8CoefficientsLength] = { + -1.17476784002281916305e-04f, 6.75449405998556772109e-04f, + -3.91740372995977108837e-04f, -4.87035299301066034600e-03f, + 8.74609404701565465445e-03f, 1.39810279170155156436e-02f, + -4.40882539310647192377e-02f, -1.73693010020221083600e-02f, + 1.28747426620186011803e-01f, 4.72484573997972536787e-04f, + -2.84015542962428091389e-01f, -1.58291052560238926228e-02f, + 5.85354683654869090148e-01f, 6.75630736298012846142e-01f, + 3.12871590914465924627e-01f, 5.44158422430816093862e-02f}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_DAUBECHIES_8_WAVELET_COEFFS_H_ diff --git 
/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_DYADIC_DECIMATOR_H_
#define MODULES_AUDIO_PROCESSING_TRANSIENT_DYADIC_DECIMATOR_H_

#include <stddef.h>

// Provides a set of static methods to perform dyadic decimations.

namespace webrtc {

// Returns the proper length of the output buffer that you should use for the
// given |in_length| and decimation |odd_sequence|.
// (Never fails: for an odd |in_length| the even sequence keeps the extra
// sample, so the result is ceil(in_length / 2) for even sequences and
// floor(in_length / 2) for odd sequences.)
inline size_t GetOutLengthToDyadicDecimate(size_t in_length,
                                           bool odd_sequence) {
  size_t out_length = in_length / 2;

  // The even-indexed sequence of an odd-length input has one extra sample.
  if (in_length % 2 == 1 && !odd_sequence) {
    ++out_length;
  }

  return out_length;
}

// Performs a dyadic decimation: removes every odd/even member of a sequence
// halving its overall length.
// Arguments:
//    in: array of |in_length|.
//    odd_sequence: If false, the odd members will be removed (1, 3, 5, ...);
//                  if true, the even members will be removed (0, 2, 4, ...).
//    out: array of |out_length|. |out_length| must be large enough to
//         hold the decimated output. The necessary length can be provided by
//         GetOutLengthToDyadicDecimate().
//         Must be previously allocated.
// Returns the number of output samples written, or 0 on error (null buffer,
// empty input, or |out_length| too small).
template <typename T>
static size_t DyadicDecimate(const T* in,
                             size_t in_length,
                             bool odd_sequence,
                             T* out,
                             size_t out_length) {
  size_t half_length = GetOutLengthToDyadicDecimate(in_length, odd_sequence);

  // |in_length| is unsigned, so "empty" is exactly zero.
  if (!in || !out || in_length == 0 || out_length < half_length) {
    return 0;
  }

  size_t output_samples = 0;
  // Start at index 1 when dropping the even-indexed members.
  size_t index_adjustment = odd_sequence ? 1 : 0;
  for (output_samples = 0; output_samples < half_length; ++output_samples) {
    out[output_samples] = in[output_samples * 2 + index_adjustment];
  }

  return output_samples;
}

}  // namespace webrtc

#endif  // MODULES_AUDIO_PROCESSING_TRANSIENT_DYADIC_DECIMATOR_H_
+ */ + +#include "modules/audio_processing/transient/dyadic_decimator.h" + +#include "test/gtest.h" + +namespace webrtc { + +static const size_t kEvenBufferLength = 6; +static const size_t kOddBufferLength = 5; +static const size_t kOutBufferLength = 3; + +int16_t const test_buffer_even_len[] = {0, 1, 2, 3, 4, 5}; +int16_t const test_buffer_odd_len[] = {0, 1, 2, 3, 4}; +int16_t test_buffer_out[kOutBufferLength]; + +TEST(DyadicDecimatorTest, GetOutLengthToDyadicDecimate) { + EXPECT_EQ(3u, GetOutLengthToDyadicDecimate(6, false)); + EXPECT_EQ(3u, GetOutLengthToDyadicDecimate(6, true)); + EXPECT_EQ(3u, GetOutLengthToDyadicDecimate(5, false)); + EXPECT_EQ(2u, GetOutLengthToDyadicDecimate(5, true)); +} + +TEST(DyadicDecimatorTest, DyadicDecimateErrorValues) { + size_t out_samples = 0; + + out_samples = DyadicDecimate(static_cast(NULL), kEvenBufferLength, + false, // Even sequence. + test_buffer_out, kOutBufferLength); + EXPECT_EQ(0u, out_samples); + + out_samples = DyadicDecimate(test_buffer_even_len, kEvenBufferLength, + false, // Even sequence. + static_cast(NULL), kOutBufferLength); + EXPECT_EQ(0u, out_samples); + + // Less than required |out_length|. + out_samples = DyadicDecimate(test_buffer_even_len, kEvenBufferLength, + false, // Even sequence. + test_buffer_out, 2); + EXPECT_EQ(0u, out_samples); +} + +TEST(DyadicDecimatorTest, DyadicDecimateEvenLengthEvenSequence) { + size_t expected_out_samples = + GetOutLengthToDyadicDecimate(kEvenBufferLength, false); + + size_t out_samples = DyadicDecimate(test_buffer_even_len, kEvenBufferLength, + false, // Even sequence. 
+ test_buffer_out, kOutBufferLength); + + EXPECT_EQ(expected_out_samples, out_samples); + + EXPECT_EQ(0, test_buffer_out[0]); + EXPECT_EQ(2, test_buffer_out[1]); + EXPECT_EQ(4, test_buffer_out[2]); +} + +TEST(DyadicDecimatorTest, DyadicDecimateEvenLengthOddSequence) { + size_t expected_out_samples = + GetOutLengthToDyadicDecimate(kEvenBufferLength, true); + + size_t out_samples = DyadicDecimate(test_buffer_even_len, kEvenBufferLength, + true, // Odd sequence. + test_buffer_out, kOutBufferLength); + + EXPECT_EQ(expected_out_samples, out_samples); + + EXPECT_EQ(1, test_buffer_out[0]); + EXPECT_EQ(3, test_buffer_out[1]); + EXPECT_EQ(5, test_buffer_out[2]); +} + +TEST(DyadicDecimatorTest, DyadicDecimateOddLengthEvenSequence) { + size_t expected_out_samples = + GetOutLengthToDyadicDecimate(kOddBufferLength, false); + + size_t out_samples = DyadicDecimate(test_buffer_odd_len, kOddBufferLength, + false, // Even sequence. + test_buffer_out, kOutBufferLength); + + EXPECT_EQ(expected_out_samples, out_samples); + + EXPECT_EQ(0, test_buffer_out[0]); + EXPECT_EQ(2, test_buffer_out[1]); + EXPECT_EQ(4, test_buffer_out[2]); +} + +TEST(DyadicDecimatorTest, DyadicDecimateOddLengthOddSequence) { + size_t expected_out_samples = + GetOutLengthToDyadicDecimate(kOddBufferLength, true); + + size_t out_samples = DyadicDecimate(test_buffer_odd_len, kOddBufferLength, + true, // Odd sequence. + test_buffer_out, kOutBufferLength); + + EXPECT_EQ(expected_out_samples, out_samples); + + EXPECT_EQ(1, test_buffer_out[0]); + EXPECT_EQ(3, test_buffer_out[1]); +} + +} // namespace webrtc diff --git a/modules/audio_processing/transient/file_utils.cc b/modules/audio_processing/transient/file_utils.cc new file mode 100644 index 0000000..58f9932 --- /dev/null +++ b/modules/audio_processing/transient/file_utils.cc @@ -0,0 +1,257 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/transient/file_utils.h" + +#include + +#include "rtc_base/system/file_wrapper.h" + +namespace webrtc { + +int ConvertByteArrayToFloat(const uint8_t bytes[4], float* out) { + if (!bytes || !out) { + return -1; + } + + uint32_t binary_value = 0; + for (int i = 3; i >= 0; --i) { + binary_value <<= 8; + binary_value += bytes[i]; + } + + *out = bit_cast(binary_value); + + return 0; +} + +int ConvertByteArrayToDouble(const uint8_t bytes[8], double* out) { + if (!bytes || !out) { + return -1; + } + + uint64_t binary_value = 0; + for (int i = 7; i >= 0; --i) { + binary_value <<= 8; + binary_value += bytes[i]; + } + + *out = bit_cast(binary_value); + + return 0; +} + +int ConvertFloatToByteArray(float value, uint8_t out_bytes[4]) { + if (!out_bytes) { + return -1; + } + + uint32_t binary_value = bit_cast(value); + for (size_t i = 0; i < 4; ++i) { + out_bytes[i] = binary_value; + binary_value >>= 8; + } + + return 0; +} + +int ConvertDoubleToByteArray(double value, uint8_t out_bytes[8]) { + if (!out_bytes) { + return -1; + } + + uint64_t binary_value = bit_cast(value); + for (size_t i = 0; i < 8; ++i) { + out_bytes[i] = binary_value; + binary_value >>= 8; + } + + return 0; +} + +size_t ReadInt16BufferFromFile(FileWrapper* file, + size_t length, + int16_t* buffer) { + if (!file || !file->is_open() || !buffer || length <= 0) { + return 0; + } + + std::unique_ptr byte_array(new uint8_t[2]); + + size_t int16s_read = 0; + + while (int16s_read < length) { + size_t bytes_read = file->Read(byte_array.get(), 2); + if (bytes_read < 2) { + break; + } + int16_t value = byte_array[1]; + value <<= 8; + value += 
byte_array[0]; + buffer[int16s_read] = value; + ++int16s_read; + } + + return int16s_read; +} + +size_t ReadInt16FromFileToFloatBuffer(FileWrapper* file, + size_t length, + float* buffer) { + if (!file || !file->is_open() || !buffer || length <= 0) { + return 0; + } + + std::unique_ptr buffer16(new int16_t[length]); + + size_t int16s_read = ReadInt16BufferFromFile(file, length, buffer16.get()); + + for (size_t i = 0; i < int16s_read; ++i) { + buffer[i] = buffer16[i]; + } + + return int16s_read; +} + +size_t ReadInt16FromFileToDoubleBuffer(FileWrapper* file, + size_t length, + double* buffer) { + if (!file || !file->is_open() || !buffer || length <= 0) { + return 0; + } + + std::unique_ptr buffer16(new int16_t[length]); + + size_t int16s_read = ReadInt16BufferFromFile(file, length, buffer16.get()); + + for (size_t i = 0; i < int16s_read; ++i) { + buffer[i] = buffer16[i]; + } + + return int16s_read; +} + +size_t ReadFloatBufferFromFile(FileWrapper* file, + size_t length, + float* buffer) { + if (!file || !file->is_open() || !buffer || length <= 0) { + return 0; + } + + std::unique_ptr byte_array(new uint8_t[4]); + + size_t floats_read = 0; + + while (floats_read < length) { + size_t bytes_read = file->Read(byte_array.get(), 4); + if (bytes_read < 4) { + break; + } + ConvertByteArrayToFloat(byte_array.get(), &buffer[floats_read]); + ++floats_read; + } + + return floats_read; +} + +size_t ReadDoubleBufferFromFile(FileWrapper* file, + size_t length, + double* buffer) { + if (!file || !file->is_open() || !buffer || length <= 0) { + return 0; + } + + std::unique_ptr byte_array(new uint8_t[8]); + + size_t doubles_read = 0; + + while (doubles_read < length) { + size_t bytes_read = file->Read(byte_array.get(), 8); + if (bytes_read < 8) { + break; + } + ConvertByteArrayToDouble(byte_array.get(), &buffer[doubles_read]); + ++doubles_read; + } + + return doubles_read; +} + +size_t WriteInt16BufferToFile(FileWrapper* file, + size_t length, + const int16_t* buffer) { + if (!file 
|| !file->is_open() || !buffer || length <= 0) { + return 0; + } + + std::unique_ptr byte_array(new uint8_t[2]); + + size_t int16s_written = 0; + + for (int16s_written = 0; int16s_written < length; ++int16s_written) { + // Get byte representation. + byte_array[0] = buffer[int16s_written] & 0xFF; + byte_array[1] = (buffer[int16s_written] >> 8) & 0xFF; + + file->Write(byte_array.get(), 2); + } + + file->Flush(); + + return int16s_written; +} + +size_t WriteFloatBufferToFile(FileWrapper* file, + size_t length, + const float* buffer) { + if (!file || !file->is_open() || !buffer || length <= 0) { + return 0; + } + + std::unique_ptr byte_array(new uint8_t[4]); + + size_t floats_written = 0; + + for (floats_written = 0; floats_written < length; ++floats_written) { + // Get byte representation. + ConvertFloatToByteArray(buffer[floats_written], byte_array.get()); + + file->Write(byte_array.get(), 4); + } + + file->Flush(); + + return floats_written; +} + +size_t WriteDoubleBufferToFile(FileWrapper* file, + size_t length, + const double* buffer) { + if (!file || !file->is_open() || !buffer || length <= 0) { + return 0; + } + + std::unique_ptr byte_array(new uint8_t[8]); + + size_t doubles_written = 0; + + for (doubles_written = 0; doubles_written < length; ++doubles_written) { + // Get byte representation. + ConvertDoubleToByteArray(buffer[doubles_written], byte_array.get()); + + file->Write(byte_array.get(), 8); + } + + file->Flush(); + + return doubles_written; +} + +} // namespace webrtc diff --git a/modules/audio_processing/transient/file_utils.h b/modules/audio_processing/transient/file_utils.h new file mode 100644 index 0000000..6184017 --- /dev/null +++ b/modules/audio_processing/transient/file_utils.h @@ -0,0 +1,117 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_FILE_UTILS_H_ +#define MODULES_AUDIO_PROCESSING_TRANSIENT_FILE_UTILS_H_ + +#include + +#include "rtc_base/system/file_wrapper.h" + +namespace webrtc { + +// This is a copy of the cast included in the Chromium codebase here: +// http://cs.chromium.org/src/third_party/cld/base/casts.h +template +inline Dest bit_cast(const Source& source) { + // A compile error here means your Dest and Source have different sizes. + static_assert(sizeof(Dest) == sizeof(Source), + "Dest and Source have different sizes"); + + Dest dest; + memcpy(&dest, &source, sizeof(dest)); + return dest; +} + +// Converts the byte array with binary float representation to float. +// Bytes must be in little-endian order. +// Returns 0 if correct, -1 on error. +int ConvertByteArrayToFloat(const uint8_t bytes[4], float* out); + +// Converts the byte array with binary double representation to double. +// Bytes must be in little-endian order. +// Returns 0 if correct, -1 on error. +int ConvertByteArrayToDouble(const uint8_t bytes[8], double* out); + +// Converts a float to a byte array with binary float representation. +// Bytes will be in little-endian order. +// Returns 0 if correct, -1 on error. +int ConvertFloatToByteArray(float value, uint8_t out_bytes[4]); + +// Converts a double to a byte array with binary double representation. +// Bytes will be in little-endian order. +// Returns 0 if correct, -1 on error. +int ConvertDoubleToByteArray(double value, uint8_t out_bytes[8]); + +// Reads |length| 16-bit integers from |file| to |buffer|. +// |file| must be previously opened. +// Returns the number of 16-bit integers read or -1 on error. 
+size_t ReadInt16BufferFromFile(FileWrapper* file, + size_t length, + int16_t* buffer); + +// Reads |length| 16-bit integers from |file| and stores those values +// (converting them) in |buffer|. +// |file| must be previously opened. +// Returns the number of 16-bit integers read or -1 on error. +size_t ReadInt16FromFileToFloatBuffer(FileWrapper* file, + size_t length, + float* buffer); + +// Reads |length| 16-bit integers from |file| and stores those values +// (converting them) in |buffer|. +// |file| must be previously opened. +// Returns the number of 16-bit integers read or -1 on error. +size_t ReadInt16FromFileToDoubleBuffer(FileWrapper* file, + size_t length, + double* buffer); + +// Reads |length| floats in binary representation (4 bytes) from |file| to +// |buffer|. +// |file| must be previously opened. +// Returns the number of floats read or -1 on error. +size_t ReadFloatBufferFromFile(FileWrapper* file, size_t length, float* buffer); + +// Reads |length| doubles in binary representation (8 bytes) from |file| to +// |buffer|. +// |file| must be previously opened. +// Returns the number of doubles read or -1 on error. +size_t ReadDoubleBufferFromFile(FileWrapper* file, + size_t length, + double* buffer); + +// Writes |length| 16-bit integers from |buffer| in binary representation (2 +// bytes) to |file|. It flushes |file|, so after this call there are no +// writings pending. +// |file| must be previously opened. +// Returns the number of doubles written or -1 on error. +size_t WriteInt16BufferToFile(FileWrapper* file, + size_t length, + const int16_t* buffer); + +// Writes |length| floats from |buffer| in binary representation (4 bytes) to +// |file|. It flushes |file|, so after this call there are no writtings pending. +// |file| must be previously opened. +// Returns the number of doubles written or -1 on error. 
+size_t WriteFloatBufferToFile(FileWrapper* file, + size_t length, + const float* buffer); + +// Writes |length| doubles from |buffer| in binary representation (8 bytes) to +// |file|. It flushes |file|, so after this call there are no writings pending. +// |file| must be previously opened. +// Returns the number of doubles written or -1 on error. +size_t WriteDoubleBufferToFile(FileWrapper* file, + size_t length, + const double* buffer); + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_FILE_UTILS_H_ diff --git a/modules/audio_processing/transient/file_utils_unittest.cc b/modules/audio_processing/transient/file_utils_unittest.cc new file mode 100644 index 0000000..1bcf6f9 --- /dev/null +++ b/modules/audio_processing/transient/file_utils_unittest.cc @@ -0,0 +1,500 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/audio_processing/transient/file_utils.h" + +#include + +#include +#include +#include + +#include "rtc_base/system/file_wrapper.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" + +namespace webrtc { + +static const uint8_t kPiBytesf[4] = {0xDB, 0x0F, 0x49, 0x40}; +static const uint8_t kEBytesf[4] = {0x54, 0xF8, 0x2D, 0x40}; +static const uint8_t kAvogadroBytesf[4] = {0x2F, 0x0C, 0xFF, 0x66}; + +static const uint8_t kPiBytes[8] = {0x18, 0x2D, 0x44, 0x54, + 0xFB, 0x21, 0x09, 0x40}; +static const uint8_t kEBytes[8] = {0x69, 0x57, 0x14, 0x8B, + 0x0A, 0xBF, 0x05, 0x40}; +static const uint8_t kAvogadroBytes[8] = {0xF4, 0xBC, 0xA8, 0xDF, + 0x85, 0xE1, 0xDF, 0x44}; + +static const double kPi = 3.14159265358979323846; +static const double kE = 2.71828182845904523536; +static const double kAvogadro = 602214100000000000000000.0; + +class TransientFileUtilsTest : public ::testing::Test { + protected: + TransientFileUtilsTest() + : kTestFileName( + test::ResourcePath("audio_processing/transient/double-utils", + "dat")), + kTestFileNamef( + test::ResourcePath("audio_processing/transient/float-utils", + "dat")) {} + + ~TransientFileUtilsTest() override { CleanupTempFiles(); } + + std::string CreateTempFilename(const std::string& dir, + const std::string& prefix) { + std::string filename = test::TempFilename(dir, prefix); + temp_filenames_.push_back(filename); + return filename; + } + + void CleanupTempFiles() { + for (const std::string& filename : temp_filenames_) { + remove(filename.c_str()); + } + temp_filenames_.clear(); + } + + // This file (used in some tests) contains binary data. The data correspond to + // the double representation of the constants: Pi, E, and the Avogadro's + // Number; + // appended in that order. + const std::string kTestFileName; + + // This file (used in some tests) contains binary data. 
The data correspond to + // the float representation of the constants: Pi, E, and the Avogadro's + // Number; + // appended in that order. + const std::string kTestFileNamef; + + // List of temporary filenames created by CreateTempFilename. + std::vector temp_filenames_; +}; + +#if defined(WEBRTC_IOS) +#define MAYBE_ConvertByteArrayToFloat DISABLED_ConvertByteArrayToFloat +#else +#define MAYBE_ConvertByteArrayToFloat ConvertByteArrayToFloat +#endif +TEST_F(TransientFileUtilsTest, MAYBE_ConvertByteArrayToFloat) { + float value = 0.0; + + EXPECT_EQ(0, ConvertByteArrayToFloat(kPiBytesf, &value)); + EXPECT_FLOAT_EQ(kPi, value); + + EXPECT_EQ(0, ConvertByteArrayToFloat(kEBytesf, &value)); + EXPECT_FLOAT_EQ(kE, value); + + EXPECT_EQ(0, ConvertByteArrayToFloat(kAvogadroBytesf, &value)); + EXPECT_FLOAT_EQ(kAvogadro, value); +} + +#if defined(WEBRTC_IOS) +#define MAYBE_ConvertByteArrayToDouble DISABLED_ConvertByteArrayToDouble +#else +#define MAYBE_ConvertByteArrayToDouble ConvertByteArrayToDouble +#endif +TEST_F(TransientFileUtilsTest, MAYBE_ConvertByteArrayToDouble) { + double value = 0.0; + + EXPECT_EQ(0, ConvertByteArrayToDouble(kPiBytes, &value)); + EXPECT_DOUBLE_EQ(kPi, value); + + EXPECT_EQ(0, ConvertByteArrayToDouble(kEBytes, &value)); + EXPECT_DOUBLE_EQ(kE, value); + + EXPECT_EQ(0, ConvertByteArrayToDouble(kAvogadroBytes, &value)); + EXPECT_DOUBLE_EQ(kAvogadro, value); +} + +#if defined(WEBRTC_IOS) +#define MAYBE_ConvertFloatToByteArray DISABLED_ConvertFloatToByteArray +#else +#define MAYBE_ConvertFloatToByteArray ConvertFloatToByteArray +#endif +TEST_F(TransientFileUtilsTest, MAYBE_ConvertFloatToByteArray) { + std::unique_ptr bytes(new uint8_t[4]); + + EXPECT_EQ(0, ConvertFloatToByteArray(kPi, bytes.get())); + EXPECT_EQ(0, memcmp(bytes.get(), kPiBytesf, 4)); + + EXPECT_EQ(0, ConvertFloatToByteArray(kE, bytes.get())); + EXPECT_EQ(0, memcmp(bytes.get(), kEBytesf, 4)); + + EXPECT_EQ(0, ConvertFloatToByteArray(kAvogadro, bytes.get())); + EXPECT_EQ(0, 
memcmp(bytes.get(), kAvogadroBytesf, 4)); +} + +#if defined(WEBRTC_IOS) +#define MAYBE_ConvertDoubleToByteArray DISABLED_ConvertDoubleToByteArray +#else +#define MAYBE_ConvertDoubleToByteArray ConvertDoubleToByteArray +#endif +TEST_F(TransientFileUtilsTest, MAYBE_ConvertDoubleToByteArray) { + std::unique_ptr bytes(new uint8_t[8]); + + EXPECT_EQ(0, ConvertDoubleToByteArray(kPi, bytes.get())); + EXPECT_EQ(0, memcmp(bytes.get(), kPiBytes, 8)); + + EXPECT_EQ(0, ConvertDoubleToByteArray(kE, bytes.get())); + EXPECT_EQ(0, memcmp(bytes.get(), kEBytes, 8)); + + EXPECT_EQ(0, ConvertDoubleToByteArray(kAvogadro, bytes.get())); + EXPECT_EQ(0, memcmp(bytes.get(), kAvogadroBytes, 8)); +} + +#if defined(WEBRTC_IOS) +#define MAYBE_ReadInt16BufferFromFile DISABLED_ReadInt16BufferFromFile +#else +#define MAYBE_ReadInt16BufferFromFile ReadInt16BufferFromFile +#endif +TEST_F(TransientFileUtilsTest, MAYBE_ReadInt16BufferFromFile) { + std::string test_filename = kTestFileName; + + FileWrapper file = FileWrapper::OpenReadOnly(test_filename.c_str()); + ASSERT_TRUE(file.is_open()) << "File could not be opened:\n" + << kTestFileName.c_str(); + + const size_t kBufferLength = 12; + std::unique_ptr buffer(new int16_t[kBufferLength]); + + EXPECT_EQ(kBufferLength, + ReadInt16BufferFromFile(&file, kBufferLength, buffer.get())); + EXPECT_EQ(22377, buffer[4]); + EXPECT_EQ(16389, buffer[7]); + EXPECT_EQ(17631, buffer[kBufferLength - 1]); + + file.Rewind(); + + // The next test is for checking the case where there are not as much data as + // needed in the file, but reads to the end, and it returns the number of + // int16s read. 
+ const size_t kBufferLenghtLargerThanFile = kBufferLength * 2; + buffer.reset(new int16_t[kBufferLenghtLargerThanFile]); + EXPECT_EQ(kBufferLength, + ReadInt16BufferFromFile(&file, kBufferLenghtLargerThanFile, + buffer.get())); + EXPECT_EQ(11544, buffer[0]); + EXPECT_EQ(22377, buffer[4]); + EXPECT_EQ(16389, buffer[7]); + EXPECT_EQ(17631, buffer[kBufferLength - 1]); +} + +#if defined(WEBRTC_IOS) +#define MAYBE_ReadInt16FromFileToFloatBuffer \ + DISABLED_ReadInt16FromFileToFloatBuffer +#else +#define MAYBE_ReadInt16FromFileToFloatBuffer ReadInt16FromFileToFloatBuffer +#endif +TEST_F(TransientFileUtilsTest, MAYBE_ReadInt16FromFileToFloatBuffer) { + std::string test_filename = kTestFileName; + + FileWrapper file = FileWrapper::OpenReadOnly(test_filename.c_str()); + ASSERT_TRUE(file.is_open()) << "File could not be opened:\n" + << kTestFileName.c_str(); + + const size_t kBufferLength = 12; + std::unique_ptr buffer(new float[kBufferLength]); + + EXPECT_EQ(kBufferLength, + ReadInt16FromFileToFloatBuffer(&file, kBufferLength, buffer.get())); + + EXPECT_DOUBLE_EQ(11544, buffer[0]); + EXPECT_DOUBLE_EQ(22377, buffer[4]); + EXPECT_DOUBLE_EQ(16389, buffer[7]); + EXPECT_DOUBLE_EQ(17631, buffer[kBufferLength - 1]); + + file.Rewind(); + + // The next test is for checking the case where there are not as much data as + // needed in the file, but reads to the end, and it returns the number of + // int16s read. 
+ const size_t kBufferLenghtLargerThanFile = kBufferLength * 2; + buffer.reset(new float[kBufferLenghtLargerThanFile]); + EXPECT_EQ(kBufferLength, + ReadInt16FromFileToFloatBuffer(&file, kBufferLenghtLargerThanFile, + buffer.get())); + EXPECT_DOUBLE_EQ(11544, buffer[0]); + EXPECT_DOUBLE_EQ(22377, buffer[4]); + EXPECT_DOUBLE_EQ(16389, buffer[7]); + EXPECT_DOUBLE_EQ(17631, buffer[kBufferLength - 1]); +} + +#if defined(WEBRTC_IOS) +#define MAYBE_ReadInt16FromFileToDoubleBuffer \ + DISABLED_ReadInt16FromFileToDoubleBuffer +#else +#define MAYBE_ReadInt16FromFileToDoubleBuffer ReadInt16FromFileToDoubleBuffer +#endif +TEST_F(TransientFileUtilsTest, MAYBE_ReadInt16FromFileToDoubleBuffer) { + std::string test_filename = kTestFileName; + + FileWrapper file = FileWrapper::OpenReadOnly(test_filename.c_str()); + ASSERT_TRUE(file.is_open()) << "File could not be opened:\n" + << kTestFileName.c_str(); + + const size_t kBufferLength = 12; + std::unique_ptr buffer(new double[kBufferLength]); + + EXPECT_EQ(kBufferLength, ReadInt16FromFileToDoubleBuffer(&file, kBufferLength, + buffer.get())); + EXPECT_DOUBLE_EQ(11544, buffer[0]); + EXPECT_DOUBLE_EQ(22377, buffer[4]); + EXPECT_DOUBLE_EQ(16389, buffer[7]); + EXPECT_DOUBLE_EQ(17631, buffer[kBufferLength - 1]); + + file.Rewind(); + + // The next test is for checking the case where there are not as much data as + // needed in the file, but reads to the end, and it returns the number of + // int16s read. 
+ const size_t kBufferLenghtLargerThanFile = kBufferLength * 2; + buffer.reset(new double[kBufferLenghtLargerThanFile]); + EXPECT_EQ(kBufferLength, + ReadInt16FromFileToDoubleBuffer(&file, kBufferLenghtLargerThanFile, + buffer.get())); + EXPECT_DOUBLE_EQ(11544, buffer[0]); + EXPECT_DOUBLE_EQ(22377, buffer[4]); + EXPECT_DOUBLE_EQ(16389, buffer[7]); + EXPECT_DOUBLE_EQ(17631, buffer[kBufferLength - 1]); +} + +#if defined(WEBRTC_IOS) +#define MAYBE_ReadFloatBufferFromFile DISABLED_ReadFloatBufferFromFile +#else +#define MAYBE_ReadFloatBufferFromFile ReadFloatBufferFromFile +#endif +TEST_F(TransientFileUtilsTest, MAYBE_ReadFloatBufferFromFile) { + std::string test_filename = kTestFileNamef; + + FileWrapper file = FileWrapper::OpenReadOnly(test_filename.c_str()); + ASSERT_TRUE(file.is_open()) << "File could not be opened:\n" + << kTestFileNamef.c_str(); + + const size_t kBufferLength = 3; + std::unique_ptr buffer(new float[kBufferLength]); + + EXPECT_EQ(kBufferLength, + ReadFloatBufferFromFile(&file, kBufferLength, buffer.get())); + EXPECT_FLOAT_EQ(kPi, buffer[0]); + EXPECT_FLOAT_EQ(kE, buffer[1]); + EXPECT_FLOAT_EQ(kAvogadro, buffer[2]); + + file.Rewind(); + + // The next test is for checking the case where there are not as much data as + // needed in the file, but reads to the end, and it returns the number of + // doubles read. 
+ const size_t kBufferLenghtLargerThanFile = kBufferLength * 2; + buffer.reset(new float[kBufferLenghtLargerThanFile]); + EXPECT_EQ(kBufferLength, + ReadFloatBufferFromFile(&file, kBufferLenghtLargerThanFile, + buffer.get())); + EXPECT_FLOAT_EQ(kPi, buffer[0]); + EXPECT_FLOAT_EQ(kE, buffer[1]); + EXPECT_FLOAT_EQ(kAvogadro, buffer[2]); +} + +#if defined(WEBRTC_IOS) +#define MAYBE_ReadDoubleBufferFromFile DISABLED_ReadDoubleBufferFromFile +#else +#define MAYBE_ReadDoubleBufferFromFile ReadDoubleBufferFromFile +#endif +TEST_F(TransientFileUtilsTest, MAYBE_ReadDoubleBufferFromFile) { + std::string test_filename = kTestFileName; + + FileWrapper file = FileWrapper::OpenReadOnly(test_filename.c_str()); + ASSERT_TRUE(file.is_open()) << "File could not be opened:\n" + << kTestFileName.c_str(); + + const size_t kBufferLength = 3; + std::unique_ptr buffer(new double[kBufferLength]); + + EXPECT_EQ(kBufferLength, + ReadDoubleBufferFromFile(&file, kBufferLength, buffer.get())); + EXPECT_DOUBLE_EQ(kPi, buffer[0]); + EXPECT_DOUBLE_EQ(kE, buffer[1]); + EXPECT_DOUBLE_EQ(kAvogadro, buffer[2]); + + file.Rewind(); + + // The next test is for checking the case where there are not as much data as + // needed in the file, but reads to the end, and it returns the number of + // doubles read. 
+ const size_t kBufferLenghtLargerThanFile = kBufferLength * 2; + buffer.reset(new double[kBufferLenghtLargerThanFile]); + EXPECT_EQ(kBufferLength, + ReadDoubleBufferFromFile(&file, kBufferLenghtLargerThanFile, + buffer.get())); + EXPECT_DOUBLE_EQ(kPi, buffer[0]); + EXPECT_DOUBLE_EQ(kE, buffer[1]); + EXPECT_DOUBLE_EQ(kAvogadro, buffer[2]); +} + +#if defined(WEBRTC_IOS) +#define MAYBE_WriteInt16BufferToFile DISABLED_WriteInt16BufferToFile +#else +#define MAYBE_WriteInt16BufferToFile WriteInt16BufferToFile +#endif +TEST_F(TransientFileUtilsTest, MAYBE_WriteInt16BufferToFile) { + std::string kOutFileName = + CreateTempFilename(test::OutputPath(), "utils_test"); + + FileWrapper file = FileWrapper::OpenWriteOnly(kOutFileName.c_str()); + ASSERT_TRUE(file.is_open()) << "File could not be opened:\n" + << kOutFileName.c_str(); + + const size_t kBufferLength = 3; + std::unique_ptr written_buffer(new int16_t[kBufferLength]); + std::unique_ptr read_buffer(new int16_t[kBufferLength]); + + written_buffer[0] = 1; + written_buffer[1] = 2; + written_buffer[2] = 3; + + EXPECT_EQ(kBufferLength, + WriteInt16BufferToFile(&file, kBufferLength, written_buffer.get())); + + file.Close(); + + file = FileWrapper::OpenReadOnly(kOutFileName.c_str()); + ASSERT_TRUE(file.is_open()) << "File could not be opened:\n" + << kOutFileName.c_str(); + + EXPECT_EQ(kBufferLength, + ReadInt16BufferFromFile(&file, kBufferLength, read_buffer.get())); + EXPECT_EQ(0, memcmp(written_buffer.get(), read_buffer.get(), + kBufferLength * sizeof(written_buffer[0]))); +} + +#if defined(WEBRTC_IOS) +#define MAYBE_WriteFloatBufferToFile DISABLED_WriteFloatBufferToFile +#else +#define MAYBE_WriteFloatBufferToFile WriteFloatBufferToFile +#endif +TEST_F(TransientFileUtilsTest, MAYBE_WriteFloatBufferToFile) { + std::string kOutFileName = + CreateTempFilename(test::OutputPath(), "utils_test"); + + FileWrapper file = FileWrapper::OpenWriteOnly(kOutFileName.c_str()); + ASSERT_TRUE(file.is_open()) << "File could not be 
opened:\n" + << kOutFileName.c_str(); + + const size_t kBufferLength = 3; + std::unique_ptr written_buffer(new float[kBufferLength]); + std::unique_ptr read_buffer(new float[kBufferLength]); + + written_buffer[0] = static_cast(kPi); + written_buffer[1] = static_cast(kE); + written_buffer[2] = static_cast(kAvogadro); + + EXPECT_EQ(kBufferLength, + WriteFloatBufferToFile(&file, kBufferLength, written_buffer.get())); + + file.Close(); + + file = FileWrapper::OpenReadOnly(kOutFileName.c_str()); + ASSERT_TRUE(file.is_open()) << "File could not be opened:\n" + << kOutFileName.c_str(); + + EXPECT_EQ(kBufferLength, + ReadFloatBufferFromFile(&file, kBufferLength, read_buffer.get())); + EXPECT_EQ(0, memcmp(written_buffer.get(), read_buffer.get(), + kBufferLength * sizeof(written_buffer[0]))); +} + +#if defined(WEBRTC_IOS) +#define MAYBE_WriteDoubleBufferToFile DISABLED_WriteDoubleBufferToFile +#else +#define MAYBE_WriteDoubleBufferToFile WriteDoubleBufferToFile +#endif +TEST_F(TransientFileUtilsTest, MAYBE_WriteDoubleBufferToFile) { + std::string kOutFileName = + CreateTempFilename(test::OutputPath(), "utils_test"); + + FileWrapper file = FileWrapper::OpenWriteOnly(kOutFileName.c_str()); + ASSERT_TRUE(file.is_open()) << "File could not be opened:\n" + << kOutFileName.c_str(); + + const size_t kBufferLength = 3; + std::unique_ptr written_buffer(new double[kBufferLength]); + std::unique_ptr read_buffer(new double[kBufferLength]); + + written_buffer[0] = kPi; + written_buffer[1] = kE; + written_buffer[2] = kAvogadro; + + EXPECT_EQ(kBufferLength, WriteDoubleBufferToFile(&file, kBufferLength, + written_buffer.get())); + + file.Close(); + + file = FileWrapper::OpenReadOnly(kOutFileName.c_str()); + ASSERT_TRUE(file.is_open()) << "File could not be opened:\n" + << kOutFileName.c_str(); + + EXPECT_EQ(kBufferLength, + ReadDoubleBufferFromFile(&file, kBufferLength, read_buffer.get())); + EXPECT_EQ(0, memcmp(written_buffer.get(), read_buffer.get(), + kBufferLength * 
sizeof(written_buffer[0]))); +} + +#if defined(WEBRTC_IOS) +#define MAYBE_ExpectedErrorReturnValues DISABLED_ExpectedErrorReturnValues +#else +#define MAYBE_ExpectedErrorReturnValues ExpectedErrorReturnValues +#endif +TEST_F(TransientFileUtilsTest, MAYBE_ExpectedErrorReturnValues) { + std::string test_filename = kTestFileName; + + double value; + std::unique_ptr int16_buffer(new int16_t[1]); + std::unique_ptr double_buffer(new double[1]); + FileWrapper file; + + EXPECT_EQ(-1, ConvertByteArrayToDouble(NULL, &value)); + EXPECT_EQ(-1, ConvertByteArrayToDouble(kPiBytes, NULL)); + + EXPECT_EQ(-1, ConvertDoubleToByteArray(kPi, NULL)); + + // Tests with file not opened. + EXPECT_EQ(0u, ReadInt16BufferFromFile(&file, 1, int16_buffer.get())); + EXPECT_EQ(0u, ReadInt16FromFileToDoubleBuffer(&file, 1, double_buffer.get())); + EXPECT_EQ(0u, ReadDoubleBufferFromFile(&file, 1, double_buffer.get())); + EXPECT_EQ(0u, WriteInt16BufferToFile(&file, 1, int16_buffer.get())); + EXPECT_EQ(0u, WriteDoubleBufferToFile(&file, 1, double_buffer.get())); + + file = FileWrapper::OpenReadOnly(test_filename.c_str()); + ASSERT_TRUE(file.is_open()) << "File could not be opened:\n" + << kTestFileName.c_str(); + + EXPECT_EQ(0u, ReadInt16BufferFromFile(NULL, 1, int16_buffer.get())); + EXPECT_EQ(0u, ReadInt16BufferFromFile(&file, 1, NULL)); + EXPECT_EQ(0u, ReadInt16BufferFromFile(&file, 0, int16_buffer.get())); + + EXPECT_EQ(0u, ReadInt16FromFileToDoubleBuffer(NULL, 1, double_buffer.get())); + EXPECT_EQ(0u, ReadInt16FromFileToDoubleBuffer(&file, 1, NULL)); + EXPECT_EQ(0u, ReadInt16FromFileToDoubleBuffer(&file, 0, double_buffer.get())); + + EXPECT_EQ(0u, ReadDoubleBufferFromFile(NULL, 1, double_buffer.get())); + EXPECT_EQ(0u, ReadDoubleBufferFromFile(&file, 1, NULL)); + EXPECT_EQ(0u, ReadDoubleBufferFromFile(&file, 0, double_buffer.get())); + + EXPECT_EQ(0u, WriteInt16BufferToFile(NULL, 1, int16_buffer.get())); + EXPECT_EQ(0u, WriteInt16BufferToFile(&file, 1, NULL)); + EXPECT_EQ(0u, 
WriteInt16BufferToFile(&file, 0, int16_buffer.get())); + + EXPECT_EQ(0u, WriteDoubleBufferToFile(NULL, 1, double_buffer.get())); + EXPECT_EQ(0u, WriteDoubleBufferToFile(&file, 1, NULL)); + EXPECT_EQ(0u, WriteDoubleBufferToFile(&file, 0, double_buffer.get())); +} + +} // namespace webrtc diff --git a/modules/audio_processing/transient/moving_moments.cc b/modules/audio_processing/transient/moving_moments.cc new file mode 100644 index 0000000..83810bf --- /dev/null +++ b/modules/audio_processing/transient/moving_moments.cc @@ -0,0 +1,50 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/transient/moving_moments.h" + +#include + +#include "rtc_base/checks.h" + +namespace webrtc { + +MovingMoments::MovingMoments(size_t length) + : length_(length), queue_(), sum_(0.0), sum_of_squares_(0.0) { + RTC_DCHECK_GT(length, 0); + for (size_t i = 0; i < length; ++i) { + queue_.push(0.0); + } +} + +MovingMoments::~MovingMoments() {} + +void MovingMoments::CalculateMoments(const float* in, + size_t in_length, + float* first, + float* second) { + RTC_DCHECK(in); + RTC_DCHECK_GT(in_length, 0); + RTC_DCHECK(first); + RTC_DCHECK(second); + + for (size_t i = 0; i < in_length; ++i) { + const float old_value = queue_.front(); + queue_.pop(); + queue_.push(in[i]); + + sum_ += in[i] - old_value; + sum_of_squares_ += in[i] * in[i] - old_value * old_value; + first[i] = sum_ / length_; + second[i] = std::max(0.f, sum_of_squares_ / length_); + } +} + +} // namespace webrtc diff --git a/modules/audio_processing/transient/moving_moments.h b/modules/audio_processing/transient/moving_moments.h new file 
mode 100644 index 0000000..6dc0520 --- /dev/null +++ b/modules/audio_processing/transient/moving_moments.h @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_MOVING_MOMENTS_H_ +#define MODULES_AUDIO_PROCESSING_TRANSIENT_MOVING_MOMENTS_H_ + +#include + +#include + +namespace webrtc { + +// Calculates the first and second moments for each value of a buffer taking +// into account a given number of previous values. +// It preserves its state, so it can be multiple-called. +// TODO(chadan): Implement a function that takes a buffer of first moments and a +// buffer of second moments; and calculates the variances. When needed. +// TODO(chadan): Add functionality to update with a buffer but only output are +// the last values of the moments. When needed. +class MovingMoments { + public: + // Creates a Moving Moments object, that uses the last |length| values + // (including the new value introduced in every new calculation). + explicit MovingMoments(size_t length); + ~MovingMoments(); + + // Calculates the new values using |in|. Results will be in the out buffers. + // |first| and |second| must be allocated with at least |in_length|. + void CalculateMoments(const float* in, + size_t in_length, + float* first, + float* second); + + private: + size_t length_; + // A queue holding the |length_| latest input values. + std::queue queue_; + // Sum of the values of the queue. + float sum_; + // Sum of the squares of the values of the queue. 
+ float sum_of_squares_; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_MOVING_MOMENTS_H_ diff --git a/modules/audio_processing/transient/moving_moments_unittest.cc b/modules/audio_processing/transient/moving_moments_unittest.cc new file mode 100644 index 0000000..b0e613e --- /dev/null +++ b/modules/audio_processing/transient/moving_moments_unittest.cc @@ -0,0 +1,207 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/transient/moving_moments.h" + +#include + +#include "test/gtest.h" + +namespace webrtc { + +static const float kTolerance = 0.0001f; + +class MovingMomentsTest : public ::testing::Test { + protected: + static const size_t kMovingMomentsBufferLength = 5; + static const size_t kMaxOutputLength = 20; // Valid for this tests only. + + virtual void SetUp(); + // Calls CalculateMoments and verifies that it produces the expected + // outputs. 
+ void CalculateMomentsAndVerify(const float* input, + size_t input_length, + const float* expected_mean, + const float* expected_mean_squares); + + std::unique_ptr moving_moments_; + float output_mean_[kMaxOutputLength]; + float output_mean_squares_[kMaxOutputLength]; +}; + +const size_t MovingMomentsTest::kMaxOutputLength; + +void MovingMomentsTest::SetUp() { + moving_moments_.reset(new MovingMoments(kMovingMomentsBufferLength)); +} + +void MovingMomentsTest::CalculateMomentsAndVerify( + const float* input, + size_t input_length, + const float* expected_mean, + const float* expected_mean_squares) { + ASSERT_LE(input_length, kMaxOutputLength); + + moving_moments_->CalculateMoments(input, input_length, output_mean_, + output_mean_squares_); + + for (size_t i = 1; i < input_length; ++i) { + EXPECT_NEAR(expected_mean[i], output_mean_[i], kTolerance); + EXPECT_NEAR(expected_mean_squares[i], output_mean_squares_[i], kTolerance); + } +} + +TEST_F(MovingMomentsTest, CorrectMomentsOfAnAllZerosBuffer) { + const float kInput[] = {0.f, 0.f, 0.f, 0.f, 0.f}; + const size_t kInputLength = sizeof(kInput) / sizeof(kInput[0]); + + const float expected_mean[kInputLength] = {0.f, 0.f, 0.f, 0.f, 0.f}; + const float expected_mean_squares[kInputLength] = {0.f, 0.f, 0.f, 0.f, 0.f}; + + CalculateMomentsAndVerify(kInput, kInputLength, expected_mean, + expected_mean_squares); +} + +TEST_F(MovingMomentsTest, CorrectMomentsOfAConstantBuffer) { + const float kInput[] = {5.f, 5.f, 5.f, 5.f, 5.f, 5.f, 5.f, 5.f, 5.f, 5.f}; + const size_t kInputLength = sizeof(kInput) / sizeof(kInput[0]); + + const float expected_mean[kInputLength] = {1.f, 2.f, 3.f, 4.f, 5.f, + 5.f, 5.f, 5.f, 5.f, 5.f}; + const float expected_mean_squares[kInputLength] = { + 5.f, 10.f, 15.f, 20.f, 25.f, 25.f, 25.f, 25.f, 25.f, 25.f}; + + CalculateMomentsAndVerify(kInput, kInputLength, expected_mean, + expected_mean_squares); +} + +TEST_F(MovingMomentsTest, CorrectMomentsOfAnIncreasingBuffer) { + const float kInput[] = {1.f, 2.f, 
3.f, 4.f, 5.f, 6.f, 7.f, 8.f, 9.f}; + const size_t kInputLength = sizeof(kInput) / sizeof(kInput[0]); + + const float expected_mean[kInputLength] = {0.2f, 0.6f, 1.2f, 2.f, 3.f, + 4.f, 5.f, 6.f, 7.f}; + const float expected_mean_squares[kInputLength] = { + 0.2f, 1.f, 2.8f, 6.f, 11.f, 18.f, 27.f, 38.f, 51.f}; + + CalculateMomentsAndVerify(kInput, kInputLength, expected_mean, + expected_mean_squares); +} + +TEST_F(MovingMomentsTest, CorrectMomentsOfADecreasingBuffer) { + const float kInput[] = {-1.f, -2.f, -3.f, -4.f, -5.f, -6.f, -7.f, -8.f, -9.f}; + const size_t kInputLength = sizeof(kInput) / sizeof(kInput[0]); + + const float expected_mean[kInputLength] = {-0.2f, -0.6f, -1.2f, -2.f, -3.f, + -4.f, -5.f, -6.f, -7.f}; + const float expected_mean_squares[kInputLength] = { + 0.2f, 1.f, 2.8f, 6.f, 11.f, 18.f, 27.f, 38.f, 51.f}; + + CalculateMomentsAndVerify(kInput, kInputLength, expected_mean, + expected_mean_squares); +} + +TEST_F(MovingMomentsTest, CorrectMomentsOfAZeroMeanSequence) { + const size_t kMovingMomentsBufferLength = 4; + moving_moments_.reset(new MovingMoments(kMovingMomentsBufferLength)); + const float kInput[] = {1.f, -1.f, 1.f, -1.f, 1.f, + -1.f, 1.f, -1.f, 1.f, -1.f}; + const size_t kInputLength = sizeof(kInput) / sizeof(kInput[0]); + + const float expected_mean[kInputLength] = {0.25f, 0.f, 0.25f, 0.f, 0.f, + 0.f, 0.f, 0.f, 0.f, 0.f}; + const float expected_mean_squares[kInputLength] = { + 0.25f, 0.5f, 0.75f, 1.f, 1.f, 1.f, 1.f, 1.f, 1.f, 1.f}; + + CalculateMomentsAndVerify(kInput, kInputLength, expected_mean, + expected_mean_squares); +} + +TEST_F(MovingMomentsTest, CorrectMomentsOfAnArbitraryBuffer) { + const float kInput[] = {0.2f, 0.3f, 0.5f, 0.7f, 0.11f, + 0.13f, 0.17f, 0.19f, 0.23f}; + const size_t kInputLength = sizeof(kInput) / sizeof(kInput[0]); + + const float expected_mean[kInputLength] = { + 0.04f, 0.1f, 0.2f, 0.34f, 0.362f, 0.348f, 0.322f, 0.26f, 0.166f}; + const float expected_mean_squares[kInputLength] = {0.008f, 0.026f, 0.076f, + 0.174f, 
0.1764f, 0.1718f, + 0.1596f, 0.1168f, 0.0294f}; + + CalculateMomentsAndVerify(kInput, kInputLength, expected_mean, + expected_mean_squares); +} + +TEST_F(MovingMomentsTest, MutipleCalculateMomentsCalls) { + const float kInputFirstCall[] = {0.2f, 0.3f, 0.5f, 0.7f, 0.11f, + 0.13f, 0.17f, 0.19f, 0.23f}; + const size_t kInputFirstCallLength = + sizeof(kInputFirstCall) / sizeof(kInputFirstCall[0]); + const float kInputSecondCall[] = {0.29f, 0.31f}; + const size_t kInputSecondCallLength = + sizeof(kInputSecondCall) / sizeof(kInputSecondCall[0]); + const float kInputThirdCall[] = {0.37f, 0.41f, 0.43f, 0.47f}; + const size_t kInputThirdCallLength = + sizeof(kInputThirdCall) / sizeof(kInputThirdCall[0]); + + const float expected_mean_first_call[kInputFirstCallLength] = { + 0.04f, 0.1f, 0.2f, 0.34f, 0.362f, 0.348f, 0.322f, 0.26f, 0.166f}; + const float expected_mean_squares_first_call[kInputFirstCallLength] = { + 0.008f, 0.026f, 0.076f, 0.174f, 0.1764f, + 0.1718f, 0.1596f, 0.1168f, 0.0294f}; + + const float expected_mean_second_call[kInputSecondCallLength] = {0.202f, + 0.238f}; + const float expected_mean_squares_second_call[kInputSecondCallLength] = { + 0.0438f, 0.0596f}; + + const float expected_mean_third_call[kInputThirdCallLength] = { + 0.278f, 0.322f, 0.362f, 0.398f}; + const float expected_mean_squares_third_call[kInputThirdCallLength] = { + 0.0812f, 0.1076f, 0.134f, 0.1614f}; + + CalculateMomentsAndVerify(kInputFirstCall, kInputFirstCallLength, + expected_mean_first_call, + expected_mean_squares_first_call); + + CalculateMomentsAndVerify(kInputSecondCall, kInputSecondCallLength, + expected_mean_second_call, + expected_mean_squares_second_call); + + CalculateMomentsAndVerify(kInputThirdCall, kInputThirdCallLength, + expected_mean_third_call, + expected_mean_squares_third_call); +} + +TEST_F(MovingMomentsTest, VerifySampleBasedVsBlockBasedCalculation) { + const float kInput[] = {0.2f, 0.3f, 0.5f, 0.7f, 0.11f, + 0.13f, 0.17f, 0.19f, 0.23f}; + const size_t kInputLength = 
sizeof(kInput) / sizeof(kInput[0]); + + float output_mean_block_based[kInputLength]; + float output_mean_squares_block_based[kInputLength]; + + float output_mean_sample_based; + float output_mean_squares_sample_based; + + moving_moments_->CalculateMoments(kInput, kInputLength, + output_mean_block_based, + output_mean_squares_block_based); + moving_moments_.reset(new MovingMoments(kMovingMomentsBufferLength)); + for (size_t i = 0; i < kInputLength; ++i) { + moving_moments_->CalculateMoments(&kInput[i], 1, &output_mean_sample_based, + &output_mean_squares_sample_based); + EXPECT_FLOAT_EQ(output_mean_block_based[i], output_mean_sample_based); + EXPECT_FLOAT_EQ(output_mean_squares_block_based[i], + output_mean_squares_sample_based); + } +} + +} // namespace webrtc diff --git a/modules/audio_processing/transient/test/plotDetection.m b/modules/audio_processing/transient/test/plotDetection.m new file mode 100644 index 0000000..8e12ab9 --- /dev/null +++ b/modules/audio_processing/transient/test/plotDetection.m @@ -0,0 +1,22 @@ +% +% Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. +% +% Use of this source code is governed by a BSD-style license +% that can be found in the LICENSE file in the root of the source +% tree. An additional intellectual property rights grant can be found +% in the file PATENTS. All contributing project authors may +% be found in the AUTHORS file in the root of the source tree. +% + +function [] = plotDetection(PCMfile, DATfile, fs, chunkSize) +%[] = plotDetection(PCMfile, DATfile, fs, chunkSize) +% +%Plots the signal alongside the detection values. +% +%PCMfile: The file of the input signal in PCM format. +%DATfile: The file containing the detection values in binary float format. +%fs: The sample rate of the signal in Hertz. +%chunkSize: The chunk size used to compute the detection values in seconds. 
+[x, tx] = readPCM(PCMfile, fs); +[d, td] = readDetection(DATfile, fs, chunkSize); +plot(tx, x, td, d); diff --git a/modules/audio_processing/transient/test/readDetection.m b/modules/audio_processing/transient/test/readDetection.m new file mode 100644 index 0000000..832bf31 --- /dev/null +++ b/modules/audio_processing/transient/test/readDetection.m @@ -0,0 +1,26 @@ +% +% Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. +% +% Use of this source code is governed by a BSD-style license +% that can be found in the LICENSE file in the root of the source +% tree. An additional intellectual property rights grant can be found +% in the file PATENTS. All contributing project authors may +% be found in the AUTHORS file in the root of the source tree. +% + +function [d, t] = readDetection(file, fs, chunkSize) +%[d, t] = readDetection(file, fs, chunkSize) +% +%Reads a detection signal from a DAT file. +% +%d: The detection signal. +%t: The respective time vector. +% +%file: The DAT file where the detection signal is stored in float format. +%fs: The signal sample rate in Hertz. +%chunkSize: The chunk size used for the detection in seconds. +fid = fopen(file); +d = fread(fid, inf, 'float'); +fclose(fid); +t = 0:(1 / fs):(length(d) * chunkSize - 1 / fs); +d = d(floor(t / chunkSize) + 1); diff --git a/modules/audio_processing/transient/test/readPCM.m b/modules/audio_processing/transient/test/readPCM.m new file mode 100644 index 0000000..cd3cef8 --- /dev/null +++ b/modules/audio_processing/transient/test/readPCM.m @@ -0,0 +1,26 @@ +% +% Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. +% +% Use of this source code is governed by a BSD-style license +% that can be found in the LICENSE file in the root of the source +% tree. An additional intellectual property rights grant can be found +% in the file PATENTS. All contributing project authors may +% be found in the AUTHORS file in the root of the source tree. 
+% + +function [x, t] = readPCM(file, fs) +%[x, t] = readPCM(file, fs) +% +%Reads a signal from a PCM file. +% +%x: The read signal after normalization. +%t: The respective time vector. +% +%file: The PCM file where the signal is stored in int16 format. +%fs: The signal sample rate in Hertz. +fid = fopen(file); +x = fread(fid, inf, 'int16'); +fclose(fid); +x = x - mean(x); +x = x / max(abs(x)); +t = 0:(1 / fs):((length(x) - 1) / fs); diff --git a/modules/audio_processing/transient/transient_detector.cc b/modules/audio_processing/transient/transient_detector.cc new file mode 100644 index 0000000..f03a2ea --- /dev/null +++ b/modules/audio_processing/transient/transient_detector.cc @@ -0,0 +1,176 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/audio_processing/transient/transient_detector.h" + +#include +#include + +#include +#include + +#include "modules/audio_processing/transient/common.h" +#include "modules/audio_processing/transient/daubechies_8_wavelet_coeffs.h" +#include "modules/audio_processing/transient/moving_moments.h" +#include "modules/audio_processing/transient/wpd_node.h" +#include "modules/audio_processing/transient/wpd_tree.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +static const int kTransientLengthMs = 30; +static const int kChunksAtStartupLeftToDelete = + kTransientLengthMs / ts::kChunkSizeMs; +static const float kDetectThreshold = 16.f; + +TransientDetector::TransientDetector(int sample_rate_hz) + : samples_per_chunk_(sample_rate_hz * ts::kChunkSizeMs / 1000), + last_first_moment_(), + last_second_moment_(), + chunks_at_startup_left_to_delete_(kChunksAtStartupLeftToDelete), + reference_energy_(1.f), + using_reference_(false) { + RTC_DCHECK(sample_rate_hz == ts::kSampleRate8kHz || + sample_rate_hz == ts::kSampleRate16kHz || + sample_rate_hz == ts::kSampleRate32kHz || + sample_rate_hz == ts::kSampleRate48kHz); + int samples_per_transient = sample_rate_hz * kTransientLengthMs / 1000; + // Adjustment to avoid data loss while downsampling, making + // |samples_per_chunk_| and |samples_per_transient| always divisible by + // |kLeaves|. 
+ samples_per_chunk_ -= samples_per_chunk_ % kLeaves; + samples_per_transient -= samples_per_transient % kLeaves; + + tree_leaves_data_length_ = samples_per_chunk_ / kLeaves; + wpd_tree_.reset(new WPDTree(samples_per_chunk_, + kDaubechies8HighPassCoefficients, + kDaubechies8LowPassCoefficients, + kDaubechies8CoefficientsLength, kLevels)); + for (size_t i = 0; i < kLeaves; ++i) { + moving_moments_[i].reset( + new MovingMoments(samples_per_transient / kLeaves)); + } + + first_moments_.reset(new float[tree_leaves_data_length_]); + second_moments_.reset(new float[tree_leaves_data_length_]); + + for (int i = 0; i < kChunksAtStartupLeftToDelete; ++i) { + previous_results_.push_back(0.f); + } +} + +TransientDetector::~TransientDetector() {} + +float TransientDetector::Detect(const float* data, + size_t data_length, + const float* reference_data, + size_t reference_length) { + RTC_DCHECK(data); + RTC_DCHECK_EQ(samples_per_chunk_, data_length); + + // TODO(aluebs): Check if these errors can logically happen and if not assert + // on them. + if (wpd_tree_->Update(data, samples_per_chunk_) != 0) { + return -1.f; + } + + float result = 0.f; + + for (size_t i = 0; i < kLeaves; ++i) { + WPDNode* leaf = wpd_tree_->NodeAt(kLevels, i); + + moving_moments_[i]->CalculateMoments(leaf->data(), tree_leaves_data_length_, + first_moments_.get(), + second_moments_.get()); + + // Add value delayed (Use the last moments from the last call to Detect). + float unbiased_data = leaf->data()[0] - last_first_moment_[i]; + result += + unbiased_data * unbiased_data / (last_second_moment_[i] + FLT_MIN); + + // Add new values. 
+ for (size_t j = 1; j < tree_leaves_data_length_; ++j) { + unbiased_data = leaf->data()[j] - first_moments_[j - 1]; + result += + unbiased_data * unbiased_data / (second_moments_[j - 1] + FLT_MIN); + } + + last_first_moment_[i] = first_moments_[tree_leaves_data_length_ - 1]; + last_second_moment_[i] = second_moments_[tree_leaves_data_length_ - 1]; + } + + result /= tree_leaves_data_length_; + + result *= ReferenceDetectionValue(reference_data, reference_length); + + if (chunks_at_startup_left_to_delete_ > 0) { + chunks_at_startup_left_to_delete_--; + result = 0.f; + } + + if (result >= kDetectThreshold) { + result = 1.f; + } else { + // Get proportional value. + // Proportion achieved with a squared raised cosine function with domain + // [0, kDetectThreshold) and image [0, 1), it's always increasing. + const float horizontal_scaling = ts::kPi / kDetectThreshold; + const float kHorizontalShift = ts::kPi; + const float kVerticalScaling = 0.5f; + const float kVerticalShift = 1.f; + + result = (std::cos(result * horizontal_scaling + kHorizontalShift) + + kVerticalShift) * + kVerticalScaling; + result *= result; + } + + previous_results_.pop_front(); + previous_results_.push_back(result); + + // In the current implementation we return the max of the current result and + // the previous results, so the high results have a width equals to + // |transient_length|. + return *std::max_element(previous_results_.begin(), previous_results_.end()); +} + +// Looks for the highest slope and compares it with the previous ones. +// An exponential transformation takes this to the [0, 1] range. This value is +// multiplied by the detection result to avoid false positives. 
+float TransientDetector::ReferenceDetectionValue(const float* data, + size_t length) { + if (data == NULL) { + using_reference_ = false; + return 1.f; + } + static const float kEnergyRatioThreshold = 0.2f; + static const float kReferenceNonLinearity = 20.f; + static const float kMemory = 0.99f; + float reference_energy = 0.f; + for (size_t i = 1; i < length; ++i) { + reference_energy += data[i] * data[i]; + } + if (reference_energy == 0.f) { + using_reference_ = false; + return 1.f; + } + RTC_DCHECK_NE(0, reference_energy_); + float result = 1.f / (1.f + std::exp(kReferenceNonLinearity * + (kEnergyRatioThreshold - + reference_energy / reference_energy_))); + reference_energy_ = + kMemory * reference_energy_ + (1.f - kMemory) * reference_energy; + + using_reference_ = true; + + return result; +} + +} // namespace webrtc diff --git a/modules/audio_processing/transient/transient_detector.h b/modules/audio_processing/transient/transient_detector.h new file mode 100644 index 0000000..5ede2e8 --- /dev/null +++ b/modules/audio_processing/transient/transient_detector.h @@ -0,0 +1,89 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_TRANSIENT_DETECTOR_H_ +#define MODULES_AUDIO_PROCESSING_TRANSIENT_TRANSIENT_DETECTOR_H_ + +#include + +#include +#include + +#include "modules/audio_processing/transient/moving_moments.h" +#include "modules/audio_processing/transient/wpd_tree.h" + +namespace webrtc { + +// This is an implementation of the transient detector described in "Causal +// Wavelet based transient detector". 
+// Calculates the log-likelihood of a transient to happen on a signal at any +// given time based on the previous samples; it uses a WPD tree to analyze the +// signal. It preserves its state, so it can be multiple-called. +class TransientDetector { + public: + // TODO(chadan): The only supported wavelet is Daubechies 8 using a WPD tree + // of 3 levels. Make an overloaded constructor to allow different wavelets and + // depths of the tree. When needed. + + // Creates a wavelet based transient detector. + TransientDetector(int sample_rate_hz); + + ~TransientDetector(); + + // Calculates the log-likelihood of the existence of a transient in |data|. + // |data_length| has to be equal to |samples_per_chunk_|. + // Returns a value between 0 and 1, as a non linear representation of this + // likelihood. + // Returns a negative value on error. + float Detect(const float* data, + size_t data_length, + const float* reference_data, + size_t reference_length); + + bool using_reference() { return using_reference_; } + + private: + float ReferenceDetectionValue(const float* data, size_t length); + + static const size_t kLevels = 3; + static const size_t kLeaves = 1 << kLevels; + + size_t samples_per_chunk_; + + std::unique_ptr wpd_tree_; + size_t tree_leaves_data_length_; + + // A MovingMoments object is needed for each leaf in the WPD tree. + std::unique_ptr moving_moments_[kLeaves]; + + std::unique_ptr first_moments_; + std::unique_ptr second_moments_; + + // Stores the last calculated moments from the previous detection. + float last_first_moment_[kLeaves]; + float last_second_moment_[kLeaves]; + + // We keep track of the previous results from the previous chunks, so it can + // be used to effectively give results according to the |transient_length|. + std::deque previous_results_; + + // Number of chunks that are going to return only zeros at the beginning of + // the detection. It helps to avoid infs and nans due to the lack of + // information. 
+ int chunks_at_startup_left_to_delete_; + + float reference_energy_; + + bool using_reference_; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_TRANSIENT_DETECTOR_H_ diff --git a/modules/audio_processing/transient/transient_detector_unittest.cc b/modules/audio_processing/transient/transient_detector_unittest.cc new file mode 100644 index 0000000..0425133 --- /dev/null +++ b/modules/audio_processing/transient/transient_detector_unittest.cc @@ -0,0 +1,95 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/transient/transient_detector.h" + +#include +#include + +#include "modules/audio_processing/transient/common.h" +#include "modules/audio_processing/transient/file_utils.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/system/file_wrapper.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" + +namespace webrtc { + +static const int kSampleRatesHz[] = {ts::kSampleRate8kHz, ts::kSampleRate16kHz, + ts::kSampleRate32kHz, + ts::kSampleRate48kHz}; +static const size_t kNumberOfSampleRates = + sizeof(kSampleRatesHz) / sizeof(*kSampleRatesHz); + +// This test is for the correctness of the transient detector. +// Checks the results comparing them with the ones stored in the detect files in +// the directory: resources/audio_processing/transient/ +// The files contain all the results in double precision (Little endian). +// The audio files used with different sample rates are stored in the same +// directory. 
+#if defined(WEBRTC_IOS) +TEST(TransientDetectorTest, DISABLED_CorrectnessBasedOnFiles) { +#else +TEST(TransientDetectorTest, CorrectnessBasedOnFiles) { +#endif + for (size_t i = 0; i < kNumberOfSampleRates; ++i) { + int sample_rate_hz = kSampleRatesHz[i]; + + // Prepare detect file. + rtc::StringBuilder detect_file_name; + detect_file_name << "audio_processing/transient/detect" + << (sample_rate_hz / 1000) << "kHz"; + + FileWrapper detect_file = FileWrapper::OpenReadOnly( + test::ResourcePath(detect_file_name.str(), "dat").c_str()); + + bool file_opened = detect_file.is_open(); + ASSERT_TRUE(file_opened) << "File could not be opened.\n" + << detect_file_name.str().c_str(); + + // Prepare audio file. + rtc::StringBuilder audio_file_name; + audio_file_name << "audio_processing/transient/audio" + << (sample_rate_hz / 1000) << "kHz"; + + FileWrapper audio_file = FileWrapper::OpenReadOnly( + test::ResourcePath(audio_file_name.str(), "pcm").c_str()); + + // Create detector. + TransientDetector detector(sample_rate_hz); + + const size_t buffer_length = sample_rate_hz * ts::kChunkSizeMs / 1000; + std::unique_ptr buffer(new float[buffer_length]); + + const float kTolerance = 0.02f; + + size_t frames_read = 0; + + while (ReadInt16FromFileToFloatBuffer(&audio_file, buffer_length, + buffer.get()) == buffer_length) { + ++frames_read; + + float detector_value = + detector.Detect(buffer.get(), buffer_length, NULL, 0); + double file_value; + ASSERT_EQ(1u, ReadDoubleBufferFromFile(&detect_file, 1, &file_value)) + << "Detect test file is malformed.\n"; + + // Compare results with data from the matlab test file. 
+ EXPECT_NEAR(file_value, detector_value, kTolerance) + << "Frame: " << frames_read; + } + + detect_file.Close(); + audio_file.Close(); + } +} + +} // namespace webrtc diff --git a/modules/audio_processing/transient/transient_suppression_test.cc b/modules/audio_processing/transient/transient_suppression_test.cc new file mode 100644 index 0000000..d06fd96 --- /dev/null +++ b/modules/audio_processing/transient/transient_suppression_test.cc @@ -0,0 +1,238 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include +#include +#include + +#include +#include +#include + +#include "absl/flags/flag.h" +#include "absl/flags/parse.h" +#include "common_audio/include/audio_util.h" +#include "modules/audio_processing/agc/agc.h" +#include "modules/audio_processing/transient/transient_suppressor_impl.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" + +ABSL_FLAG(std::string, in_file_name, "", "PCM file that contains the signal."); +ABSL_FLAG(std::string, + detection_file_name, + "", + "PCM file that contains the detection signal."); +ABSL_FLAG(std::string, + reference_file_name, + "", + "PCM file that contains the reference signal."); + +ABSL_FLAG(int, + chunk_size_ms, + 10, + "Time between each chunk of samples in milliseconds."); + +ABSL_FLAG(int, + sample_rate_hz, + 16000, + "Sampling frequency of the signal in Hertz."); +ABSL_FLAG(int, + detection_rate_hz, + 0, + "Sampling frequency of the detection signal in Hertz."); + +ABSL_FLAG(int, num_channels, 1, "Number of channels."); + +namespace webrtc { + +const char kUsage[] = + "\nDetects and suppresses transients from file.\n\n" + "This 
application loads the signal from the in_file_name with a specific\n" + "num_channels and sample_rate_hz, the detection signal from the\n" + "detection_file_name with a specific detection_rate_hz, and the reference\n" + "signal from the reference_file_name with sample_rate_hz, divides them\n" + "into chunk_size_ms blocks, computes its voice value and depending on the\n" + "voice_threshold does the respective restoration. You can always get the\n" + "all-voiced or all-unvoiced cases by setting the voice_threshold to 0 or\n" + "1 respectively.\n\n"; + +// Read next buffers from the test files (signed 16-bit host-endian PCM +// format). audio_buffer has int16 samples, detection_buffer has float samples +// with range [-32768,32767], and reference_buffer has float samples with range +// [-1,1]. Return true iff all the buffers were filled completely. +bool ReadBuffers(FILE* in_file, + size_t audio_buffer_size, + int num_channels, + int16_t* audio_buffer, + FILE* detection_file, + size_t detection_buffer_size, + float* detection_buffer, + FILE* reference_file, + float* reference_buffer) { + std::unique_ptr tmpbuf; + int16_t* read_ptr = audio_buffer; + if (num_channels > 1) { + tmpbuf.reset(new int16_t[num_channels * audio_buffer_size]); + read_ptr = tmpbuf.get(); + } + if (fread(read_ptr, sizeof(*read_ptr), num_channels * audio_buffer_size, + in_file) != num_channels * audio_buffer_size) { + return false; + } + // De-interleave. 
+ if (num_channels > 1) { + for (int i = 0; i < num_channels; ++i) { + for (size_t j = 0; j < audio_buffer_size; ++j) { + audio_buffer[i * audio_buffer_size + j] = + read_ptr[i + j * num_channels]; + } + } + } + if (detection_file) { + std::unique_ptr ibuf(new int16_t[detection_buffer_size]); + if (fread(ibuf.get(), sizeof(ibuf[0]), detection_buffer_size, + detection_file) != detection_buffer_size) + return false; + for (size_t i = 0; i < detection_buffer_size; ++i) + detection_buffer[i] = ibuf[i]; + } + if (reference_file) { + std::unique_ptr ibuf(new int16_t[audio_buffer_size]); + if (fread(ibuf.get(), sizeof(ibuf[0]), audio_buffer_size, reference_file) != + audio_buffer_size) + return false; + S16ToFloat(ibuf.get(), audio_buffer_size, reference_buffer); + } + return true; +} + +// Write a number of samples to an open signed 16-bit host-endian PCM file. +static void WritePCM(FILE* f, + size_t num_samples, + int num_channels, + const float* buffer) { + std::unique_ptr ibuf(new int16_t[num_channels * num_samples]); + // Interleave. + for (int i = 0; i < num_channels; ++i) { + for (size_t j = 0; j < num_samples; ++j) { + ibuf[i + j * num_channels] = FloatS16ToS16(buffer[i * num_samples + j]); + } + } + fwrite(ibuf.get(), sizeof(ibuf[0]), num_channels * num_samples, f); +} + +// This application tests the transient suppression by providing a processed +// PCM file, which has to be listened to in order to evaluate the +// performance. +// It gets an audio file, and its voice gain information, and the suppressor +// process it giving the output file "suppressed_keystrokes.pcm". +void void_main() { + // TODO(aluebs): Remove all FileWrappers. + // Prepare the input file. + FILE* in_file = fopen(absl::GetFlag(FLAGS_in_file_name).c_str(), "rb"); + ASSERT_TRUE(in_file != NULL); + + // Prepare the detection file. 
+ FILE* detection_file = NULL; + if (!absl::GetFlag(FLAGS_detection_file_name).empty()) { + detection_file = + fopen(absl::GetFlag(FLAGS_detection_file_name).c_str(), "rb"); + } + + // Prepare the reference file. + FILE* reference_file = NULL; + if (!absl::GetFlag(FLAGS_reference_file_name).empty()) { + reference_file = + fopen(absl::GetFlag(FLAGS_reference_file_name).c_str(), "rb"); + } + + // Prepare the output file. + std::string out_file_name = test::OutputPath() + "suppressed_keystrokes.pcm"; + FILE* out_file = fopen(out_file_name.c_str(), "wb"); + ASSERT_TRUE(out_file != NULL); + + int detection_rate_hz = absl::GetFlag(FLAGS_detection_rate_hz); + if (detection_rate_hz == 0) { + detection_rate_hz = absl::GetFlag(FLAGS_sample_rate_hz); + } + + Agc agc; + + TransientSuppressorImpl suppressor; + suppressor.Initialize(absl::GetFlag(FLAGS_sample_rate_hz), detection_rate_hz, + absl::GetFlag(FLAGS_num_channels)); + + const size_t audio_buffer_size = absl::GetFlag(FLAGS_chunk_size_ms) * + absl::GetFlag(FLAGS_sample_rate_hz) / 1000; + const size_t detection_buffer_size = + absl::GetFlag(FLAGS_chunk_size_ms) * detection_rate_hz / 1000; + + // int16 and float variants of the same data. 
+ std::unique_ptr audio_buffer_i( + new int16_t[absl::GetFlag(FLAGS_num_channels) * audio_buffer_size]); + std::unique_ptr audio_buffer_f( + new float[absl::GetFlag(FLAGS_num_channels) * audio_buffer_size]); + + std::unique_ptr detection_buffer, reference_buffer; + + if (detection_file) + detection_buffer.reset(new float[detection_buffer_size]); + if (reference_file) + reference_buffer.reset(new float[audio_buffer_size]); + + while (ReadBuffers( + in_file, audio_buffer_size, absl::GetFlag(FLAGS_num_channels), + audio_buffer_i.get(), detection_file, detection_buffer_size, + detection_buffer.get(), reference_file, reference_buffer.get())) { + agc.Process(audio_buffer_i.get(), static_cast(audio_buffer_size), + absl::GetFlag(FLAGS_sample_rate_hz)); + + for (size_t i = 0; + i < absl::GetFlag(FLAGS_num_channels) * audio_buffer_size; ++i) { + audio_buffer_f[i] = audio_buffer_i[i]; + } + + ASSERT_EQ(0, suppressor.Suppress( + audio_buffer_f.get(), audio_buffer_size, + absl::GetFlag(FLAGS_num_channels), detection_buffer.get(), + detection_buffer_size, reference_buffer.get(), + audio_buffer_size, agc.voice_probability(), true)) + << "The transient suppressor could not suppress the frame"; + + // Write result to out file. 
+ WritePCM(out_file, audio_buffer_size, absl::GetFlag(FLAGS_num_channels), + audio_buffer_f.get()); + } + + fclose(in_file); + if (detection_file) { + fclose(detection_file); + } + if (reference_file) { + fclose(reference_file); + } + fclose(out_file); +} + +} // namespace webrtc + +int main(int argc, char* argv[]) { + std::vector args = absl::ParseCommandLine(argc, argv); + if (args.size() != 1) { + printf("%s", webrtc::kUsage); + return 1; + } + RTC_CHECK_GT(absl::GetFlag(FLAGS_chunk_size_ms), 0); + RTC_CHECK_GT(absl::GetFlag(FLAGS_sample_rate_hz), 0); + RTC_CHECK_GT(absl::GetFlag(FLAGS_num_channels), 0); + + webrtc::void_main(); + return 0; +} diff --git a/modules/audio_processing/transient/transient_suppressor.h b/modules/audio_processing/transient/transient_suppressor.h new file mode 100644 index 0000000..bb262b0 --- /dev/null +++ b/modules/audio_processing/transient/transient_suppressor.h @@ -0,0 +1,60 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_TRANSIENT_SUPPRESSOR_H_ +#define MODULES_AUDIO_PROCESSING_TRANSIENT_TRANSIENT_SUPPRESSOR_H_ + +#include +#include +#include + +namespace webrtc { + +// Detects transients in an audio stream and suppress them using a simple +// restoration algorithm that attenuates unexpected spikes in the spectrum. +class TransientSuppressor { + public: + virtual ~TransientSuppressor() {} + + virtual int Initialize(int sample_rate_hz, + int detector_rate_hz, + int num_channels) = 0; + + // Processes a |data| chunk, and returns it with keystrokes suppressed from + // it. The float format is assumed to be int16 ranged. 
If there are more than + // one channel, the chunks are concatenated one after the other in |data|. + // |data_length| must be equal to |data_length_|. + // |num_channels| must be equal to |num_channels_|. + // A sub-band, ideally the higher, can be used as |detection_data|. If it is + // NULL, |data| is used for the detection too. The |detection_data| is always + // assumed mono. + // If a reference signal (e.g. keyboard microphone) is available, it can be + // passed in as |reference_data|. It is assumed mono and must have the same + // length as |data|. NULL is accepted if unavailable. + // This suppressor performs better if voice information is available. + // |voice_probability| is the probability of voice being present in this chunk + // of audio. If voice information is not available, |voice_probability| must + // always be set to 1. + // |key_pressed| determines if a key was pressed on this audio chunk. + // Returns 0 on success and -1 otherwise. + virtual int Suppress(float* data, + size_t data_length, + int num_channels, + const float* detection_data, + size_t detection_length, + const float* reference_data, + size_t reference_length, + float voice_probability, + bool key_pressed) = 0; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_TRANSIENT_SUPPRESSOR_H_ diff --git a/modules/audio_processing/transient/transient_suppressor_impl.cc b/modules/audio_processing/transient/transient_suppressor_impl.cc new file mode 100644 index 0000000..d515d30 --- /dev/null +++ b/modules/audio_processing/transient/transient_suppressor_impl.cc @@ -0,0 +1,411 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/transient/transient_suppressor_impl.h" + +#include + +#include +#include +#include +#include +#include +#include + +#include "common_audio/include/audio_util.h" +#include "common_audio/signal_processing/include/signal_processing_library.h" +#include "common_audio/third_party/ooura/fft_size_256/fft4g.h" +#include "modules/audio_processing/transient/common.h" +#include "modules/audio_processing/transient/transient_detector.h" +#include "modules/audio_processing/transient/transient_suppressor.h" +#include "modules/audio_processing/transient/windows_private.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +static const float kMeanIIRCoefficient = 0.5f; +static const float kVoiceThreshold = 0.02f; + +// TODO(aluebs): Check if these values work also for 48kHz. +static const size_t kMinVoiceBin = 3; +static const size_t kMaxVoiceBin = 60; + +namespace { + +float ComplexMagnitude(float a, float b) { + return std::abs(a) + std::abs(b); +} + +} // namespace + +TransientSuppressorImpl::TransientSuppressorImpl() + : data_length_(0), + detection_length_(0), + analysis_length_(0), + buffer_delay_(0), + complex_analysis_length_(0), + num_channels_(0), + window_(NULL), + detector_smoothed_(0.f), + keypress_counter_(0), + chunks_since_keypress_(0), + detection_enabled_(false), + suppression_enabled_(false), + use_hard_restoration_(false), + chunks_since_voice_change_(0), + seed_(182), + using_reference_(false) {} + +TransientSuppressorImpl::~TransientSuppressorImpl() {} + +int TransientSuppressorImpl::Initialize(int sample_rate_hz, + int detection_rate_hz, + int num_channels) { + switch (sample_rate_hz) { + case ts::kSampleRate8kHz: + analysis_length_ = 128u; + window_ = kBlocks80w128; + break; + case ts::kSampleRate16kHz: + analysis_length_ = 256u; + window_ = kBlocks160w256; + break; + case 
ts::kSampleRate32kHz: + analysis_length_ = 512u; + window_ = kBlocks320w512; + break; + case ts::kSampleRate48kHz: + analysis_length_ = 1024u; + window_ = kBlocks480w1024; + break; + default: + return -1; + } + if (detection_rate_hz != ts::kSampleRate8kHz && + detection_rate_hz != ts::kSampleRate16kHz && + detection_rate_hz != ts::kSampleRate32kHz && + detection_rate_hz != ts::kSampleRate48kHz) { + return -1; + } + if (num_channels <= 0) { + return -1; + } + + detector_.reset(new TransientDetector(detection_rate_hz)); + data_length_ = sample_rate_hz * ts::kChunkSizeMs / 1000; + if (data_length_ > analysis_length_) { + RTC_NOTREACHED(); + return -1; + } + buffer_delay_ = analysis_length_ - data_length_; + + complex_analysis_length_ = analysis_length_ / 2 + 1; + RTC_DCHECK_GE(complex_analysis_length_, kMaxVoiceBin); + num_channels_ = num_channels; + in_buffer_.reset(new float[analysis_length_ * num_channels_]); + memset(in_buffer_.get(), 0, + analysis_length_ * num_channels_ * sizeof(in_buffer_[0])); + detection_length_ = detection_rate_hz * ts::kChunkSizeMs / 1000; + detection_buffer_.reset(new float[detection_length_]); + memset(detection_buffer_.get(), 0, + detection_length_ * sizeof(detection_buffer_[0])); + out_buffer_.reset(new float[analysis_length_ * num_channels_]); + memset(out_buffer_.get(), 0, + analysis_length_ * num_channels_ * sizeof(out_buffer_[0])); + // ip[0] must be zero to trigger initialization using rdft(). 
+ size_t ip_length = 2 + sqrtf(analysis_length_); + ip_.reset(new size_t[ip_length]()); + memset(ip_.get(), 0, ip_length * sizeof(ip_[0])); + wfft_.reset(new float[complex_analysis_length_ - 1]); + memset(wfft_.get(), 0, (complex_analysis_length_ - 1) * sizeof(wfft_[0])); + spectral_mean_.reset(new float[complex_analysis_length_ * num_channels_]); + memset(spectral_mean_.get(), 0, + complex_analysis_length_ * num_channels_ * sizeof(spectral_mean_[0])); + fft_buffer_.reset(new float[analysis_length_ + 2]); + memset(fft_buffer_.get(), 0, (analysis_length_ + 2) * sizeof(fft_buffer_[0])); + magnitudes_.reset(new float[complex_analysis_length_]); + memset(magnitudes_.get(), 0, + complex_analysis_length_ * sizeof(magnitudes_[0])); + mean_factor_.reset(new float[complex_analysis_length_]); + + static const float kFactorHeight = 10.f; + static const float kLowSlope = 1.f; + static const float kHighSlope = 0.3f; + for (size_t i = 0; i < complex_analysis_length_; ++i) { + mean_factor_[i] = + kFactorHeight / + (1.f + std::exp(kLowSlope * static_cast(i - kMinVoiceBin))) + + kFactorHeight / + (1.f + std::exp(kHighSlope * static_cast(kMaxVoiceBin - i))); + } + detector_smoothed_ = 0.f; + keypress_counter_ = 0; + chunks_since_keypress_ = 0; + detection_enabled_ = false; + suppression_enabled_ = false; + use_hard_restoration_ = false; + chunks_since_voice_change_ = 0; + seed_ = 182; + using_reference_ = false; + return 0; +} + +int TransientSuppressorImpl::Suppress(float* data, + size_t data_length, + int num_channels, + const float* detection_data, + size_t detection_length, + const float* reference_data, + size_t reference_length, + float voice_probability, + bool key_pressed) { + if (!data || data_length != data_length_ || num_channels != num_channels_ || + detection_length != detection_length_ || voice_probability < 0 || + voice_probability > 1) { + return -1; + } + + UpdateKeypress(key_pressed); + UpdateBuffers(data); + + int result = 0; + if (detection_enabled_) { + 
UpdateRestoration(voice_probability); + + if (!detection_data) { + // Use the input data of the first channel if special detection data is + // not supplied. + detection_data = &in_buffer_[buffer_delay_]; + } + + float detector_result = detector_->Detect(detection_data, detection_length, + reference_data, reference_length); + if (detector_result < 0) { + return -1; + } + + using_reference_ = detector_->using_reference(); + + // |detector_smoothed_| follows the |detector_result| when this last one is + // increasing, but has an exponential decaying tail to be able to suppress + // the ringing of keyclicks. + float smooth_factor = using_reference_ ? 0.6 : 0.1; + detector_smoothed_ = detector_result >= detector_smoothed_ + ? detector_result + : smooth_factor * detector_smoothed_ + + (1 - smooth_factor) * detector_result; + + for (int i = 0; i < num_channels_; ++i) { + Suppress(&in_buffer_[i * analysis_length_], + &spectral_mean_[i * complex_analysis_length_], + &out_buffer_[i * analysis_length_]); + } + } + + // If the suppression isn't enabled, we use the in buffer to delay the signal + // appropriately. This also gives time for the out buffer to be refreshed with + // new data between detection and suppression getting enabled. + for (int i = 0; i < num_channels_; ++i) { + memcpy(&data[i * data_length_], + suppression_enabled_ ? &out_buffer_[i * analysis_length_] + : &in_buffer_[i * analysis_length_], + data_length_ * sizeof(*data)); + } + return result; +} + +// This should only be called when detection is enabled. UpdateBuffers() must +// have been called. At return, |out_buffer_| will be filled with the +// processed output. +void TransientSuppressorImpl::Suppress(float* in_ptr, + float* spectral_mean, + float* out_ptr) { + // Go to frequency domain. 
+ for (size_t i = 0; i < analysis_length_; ++i) { + // TODO(aluebs): Rename windows + fft_buffer_[i] = in_ptr[i] * window_[i]; + } + + WebRtc_rdft(analysis_length_, 1, fft_buffer_.get(), ip_.get(), wfft_.get()); + + // Since WebRtc_rdft puts R[n/2] in fft_buffer_[1], we move it to the end + // for convenience. + fft_buffer_[analysis_length_] = fft_buffer_[1]; + fft_buffer_[analysis_length_ + 1] = 0.f; + fft_buffer_[1] = 0.f; + + for (size_t i = 0; i < complex_analysis_length_; ++i) { + magnitudes_[i] = + ComplexMagnitude(fft_buffer_[i * 2], fft_buffer_[i * 2 + 1]); + } + // Restore audio if necessary. + if (suppression_enabled_) { + if (use_hard_restoration_) { + HardRestoration(spectral_mean); + } else { + SoftRestoration(spectral_mean); + } + } + + // Update the spectral mean. + for (size_t i = 0; i < complex_analysis_length_; ++i) { + spectral_mean[i] = (1 - kMeanIIRCoefficient) * spectral_mean[i] + + kMeanIIRCoefficient * magnitudes_[i]; + } + + // Back to time domain. + // Put R[n/2] back in fft_buffer_[1]. + fft_buffer_[1] = fft_buffer_[analysis_length_]; + + WebRtc_rdft(analysis_length_, -1, fft_buffer_.get(), ip_.get(), wfft_.get()); + const float fft_scaling = 2.f / analysis_length_; + + for (size_t i = 0; i < analysis_length_; ++i) { + out_ptr[i] += fft_buffer_[i] * window_[i] * fft_scaling; + } +} + +void TransientSuppressorImpl::UpdateKeypress(bool key_pressed) { + const int kKeypressPenalty = 1000 / ts::kChunkSizeMs; + const int kIsTypingThreshold = 1000 / ts::kChunkSizeMs; + const int kChunksUntilNotTyping = 4000 / ts::kChunkSizeMs; // 4 seconds. 
+ + if (key_pressed) { + keypress_counter_ += kKeypressPenalty; + chunks_since_keypress_ = 0; + detection_enabled_ = true; + } + keypress_counter_ = std::max(0, keypress_counter_ - 1); + + if (keypress_counter_ > kIsTypingThreshold) { + if (!suppression_enabled_) { + RTC_LOG(LS_INFO) << "[ts] Transient suppression is now enabled."; + } + suppression_enabled_ = true; + keypress_counter_ = 0; + } + + if (detection_enabled_ && ++chunks_since_keypress_ > kChunksUntilNotTyping) { + if (suppression_enabled_) { + RTC_LOG(LS_INFO) << "[ts] Transient suppression is now disabled."; + } + detection_enabled_ = false; + suppression_enabled_ = false; + keypress_counter_ = 0; + } +} + +void TransientSuppressorImpl::UpdateRestoration(float voice_probability) { + const int kHardRestorationOffsetDelay = 3; + const int kHardRestorationOnsetDelay = 80; + + bool not_voiced = voice_probability < kVoiceThreshold; + + if (not_voiced == use_hard_restoration_) { + chunks_since_voice_change_ = 0; + } else { + ++chunks_since_voice_change_; + + if ((use_hard_restoration_ && + chunks_since_voice_change_ > kHardRestorationOffsetDelay) || + (!use_hard_restoration_ && + chunks_since_voice_change_ > kHardRestorationOnsetDelay)) { + use_hard_restoration_ = not_voiced; + chunks_since_voice_change_ = 0; + } + } +} + +// Shift buffers to make way for new data. Must be called after +// |detection_enabled_| is updated by UpdateKeypress(). +void TransientSuppressorImpl::UpdateBuffers(float* data) { + // TODO(aluebs): Change to ring buffer. + memmove(in_buffer_.get(), &in_buffer_[data_length_], + (buffer_delay_ + (num_channels_ - 1) * analysis_length_) * + sizeof(in_buffer_[0])); + // Copy new chunk to buffer. + for (int i = 0; i < num_channels_; ++i) { + memcpy(&in_buffer_[buffer_delay_ + i * analysis_length_], + &data[i * data_length_], data_length_ * sizeof(*data)); + } + if (detection_enabled_) { + // Shift previous chunk in out buffer. 
+ memmove(out_buffer_.get(), &out_buffer_[data_length_], + (buffer_delay_ + (num_channels_ - 1) * analysis_length_) * + sizeof(out_buffer_[0])); + // Initialize new chunk in out buffer. + for (int i = 0; i < num_channels_; ++i) { + memset(&out_buffer_[buffer_delay_ + i * analysis_length_], 0, + data_length_ * sizeof(out_buffer_[0])); + } + } +} + +// Restores the unvoiced signal if a click is present. +// Attenuates by a certain factor every peak in the |fft_buffer_| that exceeds +// the spectral mean. The attenuation depends on |detector_smoothed_|. +// If a restoration takes place, the |magnitudes_| are updated to the new value. +void TransientSuppressorImpl::HardRestoration(float* spectral_mean) { + const float detector_result = + 1.f - std::pow(1.f - detector_smoothed_, using_reference_ ? 200.f : 50.f); + // To restore, we get the peaks in the spectrum. If higher than the previous + // spectral mean we adjust them. + for (size_t i = 0; i < complex_analysis_length_; ++i) { + if (magnitudes_[i] > spectral_mean[i] && magnitudes_[i] > 0) { + // RandU() generates values on [0, int16::max()] + const float phase = 2 * ts::kPi * WebRtcSpl_RandU(&seed_) / + std::numeric_limits::max(); + const float scaled_mean = detector_result * spectral_mean[i]; + + fft_buffer_[i * 2] = (1 - detector_result) * fft_buffer_[i * 2] + + scaled_mean * cosf(phase); + fft_buffer_[i * 2 + 1] = (1 - detector_result) * fft_buffer_[i * 2 + 1] + + scaled_mean * sinf(phase); + magnitudes_[i] = magnitudes_[i] - + detector_result * (magnitudes_[i] - spectral_mean[i]); + } + } +} + +// Restores the voiced signal if a click is present. +// Attenuates by a certain factor every peak in the |fft_buffer_| that exceeds +// the spectral mean and that is lower than some function of the current block +// frequency mean. The attenuation depends on |detector_smoothed_|. +// If a restoration takes place, the |magnitudes_| are updated to the new value. 
+void TransientSuppressorImpl::SoftRestoration(float* spectral_mean) { + // Get the spectral magnitude mean of the current block. + float block_frequency_mean = 0; + for (size_t i = kMinVoiceBin; i < kMaxVoiceBin; ++i) { + block_frequency_mean += magnitudes_[i]; + } + block_frequency_mean /= (kMaxVoiceBin - kMinVoiceBin); + + // To restore, we get the peaks in the spectrum. If higher than the + // previous spectral mean and lower than a factor of the block mean + // we adjust them. The factor is a double sigmoid that has a minimum in the + // voice frequency range (300Hz - 3kHz). + for (size_t i = 0; i < complex_analysis_length_; ++i) { + if (magnitudes_[i] > spectral_mean[i] && magnitudes_[i] > 0 && + (using_reference_ || + magnitudes_[i] < block_frequency_mean * mean_factor_[i])) { + const float new_magnitude = + magnitudes_[i] - + detector_smoothed_ * (magnitudes_[i] - spectral_mean[i]); + const float magnitude_ratio = new_magnitude / magnitudes_[i]; + + fft_buffer_[i * 2] *= magnitude_ratio; + fft_buffer_[i * 2 + 1] *= magnitude_ratio; + magnitudes_[i] = new_magnitude; + } + } +} + +} // namespace webrtc diff --git a/modules/audio_processing/transient/transient_suppressor_impl.h b/modules/audio_processing/transient/transient_suppressor_impl.h new file mode 100644 index 0000000..4737af5 --- /dev/null +++ b/modules/audio_processing/transient/transient_suppressor_impl.h @@ -0,0 +1,123 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_TRANSIENT_SUPPRESSOR_IMPL_H_ +#define MODULES_AUDIO_PROCESSING_TRANSIENT_TRANSIENT_SUPPRESSOR_IMPL_H_ + +#include +#include + +#include + +#include "modules/audio_processing/transient/transient_suppressor.h" +#include "rtc_base/gtest_prod_util.h" + +namespace webrtc { + +class TransientDetector; + +// Detects transients in an audio stream and suppress them using a simple +// restoration algorithm that attenuates unexpected spikes in the spectrum. +class TransientSuppressorImpl : public TransientSuppressor { + public: + TransientSuppressorImpl(); + ~TransientSuppressorImpl() override; + + int Initialize(int sample_rate_hz, + int detector_rate_hz, + int num_channels) override; + + // Processes a |data| chunk, and returns it with keystrokes suppressed from + // it. The float format is assumed to be int16 ranged. If there are more than + // one channel, the chunks are concatenated one after the other in |data|. + // |data_length| must be equal to |data_length_|. + // |num_channels| must be equal to |num_channels_|. + // A sub-band, ideally the higher, can be used as |detection_data|. If it is + // NULL, |data| is used for the detection too. The |detection_data| is always + // assumed mono. + // If a reference signal (e.g. keyboard microphone) is available, it can be + // passed in as |reference_data|. It is assumed mono and must have the same + // length as |data|. NULL is accepted if unavailable. + // This suppressor performs better if voice information is available. + // |voice_probability| is the probability of voice being present in this chunk + // of audio. If voice information is not available, |voice_probability| must + // always be set to 1. + // |key_pressed| determines if a key was pressed on this audio chunk. + // Returns 0 on success and -1 otherwise. 
+ int Suppress(float* data, + size_t data_length, + int num_channels, + const float* detection_data, + size_t detection_length, + const float* reference_data, + size_t reference_length, + float voice_probability, + bool key_pressed) override; + + private: + FRIEND_TEST_ALL_PREFIXES(TransientSuppressorImplTest, + TypingDetectionLogicWorksAsExpectedForMono); + void Suppress(float* in_ptr, float* spectral_mean, float* out_ptr); + + void UpdateKeypress(bool key_pressed); + void UpdateRestoration(float voice_probability); + + void UpdateBuffers(float* data); + + void HardRestoration(float* spectral_mean); + void SoftRestoration(float* spectral_mean); + + std::unique_ptr detector_; + + size_t data_length_; + size_t detection_length_; + size_t analysis_length_; + size_t buffer_delay_; + size_t complex_analysis_length_; + int num_channels_; + // Input buffer where the original samples are stored. + std::unique_ptr in_buffer_; + std::unique_ptr detection_buffer_; + // Output buffer where the restored samples are stored. + std::unique_ptr out_buffer_; + + // Arrays for fft. + std::unique_ptr ip_; + std::unique_ptr wfft_; + + std::unique_ptr spectral_mean_; + + // Stores the data for the fft. 
+ std::unique_ptr fft_buffer_; + + std::unique_ptr magnitudes_; + + const float* window_; + + std::unique_ptr mean_factor_; + + float detector_smoothed_; + + int keypress_counter_; + int chunks_since_keypress_; + bool detection_enabled_; + bool suppression_enabled_; + + bool use_hard_restoration_; + int chunks_since_voice_change_; + + uint32_t seed_; + + bool using_reference_; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_TRANSIENT_SUPPRESSOR_IMPL_H_ diff --git a/modules/audio_processing/transient/transient_suppressor_unittest.cc b/modules/audio_processing/transient/transient_suppressor_unittest.cc new file mode 100644 index 0000000..a5c6bb1 --- /dev/null +++ b/modules/audio_processing/transient/transient_suppressor_unittest.cc @@ -0,0 +1,85 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/transient/transient_suppressor_impl.h" + +#include "modules/audio_processing/transient/common.h" +#include "test/gtest.h" + +namespace webrtc { + +TEST(TransientSuppressorImplTest, TypingDetectionLogicWorksAsExpectedForMono) { + static const int kNumChannels = 1; + + TransientSuppressorImpl ts; + ts.Initialize(ts::kSampleRate16kHz, ts::kSampleRate16kHz, kNumChannels); + + // Each key-press enables detection. 
+ EXPECT_FALSE(ts.detection_enabled_); + ts.UpdateKeypress(true); + EXPECT_TRUE(ts.detection_enabled_); + + // It takes four seconds without any key-press to disable the detection + for (int time_ms = 0; time_ms < 3990; time_ms += ts::kChunkSizeMs) { + ts.UpdateKeypress(false); + EXPECT_TRUE(ts.detection_enabled_); + } + ts.UpdateKeypress(false); + EXPECT_FALSE(ts.detection_enabled_); + + // Key-presses that are more than a second apart from each other don't enable + // suppression. + for (int i = 0; i < 100; ++i) { + EXPECT_FALSE(ts.suppression_enabled_); + ts.UpdateKeypress(true); + EXPECT_TRUE(ts.detection_enabled_); + EXPECT_FALSE(ts.suppression_enabled_); + for (int time_ms = 0; time_ms < 990; time_ms += ts::kChunkSizeMs) { + ts.UpdateKeypress(false); + EXPECT_TRUE(ts.detection_enabled_); + EXPECT_FALSE(ts.suppression_enabled_); + } + ts.UpdateKeypress(false); + } + + // Two consecutive key-presses is enough to enable the suppression. + ts.UpdateKeypress(true); + EXPECT_FALSE(ts.suppression_enabled_); + ts.UpdateKeypress(true); + EXPECT_TRUE(ts.suppression_enabled_); + + // Key-presses that are less than a second apart from each other don't disable + // detection nor suppression. + for (int i = 0; i < 100; ++i) { + for (int time_ms = 0; time_ms < 1000; time_ms += ts::kChunkSizeMs) { + ts.UpdateKeypress(false); + EXPECT_TRUE(ts.detection_enabled_); + EXPECT_TRUE(ts.suppression_enabled_); + } + ts.UpdateKeypress(true); + EXPECT_TRUE(ts.detection_enabled_); + EXPECT_TRUE(ts.suppression_enabled_); + } + + // It takes four seconds without any key-press to disable the detection and + // suppression. 
+ for (int time_ms = 0; time_ms < 3990; time_ms += ts::kChunkSizeMs) { + ts.UpdateKeypress(false); + EXPECT_TRUE(ts.detection_enabled_); + EXPECT_TRUE(ts.suppression_enabled_); + } + for (int time_ms = 0; time_ms < 1000; time_ms += ts::kChunkSizeMs) { + ts.UpdateKeypress(false); + EXPECT_FALSE(ts.detection_enabled_); + EXPECT_FALSE(ts.suppression_enabled_); + } +} + +} // namespace webrtc diff --git a/modules/audio_processing/transient/windows_private.h b/modules/audio_processing/transient/windows_private.h new file mode 100644 index 0000000..54e3c25 --- /dev/null +++ b/modules/audio_processing/transient/windows_private.h @@ -0,0 +1,557 @@ +/* + * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_WINDOWS_PRIVATE_H_ +#define MODULES_AUDIO_PROCESSING_TRANSIENT_WINDOWS_PRIVATE_H_ + +namespace webrtc { + +// Hanning window for 4ms 16kHz +static const float kHanning64w128[128] = { + 0.00000000000000f, 0.02454122852291f, 0.04906767432742f, 0.07356456359967f, + 0.09801714032956f, 0.12241067519922f, 0.14673047445536f, 0.17096188876030f, + 0.19509032201613f, 0.21910124015687f, 0.24298017990326f, 0.26671275747490f, + 0.29028467725446f, 0.31368174039889f, 0.33688985339222f, 0.35989503653499f, + 0.38268343236509f, 0.40524131400499f, 0.42755509343028f, 0.44961132965461f, + 0.47139673682600f, 0.49289819222978f, 0.51410274419322f, 0.53499761988710f, + 0.55557023301960f, 0.57580819141785f, 0.59569930449243f, 0.61523159058063f, + 0.63439328416365f, 0.65317284295378f, 0.67155895484702f, 0.68954054473707f, + 0.70710678118655f, 0.72424708295147f, 0.74095112535496f, 0.75720884650648f, + 0.77301045336274f, 0.78834642762661f, 0.80320753148064f, 0.81758481315158f, + 0.83146961230255f, 0.84485356524971f, 0.85772861000027f, 0.87008699110871f, + 0.88192126434835f, 0.89322430119552f, 0.90398929312344f, 0.91420975570353f, + 0.92387953251129f, 0.93299279883474f, 0.94154406518302f, 0.94952818059304f, + 0.95694033573221f, 0.96377606579544f, 0.97003125319454f, 0.97570213003853f, + 0.98078528040323f, 0.98527764238894f, 0.98917650996478f, 0.99247953459871f, + 0.99518472667220f, 0.99729045667869f, 0.99879545620517f, 0.99969881869620f, + 1.00000000000000f, 0.99969881869620f, 0.99879545620517f, 0.99729045667869f, + 0.99518472667220f, 0.99247953459871f, 0.98917650996478f, 0.98527764238894f, + 0.98078528040323f, 0.97570213003853f, 0.97003125319454f, 0.96377606579544f, + 0.95694033573221f, 0.94952818059304f, 0.94154406518302f, 0.93299279883474f, + 0.92387953251129f, 0.91420975570353f, 0.90398929312344f, 0.89322430119552f, + 0.88192126434835f, 0.87008699110871f, 0.85772861000027f, 0.84485356524971f, + 0.83146961230255f, 0.81758481315158f, 
0.80320753148064f, 0.78834642762661f, + 0.77301045336274f, 0.75720884650648f, 0.74095112535496f, 0.72424708295147f, + 0.70710678118655f, 0.68954054473707f, 0.67155895484702f, 0.65317284295378f, + 0.63439328416365f, 0.61523159058063f, 0.59569930449243f, 0.57580819141785f, + 0.55557023301960f, 0.53499761988710f, 0.51410274419322f, 0.49289819222978f, + 0.47139673682600f, 0.44961132965461f, 0.42755509343028f, 0.40524131400499f, + 0.38268343236509f, 0.35989503653499f, 0.33688985339222f, 0.31368174039889f, + 0.29028467725446f, 0.26671275747490f, 0.24298017990326f, 0.21910124015687f, + 0.19509032201613f, 0.17096188876030f, 0.14673047445536f, 0.12241067519922f, + 0.09801714032956f, 0.07356456359967f, 0.04906767432742f, 0.02454122852291f}; + +// hybrib Hanning & flat window +static const float kBlocks80w128[128] = { + 0.00000000f, 0.03271908f, 0.06540313f, 0.09801714f, 0.13052619f, + 0.16289547f, 0.19509032f, 0.22707626f, 0.25881905f, 0.29028468f, + 0.32143947f, 0.35225005f, 0.38268343f, 0.41270703f, 0.44228869f, + 0.47139674f, 0.50000000f, 0.52806785f, 0.55557023f, 0.58247770f, + 0.60876143f, 0.63439328f, 0.65934582f, 0.68359230f, 0.70710678f, + 0.72986407f, 0.75183981f, 0.77301045f, 0.79335334f, 0.81284668f, + 0.83146961f, 0.84920218f, 0.86602540f, 0.88192126f, 0.89687274f, + 0.91086382f, 0.92387953f, 0.93590593f, 0.94693013f, 0.95694034f, + 0.96592583f, 0.97387698f, 0.98078528f, 0.98664333f, 0.99144486f, + 0.99518473f, 0.99785892f, 0.99946459f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 0.99946459f, 0.99785892f, 0.99518473f, 0.99144486f, + 0.98664333f, 0.98078528f, 0.97387698f, 
0.96592583f, 0.95694034f, + 0.94693013f, 0.93590593f, 0.92387953f, 0.91086382f, 0.89687274f, + 0.88192126f, 0.86602540f, 0.84920218f, 0.83146961f, 0.81284668f, + 0.79335334f, 0.77301045f, 0.75183981f, 0.72986407f, 0.70710678f, + 0.68359230f, 0.65934582f, 0.63439328f, 0.60876143f, 0.58247770f, + 0.55557023f, 0.52806785f, 0.50000000f, 0.47139674f, 0.44228869f, + 0.41270703f, 0.38268343f, 0.35225005f, 0.32143947f, 0.29028468f, + 0.25881905f, 0.22707626f, 0.19509032f, 0.16289547f, 0.13052619f, + 0.09801714f, 0.06540313f, 0.03271908f}; + +// hybrib Hanning & flat window +static const float kBlocks160w256[256] = { + 0.00000000f, 0.01636173f, 0.03271908f, 0.04906767f, 0.06540313f, + 0.08172107f, 0.09801714f, 0.11428696f, 0.13052619f, 0.14673047f, + 0.16289547f, 0.17901686f, 0.19509032f, 0.21111155f, 0.22707626f, + 0.24298018f, 0.25881905f, 0.27458862f, 0.29028468f, 0.30590302f, + 0.32143947f, 0.33688985f, 0.35225005f, 0.36751594f, 0.38268343f, + 0.39774847f, 0.41270703f, 0.42755509f, 0.44228869f, 0.45690388f, + 0.47139674f, 0.48576339f, 0.50000000f, 0.51410274f, 0.52806785f, + 0.54189158f, 0.55557023f, 0.56910015f, 0.58247770f, 0.59569930f, + 0.60876143f, 0.62166057f, 0.63439328f, 0.64695615f, 0.65934582f, + 0.67155895f, 0.68359230f, 0.69544264f, 0.70710678f, 0.71858162f, + 0.72986407f, 0.74095113f, 0.75183981f, 0.76252720f, 0.77301045f, + 0.78328675f, 0.79335334f, 0.80320753f, 0.81284668f, 0.82226822f, + 0.83146961f, 0.84044840f, 0.84920218f, 0.85772861f, 0.86602540f, + 0.87409034f, 0.88192126f, 0.88951608f, 0.89687274f, 0.90398929f, + 0.91086382f, 0.91749450f, 0.92387953f, 0.93001722f, 0.93590593f, + 0.94154407f, 0.94693013f, 0.95206268f, 0.95694034f, 0.96156180f, + 0.96592583f, 0.97003125f, 0.97387698f, 0.97746197f, 0.98078528f, + 0.98384601f, 0.98664333f, 0.98917651f, 0.99144486f, 0.99344778f, + 0.99518473f, 0.99665524f, 0.99785892f, 0.99879546f, 0.99946459f, + 0.99986614f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 
1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 0.99986614f, 0.99946459f, 0.99879546f, 0.99785892f, + 0.99665524f, 0.99518473f, 0.99344778f, 0.99144486f, 0.98917651f, + 0.98664333f, 0.98384601f, 0.98078528f, 0.97746197f, 0.97387698f, + 0.97003125f, 0.96592583f, 0.96156180f, 0.95694034f, 0.95206268f, + 0.94693013f, 0.94154407f, 0.93590593f, 0.93001722f, 0.92387953f, + 0.91749450f, 0.91086382f, 0.90398929f, 0.89687274f, 0.88951608f, + 0.88192126f, 0.87409034f, 0.86602540f, 0.85772861f, 0.84920218f, + 0.84044840f, 0.83146961f, 0.82226822f, 0.81284668f, 0.80320753f, + 0.79335334f, 0.78328675f, 0.77301045f, 0.76252720f, 0.75183981f, + 0.74095113f, 0.72986407f, 0.71858162f, 0.70710678f, 0.69544264f, + 0.68359230f, 0.67155895f, 0.65934582f, 0.64695615f, 0.63439328f, + 0.62166057f, 0.60876143f, 0.59569930f, 0.58247770f, 0.56910015f, + 0.55557023f, 0.54189158f, 0.52806785f, 0.51410274f, 0.50000000f, + 0.48576339f, 0.47139674f, 0.45690388f, 0.44228869f, 0.42755509f, + 0.41270703f, 0.39774847f, 0.38268343f, 0.36751594f, 0.35225005f, + 0.33688985f, 0.32143947f, 0.30590302f, 0.29028468f, 0.27458862f, + 0.25881905f, 0.24298018f, 0.22707626f, 0.21111155f, 0.19509032f, + 0.17901686f, 0.16289547f, 0.14673047f, 0.13052619f, 0.11428696f, + 0.09801714f, 0.08172107f, 
0.06540313f, 0.04906767f, 0.03271908f, + 0.01636173f}; + +// hybrib Hanning & flat window: for 20ms +static const float kBlocks320w512[512] = { + 0.00000000f, 0.00818114f, 0.01636173f, 0.02454123f, 0.03271908f, + 0.04089475f, 0.04906767f, 0.05723732f, 0.06540313f, 0.07356456f, + 0.08172107f, 0.08987211f, 0.09801714f, 0.10615561f, 0.11428696f, + 0.12241068f, 0.13052619f, 0.13863297f, 0.14673047f, 0.15481816f, + 0.16289547f, 0.17096189f, 0.17901686f, 0.18705985f, 0.19509032f, + 0.20310773f, 0.21111155f, 0.21910124f, 0.22707626f, 0.23503609f, + 0.24298018f, 0.25090801f, 0.25881905f, 0.26671276f, 0.27458862f, + 0.28244610f, 0.29028468f, 0.29810383f, 0.30590302f, 0.31368174f, + 0.32143947f, 0.32917568f, 0.33688985f, 0.34458148f, 0.35225005f, + 0.35989504f, 0.36751594f, 0.37511224f, 0.38268343f, 0.39022901f, + 0.39774847f, 0.40524131f, 0.41270703f, 0.42014512f, 0.42755509f, + 0.43493645f, 0.44228869f, 0.44961133f, 0.45690388f, 0.46416584f, + 0.47139674f, 0.47859608f, 0.48576339f, 0.49289819f, 0.50000000f, + 0.50706834f, 0.51410274f, 0.52110274f, 0.52806785f, 0.53499762f, + 0.54189158f, 0.54874927f, 0.55557023f, 0.56235401f, 0.56910015f, + 0.57580819f, 0.58247770f, 0.58910822f, 0.59569930f, 0.60225052f, + 0.60876143f, 0.61523159f, 0.62166057f, 0.62804795f, 0.63439328f, + 0.64069616f, 0.64695615f, 0.65317284f, 0.65934582f, 0.66547466f, + 0.67155895f, 0.67759830f, 0.68359230f, 0.68954054f, 0.69544264f, + 0.70129818f, 0.70710678f, 0.71286806f, 0.71858162f, 0.72424708f, + 0.72986407f, 0.73543221f, 0.74095113f, 0.74642045f, 0.75183981f, + 0.75720885f, 0.76252720f, 0.76779452f, 0.77301045f, 0.77817464f, + 0.78328675f, 0.78834643f, 0.79335334f, 0.79830715f, 0.80320753f, + 0.80805415f, 0.81284668f, 0.81758481f, 0.82226822f, 0.82689659f, + 0.83146961f, 0.83598698f, 0.84044840f, 0.84485357f, 0.84920218f, + 0.85349396f, 0.85772861f, 0.86190585f, 0.86602540f, 0.87008699f, + 0.87409034f, 0.87803519f, 0.88192126f, 0.88574831f, 0.88951608f, + 0.89322430f, 0.89687274f, 0.90046115f, 
0.90398929f, 0.90745693f, + 0.91086382f, 0.91420976f, 0.91749450f, 0.92071783f, 0.92387953f, + 0.92697940f, 0.93001722f, 0.93299280f, 0.93590593f, 0.93875641f, + 0.94154407f, 0.94426870f, 0.94693013f, 0.94952818f, 0.95206268f, + 0.95453345f, 0.95694034f, 0.95928317f, 0.96156180f, 0.96377607f, + 0.96592583f, 0.96801094f, 0.97003125f, 0.97198664f, 0.97387698f, + 0.97570213f, 0.97746197f, 0.97915640f, 0.98078528f, 0.98234852f, + 0.98384601f, 0.98527764f, 0.98664333f, 0.98794298f, 0.98917651f, + 0.99034383f, 0.99144486f, 0.99247953f, 0.99344778f, 0.99434953f, + 0.99518473f, 0.99595331f, 0.99665524f, 0.99729046f, 0.99785892f, + 0.99836060f, 0.99879546f, 0.99916346f, 0.99946459f, 0.99969882f, + 0.99986614f, 0.99996653f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 
1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, 1.00000000f, + 1.00000000f, 0.99996653f, 0.99986614f, 0.99969882f, 0.99946459f, + 0.99916346f, 0.99879546f, 0.99836060f, 0.99785892f, 0.99729046f, + 0.99665524f, 0.99595331f, 0.99518473f, 0.99434953f, 0.99344778f, + 0.99247953f, 0.99144486f, 0.99034383f, 0.98917651f, 0.98794298f, + 0.98664333f, 0.98527764f, 0.98384601f, 0.98234852f, 0.98078528f, + 0.97915640f, 0.97746197f, 0.97570213f, 0.97387698f, 0.97198664f, + 0.97003125f, 0.96801094f, 0.96592583f, 0.96377607f, 0.96156180f, + 0.95928317f, 0.95694034f, 0.95453345f, 0.95206268f, 0.94952818f, + 0.94693013f, 0.94426870f, 0.94154407f, 0.93875641f, 0.93590593f, + 0.93299280f, 0.93001722f, 0.92697940f, 0.92387953f, 0.92071783f, + 0.91749450f, 0.91420976f, 0.91086382f, 0.90745693f, 0.90398929f, + 0.90046115f, 0.89687274f, 0.89322430f, 0.88951608f, 0.88574831f, + 0.88192126f, 0.87803519f, 0.87409034f, 0.87008699f, 0.86602540f, + 0.86190585f, 0.85772861f, 0.85349396f, 0.84920218f, 0.84485357f, + 0.84044840f, 0.83598698f, 0.83146961f, 0.82689659f, 0.82226822f, + 0.81758481f, 0.81284668f, 0.80805415f, 0.80320753f, 0.79830715f, + 0.79335334f, 0.78834643f, 0.78328675f, 0.77817464f, 0.77301045f, + 0.76779452f, 0.76252720f, 0.75720885f, 0.75183981f, 0.74642045f, + 0.74095113f, 0.73543221f, 0.72986407f, 0.72424708f, 0.71858162f, + 0.71286806f, 0.70710678f, 0.70129818f, 0.69544264f, 0.68954054f, + 0.68359230f, 0.67759830f, 0.67155895f, 0.66547466f, 0.65934582f, + 0.65317284f, 0.64695615f, 0.64069616f, 0.63439328f, 0.62804795f, + 0.62166057f, 0.61523159f, 0.60876143f, 0.60225052f, 0.59569930f, + 0.58910822f, 
0.58247770f, 0.57580819f, 0.56910015f, 0.56235401f, + 0.55557023f, 0.54874927f, 0.54189158f, 0.53499762f, 0.52806785f, + 0.52110274f, 0.51410274f, 0.50706834f, 0.50000000f, 0.49289819f, + 0.48576339f, 0.47859608f, 0.47139674f, 0.46416584f, 0.45690388f, + 0.44961133f, 0.44228869f, 0.43493645f, 0.42755509f, 0.42014512f, + 0.41270703f, 0.40524131f, 0.39774847f, 0.39022901f, 0.38268343f, + 0.37511224f, 0.36751594f, 0.35989504f, 0.35225005f, 0.34458148f, + 0.33688985f, 0.32917568f, 0.32143947f, 0.31368174f, 0.30590302f, + 0.29810383f, 0.29028468f, 0.28244610f, 0.27458862f, 0.26671276f, + 0.25881905f, 0.25090801f, 0.24298018f, 0.23503609f, 0.22707626f, + 0.21910124f, 0.21111155f, 0.20310773f, 0.19509032f, 0.18705985f, + 0.17901686f, 0.17096189f, 0.16289547f, 0.15481816f, 0.14673047f, + 0.13863297f, 0.13052619f, 0.12241068f, 0.11428696f, 0.10615561f, + 0.09801714f, 0.08987211f, 0.08172107f, 0.07356456f, 0.06540313f, + 0.05723732f, 0.04906767f, 0.04089475f, 0.03271908f, 0.02454123f, + 0.01636173f, 0.00818114f}; + +// Hanning window: for 15ms at 16kHz with symmetric zeros +static const float kBlocks240w512[512] = { + 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, + 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, + 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, + 0.00000000f, 0.00000000f, 0.00654494f, 0.01308960f, 0.01963369f, + 0.02617695f, 0.03271908f, 0.03925982f, 0.04579887f, 0.05233596f, + 0.05887080f, 0.06540313f, 0.07193266f, 0.07845910f, 0.08498218f, + 0.09150162f, 0.09801714f, 0.10452846f, 0.11103531f, 0.11753740f, + 0.12403446f, 0.13052620f, 0.13701233f, 0.14349262f, 0.14996676f, + 0.15643448f, 0.16289547f, 0.16934951f, 0.17579629f, 0.18223552f, + 0.18866697f, 0.19509032f, 0.20150533f, 0.20791170f, 0.21430916f, + 0.22069745f, 0.22707628f, 0.23344538f, 0.23980446f, 0.24615330f, + 0.25249159f, 0.25881904f, 0.26513544f, 0.27144045f, 0.27773386f, + 0.28401536f, 0.29028466f, 0.29654160f, 0.30278578f, 0.30901700f, + 
0.31523499f, 0.32143945f, 0.32763019f, 0.33380687f, 0.33996925f, + 0.34611708f, 0.35225007f, 0.35836795f, 0.36447051f, 0.37055743f, + 0.37662852f, 0.38268346f, 0.38872197f, 0.39474389f, 0.40074885f, + 0.40673664f, 0.41270703f, 0.41865975f, 0.42459452f, 0.43051112f, + 0.43640924f, 0.44228873f, 0.44814920f, 0.45399052f, 0.45981237f, + 0.46561453f, 0.47139674f, 0.47715878f, 0.48290035f, 0.48862126f, + 0.49432120f, 0.50000000f, 0.50565743f, 0.51129311f, 0.51690692f, + 0.52249855f, 0.52806789f, 0.53361452f, 0.53913832f, 0.54463905f, + 0.55011642f, 0.55557024f, 0.56100029f, 0.56640625f, 0.57178795f, + 0.57714522f, 0.58247769f, 0.58778524f, 0.59306765f, 0.59832460f, + 0.60355598f, 0.60876143f, 0.61394083f, 0.61909395f, 0.62422055f, + 0.62932038f, 0.63439333f, 0.63943899f, 0.64445734f, 0.64944810f, + 0.65441096f, 0.65934587f, 0.66425246f, 0.66913062f, 0.67398012f, + 0.67880076f, 0.68359232f, 0.68835455f, 0.69308740f, 0.69779050f, + 0.70246369f, 0.70710677f, 0.71171963f, 0.71630198f, 0.72085363f, + 0.72537440f, 0.72986406f, 0.73432255f, 0.73874950f, 0.74314487f, + 0.74750835f, 0.75183982f, 0.75613910f, 0.76040596f, 0.76464027f, + 0.76884186f, 0.77301043f, 0.77714598f, 0.78124821f, 0.78531694f, + 0.78935206f, 0.79335338f, 0.79732066f, 0.80125386f, 0.80515265f, + 0.80901700f, 0.81284672f, 0.81664157f, 0.82040149f, 0.82412618f, + 0.82781565f, 0.83146966f, 0.83508795f, 0.83867061f, 0.84221727f, + 0.84572780f, 0.84920216f, 0.85264021f, 0.85604161f, 0.85940641f, + 0.86273444f, 0.86602545f, 0.86927933f, 0.87249607f, 0.87567532f, + 0.87881714f, 0.88192129f, 0.88498765f, 0.88801610f, 0.89100653f, + 0.89395881f, 0.89687276f, 0.89974827f, 0.90258533f, 0.90538365f, + 0.90814316f, 0.91086388f, 0.91354549f, 0.91618794f, 0.91879123f, + 0.92135513f, 0.92387950f, 0.92636442f, 0.92880958f, 0.93121493f, + 0.93358046f, 0.93590593f, 0.93819135f, 0.94043654f, 0.94264150f, + 0.94480604f, 0.94693011f, 0.94901365f, 0.95105654f, 0.95305866f, + 0.95501995f, 0.95694035f, 0.95881975f, 0.96065807f, 
0.96245527f, + 0.96421117f, 0.96592581f, 0.96759909f, 0.96923089f, 0.97082120f, + 0.97236991f, 0.97387701f, 0.97534233f, 0.97676587f, 0.97814763f, + 0.97948742f, 0.98078531f, 0.98204112f, 0.98325491f, 0.98442656f, + 0.98555607f, 0.98664331f, 0.98768836f, 0.98869103f, 0.98965138f, + 0.99056935f, 0.99144489f, 0.99227792f, 0.99306846f, 0.99381649f, + 0.99452192f, 0.99518472f, 0.99580491f, 0.99638247f, 0.99691731f, + 0.99740952f, 0.99785894f, 0.99826562f, 0.99862951f, 0.99895066f, + 0.99922901f, 0.99946457f, 0.99965733f, 0.99980724f, 0.99991435f, + 0.99997860f, 1.00000000f, 0.99997860f, 0.99991435f, 0.99980724f, + 0.99965733f, 0.99946457f, 0.99922901f, 0.99895066f, 0.99862951f, + 0.99826562f, 0.99785894f, 0.99740946f, 0.99691731f, 0.99638247f, + 0.99580491f, 0.99518472f, 0.99452192f, 0.99381644f, 0.99306846f, + 0.99227792f, 0.99144489f, 0.99056935f, 0.98965138f, 0.98869103f, + 0.98768836f, 0.98664331f, 0.98555607f, 0.98442656f, 0.98325491f, + 0.98204112f, 0.98078525f, 0.97948742f, 0.97814757f, 0.97676587f, + 0.97534227f, 0.97387695f, 0.97236991f, 0.97082120f, 0.96923089f, + 0.96759909f, 0.96592581f, 0.96421117f, 0.96245521f, 0.96065807f, + 0.95881969f, 0.95694029f, 0.95501995f, 0.95305860f, 0.95105648f, + 0.94901365f, 0.94693011f, 0.94480604f, 0.94264150f, 0.94043654f, + 0.93819129f, 0.93590593f, 0.93358046f, 0.93121493f, 0.92880952f, + 0.92636436f, 0.92387950f, 0.92135507f, 0.91879123f, 0.91618794f, + 0.91354543f, 0.91086382f, 0.90814310f, 0.90538365f, 0.90258527f, + 0.89974827f, 0.89687276f, 0.89395875f, 0.89100647f, 0.88801610f, + 0.88498759f, 0.88192123f, 0.87881714f, 0.87567532f, 0.87249595f, + 0.86927933f, 0.86602539f, 0.86273432f, 0.85940641f, 0.85604161f, + 0.85264009f, 0.84920216f, 0.84572780f, 0.84221715f, 0.83867055f, + 0.83508795f, 0.83146954f, 0.82781565f, 0.82412612f, 0.82040137f, + 0.81664157f, 0.81284660f, 0.80901700f, 0.80515265f, 0.80125374f, + 0.79732066f, 0.79335332f, 0.78935200f, 0.78531694f, 0.78124815f, + 0.77714586f, 0.77301049f, 0.76884180f, 
0.76464021f, 0.76040596f, + 0.75613904f, 0.75183970f, 0.74750835f, 0.74314481f, 0.73874938f, + 0.73432249f, 0.72986400f, 0.72537428f, 0.72085363f, 0.71630186f, + 0.71171951f, 0.70710677f, 0.70246363f, 0.69779032f, 0.69308734f, + 0.68835449f, 0.68359220f, 0.67880070f, 0.67398006f, 0.66913044f, + 0.66425240f, 0.65934575f, 0.65441096f, 0.64944804f, 0.64445722f, + 0.63943905f, 0.63439327f, 0.62932026f, 0.62422055f, 0.61909389f, + 0.61394072f, 0.60876143f, 0.60355592f, 0.59832448f, 0.59306765f, + 0.58778518f, 0.58247757f, 0.57714522f, 0.57178789f, 0.56640613f, + 0.56100023f, 0.55557019f, 0.55011630f, 0.54463905f, 0.53913826f, + 0.53361434f, 0.52806783f, 0.52249849f, 0.51690674f, 0.51129305f, + 0.50565726f, 0.50000006f, 0.49432117f, 0.48862115f, 0.48290038f, + 0.47715873f, 0.47139663f, 0.46561456f, 0.45981231f, 0.45399037f, + 0.44814920f, 0.44228864f, 0.43640912f, 0.43051112f, 0.42459446f, + 0.41865960f, 0.41270703f, 0.40673658f, 0.40074870f, 0.39474386f, + 0.38872188f, 0.38268328f, 0.37662849f, 0.37055734f, 0.36447033f, + 0.35836792f, 0.35224995f, 0.34611690f, 0.33996922f, 0.33380675f, + 0.32763001f, 0.32143945f, 0.31523487f, 0.30901679f, 0.30278572f, + 0.29654145f, 0.29028472f, 0.28401530f, 0.27773371f, 0.27144048f, + 0.26513538f, 0.25881892f, 0.25249159f, 0.24615324f, 0.23980433f, + 0.23344538f, 0.22707619f, 0.22069728f, 0.21430916f, 0.20791161f, + 0.20150517f, 0.19509031f, 0.18866688f, 0.18223536f, 0.17579627f, + 0.16934940f, 0.16289529f, 0.15643445f, 0.14996666f, 0.14349243f, + 0.13701232f, 0.13052608f, 0.12403426f, 0.11753736f, 0.11103519f, + 0.10452849f, 0.09801710f, 0.09150149f, 0.08498220f, 0.07845904f, + 0.07193252f, 0.06540315f, 0.05887074f, 0.05233581f, 0.04579888f, + 0.03925974f, 0.03271893f, 0.02617695f, 0.01963361f, 0.01308943f, + 0.00654493f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, + 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, + 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, + 0.00000000f, 0.00000000f}; + 
+// Hanning window: for 30ms with 1024 fft with symmetric zeros at 16kHz +static const float kBlocks480w1024[1024] = { + 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, + 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, + 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, + 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, + 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, + 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, + 0.00000000f, 0.00000000f, 0.00000000f, 0.00327249f, 0.00654494f, + 0.00981732f, 0.01308960f, 0.01636173f, 0.01963369f, 0.02290544f, + 0.02617695f, 0.02944817f, 0.03271908f, 0.03598964f, 0.03925982f, + 0.04252957f, 0.04579887f, 0.04906768f, 0.05233596f, 0.05560368f, + 0.05887080f, 0.06213730f, 0.06540313f, 0.06866825f, 0.07193266f, + 0.07519628f, 0.07845910f, 0.08172107f, 0.08498218f, 0.08824237f, + 0.09150162f, 0.09475989f, 0.09801714f, 0.10127335f, 0.10452846f, + 0.10778246f, 0.11103531f, 0.11428697f, 0.11753740f, 0.12078657f, + 0.12403446f, 0.12728101f, 0.13052620f, 0.13376999f, 0.13701233f, + 0.14025325f, 0.14349262f, 0.14673047f, 0.14996676f, 0.15320145f, + 0.15643448f, 0.15966582f, 0.16289547f, 0.16612339f, 0.16934951f, + 0.17257382f, 0.17579629f, 0.17901687f, 0.18223552f, 0.18545224f, + 0.18866697f, 0.19187967f, 0.19509032f, 0.19829889f, 0.20150533f, + 0.20470962f, 0.20791170f, 0.21111156f, 0.21430916f, 0.21750447f, + 0.22069745f, 0.22388805f, 0.22707628f, 0.23026206f, 0.23344538f, + 0.23662618f, 0.23980446f, 0.24298020f, 0.24615330f, 0.24932377f, + 0.25249159f, 0.25565669f, 0.25881904f, 0.26197866f, 0.26513544f, + 0.26828939f, 0.27144045f, 0.27458861f, 0.27773386f, 0.28087610f, + 0.28401536f, 0.28715158f, 0.29028466f, 0.29341471f, 0.29654160f, + 0.29966527f, 0.30278578f, 0.30590302f, 0.30901700f, 0.31212768f, + 0.31523499f, 0.31833893f, 0.32143945f, 0.32453656f, 0.32763019f, + 0.33072028f, 0.33380687f, 0.33688986f, 0.33996925f, 0.34304500f, + 
0.34611708f, 0.34918544f, 0.35225007f, 0.35531089f, 0.35836795f, + 0.36142117f, 0.36447051f, 0.36751595f, 0.37055743f, 0.37359497f, + 0.37662852f, 0.37965801f, 0.38268346f, 0.38570479f, 0.38872197f, + 0.39173502f, 0.39474389f, 0.39774847f, 0.40074885f, 0.40374491f, + 0.40673664f, 0.40972406f, 0.41270703f, 0.41568562f, 0.41865975f, + 0.42162940f, 0.42459452f, 0.42755508f, 0.43051112f, 0.43346250f, + 0.43640924f, 0.43935132f, 0.44228873f, 0.44522133f, 0.44814920f, + 0.45107228f, 0.45399052f, 0.45690390f, 0.45981237f, 0.46271592f, + 0.46561453f, 0.46850815f, 0.47139674f, 0.47428030f, 0.47715878f, + 0.48003215f, 0.48290035f, 0.48576337f, 0.48862126f, 0.49147385f, + 0.49432120f, 0.49716330f, 0.50000000f, 0.50283140f, 0.50565743f, + 0.50847799f, 0.51129311f, 0.51410276f, 0.51690692f, 0.51970553f, + 0.52249855f, 0.52528602f, 0.52806789f, 0.53084403f, 0.53361452f, + 0.53637928f, 0.53913832f, 0.54189163f, 0.54463905f, 0.54738063f, + 0.55011642f, 0.55284631f, 0.55557024f, 0.55828828f, 0.56100029f, + 0.56370628f, 0.56640625f, 0.56910014f, 0.57178795f, 0.57446963f, + 0.57714522f, 0.57981455f, 0.58247769f, 0.58513463f, 0.58778524f, + 0.59042960f, 0.59306765f, 0.59569931f, 0.59832460f, 0.60094351f, + 0.60355598f, 0.60616195f, 0.60876143f, 0.61135441f, 0.61394083f, + 0.61652070f, 0.61909395f, 0.62166059f, 0.62422055f, 0.62677383f, + 0.62932038f, 0.63186020f, 0.63439333f, 0.63691956f, 0.63943899f, + 0.64195162f, 0.64445734f, 0.64695615f, 0.64944810f, 0.65193301f, + 0.65441096f, 0.65688187f, 0.65934587f, 0.66180271f, 0.66425246f, + 0.66669512f, 0.66913062f, 0.67155898f, 0.67398012f, 0.67639405f, + 0.67880076f, 0.68120021f, 0.68359232f, 0.68597710f, 0.68835455f, + 0.69072467f, 0.69308740f, 0.69544262f, 0.69779050f, 0.70013082f, + 0.70246369f, 0.70478904f, 0.70710677f, 0.70941699f, 0.71171963f, + 0.71401459f, 0.71630198f, 0.71858168f, 0.72085363f, 0.72311789f, + 0.72537440f, 0.72762316f, 0.72986406f, 0.73209721f, 0.73432255f, + 0.73653996f, 0.73874950f, 0.74095118f, 0.74314487f, 
0.74533057f, + 0.74750835f, 0.74967808f, 0.75183982f, 0.75399351f, 0.75613910f, + 0.75827658f, 0.76040596f, 0.76252723f, 0.76464027f, 0.76674515f, + 0.76884186f, 0.77093029f, 0.77301043f, 0.77508241f, 0.77714598f, + 0.77920127f, 0.78124821f, 0.78328675f, 0.78531694f, 0.78733873f, + 0.78935206f, 0.79135692f, 0.79335338f, 0.79534125f, 0.79732066f, + 0.79929149f, 0.80125386f, 0.80320752f, 0.80515265f, 0.80708915f, + 0.80901700f, 0.81093621f, 0.81284672f, 0.81474853f, 0.81664157f, + 0.81852591f, 0.82040149f, 0.82226825f, 0.82412618f, 0.82597536f, + 0.82781565f, 0.82964706f, 0.83146966f, 0.83328325f, 0.83508795f, + 0.83688378f, 0.83867061f, 0.84044838f, 0.84221727f, 0.84397703f, + 0.84572780f, 0.84746957f, 0.84920216f, 0.85092574f, 0.85264021f, + 0.85434544f, 0.85604161f, 0.85772866f, 0.85940641f, 0.86107504f, + 0.86273444f, 0.86438453f, 0.86602545f, 0.86765707f, 0.86927933f, + 0.87089235f, 0.87249607f, 0.87409031f, 0.87567532f, 0.87725097f, + 0.87881714f, 0.88037390f, 0.88192129f, 0.88345921f, 0.88498765f, + 0.88650668f, 0.88801610f, 0.88951612f, 0.89100653f, 0.89248741f, + 0.89395881f, 0.89542055f, 0.89687276f, 0.89831537f, 0.89974827f, + 0.90117162f, 0.90258533f, 0.90398932f, 0.90538365f, 0.90676826f, + 0.90814316f, 0.90950841f, 0.91086388f, 0.91220951f, 0.91354549f, + 0.91487163f, 0.91618794f, 0.91749454f, 0.91879123f, 0.92007810f, + 0.92135513f, 0.92262226f, 0.92387950f, 0.92512691f, 0.92636442f, + 0.92759192f, 0.92880958f, 0.93001723f, 0.93121493f, 0.93240267f, + 0.93358046f, 0.93474817f, 0.93590593f, 0.93705362f, 0.93819135f, + 0.93931901f, 0.94043654f, 0.94154406f, 0.94264150f, 0.94372880f, + 0.94480604f, 0.94587320f, 0.94693011f, 0.94797695f, 0.94901365f, + 0.95004016f, 0.95105654f, 0.95206273f, 0.95305866f, 0.95404440f, + 0.95501995f, 0.95598525f, 0.95694035f, 0.95788521f, 0.95881975f, + 0.95974404f, 0.96065807f, 0.96156180f, 0.96245527f, 0.96333838f, + 0.96421117f, 0.96507370f, 0.96592581f, 0.96676767f, 0.96759909f, + 0.96842021f, 0.96923089f, 0.97003126f, 
0.97082120f, 0.97160077f, + 0.97236991f, 0.97312868f, 0.97387701f, 0.97461486f, 0.97534233f, + 0.97605932f, 0.97676587f, 0.97746199f, 0.97814763f, 0.97882277f, + 0.97948742f, 0.98014158f, 0.98078531f, 0.98141843f, 0.98204112f, + 0.98265332f, 0.98325491f, 0.98384601f, 0.98442656f, 0.98499662f, + 0.98555607f, 0.98610497f, 0.98664331f, 0.98717111f, 0.98768836f, + 0.98819500f, 0.98869103f, 0.98917651f, 0.98965138f, 0.99011570f, + 0.99056935f, 0.99101239f, 0.99144489f, 0.99186671f, 0.99227792f, + 0.99267852f, 0.99306846f, 0.99344778f, 0.99381649f, 0.99417448f, + 0.99452192f, 0.99485862f, 0.99518472f, 0.99550015f, 0.99580491f, + 0.99609905f, 0.99638247f, 0.99665523f, 0.99691731f, 0.99716878f, + 0.99740952f, 0.99763954f, 0.99785894f, 0.99806762f, 0.99826562f, + 0.99845290f, 0.99862951f, 0.99879545f, 0.99895066f, 0.99909520f, + 0.99922901f, 0.99935216f, 0.99946457f, 0.99956632f, 0.99965733f, + 0.99973762f, 0.99980724f, 0.99986613f, 0.99991435f, 0.99995178f, + 0.99997860f, 0.99999464f, 1.00000000f, 0.99999464f, 0.99997860f, + 0.99995178f, 0.99991435f, 0.99986613f, 0.99980724f, 0.99973762f, + 0.99965733f, 0.99956632f, 0.99946457f, 0.99935216f, 0.99922901f, + 0.99909520f, 0.99895066f, 0.99879545f, 0.99862951f, 0.99845290f, + 0.99826562f, 0.99806762f, 0.99785894f, 0.99763954f, 0.99740946f, + 0.99716872f, 0.99691731f, 0.99665523f, 0.99638247f, 0.99609905f, + 0.99580491f, 0.99550015f, 0.99518472f, 0.99485862f, 0.99452192f, + 0.99417448f, 0.99381644f, 0.99344778f, 0.99306846f, 0.99267852f, + 0.99227792f, 0.99186671f, 0.99144489f, 0.99101239f, 0.99056935f, + 0.99011564f, 0.98965138f, 0.98917651f, 0.98869103f, 0.98819494f, + 0.98768836f, 0.98717111f, 0.98664331f, 0.98610497f, 0.98555607f, + 0.98499656f, 0.98442656f, 0.98384601f, 0.98325491f, 0.98265326f, + 0.98204112f, 0.98141843f, 0.98078525f, 0.98014158f, 0.97948742f, + 0.97882277f, 0.97814757f, 0.97746193f, 0.97676587f, 0.97605932f, + 0.97534227f, 0.97461486f, 0.97387695f, 0.97312862f, 0.97236991f, + 0.97160077f, 0.97082120f, 
0.97003126f, 0.96923089f, 0.96842015f, + 0.96759909f, 0.96676761f, 0.96592581f, 0.96507365f, 0.96421117f, + 0.96333838f, 0.96245521f, 0.96156180f, 0.96065807f, 0.95974404f, + 0.95881969f, 0.95788515f, 0.95694029f, 0.95598525f, 0.95501995f, + 0.95404440f, 0.95305860f, 0.95206267f, 0.95105648f, 0.95004016f, + 0.94901365f, 0.94797695f, 0.94693011f, 0.94587314f, 0.94480604f, + 0.94372880f, 0.94264150f, 0.94154406f, 0.94043654f, 0.93931895f, + 0.93819129f, 0.93705362f, 0.93590593f, 0.93474817f, 0.93358046f, + 0.93240267f, 0.93121493f, 0.93001723f, 0.92880952f, 0.92759192f, + 0.92636436f, 0.92512691f, 0.92387950f, 0.92262226f, 0.92135507f, + 0.92007804f, 0.91879123f, 0.91749448f, 0.91618794f, 0.91487157f, + 0.91354543f, 0.91220951f, 0.91086382f, 0.90950835f, 0.90814310f, + 0.90676820f, 0.90538365f, 0.90398932f, 0.90258527f, 0.90117157f, + 0.89974827f, 0.89831525f, 0.89687276f, 0.89542055f, 0.89395875f, + 0.89248741f, 0.89100647f, 0.88951600f, 0.88801610f, 0.88650662f, + 0.88498759f, 0.88345915f, 0.88192123f, 0.88037384f, 0.87881714f, + 0.87725091f, 0.87567532f, 0.87409031f, 0.87249595f, 0.87089223f, + 0.86927933f, 0.86765701f, 0.86602539f, 0.86438447f, 0.86273432f, + 0.86107504f, 0.85940641f, 0.85772860f, 0.85604161f, 0.85434544f, + 0.85264009f, 0.85092574f, 0.84920216f, 0.84746951f, 0.84572780f, + 0.84397697f, 0.84221715f, 0.84044844f, 0.83867055f, 0.83688372f, + 0.83508795f, 0.83328319f, 0.83146954f, 0.82964706f, 0.82781565f, + 0.82597530f, 0.82412612f, 0.82226813f, 0.82040137f, 0.81852591f, + 0.81664157f, 0.81474847f, 0.81284660f, 0.81093609f, 0.80901700f, + 0.80708915f, 0.80515265f, 0.80320752f, 0.80125374f, 0.79929143f, + 0.79732066f, 0.79534125f, 0.79335332f, 0.79135686f, 0.78935200f, + 0.78733861f, 0.78531694f, 0.78328675f, 0.78124815f, 0.77920121f, + 0.77714586f, 0.77508223f, 0.77301049f, 0.77093029f, 0.76884180f, + 0.76674509f, 0.76464021f, 0.76252711f, 0.76040596f, 0.75827658f, + 0.75613904f, 0.75399339f, 0.75183970f, 0.74967796f, 0.74750835f, + 0.74533057f, 
0.74314481f, 0.74095106f, 0.73874938f, 0.73653996f, + 0.73432249f, 0.73209721f, 0.72986400f, 0.72762305f, 0.72537428f, + 0.72311789f, 0.72085363f, 0.71858162f, 0.71630186f, 0.71401453f, + 0.71171951f, 0.70941705f, 0.70710677f, 0.70478898f, 0.70246363f, + 0.70013070f, 0.69779032f, 0.69544268f, 0.69308734f, 0.69072461f, + 0.68835449f, 0.68597704f, 0.68359220f, 0.68120021f, 0.67880070f, + 0.67639399f, 0.67398006f, 0.67155886f, 0.66913044f, 0.66669512f, + 0.66425240f, 0.66180259f, 0.65934575f, 0.65688181f, 0.65441096f, + 0.65193301f, 0.64944804f, 0.64695609f, 0.64445722f, 0.64195150f, + 0.63943905f, 0.63691956f, 0.63439327f, 0.63186014f, 0.62932026f, + 0.62677372f, 0.62422055f, 0.62166059f, 0.61909389f, 0.61652064f, + 0.61394072f, 0.61135429f, 0.60876143f, 0.60616189f, 0.60355592f, + 0.60094339f, 0.59832448f, 0.59569913f, 0.59306765f, 0.59042960f, + 0.58778518f, 0.58513451f, 0.58247757f, 0.57981461f, 0.57714522f, + 0.57446963f, 0.57178789f, 0.56910002f, 0.56640613f, 0.56370628f, + 0.56100023f, 0.55828822f, 0.55557019f, 0.55284619f, 0.55011630f, + 0.54738069f, 0.54463905f, 0.54189152f, 0.53913826f, 0.53637916f, + 0.53361434f, 0.53084403f, 0.52806783f, 0.52528596f, 0.52249849f, + 0.51970541f, 0.51690674f, 0.51410276f, 0.51129305f, 0.50847787f, + 0.50565726f, 0.50283122f, 0.50000006f, 0.49716327f, 0.49432117f, + 0.49147379f, 0.48862115f, 0.48576325f, 0.48290038f, 0.48003212f, + 0.47715873f, 0.47428021f, 0.47139663f, 0.46850798f, 0.46561456f, + 0.46271589f, 0.45981231f, 0.45690379f, 0.45399037f, 0.45107210f, + 0.44814920f, 0.44522130f, 0.44228864f, 0.43935123f, 0.43640912f, + 0.43346232f, 0.43051112f, 0.42755505f, 0.42459446f, 0.42162928f, + 0.41865960f, 0.41568545f, 0.41270703f, 0.40972400f, 0.40673658f, + 0.40374479f, 0.40074870f, 0.39774850f, 0.39474386f, 0.39173496f, + 0.38872188f, 0.38570464f, 0.38268328f, 0.37965804f, 0.37662849f, + 0.37359491f, 0.37055734f, 0.36751580f, 0.36447033f, 0.36142117f, + 0.35836792f, 0.35531086f, 0.35224995f, 0.34918529f, 0.34611690f, + 
0.34304500f, 0.33996922f, 0.33688980f, 0.33380675f, 0.33072016f, + 0.32763001f, 0.32453656f, 0.32143945f, 0.31833887f, 0.31523487f, + 0.31212750f, 0.30901679f, 0.30590302f, 0.30278572f, 0.29966521f, + 0.29654145f, 0.29341453f, 0.29028472f, 0.28715155f, 0.28401530f, + 0.28087601f, 0.27773371f, 0.27458847f, 0.27144048f, 0.26828936f, + 0.26513538f, 0.26197854f, 0.25881892f, 0.25565651f, 0.25249159f, + 0.24932374f, 0.24615324f, 0.24298008f, 0.23980433f, 0.23662600f, + 0.23344538f, 0.23026201f, 0.22707619f, 0.22388794f, 0.22069728f, + 0.21750426f, 0.21430916f, 0.21111152f, 0.20791161f, 0.20470949f, + 0.20150517f, 0.19829892f, 0.19509031f, 0.19187963f, 0.18866688f, + 0.18545210f, 0.18223536f, 0.17901689f, 0.17579627f, 0.17257376f, + 0.16934940f, 0.16612324f, 0.16289529f, 0.15966584f, 0.15643445f, + 0.15320137f, 0.14996666f, 0.14673033f, 0.14349243f, 0.14025325f, + 0.13701232f, 0.13376991f, 0.13052608f, 0.12728085f, 0.12403426f, + 0.12078657f, 0.11753736f, 0.11428688f, 0.11103519f, 0.10778230f, + 0.10452849f, 0.10127334f, 0.09801710f, 0.09475980f, 0.09150149f, + 0.08824220f, 0.08498220f, 0.08172106f, 0.07845904f, 0.07519618f, + 0.07193252f, 0.06866808f, 0.06540315f, 0.06213728f, 0.05887074f, + 0.05560357f, 0.05233581f, 0.04906749f, 0.04579888f, 0.04252954f, + 0.03925974f, 0.03598953f, 0.03271893f, 0.02944798f, 0.02617695f, + 0.02290541f, 0.01963361f, 0.01636161f, 0.01308943f, 0.00981712f, + 0.00654493f, 0.00327244f, 0.00000000f, 0.00000000f, 0.00000000f, + 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, + 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, + 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, + 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, + 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f, + 0.00000000f, 0.00000000f, 0.00000000f, 0.00000000f}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_WINDOWS_PRIVATE_H_ diff --git 
a/modules/audio_processing/transient/wpd_node.cc b/modules/audio_processing/transient/wpd_node.cc new file mode 100644 index 0000000..2e0ee7e --- /dev/null +++ b/modules/audio_processing/transient/wpd_node.cc @@ -0,0 +1,72 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/transient/wpd_node.h" + +#include +#include + +#include "common_audio/fir_filter.h" +#include "common_audio/fir_filter_factory.h" +#include "modules/audio_processing/transient/dyadic_decimator.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +WPDNode::WPDNode(size_t length, + const float* coefficients, + size_t coefficients_length) + : // The data buffer has parent data length to be able to contain and + // filter it. + data_(new float[2 * length + 1]), + length_(length), + filter_( + CreateFirFilter(coefficients, coefficients_length, 2 * length + 1)) { + RTC_DCHECK_GT(length, 0); + RTC_DCHECK(coefficients); + RTC_DCHECK_GT(coefficients_length, 0); + memset(data_.get(), 0.f, (2 * length + 1) * sizeof(data_[0])); +} + +WPDNode::~WPDNode() {} + +int WPDNode::Update(const float* parent_data, size_t parent_data_length) { + if (!parent_data || (parent_data_length / 2) != length_) { + return -1; + } + + // Filter data. + filter_->Filter(parent_data, parent_data_length, data_.get()); + + // Decimate data. + const bool kOddSequence = true; + size_t output_samples = DyadicDecimate(data_.get(), parent_data_length, + kOddSequence, data_.get(), length_); + if (output_samples != length_) { + return -1; + } + + // Get abs to all values. 
+ for (size_t i = 0; i < length_; ++i) { + data_[i] = fabs(data_[i]); + } + + return 0; +} + +int WPDNode::set_data(const float* new_data, size_t length) { + if (!new_data || length != length_) { + return -1; + } + memcpy(data_.get(), new_data, length * sizeof(data_[0])); + return 0; +} + +} // namespace webrtc diff --git a/modules/audio_processing/transient/wpd_node.h b/modules/audio_processing/transient/wpd_node.h new file mode 100644 index 0000000..6a52fb7 --- /dev/null +++ b/modules/audio_processing/transient/wpd_node.h @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_WPD_NODE_H_ +#define MODULES_AUDIO_PROCESSING_TRANSIENT_WPD_NODE_H_ + +#include + +namespace webrtc { + +class FIRFilter; + +// A single node of a Wavelet Packet Decomposition (WPD) tree. +class WPDNode { + public: + // Creates a WPDNode. The data vector will contain zeros. The filter will have + // the coefficients provided. + WPDNode(size_t length, const float* coefficients, size_t coefficients_length); + ~WPDNode(); + + // Updates the node data. |parent_data| / 2 must be equals to |length_|. + // Returns 0 if correct, and -1 otherwise. + int Update(const float* parent_data, size_t parent_data_length); + + const float* data() const { return data_.get(); } + // Returns 0 if correct, and -1 otherwise. 
+ int set_data(const float* new_data, size_t length); + size_t length() const { return length_; } + + private: + std::unique_ptr data_; + size_t length_; + std::unique_ptr filter_; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_WPD_NODE_H_ diff --git a/modules/audio_processing/transient/wpd_node_unittest.cc b/modules/audio_processing/transient/wpd_node_unittest.cc new file mode 100644 index 0000000..5f92382 --- /dev/null +++ b/modules/audio_processing/transient/wpd_node_unittest.cc @@ -0,0 +1,64 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/transient/wpd_node.h" + +#include + +#include "test/gtest.h" + +namespace webrtc { + +static const size_t kDataLength = 5; +static const float kTolerance = 0.0001f; + +static const size_t kParentDataLength = kDataLength * 2; +static const float kParentData[kParentDataLength] = {1.f, 2.f, 3.f, 4.f, 5.f, + 6.f, 7.f, 8.f, 9.f, 10.f}; + +static const float kCoefficients[] = {0.2f, -0.3f, 0.5f, -0.7f, 0.11f}; +static const size_t kCoefficientsLength = + sizeof(kCoefficients) / sizeof(kCoefficients[0]); + +TEST(WPDNodeTest, Accessors) { + WPDNode node(kDataLength, kCoefficients, kCoefficientsLength); + EXPECT_EQ(0, node.set_data(kParentData, kDataLength)); + EXPECT_EQ(0, memcmp(node.data(), kParentData, + kDataLength * sizeof(node.data()[0]))); +} + +TEST(WPDNodeTest, UpdateThatOnlyDecimates) { + const float kIndentyCoefficient = 1.f; + WPDNode node(kDataLength, &kIndentyCoefficient, 1); + EXPECT_EQ(0, node.Update(kParentData, kParentDataLength)); + for (size_t i = 0; i < kDataLength; ++i) { + 
EXPECT_FLOAT_EQ(kParentData[i * 2 + 1], node.data()[i]); + } +} + +TEST(WPDNodeTest, UpdateWithArbitraryDataAndArbitraryFilter) { + WPDNode node(kDataLength, kCoefficients, kCoefficientsLength); + EXPECT_EQ(0, node.Update(kParentData, kParentDataLength)); + EXPECT_NEAR(0.1f, node.data()[0], kTolerance); + EXPECT_NEAR(0.2f, node.data()[1], kTolerance); + EXPECT_NEAR(0.18f, node.data()[2], kTolerance); + EXPECT_NEAR(0.56f, node.data()[3], kTolerance); + EXPECT_NEAR(0.94f, node.data()[4], kTolerance); +} + +TEST(WPDNodeTest, ExpectedErrorReturnValue) { + WPDNode node(kDataLength, kCoefficients, kCoefficientsLength); + EXPECT_EQ(-1, node.Update(kParentData, kParentDataLength - 1)); + EXPECT_EQ(-1, node.Update(NULL, kParentDataLength)); + EXPECT_EQ(-1, node.set_data(kParentData, kDataLength - 1)); + EXPECT_EQ(-1, node.set_data(NULL, kDataLength)); +} + +} // namespace webrtc diff --git a/modules/audio_processing/transient/wpd_tree.cc b/modules/audio_processing/transient/wpd_tree.cc new file mode 100644 index 0000000..c8aa615 --- /dev/null +++ b/modules/audio_processing/transient/wpd_tree.cc @@ -0,0 +1,118 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/audio_processing/transient/wpd_tree.h" + +#include + +#include "modules/audio_processing/transient/wpd_node.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +WPDTree::WPDTree(size_t data_length, + const float* high_pass_coefficients, + const float* low_pass_coefficients, + size_t coefficients_length, + int levels) + : data_length_(data_length), + levels_(levels), + num_nodes_((1 << (levels + 1)) - 1) { + RTC_DCHECK_GT(data_length, (static_cast(1) << levels)); + RTC_DCHECK(high_pass_coefficients); + RTC_DCHECK(low_pass_coefficients); + RTC_DCHECK_GT(levels, 0); + // Size is 1 more, so we can use the array as 1-based. nodes_[0] is never + // allocated. + nodes_.reset(new std::unique_ptr[num_nodes_ + 1]); + + // Create the first node + const float kRootCoefficient = 1.f; // Identity Coefficient. + nodes_[1].reset(new WPDNode(data_length, &kRootCoefficient, 1)); + // Variables used to create the rest of the nodes. + size_t index = 1; + size_t index_left_child = 0; + size_t index_right_child = 0; + + int num_nodes_at_curr_level = 0; + + // Branching each node in each level to create its children. The last level is + // not branched (all the nodes of that level are leaves). + for (int current_level = 0; current_level < levels; ++current_level) { + num_nodes_at_curr_level = 1 << current_level; + for (int i = 0; i < num_nodes_at_curr_level; ++i) { + index = (1 << current_level) + i; + // Obtain the index of the current node children. 
+ index_left_child = index * 2; + index_right_child = index_left_child + 1; + nodes_[index_left_child].reset(new WPDNode(nodes_[index]->length() / 2, + low_pass_coefficients, + coefficients_length)); + nodes_[index_right_child].reset(new WPDNode(nodes_[index]->length() / 2, + high_pass_coefficients, + coefficients_length)); + } + } +} + +WPDTree::~WPDTree() {} + +WPDNode* WPDTree::NodeAt(int level, int index) { + if (level < 0 || level > levels_ || index < 0 || index >= 1 << level) { + return NULL; + } + + return nodes_[(1 << level) + index].get(); +} + +int WPDTree::Update(const float* data, size_t data_length) { + if (!data || data_length != data_length_) { + return -1; + } + + // Update the root node. + int update_result = nodes_[1]->set_data(data, data_length); + if (update_result != 0) { + return -1; + } + + // Variables used to update the rest of the nodes. + size_t index = 1; + size_t index_left_child = 0; + size_t index_right_child = 0; + + int num_nodes_at_curr_level = 0; + + for (int current_level = 0; current_level < levels_; ++current_level) { + num_nodes_at_curr_level = 1 << current_level; + for (int i = 0; i < num_nodes_at_curr_level; ++i) { + index = (1 << current_level) + i; + // Obtain the index of the current node children. + index_left_child = index * 2; + index_right_child = index_left_child + 1; + + update_result = nodes_[index_left_child]->Update(nodes_[index]->data(), + nodes_[index]->length()); + if (update_result != 0) { + return -1; + } + + update_result = nodes_[index_right_child]->Update( + nodes_[index]->data(), nodes_[index]->length()); + if (update_result != 0) { + return -1; + } + } + } + + return 0; +} + +} // namespace webrtc diff --git a/modules/audio_processing/transient/wpd_tree.h b/modules/audio_processing/transient/wpd_tree.h new file mode 100644 index 0000000..c54220f --- /dev/null +++ b/modules/audio_processing/transient/wpd_tree.h @@ -0,0 +1,92 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_TRANSIENT_WPD_TREE_H_ +#define MODULES_AUDIO_PROCESSING_TRANSIENT_WPD_TREE_H_ + +#include + +#include + +#include "modules/audio_processing/transient/wpd_node.h" + +namespace webrtc { + +// Tree of a Wavelet Packet Decomposition (WPD). +// +// The root node contains all the data provided; for each node in the tree, the +// left child contains the approximation coefficients extracted from the node, +// and the right child contains the detail coefficients. +// It preserves its state, so it can be multiple-called. +// +// The number of nodes in the tree will be 2 ^ levels - 1. +// +// Implementation details: Since the tree always will be a complete binary tree, +// it is implemented using a single linear array instead of managing the +// relationships in each node. For convience is better to use a array that +// starts in 1 (instead of 0). Taking that into account, the following formulas +// apply: +// Root node index: 1. +// Node(Level, Index in that level): 2 ^ Level + (Index in that level). +// Left Child: Current node index * 2. +// Right Child: Current node index * 2 + 1. +// Parent: Current Node Index / 2 (Integer division). +class WPDTree { + public: + // Creates a WPD tree using the data length and coefficients provided. + WPDTree(size_t data_length, + const float* high_pass_coefficients, + const float* low_pass_coefficients, + size_t coefficients_length, + int levels); + ~WPDTree(); + + // Returns the number of nodes at any given level. + static int NumberOfNodesAtLevel(int level) { return 1 << level; } + + // Returns a pointer to the node at the given level and index(of that level). 
+ // Level goes from 0 to levels(). + // Index goes from 0 to the number of NumberOfNodesAtLevel(level) - 1. + // + // You can use the following formulas to get any node within the tree: + // Notation: (Level, Index of node in that level). + // Root node: (0/0). + // Left Child: (Current node level + 1, Current node index * 2). + // Right Child: (Current node level + 1, Current node index * 2 + 1). + // Parent: (Current node level - 1, Current node index / 2) (Integer division) + // + // If level or index are out of bounds the function will return NULL. + WPDNode* NodeAt(int level, int index); + + // Updates all the nodes of the tree with the new data. |data_length| must be + // teh same that was used for the creation of the tree. + // Returns 0 if correct, and -1 otherwise. + int Update(const float* data, size_t data_length); + + // Returns the total number of levels below the root. Root is cosidered level + // 0. + int levels() const { return levels_; } + + // Returns the total number of nodes. + int num_nodes() const { return num_nodes_; } + + // Returns the total number of leaves. + int num_leaves() const { return 1 << levels_; } + + private: + size_t data_length_; + int levels_; + int num_nodes_; + std::unique_ptr[]> nodes_; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_TRANSIENT_WPD_TREE_H_ diff --git a/modules/audio_processing/transient/wpd_tree_unittest.cc b/modules/audio_processing/transient/wpd_tree_unittest.cc new file mode 100644 index 0000000..97d69ae --- /dev/null +++ b/modules/audio_processing/transient/wpd_tree_unittest.cc @@ -0,0 +1,177 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/transient/wpd_tree.h" + +#include +#include + +#include "modules/audio_processing/transient/daubechies_8_wavelet_coeffs.h" +#include "modules/audio_processing/transient/file_utils.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/system/file_wrapper.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" + +namespace webrtc { + +TEST(WPDTreeTest, Construction) { + const size_t kTestBufferSize = 100; + const int kLevels = 5; + const int kExpectedNumberOfNodes = (1 << (kLevels + 1)) - 1; + + float test_buffer[kTestBufferSize]; + memset(test_buffer, 0.f, kTestBufferSize * sizeof(*test_buffer)); + float test_coefficients[] = {1.f, 2.f, 3.f, 4.f, 5.f}; + const size_t kTestCoefficientsLength = + sizeof(test_coefficients) / sizeof(test_coefficients[0]); + WPDTree tree(kTestBufferSize, test_coefficients, test_coefficients, + kTestCoefficientsLength, kLevels); + ASSERT_EQ(kExpectedNumberOfNodes, tree.num_nodes()); + // Checks for NodeAt(level, index). + int nodes_at_level = 0; + for (int level = 0; level <= kLevels; ++level) { + nodes_at_level = 1 << level; + for (int i = 0; i < nodes_at_level; ++i) { + ASSERT_TRUE(NULL != tree.NodeAt(level, i)); + } + // Out of bounds. + EXPECT_EQ(NULL, tree.NodeAt(level, -1)); + EXPECT_EQ(NULL, tree.NodeAt(level, -12)); + EXPECT_EQ(NULL, tree.NodeAt(level, nodes_at_level)); + EXPECT_EQ(NULL, tree.NodeAt(level, nodes_at_level + 5)); + } + // Out of bounds. + EXPECT_EQ(NULL, tree.NodeAt(-1, 0)); + EXPECT_EQ(NULL, tree.NodeAt(-12, 0)); + EXPECT_EQ(NULL, tree.NodeAt(kLevels + 1, 0)); + EXPECT_EQ(NULL, tree.NodeAt(kLevels + 5, 0)); + // Checks for Update(). 
+ EXPECT_EQ(0, tree.Update(test_buffer, kTestBufferSize)); + EXPECT_EQ(-1, tree.Update(NULL, kTestBufferSize)); + EXPECT_EQ(-1, tree.Update(test_buffer, kTestBufferSize - 1)); +} + +// This test is for the correctness of the tree. +// Checks the results from the Matlab equivalent, it is done comparing the +// results that are stored in the output files from Matlab. +// It also writes the results in its own set of files in the out directory. +// Matlab and output files contain all the results in double precision (Little +// endian) appended. +#if defined(WEBRTC_IOS) +TEST(WPDTreeTest, DISABLED_CorrectnessBasedOnMatlabFiles) { +#else +TEST(WPDTreeTest, CorrectnessBasedOnMatlabFiles) { +#endif + // 10 ms at 16000 Hz. + const size_t kTestBufferSize = 160; + const int kLevels = 3; + const int kLeaves = 1 << kLevels; + const size_t kLeavesSamples = kTestBufferSize >> kLevels; + // Create tree with Discrete Meyer Wavelet Coefficients. + WPDTree tree(kTestBufferSize, kDaubechies8HighPassCoefficients, + kDaubechies8LowPassCoefficients, kDaubechies8CoefficientsLength, + kLevels); + // Allocate and open all matlab and out files. + FileWrapper matlab_files_data[kLeaves]; + FileWrapper out_files_data[kLeaves]; + + for (int i = 0; i < kLeaves; ++i) { + // Matlab files. + rtc::StringBuilder matlab_stream; + matlab_stream << "audio_processing/transient/wpd" << i; + std::string matlab_string = test::ResourcePath(matlab_stream.str(), "dat"); + matlab_files_data[i] = FileWrapper::OpenReadOnly(matlab_string.c_str()); + + bool file_opened = matlab_files_data[i].is_open(); + ASSERT_TRUE(file_opened) << "File could not be opened.\n" << matlab_string; + + // Out files. 
+ rtc::StringBuilder out_stream; + out_stream << test::OutputPath() << "wpd_" << i << ".out"; + std::string out_string = out_stream.str(); + + out_files_data[i] = FileWrapper::OpenWriteOnly(out_string.c_str()); + + file_opened = out_files_data[i].is_open(); + ASSERT_TRUE(file_opened) << "File could not be opened.\n" << out_string; + } + + // Prepare the test file. + std::string test_file_name = test::ResourcePath( + "audio_processing/transient/ajm-macbook-1-spke16m", "pcm"); + + FileWrapper test_file = FileWrapper::OpenReadOnly(test_file_name.c_str()); + + bool file_opened = test_file.is_open(); + ASSERT_TRUE(file_opened) << "File could not be opened.\n" << test_file_name; + + float test_buffer[kTestBufferSize]; + + // Only the first frames of the audio file are tested. The matlab files also + // only contains information about the first frames. + const size_t kMaxFramesToTest = 100; + const float kTolerance = 0.03f; + + size_t frames_read = 0; + + // Read first buffer from the PCM test file. + size_t file_samples_read = + ReadInt16FromFileToFloatBuffer(&test_file, kTestBufferSize, test_buffer); + while (file_samples_read > 0 && frames_read < kMaxFramesToTest) { + ++frames_read; + + if (file_samples_read < kTestBufferSize) { + // Pad the rest of the buffer with zeros. + for (size_t i = file_samples_read; i < kTestBufferSize; ++i) { + test_buffer[i] = 0.0; + } + } + tree.Update(test_buffer, kTestBufferSize); + double matlab_buffer[kTestBufferSize]; + + // Compare results with data from the matlab test files. + for (int i = 0; i < kLeaves; ++i) { + // Compare data values + size_t matlab_samples_read = ReadDoubleBufferFromFile( + &matlab_files_data[i], kLeavesSamples, matlab_buffer); + + ASSERT_EQ(kLeavesSamples, matlab_samples_read) + << "Matlab test files are malformed.\n" + "File: 3_" + << i; + // Get output data from the corresponding node + const float* node_data = tree.NodeAt(kLevels, i)->data(); + // Compare with matlab files. 
+ for (size_t j = 0; j < kLeavesSamples; ++j) { + EXPECT_NEAR(matlab_buffer[j], node_data[j], kTolerance) + << "\nLeaf: " << i << "\nSample: " << j + << "\nFrame: " << frames_read - 1; + } + + // Write results to out files. + WriteFloatBufferToFile(&out_files_data[i], kLeavesSamples, node_data); + } + + // Read next buffer from the PCM test file. + file_samples_read = ReadInt16FromFileToFloatBuffer( + &test_file, kTestBufferSize, test_buffer); + } + + // Close all matlab and out files. + for (int i = 0; i < kLeaves; ++i) { + matlab_files_data[i].Close(); + out_files_data[i].Close(); + } + + test_file.Close(); +} + +} // namespace webrtc diff --git a/modules/audio_processing/typing_detection.cc b/modules/audio_processing/typing_detection.cc new file mode 100644 index 0000000..e725b26 --- /dev/null +++ b/modules/audio_processing/typing_detection.cc @@ -0,0 +1,93 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/audio_processing/typing_detection.h" + +namespace webrtc { + +TypingDetection::TypingDetection() + : time_active_(0), + time_since_last_typing_(0), + penalty_counter_(0), + counter_since_last_detection_update_(0), + detection_to_report_(false), + new_detection_to_report_(false), + time_window_(10), + cost_per_typing_(100), + reporting_threshold_(300), + penalty_decay_(1), + type_event_delay_(2), + report_detection_update_period_(1) {} + +TypingDetection::~TypingDetection() {} + +bool TypingDetection::Process(bool key_pressed, bool vad_activity) { + if (vad_activity) + time_active_++; + else + time_active_ = 0; + + // Keep track if time since last typing event + if (key_pressed) + time_since_last_typing_ = 0; + else + ++time_since_last_typing_; + + if (time_since_last_typing_ < type_event_delay_ && vad_activity && + time_active_ < time_window_) { + penalty_counter_ += cost_per_typing_; + if (penalty_counter_ > reporting_threshold_) + new_detection_to_report_ = true; + } + + if (penalty_counter_ > 0) + penalty_counter_ -= penalty_decay_; + + if (++counter_since_last_detection_update_ == + report_detection_update_period_) { + detection_to_report_ = new_detection_to_report_; + new_detection_to_report_ = false; + counter_since_last_detection_update_ = 0; + } + + return detection_to_report_; +} + +int TypingDetection::TimeSinceLastDetectionInSeconds() { + // Round to whole seconds. 
+ return (time_since_last_typing_ + 50) / 100; +} + +void TypingDetection::SetParameters(int time_window, + int cost_per_typing, + int reporting_threshold, + int penalty_decay, + int type_event_delay, + int report_detection_update_period) { + if (time_window) + time_window_ = time_window; + + if (cost_per_typing) + cost_per_typing_ = cost_per_typing; + + if (reporting_threshold) + reporting_threshold_ = reporting_threshold; + + if (penalty_decay) + penalty_decay_ = penalty_decay; + + if (type_event_delay) + type_event_delay_ = type_event_delay; + + if (report_detection_update_period) + report_detection_update_period_ = report_detection_update_period; +} + +} // namespace webrtc diff --git a/modules/audio_processing/typing_detection.h b/modules/audio_processing/typing_detection.h new file mode 100644 index 0000000..d8fb359 --- /dev/null +++ b/modules/audio_processing/typing_detection.h @@ -0,0 +1,92 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_TYPING_DETECTION_H_ +#define MODULES_AUDIO_PROCESSING_TYPING_DETECTION_H_ + +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +class RTC_EXPORT TypingDetection { + public: + TypingDetection(); + virtual ~TypingDetection(); + + // Run the detection algortihm. Shall be called every 10 ms. Returns true if + // typing is detected, or false if not, based on the update period as set with + // SetParameters(). See |report_detection_update_period_| description below. + bool Process(bool key_pressed, bool vad_activity); + + // Gets the time in seconds since the last detection. 
+ int TimeSinceLastDetectionInSeconds(); + + // Sets the algorithm parameters. A parameter value of 0 leaves it unchanged. + // See the correspondning member variables below for descriptions. + void SetParameters(int time_window, + int cost_per_typing, + int reporting_threshold, + int penalty_decay, + int type_event_delay, + int report_detection_update_period); + + private: + int time_active_; + int time_since_last_typing_; + int penalty_counter_; + + // Counter since last time the detection status reported by Process() was + // updated. See also |report_detection_update_period_|. + int counter_since_last_detection_update_; + + // The detection status to report. Updated every + // |report_detection_update_period_| call to Process(). + bool detection_to_report_; + + // What |detection_to_report_| should be set to next time it is updated. + bool new_detection_to_report_; + + // Settable threshold values. + + // Number of 10 ms slots accepted to count as a hit. + int time_window_; + + // Penalty added for a typing + activity coincide. + int cost_per_typing_; + + // Threshold for |penalty_counter_|. + int reporting_threshold_; + + // How much we reduce |penalty_counter_| every 10 ms. + int penalty_decay_; + + // How old typing events we allow. + int type_event_delay_; + + // Settable update period. + + // Number of 10 ms slots between each update of the detection status returned + // by Process(). This inertia added to the algorithm is usually desirable and + // provided so that consumers of the class don't have to implement that + // themselves if they don't wish. + // If set to 1, each call to Process() will return the detection status for + // that 10 ms slot. + // If set to N (where N > 1), the detection status returned from Process() + // will remain the same until Process() has been called N times. Then, if none + // of the last N calls to Process() has detected typing for each respective + // 10 ms slot, Process() will return false. 
If at least one of the last N + // calls has detected typing, Process() will return true. And that returned + // status will then remain the same until the next N calls have been done. + int report_detection_update_period_; +}; + +} // namespace webrtc + +#endif // #ifndef MODULES_AUDIO_PROCESSING_TYPING_DETECTION_H_ diff --git a/modules/audio_processing/utility/BUILD.gn b/modules/audio_processing/utility/BUILD.gn new file mode 100644 index 0000000..437b544 --- /dev/null +++ b/modules/audio_processing/utility/BUILD.gn @@ -0,0 +1,81 @@ +# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("../../../webrtc.gni") + +rtc_library("cascaded_biquad_filter") { + sources = [ + "cascaded_biquad_filter.cc", + "cascaded_biquad_filter.h", + ] + deps = [ + "../../../api:array_view", + "../../../rtc_base:checks", + ] +} + +rtc_library("legacy_delay_estimator") { + sources = [ + "delay_estimator.cc", + "delay_estimator.h", + "delay_estimator_internal.h", + "delay_estimator_wrapper.cc", + "delay_estimator_wrapper.h", + ] + deps = [ "../../../rtc_base:checks" ] +} + +rtc_library("pffft_wrapper") { + visibility = [ "../*" ] + sources = [ + "pffft_wrapper.cc", + "pffft_wrapper.h", + ] + deps = [ + "../../../api:array_view", + "../../../rtc_base:checks", + "//third_party/pffft", + ] +} + +if (rtc_include_tests) { + rtc_library("cascaded_biquad_filter_unittest") { + testonly = true + + sources = [ "cascaded_biquad_filter_unittest.cc" ] + deps = [ + ":cascaded_biquad_filter", + "../../../rtc_base:rtc_base_approved", + "../../../test:test_support", + "//testing/gtest", + ] + } + + rtc_library("legacy_delay_estimator_unittest") { + testonly 
= true + + sources = [ "delay_estimator_unittest.cc" ] + deps = [ + ":legacy_delay_estimator", + "../../../rtc_base:rtc_base_approved", + "../../../test:test_support", + "//testing/gtest", + ] + } + + rtc_library("pffft_wrapper_unittest") { + testonly = true + sources = [ "pffft_wrapper_unittest.cc" ] + deps = [ + ":pffft_wrapper", + "../../../test:test_support", + "//testing/gtest", + "//third_party/pffft", + ] + } +} diff --git a/modules/audio_processing/utility/DEPS b/modules/audio_processing/utility/DEPS new file mode 100644 index 0000000..c72d810 --- /dev/null +++ b/modules/audio_processing/utility/DEPS @@ -0,0 +1,3 @@ +include_rules = [ + "+third_party/pffft", +] diff --git a/modules/audio_processing/utility/cascaded_biquad_filter.cc b/modules/audio_processing/utility/cascaded_biquad_filter.cc new file mode 100644 index 0000000..08b9464 --- /dev/null +++ b/modules/audio_processing/utility/cascaded_biquad_filter.cc @@ -0,0 +1,117 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/audio_processing/utility/cascaded_biquad_filter.h" + +#include + +#include "rtc_base/checks.h" + +namespace webrtc { + +CascadedBiQuadFilter::BiQuadParam::BiQuadParam(std::complex zero, + std::complex pole, + float gain, + bool mirror_zero_along_i_axis) + : zero(zero), + pole(pole), + gain(gain), + mirror_zero_along_i_axis(mirror_zero_along_i_axis) {} + +CascadedBiQuadFilter::BiQuadParam::BiQuadParam(const BiQuadParam&) = default; + +CascadedBiQuadFilter::BiQuad::BiQuad( + const CascadedBiQuadFilter::BiQuadParam& param) + : x(), y() { + float z_r = std::real(param.zero); + float z_i = std::imag(param.zero); + float p_r = std::real(param.pole); + float p_i = std::imag(param.pole); + float gain = param.gain; + + if (param.mirror_zero_along_i_axis) { + // Assuming zeroes at z_r and -z_r. + RTC_DCHECK(z_i == 0.f); + coefficients.b[0] = gain * 1.f; + coefficients.b[1] = 0.f; + coefficients.b[2] = gain * -(z_r * z_r); + } else { + // Assuming zeros at (z_r + z_i*i) and (z_r - z_i*i). + coefficients.b[0] = gain * 1.f; + coefficients.b[1] = gain * -2.f * z_r; + coefficients.b[2] = gain * (z_r * z_r + z_i * z_i); + } + + // Assuming poles at (p_r + p_i*i) and (p_r - p_i*i). 
+ coefficients.a[0] = -2.f * p_r; + coefficients.a[1] = p_r * p_r + p_i * p_i; +} + +void CascadedBiQuadFilter::BiQuad::BiQuad::Reset() { + x[0] = x[1] = y[0] = y[1] = 0.f; +} + +CascadedBiQuadFilter::CascadedBiQuadFilter( + const CascadedBiQuadFilter::BiQuadCoefficients& coefficients, + size_t num_biquads) + : biquads_(num_biquads, BiQuad(coefficients)) {} + +CascadedBiQuadFilter::CascadedBiQuadFilter( + const std::vector& biquad_params) { + for (const auto& param : biquad_params) { + biquads_.push_back(BiQuad(param)); + } +} + +CascadedBiQuadFilter::~CascadedBiQuadFilter() = default; + +void CascadedBiQuadFilter::Process(rtc::ArrayView x, + rtc::ArrayView y) { + if (biquads_.size() > 0) { + ApplyBiQuad(x, y, &biquads_[0]); + for (size_t k = 1; k < biquads_.size(); ++k) { + ApplyBiQuad(y, y, &biquads_[k]); + } + } else { + std::copy(x.begin(), x.end(), y.begin()); + } +} + +void CascadedBiQuadFilter::Process(rtc::ArrayView y) { + for (auto& biquad : biquads_) { + ApplyBiQuad(y, y, &biquad); + } +} + +void CascadedBiQuadFilter::Reset() { + for (auto& biquad : biquads_) { + biquad.Reset(); + } +} + +void CascadedBiQuadFilter::ApplyBiQuad(rtc::ArrayView x, + rtc::ArrayView y, + CascadedBiQuadFilter::BiQuad* biquad) { + RTC_DCHECK_EQ(x.size(), y.size()); + const auto* c_b = biquad->coefficients.b; + const auto* c_a = biquad->coefficients.a; + auto* m_x = biquad->x; + auto* m_y = biquad->y; + for (size_t k = 0; k < x.size(); ++k) { + const float tmp = x[k]; + y[k] = c_b[0] * tmp + c_b[1] * m_x[0] + c_b[2] * m_x[1] - c_a[0] * m_y[0] - + c_a[1] * m_y[1]; + m_x[1] = m_x[0]; + m_x[0] = tmp; + m_y[1] = m_y[0]; + m_y[0] = y[k]; + } +} + +} // namespace webrtc diff --git a/modules/audio_processing/utility/cascaded_biquad_filter.h b/modules/audio_processing/utility/cascaded_biquad_filter.h new file mode 100644 index 0000000..120b52a --- /dev/null +++ b/modules/audio_processing/utility/cascaded_biquad_filter.h @@ -0,0 +1,80 @@ +/* + * Copyright (c) 2016 The WebRTC project 
authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_UTILITY_CASCADED_BIQUAD_FILTER_H_ +#define MODULES_AUDIO_PROCESSING_UTILITY_CASCADED_BIQUAD_FILTER_H_ + +#include + +#include +#include + +#include "api/array_view.h" + +namespace webrtc { + +// Applies a number of biquads in a cascaded manner. The filter implementation +// is direct form 1. +class CascadedBiQuadFilter { + public: + struct BiQuadParam { + BiQuadParam(std::complex zero, + std::complex pole, + float gain, + bool mirror_zero_along_i_axis = false); + explicit BiQuadParam(const BiQuadParam&); + std::complex zero; + std::complex pole; + float gain; + bool mirror_zero_along_i_axis; + }; + + struct BiQuadCoefficients { + float b[3]; + float a[2]; + }; + + struct BiQuad { + explicit BiQuad(const BiQuadCoefficients& coefficients) + : coefficients(coefficients), x(), y() {} + explicit BiQuad(const CascadedBiQuadFilter::BiQuadParam& param); + void Reset(); + BiQuadCoefficients coefficients; + float x[2]; + float y[2]; + }; + + CascadedBiQuadFilter( + const CascadedBiQuadFilter::BiQuadCoefficients& coefficients, + size_t num_biquads); + explicit CascadedBiQuadFilter( + const std::vector& biquad_params); + ~CascadedBiQuadFilter(); + CascadedBiQuadFilter(const CascadedBiQuadFilter&) = delete; + CascadedBiQuadFilter& operator=(const CascadedBiQuadFilter&) = delete; + + // Applies the biquads on the values in x in order to form the output in y. + void Process(rtc::ArrayView x, rtc::ArrayView y); + // Applies the biquads on the values in y in an in-place manner. + void Process(rtc::ArrayView y); + // Resets the filter to its initial state. 
+ void Reset(); + + private: + void ApplyBiQuad(rtc::ArrayView x, + rtc::ArrayView y, + CascadedBiQuadFilter::BiQuad* biquad); + + std::vector biquads_; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_UTILITY_CASCADED_BIQUAD_FILTER_H_ diff --git a/modules/audio_processing/utility/cascaded_biquad_filter_unittest.cc b/modules/audio_processing/utility/cascaded_biquad_filter_unittest.cc new file mode 100644 index 0000000..ff7022d --- /dev/null +++ b/modules/audio_processing/utility/cascaded_biquad_filter_unittest.cc @@ -0,0 +1,157 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/utility/cascaded_biquad_filter.h" + +#include + +#include "test/gtest.h" + +namespace webrtc { + +namespace { + +// Coefficients for a second order Butterworth high-pass filter with cutoff +// frequency 100 Hz. +const CascadedBiQuadFilter::BiQuadCoefficients kHighPassFilterCoefficients = { + {0.97261f, -1.94523f, 0.97261f}, + {-1.94448f, 0.94598f}}; + +const CascadedBiQuadFilter::BiQuadCoefficients kTransparentCoefficients = { + {1.f, 0.f, 0.f}, + {0.f, 0.f}}; + +const CascadedBiQuadFilter::BiQuadCoefficients kBlockingCoefficients = { + {0.f, 0.f, 0.f}, + {0.f, 0.f}}; + +std::vector CreateInputWithIncreasingValues(size_t vector_length) { + std::vector v(vector_length); + for (size_t k = 0; k < v.size(); ++k) { + v[k] = k; + } + return v; +} + +} // namespace + +// Verifies that the filter applies an effect which removes the input signal. +// The test also verifies that the in-place Process API call works as intended. 
+TEST(CascadedBiquadFilter, BlockingConfiguration) { + std::vector values = CreateInputWithIncreasingValues(1000); + + CascadedBiQuadFilter filter(kBlockingCoefficients, 1); + filter.Process(values); + + EXPECT_EQ(std::vector(1000, 0.f), values); +} + +// Verifies that the filter is able to form a zero-mean output from a +// non-zeromean input signal when coefficients for a high-pass filter are +// applied. The test also verifies that the filter works with multiple biquads. +TEST(CascadedBiquadFilter, HighPassConfiguration) { + std::vector values(1000); + for (size_t k = 0; k < values.size(); ++k) { + values[k] = 1.f; + } + + CascadedBiQuadFilter filter(kHighPassFilterCoefficients, 2); + filter.Process(values); + + for (size_t k = values.size() / 2; k < values.size(); ++k) { + EXPECT_NEAR(0.f, values[k], 1e-4); + } +} + +// Verifies that the reset functionality works as intended. +TEST(CascadedBiquadFilter, HighPassConfigurationResetFunctionality) { + CascadedBiQuadFilter filter(kHighPassFilterCoefficients, 2); + + std::vector values1(100, 1.f); + filter.Process(values1); + + filter.Reset(); + + std::vector values2(100, 1.f); + filter.Process(values2); + + for (size_t k = 0; k < values1.size(); ++k) { + EXPECT_EQ(values1[k], values2[k]); + } +} + +// Verifies that the filter is able to produce a transparent effect with no +// impact on the data when the proper coefficients are applied. The test also +// verifies that the non-in-place Process API call works as intended. +TEST(CascadedBiquadFilter, TransparentConfiguration) { + const std::vector input = CreateInputWithIncreasingValues(1000); + std::vector output(input.size()); + + CascadedBiQuadFilter filter(kTransparentCoefficients, 1); + filter.Process(input, output); + + EXPECT_EQ(input, output); +} + +#if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) +// Verifies that the check of the lengths for the input and output works for the +// non-in-place call. 
+TEST(CascadedBiquadFilterDeathTest, InputSizeCheckVerification) { + const std::vector input = CreateInputWithIncreasingValues(10); + std::vector output(input.size() - 1); + + CascadedBiQuadFilter filter(kTransparentCoefficients, 1); + EXPECT_DEATH(filter.Process(input, output), ""); +} +#endif + +// Verifies the conversion from zero, pole, gain to filter coefficients for +// lowpass filter. +TEST(CascadedBiquadFilter, BiQuadParamLowPass) { + CascadedBiQuadFilter::BiQuadParam param( + {-1.0f, 0.0f}, {0.23146901f, 0.39514232f}, 0.1866943331163784f); + CascadedBiQuadFilter::BiQuad filter(param); + const float epsilon = 1e-6f; + EXPECT_NEAR(filter.coefficients.b[0], 0.18669433f, epsilon); + EXPECT_NEAR(filter.coefficients.b[1], 0.37338867f, epsilon); + EXPECT_NEAR(filter.coefficients.b[2], 0.18669433f, epsilon); + EXPECT_NEAR(filter.coefficients.a[0], -0.46293803f, epsilon); + EXPECT_NEAR(filter.coefficients.a[1], 0.20971536f, epsilon); +} + +// Verifies the conversion from zero, pole, gain to filter coefficients for +// highpass filter. +TEST(CascadedBiquadFilter, BiQuadParamHighPass) { + CascadedBiQuadFilter::BiQuadParam param( + {1.0f, 0.0f}, {0.72712179f, 0.21296904f}, 0.75707637533388494f); + CascadedBiQuadFilter::BiQuad filter(param); + const float epsilon = 1e-6f; + EXPECT_NEAR(filter.coefficients.b[0], 0.75707638f, epsilon); + EXPECT_NEAR(filter.coefficients.b[1], -1.51415275f, epsilon); + EXPECT_NEAR(filter.coefficients.b[2], 0.75707638f, epsilon); + EXPECT_NEAR(filter.coefficients.a[0], -1.45424359f, epsilon); + EXPECT_NEAR(filter.coefficients.a[1], 0.57406192f, epsilon); +} + +// Verifies the conversion from zero, pole, gain to filter coefficients for +// bandpass filter. 
+TEST(CascadedBiquadFilter, BiQuadParamBandPass) { + CascadedBiQuadFilter::BiQuadParam param( + {1.0f, 0.0f}, {1.11022302e-16f, 0.71381051f}, 0.2452372752527856f, true); + CascadedBiQuadFilter::BiQuad filter(param); + const float epsilon = 1e-6f; + EXPECT_NEAR(filter.coefficients.b[0], 0.24523728f, epsilon); + EXPECT_NEAR(filter.coefficients.b[1], 0.f, epsilon); + EXPECT_NEAR(filter.coefficients.b[2], -0.24523728f, epsilon); + EXPECT_NEAR(filter.coefficients.a[0], -2.22044605e-16f, epsilon); + EXPECT_NEAR(filter.coefficients.a[1], 5.09525449e-01f, epsilon); +} + +} // namespace webrtc diff --git a/modules/audio_processing/utility/delay_estimator.cc b/modules/audio_processing/utility/delay_estimator.cc new file mode 100644 index 0000000..73c70b0 --- /dev/null +++ b/modules/audio_processing/utility/delay_estimator.cc @@ -0,0 +1,708 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/utility/delay_estimator.h" + +#include +#include + +#include + +#include "rtc_base/checks.h" + +namespace webrtc { + +namespace { + +// Number of right shifts for scaling is linearly depending on number of bits in +// the far-end binary spectrum. +static const int kShiftsAtZero = 13; // Right shifts at zero binary spectrum. +static const int kShiftsLinearSlope = 3; + +static const int32_t kProbabilityOffset = 1024; // 2 in Q9. +static const int32_t kProbabilityLowerLimit = 8704; // 17 in Q9. +static const int32_t kProbabilityMinSpread = 2816; // 5.5 in Q9. 
+ +// Robust validation settings +static const float kHistogramMax = 3000.f; +static const float kLastHistogramMax = 250.f; +static const float kMinHistogramThreshold = 1.5f; +static const int kMinRequiredHits = 10; +static const int kMaxHitsWhenPossiblyNonCausal = 10; +static const int kMaxHitsWhenPossiblyCausal = 1000; +static const float kQ14Scaling = 1.f / (1 << 14); // Scaling by 2^14 to get Q0. +static const float kFractionSlope = 0.05f; +static const float kMinFractionWhenPossiblyCausal = 0.5f; +static const float kMinFractionWhenPossiblyNonCausal = 0.25f; + +} // namespace + +// Counts and returns number of bits of a 32-bit word. +static int BitCount(uint32_t u32) { + uint32_t tmp = + u32 - ((u32 >> 1) & 033333333333) - ((u32 >> 2) & 011111111111); + tmp = ((tmp + (tmp >> 3)) & 030707070707); + tmp = (tmp + (tmp >> 6)); + tmp = (tmp + (tmp >> 12) + (tmp >> 24)) & 077; + + return ((int)tmp); +} + +// Compares the |binary_vector| with all rows of the |binary_matrix| and counts +// per row the number of times they have the same value. +// +// Inputs: +// - binary_vector : binary "vector" stored in a long +// - binary_matrix : binary "matrix" stored as a vector of long +// - matrix_size : size of binary "matrix" +// +// Output: +// - bit_counts : "Vector" stored as a long, containing for each +// row the number of times the matrix row and the +// input vector have the same value +// +static void BitCountComparison(uint32_t binary_vector, + const uint32_t* binary_matrix, + int matrix_size, + int32_t* bit_counts) { + int n = 0; + + // Compare |binary_vector| with all rows of the |binary_matrix| + for (; n < matrix_size; n++) { + bit_counts[n] = (int32_t)BitCount(binary_vector ^ binary_matrix[n]); + } +} + +// Collects necessary statistics for the HistogramBasedValidation(). This +// function has to be called prior to calling HistogramBasedValidation(). The +// statistics updated and used by the HistogramBasedValidation() are: +// 1. 
the number of |candidate_hits|, which states for how long we have had the +// same |candidate_delay| +// 2. the |histogram| of candidate delays over time. This histogram is +// weighted with respect to a reliability measure and time-varying to cope +// with possible delay shifts. +// For further description see commented code. +// +// Inputs: +// - candidate_delay : The delay to validate. +// - valley_depth_q14 : The cost function has a valley/minimum at the +// |candidate_delay| location. |valley_depth_q14| is the +// cost function difference between the minimum and +// maximum locations. The value is in the Q14 domain. +// - valley_level_q14 : Is the cost function value at the minimum, in Q14. +static void UpdateRobustValidationStatistics(BinaryDelayEstimator* self, + int candidate_delay, + int32_t valley_depth_q14, + int32_t valley_level_q14) { + const float valley_depth = valley_depth_q14 * kQ14Scaling; + float decrease_in_last_set = valley_depth; + const int max_hits_for_slow_change = (candidate_delay < self->last_delay) + ? kMaxHitsWhenPossiblyNonCausal + : kMaxHitsWhenPossiblyCausal; + int i = 0; + + RTC_DCHECK_EQ(self->history_size, self->farend->history_size); + // Reset |candidate_hits| if we have a new candidate. + if (candidate_delay != self->last_candidate_delay) { + self->candidate_hits = 0; + self->last_candidate_delay = candidate_delay; + } + self->candidate_hits++; + + // The |histogram| is updated differently across the bins. + // 1. The |candidate_delay| histogram bin is increased with the + // |valley_depth|, which is a simple measure of how reliable the + // |candidate_delay| is. The histogram is not increased above + // |kHistogramMax|. + self->histogram[candidate_delay] += valley_depth; + if (self->histogram[candidate_delay] > kHistogramMax) { + self->histogram[candidate_delay] = kHistogramMax; + } + // 2. The histogram bins in the neighborhood of |candidate_delay| are + // unaffected. The neighborhood is defined as x + {-2, -1, 0, 1}. 
+ // 3. The histogram bins in the neighborhood of |last_delay| are decreased + // with |decrease_in_last_set|. This value equals the difference between + // the cost function values at the locations |candidate_delay| and + // |last_delay| until we reach |max_hits_for_slow_change| consecutive hits + // at the |candidate_delay|. If we exceed this amount of hits the + // |candidate_delay| is a "potential" candidate and we start decreasing + // these histogram bins more rapidly with |valley_depth|. + if (self->candidate_hits < max_hits_for_slow_change) { + decrease_in_last_set = + (self->mean_bit_counts[self->compare_delay] - valley_level_q14) * + kQ14Scaling; + } + // 4. All other bins are decreased with |valley_depth|. + // TODO(bjornv): Investigate how to make this loop more efficient. Split up + // the loop? Remove parts that doesn't add too much. + for (i = 0; i < self->history_size; ++i) { + int is_in_last_set = (i >= self->last_delay - 2) && + (i <= self->last_delay + 1) && (i != candidate_delay); + int is_in_candidate_set = + (i >= candidate_delay - 2) && (i <= candidate_delay + 1); + self->histogram[i] -= + decrease_in_last_set * is_in_last_set + + valley_depth * (!is_in_last_set && !is_in_candidate_set); + // 5. No histogram bin can go below 0. + if (self->histogram[i] < 0) { + self->histogram[i] = 0; + } + } +} + +// Validates the |candidate_delay|, estimated in WebRtc_ProcessBinarySpectrum(), +// based on a mix of counting concurring hits with a modified histogram +// of recent delay estimates. In brief a candidate is valid (returns 1) if it +// is the most likely according to the histogram. There are a couple of +// exceptions that are worth mentioning: +// 1. If the |candidate_delay| < |last_delay| it can be that we are in a +// non-causal state, breaking a possible echo control algorithm. Hence, we +// open up for a quicker change by allowing the change even if the +// |candidate_delay| is not the most likely one according to the histogram. +// 2. 
There's a minimum number of hits (kMinRequiredHits) and the histogram +// value has to reached a minimum (kMinHistogramThreshold) to be valid. +// 3. The action is also depending on the filter length used for echo control. +// If the delay difference is larger than what the filter can capture, we +// also move quicker towards a change. +// For further description see commented code. +// +// Input: +// - candidate_delay : The delay to validate. +// +// Return value: +// - is_histogram_valid : 1 - The |candidate_delay| is valid. +// 0 - Otherwise. +static int HistogramBasedValidation(const BinaryDelayEstimator* self, + int candidate_delay) { + float fraction = 1.f; + float histogram_threshold = self->histogram[self->compare_delay]; + const int delay_difference = candidate_delay - self->last_delay; + int is_histogram_valid = 0; + + // The histogram based validation of |candidate_delay| is done by comparing + // the |histogram| at bin |candidate_delay| with a |histogram_threshold|. + // This |histogram_threshold| equals a |fraction| of the |histogram| at bin + // |last_delay|. The |fraction| is a piecewise linear function of the + // |delay_difference| between the |candidate_delay| and the |last_delay| + // allowing for a quicker move if + // i) a potential echo control filter can not handle these large differences. + // ii) keeping |last_delay| instead of updating to |candidate_delay| could + // force an echo control into a non-causal state. + // We further require the histogram to have reached a minimum value of + // |kMinHistogramThreshold|. In addition, we also require the number of + // |candidate_hits| to be more than |kMinRequiredHits| to remove spurious + // values. + + // Calculate a comparison histogram value (|histogram_threshold|) that is + // depending on the distance between the |candidate_delay| and |last_delay|. + // TODO(bjornv): How much can we gain by turning the fraction calculation + // into tables? 
+ if (delay_difference > self->allowed_offset) { + fraction = 1.f - kFractionSlope * (delay_difference - self->allowed_offset); + fraction = (fraction > kMinFractionWhenPossiblyCausal + ? fraction + : kMinFractionWhenPossiblyCausal); + } else if (delay_difference < 0) { + fraction = + kMinFractionWhenPossiblyNonCausal - kFractionSlope * delay_difference; + fraction = (fraction > 1.f ? 1.f : fraction); + } + histogram_threshold *= fraction; + histogram_threshold = + (histogram_threshold > kMinHistogramThreshold ? histogram_threshold + : kMinHistogramThreshold); + + is_histogram_valid = + (self->histogram[candidate_delay] >= histogram_threshold) && + (self->candidate_hits > kMinRequiredHits); + + return is_histogram_valid; +} + +// Performs a robust validation of the |candidate_delay| estimated in +// WebRtc_ProcessBinarySpectrum(). The algorithm takes the +// |is_instantaneous_valid| and the |is_histogram_valid| and combines them +// into a robust validation. The HistogramBasedValidation() has to be called +// prior to this call. +// For further description on how the combination is done, see commented code. +// +// Inputs: +// - candidate_delay : The delay to validate. +// - is_instantaneous_valid : The instantaneous validation performed in +// WebRtc_ProcessBinarySpectrum(). +// - is_histogram_valid : The histogram based validation. +// +// Return value: +// - is_robust : 1 - The candidate_delay is valid according to a +// combination of the two inputs. +// : 0 - Otherwise. +static int RobustValidation(const BinaryDelayEstimator* self, + int candidate_delay, + int is_instantaneous_valid, + int is_histogram_valid) { + int is_robust = 0; + + // The final robust validation is based on the two algorithms; 1) the + // |is_instantaneous_valid| and 2) the histogram based with result stored in + // |is_histogram_valid|. 
+ // i) Before we actually have a valid estimate (|last_delay| == -2), we say + // a candidate is valid if either algorithm states so + // (|is_instantaneous_valid| OR |is_histogram_valid|). + is_robust = + (self->last_delay < 0) && (is_instantaneous_valid || is_histogram_valid); + // ii) Otherwise, we need both algorithms to be certain + // (|is_instantaneous_valid| AND |is_histogram_valid|) + is_robust |= is_instantaneous_valid && is_histogram_valid; + // iii) With one exception, i.e., the histogram based algorithm can overrule + // the instantaneous one if |is_histogram_valid| = 1 and the histogram + // is significantly strong. + is_robust |= is_histogram_valid && + (self->histogram[candidate_delay] > self->last_delay_histogram); + + return is_robust; +} + +void WebRtc_FreeBinaryDelayEstimatorFarend(BinaryDelayEstimatorFarend* self) { + if (self == NULL) { + return; + } + + free(self->binary_far_history); + self->binary_far_history = NULL; + + free(self->far_bit_counts); + self->far_bit_counts = NULL; + + free(self); +} + +BinaryDelayEstimatorFarend* WebRtc_CreateBinaryDelayEstimatorFarend( + int history_size) { + BinaryDelayEstimatorFarend* self = NULL; + + if (history_size > 1) { + // Sanity conditions fulfilled. + self = static_cast( + malloc(sizeof(BinaryDelayEstimatorFarend))); + } + if (self == NULL) { + return NULL; + } + + self->history_size = 0; + self->binary_far_history = NULL; + self->far_bit_counts = NULL; + if (WebRtc_AllocateFarendBufferMemory(self, history_size) == 0) { + WebRtc_FreeBinaryDelayEstimatorFarend(self); + self = NULL; + } + return self; +} + +int WebRtc_AllocateFarendBufferMemory(BinaryDelayEstimatorFarend* self, + int history_size) { + RTC_DCHECK(self); + // (Re-)Allocate memory for history buffers. 
+ self->binary_far_history = static_cast( + realloc(self->binary_far_history, + history_size * sizeof(*self->binary_far_history))); + self->far_bit_counts = static_cast(realloc( + self->far_bit_counts, history_size * sizeof(*self->far_bit_counts))); + if ((self->binary_far_history == NULL) || (self->far_bit_counts == NULL)) { + history_size = 0; + } + // Fill with zeros if we have expanded the buffers. + if (history_size > self->history_size) { + int size_diff = history_size - self->history_size; + memset(&self->binary_far_history[self->history_size], 0, + sizeof(*self->binary_far_history) * size_diff); + memset(&self->far_bit_counts[self->history_size], 0, + sizeof(*self->far_bit_counts) * size_diff); + } + self->history_size = history_size; + + return self->history_size; +} + +void WebRtc_InitBinaryDelayEstimatorFarend(BinaryDelayEstimatorFarend* self) { + RTC_DCHECK(self); + memset(self->binary_far_history, 0, sizeof(uint32_t) * self->history_size); + memset(self->far_bit_counts, 0, sizeof(int) * self->history_size); +} + +void WebRtc_SoftResetBinaryDelayEstimatorFarend( + BinaryDelayEstimatorFarend* self, + int delay_shift) { + int abs_shift = abs(delay_shift); + int shift_size = 0; + int dest_index = 0; + int src_index = 0; + int padding_index = 0; + + RTC_DCHECK(self); + shift_size = self->history_size - abs_shift; + RTC_DCHECK_GT(shift_size, 0); + if (delay_shift == 0) { + return; + } else if (delay_shift > 0) { + dest_index = abs_shift; + } else if (delay_shift < 0) { + src_index = abs_shift; + padding_index = shift_size; + } + + // Shift and zero pad buffers. 
+ memmove(&self->binary_far_history[dest_index], + &self->binary_far_history[src_index], + sizeof(*self->binary_far_history) * shift_size); + memset(&self->binary_far_history[padding_index], 0, + sizeof(*self->binary_far_history) * abs_shift); + memmove(&self->far_bit_counts[dest_index], &self->far_bit_counts[src_index], + sizeof(*self->far_bit_counts) * shift_size); + memset(&self->far_bit_counts[padding_index], 0, + sizeof(*self->far_bit_counts) * abs_shift); +} + +void WebRtc_AddBinaryFarSpectrum(BinaryDelayEstimatorFarend* handle, + uint32_t binary_far_spectrum) { + RTC_DCHECK(handle); + // Shift binary spectrum history and insert current |binary_far_spectrum|. + memmove(&(handle->binary_far_history[1]), &(handle->binary_far_history[0]), + (handle->history_size - 1) * sizeof(uint32_t)); + handle->binary_far_history[0] = binary_far_spectrum; + + // Shift history of far-end binary spectrum bit counts and insert bit count + // of current |binary_far_spectrum|. + memmove(&(handle->far_bit_counts[1]), &(handle->far_bit_counts[0]), + (handle->history_size - 1) * sizeof(int)); + handle->far_bit_counts[0] = BitCount(binary_far_spectrum); +} + +void WebRtc_FreeBinaryDelayEstimator(BinaryDelayEstimator* self) { + if (self == NULL) { + return; + } + + free(self->mean_bit_counts); + self->mean_bit_counts = NULL; + + free(self->bit_counts); + self->bit_counts = NULL; + + free(self->binary_near_history); + self->binary_near_history = NULL; + + free(self->histogram); + self->histogram = NULL; + + // BinaryDelayEstimator does not have ownership of |farend|, hence we do not + // free the memory here. That should be handled separately by the user. + self->farend = NULL; + + free(self); +} + +BinaryDelayEstimator* WebRtc_CreateBinaryDelayEstimator( + BinaryDelayEstimatorFarend* farend, + int max_lookahead) { + BinaryDelayEstimator* self = NULL; + + if ((farend != NULL) && (max_lookahead >= 0)) { + // Sanity conditions fulfilled. 
+ self = static_cast( + malloc(sizeof(BinaryDelayEstimator))); + } + if (self == NULL) { + return NULL; + } + + self->farend = farend; + self->near_history_size = max_lookahead + 1; + self->history_size = 0; + self->robust_validation_enabled = 0; // Disabled by default. + self->allowed_offset = 0; + + self->lookahead = max_lookahead; + + // Allocate memory for spectrum and history buffers. + self->mean_bit_counts = NULL; + self->bit_counts = NULL; + self->histogram = NULL; + self->binary_near_history = static_cast( + malloc((max_lookahead + 1) * sizeof(*self->binary_near_history))); + if (self->binary_near_history == NULL || + WebRtc_AllocateHistoryBufferMemory(self, farend->history_size) == 0) { + WebRtc_FreeBinaryDelayEstimator(self); + self = NULL; + } + + return self; +} + +int WebRtc_AllocateHistoryBufferMemory(BinaryDelayEstimator* self, + int history_size) { + BinaryDelayEstimatorFarend* far = self->farend; + // (Re-)Allocate memory for spectrum and history buffers. + if (history_size != far->history_size) { + // Only update far-end buffers if we need. + history_size = WebRtc_AllocateFarendBufferMemory(far, history_size); + } + // The extra array element in |mean_bit_counts| and |histogram| is a dummy + // element only used while |last_delay| == -2, i.e., before we have a valid + // estimate. + self->mean_bit_counts = static_cast( + realloc(self->mean_bit_counts, + (history_size + 1) * sizeof(*self->mean_bit_counts))); + self->bit_counts = static_cast( + realloc(self->bit_counts, history_size * sizeof(*self->bit_counts))); + self->histogram = static_cast( + realloc(self->histogram, (history_size + 1) * sizeof(*self->histogram))); + + if ((self->mean_bit_counts == NULL) || (self->bit_counts == NULL) || + (self->histogram == NULL)) { + history_size = 0; + } + // Fill with zeros if we have expanded the buffers. 
+ if (history_size > self->history_size) { + int size_diff = history_size - self->history_size; + memset(&self->mean_bit_counts[self->history_size], 0, + sizeof(*self->mean_bit_counts) * size_diff); + memset(&self->bit_counts[self->history_size], 0, + sizeof(*self->bit_counts) * size_diff); + memset(&self->histogram[self->history_size], 0, + sizeof(*self->histogram) * size_diff); + } + self->history_size = history_size; + + return self->history_size; +} + +void WebRtc_InitBinaryDelayEstimator(BinaryDelayEstimator* self) { + int i = 0; + RTC_DCHECK(self); + + memset(self->bit_counts, 0, sizeof(int32_t) * self->history_size); + memset(self->binary_near_history, 0, + sizeof(uint32_t) * self->near_history_size); + for (i = 0; i <= self->history_size; ++i) { + self->mean_bit_counts[i] = (20 << 9); // 20 in Q9. + self->histogram[i] = 0.f; + } + self->minimum_probability = kMaxBitCountsQ9; // 32 in Q9. + self->last_delay_probability = (int)kMaxBitCountsQ9; // 32 in Q9. + + // Default return value if we're unable to estimate. -1 is used for errors. 
+ self->last_delay = -2; + + self->last_candidate_delay = -2; + self->compare_delay = self->history_size; + self->candidate_hits = 0; + self->last_delay_histogram = 0.f; +} + +int WebRtc_SoftResetBinaryDelayEstimator(BinaryDelayEstimator* self, + int delay_shift) { + int lookahead = 0; + RTC_DCHECK(self); + lookahead = self->lookahead; + self->lookahead -= delay_shift; + if (self->lookahead < 0) { + self->lookahead = 0; + } + if (self->lookahead > self->near_history_size - 1) { + self->lookahead = self->near_history_size - 1; + } + return lookahead - self->lookahead; +} + +int WebRtc_ProcessBinarySpectrum(BinaryDelayEstimator* self, + uint32_t binary_near_spectrum) { + int i = 0; + int candidate_delay = -1; + int valid_candidate = 0; + + int32_t value_best_candidate = kMaxBitCountsQ9; + int32_t value_worst_candidate = 0; + int32_t valley_depth = 0; + + RTC_DCHECK(self); + if (self->farend->history_size != self->history_size) { + // Non matching history sizes. + return -1; + } + if (self->near_history_size > 1) { + // If we apply lookahead, shift near-end binary spectrum history. Insert + // current |binary_near_spectrum| and pull out the delayed one. + memmove(&(self->binary_near_history[1]), &(self->binary_near_history[0]), + (self->near_history_size - 1) * sizeof(uint32_t)); + self->binary_near_history[0] = binary_near_spectrum; + binary_near_spectrum = self->binary_near_history[self->lookahead]; + } + + // Compare with delayed spectra and store the |bit_counts| for each delay. + BitCountComparison(binary_near_spectrum, self->farend->binary_far_history, + self->history_size, self->bit_counts); + + // Update |mean_bit_counts|, which is the smoothed version of |bit_counts|. + for (i = 0; i < self->history_size; i++) { + // |bit_counts| is constrained to [0, 32], meaning we can smooth with a + // factor up to 2^26. We use Q9. + int32_t bit_count = (self->bit_counts[i] << 9); // Q9. 
+ + // Update |mean_bit_counts| only when far-end signal has something to + // contribute. If |far_bit_counts| is zero the far-end signal is weak and + // we likely have a poor echo condition, hence don't update. + if (self->farend->far_bit_counts[i] > 0) { + // Make number of right shifts piecewise linear w.r.t. |far_bit_counts|. + int shifts = kShiftsAtZero; + shifts -= (kShiftsLinearSlope * self->farend->far_bit_counts[i]) >> 4; + WebRtc_MeanEstimatorFix(bit_count, shifts, &(self->mean_bit_counts[i])); + } + } + + // Find |candidate_delay|, |value_best_candidate| and |value_worst_candidate| + // of |mean_bit_counts|. + for (i = 0; i < self->history_size; i++) { + if (self->mean_bit_counts[i] < value_best_candidate) { + value_best_candidate = self->mean_bit_counts[i]; + candidate_delay = i; + } + if (self->mean_bit_counts[i] > value_worst_candidate) { + value_worst_candidate = self->mean_bit_counts[i]; + } + } + valley_depth = value_worst_candidate - value_best_candidate; + + // The |value_best_candidate| is a good indicator on the probability of + // |candidate_delay| being an accurate delay (a small |value_best_candidate| + // means a good binary match). In the following sections we make a decision + // whether to update |last_delay| or not. + // 1) If the difference bit counts between the best and the worst delay + // candidates is too small we consider the situation to be unreliable and + // don't update |last_delay|. + // 2) If the situation is reliable we update |last_delay| if the value of the + // best candidate delay has a value less than + // i) an adaptive threshold |minimum_probability|, or + // ii) this corresponding value |last_delay_probability|, but updated at + // this time instant. + + // Update |minimum_probability|. + if ((self->minimum_probability > kProbabilityLowerLimit) && + (valley_depth > kProbabilityMinSpread)) { + // The "hard" threshold can't be lower than 17 (in Q9). 
+ // The valley in the curve also has to be distinct, i.e., the + // difference between |value_worst_candidate| and |value_best_candidate| has + // to be large enough. + int32_t threshold = value_best_candidate + kProbabilityOffset; + if (threshold < kProbabilityLowerLimit) { + threshold = kProbabilityLowerLimit; + } + if (self->minimum_probability > threshold) { + self->minimum_probability = threshold; + } + } + // Update |last_delay_probability|. + // We use a Markov type model, i.e., a slowly increasing level over time. + self->last_delay_probability++; + // Validate |candidate_delay|. We have a reliable instantaneous delay + // estimate if + // 1) The valley is distinct enough (|valley_depth| > |kProbabilityOffset|) + // and + // 2) The depth of the valley is deep enough + // (|value_best_candidate| < |minimum_probability|) + // and deeper than the best estimate so far + // (|value_best_candidate| < |last_delay_probability|) + valid_candidate = ((valley_depth > kProbabilityOffset) && + ((value_best_candidate < self->minimum_probability) || + (value_best_candidate < self->last_delay_probability))); + + // Check for nonstationary farend signal. + const bool non_stationary_farend = + std::any_of(self->farend->far_bit_counts, + self->farend->far_bit_counts + self->history_size, + [](int a) { return a > 0; }); + + if (non_stationary_farend) { + // Only update the validation statistics when the farend is nonstationary + // as the underlying estimates are otherwise frozen. + UpdateRobustValidationStatistics(self, candidate_delay, valley_depth, + value_best_candidate); + } + + if (self->robust_validation_enabled) { + int is_histogram_valid = HistogramBasedValidation(self, candidate_delay); + valid_candidate = RobustValidation(self, candidate_delay, valid_candidate, + is_histogram_valid); + } + + // Only update the delay estimate when the farend is nonstationary and when + // a valid delay candidate is available. 
+ if (non_stationary_farend && valid_candidate) { + if (candidate_delay != self->last_delay) { + self->last_delay_histogram = + (self->histogram[candidate_delay] > kLastHistogramMax + ? kLastHistogramMax + : self->histogram[candidate_delay]); + // Adjust the histogram if we made a change to |last_delay|, though it was + // not the most likely one according to the histogram. + if (self->histogram[candidate_delay] < + self->histogram[self->compare_delay]) { + self->histogram[self->compare_delay] = self->histogram[candidate_delay]; + } + } + self->last_delay = candidate_delay; + if (value_best_candidate < self->last_delay_probability) { + self->last_delay_probability = value_best_candidate; + } + self->compare_delay = self->last_delay; + } + + return self->last_delay; +} + +int WebRtc_binary_last_delay(BinaryDelayEstimator* self) { + RTC_DCHECK(self); + return self->last_delay; +} + +float WebRtc_binary_last_delay_quality(BinaryDelayEstimator* self) { + float quality = 0; + RTC_DCHECK(self); + + if (self->robust_validation_enabled) { + // Simply a linear function of the histogram height at delay estimate. + quality = self->histogram[self->compare_delay] / kHistogramMax; + } else { + // Note that |last_delay_probability| states how deep the minimum of the + // cost function is, so it is rather an error probability. 
+ quality = (float)(kMaxBitCountsQ9 - self->last_delay_probability) / + kMaxBitCountsQ9; + if (quality < 0) { + quality = 0; + } + } + return quality; +} + +void WebRtc_MeanEstimatorFix(int32_t new_value, + int factor, + int32_t* mean_value) { + int32_t diff = new_value - *mean_value; + + // mean_new = mean_value + ((new_value - mean_value) >> factor); + if (diff < 0) { + diff = -((-diff) >> factor); + } else { + diff = (diff >> factor); + } + *mean_value += diff; +} + +} // namespace webrtc diff --git a/modules/audio_processing/utility/delay_estimator.h b/modules/audio_processing/utility/delay_estimator.h new file mode 100644 index 0000000..df281bc --- /dev/null +++ b/modules/audio_processing/utility/delay_estimator.h @@ -0,0 +1,257 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// Performs delay estimation on binary converted spectra. +// The return value is 0 - OK and -1 - Error, unless otherwise stated. + +#ifndef MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_H_ +#define MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_H_ + +#include + +namespace webrtc { + +static const int32_t kMaxBitCountsQ9 = (32 << 9); // 32 matching bits in Q9. + +typedef struct { + // Pointer to bit counts. + int* far_bit_counts; + // Binary history variables. + uint32_t* binary_far_history; + int history_size; +} BinaryDelayEstimatorFarend; + +typedef struct { + // Pointer to bit counts. + int32_t* mean_bit_counts; + // Array only used locally in ProcessBinarySpectrum() but whose size is + // determined at run-time. + int32_t* bit_counts; + + // Binary history variables. 
+ uint32_t* binary_near_history; + int near_history_size; + int history_size; + + // Delay estimation variables. + int32_t minimum_probability; + int last_delay_probability; + + // Delay memory. + int last_delay; + + // Robust validation + int robust_validation_enabled; + int allowed_offset; + int last_candidate_delay; + int compare_delay; + int candidate_hits; + float* histogram; + float last_delay_histogram; + + // For dynamically changing the lookahead when using SoftReset...(). + int lookahead; + + // Far-end binary spectrum history buffer etc. + BinaryDelayEstimatorFarend* farend; +} BinaryDelayEstimator; + +// Releases the memory allocated by +// WebRtc_CreateBinaryDelayEstimatorFarend(...). +// Input: +// - self : Pointer to the binary delay estimation far-end +// instance which is the return value of +// WebRtc_CreateBinaryDelayEstimatorFarend(). +// +void WebRtc_FreeBinaryDelayEstimatorFarend(BinaryDelayEstimatorFarend* self); + +// Allocates the memory needed by the far-end part of the binary delay +// estimation. The memory needs to be initialized separately through +// WebRtc_InitBinaryDelayEstimatorFarend(...). +// +// Inputs: +// - history_size : Size of the far-end binary spectrum history. +// +// Return value: +// - BinaryDelayEstimatorFarend* +// : Created |handle|. If the memory can't be allocated +// or if any of the input parameters are invalid NULL +// is returned. +// +BinaryDelayEstimatorFarend* WebRtc_CreateBinaryDelayEstimatorFarend( + int history_size); + +// Re-allocates the buffers. +// +// Inputs: +// - self : Pointer to the binary estimation far-end instance +// which is the return value of +// WebRtc_CreateBinaryDelayEstimatorFarend(). +// - history_size : Size of the far-end binary spectrum history. +// +// Return value: +// - history_size : The history size allocated. 
+int WebRtc_AllocateFarendBufferMemory(BinaryDelayEstimatorFarend* self, + int history_size); + +// Initializes the delay estimation far-end instance created with +// WebRtc_CreateBinaryDelayEstimatorFarend(...). +// +// Input: +// - self : Pointer to the delay estimation far-end instance. +// +// Output: +// - self : Initialized far-end instance. +// +void WebRtc_InitBinaryDelayEstimatorFarend(BinaryDelayEstimatorFarend* self); + +// Soft resets the delay estimation far-end instance created with +// WebRtc_CreateBinaryDelayEstimatorFarend(...). +// +// Input: +// - delay_shift : The amount of blocks to shift history buffers. +// +void WebRtc_SoftResetBinaryDelayEstimatorFarend( + BinaryDelayEstimatorFarend* self, + int delay_shift); + +// Adds the binary far-end spectrum to the internal far-end history buffer. This +// spectrum is used as reference when calculating the delay using +// WebRtc_ProcessBinarySpectrum(). +// +// Inputs: +// - self : Pointer to the delay estimation far-end +// instance. +// - binary_far_spectrum : Far-end binary spectrum. +// +// Output: +// - self : Updated far-end instance. +// +void WebRtc_AddBinaryFarSpectrum(BinaryDelayEstimatorFarend* self, + uint32_t binary_far_spectrum); + +// Releases the memory allocated by WebRtc_CreateBinaryDelayEstimator(...). +// +// Note that BinaryDelayEstimator utilizes BinaryDelayEstimatorFarend, but does +// not take ownership of it, hence the BinaryDelayEstimator has to be torn down +// before the far-end. +// +// Input: +// - self : Pointer to the binary delay estimation instance +// which is the return value of +// WebRtc_CreateBinaryDelayEstimator(). +// +void WebRtc_FreeBinaryDelayEstimator(BinaryDelayEstimator* self); + +// Allocates the memory needed by the binary delay estimation. The memory needs +// to be initialized separately through WebRtc_InitBinaryDelayEstimator(...). +// +// See WebRtc_CreateDelayEstimator(..) in delay_estimator_wrapper.c for detailed +// description. 
+BinaryDelayEstimator* WebRtc_CreateBinaryDelayEstimator( + BinaryDelayEstimatorFarend* farend, + int max_lookahead); + +// Re-allocates |history_size| dependent buffers. The far-end buffers will be +// updated at the same time if needed. +// +// Input: +// - self : Pointer to the binary estimation instance which is +// the return value of +// WebRtc_CreateBinaryDelayEstimator(). +// - history_size : Size of the history buffers. +// +// Return value: +// - history_size : The history size allocated. +int WebRtc_AllocateHistoryBufferMemory(BinaryDelayEstimator* self, + int history_size); + +// Initializes the delay estimation instance created with +// WebRtc_CreateBinaryDelayEstimator(...). +// +// Input: +// - self : Pointer to the delay estimation instance. +// +// Output: +// - self : Initialized instance. +// +void WebRtc_InitBinaryDelayEstimator(BinaryDelayEstimator* self); + +// Soft resets the delay estimation instance created with +// WebRtc_CreateBinaryDelayEstimator(...). +// +// Input: +// - delay_shift : The amount of blocks to shift history buffers. +// +// Return value: +// - actual_shifts : The actual number of shifts performed. +// +int WebRtc_SoftResetBinaryDelayEstimator(BinaryDelayEstimator* self, + int delay_shift); + +// Estimates and returns the delay between the binary far-end and binary near- +// end spectra. It is assumed the binary far-end spectrum has been added using +// WebRtc_AddBinaryFarSpectrum() prior to this call. The value will be offset by +// the lookahead (i.e. the lookahead should be subtracted from the returned +// value). +// +// Inputs: +// - self : Pointer to the delay estimation instance. +// - binary_near_spectrum : Near-end binary spectrum of the current block. +// +// Output: +// - self : Updated instance. +// +// Return value: +// - delay : >= 0 - Calculated delay value. +// -2 - Insufficient data for estimation. 
+// +int WebRtc_ProcessBinarySpectrum(BinaryDelayEstimator* self, + uint32_t binary_near_spectrum); + +// Returns the last calculated delay updated by the function +// WebRtc_ProcessBinarySpectrum(...). +// +// Input: +// - self : Pointer to the delay estimation instance. +// +// Return value: +// - delay : >= 0 - Last calculated delay value +// -2 - Insufficient data for estimation. +// +int WebRtc_binary_last_delay(BinaryDelayEstimator* self); + +// Returns the estimation quality of the last calculated delay updated by the +// function WebRtc_ProcessBinarySpectrum(...). The estimation quality is a value +// in the interval [0, 1]. The higher the value, the better the quality. +// +// Return value: +// - delay_quality : >= 0 - Estimation quality of last calculated +// delay value. +float WebRtc_binary_last_delay_quality(BinaryDelayEstimator* self); + +// Updates the |mean_value| recursively with a step size of 2^-|factor|. This +// function is used internally in the Binary Delay Estimator as well as the +// Fixed point wrapper. +// +// Inputs: +// - new_value : The new value the mean should be updated with. +// - factor : The step size, in number of right shifts. +// +// Input/Output: +// - mean_value : Pointer to the mean value. +// +void WebRtc_MeanEstimatorFix(int32_t new_value, + int factor, + int32_t* mean_value); + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_H_ diff --git a/modules/audio_processing/utility/delay_estimator_internal.h b/modules/audio_processing/utility/delay_estimator_internal.h new file mode 100644 index 0000000..fce95d8 --- /dev/null +++ b/modules/audio_processing/utility/delay_estimator_internal.h @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// Header file including the delay estimator handle used for testing. + +#ifndef MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_INTERNAL_H_ +#define MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_INTERNAL_H_ + +#include "modules/audio_processing/utility/delay_estimator.h" + +namespace webrtc { + +typedef union { + float float_; + int32_t int32_; +} SpectrumType; + +typedef struct { + // Pointers to mean values of spectrum. + SpectrumType* mean_far_spectrum; + // |mean_far_spectrum| initialization indicator. + int far_spectrum_initialized; + + int spectrum_size; + + // Far-end part of binary spectrum based delay estimation. + BinaryDelayEstimatorFarend* binary_farend; +} DelayEstimatorFarend; + +typedef struct { + // Pointers to mean values of spectrum. + SpectrumType* mean_near_spectrum; + // |mean_near_spectrum| initialization indicator. + int near_spectrum_initialized; + + int spectrum_size; + + // Binary spectrum based delay estimator + BinaryDelayEstimator* binary_handle; +} DelayEstimator; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_INTERNAL_H_ diff --git a/modules/audio_processing/utility/delay_estimator_unittest.cc b/modules/audio_processing/utility/delay_estimator_unittest.cc new file mode 100644 index 0000000..65d8e14 --- /dev/null +++ b/modules/audio_processing/utility/delay_estimator_unittest.cc @@ -0,0 +1,621 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/audio_processing/utility/delay_estimator.h" + +#include "modules/audio_processing/utility/delay_estimator_internal.h" +#include "modules/audio_processing/utility/delay_estimator_wrapper.h" +#include "test/gtest.h" + +namespace webrtc { + +namespace { + +enum { kSpectrumSize = 65 }; +// Delay history sizes. +enum { kMaxDelay = 100 }; +enum { kLookahead = 10 }; +enum { kHistorySize = kMaxDelay + kLookahead }; +// Length of binary spectrum sequence. +enum { kSequenceLength = 400 }; + +const int kDifferentHistorySize = 3; +const int kDifferentLookahead = 1; + +const int kEnable[] = {0, 1}; +const size_t kSizeEnable = sizeof(kEnable) / sizeof(*kEnable); + +class DelayEstimatorTest : public ::testing::Test { + protected: + DelayEstimatorTest(); + void SetUp() override; + void TearDown() override; + + void Init(); + void InitBinary(); + void VerifyDelay(BinaryDelayEstimator* binary_handle, int offset, int delay); + void RunBinarySpectra(BinaryDelayEstimator* binary1, + BinaryDelayEstimator* binary2, + int near_offset, + int lookahead_offset, + int far_offset); + void RunBinarySpectraTest(int near_offset, + int lookahead_offset, + int ref_robust_validation, + int robust_validation); + + void* handle_; + DelayEstimator* self_; + void* farend_handle_; + DelayEstimatorFarend* farend_self_; + BinaryDelayEstimator* binary_; + BinaryDelayEstimatorFarend* binary_farend_; + int spectrum_size_; + // Dummy input spectra. + float far_f_[kSpectrumSize]; + float near_f_[kSpectrumSize]; + uint16_t far_u16_[kSpectrumSize]; + uint16_t near_u16_[kSpectrumSize]; + uint32_t binary_spectrum_[kSequenceLength + kHistorySize]; +}; + +DelayEstimatorTest::DelayEstimatorTest() + : handle_(NULL), + self_(NULL), + farend_handle_(NULL), + farend_self_(NULL), + binary_(NULL), + binary_farend_(NULL), + spectrum_size_(kSpectrumSize) { + // Dummy input data are set with more or less arbitrary non-zero values. 
+  memset(far_f_, 1, sizeof(far_f_));
+  memset(near_f_, 2, sizeof(near_f_));
+  memset(far_u16_, 1, sizeof(far_u16_));
+  memset(near_u16_, 2, sizeof(near_u16_));
+  // Construct a sequence of binary spectra used to verify delay estimate. The
+  // |kSequenceLength| has to be long enough for the delay estimation to leave
+  // the initialized state.
+  binary_spectrum_[0] = 1;
+  for (int i = 1; i < (kSequenceLength + kHistorySize); i++) {
+    binary_spectrum_[i] = 3 * binary_spectrum_[i - 1];
+  }
+}
+
+void DelayEstimatorTest::SetUp() {
+  farend_handle_ =
+      WebRtc_CreateDelayEstimatorFarend(kSpectrumSize, kHistorySize);
+  ASSERT_TRUE(farend_handle_ != NULL);
+  farend_self_ = reinterpret_cast<DelayEstimatorFarend*>(farend_handle_);
+  handle_ = WebRtc_CreateDelayEstimator(farend_handle_, kLookahead);
+  ASSERT_TRUE(handle_ != NULL);
+  self_ = reinterpret_cast<DelayEstimator*>(handle_);
+  binary_farend_ = WebRtc_CreateBinaryDelayEstimatorFarend(kHistorySize);
+  ASSERT_TRUE(binary_farend_ != NULL);
+  binary_ = WebRtc_CreateBinaryDelayEstimator(binary_farend_, kLookahead);
+  ASSERT_TRUE(binary_ != NULL);
+}
+
+void DelayEstimatorTest::TearDown() {
+  WebRtc_FreeDelayEstimator(handle_);
+  handle_ = NULL;
+  self_ = NULL;
+  WebRtc_FreeDelayEstimatorFarend(farend_handle_);
+  farend_handle_ = NULL;
+  farend_self_ = NULL;
+  WebRtc_FreeBinaryDelayEstimator(binary_);
+  binary_ = NULL;
+  WebRtc_FreeBinaryDelayEstimatorFarend(binary_farend_);
+  binary_farend_ = NULL;
+}
+
+void DelayEstimatorTest::Init() {
+  // Initialize Delay Estimator
+  EXPECT_EQ(0, WebRtc_InitDelayEstimatorFarend(farend_handle_));
+  EXPECT_EQ(0, WebRtc_InitDelayEstimator(handle_));
+  // Verify initialization.
+  EXPECT_EQ(0, farend_self_->far_spectrum_initialized);
+  EXPECT_EQ(0, self_->near_spectrum_initialized);
+  EXPECT_EQ(-2, WebRtc_last_delay(handle_));  // Delay in initial state.
+  EXPECT_FLOAT_EQ(0, WebRtc_last_delay_quality(handle_));  // Zero quality.
+}
+
+void DelayEstimatorTest::InitBinary() {
+  // Initialize Binary Delay Estimator (far-end part).
+ WebRtc_InitBinaryDelayEstimatorFarend(binary_farend_); + // Initialize Binary Delay Estimator + WebRtc_InitBinaryDelayEstimator(binary_); + // Verify initialization. This does not guarantee a complete check, since + // |last_delay| may be equal to -2 before initialization if done on the fly. + EXPECT_EQ(-2, binary_->last_delay); +} + +void DelayEstimatorTest::VerifyDelay(BinaryDelayEstimator* binary_handle, + int offset, + int delay) { + // Verify that we WebRtc_binary_last_delay() returns correct delay. + EXPECT_EQ(delay, WebRtc_binary_last_delay(binary_handle)); + + if (delay != -2) { + // Verify correct delay estimate. In the non-causal case the true delay + // is equivalent with the |offset|. + EXPECT_EQ(offset, delay); + } +} + +void DelayEstimatorTest::RunBinarySpectra(BinaryDelayEstimator* binary1, + BinaryDelayEstimator* binary2, + int near_offset, + int lookahead_offset, + int far_offset) { + int different_validations = + binary1->robust_validation_enabled ^ binary2->robust_validation_enabled; + WebRtc_InitBinaryDelayEstimatorFarend(binary_farend_); + WebRtc_InitBinaryDelayEstimator(binary1); + WebRtc_InitBinaryDelayEstimator(binary2); + // Verify initialization. This does not guarantee a complete check, since + // |last_delay| may be equal to -2 before initialization if done on the fly. 
+ EXPECT_EQ(-2, binary1->last_delay); + EXPECT_EQ(-2, binary2->last_delay); + for (int i = kLookahead; i < (kSequenceLength + kLookahead); i++) { + WebRtc_AddBinaryFarSpectrum(binary_farend_, + binary_spectrum_[i + far_offset]); + int delay_1 = WebRtc_ProcessBinarySpectrum(binary1, binary_spectrum_[i]); + int delay_2 = WebRtc_ProcessBinarySpectrum( + binary2, binary_spectrum_[i - near_offset]); + + VerifyDelay(binary1, far_offset + kLookahead, delay_1); + VerifyDelay(binary2, + far_offset + kLookahead + lookahead_offset + near_offset, + delay_2); + // Expect the two delay estimates to be offset by |lookahead_offset| + + // |near_offset| when we have left the initial state. + if ((delay_1 != -2) && (delay_2 != -2)) { + EXPECT_EQ(delay_1, delay_2 - lookahead_offset - near_offset); + } + // For the case of identical signals |delay_1| and |delay_2| should match + // all the time, unless one of them has robust validation turned on. In + // that case the robust validation leaves the initial state faster. + if ((near_offset == 0) && (lookahead_offset == 0)) { + if (!different_validations) { + EXPECT_EQ(delay_1, delay_2); + } else { + if (binary1->robust_validation_enabled) { + EXPECT_GE(delay_1, delay_2); + } else { + EXPECT_GE(delay_2, delay_1); + } + } + } + } + // Verify that we have left the initialized state. + EXPECT_NE(-2, WebRtc_binary_last_delay(binary1)); + EXPECT_LT(0, WebRtc_binary_last_delay_quality(binary1)); + EXPECT_NE(-2, WebRtc_binary_last_delay(binary2)); + EXPECT_LT(0, WebRtc_binary_last_delay_quality(binary2)); +} + +void DelayEstimatorTest::RunBinarySpectraTest(int near_offset, + int lookahead_offset, + int ref_robust_validation, + int robust_validation) { + BinaryDelayEstimator* binary2 = WebRtc_CreateBinaryDelayEstimator( + binary_farend_, kLookahead + lookahead_offset); + // Verify the delay for both causal and non-causal systems. For causal systems + // the delay is equivalent with a positive |offset| of the far-end sequence. 
+ // For non-causal systems the delay is equivalent with a negative |offset| of + // the far-end sequence. + binary_->robust_validation_enabled = ref_robust_validation; + binary2->robust_validation_enabled = robust_validation; + for (int offset = -kLookahead; + offset < kMaxDelay - lookahead_offset - near_offset; offset++) { + RunBinarySpectra(binary_, binary2, near_offset, lookahead_offset, offset); + } + WebRtc_FreeBinaryDelayEstimator(binary2); + binary2 = NULL; + binary_->robust_validation_enabled = 0; // Reset reference. +} + +TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfWrapper) { + // In this test we verify correct error returns on invalid API calls. + + // WebRtc_CreateDelayEstimatorFarend() and WebRtc_CreateDelayEstimator() + // should return a NULL pointer on invalid input values. + // Make sure we have a non-NULL value at start, so we can detect NULL after + // create failure. + void* handle = farend_handle_; + handle = WebRtc_CreateDelayEstimatorFarend(33, kHistorySize); + EXPECT_TRUE(handle == NULL); + handle = WebRtc_CreateDelayEstimatorFarend(kSpectrumSize, 1); + EXPECT_TRUE(handle == NULL); + + handle = handle_; + handle = WebRtc_CreateDelayEstimator(NULL, kLookahead); + EXPECT_TRUE(handle == NULL); + handle = WebRtc_CreateDelayEstimator(farend_handle_, -1); + EXPECT_TRUE(handle == NULL); + + // WebRtc_InitDelayEstimatorFarend() and WebRtc_InitDelayEstimator() should + // return -1 if we have a NULL pointer as |handle|. + EXPECT_EQ(-1, WebRtc_InitDelayEstimatorFarend(NULL)); + EXPECT_EQ(-1, WebRtc_InitDelayEstimator(NULL)); + + // WebRtc_AddFarSpectrumFloat() should return -1 if we have: + // 1) NULL pointer as |handle|. + // 2) NULL pointer as far-end spectrum. + // 3) Incorrect spectrum size. + EXPECT_EQ(-1, WebRtc_AddFarSpectrumFloat(NULL, far_f_, spectrum_size_)); + // Use |farend_handle_| which is properly created at SetUp(). 
+ EXPECT_EQ(-1, + WebRtc_AddFarSpectrumFloat(farend_handle_, NULL, spectrum_size_)); + EXPECT_EQ(-1, WebRtc_AddFarSpectrumFloat(farend_handle_, far_f_, + spectrum_size_ + 1)); + + // WebRtc_AddFarSpectrumFix() should return -1 if we have: + // 1) NULL pointer as |handle|. + // 2) NULL pointer as far-end spectrum. + // 3) Incorrect spectrum size. + // 4) Too high precision in far-end spectrum (Q-domain > 15). + EXPECT_EQ(-1, WebRtc_AddFarSpectrumFix(NULL, far_u16_, spectrum_size_, 0)); + EXPECT_EQ(-1, + WebRtc_AddFarSpectrumFix(farend_handle_, NULL, spectrum_size_, 0)); + EXPECT_EQ(-1, WebRtc_AddFarSpectrumFix(farend_handle_, far_u16_, + spectrum_size_ + 1, 0)); + EXPECT_EQ(-1, WebRtc_AddFarSpectrumFix(farend_handle_, far_u16_, + spectrum_size_, 16)); + + // WebRtc_set_history_size() should return -1 if: + // 1) |handle| is a NULL. + // 2) |history_size| <= 1. + EXPECT_EQ(-1, WebRtc_set_history_size(NULL, 1)); + EXPECT_EQ(-1, WebRtc_set_history_size(handle_, 1)); + // WebRtc_history_size() should return -1 if: + // 1) NULL pointer input. + EXPECT_EQ(-1, WebRtc_history_size(NULL)); + // 2) there is a mismatch between history size. + void* tmp_handle = WebRtc_CreateDelayEstimator(farend_handle_, kHistorySize); + EXPECT_EQ(0, WebRtc_InitDelayEstimator(tmp_handle)); + EXPECT_EQ(kDifferentHistorySize, + WebRtc_set_history_size(tmp_handle, kDifferentHistorySize)); + EXPECT_EQ(kDifferentHistorySize, WebRtc_history_size(tmp_handle)); + EXPECT_EQ(kHistorySize, WebRtc_set_history_size(handle_, kHistorySize)); + EXPECT_EQ(-1, WebRtc_history_size(tmp_handle)); + + // WebRtc_set_lookahead() should return -1 if we try a value outside the + /// buffer. + EXPECT_EQ(-1, WebRtc_set_lookahead(handle_, kLookahead + 1)); + EXPECT_EQ(-1, WebRtc_set_lookahead(handle_, -1)); + + // WebRtc_set_allowed_offset() should return -1 if we have: + // 1) NULL pointer as |handle|. + // 2) |allowed_offset| < 0. 
+ EXPECT_EQ(-1, WebRtc_set_allowed_offset(NULL, 0)); + EXPECT_EQ(-1, WebRtc_set_allowed_offset(handle_, -1)); + + EXPECT_EQ(-1, WebRtc_get_allowed_offset(NULL)); + + // WebRtc_enable_robust_validation() should return -1 if we have: + // 1) NULL pointer as |handle|. + // 2) Incorrect |enable| value (not 0 or 1). + EXPECT_EQ(-1, WebRtc_enable_robust_validation(NULL, kEnable[0])); + EXPECT_EQ(-1, WebRtc_enable_robust_validation(handle_, -1)); + EXPECT_EQ(-1, WebRtc_enable_robust_validation(handle_, 2)); + + // WebRtc_is_robust_validation_enabled() should return -1 if we have NULL + // pointer as |handle|. + EXPECT_EQ(-1, WebRtc_is_robust_validation_enabled(NULL)); + + // WebRtc_DelayEstimatorProcessFloat() should return -1 if we have: + // 1) NULL pointer as |handle|. + // 2) NULL pointer as near-end spectrum. + // 3) Incorrect spectrum size. + // 4) Non matching history sizes if multiple delay estimators using the same + // far-end reference. + EXPECT_EQ(-1, + WebRtc_DelayEstimatorProcessFloat(NULL, near_f_, spectrum_size_)); + // Use |handle_| which is properly created at SetUp(). + EXPECT_EQ(-1, + WebRtc_DelayEstimatorProcessFloat(handle_, NULL, spectrum_size_)); + EXPECT_EQ(-1, WebRtc_DelayEstimatorProcessFloat(handle_, near_f_, + spectrum_size_ + 1)); + // |tmp_handle| is already in a non-matching state. + EXPECT_EQ(-1, WebRtc_DelayEstimatorProcessFloat(tmp_handle, near_f_, + spectrum_size_)); + + // WebRtc_DelayEstimatorProcessFix() should return -1 if we have: + // 1) NULL pointer as |handle|. + // 2) NULL pointer as near-end spectrum. + // 3) Incorrect spectrum size. + // 4) Too high precision in near-end spectrum (Q-domain > 15). + // 5) Non matching history sizes if multiple delay estimators using the same + // far-end reference. 
+ EXPECT_EQ( + -1, WebRtc_DelayEstimatorProcessFix(NULL, near_u16_, spectrum_size_, 0)); + EXPECT_EQ(-1, + WebRtc_DelayEstimatorProcessFix(handle_, NULL, spectrum_size_, 0)); + EXPECT_EQ(-1, WebRtc_DelayEstimatorProcessFix(handle_, near_u16_, + spectrum_size_ + 1, 0)); + EXPECT_EQ(-1, WebRtc_DelayEstimatorProcessFix(handle_, near_u16_, + spectrum_size_, 16)); + // |tmp_handle| is already in a non-matching state. + EXPECT_EQ(-1, WebRtc_DelayEstimatorProcessFix(tmp_handle, near_u16_, + spectrum_size_, 0)); + WebRtc_FreeDelayEstimator(tmp_handle); + + // WebRtc_last_delay() should return -1 if we have a NULL pointer as |handle|. + EXPECT_EQ(-1, WebRtc_last_delay(NULL)); + + // Free any local memory if needed. + WebRtc_FreeDelayEstimator(handle); +} + +TEST_F(DelayEstimatorTest, VerifyAllowedOffset) { + // Is set to zero by default. + EXPECT_EQ(0, WebRtc_get_allowed_offset(handle_)); + for (int i = 1; i >= 0; i--) { + EXPECT_EQ(0, WebRtc_set_allowed_offset(handle_, i)); + EXPECT_EQ(i, WebRtc_get_allowed_offset(handle_)); + Init(); + // Unaffected over a reset. + EXPECT_EQ(i, WebRtc_get_allowed_offset(handle_)); + } +} + +TEST_F(DelayEstimatorTest, VerifyEnableRobustValidation) { + // Disabled by default. + EXPECT_EQ(0, WebRtc_is_robust_validation_enabled(handle_)); + for (size_t i = 0; i < kSizeEnable; ++i) { + EXPECT_EQ(0, WebRtc_enable_robust_validation(handle_, kEnable[i])); + EXPECT_EQ(kEnable[i], WebRtc_is_robust_validation_enabled(handle_)); + Init(); + // Unaffected over a reset. + EXPECT_EQ(kEnable[i], WebRtc_is_robust_validation_enabled(handle_)); + } +} + +TEST_F(DelayEstimatorTest, InitializedSpectrumAfterProcess) { + // In this test we verify that the mean spectra are initialized after first + // time we call WebRtc_AddFarSpectrum() and Process() respectively. The test + // also verifies the state is not left for zero spectra. 
+ const float kZerosFloat[kSpectrumSize] = {0.0}; + const uint16_t kZerosU16[kSpectrumSize] = {0}; + + // For floating point operations, process one frame and verify initialization + // flag. + Init(); + EXPECT_EQ(0, WebRtc_AddFarSpectrumFloat(farend_handle_, kZerosFloat, + spectrum_size_)); + EXPECT_EQ(0, farend_self_->far_spectrum_initialized); + EXPECT_EQ(0, + WebRtc_AddFarSpectrumFloat(farend_handle_, far_f_, spectrum_size_)); + EXPECT_EQ(1, farend_self_->far_spectrum_initialized); + EXPECT_EQ(-2, WebRtc_DelayEstimatorProcessFloat(handle_, kZerosFloat, + spectrum_size_)); + EXPECT_EQ(0, self_->near_spectrum_initialized); + EXPECT_EQ( + -2, WebRtc_DelayEstimatorProcessFloat(handle_, near_f_, spectrum_size_)); + EXPECT_EQ(1, self_->near_spectrum_initialized); + + // For fixed point operations, process one frame and verify initialization + // flag. + Init(); + EXPECT_EQ(0, WebRtc_AddFarSpectrumFix(farend_handle_, kZerosU16, + spectrum_size_, 0)); + EXPECT_EQ(0, farend_self_->far_spectrum_initialized); + EXPECT_EQ( + 0, WebRtc_AddFarSpectrumFix(farend_handle_, far_u16_, spectrum_size_, 0)); + EXPECT_EQ(1, farend_self_->far_spectrum_initialized); + EXPECT_EQ(-2, WebRtc_DelayEstimatorProcessFix(handle_, kZerosU16, + spectrum_size_, 0)); + EXPECT_EQ(0, self_->near_spectrum_initialized); + EXPECT_EQ(-2, WebRtc_DelayEstimatorProcessFix(handle_, near_u16_, + spectrum_size_, 0)); + EXPECT_EQ(1, self_->near_spectrum_initialized); +} + +TEST_F(DelayEstimatorTest, CorrectLastDelay) { + // In this test we verify that we get the correct last delay upon valid call. + // We simply process the same data until we leave the initialized state + // (|last_delay| = -2). Then we compare the Process() output with the + // last_delay() call. + + // TODO(bjornv): Update quality values for robust validation. + int last_delay = 0; + // Floating point operations. 
+ Init(); + for (int i = 0; i < 200; i++) { + EXPECT_EQ( + 0, WebRtc_AddFarSpectrumFloat(farend_handle_, far_f_, spectrum_size_)); + last_delay = + WebRtc_DelayEstimatorProcessFloat(handle_, near_f_, spectrum_size_); + if (last_delay != -2) { + EXPECT_EQ(last_delay, WebRtc_last_delay(handle_)); + if (!WebRtc_is_robust_validation_enabled(handle_)) { + EXPECT_FLOAT_EQ(7203.f / kMaxBitCountsQ9, + WebRtc_last_delay_quality(handle_)); + } + break; + } + } + // Verify that we have left the initialized state. + EXPECT_NE(-2, WebRtc_last_delay(handle_)); + EXPECT_LT(0, WebRtc_last_delay_quality(handle_)); + + // Fixed point operations. + Init(); + for (int i = 0; i < 200; i++) { + EXPECT_EQ(0, WebRtc_AddFarSpectrumFix(farend_handle_, far_u16_, + spectrum_size_, 0)); + last_delay = + WebRtc_DelayEstimatorProcessFix(handle_, near_u16_, spectrum_size_, 0); + if (last_delay != -2) { + EXPECT_EQ(last_delay, WebRtc_last_delay(handle_)); + if (!WebRtc_is_robust_validation_enabled(handle_)) { + EXPECT_FLOAT_EQ(7203.f / kMaxBitCountsQ9, + WebRtc_last_delay_quality(handle_)); + } + break; + } + } + // Verify that we have left the initialized state. + EXPECT_NE(-2, WebRtc_last_delay(handle_)); + EXPECT_LT(0, WebRtc_last_delay_quality(handle_)); +} + +TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfBinaryEstimatorFarend) { + // In this test we verify correct output on invalid API calls to the Binary + // Delay Estimator (far-end part). + + BinaryDelayEstimatorFarend* binary = binary_farend_; + // WebRtc_CreateBinaryDelayEstimatorFarend() should return -1 if the input + // history size is less than 2. This is to make sure the buffer shifting + // applies properly. + // Make sure we have a non-NULL value at start, so we can detect NULL after + // create failure. 
+ binary = WebRtc_CreateBinaryDelayEstimatorFarend(1); + EXPECT_TRUE(binary == NULL); +} + +TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfBinaryEstimator) { + // In this test we verify correct output on invalid API calls to the Binary + // Delay Estimator. + + BinaryDelayEstimator* binary_handle = binary_; + // WebRtc_CreateBinaryDelayEstimator() should return -1 if we have a NULL + // pointer as |binary_farend| or invalid input values. Upon failure, the + // |binary_handle| should be NULL. + // Make sure we have a non-NULL value at start, so we can detect NULL after + // create failure. + binary_handle = WebRtc_CreateBinaryDelayEstimator(NULL, kLookahead); + EXPECT_TRUE(binary_handle == NULL); + binary_handle = WebRtc_CreateBinaryDelayEstimator(binary_farend_, -1); + EXPECT_TRUE(binary_handle == NULL); +} + +TEST_F(DelayEstimatorTest, MeanEstimatorFix) { + // In this test we verify that we update the mean value in correct direction + // only. With "direction" we mean increase or decrease. + + int32_t mean_value = 4000; + int32_t mean_value_before = mean_value; + int32_t new_mean_value = mean_value * 2; + + // Increasing |mean_value|. + WebRtc_MeanEstimatorFix(new_mean_value, 10, &mean_value); + EXPECT_LT(mean_value_before, mean_value); + EXPECT_GT(new_mean_value, mean_value); + + // Decreasing |mean_value|. + new_mean_value = mean_value / 2; + mean_value_before = mean_value; + WebRtc_MeanEstimatorFix(new_mean_value, 10, &mean_value); + EXPECT_GT(mean_value_before, mean_value); + EXPECT_LT(new_mean_value, mean_value); +} + +TEST_F(DelayEstimatorTest, ExactDelayEstimateMultipleNearSameSpectrum) { + // In this test we verify that we get the correct delay estimates if we shift + // the signal accordingly. We create two Binary Delay Estimators and feed them + // with the same signals, so they should output the same results. + // We verify both causal and non-causal delays. 
+ // For these noise free signals, the robust validation should not have an + // impact, hence we turn robust validation on/off for both reference and + // delayed near end. + + for (size_t i = 0; i < kSizeEnable; ++i) { + for (size_t j = 0; j < kSizeEnable; ++j) { + RunBinarySpectraTest(0, 0, kEnable[i], kEnable[j]); + } + } +} + +TEST_F(DelayEstimatorTest, ExactDelayEstimateMultipleNearDifferentSpectrum) { + // In this test we use the same setup as above, but we now feed the two Binary + // Delay Estimators with different signals, so they should output different + // results. + // For these noise free signals, the robust validation should not have an + // impact, hence we turn robust validation on/off for both reference and + // delayed near end. + + const int kNearOffset = 1; + for (size_t i = 0; i < kSizeEnable; ++i) { + for (size_t j = 0; j < kSizeEnable; ++j) { + RunBinarySpectraTest(kNearOffset, 0, kEnable[i], kEnable[j]); + } + } +} + +TEST_F(DelayEstimatorTest, ExactDelayEstimateMultipleNearDifferentLookahead) { + // In this test we use the same setup as above, feeding the two Binary + // Delay Estimators with the same signals. The difference is that we create + // them with different lookahead. + // For these noise free signals, the robust validation should not have an + // impact, hence we turn robust validation on/off for both reference and + // delayed near end. + + const int kLookaheadOffset = 1; + for (size_t i = 0; i < kSizeEnable; ++i) { + for (size_t j = 0; j < kSizeEnable; ++j) { + RunBinarySpectraTest(0, kLookaheadOffset, kEnable[i], kEnable[j]); + } + } +} + +TEST_F(DelayEstimatorTest, AllowedOffsetNoImpactWhenRobustValidationDisabled) { + // The same setup as in ExactDelayEstimateMultipleNearSameSpectrum with the + // difference that |allowed_offset| is set for the reference binary delay + // estimator. + + binary_->allowed_offset = 10; + RunBinarySpectraTest(0, 0, 0, 0); + binary_->allowed_offset = 0; // Reset reference. 
+} + +TEST_F(DelayEstimatorTest, VerifyLookaheadAtCreate) { + void* farend_handle = + WebRtc_CreateDelayEstimatorFarend(kSpectrumSize, kMaxDelay); + ASSERT_TRUE(farend_handle != NULL); + void* handle = WebRtc_CreateDelayEstimator(farend_handle, kLookahead); + ASSERT_TRUE(handle != NULL); + EXPECT_EQ(kLookahead, WebRtc_lookahead(handle)); + WebRtc_FreeDelayEstimator(handle); + WebRtc_FreeDelayEstimatorFarend(farend_handle); +} + +TEST_F(DelayEstimatorTest, VerifyLookaheadIsSetAndKeptAfterInit) { + EXPECT_EQ(kLookahead, WebRtc_lookahead(handle_)); + EXPECT_EQ(kDifferentLookahead, + WebRtc_set_lookahead(handle_, kDifferentLookahead)); + EXPECT_EQ(kDifferentLookahead, WebRtc_lookahead(handle_)); + EXPECT_EQ(0, WebRtc_InitDelayEstimatorFarend(farend_handle_)); + EXPECT_EQ(kDifferentLookahead, WebRtc_lookahead(handle_)); + EXPECT_EQ(0, WebRtc_InitDelayEstimator(handle_)); + EXPECT_EQ(kDifferentLookahead, WebRtc_lookahead(handle_)); +} + +TEST_F(DelayEstimatorTest, VerifyHistorySizeAtCreate) { + EXPECT_EQ(kHistorySize, WebRtc_history_size(handle_)); +} + +TEST_F(DelayEstimatorTest, VerifyHistorySizeIsSetAndKeptAfterInit) { + EXPECT_EQ(kHistorySize, WebRtc_history_size(handle_)); + EXPECT_EQ(kDifferentHistorySize, + WebRtc_set_history_size(handle_, kDifferentHistorySize)); + EXPECT_EQ(kDifferentHistorySize, WebRtc_history_size(handle_)); + EXPECT_EQ(0, WebRtc_InitDelayEstimator(handle_)); + EXPECT_EQ(kDifferentHistorySize, WebRtc_history_size(handle_)); + EXPECT_EQ(0, WebRtc_InitDelayEstimatorFarend(farend_handle_)); + EXPECT_EQ(kDifferentHistorySize, WebRtc_history_size(handle_)); +} + +// TODO(bjornv): Add tests for SoftReset...(...). 
+
+}  // namespace
+
+}  // namespace webrtc
diff --git a/modules/audio_processing/utility/delay_estimator_wrapper.cc b/modules/audio_processing/utility/delay_estimator_wrapper.cc
new file mode 100644
index 0000000..8eac2f6
--- /dev/null
+++ b/modules/audio_processing/utility/delay_estimator_wrapper.cc
@@ -0,0 +1,489 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/audio_processing/utility/delay_estimator_wrapper.h"
+
+#include <stdlib.h>
+#include <string.h>
+
+#include "modules/audio_processing/utility/delay_estimator.h"
+#include "modules/audio_processing/utility/delay_estimator_internal.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+// Only bit |kBandFirst| through bit |kBandLast| are processed and
+// |kBandFirst| - |kBandLast| must be < 32.
+enum { kBandFirst = 12 };
+enum { kBandLast = 43 };
+
+static __inline uint32_t SetBit(uint32_t in, int pos) {
+  uint32_t mask = (1 << pos);
+  uint32_t out = (in | mask);
+
+  return out;
+}
+
+// Calculates the mean recursively. Same version as WebRtc_MeanEstimatorFix(),
+// but for float.
+//
+// Inputs:
+//    - new_value             : New additional value.
+//    - scale                 : Scale for smoothing (should be less than 1.0).
+//
+// Input/Output:
+//    - mean_value            : Pointer to the mean value for updating.
+//
+static void MeanEstimatorFloat(float new_value,
+                               float scale,
+                               float* mean_value) {
+  RTC_DCHECK_LT(scale, 1.0f);
+  *mean_value += (new_value - *mean_value) * scale;
+}
+
+// Computes the binary spectrum by comparing the input |spectrum| with a
+// |threshold_spectrum|. Float and fixed point versions.
+// +// Inputs: +// - spectrum : Spectrum of which the binary spectrum should be +// calculated. +// - threshold_spectrum : Threshold spectrum with which the input +// spectrum is compared. +// Return: +// - out : Binary spectrum. +// +static uint32_t BinarySpectrumFix(const uint16_t* spectrum, + SpectrumType* threshold_spectrum, + int q_domain, + int* threshold_initialized) { + int i = kBandFirst; + uint32_t out = 0; + + RTC_DCHECK_LT(q_domain, 16); + + if (!(*threshold_initialized)) { + // Set the |threshold_spectrum| to half the input |spectrum| as starting + // value. This speeds up the convergence. + for (i = kBandFirst; i <= kBandLast; i++) { + if (spectrum[i] > 0) { + // Convert input spectrum from Q(|q_domain|) to Q15. + int32_t spectrum_q15 = ((int32_t)spectrum[i]) << (15 - q_domain); + threshold_spectrum[i].int32_ = (spectrum_q15 >> 1); + *threshold_initialized = 1; + } + } + } + for (i = kBandFirst; i <= kBandLast; i++) { + // Convert input spectrum from Q(|q_domain|) to Q15. + int32_t spectrum_q15 = ((int32_t)spectrum[i]) << (15 - q_domain); + // Update the |threshold_spectrum|. + WebRtc_MeanEstimatorFix(spectrum_q15, 6, &(threshold_spectrum[i].int32_)); + // Convert |spectrum| at current frequency bin to a binary value. + if (spectrum_q15 > threshold_spectrum[i].int32_) { + out = SetBit(out, i - kBandFirst); + } + } + + return out; +} + +static uint32_t BinarySpectrumFloat(const float* spectrum, + SpectrumType* threshold_spectrum, + int* threshold_initialized) { + int i = kBandFirst; + uint32_t out = 0; + const float kScale = 1 / 64.0; + + if (!(*threshold_initialized)) { + // Set the |threshold_spectrum| to half the input |spectrum| as starting + // value. This speeds up the convergence. + for (i = kBandFirst; i <= kBandLast; i++) { + if (spectrum[i] > 0.0f) { + threshold_spectrum[i].float_ = (spectrum[i] / 2); + *threshold_initialized = 1; + } + } + } + + for (i = kBandFirst; i <= kBandLast; i++) { + // Update the |threshold_spectrum|. 
+ MeanEstimatorFloat(spectrum[i], kScale, &(threshold_spectrum[i].float_)); + // Convert |spectrum| at current frequency bin to a binary value. + if (spectrum[i] > threshold_spectrum[i].float_) { + out = SetBit(out, i - kBandFirst); + } + } + + return out; +} + +void WebRtc_FreeDelayEstimatorFarend(void* handle) { + DelayEstimatorFarend* self = (DelayEstimatorFarend*)handle; + + if (handle == NULL) { + return; + } + + free(self->mean_far_spectrum); + self->mean_far_spectrum = NULL; + + WebRtc_FreeBinaryDelayEstimatorFarend(self->binary_farend); + self->binary_farend = NULL; + + free(self); +} + +void* WebRtc_CreateDelayEstimatorFarend(int spectrum_size, int history_size) { + DelayEstimatorFarend* self = NULL; + + // Check if the sub band used in the delay estimation is small enough to fit + // the binary spectra in a uint32_t. + static_assert(kBandLast - kBandFirst < 32, ""); + + if (spectrum_size >= kBandLast) { + self = static_cast( + malloc(sizeof(DelayEstimatorFarend))); + } + + if (self != NULL) { + int memory_fail = 0; + + // Allocate memory for the binary far-end spectrum handling. + self->binary_farend = WebRtc_CreateBinaryDelayEstimatorFarend(history_size); + memory_fail |= (self->binary_farend == NULL); + + // Allocate memory for spectrum buffers. + self->mean_far_spectrum = static_cast( + malloc(spectrum_size * sizeof(SpectrumType))); + memory_fail |= (self->mean_far_spectrum == NULL); + + self->spectrum_size = spectrum_size; + + if (memory_fail) { + WebRtc_FreeDelayEstimatorFarend(self); + self = NULL; + } + } + + return self; +} + +int WebRtc_InitDelayEstimatorFarend(void* handle) { + DelayEstimatorFarend* self = (DelayEstimatorFarend*)handle; + + if (self == NULL) { + return -1; + } + + // Initialize far-end part of binary delay estimator. + WebRtc_InitBinaryDelayEstimatorFarend(self->binary_farend); + + // Set averaged far and near end spectra to zero. 
+ memset(self->mean_far_spectrum, 0, + sizeof(SpectrumType) * self->spectrum_size); + // Reset initialization indicators. + self->far_spectrum_initialized = 0; + + return 0; +} + +void WebRtc_SoftResetDelayEstimatorFarend(void* handle, int delay_shift) { + DelayEstimatorFarend* self = (DelayEstimatorFarend*)handle; + RTC_DCHECK(self); + WebRtc_SoftResetBinaryDelayEstimatorFarend(self->binary_farend, delay_shift); +} + +int WebRtc_AddFarSpectrumFix(void* handle, + const uint16_t* far_spectrum, + int spectrum_size, + int far_q) { + DelayEstimatorFarend* self = (DelayEstimatorFarend*)handle; + uint32_t binary_spectrum = 0; + + if (self == NULL) { + return -1; + } + if (far_spectrum == NULL) { + // Empty far end spectrum. + return -1; + } + if (spectrum_size != self->spectrum_size) { + // Data sizes don't match. + return -1; + } + if (far_q > 15) { + // If |far_q| is larger than 15 we cannot guarantee no wrap around. + return -1; + } + + // Get binary spectrum. + binary_spectrum = BinarySpectrumFix(far_spectrum, self->mean_far_spectrum, + far_q, &(self->far_spectrum_initialized)); + WebRtc_AddBinaryFarSpectrum(self->binary_farend, binary_spectrum); + + return 0; +} + +int WebRtc_AddFarSpectrumFloat(void* handle, + const float* far_spectrum, + int spectrum_size) { + DelayEstimatorFarend* self = (DelayEstimatorFarend*)handle; + uint32_t binary_spectrum = 0; + + if (self == NULL) { + return -1; + } + if (far_spectrum == NULL) { + // Empty far end spectrum. + return -1; + } + if (spectrum_size != self->spectrum_size) { + // Data sizes don't match. + return -1; + } + + // Get binary spectrum. 
+ binary_spectrum = BinarySpectrumFloat(far_spectrum, self->mean_far_spectrum, + &(self->far_spectrum_initialized)); + WebRtc_AddBinaryFarSpectrum(self->binary_farend, binary_spectrum); + + return 0; +} + +void WebRtc_FreeDelayEstimator(void* handle) { + DelayEstimator* self = (DelayEstimator*)handle; + + if (handle == NULL) { + return; + } + + free(self->mean_near_spectrum); + self->mean_near_spectrum = NULL; + + WebRtc_FreeBinaryDelayEstimator(self->binary_handle); + self->binary_handle = NULL; + + free(self); +} + +void* WebRtc_CreateDelayEstimator(void* farend_handle, int max_lookahead) { + DelayEstimator* self = NULL; + DelayEstimatorFarend* farend = (DelayEstimatorFarend*)farend_handle; + + if (farend_handle != NULL) { + self = static_cast(malloc(sizeof(DelayEstimator))); + } + + if (self != NULL) { + int memory_fail = 0; + + // Allocate memory for the farend spectrum handling. + self->binary_handle = + WebRtc_CreateBinaryDelayEstimator(farend->binary_farend, max_lookahead); + memory_fail |= (self->binary_handle == NULL); + + // Allocate memory for spectrum buffers. + self->mean_near_spectrum = static_cast( + malloc(farend->spectrum_size * sizeof(SpectrumType))); + memory_fail |= (self->mean_near_spectrum == NULL); + + self->spectrum_size = farend->spectrum_size; + + if (memory_fail) { + WebRtc_FreeDelayEstimator(self); + self = NULL; + } + } + + return self; +} + +int WebRtc_InitDelayEstimator(void* handle) { + DelayEstimator* self = (DelayEstimator*)handle; + + if (self == NULL) { + return -1; + } + + // Initialize binary delay estimator. + WebRtc_InitBinaryDelayEstimator(self->binary_handle); + + // Set averaged far and near end spectra to zero. + memset(self->mean_near_spectrum, 0, + sizeof(SpectrumType) * self->spectrum_size); + // Reset initialization indicators. 
+ self->near_spectrum_initialized = 0; + + return 0; +} + +int WebRtc_SoftResetDelayEstimator(void* handle, int delay_shift) { + DelayEstimator* self = (DelayEstimator*)handle; + RTC_DCHECK(self); + return WebRtc_SoftResetBinaryDelayEstimator(self->binary_handle, delay_shift); +} + +int WebRtc_set_history_size(void* handle, int history_size) { + DelayEstimator* self = static_cast(handle); + + if ((self == NULL) || (history_size <= 1)) { + return -1; + } + return WebRtc_AllocateHistoryBufferMemory(self->binary_handle, history_size); +} + +int WebRtc_history_size(const void* handle) { + const DelayEstimator* self = static_cast(handle); + + if (self == NULL) { + return -1; + } + if (self->binary_handle->farend->history_size != + self->binary_handle->history_size) { + // Non matching history sizes. + return -1; + } + return self->binary_handle->history_size; +} + +int WebRtc_set_lookahead(void* handle, int lookahead) { + DelayEstimator* self = (DelayEstimator*)handle; + RTC_DCHECK(self); + RTC_DCHECK(self->binary_handle); + if ((lookahead > self->binary_handle->near_history_size - 1) || + (lookahead < 0)) { + return -1; + } + self->binary_handle->lookahead = lookahead; + return self->binary_handle->lookahead; +} + +int WebRtc_lookahead(void* handle) { + DelayEstimator* self = (DelayEstimator*)handle; + RTC_DCHECK(self); + RTC_DCHECK(self->binary_handle); + return self->binary_handle->lookahead; +} + +int WebRtc_set_allowed_offset(void* handle, int allowed_offset) { + DelayEstimator* self = (DelayEstimator*)handle; + + if ((self == NULL) || (allowed_offset < 0)) { + return -1; + } + self->binary_handle->allowed_offset = allowed_offset; + return 0; +} + +int WebRtc_get_allowed_offset(const void* handle) { + const DelayEstimator* self = (const DelayEstimator*)handle; + + if (self == NULL) { + return -1; + } + return self->binary_handle->allowed_offset; +} + +int WebRtc_enable_robust_validation(void* handle, int enable) { + DelayEstimator* self = (DelayEstimator*)handle; + 
+ if (self == NULL) { + return -1; + } + if ((enable < 0) || (enable > 1)) { + return -1; + } + RTC_DCHECK(self->binary_handle); + self->binary_handle->robust_validation_enabled = enable; + return 0; +} + +int WebRtc_is_robust_validation_enabled(const void* handle) { + const DelayEstimator* self = (const DelayEstimator*)handle; + + if (self == NULL) { + return -1; + } + return self->binary_handle->robust_validation_enabled; +} + +int WebRtc_DelayEstimatorProcessFix(void* handle, + const uint16_t* near_spectrum, + int spectrum_size, + int near_q) { + DelayEstimator* self = (DelayEstimator*)handle; + uint32_t binary_spectrum = 0; + + if (self == NULL) { + return -1; + } + if (near_spectrum == NULL) { + // Empty near end spectrum. + return -1; + } + if (spectrum_size != self->spectrum_size) { + // Data sizes don't match. + return -1; + } + if (near_q > 15) { + // If |near_q| is larger than 15 we cannot guarantee no wrap around. + return -1; + } + + // Get binary spectra. + binary_spectrum = + BinarySpectrumFix(near_spectrum, self->mean_near_spectrum, near_q, + &(self->near_spectrum_initialized)); + + return WebRtc_ProcessBinarySpectrum(self->binary_handle, binary_spectrum); +} + +int WebRtc_DelayEstimatorProcessFloat(void* handle, + const float* near_spectrum, + int spectrum_size) { + DelayEstimator* self = (DelayEstimator*)handle; + uint32_t binary_spectrum = 0; + + if (self == NULL) { + return -1; + } + if (near_spectrum == NULL) { + // Empty near end spectrum. + return -1; + } + if (spectrum_size != self->spectrum_size) { + // Data sizes don't match. + return -1; + } + + // Get binary spectrum. 
+  binary_spectrum = BinarySpectrumFloat(near_spectrum, self->mean_near_spectrum,
+                                        &(self->near_spectrum_initialized));
+
+  return WebRtc_ProcessBinarySpectrum(self->binary_handle, binary_spectrum);
+}
+
+int WebRtc_last_delay(void* handle) {
+  DelayEstimator* self = (DelayEstimator*)handle;
+
+  if (self == NULL) {
+    return -1;
+  }
+
+  return WebRtc_binary_last_delay(self->binary_handle);
+}
+
+float WebRtc_last_delay_quality(void* handle) {
+  DelayEstimator* self = (DelayEstimator*)handle;
+  RTC_DCHECK(self);
+  return WebRtc_binary_last_delay_quality(self->binary_handle);
+}
+
+}  // namespace webrtc diff --git a/modules/audio_processing/utility/delay_estimator_wrapper.h b/modules/audio_processing/utility/delay_estimator_wrapper.h new file mode 100644 index 0000000..dbcafaf --- /dev/null +++ b/modules/audio_processing/utility/delay_estimator_wrapper.h @@ -0,0 +1,248 @@ +/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Performs delay estimation on block by block basis.
+// The return value is 0 - OK and -1 - Error, unless otherwise stated.
+
+#ifndef MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_WRAPPER_H_
+#define MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_WRAPPER_H_
+
+#include <stdint.h>
+
+namespace webrtc {
+
+// Releases the memory allocated by WebRtc_CreateDelayEstimatorFarend(...)
+void WebRtc_FreeDelayEstimatorFarend(void* handle);
+
+// Allocates the memory needed by the far-end part of the delay estimation. The
+// memory needs to be initialized separately through
+// WebRtc_InitDelayEstimatorFarend(...).
+// +// Inputs: +// - spectrum_size : Size of the spectrum used both in far-end and +// near-end. Used to allocate memory for spectrum +// specific buffers. +// - history_size : The far-end history buffer size. A change in buffer +// size can be forced with WebRtc_set_history_size(). +// Note that the maximum delay which can be estimated is +// determined together with WebRtc_set_lookahead(). +// +// Return value: +// - void* : Created |handle|. If the memory can't be allocated or +// if any of the input parameters are invalid NULL is +// returned. +void* WebRtc_CreateDelayEstimatorFarend(int spectrum_size, int history_size); + +// Initializes the far-end part of the delay estimation instance returned by +// WebRtc_CreateDelayEstimatorFarend(...) +int WebRtc_InitDelayEstimatorFarend(void* handle); + +// Soft resets the far-end part of the delay estimation instance returned by +// WebRtc_CreateDelayEstimatorFarend(...). +// Input: +// - delay_shift : The amount of blocks to shift history buffers. +void WebRtc_SoftResetDelayEstimatorFarend(void* handle, int delay_shift); + +// Adds the far-end spectrum to the far-end history buffer. This spectrum is +// used as reference when calculating the delay using +// WebRtc_ProcessSpectrum(). +// +// Inputs: +// - far_spectrum : Far-end spectrum. +// - spectrum_size : The size of the data arrays (same for both far- and +// near-end). +// - far_q : The Q-domain of the far-end data. +// +// Output: +// - handle : Updated far-end instance. +// +int WebRtc_AddFarSpectrumFix(void* handle, + const uint16_t* far_spectrum, + int spectrum_size, + int far_q); + +// See WebRtc_AddFarSpectrumFix() for description. +int WebRtc_AddFarSpectrumFloat(void* handle, + const float* far_spectrum, + int spectrum_size); + +// Releases the memory allocated by WebRtc_CreateDelayEstimator(...) +void WebRtc_FreeDelayEstimator(void* handle); + +// Allocates the memory needed by the delay estimation. 
The memory needs to be +// initialized separately through WebRtc_InitDelayEstimator(...). +// +// Inputs: +// - farend_handle : Pointer to the far-end part of the delay estimation +// instance created prior to this call using +// WebRtc_CreateDelayEstimatorFarend(). +// +// Note that WebRtc_CreateDelayEstimator does not take +// ownership of |farend_handle|, which has to be torn +// down properly after this instance. +// +// - max_lookahead : Maximum amount of non-causal lookahead allowed. The +// actual amount of lookahead used can be controlled by +// WebRtc_set_lookahead(...). The default |lookahead| is +// set to |max_lookahead| at create time. Use +// WebRtc_set_lookahead(...) before start if a different +// value is desired. +// +// Using lookahead can detect cases in which a near-end +// signal occurs before the corresponding far-end signal. +// It will delay the estimate for the current block by an +// equal amount, and the returned values will be offset +// by it. +// +// A value of zero is the typical no-lookahead case. +// This also represents the minimum delay which can be +// estimated. +// +// Note that the effective range of delay estimates is +// [-|lookahead|,... ,|history_size|-|lookahead|) +// where |history_size| is set through +// WebRtc_set_history_size(). +// +// Return value: +// - void* : Created |handle|. If the memory can't be allocated or +// if any of the input parameters are invalid NULL is +// returned. +void* WebRtc_CreateDelayEstimator(void* farend_handle, int max_lookahead); + +// Initializes the delay estimation instance returned by +// WebRtc_CreateDelayEstimator(...) +int WebRtc_InitDelayEstimator(void* handle); + +// Soft resets the delay estimation instance returned by +// WebRtc_CreateDelayEstimator(...) +// Input: +// - delay_shift : The amount of blocks to shift history buffers. +// +// Return value: +// - actual_shifts : The actual number of shifts performed. 
+int WebRtc_SoftResetDelayEstimator(void* handle, int delay_shift); + +// Sets the effective |history_size| used. Valid values from 2. We simply need +// at least two delays to compare to perform an estimate. If |history_size| is +// changed, buffers are reallocated filling in with zeros if necessary. +// Note that changing the |history_size| affects both buffers in far-end and +// near-end. Hence it is important to change all DelayEstimators that use the +// same reference far-end, to the same |history_size| value. +// Inputs: +// - handle : Pointer to the delay estimation instance. +// - history_size : Effective history size to be used. +// Return value: +// - new_history_size : The new history size used. If the memory was not able +// to be allocated 0 is returned. +int WebRtc_set_history_size(void* handle, int history_size); + +// Returns the history_size currently used. +// Input: +// - handle : Pointer to the delay estimation instance. +int WebRtc_history_size(const void* handle); + +// Sets the amount of |lookahead| to use. Valid values are [0, max_lookahead] +// where |max_lookahead| was set at create time through +// WebRtc_CreateDelayEstimator(...). +// +// Input: +// - handle : Pointer to the delay estimation instance. +// - lookahead : The amount of lookahead to be used. +// +// Return value: +// - new_lookahead : The actual amount of lookahead set, unless |handle| is +// a NULL pointer or |lookahead| is invalid, for which an +// error is returned. +int WebRtc_set_lookahead(void* handle, int lookahead); + +// Returns the amount of lookahead we currently use. +// Input: +// - handle : Pointer to the delay estimation instance. +int WebRtc_lookahead(void* handle); + +// Sets the |allowed_offset| used in the robust validation scheme. If the +// delay estimator is used in an echo control component, this parameter is +// related to the filter length. 
In principle |allowed_offset| should be set to +// the echo control filter length minus the expected echo duration, i.e., the +// delay offset the echo control can handle without quality regression. The +// default value, used if not set manually, is zero. Note that |allowed_offset| +// has to be non-negative. +// Inputs: +// - handle : Pointer to the delay estimation instance. +// - allowed_offset : The amount of delay offset, measured in partitions, +// the echo control filter can handle. +int WebRtc_set_allowed_offset(void* handle, int allowed_offset); + +// Returns the |allowed_offset| in number of partitions. +int WebRtc_get_allowed_offset(const void* handle); + +// Enables/Disables a robust validation functionality in the delay estimation. +// This is by default set to disabled at create time. The state is preserved +// over a reset. +// Inputs: +// - handle : Pointer to the delay estimation instance. +// - enable : Enable (1) or disable (0) this feature. +int WebRtc_enable_robust_validation(void* handle, int enable); + +// Returns 1 if robust validation is enabled and 0 if disabled. +int WebRtc_is_robust_validation_enabled(const void* handle); + +// Estimates and returns the delay between the far-end and near-end blocks. The +// value will be offset by the lookahead (i.e. the lookahead should be +// subtracted from the returned value). +// Inputs: +// - handle : Pointer to the delay estimation instance. +// - near_spectrum : Pointer to the near-end spectrum data of the current +// block. +// - spectrum_size : The size of the data arrays (same for both far- and +// near-end). +// - near_q : The Q-domain of the near-end data. +// +// Output: +// - handle : Updated instance. +// +// Return value: +// - delay : >= 0 - Calculated delay value. +// -1 - Error. +// -2 - Insufficient data for estimation. 
+int WebRtc_DelayEstimatorProcessFix(void* handle, + const uint16_t* near_spectrum, + int spectrum_size, + int near_q); + +// See WebRtc_DelayEstimatorProcessFix() for description. +int WebRtc_DelayEstimatorProcessFloat(void* handle, + const float* near_spectrum, + int spectrum_size); + +// Returns the last calculated delay updated by the function +// WebRtc_DelayEstimatorProcess(...). +// +// Input: +// - handle : Pointer to the delay estimation instance. +// +// Return value: +// - delay : >= 0 - Last calculated delay value. +// -1 - Error. +// -2 - Insufficient data for estimation. +int WebRtc_last_delay(void* handle); + +// Returns the estimation quality/probability of the last calculated delay +// updated by the function WebRtc_DelayEstimatorProcess(...). The estimation +// quality is a value in the interval [0, 1]. The higher the value, the better +// the quality. +// +// Return value: +// - delay_quality : >= 0 - Estimation quality of last calculated delay. +float WebRtc_last_delay_quality(void* handle); + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_WRAPPER_H_ diff --git a/modules/audio_processing/utility/pffft_wrapper.cc b/modules/audio_processing/utility/pffft_wrapper.cc new file mode 100644 index 0000000..88642fb --- /dev/null +++ b/modules/audio_processing/utility/pffft_wrapper.cc @@ -0,0 +1,135 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#include "modules/audio_processing/utility/pffft_wrapper.h"
+
+#include "rtc_base/checks.h"
+#include "third_party/pffft/src/pffft.h"
+
+namespace webrtc {
+namespace {
+
+size_t GetBufferSize(size_t fft_size, Pffft::FftType fft_type) {
+  return fft_size * (fft_type == Pffft::FftType::kReal ? 1 : 2);
+}
+
+float* AllocatePffftBuffer(size_t size) {
+  return static_cast<float*>(pffft_aligned_malloc(size * sizeof(float)));
+}
+
+}  // namespace
+
+Pffft::FloatBuffer::FloatBuffer(size_t fft_size, FftType fft_type)
+    : size_(GetBufferSize(fft_size, fft_type)),
+      data_(AllocatePffftBuffer(size_)) {}
+
+Pffft::FloatBuffer::~FloatBuffer() {
+  pffft_aligned_free(data_);
+}
+
+rtc::ArrayView<const float> Pffft::FloatBuffer::GetConstView() const {
+  return {data_, size_};
+}
+
+rtc::ArrayView<float> Pffft::FloatBuffer::GetView() {
+  return {data_, size_};
+}
+
+Pffft::Pffft(size_t fft_size, FftType fft_type)
+    : fft_size_(fft_size),
+      fft_type_(fft_type),
+      pffft_status_(pffft_new_setup(
+          fft_size_,
+          fft_type == Pffft::FftType::kReal ? PFFFT_REAL : PFFFT_COMPLEX)),
+      scratch_buffer_(
+          AllocatePffftBuffer(GetBufferSize(fft_size_, fft_type_))) {
+  RTC_DCHECK(pffft_status_);
+  RTC_DCHECK(scratch_buffer_);
+}
+
+Pffft::~Pffft() {
+  pffft_destroy_setup(pffft_status_);
+  pffft_aligned_free(scratch_buffer_);
+}
+
+bool Pffft::IsValidFftSize(size_t fft_size, FftType fft_type) {
+  if (fft_size == 0) {
+    return false;
+  }
+  // PFFFT only supports transforms for inputs of length N of the form
+  // N = (2^a)*(3^b)*(5^c) where b >=0 and c >= 0 and a >= 5 for the real FFT
+  // and a >= 4 for the complex FFT.
+  constexpr int kFactors[] = {2, 3, 5};
+  int factorization[] = {0, 0, 0};
+  int n = static_cast<int>(fft_size);
+  for (int i = 0; i < 3; ++i) {
+    while (n % kFactors[i] == 0) {
+      n = n / kFactors[i];
+      factorization[i]++;
+    }
+  }
+  int a_min = (fft_type == Pffft::FftType::kReal) ?
5 : 4;
+  return factorization[0] >= a_min && n == 1;
+}
+
+bool Pffft::IsSimdEnabled() {
+  return pffft_simd_size() > 1;
+}
+
+std::unique_ptr<Pffft::FloatBuffer> Pffft::CreateBuffer() const {
+  // Cannot use make_unique from absl because Pffft is the only friend of
+  // Pffft::FloatBuffer.
+  std::unique_ptr<Pffft::FloatBuffer> buffer(
+      new Pffft::FloatBuffer(fft_size_, fft_type_));
+  return buffer;
+}
+
+void Pffft::ForwardTransform(const FloatBuffer& in,
+                             FloatBuffer* out,
+                             bool ordered) {
+  RTC_DCHECK_EQ(in.size(), GetBufferSize(fft_size_, fft_type_));
+  RTC_DCHECK_EQ(in.size(), out->size());
+  RTC_DCHECK(scratch_buffer_);
+  if (ordered) {
+    pffft_transform_ordered(pffft_status_, in.const_data(), out->data(),
+                            scratch_buffer_, PFFFT_FORWARD);
+  } else {
+    pffft_transform(pffft_status_, in.const_data(), out->data(),
+                    scratch_buffer_, PFFFT_FORWARD);
+  }
+}
+
+void Pffft::BackwardTransform(const FloatBuffer& in,
+                              FloatBuffer* out,
+                              bool ordered) {
+  RTC_DCHECK_EQ(in.size(), GetBufferSize(fft_size_, fft_type_));
+  RTC_DCHECK_EQ(in.size(), out->size());
+  RTC_DCHECK(scratch_buffer_);
+  if (ordered) {
+    pffft_transform_ordered(pffft_status_, in.const_data(), out->data(),
+                            scratch_buffer_, PFFFT_BACKWARD);
+  } else {
+    pffft_transform(pffft_status_, in.const_data(), out->data(),
+                    scratch_buffer_, PFFFT_BACKWARD);
+  }
+}
+
+void Pffft::FrequencyDomainConvolve(const FloatBuffer& fft_x,
+                                    const FloatBuffer& fft_y,
+                                    FloatBuffer* out,
+                                    float scaling) {
+  RTC_DCHECK_EQ(fft_x.size(), GetBufferSize(fft_size_, fft_type_));
+  RTC_DCHECK_EQ(fft_x.size(), fft_y.size());
+  RTC_DCHECK_EQ(fft_x.size(), out->size());
+  pffft_zconvolve_accumulate(pffft_status_, fft_x.const_data(),
+                             fft_y.const_data(), out->data(), scaling);
+}
+
+}  // namespace webrtc diff --git a/modules/audio_processing/utility/pffft_wrapper.h b/modules/audio_processing/utility/pffft_wrapper.h new file mode 100644 index 0000000..160f0da --- /dev/null +++ b/modules/audio_processing/utility/pffft_wrapper.h @@ -0,0 +1,94 @@ +/*
+ * Copyright (c) 
2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_AUDIO_PROCESSING_UTILITY_PFFFT_WRAPPER_H_
+#define MODULES_AUDIO_PROCESSING_UTILITY_PFFFT_WRAPPER_H_
+
+#include <memory>
+
+#include "api/array_view.h"
+
+// Forward declaration.
+struct PFFFT_Setup;
+
+namespace webrtc {
+
+// Pretty-Fast Fast Fourier Transform (PFFFT) wrapper class.
+// Not thread safe.
+class Pffft {
+ public:
+  enum class FftType { kReal, kComplex };
+
+  // 1D floating point buffer used as input/output data type for the FFT ops.
+  // It must be constructed using Pffft::CreateBuffer().
+  class FloatBuffer {
+   public:
+    FloatBuffer(const FloatBuffer&) = delete;
+    FloatBuffer& operator=(const FloatBuffer&) = delete;
+    ~FloatBuffer();
+
+    rtc::ArrayView<const float> GetConstView() const;
+    rtc::ArrayView<float> GetView();
+
+   private:
+    friend class Pffft;
+    FloatBuffer(size_t fft_size, FftType fft_type);
+    const float* const_data() const { return data_; }
+    float* data() { return data_; }
+    size_t size() const { return size_; }
+
+    const size_t size_;
+    float* const data_;
+  };
+
+  // TODO(https://crbug.com/webrtc/9577): Consider adding a factory and making
+  // the ctor private.
+  // static std::unique_ptr<Pffft> Create(size_t fft_size,
+  // FftType fft_type); Ctor. |fft_size| must be a supported size (see
+  // Pffft::IsValidFftSize()). If not supported, the code will crash.
+  Pffft(size_t fft_size, FftType fft_type);
+  Pffft(const Pffft&) = delete;
+  Pffft& operator=(const Pffft&) = delete;
+  ~Pffft();
+
+  // Returns true if the FFT size is supported.
+  static bool IsValidFftSize(size_t fft_size, FftType fft_type);
+
+  // Returns true if SIMD code optimizations are being used.
+  static bool IsSimdEnabled();
+
+  // Creates a buffer of the right size.
+  std::unique_ptr<FloatBuffer> CreateBuffer() const;
+
+  // TODO(https://crbug.com/webrtc/9577): Overload with rtc::ArrayView args.
+  // Computes the forward fast Fourier transform.
+  void ForwardTransform(const FloatBuffer& in, FloatBuffer* out, bool ordered);
+  // Computes the backward fast Fourier transform.
+  void BackwardTransform(const FloatBuffer& in, FloatBuffer* out, bool ordered);
+
+  // Multiplies the frequency components of |fft_x| and |fft_y| and accumulates
+  // them into |out|. The arrays must have been obtained with
+  // ForwardTransform(..., /*ordered=*/false) - i.e., |fft_x| and |fft_y| must
+  // not be ordered.
+  void FrequencyDomainConvolve(const FloatBuffer& fft_x,
+                               const FloatBuffer& fft_y,
+                               FloatBuffer* out,
+                               float scaling = 1.f);
+
+ private:
+  const size_t fft_size_;
+  const FftType fft_type_;
+  PFFFT_Setup* pffft_status_;
+  float* const scratch_buffer_;
+};
+
+}  // namespace webrtc
+
+#endif  // MODULES_AUDIO_PROCESSING_UTILITY_PFFFT_WRAPPER_H_ diff --git a/modules/audio_processing/utility/pffft_wrapper_unittest.cc b/modules/audio_processing/utility/pffft_wrapper_unittest.cc new file mode 100644 index 0000000..2ad6849 --- /dev/null +++ b/modules/audio_processing/utility/pffft_wrapper_unittest.cc @@ -0,0 +1,182 @@ +/*
+ * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */ + +#include "modules/audio_processing/utility/pffft_wrapper.h" + +#include +#include +#include + +#include "test/gtest.h" +#include "third_party/pffft/src/pffft.h" + +namespace webrtc { +namespace test { +namespace { + +constexpr size_t kMaxValidSizeCheck = 1024; + +static constexpr int kFftSizes[] = { + 16, 32, 64, 96, 128, 160, 192, 256, 288, 384, 5 * 96, 512, + 576, 5 * 128, 800, 864, 1024, 2048, 2592, 4000, 4096, 12000, 36864}; + +void CreatePffftWrapper(size_t fft_size, Pffft::FftType fft_type) { + Pffft pffft_wrapper(fft_size, fft_type); +} + +float* AllocateScratchBuffer(size_t fft_size, bool complex_fft) { + return static_cast( + pffft_aligned_malloc(fft_size * (complex_fft ? 2 : 1) * sizeof(float))); +} + +double frand() { + return std::rand() / static_cast(RAND_MAX); +} + +void ExpectArrayViewsEquality(rtc::ArrayView a, + rtc::ArrayView b) { + ASSERT_EQ(a.size(), b.size()); + for (size_t i = 0; i < a.size(); ++i) { + SCOPED_TRACE(i); + EXPECT_EQ(a[i], b[i]); + } +} + +// Compares the output of the PFFFT C++ wrapper to that of the C PFFFT. +// Bit-exactness is expected. +void PffftValidateWrapper(size_t fft_size, bool complex_fft) { + // Always use the same seed to avoid flakiness. + std::srand(0); + + // Init PFFFT. + PFFFT_Setup* pffft_status = + pffft_new_setup(fft_size, complex_fft ? PFFFT_COMPLEX : PFFFT_REAL); + ASSERT_TRUE(pffft_status) << "FFT size (" << fft_size << ") not supported."; + size_t num_floats = fft_size * (complex_fft ? 2 : 1); + int num_bytes = static_cast(num_floats) * sizeof(float); + float* in = static_cast(pffft_aligned_malloc(num_bytes)); + float* out = static_cast(pffft_aligned_malloc(num_bytes)); + float* scratch = AllocateScratchBuffer(fft_size, complex_fft); + + // Init PFFFT C++ wrapper. + Pffft::FftType fft_type = + complex_fft ? 
Pffft::FftType::kComplex : Pffft::FftType::kReal; + ASSERT_TRUE(Pffft::IsValidFftSize(fft_size, fft_type)); + Pffft pffft_wrapper(fft_size, fft_type); + auto in_wrapper = pffft_wrapper.CreateBuffer(); + auto out_wrapper = pffft_wrapper.CreateBuffer(); + + // Input and output buffers views. + rtc::ArrayView in_view(in, num_floats); + rtc::ArrayView out_view(out, num_floats); + auto in_wrapper_view = in_wrapper->GetView(); + EXPECT_EQ(in_wrapper_view.size(), num_floats); + auto out_wrapper_view = out_wrapper->GetConstView(); + EXPECT_EQ(out_wrapper_view.size(), num_floats); + + // Random input data. + for (size_t i = 0; i < num_floats; ++i) { + in_wrapper_view[i] = in[i] = static_cast(frand() * 2.0 - 1.0); + } + + // Forward transform. + pffft_transform(pffft_status, in, out, scratch, PFFFT_FORWARD); + pffft_wrapper.ForwardTransform(*in_wrapper, out_wrapper.get(), + /*ordered=*/false); + ExpectArrayViewsEquality(out_view, out_wrapper_view); + + // Copy the FFT results into the input buffers to compute the backward FFT. + std::copy(out_view.begin(), out_view.end(), in_view.begin()); + std::copy(out_wrapper_view.begin(), out_wrapper_view.end(), + in_wrapper_view.begin()); + + // Backward transform. 
+  pffft_transform(pffft_status, in, out, scratch, PFFFT_BACKWARD);
+  pffft_wrapper.BackwardTransform(*in_wrapper, out_wrapper.get(),
+                                  /*ordered=*/false);
+  ExpectArrayViewsEquality(out_view, out_wrapper_view);
+
+  pffft_destroy_setup(pffft_status);
+  pffft_aligned_free(in);
+  pffft_aligned_free(out);
+  pffft_aligned_free(scratch);
+}
+
+}  // namespace
+
+TEST(PffftTest, CreateWrapperWithValidSize) {
+  for (size_t fft_size = 0; fft_size < kMaxValidSizeCheck; ++fft_size) {
+    SCOPED_TRACE(fft_size);
+    if (Pffft::IsValidFftSize(fft_size, Pffft::FftType::kReal)) {
+      CreatePffftWrapper(fft_size, Pffft::FftType::kReal);
+    }
+    if (Pffft::IsValidFftSize(fft_size, Pffft::FftType::kComplex)) {
+      CreatePffftWrapper(fft_size, Pffft::FftType::kComplex);
+    }
+  }
+}
+
+#if !defined(NDEBUG) && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
+
+class PffftInvalidSizeDeathTest : public ::testing::Test,
+                                  public ::testing::WithParamInterface<size_t> {
+};
+
+TEST_P(PffftInvalidSizeDeathTest, DoNotCreateRealWrapper) {
+  size_t fft_size = GetParam();
+  ASSERT_FALSE(Pffft::IsValidFftSize(fft_size, Pffft::FftType::kReal));
+  EXPECT_DEATH(CreatePffftWrapper(fft_size, Pffft::FftType::kReal), "");
+}
+
+TEST_P(PffftInvalidSizeDeathTest, DoNotCreateComplexWrapper) {
+  size_t fft_size = GetParam();
+  ASSERT_FALSE(Pffft::IsValidFftSize(fft_size, Pffft::FftType::kComplex));
+  EXPECT_DEATH(CreatePffftWrapper(fft_size, Pffft::FftType::kComplex), "");
+}
+
+INSTANTIATE_TEST_SUITE_P(PffftTest,
+                         PffftInvalidSizeDeathTest,
+                         ::testing::Values(17,
+                                           33,
+                                           65,
+                                           97,
+                                           129,
+                                           161,
+                                           193,
+                                           257,
+                                           289,
+                                           385,
+                                           481,
+                                           513,
+                                           577,
+                                           641,
+                                           801,
+                                           865,
+                                           1025));
+
+#endif
+
+// TODO(https://crbug.com/webrtc/9577): Enable once SIMD is always enabled.
+TEST(PffftTest, DISABLED_CheckSimd) { + EXPECT_TRUE(Pffft::IsSimdEnabled()); +} + +TEST(PffftTest, FftBitExactness) { + for (int fft_size : kFftSizes) { + SCOPED_TRACE(fft_size); + if (fft_size != 16) { + PffftValidateWrapper(fft_size, /*complex_fft=*/false); + } + PffftValidateWrapper(fft_size, /*complex_fft=*/true); + } +} + +} // namespace test +} // namespace webrtc diff --git a/modules/audio_processing/vad/BUILD.gn b/modules/audio_processing/vad/BUILD.gn new file mode 100644 index 0000000..71e079d --- /dev/null +++ b/modules/audio_processing/vad/BUILD.gn @@ -0,0 +1,69 @@ +# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("../../../webrtc.gni") +rtc_library("vad") { + visibility = [ + "../*", + "../../../rtc_tools:*", + ] + sources = [ + "common.h", + "gmm.cc", + "gmm.h", + "noise_gmm_tables.h", + "pitch_based_vad.cc", + "pitch_based_vad.h", + "pitch_internal.cc", + "pitch_internal.h", + "pole_zero_filter.cc", + "pole_zero_filter.h", + "standalone_vad.cc", + "standalone_vad.h", + "vad_audio_proc.cc", + "vad_audio_proc.h", + "vad_audio_proc_internal.h", + "vad_circular_buffer.cc", + "vad_circular_buffer.h", + "voice_activity_detector.cc", + "voice_activity_detector.h", + "voice_gmm_tables.h", + ] + deps = [ + "../../../audio/utility:audio_frame_operations", + "../../../common_audio", + "../../../common_audio:common_audio_c", + "../../../common_audio/third_party/ooura:fft_size_256", + "../../../rtc_base:checks", + "../../audio_coding:isac_vad", + ] +} + +if (rtc_include_tests) { + rtc_library("vad_unittests") { + testonly = true + sources = [ + "gmm_unittest.cc", + "pitch_based_vad_unittest.cc", + 
"pitch_internal_unittest.cc", + "pole_zero_filter_unittest.cc", + "standalone_vad_unittest.cc", + "vad_audio_proc_unittest.cc", + "vad_circular_buffer_unittest.cc", + "voice_activity_detector_unittest.cc", + ] + deps = [ + ":vad", + "../../../common_audio", + "../../../test:fileutils", + "../../../test:test_support", + "//testing/gmock", + "//testing/gtest", + ] + } +} diff --git a/modules/audio_processing/vad/common.h b/modules/audio_processing/vad/common.h new file mode 100644 index 0000000..b5a5fb3 --- /dev/null +++ b/modules/audio_processing/vad/common.h @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_VAD_COMMON_H_ +#define MODULES_AUDIO_PROCESSING_VAD_COMMON_H_ + +#include + +static const int kSampleRateHz = 16000; +static const size_t kLength10Ms = kSampleRateHz / 100; +static const size_t kMaxNumFrames = 4; + +struct AudioFeatures { + double log_pitch_gain[kMaxNumFrames]; + double pitch_lag_hz[kMaxNumFrames]; + double spectral_peak[kMaxNumFrames]; + double rms[kMaxNumFrames]; + size_t num_frames; + bool silence; +}; + +#endif // MODULES_AUDIO_PROCESSING_VAD_COMMON_H_ diff --git a/modules/audio_processing/vad/gmm.cc b/modules/audio_processing/vad/gmm.cc new file mode 100644 index 0000000..3b8764c --- /dev/null +++ b/modules/audio_processing/vad/gmm.cc @@ -0,0 +1,61 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/vad/gmm.h" + +#include + +namespace webrtc { + +static const int kMaxDimension = 10; + +static void RemoveMean(const double* in, + const double* mean_vec, + int dimension, + double* out) { + for (int n = 0; n < dimension; ++n) + out[n] = in[n] - mean_vec[n]; +} + +static double ComputeExponent(const double* in, + const double* covar_inv, + int dimension) { + double q = 0; + for (int i = 0; i < dimension; ++i) { + double v = 0; + for (int j = 0; j < dimension; j++) + v += (*covar_inv++) * in[j]; + q += v * in[i]; + } + q *= -0.5; + return q; +} + +double EvaluateGmm(const double* x, const GmmParameters& gmm_parameters) { + if (gmm_parameters.dimension > kMaxDimension) { + return -1; // This is invalid pdf so the caller can check this. + } + double f = 0; + double v[kMaxDimension]; + const double* mean_vec = gmm_parameters.mean; + const double* covar_inv = gmm_parameters.covar_inverse; + + for (int n = 0; n < gmm_parameters.num_mixtures; n++) { + RemoveMean(x, mean_vec, gmm_parameters.dimension, v); + double q = ComputeExponent(v, covar_inv, gmm_parameters.dimension) + + gmm_parameters.weight[n]; + f += exp(q); + mean_vec += gmm_parameters.dimension; + covar_inv += gmm_parameters.dimension * gmm_parameters.dimension; + } + return f; +} + +} // namespace webrtc diff --git a/modules/audio_processing/vad/gmm.h b/modules/audio_processing/vad/gmm.h new file mode 100644 index 0000000..93eb675 --- /dev/null +++ b/modules/audio_processing/vad/gmm.h @@ -0,0 +1,45 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_VAD_GMM_H_ +#define MODULES_AUDIO_PROCESSING_VAD_GMM_H_ + +namespace webrtc { + +// A structure that specifies a GMM. +// A GMM is formulated as +// f(x) = w[0] * mixture[0] + w[1] * mixture[1] + ... + +// w[num_mixtures - 1] * mixture[num_mixtures - 1]; +// Where a 'mixture' is a Gaussian density. + +struct GmmParameters { + // weight[n] = log(w[n]) - |dimension|/2 * log(2*pi) - 1/2 * log(det(cov[n])); + // where cov[n] is the covariance matrix of mixture n; + const double* weight; + // pointer to the first element of a |num_mixtures|x|dimension| matrix + // where kth row is the mean of the kth mixture. + const double* mean; + // pointer to the first element of a |num_mixtures|x|dimension|x|dimension| + // 3D-matrix, where the kth 2D-matrix is the inverse of the covariance + // matrix of the kth mixture. + const double* covar_inverse; + // Dimensionality of the mixtures. + int dimension; + // number of the mixtures. + int num_mixtures; +}; + +// Evaluate the given GMM, according to |gmm_parameters|, at the given point +// |x|. If the dimensionality of the given GMM is larger that the maximum +// acceptable dimension by the following function -1 is returned. +double EvaluateGmm(const double* x, const GmmParameters& gmm_parameters); + +} // namespace webrtc +#endif // MODULES_AUDIO_PROCESSING_VAD_GMM_H_ diff --git a/modules/audio_processing/vad/gmm_unittest.cc b/modules/audio_processing/vad/gmm_unittest.cc new file mode 100644 index 0000000..d895afa --- /dev/null +++ b/modules/audio_processing/vad/gmm_unittest.cc @@ -0,0 +1,65 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/vad/gmm.h" + +#include + +#include "modules/audio_processing/vad/noise_gmm_tables.h" +#include "modules/audio_processing/vad/voice_gmm_tables.h" +#include "test/gtest.h" + +namespace webrtc { + +TEST(GmmTest, EvaluateGmm) { + GmmParameters noise_gmm; + GmmParameters voice_gmm; + + // Setup noise GMM. + noise_gmm.dimension = kNoiseGmmDim; + noise_gmm.num_mixtures = kNoiseGmmNumMixtures; + noise_gmm.weight = kNoiseGmmWeights; + noise_gmm.mean = &kNoiseGmmMean[0][0]; + noise_gmm.covar_inverse = &kNoiseGmmCovarInverse[0][0][0]; + + // Setup voice GMM. + voice_gmm.dimension = kVoiceGmmDim; + voice_gmm.num_mixtures = kVoiceGmmNumMixtures; + voice_gmm.weight = kVoiceGmmWeights; + voice_gmm.mean = &kVoiceGmmMean[0][0]; + voice_gmm.covar_inverse = &kVoiceGmmCovarInverse[0][0][0]; + + // Test vectors. These are the mean of the GMM means. + const double kXVoice[kVoiceGmmDim] = {-1.35893162459863, 602.862491970368, + 178.022069191324}; + const double kXNoise[kNoiseGmmDim] = {-2.33443722724409, 2827.97828765184, + 141.114178166812}; + + // Expected pdf values. These values are computed in MATLAB using EvalGmm.m + const double kPdfNoise = 1.88904409403101e-07; + const double kPdfVoice = 1.30453996982266e-06; + + // Relative error should be smaller that the following value. + const double kAcceptedRelativeErr = 1e-10; + + // Test Voice. + double pdf = EvaluateGmm(kXVoice, voice_gmm); + EXPECT_GT(pdf, 0); + double relative_error = fabs(pdf - kPdfVoice) / kPdfVoice; + EXPECT_LE(relative_error, kAcceptedRelativeErr); + + // Test Noise. 
+ pdf = EvaluateGmm(kXNoise, noise_gmm); + EXPECT_GT(pdf, 0); + relative_error = fabs(pdf - kPdfNoise) / kPdfNoise; + EXPECT_LE(relative_error, kAcceptedRelativeErr); +} + +} // namespace webrtc diff --git a/modules/audio_processing/vad/noise_gmm_tables.h b/modules/audio_processing/vad/noise_gmm_tables.h new file mode 100644 index 0000000..944a540 --- /dev/null +++ b/modules/audio_processing/vad/noise_gmm_tables.h @@ -0,0 +1,82 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// GMM tables for inactive segments. Generated by MakeGmmTables.m. + +#ifndef MODULES_AUDIO_PROCESSING_VAD_NOISE_GMM_TABLES_H_ +#define MODULES_AUDIO_PROCESSING_VAD_NOISE_GMM_TABLES_H_ + +namespace webrtc { + +static const int kNoiseGmmNumMixtures = 12; +static const int kNoiseGmmDim = 3; + +static const double + kNoiseGmmCovarInverse[kNoiseGmmNumMixtures][kNoiseGmmDim][kNoiseGmmDim] = { + {{7.36219567592941e+00, 4.83060785179861e-03, 1.23335151497610e-02}, + {4.83060785179861e-03, 1.65289507047817e-04, -2.41490588169997e-04}, + {1.23335151497610e-02, -2.41490588169997e-04, 6.59472060689382e-03}}, + {{8.70265239309140e+00, -5.30636201431086e-04, 5.44014966585347e-03}, + {-5.30636201431086e-04, 3.11095453521008e-04, -1.86287206836035e-04}, + {5.44014966585347e-03, -1.86287206836035e-04, 6.29493388790744e-04}}, + {{4.53467851955055e+00, -3.92977536695197e-03, -2.46521420693317e-03}, + {-3.92977536695197e-03, 4.94650752632750e-05, -1.08587438501826e-05}, + {-2.46521420693317e-03, -1.08587438501826e-05, 9.28793975422261e-05}}, + {{9.26817997114275e-01, -4.03976069276753e-04, -3.56441427392165e-03}, + {-4.03976069276753e-04, 
2.51976251631430e-06, 1.46914206734572e-07}, + {-3.56441427392165e-03, 1.46914206734572e-07, 8.19914567685373e-05}}, + {{7.61715986787441e+00, -1.54889041216888e-04, 2.41756280071656e-02}, + {-1.54889041216888e-04, 3.50282550461672e-07, -6.27251196972490e-06}, + {2.41756280071656e-02, -6.27251196972490e-06, 1.45061847649872e-02}}, + {{8.31193642663158e+00, -3.84070508164323e-04, -3.09750630821876e-02}, + {-3.84070508164323e-04, 3.80433432277336e-07, -1.14321142836636e-06}, + {-3.09750630821876e-02, -1.14321142836636e-06, 8.35091486289997e-04}}, + {{9.67283151270894e-01, 5.82465812445039e-05, -3.18350798617053e-03}, + {5.82465812445039e-05, 2.23762672000318e-07, -7.74196587408623e-07}, + {-3.18350798617053e-03, -7.74196587408623e-07, 3.85120938338325e-04}}, + {{8.28066236985388e+00, 5.87634508319763e-05, 6.99303090891743e-03}, + {5.87634508319763e-05, 2.93746018618058e-07, 3.40843332882272e-07}, + {6.99303090891743e-03, 3.40843332882272e-07, 1.99379171190344e-04}}, + {{6.07488998675646e+00, -1.11494526618473e-02, 5.10013111123381e-03}, + {-1.11494526618473e-02, 6.99238879921751e-04, 5.36718550370870e-05}, + {5.10013111123381e-03, 5.36718550370870e-05, 5.26909853276753e-04}}, + {{6.90492021419175e+00, 4.20639355257863e-04, -2.38612752336481e-03}, + {4.20639355257863e-04, 3.31246767338153e-06, -2.42052288150859e-08}, + {-2.38612752336481e-03, -2.42052288150859e-08, 4.46608368363412e-04}}, + {{1.31069150869715e+01, -1.73718583865670e-04, -1.97591814508578e-02}, + {-1.73718583865670e-04, 2.80451716300124e-07, 9.96570755379865e-07}, + {-1.97591814508578e-02, 9.96570755379865e-07, 2.41361900868847e-03}}, + {{4.69566344239814e+00, -2.61077567563690e-04, 5.26359000761433e-03}, + {-2.61077567563690e-04, 1.82420859823767e-06, -7.83645887541601e-07}, + {5.26359000761433e-03, -7.83645887541601e-07, 1.33586288288802e-02}}}; + +static const double kNoiseGmmMean[kNoiseGmmNumMixtures][kNoiseGmmDim] = { + {-2.01386094766163e+00, 1.69702162045397e+02, 7.41715804872181e+01}, + 
{-1.94684591777290e+00, 1.42398396732668e+02, 1.64186321157831e+02}, + {-2.29319297562437e+00, 3.86415425589868e+02, 2.13452215267125e+02}, + {-3.25487177070268e+00, 1.08668712553616e+03, 2.33119949467419e+02}, + {-2.13159632447467e+00, 4.83821702557717e+03, 6.86786166673740e+01}, + {-2.26171410780526e+00, 4.79420193982422e+03, 1.53222513286450e+02}, + {-3.32166740703185e+00, 4.35161135834358e+03, 1.33206448431316e+02}, + {-2.19290322814343e+00, 3.98325506609408e+03, 2.13249167359934e+02}, + {-2.02898459255404e+00, 7.37039893155007e+03, 1.12518527491926e+02}, + {-2.26150236399500e+00, 1.54896745196145e+03, 1.49717357868579e+02}, + {-2.00417668301790e+00, 3.82434760310304e+03, 1.07438913004312e+02}, + {-2.30193040814533e+00, 1.43953696546439e+03, 7.04085275122649e+01}}; + +static const double kNoiseGmmWeights[kNoiseGmmNumMixtures] = { + -1.09422832086193e+01, -1.10847897513425e+01, -1.36767587732187e+01, + -1.79789356118641e+01, -1.42830169160894e+01, -1.56500228061379e+01, + -1.83124990950113e+01, -1.69979436177477e+01, -1.12329424387828e+01, + -1.41311785780639e+01, -1.47171861448585e+01, -1.35963362781839e+01}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_VAD_NOISE_GMM_TABLES_H_ diff --git a/modules/audio_processing/vad/pitch_based_vad.cc b/modules/audio_processing/vad/pitch_based_vad.cc new file mode 100644 index 0000000..68e60dc --- /dev/null +++ b/modules/audio_processing/vad/pitch_based_vad.cc @@ -0,0 +1,120 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/audio_processing/vad/pitch_based_vad.h" + +#include + +#include "modules/audio_processing/vad/common.h" +#include "modules/audio_processing/vad/noise_gmm_tables.h" +#include "modules/audio_processing/vad/vad_circular_buffer.h" +#include "modules/audio_processing/vad/voice_gmm_tables.h" + +namespace webrtc { + +static_assert(kNoiseGmmDim == kVoiceGmmDim, + "noise and voice gmm dimension not equal"); + +// These values should match MATLAB counterparts for unit-tests to pass. +static const int kPosteriorHistorySize = 500; // 5 sec of 10 ms frames. +static const double kInitialPriorProbability = 0.3; +static const int kTransientWidthThreshold = 7; +static const double kLowProbabilityThreshold = 0.2; + +static double LimitProbability(double p) { + const double kLimHigh = 0.99; + const double kLimLow = 0.01; + + if (p > kLimHigh) + p = kLimHigh; + else if (p < kLimLow) + p = kLimLow; + return p; +} + +PitchBasedVad::PitchBasedVad() + : p_prior_(kInitialPriorProbability), + circular_buffer_(VadCircularBuffer::Create(kPosteriorHistorySize)) { + // Setup noise GMM. + noise_gmm_.dimension = kNoiseGmmDim; + noise_gmm_.num_mixtures = kNoiseGmmNumMixtures; + noise_gmm_.weight = kNoiseGmmWeights; + noise_gmm_.mean = &kNoiseGmmMean[0][0]; + noise_gmm_.covar_inverse = &kNoiseGmmCovarInverse[0][0][0]; + + // Setup voice GMM. + voice_gmm_.dimension = kVoiceGmmDim; + voice_gmm_.num_mixtures = kVoiceGmmNumMixtures; + voice_gmm_.weight = kVoiceGmmWeights; + voice_gmm_.mean = &kVoiceGmmMean[0][0]; + voice_gmm_.covar_inverse = &kVoiceGmmCovarInverse[0][0][0]; +} + +PitchBasedVad::~PitchBasedVad() {} + +int PitchBasedVad::VoicingProbability(const AudioFeatures& features, + double* p_combined) { + double p; + double gmm_features[3]; + double pdf_features_given_voice; + double pdf_features_given_noise; + // These limits are the same in matlab implementation 'VoicingProbGMM().' 
+ const double kLimLowLogPitchGain = -2.0; + const double kLimHighLogPitchGain = -0.9; + const double kLimLowSpectralPeak = 200; + const double kLimHighSpectralPeak = 2000; + const double kEps = 1e-12; + for (size_t n = 0; n < features.num_frames; n++) { + gmm_features[0] = features.log_pitch_gain[n]; + gmm_features[1] = features.spectral_peak[n]; + gmm_features[2] = features.pitch_lag_hz[n]; + + pdf_features_given_voice = EvaluateGmm(gmm_features, voice_gmm_); + pdf_features_given_noise = EvaluateGmm(gmm_features, noise_gmm_); + + if (features.spectral_peak[n] < kLimLowSpectralPeak || + features.spectral_peak[n] > kLimHighSpectralPeak || + features.log_pitch_gain[n] < kLimLowLogPitchGain) { + pdf_features_given_voice = kEps * pdf_features_given_noise; + } else if (features.log_pitch_gain[n] > kLimHighLogPitchGain) { + pdf_features_given_noise = kEps * pdf_features_given_voice; + } + + p = p_prior_ * pdf_features_given_voice / + (pdf_features_given_voice * p_prior_ + + pdf_features_given_noise * (1 - p_prior_)); + + p = LimitProbability(p); + + // Combine pitch-based probability with standalone probability, before + // updating prior probabilities. + double prod_active = p * p_combined[n]; + double prod_inactive = (1 - p) * (1 - p_combined[n]); + p_combined[n] = prod_active / (prod_active + prod_inactive); + + if (UpdatePrior(p_combined[n]) < 0) + return -1; + // Limit prior probability. With a zero prior probability the posterior + // probability is always zero. 
+ p_prior_ = LimitProbability(p_prior_); + } + return 0; +} + +int PitchBasedVad::UpdatePrior(double p) { + circular_buffer_->Insert(p); + if (circular_buffer_->RemoveTransient(kTransientWidthThreshold, + kLowProbabilityThreshold) < 0) + return -1; + p_prior_ = circular_buffer_->Mean(); + return 0; +} + +} // namespace webrtc diff --git a/modules/audio_processing/vad/pitch_based_vad.h b/modules/audio_processing/vad/pitch_based_vad.h new file mode 100644 index 0000000..e005e23 --- /dev/null +++ b/modules/audio_processing/vad/pitch_based_vad.h @@ -0,0 +1,57 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_VAD_PITCH_BASED_VAD_H_ +#define MODULES_AUDIO_PROCESSING_VAD_PITCH_BASED_VAD_H_ + +#include + +#include "modules/audio_processing/vad/common.h" +#include "modules/audio_processing/vad/gmm.h" + +namespace webrtc { + +class VadCircularBuffer; + +// Computes the probability of the input audio frame to be active given +// the corresponding pitch-gain and lag of the frame. +class PitchBasedVad { + public: + PitchBasedVad(); + ~PitchBasedVad(); + + // Compute pitch-based voicing probability, given the features. + // features: a structure containing features required for computing voicing + // probabilities. + // + // p_combined: an array which contains the combined activity probabilities + // computed prior to the call of this function. The method, + // then, computes the voicing probabilities and combine them + // with the given values. The result are returned in |p|. 
+ int VoicingProbability(const AudioFeatures& features, double* p_combined); + + private: + int UpdatePrior(double p); + + // TODO(turajs): maybe defining this at a higher level (maybe enum) so that + // all the code recognize it as "no-error." + static const int kNoError = 0; + + GmmParameters noise_gmm_; + GmmParameters voice_gmm_; + + double p_prior_; + + std::unique_ptr circular_buffer_; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_VAD_PITCH_BASED_VAD_H_ diff --git a/modules/audio_processing/vad/pitch_based_vad_unittest.cc b/modules/audio_processing/vad/pitch_based_vad_unittest.cc new file mode 100644 index 0000000..4a8331a --- /dev/null +++ b/modules/audio_processing/vad/pitch_based_vad_unittest.cc @@ -0,0 +1,75 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/audio_processing/vad/pitch_based_vad.h" + +#include +#include + +#include + +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" + +namespace webrtc { + +TEST(PitchBasedVadTest, VoicingProbabilityTest) { + std::string spectral_peak_file_name = + test::ResourcePath("audio_processing/agc/agc_spectral_peak", "dat"); + FILE* spectral_peak_file = fopen(spectral_peak_file_name.c_str(), "rb"); + ASSERT_TRUE(spectral_peak_file != NULL); + + std::string pitch_gain_file_name = + test::ResourcePath("audio_processing/agc/agc_pitch_gain", "dat"); + FILE* pitch_gain_file = fopen(pitch_gain_file_name.c_str(), "rb"); + ASSERT_TRUE(pitch_gain_file != NULL); + + std::string pitch_lag_file_name = + test::ResourcePath("audio_processing/agc/agc_pitch_lag", "dat"); + FILE* pitch_lag_file = fopen(pitch_lag_file_name.c_str(), "rb"); + ASSERT_TRUE(pitch_lag_file != NULL); + + std::string voicing_prob_file_name = + test::ResourcePath("audio_processing/agc/agc_voicing_prob", "dat"); + FILE* voicing_prob_file = fopen(voicing_prob_file_name.c_str(), "rb"); + ASSERT_TRUE(voicing_prob_file != NULL); + + PitchBasedVad vad_; + + double reference_activity_probability; + + AudioFeatures audio_features; + memset(&audio_features, 0, sizeof(audio_features)); + audio_features.num_frames = 1; + while (fread(audio_features.spectral_peak, + sizeof(audio_features.spectral_peak[0]), 1, + spectral_peak_file) == 1u) { + double p; + ASSERT_EQ(1u, fread(audio_features.log_pitch_gain, + sizeof(audio_features.log_pitch_gain[0]), 1, + pitch_gain_file)); + ASSERT_EQ(1u, + fread(audio_features.pitch_lag_hz, + sizeof(audio_features.pitch_lag_hz[0]), 1, pitch_lag_file)); + ASSERT_EQ(1u, fread(&reference_activity_probability, + sizeof(reference_activity_probability), 1, + voicing_prob_file)); + + p = 0.5; // Initialize to the neutral value for combining probabilities. 
+ EXPECT_EQ(0, vad_.VoicingProbability(audio_features, &p)); + EXPECT_NEAR(p, reference_activity_probability, 0.01); + } + + fclose(spectral_peak_file); + fclose(pitch_gain_file); + fclose(pitch_lag_file); +} + +} // namespace webrtc diff --git a/modules/audio_processing/vad/pitch_internal.cc b/modules/audio_processing/vad/pitch_internal.cc new file mode 100644 index 0000000..8f86918 --- /dev/null +++ b/modules/audio_processing/vad/pitch_internal.cc @@ -0,0 +1,55 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/vad/pitch_internal.h" + +#include + +namespace webrtc { + +// A 4-to-3 linear interpolation. +// The interpolation constants are derived as following: +// Input pitch parameters are updated every 7.5 ms. Within a 30-ms interval +// we are interested in pitch parameters of 0-5 ms, 10-15ms and 20-25ms. This is +// like interpolating 4-to-6 and keep the odd samples. +// The reason behind this is that LPC coefficients are computed for the first +// half of each 10ms interval. +static void PitchInterpolation(double old_val, const double* in, double* out) { + out[0] = 1. / 6. * old_val + 5. / 6. * in[0]; + out[1] = 5. / 6. * in[1] + 1. / 6. * in[2]; + out[2] = 0.5 * in[2] + 0.5 * in[3]; +} + +void GetSubframesPitchParameters(int sampling_rate_hz, + double* gains, + double* lags, + int num_in_frames, + int num_out_frames, + double* log_old_gain, + double* old_lag, + double* log_pitch_gain, + double* pitch_lag_hz) { + // Gain interpolation is in log-domain, also returned in log-domain. 
+ for (int n = 0; n < num_in_frames; n++) + gains[n] = log(gains[n] + 1e-12); + + // Interpolate lags and gains. + PitchInterpolation(*log_old_gain, gains, log_pitch_gain); + *log_old_gain = gains[num_in_frames - 1]; + PitchInterpolation(*old_lag, lags, pitch_lag_hz); + *old_lag = lags[num_in_frames - 1]; + + // Convert pitch-lags to Hertz. + for (int n = 0; n < num_out_frames; n++) { + pitch_lag_hz[n] = (sampling_rate_hz) / (pitch_lag_hz[n]); + } +} + +} // namespace webrtc diff --git a/modules/audio_processing/vad/pitch_internal.h b/modules/audio_processing/vad/pitch_internal.h new file mode 100644 index 0000000..938745d --- /dev/null +++ b/modules/audio_processing/vad/pitch_internal.h @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_VAD_PITCH_INTERNAL_H_ +#define MODULES_AUDIO_PROCESSING_VAD_PITCH_INTERNAL_H_ + +namespace webrtc { + +// TODO(turajs): Write a description of this function. Also be consistent with +// usage of |sampling_rate_hz| vs |kSamplingFreqHz|. 
+void GetSubframesPitchParameters(int sampling_rate_hz, + double* gains, + double* lags, + int num_in_frames, + int num_out_frames, + double* log_old_gain, + double* old_lag, + double* log_pitch_gain, + double* pitch_lag_hz); + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_VAD_PITCH_INTERNAL_H_ diff --git a/modules/audio_processing/vad/pitch_internal_unittest.cc b/modules/audio_processing/vad/pitch_internal_unittest.cc new file mode 100644 index 0000000..c851421 --- /dev/null +++ b/modules/audio_processing/vad/pitch_internal_unittest.cc @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/vad/pitch_internal.h" + +#include + +#include "test/gtest.h" + +namespace webrtc { + +TEST(PitchInternalTest, test) { + const int kSamplingRateHz = 8000; + const int kNumInputParameters = 4; + const int kNumOutputParameters = 3; + // Inputs + double log_old_gain = log(0.5); + double gains[] = {0.6, 0.2, 0.5, 0.4}; + + double old_lag = 70; + double lags[] = {90, 111, 122, 50}; + + // Expected outputs + double expected_log_pitch_gain[] = {-0.541212549898316, -1.45672279045507, + -0.80471895621705}; + double expected_log_old_gain = log(gains[kNumInputParameters - 1]); + + double expected_pitch_lag_hz[] = {92.3076923076923, 70.9010339734121, + 93.0232558139535}; + double expected_old_lag = lags[kNumInputParameters - 1]; + + double log_pitch_gain[kNumOutputParameters]; + double pitch_lag_hz[kNumInputParameters]; + + GetSubframesPitchParameters(kSamplingRateHz, gains, lags, kNumInputParameters, + kNumOutputParameters, &log_old_gain, &old_lag, + log_pitch_gain, 
pitch_lag_hz); + + for (int n = 0; n < 3; n++) { + EXPECT_NEAR(pitch_lag_hz[n], expected_pitch_lag_hz[n], 1e-6); + EXPECT_NEAR(log_pitch_gain[n], expected_log_pitch_gain[n], 1e-8); + } + EXPECT_NEAR(old_lag, expected_old_lag, 1e-6); + EXPECT_NEAR(log_old_gain, expected_log_old_gain, 1e-8); +} + +} // namespace webrtc diff --git a/modules/audio_processing/vad/pole_zero_filter.cc b/modules/audio_processing/vad/pole_zero_filter.cc new file mode 100644 index 0000000..e7a6113 --- /dev/null +++ b/modules/audio_processing/vad/pole_zero_filter.cc @@ -0,0 +1,107 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/vad/pole_zero_filter.h" + +#include + +#include + +namespace webrtc { + +PoleZeroFilter* PoleZeroFilter::Create(const float* numerator_coefficients, + size_t order_numerator, + const float* denominator_coefficients, + size_t order_denominator) { + if (order_numerator > kMaxFilterOrder || + order_denominator > kMaxFilterOrder || denominator_coefficients[0] == 0 || + numerator_coefficients == NULL || denominator_coefficients == NULL) + return NULL; + return new PoleZeroFilter(numerator_coefficients, order_numerator, + denominator_coefficients, order_denominator); +} + +PoleZeroFilter::PoleZeroFilter(const float* numerator_coefficients, + size_t order_numerator, + const float* denominator_coefficients, + size_t order_denominator) + : past_input_(), + past_output_(), + numerator_coefficients_(), + denominator_coefficients_(), + order_numerator_(order_numerator), + order_denominator_(order_denominator), + highest_order_(std::max(order_denominator, order_numerator)) { + 
memcpy(numerator_coefficients_, numerator_coefficients, + sizeof(numerator_coefficients_[0]) * (order_numerator_ + 1)); + memcpy(denominator_coefficients_, denominator_coefficients, + sizeof(denominator_coefficients_[0]) * (order_denominator_ + 1)); + + if (denominator_coefficients_[0] != 1) { + for (size_t n = 0; n <= order_numerator_; n++) + numerator_coefficients_[n] /= denominator_coefficients_[0]; + for (size_t n = 0; n <= order_denominator_; n++) + denominator_coefficients_[n] /= denominator_coefficients_[0]; + } +} + +template +static float FilterArPast(const T* past, + size_t order, + const float* coefficients) { + float sum = 0.0f; + size_t past_index = order - 1; + for (size_t k = 1; k <= order; k++, past_index--) + sum += coefficients[k] * past[past_index]; + return sum; +} + +int PoleZeroFilter::Filter(const int16_t* in, + size_t num_input_samples, + float* output) { + if (in == NULL || output == NULL) + return -1; + // This is the typical case, just a memcpy. + const size_t k = std::min(num_input_samples, highest_order_); + size_t n; + for (n = 0; n < k; n++) { + output[n] = in[n] * numerator_coefficients_[0]; + output[n] += FilterArPast(&past_input_[n], order_numerator_, + numerator_coefficients_); + output[n] -= FilterArPast(&past_output_[n], order_denominator_, + denominator_coefficients_); + + past_input_[n + order_numerator_] = in[n]; + past_output_[n + order_denominator_] = output[n]; + } + if (highest_order_ < num_input_samples) { + for (size_t m = 0; n < num_input_samples; n++, m++) { + output[n] = in[n] * numerator_coefficients_[0]; + output[n] += + FilterArPast(&in[m], order_numerator_, numerator_coefficients_); + output[n] -= FilterArPast(&output[m], order_denominator_, + denominator_coefficients_); + } + // Record into the past signal. 
+ memcpy(past_input_, &in[num_input_samples - order_numerator_], + sizeof(in[0]) * order_numerator_); + memcpy(past_output_, &output[num_input_samples - order_denominator_], + sizeof(output[0]) * order_denominator_); + } else { + // Odd case that the length of the input is shorter that filter order. + memmove(past_input_, &past_input_[num_input_samples], + order_numerator_ * sizeof(past_input_[0])); + memmove(past_output_, &past_output_[num_input_samples], + order_denominator_ * sizeof(past_output_[0])); + } + return 0; +} + +} // namespace webrtc diff --git a/modules/audio_processing/vad/pole_zero_filter.h b/modules/audio_processing/vad/pole_zero_filter.h new file mode 100644 index 0000000..11a0511 --- /dev/null +++ b/modules/audio_processing/vad/pole_zero_filter.h @@ -0,0 +1,51 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_AUDIO_PROCESSING_VAD_POLE_ZERO_FILTER_H_ +#define MODULES_AUDIO_PROCESSING_VAD_POLE_ZERO_FILTER_H_ + +#include +#include + +namespace webrtc { + +class PoleZeroFilter { + public: + ~PoleZeroFilter() {} + + static PoleZeroFilter* Create(const float* numerator_coefficients, + size_t order_numerator, + const float* denominator_coefficients, + size_t order_denominator); + + int Filter(const int16_t* in, size_t num_input_samples, float* output); + + private: + PoleZeroFilter(const float* numerator_coefficients, + size_t order_numerator, + const float* denominator_coefficients, + size_t order_denominator); + + static const int kMaxFilterOrder = 24; + + int16_t past_input_[kMaxFilterOrder * 2]; + float past_output_[kMaxFilterOrder * 2]; + + float numerator_coefficients_[kMaxFilterOrder + 1]; + float denominator_coefficients_[kMaxFilterOrder + 1]; + + size_t order_numerator_; + size_t order_denominator_; + size_t highest_order_; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_VAD_POLE_ZERO_FILTER_H_ diff --git a/modules/audio_processing/vad/pole_zero_filter_unittest.cc b/modules/audio_processing/vad/pole_zero_filter_unittest.cc new file mode 100644 index 0000000..8088b40 --- /dev/null +++ b/modules/audio_processing/vad/pole_zero_filter_unittest.cc @@ -0,0 +1,103 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/audio_processing/vad/pole_zero_filter.h" + +#include +#include + +#include + +#include "modules/audio_processing/vad/vad_audio_proc_internal.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" + +namespace webrtc { + +static const int kInputSamples = 50; + +static const int16_t kInput[kInputSamples] = { + -2136, -7116, 10715, 2464, 3164, 8139, 11393, 24013, -32117, -5544, + -27740, 10181, 14190, -24055, -15912, 17393, 6359, -9950, -13894, 32432, + -23944, 3437, -8381, 19768, 3087, -19795, -5920, 13310, 1407, 3876, + 4059, 3524, -23130, 19121, -27900, -24840, 4089, 21422, -3625, 3015, + -11236, 28856, 13424, 6571, -19761, -6361, 15821, -9469, 29727, 32229}; + +static const float kReferenceOutput[kInputSamples] = { + -2082.230472f, -6878.572941f, 10697.090871f, 2358.373952f, + 2973.936512f, 7738.580650f, 10690.803213f, 22687.091576f, + -32676.684717f, -5879.621684f, -27359.297432f, 10368.735888f, + 13994.584604f, -23676.126249f, -15078.250390f, 17818.253338f, + 6577.743123f, -9498.369315f, -13073.651079f, 32460.026588f, + -23391.849347f, 3953.805667f, -7667.761363f, 19995.153447f, + 3185.575477f, -19207.365160f, -5143.103201f, 13756.317237f, + 1779.654794f, 4142.269755f, 4209.475034f, 3572.991789f, + -22509.089546f, 19307.878964f, -27060.439759f, -23319.042810f, + 5547.685267f, 22312.718676f, -2707.309027f, 3852.358490f, + -10135.510093f, 29241.509970f, 13394.397233f, 6340.721417f, + -19510.207905f, -5908.442086f, 15882.301634f, -9211.335255f, + 29253.056735f, 30874.443046f}; + +class PoleZeroFilterTest : public ::testing::Test { + protected: + PoleZeroFilterTest() + : my_filter_(PoleZeroFilter::Create(kCoeffNumerator, + kFilterOrder, + kCoeffDenominator, + kFilterOrder)) {} + + ~PoleZeroFilterTest() override {} + + void FilterSubframes(int num_subframes); + + private: + void TestClean(); + std::unique_ptr my_filter_; +}; + +void PoleZeroFilterTest::FilterSubframes(int num_subframes) { + float output[kInputSamples]; + const 
int num_subframe_samples = kInputSamples / num_subframes; + EXPECT_EQ(num_subframe_samples * num_subframes, kInputSamples); + + for (int n = 0; n < num_subframes; n++) { + my_filter_->Filter(&kInput[n * num_subframe_samples], num_subframe_samples, + &output[n * num_subframe_samples]); + } + for (int n = 0; n < kInputSamples; n++) { + EXPECT_NEAR(output[n], kReferenceOutput[n], 1); + } +} + +TEST_F(PoleZeroFilterTest, OneSubframe) { + FilterSubframes(1); +} + +TEST_F(PoleZeroFilterTest, TwoSubframes) { + FilterSubframes(2); +} + +TEST_F(PoleZeroFilterTest, FiveSubframes) { + FilterSubframes(5); +} + +TEST_F(PoleZeroFilterTest, TenSubframes) { + FilterSubframes(10); +} + +TEST_F(PoleZeroFilterTest, TwentyFiveSubframes) { + FilterSubframes(25); +} + +TEST_F(PoleZeroFilterTest, FiftySubframes) { + FilterSubframes(50); +} + +} // namespace webrtc diff --git a/modules/audio_processing/vad/standalone_vad.cc b/modules/audio_processing/vad/standalone_vad.cc new file mode 100644 index 0000000..1397668 --- /dev/null +++ b/modules/audio_processing/vad/standalone_vad.cc @@ -0,0 +1,91 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/audio_processing/vad/standalone_vad.h" + +#include + +#include "common_audio/vad/include/webrtc_vad.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +static const int kDefaultStandaloneVadMode = 3; + +StandaloneVad::StandaloneVad(VadInst* vad) + : vad_(vad), buffer_(), index_(0), mode_(kDefaultStandaloneVadMode) {} + +StandaloneVad::~StandaloneVad() { + WebRtcVad_Free(vad_); +} + +StandaloneVad* StandaloneVad::Create() { + VadInst* vad = WebRtcVad_Create(); + if (!vad) + return nullptr; + + int err = WebRtcVad_Init(vad); + err |= WebRtcVad_set_mode(vad, kDefaultStandaloneVadMode); + if (err != 0) { + WebRtcVad_Free(vad); + return nullptr; + } + return new StandaloneVad(vad); +} + +int StandaloneVad::AddAudio(const int16_t* data, size_t length) { + if (length != kLength10Ms) + return -1; + + if (index_ + length > kLength10Ms * kMaxNum10msFrames) + // Reset the buffer if it's full. + // TODO(ajm): Instead, consider just processing every 10 ms frame. Then we + // can forgo the buffering. + index_ = 0; + + memcpy(&buffer_[index_], data, sizeof(int16_t) * length); + index_ += length; + return 0; +} + +int StandaloneVad::GetActivity(double* p, size_t length_p) { + if (index_ == 0) + return -1; + + const size_t num_frames = index_ / kLength10Ms; + if (num_frames > length_p) + return -1; + RTC_DCHECK_EQ(0, WebRtcVad_ValidRateAndFrameLength(kSampleRateHz, index_)); + + int activity = WebRtcVad_Process(vad_, kSampleRateHz, buffer_, index_); + if (activity < 0) + return -1; + else if (activity == 0) + p[0] = 0.01; // Arbitrary but small and non-zero. + else + p[0] = 0.5; // 0.5 is neutral values when combinned by other probabilities. + for (size_t n = 1; n < num_frames; n++) + p[n] = p[0]; + // Reset the buffer to start from the beginning. 
+ index_ = 0; + return activity; +} + +int StandaloneVad::set_mode(int mode) { + if (mode < 0 || mode > 3) + return -1; + if (WebRtcVad_set_mode(vad_, mode) != 0) + return -1; + + mode_ = mode; + return 0; +} + +} // namespace webrtc diff --git a/modules/audio_processing/vad/standalone_vad.h b/modules/audio_processing/vad/standalone_vad.h new file mode 100644 index 0000000..3dff416 --- /dev/null +++ b/modules/audio_processing/vad/standalone_vad.h @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_AGC_STANDALONE_VAD_H_ +#define MODULES_AUDIO_PROCESSING_AGC_STANDALONE_VAD_H_ + +#include +#include + +#include "common_audio/vad/include/webrtc_vad.h" +#include "modules/audio_processing/vad/common.h" + +namespace webrtc { + +class StandaloneVad { + public: + static StandaloneVad* Create(); + ~StandaloneVad(); + + // Outputs + // p: a buffer where probabilities are written to. + // length_p: number of elements of |p|. + // + // return value: + // -1: if no audio is stored or VAD returns error. + // 0: in success. + // In case of error the content of |activity| is unchanged. + // + // Note that due to a high false-positive (VAD decision is active while the + // processed audio is just background noise) rate, stand-alone VAD is used as + // a one-sided indicator. The activity probability is 0.5 if the frame is + // classified as active, and the probability is 0.01 if the audio is + // classified as passive. In this way, when probabilities are combined, the + // effect of the stand-alone VAD is neutral if the input is classified as + // active. 
+ int GetActivity(double* p, size_t length_p); + + // Expecting 10 ms of 16 kHz audio to be pushed in. + int AddAudio(const int16_t* data, size_t length); + + // Set aggressiveness of VAD, 0 is the least aggressive and 3 is the most + // aggressive mode. Returns -1 if the input is less than 0 or larger than 3, + // otherwise 0 is returned. + int set_mode(int mode); + // Get the agressiveness of the current VAD. + int mode() const { return mode_; } + + private: + explicit StandaloneVad(VadInst* vad); + + static const size_t kMaxNum10msFrames = 3; + + // TODO(turajs): Is there a way to use scoped-pointer here? + VadInst* vad_; + int16_t buffer_[kMaxNum10msFrames * kLength10Ms]; + size_t index_; + int mode_; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_AGC_STANDALONE_VAD_H_ diff --git a/modules/audio_processing/vad/standalone_vad_unittest.cc b/modules/audio_processing/vad/standalone_vad_unittest.cc new file mode 100644 index 0000000..22b1f49 --- /dev/null +++ b/modules/audio_processing/vad/standalone_vad_unittest.cc @@ -0,0 +1,107 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/vad/standalone_vad.h" + +#include + +#include + +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" + +namespace webrtc { + +TEST(StandaloneVadTest, Api) { + std::unique_ptr vad(StandaloneVad::Create()); + int16_t data[kLength10Ms] = {0}; + + // Valid frame length (for 32 kHz rate), but not what the VAD is expecting. 
+ EXPECT_EQ(-1, vad->AddAudio(data, 320)); + + const size_t kMaxNumFrames = 3; + double p[kMaxNumFrames]; + for (size_t n = 0; n < kMaxNumFrames; n++) + EXPECT_EQ(0, vad->AddAudio(data, kLength10Ms)); + + // Pretend |p| is shorter that it should be. + EXPECT_EQ(-1, vad->GetActivity(p, kMaxNumFrames - 1)); + + EXPECT_EQ(0, vad->GetActivity(p, kMaxNumFrames)); + + // Ask for activity when buffer is empty. + EXPECT_EQ(-1, vad->GetActivity(p, kMaxNumFrames)); + + // Should reset and result in one buffer. + for (size_t n = 0; n < kMaxNumFrames + 1; n++) + EXPECT_EQ(0, vad->AddAudio(data, kLength10Ms)); + EXPECT_EQ(0, vad->GetActivity(p, 1)); + + // Wrong modes + EXPECT_EQ(-1, vad->set_mode(-1)); + EXPECT_EQ(-1, vad->set_mode(4)); + + // Valid mode. + const int kMode = 2; + EXPECT_EQ(0, vad->set_mode(kMode)); + EXPECT_EQ(kMode, vad->mode()); +} + +#if defined(WEBRTC_IOS) +TEST(StandaloneVadTest, DISABLED_ActivityDetection) { +#else +TEST(StandaloneVadTest, ActivityDetection) { +#endif + std::unique_ptr vad(StandaloneVad::Create()); + const size_t kDataLength = kLength10Ms; + int16_t data[kDataLength] = {0}; + + FILE* pcm_file = + fopen(test::ResourcePath("audio_processing/agc/agc_audio", "pcm").c_str(), + "rb"); + ASSERT_TRUE(pcm_file != NULL); + + FILE* reference_file = fopen( + test::ResourcePath("audio_processing/agc/agc_vad", "dat").c_str(), "rb"); + ASSERT_TRUE(reference_file != NULL); + + // Reference activities are prepared with 0 aggressiveness. + ASSERT_EQ(0, vad->set_mode(0)); + + // Stand-alone VAD can operate on 1, 2 or 3 frames of length 10 ms. The + // reference file is created for 30 ms frame. 
+ const int kNumVadFramesToProcess = 3; + int num_frames = 0; + while (fread(data, sizeof(int16_t), kDataLength, pcm_file) == kDataLength) { + vad->AddAudio(data, kDataLength); + num_frames++; + if (num_frames == kNumVadFramesToProcess) { + num_frames = 0; + int referece_activity; + double p[kNumVadFramesToProcess]; + EXPECT_EQ(1u, fread(&referece_activity, sizeof(referece_activity), 1, + reference_file)); + int activity = vad->GetActivity(p, kNumVadFramesToProcess); + EXPECT_EQ(referece_activity, activity); + if (activity != 0) { + // When active, probabilities are set to 0.5. + for (int n = 0; n < kNumVadFramesToProcess; n++) + EXPECT_EQ(0.5, p[n]); + } else { + // When inactive, probabilities are set to 0.01. + for (int n = 0; n < kNumVadFramesToProcess; n++) + EXPECT_EQ(0.01, p[n]); + } + } + } + fclose(reference_file); + fclose(pcm_file); +} +} // namespace webrtc diff --git a/modules/audio_processing/vad/vad_audio_proc.cc b/modules/audio_processing/vad/vad_audio_proc.cc new file mode 100644 index 0000000..97cf651 --- /dev/null +++ b/modules/audio_processing/vad/vad_audio_proc.cc @@ -0,0 +1,275 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/audio_processing/vad/vad_audio_proc.h" + +#include +#include +#include + +#include "common_audio/third_party/ooura/fft_size_256/fft4g.h" +#include "modules/audio_processing/vad/pitch_internal.h" +#include "modules/audio_processing/vad/pole_zero_filter.h" +#include "modules/audio_processing/vad/vad_audio_proc_internal.h" +#include "rtc_base/checks.h" +extern "C" { +#include "modules/audio_coding/codecs/isac/main/source/filter_functions.h" +#include "modules/audio_coding/codecs/isac/main/source/isac_vad.h" +#include "modules/audio_coding/codecs/isac/main/source/pitch_estimator.h" +#include "modules/audio_coding/codecs/isac/main/source/structs.h" +} + +namespace webrtc { + +// The following structures are declared anonymous in iSAC's structs.h. To +// forward declare them, we use this derived class trick. +struct VadAudioProc::PitchAnalysisStruct : public ::PitchAnalysisStruct {}; +struct VadAudioProc::PreFiltBankstr : public ::PreFiltBankstr {}; + +static constexpr float kFrequencyResolution = + kSampleRateHz / static_cast(VadAudioProc::kDftSize); +static constexpr int kSilenceRms = 5; + +// TODO(turajs): Make a Create or Init for VadAudioProc. +VadAudioProc::VadAudioProc() + : audio_buffer_(), + num_buffer_samples_(kNumPastSignalSamples), + log_old_gain_(-2), + old_lag_(50), // Arbitrary but valid as pitch-lag (in samples). + pitch_analysis_handle_(new PitchAnalysisStruct), + pre_filter_handle_(new PreFiltBankstr), + high_pass_filter_(PoleZeroFilter::Create(kCoeffNumerator, + kFilterOrder, + kCoeffDenominator, + kFilterOrder)) { + static_assert(kNumPastSignalSamples + kNumSubframeSamples == + sizeof(kLpcAnalWin) / sizeof(kLpcAnalWin[0]), + "lpc analysis window incorrect size"); + static_assert(kLpcOrder + 1 == sizeof(kCorrWeight) / sizeof(kCorrWeight[0]), + "correlation weight incorrect size"); + + // TODO(turajs): Are we doing too much in the constructor? + float data[kDftSize]; + // Make FFT to initialize. 
+ ip_[0] = 0; + WebRtc_rdft(kDftSize, 1, data, ip_, w_fft_); + // TODO(turajs): Need to initialize high-pass filter. + + // Initialize iSAC components. + WebRtcIsac_InitPreFilterbank(pre_filter_handle_.get()); + WebRtcIsac_InitPitchAnalysis(pitch_analysis_handle_.get()); +} + +VadAudioProc::~VadAudioProc() {} + +void VadAudioProc::ResetBuffer() { + memcpy(audio_buffer_, &audio_buffer_[kNumSamplesToProcess], + sizeof(audio_buffer_[0]) * kNumPastSignalSamples); + num_buffer_samples_ = kNumPastSignalSamples; +} + +int VadAudioProc::ExtractFeatures(const int16_t* frame, + size_t length, + AudioFeatures* features) { + features->num_frames = 0; + if (length != kNumSubframeSamples) { + return -1; + } + + // High-pass filter to remove the DC component and very low frequency content. + // We have experienced that this high-pass filtering improves voice/non-voiced + // classification. + if (high_pass_filter_->Filter(frame, kNumSubframeSamples, + &audio_buffer_[num_buffer_samples_]) != 0) { + return -1; + } + + num_buffer_samples_ += kNumSubframeSamples; + if (num_buffer_samples_ < kBufferLength) { + return 0; + } + RTC_DCHECK_EQ(num_buffer_samples_, kBufferLength); + features->num_frames = kNum10msSubframes; + features->silence = false; + + Rms(features->rms, kMaxNumFrames); + for (size_t i = 0; i < kNum10msSubframes; ++i) { + if (features->rms[i] < kSilenceRms) { + // PitchAnalysis can cause NaNs in the pitch gain if it's fed silence. + // Bail out here instead. + features->silence = true; + ResetBuffer(); + return 0; + } + } + + PitchAnalysis(features->log_pitch_gain, features->pitch_lag_hz, + kMaxNumFrames); + FindFirstSpectralPeaks(features->spectral_peak, kMaxNumFrames); + ResetBuffer(); + return 0; +} + +// Computes |kLpcOrder + 1| correlation coefficients. 
+void VadAudioProc::SubframeCorrelation(double* corr, + size_t length_corr, + size_t subframe_index) { + RTC_DCHECK_GE(length_corr, kLpcOrder + 1); + double windowed_audio[kNumSubframeSamples + kNumPastSignalSamples]; + size_t buffer_index = subframe_index * kNumSubframeSamples; + + for (size_t n = 0; n < kNumSubframeSamples + kNumPastSignalSamples; n++) + windowed_audio[n] = audio_buffer_[buffer_index++] * kLpcAnalWin[n]; + + WebRtcIsac_AutoCorr(corr, windowed_audio, + kNumSubframeSamples + kNumPastSignalSamples, kLpcOrder); +} + +// Compute |kNum10msSubframes| sets of LPC coefficients, one per 10 ms input. +// The analysis window is 15 ms long and it is centered on the first half of +// each 10ms sub-frame. This is equivalent to computing LPC coefficients for the +// first half of each 10 ms subframe. +void VadAudioProc::GetLpcPolynomials(double* lpc, size_t length_lpc) { + RTC_DCHECK_GE(length_lpc, kNum10msSubframes * (kLpcOrder + 1)); + double corr[kLpcOrder + 1]; + double reflec_coeff[kLpcOrder]; + for (size_t i = 0, offset_lpc = 0; i < kNum10msSubframes; + i++, offset_lpc += kLpcOrder + 1) { + SubframeCorrelation(corr, kLpcOrder + 1, i); + corr[0] *= 1.0001; + // This makes Lev-Durb a bit more stable. + for (size_t k = 0; k < kLpcOrder + 1; k++) { + corr[k] *= kCorrWeight[k]; + } + WebRtcIsac_LevDurb(&lpc[offset_lpc], reflec_coeff, corr, kLpcOrder); + } +} + +// Fit a second order curve to these 3 points and find the location of the +// extremum. The points are inverted before curve fitting. +static float QuadraticInterpolation(float prev_val, + float curr_val, + float next_val) { + // Doing the interpolation in |1 / A(z)|^2. 
+ float fractional_index = 0; + next_val = 1.0f / next_val; + prev_val = 1.0f / prev_val; + curr_val = 1.0f / curr_val; + + fractional_index = + -(next_val - prev_val) * 0.5f / (next_val + prev_val - 2.f * curr_val); + RTC_DCHECK_LT(fabs(fractional_index), 1); + return fractional_index; +} + +// 1 / A(z), where A(z) is defined by |lpc| is a model of the spectral envelope +// of the input signal. The local maximum of the spectral envelope corresponds +// with the local minimum of A(z). It saves complexity, as we save one +// inversion. Furthermore, we find the first local maximum of magnitude squared, +// to save on one square root. +void VadAudioProc::FindFirstSpectralPeaks(double* f_peak, + size_t length_f_peak) { + RTC_DCHECK_GE(length_f_peak, kNum10msSubframes); + double lpc[kNum10msSubframes * (kLpcOrder + 1)]; + // For all sub-frames. + GetLpcPolynomials(lpc, kNum10msSubframes * (kLpcOrder + 1)); + + const size_t kNumDftCoefficients = kDftSize / 2 + 1; + float data[kDftSize]; + + for (size_t i = 0; i < kNum10msSubframes; i++) { + // Convert to float with zero pad. + memset(data, 0, sizeof(data)); + for (size_t n = 0; n < kLpcOrder + 1; n++) { + data[n] = static_cast(lpc[i * (kLpcOrder + 1) + n]); + } + // Transform to frequency domain. + WebRtc_rdft(kDftSize, 1, data, ip_, w_fft_); + + size_t index_peak = 0; + float prev_magn_sqr = data[0] * data[0]; + float curr_magn_sqr = data[2] * data[2] + data[3] * data[3]; + float next_magn_sqr; + bool found_peak = false; + for (size_t n = 2; n < kNumDftCoefficients - 1; n++) { + next_magn_sqr = + data[2 * n] * data[2 * n] + data[2 * n + 1] * data[2 * n + 1]; + if (curr_magn_sqr < prev_magn_sqr && curr_magn_sqr < next_magn_sqr) { + found_peak = true; + index_peak = n - 1; + break; + } + prev_magn_sqr = curr_magn_sqr; + curr_magn_sqr = next_magn_sqr; + } + float fractional_index = 0; + if (!found_peak) { + // Checking if |kNumDftCoefficients - 1| is the local minimum. 
+ next_magn_sqr = data[1] * data[1]; + if (curr_magn_sqr < prev_magn_sqr && curr_magn_sqr < next_magn_sqr) { + index_peak = kNumDftCoefficients - 1; + } + } else { + // A peak is found, do a simple quadratic interpolation to get a more + // accurate estimate of the peak location. + fractional_index = + QuadraticInterpolation(prev_magn_sqr, curr_magn_sqr, next_magn_sqr); + } + f_peak[i] = (index_peak + fractional_index) * kFrequencyResolution; + } +} + +// Using iSAC functions to estimate pitch gains & lags. +void VadAudioProc::PitchAnalysis(double* log_pitch_gains, + double* pitch_lags_hz, + size_t length) { + // TODO(turajs): This can be "imported" from iSAC & and the next two + // constants. + RTC_DCHECK_GE(length, kNum10msSubframes); + const int kNumPitchSubframes = 4; + double gains[kNumPitchSubframes]; + double lags[kNumPitchSubframes]; + + const int kNumSubbandFrameSamples = 240; + const int kNumLookaheadSamples = 24; + + float lower[kNumSubbandFrameSamples]; + float upper[kNumSubbandFrameSamples]; + double lower_lookahead[kNumSubbandFrameSamples]; + double upper_lookahead[kNumSubbandFrameSamples]; + double lower_lookahead_pre_filter[kNumSubbandFrameSamples + + kNumLookaheadSamples]; + + // Split signal to lower and upper bands + WebRtcIsac_SplitAndFilterFloat(&audio_buffer_[kNumPastSignalSamples], lower, + upper, lower_lookahead, upper_lookahead, + pre_filter_handle_.get()); + WebRtcIsac_PitchAnalysis(lower_lookahead, lower_lookahead_pre_filter, + pitch_analysis_handle_.get(), lags, gains); + + // Lags are computed on lower-band signal with sampling rate half of the + // input signal. 
+ GetSubframesPitchParameters( + kSampleRateHz / 2, gains, lags, kNumPitchSubframes, kNum10msSubframes, + &log_old_gain_, &old_lag_, log_pitch_gains, pitch_lags_hz); +} + +void VadAudioProc::Rms(double* rms, size_t length_rms) { + RTC_DCHECK_GE(length_rms, kNum10msSubframes); + size_t offset = kNumPastSignalSamples; + for (size_t i = 0; i < kNum10msSubframes; i++) { + rms[i] = 0; + for (size_t n = 0; n < kNumSubframeSamples; n++, offset++) + rms[i] += audio_buffer_[offset] * audio_buffer_[offset]; + rms[i] = sqrt(rms[i] / kNumSubframeSamples); + } +} + +} // namespace webrtc diff --git a/modules/audio_processing/vad/vad_audio_proc.h b/modules/audio_processing/vad/vad_audio_proc.h new file mode 100644 index 0000000..4a71ce3 --- /dev/null +++ b/modules/audio_processing/vad/vad_audio_proc.h @@ -0,0 +1,94 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_VAD_VAD_AUDIO_PROC_H_ +#define MODULES_AUDIO_PROCESSING_VAD_VAD_AUDIO_PROC_H_ + +#include +#include + +#include + +#include "modules/audio_processing/vad/common.h" // AudioFeatures, kSampleR... + +namespace webrtc { + +class PoleZeroFilter; + +class VadAudioProc { + public: + // Forward declare iSAC structs. 
+ struct PitchAnalysisStruct; + struct PreFiltBankstr; + + VadAudioProc(); + ~VadAudioProc(); + + int ExtractFeatures(const int16_t* audio_frame, + size_t length, + AudioFeatures* audio_features); + + static const size_t kDftSize = 512; + + private: + void PitchAnalysis(double* pitch_gains, double* pitch_lags_hz, size_t length); + void SubframeCorrelation(double* corr, + size_t length_corr, + size_t subframe_index); + void GetLpcPolynomials(double* lpc, size_t length_lpc); + void FindFirstSpectralPeaks(double* f_peak, size_t length_f_peak); + void Rms(double* rms, size_t length_rms); + void ResetBuffer(); + + // To compute spectral peak we perform LPC analysis to get spectral envelope. + // For every 30 ms we compute 3 spectral peak there for 3 LPC analysis. + // LPC is computed over 15 ms of windowed audio. For every 10 ms sub-frame + // we need 5 ms of past signal to create the input of LPC analysis. + enum : size_t { + kNumPastSignalSamples = static_cast(kSampleRateHz / 200) + }; + + // TODO(turajs): maybe defining this at a higher level (maybe enum) so that + // all the code recognize it as "no-error." + enum : int { kNoError = 0 }; + + enum : size_t { kNum10msSubframes = 3 }; + enum : size_t { + kNumSubframeSamples = static_cast(kSampleRateHz / 100) + }; + enum : size_t { + // Samples in 30 ms @ given sampling rate. + kNumSamplesToProcess = kNum10msSubframes * kNumSubframeSamples + }; + enum : size_t { + kBufferLength = kNumPastSignalSamples + kNumSamplesToProcess + }; + enum : size_t { kIpLength = kDftSize >> 1 }; + enum : size_t { kWLength = kDftSize >> 1 }; + enum : size_t { kLpcOrder = 16 }; + + size_t ip_[kIpLength]; + float w_fft_[kWLength]; + + // A buffer of 5 ms (past audio) + 30 ms (one iSAC frame ). 
+ float audio_buffer_[kBufferLength]; + size_t num_buffer_samples_; + + double log_old_gain_; + double old_lag_; + + std::unique_ptr pitch_analysis_handle_; + std::unique_ptr pre_filter_handle_; + std::unique_ptr high_pass_filter_; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_VAD_VAD_AUDIO_PROC_H_ diff --git a/modules/audio_processing/vad/vad_audio_proc_internal.h b/modules/audio_processing/vad/vad_audio_proc_internal.h new file mode 100644 index 0000000..915524f --- /dev/null +++ b/modules/audio_processing/vad/vad_audio_proc_internal.h @@ -0,0 +1,79 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_VAD_VAD_AUDIO_PROC_INTERNAL_H_ +#define MODULES_AUDIO_PROCESSING_VAD_VAD_AUDIO_PROC_INTERNAL_H_ + +namespace webrtc { + +// These values should match MATLAB counterparts for unit-tests to pass. 
+static const double kCorrWeight[] = { + 1.000000, 0.985000, 0.970225, 0.955672, 0.941337, 0.927217, + 0.913308, 0.899609, 0.886115, 0.872823, 0.859730, 0.846834, + 0.834132, 0.821620, 0.809296, 0.797156, 0.785199}; + +static const double kLpcAnalWin[] = { + 0.00000000, 0.01314436, 0.02628645, 0.03942400, 0.05255473, 0.06567639, + 0.07878670, 0.09188339, 0.10496421, 0.11802689, 0.13106918, 0.14408883, + 0.15708358, 0.17005118, 0.18298941, 0.19589602, 0.20876878, 0.22160547, + 0.23440387, 0.24716177, 0.25987696, 0.27254725, 0.28517045, 0.29774438, + 0.31026687, 0.32273574, 0.33514885, 0.34750406, 0.35979922, 0.37203222, + 0.38420093, 0.39630327, 0.40833713, 0.42030043, 0.43219112, 0.44400713, + 0.45574642, 0.46740697, 0.47898676, 0.49048379, 0.50189608, 0.51322164, + 0.52445853, 0.53560481, 0.54665854, 0.55761782, 0.56848075, 0.57924546, + 0.58991008, 0.60047278, 0.61093173, 0.62128512, 0.63153117, 0.64166810, + 0.65169416, 0.66160761, 0.67140676, 0.68108990, 0.69065536, 0.70010148, + 0.70942664, 0.71862923, 0.72770765, 0.73666033, 0.74548573, 0.75418233, + 0.76274862, 0.77118312, 0.77948437, 0.78765094, 0.79568142, 0.80357442, + 0.81132858, 0.81894256, 0.82641504, 0.83374472, 0.84093036, 0.84797069, + 0.85486451, 0.86161063, 0.86820787, 0.87465511, 0.88095122, 0.88709512, + 0.89308574, 0.89892206, 0.90460306, 0.91012776, 0.91549520, 0.92070447, + 0.92575465, 0.93064488, 0.93537432, 0.93994213, 0.94434755, 0.94858979, + 0.95266814, 0.95658189, 0.96033035, 0.96391289, 0.96732888, 0.97057773, + 0.97365889, 0.97657181, 0.97931600, 0.98189099, 0.98429632, 0.98653158, + 0.98859639, 0.99049038, 0.99221324, 0.99376466, 0.99514438, 0.99635215, + 0.99738778, 0.99825107, 0.99894188, 0.99946010, 0.99980562, 0.99997840, + 0.99997840, 0.99980562, 0.99946010, 0.99894188, 0.99825107, 0.99738778, + 0.99635215, 0.99514438, 0.99376466, 0.99221324, 0.99049038, 0.98859639, + 0.98653158, 0.98429632, 0.98189099, 0.97931600, 0.97657181, 0.97365889, + 0.97057773, 0.96732888, 0.96391289, 
0.96033035, 0.95658189, 0.95266814, + 0.94858979, 0.94434755, 0.93994213, 0.93537432, 0.93064488, 0.92575465, + 0.92070447, 0.91549520, 0.91012776, 0.90460306, 0.89892206, 0.89308574, + 0.88709512, 0.88095122, 0.87465511, 0.86820787, 0.86161063, 0.85486451, + 0.84797069, 0.84093036, 0.83374472, 0.82641504, 0.81894256, 0.81132858, + 0.80357442, 0.79568142, 0.78765094, 0.77948437, 0.77118312, 0.76274862, + 0.75418233, 0.74548573, 0.73666033, 0.72770765, 0.71862923, 0.70942664, + 0.70010148, 0.69065536, 0.68108990, 0.67140676, 0.66160761, 0.65169416, + 0.64166810, 0.63153117, 0.62128512, 0.61093173, 0.60047278, 0.58991008, + 0.57924546, 0.56848075, 0.55761782, 0.54665854, 0.53560481, 0.52445853, + 0.51322164, 0.50189608, 0.49048379, 0.47898676, 0.46740697, 0.45574642, + 0.44400713, 0.43219112, 0.42030043, 0.40833713, 0.39630327, 0.38420093, + 0.37203222, 0.35979922, 0.34750406, 0.33514885, 0.32273574, 0.31026687, + 0.29774438, 0.28517045, 0.27254725, 0.25987696, 0.24716177, 0.23440387, + 0.22160547, 0.20876878, 0.19589602, 0.18298941, 0.17005118, 0.15708358, + 0.14408883, 0.13106918, 0.11802689, 0.10496421, 0.09188339, 0.07878670, + 0.06567639, 0.05255473, 0.03942400, 0.02628645, 0.01314436, 0.00000000}; + +static const size_t kFilterOrder = 2; +static const float kCoeffNumerator[kFilterOrder + 1] = {0.974827f, -1.949650f, + 0.974827f}; +static const float kCoeffDenominator[kFilterOrder + 1] = {1.0f, -1.971999f, + 0.972457f}; + +static_assert(kFilterOrder + 1 == + sizeof(kCoeffNumerator) / sizeof(kCoeffNumerator[0]), + "numerator coefficients incorrect size"); +static_assert(kFilterOrder + 1 == + sizeof(kCoeffDenominator) / sizeof(kCoeffDenominator[0]), + "denominator coefficients incorrect size"); + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_VAD_VAD_AUDIO_PROCESSING_H_ diff --git a/modules/audio_processing/vad/vad_audio_proc_unittest.cc b/modules/audio_processing/vad/vad_audio_proc_unittest.cc new file mode 100644 index 0000000..0afed84 --- 
/dev/null +++ b/modules/audio_processing/vad/vad_audio_proc_unittest.cc @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// We don't test the value of pitch gain and lags as they are created by iSAC +// routines. However, interpolation of pitch-gain and lags is in a separate +// class and has its own unit-test. + +#include "modules/audio_processing/vad/vad_audio_proc.h" + +#include +#include + +#include + +#include "modules/audio_processing/vad/common.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" + +namespace webrtc { + +TEST(AudioProcessingTest, DISABLED_ComputingFirstSpectralPeak) { + VadAudioProc audioproc; + + std::string peak_file_name = + test::ResourcePath("audio_processing/agc/agc_spectral_peak", "dat"); + FILE* peak_file = fopen(peak_file_name.c_str(), "rb"); + ASSERT_TRUE(peak_file != NULL); + + std::string pcm_file_name = + test::ResourcePath("audio_processing/agc/agc_audio", "pcm"); + FILE* pcm_file = fopen(pcm_file_name.c_str(), "rb"); + ASSERT_TRUE(pcm_file != NULL); + + // Read 10 ms audio in each iteration. + const size_t kDataLength = kLength10Ms; + int16_t data[kDataLength] = {0}; + AudioFeatures features; + double sp[kMaxNumFrames]; + while (fread(data, sizeof(int16_t), kDataLength, pcm_file) == kDataLength) { + audioproc.ExtractFeatures(data, kDataLength, &features); + if (features.num_frames > 0) { + ASSERT_LT(features.num_frames, kMaxNumFrames); + // Read reference values. 
+ const size_t num_frames = features.num_frames; + ASSERT_EQ(num_frames, fread(sp, sizeof(sp[0]), num_frames, peak_file)); + for (size_t n = 0; n < features.num_frames; n++) + EXPECT_NEAR(features.spectral_peak[n], sp[n], 3); + } + } + + fclose(peak_file); + fclose(pcm_file); +} + +} // namespace webrtc diff --git a/modules/audio_processing/vad/vad_circular_buffer.cc b/modules/audio_processing/vad/vad_circular_buffer.cc new file mode 100644 index 0000000..31f14d7 --- /dev/null +++ b/modules/audio_processing/vad/vad_circular_buffer.cc @@ -0,0 +1,135 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/vad/vad_circular_buffer.h" + +#include + +namespace webrtc { + +VadCircularBuffer::VadCircularBuffer(int buffer_size) + : buffer_(new double[buffer_size]), + is_full_(false), + index_(0), + buffer_size_(buffer_size), + sum_(0) {} + +VadCircularBuffer::~VadCircularBuffer() {} + +void VadCircularBuffer::Reset() { + is_full_ = false; + index_ = 0; + sum_ = 0; +} + +VadCircularBuffer* VadCircularBuffer::Create(int buffer_size) { + if (buffer_size <= 0) + return NULL; + return new VadCircularBuffer(buffer_size); +} + +double VadCircularBuffer::Oldest() const { + if (!is_full_) + return buffer_[0]; + else + return buffer_[index_]; +} + +double VadCircularBuffer::Mean() { + double m; + if (is_full_) { + m = sum_ / buffer_size_; + } else { + if (index_ > 0) + m = sum_ / index_; + else + m = 0; + } + return m; +} + +void VadCircularBuffer::Insert(double value) { + if (is_full_) { + sum_ -= buffer_[index_]; + } + sum_ += value; + buffer_[index_] = value; + index_++; + if (index_ >= 
buffer_size_) { + is_full_ = true; + index_ = 0; + } +} +int VadCircularBuffer::BufferLevel() { + if (is_full_) + return buffer_size_; + return index_; +} + +int VadCircularBuffer::Get(int index, double* value) const { + int err = ConvertToLinearIndex(&index); + if (err < 0) + return -1; + *value = buffer_[index]; + return 0; +} + +int VadCircularBuffer::Set(int index, double value) { + int err = ConvertToLinearIndex(&index); + if (err < 0) + return -1; + + sum_ -= buffer_[index]; + buffer_[index] = value; + sum_ += value; + return 0; +} + +int VadCircularBuffer::ConvertToLinearIndex(int* index) const { + if (*index < 0 || *index >= buffer_size_) + return -1; + + if (!is_full_ && *index >= index_) + return -1; + + *index = index_ - 1 - *index; + if (*index < 0) + *index += buffer_size_; + return 0; +} + +int VadCircularBuffer::RemoveTransient(int width_threshold, + double val_threshold) { + if (!is_full_ && index_ < width_threshold + 2) + return 0; + + int index_1 = 0; + int index_2 = width_threshold + 1; + double v = 0; + if (Get(index_1, &v) < 0) + return -1; + if (v < val_threshold) { + Set(index_1, 0); + int index; + for (index = index_2; index > index_1; index--) { + if (Get(index, &v) < 0) + return -1; + if (v < val_threshold) + break; + } + for (; index > index_1; index--) { + if (Set(index, 0.0) < 0) + return -1; + } + } + return 0; +} + +} // namespace webrtc diff --git a/modules/audio_processing/vad/vad_circular_buffer.h b/modules/audio_processing/vad/vad_circular_buffer.h new file mode 100644 index 0000000..46b03d4 --- /dev/null +++ b/modules/audio_processing/vad/vad_circular_buffer.h @@ -0,0 +1,69 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_VAD_VAD_CIRCULAR_BUFFER_H_ +#define MODULES_AUDIO_PROCESSING_VAD_VAD_CIRCULAR_BUFFER_H_ + +#include + +namespace webrtc { + +// A circular buffer tailored to the need of this project. It stores last +// K samples of the input, and keeps track of the mean of the last samples. +// +// It is used in class "PitchBasedActivity" to keep track of posterior +// probabilities in the past few seconds. The posterior probabilities are used +// to recursively update prior probabilities. +class VadCircularBuffer { + public: + static VadCircularBuffer* Create(int buffer_size); + ~VadCircularBuffer(); + + // If buffer is wrapped around. + bool is_full() const { return is_full_; } + // Get the oldest entry in the buffer. + double Oldest() const; + // Insert new value into the buffer. + void Insert(double value); + // Reset buffer, forget the past, start fresh. + void Reset(); + + // The mean value of the elements in the buffer. The return value is zero if + // buffer is empty, i.e. no value is inserted. + double Mean(); + // Remove transients. If the values exceed |val_threshold| for a period + // shorter then or equal to |width_threshold|, then that period is considered + // transient and set to zero. + int RemoveTransient(int width_threshold, double val_threshold); + + private: + explicit VadCircularBuffer(int buffer_size); + // Get previous values. |index = 0| corresponds to the most recent + // insertion. |index = 1| is the one before the most recent insertion, and + // so on. + int Get(int index, double* value) const; + // Set a given position to |value|. |index| is interpreted as above. + int Set(int index, double value); + // Return the number of valid elements in the buffer. + int BufferLevel(); + + // Convert an index with the interpretation as get() method to the + // corresponding linear index. 
+ int ConvertToLinearIndex(int* index) const; + + std::unique_ptr buffer_; + bool is_full_; + int index_; + int buffer_size_; + double sum_; +}; + +} // namespace webrtc +#endif // MODULES_AUDIO_PROCESSING_VAD_VAD_CIRCULAR_BUFFER_H_ diff --git a/modules/audio_processing/vad/vad_circular_buffer_unittest.cc b/modules/audio_processing/vad/vad_circular_buffer_unittest.cc new file mode 100644 index 0000000..efbd70d --- /dev/null +++ b/modules/audio_processing/vad/vad_circular_buffer_unittest.cc @@ -0,0 +1,134 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/vad/vad_circular_buffer.h" + +#include + +#include + +#include "test/gtest.h" + +namespace webrtc { + +static const int kWidthThreshold = 7; +static const double kValThreshold = 1.0; +static const int kLongBuffSize = 100; +static const int kShortBuffSize = 10; + +static void InsertSequentially(int k, VadCircularBuffer* circular_buffer) { + double mean_val; + for (int n = 1; n <= k; n++) { + EXPECT_TRUE(!circular_buffer->is_full()); + circular_buffer->Insert(n); + mean_val = circular_buffer->Mean(); + EXPECT_EQ((n + 1.0) / 2., mean_val); + } +} + +static void Insert(double value, + int num_insertion, + VadCircularBuffer* circular_buffer) { + for (int n = 0; n < num_insertion; n++) + circular_buffer->Insert(value); +} + +static void InsertZeros(int num_zeros, VadCircularBuffer* circular_buffer) { + Insert(0.0, num_zeros, circular_buffer); +} + +TEST(VadCircularBufferTest, GeneralTest) { + std::unique_ptr circular_buffer( + VadCircularBuffer::Create(kShortBuffSize)); + double mean_val; + + // Mean should return zero if 
nothing is inserted. + mean_val = circular_buffer->Mean(); + EXPECT_DOUBLE_EQ(0.0, mean_val); + InsertSequentially(kShortBuffSize, circular_buffer.get()); + + // Should be full. + EXPECT_TRUE(circular_buffer->is_full()); + // Correct update after being full. + for (int n = 1; n < kShortBuffSize; n++) { + circular_buffer->Insert(n); + mean_val = circular_buffer->Mean(); + EXPECT_DOUBLE_EQ((kShortBuffSize + 1.) / 2., mean_val); + EXPECT_TRUE(circular_buffer->is_full()); + } + + // Check reset. This should be like starting fresh. + circular_buffer->Reset(); + mean_val = circular_buffer->Mean(); + EXPECT_DOUBLE_EQ(0, mean_val); + InsertSequentially(kShortBuffSize, circular_buffer.get()); + EXPECT_TRUE(circular_buffer->is_full()); +} + +TEST(VadCircularBufferTest, TransientsRemoval) { + std::unique_ptr circular_buffer( + VadCircularBuffer::Create(kLongBuffSize)); + // Let the first transient be in wrap-around. + InsertZeros(kLongBuffSize - kWidthThreshold / 2, circular_buffer.get()); + + double push_val = kValThreshold; + double mean_val; + for (int k = kWidthThreshold; k >= 1; k--) { + Insert(push_val, k, circular_buffer.get()); + circular_buffer->Insert(0); + mean_val = circular_buffer->Mean(); + EXPECT_DOUBLE_EQ(k * push_val / kLongBuffSize, mean_val); + circular_buffer->RemoveTransient(kWidthThreshold, kValThreshold); + mean_val = circular_buffer->Mean(); + EXPECT_DOUBLE_EQ(0, mean_val); + } +} + +TEST(VadCircularBufferTest, TransientDetection) { + std::unique_ptr circular_buffer( + VadCircularBuffer::Create(kLongBuffSize)); + // Let the first transient be in wrap-around. + int num_insertion = kLongBuffSize - kWidthThreshold / 2; + InsertZeros(num_insertion, circular_buffer.get()); + + double push_val = 2; + // This is longer than a transient and shouldn't be removed. 
+ int num_non_zero_elements = kWidthThreshold + 1; + Insert(push_val, num_non_zero_elements, circular_buffer.get()); + + double mean_val = circular_buffer->Mean(); + EXPECT_DOUBLE_EQ(num_non_zero_elements * push_val / kLongBuffSize, mean_val); + circular_buffer->Insert(0); + EXPECT_EQ(0, + circular_buffer->RemoveTransient(kWidthThreshold, kValThreshold)); + mean_val = circular_buffer->Mean(); + EXPECT_DOUBLE_EQ(num_non_zero_elements * push_val / kLongBuffSize, mean_val); + + // A transient right after a non-transient, should be removed and mean is + // not changed. + num_insertion = 3; + Insert(push_val, num_insertion, circular_buffer.get()); + circular_buffer->Insert(0); + EXPECT_EQ(0, + circular_buffer->RemoveTransient(kWidthThreshold, kValThreshold)); + mean_val = circular_buffer->Mean(); + EXPECT_DOUBLE_EQ(num_non_zero_elements * push_val / kLongBuffSize, mean_val); + + // Last input is larger than threshold, although the sequence is short but + // it shouldn't be considered transient. + Insert(push_val, num_insertion, circular_buffer.get()); + num_non_zero_elements += num_insertion; + EXPECT_EQ(0, + circular_buffer->RemoveTransient(kWidthThreshold, kValThreshold)); + mean_val = circular_buffer->Mean(); + EXPECT_DOUBLE_EQ(num_non_zero_elements * push_val / kLongBuffSize, mean_val); +} + +} // namespace webrtc diff --git a/modules/audio_processing/vad/voice_activity_detector.cc b/modules/audio_processing/vad/voice_activity_detector.cc new file mode 100644 index 0000000..f0d34c6 --- /dev/null +++ b/modules/audio_processing/vad/voice_activity_detector.cc @@ -0,0 +1,84 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/vad/voice_activity_detector.h" + +#include + +#include "rtc_base/checks.h" + +namespace webrtc { +namespace { + +const size_t kNumChannels = 1; + +const double kDefaultVoiceValue = 1.0; +const double kNeutralProbability = 0.5; +const double kLowProbability = 0.01; + +} // namespace + +VoiceActivityDetector::VoiceActivityDetector() + : last_voice_probability_(kDefaultVoiceValue), + standalone_vad_(StandaloneVad::Create()) {} + +VoiceActivityDetector::~VoiceActivityDetector() = default; + +// Because ISAC has a different chunk length, it updates +// |chunkwise_voice_probabilities_| and |chunkwise_rms_| when there is new data. +// Otherwise it clears them. +void VoiceActivityDetector::ProcessChunk(const int16_t* audio, + size_t length, + int sample_rate_hz) { + RTC_DCHECK_EQ(length, sample_rate_hz / 100); + // Resample to the required rate. + const int16_t* resampled_ptr = audio; + if (sample_rate_hz != kSampleRateHz) { + RTC_CHECK_EQ( + resampler_.ResetIfNeeded(sample_rate_hz, kSampleRateHz, kNumChannels), + 0); + resampler_.Push(audio, length, resampled_, kLength10Ms, length); + resampled_ptr = resampled_; + } + RTC_DCHECK_EQ(length, kLength10Ms); + + // Each chunk needs to be passed into |standalone_vad_|, because internally it + // buffers the audio and processes it all at once when GetActivity() is + // called. + RTC_CHECK_EQ(standalone_vad_->AddAudio(resampled_ptr, length), 0); + + audio_processing_.ExtractFeatures(resampled_ptr, length, &features_); + + chunkwise_voice_probabilities_.resize(features_.num_frames); + chunkwise_rms_.resize(features_.num_frames); + std::copy(features_.rms, features_.rms + chunkwise_rms_.size(), + chunkwise_rms_.begin()); + if (features_.num_frames > 0) { + if (features_.silence) { + // The other features are invalid, so set the voice probabilities to an + // arbitrary low value. 
+ std::fill(chunkwise_voice_probabilities_.begin(), + chunkwise_voice_probabilities_.end(), kLowProbability); + } else { + std::fill(chunkwise_voice_probabilities_.begin(), + chunkwise_voice_probabilities_.end(), kNeutralProbability); + RTC_CHECK_GE( + standalone_vad_->GetActivity(&chunkwise_voice_probabilities_[0], + chunkwise_voice_probabilities_.size()), + 0); + RTC_CHECK_GE(pitch_based_vad_.VoicingProbability( + features_, &chunkwise_voice_probabilities_[0]), + 0); + } + last_voice_probability_ = chunkwise_voice_probabilities_.back(); + } +} + +} // namespace webrtc diff --git a/modules/audio_processing/vad/voice_activity_detector.h b/modules/audio_processing/vad/voice_activity_detector.h new file mode 100644 index 0000000..a19883d --- /dev/null +++ b/modules/audio_processing/vad/voice_activity_detector.h @@ -0,0 +1,72 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_VAD_VOICE_ACTIVITY_DETECTOR_H_ +#define MODULES_AUDIO_PROCESSING_VAD_VOICE_ACTIVITY_DETECTOR_H_ + +#include +#include + +#include +#include + +#include "common_audio/resampler/include/resampler.h" +#include "modules/audio_processing/vad/common.h" +#include "modules/audio_processing/vad/pitch_based_vad.h" +#include "modules/audio_processing/vad/standalone_vad.h" +#include "modules/audio_processing/vad/vad_audio_proc.h" + +namespace webrtc { + +// A Voice Activity Detector (VAD) that combines the voice probability from the +// StandaloneVad and PitchBasedVad to get a more robust estimation. 
+class VoiceActivityDetector { + public: + VoiceActivityDetector(); + ~VoiceActivityDetector(); + + // Processes each audio chunk and estimates the voice probability. + void ProcessChunk(const int16_t* audio, size_t length, int sample_rate_hz); + + // Returns a vector of voice probabilities for each chunk. It can be empty for + // some chunks, but it catches up afterwards returning multiple values at + // once. + const std::vector& chunkwise_voice_probabilities() const { + return chunkwise_voice_probabilities_; + } + + // Returns a vector of RMS values for each chunk. It has the same length as + // chunkwise_voice_probabilities(). + const std::vector& chunkwise_rms() const { return chunkwise_rms_; } + + // Returns the last voice probability, regardless of the internal + // implementation, although it has a few chunks of delay. + float last_voice_probability() const { return last_voice_probability_; } + + private: + // TODO(aluebs): Change these to float. + std::vector chunkwise_voice_probabilities_; + std::vector chunkwise_rms_; + + float last_voice_probability_; + + Resampler resampler_; + VadAudioProc audio_processing_; + + std::unique_ptr standalone_vad_; + PitchBasedVad pitch_based_vad_; + + int16_t resampled_[kLength10Ms]; + AudioFeatures features_; +}; + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_VAD_VOICE_ACTIVITY_DETECTOR_H_ diff --git a/modules/audio_processing/vad/voice_activity_detector_unittest.cc b/modules/audio_processing/vad/voice_activity_detector_unittest.cc new file mode 100644 index 0000000..3214bd9 --- /dev/null +++ b/modules/audio_processing/vad/voice_activity_detector_unittest.cc @@ -0,0 +1,168 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/vad/voice_activity_detector.h" + +#include +#include + +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" + +namespace webrtc { +namespace { + +const int kStartTimeSec = 16; +const float kMeanSpeechProbability = 0.3f; +const float kMaxNoiseProbability = 0.1f; +const size_t kNumChunks = 300u; +const size_t kNumChunksPerIsacBlock = 3; + +void GenerateNoise(std::vector* data) { + for (size_t i = 0; i < data->size(); ++i) { + // std::rand returns between 0 and RAND_MAX, but this will work because it + // wraps into some random place. + (*data)[i] = std::rand(); + } +} + +} // namespace + +TEST(VoiceActivityDetectorTest, ConstructorSetsDefaultValues) { + const float kDefaultVoiceValue = 1.f; + + VoiceActivityDetector vad; + + std::vector p = vad.chunkwise_voice_probabilities(); + std::vector rms = vad.chunkwise_rms(); + + EXPECT_EQ(p.size(), 0u); + EXPECT_EQ(rms.size(), 0u); + + EXPECT_FLOAT_EQ(vad.last_voice_probability(), kDefaultVoiceValue); +} + +TEST(VoiceActivityDetectorTest, Speech16kHzHasHighVoiceProbabilities) { + const int kSampleRateHz = 16000; + const int kLength10Ms = kSampleRateHz / 100; + + VoiceActivityDetector vad; + + std::vector data(kLength10Ms); + float mean_probability = 0.f; + + FILE* pcm_file = + fopen(test::ResourcePath("audio_processing/transient/audio16kHz", "pcm") + .c_str(), + "rb"); + ASSERT_TRUE(pcm_file != nullptr); + // The silences in the file are skipped to get a more robust voice probability + // for speech. 
+ ASSERT_EQ(fseek(pcm_file, kStartTimeSec * kSampleRateHz * sizeof(data[0]), + SEEK_SET), + 0); + + size_t num_chunks = 0; + while (fread(&data[0], sizeof(data[0]), data.size(), pcm_file) == + data.size()) { + vad.ProcessChunk(&data[0], data.size(), kSampleRateHz); + + mean_probability += vad.last_voice_probability(); + + ++num_chunks; + } + + mean_probability /= num_chunks; + + EXPECT_GT(mean_probability, kMeanSpeechProbability); +} + +TEST(VoiceActivityDetectorTest, Speech32kHzHasHighVoiceProbabilities) { + const int kSampleRateHz = 32000; + const int kLength10Ms = kSampleRateHz / 100; + + VoiceActivityDetector vad; + + std::vector data(kLength10Ms); + float mean_probability = 0.f; + + FILE* pcm_file = + fopen(test::ResourcePath("audio_processing/transient/audio32kHz", "pcm") + .c_str(), + "rb"); + ASSERT_TRUE(pcm_file != nullptr); + // The silences in the file are skipped to get a more robust voice probability + // for speech. + ASSERT_EQ(fseek(pcm_file, kStartTimeSec * kSampleRateHz * sizeof(data[0]), + SEEK_SET), + 0); + + size_t num_chunks = 0; + while (fread(&data[0], sizeof(data[0]), data.size(), pcm_file) == + data.size()) { + vad.ProcessChunk(&data[0], data.size(), kSampleRateHz); + + mean_probability += vad.last_voice_probability(); + + ++num_chunks; + } + + mean_probability /= num_chunks; + + EXPECT_GT(mean_probability, kMeanSpeechProbability); +} + +TEST(VoiceActivityDetectorTest, Noise16kHzHasLowVoiceProbabilities) { + VoiceActivityDetector vad; + + std::vector data(kLength10Ms); + float max_probability = 0.f; + + std::srand(42); + + for (size_t i = 0; i < kNumChunks; ++i) { + GenerateNoise(&data); + + vad.ProcessChunk(&data[0], data.size(), kSampleRateHz); + + // Before the |vad has enough data to process an ISAC block it will return + // the default value, 1.f, which would ruin the |max_probability| value. 
+ if (i > kNumChunksPerIsacBlock) { + max_probability = std::max(max_probability, vad.last_voice_probability()); + } + } + + EXPECT_LT(max_probability, kMaxNoiseProbability); +} + +TEST(VoiceActivityDetectorTest, Noise32kHzHasLowVoiceProbabilities) { + VoiceActivityDetector vad; + + std::vector data(2 * kLength10Ms); + float max_probability = 0.f; + + std::srand(42); + + for (size_t i = 0; i < kNumChunks; ++i) { + GenerateNoise(&data); + + vad.ProcessChunk(&data[0], data.size(), 2 * kSampleRateHz); + + // Before the |vad has enough data to process an ISAC block it will return + // the default value, 1.f, which would ruin the |max_probability| value. + if (i > kNumChunksPerIsacBlock) { + max_probability = std::max(max_probability, vad.last_voice_probability()); + } + } + + EXPECT_LT(max_probability, kMaxNoiseProbability); +} + +} // namespace webrtc diff --git a/modules/audio_processing/vad/voice_gmm_tables.h b/modules/audio_processing/vad/voice_gmm_tables.h new file mode 100644 index 0000000..ef4ad7e --- /dev/null +++ b/modules/audio_processing/vad/voice_gmm_tables.h @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +// GMM tables for active segments. Generated by MakeGmmTables.m. 
+ +#ifndef MODULES_AUDIO_PROCESSING_VAD_VOICE_GMM_TABLES_H_ +#define MODULES_AUDIO_PROCESSING_VAD_VOICE_GMM_TABLES_H_ + +static const int kVoiceGmmNumMixtures = 12; +static const int kVoiceGmmDim = 3; + +static const double + kVoiceGmmCovarInverse[kVoiceGmmNumMixtures][kVoiceGmmDim][kVoiceGmmDim] = { + {{1.83673825579513e+00, -8.09791637570095e-04, 4.60106414365986e-03}, + {-8.09791637570095e-04, 8.89351738394608e-04, -9.80188953277734e-04}, + {4.60106414365986e-03, -9.80188953277734e-04, 1.38706060206582e-03}}, + {{6.76228912850703e+01, -1.98893120119660e-02, -3.53548357253551e-03}, + {-1.98893120119660e-02, 3.96216858500530e-05, -4.08492938394097e-05}, + {-3.53548357253551e-03, -4.08492938394097e-05, 9.31864352856416e-04}}, + {{9.98612435944558e+00, -5.27880954316893e-03, -6.30342541619017e-03}, + {-5.27880954316893e-03, 4.54359480225226e-05, 6.30804591626044e-05}, + {-6.30342541619017e-03, 6.30804591626044e-05, 5.36466441382942e-04}}, + {{3.39917474216349e+01, -1.56213579433191e-03, -4.01459014990225e-02}, + {-1.56213579433191e-03, 6.40415424897724e-05, 6.20076342427833e-05}, + {-4.01459014990225e-02, 6.20076342427833e-05, 3.51199070103063e-03}}, + {{1.34545062271428e+01, -7.94513610147144e-03, -5.34401019341728e-02}, + {-7.94513610147144e-03, 1.16511820098649e-04, 4.66063702069293e-05}, + {-5.34401019341728e-02, 4.66063702069293e-05, 2.72354323774163e-03}}, + {{1.08557844314806e+02, -1.54885805673668e-02, -1.88029692674851e-02}, + {-1.54885805673668e-02, 1.16404042786406e-04, 6.45579292702802e-06}, + {-1.88029692674851e-02, 6.45579292702802e-06, 4.32330478391416e-04}}, + {{8.22940066541450e+01, -1.15903110231303e-02, -4.92166764865343e-02}, + {-1.15903110231303e-02, 7.42510742165261e-05, 3.73007314191290e-06}, + {-4.92166764865343e-02, 3.73007314191290e-06, 3.64005221593244e-03}}, + {{2.31133605685660e+00, -7.83261568950254e-04, 7.45744012346313e-04}, + {-7.83261568950254e-04, 1.29460648214142e-05, -2.22774455093730e-06}, + {7.45744012346313e-04, 
-2.22774455093730e-06, 1.05117294093010e-04}}, + {{3.78767849189611e+02, 1.57759761011568e-03, -2.08551217988774e-02}, + {1.57759761011568e-03, 4.76066236886865e-05, -2.33977412299324e-05}, + {-2.08551217988774e-02, -2.33977412299324e-05, 5.24261005371196e-04}}, + {{6.98580096506135e-01, -5.13850255217378e-04, -4.01124551717056e-04}, + {-5.13850255217378e-04, 1.40501021984840e-06, -2.09496928716569e-06}, + {-4.01124551717056e-04, -2.09496928716569e-06, 2.82879357740037e-04}}, + {{2.62770945162399e+00, -2.31825753241430e-03, -5.30447217466318e-03}, + {-2.31825753241430e-03, 4.59108572227649e-05, 7.67631886355405e-05}, + {-5.30447217466318e-03, 7.67631886355405e-05, 2.28521601674098e-03}}, + {{1.89940391362152e+02, -4.23280856852379e-03, -2.70608873541399e-02}, + {-4.23280856852379e-03, 6.77547582742563e-05, 2.69154203800467e-05}, + {-2.70608873541399e-02, 2.69154203800467e-05, 3.88574543373470e-03}}}; + +static const double kVoiceGmmMean[kVoiceGmmNumMixtures][kVoiceGmmDim] = { + {-2.15020241646536e+00, 4.97079062999877e+02, 4.77078119504505e+02}, + {-8.92097680029190e-01, 5.92064964199921e+02, 1.81045145941059e+02}, + {-1.29435784144398e+00, 4.98450293410611e+02, 1.71991263804064e+02}, + {-1.03925228397884e+00, 4.99511274321571e+02, 1.05838336539105e+02}, + {-1.29229047206129e+00, 4.15026762566707e+02, 1.12861119017125e+02}, + {-7.88748114599810e-01, 4.48739336688113e+02, 1.89784216956337e+02}, + {-8.77777402332642e-01, 4.86620285054533e+02, 1.13477708016491e+02}, + {-2.06465957063057e+00, 6.33385049870607e+02, 2.32758546796149e+02}, + {-6.98893789231685e-01, 5.93622051503385e+02, 1.92536982473203e+02}, + {-2.55901217508894e+00, 1.55914919756205e+03, 1.39769980835570e+02}, + {-1.92070024165837e+00, 4.87983940444185e+02, 1.02745468128289e+02}, + {-7.29187507662854e-01, 5.22717685022855e+02, 1.16377942283991e+02}}; + +static const double kVoiceGmmWeights[kVoiceGmmNumMixtures] = { + -1.39789694361035e+01, -1.19527720202104e+01, -1.32396317929055e+01, + 
-1.09436815209238e+01, -1.13440027478149e+01, -1.12200721834504e+01, + -1.02537324043693e+01, -1.60789861938302e+01, -1.03394494048344e+01, + -1.83207938586818e+01, -1.31186044948288e+01, -9.52479998673554e+00}; +#endif // MODULES_AUDIO_PROCESSING_VAD_VOICE_GMM_TABLES_H_ diff --git a/modules/audio_processing/voice_detection.cc b/modules/audio_processing/voice_detection.cc new file mode 100644 index 0000000..e6c92ae --- /dev/null +++ b/modules/audio_processing/voice_detection.cc @@ -0,0 +1,92 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/voice_detection.h" + +#include "common_audio/vad/include/webrtc_vad.h" +#include "modules/audio_processing/audio_buffer.h" +#include "rtc_base/checks.h" + +namespace webrtc { +class VoiceDetection::Vad { + public: + Vad() { + state_ = WebRtcVad_Create(); + RTC_CHECK(state_); + int error = WebRtcVad_Init(state_); + RTC_DCHECK_EQ(0, error); + } + ~Vad() { WebRtcVad_Free(state_); } + + Vad(Vad&) = delete; + Vad& operator=(Vad&) = delete; + + VadInst* state() { return state_; } + + private: + VadInst* state_ = nullptr; +}; + +VoiceDetection::VoiceDetection(int sample_rate_hz, Likelihood likelihood) + : sample_rate_hz_(sample_rate_hz), + frame_size_samples_(static_cast(sample_rate_hz_ / 100)), + likelihood_(likelihood), + vad_(new Vad()) { + int mode = 2; + switch (likelihood) { + case VoiceDetection::kVeryLowLikelihood: + mode = 3; + break; + case VoiceDetection::kLowLikelihood: + mode = 2; + break; + case VoiceDetection::kModerateLikelihood: + mode = 1; + break; + case VoiceDetection::kHighLikelihood: + mode = 0; + break; + default: 
+ RTC_NOTREACHED(); + break; + } + int error = WebRtcVad_set_mode(vad_->state(), mode); + RTC_DCHECK_EQ(0, error); +} + +VoiceDetection::~VoiceDetection() {} + +bool VoiceDetection::ProcessCaptureAudio(AudioBuffer* audio) { + RTC_DCHECK_GE(AudioBuffer::kMaxSplitFrameLength, + audio->num_frames_per_band()); + std::array mixed_low_pass_data; + rtc::ArrayView mixed_low_pass(mixed_low_pass_data.data(), + audio->num_frames_per_band()); + if (audio->num_channels() == 1) { + FloatS16ToS16(audio->split_bands_const(0)[kBand0To8kHz], + audio->num_frames_per_band(), mixed_low_pass_data.data()); + } else { + const int num_channels = static_cast(audio->num_channels()); + for (size_t i = 0; i < audio->num_frames_per_band(); ++i) { + int32_t value = + FloatS16ToS16(audio->split_channels_const(kBand0To8kHz)[0][i]); + for (int j = 1; j < num_channels; ++j) { + value += FloatS16ToS16(audio->split_channels_const(kBand0To8kHz)[j][i]); + } + mixed_low_pass_data[i] = value / num_channels; + } + } + + int vad_ret = WebRtcVad_Process(vad_->state(), sample_rate_hz_, + mixed_low_pass.data(), frame_size_samples_); + RTC_DCHECK(vad_ret == 0 || vad_ret == 1); + return vad_ret == 0 ? false : true; +} +} // namespace webrtc diff --git a/modules/audio_processing/voice_detection.h b/modules/audio_processing/voice_detection.h new file mode 100644 index 0000000..79d44e6 --- /dev/null +++ b/modules/audio_processing/voice_detection.h @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#ifndef MODULES_AUDIO_PROCESSING_VOICE_DETECTION_H_
+#define MODULES_AUDIO_PROCESSING_VOICE_DETECTION_H_
+
+#include <stddef.h>
+
+#include <memory>
+
+#include "modules/audio_processing/include/audio_processing.h"
+
+namespace webrtc {
+
+class AudioBuffer;
+
+// The voice activity detection (VAD) component analyzes the stream to
+// determine if voice is present.
+class VoiceDetection {
+ public:
+  // Specifies the likelihood that a frame will be declared to contain voice.
+  // A higher value makes it more likely that speech will not be clipped, at
+  // the expense of more noise being detected as voice.
+  enum Likelihood {
+    kVeryLowLikelihood,
+    kLowLikelihood,
+    kModerateLikelihood,
+    kHighLikelihood
+  };
+
+  VoiceDetection(int sample_rate_hz, Likelihood likelihood);
+  ~VoiceDetection();
+
+  VoiceDetection(VoiceDetection&) = delete;
+  VoiceDetection& operator=(VoiceDetection&) = delete;
+
+  // Returns true if voice is detected in the current frame.
+  bool ProcessCaptureAudio(AudioBuffer* audio);
+
+  Likelihood likelihood() const { return likelihood_; }
+
+ private:
+  class Vad;
+
+  int sample_rate_hz_;
+  size_t frame_size_samples_;
+  Likelihood likelihood_;
+  std::unique_ptr<Vad> vad_;
+};
+} // namespace webrtc
+
+#endif // MODULES_AUDIO_PROCESSING_VOICE_DETECTION_H_
diff --git a/modules/audio_processing/voice_detection_unittest.cc b/modules/audio_processing/voice_detection_unittest.cc
new file mode 100644
index 0000000..9a52fa6
--- /dev/null
+++ b/modules/audio_processing/voice_detection_unittest.cc
@@ -0,0 +1,104 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include <vector>
+
+#include "api/array_view.h"
+#include "modules/audio_processing/audio_buffer.h"
+#include "modules/audio_processing/test/audio_buffer_tools.h"
+#include "modules/audio_processing/test/bitexactness_tools.h"
+#include "modules/audio_processing/voice_detection.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace {
+
+const int kNumFramesToProcess = 1000;
+
+// Process one frame of data and produce the output.
+bool ProcessOneFrame(int sample_rate_hz,
+                     AudioBuffer* audio_buffer,
+                     VoiceDetection* voice_detection) {
+  if (sample_rate_hz > AudioProcessing::kSampleRate16kHz) {
+    audio_buffer->SplitIntoFrequencyBands();
+  }
+
+  return voice_detection->ProcessCaptureAudio(audio_buffer);
+}
+
+// Processes a specified amount of frames, verifies the results and reports
+// any errors.
+void RunBitexactnessTest(int sample_rate_hz,
+                         size_t num_channels,
+                         bool stream_has_voice_reference) {
+  int sample_rate_to_use = std::min(sample_rate_hz, 16000);
+  VoiceDetection voice_detection(sample_rate_to_use,
+                                 VoiceDetection::kLowLikelihood);
+
+  int samples_per_channel = rtc::CheckedDivExact(sample_rate_hz, 100);
+  const StreamConfig capture_config(sample_rate_hz, num_channels, false);
+  AudioBuffer capture_buffer(
+      capture_config.sample_rate_hz(), capture_config.num_channels(),
+      capture_config.sample_rate_hz(), capture_config.num_channels(),
+      capture_config.sample_rate_hz(), capture_config.num_channels());
+  test::InputAudioFile capture_file(
+      test::GetApmCaptureTestVectorFileName(sample_rate_hz));
+  std::vector<float> capture_input(samples_per_channel * num_channels);
+  bool stream_has_voice = false;
+  for (int frame_no = 0; frame_no < kNumFramesToProcess; ++frame_no) {
+    ReadFloatSamplesFromStereoFile(samples_per_channel, num_channels,
+                                   &capture_file, capture_input);
+
+    test::CopyVectorToAudioBuffer(capture_config, capture_input,
+                                  &capture_buffer);
+
+    stream_has_voice =
+        ProcessOneFrame(sample_rate_hz, &capture_buffer, &voice_detection);
+  }
+
+
EXPECT_EQ(stream_has_voice_reference, stream_has_voice); +} + +const bool kStreamHasVoiceReference = true; + +} // namespace + +TEST(VoiceDetectionBitExactnessTest, Mono8kHz) { + RunBitexactnessTest(8000, 1, kStreamHasVoiceReference); +} + +TEST(VoiceDetectionBitExactnessTest, Mono16kHz) { + RunBitexactnessTest(16000, 1, kStreamHasVoiceReference); +} + +TEST(VoiceDetectionBitExactnessTest, Mono32kHz) { + RunBitexactnessTest(32000, 1, kStreamHasVoiceReference); +} + +TEST(VoiceDetectionBitExactnessTest, Mono48kHz) { + RunBitexactnessTest(48000, 1, kStreamHasVoiceReference); +} + +TEST(VoiceDetectionBitExactnessTest, Stereo8kHz) { + RunBitexactnessTest(8000, 2, kStreamHasVoiceReference); +} + +TEST(VoiceDetectionBitExactnessTest, Stereo16kHz) { + RunBitexactnessTest(16000, 2, kStreamHasVoiceReference); +} + +TEST(VoiceDetectionBitExactnessTest, Stereo32kHz) { + RunBitexactnessTest(32000, 2, kStreamHasVoiceReference); +} + +TEST(VoiceDetectionBitExactnessTest, Stereo48kHz) { + RunBitexactnessTest(48000, 2, kStreamHasVoiceReference); +} + +} // namespace webrtc diff --git a/modules/congestion_controller/BUILD.gn b/modules/congestion_controller/BUILD.gn new file mode 100644 index 0000000..231ff5e --- /dev/null +++ b/modules/congestion_controller/BUILD.gn @@ -0,0 +1,59 @@ +# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. 
+ +import("../../webrtc.gni") + +config("bwe_test_logging") { + if (rtc_enable_bwe_test_logging) { + defines = [ "BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=1" ] + } else { + defines = [ "BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0" ] + } +} + +rtc_library("congestion_controller") { + visibility = [ "*" ] + configs += [ ":bwe_test_logging" ] + sources = [ + "include/receive_side_congestion_controller.h", + "receive_side_congestion_controller.cc", + ] + + deps = [ + "..:module_api", + "../../api/transport:field_trial_based_config", + "../../api/transport:network_control", + "../../rtc_base/synchronization:mutex", + "../pacing", + "../remote_bitrate_estimator", + "../rtp_rtcp:rtp_rtcp_format", + ] + + if (!build_with_mozilla) { + deps += [ "../../rtc_base" ] + } +} + +if (rtc_include_tests) { + rtc_library("congestion_controller_unittests") { + testonly = true + + sources = [ "receive_side_congestion_controller_unittest.cc" ] + deps = [ + ":congestion_controller", + "../../system_wrappers", + "../../test:test_support", + "../../test/scenario", + "../pacing", + "goog_cc:estimators", + "goog_cc:goog_cc_unittests", + "pcc:pcc_unittests", + "rtp:congestion_controller_unittests", + ] + } +} diff --git a/modules/congestion_controller/DEPS b/modules/congestion_controller/DEPS new file mode 100644 index 0000000..2ed9952 --- /dev/null +++ b/modules/congestion_controller/DEPS @@ -0,0 +1,5 @@ +include_rules = [ + "+logging/rtc_event_log", + "+system_wrappers", + "+video", +] diff --git a/modules/congestion_controller/OWNERS b/modules/congestion_controller/OWNERS new file mode 100644 index 0000000..3304c67 --- /dev/null +++ b/modules/congestion_controller/OWNERS @@ -0,0 +1,7 @@ +srte@webrtc.org +stefan@webrtc.org +terelius@webrtc.org +crodbro@webrtc.org +philipel@webrtc.org +mflodman@webrtc.org +yinwa@webrtc.org diff --git a/modules/congestion_controller/goog_cc/BUILD.gn b/modules/congestion_controller/goog_cc/BUILD.gn new file mode 100644 index 0000000..52daad2 --- /dev/null +++ 
b/modules/congestion_controller/goog_cc/BUILD.gn @@ -0,0 +1,304 @@ +# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("../../../webrtc.gni") + +config("bwe_test_logging") { + if (rtc_enable_bwe_test_logging) { + defines = [ "BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=1" ] + } else { + defines = [ "BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0" ] + } +} + +rtc_library("goog_cc") { + configs += [ ":bwe_test_logging" ] + sources = [ + "goog_cc_network_control.cc", + "goog_cc_network_control.h", + ] + + deps = [ + ":alr_detector", + ":delay_based_bwe", + ":estimators", + ":loss_based_controller", + ":probe_controller", + ":pushback_controller", + "../..:module_api", + "../../..:webrtc_common", + "../../../api:network_state_predictor_api", + "../../../api/rtc_event_log", + "../../../api/transport:field_trial_based_config", + "../../../api/transport:network_control", + "../../../api/transport:webrtc_key_value_config", + "../../../api/units:data_rate", + "../../../api/units:data_size", + "../../../api/units:time_delta", + "../../../api/units:timestamp", + "../../../logging:rtc_event_bwe", + "../../../logging:rtc_event_pacing", + "../../../rtc_base:checks", + "../../../rtc_base:logging", + "../../../rtc_base:macromagic", + "../../../rtc_base/experiments:alr_experiment", + "../../../rtc_base/experiments:field_trial_parser", + "../../../rtc_base/experiments:rate_control_settings", + "../../../system_wrappers", + "../../remote_bitrate_estimator", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("link_capacity_estimator") { + sources = [ + 
"link_capacity_estimator.cc", + "link_capacity_estimator.h", + ] + deps = [ + "../../../api/units:data_rate", + "../../../rtc_base:safe_minmax", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("pushback_controller") { + sources = [ + "congestion_window_pushback_controller.cc", + "congestion_window_pushback_controller.h", + ] + deps = [ + "../../../api/transport:network_control", + "../../../api/transport:webrtc_key_value_config", + "../../../api/units:data_size", + "../../../rtc_base:checks", + "../../../rtc_base/experiments:rate_control_settings", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("alr_detector") { + sources = [ + "alr_detector.cc", + "alr_detector.h", + ] + deps = [ + "../../../api/rtc_event_log", + "../../../api/transport:field_trial_based_config", + "../../../api/transport:webrtc_key_value_config", + "../../../logging:rtc_event_pacing", + "../../../rtc_base:checks", + "../../../rtc_base:safe_conversions", + "../../../rtc_base:timeutils", + "../../../rtc_base/experiments:alr_experiment", + "../../../rtc_base/experiments:field_trial_parser", + "../../pacing:interval_budget", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} +rtc_library("estimators") { + configs += [ ":bwe_test_logging" ] + sources = [ + "acknowledged_bitrate_estimator.cc", + "acknowledged_bitrate_estimator.h", + "acknowledged_bitrate_estimator_interface.cc", + "acknowledged_bitrate_estimator_interface.h", + "bitrate_estimator.cc", + "bitrate_estimator.h", + "delay_increase_detector_interface.h", + "probe_bitrate_estimator.cc", + "probe_bitrate_estimator.h", + "robust_throughput_estimator.cc", + "robust_throughput_estimator.h", + "trendline_estimator.cc", + "trendline_estimator.h", + ] + + deps = [ + "../../../api:network_state_predictor_api", + "../../../api/rtc_event_log", + "../../../api/transport:network_control", + 
"../../../api/transport:webrtc_key_value_config", + "../../../api/units:data_rate", + "../../../api/units:timestamp", + "../../../logging:rtc_event_bwe", + "../../../rtc_base:checks", + "../../../rtc_base:logging", + "../../../rtc_base:macromagic", + "../../../rtc_base:rtc_numerics", + "../../../rtc_base:safe_conversions", + "../../../rtc_base:safe_minmax", + "../../../rtc_base/experiments:field_trial_parser", + "../../remote_bitrate_estimator", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("loss_based_controller") { + configs += [ ":bwe_test_logging" ] + sources = [ + "loss_based_bandwidth_estimation.cc", + "loss_based_bandwidth_estimation.h", + "send_side_bandwidth_estimation.cc", + "send_side_bandwidth_estimation.h", + ] + deps = [ + "../../../api/rtc_event_log", + "../../../api/transport:network_control", + "../../../api/units:data_rate", + "../../../api/units:time_delta", + "../../../api/units:timestamp", + "../../../logging:rtc_event_bwe", + "../../../rtc_base:checks", + "../../../rtc_base:logging", + "../../../rtc_base/experiments:field_trial_parser", + "../../../system_wrappers:field_trial", + "../../../system_wrappers:metrics", + "../../remote_bitrate_estimator", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("delay_based_bwe") { + configs += [ ":bwe_test_logging" ] + sources = [ + "delay_based_bwe.cc", + "delay_based_bwe.h", + ] + + deps = [ + ":estimators", + "../../../api:network_state_predictor_api", + "../../../api/rtc_event_log", + "../../../api/transport:network_control", + "../../../api/transport:network_control", + "../../../api/transport:webrtc_key_value_config", + "../../../logging:rtc_event_bwe", + "../../../rtc_base:checks", + "../../../rtc_base:rtc_base_approved", + "../../../rtc_base/experiments:field_trial_parser", + "../../../system_wrappers:metrics", + 
"../../pacing", + "../../remote_bitrate_estimator", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +rtc_library("probe_controller") { + sources = [ + "probe_controller.cc", + "probe_controller.h", + ] + + deps = [ + "../../../api/rtc_event_log", + "../../../api/transport:network_control", + "../../../api/transport:webrtc_key_value_config", + "../../../api/units:data_rate", + "../../../api/units:time_delta", + "../../../api/units:timestamp", + "../../../logging:rtc_event_bwe", + "../../../logging:rtc_event_pacing", + "../../../rtc_base:checks", + "../../../rtc_base:logging", + "../../../rtc_base:macromagic", + "../../../rtc_base:safe_conversions", + "../../../rtc_base/experiments:field_trial_parser", + "../../../rtc_base/system:unused", + "../../../system_wrappers:metrics", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/strings", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +if (rtc_include_tests) { + rtc_library("test_goog_cc_printer") { + testonly = true + sources = [ + "test/goog_cc_printer.cc", + "test/goog_cc_printer.h", + ] + deps = [ + ":alr_detector", + ":delay_based_bwe", + ":estimators", + ":goog_cc", + "../../../api/rtc_event_log", + "../../../api/transport:goog_cc", + "../../../api/transport:network_control", + "../../../api/units:timestamp", + "../../../rtc_base:checks", + "../../../test/logging:log_writer", + "../../remote_bitrate_estimator", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + } + rtc_library("goog_cc_unittests") { + testonly = true + + sources = [ + "acknowledged_bitrate_estimator_unittest.cc", + "alr_detector_unittest.cc", + "congestion_window_pushback_controller_unittest.cc", + "delay_based_bwe_unittest.cc", + "delay_based_bwe_unittest_helper.cc", + "delay_based_bwe_unittest_helper.h", + "goog_cc_network_control_unittest.cc", + "probe_bitrate_estimator_unittest.cc", + "probe_controller_unittest.cc", + 
"robust_throughput_estimator_unittest.cc", + "send_side_bandwidth_estimation_unittest.cc", + "trendline_estimator_unittest.cc", + ] + deps = [ + ":alr_detector", + ":delay_based_bwe", + ":estimators", + ":goog_cc", + ":loss_based_controller", + ":probe_controller", + ":pushback_controller", + "../../../api/rtc_event_log", + "../../../api/transport:field_trial_based_config", + "../../../api/transport:goog_cc", + "../../../api/transport:network_control", + "../../../api/transport:webrtc_key_value_config", + "../../../api/units:data_rate", + "../../../api/units:timestamp", + "../../../logging:mocks", + "../../../logging:rtc_event_bwe", + "../../../rtc_base:checks", + "../../../rtc_base:rtc_base_approved", + "../../../rtc_base:rtc_base_tests_utils", + "../../../rtc_base/experiments:alr_experiment", + "../../../system_wrappers", + "../../../test:field_trial", + "../../../test:test_support", + "../../../test/scenario", + "../../pacing", + "//testing/gmock", + ] + } +} diff --git a/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.cc b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.cc new file mode 100644 index 0000000..f3c992f --- /dev/null +++ b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.cc @@ -0,0 +1,70 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h"
+
+#include <stddef.h>
+
+#include <algorithm>
+#include <memory>
+#include <utility>
+
+#include "rtc_base/checks.h"
+#include "rtc_base/numerics/safe_conversions.h"
+
+namespace webrtc {
+
+AcknowledgedBitrateEstimator::AcknowledgedBitrateEstimator(
+    const WebRtcKeyValueConfig* key_value_config)
+    : AcknowledgedBitrateEstimator(
+          key_value_config,
+          std::make_unique<BitrateEstimator>(key_value_config)) {}
+
+AcknowledgedBitrateEstimator::~AcknowledgedBitrateEstimator() {}
+
+AcknowledgedBitrateEstimator::AcknowledgedBitrateEstimator(
+    const WebRtcKeyValueConfig* key_value_config,
+    std::unique_ptr<BitrateEstimator> bitrate_estimator)
+    : in_alr_(false), bitrate_estimator_(std::move(bitrate_estimator)) {}
+
+void AcknowledgedBitrateEstimator::IncomingPacketFeedbackVector(
+    const std::vector<PacketResult>& packet_feedback_vector) {
+  RTC_DCHECK(std::is_sorted(packet_feedback_vector.begin(),
+                            packet_feedback_vector.end(),
+                            PacketResult::ReceiveTimeOrder()));
+  for (const auto& packet : packet_feedback_vector) {
+    if (alr_ended_time_ && packet.sent_packet.send_time > *alr_ended_time_) {
+      bitrate_estimator_->ExpectFastRateChange();
+      alr_ended_time_.reset();
+    }
+    DataSize acknowledged_estimate = packet.sent_packet.size;
+    acknowledged_estimate += packet.sent_packet.prior_unacked_data;
+    bitrate_estimator_->Update(packet.receive_time, acknowledged_estimate,
+                               in_alr_);
+  }
+}
+
+absl::optional<DataRate> AcknowledgedBitrateEstimator::bitrate() const {
+  return bitrate_estimator_->bitrate();
+}
+
+absl::optional<DataRate> AcknowledgedBitrateEstimator::PeekRate() const {
+  return bitrate_estimator_->PeekRate();
+}
+
+void AcknowledgedBitrateEstimator::SetAlrEndedTime(Timestamp alr_ended_time) {
+  alr_ended_time_.emplace(alr_ended_time);
+}
+
+void AcknowledgedBitrateEstimator::SetAlr(bool in_alr) {
+  in_alr_ = in_alr;
+}
+
+} // namespace webrtc
diff --git a/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h
b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h
new file mode 100644
index 0000000..97dd965
--- /dev/null
+++ b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_ACKNOWLEDGED_BITRATE_ESTIMATOR_H_
+#define MODULES_CONGESTION_CONTROLLER_GOOG_CC_ACKNOWLEDGED_BITRATE_ESTIMATOR_H_
+
+#include <memory>
+#include <vector>
+
+#include "absl/types/optional.h"
+#include "api/transport/network_types.h"
+#include "api/transport/webrtc_key_value_config.h"
+#include "api/units/data_rate.h"
+#include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h"
+#include "modules/congestion_controller/goog_cc/bitrate_estimator.h"
+
+namespace webrtc {
+
+class AcknowledgedBitrateEstimator
+    : public AcknowledgedBitrateEstimatorInterface {
+ public:
+  AcknowledgedBitrateEstimator(
+      const WebRtcKeyValueConfig* key_value_config,
+      std::unique_ptr<BitrateEstimator> bitrate_estimator);
+
+  explicit AcknowledgedBitrateEstimator(
+      const WebRtcKeyValueConfig* key_value_config);
+  ~AcknowledgedBitrateEstimator() override;
+
+  void IncomingPacketFeedbackVector(
+      const std::vector<PacketResult>& packet_feedback_vector) override;
+  absl::optional<DataRate> bitrate() const override;
+  absl::optional<DataRate> PeekRate() const override;
+  void SetAlr(bool in_alr) override;
+  void SetAlrEndedTime(Timestamp alr_ended_time) override;
+
+ private:
+  absl::optional<Timestamp> alr_ended_time_;
+  bool in_alr_;
+  std::unique_ptr<BitrateEstimator> bitrate_estimator_;
+};
+
+} // namespace webrtc
+
+#endif //
MODULES_CONGESTION_CONTROLLER_GOOG_CC_ACKNOWLEDGED_BITRATE_ESTIMATOR_H_ diff --git a/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.cc b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.cc new file mode 100644 index 0000000..d5b1a13 --- /dev/null +++ b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.cc @@ -0,0 +1,77 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h" + +#include + +#include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h" +#include "modules/congestion_controller/goog_cc/robust_throughput_estimator.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +constexpr char RobustThroughputEstimatorSettings::kKey[]; + +RobustThroughputEstimatorSettings::RobustThroughputEstimatorSettings( + const WebRtcKeyValueConfig* key_value_config) { + Parser()->Parse( + key_value_config->Lookup(RobustThroughputEstimatorSettings::kKey)); + if (min_packets < 10 || kMaxPackets < min_packets) { + RTC_LOG(LS_WARNING) << "Window size must be between 10 and " << kMaxPackets + << " packets"; + min_packets = 20; + } + if (initial_packets < 10 || kMaxPackets < initial_packets) { + RTC_LOG(LS_WARNING) << "Initial size must be between 10 and " << kMaxPackets + << " packets"; + initial_packets = 20; + } + initial_packets = std::min(initial_packets, min_packets); + if (window_duration < TimeDelta::Millis(100) || + TimeDelta::Millis(2000) < window_duration) { + RTC_LOG(LS_WARNING) << "Window duration must be 
between 100 and 2000 ms"; + window_duration = TimeDelta::Millis(500); + } + if (unacked_weight < 0.0 || 1.0 < unacked_weight) { + RTC_LOG(LS_WARNING) + << "Weight for prior unacked size must be between 0 and 1."; + unacked_weight = 1.0; + } +} + +std::unique_ptr +RobustThroughputEstimatorSettings::Parser() { + return StructParametersParser::Create("enabled", &enabled, // + "reduce_bias", &reduce_bias, // + "assume_shared_link", // + &assume_shared_link, // + "min_packets", &min_packets, // + "window_duration", &window_duration, // + "initial_packets", &initial_packets, // + "unacked_weight", &unacked_weight); +} + +AcknowledgedBitrateEstimatorInterface:: + ~AcknowledgedBitrateEstimatorInterface() {} + +std::unique_ptr +AcknowledgedBitrateEstimatorInterface::Create( + const WebRtcKeyValueConfig* key_value_config) { + RobustThroughputEstimatorSettings simplified_estimator_settings( + key_value_config); + if (simplified_estimator_settings.enabled) { + return std::make_unique( + simplified_estimator_settings); + } + return std::make_unique(key_value_config); +} + +} // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h new file mode 100644 index 0000000..f802191 --- /dev/null +++ b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h @@ -0,0 +1,80 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_ACKNOWLEDGED_BITRATE_ESTIMATOR_INTERFACE_H_ +#define MODULES_CONGESTION_CONTROLLER_GOOG_CC_ACKNOWLEDGED_BITRATE_ESTIMATOR_INTERFACE_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/transport/network_types.h" +#include "api/transport/webrtc_key_value_config.h" +#include "api/units/data_rate.h" +#include "rtc_base/experiments/struct_parameters_parser.h" + +namespace webrtc { + +struct RobustThroughputEstimatorSettings { + static constexpr char kKey[] = "WebRTC-Bwe-RobustThroughputEstimatorSettings"; + static constexpr size_t kMaxPackets = 500; + + RobustThroughputEstimatorSettings() = delete; + explicit RobustThroughputEstimatorSettings( + const WebRtcKeyValueConfig* key_value_config); + + bool enabled = false; // Set to true to use RobustThroughputEstimator. + + // The estimator handles delay spikes by removing the largest receive time + // gap, but this introduces some bias that may lead to overestimation when + // there isn't any delay spike. If |reduce_bias| is true, we instead replace + // the largest receive time gap by the second largest. This reduces the bias + // at the cost of not completely removing the genuine delay spikes. + bool reduce_bias = true; + + // If |assume_shared_link| is false, we ignore the size of the first packet + // when computing the receive rate. Otherwise, we remove half of the first + // and last packet's sizes. + bool assume_shared_link = false; + + // The estimator window keeps at least |min_packets| packets and up to + // kMaxPackets received during the last |window_duration|. + unsigned min_packets = 20; + TimeDelta window_duration = TimeDelta::Millis(500); + + // The estimator window requires at least |initial_packets| packets received + // over at least |initial_duration|. 
+ unsigned initial_packets = 20; + + // If audio packets are included in allocation, but not in bandwidth + // estimation and the sent audio packets get double counted, + // then it might be useful to reduce the weight to 0.5. + double unacked_weight = 1.0; + + std::unique_ptr Parser(); +}; + +class AcknowledgedBitrateEstimatorInterface { + public: + static std::unique_ptr Create( + const WebRtcKeyValueConfig* key_value_config); + virtual ~AcknowledgedBitrateEstimatorInterface(); + + virtual void IncomingPacketFeedbackVector( + const std::vector& packet_feedback_vector) = 0; + virtual absl::optional bitrate() const = 0; + virtual absl::optional PeekRate() const = 0; + virtual void SetAlr(bool in_alr) = 0; + virtual void SetAlrEndedTime(Timestamp alr_ended_time) = 0; +}; + +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_GOOG_CC_ACKNOWLEDGED_BITRATE_ESTIMATOR_INTERFACE_H_ diff --git a/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_unittest.cc b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_unittest.cc new file mode 100644 index 0000000..e5b733b --- /dev/null +++ b/modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_unittest.cc @@ -0,0 +1,136 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h" + +#include +#include + +#include "api/transport/field_trial_based_config.h" +#include "rtc_base/fake_clock.h" +#include "test/gmock.h" +#include "test/gtest.h" + +using ::testing::_; +using ::testing::InSequence; +using ::testing::NiceMock; +using ::testing::Return; + +namespace webrtc { + +namespace { + +constexpr int64_t kFirstArrivalTimeMs = 10; +constexpr int64_t kFirstSendTimeMs = 10; +constexpr uint16_t kSequenceNumber = 1; +constexpr size_t kPayloadSize = 10; + +class MockBitrateEstimator : public BitrateEstimator { + public: + using BitrateEstimator::BitrateEstimator; + MOCK_METHOD(void, + Update, + (Timestamp at_time, DataSize data_size, bool in_alr), + (override)); + MOCK_METHOD(absl::optional, bitrate, (), (const, override)); + MOCK_METHOD(void, ExpectFastRateChange, (), (override)); +}; + +struct AcknowledgedBitrateEstimatorTestStates { + FieldTrialBasedConfig field_trial_config; + std::unique_ptr acknowledged_bitrate_estimator; + MockBitrateEstimator* mock_bitrate_estimator; +}; + +AcknowledgedBitrateEstimatorTestStates CreateTestStates() { + AcknowledgedBitrateEstimatorTestStates states; + auto mock_bitrate_estimator = + std::make_unique(&states.field_trial_config); + states.mock_bitrate_estimator = mock_bitrate_estimator.get(); + states.acknowledged_bitrate_estimator = + std::make_unique( + &states.field_trial_config, std::move(mock_bitrate_estimator)); + return states; +} + +std::vector CreateFeedbackVector() { + std::vector packet_feedback_vector(2); + packet_feedback_vector[0].receive_time = + Timestamp::Millis(kFirstArrivalTimeMs); + packet_feedback_vector[0].sent_packet.send_time = + Timestamp::Millis(kFirstSendTimeMs); + packet_feedback_vector[0].sent_packet.sequence_number = kSequenceNumber; + packet_feedback_vector[0].sent_packet.size = DataSize::Bytes(kPayloadSize); + packet_feedback_vector[1].receive_time = + Timestamp::Millis(kFirstArrivalTimeMs + 
10); + packet_feedback_vector[1].sent_packet.send_time = + Timestamp::Millis(kFirstSendTimeMs + 10); + packet_feedback_vector[1].sent_packet.sequence_number = kSequenceNumber; + packet_feedback_vector[1].sent_packet.size = + DataSize::Bytes(kPayloadSize + 10); + return packet_feedback_vector; +} + +} // anonymous namespace + +TEST(TestAcknowledgedBitrateEstimator, UpdateBandwidth) { + auto states = CreateTestStates(); + auto packet_feedback_vector = CreateFeedbackVector(); + { + InSequence dummy; + EXPECT_CALL(*states.mock_bitrate_estimator, + Update(packet_feedback_vector[0].receive_time, + packet_feedback_vector[0].sent_packet.size, + /*in_alr*/ false)) + .Times(1); + EXPECT_CALL(*states.mock_bitrate_estimator, + Update(packet_feedback_vector[1].receive_time, + packet_feedback_vector[1].sent_packet.size, + /*in_alr*/ false)) + .Times(1); + } + states.acknowledged_bitrate_estimator->IncomingPacketFeedbackVector( + packet_feedback_vector); +} + +TEST(TestAcknowledgedBitrateEstimator, ExpectFastRateChangeWhenLeftAlr) { + auto states = CreateTestStates(); + auto packet_feedback_vector = CreateFeedbackVector(); + { + InSequence dummy; + EXPECT_CALL(*states.mock_bitrate_estimator, + Update(packet_feedback_vector[0].receive_time, + packet_feedback_vector[0].sent_packet.size, + /*in_alr*/ false)) + .Times(1); + EXPECT_CALL(*states.mock_bitrate_estimator, ExpectFastRateChange()) + .Times(1); + EXPECT_CALL(*states.mock_bitrate_estimator, + Update(packet_feedback_vector[1].receive_time, + packet_feedback_vector[1].sent_packet.size, + /*in_alr*/ false)) + .Times(1); + } + states.acknowledged_bitrate_estimator->SetAlrEndedTime( + Timestamp::Millis(kFirstArrivalTimeMs + 1)); + states.acknowledged_bitrate_estimator->IncomingPacketFeedbackVector( + packet_feedback_vector); +} + +TEST(TestAcknowledgedBitrateEstimator, ReturnBitrate) { + auto states = CreateTestStates(); + absl::optional return_value = DataRate::KilobitsPerSec(42); + EXPECT_CALL(*states.mock_bitrate_estimator, 
bitrate()) + .Times(1) + .WillOnce(Return(return_value)); + EXPECT_EQ(return_value, states.acknowledged_bitrate_estimator->bitrate()); +} + +} // namespace webrtc*/ diff --git a/modules/congestion_controller/goog_cc/alr_detector.cc b/modules/congestion_controller/goog_cc/alr_detector.cc new file mode 100644 index 0000000..6a62954 --- /dev/null +++ b/modules/congestion_controller/goog_cc/alr_detector.cc @@ -0,0 +1,112 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/congestion_controller/goog_cc/alr_detector.h" + +#include +#include +#include + +#include "api/rtc_event_log/rtc_event.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "logging/rtc_event_log/events/rtc_event_alr_state.h" +#include "rtc_base/checks.h" +#include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/time_utils.h" + +namespace webrtc { + +namespace { +AlrDetectorConfig GetConfigFromTrials( + const WebRtcKeyValueConfig* key_value_config) { + RTC_CHECK(AlrExperimentSettings::MaxOneFieldTrialEnabled(*key_value_config)); + absl::optional experiment_settings = + AlrExperimentSettings::CreateFromFieldTrial( + *key_value_config, + AlrExperimentSettings::kScreenshareProbingBweExperimentName); + if (!experiment_settings) { + experiment_settings = AlrExperimentSettings::CreateFromFieldTrial( + *key_value_config, + AlrExperimentSettings::kStrictPacingAndProbingExperimentName); + } + AlrDetectorConfig conf; + if (experiment_settings) { + conf.bandwidth_usage_ratio = + experiment_settings->alr_bandwidth_usage_percent / 100.0; + conf.start_budget_level_ratio = + 
experiment_settings->alr_start_budget_level_percent / 100.0; + conf.stop_budget_level_ratio = + experiment_settings->alr_stop_budget_level_percent / 100.0; + } + conf.Parser()->Parse( + key_value_config->Lookup("WebRTC-AlrDetectorParameters")); + return conf; +} +} // namespace + +std::unique_ptr AlrDetectorConfig::Parser() { + return StructParametersParser::Create( // + "bw_usage", &bandwidth_usage_ratio, // + "start", &start_budget_level_ratio, // + "stop", &stop_budget_level_ratio); +} + +AlrDetector::AlrDetector(AlrDetectorConfig config, RtcEventLog* event_log) + : conf_(config), alr_budget_(0, true), event_log_(event_log) {} + +AlrDetector::AlrDetector(const WebRtcKeyValueConfig* key_value_config) + : AlrDetector(GetConfigFromTrials(key_value_config), nullptr) {} + +AlrDetector::AlrDetector(const WebRtcKeyValueConfig* key_value_config, + RtcEventLog* event_log) + : AlrDetector(GetConfigFromTrials(key_value_config), event_log) {} +AlrDetector::~AlrDetector() {} + +void AlrDetector::OnBytesSent(size_t bytes_sent, int64_t send_time_ms) { + if (!last_send_time_ms_.has_value()) { + last_send_time_ms_ = send_time_ms; + // Since the duration for sending the bytes is unknwon, return without + // updating alr state. 
+ return; + } + int64_t delta_time_ms = send_time_ms - *last_send_time_ms_; + last_send_time_ms_ = send_time_ms; + + alr_budget_.UseBudget(bytes_sent); + alr_budget_.IncreaseBudget(delta_time_ms); + bool state_changed = false; + if (alr_budget_.budget_ratio() > conf_.start_budget_level_ratio && + !alr_started_time_ms_) { + alr_started_time_ms_.emplace(rtc::TimeMillis()); + state_changed = true; + } else if (alr_budget_.budget_ratio() < conf_.stop_budget_level_ratio && + alr_started_time_ms_) { + state_changed = true; + alr_started_time_ms_.reset(); + } + if (event_log_ && state_changed) { + event_log_->Log( + std::make_unique(alr_started_time_ms_.has_value())); + } +} + +void AlrDetector::SetEstimatedBitrate(int bitrate_bps) { + RTC_DCHECK(bitrate_bps); + int target_rate_kbps = + static_cast(bitrate_bps) * conf_.bandwidth_usage_ratio / 1000; + alr_budget_.set_target_rate_kbps(target_rate_kbps); +} + +absl::optional AlrDetector::GetApplicationLimitedRegionStartTime() + const { + return alr_started_time_ms_; +} + +} // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/alr_detector.h b/modules/congestion_controller/goog_cc/alr_detector.h new file mode 100644 index 0000000..ee3fe92 --- /dev/null +++ b/modules/congestion_controller/goog_cc/alr_detector.h @@ -0,0 +1,76 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_ALR_DETECTOR_H_ +#define MODULES_CONGESTION_CONTROLLER_GOOG_CC_ALR_DETECTOR_H_ + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/transport/webrtc_key_value_config.h" +#include "modules/pacing/interval_budget.h" +#include "rtc_base/experiments/alr_experiment.h" +#include "rtc_base/experiments/struct_parameters_parser.h" + +namespace webrtc { + +class RtcEventLog; + +struct AlrDetectorConfig { + // Sent traffic ratio as a function of network capacity used to determine + // application-limited region. ALR region start when bandwidth usage drops + // below kAlrStartUsageRatio and ends when it raises above + // kAlrEndUsageRatio. NOTE: This is intentionally conservative at the moment + // until BW adjustments of application limited region is fine tuned. + double bandwidth_usage_ratio = 0.65; + double start_budget_level_ratio = 0.80; + double stop_budget_level_ratio = 0.50; + std::unique_ptr Parser(); +}; +// Application limited region detector is a class that utilizes signals of +// elapsed time and bytes sent to estimate whether network traffic is +// currently limited by the application's ability to generate traffic. +// +// AlrDetector provides a signal that can be utilized to adjust +// estimate bandwidth. +// Note: This class is not thread-safe. +class AlrDetector { + public: + AlrDetector(AlrDetectorConfig config, RtcEventLog* event_log); + explicit AlrDetector(const WebRtcKeyValueConfig* key_value_config); + AlrDetector(const WebRtcKeyValueConfig* key_value_config, + RtcEventLog* event_log); + ~AlrDetector(); + + void OnBytesSent(size_t bytes_sent, int64_t send_time_ms); + + // Set current estimated bandwidth. + void SetEstimatedBitrate(int bitrate_bps); + + // Returns time in milliseconds when the current application-limited region + // started or empty result if the sender is currently not application-limited. 
+ absl::optional GetApplicationLimitedRegionStartTime() const; + + private: + friend class GoogCcStatePrinter; + const AlrDetectorConfig conf_; + + absl::optional last_send_time_ms_; + + IntervalBudget alr_budget_; + absl::optional alr_started_time_ms_; + + RtcEventLog* event_log_; +}; +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_GOOG_CC_ALR_DETECTOR_H_ diff --git a/modules/congestion_controller/goog_cc/alr_detector_unittest.cc b/modules/congestion_controller/goog_cc/alr_detector_unittest.cc new file mode 100644 index 0000000..eac19d0 --- /dev/null +++ b/modules/congestion_controller/goog_cc/alr_detector_unittest.cc @@ -0,0 +1,206 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/congestion_controller/goog_cc/alr_detector.h" + +#include "api/transport/field_trial_based_config.h" +#include "rtc_base/checks.h" +#include "rtc_base/experiments/alr_experiment.h" +#include "test/field_trial.h" +#include "test/gtest.h" + +namespace { + +constexpr int kEstimatedBitrateBps = 300000; + +} // namespace + +namespace webrtc { +namespace { +class SimulateOutgoingTrafficIn { + public: + explicit SimulateOutgoingTrafficIn(AlrDetector* alr_detector, + int64_t* timestamp_ms) + : alr_detector_(alr_detector), timestamp_ms_(timestamp_ms) { + RTC_CHECK(alr_detector_); + } + + SimulateOutgoingTrafficIn& ForTimeMs(int time_ms) { + interval_ms_ = time_ms; + ProduceTraffic(); + return *this; + } + + SimulateOutgoingTrafficIn& AtPercentOfEstimatedBitrate(int usage_percentage) { + usage_percentage_.emplace(usage_percentage); + ProduceTraffic(); + return *this; + } + + private: + void ProduceTraffic() { + if (!interval_ms_ || !usage_percentage_) + return; + const int kTimeStepMs = 10; + for (int t = 0; t < *interval_ms_; t += kTimeStepMs) { + *timestamp_ms_ += kTimeStepMs; + alr_detector_->OnBytesSent(kEstimatedBitrateBps * *usage_percentage_ * + kTimeStepMs / (8 * 100 * 1000), + *timestamp_ms_); + } + int remainder_ms = *interval_ms_ % kTimeStepMs; + if (remainder_ms > 0) { + *timestamp_ms_ += kTimeStepMs; + alr_detector_->OnBytesSent(kEstimatedBitrateBps * *usage_percentage_ * + remainder_ms / (8 * 100 * 1000), + *timestamp_ms_); + } + } + AlrDetector* const alr_detector_; + int64_t* timestamp_ms_; + absl::optional interval_ms_; + absl::optional usage_percentage_; +}; +} // namespace + +TEST(AlrDetectorTest, AlrDetection) { + FieldTrialBasedConfig field_trials; + int64_t timestamp_ms = 1000; + AlrDetector alr_detector(&field_trials); + alr_detector.SetEstimatedBitrate(kEstimatedBitrateBps); + + // Start in non-ALR state. 
+ EXPECT_FALSE(alr_detector.GetApplicationLimitedRegionStartTime()); + + // Stay in non-ALR state when usage is close to 100%. + SimulateOutgoingTrafficIn(&alr_detector, ×tamp_ms) + .ForTimeMs(1000) + .AtPercentOfEstimatedBitrate(90); + EXPECT_FALSE(alr_detector.GetApplicationLimitedRegionStartTime()); + + // Verify that we ALR starts when bitrate drops below 20%. + SimulateOutgoingTrafficIn(&alr_detector, ×tamp_ms) + .ForTimeMs(1500) + .AtPercentOfEstimatedBitrate(20); + EXPECT_TRUE(alr_detector.GetApplicationLimitedRegionStartTime()); + + // Verify that ALR ends when usage is above 65%. + SimulateOutgoingTrafficIn(&alr_detector, ×tamp_ms) + .ForTimeMs(4000) + .AtPercentOfEstimatedBitrate(100); + EXPECT_FALSE(alr_detector.GetApplicationLimitedRegionStartTime()); +} + +TEST(AlrDetectorTest, ShortSpike) { + FieldTrialBasedConfig field_trials; + int64_t timestamp_ms = 1000; + AlrDetector alr_detector(&field_trials); + alr_detector.SetEstimatedBitrate(kEstimatedBitrateBps); + // Start in non-ALR state. + EXPECT_FALSE(alr_detector.GetApplicationLimitedRegionStartTime()); + + // Verify that we ALR starts when bitrate drops below 20%. + SimulateOutgoingTrafficIn(&alr_detector, ×tamp_ms) + .ForTimeMs(1000) + .AtPercentOfEstimatedBitrate(20); + EXPECT_TRUE(alr_detector.GetApplicationLimitedRegionStartTime()); + + // Verify that we stay in ALR region even after a short bitrate spike. + SimulateOutgoingTrafficIn(&alr_detector, ×tamp_ms) + .ForTimeMs(100) + .AtPercentOfEstimatedBitrate(150); + EXPECT_TRUE(alr_detector.GetApplicationLimitedRegionStartTime()); + + // ALR ends when usage is above 65%. 
+ SimulateOutgoingTrafficIn(&alr_detector, ×tamp_ms) + .ForTimeMs(3000) + .AtPercentOfEstimatedBitrate(100); + EXPECT_FALSE(alr_detector.GetApplicationLimitedRegionStartTime()); +} + +TEST(AlrDetectorTest, BandwidthEstimateChanges) { + FieldTrialBasedConfig field_trials; + int64_t timestamp_ms = 1000; + AlrDetector alr_detector(&field_trials); + alr_detector.SetEstimatedBitrate(kEstimatedBitrateBps); + + // Start in non-ALR state. + EXPECT_FALSE(alr_detector.GetApplicationLimitedRegionStartTime()); + + // ALR starts when bitrate drops below 20%. + SimulateOutgoingTrafficIn(&alr_detector, ×tamp_ms) + .ForTimeMs(1000) + .AtPercentOfEstimatedBitrate(20); + EXPECT_TRUE(alr_detector.GetApplicationLimitedRegionStartTime()); + + // When bandwidth estimate drops the detector should stay in ALR mode and quit + // it shortly afterwards as the sender continues sending the same amount of + // traffic. This is necessary to ensure that ProbeController can still react + // to the BWE drop by initiating a new probe. 
+ alr_detector.SetEstimatedBitrate(kEstimatedBitrateBps / 5); + EXPECT_TRUE(alr_detector.GetApplicationLimitedRegionStartTime()); + SimulateOutgoingTrafficIn(&alr_detector, ×tamp_ms) + .ForTimeMs(1000) + .AtPercentOfEstimatedBitrate(50); + EXPECT_FALSE(alr_detector.GetApplicationLimitedRegionStartTime()); +} + +TEST(AlrDetectorTest, ParseControlFieldTrial) { + webrtc::test::ScopedFieldTrials scoped_field_trial( + "WebRTC-ProbingScreenshareBwe/Control/"); + absl::optional parsed_params = + AlrExperimentSettings::CreateFromFieldTrial( + FieldTrialBasedConfig(), "WebRTC-ProbingScreenshareBwe"); + EXPECT_FALSE(static_cast(parsed_params)); +} + +TEST(AlrDetectorTest, ParseActiveFieldTrial) { + webrtc::test::ScopedFieldTrials scoped_field_trial( + "WebRTC-ProbingScreenshareBwe/1.1,2875,85,20,-20,1/"); + absl::optional parsed_params = + AlrExperimentSettings::CreateFromFieldTrial( + FieldTrialBasedConfig(), "WebRTC-ProbingScreenshareBwe"); + ASSERT_TRUE(static_cast(parsed_params)); + EXPECT_EQ(1.1f, parsed_params->pacing_factor); + EXPECT_EQ(2875, parsed_params->max_paced_queue_time); + EXPECT_EQ(85, parsed_params->alr_bandwidth_usage_percent); + EXPECT_EQ(20, parsed_params->alr_start_budget_level_percent); + EXPECT_EQ(-20, parsed_params->alr_stop_budget_level_percent); + EXPECT_EQ(1, parsed_params->group_id); +} + +TEST(AlrDetectorTest, ParseAlrSpecificFieldTrial) { + webrtc::test::ScopedFieldTrials scoped_field_trial( + "WebRTC-AlrDetectorParameters/" + "bw_usage:90%,start:0%,stop:-10%/"); + FieldTrialBasedConfig field_trials; + AlrDetector alr_detector(&field_trials); + int64_t timestamp_ms = 1000; + alr_detector.SetEstimatedBitrate(kEstimatedBitrateBps); + + // Start in non-ALR state. + EXPECT_FALSE(alr_detector.GetApplicationLimitedRegionStartTime()); + + // ALR does not start at 100% utilization. 
+ SimulateOutgoingTrafficIn(&alr_detector, ×tamp_ms) + .ForTimeMs(1000) + .AtPercentOfEstimatedBitrate(100); + EXPECT_FALSE(alr_detector.GetApplicationLimitedRegionStartTime()); + + // ALR does start at 85% utilization. + // Overused 10% above so it should take about 2s to reach a budget level of + // 0%. + SimulateOutgoingTrafficIn(&alr_detector, ×tamp_ms) + .ForTimeMs(2100) + .AtPercentOfEstimatedBitrate(85); + EXPECT_TRUE(alr_detector.GetApplicationLimitedRegionStartTime()); +} + +} // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/bitrate_estimator.cc b/modules/congestion_controller/goog_cc/bitrate_estimator.cc new file mode 100644 index 0000000..09b214a --- /dev/null +++ b/modules/congestion_controller/goog_cc/bitrate_estimator.cc @@ -0,0 +1,166 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/congestion_controller/goog_cc/bitrate_estimator.h" + +#include + +#include +#include +#include + +#include "api/units/data_rate.h" +#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +namespace { +constexpr int kInitialRateWindowMs = 500; +constexpr int kRateWindowMs = 150; +constexpr int kMinRateWindowMs = 150; +constexpr int kMaxRateWindowMs = 1000; + +const char kBweThroughputWindowConfig[] = "WebRTC-BweThroughputWindowConfig"; + +} // namespace + +BitrateEstimator::BitrateEstimator(const WebRtcKeyValueConfig* key_value_config) + : sum_(0), + initial_window_ms_("initial_window_ms", + kInitialRateWindowMs, + kMinRateWindowMs, + kMaxRateWindowMs), + noninitial_window_ms_("window_ms", + kRateWindowMs, + kMinRateWindowMs, + kMaxRateWindowMs), + uncertainty_scale_("scale", 10.0), + uncertainty_scale_in_alr_("scale_alr", uncertainty_scale_), + small_sample_uncertainty_scale_("scale_small", uncertainty_scale_), + small_sample_threshold_("small_thresh", DataSize::Zero()), + uncertainty_symmetry_cap_("symmetry_cap", DataRate::Zero()), + estimate_floor_("floor", DataRate::Zero()), + current_window_ms_(0), + prev_time_ms_(-1), + bitrate_estimate_kbps_(-1.0f), + bitrate_estimate_var_(50.0f) { + // E.g WebRTC-BweThroughputWindowConfig/initial_window_ms:350,window_ms:250/ + ParseFieldTrial( + {&initial_window_ms_, &noninitial_window_ms_, &uncertainty_scale_, + &uncertainty_scale_in_alr_, &small_sample_uncertainty_scale_, + &small_sample_threshold_, &uncertainty_symmetry_cap_, &estimate_floor_}, + key_value_config->Lookup(kBweThroughputWindowConfig)); +} + +BitrateEstimator::~BitrateEstimator() = default; + +void BitrateEstimator::Update(Timestamp at_time, DataSize amount, bool in_alr) { + int rate_window_ms = noninitial_window_ms_.Get(); + // We use a larger window at the beginning to get a more stable sample that + // we can use to initialize the estimate. 
+ if (bitrate_estimate_kbps_ < 0.f) + rate_window_ms = initial_window_ms_.Get(); + bool is_small_sample = false; + float bitrate_sample_kbps = UpdateWindow(at_time.ms(), amount.bytes(), + rate_window_ms, &is_small_sample); + if (bitrate_sample_kbps < 0.0f) + return; + if (bitrate_estimate_kbps_ < 0.0f) { + // This is the very first sample we get. Use it to initialize the estimate. + bitrate_estimate_kbps_ = bitrate_sample_kbps; + return; + } + // Optionally use higher uncertainty for very small samples to avoid dropping + // estimate and for samples obtained in ALR. + float scale = uncertainty_scale_; + if (is_small_sample && bitrate_sample_kbps < bitrate_estimate_kbps_) { + scale = small_sample_uncertainty_scale_; + } else if (in_alr && bitrate_sample_kbps < bitrate_estimate_kbps_) { + // Optionally use higher uncertainty for samples obtained during ALR. + scale = uncertainty_scale_in_alr_; + } + // Define the sample uncertainty as a function of how far away it is from the + // current estimate. With low values of uncertainty_symmetry_cap_ we add more + // uncertainty to increases than to decreases. For higher values we approach + // symmetry. + float sample_uncertainty = + scale * std::abs(bitrate_estimate_kbps_ - bitrate_sample_kbps) / + (bitrate_estimate_kbps_ + + std::min(bitrate_sample_kbps, + uncertainty_symmetry_cap_.Get().kbps())); + + float sample_var = sample_uncertainty * sample_uncertainty; + // Update a bayesian estimate of the rate, weighting it lower if the sample + // uncertainty is large. + // The bitrate estimate uncertainty is increased with each update to model + // that the bitrate changes over time. 
+ float pred_bitrate_estimate_var = bitrate_estimate_var_ + 5.f; + bitrate_estimate_kbps_ = (sample_var * bitrate_estimate_kbps_ + + pred_bitrate_estimate_var * bitrate_sample_kbps) / + (sample_var + pred_bitrate_estimate_var); + bitrate_estimate_kbps_ = + std::max(bitrate_estimate_kbps_, estimate_floor_.Get().kbps()); + bitrate_estimate_var_ = sample_var * pred_bitrate_estimate_var / + (sample_var + pred_bitrate_estimate_var); + BWE_TEST_LOGGING_PLOT(1, "acknowledged_bitrate", at_time.ms(), + bitrate_estimate_kbps_ * 1000); +} + +float BitrateEstimator::UpdateWindow(int64_t now_ms, + int bytes, + int rate_window_ms, + bool* is_small_sample) { + RTC_DCHECK(is_small_sample != nullptr); + // Reset if time moves backwards. + if (now_ms < prev_time_ms_) { + prev_time_ms_ = -1; + sum_ = 0; + current_window_ms_ = 0; + } + if (prev_time_ms_ >= 0) { + current_window_ms_ += now_ms - prev_time_ms_; + // Reset if nothing has been received for more than a full window. + if (now_ms - prev_time_ms_ > rate_window_ms) { + sum_ = 0; + current_window_ms_ %= rate_window_ms; + } + } + prev_time_ms_ = now_ms; + float bitrate_sample = -1.0f; + if (current_window_ms_ >= rate_window_ms) { + *is_small_sample = sum_ < small_sample_threshold_->bytes(); + bitrate_sample = 8.0f * sum_ / static_cast(rate_window_ms); + current_window_ms_ -= rate_window_ms; + sum_ = 0; + } + sum_ += bytes; + return bitrate_sample; +} + +absl::optional BitrateEstimator::bitrate() const { + if (bitrate_estimate_kbps_ < 0.f) + return absl::nullopt; + return DataRate::KilobitsPerSec(bitrate_estimate_kbps_); +} + +absl::optional BitrateEstimator::PeekRate() const { + if (current_window_ms_ > 0) + return DataSize::Bytes(sum_) / TimeDelta::Millis(current_window_ms_); + return absl::nullopt; +} + +void BitrateEstimator::ExpectFastRateChange() { + // By setting the bitrate-estimate variance to a higher value we allow the + // bitrate to change fast for the next few samples. 
+ bitrate_estimate_var_ += 200; +} + +} // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/bitrate_estimator.h b/modules/congestion_controller/goog_cc/bitrate_estimator.h new file mode 100644 index 0000000..34114f0 --- /dev/null +++ b/modules/congestion_controller/goog_cc/bitrate_estimator.h @@ -0,0 +1,62 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_BITRATE_ESTIMATOR_H_ +#define MODULES_CONGESTION_CONTROLLER_GOOG_CC_BITRATE_ESTIMATOR_H_ + +#include + +#include "absl/types/optional.h" +#include "api/transport/webrtc_key_value_config.h" +#include "api/units/data_rate.h" +#include "api/units/timestamp.h" +#include "rtc_base/experiments/field_trial_parser.h" + +namespace webrtc { + +// Computes a bayesian estimate of the throughput given acks containing +// the arrival time and payload size. Samples which are far from the current +// estimate or are based on few packets are given a smaller weight, as they +// are considered to be more likely to have been caused by, e.g., delay spikes +// unrelated to congestion. 
+class BitrateEstimator { + public: + explicit BitrateEstimator(const WebRtcKeyValueConfig* key_value_config); + virtual ~BitrateEstimator(); + virtual void Update(Timestamp at_time, DataSize amount, bool in_alr); + + virtual absl::optional bitrate() const; + absl::optional PeekRate() const; + + virtual void ExpectFastRateChange(); + + private: + float UpdateWindow(int64_t now_ms, + int bytes, + int rate_window_ms, + bool* is_small_sample); + int sum_; + FieldTrialConstrained initial_window_ms_; + FieldTrialConstrained noninitial_window_ms_; + FieldTrialParameter uncertainty_scale_; + FieldTrialParameter uncertainty_scale_in_alr_; + FieldTrialParameter small_sample_uncertainty_scale_; + FieldTrialParameter small_sample_threshold_; + FieldTrialParameter uncertainty_symmetry_cap_; + FieldTrialParameter estimate_floor_; + int64_t current_window_ms_; + int64_t prev_time_ms_; + float bitrate_estimate_kbps_; + float bitrate_estimate_var_; +}; + +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_GOOG_CC_BITRATE_ESTIMATOR_H_ diff --git a/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.cc b/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.cc new file mode 100644 index 0000000..ec64282 --- /dev/null +++ b/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.cc @@ -0,0 +1,81 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/congestion_controller/goog_cc/congestion_window_pushback_controller.h" + +#include +#include + +#include +#include + +#include "absl/strings/match.h" +#include "rtc_base/checks.h" +#include "rtc_base/experiments/rate_control_settings.h" + +namespace webrtc { + +CongestionWindowPushbackController::CongestionWindowPushbackController( + const WebRtcKeyValueConfig* key_value_config) + : add_pacing_( + absl::StartsWith(key_value_config->Lookup( + "WebRTC-AddPacingToCongestionWindowPushback"), + "Enabled")), + min_pushback_target_bitrate_bps_( + RateControlSettings::ParseFromKeyValueConfig(key_value_config) + .CongestionWindowMinPushbackTargetBitrateBps()), + current_data_window_( + RateControlSettings::ParseFromKeyValueConfig(key_value_config) + .CongestionWindowInitialDataWindow()) {} + +void CongestionWindowPushbackController::UpdateOutstandingData( + int64_t outstanding_bytes) { + outstanding_bytes_ = outstanding_bytes; +} +void CongestionWindowPushbackController::UpdatePacingQueue( + int64_t pacing_bytes) { + pacing_bytes_ = pacing_bytes; +} + +void CongestionWindowPushbackController::SetDataWindow(DataSize data_window) { + current_data_window_ = data_window; +} + +uint32_t CongestionWindowPushbackController::UpdateTargetBitrate( + uint32_t bitrate_bps) { + if (!current_data_window_ || current_data_window_->IsZero()) + return bitrate_bps; + int64_t total_bytes = outstanding_bytes_; + if (add_pacing_) + total_bytes += pacing_bytes_; + double fill_ratio = + total_bytes / static_cast(current_data_window_->bytes()); + if (fill_ratio > 1.5) { + encoding_rate_ratio_ *= 0.9; + } else if (fill_ratio > 1) { + encoding_rate_ratio_ *= 0.95; + } else if (fill_ratio < 0.1) { + encoding_rate_ratio_ = 1.0; + } else { + encoding_rate_ratio_ *= 1.05; + encoding_rate_ratio_ = std::min(encoding_rate_ratio_, 1.0); + } + uint32_t adjusted_target_bitrate_bps = + static_cast(bitrate_bps * encoding_rate_ratio_); + + // Do not adjust below the minimum pushback 
bitrate but do obey if the + // original estimate is below it. + bitrate_bps = adjusted_target_bitrate_bps < min_pushback_target_bitrate_bps_ + ? std::min(bitrate_bps, min_pushback_target_bitrate_bps_) + : adjusted_target_bitrate_bps; + return bitrate_bps; +} + +} // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.h b/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.h new file mode 100644 index 0000000..7a49a83 --- /dev/null +++ b/modules/congestion_controller/goog_cc/congestion_window_pushback_controller.h @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_CONGESTION_WINDOW_PUSHBACK_CONTROLLER_H_ +#define MODULES_CONGESTION_CONTROLLER_GOOG_CC_CONGESTION_WINDOW_PUSHBACK_CONTROLLER_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/transport/webrtc_key_value_config.h" +#include "api/units/data_size.h" + +namespace webrtc { + +// This class enables pushback from congestion window directly to video encoder. +// When the congestion window is filling up, the video encoder target bitrate +// will be reduced accordingly to accommodate the network changes. To avoid +// pausing video too frequently, a minimum encoder target bitrate threshold is +// used to prevent video pause due to a full congestion window. 
+class CongestionWindowPushbackController { + public: + explicit CongestionWindowPushbackController( + const WebRtcKeyValueConfig* key_value_config); + void UpdateOutstandingData(int64_t outstanding_bytes); + void UpdatePacingQueue(int64_t pacing_bytes); + uint32_t UpdateTargetBitrate(uint32_t bitrate_bps); + void SetDataWindow(DataSize data_window); + + private: + const bool add_pacing_; + const uint32_t min_pushback_target_bitrate_bps_; + absl::optional current_data_window_; + int64_t outstanding_bytes_ = 0; + int64_t pacing_bytes_ = 0; + double encoding_rate_ratio_ = 1.0; +}; + +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_GOOG_CC_CONGESTION_WINDOW_PUSHBACK_CONTROLLER_H_ diff --git a/modules/congestion_controller/goog_cc/congestion_window_pushback_controller_unittest.cc b/modules/congestion_controller/goog_cc/congestion_window_pushback_controller_unittest.cc new file mode 100644 index 0000000..62dde02 --- /dev/null +++ b/modules/congestion_controller/goog_cc/congestion_window_pushback_controller_unittest.cc @@ -0,0 +1,105 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/congestion_controller/goog_cc/congestion_window_pushback_controller.h" + +#include + +#include "api/transport/field_trial_based_config.h" +#include "test/field_trial.h" +#include "test/gmock.h" +#include "test/gtest.h" + +using ::testing::_; + +namespace webrtc { +namespace test { + +class CongestionWindowPushbackControllerTest : public ::testing::Test { + public: + CongestionWindowPushbackControllerTest() { + cwnd_controller_.reset( + new CongestionWindowPushbackController(&field_trial_config_)); + } + + protected: + FieldTrialBasedConfig field_trial_config_; + + std::unique_ptr cwnd_controller_; +}; + +TEST_F(CongestionWindowPushbackControllerTest, FullCongestionWindow) { + cwnd_controller_->UpdateOutstandingData(100000); + cwnd_controller_->SetDataWindow(DataSize::Bytes(50000)); + + uint32_t bitrate_bps = 80000; + bitrate_bps = cwnd_controller_->UpdateTargetBitrate(bitrate_bps); + EXPECT_EQ(72000u, bitrate_bps); + + cwnd_controller_->SetDataWindow(DataSize::Bytes(50000)); + bitrate_bps = cwnd_controller_->UpdateTargetBitrate(bitrate_bps); + EXPECT_EQ(static_cast(72000 * 0.9 * 0.9), bitrate_bps); +} + +TEST_F(CongestionWindowPushbackControllerTest, NormalCongestionWindow) { + cwnd_controller_->UpdateOutstandingData(199999); + cwnd_controller_->SetDataWindow(DataSize::Bytes(200000)); + + uint32_t bitrate_bps = 80000; + bitrate_bps = cwnd_controller_->UpdateTargetBitrate(bitrate_bps); + EXPECT_EQ(80000u, bitrate_bps); +} + +TEST_F(CongestionWindowPushbackControllerTest, LowBitrate) { + cwnd_controller_->UpdateOutstandingData(100000); + cwnd_controller_->SetDataWindow(DataSize::Bytes(50000)); + + uint32_t bitrate_bps = 35000; + bitrate_bps = cwnd_controller_->UpdateTargetBitrate(bitrate_bps); + EXPECT_EQ(static_cast(35000 * 0.9), bitrate_bps); + + cwnd_controller_->SetDataWindow(DataSize::Bytes(20000)); + bitrate_bps = cwnd_controller_->UpdateTargetBitrate(bitrate_bps); + EXPECT_EQ(30000u, bitrate_bps); +} + 
+TEST_F(CongestionWindowPushbackControllerTest, NoPushbackOnDataWindowUnset) { + cwnd_controller_->UpdateOutstandingData(1e8); // Large number + + uint32_t bitrate_bps = 80000; + bitrate_bps = cwnd_controller_->UpdateTargetBitrate(bitrate_bps); + EXPECT_EQ(80000u, bitrate_bps); +} + +TEST_F(CongestionWindowPushbackControllerTest, PushbackOnInititialDataWindow) { + test::ScopedFieldTrials trials("WebRTC-CongestionWindow/InitWin:100000/"); + cwnd_controller_.reset( + new CongestionWindowPushbackController(&field_trial_config_)); + cwnd_controller_->UpdateOutstandingData(1e8); // Large number + + uint32_t bitrate_bps = 80000; + bitrate_bps = cwnd_controller_->UpdateTargetBitrate(bitrate_bps); + EXPECT_GT(80000u, bitrate_bps); +} + +TEST_F(CongestionWindowPushbackControllerTest, PushbackDropFrame) { + test::ScopedFieldTrials trials("WebRTC-CongestionWindow/DropFrame:true/"); + cwnd_controller_.reset( + new CongestionWindowPushbackController(&field_trial_config_)); + cwnd_controller_->UpdateOutstandingData(1e8); // Large number + cwnd_controller_->SetDataWindow(DataSize::Bytes(50000)); + + uint32_t bitrate_bps = 80000; + bitrate_bps = cwnd_controller_->UpdateTargetBitrate(bitrate_bps); + EXPECT_GT(80000u, bitrate_bps); +} + +} // namespace test +} // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/delay_based_bwe.cc b/modules/congestion_controller/goog_cc/delay_based_bwe.cc new file mode 100644 index 0000000..1c02301 --- /dev/null +++ b/modules/congestion_controller/goog_cc/delay_based_bwe.cc @@ -0,0 +1,375 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/congestion_controller/goog_cc/delay_based_bwe.h" + +#include +#include +#include +#include +#include +#include + +#include "absl/strings/match.h" +#include "api/rtc_event_log/rtc_event.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h" +#include "modules/congestion_controller/goog_cc/trendline_estimator.h" +#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "system_wrappers/include/metrics.h" + +namespace webrtc { +namespace { +constexpr TimeDelta kStreamTimeOut = TimeDelta::Seconds(2); +constexpr int kTimestampGroupLengthMs = 5; +constexpr int kAbsSendTimeFraction = 18; +constexpr int kAbsSendTimeInterArrivalUpshift = 8; +constexpr int kInterArrivalShift = + kAbsSendTimeFraction + kAbsSendTimeInterArrivalUpshift; +constexpr int kTimestampGroupTicks = + (kTimestampGroupLengthMs << kInterArrivalShift) / 1000; +constexpr double kTimestampToMs = + 1000.0 / static_cast(1 << kInterArrivalShift); + +// This ssrc is used to fulfill the current API but will be removed +// after the API has been changed. 
+constexpr uint32_t kFixedSsrc = 0; +} // namespace + +constexpr char BweIgnoreSmallPacketsSettings::kKey[]; +constexpr char BweSeparateAudioPacketsSettings::kKey[]; + +BweIgnoreSmallPacketsSettings::BweIgnoreSmallPacketsSettings( + const WebRtcKeyValueConfig* key_value_config) { + Parser()->Parse( + key_value_config->Lookup(BweIgnoreSmallPacketsSettings::kKey)); +} + +std::unique_ptr +BweIgnoreSmallPacketsSettings::Parser() { + return StructParametersParser::Create("smoothing", &smoothing_factor, // + "fraction_large", &fraction_large, // + "large", &large_threshold, // + "small", &small_threshold); +} + +BweSeparateAudioPacketsSettings::BweSeparateAudioPacketsSettings( + const WebRtcKeyValueConfig* key_value_config) { + Parser()->Parse( + key_value_config->Lookup(BweSeparateAudioPacketsSettings::kKey)); +} + +std::unique_ptr +BweSeparateAudioPacketsSettings::Parser() { + return StructParametersParser::Create( // + "enabled", &enabled, // + "packet_threshold", &packet_threshold, // + "time_threshold", &time_threshold); +} + +DelayBasedBwe::Result::Result() + : updated(false), + probe(false), + target_bitrate(DataRate::Zero()), + recovered_from_overuse(false), + backoff_in_alr(false) {} + +DelayBasedBwe::Result::Result(bool probe, DataRate target_bitrate) + : updated(true), + probe(probe), + target_bitrate(target_bitrate), + recovered_from_overuse(false), + backoff_in_alr(false) {} + +DelayBasedBwe::DelayBasedBwe(const WebRtcKeyValueConfig* key_value_config, + RtcEventLog* event_log, + NetworkStatePredictor* network_state_predictor) + : event_log_(event_log), + key_value_config_(key_value_config), + ignore_small_(key_value_config), + fraction_large_packets_(0.5), + separate_audio_(key_value_config), + audio_packets_since_last_video_(0), + last_video_packet_recv_time_(Timestamp::MinusInfinity()), + network_state_predictor_(network_state_predictor), + video_inter_arrival_(), + video_delay_detector_( + new TrendlineEstimator(key_value_config_, 
network_state_predictor_)), + audio_inter_arrival_(), + audio_delay_detector_( + new TrendlineEstimator(key_value_config_, network_state_predictor_)), + active_delay_detector_(video_delay_detector_.get()), + last_seen_packet_(Timestamp::MinusInfinity()), + uma_recorded_(false), + rate_control_(key_value_config, /*send_side=*/true), + prev_bitrate_(DataRate::Zero()), + has_once_detected_overuse_(false), + prev_state_(BandwidthUsage::kBwNormal), + alr_limited_backoff_enabled_(absl::StartsWith( + key_value_config->Lookup("WebRTC-Bwe-AlrLimitedBackoff"), + "Enabled")) { + RTC_LOG(LS_INFO) << "Initialized DelayBasedBwe with small packet filtering " + << ignore_small_.Parser()->Encode() + << ", separate audio overuse detection" + << separate_audio_.Parser()->Encode() + << " and alr limited backoff " + << (alr_limited_backoff_enabled_ ? "enabled" : "disabled"); +} + +DelayBasedBwe::~DelayBasedBwe() {} + +DelayBasedBwe::Result DelayBasedBwe::IncomingPacketFeedbackVector( + const TransportPacketsFeedback& msg, + absl::optional acked_bitrate, + absl::optional probe_bitrate, + absl::optional network_estimate, + bool in_alr) { + RTC_DCHECK_RUNS_SERIALIZED(&network_race_); + + auto packet_feedback_vector = msg.SortedByReceiveTime(); + // TODO(holmer): An empty feedback vector here likely means that + // all acks were too late and that the send time history had + // timed out. We should reduce the rate when this occurs. 
+ if (packet_feedback_vector.empty()) { + RTC_LOG(LS_WARNING) << "Very late feedback received."; + return DelayBasedBwe::Result(); + } + + if (!uma_recorded_) { + RTC_HISTOGRAM_ENUMERATION(kBweTypeHistogram, + BweNames::kSendSideTransportSeqNum, + BweNames::kBweNamesMax); + uma_recorded_ = true; + } + bool delayed_feedback = true; + bool recovered_from_overuse = false; + BandwidthUsage prev_detector_state = active_delay_detector_->State(); + for (const auto& packet_feedback : packet_feedback_vector) { + delayed_feedback = false; + IncomingPacketFeedback(packet_feedback, msg.feedback_time); + if (prev_detector_state == BandwidthUsage::kBwUnderusing && + active_delay_detector_->State() == BandwidthUsage::kBwNormal) { + recovered_from_overuse = true; + } + prev_detector_state = active_delay_detector_->State(); + } + + if (delayed_feedback) { + // TODO(bugs.webrtc.org/10125): Design a better mechanism to safe-guard + // against building very large network queues. + return Result(); + } + rate_control_.SetInApplicationLimitedRegion(in_alr); + rate_control_.SetNetworkStateEstimate(network_estimate); + return MaybeUpdateEstimate(acked_bitrate, probe_bitrate, + std::move(network_estimate), + recovered_from_overuse, in_alr, msg.feedback_time); +} + +void DelayBasedBwe::IncomingPacketFeedback(const PacketResult& packet_feedback, + Timestamp at_time) { + // Reset if the stream has timed out. 
+ if (last_seen_packet_.IsInfinite() || + at_time - last_seen_packet_ > kStreamTimeOut) { + video_inter_arrival_.reset( + new InterArrival(kTimestampGroupTicks, kTimestampToMs, true)); + video_delay_detector_.reset( + new TrendlineEstimator(key_value_config_, network_state_predictor_)); + audio_inter_arrival_.reset( + new InterArrival(kTimestampGroupTicks, kTimestampToMs, true)); + audio_delay_detector_.reset( + new TrendlineEstimator(key_value_config_, network_state_predictor_)); + active_delay_detector_ = video_delay_detector_.get(); + } + last_seen_packet_ = at_time; + + // Ignore "small" packets if many/most packets in the call are "large". The + // packet size may have a significant effect on the propagation delay, + // especially at low bandwidths. Variations in packet size will then show up + // as noise in the delay measurement. By default, we include all packets. + DataSize packet_size = packet_feedback.sent_packet.size; + if (!ignore_small_.small_threshold.IsZero()) { + double is_large = + static_cast(packet_size >= ignore_small_.large_threshold); + fraction_large_packets_ += + ignore_small_.smoothing_factor * (is_large - fraction_large_packets_); + if (packet_size <= ignore_small_.small_threshold && + fraction_large_packets_ >= ignore_small_.fraction_large) { + return; + } + } + + // As an alternative to ignoring small packets, we can separate audio and + // video packets for overuse detection. 
+ InterArrival* inter_arrival_for_packet = video_inter_arrival_.get(); + DelayIncreaseDetectorInterface* delay_detector_for_packet = + video_delay_detector_.get(); + if (separate_audio_.enabled) { + if (packet_feedback.sent_packet.audio) { + inter_arrival_for_packet = audio_inter_arrival_.get(); + delay_detector_for_packet = audio_delay_detector_.get(); + audio_packets_since_last_video_++; + if (audio_packets_since_last_video_ > separate_audio_.packet_threshold && + packet_feedback.receive_time - last_video_packet_recv_time_ > + separate_audio_.time_threshold) { + active_delay_detector_ = audio_delay_detector_.get(); + } + } else { + audio_packets_since_last_video_ = 0; + last_video_packet_recv_time_ = + std::max(last_video_packet_recv_time_, packet_feedback.receive_time); + active_delay_detector_ = video_delay_detector_.get(); + } + } + + uint32_t send_time_24bits = + static_cast( + ((static_cast(packet_feedback.sent_packet.send_time.ms()) + << kAbsSendTimeFraction) + + 500) / + 1000) & + 0x00FFFFFF; + // Shift up send time to use the full 32 bits that inter_arrival works with, + // so wrapping works properly. 
+ uint32_t timestamp = send_time_24bits << kAbsSendTimeInterArrivalUpshift; + + uint32_t timestamp_delta = 0; + int64_t recv_delta_ms = 0; + int size_delta = 0; + bool calculated_deltas = inter_arrival_for_packet->ComputeDeltas( + timestamp, packet_feedback.receive_time.ms(), at_time.ms(), + packet_size.bytes(), ×tamp_delta, &recv_delta_ms, &size_delta); + double send_delta_ms = (1000.0 * timestamp_delta) / (1 << kInterArrivalShift); + delay_detector_for_packet->Update(recv_delta_ms, send_delta_ms, + packet_feedback.sent_packet.send_time.ms(), + packet_feedback.receive_time.ms(), + packet_size.bytes(), calculated_deltas); +} + +DataRate DelayBasedBwe::TriggerOveruse(Timestamp at_time, + absl::optional link_capacity) { + RateControlInput input(BandwidthUsage::kBwOverusing, link_capacity); + return rate_control_.Update(&input, at_time); +} + +DelayBasedBwe::Result DelayBasedBwe::MaybeUpdateEstimate( + absl::optional acked_bitrate, + absl::optional probe_bitrate, + absl::optional state_estimate, + bool recovered_from_overuse, + bool in_alr, + Timestamp at_time) { + Result result; + + // Currently overusing the bandwidth. + if (active_delay_detector_->State() == BandwidthUsage::kBwOverusing) { + if (has_once_detected_overuse_ && in_alr && alr_limited_backoff_enabled_) { + if (rate_control_.TimeToReduceFurther(at_time, prev_bitrate_)) { + result.updated = + UpdateEstimate(at_time, prev_bitrate_, &result.target_bitrate); + result.backoff_in_alr = true; + } + } else if (acked_bitrate && + rate_control_.TimeToReduceFurther(at_time, *acked_bitrate)) { + result.updated = + UpdateEstimate(at_time, acked_bitrate, &result.target_bitrate); + } else if (!acked_bitrate && rate_control_.ValidEstimate() && + rate_control_.InitialTimeToReduceFurther(at_time)) { + // Overusing before we have a measured acknowledged bitrate. Reduce send + // rate by 50% every 200 ms. 
+ // TODO(tschumim): Improve this and/or the acknowledged bitrate estimator + // so that we (almost) always have a bitrate estimate. + rate_control_.SetEstimate(rate_control_.LatestEstimate() / 2, at_time); + result.updated = true; + result.probe = false; + result.target_bitrate = rate_control_.LatestEstimate(); + } + has_once_detected_overuse_ = true; + } else { + if (probe_bitrate) { + result.probe = true; + result.updated = true; + result.target_bitrate = *probe_bitrate; + rate_control_.SetEstimate(*probe_bitrate, at_time); + } else { + result.updated = + UpdateEstimate(at_time, acked_bitrate, &result.target_bitrate); + result.recovered_from_overuse = recovered_from_overuse; + } + } + BandwidthUsage detector_state = active_delay_detector_->State(); + if ((result.updated && prev_bitrate_ != result.target_bitrate) || + detector_state != prev_state_) { + DataRate bitrate = result.updated ? result.target_bitrate : prev_bitrate_; + + BWE_TEST_LOGGING_PLOT(1, "target_bitrate_bps", at_time.ms(), bitrate.bps()); + + if (event_log_) { + event_log_->Log(std::make_unique( + bitrate.bps(), detector_state)); + } + + prev_bitrate_ = bitrate; + prev_state_ = detector_state; + } + return result; +} + +bool DelayBasedBwe::UpdateEstimate(Timestamp at_time, + absl::optional acked_bitrate, + DataRate* target_rate) { + const RateControlInput input(active_delay_detector_->State(), acked_bitrate); + *target_rate = rate_control_.Update(&input, at_time); + return rate_control_.ValidEstimate(); +} + +void DelayBasedBwe::OnRttUpdate(TimeDelta avg_rtt) { + rate_control_.SetRtt(avg_rtt); +} + +bool DelayBasedBwe::LatestEstimate(std::vector* ssrcs, + DataRate* bitrate) const { + // Currently accessed from both the process thread (see + // ModuleRtpRtcpImpl::Process()) and the configuration thread (see + // Call::GetStats()). Should in the future only be accessed from a single + // thread. 
+ RTC_DCHECK(ssrcs); + RTC_DCHECK(bitrate); + if (!rate_control_.ValidEstimate()) + return false; + + *ssrcs = {kFixedSsrc}; + *bitrate = rate_control_.LatestEstimate(); + return true; +} + +void DelayBasedBwe::SetStartBitrate(DataRate start_bitrate) { + RTC_LOG(LS_INFO) << "BWE Setting start bitrate to: " + << ToString(start_bitrate); + rate_control_.SetStartBitrate(start_bitrate); +} + +void DelayBasedBwe::SetMinBitrate(DataRate min_bitrate) { + // Called from both the configuration thread and the network thread. Shouldn't + // be called from the network thread in the future. + rate_control_.SetMinBitrate(min_bitrate); +} + +TimeDelta DelayBasedBwe::GetExpectedBwePeriod() const { + return rate_control_.GetExpectedBandwidthPeriod(); +} + +void DelayBasedBwe::SetAlrLimitedBackoffExperiment(bool enabled) { + alr_limited_backoff_enabled_ = enabled; +} + +} // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/delay_based_bwe.h b/modules/congestion_controller/goog_cc/delay_based_bwe.h new file mode 100644 index 0000000..25f5a3b --- /dev/null +++ b/modules/congestion_controller/goog_cc/delay_based_bwe.h @@ -0,0 +1,151 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_DELAY_BASED_BWE_H_ +#define MODULES_CONGESTION_CONTROLLER_GOOG_CC_DELAY_BASED_BWE_H_ + +#include +#include + +#include +#include + +#include "absl/types/optional.h" +#include "api/network_state_predictor.h" +#include "api/transport/network_types.h" +#include "api/transport/webrtc_key_value_config.h" +#include "modules/congestion_controller/goog_cc/delay_increase_detector_interface.h" +#include "modules/congestion_controller/goog_cc/probe_bitrate_estimator.h" +#include "modules/remote_bitrate_estimator/aimd_rate_control.h" +#include "modules/remote_bitrate_estimator/include/bwe_defines.h" +#include "modules/remote_bitrate_estimator/inter_arrival.h" +#include "rtc_base/constructor_magic.h" +#include "rtc_base/experiments/struct_parameters_parser.h" +#include "rtc_base/race_checker.h" + +namespace webrtc { +class RtcEventLog; + +struct BweIgnoreSmallPacketsSettings { + static constexpr char kKey[] = "WebRTC-BweIgnoreSmallPacketsFix"; + + BweIgnoreSmallPacketsSettings() = default; + explicit BweIgnoreSmallPacketsSettings( + const WebRtcKeyValueConfig* key_value_config); + + double smoothing_factor = 0.1; + double fraction_large = 1.0; + DataSize large_threshold = DataSize::Zero(); + DataSize small_threshold = DataSize::Zero(); + + std::unique_ptr Parser(); +}; + +struct BweSeparateAudioPacketsSettings { + static constexpr char kKey[] = "WebRTC-Bwe-SeparateAudioPackets"; + + BweSeparateAudioPacketsSettings() = default; + explicit BweSeparateAudioPacketsSettings( + const WebRtcKeyValueConfig* key_value_config); + + bool enabled = false; + int packet_threshold = 10; + TimeDelta time_threshold = TimeDelta::Seconds(1); + + std::unique_ptr Parser(); +}; + +class DelayBasedBwe { + public: + struct Result { + Result(); + Result(bool probe, DataRate target_bitrate); + ~Result() = default; + bool updated; + bool probe; + DataRate target_bitrate = DataRate::Zero(); + bool recovered_from_overuse; + bool backoff_in_alr; + }; + + 
explicit DelayBasedBwe(const WebRtcKeyValueConfig* key_value_config, + RtcEventLog* event_log, + NetworkStatePredictor* network_state_predictor); + virtual ~DelayBasedBwe(); + + Result IncomingPacketFeedbackVector( + const TransportPacketsFeedback& msg, + absl::optional acked_bitrate, + absl::optional probe_bitrate, + absl::optional network_estimate, + bool in_alr); + void OnRttUpdate(TimeDelta avg_rtt); + bool LatestEstimate(std::vector* ssrcs, DataRate* bitrate) const; + void SetStartBitrate(DataRate start_bitrate); + void SetMinBitrate(DataRate min_bitrate); + TimeDelta GetExpectedBwePeriod() const; + void SetAlrLimitedBackoffExperiment(bool enabled); + DataRate TriggerOveruse(Timestamp at_time, + absl::optional link_capacity); + DataRate last_estimate() const { return prev_bitrate_; } + + private: + friend class GoogCcStatePrinter; + void IncomingPacketFeedback(const PacketResult& packet_feedback, + Timestamp at_time); + Result MaybeUpdateEstimate( + absl::optional acked_bitrate, + absl::optional probe_bitrate, + absl::optional state_estimate, + bool recovered_from_overuse, + bool in_alr, + Timestamp at_time); + // Updates the current remote rate estimate and returns true if a valid + // estimate exists. + bool UpdateEstimate(Timestamp now, + absl::optional acked_bitrate, + DataRate* target_bitrate); + + rtc::RaceChecker network_race_; + RtcEventLog* const event_log_; + const WebRtcKeyValueConfig* const key_value_config_; + + // Filtering out small packets. Intention is to base the detection only + // on video packets even if we have TWCC sequence numbers for audio. + BweIgnoreSmallPacketsSettings ignore_small_; + double fraction_large_packets_; + + // Alternatively, run two separate overuse detectors for audio and video, + // and fall back to the audio one if we haven't seen a video packet in a + // while. 
+ BweSeparateAudioPacketsSettings separate_audio_; + int64_t audio_packets_since_last_video_; + Timestamp last_video_packet_recv_time_; + + NetworkStatePredictor* network_state_predictor_; + std::unique_ptr video_inter_arrival_; + std::unique_ptr video_delay_detector_; + std::unique_ptr audio_inter_arrival_; + std::unique_ptr audio_delay_detector_; + DelayIncreaseDetectorInterface* active_delay_detector_; + + Timestamp last_seen_packet_; + bool uma_recorded_; + AimdRateControl rate_control_; + DataRate prev_bitrate_; + bool has_once_detected_overuse_; + BandwidthUsage prev_state_; + bool alr_limited_backoff_enabled_; + RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(DelayBasedBwe); +}; + +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_GOOG_CC_DELAY_BASED_BWE_H_ diff --git a/modules/congestion_controller/goog_cc/delay_based_bwe_unittest.cc b/modules/congestion_controller/goog_cc/delay_based_bwe_unittest.cc new file mode 100644 index 0000000..7860c3d --- /dev/null +++ b/modules/congestion_controller/goog_cc/delay_based_bwe_unittest.cc @@ -0,0 +1,272 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/congestion_controller/goog_cc/delay_based_bwe.h" + +#include "api/transport/network_types.h" +#include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h" +#include "modules/congestion_controller/goog_cc/delay_based_bwe_unittest_helper.h" +#include "system_wrappers/include/clock.h" +#include "test/gtest.h" + +namespace webrtc { + +namespace { +constexpr int kNumProbesCluster0 = 5; +constexpr int kNumProbesCluster1 = 8; +const PacedPacketInfo kPacingInfo0(0, kNumProbesCluster0, 2000); +const PacedPacketInfo kPacingInfo1(1, kNumProbesCluster1, 4000); +constexpr float kTargetUtilizationFraction = 0.95f; +} // namespace + +TEST_F(DelayBasedBweTest, ProbeDetection) { + int64_t now_ms = clock_.TimeInMilliseconds(); + + // First burst sent at 8 * 1000 / 10 = 800 kbps. + for (int i = 0; i < kNumProbesCluster0; ++i) { + clock_.AdvanceTimeMilliseconds(10); + now_ms = clock_.TimeInMilliseconds(); + IncomingFeedback(now_ms, now_ms, 1000, kPacingInfo0); + } + EXPECT_TRUE(bitrate_observer_.updated()); + + // Second burst sent at 8 * 1000 / 5 = 1600 kbps. + for (int i = 0; i < kNumProbesCluster1; ++i) { + clock_.AdvanceTimeMilliseconds(5); + now_ms = clock_.TimeInMilliseconds(); + IncomingFeedback(now_ms, now_ms, 1000, kPacingInfo1); + } + + EXPECT_TRUE(bitrate_observer_.updated()); + EXPECT_GT(bitrate_observer_.latest_bitrate(), 1500000u); +} + +TEST_F(DelayBasedBweTest, ProbeDetectionNonPacedPackets) { + int64_t now_ms = clock_.TimeInMilliseconds(); + // First burst sent at 8 * 1000 / 10 = 800 kbps, but with every other packet + // not being paced which could mess things up. + for (int i = 0; i < kNumProbesCluster0; ++i) { + clock_.AdvanceTimeMilliseconds(5); + now_ms = clock_.TimeInMilliseconds(); + IncomingFeedback(now_ms, now_ms, 1000, kPacingInfo0); + // Non-paced packet, arriving 5 ms after. 
+ clock_.AdvanceTimeMilliseconds(5); + IncomingFeedback(now_ms, now_ms, 100, PacedPacketInfo()); + } + + EXPECT_TRUE(bitrate_observer_.updated()); + EXPECT_GT(bitrate_observer_.latest_bitrate(), 800000u); +} + +TEST_F(DelayBasedBweTest, ProbeDetectionFasterArrival) { + int64_t now_ms = clock_.TimeInMilliseconds(); + // First burst sent at 8 * 1000 / 10 = 800 kbps. + // Arriving at 8 * 1000 / 5 = 1600 kbps. + int64_t send_time_ms = 0; + for (int i = 0; i < kNumProbesCluster0; ++i) { + clock_.AdvanceTimeMilliseconds(1); + send_time_ms += 10; + now_ms = clock_.TimeInMilliseconds(); + IncomingFeedback(now_ms, send_time_ms, 1000, kPacingInfo0); + } + + EXPECT_FALSE(bitrate_observer_.updated()); +} + +TEST_F(DelayBasedBweTest, ProbeDetectionSlowerArrival) { + int64_t now_ms = clock_.TimeInMilliseconds(); + // First burst sent at 8 * 1000 / 5 = 1600 kbps. + // Arriving at 8 * 1000 / 7 = 1142 kbps. + // Since the receive rate is significantly below the send rate, we expect to + // use 95% of the estimated capacity. + int64_t send_time_ms = 0; + for (int i = 0; i < kNumProbesCluster1; ++i) { + clock_.AdvanceTimeMilliseconds(7); + send_time_ms += 5; + now_ms = clock_.TimeInMilliseconds(); + IncomingFeedback(now_ms, send_time_ms, 1000, kPacingInfo1); + } + + EXPECT_TRUE(bitrate_observer_.updated()); + EXPECT_NEAR(bitrate_observer_.latest_bitrate(), + kTargetUtilizationFraction * 1140000u, 10000u); +} + +TEST_F(DelayBasedBweTest, ProbeDetectionSlowerArrivalHighBitrate) { + int64_t now_ms = clock_.TimeInMilliseconds(); + // Burst sent at 8 * 1000 / 1 = 8000 kbps. + // Arriving at 8 * 1000 / 2 = 4000 kbps. + // Since the receive rate is significantly below the send rate, we expect to + // use 95% of the estimated capacity. 
+ int64_t send_time_ms = 0; + for (int i = 0; i < kNumProbesCluster1; ++i) { + clock_.AdvanceTimeMilliseconds(2); + send_time_ms += 1; + now_ms = clock_.TimeInMilliseconds(); + IncomingFeedback(now_ms, send_time_ms, 1000, kPacingInfo1); + } + + EXPECT_TRUE(bitrate_observer_.updated()); + EXPECT_NEAR(bitrate_observer_.latest_bitrate(), + kTargetUtilizationFraction * 4000000u, 10000u); +} + +TEST_F(DelayBasedBweTest, GetExpectedBwePeriodMs) { + auto default_interval = bitrate_estimator_->GetExpectedBwePeriod(); + EXPECT_GT(default_interval.ms(), 0); + CapacityDropTestHelper(1, true, 333, 0); + auto interval = bitrate_estimator_->GetExpectedBwePeriod(); + EXPECT_GT(interval.ms(), 0); + EXPECT_NE(interval.ms(), default_interval.ms()); +} + +TEST_F(DelayBasedBweTest, InitialBehavior) { + InitialBehaviorTestHelper(730000); +} + +TEST_F(DelayBasedBweTest, RateIncreaseReordering) { + RateIncreaseReorderingTestHelper(730000); +} +TEST_F(DelayBasedBweTest, RateIncreaseRtpTimestamps) { + RateIncreaseRtpTimestampsTestHelper(622); +} + +TEST_F(DelayBasedBweTest, CapacityDropOneStream) { + CapacityDropTestHelper(1, false, 300, 0); +} + +TEST_F(DelayBasedBweTest, CapacityDropPosOffsetChange) { + CapacityDropTestHelper(1, false, 867, 30000); +} + +TEST_F(DelayBasedBweTest, CapacityDropNegOffsetChange) { + CapacityDropTestHelper(1, false, 933, -30000); +} + +TEST_F(DelayBasedBweTest, CapacityDropOneStreamWrap) { + CapacityDropTestHelper(1, true, 333, 0); +} + +TEST_F(DelayBasedBweTest, TestTimestampGrouping) { + TestTimestampGroupingTestHelper(); +} + +TEST_F(DelayBasedBweTest, TestShortTimeoutAndWrap) { + // Simulate a client leaving and rejoining the call after 35 seconds. This + // will make abs send time wrap, so if streams aren't timed out properly + // the next 30 seconds of packets will be out of order. 
+ TestWrappingHelper(35); +} + +TEST_F(DelayBasedBweTest, TestLongTimeoutAndWrap) { + // Simulate a client leaving and rejoining the call after some multiple of + // 64 seconds later. This will cause a zero difference in abs send times due + // to the wrap, but a big difference in arrival time, if streams aren't + // properly timed out. + TestWrappingHelper(10 * 64); +} + +TEST_F(DelayBasedBweTest, TestInitialOveruse) { + const DataRate kStartBitrate = DataRate::KilobitsPerSec(300); + const DataRate kInitialCapacity = DataRate::KilobitsPerSec(200); + const uint32_t kDummySsrc = 0; + // High FPS to ensure that we send a lot of packets in a short time. + const int kFps = 90; + + stream_generator_->AddStream(new test::RtpStream(kFps, kStartBitrate.bps())); + stream_generator_->set_capacity_bps(kInitialCapacity.bps()); + + // Needed to initialize the AimdRateControl. + bitrate_estimator_->SetStartBitrate(kStartBitrate); + + // Produce 30 frames (in 1/3 second) and give them to the estimator. + int64_t bitrate_bps = kStartBitrate.bps(); + bool seen_overuse = false; + for (int i = 0; i < 30; ++i) { + bool overuse = GenerateAndProcessFrame(kDummySsrc, bitrate_bps); + // The purpose of this test is to ensure that we back down even if we don't + // have any acknowledged bitrate estimate yet. Hence, if the test works + // as expected, we should not have a measured bitrate yet. 
+ EXPECT_FALSE(acknowledged_bitrate_estimator_->bitrate().has_value()); + if (overuse) { + EXPECT_TRUE(bitrate_observer_.updated()); + EXPECT_NEAR(bitrate_observer_.latest_bitrate(), kStartBitrate.bps() / 2, + 15000); + bitrate_bps = bitrate_observer_.latest_bitrate(); + seen_overuse = true; + break; + } else if (bitrate_observer_.updated()) { + bitrate_bps = bitrate_observer_.latest_bitrate(); + bitrate_observer_.Reset(); + } + } + EXPECT_TRUE(seen_overuse); + EXPECT_NEAR(bitrate_observer_.latest_bitrate(), kStartBitrate.bps() / 2, + 15000); +} + +class DelayBasedBweTestWithBackoffTimeoutExperiment : public DelayBasedBweTest { + public: + DelayBasedBweTestWithBackoffTimeoutExperiment() + : DelayBasedBweTest( + "WebRTC-BweAimdRateControlConfig/initial_backoff_interval:200ms/") { + } +}; + +// This test subsumes and improves DelayBasedBweTest.TestInitialOveruse above. +TEST_F(DelayBasedBweTestWithBackoffTimeoutExperiment, TestInitialOveruse) { + const DataRate kStartBitrate = DataRate::KilobitsPerSec(300); + const DataRate kInitialCapacity = DataRate::KilobitsPerSec(200); + const uint32_t kDummySsrc = 0; + // High FPS to ensure that we send a lot of packets in a short time. + const int kFps = 90; + + stream_generator_->AddStream(new test::RtpStream(kFps, kStartBitrate.bps())); + stream_generator_->set_capacity_bps(kInitialCapacity.bps()); + + // Needed to initialize the AimdRateControl. + bitrate_estimator_->SetStartBitrate(kStartBitrate); + + // Produce 30 frames (in 1/3 second) and give them to the estimator. + int64_t bitrate_bps = kStartBitrate.bps(); + bool seen_overuse = false; + for (int frames = 0; frames < 30 && !seen_overuse; ++frames) { + bool overuse = GenerateAndProcessFrame(kDummySsrc, bitrate_bps); + // The purpose of this test is to ensure that we back down even if we don't + // have any acknowledged bitrate estimate yet. Hence, if the test works + // as expected, we should not have a measured bitrate yet. 
+ EXPECT_FALSE(acknowledged_bitrate_estimator_->bitrate().has_value()); + if (overuse) { + EXPECT_TRUE(bitrate_observer_.updated()); + EXPECT_NEAR(bitrate_observer_.latest_bitrate(), kStartBitrate.bps() / 2, + 15000); + bitrate_bps = bitrate_observer_.latest_bitrate(); + seen_overuse = true; + } else if (bitrate_observer_.updated()) { + bitrate_bps = bitrate_observer_.latest_bitrate(); + bitrate_observer_.Reset(); + } + } + EXPECT_TRUE(seen_overuse); + // Continue generating an additional 15 frames (equivalent to 167 ms) and + // verify that we don't back down further. + for (int frames = 0; frames < 15 && seen_overuse; ++frames) { + bool overuse = GenerateAndProcessFrame(kDummySsrc, bitrate_bps); + EXPECT_FALSE(overuse); + if (bitrate_observer_.updated()) { + bitrate_bps = bitrate_observer_.latest_bitrate(); + EXPECT_GE(bitrate_bps, kStartBitrate.bps() / 2 - 15000); + EXPECT_LE(bitrate_bps, kInitialCapacity.bps() + 15000); + bitrate_observer_.Reset(); + } + } +} + +} // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/delay_based_bwe_unittest_helper.cc b/modules/congestion_controller/goog_cc/delay_based_bwe_unittest_helper.cc new file mode 100644 index 0000000..14bac1e --- /dev/null +++ b/modules/congestion_controller/goog_cc/delay_based_bwe_unittest_helper.cc @@ -0,0 +1,534 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/congestion_controller/goog_cc/delay_based_bwe_unittest_helper.h" + +#include +#include +#include + +#include "modules/congestion_controller/goog_cc/delay_based_bwe.h" +#include "rtc_base/checks.h" + +namespace webrtc { +constexpr size_t kMtu = 1200; +constexpr uint32_t kAcceptedBitrateErrorBps = 50000; + +// Number of packets needed before we have a valid estimate. +constexpr int kNumInitialPackets = 2; + +constexpr int kInitialProbingPackets = 5; + +namespace test { + +void TestBitrateObserver::OnReceiveBitrateChanged(uint32_t bitrate) { + latest_bitrate_ = bitrate; + updated_ = true; +} + +RtpStream::RtpStream(int fps, int bitrate_bps) + : fps_(fps), bitrate_bps_(bitrate_bps), next_rtp_time_(0) { + RTC_CHECK_GT(fps_, 0); +} + +// Generates a new frame for this stream. If called too soon after the +// previous frame, no frame will be generated. The frame is split into +// packets. +int64_t RtpStream::GenerateFrame(int64_t time_now_us, + std::vector* packets) { + if (time_now_us < next_rtp_time_) { + return next_rtp_time_; + } + RTC_CHECK(packets != NULL); + size_t bits_per_frame = (bitrate_bps_ + fps_ / 2) / fps_; + size_t n_packets = + std::max((bits_per_frame + 4 * kMtu) / (8 * kMtu), 1u); + size_t payload_size = (bits_per_frame + 4 * n_packets) / (8 * n_packets); + for (size_t i = 0; i < n_packets; ++i) { + PacketResult packet; + packet.sent_packet.send_time = + Timestamp::Micros(time_now_us + kSendSideOffsetUs); + packet.sent_packet.size = DataSize::Bytes(payload_size); + packets->push_back(packet); + } + next_rtp_time_ = time_now_us + (1000000 + fps_ / 2) / fps_; + return next_rtp_time_; +} + +// The send-side time when the next frame can be generated. 
+int64_t RtpStream::next_rtp_time() const { + return next_rtp_time_; +} + +void RtpStream::set_bitrate_bps(int bitrate_bps) { + ASSERT_GE(bitrate_bps, 0); + bitrate_bps_ = bitrate_bps; +} + +int RtpStream::bitrate_bps() const { + return bitrate_bps_; +} + +bool RtpStream::Compare(const std::unique_ptr& lhs, + const std::unique_ptr& rhs) { + return lhs->next_rtp_time_ < rhs->next_rtp_time_; +} + +StreamGenerator::StreamGenerator(int capacity, int64_t time_now) + : capacity_(capacity), prev_arrival_time_us_(time_now) {} + +StreamGenerator::~StreamGenerator() = default; + +// Add a new stream. +void StreamGenerator::AddStream(RtpStream* stream) { + streams_.push_back(std::unique_ptr(stream)); +} + +// Set the link capacity. +void StreamGenerator::set_capacity_bps(int capacity_bps) { + ASSERT_GT(capacity_bps, 0); + capacity_ = capacity_bps; +} + +// Divides |bitrate_bps| among all streams. The allocated bitrate per stream +// is decided by the current allocation ratios. +void StreamGenerator::SetBitrateBps(int bitrate_bps) { + ASSERT_GE(streams_.size(), 0u); + int total_bitrate_before = 0; + for (const auto& stream : streams_) { + total_bitrate_before += stream->bitrate_bps(); + } + int64_t bitrate_before = 0; + int total_bitrate_after = 0; + for (const auto& stream : streams_) { + bitrate_before += stream->bitrate_bps(); + int64_t bitrate_after = + (bitrate_before * bitrate_bps + total_bitrate_before / 2) / + total_bitrate_before; + stream->set_bitrate_bps(bitrate_after - total_bitrate_after); + total_bitrate_after += stream->bitrate_bps(); + } + ASSERT_EQ(bitrate_before, total_bitrate_before); + EXPECT_EQ(total_bitrate_after, bitrate_bps); +} + +// TODO(holmer): Break out the channel simulation part from this class to make +// it possible to simulate different types of channels. 
+int64_t StreamGenerator::GenerateFrame(std::vector* packets, + int64_t time_now_us) { + RTC_CHECK(packets != NULL); + RTC_CHECK(packets->empty()); + RTC_CHECK_GT(capacity_, 0); + auto it = + std::min_element(streams_.begin(), streams_.end(), RtpStream::Compare); + (*it)->GenerateFrame(time_now_us, packets); + int i = 0; + for (PacketResult& packet : *packets) { + int capacity_bpus = capacity_ / 1000; + int64_t required_network_time_us = + (8 * 1000 * packet.sent_packet.size.bytes() + capacity_bpus / 2) / + capacity_bpus; + prev_arrival_time_us_ = + std::max(time_now_us + required_network_time_us, + prev_arrival_time_us_ + required_network_time_us); + packet.receive_time = Timestamp::Micros(prev_arrival_time_us_); + ++i; + } + it = std::min_element(streams_.begin(), streams_.end(), RtpStream::Compare); + return std::max((*it)->next_rtp_time(), time_now_us); +} +} // namespace test + +DelayBasedBweTest::DelayBasedBweTest() + : field_trial(), + clock_(100000000), + acknowledged_bitrate_estimator_( + AcknowledgedBitrateEstimatorInterface::Create(&field_trial_config_)), + probe_bitrate_estimator_(new ProbeBitrateEstimator(nullptr)), + bitrate_estimator_( + new DelayBasedBwe(&field_trial_config_, nullptr, nullptr)), + stream_generator_(new test::StreamGenerator(1e6, // Capacity. + clock_.TimeInMicroseconds())), + arrival_time_offset_ms_(0), + first_update_(true) {} + +DelayBasedBweTest::DelayBasedBweTest(const std::string& field_trial_string) + : field_trial( + std::make_unique(field_trial_string)), + clock_(100000000), + acknowledged_bitrate_estimator_( + AcknowledgedBitrateEstimatorInterface::Create(&field_trial_config_)), + probe_bitrate_estimator_(new ProbeBitrateEstimator(nullptr)), + bitrate_estimator_( + new DelayBasedBwe(&field_trial_config_, nullptr, nullptr)), + stream_generator_(new test::StreamGenerator(1e6, // Capacity. 
+ clock_.TimeInMicroseconds())), + arrival_time_offset_ms_(0), + first_update_(true) {} + +DelayBasedBweTest::~DelayBasedBweTest() {} + +void DelayBasedBweTest::AddDefaultStream() { + stream_generator_->AddStream(new test::RtpStream(30, 3e5)); +} + +const uint32_t DelayBasedBweTest::kDefaultSsrc = 0; + +void DelayBasedBweTest::IncomingFeedback(int64_t arrival_time_ms, + int64_t send_time_ms, + size_t payload_size) { + IncomingFeedback(arrival_time_ms, send_time_ms, payload_size, + PacedPacketInfo()); +} + +void DelayBasedBweTest::IncomingFeedback(int64_t arrival_time_ms, + int64_t send_time_ms, + size_t payload_size, + const PacedPacketInfo& pacing_info) { + RTC_CHECK_GE(arrival_time_ms + arrival_time_offset_ms_, 0); + PacketResult packet; + packet.receive_time = + Timestamp::Millis(arrival_time_ms + arrival_time_offset_ms_); + packet.sent_packet.send_time = Timestamp::Millis(send_time_ms); + packet.sent_packet.size = DataSize::Bytes(payload_size); + packet.sent_packet.pacing_info = pacing_info; + if (packet.sent_packet.pacing_info.probe_cluster_id != + PacedPacketInfo::kNotAProbe) + probe_bitrate_estimator_->HandleProbeAndEstimateBitrate(packet); + + TransportPacketsFeedback msg; + msg.feedback_time = Timestamp::Millis(clock_.TimeInMilliseconds()); + msg.packet_feedbacks.push_back(packet); + acknowledged_bitrate_estimator_->IncomingPacketFeedbackVector( + msg.SortedByReceiveTime()); + DelayBasedBwe::Result result = + bitrate_estimator_->IncomingPacketFeedbackVector( + msg, acknowledged_bitrate_estimator_->bitrate(), + probe_bitrate_estimator_->FetchAndResetLastEstimatedBitrate(), + /*network_estimate*/ absl::nullopt, /*in_alr*/ false); + if (result.updated) { + bitrate_observer_.OnReceiveBitrateChanged(result.target_bitrate.bps()); + } +} + +// Generates a frame of packets belonging to a stream at a given bitrate and +// with a given ssrc. The stream is pushed through a very simple simulated +// network, and is then given to the receive-side bandwidth estimator. 
+// Returns true if an over-use was seen, false otherwise. +// The StreamGenerator::updated() should be used to check for any changes in +// target bitrate after the call to this function. +bool DelayBasedBweTest::GenerateAndProcessFrame(uint32_t ssrc, + uint32_t bitrate_bps) { + stream_generator_->SetBitrateBps(bitrate_bps); + std::vector packets; + + int64_t next_time_us = + stream_generator_->GenerateFrame(&packets, clock_.TimeInMicroseconds()); + if (packets.empty()) + return false; + + bool overuse = false; + bitrate_observer_.Reset(); + clock_.AdvanceTimeMicroseconds(packets.back().receive_time.us() - + clock_.TimeInMicroseconds()); + for (auto& packet : packets) { + RTC_CHECK_GE(packet.receive_time.ms() + arrival_time_offset_ms_, 0); + packet.receive_time += TimeDelta::Millis(arrival_time_offset_ms_); + + if (packet.sent_packet.pacing_info.probe_cluster_id != + PacedPacketInfo::kNotAProbe) + probe_bitrate_estimator_->HandleProbeAndEstimateBitrate(packet); + } + + acknowledged_bitrate_estimator_->IncomingPacketFeedbackVector(packets); + TransportPacketsFeedback msg; + msg.packet_feedbacks = packets; + msg.feedback_time = Timestamp::Millis(clock_.TimeInMilliseconds()); + + DelayBasedBwe::Result result = + bitrate_estimator_->IncomingPacketFeedbackVector( + msg, acknowledged_bitrate_estimator_->bitrate(), + probe_bitrate_estimator_->FetchAndResetLastEstimatedBitrate(), + /*network_estimate*/ absl::nullopt, /*in_alr*/ false); + if (result.updated) { + bitrate_observer_.OnReceiveBitrateChanged(result.target_bitrate.bps()); + if (!first_update_ && result.target_bitrate.bps() < bitrate_bps) + overuse = true; + first_update_ = false; + } + + clock_.AdvanceTimeMicroseconds(next_time_us - clock_.TimeInMicroseconds()); + return overuse; +} + +// Run the bandwidth estimator with a stream of |number_of_frames| frames, or +// until it reaches |target_bitrate|. +// Can for instance be used to run the estimator for some time to get it +// into a steady state. 
+uint32_t DelayBasedBweTest::SteadyStateRun(uint32_t ssrc, + int max_number_of_frames, + uint32_t start_bitrate, + uint32_t min_bitrate, + uint32_t max_bitrate, + uint32_t target_bitrate) { + uint32_t bitrate_bps = start_bitrate; + bool bitrate_update_seen = false; + // Produce |number_of_frames| frames and give them to the estimator. + for (int i = 0; i < max_number_of_frames; ++i) { + bool overuse = GenerateAndProcessFrame(ssrc, bitrate_bps); + if (overuse) { + EXPECT_LT(bitrate_observer_.latest_bitrate(), max_bitrate); + EXPECT_GT(bitrate_observer_.latest_bitrate(), min_bitrate); + bitrate_bps = bitrate_observer_.latest_bitrate(); + bitrate_update_seen = true; + } else if (bitrate_observer_.updated()) { + bitrate_bps = bitrate_observer_.latest_bitrate(); + bitrate_observer_.Reset(); + } + if (bitrate_update_seen && bitrate_bps > target_bitrate) { + break; + } + } + EXPECT_TRUE(bitrate_update_seen); + return bitrate_bps; +} + +void DelayBasedBweTest::InitialBehaviorTestHelper( + uint32_t expected_converge_bitrate) { + const int kFramerate = 50; // 50 fps to avoid rounding errors. + const int kFrameIntervalMs = 1000 / kFramerate; + const PacedPacketInfo kPacingInfo(0, 5, 5000); + DataRate bitrate = DataRate::Zero(); + int64_t send_time_ms = 0; + std::vector ssrcs; + EXPECT_FALSE(bitrate_estimator_->LatestEstimate(&ssrcs, &bitrate)); + EXPECT_EQ(0u, ssrcs.size()); + clock_.AdvanceTimeMilliseconds(1000); + EXPECT_FALSE(bitrate_estimator_->LatestEstimate(&ssrcs, &bitrate)); + EXPECT_FALSE(bitrate_observer_.updated()); + bitrate_observer_.Reset(); + clock_.AdvanceTimeMilliseconds(1000); + // Inserting packets for 5 seconds to get a valid estimate. + for (int i = 0; i < 5 * kFramerate + 1 + kNumInitialPackets; ++i) { + // NOTE!!! If the following line is moved under the if case then this test + // wont work on windows realease bots. + PacedPacketInfo pacing_info = + i < kInitialProbingPackets ? 
kPacingInfo : PacedPacketInfo(); + + if (i == kNumInitialPackets) { + EXPECT_FALSE(bitrate_estimator_->LatestEstimate(&ssrcs, &bitrate)); + EXPECT_EQ(0u, ssrcs.size()); + EXPECT_FALSE(bitrate_observer_.updated()); + bitrate_observer_.Reset(); + } + IncomingFeedback(clock_.TimeInMilliseconds(), send_time_ms, kMtu, + pacing_info); + clock_.AdvanceTimeMilliseconds(1000 / kFramerate); + send_time_ms += kFrameIntervalMs; + } + EXPECT_TRUE(bitrate_estimator_->LatestEstimate(&ssrcs, &bitrate)); + ASSERT_EQ(1u, ssrcs.size()); + EXPECT_EQ(kDefaultSsrc, ssrcs.front()); + EXPECT_NEAR(expected_converge_bitrate, bitrate.bps(), + kAcceptedBitrateErrorBps); + EXPECT_TRUE(bitrate_observer_.updated()); + bitrate_observer_.Reset(); + EXPECT_EQ(bitrate_observer_.latest_bitrate(), bitrate.bps()); +} + +void DelayBasedBweTest::RateIncreaseReorderingTestHelper( + uint32_t expected_bitrate_bps) { + const int kFramerate = 50; // 50 fps to avoid rounding errors. + const int kFrameIntervalMs = 1000 / kFramerate; + const PacedPacketInfo kPacingInfo(0, 5, 5000); + int64_t send_time_ms = 0; + // Inserting packets for five seconds to get a valid estimate. + for (int i = 0; i < 5 * kFramerate + 1 + kNumInitialPackets; ++i) { + // NOTE!!! If the following line is moved under the if case then this test + // wont work on windows realease bots. + PacedPacketInfo pacing_info = + i < kInitialProbingPackets ? kPacingInfo : PacedPacketInfo(); + + // TODO(sprang): Remove this hack once the single stream estimator is gone, + // as it doesn't do anything in Process(). + if (i == kNumInitialPackets) { + // Process after we have enough frames to get a valid input rate estimate. + + EXPECT_FALSE(bitrate_observer_.updated()); // No valid estimate. 
+ } + IncomingFeedback(clock_.TimeInMilliseconds(), send_time_ms, kMtu, + pacing_info); + clock_.AdvanceTimeMilliseconds(kFrameIntervalMs); + send_time_ms += kFrameIntervalMs; + } + EXPECT_TRUE(bitrate_observer_.updated()); + EXPECT_NEAR(expected_bitrate_bps, bitrate_observer_.latest_bitrate(), + kAcceptedBitrateErrorBps); + for (int i = 0; i < 10; ++i) { + clock_.AdvanceTimeMilliseconds(2 * kFrameIntervalMs); + send_time_ms += 2 * kFrameIntervalMs; + IncomingFeedback(clock_.TimeInMilliseconds(), send_time_ms, 1000); + IncomingFeedback(clock_.TimeInMilliseconds(), + send_time_ms - kFrameIntervalMs, 1000); + } + EXPECT_TRUE(bitrate_observer_.updated()); + EXPECT_NEAR(expected_bitrate_bps, bitrate_observer_.latest_bitrate(), + kAcceptedBitrateErrorBps); +} + +// Make sure we initially increase the bitrate as expected. +void DelayBasedBweTest::RateIncreaseRtpTimestampsTestHelper( + int expected_iterations) { + // This threshold corresponds approximately to increasing linearly with + // bitrate(i) = 1.04 * bitrate(i-1) + 1000 + // until bitrate(i) > 500000, with bitrate(1) ~= 30000. + uint32_t bitrate_bps = 30000; + int iterations = 0; + AddDefaultStream(); + // Feed the estimator with a stream of packets and verify that it reaches + // 500 kbps at the expected time. 
+ while (bitrate_bps < 5e5) { + bool overuse = GenerateAndProcessFrame(kDefaultSsrc, bitrate_bps); + if (overuse) { + EXPECT_GT(bitrate_observer_.latest_bitrate(), bitrate_bps); + bitrate_bps = bitrate_observer_.latest_bitrate(); + bitrate_observer_.Reset(); + } else if (bitrate_observer_.updated()) { + bitrate_bps = bitrate_observer_.latest_bitrate(); + bitrate_observer_.Reset(); + } + ++iterations; + } + ASSERT_EQ(expected_iterations, iterations); +} + +void DelayBasedBweTest::CapacityDropTestHelper( + int number_of_streams, + bool wrap_time_stamp, + uint32_t expected_bitrate_drop_delta, + int64_t receiver_clock_offset_change_ms) { + const int kFramerate = 30; + const int kStartBitrate = 900e3; + const int kMinExpectedBitrate = 800e3; + const int kMaxExpectedBitrate = 1100e3; + const uint32_t kInitialCapacityBps = 1000e3; + const uint32_t kReducedCapacityBps = 500e3; + + int steady_state_time = 0; + if (number_of_streams <= 1) { + steady_state_time = 10; + AddDefaultStream(); + } else { + steady_state_time = 10 * number_of_streams; + int bitrate_sum = 0; + int kBitrateDenom = number_of_streams * (number_of_streams - 1); + for (int i = 0; i < number_of_streams; i++) { + // First stream gets half available bitrate, while the rest share the + // remaining half i.e.: 1/2 = Sum[n/(N*(N-1))] for n=1..N-1 (rounded up) + int bitrate = kStartBitrate / 2; + if (i > 0) { + bitrate = (kStartBitrate * i + kBitrateDenom / 2) / kBitrateDenom; + } + stream_generator_->AddStream(new test::RtpStream(kFramerate, bitrate)); + bitrate_sum += bitrate; + } + ASSERT_EQ(bitrate_sum, kStartBitrate); + } + + // Run in steady state to make the estimator converge. 
+ stream_generator_->set_capacity_bps(kInitialCapacityBps); + uint32_t bitrate_bps = SteadyStateRun( + kDefaultSsrc, steady_state_time * kFramerate, kStartBitrate, + kMinExpectedBitrate, kMaxExpectedBitrate, kInitialCapacityBps); + EXPECT_NEAR(kInitialCapacityBps, bitrate_bps, 180000u); + bitrate_observer_.Reset(); + + // Add an offset to make sure the BWE can handle it. + arrival_time_offset_ms_ += receiver_clock_offset_change_ms; + + // Reduce the capacity and verify the decrease time. + stream_generator_->set_capacity_bps(kReducedCapacityBps); + int64_t overuse_start_time = clock_.TimeInMilliseconds(); + int64_t bitrate_drop_time = -1; + for (int i = 0; i < 100 * number_of_streams; ++i) { + GenerateAndProcessFrame(kDefaultSsrc, bitrate_bps); + if (bitrate_drop_time == -1 && + bitrate_observer_.latest_bitrate() <= kReducedCapacityBps) { + bitrate_drop_time = clock_.TimeInMilliseconds(); + } + if (bitrate_observer_.updated()) + bitrate_bps = bitrate_observer_.latest_bitrate(); + } + + EXPECT_NEAR(expected_bitrate_drop_delta, + bitrate_drop_time - overuse_start_time, 33); +} + +void DelayBasedBweTest::TestTimestampGroupingTestHelper() { + const int kFramerate = 50; // 50 fps to avoid rounding errors. + const int kFrameIntervalMs = 1000 / kFramerate; + int64_t send_time_ms = 0; + // Initial set of frames to increase the bitrate. 6 seconds to have enough + // time for the first estimate to be generated and for Process() to be called. + for (int i = 0; i <= 6 * kFramerate; ++i) { + IncomingFeedback(clock_.TimeInMilliseconds(), send_time_ms, 1000); + + clock_.AdvanceTimeMilliseconds(kFrameIntervalMs); + send_time_ms += kFrameIntervalMs; + } + EXPECT_TRUE(bitrate_observer_.updated()); + EXPECT_GE(bitrate_observer_.latest_bitrate(), 400000u); + + // Insert batches of frames which were sent very close in time. Also simulate + // capacity over-use to see that we back off correctly. 
+ const int kTimestampGroupLength = 15; + for (int i = 0; i < 100; ++i) { + for (int j = 0; j < kTimestampGroupLength; ++j) { + // Insert |kTimestampGroupLength| frames with just 1 timestamp ticks in + // between. Should be treated as part of the same group by the estimator. + IncomingFeedback(clock_.TimeInMilliseconds(), send_time_ms, 100); + clock_.AdvanceTimeMilliseconds(kFrameIntervalMs / kTimestampGroupLength); + send_time_ms += 1; + } + // Increase time until next batch to simulate over-use. + clock_.AdvanceTimeMilliseconds(10); + send_time_ms += kFrameIntervalMs - kTimestampGroupLength; + } + EXPECT_TRUE(bitrate_observer_.updated()); + // Should have reduced the estimate. + EXPECT_LT(bitrate_observer_.latest_bitrate(), 400000u); +} + +void DelayBasedBweTest::TestWrappingHelper(int silence_time_s) { + const int kFramerate = 100; + const int kFrameIntervalMs = 1000 / kFramerate; + int64_t send_time_ms = 0; + + for (size_t i = 0; i < 3000; ++i) { + IncomingFeedback(clock_.TimeInMilliseconds(), send_time_ms, 1000); + clock_.AdvanceTimeMilliseconds(kFrameIntervalMs); + send_time_ms += kFrameIntervalMs; + } + DataRate bitrate_before = DataRate::Zero(); + std::vector ssrcs; + bitrate_estimator_->LatestEstimate(&ssrcs, &bitrate_before); + + clock_.AdvanceTimeMilliseconds(silence_time_s * 1000); + send_time_ms += silence_time_s * 1000; + + for (size_t i = 0; i < 24; ++i) { + IncomingFeedback(clock_.TimeInMilliseconds(), send_time_ms, 1000); + clock_.AdvanceTimeMilliseconds(2 * kFrameIntervalMs); + send_time_ms += kFrameIntervalMs; + } + DataRate bitrate_after = DataRate::Zero(); + bitrate_estimator_->LatestEstimate(&ssrcs, &bitrate_after); + EXPECT_LT(bitrate_after, bitrate_before); +} +} // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/delay_based_bwe_unittest_helper.h b/modules/congestion_controller/goog_cc/delay_based_bwe_unittest_helper.h new file mode 100644 index 0000000..608cd6b --- /dev/null +++ 
b/modules/congestion_controller/goog_cc/delay_based_bwe_unittest_helper.h @@ -0,0 +1,184 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_DELAY_BASED_BWE_UNITTEST_HELPER_H_ +#define MODULES_CONGESTION_CONTROLLER_GOOG_CC_DELAY_BASED_BWE_UNITTEST_HELPER_H_ + +#include +#include + +#include +#include +#include + +#include "api/transport/field_trial_based_config.h" +#include "api/transport/network_types.h" +#include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator.h" +#include "modules/congestion_controller/goog_cc/delay_based_bwe.h" +#include "rtc_base/constructor_magic.h" +#include "system_wrappers/include/clock.h" +#include "test/field_trial.h" +#include "test/gtest.h" + +namespace webrtc { +namespace test { + +class TestBitrateObserver { + public: + TestBitrateObserver() : updated_(false), latest_bitrate_(0) {} + ~TestBitrateObserver() {} + + void OnReceiveBitrateChanged(uint32_t bitrate); + + void Reset() { updated_ = false; } + + bool updated() const { return updated_; } + + uint32_t latest_bitrate() const { return latest_bitrate_; } + + private: + bool updated_; + uint32_t latest_bitrate_; +}; + +class RtpStream { + public: + enum { kSendSideOffsetUs = 1000000 }; + + RtpStream(int fps, int bitrate_bps); + + // Generates a new frame for this stream. If called too soon after the + // previous frame, no frame will be generated. The frame is split into + // packets. + int64_t GenerateFrame(int64_t time_now_us, + std::vector* packets); + + // The send-side time when the next frame can be generated. 
+ int64_t next_rtp_time() const; + + void set_bitrate_bps(int bitrate_bps); + + int bitrate_bps() const; + + static bool Compare(const std::unique_ptr& lhs, + const std::unique_ptr& rhs); + + private: + int fps_; + int bitrate_bps_; + int64_t next_rtp_time_; + + RTC_DISALLOW_COPY_AND_ASSIGN(RtpStream); +}; + +class StreamGenerator { + public: + StreamGenerator(int capacity, int64_t time_now); + ~StreamGenerator(); + + // Add a new stream. + void AddStream(RtpStream* stream); + + // Set the link capacity. + void set_capacity_bps(int capacity_bps); + + // Divides |bitrate_bps| among all streams. The allocated bitrate per stream + // is decided by the initial allocation ratios. + void SetBitrateBps(int bitrate_bps); + + // Set the RTP timestamp offset for the stream identified by |ssrc|. + void set_rtp_timestamp_offset(uint32_t ssrc, uint32_t offset); + + // TODO(holmer): Break out the channel simulation part from this class to make + // it possible to simulate different types of channels. + int64_t GenerateFrame(std::vector* packets, + int64_t time_now_us); + + private: + // Capacity of the simulated channel in bits per second. + int capacity_; + // The time when the last packet arrived. + int64_t prev_arrival_time_us_; + // All streams being transmitted on this simulated channel. + std::vector> streams_; + + RTC_DISALLOW_COPY_AND_ASSIGN(StreamGenerator); +}; +} // namespace test + +class DelayBasedBweTest : public ::testing::Test { + public: + DelayBasedBweTest(); + explicit DelayBasedBweTest(const std::string& field_trial_string); + ~DelayBasedBweTest() override; + + protected: + void AddDefaultStream(); + + // Helpers to insert a single packet into the delay-based BWE. 
+ void IncomingFeedback(int64_t arrival_time_ms, + int64_t send_time_ms, + size_t payload_size); + void IncomingFeedback(int64_t arrival_time_ms, + int64_t send_time_ms, + size_t payload_size, + const PacedPacketInfo& pacing_info); + + // Generates a frame of packets belonging to a stream at a given bitrate and + // with a given ssrc. The stream is pushed through a very simple simulated + // network, and is then given to the receive-side bandwidth estimator. + // Returns true if an over-use was seen, false otherwise. + // The StreamGenerator::updated() should be used to check for any changes in + // target bitrate after the call to this function. + bool GenerateAndProcessFrame(uint32_t ssrc, uint32_t bitrate_bps); + + // Run the bandwidth estimator with a stream of |number_of_frames| frames, or + // until it reaches |target_bitrate|. + // Can for instance be used to run the estimator for some time to get it + // into a steady state. + uint32_t SteadyStateRun(uint32_t ssrc, + int number_of_frames, + uint32_t start_bitrate, + uint32_t min_bitrate, + uint32_t max_bitrate, + uint32_t target_bitrate); + + void TestTimestampGroupingTestHelper(); + + void TestWrappingHelper(int silence_time_s); + + void InitialBehaviorTestHelper(uint32_t expected_converge_bitrate); + void RateIncreaseReorderingTestHelper(uint32_t expected_bitrate); + void RateIncreaseRtpTimestampsTestHelper(int expected_iterations); + void CapacityDropTestHelper(int number_of_streams, + bool wrap_time_stamp, + uint32_t expected_bitrate_drop_delta, + int64_t receiver_clock_offset_change_ms); + + static const uint32_t kDefaultSsrc; + FieldTrialBasedConfig field_trial_config_; + + std::unique_ptr + field_trial; // Must be initialized first. + SimulatedClock clock_; // Time at the receiver. 
+ test::TestBitrateObserver bitrate_observer_; + std::unique_ptr + acknowledged_bitrate_estimator_; + const std::unique_ptr probe_bitrate_estimator_; + std::unique_ptr bitrate_estimator_; + std::unique_ptr stream_generator_; + int64_t arrival_time_offset_ms_; + bool first_update_; + + RTC_DISALLOW_COPY_AND_ASSIGN(DelayBasedBweTest); +}; +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_GOOG_CC_DELAY_BASED_BWE_UNITTEST_HELPER_H_ diff --git a/modules/congestion_controller/goog_cc/delay_increase_detector_interface.h b/modules/congestion_controller/goog_cc/delay_increase_detector_interface.h new file mode 100644 index 0000000..8fe3f66 --- /dev/null +++ b/modules/congestion_controller/goog_cc/delay_increase_detector_interface.h @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_DELAY_INCREASE_DETECTOR_INTERFACE_H_ +#define MODULES_CONGESTION_CONTROLLER_GOOG_CC_DELAY_INCREASE_DETECTOR_INTERFACE_H_ + +#include + +#include "modules/remote_bitrate_estimator/include/bwe_defines.h" +#include "rtc_base/constructor_magic.h" + +namespace webrtc { + +class DelayIncreaseDetectorInterface { + public: + DelayIncreaseDetectorInterface() {} + virtual ~DelayIncreaseDetectorInterface() {} + + // Update the detector with a new sample. The deltas should represent deltas + // between timestamp groups as defined by the InterArrival class. 
+ virtual void Update(double recv_delta_ms, + double send_delta_ms, + int64_t send_time_ms, + int64_t arrival_time_ms, + size_t packet_size, + bool calculated_deltas) = 0; + + virtual BandwidthUsage State() const = 0; + + RTC_DISALLOW_COPY_AND_ASSIGN(DelayIncreaseDetectorInterface); +}; + +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_GOOG_CC_DELAY_INCREASE_DETECTOR_INTERFACE_H_ diff --git a/modules/congestion_controller/goog_cc/goog_cc_network_control.cc b/modules/congestion_controller/goog_cc/goog_cc_network_control.cc new file mode 100644 index 0000000..b8be098 --- /dev/null +++ b/modules/congestion_controller/goog_cc/goog_cc_network_control.cc @@ -0,0 +1,709 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/congestion_controller/goog_cc/goog_cc_network_control.h" + +#include +#include + +#include +#include +#include +#include +#include +#include +#include + +#include "absl/strings/match.h" +#include "api/units/time_delta.h" +#include "logging/rtc_event_log/events/rtc_event_remote_estimate.h" +#include "modules/congestion_controller/goog_cc/alr_detector.h" +#include "modules/congestion_controller/goog_cc/probe_controller.h" +#include "modules/remote_bitrate_estimator/include/bwe_defines.h" +#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +namespace { +// From RTCPSender video report interval. +constexpr TimeDelta kLossUpdateInterval = TimeDelta::Millis(1000); + +// Pacing-rate relative to our target send rate. 
+// Multiplicative factor that is applied to the target bitrate to calculate +// the number of bytes that can be transmitted per interval. +// Increasing this factor will result in lower delays in cases of bitrate +// overshoots from the encoder. +constexpr float kDefaultPaceMultiplier = 2.5f; + +// If the probe result is far below the current throughput estimate +// it's unlikely that the probe is accurate, so we don't want to drop too far. +// However, if we actually are overusing, we want to drop to something slightly +// below the current throughput estimate to drain the network queues. +constexpr double kProbeDropThroughputFraction = 0.85; + +int64_t GetBpsOrDefault(const absl::optional& rate, + int64_t fallback_bps) { + if (rate && rate->IsFinite()) { + return rate->bps(); + } else { + return fallback_bps; + } +} + +bool IsEnabled(const WebRtcKeyValueConfig* config, absl::string_view key) { + return absl::StartsWith(config->Lookup(key), "Enabled"); +} + +bool IsNotDisabled(const WebRtcKeyValueConfig* config, absl::string_view key) { + return !absl::StartsWith(config->Lookup(key), "Disabled"); +} +} // namespace + +GoogCcNetworkController::GoogCcNetworkController(NetworkControllerConfig config, + GoogCcConfig goog_cc_config) + : key_value_config_(config.key_value_config ? 
config.key_value_config + : &trial_based_config_), + event_log_(config.event_log), + packet_feedback_only_(goog_cc_config.feedback_only), + safe_reset_on_route_change_("Enabled"), + safe_reset_acknowledged_rate_("ack"), + use_min_allocatable_as_lower_bound_( + IsNotDisabled(key_value_config_, "WebRTC-Bwe-MinAllocAsLowerBound")), + ignore_probes_lower_than_network_estimate_(IsNotDisabled( + key_value_config_, + "WebRTC-Bwe-IgnoreProbesLowerThanNetworkStateEstimate")), + limit_probes_lower_than_throughput_estimate_( + IsEnabled(key_value_config_, + "WebRTC-Bwe-LimitProbesLowerThanThroughputEstimate")), + rate_control_settings_( + RateControlSettings::ParseFromKeyValueConfig(key_value_config_)), + loss_based_stable_rate_( + IsEnabled(key_value_config_, "WebRTC-Bwe-LossBasedStableRate")), + probe_controller_( + new ProbeController(key_value_config_, config.event_log)), + congestion_window_pushback_controller_( + rate_control_settings_.UseCongestionWindowPushback() + ? std::make_unique( + key_value_config_) + : nullptr), + bandwidth_estimation_( + std::make_unique(event_log_)), + alr_detector_( + std::make_unique(key_value_config_, config.event_log)), + probe_bitrate_estimator_(new ProbeBitrateEstimator(config.event_log)), + network_estimator_(std::move(goog_cc_config.network_state_estimator)), + network_state_predictor_( + std::move(goog_cc_config.network_state_predictor)), + delay_based_bwe_(new DelayBasedBwe(key_value_config_, + event_log_, + network_state_predictor_.get())), + acknowledged_bitrate_estimator_( + AcknowledgedBitrateEstimatorInterface::Create(key_value_config_)), + initial_config_(config), + last_loss_based_target_rate_(*config.constraints.starting_rate), + last_pushback_target_rate_(last_loss_based_target_rate_), + last_stable_target_rate_(last_loss_based_target_rate_), + pacing_factor_(config.stream_based_config.pacing_factor.value_or( + kDefaultPaceMultiplier)), + min_total_allocated_bitrate_( + 
config.stream_based_config.min_total_allocated_bitrate.value_or( + DataRate::Zero())), + max_padding_rate_(config.stream_based_config.max_padding_rate.value_or( + DataRate::Zero())), + max_total_allocated_bitrate_(DataRate::Zero()) { + RTC_DCHECK(config.constraints.at_time.IsFinite()); + ParseFieldTrial( + {&safe_reset_on_route_change_, &safe_reset_acknowledged_rate_}, + key_value_config_->Lookup("WebRTC-Bwe-SafeResetOnRouteChange")); + if (delay_based_bwe_) + delay_based_bwe_->SetMinBitrate(congestion_controller::GetMinBitrate()); +} + +GoogCcNetworkController::~GoogCcNetworkController() {} + +NetworkControlUpdate GoogCcNetworkController::OnNetworkAvailability( + NetworkAvailability msg) { + NetworkControlUpdate update; + update.probe_cluster_configs = probe_controller_->OnNetworkAvailability(msg); + return update; +} + +NetworkControlUpdate GoogCcNetworkController::OnNetworkRouteChange( + NetworkRouteChange msg) { + if (safe_reset_on_route_change_) { + absl::optional estimated_bitrate; + if (safe_reset_acknowledged_rate_) { + estimated_bitrate = acknowledged_bitrate_estimator_->bitrate(); + if (!estimated_bitrate) + estimated_bitrate = acknowledged_bitrate_estimator_->PeekRate(); + } else { + estimated_bitrate = bandwidth_estimation_->target_rate(); + } + if (estimated_bitrate) { + if (msg.constraints.starting_rate) { + msg.constraints.starting_rate = + std::min(*msg.constraints.starting_rate, *estimated_bitrate); + } else { + msg.constraints.starting_rate = estimated_bitrate; + } + } + } + + acknowledged_bitrate_estimator_ = + AcknowledgedBitrateEstimatorInterface::Create(key_value_config_); + probe_bitrate_estimator_.reset(new ProbeBitrateEstimator(event_log_)); + if (network_estimator_) + network_estimator_->OnRouteChange(msg); + delay_based_bwe_.reset(new DelayBasedBwe(key_value_config_, event_log_, + network_state_predictor_.get())); + bandwidth_estimation_->OnRouteChange(); + probe_controller_->Reset(msg.at_time.ms()); + NetworkControlUpdate update; + 
update.probe_cluster_configs = ResetConstraints(msg.constraints); + MaybeTriggerOnNetworkChanged(&update, msg.at_time); + return update; +} + +NetworkControlUpdate GoogCcNetworkController::OnProcessInterval( + ProcessInterval msg) { + NetworkControlUpdate update; + if (initial_config_) { + update.probe_cluster_configs = + ResetConstraints(initial_config_->constraints); + update.pacer_config = GetPacingRates(msg.at_time); + + if (initial_config_->stream_based_config.requests_alr_probing) { + probe_controller_->EnablePeriodicAlrProbing( + *initial_config_->stream_based_config.requests_alr_probing); + } + absl::optional total_bitrate = + initial_config_->stream_based_config.max_total_allocated_bitrate; + if (total_bitrate) { + auto probes = probe_controller_->OnMaxTotalAllocatedBitrate( + total_bitrate->bps(), msg.at_time.ms()); + update.probe_cluster_configs.insert(update.probe_cluster_configs.end(), + probes.begin(), probes.end()); + + max_total_allocated_bitrate_ = *total_bitrate; + } + initial_config_.reset(); + } + if (congestion_window_pushback_controller_ && msg.pacer_queue) { + congestion_window_pushback_controller_->UpdatePacingQueue( + msg.pacer_queue->bytes()); + } + bandwidth_estimation_->UpdateEstimate(msg.at_time); + absl::optional start_time_ms = + alr_detector_->GetApplicationLimitedRegionStartTime(); + probe_controller_->SetAlrStartTimeMs(start_time_ms); + + auto probes = probe_controller_->Process(msg.at_time.ms()); + update.probe_cluster_configs.insert(update.probe_cluster_configs.end(), + probes.begin(), probes.end()); + + if (rate_control_settings_.UseCongestionWindow() && + last_packet_received_time_.IsFinite() && !feedback_max_rtts_.empty()) { + UpdateCongestionWindowSize(); + } + if (congestion_window_pushback_controller_ && current_data_window_) { + congestion_window_pushback_controller_->SetDataWindow( + *current_data_window_); + } else { + update.congestion_window = current_data_window_; + } + MaybeTriggerOnNetworkChanged(&update, 
msg.at_time); + return update; +} + +NetworkControlUpdate GoogCcNetworkController::OnRemoteBitrateReport( + RemoteBitrateReport msg) { + if (packet_feedback_only_) { + RTC_LOG(LS_ERROR) << "Received REMB for packet feedback only GoogCC"; + return NetworkControlUpdate(); + } + bandwidth_estimation_->UpdateReceiverEstimate(msg.receive_time, + msg.bandwidth); + BWE_TEST_LOGGING_PLOT(1, "REMB_kbps", msg.receive_time.ms(), + msg.bandwidth.bps() / 1000); + return NetworkControlUpdate(); +} + +NetworkControlUpdate GoogCcNetworkController::OnRoundTripTimeUpdate( + RoundTripTimeUpdate msg) { + if (packet_feedback_only_ || msg.smoothed) + return NetworkControlUpdate(); + RTC_DCHECK(!msg.round_trip_time.IsZero()); + if (delay_based_bwe_) + delay_based_bwe_->OnRttUpdate(msg.round_trip_time); + bandwidth_estimation_->UpdateRtt(msg.round_trip_time, msg.receive_time); + return NetworkControlUpdate(); +} + +NetworkControlUpdate GoogCcNetworkController::OnSentPacket( + SentPacket sent_packet) { + alr_detector_->OnBytesSent(sent_packet.size.bytes(), + sent_packet.send_time.ms()); + acknowledged_bitrate_estimator_->SetAlr( + alr_detector_->GetApplicationLimitedRegionStartTime().has_value()); + + if (!first_packet_sent_) { + first_packet_sent_ = true; + // Initialize feedback time to send time to allow estimation of RTT until + // first feedback is received. 
+ bandwidth_estimation_->UpdatePropagationRtt(sent_packet.send_time, + TimeDelta::Zero()); + } + bandwidth_estimation_->OnSentPacket(sent_packet); + + if (congestion_window_pushback_controller_) { + congestion_window_pushback_controller_->UpdateOutstandingData( + sent_packet.data_in_flight.bytes()); + NetworkControlUpdate update; + MaybeTriggerOnNetworkChanged(&update, sent_packet.send_time); + return update; + } else { + return NetworkControlUpdate(); + } +} + +NetworkControlUpdate GoogCcNetworkController::OnReceivedPacket( + ReceivedPacket received_packet) { + last_packet_received_time_ = received_packet.receive_time; + return NetworkControlUpdate(); +} + +NetworkControlUpdate GoogCcNetworkController::OnStreamsConfig( + StreamsConfig msg) { + NetworkControlUpdate update; + if (msg.requests_alr_probing) { + probe_controller_->EnablePeriodicAlrProbing(*msg.requests_alr_probing); + } + if (msg.max_total_allocated_bitrate && + *msg.max_total_allocated_bitrate != max_total_allocated_bitrate_) { + if (rate_control_settings_.TriggerProbeOnMaxAllocatedBitrateChange()) { + update.probe_cluster_configs = + probe_controller_->OnMaxTotalAllocatedBitrate( + msg.max_total_allocated_bitrate->bps(), msg.at_time.ms()); + } else { + probe_controller_->SetMaxBitrate(msg.max_total_allocated_bitrate->bps()); + } + max_total_allocated_bitrate_ = *msg.max_total_allocated_bitrate; + } + bool pacing_changed = false; + if (msg.pacing_factor && *msg.pacing_factor != pacing_factor_) { + pacing_factor_ = *msg.pacing_factor; + pacing_changed = true; + } + if (msg.min_total_allocated_bitrate && + *msg.min_total_allocated_bitrate != min_total_allocated_bitrate_) { + min_total_allocated_bitrate_ = *msg.min_total_allocated_bitrate; + pacing_changed = true; + + if (use_min_allocatable_as_lower_bound_) { + ClampConstraints(); + delay_based_bwe_->SetMinBitrate(min_data_rate_); + bandwidth_estimation_->SetMinMaxBitrate(min_data_rate_, max_data_rate_); + } + } + if (msg.max_padding_rate && 
*msg.max_padding_rate != max_padding_rate_) { + max_padding_rate_ = *msg.max_padding_rate; + pacing_changed = true; + } + + if (pacing_changed) + update.pacer_config = GetPacingRates(msg.at_time); + return update; +} + +NetworkControlUpdate GoogCcNetworkController::OnTargetRateConstraints( + TargetRateConstraints constraints) { + NetworkControlUpdate update; + update.probe_cluster_configs = ResetConstraints(constraints); + MaybeTriggerOnNetworkChanged(&update, constraints.at_time); + return update; +} + +void GoogCcNetworkController::ClampConstraints() { + // TODO(holmer): We should make sure the default bitrates are set to 10 kbps, + // and that we don't try to set the min bitrate to 0 from any applications. + // The congestion controller should allow a min bitrate of 0. + min_data_rate_ = + std::max(min_target_rate_, congestion_controller::GetMinBitrate()); + if (use_min_allocatable_as_lower_bound_) { + min_data_rate_ = std::max(min_data_rate_, min_total_allocated_bitrate_); + } + if (max_data_rate_ < min_data_rate_) { + RTC_LOG(LS_WARNING) << "max bitrate smaller than min bitrate"; + max_data_rate_ = min_data_rate_; + } + if (starting_rate_ && starting_rate_ < min_data_rate_) { + RTC_LOG(LS_WARNING) << "start bitrate smaller than min bitrate"; + starting_rate_ = min_data_rate_; + } +} + +std::vector GoogCcNetworkController::ResetConstraints( + TargetRateConstraints new_constraints) { + min_target_rate_ = new_constraints.min_data_rate.value_or(DataRate::Zero()); + max_data_rate_ = + new_constraints.max_data_rate.value_or(DataRate::PlusInfinity()); + starting_rate_ = new_constraints.starting_rate; + ClampConstraints(); + + bandwidth_estimation_->SetBitrates(starting_rate_, min_data_rate_, + max_data_rate_, new_constraints.at_time); + + if (starting_rate_) + delay_based_bwe_->SetStartBitrate(*starting_rate_); + delay_based_bwe_->SetMinBitrate(min_data_rate_); + + return probe_controller_->SetBitrates( + min_data_rate_.bps(), GetBpsOrDefault(starting_rate_, -1), + 
max_data_rate_.bps_or(-1), new_constraints.at_time.ms()); +} + +NetworkControlUpdate GoogCcNetworkController::OnTransportLossReport( + TransportLossReport msg) { + if (packet_feedback_only_) + return NetworkControlUpdate(); + int64_t total_packets_delta = + msg.packets_received_delta + msg.packets_lost_delta; + bandwidth_estimation_->UpdatePacketsLost( + msg.packets_lost_delta, total_packets_delta, msg.receive_time); + return NetworkControlUpdate(); +} + +void GoogCcNetworkController::UpdateCongestionWindowSize() { + TimeDelta min_feedback_max_rtt = TimeDelta::Millis( + *std::min_element(feedback_max_rtts_.begin(), feedback_max_rtts_.end())); + + const DataSize kMinCwnd = DataSize::Bytes(2 * 1500); + TimeDelta time_window = + min_feedback_max_rtt + + TimeDelta::Millis( + rate_control_settings_.GetCongestionWindowAdditionalTimeMs()); + + DataSize data_window = last_loss_based_target_rate_ * time_window; + if (current_data_window_) { + data_window = + std::max(kMinCwnd, (data_window + current_data_window_.value()) / 2); + } else { + data_window = std::max(kMinCwnd, data_window); + } + current_data_window_ = data_window; +} + +NetworkControlUpdate GoogCcNetworkController::OnTransportPacketsFeedback( + TransportPacketsFeedback report) { + if (report.packet_feedbacks.empty()) { + // TODO(bugs.webrtc.org/10125): Design a better mechanism to safe-guard + // against building very large network queues. 
+ return NetworkControlUpdate(); + } + + if (congestion_window_pushback_controller_) { + congestion_window_pushback_controller_->UpdateOutstandingData( + report.data_in_flight.bytes()); + } + TimeDelta max_feedback_rtt = TimeDelta::MinusInfinity(); + TimeDelta min_propagation_rtt = TimeDelta::PlusInfinity(); + Timestamp max_recv_time = Timestamp::MinusInfinity(); + + std::vector feedbacks = report.ReceivedWithSendInfo(); + for (const auto& feedback : feedbacks) + max_recv_time = std::max(max_recv_time, feedback.receive_time); + + for (const auto& feedback : feedbacks) { + TimeDelta feedback_rtt = + report.feedback_time - feedback.sent_packet.send_time; + TimeDelta min_pending_time = feedback.receive_time - max_recv_time; + TimeDelta propagation_rtt = feedback_rtt - min_pending_time; + max_feedback_rtt = std::max(max_feedback_rtt, feedback_rtt); + min_propagation_rtt = std::min(min_propagation_rtt, propagation_rtt); + } + + if (max_feedback_rtt.IsFinite()) { + feedback_max_rtts_.push_back(max_feedback_rtt.ms()); + const size_t kMaxFeedbackRttWindow = 32; + if (feedback_max_rtts_.size() > kMaxFeedbackRttWindow) + feedback_max_rtts_.pop_front(); + // TODO(srte): Use time since last unacknowledged packet. + bandwidth_estimation_->UpdatePropagationRtt(report.feedback_time, + min_propagation_rtt); + } + if (packet_feedback_only_) { + if (!feedback_max_rtts_.empty()) { + int64_t sum_rtt_ms = std::accumulate(feedback_max_rtts_.begin(), + feedback_max_rtts_.end(), 0); + int64_t mean_rtt_ms = sum_rtt_ms / feedback_max_rtts_.size(); + if (delay_based_bwe_) + delay_based_bwe_->OnRttUpdate(TimeDelta::Millis(mean_rtt_ms)); + } + + TimeDelta feedback_min_rtt = TimeDelta::PlusInfinity(); + for (const auto& packet_feedback : feedbacks) { + TimeDelta pending_time = packet_feedback.receive_time - max_recv_time; + TimeDelta rtt = report.feedback_time - + packet_feedback.sent_packet.send_time - pending_time; + // Value used for predicting NACK round trip time in FEC controller. 
+ feedback_min_rtt = std::min(rtt, feedback_min_rtt); + } + if (feedback_min_rtt.IsFinite()) { + bandwidth_estimation_->UpdateRtt(feedback_min_rtt, report.feedback_time); + } + + expected_packets_since_last_loss_update_ += + report.PacketsWithFeedback().size(); + for (const auto& packet_feedback : report.PacketsWithFeedback()) { + if (packet_feedback.receive_time.IsInfinite()) + lost_packets_since_last_loss_update_ += 1; + } + if (report.feedback_time > next_loss_update_) { + next_loss_update_ = report.feedback_time + kLossUpdateInterval; + bandwidth_estimation_->UpdatePacketsLost( + lost_packets_since_last_loss_update_, + expected_packets_since_last_loss_update_, report.feedback_time); + expected_packets_since_last_loss_update_ = 0; + lost_packets_since_last_loss_update_ = 0; + } + } + absl::optional alr_start_time = + alr_detector_->GetApplicationLimitedRegionStartTime(); + + if (previously_in_alr_ && !alr_start_time.has_value()) { + int64_t now_ms = report.feedback_time.ms(); + acknowledged_bitrate_estimator_->SetAlrEndedTime(report.feedback_time); + probe_controller_->SetAlrEndedTimeMs(now_ms); + } + previously_in_alr_ = alr_start_time.has_value(); + acknowledged_bitrate_estimator_->IncomingPacketFeedbackVector( + report.SortedByReceiveTime()); + auto acknowledged_bitrate = acknowledged_bitrate_estimator_->bitrate(); + bandwidth_estimation_->SetAcknowledgedRate(acknowledged_bitrate, + report.feedback_time); + bandwidth_estimation_->IncomingPacketFeedbackVector(report); + for (const auto& feedback : report.SortedByReceiveTime()) { + if (feedback.sent_packet.pacing_info.probe_cluster_id != + PacedPacketInfo::kNotAProbe) { + probe_bitrate_estimator_->HandleProbeAndEstimateBitrate(feedback); + } + } + + if (network_estimator_) { + network_estimator_->OnTransportPacketsFeedback(report); + auto prev_estimate = estimate_; + estimate_ = network_estimator_->GetCurrentEstimate(); + // TODO(srte): Make OnTransportPacketsFeedback signal whether the state + // changed to 
avoid the need for this check. + if (estimate_ && (!prev_estimate || estimate_->last_feed_time != + prev_estimate->last_feed_time)) { + event_log_->Log(std::make_unique( + estimate_->link_capacity_lower, estimate_->link_capacity_upper)); + } + } + absl::optional probe_bitrate = + probe_bitrate_estimator_->FetchAndResetLastEstimatedBitrate(); + if (ignore_probes_lower_than_network_estimate_ && probe_bitrate && + estimate_ && *probe_bitrate < delay_based_bwe_->last_estimate() && + *probe_bitrate < estimate_->link_capacity_lower) { + probe_bitrate.reset(); + } + if (limit_probes_lower_than_throughput_estimate_ && probe_bitrate && + acknowledged_bitrate) { + // Limit the backoff to something slightly below the acknowledged + // bitrate. ("Slightly below" because we want to drain the queues + // if we are actually overusing.) + // The acknowledged bitrate shouldn't normally be higher than the delay + // based estimate, but it could happen e.g. due to packet bursts or + // encoder overshoot. We use std::min to ensure that a probe result + // below the current BWE never causes an increase. + DataRate limit = + std::min(delay_based_bwe_->last_estimate(), + *acknowledged_bitrate * kProbeDropThroughputFraction); + probe_bitrate = std::max(*probe_bitrate, limit); + } + + NetworkControlUpdate update; + bool recovered_from_overuse = false; + bool backoff_in_alr = false; + + DelayBasedBwe::Result result; + result = delay_based_bwe_->IncomingPacketFeedbackVector( + report, acknowledged_bitrate, probe_bitrate, estimate_, + alr_start_time.has_value()); + + if (result.updated) { + if (result.probe) { + bandwidth_estimation_->SetSendBitrate(result.target_bitrate, + report.feedback_time); + } + // Since SetSendBitrate now resets the delay-based estimate, we have to + // call UpdateDelayBasedEstimate after SetSendBitrate. 
+ bandwidth_estimation_->UpdateDelayBasedEstimate(report.feedback_time, + result.target_bitrate); + // Update the estimate in the ProbeController, in case we want to probe. + MaybeTriggerOnNetworkChanged(&update, report.feedback_time); + } + recovered_from_overuse = result.recovered_from_overuse; + backoff_in_alr = result.backoff_in_alr; + + if (recovered_from_overuse) { + probe_controller_->SetAlrStartTimeMs(alr_start_time); + auto probes = probe_controller_->RequestProbe(report.feedback_time.ms()); + update.probe_cluster_configs.insert(update.probe_cluster_configs.end(), + probes.begin(), probes.end()); + } else if (backoff_in_alr) { + // If we just backed off during ALR, request a new probe. + auto probes = probe_controller_->RequestProbe(report.feedback_time.ms()); + update.probe_cluster_configs.insert(update.probe_cluster_configs.end(), + probes.begin(), probes.end()); + } + + // No valid RTT could be because send-side BWE isn't used, in which case + // we don't try to limit the outstanding packets. 
+ if (rate_control_settings_.UseCongestionWindow() && + max_feedback_rtt.IsFinite()) { + UpdateCongestionWindowSize(); + } + if (congestion_window_pushback_controller_ && current_data_window_) { + congestion_window_pushback_controller_->SetDataWindow( + *current_data_window_); + } else { + update.congestion_window = current_data_window_; + } + + return update; +} + +NetworkControlUpdate GoogCcNetworkController::OnNetworkStateEstimate( + NetworkStateEstimate msg) { + estimate_ = msg; + return NetworkControlUpdate(); +} + +NetworkControlUpdate GoogCcNetworkController::GetNetworkState( + Timestamp at_time) const { + NetworkControlUpdate update; + update.target_rate = TargetTransferRate(); + update.target_rate->network_estimate.at_time = at_time; + update.target_rate->network_estimate.loss_rate_ratio = + last_estimated_fraction_loss_.value_or(0) / 255.0; + update.target_rate->network_estimate.round_trip_time = + last_estimated_round_trip_time_; + update.target_rate->network_estimate.bwe_period = + delay_based_bwe_->GetExpectedBwePeriod(); + + update.target_rate->at_time = at_time; + update.target_rate->target_rate = last_pushback_target_rate_; + update.target_rate->stable_target_rate = + bandwidth_estimation_->GetEstimatedLinkCapacity(); + update.pacer_config = GetPacingRates(at_time); + update.congestion_window = current_data_window_; + return update; +} + +void GoogCcNetworkController::MaybeTriggerOnNetworkChanged( + NetworkControlUpdate* update, + Timestamp at_time) { + uint8_t fraction_loss = bandwidth_estimation_->fraction_loss(); + TimeDelta round_trip_time = bandwidth_estimation_->round_trip_time(); + DataRate loss_based_target_rate = bandwidth_estimation_->target_rate(); + DataRate pushback_target_rate = loss_based_target_rate; + + BWE_TEST_LOGGING_PLOT(1, "fraction_loss_%", at_time.ms(), + (fraction_loss * 100) / 256); + BWE_TEST_LOGGING_PLOT(1, "rtt_ms", at_time.ms(), round_trip_time.ms()); + BWE_TEST_LOGGING_PLOT(1, "Target_bitrate_kbps", at_time.ms(), + 
loss_based_target_rate.kbps()); + + double cwnd_reduce_ratio = 0.0; + if (congestion_window_pushback_controller_) { + int64_t pushback_rate = + congestion_window_pushback_controller_->UpdateTargetBitrate( + loss_based_target_rate.bps()); + pushback_rate = std::max(bandwidth_estimation_->GetMinBitrate(), + pushback_rate); + pushback_target_rate = DataRate::BitsPerSec(pushback_rate); + if (rate_control_settings_.UseCongestionWindowDropFrameOnly()) { + cwnd_reduce_ratio = static_cast(loss_based_target_rate.bps() - + pushback_target_rate.bps()) / + loss_based_target_rate.bps(); + } + } + DataRate stable_target_rate = + bandwidth_estimation_->GetEstimatedLinkCapacity(); + if (loss_based_stable_rate_) { + stable_target_rate = std::min(stable_target_rate, loss_based_target_rate); + } else { + stable_target_rate = std::min(stable_target_rate, pushback_target_rate); + } + + if ((loss_based_target_rate != last_loss_based_target_rate_) || + (fraction_loss != last_estimated_fraction_loss_) || + (round_trip_time != last_estimated_round_trip_time_) || + (pushback_target_rate != last_pushback_target_rate_) || + (stable_target_rate != last_stable_target_rate_)) { + last_loss_based_target_rate_ = loss_based_target_rate; + last_pushback_target_rate_ = pushback_target_rate; + last_estimated_fraction_loss_ = fraction_loss; + last_estimated_round_trip_time_ = round_trip_time; + last_stable_target_rate_ = stable_target_rate; + + alr_detector_->SetEstimatedBitrate(loss_based_target_rate.bps()); + + TimeDelta bwe_period = delay_based_bwe_->GetExpectedBwePeriod(); + + TargetTransferRate target_rate_msg; + target_rate_msg.at_time = at_time; + if (rate_control_settings_.UseCongestionWindowDropFrameOnly()) { + target_rate_msg.target_rate = loss_based_target_rate; + target_rate_msg.cwnd_reduce_ratio = cwnd_reduce_ratio; + } else { + target_rate_msg.target_rate = pushback_target_rate; + } + target_rate_msg.stable_target_rate = stable_target_rate; + target_rate_msg.network_estimate.at_time = 
at_time; + target_rate_msg.network_estimate.round_trip_time = round_trip_time; + target_rate_msg.network_estimate.loss_rate_ratio = fraction_loss / 255.0f; + target_rate_msg.network_estimate.bwe_period = bwe_period; + + update->target_rate = target_rate_msg; + + auto probes = probe_controller_->SetEstimatedBitrate( + loss_based_target_rate.bps(), at_time.ms()); + update->probe_cluster_configs.insert(update->probe_cluster_configs.end(), + probes.begin(), probes.end()); + update->pacer_config = GetPacingRates(at_time); + + RTC_LOG(LS_VERBOSE) << "bwe " << at_time.ms() << " pushback_target_bps=" + << last_pushback_target_rate_.bps() + << " estimate_bps=" << loss_based_target_rate.bps(); + } +} + +PacerConfig GoogCcNetworkController::GetPacingRates(Timestamp at_time) const { + // Pacing rate is based on target rate before congestion window pushback, + // because we don't want to build queues in the pacer when pushback occurs. + DataRate pacing_rate = + std::max(min_total_allocated_bitrate_, last_loss_based_target_rate_) * + pacing_factor_; + DataRate padding_rate = + std::min(max_padding_rate_, last_pushback_target_rate_); + PacerConfig msg; + msg.at_time = at_time; + msg.time_window = TimeDelta::Seconds(1); + msg.data_window = pacing_rate * msg.time_window; + msg.pad_window = padding_rate * msg.time_window; + return msg; +} + +} // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/goog_cc_network_control.h b/modules/congestion_controller/goog_cc/goog_cc_network_control.h new file mode 100644 index 0000000..1e4dcf6 --- /dev/null +++ b/modules/congestion_controller/goog_cc/goog_cc_network_control.h @@ -0,0 +1,146 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_GOOG_CC_NETWORK_CONTROL_H_ +#define MODULES_CONGESTION_CONTROLLER_GOOG_CC_GOOG_CC_NETWORK_CONTROL_H_ + +#include + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/network_state_predictor.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/transport/field_trial_based_config.h" +#include "api/transport/network_control.h" +#include "api/transport/network_types.h" +#include "api/transport/webrtc_key_value_config.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/timestamp.h" +#include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h" +#include "modules/congestion_controller/goog_cc/alr_detector.h" +#include "modules/congestion_controller/goog_cc/congestion_window_pushback_controller.h" +#include "modules/congestion_controller/goog_cc/delay_based_bwe.h" +#include "modules/congestion_controller/goog_cc/probe_controller.h" +#include "modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h" +#include "rtc_base/constructor_magic.h" +#include "rtc_base/experiments/field_trial_parser.h" +#include "rtc_base/experiments/rate_control_settings.h" + +namespace webrtc { +struct GoogCcConfig { + std::unique_ptr network_state_estimator = nullptr; + std::unique_ptr network_state_predictor = nullptr; + bool feedback_only = false; +}; + +class GoogCcNetworkController : public NetworkControllerInterface { + public: + GoogCcNetworkController(NetworkControllerConfig config, + GoogCcConfig goog_cc_config); + ~GoogCcNetworkController() override; + + // NetworkControllerInterface + NetworkControlUpdate OnNetworkAvailability(NetworkAvailability msg) override; + NetworkControlUpdate OnNetworkRouteChange(NetworkRouteChange msg) override; + NetworkControlUpdate OnProcessInterval(ProcessInterval msg) override; + 
NetworkControlUpdate OnRemoteBitrateReport(RemoteBitrateReport msg) override; + NetworkControlUpdate OnRoundTripTimeUpdate(RoundTripTimeUpdate msg) override; + NetworkControlUpdate OnSentPacket(SentPacket msg) override; + NetworkControlUpdate OnReceivedPacket(ReceivedPacket msg) override; + NetworkControlUpdate OnStreamsConfig(StreamsConfig msg) override; + NetworkControlUpdate OnTargetRateConstraints( + TargetRateConstraints msg) override; + NetworkControlUpdate OnTransportLossReport(TransportLossReport msg) override; + NetworkControlUpdate OnTransportPacketsFeedback( + TransportPacketsFeedback msg) override; + NetworkControlUpdate OnNetworkStateEstimate( + NetworkStateEstimate msg) override; + + NetworkControlUpdate GetNetworkState(Timestamp at_time) const; + + private: + friend class GoogCcStatePrinter; + std::vector ResetConstraints( + TargetRateConstraints new_constraints); + void ClampConstraints(); + void MaybeTriggerOnNetworkChanged(NetworkControlUpdate* update, + Timestamp at_time); + void UpdateCongestionWindowSize(); + PacerConfig GetPacingRates(Timestamp at_time) const; + const FieldTrialBasedConfig trial_based_config_; + + const WebRtcKeyValueConfig* const key_value_config_; + RtcEventLog* const event_log_; + const bool packet_feedback_only_; + FieldTrialFlag safe_reset_on_route_change_; + FieldTrialFlag safe_reset_acknowledged_rate_; + const bool use_min_allocatable_as_lower_bound_; + const bool ignore_probes_lower_than_network_estimate_; + const bool limit_probes_lower_than_throughput_estimate_; + const RateControlSettings rate_control_settings_; + const bool loss_based_stable_rate_; + + const std::unique_ptr probe_controller_; + const std::unique_ptr + congestion_window_pushback_controller_; + + std::unique_ptr bandwidth_estimation_; + std::unique_ptr alr_detector_; + std::unique_ptr probe_bitrate_estimator_; + std::unique_ptr network_estimator_; + std::unique_ptr network_state_predictor_; + std::unique_ptr delay_based_bwe_; + std::unique_ptr + 
acknowledged_bitrate_estimator_; + + absl::optional initial_config_; + + DataRate min_target_rate_ = DataRate::Zero(); + DataRate min_data_rate_ = DataRate::Zero(); + DataRate max_data_rate_ = DataRate::PlusInfinity(); + absl::optional starting_rate_; + + bool first_packet_sent_ = false; + + absl::optional estimate_; + + Timestamp next_loss_update_ = Timestamp::MinusInfinity(); + int lost_packets_since_last_loss_update_ = 0; + int expected_packets_since_last_loss_update_ = 0; + + std::deque feedback_max_rtts_; + + DataRate last_loss_based_target_rate_; + DataRate last_pushback_target_rate_; + DataRate last_stable_target_rate_; + + absl::optional last_estimated_fraction_loss_ = 0; + TimeDelta last_estimated_round_trip_time_ = TimeDelta::PlusInfinity(); + Timestamp last_packet_received_time_ = Timestamp::MinusInfinity(); + + double pacing_factor_; + DataRate min_total_allocated_bitrate_; + DataRate max_padding_rate_; + DataRate max_total_allocated_bitrate_; + + bool previously_in_alr_ = false; + + absl::optional current_data_window_; + + RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(GoogCcNetworkController); +}; + +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_GOOG_CC_GOOG_CC_NETWORK_CONTROL_H_ diff --git a/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc b/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc new file mode 100644 index 0000000..361da92 --- /dev/null +++ b/modules/congestion_controller/goog_cc/goog_cc_network_control_unittest.cc @@ -0,0 +1,849 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include + +#include "api/transport/goog_cc_factory.h" +#include "logging/rtc_event_log/mock/mock_rtc_event_log.h" +#include "test/field_trial.h" +#include "test/gtest.h" +#include "test/scenario/scenario.h" + +using ::testing::_; +using ::testing::Field; +using ::testing::Matcher; +using ::testing::NiceMock; +using ::testing::Property; + +namespace webrtc { +namespace test { +namespace { +// Count dips from a constant high bandwidth level within a short window. +int CountBandwidthDips(std::queue bandwidth_history, + DataRate threshold) { + if (bandwidth_history.empty()) + return true; + DataRate first = bandwidth_history.front(); + bandwidth_history.pop(); + + int dips = 0; + bool state_high = true; + while (!bandwidth_history.empty()) { + if (bandwidth_history.front() + threshold < first && state_high) { + ++dips; + state_high = false; + } else if (bandwidth_history.front() == first) { + state_high = true; + } else if (bandwidth_history.front() > first) { + // If this is toggling we will catch it later when front becomes first. 
+ state_high = false; + } + bandwidth_history.pop(); + } + return dips; +} +GoogCcNetworkControllerFactory CreateFeedbackOnlyFactory() { + GoogCcFactoryConfig config; + config.feedback_only = true; + return GoogCcNetworkControllerFactory(std::move(config)); +} + +const uint32_t kInitialBitrateKbps = 60; +const DataRate kInitialBitrate = DataRate::KilobitsPerSec(kInitialBitrateKbps); +const float kDefaultPacingRate = 2.5f; + +CallClient* CreateVideoSendingClient( + Scenario* s, + CallClientConfig config, + std::vector send_link, + std::vector return_link) { + auto* client = s->CreateClient("send", std::move(config)); + auto* route = s->CreateRoutes(client, send_link, + s->CreateClient("return", CallClientConfig()), + return_link); + s->CreateVideoStream(route->forward(), VideoStreamConfig()); + return client; +} + +void UpdatesTargetRateBasedOnLinkCapacity(std::string test_name = "") { + ScopedFieldTrials trial("WebRTC-SendSideBwe-WithOverhead/Enabled/"); + auto factory = CreateFeedbackOnlyFactory(); + Scenario s("googcc_unit/target_capacity" + test_name, false); + CallClientConfig config; + config.transport.cc_factory = &factory; + config.transport.rates.min_rate = DataRate::KilobitsPerSec(10); + config.transport.rates.max_rate = DataRate::KilobitsPerSec(1500); + config.transport.rates.start_rate = DataRate::KilobitsPerSec(300); + auto send_net = s.CreateMutableSimulationNode([](NetworkSimulationConfig* c) { + c->bandwidth = DataRate::KilobitsPerSec(500); + c->delay = TimeDelta::Millis(100); + c->loss_rate = 0.0; + }); + auto ret_net = s.CreateMutableSimulationNode( + [](NetworkSimulationConfig* c) { c->delay = TimeDelta::Millis(100); }); + StatesPrinter* truth = s.CreatePrinter( + "send.truth.txt", TimeDelta::PlusInfinity(), {send_net->ConfigPrinter()}); + + auto* client = CreateVideoSendingClient(&s, config, {send_net->node()}, + {ret_net->node()}); + + truth->PrintRow(); + s.RunFor(TimeDelta::Seconds(25)); + truth->PrintRow(); + 
EXPECT_NEAR(client->target_rate().kbps(), 450, 100); + + send_net->UpdateConfig([](NetworkSimulationConfig* c) { + c->bandwidth = DataRate::KilobitsPerSec(800); + c->delay = TimeDelta::Millis(100); + }); + + truth->PrintRow(); + s.RunFor(TimeDelta::Seconds(20)); + truth->PrintRow(); + EXPECT_NEAR(client->target_rate().kbps(), 750, 150); + + send_net->UpdateConfig([](NetworkSimulationConfig* c) { + c->bandwidth = DataRate::KilobitsPerSec(100); + c->delay = TimeDelta::Millis(200); + }); + ret_net->UpdateConfig( + [](NetworkSimulationConfig* c) { c->delay = TimeDelta::Millis(200); }); + + truth->PrintRow(); + s.RunFor(TimeDelta::Seconds(50)); + truth->PrintRow(); + EXPECT_NEAR(client->target_rate().kbps(), 90, 25); +} +} // namespace + +class GoogCcNetworkControllerTest : public ::testing::Test { + protected: + GoogCcNetworkControllerTest() + : current_time_(Timestamp::Millis(123456)), factory_() {} + ~GoogCcNetworkControllerTest() override {} + + void SetUp() override { + controller_ = factory_.Create(InitialConfig()); + NetworkControlUpdate update = + controller_->OnProcessInterval(DefaultInterval()); + EXPECT_EQ(update.target_rate->target_rate, kInitialBitrate); + EXPECT_EQ(update.pacer_config->data_rate(), + kInitialBitrate * kDefaultPacingRate); + + EXPECT_EQ(update.probe_cluster_configs[0].target_data_rate, + kInitialBitrate * 3); + EXPECT_EQ(update.probe_cluster_configs[1].target_data_rate, + kInitialBitrate * 5); + } + // Custom setup - use an observer that tracks the target bitrate, without + // prescribing on which iterations it must change (like a mock would). 
+ void TargetBitrateTrackingSetup() { + controller_ = factory_.Create(InitialConfig()); + OnUpdate(controller_->OnProcessInterval(DefaultInterval())); + } + + NetworkControllerConfig InitialConfig( + int starting_bandwidth_kbps = kInitialBitrateKbps, + int min_data_rate_kbps = 0, + int max_data_rate_kbps = 5 * kInitialBitrateKbps) { + NetworkControllerConfig config; + config.constraints.at_time = current_time_; + config.constraints.min_data_rate = + DataRate::KilobitsPerSec(min_data_rate_kbps); + config.constraints.max_data_rate = + DataRate::KilobitsPerSec(max_data_rate_kbps); + config.constraints.starting_rate = + DataRate::KilobitsPerSec(starting_bandwidth_kbps); + config.event_log = &event_log_; + return config; + } + ProcessInterval DefaultInterval() { + ProcessInterval interval; + interval.at_time = current_time_; + return interval; + } + RemoteBitrateReport CreateBitrateReport(DataRate rate) { + RemoteBitrateReport report; + report.receive_time = current_time_; + report.bandwidth = rate; + return report; + } + PacketResult CreateResult(int64_t arrival_time_ms, + int64_t send_time_ms, + size_t payload_size, + PacedPacketInfo pacing_info) { + PacketResult packet_result; + packet_result.sent_packet = SentPacket(); + packet_result.sent_packet.send_time = Timestamp::Millis(send_time_ms); + packet_result.sent_packet.size = DataSize::Bytes(payload_size); + packet_result.sent_packet.pacing_info = pacing_info; + packet_result.receive_time = Timestamp::Millis(arrival_time_ms); + return packet_result; + } + + NetworkRouteChange CreateRouteChange( + absl::optional start_rate = absl::nullopt, + absl::optional min_rate = absl::nullopt, + absl::optional max_rate = absl::nullopt) { + NetworkRouteChange route_change; + route_change.at_time = current_time_; + route_change.constraints.at_time = current_time_; + route_change.constraints.min_data_rate = min_rate; + route_change.constraints.max_data_rate = max_rate; + route_change.constraints.starting_rate = start_rate; + return 
route_change; + } + + void AdvanceTimeMilliseconds(int timedelta_ms) { + current_time_ += TimeDelta::Millis(timedelta_ms); + } + + void OnUpdate(NetworkControlUpdate update) { + if (update.target_rate) + target_bitrate_ = update.target_rate->target_rate; + } + + void PacketTransmissionAndFeedbackBlock(int64_t runtime_ms, int64_t delay) { + int64_t delay_buildup = 0; + int64_t start_time_ms = current_time_.ms(); + while (current_time_.ms() - start_time_ms < runtime_ms) { + constexpr size_t kPayloadSize = 1000; + PacketResult packet = + CreateResult(current_time_.ms() + delay_buildup, current_time_.ms(), + kPayloadSize, PacedPacketInfo()); + delay_buildup += delay; + OnUpdate(controller_->OnSentPacket(packet.sent_packet)); + TransportPacketsFeedback feedback; + feedback.feedback_time = packet.receive_time; + feedback.packet_feedbacks.push_back(packet); + OnUpdate(controller_->OnTransportPacketsFeedback(feedback)); + AdvanceTimeMilliseconds(50); + OnUpdate(controller_->OnProcessInterval(DefaultInterval())); + } + } + Timestamp current_time_; + absl::optional target_bitrate_; + NiceMock event_log_; + GoogCcNetworkControllerFactory factory_; + std::unique_ptr controller_; +}; + +TEST_F(GoogCcNetworkControllerTest, ReactsToChangedNetworkConditions) { + // Test no change. 
+ AdvanceTimeMilliseconds(25); + OnUpdate(controller_->OnProcessInterval(DefaultInterval())); + + NetworkControlUpdate update; + OnUpdate(controller_->OnRemoteBitrateReport( + CreateBitrateReport(kInitialBitrate * 2))); + AdvanceTimeMilliseconds(25); + update = controller_->OnProcessInterval(DefaultInterval()); + EXPECT_EQ(update.target_rate->target_rate, kInitialBitrate * 2); + EXPECT_EQ(update.pacer_config->data_rate(), + kInitialBitrate * 2 * kDefaultPacingRate); + + OnUpdate( + controller_->OnRemoteBitrateReport(CreateBitrateReport(kInitialBitrate))); + AdvanceTimeMilliseconds(25); + update = controller_->OnProcessInterval(DefaultInterval()); + EXPECT_EQ(update.target_rate->target_rate, kInitialBitrate); + EXPECT_EQ(update.pacer_config->data_rate(), + kInitialBitrate * kDefaultPacingRate); +} + +// Test congestion window pushback on network delay happens. +TEST_F(GoogCcNetworkControllerTest, CongestionWindowPushbackOnNetworkDelay) { + auto factory = CreateFeedbackOnlyFactory(); + ScopedFieldTrials trial( + "WebRTC-CongestionWindow/QueueSize:800,MinBitrate:30000/"); + Scenario s("googcc_unit/cwnd_on_delay", false); + auto send_net = + s.CreateMutableSimulationNode([=](NetworkSimulationConfig* c) { + c->bandwidth = DataRate::KilobitsPerSec(1000); + c->delay = TimeDelta::Millis(100); + }); + auto ret_net = s.CreateSimulationNode( + [](NetworkSimulationConfig* c) { c->delay = TimeDelta::Millis(100); }); + CallClientConfig config; + config.transport.cc_factory = &factory; + // Start high so bandwidth drop has max effect. 
+  config.transport.rates.start_rate = DataRate::KilobitsPerSec(300);
+  config.transport.rates.max_rate = DataRate::KilobitsPerSec(2000);
+  config.transport.rates.min_rate = DataRate::KilobitsPerSec(10);
+
+  auto* client = CreateVideoSendingClient(&s, std::move(config),
+                                          {send_net->node()}, {ret_net});
+
+  s.RunFor(TimeDelta::Seconds(10));
+  send_net->PauseTransmissionUntil(s.Now() + TimeDelta::Seconds(10));
+  s.RunFor(TimeDelta::Seconds(3));
+
+  // After 3 seconds without feedback from any sent packets, we expect that the
+  // target rate is reduced to the minimum pushback threshold
+  // kDefaultMinPushbackTargetBitrateBps, which is defined as 30 kbps in
+  // congestion_window_pushback_controller.
+  EXPECT_LT(client->target_rate().kbps(), 40);
+}
+
+// Test that when the DropFrame option of the congestion window field trial is
+// enabled, frames are dropped instead of the target rate being pushed back on
+// network delay.
+TEST_F(GoogCcNetworkControllerTest,
+       CongestionWindowPushbackDropFrameOnNetworkDelay) {
+  auto factory = CreateFeedbackOnlyFactory();
+  ScopedFieldTrials trial(
+      "WebRTC-CongestionWindow/QueueSize:800,MinBitrate:30000,DropFrame:true/");
+  Scenario s("googcc_unit/cwnd_on_delay", false);
+  auto send_net =
+      s.CreateMutableSimulationNode([=](NetworkSimulationConfig* c) {
+        c->bandwidth = DataRate::KilobitsPerSec(1000);
+        c->delay = TimeDelta::Millis(100);
+      });
+  auto ret_net = s.CreateSimulationNode(
+      [](NetworkSimulationConfig* c) { c->delay = TimeDelta::Millis(100); });
+  CallClientConfig config;
+  config.transport.cc_factory = &factory;
+  // Start high so bandwidth drop has max effect.
+ config.transport.rates.start_rate = DataRate::KilobitsPerSec(300); + config.transport.rates.max_rate = DataRate::KilobitsPerSec(2000); + config.transport.rates.min_rate = DataRate::KilobitsPerSec(10); + + auto* client = CreateVideoSendingClient(&s, std::move(config), + {send_net->node()}, {ret_net}); + + s.RunFor(TimeDelta::Seconds(10)); + send_net->PauseTransmissionUntil(s.Now() + TimeDelta::Seconds(10)); + s.RunFor(TimeDelta::Seconds(3)); + + // As the dropframe is set, after 3 seconds without feedback from any sent + // packets, we expect that the target rate is not reduced by congestion + // window. + EXPECT_GT(client->target_rate().kbps(), 300); +} + +TEST_F(GoogCcNetworkControllerTest, OnNetworkRouteChanged) { + NetworkControlUpdate update; + DataRate new_bitrate = DataRate::BitsPerSec(200000); + update = controller_->OnNetworkRouteChange(CreateRouteChange(new_bitrate)); + EXPECT_EQ(update.target_rate->target_rate, new_bitrate); + EXPECT_EQ(update.pacer_config->data_rate(), new_bitrate * kDefaultPacingRate); + EXPECT_EQ(update.probe_cluster_configs.size(), 2u); + + // If the bitrate is reset to -1, the new starting bitrate will be + // the minimum default bitrate. 
+ const DataRate kDefaultMinBitrate = DataRate::KilobitsPerSec(5); + update = controller_->OnNetworkRouteChange(CreateRouteChange()); + EXPECT_EQ(update.target_rate->target_rate, kDefaultMinBitrate); + EXPECT_NEAR(update.pacer_config->data_rate().bps(), + kDefaultMinBitrate.bps() * kDefaultPacingRate, 10); + EXPECT_EQ(update.probe_cluster_configs.size(), 2u); +} + +TEST_F(GoogCcNetworkControllerTest, ProbeOnRouteChange) { + NetworkControlUpdate update; + update = controller_->OnNetworkRouteChange(CreateRouteChange( + 2 * kInitialBitrate, DataRate::Zero(), 20 * kInitialBitrate)); + + EXPECT_TRUE(update.pacer_config.has_value()); + EXPECT_EQ(update.target_rate->target_rate, kInitialBitrate * 2); + EXPECT_EQ(update.probe_cluster_configs.size(), 2u); + EXPECT_EQ(update.probe_cluster_configs[0].target_data_rate, + kInitialBitrate * 6); + EXPECT_EQ(update.probe_cluster_configs[1].target_data_rate, + kInitialBitrate * 12); + + update = controller_->OnProcessInterval(DefaultInterval()); +} + +// Bandwidth estimation is updated when feedbacks are received. +// Feedbacks which show an increasing delay cause the estimation to be reduced. +TEST_F(GoogCcNetworkControllerTest, UpdatesDelayBasedEstimate) { + TargetBitrateTrackingSetup(); + const int64_t kRunTimeMs = 6000; + + // The test must run and insert packets/feedback long enough that the + // BWE computes a valid estimate. This is first done in an environment which + // simulates no bandwidth limitation, and therefore not built-up delay. + PacketTransmissionAndFeedbackBlock(kRunTimeMs, 0); + ASSERT_TRUE(target_bitrate_.has_value()); + + // Repeat, but this time with a building delay, and make sure that the + // estimation is adjusted downwards. 
+ DataRate bitrate_before_delay = *target_bitrate_; + PacketTransmissionAndFeedbackBlock(kRunTimeMs, 50); + EXPECT_LT(*target_bitrate_, bitrate_before_delay); +} + +TEST_F(GoogCcNetworkControllerTest, + PaddingRateLimitedByCongestionWindowInTrial) { + ScopedFieldTrials trial( + "WebRTC-CongestionWindow/QueueSize:200,MinBitrate:30000/"); + + Scenario s("googcc_unit/padding_limited", false); + auto send_net = + s.CreateMutableSimulationNode([=](NetworkSimulationConfig* c) { + c->bandwidth = DataRate::KilobitsPerSec(1000); + c->delay = TimeDelta::Millis(100); + }); + auto ret_net = s.CreateSimulationNode( + [](NetworkSimulationConfig* c) { c->delay = TimeDelta::Millis(100); }); + CallClientConfig config; + // Start high so bandwidth drop has max effect. + config.transport.rates.start_rate = DataRate::KilobitsPerSec(1000); + config.transport.rates.max_rate = DataRate::KilobitsPerSec(2000); + auto* client = s.CreateClient("send", config); + auto* route = + s.CreateRoutes(client, {send_net->node()}, + s.CreateClient("return", CallClientConfig()), {ret_net}); + VideoStreamConfig video; + video.stream.pad_to_rate = config.transport.rates.max_rate; + s.CreateVideoStream(route->forward(), video); + + // Run for a few seconds to allow the controller to stabilize. + s.RunFor(TimeDelta::Seconds(10)); + + // Check that padding rate matches target rate. + EXPECT_NEAR(client->padding_rate().kbps(), client->target_rate().kbps(), 1); + + // Check this is also the case when congestion window pushback kicks in. + send_net->PauseTransmissionUntil(s.Now() + TimeDelta::Seconds(1)); + EXPECT_NEAR(client->padding_rate().kbps(), client->target_rate().kbps(), 1); +} + +TEST_F(GoogCcNetworkControllerTest, LimitsToFloorIfRttIsHighInTrial) { + // The field trial limits maximum RTT to 2 seconds, higher RTT means that the + // controller backs off until it reaches the minimum configured bitrate. This + // allows the RTT to recover faster than the regular control mechanism would + // achieve. 
+ const DataRate kBandwidthFloor = DataRate::KilobitsPerSec(50); + ScopedFieldTrials trial("WebRTC-Bwe-MaxRttLimit/limit:2s,floor:" + + std::to_string(kBandwidthFloor.kbps()) + "kbps/"); + // In the test case, we limit the capacity and add a cross traffic packet + // burst that blocks media from being sent. This causes the RTT to quickly + // increase above the threshold in the trial. + const DataRate kLinkCapacity = DataRate::KilobitsPerSec(100); + const TimeDelta kBufferBloatDuration = TimeDelta::Seconds(10); + Scenario s("googcc_unit/limit_trial", false); + auto send_net = s.CreateSimulationNode([=](NetworkSimulationConfig* c) { + c->bandwidth = kLinkCapacity; + c->delay = TimeDelta::Millis(100); + }); + auto ret_net = s.CreateSimulationNode( + [](NetworkSimulationConfig* c) { c->delay = TimeDelta::Millis(100); }); + CallClientConfig config; + config.transport.rates.start_rate = kLinkCapacity; + + auto* client = CreateVideoSendingClient(&s, config, {send_net}, {ret_net}); + // Run for a few seconds to allow the controller to stabilize. + s.RunFor(TimeDelta::Seconds(10)); + const DataSize kBloatPacketSize = DataSize::Bytes(1000); + const int kBloatPacketCount = + static_cast(kBufferBloatDuration * kLinkCapacity / kBloatPacketSize); + // This will cause the RTT to be large for a while. + s.TriggerPacketBurst({send_net}, kBloatPacketCount, kBloatPacketSize.bytes()); + // Wait to allow the high RTT to be detected and acted upon. + s.RunFor(TimeDelta::Seconds(6)); + // By now the target rate should have dropped to the minimum configured rate. 
+ EXPECT_NEAR(client->target_rate().kbps(), kBandwidthFloor.kbps(), 5); +} + +TEST_F(GoogCcNetworkControllerTest, UpdatesTargetRateBasedOnLinkCapacity) { + UpdatesTargetRateBasedOnLinkCapacity(); +} + +TEST_F(GoogCcNetworkControllerTest, StableEstimateDoesNotVaryInSteadyState) { + auto factory = CreateFeedbackOnlyFactory(); + Scenario s("googcc_unit/stable_target", false); + CallClientConfig config; + config.transport.cc_factory = &factory; + NetworkSimulationConfig net_conf; + net_conf.bandwidth = DataRate::KilobitsPerSec(500); + net_conf.delay = TimeDelta::Millis(100); + auto send_net = s.CreateSimulationNode(net_conf); + auto ret_net = s.CreateSimulationNode(net_conf); + + auto* client = CreateVideoSendingClient(&s, config, {send_net}, {ret_net}); + // Run for a while to allow the estimate to stabilize. + s.RunFor(TimeDelta::Seconds(30)); + DataRate min_stable_target = DataRate::PlusInfinity(); + DataRate max_stable_target = DataRate::MinusInfinity(); + DataRate min_target = DataRate::PlusInfinity(); + DataRate max_target = DataRate::MinusInfinity(); + + // Measure variation in steady state. + for (int i = 0; i < 20; ++i) { + auto stable_target_rate = client->stable_target_rate(); + auto target_rate = client->target_rate(); + EXPECT_LE(stable_target_rate, target_rate); + + min_stable_target = std::min(min_stable_target, stable_target_rate); + max_stable_target = std::max(max_stable_target, stable_target_rate); + min_target = std::min(min_target, target_rate); + max_target = std::max(max_target, target_rate); + s.RunFor(TimeDelta::Seconds(1)); + } + // We should expect drops by at least 15% (default backoff.) 
+ EXPECT_LT(min_target / max_target, 0.85); + // We should expect the stable target to be more stable than the immediate one + EXPECT_GE(min_stable_target / max_stable_target, min_target / max_target); +} + +TEST_F(GoogCcNetworkControllerTest, + LossBasedControlUpdatesTargetRateBasedOnLinkCapacity) { + ScopedFieldTrials trial("WebRTC-Bwe-LossBasedControl/Enabled/"); + // TODO(srte): Should the behavior be unaffected at low loss rates? + UpdatesTargetRateBasedOnLinkCapacity("_loss_based"); +} + +TEST_F(GoogCcNetworkControllerTest, + LossBasedControlDoesModestBackoffToHighLoss) { + ScopedFieldTrials trial("WebRTC-Bwe-LossBasedControl/Enabled/"); + Scenario s("googcc_unit/high_loss_channel", false); + CallClientConfig config; + config.transport.rates.min_rate = DataRate::KilobitsPerSec(10); + config.transport.rates.max_rate = DataRate::KilobitsPerSec(1500); + config.transport.rates.start_rate = DataRate::KilobitsPerSec(300); + auto send_net = s.CreateSimulationNode([](NetworkSimulationConfig* c) { + c->bandwidth = DataRate::KilobitsPerSec(2000); + c->delay = TimeDelta::Millis(200); + c->loss_rate = 0.1; + }); + auto ret_net = s.CreateSimulationNode( + [](NetworkSimulationConfig* c) { c->delay = TimeDelta::Millis(200); }); + + auto* client = CreateVideoSendingClient(&s, config, {send_net}, {ret_net}); + + s.RunFor(TimeDelta::Seconds(120)); + // Without LossBasedControl trial, bandwidth drops to ~10 kbps. + EXPECT_GT(client->target_rate().kbps(), 100); +} + +DataRate AverageBitrateAfterCrossInducedLoss(std::string name) { + Scenario s(name, false); + NetworkSimulationConfig net_conf; + net_conf.bandwidth = DataRate::KilobitsPerSec(1000); + net_conf.delay = TimeDelta::Millis(100); + // Short queue length means that we'll induce loss when sudden TCP traffic + // spikes are induced. This corresponds to ca 200 ms for a packet size of 1000 + // bytes. Such limited buffers are common on for instance wifi routers. 
+  net_conf.packet_queue_length_limit = 25;
+
+  auto send_net = {s.CreateSimulationNode(net_conf)};
+  auto ret_net = {s.CreateSimulationNode(net_conf)};
+
+  auto* client = s.CreateClient("send", CallClientConfig());
+  auto* callee = s.CreateClient("return", CallClientConfig());
+  auto* route = s.CreateRoutes(client, send_net, callee, ret_net);
+  // TODO(srte): Make this work with RTX enabled or remove it.
+  auto* video = s.CreateVideoStream(route->forward(), [](VideoStreamConfig* c) {
+    c->stream.use_rtx = false;
+  });
+  s.RunFor(TimeDelta::Seconds(10));
+  for (int i = 0; i < 4; ++i) {
+    // Sends TCP cross traffic inducing loss.
+    auto* tcp_traffic =
+        s.net()->StartFakeTcpCrossTraffic(send_net, ret_net, FakeTcpConfig());
+    s.RunFor(TimeDelta::Seconds(2));
+    // Allow the congestion controller to recover.
+    s.net()->StopCrossTraffic(tcp_traffic);
+    s.RunFor(TimeDelta::Seconds(20));
+  }
+
+  // Querying the video stats from within the expected runtime environment
+  // (i.e. the TQ that belongs to the CallClient, not the Scenario TQ that
+  // we're currently on).
+  VideoReceiveStream::Stats video_receive_stats;
+  auto* video_stream = video->receive();
+  callee->SendTask([&video_stream, &video_receive_stats]() {
+    video_receive_stats = video_stream->GetStats();
+  });
+  return DataSize::Bytes(
+             video_receive_stats.rtp_stats.packet_counter.TotalBytes()) /
+         s.TimeSinceStart();
+}
+
+TEST_F(GoogCcNetworkControllerTest,
+       NoLossBasedRecoversSlowerAfterCrossInducedLoss) {
+  // This test acts as a reference for the test below, showing that without the
+  // trial, we have worse behavior.
+  DataRate average_bitrate =
+      AverageBitrateAfterCrossInducedLoss("googcc_unit/no_cross_loss_based");
+  RTC_DCHECK_LE(average_bitrate, DataRate::KilobitsPerSec(650));
+}
+
+TEST_F(GoogCcNetworkControllerTest,
+       LossBasedRecoversFasterAfterCrossInducedLoss) {
+  // We recover bitrate better when subject to loss spikes from cross traffic
+  // when loss based controller is used.
+ ScopedFieldTrials trial("WebRTC-Bwe-LossBasedControl/Enabled/"); + DataRate average_bitrate = + AverageBitrateAfterCrossInducedLoss("googcc_unit/cross_loss_based"); + RTC_DCHECK_GE(average_bitrate, DataRate::KilobitsPerSec(750)); +} + +TEST_F(GoogCcNetworkControllerTest, LossBasedEstimatorCapsRateAtModerateLoss) { + ScopedFieldTrials trial("WebRTC-Bwe-LossBasedControl/Enabled/"); + Scenario s("googcc_unit/moderate_loss_channel", false); + CallClientConfig config; + config.transport.rates.min_rate = DataRate::KilobitsPerSec(10); + config.transport.rates.max_rate = DataRate::KilobitsPerSec(5000); + config.transport.rates.start_rate = DataRate::KilobitsPerSec(1000); + + NetworkSimulationConfig network; + network.bandwidth = DataRate::KilobitsPerSec(2000); + network.delay = TimeDelta::Millis(100); + // 3% loss rate is in the moderate loss rate region at 2000 kbps, limiting the + // bitrate increase. + network.loss_rate = 0.03; + auto send_net = s.CreateMutableSimulationNode(network); + auto* client = s.CreateClient("send", std::move(config)); + auto* route = s.CreateRoutes(client, {send_net->node()}, + s.CreateClient("return", CallClientConfig()), + {s.CreateSimulationNode(network)}); + s.CreateVideoStream(route->forward(), VideoStreamConfig()); + // Allow the controller to stabilize at the lower bitrate. + s.RunFor(TimeDelta::Seconds(1)); + // This increase in capacity would cause the target bitrate to increase to + // over 4000 kbps without LossBasedControl. + send_net->UpdateConfig([](NetworkSimulationConfig* c) { + c->bandwidth = DataRate::KilobitsPerSec(5000); + }); + s.RunFor(TimeDelta::Seconds(20)); + // Using LossBasedControl, the bitrate will not increase over 2500 kbps since + // we have detected moderate loss. 
+ EXPECT_LT(client->target_rate().kbps(), 2500); +} + +TEST_F(GoogCcNetworkControllerTest, MaintainsLowRateInSafeResetTrial) { + const DataRate kLinkCapacity = DataRate::KilobitsPerSec(200); + const DataRate kStartRate = DataRate::KilobitsPerSec(300); + + ScopedFieldTrials trial("WebRTC-Bwe-SafeResetOnRouteChange/Enabled/"); + Scenario s("googcc_unit/safe_reset_low"); + auto* send_net = s.CreateSimulationNode([&](NetworkSimulationConfig* c) { + c->bandwidth = kLinkCapacity; + c->delay = TimeDelta::Millis(10); + }); + auto* client = s.CreateClient("send", [&](CallClientConfig* c) { + c->transport.rates.start_rate = kStartRate; + }); + auto* route = s.CreateRoutes( + client, {send_net}, s.CreateClient("return", CallClientConfig()), + {s.CreateSimulationNode(NetworkSimulationConfig())}); + s.CreateVideoStream(route->forward(), VideoStreamConfig()); + // Allow the controller to stabilize. + s.RunFor(TimeDelta::Millis(500)); + EXPECT_NEAR(client->send_bandwidth().kbps(), kLinkCapacity.kbps(), 50); + s.ChangeRoute(route->forward(), {send_net}); + // Allow new settings to propagate. + s.RunFor(TimeDelta::Millis(100)); + // Under the trial, the target should be unchanged for low rates. 
+ EXPECT_NEAR(client->send_bandwidth().kbps(), kLinkCapacity.kbps(), 50); +} + +TEST_F(GoogCcNetworkControllerTest, CutsHighRateInSafeResetTrial) { + const DataRate kLinkCapacity = DataRate::KilobitsPerSec(1000); + const DataRate kStartRate = DataRate::KilobitsPerSec(300); + + ScopedFieldTrials trial("WebRTC-Bwe-SafeResetOnRouteChange/Enabled/"); + Scenario s("googcc_unit/safe_reset_high_cut"); + auto send_net = s.CreateSimulationNode([&](NetworkSimulationConfig* c) { + c->bandwidth = kLinkCapacity; + c->delay = TimeDelta::Millis(50); + }); + auto* client = s.CreateClient("send", [&](CallClientConfig* c) { + c->transport.rates.start_rate = kStartRate; + }); + auto* route = s.CreateRoutes( + client, {send_net}, s.CreateClient("return", CallClientConfig()), + {s.CreateSimulationNode(NetworkSimulationConfig())}); + s.CreateVideoStream(route->forward(), VideoStreamConfig()); + // Allow the controller to stabilize. + s.RunFor(TimeDelta::Millis(500)); + EXPECT_NEAR(client->send_bandwidth().kbps(), kLinkCapacity.kbps(), 300); + s.ChangeRoute(route->forward(), {send_net}); + // Allow new settings to propagate. + s.RunFor(TimeDelta::Millis(50)); + // Under the trial, the target should be reset from high values. 
+ EXPECT_NEAR(client->send_bandwidth().kbps(), kStartRate.kbps(), 30); +} + +TEST_F(GoogCcNetworkControllerTest, DetectsHighRateInSafeResetTrial) { + ScopedFieldTrials trial( + "WebRTC-Bwe-SafeResetOnRouteChange/Enabled,ack/" + "WebRTC-SendSideBwe-WithOverhead/Enabled/"); + const DataRate kInitialLinkCapacity = DataRate::KilobitsPerSec(200); + const DataRate kNewLinkCapacity = DataRate::KilobitsPerSec(800); + const DataRate kStartRate = DataRate::KilobitsPerSec(300); + + Scenario s("googcc_unit/safe_reset_high_detect"); + auto* initial_net = s.CreateSimulationNode([&](NetworkSimulationConfig* c) { + c->bandwidth = kInitialLinkCapacity; + c->delay = TimeDelta::Millis(50); + }); + auto* new_net = s.CreateSimulationNode([&](NetworkSimulationConfig* c) { + c->bandwidth = kNewLinkCapacity; + c->delay = TimeDelta::Millis(50); + }); + auto* client = s.CreateClient("send", [&](CallClientConfig* c) { + c->transport.rates.start_rate = kStartRate; + }); + auto* route = s.CreateRoutes( + client, {initial_net}, s.CreateClient("return", CallClientConfig()), + {s.CreateSimulationNode(NetworkSimulationConfig())}); + s.CreateVideoStream(route->forward(), VideoStreamConfig()); + // Allow the controller to stabilize. + s.RunFor(TimeDelta::Millis(1000)); + EXPECT_NEAR(client->send_bandwidth().kbps(), kInitialLinkCapacity.kbps(), 50); + s.ChangeRoute(route->forward(), {new_net}); + // Allow new settings to propagate, but not probes to be received. + s.RunFor(TimeDelta::Millis(50)); + // Under the field trial, the target rate should be unchanged since it's lower + // than the starting rate. + EXPECT_NEAR(client->send_bandwidth().kbps(), kInitialLinkCapacity.kbps(), 50); + // However, probing should have made us detect the higher rate. + // NOTE: This test causes high loss rate, and the loss-based estimator reduces + // the bitrate, making the test fail if we wait longer than one second here. 
+ s.RunFor(TimeDelta::Millis(1000)); + EXPECT_GT(client->send_bandwidth().kbps(), kNewLinkCapacity.kbps() - 300); +} + +TEST_F(GoogCcNetworkControllerTest, + TargetRateReducedOnPacingBufferBuildupInTrial) { + // Configure strict pacing to ensure build-up. + ScopedFieldTrials trial( + "WebRTC-CongestionWindow/QueueSize:100,MinBitrate:30000/" + "WebRTC-Video-Pacing/factor:1.0/" + "WebRTC-AddPacingToCongestionWindowPushback/Enabled/"); + + const DataRate kLinkCapacity = DataRate::KilobitsPerSec(1000); + const DataRate kStartRate = DataRate::KilobitsPerSec(1000); + + Scenario s("googcc_unit/pacing_buffer_buildup"); + auto* net = s.CreateSimulationNode([&](NetworkSimulationConfig* c) { + c->bandwidth = kLinkCapacity; + c->delay = TimeDelta::Millis(50); + }); + auto* client = s.CreateClient("send", [&](CallClientConfig* c) { + c->transport.rates.start_rate = kStartRate; + }); + auto* route = s.CreateRoutes( + client, {net}, s.CreateClient("return", CallClientConfig()), + {s.CreateSimulationNode(NetworkSimulationConfig())}); + s.CreateVideoStream(route->forward(), VideoStreamConfig()); + // Allow some time for the buffer to build up. + s.RunFor(TimeDelta::Seconds(5)); + + // Without trial, pacer delay reaches ~250 ms. 
+ EXPECT_LT(client->GetStats().pacer_delay_ms, 150); +} + +TEST_F(GoogCcNetworkControllerTest, NoBandwidthTogglingInLossControlTrial) { + ScopedFieldTrials trial("WebRTC-Bwe-LossBasedControl/Enabled/"); + Scenario s("googcc_unit/no_toggling"); + auto* send_net = s.CreateSimulationNode([&](NetworkSimulationConfig* c) { + c->bandwidth = DataRate::KilobitsPerSec(2000); + c->loss_rate = 0.2; + c->delay = TimeDelta::Millis(10); + }); + + auto* client = s.CreateClient("send", [&](CallClientConfig* c) { + c->transport.rates.start_rate = DataRate::KilobitsPerSec(300); + }); + auto* route = s.CreateRoutes( + client, {send_net}, s.CreateClient("return", CallClientConfig()), + {s.CreateSimulationNode(NetworkSimulationConfig())}); + s.CreateVideoStream(route->forward(), VideoStreamConfig()); + // Allow the controller to initialize. + s.RunFor(TimeDelta::Millis(250)); + + std::queue bandwidth_history; + const TimeDelta step = TimeDelta::Millis(50); + for (TimeDelta time = TimeDelta::Zero(); time < TimeDelta::Millis(2000); + time += step) { + s.RunFor(step); + const TimeDelta window = TimeDelta::Millis(500); + if (bandwidth_history.size() >= window / step) + bandwidth_history.pop(); + bandwidth_history.push(client->send_bandwidth()); + EXPECT_LT( + CountBandwidthDips(bandwidth_history, DataRate::KilobitsPerSec(100)), + 2); + } +} + +TEST_F(GoogCcNetworkControllerTest, NoRttBackoffCollapseWhenVideoStops) { + ScopedFieldTrials trial("WebRTC-Bwe-MaxRttLimit/limit:2s/"); + Scenario s("googcc_unit/rttbackoff_video_stop"); + auto* send_net = s.CreateSimulationNode([&](NetworkSimulationConfig* c) { + c->bandwidth = DataRate::KilobitsPerSec(2000); + c->delay = TimeDelta::Millis(100); + }); + + auto* client = s.CreateClient("send", [&](CallClientConfig* c) { + c->transport.rates.start_rate = DataRate::KilobitsPerSec(1000); + }); + auto* route = s.CreateRoutes( + client, {send_net}, s.CreateClient("return", CallClientConfig()), + {s.CreateSimulationNode(NetworkSimulationConfig())}); + 
auto* video = s.CreateVideoStream(route->forward(), VideoStreamConfig()); + // Allow the controller to initialize, then stop video. + s.RunFor(TimeDelta::Seconds(1)); + video->send()->Stop(); + s.RunFor(TimeDelta::Seconds(4)); + EXPECT_GT(client->send_bandwidth().kbps(), 1000); +} + +TEST_F(GoogCcNetworkControllerTest, NoCrashOnVeryLateFeedback) { + Scenario s; + auto ret_net = s.CreateMutableSimulationNode(NetworkSimulationConfig()); + auto* route = s.CreateRoutes( + s.CreateClient("send", CallClientConfig()), + {s.CreateSimulationNode(NetworkSimulationConfig())}, + s.CreateClient("return", CallClientConfig()), {ret_net->node()}); + auto* video = s.CreateVideoStream(route->forward(), VideoStreamConfig()); + s.RunFor(TimeDelta::Seconds(5)); + // Delay feedback by several minutes. This will cause removal of the send time + // history for the packets as long as kSendTimeHistoryWindow is configured for + // a shorter time span. + ret_net->PauseTransmissionUntil(s.Now() + TimeDelta::Seconds(300)); + // Stopping video stream while waiting to save test execution time. + video->send()->Stop(); + s.RunFor(TimeDelta::Seconds(299)); + // Starting to cause addition of new packet to history, which cause old + // packets to be removed. + video->send()->Start(); + // Runs until the lost packets are received. We expect that this will run + // without causing any runtime failures. 
+ s.RunFor(TimeDelta::Seconds(2)); +} + +TEST_F(GoogCcNetworkControllerTest, IsFairToTCP) { + Scenario s("googcc_unit/tcp_fairness"); + NetworkSimulationConfig net_conf; + net_conf.bandwidth = DataRate::KilobitsPerSec(1000); + net_conf.delay = TimeDelta::Millis(50); + auto* client = s.CreateClient("send", [&](CallClientConfig* c) { + c->transport.rates.start_rate = DataRate::KilobitsPerSec(1000); + }); + auto send_net = {s.CreateSimulationNode(net_conf)}; + auto ret_net = {s.CreateSimulationNode(net_conf)}; + auto* route = s.CreateRoutes( + client, send_net, s.CreateClient("return", CallClientConfig()), ret_net); + s.CreateVideoStream(route->forward(), VideoStreamConfig()); + s.net()->StartFakeTcpCrossTraffic(send_net, ret_net, FakeTcpConfig()); + s.RunFor(TimeDelta::Seconds(10)); + + // Currently only testing for the upper limit as we in practice back out + // quite a lot in this scenario. If this behavior is fixed, we should add a + // lower bound to ensure it stays fixed. + EXPECT_LT(client->send_bandwidth().kbps(), 750); +} + +} // namespace test +} // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/link_capacity_estimator.cc b/modules/congestion_controller/goog_cc/link_capacity_estimator.cc new file mode 100644 index 0000000..9fd537a --- /dev/null +++ b/modules/congestion_controller/goog_cc/link_capacity_estimator.cc @@ -0,0 +1,77 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/congestion_controller/goog_cc/link_capacity_estimator.h" + +#include + +#include "rtc_base/numerics/safe_minmax.h" + +namespace webrtc { +LinkCapacityEstimator::LinkCapacityEstimator() {} + +DataRate LinkCapacityEstimator::UpperBound() const { + if (estimate_kbps_.has_value()) + return DataRate::KilobitsPerSec(estimate_kbps_.value() + + 3 * deviation_estimate_kbps()); + return DataRate::Infinity(); +} + +DataRate LinkCapacityEstimator::LowerBound() const { + if (estimate_kbps_.has_value()) + return DataRate::KilobitsPerSec( + std::max(0.0, estimate_kbps_.value() - 3 * deviation_estimate_kbps())); + return DataRate::Zero(); +} + +void LinkCapacityEstimator::Reset() { + estimate_kbps_.reset(); +} + +void LinkCapacityEstimator::OnOveruseDetected(DataRate acknowledged_rate) { + Update(acknowledged_rate, 0.05); +} + +void LinkCapacityEstimator::OnProbeRate(DataRate probe_rate) { + Update(probe_rate, 0.5); +} + +void LinkCapacityEstimator::Update(DataRate capacity_sample, double alpha) { + double sample_kbps = capacity_sample.kbps(); + if (!estimate_kbps_.has_value()) { + estimate_kbps_ = sample_kbps; + } else { + estimate_kbps_ = (1 - alpha) * estimate_kbps_.value() + alpha * sample_kbps; + } + // Estimate the variance of the link capacity estimate and normalize the + // variance with the link capacity estimate. 
+ const double norm = std::max(estimate_kbps_.value(), 1.0); + double error_kbps = estimate_kbps_.value() - sample_kbps; + deviation_kbps_ = + (1 - alpha) * deviation_kbps_ + alpha * error_kbps * error_kbps / norm; + // 0.4 ~= 14 kbit/s at 500 kbit/s + // 2.5f ~= 35 kbit/s at 500 kbit/s + deviation_kbps_ = rtc::SafeClamp(deviation_kbps_, 0.4f, 2.5f); +} + +bool LinkCapacityEstimator::has_estimate() const { + return estimate_kbps_.has_value(); +} + +DataRate LinkCapacityEstimator::estimate() const { + return DataRate::KilobitsPerSec(*estimate_kbps_); +} + +double LinkCapacityEstimator::deviation_estimate_kbps() const { + // Calculate the max bit rate std dev given the normalized + // variance and the current throughput bitrate. The standard deviation will + // only be used if estimate_kbps_ has a value. + return sqrt(deviation_kbps_ * estimate_kbps_.value()); +} +} // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/link_capacity_estimator.h b/modules/congestion_controller/goog_cc/link_capacity_estimator.h new file mode 100644 index 0000000..aa23491 --- /dev/null +++ b/modules/congestion_controller/goog_cc/link_capacity_estimator.h @@ -0,0 +1,38 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_LINK_CAPACITY_ESTIMATOR_H_ +#define MODULES_CONGESTION_CONTROLLER_GOOG_CC_LINK_CAPACITY_ESTIMATOR_H_ + +#include "absl/types/optional.h" +#include "api/units/data_rate.h" + +namespace webrtc { +class LinkCapacityEstimator { + public: + LinkCapacityEstimator(); + DataRate UpperBound() const; + DataRate LowerBound() const; + void Reset(); + void OnOveruseDetected(DataRate acknowledged_rate); + void OnProbeRate(DataRate probe_rate); + bool has_estimate() const; + DataRate estimate() const; + + private: + friend class GoogCcStatePrinter; + void Update(DataRate capacity_sample, double alpha); + + double deviation_estimate_kbps() const; + absl::optional estimate_kbps_; + double deviation_kbps_ = 0.4; +}; +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_GOOG_CC_LINK_CAPACITY_ESTIMATOR_H_ diff --git a/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc b/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc new file mode 100644 index 0000000..1d2aab8 --- /dev/null +++ b/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.cc @@ -0,0 +1,239 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.h" + +#include +#include +#include + +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "system_wrappers/include/field_trial.h" + +namespace webrtc { +namespace { +const char kBweLossBasedControl[] = "WebRTC-Bwe-LossBasedControl"; + +// Increase slower when RTT is high. 
+double GetIncreaseFactor(const LossBasedControlConfig& config, TimeDelta rtt) { + // Clamp the RTT + if (rtt < config.increase_low_rtt) { + rtt = config.increase_low_rtt; + } else if (rtt > config.increase_high_rtt) { + rtt = config.increase_high_rtt; + } + auto rtt_range = config.increase_high_rtt.Get() - config.increase_low_rtt; + if (rtt_range <= TimeDelta::Zero()) { + RTC_DCHECK(false); // Only on misconfiguration. + return config.min_increase_factor; + } + auto rtt_offset = rtt - config.increase_low_rtt; + auto relative_offset = std::max(0.0, std::min(rtt_offset / rtt_range, 1.0)); + auto factor_range = config.max_increase_factor - config.min_increase_factor; + return config.min_increase_factor + (1 - relative_offset) * factor_range; +} + +double LossFromBitrate(DataRate bitrate, + DataRate loss_bandwidth_balance, + double exponent) { + if (loss_bandwidth_balance >= bitrate) + return 1.0; + return pow(loss_bandwidth_balance / bitrate, exponent); +} + +DataRate BitrateFromLoss(double loss, + DataRate loss_bandwidth_balance, + double exponent) { + if (exponent <= 0) { + RTC_DCHECK(false); + return DataRate::Infinity(); + } + if (loss < 1e-5) + return DataRate::Infinity(); + return loss_bandwidth_balance * pow(loss, -1.0 / exponent); +} + +double ExponentialUpdate(TimeDelta window, TimeDelta interval) { + // Use the convention that exponential window length (which is really + // infinite) is the time it takes to dampen to 1/e. 
+ if (window <= TimeDelta::Zero()) { + RTC_DCHECK(false); + return 1.0f; + } + return 1.0f - exp(interval / window * -1.0); +} + +} // namespace + +LossBasedControlConfig::LossBasedControlConfig() + : enabled(field_trial::IsEnabled(kBweLossBasedControl)), + min_increase_factor("min_incr", 1.02), + max_increase_factor("max_incr", 1.08), + increase_low_rtt("incr_low_rtt", TimeDelta::Millis(200)), + increase_high_rtt("incr_high_rtt", TimeDelta::Millis(800)), + decrease_factor("decr", 0.99), + loss_window("loss_win", TimeDelta::Millis(800)), + loss_max_window("loss_max_win", TimeDelta::Millis(800)), + acknowledged_rate_max_window("ackrate_max_win", TimeDelta::Millis(800)), + increase_offset("incr_offset", DataRate::BitsPerSec(1000)), + loss_bandwidth_balance_increase("balance_incr", + DataRate::KilobitsPerSec(0.5)), + loss_bandwidth_balance_decrease("balance_decr", + DataRate::KilobitsPerSec(4)), + loss_bandwidth_balance_exponent("exponent", 0.5), + allow_resets("resets", false), + decrease_interval("decr_intvl", TimeDelta::Millis(300)), + loss_report_timeout("timeout", TimeDelta::Millis(6000)) { + std::string trial_string = field_trial::FindFullName(kBweLossBasedControl); + ParseFieldTrial( + {&min_increase_factor, &max_increase_factor, &increase_low_rtt, + &increase_high_rtt, &decrease_factor, &loss_window, &loss_max_window, + &acknowledged_rate_max_window, &increase_offset, + &loss_bandwidth_balance_increase, &loss_bandwidth_balance_decrease, + &loss_bandwidth_balance_exponent, &allow_resets, &decrease_interval, + &loss_report_timeout}, + trial_string); +} +LossBasedControlConfig::LossBasedControlConfig(const LossBasedControlConfig&) = + default; +LossBasedControlConfig::~LossBasedControlConfig() = default; + +LossBasedBandwidthEstimation::LossBasedBandwidthEstimation() + : config_(LossBasedControlConfig()), + average_loss_(0), + average_loss_max_(0), + loss_based_bitrate_(DataRate::Zero()), + acknowledged_bitrate_max_(DataRate::Zero()), + 
acknowledged_bitrate_last_update_(Timestamp::MinusInfinity()), + time_last_decrease_(Timestamp::MinusInfinity()), + has_decreased_since_last_loss_report_(false), + last_loss_packet_report_(Timestamp::MinusInfinity()), + last_loss_ratio_(0) {} + +void LossBasedBandwidthEstimation::UpdateLossStatistics( + const std::vector& packet_results, + Timestamp at_time) { + if (packet_results.empty()) { + RTC_DCHECK(false); + return; + } + int loss_count = 0; + for (const auto& pkt : packet_results) { + loss_count += pkt.receive_time.IsInfinite() ? 1 : 0; + } + last_loss_ratio_ = static_cast(loss_count) / packet_results.size(); + const TimeDelta time_passed = last_loss_packet_report_.IsFinite() + ? at_time - last_loss_packet_report_ + : TimeDelta::Seconds(1); + last_loss_packet_report_ = at_time; + has_decreased_since_last_loss_report_ = false; + + average_loss_ += ExponentialUpdate(config_.loss_window, time_passed) * + (last_loss_ratio_ - average_loss_); + if (average_loss_ > average_loss_max_) { + average_loss_max_ = average_loss_; + } else { + average_loss_max_ += + ExponentialUpdate(config_.loss_max_window, time_passed) * + (average_loss_ - average_loss_max_); + } +} + +void LossBasedBandwidthEstimation::UpdateAcknowledgedBitrate( + DataRate acknowledged_bitrate, + Timestamp at_time) { + const TimeDelta time_passed = + acknowledged_bitrate_last_update_.IsFinite() + ? at_time - acknowledged_bitrate_last_update_ + : TimeDelta::Seconds(1); + acknowledged_bitrate_last_update_ = at_time; + if (acknowledged_bitrate > acknowledged_bitrate_max_) { + acknowledged_bitrate_max_ = acknowledged_bitrate; + } else { + acknowledged_bitrate_max_ -= + ExponentialUpdate(config_.acknowledged_rate_max_window, time_passed) * + (acknowledged_bitrate_max_ - acknowledged_bitrate); + } +} + +void LossBasedBandwidthEstimation::Update(Timestamp at_time, + DataRate min_bitrate, + TimeDelta last_round_trip_time) { + // Only increase if loss has been low for some time. 
+ const double loss_estimate_for_increase = average_loss_max_; + // Avoid multiple decreases from averaging over one loss spike. + const double loss_estimate_for_decrease = + std::min(average_loss_, last_loss_ratio_); + const bool allow_decrease = + !has_decreased_since_last_loss_report_ && + (at_time - time_last_decrease_ >= + last_round_trip_time + config_.decrease_interval); + + if (loss_estimate_for_increase < loss_increase_threshold()) { + // Increase bitrate by RTT-adaptive ratio. + DataRate new_increased_bitrate = + min_bitrate * GetIncreaseFactor(config_, last_round_trip_time) + + config_.increase_offset; + // The bitrate that would make the loss "just high enough". + const DataRate new_increased_bitrate_cap = BitrateFromLoss( + loss_estimate_for_increase, config_.loss_bandwidth_balance_increase, + config_.loss_bandwidth_balance_exponent); + new_increased_bitrate = + std::min(new_increased_bitrate, new_increased_bitrate_cap); + loss_based_bitrate_ = std::max(new_increased_bitrate, loss_based_bitrate_); + } else if (loss_estimate_for_decrease > loss_decrease_threshold() && + allow_decrease) { + // The bitrate that would make the loss "just acceptable". 
+ const DataRate new_decreased_bitrate_floor = BitrateFromLoss( + loss_estimate_for_decrease, config_.loss_bandwidth_balance_decrease, + config_.loss_bandwidth_balance_exponent); + DataRate new_decreased_bitrate = + std::max(decreased_bitrate(), new_decreased_bitrate_floor); + if (new_decreased_bitrate < loss_based_bitrate_) { + time_last_decrease_ = at_time; + has_decreased_since_last_loss_report_ = true; + loss_based_bitrate_ = new_decreased_bitrate; + } + } +} + +void LossBasedBandwidthEstimation::Reset(DataRate bitrate) { + loss_based_bitrate_ = bitrate; + average_loss_ = 0; + average_loss_max_ = 0; +} + +double LossBasedBandwidthEstimation::loss_increase_threshold() const { + return LossFromBitrate(loss_based_bitrate_, + config_.loss_bandwidth_balance_increase, + config_.loss_bandwidth_balance_exponent); +} + +double LossBasedBandwidthEstimation::loss_decrease_threshold() const { + return LossFromBitrate(loss_based_bitrate_, + config_.loss_bandwidth_balance_decrease, + config_.loss_bandwidth_balance_exponent); +} + +DataRate LossBasedBandwidthEstimation::decreased_bitrate() const { + return config_.decrease_factor * acknowledged_bitrate_max_; +} + +void LossBasedBandwidthEstimation::MaybeReset(DataRate bitrate) { + if (config_.allow_resets) + Reset(bitrate); +} + +void LossBasedBandwidthEstimation::SetInitialBitrate(DataRate bitrate) { + Reset(bitrate); +} + +} // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.h b/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.h new file mode 100644 index 0000000..b63363c --- /dev/null +++ b/modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.h @@ -0,0 +1,83 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_LOSS_BASED_BANDWIDTH_ESTIMATION_H_ +#define MODULES_CONGESTION_CONTROLLER_GOOG_CC_LOSS_BASED_BANDWIDTH_ESTIMATION_H_ + +#include + +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "rtc_base/experiments/field_trial_parser.h" + +namespace webrtc { + +struct LossBasedControlConfig { + LossBasedControlConfig(); + LossBasedControlConfig(const LossBasedControlConfig&); + LossBasedControlConfig& operator=(const LossBasedControlConfig&) = default; + ~LossBasedControlConfig(); + bool enabled; + FieldTrialParameter min_increase_factor; + FieldTrialParameter max_increase_factor; + FieldTrialParameter increase_low_rtt; + FieldTrialParameter increase_high_rtt; + FieldTrialParameter decrease_factor; + FieldTrialParameter loss_window; + FieldTrialParameter loss_max_window; + FieldTrialParameter acknowledged_rate_max_window; + FieldTrialParameter increase_offset; + FieldTrialParameter loss_bandwidth_balance_increase; + FieldTrialParameter loss_bandwidth_balance_decrease; + FieldTrialParameter loss_bandwidth_balance_exponent; + FieldTrialParameter allow_resets; + FieldTrialParameter decrease_interval; + FieldTrialParameter loss_report_timeout; +}; + +class LossBasedBandwidthEstimation { + public: + LossBasedBandwidthEstimation(); + void Update(Timestamp at_time, + DataRate min_bitrate, + TimeDelta last_round_trip_time); + void UpdateAcknowledgedBitrate(DataRate acknowledged_bitrate, + Timestamp at_time); + void MaybeReset(DataRate bitrate); + void SetInitialBitrate(DataRate bitrate); + bool Enabled() const { return config_.enabled; } + void UpdateLossStatistics(const std::vector& packet_results, + Timestamp at_time); + DataRate 
GetEstimate() const { return loss_based_bitrate_; } + + private: + friend class GoogCcStatePrinter; + void Reset(DataRate bitrate); + double loss_increase_threshold() const; + double loss_decrease_threshold() const; + DataRate decreased_bitrate() const; + + LossBasedControlConfig config_; + double average_loss_; + double average_loss_max_; + DataRate loss_based_bitrate_; + DataRate acknowledged_bitrate_max_; + Timestamp acknowledged_bitrate_last_update_; + Timestamp time_last_decrease_; + bool has_decreased_since_last_loss_report_; + Timestamp last_loss_packet_report_; + double last_loss_ratio_; +}; + +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_GOOG_CC_LOSS_BASED_BANDWIDTH_ESTIMATION_H_ diff --git a/modules/congestion_controller/goog_cc/probe_bitrate_estimator.cc b/modules/congestion_controller/goog_cc/probe_bitrate_estimator.cc new file mode 100644 index 0000000..fdfd531 --- /dev/null +++ b/modules/congestion_controller/goog_cc/probe_bitrate_estimator.cc @@ -0,0 +1,201 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/congestion_controller/goog_cc/probe_bitrate_estimator.h" + +#include +#include + +#include "api/rtc_event_log/rtc_event_log.h" +#include "logging/rtc_event_log/events/rtc_event_probe_result_failure.h" +#include "logging/rtc_event_log/events/rtc_event_probe_result_success.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_conversions.h" + +namespace webrtc { +namespace { +// The minumum number of probes we need to receive feedback about in percent +// in order to have a valid estimate. 
+constexpr double kMinReceivedProbesRatio = .80; + +// The minumum number of bytes we need to receive feedback about in percent +// in order to have a valid estimate. +constexpr double kMinReceivedBytesRatio = .80; + +// The maximum |receive rate| / |send rate| ratio for a valid estimate. +constexpr float kMaxValidRatio = 2.0f; + +// The minimum |receive rate| / |send rate| ratio assuming that the link is +// not saturated, i.e. we assume that we will receive at least +// kMinRatioForUnsaturatedLink * |send rate| if |send rate| is less than the +// link capacity. +constexpr float kMinRatioForUnsaturatedLink = 0.9f; + +// The target utilization of the link. If we know true link capacity +// we'd like to send at 95% of that rate. +constexpr float kTargetUtilizationFraction = 0.95f; + +// The maximum time period over which the cluster history is retained. +// This is also the maximum time period beyond which a probing burst is not +// expected to last. +constexpr TimeDelta kMaxClusterHistory = TimeDelta::Seconds(1); + +// The maximum time interval between first and the last probe on a cluster +// on the sender side as well as the receive side. 
+constexpr TimeDelta kMaxProbeInterval = TimeDelta::Seconds(1); + +} // namespace + +ProbeBitrateEstimator::ProbeBitrateEstimator(RtcEventLog* event_log) + : event_log_(event_log) {} + +ProbeBitrateEstimator::~ProbeBitrateEstimator() = default; + +absl::optional ProbeBitrateEstimator::HandleProbeAndEstimateBitrate( + const PacketResult& packet_feedback) { + int cluster_id = packet_feedback.sent_packet.pacing_info.probe_cluster_id; + RTC_DCHECK_NE(cluster_id, PacedPacketInfo::kNotAProbe); + + EraseOldClusters(packet_feedback.receive_time); + + AggregatedCluster* cluster = &clusters_[cluster_id]; + + if (packet_feedback.sent_packet.send_time < cluster->first_send) { + cluster->first_send = packet_feedback.sent_packet.send_time; + } + if (packet_feedback.sent_packet.send_time > cluster->last_send) { + cluster->last_send = packet_feedback.sent_packet.send_time; + cluster->size_last_send = packet_feedback.sent_packet.size; + } + if (packet_feedback.receive_time < cluster->first_receive) { + cluster->first_receive = packet_feedback.receive_time; + cluster->size_first_receive = packet_feedback.sent_packet.size; + } + if (packet_feedback.receive_time > cluster->last_receive) { + cluster->last_receive = packet_feedback.receive_time; + } + cluster->size_total += packet_feedback.sent_packet.size; + cluster->num_probes += 1; + + RTC_DCHECK_GT( + packet_feedback.sent_packet.pacing_info.probe_cluster_min_probes, 0); + RTC_DCHECK_GT(packet_feedback.sent_packet.pacing_info.probe_cluster_min_bytes, + 0); + + int min_probes = + packet_feedback.sent_packet.pacing_info.probe_cluster_min_probes * + kMinReceivedProbesRatio; + DataSize min_size = + DataSize::Bytes( + packet_feedback.sent_packet.pacing_info.probe_cluster_min_bytes) * + kMinReceivedBytesRatio; + if (cluster->num_probes < min_probes || cluster->size_total < min_size) + return absl::nullopt; + + TimeDelta send_interval = cluster->last_send - cluster->first_send; + TimeDelta receive_interval = cluster->last_receive - 
cluster->first_receive; + + if (send_interval <= TimeDelta::Zero() || send_interval > kMaxProbeInterval || + receive_interval <= TimeDelta::Zero() || + receive_interval > kMaxProbeInterval) { + RTC_LOG(LS_INFO) << "Probing unsuccessful, invalid send/receive interval" + " [cluster id: " + << cluster_id + << "] [send interval: " << ToString(send_interval) + << "]" + " [receive interval: " + << ToString(receive_interval) << "]"; + if (event_log_) { + event_log_->Log(std::make_unique( + cluster_id, ProbeFailureReason::kInvalidSendReceiveInterval)); + } + return absl::nullopt; + } + // Since the |send_interval| does not include the time it takes to actually + // send the last packet the size of the last sent packet should not be + // included when calculating the send bitrate. + RTC_DCHECK_GT(cluster->size_total, cluster->size_last_send); + DataSize send_size = cluster->size_total - cluster->size_last_send; + DataRate send_rate = send_size / send_interval; + + // Since the |receive_interval| does not include the time it takes to + // actually receive the first packet the size of the first received packet + // should not be included when calculating the receive bitrate. 
+ RTC_DCHECK_GT(cluster->size_total, cluster->size_first_receive); + DataSize receive_size = cluster->size_total - cluster->size_first_receive; + DataRate receive_rate = receive_size / receive_interval; + + double ratio = receive_rate / send_rate; + if (ratio > kMaxValidRatio) { + RTC_LOG(LS_INFO) << "Probing unsuccessful, receive/send ratio too high" + " [cluster id: " + << cluster_id << "] [send: " << ToString(send_size) + << " / " << ToString(send_interval) << " = " + << ToString(send_rate) + << "]" + " [receive: " + << ToString(receive_size) << " / " + << ToString(receive_interval) << " = " + << ToString(receive_rate) + << " ]" + " [ratio: " + << ToString(receive_rate) << " / " << ToString(send_rate) + << " = " << ratio << " > kMaxValidRatio (" + << kMaxValidRatio << ")]"; + if (event_log_) { + event_log_->Log(std::make_unique( + cluster_id, ProbeFailureReason::kInvalidSendReceiveRatio)); + } + return absl::nullopt; + } + RTC_LOG(LS_INFO) << "Probing successful" + " [cluster id: " + << cluster_id << "] [send: " << ToString(send_size) << " / " + << ToString(send_interval) << " = " << ToString(send_rate) + << " ]" + " [receive: " + << ToString(receive_size) << " / " + << ToString(receive_interval) << " = " + << ToString(receive_rate) << "]"; + + DataRate res = std::min(send_rate, receive_rate); + // If we're receiving at significantly lower bitrate than we were sending at, + // it suggests that we've found the true capacity of the link. In this case, + // set the target bitrate slightly lower to not immediately overuse. 
+ if (receive_rate < kMinRatioForUnsaturatedLink * send_rate) { + RTC_DCHECK_GT(send_rate, receive_rate); + res = kTargetUtilizationFraction * receive_rate; + } + if (event_log_) { + event_log_->Log( + std::make_unique(cluster_id, res.bps())); + } + estimated_data_rate_ = res; + return estimated_data_rate_; +} + +absl::optional +ProbeBitrateEstimator::FetchAndResetLastEstimatedBitrate() { + absl::optional estimated_data_rate = estimated_data_rate_; + estimated_data_rate_.reset(); + return estimated_data_rate; +} + +void ProbeBitrateEstimator::EraseOldClusters(Timestamp timestamp) { + for (auto it = clusters_.begin(); it != clusters_.end();) { + if (it->second.last_receive + kMaxClusterHistory < timestamp) { + it = clusters_.erase(it); + } else { + ++it; + } + } +} +} // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/probe_bitrate_estimator.h b/modules/congestion_controller/goog_cc/probe_bitrate_estimator.h new file mode 100644 index 0000000..9f2f438 --- /dev/null +++ b/modules/congestion_controller/goog_cc/probe_bitrate_estimator.h @@ -0,0 +1,58 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_PROBE_BITRATE_ESTIMATOR_H_ +#define MODULES_CONGESTION_CONTROLLER_GOOG_CC_PROBE_BITRATE_ESTIMATOR_H_ + +#include +#include + +#include "absl/types/optional.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" + +namespace webrtc { +class RtcEventLog; + +class ProbeBitrateEstimator { + public: + explicit ProbeBitrateEstimator(RtcEventLog* event_log); + ~ProbeBitrateEstimator(); + + // Should be called for every probe packet we receive feedback about. + // Returns the estimated bitrate if the probe completes a valid cluster. + absl::optional HandleProbeAndEstimateBitrate( + const PacketResult& packet_feedback); + + absl::optional FetchAndResetLastEstimatedBitrate(); + + private: + struct AggregatedCluster { + int num_probes = 0; + Timestamp first_send = Timestamp::PlusInfinity(); + Timestamp last_send = Timestamp::MinusInfinity(); + Timestamp first_receive = Timestamp::PlusInfinity(); + Timestamp last_receive = Timestamp::MinusInfinity(); + DataSize size_last_send = DataSize::Zero(); + DataSize size_first_receive = DataSize::Zero(); + DataSize size_total = DataSize::Zero(); + }; + + // Erases old cluster data that was seen before |timestamp|. + void EraseOldClusters(Timestamp timestamp); + + std::map clusters_; + RtcEventLog* const event_log_; + absl::optional estimated_data_rate_; +}; + +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_GOOG_CC_PROBE_BITRATE_ESTIMATOR_H_ diff --git a/modules/congestion_controller/goog_cc/probe_bitrate_estimator_unittest.cc b/modules/congestion_controller/goog_cc/probe_bitrate_estimator_unittest.cc new file mode 100644 index 0000000..6b4146d --- /dev/null +++ b/modules/congestion_controller/goog_cc/probe_bitrate_estimator_unittest.cc @@ -0,0 +1,228 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/congestion_controller/goog_cc/probe_bitrate_estimator.h" + +#include + +#include "api/transport/network_types.h" +#include "test/gtest.h" + +namespace webrtc { + +namespace { +constexpr int kDefaultMinProbes = 5; +constexpr int kDefaultMinBytes = 5000; +constexpr float kTargetUtilizationFraction = 0.95f; +} // anonymous namespace + +class TestProbeBitrateEstimator : public ::testing::Test { + public: + TestProbeBitrateEstimator() : probe_bitrate_estimator_(nullptr) {} + + // TODO(philipel): Use PacedPacketInfo when ProbeBitrateEstimator is rewritten + // to use that information. + void AddPacketFeedback(int probe_cluster_id, + size_t size_bytes, + int64_t send_time_ms, + int64_t arrival_time_ms, + int min_probes = kDefaultMinProbes, + int min_bytes = kDefaultMinBytes) { + const Timestamp kReferenceTime = Timestamp::Seconds(1000); + PacketResult feedback; + feedback.sent_packet.send_time = + kReferenceTime + TimeDelta::Millis(send_time_ms); + feedback.sent_packet.size = DataSize::Bytes(size_bytes); + feedback.sent_packet.pacing_info = + PacedPacketInfo(probe_cluster_id, min_probes, min_bytes); + feedback.receive_time = kReferenceTime + TimeDelta::Millis(arrival_time_ms); + measured_data_rate_ = + probe_bitrate_estimator_.HandleProbeAndEstimateBitrate(feedback); + } + + protected: + absl::optional measured_data_rate_; + ProbeBitrateEstimator probe_bitrate_estimator_; +}; + +TEST_F(TestProbeBitrateEstimator, OneCluster) { + AddPacketFeedback(0, 1000, 0, 10); + AddPacketFeedback(0, 1000, 10, 20); + AddPacketFeedback(0, 1000, 20, 30); + AddPacketFeedback(0, 1000, 30, 40); + + EXPECT_NEAR(measured_data_rate_->bps(), 
800000, 10); +} + +TEST_F(TestProbeBitrateEstimator, OneClusterTooFewProbes) { + AddPacketFeedback(0, 2000, 0, 10); + AddPacketFeedback(0, 2000, 10, 20); + AddPacketFeedback(0, 2000, 20, 30); + + EXPECT_FALSE(measured_data_rate_); +} + +TEST_F(TestProbeBitrateEstimator, OneClusterTooFewBytes) { + const int kMinBytes = 6000; + AddPacketFeedback(0, 800, 0, 10, kDefaultMinProbes, kMinBytes); + AddPacketFeedback(0, 800, 10, 20, kDefaultMinProbes, kMinBytes); + AddPacketFeedback(0, 800, 20, 30, kDefaultMinProbes, kMinBytes); + AddPacketFeedback(0, 800, 30, 40, kDefaultMinProbes, kMinBytes); + AddPacketFeedback(0, 800, 40, 50, kDefaultMinProbes, kMinBytes); + + EXPECT_FALSE(measured_data_rate_); +} + +TEST_F(TestProbeBitrateEstimator, SmallCluster) { + const int kMinBytes = 1000; + AddPacketFeedback(0, 150, 0, 10, kDefaultMinProbes, kMinBytes); + AddPacketFeedback(0, 150, 10, 20, kDefaultMinProbes, kMinBytes); + AddPacketFeedback(0, 150, 20, 30, kDefaultMinProbes, kMinBytes); + AddPacketFeedback(0, 150, 30, 40, kDefaultMinProbes, kMinBytes); + AddPacketFeedback(0, 150, 40, 50, kDefaultMinProbes, kMinBytes); + AddPacketFeedback(0, 150, 50, 60, kDefaultMinProbes, kMinBytes); + EXPECT_NEAR(measured_data_rate_->bps(), 120000, 10); +} + +TEST_F(TestProbeBitrateEstimator, LargeCluster) { + const int kMinProbes = 30; + const int kMinBytes = 312500; + + int64_t send_time = 0; + int64_t receive_time = 5; + for (int i = 0; i < 25; ++i) { + AddPacketFeedback(0, 12500, send_time, receive_time, kMinProbes, kMinBytes); + ++send_time; + ++receive_time; + } + EXPECT_NEAR(measured_data_rate_->bps(), 100000000, 10); +} + +TEST_F(TestProbeBitrateEstimator, FastReceive) { + AddPacketFeedback(0, 1000, 0, 15); + AddPacketFeedback(0, 1000, 10, 30); + AddPacketFeedback(0, 1000, 20, 35); + AddPacketFeedback(0, 1000, 30, 40); + + EXPECT_NEAR(measured_data_rate_->bps(), 800000, 10); +} + +TEST_F(TestProbeBitrateEstimator, TooFastReceive) { + AddPacketFeedback(0, 1000, 0, 19); + 
AddPacketFeedback(0, 1000, 10, 22); + AddPacketFeedback(0, 1000, 20, 25); + AddPacketFeedback(0, 1000, 40, 27); + + EXPECT_FALSE(measured_data_rate_); +} + +TEST_F(TestProbeBitrateEstimator, SlowReceive) { + AddPacketFeedback(0, 1000, 0, 10); + AddPacketFeedback(0, 1000, 10, 40); + AddPacketFeedback(0, 1000, 20, 70); + AddPacketFeedback(0, 1000, 30, 85); + // Expected send rate = 800 kbps, expected receive rate = 320 kbps. + + EXPECT_NEAR(measured_data_rate_->bps(), kTargetUtilizationFraction * 320000, + 10); +} + +TEST_F(TestProbeBitrateEstimator, BurstReceive) { + AddPacketFeedback(0, 1000, 0, 50); + AddPacketFeedback(0, 1000, 10, 50); + AddPacketFeedback(0, 1000, 20, 50); + AddPacketFeedback(0, 1000, 40, 50); + + EXPECT_FALSE(measured_data_rate_); +} + +TEST_F(TestProbeBitrateEstimator, MultipleClusters) { + AddPacketFeedback(0, 1000, 0, 10); + AddPacketFeedback(0, 1000, 10, 20); + AddPacketFeedback(0, 1000, 20, 30); + AddPacketFeedback(0, 1000, 40, 60); + // Expected send rate = 600 kbps, expected receive rate = 480 kbps. + EXPECT_NEAR(measured_data_rate_->bps(), kTargetUtilizationFraction * 480000, + 10); + + AddPacketFeedback(0, 1000, 50, 60); + // Expected send rate = 640 kbps, expected receive rate = 640 kbps. + EXPECT_NEAR(measured_data_rate_->bps(), 640000, 10); + + AddPacketFeedback(1, 1000, 60, 70); + AddPacketFeedback(1, 1000, 65, 77); + AddPacketFeedback(1, 1000, 70, 84); + AddPacketFeedback(1, 1000, 75, 90); + // Expected send rate = 1600 kbps, expected receive rate = 1200 kbps. 
+ + EXPECT_NEAR(measured_data_rate_->bps(), kTargetUtilizationFraction * 1200000, + 10); +} + +TEST_F(TestProbeBitrateEstimator, IgnoreOldClusters) { + AddPacketFeedback(0, 1000, 0, 10); + AddPacketFeedback(0, 1000, 10, 20); + AddPacketFeedback(0, 1000, 20, 30); + + AddPacketFeedback(1, 1000, 60, 70); + AddPacketFeedback(1, 1000, 65, 77); + AddPacketFeedback(1, 1000, 70, 84); + AddPacketFeedback(1, 1000, 75, 90); + // Expected send rate = 1600 kbps, expected receive rate = 1200 kbps. + + EXPECT_NEAR(measured_data_rate_->bps(), kTargetUtilizationFraction * 1200000, + 10); + + // Coming in 6s later + AddPacketFeedback(0, 1000, 40 + 6000, 60 + 6000); + + EXPECT_FALSE(measured_data_rate_); +} + +TEST_F(TestProbeBitrateEstimator, IgnoreSizeLastSendPacket) { + AddPacketFeedback(0, 1000, 0, 10); + AddPacketFeedback(0, 1000, 10, 20); + AddPacketFeedback(0, 1000, 20, 30); + AddPacketFeedback(0, 1000, 30, 40); + AddPacketFeedback(0, 1500, 40, 50); + // Expected send rate = 800 kbps, expected receive rate = 900 kbps. + + EXPECT_NEAR(measured_data_rate_->bps(), 800000, 10); +} + +TEST_F(TestProbeBitrateEstimator, IgnoreSizeFirstReceivePacket) { + AddPacketFeedback(0, 1500, 0, 10); + AddPacketFeedback(0, 1000, 10, 20); + AddPacketFeedback(0, 1000, 20, 30); + AddPacketFeedback(0, 1000, 30, 40); + // Expected send rate = 933 kbps, expected receive rate = 800 kbps. 
+ + EXPECT_NEAR(measured_data_rate_->bps(), kTargetUtilizationFraction * 800000, + 10); +} + +TEST_F(TestProbeBitrateEstimator, NoLastEstimatedBitrateBps) { + EXPECT_FALSE(probe_bitrate_estimator_.FetchAndResetLastEstimatedBitrate()); +} + +TEST_F(TestProbeBitrateEstimator, FetchLastEstimatedBitrateBps) { + AddPacketFeedback(0, 1000, 0, 10); + AddPacketFeedback(0, 1000, 10, 20); + AddPacketFeedback(0, 1000, 20, 30); + AddPacketFeedback(0, 1000, 30, 40); + + auto estimated_bitrate = + probe_bitrate_estimator_.FetchAndResetLastEstimatedBitrate(); + EXPECT_TRUE(estimated_bitrate); + EXPECT_NEAR(estimated_bitrate->bps(), 800000, 10); + EXPECT_FALSE(probe_bitrate_estimator_.FetchAndResetLastEstimatedBitrate()); +} + +} // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/probe_controller.cc b/modules/congestion_controller/goog_cc/probe_controller.cc new file mode 100644 index 0000000..29b472a --- /dev/null +++ b/modules/congestion_controller/goog_cc/probe_controller.cc @@ -0,0 +1,451 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/congestion_controller/goog_cc/probe_controller.h" + +#include +#include +#include +#include + +#include "absl/strings/match.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "logging/rtc_event_log/events/rtc_event_probe_cluster_created.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_conversions.h" +#include "system_wrappers/include/metrics.h" + +namespace webrtc { + +namespace { +// The minimum number probing packets used. 
constexpr int kMinProbePacketsSent = 5;

// The minimum probing duration in ms.
constexpr int kMinProbeDurationMs = 15;

// Maximum waiting time from the time of initiating probing to getting
// the measured results back.
constexpr int64_t kMaxWaitingTimeForProbingResultMs = 1000;

// Value of |min_bitrate_to_probe_further_bps_| that indicates
// further probing is disabled.
constexpr int kExponentialProbingDisabled = 0;

// Default probing bitrate limit. Applied only when the application didn't
// specify max bitrate.
constexpr int64_t kDefaultMaxProbingBitrateBps = 5000000;

// If the bitrate drops to a factor |kBitrateDropThreshold| or lower
// and we recover within |kBitrateDropTimeoutMs|, then we'll send
// a probe at a fraction |kProbeFractionAfterDrop| of the original bitrate.
constexpr double kBitrateDropThreshold = 0.66;
constexpr int kBitrateDropTimeoutMs = 5000;
constexpr double kProbeFractionAfterDrop = 0.85;

// Timeout for probing after leaving ALR. If the bitrate drops significantly,
// (as determined by the delay based estimator) and we leave ALR, then we will
// send a probe if we recover within |kLeftAlrTimeoutMs| ms.
constexpr int kAlrEndedTimeoutMs = 3000;

// This is a limit on how often probing can be done when there is a BW
// drop detected in ALR.
constexpr int64_t kMinTimeBetweenAlrProbesMs = 5000;

// The expected uncertainty of probe result (as a fraction of the target probe
// bitrate). Used to avoid probing if the probe bitrate is close to our current
// estimate.
constexpr double kProbeUncertainty = 0.05;

// Use probing to recover faster after large bitrate estimate drops.
constexpr char kBweRapidRecoveryExperiment[] =
    "WebRTC-BweRapidRecoveryExperiment";

// Never probe higher than configured by OnMaxTotalAllocatedBitrate().
+constexpr char kCappedProbingFieldTrialName[] = "WebRTC-BweCappedProbing"; + +void MaybeLogProbeClusterCreated(RtcEventLog* event_log, + const ProbeClusterConfig& probe) { + RTC_DCHECK(event_log); + if (!event_log) { + return; + } + + size_t min_bytes = static_cast(probe.target_data_rate.bps() * + probe.target_duration.ms() / 8000); + event_log->Log(std::make_unique( + probe.id, probe.target_data_rate.bps(), probe.target_probe_count, + min_bytes)); +} + +} // namespace + +ProbeControllerConfig::ProbeControllerConfig( + const WebRtcKeyValueConfig* key_value_config) + : first_exponential_probe_scale("p1", 3.0), + second_exponential_probe_scale("p2", 6.0), + further_exponential_probe_scale("step_size", 2), + further_probe_threshold("further_probe_threshold", 0.7), + alr_probing_interval("alr_interval", TimeDelta::Seconds(5)), + alr_probe_scale("alr_scale", 2), + first_allocation_probe_scale("alloc_p1", 1), + second_allocation_probe_scale("alloc_p2", 2), + allocation_allow_further_probing("alloc_probe_further", false), + allocation_probe_max("alloc_probe_max", DataRate::PlusInfinity()) { + ParseFieldTrial( + {&first_exponential_probe_scale, &second_exponential_probe_scale, + &further_exponential_probe_scale, &further_probe_threshold, + &alr_probing_interval, &alr_probe_scale, &first_allocation_probe_scale, + &second_allocation_probe_scale, &allocation_allow_further_probing}, + key_value_config->Lookup("WebRTC-Bwe-ProbingConfiguration")); + + // Specialized keys overriding subsets of WebRTC-Bwe-ProbingConfiguration + ParseFieldTrial( + {&first_exponential_probe_scale, &second_exponential_probe_scale}, + key_value_config->Lookup("WebRTC-Bwe-InitialProbing")); + ParseFieldTrial({&further_exponential_probe_scale, &further_probe_threshold}, + key_value_config->Lookup("WebRTC-Bwe-ExponentialProbing")); + ParseFieldTrial({&alr_probing_interval, &alr_probe_scale}, + key_value_config->Lookup("WebRTC-Bwe-AlrProbing")); + ParseFieldTrial( + {&first_allocation_probe_scale, 
&second_allocation_probe_scale, + &allocation_allow_further_probing, &allocation_probe_max}, + key_value_config->Lookup("WebRTC-Bwe-AllocationProbing")); +} + +ProbeControllerConfig::ProbeControllerConfig(const ProbeControllerConfig&) = + default; +ProbeControllerConfig::~ProbeControllerConfig() = default; + +ProbeController::ProbeController(const WebRtcKeyValueConfig* key_value_config, + RtcEventLog* event_log) + : enable_periodic_alr_probing_(false), + in_rapid_recovery_experiment_(absl::StartsWith( + key_value_config->Lookup(kBweRapidRecoveryExperiment), + "Enabled")), + limit_probes_with_allocateable_rate_(!absl::StartsWith( + key_value_config->Lookup(kCappedProbingFieldTrialName), + "Disabled")), + event_log_(event_log), + config_(ProbeControllerConfig(key_value_config)) { + Reset(0); +} + +ProbeController::~ProbeController() {} + +std::vector ProbeController::SetBitrates( + int64_t min_bitrate_bps, + int64_t start_bitrate_bps, + int64_t max_bitrate_bps, + int64_t at_time_ms) { + if (start_bitrate_bps > 0) { + start_bitrate_bps_ = start_bitrate_bps; + estimated_bitrate_bps_ = start_bitrate_bps; + } else if (start_bitrate_bps_ == 0) { + start_bitrate_bps_ = min_bitrate_bps; + } + + // The reason we use the variable |old_max_bitrate_pbs| is because we + // need to set |max_bitrate_bps_| before we call InitiateProbing. + int64_t old_max_bitrate_bps = max_bitrate_bps_; + max_bitrate_bps_ = max_bitrate_bps; + + switch (state_) { + case State::kInit: + if (network_available_) + return InitiateExponentialProbing(at_time_ms); + break; + + case State::kWaitingForProbingResult: + break; + + case State::kProbingComplete: + // If the new max bitrate is higher than both the old max bitrate and the + // estimate then initiate probing. 
+ if (estimated_bitrate_bps_ != 0 && + old_max_bitrate_bps < max_bitrate_bps_ && + estimated_bitrate_bps_ < max_bitrate_bps_) { + // The assumption is that if we jump more than 20% in the bandwidth + // estimate or if the bandwidth estimate is within 90% of the new + // max bitrate then the probing attempt was successful. + mid_call_probing_succcess_threshold_ = + std::min(estimated_bitrate_bps_ * 1.2, max_bitrate_bps_ * 0.9); + mid_call_probing_waiting_for_result_ = true; + mid_call_probing_bitrate_bps_ = max_bitrate_bps_; + + RTC_HISTOGRAM_COUNTS_10000("WebRTC.BWE.MidCallProbing.Initiated", + max_bitrate_bps_ / 1000); + + return InitiateProbing(at_time_ms, {max_bitrate_bps_}, false); + } + break; + } + return std::vector(); +} + +std::vector ProbeController::OnMaxTotalAllocatedBitrate( + int64_t max_total_allocated_bitrate, + int64_t at_time_ms) { + const bool in_alr = alr_start_time_ms_.has_value(); + const bool allow_allocation_probe = in_alr; + + if (state_ == State::kProbingComplete && + max_total_allocated_bitrate != max_total_allocated_bitrate_ && + estimated_bitrate_bps_ != 0 && + (max_bitrate_bps_ <= 0 || estimated_bitrate_bps_ < max_bitrate_bps_) && + estimated_bitrate_bps_ < max_total_allocated_bitrate && + allow_allocation_probe) { + max_total_allocated_bitrate_ = max_total_allocated_bitrate; + + if (!config_.first_allocation_probe_scale) + return std::vector(); + + DataRate first_probe_rate = + DataRate::BitsPerSec(max_total_allocated_bitrate) * + config_.first_allocation_probe_scale.Value(); + DataRate probe_cap = config_.allocation_probe_max.Get(); + first_probe_rate = std::min(first_probe_rate, probe_cap); + std::vector probes = {first_probe_rate.bps()}; + if (config_.second_allocation_probe_scale) { + DataRate second_probe_rate = + DataRate::BitsPerSec(max_total_allocated_bitrate) * + config_.second_allocation_probe_scale.Value(); + second_probe_rate = std::min(second_probe_rate, probe_cap); + if (second_probe_rate > first_probe_rate) + 
probes.push_back(second_probe_rate.bps()); + } + return InitiateProbing(at_time_ms, probes, + config_.allocation_allow_further_probing); + } + max_total_allocated_bitrate_ = max_total_allocated_bitrate; + return std::vector(); +} + +std::vector ProbeController::OnNetworkAvailability( + NetworkAvailability msg) { + network_available_ = msg.network_available; + + if (!network_available_ && state_ == State::kWaitingForProbingResult) { + state_ = State::kProbingComplete; + min_bitrate_to_probe_further_bps_ = kExponentialProbingDisabled; + } + + if (network_available_ && state_ == State::kInit && start_bitrate_bps_ > 0) + return InitiateExponentialProbing(msg.at_time.ms()); + return std::vector(); +} + +std::vector ProbeController::InitiateExponentialProbing( + int64_t at_time_ms) { + RTC_DCHECK(network_available_); + RTC_DCHECK(state_ == State::kInit); + RTC_DCHECK_GT(start_bitrate_bps_, 0); + + // When probing at 1.8 Mbps ( 6x 300), this represents a threshold of + // 1.2 Mbps to continue probing. + std::vector probes = {static_cast( + config_.first_exponential_probe_scale * start_bitrate_bps_)}; + if (config_.second_exponential_probe_scale) { + probes.push_back(config_.second_exponential_probe_scale.Value() * + start_bitrate_bps_); + } + return InitiateProbing(at_time_ms, probes, true); +} + +std::vector ProbeController::SetEstimatedBitrate( + int64_t bitrate_bps, + int64_t at_time_ms) { + if (mid_call_probing_waiting_for_result_ && + bitrate_bps >= mid_call_probing_succcess_threshold_) { + RTC_HISTOGRAM_COUNTS_10000("WebRTC.BWE.MidCallProbing.Success", + mid_call_probing_bitrate_bps_ / 1000); + RTC_HISTOGRAM_COUNTS_10000("WebRTC.BWE.MidCallProbing.ProbedKbps", + bitrate_bps / 1000); + mid_call_probing_waiting_for_result_ = false; + } + std::vector pending_probes; + if (state_ == State::kWaitingForProbingResult) { + // Continue probing if probing results indicate channel has greater + // capacity. 
+ RTC_LOG(LS_INFO) << "Measured bitrate: " << bitrate_bps + << " Minimum to probe further: " + << min_bitrate_to_probe_further_bps_; + + if (min_bitrate_to_probe_further_bps_ != kExponentialProbingDisabled && + bitrate_bps > min_bitrate_to_probe_further_bps_) { + pending_probes = InitiateProbing( + at_time_ms, + {static_cast(config_.further_exponential_probe_scale * + bitrate_bps)}, + true); + } + } + + if (bitrate_bps < kBitrateDropThreshold * estimated_bitrate_bps_) { + time_of_last_large_drop_ms_ = at_time_ms; + bitrate_before_last_large_drop_bps_ = estimated_bitrate_bps_; + } + + estimated_bitrate_bps_ = bitrate_bps; + return pending_probes; +} + +void ProbeController::EnablePeriodicAlrProbing(bool enable) { + enable_periodic_alr_probing_ = enable; +} + +void ProbeController::SetAlrStartTimeMs( + absl::optional alr_start_time_ms) { + alr_start_time_ms_ = alr_start_time_ms; +} +void ProbeController::SetAlrEndedTimeMs(int64_t alr_end_time_ms) { + alr_end_time_ms_.emplace(alr_end_time_ms); +} + +std::vector ProbeController::RequestProbe( + int64_t at_time_ms) { + // Called once we have returned to normal state after a large drop in + // estimated bandwidth. The current response is to initiate a single probe + // session (if not already probing) at the previous bitrate. + // + // If the probe session fails, the assumption is that this drop was a + // real one from a competing flow or a network change. 
+ bool in_alr = alr_start_time_ms_.has_value(); + bool alr_ended_recently = + (alr_end_time_ms_.has_value() && + at_time_ms - alr_end_time_ms_.value() < kAlrEndedTimeoutMs); + if (in_alr || alr_ended_recently || in_rapid_recovery_experiment_) { + if (state_ == State::kProbingComplete) { + uint32_t suggested_probe_bps = + kProbeFractionAfterDrop * bitrate_before_last_large_drop_bps_; + uint32_t min_expected_probe_result_bps = + (1 - kProbeUncertainty) * suggested_probe_bps; + int64_t time_since_drop_ms = at_time_ms - time_of_last_large_drop_ms_; + int64_t time_since_probe_ms = at_time_ms - last_bwe_drop_probing_time_ms_; + if (min_expected_probe_result_bps > estimated_bitrate_bps_ && + time_since_drop_ms < kBitrateDropTimeoutMs && + time_since_probe_ms > kMinTimeBetweenAlrProbesMs) { + RTC_LOG(LS_INFO) << "Detected big bandwidth drop, start probing."; + // Track how often we probe in response to bandwidth drop in ALR. + RTC_HISTOGRAM_COUNTS_10000( + "WebRTC.BWE.BweDropProbingIntervalInS", + (at_time_ms - last_bwe_drop_probing_time_ms_) / 1000); + last_bwe_drop_probing_time_ms_ = at_time_ms; + return InitiateProbing(at_time_ms, {suggested_probe_bps}, false); + } + } + } + return std::vector(); +} + +void ProbeController::SetMaxBitrate(int64_t max_bitrate_bps) { + max_bitrate_bps_ = max_bitrate_bps; +} + +void ProbeController::Reset(int64_t at_time_ms) { + network_available_ = true; + state_ = State::kInit; + min_bitrate_to_probe_further_bps_ = kExponentialProbingDisabled; + time_last_probing_initiated_ms_ = 0; + estimated_bitrate_bps_ = 0; + start_bitrate_bps_ = 0; + max_bitrate_bps_ = 0; + int64_t now_ms = at_time_ms; + last_bwe_drop_probing_time_ms_ = now_ms; + alr_end_time_ms_.reset(); + mid_call_probing_waiting_for_result_ = false; + time_of_last_large_drop_ms_ = now_ms; + bitrate_before_last_large_drop_bps_ = 0; + max_total_allocated_bitrate_ = 0; +} + +std::vector ProbeController::Process(int64_t at_time_ms) { + if (at_time_ms - time_last_probing_initiated_ms_ 
> + kMaxWaitingTimeForProbingResultMs) { + mid_call_probing_waiting_for_result_ = false; + + if (state_ == State::kWaitingForProbingResult) { + RTC_LOG(LS_INFO) << "kWaitingForProbingResult: timeout"; + state_ = State::kProbingComplete; + min_bitrate_to_probe_further_bps_ = kExponentialProbingDisabled; + } + } + + if (enable_periodic_alr_probing_ && state_ == State::kProbingComplete) { + // Probe bandwidth periodically when in ALR state. + if (alr_start_time_ms_ && estimated_bitrate_bps_ > 0) { + int64_t next_probe_time_ms = + std::max(*alr_start_time_ms_, time_last_probing_initiated_ms_) + + config_.alr_probing_interval->ms(); + if (at_time_ms >= next_probe_time_ms) { + return InitiateProbing(at_time_ms, + {static_cast(estimated_bitrate_bps_ * + config_.alr_probe_scale)}, + true); + } + } + } + return std::vector(); +} + +std::vector ProbeController::InitiateProbing( + int64_t now_ms, + std::vector bitrates_to_probe, + bool probe_further) { + int64_t max_probe_bitrate_bps = + max_bitrate_bps_ > 0 ? max_bitrate_bps_ : kDefaultMaxProbingBitrateBps; + if (limit_probes_with_allocateable_rate_ && + max_total_allocated_bitrate_ > 0) { + // If a max allocated bitrate has been configured, allow probing up to 2x + // that rate. This allows some overhead to account for bursty streams, + // which otherwise would have to ramp up when the overshoot is already in + // progress. + // It also avoids minor quality reduction caused by probes often being + // received at slightly less than the target probe bitrate. 
+ max_probe_bitrate_bps = + std::min(max_probe_bitrate_bps, max_total_allocated_bitrate_ * 2); + } + + std::vector pending_probes; + for (int64_t bitrate : bitrates_to_probe) { + RTC_DCHECK_GT(bitrate, 0); + + if (bitrate > max_probe_bitrate_bps) { + bitrate = max_probe_bitrate_bps; + probe_further = false; + } + + ProbeClusterConfig config; + config.at_time = Timestamp::Millis(now_ms); + config.target_data_rate = + DataRate::BitsPerSec(rtc::dchecked_cast(bitrate)); + config.target_duration = TimeDelta::Millis(kMinProbeDurationMs); + config.target_probe_count = kMinProbePacketsSent; + config.id = next_probe_cluster_id_; + next_probe_cluster_id_++; + MaybeLogProbeClusterCreated(event_log_, config); + pending_probes.push_back(config); + } + time_last_probing_initiated_ms_ = now_ms; + if (probe_further) { + state_ = State::kWaitingForProbingResult; + min_bitrate_to_probe_further_bps_ = + (*(bitrates_to_probe.end() - 1)) * config_.further_probe_threshold; + } else { + state_ = State::kProbingComplete; + min_bitrate_to_probe_further_bps_ = kExponentialProbingDisabled; + } + return pending_probes; +} + +} // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/probe_controller.h b/modules/congestion_controller/goog_cc/probe_controller.h new file mode 100644 index 0000000..11e92b9 --- /dev/null +++ b/modules/congestion_controller/goog_cc/probe_controller.h @@ -0,0 +1,152 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_PROBE_CONTROLLER_H_ +#define MODULES_CONGESTION_CONTROLLER_GOOG_CC_PROBE_CONTROLLER_H_ + +#include + +#include +#include + +#include "absl/types/optional.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/transport/network_control.h" +#include "api/transport/webrtc_key_value_config.h" +#include "api/units/data_rate.h" +#include "rtc_base/constructor_magic.h" +#include "rtc_base/experiments/field_trial_parser.h" +#include "rtc_base/system/unused.h" + +namespace webrtc { + +struct ProbeControllerConfig { + explicit ProbeControllerConfig(const WebRtcKeyValueConfig* key_value_config); + ProbeControllerConfig(const ProbeControllerConfig&); + ProbeControllerConfig& operator=(const ProbeControllerConfig&) = default; + ~ProbeControllerConfig(); + + // These parameters configure the initial probes. First we send one or two + // probes of sizes p1 * start_bitrate_bps_ and p2 * start_bitrate_bps_. + // Then whenever we get a bitrate estimate of at least further_probe_threshold + // times the size of the last sent probe we'll send another one of size + // step_size times the new estimate. + FieldTrialParameter first_exponential_probe_scale; + FieldTrialOptional second_exponential_probe_scale; + FieldTrialParameter further_exponential_probe_scale; + FieldTrialParameter further_probe_threshold; + + // Configures how often we send ALR probes and how big they are. + FieldTrialParameter alr_probing_interval; + FieldTrialParameter alr_probe_scale; + + // Configures the probes emitted by changed to the allocated bitrate. + FieldTrialOptional first_allocation_probe_scale; + FieldTrialOptional second_allocation_probe_scale; + FieldTrialFlag allocation_allow_further_probing; + FieldTrialParameter allocation_probe_max; +}; + +// This class controls initiation of probing to estimate initial channel +// capacity. There is also support for probing during a session when max +// bitrate is adjusted by an application. 
+class ProbeController { + public: + explicit ProbeController(const WebRtcKeyValueConfig* key_value_config, + RtcEventLog* event_log); + ~ProbeController(); + + RTC_WARN_UNUSED_RESULT std::vector SetBitrates( + int64_t min_bitrate_bps, + int64_t start_bitrate_bps, + int64_t max_bitrate_bps, + int64_t at_time_ms); + + // The total bitrate, as opposed to the max bitrate, is the sum of the + // configured bitrates for all active streams. + RTC_WARN_UNUSED_RESULT std::vector + OnMaxTotalAllocatedBitrate(int64_t max_total_allocated_bitrate, + int64_t at_time_ms); + + RTC_WARN_UNUSED_RESULT std::vector OnNetworkAvailability( + NetworkAvailability msg); + + RTC_WARN_UNUSED_RESULT std::vector SetEstimatedBitrate( + int64_t bitrate_bps, + int64_t at_time_ms); + + void EnablePeriodicAlrProbing(bool enable); + + void SetAlrStartTimeMs(absl::optional alr_start_time); + void SetAlrEndedTimeMs(int64_t alr_end_time); + + RTC_WARN_UNUSED_RESULT std::vector RequestProbe( + int64_t at_time_ms); + + // Sets a new maximum probing bitrate, without generating a new probe cluster. + void SetMaxBitrate(int64_t max_bitrate_bps); + + // Resets the ProbeController to a state equivalent to as if it was just + // created EXCEPT for |enable_periodic_alr_probing_|. + void Reset(int64_t at_time_ms); + + RTC_WARN_UNUSED_RESULT std::vector Process( + int64_t at_time_ms); + + private: + enum class State { + // Initial state where no probing has been triggered yet. + kInit, + // Waiting for probing results to continue further probing. + kWaitingForProbingResult, + // Probing is complete. 
+ kProbingComplete, + }; + + RTC_WARN_UNUSED_RESULT std::vector + InitiateExponentialProbing(int64_t at_time_ms); + RTC_WARN_UNUSED_RESULT std::vector InitiateProbing( + int64_t now_ms, + std::vector bitrates_to_probe, + bool probe_further); + + bool network_available_; + State state_; + int64_t min_bitrate_to_probe_further_bps_; + int64_t time_last_probing_initiated_ms_; + int64_t estimated_bitrate_bps_; + int64_t start_bitrate_bps_; + int64_t max_bitrate_bps_; + int64_t last_bwe_drop_probing_time_ms_; + absl::optional alr_start_time_ms_; + absl::optional alr_end_time_ms_; + bool enable_periodic_alr_probing_; + int64_t time_of_last_large_drop_ms_; + int64_t bitrate_before_last_large_drop_bps_; + int64_t max_total_allocated_bitrate_; + + const bool in_rapid_recovery_experiment_; + const bool limit_probes_with_allocateable_rate_; + // For WebRTC.BWE.MidCallProbing.* metric. + bool mid_call_probing_waiting_for_result_; + int64_t mid_call_probing_bitrate_bps_; + int64_t mid_call_probing_succcess_threshold_; + RtcEventLog* event_log_; + + int32_t next_probe_cluster_id_ = 1; + + ProbeControllerConfig config_; + + RTC_DISALLOW_COPY_AND_ASSIGN(ProbeController); +}; + +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_GOOG_CC_PROBE_CONTROLLER_H_ diff --git a/modules/congestion_controller/goog_cc/probe_controller_unittest.cc b/modules/congestion_controller/goog_cc/probe_controller_unittest.cc new file mode 100644 index 0000000..e27bf71 --- /dev/null +++ b/modules/congestion_controller/goog_cc/probe_controller_unittest.cc @@ -0,0 +1,373 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ +#include "modules/congestion_controller/goog_cc/probe_controller.h" + +#include + +#include "api/transport/field_trial_based_config.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/timestamp.h" +#include "logging/rtc_event_log/mock/mock_rtc_event_log.h" +#include "rtc_base/logging.h" +#include "system_wrappers/include/clock.h" +#include "test/field_trial.h" +#include "test/gmock.h" +#include "test/gtest.h" + +using ::testing::_; +using ::testing::AtLeast; +using ::testing::Field; +using ::testing::Matcher; +using ::testing::NiceMock; +using ::testing::Return; + +namespace webrtc { +namespace test { + +namespace { + +constexpr int kMinBitrateBps = 100; +constexpr int kStartBitrateBps = 300; +constexpr int kMaxBitrateBps = 10000; + +constexpr int kExponentialProbingTimeoutMs = 5000; + +constexpr int kAlrProbeInterval = 5000; +constexpr int kAlrEndedTimeoutMs = 3000; +constexpr int kBitrateDropTimeoutMs = 5000; +} // namespace + +class ProbeControllerTest : public ::testing::Test { + protected: + ProbeControllerTest() : clock_(100000000L) { + probe_controller_.reset( + new ProbeController(&field_trial_config_, &mock_rtc_event_log)); + } + ~ProbeControllerTest() override {} + + std::vector SetNetworkAvailable(bool available) { + NetworkAvailability msg; + msg.at_time = Timestamp::Millis(NowMs()); + msg.network_available = available; + return probe_controller_->OnNetworkAvailability(msg); + } + + int64_t NowMs() { return clock_.TimeInMilliseconds(); } + + FieldTrialBasedConfig field_trial_config_; + SimulatedClock clock_; + NiceMock mock_rtc_event_log; + std::unique_ptr probe_controller_; +}; + +TEST_F(ProbeControllerTest, InitiatesProbingAtStart) { + auto probes = probe_controller_->SetBitrates(kMinBitrateBps, kStartBitrateBps, + kMaxBitrateBps, NowMs()); + EXPECT_GE(probes.size(), 2u); +} + +TEST_F(ProbeControllerTest, ProbeOnlyWhenNetworkIsUp) { + SetNetworkAvailable(false); + auto probes = 
probe_controller_->SetBitrates(kMinBitrateBps, kStartBitrateBps, + kMaxBitrateBps, NowMs()); + EXPECT_EQ(probes.size(), 0u); + probes = SetNetworkAvailable(true); + EXPECT_GE(probes.size(), 2u); +} + +TEST_F(ProbeControllerTest, InitiatesProbingOnMaxBitrateIncrease) { + auto probes = probe_controller_->SetBitrates(kMinBitrateBps, kStartBitrateBps, + kMaxBitrateBps, NowMs()); + // Long enough to time out exponential probing. + clock_.AdvanceTimeMilliseconds(kExponentialProbingTimeoutMs); + probes = probe_controller_->SetEstimatedBitrate(kStartBitrateBps, NowMs()); + probes = probe_controller_->Process(NowMs()); + probes = probe_controller_->SetBitrates(kMinBitrateBps, kStartBitrateBps, + kMaxBitrateBps + 100, NowMs()); + EXPECT_EQ(probes.size(), 1u); + EXPECT_EQ(probes[0].target_data_rate.bps(), kMaxBitrateBps + 100); +} + +TEST_F(ProbeControllerTest, ProbesOnMaxBitrateIncreaseOnlyWhenInAlr) { + probe_controller_.reset( + new ProbeController(&field_trial_config_, &mock_rtc_event_log)); + auto probes = probe_controller_->SetBitrates(kMinBitrateBps, kStartBitrateBps, + kMaxBitrateBps, NowMs()); + probes = probe_controller_->SetEstimatedBitrate(kMaxBitrateBps - 1, NowMs()); + + // Wait long enough to time out exponential probing. + clock_.AdvanceTimeMilliseconds(kExponentialProbingTimeoutMs); + probes = probe_controller_->Process(NowMs()); + EXPECT_EQ(probes.size(), 0u); + + // Probe when in alr. + probe_controller_->SetAlrStartTimeMs(clock_.TimeInMilliseconds()); + probes = probe_controller_->OnMaxTotalAllocatedBitrate(kMaxBitrateBps + 1, + NowMs()); + EXPECT_EQ(probes.size(), 2u); + + // Do not probe when not in alr. 
+ probe_controller_->SetAlrStartTimeMs(absl::nullopt); + probes = probe_controller_->OnMaxTotalAllocatedBitrate(kMaxBitrateBps + 2, + NowMs()); + EXPECT_TRUE(probes.empty()); +} + +TEST_F(ProbeControllerTest, InitiatesProbingOnMaxBitrateIncreaseAtMaxBitrate) { + auto probes = probe_controller_->SetBitrates(kMinBitrateBps, kStartBitrateBps, + kMaxBitrateBps, NowMs()); + // Long enough to time out exponential probing. + clock_.AdvanceTimeMilliseconds(kExponentialProbingTimeoutMs); + probes = probe_controller_->SetEstimatedBitrate(kStartBitrateBps, NowMs()); + probes = probe_controller_->Process(NowMs()); + probes = probe_controller_->SetEstimatedBitrate(kMaxBitrateBps, NowMs()); + probes = probe_controller_->SetBitrates(kMinBitrateBps, kStartBitrateBps, + kMaxBitrateBps + 100, NowMs()); + EXPECT_EQ(probes.size(), 1u); + EXPECT_EQ(probes[0].target_data_rate.bps(), kMaxBitrateBps + 100); +} + +TEST_F(ProbeControllerTest, TestExponentialProbing) { + auto probes = probe_controller_->SetBitrates(kMinBitrateBps, kStartBitrateBps, + kMaxBitrateBps, NowMs()); + + // Repeated probe should only be sent when estimated bitrate climbs above + // 0.7 * 6 * kStartBitrateBps = 1260. + probes = probe_controller_->SetEstimatedBitrate(1000, NowMs()); + EXPECT_EQ(probes.size(), 0u); + + probes = probe_controller_->SetEstimatedBitrate(1800, NowMs()); + EXPECT_EQ(probes.size(), 1u); + EXPECT_EQ(probes[0].target_data_rate.bps(), 2 * 1800); +} + +TEST_F(ProbeControllerTest, TestExponentialProbingTimeout) { + auto probes = probe_controller_->SetBitrates(kMinBitrateBps, kStartBitrateBps, + kMaxBitrateBps, NowMs()); + // Advance far enough to cause a time out in waiting for probing result. 
+ clock_.AdvanceTimeMilliseconds(kExponentialProbingTimeoutMs); + probes = probe_controller_->Process(NowMs()); + + probes = probe_controller_->SetEstimatedBitrate(1800, NowMs()); + EXPECT_EQ(probes.size(), 0u); +} + +TEST_F(ProbeControllerTest, RequestProbeInAlr) { + auto probes = probe_controller_->SetBitrates(kMinBitrateBps, kStartBitrateBps, + kMaxBitrateBps, NowMs()); + EXPECT_GE(probes.size(), 2u); + probes = probe_controller_->SetEstimatedBitrate(500, NowMs()); + + probe_controller_->SetAlrStartTimeMs(clock_.TimeInMilliseconds()); + clock_.AdvanceTimeMilliseconds(kAlrProbeInterval + 1); + probes = probe_controller_->Process(NowMs()); + probes = probe_controller_->SetEstimatedBitrate(250, NowMs()); + probes = probe_controller_->RequestProbe(NowMs()); + + EXPECT_EQ(probes.size(), 1u); + EXPECT_EQ(probes[0].target_data_rate.bps(), 0.85 * 500); +} + +TEST_F(ProbeControllerTest, RequestProbeWhenAlrEndedRecently) { + auto probes = probe_controller_->SetBitrates(kMinBitrateBps, kStartBitrateBps, + kMaxBitrateBps, NowMs()); + EXPECT_EQ(probes.size(), 2u); + probes = probe_controller_->SetEstimatedBitrate(500, NowMs()); + + probe_controller_->SetAlrStartTimeMs(absl::nullopt); + clock_.AdvanceTimeMilliseconds(kAlrProbeInterval + 1); + probes = probe_controller_->Process(NowMs()); + probes = probe_controller_->SetEstimatedBitrate(250, NowMs()); + probe_controller_->SetAlrEndedTimeMs(clock_.TimeInMilliseconds()); + clock_.AdvanceTimeMilliseconds(kAlrEndedTimeoutMs - 1); + probes = probe_controller_->RequestProbe(NowMs()); + + EXPECT_EQ(probes.size(), 1u); + EXPECT_EQ(probes[0].target_data_rate.bps(), 0.85 * 500); +} + +TEST_F(ProbeControllerTest, RequestProbeWhenAlrNotEndedRecently) { + auto probes = probe_controller_->SetBitrates(kMinBitrateBps, kStartBitrateBps, + kMaxBitrateBps, NowMs()); + EXPECT_EQ(probes.size(), 2u); + probes = probe_controller_->SetEstimatedBitrate(500, NowMs()); + + probe_controller_->SetAlrStartTimeMs(absl::nullopt); + 
clock_.AdvanceTimeMilliseconds(kAlrProbeInterval + 1); + probes = probe_controller_->Process(NowMs()); + probes = probe_controller_->SetEstimatedBitrate(250, NowMs()); + probe_controller_->SetAlrEndedTimeMs(clock_.TimeInMilliseconds()); + clock_.AdvanceTimeMilliseconds(kAlrEndedTimeoutMs + 1); + probes = probe_controller_->RequestProbe(NowMs()); + EXPECT_EQ(probes.size(), 0u); +} + +TEST_F(ProbeControllerTest, RequestProbeWhenBweDropNotRecent) { + auto probes = probe_controller_->SetBitrates(kMinBitrateBps, kStartBitrateBps, + kMaxBitrateBps, NowMs()); + EXPECT_EQ(probes.size(), 2u); + probes = probe_controller_->SetEstimatedBitrate(500, NowMs()); + + probe_controller_->SetAlrStartTimeMs(clock_.TimeInMilliseconds()); + clock_.AdvanceTimeMilliseconds(kAlrProbeInterval + 1); + probes = probe_controller_->Process(NowMs()); + probes = probe_controller_->SetEstimatedBitrate(250, NowMs()); + clock_.AdvanceTimeMilliseconds(kBitrateDropTimeoutMs + 1); + probes = probe_controller_->RequestProbe(NowMs()); + EXPECT_EQ(probes.size(), 0u); +} + +TEST_F(ProbeControllerTest, PeriodicProbing) { + probe_controller_->EnablePeriodicAlrProbing(true); + auto probes = probe_controller_->SetBitrates(kMinBitrateBps, kStartBitrateBps, + kMaxBitrateBps, NowMs()); + EXPECT_EQ(probes.size(), 2u); + probes = probe_controller_->SetEstimatedBitrate(500, NowMs()); + + int64_t start_time = clock_.TimeInMilliseconds(); + + // Expect the controller to send a new probe after 5s has passed. + probe_controller_->SetAlrStartTimeMs(start_time); + clock_.AdvanceTimeMilliseconds(5000); + probes = probe_controller_->Process(NowMs()); + EXPECT_EQ(probes.size(), 1u); + EXPECT_EQ(probes[0].target_data_rate.bps(), 1000); + + probes = probe_controller_->SetEstimatedBitrate(500, NowMs()); + + // The following probe should be sent at 10s into ALR. 
+ probe_controller_->SetAlrStartTimeMs(start_time); + clock_.AdvanceTimeMilliseconds(4000); + probes = probe_controller_->Process(NowMs()); + probes = probe_controller_->SetEstimatedBitrate(500, NowMs()); + EXPECT_EQ(probes.size(), 0u); + + probe_controller_->SetAlrStartTimeMs(start_time); + clock_.AdvanceTimeMilliseconds(1000); + probes = probe_controller_->Process(NowMs()); + EXPECT_EQ(probes.size(), 1u); + probes = probe_controller_->SetEstimatedBitrate(500, NowMs()); + EXPECT_EQ(probes.size(), 0u); +} + +TEST_F(ProbeControllerTest, PeriodicProbingAfterReset) { + probe_controller_.reset( + new ProbeController(&field_trial_config_, &mock_rtc_event_log)); + int64_t alr_start_time = clock_.TimeInMilliseconds(); + + probe_controller_->SetAlrStartTimeMs(alr_start_time); + probe_controller_->EnablePeriodicAlrProbing(true); + auto probes = probe_controller_->SetBitrates(kMinBitrateBps, kStartBitrateBps, + kMaxBitrateBps, NowMs()); + probe_controller_->Reset(NowMs()); + + clock_.AdvanceTimeMilliseconds(10000); + probes = probe_controller_->Process(NowMs()); + // Since bitrates are not yet set, no probe is sent event though we are in ALR + // mode. + EXPECT_EQ(probes.size(), 0u); + + probes = probe_controller_->SetBitrates(kMinBitrateBps, kStartBitrateBps, + kMaxBitrateBps, NowMs()); + EXPECT_EQ(probes.size(), 2u); + + // Make sure we use |kStartBitrateBps| as the estimated bitrate + // until SetEstimatedBitrate is called with an updated estimate. + clock_.AdvanceTimeMilliseconds(10000); + probes = probe_controller_->Process(NowMs()); + EXPECT_EQ(probes.size(), 1u); + EXPECT_EQ(probes[0].target_data_rate.bps(), kStartBitrateBps * 2); +} + +TEST_F(ProbeControllerTest, TestExponentialProbingOverflow) { + const int64_t kMbpsMultiplier = 1000000; + auto probes = probe_controller_->SetBitrates( + kMinBitrateBps, 10 * kMbpsMultiplier, 100 * kMbpsMultiplier, NowMs()); + // Verify that probe bitrate is capped at the specified max bitrate. 
+ probes = + probe_controller_->SetEstimatedBitrate(60 * kMbpsMultiplier, NowMs()); + EXPECT_EQ(probes.size(), 1u); + EXPECT_EQ(probes[0].target_data_rate.bps(), 100 * kMbpsMultiplier); + // Verify that repeated probes aren't sent. + probes = + probe_controller_->SetEstimatedBitrate(100 * kMbpsMultiplier, NowMs()); + EXPECT_EQ(probes.size(), 0u); +} + +TEST_F(ProbeControllerTest, TestAllocatedBitrateCap) { + const int64_t kMbpsMultiplier = 1000000; + const int64_t kMaxBitrateBps = 100 * kMbpsMultiplier; + auto probes = probe_controller_->SetBitrates( + kMinBitrateBps, 10 * kMbpsMultiplier, kMaxBitrateBps, NowMs()); + + // Configure ALR for periodic probing. + probe_controller_->EnablePeriodicAlrProbing(true); + int64_t alr_start_time = clock_.TimeInMilliseconds(); + probe_controller_->SetAlrStartTimeMs(alr_start_time); + + int64_t estimated_bitrate_bps = kMaxBitrateBps / 10; + probes = + probe_controller_->SetEstimatedBitrate(estimated_bitrate_bps, NowMs()); + + // Set a max allocated bitrate below the current estimate. + int64_t max_allocated_bps = estimated_bitrate_bps - 1 * kMbpsMultiplier; + probes = + probe_controller_->OnMaxTotalAllocatedBitrate(max_allocated_bps, NowMs()); + EXPECT_TRUE(probes.empty()); // No probe since lower than current max. + + // Probes such as ALR capped at 2x the max allocation limit. + clock_.AdvanceTimeMilliseconds(5000); + probes = probe_controller_->Process(NowMs()); + EXPECT_EQ(probes.size(), 1u); + EXPECT_EQ(probes[0].target_data_rate.bps(), 2 * max_allocated_bps); + + // Remove allocation limit. 
+ EXPECT_TRUE( + probe_controller_->OnMaxTotalAllocatedBitrate(0, NowMs()).empty()); + clock_.AdvanceTimeMilliseconds(5000); + probes = probe_controller_->Process(NowMs()); + EXPECT_EQ(probes.size(), 1u); + EXPECT_EQ(probes[0].target_data_rate.bps(), estimated_bitrate_bps * 2); +} + +TEST_F(ProbeControllerTest, ConfigurableProbingFieldTrial) { + test::ScopedFieldTrials trials( + "WebRTC-Bwe-ProbingConfiguration/" + "p1:2,p2:5,step_size:3,further_probe_threshold:0.8," + "alloc_p1:2,alloc_p2/"); + probe_controller_.reset( + new ProbeController(&field_trial_config_, &mock_rtc_event_log)); + auto probes = probe_controller_->SetBitrates(kMinBitrateBps, kStartBitrateBps, + 5000000, NowMs()); + EXPECT_EQ(probes.size(), 2u); + EXPECT_EQ(probes[0].target_data_rate.bps(), 600); + EXPECT_EQ(probes[1].target_data_rate.bps(), 1500); + + // Repeated probe should only be sent when estimated bitrate climbs above + // 0.8 * 5 * kStartBitrateBps = 1200. + probes = probe_controller_->SetEstimatedBitrate(1100, NowMs()); + EXPECT_EQ(probes.size(), 0u); + + probes = probe_controller_->SetEstimatedBitrate(1250, NowMs()); + EXPECT_EQ(probes.size(), 1u); + EXPECT_EQ(probes[0].target_data_rate.bps(), 3 * 1250); + + clock_.AdvanceTimeMilliseconds(5000); + probes = probe_controller_->Process(NowMs()); + + probe_controller_->SetAlrStartTimeMs(NowMs()); + probes = probe_controller_->OnMaxTotalAllocatedBitrate(200000, NowMs()); + EXPECT_EQ(probes.size(), 1u); + EXPECT_EQ(probes[0].target_data_rate.bps(), 400000); +} + +} // namespace test +} // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/robust_throughput_estimator.cc b/modules/congestion_controller/goog_cc/robust_throughput_estimator.cc new file mode 100644 index 0000000..1169e9f --- /dev/null +++ b/modules/congestion_controller/goog_cc/robust_throughput_estimator.cc @@ -0,0 +1,135 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/congestion_controller/goog_cc/robust_throughput_estimator.h" + +#include + +#include +#include + +#include "rtc_base/checks.h" + +namespace webrtc { + +RobustThroughputEstimator::RobustThroughputEstimator( + const RobustThroughputEstimatorSettings& settings) + : settings_(settings) { + RTC_DCHECK(settings.enabled); +} + +RobustThroughputEstimator::~RobustThroughputEstimator() {} + +void RobustThroughputEstimator::IncomingPacketFeedbackVector( + const std::vector& packet_feedback_vector) { + RTC_DCHECK(std::is_sorted(packet_feedback_vector.begin(), + packet_feedback_vector.end(), + PacketResult::ReceiveTimeOrder())); + for (const auto& packet : packet_feedback_vector) { + // Insert the new packet. + window_.push_back(packet); + window_.back().sent_packet.prior_unacked_data = + window_.back().sent_packet.prior_unacked_data * + settings_.unacked_weight; + // In most cases, receive timestamps should already be in order, but in the + // rare case where feedback packets have been reordered, we do some swaps to + // ensure that the window is sorted. + for (size_t i = window_.size() - 1; + i > 0 && window_[i].receive_time < window_[i - 1].receive_time; i--) { + std::swap(window_[i], window_[i - 1]); + } + // Remove old packets. 
+ while (window_.size() > settings_.kMaxPackets || + (window_.size() > settings_.min_packets && + packet.receive_time - window_.front().receive_time > + settings_.window_duration)) { + window_.pop_front(); + } + } +} + +absl::optional RobustThroughputEstimator::bitrate() const { + if (window_.size() < settings_.initial_packets) + return absl::nullopt; + + TimeDelta largest_recv_gap(TimeDelta::Millis(0)); + TimeDelta second_largest_recv_gap(TimeDelta::Millis(0)); + for (size_t i = 1; i < window_.size(); i++) { + // Find receive time gaps + TimeDelta gap = window_[i].receive_time - window_[i - 1].receive_time; + if (gap > largest_recv_gap) { + second_largest_recv_gap = largest_recv_gap; + largest_recv_gap = gap; + } else if (gap > second_largest_recv_gap) { + second_largest_recv_gap = gap; + } + } + + Timestamp min_send_time = window_[0].sent_packet.send_time; + Timestamp max_send_time = window_[0].sent_packet.send_time; + Timestamp min_recv_time = window_[0].receive_time; + Timestamp max_recv_time = window_[0].receive_time; + DataSize data_size = DataSize::Bytes(0); + for (const auto& packet : window_) { + min_send_time = std::min(min_send_time, packet.sent_packet.send_time); + max_send_time = std::max(max_send_time, packet.sent_packet.send_time); + min_recv_time = std::min(min_recv_time, packet.receive_time); + max_recv_time = std::max(max_recv_time, packet.receive_time); + data_size += packet.sent_packet.size; + data_size += packet.sent_packet.prior_unacked_data; + } + + // Suppose a packet of size S is sent every T milliseconds. + // A window of N packets would contain N*S bytes, but the time difference + // between the first and the last packet would only be (N-1)*T. Thus, we + // need to remove one packet. 
+ DataSize recv_size = data_size; + DataSize send_size = data_size; + if (settings_.assume_shared_link) { + // Depending on how the bottleneck queue is implemented, a large packet + // may delay sending of sebsequent packets, so the delay between packets + // i and i+1 depends on the size of both packets. In this case we minimize + // the maximum error by removing half of both the first and last packet + // size. + DataSize first_last_average_size = + (window_.front().sent_packet.size + + window_.front().sent_packet.prior_unacked_data + + window_.back().sent_packet.size + + window_.back().sent_packet.prior_unacked_data) / + 2; + recv_size -= first_last_average_size; + send_size -= first_last_average_size; + } else { + // In the simpler case where the delay between packets i and i+1 only + // depends on the size of packet i+1, the first packet doesn't give us + // any information. Analogously, we assume that the start send time + // for the last packet doesn't depend on the size of the packet. + recv_size -= (window_.front().sent_packet.size + + window_.front().sent_packet.prior_unacked_data); + send_size -= (window_.back().sent_packet.size + + window_.back().sent_packet.prior_unacked_data); + } + + // Remove the largest gap by replacing it by the second largest gap + // or the average gap. 
+ TimeDelta send_duration = max_send_time - min_send_time; + TimeDelta recv_duration = (max_recv_time - min_recv_time) - largest_recv_gap; + if (settings_.reduce_bias) { + recv_duration += second_largest_recv_gap; + } else { + recv_duration += recv_duration / (window_.size() - 2); + } + + send_duration = std::max(send_duration, TimeDelta::Millis(1)); + recv_duration = std::max(recv_duration, TimeDelta::Millis(1)); + return std::min(send_size / send_duration, recv_size / recv_duration); +} + +} // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/robust_throughput_estimator.h b/modules/congestion_controller/goog_cc/robust_throughput_estimator.h new file mode 100644 index 0000000..de48a9b --- /dev/null +++ b/modules/congestion_controller/goog_cc/robust_throughput_estimator.h @@ -0,0 +1,48 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_ROBUST_THROUGHPUT_ESTIMATOR_H_ +#define MODULES_CONGESTION_CONTROLLER_GOOG_CC_ROBUST_THROUGHPUT_ESTIMATOR_H_ + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/transport/network_types.h" +#include "api/transport/webrtc_key_value_config.h" +#include "api/units/data_rate.h" +#include "modules/congestion_controller/goog_cc/acknowledged_bitrate_estimator_interface.h" + +namespace webrtc { + +class RobustThroughputEstimator : public AcknowledgedBitrateEstimatorInterface { + public: + explicit RobustThroughputEstimator( + const RobustThroughputEstimatorSettings& settings); + ~RobustThroughputEstimator() override; + + void IncomingPacketFeedbackVector( + const std::vector& packet_feedback_vector) override; + + absl::optional bitrate() const override; + + absl::optional PeekRate() const override { return bitrate(); } + void SetAlr(bool /*in_alr*/) override {} + void SetAlrEndedTime(Timestamp /*alr_ended_time*/) override {} + + private: + const RobustThroughputEstimatorSettings settings_; + std::deque window_; +}; + +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_GOOG_CC_ROBUST_THROUGHPUT_ESTIMATOR_H_ diff --git a/modules/congestion_controller/goog_cc/robust_throughput_estimator_unittest.cc b/modules/congestion_controller/goog_cc/robust_throughput_estimator_unittest.cc new file mode 100644 index 0000000..d2e01d3 --- /dev/null +++ b/modules/congestion_controller/goog_cc/robust_throughput_estimator_unittest.cc @@ -0,0 +1,167 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/congestion_controller/goog_cc/robust_throughput_estimator.h" + +#include "api/transport/field_trial_based_config.h" +#include "test/field_trial.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { +std::vector CreateFeedbackVector(size_t number_of_packets, + DataSize packet_size, + TimeDelta send_increment, + TimeDelta recv_increment, + Timestamp* send_clock, + Timestamp* recv_clock, + uint16_t* sequence_number) { + std::vector packet_feedback_vector(number_of_packets); + for (size_t i = 0; i < number_of_packets; i++) { + packet_feedback_vector[i].receive_time = *recv_clock; + packet_feedback_vector[i].sent_packet.send_time = *send_clock; + packet_feedback_vector[i].sent_packet.sequence_number = *sequence_number; + packet_feedback_vector[i].sent_packet.size = packet_size; + *send_clock += send_increment; + *recv_clock += recv_increment; + *sequence_number += 1; + } + return packet_feedback_vector; +} +} // anonymous namespace + +TEST(RobustThroughputEstimatorTest, SteadyRate) { + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Bwe-RobustThroughputEstimatorSettings/" + "enabled:true,assume_shared_link:false,reduce_bias:true,min_packets:10," + "window_duration:100ms/"); + FieldTrialBasedConfig field_trial_config; + RobustThroughputEstimatorSettings settings(&field_trial_config); + RobustThroughputEstimator throughput_estimator(settings); + DataSize packet_size(DataSize::Bytes(1000)); + Timestamp send_clock(Timestamp::Millis(100000)); + Timestamp recv_clock(Timestamp::Millis(10000)); + TimeDelta send_increment(TimeDelta::Millis(10)); + TimeDelta recv_increment(TimeDelta::Millis(10)); + uint16_t sequence_number = 100; + std::vector packet_feedback = + CreateFeedbackVector(9, packet_size, send_increment, recv_increment, + &send_clock, &recv_clock, &sequence_number); + throughput_estimator.IncomingPacketFeedbackVector(packet_feedback); + EXPECT_FALSE(throughput_estimator.bitrate().has_value()); + + packet_feedback = + 
CreateFeedbackVector(11, packet_size, send_increment, recv_increment, + &send_clock, &recv_clock, &sequence_number); + throughput_estimator.IncomingPacketFeedbackVector(packet_feedback); + auto throughput = throughput_estimator.bitrate(); + EXPECT_TRUE(throughput.has_value()); + EXPECT_NEAR(throughput.value().bytes_per_sec(), 100 * 1000.0, + 0.05 * 100 * 1000.0); // Allow 5% error +} + +TEST(RobustThroughputEstimatorTest, DelaySpike) { + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Bwe-RobustThroughputEstimatorSettings/" + "enabled:true,assume_shared_link:false,reduce_bias:true,min_packets:10," + "window_duration:100ms/"); + FieldTrialBasedConfig field_trial_config; + RobustThroughputEstimatorSettings settings(&field_trial_config); + RobustThroughputEstimator throughput_estimator(settings); + DataSize packet_size(DataSize::Bytes(1000)); + Timestamp send_clock(Timestamp::Millis(100000)); + Timestamp recv_clock(Timestamp::Millis(10000)); + TimeDelta send_increment(TimeDelta::Millis(10)); + TimeDelta recv_increment(TimeDelta::Millis(10)); + uint16_t sequence_number = 100; + std::vector packet_feedback = + CreateFeedbackVector(20, packet_size, send_increment, recv_increment, + &send_clock, &recv_clock, &sequence_number); + throughput_estimator.IncomingPacketFeedbackVector(packet_feedback); + auto throughput = throughput_estimator.bitrate(); + EXPECT_TRUE(throughput.has_value()); + EXPECT_NEAR(throughput.value().bytes_per_sec(), 100 * 1000.0, + 0.05 * 100 * 1000.0); // Allow 5% error + + // Delay spike + recv_clock += TimeDelta::Millis(40); + + // Faster delivery after the gap + recv_increment = TimeDelta::Millis(2); + packet_feedback = + CreateFeedbackVector(5, packet_size, send_increment, recv_increment, + &send_clock, &recv_clock, &sequence_number); + throughput_estimator.IncomingPacketFeedbackVector(packet_feedback); + throughput = throughput_estimator.bitrate(); + EXPECT_TRUE(throughput.has_value()); + EXPECT_NEAR(throughput.value().bytes_per_sec(), 100 
* 1000.0, + 0.05 * 100 * 1000.0); // Allow 5% error + + // Delivery at normal rate. This will be capped by the send rate. + recv_increment = TimeDelta::Millis(10); + packet_feedback = + CreateFeedbackVector(5, packet_size, send_increment, recv_increment, + &send_clock, &recv_clock, &sequence_number); + throughput_estimator.IncomingPacketFeedbackVector(packet_feedback); + throughput = throughput_estimator.bitrate(); + EXPECT_TRUE(throughput.has_value()); + EXPECT_NEAR(throughput.value().bytes_per_sec(), 100 * 1000.0, + 0.05 * 100 * 1000.0); // Allow 5% error +} + +TEST(RobustThroughputEstimatorTest, CappedByReceiveRate) { + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Bwe-RobustThroughputEstimatorSettings/" + "enabled:true,assume_shared_link:false,reduce_bias:true,min_packets:10," + "window_duration:100ms/"); + FieldTrialBasedConfig field_trial_config; + RobustThroughputEstimatorSettings settings(&field_trial_config); + RobustThroughputEstimator throughput_estimator(settings); + DataSize packet_size(DataSize::Bytes(1000)); + Timestamp send_clock(Timestamp::Millis(100000)); + Timestamp recv_clock(Timestamp::Millis(10000)); + TimeDelta send_increment(TimeDelta::Millis(10)); + TimeDelta recv_increment(TimeDelta::Millis(40)); + uint16_t sequence_number = 100; + std::vector packet_feedback = + CreateFeedbackVector(20, packet_size, send_increment, recv_increment, + &send_clock, &recv_clock, &sequence_number); + throughput_estimator.IncomingPacketFeedbackVector(packet_feedback); + auto throughput = throughput_estimator.bitrate(); + EXPECT_TRUE(throughput.has_value()); + EXPECT_NEAR(throughput.value().bytes_per_sec(), 25 * 1000.0, + 0.05 * 25 * 1000.0); // Allow 5% error +} + +TEST(RobustThroughputEstimatorTest, CappedBySendRate) { + webrtc::test::ScopedFieldTrials field_trials( + "WebRTC-Bwe-RobustThroughputEstimatorSettings/" + "enabled:true,assume_shared_link:false,reduce_bias:true,min_packets:10," + "window_duration:100ms/"); + FieldTrialBasedConfig 
field_trial_config; + RobustThroughputEstimatorSettings settings(&field_trial_config); + RobustThroughputEstimator throughput_estimator(settings); + DataSize packet_size(DataSize::Bytes(1000)); + Timestamp send_clock(Timestamp::Millis(100000)); + Timestamp recv_clock(Timestamp::Millis(10000)); + TimeDelta send_increment(TimeDelta::Millis(20)); + TimeDelta recv_increment(TimeDelta::Millis(10)); + uint16_t sequence_number = 100; + std::vector packet_feedback = + CreateFeedbackVector(20, packet_size, send_increment, recv_increment, + &send_clock, &recv_clock, &sequence_number); + throughput_estimator.IncomingPacketFeedbackVector(packet_feedback); + auto throughput = throughput_estimator.bitrate(); + EXPECT_TRUE(throughput.has_value()); + EXPECT_NEAR(throughput.value().bytes_per_sec(), 50 * 1000.0, + 0.05 * 50 * 1000.0); // Allow 5% error +} + +} // namespace webrtc*/ diff --git a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc new file mode 100644 index 0000000..d2ae528 --- /dev/null +++ b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.cc @@ -0,0 +1,631 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h" + +#include +#include +#include +#include +#include + +#include "absl/strings/match.h" +#include "api/rtc_event_log/rtc_event.h" +#include "api/rtc_event_log/rtc_event_log.h" +#include "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h" +#include "modules/remote_bitrate_estimator/include/bwe_defines.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "system_wrappers/include/field_trial.h" +#include "system_wrappers/include/metrics.h" + +namespace webrtc { +namespace { +constexpr TimeDelta kBweIncreaseInterval = TimeDelta::Millis(1000); +constexpr TimeDelta kBweDecreaseInterval = TimeDelta::Millis(300); +constexpr TimeDelta kStartPhase = TimeDelta::Millis(2000); +constexpr TimeDelta kBweConverganceTime = TimeDelta::Millis(20000); +constexpr int kLimitNumPackets = 20; +constexpr DataRate kDefaultMaxBitrate = DataRate::BitsPerSec(1000000000); +constexpr TimeDelta kLowBitrateLogPeriod = TimeDelta::Millis(10000); +constexpr TimeDelta kRtcEventLogPeriod = TimeDelta::Millis(5000); +// Expecting that RTCP feedback is sent uniformly within [0.5, 1.5]s intervals. 
+constexpr TimeDelta kMaxRtcpFeedbackInterval = TimeDelta::Millis(5000); + +constexpr float kDefaultLowLossThreshold = 0.02f; +constexpr float kDefaultHighLossThreshold = 0.1f; +constexpr DataRate kDefaultBitrateThreshold = DataRate::Zero(); + +struct UmaRampUpMetric { + const char* metric_name; + int bitrate_kbps; +}; + +const UmaRampUpMetric kUmaRampupMetrics[] = { + {"WebRTC.BWE.RampUpTimeTo500kbpsInMs", 500}, + {"WebRTC.BWE.RampUpTimeTo1000kbpsInMs", 1000}, + {"WebRTC.BWE.RampUpTimeTo2000kbpsInMs", 2000}}; +const size_t kNumUmaRampupMetrics = + sizeof(kUmaRampupMetrics) / sizeof(kUmaRampupMetrics[0]); + +const char kBweLosExperiment[] = "WebRTC-BweLossExperiment"; + +bool BweLossExperimentIsEnabled() { + std::string experiment_string = + webrtc::field_trial::FindFullName(kBweLosExperiment); + // The experiment is enabled iff the field trial string begins with "Enabled". + return absl::StartsWith(experiment_string, "Enabled"); +} + +bool ReadBweLossExperimentParameters(float* low_loss_threshold, + float* high_loss_threshold, + uint32_t* bitrate_threshold_kbps) { + RTC_DCHECK(low_loss_threshold); + RTC_DCHECK(high_loss_threshold); + RTC_DCHECK(bitrate_threshold_kbps); + std::string experiment_string = + webrtc::field_trial::FindFullName(kBweLosExperiment); + int parsed_values = + sscanf(experiment_string.c_str(), "Enabled-%f,%f,%u", low_loss_threshold, + high_loss_threshold, bitrate_threshold_kbps); + if (parsed_values == 3) { + RTC_CHECK_GT(*low_loss_threshold, 0.0f) + << "Loss threshold must be greater than 0."; + RTC_CHECK_LE(*low_loss_threshold, 1.0f) + << "Loss threshold must be less than or equal to 1."; + RTC_CHECK_GT(*high_loss_threshold, 0.0f) + << "Loss threshold must be greater than 0."; + RTC_CHECK_LE(*high_loss_threshold, 1.0f) + << "Loss threshold must be less than or equal to 1."; + RTC_CHECK_LE(*low_loss_threshold, *high_loss_threshold) + << "The low loss threshold must be less than or equal to the high loss " + "threshold."; + 
RTC_CHECK_GE(*bitrate_threshold_kbps, 0) + << "Bitrate threshold can't be negative."; + RTC_CHECK_LT(*bitrate_threshold_kbps, + std::numeric_limits::max() / 1000) + << "Bitrate must be smaller enough to avoid overflows."; + return true; + } + RTC_LOG(LS_WARNING) << "Failed to parse parameters for BweLossExperiment " + "experiment from field trial string. Using default."; + *low_loss_threshold = kDefaultLowLossThreshold; + *high_loss_threshold = kDefaultHighLossThreshold; + *bitrate_threshold_kbps = kDefaultBitrateThreshold.kbps(); + return false; +} +} // namespace + +LinkCapacityTracker::LinkCapacityTracker() + : tracking_rate("rate", TimeDelta::Seconds(10)) { + ParseFieldTrial({&tracking_rate}, + field_trial::FindFullName("WebRTC-Bwe-LinkCapacity")); +} + +LinkCapacityTracker::~LinkCapacityTracker() {} + +void LinkCapacityTracker::UpdateDelayBasedEstimate( + Timestamp at_time, + DataRate delay_based_bitrate) { + if (delay_based_bitrate < last_delay_based_estimate_) { + capacity_estimate_bps_ = + std::min(capacity_estimate_bps_, delay_based_bitrate.bps()); + last_link_capacity_update_ = at_time; + } + last_delay_based_estimate_ = delay_based_bitrate; +} + +void LinkCapacityTracker::OnStartingRate(DataRate start_rate) { + if (last_link_capacity_update_.IsInfinite()) + capacity_estimate_bps_ = start_rate.bps(); +} + +void LinkCapacityTracker::OnRateUpdate(absl::optional acknowledged, + DataRate target, + Timestamp at_time) { + if (!acknowledged) + return; + DataRate acknowledged_target = std::min(*acknowledged, target); + if (acknowledged_target.bps() > capacity_estimate_bps_) { + TimeDelta delta = at_time - last_link_capacity_update_; + double alpha = delta.IsFinite() ? 
exp(-(delta / tracking_rate.Get())) : 0; + capacity_estimate_bps_ = alpha * capacity_estimate_bps_ + + (1 - alpha) * acknowledged_target.bps(); + } + last_link_capacity_update_ = at_time; +} + +void LinkCapacityTracker::OnRttBackoff(DataRate backoff_rate, + Timestamp at_time) { + capacity_estimate_bps_ = + std::min(capacity_estimate_bps_, backoff_rate.bps()); + last_link_capacity_update_ = at_time; +} + +DataRate LinkCapacityTracker::estimate() const { + return DataRate::BitsPerSec(capacity_estimate_bps_); +} + +RttBasedBackoff::RttBasedBackoff() + : rtt_limit_("limit", TimeDelta::Seconds(3)), + drop_fraction_("fraction", 0.8), + drop_interval_("interval", TimeDelta::Seconds(1)), + bandwidth_floor_("floor", DataRate::KilobitsPerSec(5)), + // By initializing this to plus infinity, we make sure that we never + // trigger rtt backoff unless packet feedback is enabled. + last_propagation_rtt_update_(Timestamp::PlusInfinity()), + last_propagation_rtt_(TimeDelta::Zero()), + last_packet_sent_(Timestamp::MinusInfinity()) { + ParseFieldTrial( + {&rtt_limit_, &drop_fraction_, &drop_interval_, &bandwidth_floor_}, + field_trial::FindFullName("WebRTC-Bwe-MaxRttLimit")); +} + +void RttBasedBackoff::UpdatePropagationRtt(Timestamp at_time, + TimeDelta propagation_rtt) { + last_propagation_rtt_update_ = at_time; + last_propagation_rtt_ = propagation_rtt; +} + +TimeDelta RttBasedBackoff::CorrectedRtt(Timestamp at_time) const { + TimeDelta time_since_rtt = at_time - last_propagation_rtt_update_; + TimeDelta timeout_correction = time_since_rtt; + // Avoid timeout when no packets are being sent. 
+ TimeDelta time_since_packet_sent = at_time - last_packet_sent_; + timeout_correction = + std::max(time_since_rtt - time_since_packet_sent, TimeDelta::Zero()); + return timeout_correction + last_propagation_rtt_; +} + +RttBasedBackoff::~RttBasedBackoff() = default; + +SendSideBandwidthEstimation::SendSideBandwidthEstimation(RtcEventLog* event_log) + : lost_packets_since_last_loss_update_(0), + expected_packets_since_last_loss_update_(0), + current_target_(DataRate::Zero()), + last_logged_target_(DataRate::Zero()), + min_bitrate_configured_( + DataRate::BitsPerSec(congestion_controller::GetMinBitrateBps())), + max_bitrate_configured_(kDefaultMaxBitrate), + last_low_bitrate_log_(Timestamp::MinusInfinity()), + has_decreased_since_last_fraction_loss_(false), + last_loss_feedback_(Timestamp::MinusInfinity()), + last_loss_packet_report_(Timestamp::MinusInfinity()), + last_fraction_loss_(0), + last_logged_fraction_loss_(0), + last_round_trip_time_(TimeDelta::Zero()), + receiver_limit_(DataRate::PlusInfinity()), + delay_based_limit_(DataRate::PlusInfinity()), + time_last_decrease_(Timestamp::MinusInfinity()), + first_report_time_(Timestamp::MinusInfinity()), + initially_lost_packets_(0), + bitrate_at_2_seconds_(DataRate::Zero()), + uma_update_state_(kNoUpdate), + uma_rtt_state_(kNoUpdate), + rampup_uma_stats_updated_(kNumUmaRampupMetrics, false), + event_log_(event_log), + last_rtc_event_log_(Timestamp::MinusInfinity()), + low_loss_threshold_(kDefaultLowLossThreshold), + high_loss_threshold_(kDefaultHighLossThreshold), + bitrate_threshold_(kDefaultBitrateThreshold) { + RTC_DCHECK(event_log); + if (BweLossExperimentIsEnabled()) { + uint32_t bitrate_threshold_kbps; + if (ReadBweLossExperimentParameters(&low_loss_threshold_, + &high_loss_threshold_, + &bitrate_threshold_kbps)) { + RTC_LOG(LS_INFO) << "Enabled BweLossExperiment with parameters " + << low_loss_threshold_ << ", " << high_loss_threshold_ + << ", " << bitrate_threshold_kbps; + bitrate_threshold_ = 
DataRate::KilobitsPerSec(bitrate_threshold_kbps); + } + } +} + +SendSideBandwidthEstimation::~SendSideBandwidthEstimation() {} + +void SendSideBandwidthEstimation::OnRouteChange() { + lost_packets_since_last_loss_update_ = 0; + expected_packets_since_last_loss_update_ = 0; + current_target_ = DataRate::Zero(); + min_bitrate_configured_ = + DataRate::BitsPerSec(congestion_controller::GetMinBitrateBps()); + max_bitrate_configured_ = kDefaultMaxBitrate; + last_low_bitrate_log_ = Timestamp::MinusInfinity(); + has_decreased_since_last_fraction_loss_ = false; + last_loss_feedback_ = Timestamp::MinusInfinity(); + last_loss_packet_report_ = Timestamp::MinusInfinity(); + last_fraction_loss_ = 0; + last_logged_fraction_loss_ = 0; + last_round_trip_time_ = TimeDelta::Zero(); + receiver_limit_ = DataRate::PlusInfinity(); + delay_based_limit_ = DataRate::PlusInfinity(); + time_last_decrease_ = Timestamp::MinusInfinity(); + first_report_time_ = Timestamp::MinusInfinity(); + initially_lost_packets_ = 0; + bitrate_at_2_seconds_ = DataRate::Zero(); + uma_update_state_ = kNoUpdate; + uma_rtt_state_ = kNoUpdate; + last_rtc_event_log_ = Timestamp::MinusInfinity(); +} + +void SendSideBandwidthEstimation::SetBitrates( + absl::optional send_bitrate, + DataRate min_bitrate, + DataRate max_bitrate, + Timestamp at_time) { + SetMinMaxBitrate(min_bitrate, max_bitrate); + if (send_bitrate) { + link_capacity_.OnStartingRate(*send_bitrate); + SetSendBitrate(*send_bitrate, at_time); + } +} + +void SendSideBandwidthEstimation::SetSendBitrate(DataRate bitrate, + Timestamp at_time) { + RTC_DCHECK_GT(bitrate, DataRate::Zero()); + // Reset to avoid being capped by the estimate. + delay_based_limit_ = DataRate::PlusInfinity(); + if (loss_based_bandwidth_estimation_.Enabled()) { + loss_based_bandwidth_estimation_.MaybeReset(bitrate); + } + UpdateTargetBitrate(bitrate, at_time); + // Clear last sent bitrate history so the new value can be used directly + // and not capped. 
+ min_bitrate_history_.clear(); +} + +void SendSideBandwidthEstimation::SetMinMaxBitrate(DataRate min_bitrate, + DataRate max_bitrate) { + min_bitrate_configured_ = + std::max(min_bitrate, congestion_controller::GetMinBitrate()); + if (max_bitrate > DataRate::Zero() && max_bitrate.IsFinite()) { + max_bitrate_configured_ = std::max(min_bitrate_configured_, max_bitrate); + } else { + max_bitrate_configured_ = kDefaultMaxBitrate; + } +} + +int SendSideBandwidthEstimation::GetMinBitrate() const { + return min_bitrate_configured_.bps(); +} + +DataRate SendSideBandwidthEstimation::target_rate() const { + return std::max(min_bitrate_configured_, current_target_); +} + +DataRate SendSideBandwidthEstimation::GetEstimatedLinkCapacity() const { + return link_capacity_.estimate(); +} + +void SendSideBandwidthEstimation::UpdateReceiverEstimate(Timestamp at_time, + DataRate bandwidth) { + // TODO(srte): Ensure caller passes PlusInfinity, not zero, to represent no + // limitation. + receiver_limit_ = bandwidth.IsZero() ? DataRate::PlusInfinity() : bandwidth; + ApplyTargetLimits(at_time); +} + +void SendSideBandwidthEstimation::UpdateDelayBasedEstimate(Timestamp at_time, + DataRate bitrate) { + link_capacity_.UpdateDelayBasedEstimate(at_time, bitrate); + // TODO(srte): Ensure caller passes PlusInfinity, not zero, to represent no + // limitation. + delay_based_limit_ = bitrate.IsZero() ? 
DataRate::PlusInfinity() : bitrate; + ApplyTargetLimits(at_time); +} + +void SendSideBandwidthEstimation::SetAcknowledgedRate( + absl::optional acknowledged_rate, + Timestamp at_time) { + acknowledged_rate_ = acknowledged_rate; + if (acknowledged_rate && loss_based_bandwidth_estimation_.Enabled()) { + loss_based_bandwidth_estimation_.UpdateAcknowledgedBitrate( + *acknowledged_rate, at_time); + } +} + +void SendSideBandwidthEstimation::IncomingPacketFeedbackVector( + const TransportPacketsFeedback& report) { + if (loss_based_bandwidth_estimation_.Enabled()) { + loss_based_bandwidth_estimation_.UpdateLossStatistics( + report.packet_feedbacks, report.feedback_time); + } +} + +void SendSideBandwidthEstimation::UpdatePacketsLost(int packets_lost, + int number_of_packets, + Timestamp at_time) { + last_loss_feedback_ = at_time; + if (first_report_time_.IsInfinite()) + first_report_time_ = at_time; + + // Check sequence number diff and weight loss report + if (number_of_packets > 0) { + // Accumulate reports. + lost_packets_since_last_loss_update_ += packets_lost; + expected_packets_since_last_loss_update_ += number_of_packets; + + // Don't generate a loss rate until it can be based on enough packets. + if (expected_packets_since_last_loss_update_ < kLimitNumPackets) + return; + + has_decreased_since_last_fraction_loss_ = false; + int64_t lost_q8 = lost_packets_since_last_loss_update_ << 8; + int64_t expected = expected_packets_since_last_loss_update_; + last_fraction_loss_ = std::min(lost_q8 / expected, 255); + + // Reset accumulators. 
+ + lost_packets_since_last_loss_update_ = 0; + expected_packets_since_last_loss_update_ = 0; + last_loss_packet_report_ = at_time; + UpdateEstimate(at_time); + } + UpdateUmaStatsPacketsLost(at_time, packets_lost); +} + +void SendSideBandwidthEstimation::UpdateUmaStatsPacketsLost(Timestamp at_time, + int packets_lost) { + DataRate bitrate_kbps = + DataRate::KilobitsPerSec((current_target_.bps() + 500) / 1000); + for (size_t i = 0; i < kNumUmaRampupMetrics; ++i) { + if (!rampup_uma_stats_updated_[i] && + bitrate_kbps.kbps() >= kUmaRampupMetrics[i].bitrate_kbps) { + RTC_HISTOGRAMS_COUNTS_100000(i, kUmaRampupMetrics[i].metric_name, + (at_time - first_report_time_).ms()); + rampup_uma_stats_updated_[i] = true; + } + } + if (IsInStartPhase(at_time)) { + initially_lost_packets_ += packets_lost; + } else if (uma_update_state_ == kNoUpdate) { + uma_update_state_ = kFirstDone; + bitrate_at_2_seconds_ = bitrate_kbps; + RTC_HISTOGRAM_COUNTS("WebRTC.BWE.InitiallyLostPackets", + initially_lost_packets_, 0, 100, 50); + RTC_HISTOGRAM_COUNTS("WebRTC.BWE.InitialBandwidthEstimate", + bitrate_at_2_seconds_.kbps(), 0, 2000, 50); + } else if (uma_update_state_ == kFirstDone && + at_time - first_report_time_ >= kBweConverganceTime) { + uma_update_state_ = kDone; + int bitrate_diff_kbps = std::max( + bitrate_at_2_seconds_.kbps() - bitrate_kbps.kbps(), 0); + RTC_HISTOGRAM_COUNTS("WebRTC.BWE.InitialVsConvergedDiff", bitrate_diff_kbps, + 0, 2000, 50); + } +} + +void SendSideBandwidthEstimation::UpdateRtt(TimeDelta rtt, Timestamp at_time) { + // Update RTT if we were able to compute an RTT based on this RTCP. + // FlexFEC doesn't send RTCP SR, which means we won't be able to compute RTT. 
+ if (rtt > TimeDelta::Zero()) + last_round_trip_time_ = rtt; + + if (!IsInStartPhase(at_time) && uma_rtt_state_ == kNoUpdate) { + uma_rtt_state_ = kDone; + RTC_HISTOGRAM_COUNTS("WebRTC.BWE.InitialRtt", rtt.ms(), 0, 2000, 50); + } +} + +void SendSideBandwidthEstimation::UpdateEstimate(Timestamp at_time) { + if (rtt_backoff_.CorrectedRtt(at_time) > rtt_backoff_.rtt_limit_) { + if (at_time - time_last_decrease_ >= rtt_backoff_.drop_interval_ && + current_target_ > rtt_backoff_.bandwidth_floor_) { + time_last_decrease_ = at_time; + DataRate new_bitrate = + std::max(current_target_ * rtt_backoff_.drop_fraction_, + rtt_backoff_.bandwidth_floor_.Get()); + link_capacity_.OnRttBackoff(new_bitrate, at_time); + UpdateTargetBitrate(new_bitrate, at_time); + return; + } + // TODO(srte): This is likely redundant in most cases. + ApplyTargetLimits(at_time); + return; + } + + // We trust the REMB and/or delay-based estimate during the first 2 seconds if + // we haven't had any packet loss reported, to allow startup bitrate probing. + if (last_fraction_loss_ == 0 && IsInStartPhase(at_time)) { + DataRate new_bitrate = current_target_; + // TODO(srte): We should not allow the new_bitrate to be larger than the + // receiver limit here. 
+ if (receiver_limit_.IsFinite()) + new_bitrate = std::max(receiver_limit_, new_bitrate); + if (delay_based_limit_.IsFinite()) + new_bitrate = std::max(delay_based_limit_, new_bitrate); + if (loss_based_bandwidth_estimation_.Enabled()) { + loss_based_bandwidth_estimation_.SetInitialBitrate(new_bitrate); + } + + if (new_bitrate != current_target_) { + min_bitrate_history_.clear(); + if (loss_based_bandwidth_estimation_.Enabled()) { + min_bitrate_history_.push_back(std::make_pair(at_time, new_bitrate)); + } else { + min_bitrate_history_.push_back( + std::make_pair(at_time, current_target_)); + } + UpdateTargetBitrate(new_bitrate, at_time); + return; + } + } + UpdateMinHistory(at_time); + if (last_loss_packet_report_.IsInfinite()) { + // No feedback received. + // TODO(srte): This is likely redundant in most cases. + ApplyTargetLimits(at_time); + return; + } + + if (loss_based_bandwidth_estimation_.Enabled()) { + loss_based_bandwidth_estimation_.Update( + at_time, min_bitrate_history_.front().second, last_round_trip_time_); + DataRate new_bitrate = MaybeRampupOrBackoff(current_target_, at_time); + UpdateTargetBitrate(new_bitrate, at_time); + return; + } + + TimeDelta time_since_loss_packet_report = at_time - last_loss_packet_report_; + if (time_since_loss_packet_report < 1.2 * kMaxRtcpFeedbackInterval) { + // We only care about loss above a given bitrate threshold. + float loss = last_fraction_loss_ / 256.0f; + // We only make decisions based on loss when the bitrate is above a + // threshold. This is a crude way of handling loss which is uncorrelated + // to congestion. + if (current_target_ < bitrate_threshold_ || loss <= low_loss_threshold_) { + // Loss < 2%: Increase rate by 8% of the min bitrate in the last + // kBweIncreaseInterval. + // Note that by remembering the bitrate over the last second one can + // rampup up one second faster than if only allowed to start ramping + // at 8% per second rate now. 
E.g.: + // If sending a constant 100kbps it can rampup immediately to 108kbps + // whenever a receiver report is received with lower packet loss. + // If instead one would do: current_bitrate_ *= 1.08^(delta time), + // it would take over one second since the lower packet loss to achieve + // 108kbps. + DataRate new_bitrate = DataRate::BitsPerSec( + min_bitrate_history_.front().second.bps() * 1.08 + 0.5); + + // Add 1 kbps extra, just to make sure that we do not get stuck + // (gives a little extra increase at low rates, negligible at higher + // rates). + new_bitrate += DataRate::BitsPerSec(1000); + UpdateTargetBitrate(new_bitrate, at_time); + return; + } else if (current_target_ > bitrate_threshold_) { + if (loss <= high_loss_threshold_) { + // Loss between 2% - 10%: Do nothing. + } else { + // Loss > 10%: Limit the rate decreases to once a kBweDecreaseInterval + // + rtt. + if (!has_decreased_since_last_fraction_loss_ && + (at_time - time_last_decrease_) >= + (kBweDecreaseInterval + last_round_trip_time_)) { + time_last_decrease_ = at_time; + + // Reduce rate: + // newRate = rate * (1 - 0.5*lossRate); + // where packetLoss = 256*lossRate; + DataRate new_bitrate = DataRate::BitsPerSec( + (current_target_.bps() * + static_cast(512 - last_fraction_loss_)) / + 512.0); + has_decreased_since_last_fraction_loss_ = true; + UpdateTargetBitrate(new_bitrate, at_time); + return; + } + } + } + } + // TODO(srte): This is likely redundant in most cases. + ApplyTargetLimits(at_time); +} + +void SendSideBandwidthEstimation::UpdatePropagationRtt( + Timestamp at_time, + TimeDelta propagation_rtt) { + rtt_backoff_.UpdatePropagationRtt(at_time, propagation_rtt); +} + +void SendSideBandwidthEstimation::OnSentPacket(const SentPacket& sent_packet) { + // Only feedback-triggering packets will be reported here. 
+ rtt_backoff_.last_packet_sent_ = sent_packet.send_time; +} + +bool SendSideBandwidthEstimation::IsInStartPhase(Timestamp at_time) const { + return first_report_time_.IsInfinite() || + at_time - first_report_time_ < kStartPhase; +} + +void SendSideBandwidthEstimation::UpdateMinHistory(Timestamp at_time) { + // Remove old data points from history. + // Since history precision is in ms, add one so it is able to increase + // bitrate if it is off by as little as 0.5ms. + while (!min_bitrate_history_.empty() && + at_time - min_bitrate_history_.front().first + TimeDelta::Millis(1) > + kBweIncreaseInterval) { + min_bitrate_history_.pop_front(); + } + + // Typical minimum sliding-window algorithm: Pop values higher than current + // bitrate before pushing it. + while (!min_bitrate_history_.empty() && + current_target_ <= min_bitrate_history_.back().second) { + min_bitrate_history_.pop_back(); + } + + min_bitrate_history_.push_back(std::make_pair(at_time, current_target_)); +} + +DataRate SendSideBandwidthEstimation::MaybeRampupOrBackoff(DataRate new_bitrate, + Timestamp at_time) { + // TODO(crodbro): reuse this code in UpdateEstimate instead of current + // inlining of very similar functionality. 
+ const TimeDelta time_since_loss_packet_report = + at_time - last_loss_packet_report_; + if (time_since_loss_packet_report < 1.2 * kMaxRtcpFeedbackInterval) { + new_bitrate = min_bitrate_history_.front().second * 1.08; + new_bitrate += DataRate::BitsPerSec(1000); + } + return new_bitrate; +} + +DataRate SendSideBandwidthEstimation::GetUpperLimit() const { + DataRate upper_limit = std::min(delay_based_limit_, receiver_limit_); + upper_limit = std::min(upper_limit, max_bitrate_configured_); + if (loss_based_bandwidth_estimation_.Enabled() && + loss_based_bandwidth_estimation_.GetEstimate() > DataRate::Zero()) { + upper_limit = + std::min(upper_limit, loss_based_bandwidth_estimation_.GetEstimate()); + } + return upper_limit; +} + +void SendSideBandwidthEstimation::MaybeLogLowBitrateWarning(DataRate bitrate, + Timestamp at_time) { + if (at_time - last_low_bitrate_log_ > kLowBitrateLogPeriod) { + RTC_LOG(LS_WARNING) << "Estimated available bandwidth " << ToString(bitrate) + << " is below configured min bitrate " + << ToString(min_bitrate_configured_) << "."; + last_low_bitrate_log_ = at_time; + } +} + +void SendSideBandwidthEstimation::MaybeLogLossBasedEvent(Timestamp at_time) { + if (current_target_ != last_logged_target_ || + last_fraction_loss_ != last_logged_fraction_loss_ || + at_time - last_rtc_event_log_ > kRtcEventLogPeriod) { + event_log_->Log(std::make_unique( + current_target_.bps(), last_fraction_loss_, + expected_packets_since_last_loss_update_)); + last_logged_fraction_loss_ = last_fraction_loss_; + last_logged_target_ = current_target_; + last_rtc_event_log_ = at_time; + } +} + +void SendSideBandwidthEstimation::UpdateTargetBitrate(DataRate new_bitrate, + Timestamp at_time) { + new_bitrate = std::min(new_bitrate, GetUpperLimit()); + if (new_bitrate < min_bitrate_configured_) { + MaybeLogLowBitrateWarning(new_bitrate, at_time); + new_bitrate = min_bitrate_configured_; + } + current_target_ = new_bitrate; + MaybeLogLossBasedEvent(at_time); + 
link_capacity_.OnRateUpdate(acknowledged_rate_, current_target_, at_time); +} + +void SendSideBandwidthEstimation::ApplyTargetLimits(Timestamp at_time) { + UpdateTargetBitrate(current_target_, at_time); +} +} // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h new file mode 100644 index 0000000..241ec8c --- /dev/null +++ b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h @@ -0,0 +1,193 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + * + * FEC and NACK added bitrate is handled outside class + */ + +#ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_SEND_SIDE_BANDWIDTH_ESTIMATION_H_ +#define MODULES_CONGESTION_CONTROLLER_GOOG_CC_SEND_SIDE_BANDWIDTH_ESTIMATION_H_ + +#include + +#include +#include +#include + +#include "absl/types/optional.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/congestion_controller/goog_cc/loss_based_bandwidth_estimation.h" +#include "rtc_base/experiments/field_trial_parser.h" + +namespace webrtc { + +class RtcEventLog; + +class LinkCapacityTracker { + public: + LinkCapacityTracker(); + ~LinkCapacityTracker(); + // Call when a new delay-based estimate is available. 
+ void UpdateDelayBasedEstimate(Timestamp at_time, + DataRate delay_based_bitrate); + void OnStartingRate(DataRate start_rate); + void OnRateUpdate(absl::optional acknowledged, + DataRate target, + Timestamp at_time); + void OnRttBackoff(DataRate backoff_rate, Timestamp at_time); + DataRate estimate() const; + + private: + FieldTrialParameter tracking_rate; + double capacity_estimate_bps_ = 0; + Timestamp last_link_capacity_update_ = Timestamp::MinusInfinity(); + DataRate last_delay_based_estimate_ = DataRate::PlusInfinity(); +}; + +class RttBasedBackoff { + public: + RttBasedBackoff(); + ~RttBasedBackoff(); + void UpdatePropagationRtt(Timestamp at_time, TimeDelta propagation_rtt); + TimeDelta CorrectedRtt(Timestamp at_time) const; + + FieldTrialParameter rtt_limit_; + FieldTrialParameter drop_fraction_; + FieldTrialParameter drop_interval_; + FieldTrialParameter bandwidth_floor_; + + public: + Timestamp last_propagation_rtt_update_; + TimeDelta last_propagation_rtt_; + Timestamp last_packet_sent_; +}; + +class SendSideBandwidthEstimation { + public: + SendSideBandwidthEstimation() = delete; + explicit SendSideBandwidthEstimation(RtcEventLog* event_log); + ~SendSideBandwidthEstimation(); + + void OnRouteChange(); + + DataRate target_rate() const; + uint8_t fraction_loss() const { return last_fraction_loss_; } + TimeDelta round_trip_time() const { return last_round_trip_time_; } + + DataRate GetEstimatedLinkCapacity() const; + // Call periodically to update estimate. + void UpdateEstimate(Timestamp at_time); + void OnSentPacket(const SentPacket& sent_packet); + void UpdatePropagationRtt(Timestamp at_time, TimeDelta propagation_rtt); + + // Call when we receive a RTCP message with TMMBR or REMB. + void UpdateReceiverEstimate(Timestamp at_time, DataRate bandwidth); + + // Call when a new delay-based estimate is available. + void UpdateDelayBasedEstimate(Timestamp at_time, DataRate bitrate); + + // Call when we receive a RTCP message with a ReceiveBlock. 
+ void UpdatePacketsLost(int packets_lost, + int number_of_packets, + Timestamp at_time); + + // Call when we receive a RTCP message with a ReceiveBlock. + void UpdateRtt(TimeDelta rtt, Timestamp at_time); + + void SetBitrates(absl::optional send_bitrate, + DataRate min_bitrate, + DataRate max_bitrate, + Timestamp at_time); + void SetSendBitrate(DataRate bitrate, Timestamp at_time); + void SetMinMaxBitrate(DataRate min_bitrate, DataRate max_bitrate); + int GetMinBitrate() const; + void SetAcknowledgedRate(absl::optional acknowledged_rate, + Timestamp at_time); + void IncomingPacketFeedbackVector(const TransportPacketsFeedback& report); + + private: + friend class GoogCcStatePrinter; + + enum UmaState { kNoUpdate, kFirstDone, kDone }; + + bool IsInStartPhase(Timestamp at_time) const; + + void UpdateUmaStatsPacketsLost(Timestamp at_time, int packets_lost); + + // Updates history of min bitrates. + // After this method returns min_bitrate_history_.front().second contains the + // min bitrate used during last kBweIncreaseIntervalMs. + void UpdateMinHistory(Timestamp at_time); + + DataRate MaybeRampupOrBackoff(DataRate new_bitrate, Timestamp at_time); + + // Gets the upper limit for the target bitrate. This is the minimum of the + // delay based limit, the receiver limit and the loss based controller limit. + DataRate GetUpperLimit() const; + // Prints a warning if |bitrate| if sufficiently long time has past since last + // warning. + void MaybeLogLowBitrateWarning(DataRate bitrate, Timestamp at_time); + // Stores an update to the event log if the loss rate has changed, the target + // has changed, or sufficient time has passed since last stored event. + void MaybeLogLossBasedEvent(Timestamp at_time); + + // Cap |bitrate| to [min_bitrate_configured_, max_bitrate_configured_] and + // set |current_bitrate_| to the capped value and updates the event log. 
+ void UpdateTargetBitrate(DataRate bitrate, Timestamp at_time); + // Applies lower and upper bounds to the current target rate. + // TODO(srte): This seems to be called even when limits haven't changed, that + // should be cleaned up. + void ApplyTargetLimits(Timestamp at_time); + + RttBasedBackoff rtt_backoff_; + LinkCapacityTracker link_capacity_; + + std::deque > min_bitrate_history_; + + // incoming filters + int lost_packets_since_last_loss_update_; + int expected_packets_since_last_loss_update_; + + absl::optional acknowledged_rate_; + DataRate current_target_; + DataRate last_logged_target_; + DataRate min_bitrate_configured_; + DataRate max_bitrate_configured_; + Timestamp last_low_bitrate_log_; + + bool has_decreased_since_last_fraction_loss_; + Timestamp last_loss_feedback_; + Timestamp last_loss_packet_report_; + uint8_t last_fraction_loss_; + uint8_t last_logged_fraction_loss_; + TimeDelta last_round_trip_time_; + + // The max bitrate as set by the receiver in the call. This is typically + // signalled using the REMB RTCP message and is used when we don't have any + // send side delay based estimate. 
+ DataRate receiver_limit_; + DataRate delay_based_limit_; + Timestamp time_last_decrease_; + Timestamp first_report_time_; + int initially_lost_packets_; + DataRate bitrate_at_2_seconds_; + UmaState uma_update_state_; + UmaState uma_rtt_state_; + std::vector rampup_uma_stats_updated_; + RtcEventLog* const event_log_; + Timestamp last_rtc_event_log_; + float low_loss_threshold_; + float high_loss_threshold_; + DataRate bitrate_threshold_; + LossBasedBandwidthEstimation loss_based_bandwidth_estimation_; +}; +} // namespace webrtc +#endif // MODULES_CONGESTION_CONTROLLER_GOOG_CC_SEND_SIDE_BANDWIDTH_ESTIMATION_H_ diff --git a/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation_unittest.cc b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation_unittest.cc new file mode 100644 index 0000000..06e3925 --- /dev/null +++ b/modules/congestion_controller/goog_cc/send_side_bandwidth_estimation_unittest.cc @@ -0,0 +1,166 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/congestion_controller/goog_cc/send_side_bandwidth_estimation.h" + +#include "api/rtc_event_log/rtc_event.h" +#include "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h" +#include "logging/rtc_event_log/mock/mock_rtc_event_log.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { + +MATCHER(LossBasedBweUpdateWithBitrateOnly, "") { + if (arg->GetType() != RtcEvent::Type::BweUpdateLossBased) { + return false; + } + auto bwe_event = static_cast(arg); + return bwe_event->bitrate_bps() > 0 && bwe_event->fraction_loss() == 0; +} + +MATCHER(LossBasedBweUpdateWithBitrateAndLossFraction, "") { + if (arg->GetType() != RtcEvent::Type::BweUpdateLossBased) { + return false; + } + auto bwe_event = static_cast(arg); + return bwe_event->bitrate_bps() > 0 && bwe_event->fraction_loss() > 0; +} + +void TestProbing(bool use_delay_based) { + ::testing::NiceMock event_log; + SendSideBandwidthEstimation bwe(&event_log); + int64_t now_ms = 0; + bwe.SetMinMaxBitrate(DataRate::BitsPerSec(100000), + DataRate::BitsPerSec(1500000)); + bwe.SetSendBitrate(DataRate::BitsPerSec(200000), Timestamp::Millis(now_ms)); + + const int kRembBps = 1000000; + const int kSecondRembBps = kRembBps + 500000; + + bwe.UpdatePacketsLost(/*packets_lost=*/0, /*number_of_packets=*/1, + Timestamp::Millis(now_ms)); + bwe.UpdateRtt(TimeDelta::Millis(50), Timestamp::Millis(now_ms)); + + // Initial REMB applies immediately. + if (use_delay_based) { + bwe.UpdateDelayBasedEstimate(Timestamp::Millis(now_ms), + DataRate::BitsPerSec(kRembBps)); + } else { + bwe.UpdateReceiverEstimate(Timestamp::Millis(now_ms), + DataRate::BitsPerSec(kRembBps)); + } + bwe.UpdateEstimate(Timestamp::Millis(now_ms)); + EXPECT_EQ(kRembBps, bwe.target_rate().bps()); + + // Second REMB doesn't apply immediately. 
+ now_ms += 2001; + if (use_delay_based) { + bwe.UpdateDelayBasedEstimate(Timestamp::Millis(now_ms), + DataRate::BitsPerSec(kSecondRembBps)); + } else { + bwe.UpdateReceiverEstimate(Timestamp::Millis(now_ms), + DataRate::BitsPerSec(kSecondRembBps)); + } + bwe.UpdateEstimate(Timestamp::Millis(now_ms)); + EXPECT_EQ(kRembBps, bwe.target_rate().bps()); +} + +TEST(SendSideBweTest, InitialRembWithProbing) { + TestProbing(false); +} + +TEST(SendSideBweTest, InitialDelayBasedBweWithProbing) { + TestProbing(true); +} + +TEST(SendSideBweTest, DoesntReapplyBitrateDecreaseWithoutFollowingRemb) { + MockRtcEventLog event_log; + EXPECT_CALL(event_log, LogProxy(LossBasedBweUpdateWithBitrateOnly())) + .Times(1); + EXPECT_CALL(event_log, + LogProxy(LossBasedBweUpdateWithBitrateAndLossFraction())) + .Times(1); + SendSideBandwidthEstimation bwe(&event_log); + static const int kMinBitrateBps = 100000; + static const int kInitialBitrateBps = 1000000; + int64_t now_ms = 1000; + bwe.SetMinMaxBitrate(DataRate::BitsPerSec(kMinBitrateBps), + DataRate::BitsPerSec(1500000)); + bwe.SetSendBitrate(DataRate::BitsPerSec(kInitialBitrateBps), + Timestamp::Millis(now_ms)); + + static const uint8_t kFractionLoss = 128; + static const int64_t kRttMs = 50; + now_ms += 10000; + + EXPECT_EQ(kInitialBitrateBps, bwe.target_rate().bps()); + EXPECT_EQ(0, bwe.fraction_loss()); + EXPECT_EQ(0, bwe.round_trip_time().ms()); + + // Signal heavy loss to go down in bitrate. + bwe.UpdatePacketsLost(/*packets_lost=*/50, /*number_of_packets=*/100, + Timestamp::Millis(now_ms)); + bwe.UpdateRtt(TimeDelta::Millis(kRttMs), Timestamp::Millis(now_ms)); + + // Trigger an update 2 seconds later to not be rate limited. + now_ms += 1000; + bwe.UpdateEstimate(Timestamp::Millis(now_ms)); + EXPECT_LT(bwe.target_rate().bps(), kInitialBitrateBps); + // Verify that the obtained bitrate isn't hitting the min bitrate, or this + // test doesn't make sense. 
If this ever happens, update the thresholds or + // loss rates so that it doesn't hit min bitrate after one bitrate update. + EXPECT_GT(bwe.target_rate().bps(), kMinBitrateBps); + EXPECT_EQ(kFractionLoss, bwe.fraction_loss()); + EXPECT_EQ(kRttMs, bwe.round_trip_time().ms()); + + // Triggering an update shouldn't apply further downgrade nor upgrade since + // there's no intermediate receiver block received indicating whether this is + // currently good or not. + int last_bitrate_bps = bwe.target_rate().bps(); + // Trigger an update 2 seconds later to not be rate limited (but it still + // shouldn't update). + now_ms += 1000; + bwe.UpdateEstimate(Timestamp::Millis(now_ms)); + + EXPECT_EQ(last_bitrate_bps, bwe.target_rate().bps()); + // The old loss rate should still be applied though. + EXPECT_EQ(kFractionLoss, bwe.fraction_loss()); + EXPECT_EQ(kRttMs, bwe.round_trip_time().ms()); +} + +TEST(SendSideBweTest, SettingSendBitrateOverridesDelayBasedEstimate) { + ::testing::NiceMock event_log; + SendSideBandwidthEstimation bwe(&event_log); + static const int kMinBitrateBps = 10000; + static const int kMaxBitrateBps = 10000000; + static const int kInitialBitrateBps = 300000; + static const int kDelayBasedBitrateBps = 350000; + static const int kForcedHighBitrate = 2500000; + + int64_t now_ms = 0; + + bwe.SetMinMaxBitrate(DataRate::BitsPerSec(kMinBitrateBps), + DataRate::BitsPerSec(kMaxBitrateBps)); + bwe.SetSendBitrate(DataRate::BitsPerSec(kInitialBitrateBps), + Timestamp::Millis(now_ms)); + + bwe.UpdateDelayBasedEstimate(Timestamp::Millis(now_ms), + DataRate::BitsPerSec(kDelayBasedBitrateBps)); + bwe.UpdateEstimate(Timestamp::Millis(now_ms)); + EXPECT_GE(bwe.target_rate().bps(), kInitialBitrateBps); + EXPECT_LE(bwe.target_rate().bps(), kDelayBasedBitrateBps); + + bwe.SetSendBitrate(DataRate::BitsPerSec(kForcedHighBitrate), + Timestamp::Millis(now_ms)); + EXPECT_EQ(bwe.target_rate().bps(), kForcedHighBitrate); +} + +} // namespace webrtc diff --git 
a/modules/congestion_controller/goog_cc/test/goog_cc_printer.cc b/modules/congestion_controller/goog_cc/test/goog_cc_printer.cc new file mode 100644 index 0000000..52baab0 --- /dev/null +++ b/modules/congestion_controller/goog_cc/test/goog_cc_printer.cc @@ -0,0 +1,199 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/congestion_controller/goog_cc/test/goog_cc_printer.h" + +#include + +#include + +#include "absl/types/optional.h" +#include "modules/congestion_controller/goog_cc/alr_detector.h" +#include "modules/congestion_controller/goog_cc/delay_based_bwe.h" +#include "modules/congestion_controller/goog_cc/trendline_estimator.h" +#include "modules/remote_bitrate_estimator/aimd_rate_control.h" +#include "rtc_base/checks.h" + +namespace webrtc { +namespace { +void WriteTypedValue(RtcEventLogOutput* out, int value) { + LogWriteFormat(out, "%i", value); +} +void WriteTypedValue(RtcEventLogOutput* out, double value) { + LogWriteFormat(out, "%.6f", value); +} +void WriteTypedValue(RtcEventLogOutput* out, absl::optional value) { + LogWriteFormat(out, "%.0f", value ? value->bytes_per_sec() : NAN); +} +void WriteTypedValue(RtcEventLogOutput* out, absl::optional value) { + LogWriteFormat(out, "%.0f", value ? value->bytes() : NAN); +} +void WriteTypedValue(RtcEventLogOutput* out, absl::optional value) { + LogWriteFormat(out, "%.3f", value ? value->seconds() : NAN); +} +void WriteTypedValue(RtcEventLogOutput* out, absl::optional value) { + LogWriteFormat(out, "%.3f", value ? 
value->seconds() : NAN); +} + +template +class TypedFieldLogger : public FieldLogger { + public: + TypedFieldLogger(std::string name, F&& getter) + : name_(std::move(name)), getter_(std::forward(getter)) {} + const std::string& name() const override { return name_; } + void WriteValue(RtcEventLogOutput* out) override { + WriteTypedValue(out, getter_()); + } + + private: + std::string name_; + F getter_; +}; + +template +FieldLogger* Log(std::string name, F&& getter) { + return new TypedFieldLogger(std::move(name), std::forward(getter)); +} + +} // namespace +GoogCcStatePrinter::GoogCcStatePrinter() { + for (auto* logger : CreateLoggers()) { + loggers_.emplace_back(logger); + } +} + +std::deque GoogCcStatePrinter::CreateLoggers() { + auto stable_estimate = [this] { + return DataRate::KilobitsPerSec( + controller_->delay_based_bwe_->rate_control_.link_capacity_ + .estimate_kbps_.value_or(-INFINITY)); + }; + auto rate_control_state = [this] { + return static_cast( + controller_->delay_based_bwe_->rate_control_.rate_control_state_); + }; + auto trend = [this] { + return reinterpret_cast( + controller_->delay_based_bwe_->active_delay_detector_); + }; + auto acknowledged_rate = [this] { + return controller_->acknowledged_bitrate_estimator_->bitrate(); + }; + auto loss_cont = [&] { + return &controller_->bandwidth_estimation_ + ->loss_based_bandwidth_estimation_; + }; + std::deque loggers({ + Log("time", [=] { return target_.at_time; }), + Log("rtt", [=] { return target_.network_estimate.round_trip_time; }), + Log("target", [=] { return target_.target_rate; }), + Log("stable_target", [=] { return target_.stable_target_rate; }), + Log("pacing", [=] { return pacing_.data_rate(); }), + Log("padding", [=] { return pacing_.pad_rate(); }), + Log("window", [=] { return congestion_window_; }), + Log("rate_control_state", [=] { return rate_control_state(); }), + Log("stable_estimate", [=] { return stable_estimate(); }), + Log("trendline", [=] { return trend()->prev_trend_; }), + 
Log("trendline_modified_offset", + [=] { return trend()->prev_modified_trend_; }), + Log("trendline_offset_threshold", [=] { return trend()->threshold_; }), + Log("acknowledged_rate", [=] { return acknowledged_rate(); }), + Log("est_capacity", [=] { return est_.link_capacity; }), + Log("est_capacity_dev", [=] { return est_.link_capacity_std_dev; }), + Log("est_capacity_min", [=] { return est_.link_capacity_min; }), + Log("est_cross_traffic", [=] { return est_.cross_traffic_ratio; }), + Log("est_cross_delay", [=] { return est_.cross_delay_rate; }), + Log("est_spike_delay", [=] { return est_.spike_delay_rate; }), + Log("est_pre_buffer", [=] { return est_.pre_link_buffer_delay; }), + Log("est_post_buffer", [=] { return est_.post_link_buffer_delay; }), + Log("est_propagation", [=] { return est_.propagation_delay; }), + Log("loss_ratio", [=] { return loss_cont()->last_loss_ratio_; }), + Log("loss_average", [=] { return loss_cont()->average_loss_; }), + Log("loss_average_max", [=] { return loss_cont()->average_loss_max_; }), + Log("loss_thres_inc", + [=] { return loss_cont()->loss_increase_threshold(); }), + Log("loss_thres_dec", + [=] { return loss_cont()->loss_decrease_threshold(); }), + Log("loss_dec_rate", [=] { return loss_cont()->decreased_bitrate(); }), + Log("loss_based_rate", [=] { return loss_cont()->loss_based_bitrate_; }), + Log("loss_ack_rate", + [=] { return loss_cont()->acknowledged_bitrate_max_; }), + Log("data_window", [=] { return controller_->current_data_window_; }), + Log("pushback_target", + [=] { return controller_->last_pushback_target_rate_; }), + }); + return loggers; +} +GoogCcStatePrinter::~GoogCcStatePrinter() = default; + +void GoogCcStatePrinter::PrintHeaders(RtcEventLogOutput* log) { + int ix = 0; + for (const auto& logger : loggers_) { + if (ix++) + log->Write(" "); + log->Write(logger->name()); + } + log->Write("\n"); + log->Flush(); +} + +void GoogCcStatePrinter::PrintState(RtcEventLogOutput* log, + GoogCcNetworkController* controller, 
+ Timestamp at_time) { + controller_ = controller; + auto state_update = controller_->GetNetworkState(at_time); + target_ = state_update.target_rate.value(); + pacing_ = state_update.pacer_config.value(); + if (state_update.congestion_window) + congestion_window_ = *state_update.congestion_window; + if (controller_->network_estimator_) { + est_ = controller_->network_estimator_->GetCurrentEstimate().value_or( + NetworkStateEstimate()); + } + + int ix = 0; + for (const auto& logger : loggers_) { + if (ix++) + log->Write(" "); + logger->WriteValue(log); + } + + log->Write("\n"); + log->Flush(); +} + +GoogCcDebugFactory::GoogCcDebugFactory() + : GoogCcDebugFactory(GoogCcFactoryConfig()) {} + +GoogCcDebugFactory::GoogCcDebugFactory(GoogCcFactoryConfig config) + : GoogCcNetworkControllerFactory(std::move(config)) {} + +std::unique_ptr GoogCcDebugFactory::Create( + NetworkControllerConfig config) { + RTC_CHECK(controller_ == nullptr); + auto controller = GoogCcNetworkControllerFactory::Create(config); + controller_ = static_cast(controller.get()); + return controller; +} + +void GoogCcDebugFactory::PrintState(const Timestamp at_time) { + if (controller_ && log_writer_) { + printer_.PrintState(log_writer_.get(), controller_, at_time); + } +} + +void GoogCcDebugFactory::AttachWriter( + std::unique_ptr log_writer) { + if (log_writer) { + log_writer_ = std::move(log_writer); + printer_.PrintHeaders(log_writer_.get()); + } +} + +} // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/test/goog_cc_printer.h b/modules/congestion_controller/goog_cc/test/goog_cc_printer.h new file mode 100644 index 0000000..3eee781 --- /dev/null +++ b/modules/congestion_controller/goog_cc/test/goog_cc_printer.h @@ -0,0 +1,75 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_TEST_GOOG_CC_PRINTER_H_ +#define MODULES_CONGESTION_CONTROLLER_GOOG_CC_TEST_GOOG_CC_PRINTER_H_ + +#include +#include +#include + +#include "api/rtc_event_log/rtc_event_log.h" +#include "api/transport/goog_cc_factory.h" +#include "api/transport/network_control.h" +#include "api/transport/network_types.h" +#include "api/units/timestamp.h" +#include "modules/congestion_controller/goog_cc/goog_cc_network_control.h" +#include "test/logging/log_writer.h" + +namespace webrtc { + +class FieldLogger { + public: + virtual ~FieldLogger() = default; + virtual const std::string& name() const = 0; + virtual void WriteValue(RtcEventLogOutput* out) = 0; +}; + +class GoogCcStatePrinter { + public: + GoogCcStatePrinter(); + GoogCcStatePrinter(const GoogCcStatePrinter&) = delete; + GoogCcStatePrinter& operator=(const GoogCcStatePrinter&) = delete; + ~GoogCcStatePrinter(); + + void PrintHeaders(RtcEventLogOutput* log); + void PrintState(RtcEventLogOutput* log, + GoogCcNetworkController* controller, + Timestamp at_time); + + private: + std::deque CreateLoggers(); + std::deque> loggers_; + + GoogCcNetworkController* controller_ = nullptr; + TargetTransferRate target_; + PacerConfig pacing_; + DataSize congestion_window_ = DataSize::PlusInfinity(); + NetworkStateEstimate est_; +}; + +class GoogCcDebugFactory : public GoogCcNetworkControllerFactory { + public: + GoogCcDebugFactory(); + explicit GoogCcDebugFactory(GoogCcFactoryConfig config); + std::unique_ptr Create( + NetworkControllerConfig config) override; + + void PrintState(const Timestamp at_time); + + void AttachWriter(std::unique_ptr log_writer); + + private: + GoogCcStatePrinter printer_; + GoogCcNetworkController* controller_ = nullptr; + std::unique_ptr log_writer_; +}; +} // namespace 
webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_GOOG_CC_TEST_GOOG_CC_PRINTER_H_ diff --git a/modules/congestion_controller/goog_cc/trendline_estimator.cc b/modules/congestion_controller/goog_cc/trendline_estimator.cc new file mode 100644 index 0000000..c04db73 --- /dev/null +++ b/modules/congestion_controller/goog_cc/trendline_estimator.cc @@ -0,0 +1,333 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/congestion_controller/goog_cc/trendline_estimator.h" + +#include + +#include +#include + +#include "absl/strings/match.h" +#include "absl/types/optional.h" +#include "modules/remote_bitrate_estimator/include/bwe_defines.h" +#include "modules/remote_bitrate_estimator/test/bwe_test_logging.h" +#include "rtc_base/checks.h" +#include "rtc_base/experiments/struct_parameters_parser.h" +#include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_minmax.h" + +namespace webrtc { + +namespace { + +// Parameters for linear least squares fit of regression line to noisy data. 
+constexpr double kDefaultTrendlineSmoothingCoeff = 0.9; +constexpr double kDefaultTrendlineThresholdGain = 4.0; +const char kBweWindowSizeInPacketsExperiment[] = + "WebRTC-BweWindowSizeInPackets"; + +size_t ReadTrendlineFilterWindowSize( + const WebRtcKeyValueConfig* key_value_config) { + std::string experiment_string = + key_value_config->Lookup(kBweWindowSizeInPacketsExperiment); + size_t window_size; + int parsed_values = + sscanf(experiment_string.c_str(), "Enabled-%zu", &window_size); + if (parsed_values == 1) { + if (window_size > 1) + return window_size; + RTC_LOG(LS_WARNING) << "Window size must be greater than 1."; + } + RTC_LOG(LS_WARNING) << "Failed to parse parameters for BweWindowSizeInPackets" + " experiment from field trial string. Using default."; + return TrendlineEstimatorSettings::kDefaultTrendlineWindowSize; +} + +absl::optional<double> LinearFitSlope( + const std::deque<TrendlineEstimator::PacketTiming>& packets) { + RTC_DCHECK(packets.size() >= 2); + // Compute the "center of mass". + double sum_x = 0; + double sum_y = 0; + for (const auto& packet : packets) { + sum_x += packet.arrival_time_ms; + sum_y += packet.smoothed_delay_ms; + } + double x_avg = sum_x / packets.size(); + double y_avg = sum_y / packets.size(); + // Compute the slope k = \sum (x_i-x_avg)(y_i-y_avg) / \sum (x_i-x_avg)^2 + double numerator = 0; + double denominator = 0; + for (const auto& packet : packets) { + double x = packet.arrival_time_ms; + double y = packet.smoothed_delay_ms; + numerator += (x - x_avg) * (y - y_avg); + denominator += (x - x_avg) * (x - x_avg); + } + if (denominator == 0) + return absl::nullopt; + return numerator / denominator; +} + +absl::optional<double> ComputeSlopeCap( + const std::deque<TrendlineEstimator::PacketTiming>& packets, + const TrendlineEstimatorSettings& settings) { + RTC_DCHECK(1 <= settings.beginning_packets && + settings.beginning_packets < packets.size()); + RTC_DCHECK(1 <= settings.end_packets && + settings.end_packets < packets.size()); + RTC_DCHECK(settings.beginning_packets + settings.end_packets <= + 
packets.size()); + TrendlineEstimator::PacketTiming early = packets[0]; + for (size_t i = 1; i < settings.beginning_packets; ++i) { + if (packets[i].raw_delay_ms < early.raw_delay_ms) + early = packets[i]; + } + size_t late_start = packets.size() - settings.end_packets; + TrendlineEstimator::PacketTiming late = packets[late_start]; + for (size_t i = late_start + 1; i < packets.size(); ++i) { + if (packets[i].raw_delay_ms < late.raw_delay_ms) + late = packets[i]; + } + if (late.arrival_time_ms - early.arrival_time_ms < 1) { + return absl::nullopt; + } + return (late.raw_delay_ms - early.raw_delay_ms) / + (late.arrival_time_ms - early.arrival_time_ms) + + settings.cap_uncertainty; +} + +constexpr double kMaxAdaptOffsetMs = 15.0; +constexpr double kOverUsingTimeThreshold = 10; +constexpr int kMinNumDeltas = 60; +constexpr int kDeltaCounterMax = 1000; + +} // namespace + +constexpr char TrendlineEstimatorSettings::kKey[]; + +TrendlineEstimatorSettings::TrendlineEstimatorSettings( + const WebRtcKeyValueConfig* key_value_config) { + if (absl::StartsWith( + key_value_config->Lookup(kBweWindowSizeInPacketsExperiment), + "Enabled")) { + window_size = ReadTrendlineFilterWindowSize(key_value_config); + } + Parser()->Parse(key_value_config->Lookup(TrendlineEstimatorSettings::kKey)); + if (window_size < 10 || 200 < window_size) { + RTC_LOG(LS_WARNING) << "Window size must be between 10 and 200 packets"; + window_size = kDefaultTrendlineWindowSize; + } + if (enable_cap) { + if (beginning_packets < 1 || end_packets < 1 || + beginning_packets > window_size || end_packets > window_size) { + RTC_LOG(LS_WARNING) << "Size of beginning and end must be between 1 and " + << window_size; + enable_cap = false; + beginning_packets = end_packets = 0; + cap_uncertainty = 0.0; + } + if (beginning_packets + end_packets > window_size) { + RTC_LOG(LS_WARNING) + << "Size of beginning plus end can't exceed the window size"; + enable_cap = false; + beginning_packets = end_packets = 0; + 
cap_uncertainty = 0.0; + } + if (cap_uncertainty < 0.0 || 0.025 < cap_uncertainty) { + RTC_LOG(LS_WARNING) << "Cap uncertainty must be between 0 and 0.025"; + cap_uncertainty = 0.0; + } + } +} + +std::unique_ptr TrendlineEstimatorSettings::Parser() { + return StructParametersParser::Create("sort", &enable_sort, // + "cap", &enable_cap, // + "beginning_packets", + &beginning_packets, // + "end_packets", &end_packets, // + "cap_uncertainty", &cap_uncertainty, // + "window_size", &window_size); +} + +TrendlineEstimator::TrendlineEstimator( + const WebRtcKeyValueConfig* key_value_config, + NetworkStatePredictor* network_state_predictor) + : settings_(key_value_config), + smoothing_coef_(kDefaultTrendlineSmoothingCoeff), + threshold_gain_(kDefaultTrendlineThresholdGain), + num_of_deltas_(0), + first_arrival_time_ms_(-1), + accumulated_delay_(0), + smoothed_delay_(0), + delay_hist_(), + k_up_(0.0087), + k_down_(0.039), + overusing_time_threshold_(kOverUsingTimeThreshold), + threshold_(12.5), + prev_modified_trend_(NAN), + last_update_ms_(-1), + prev_trend_(0.0), + time_over_using_(-1), + overuse_counter_(0), + hypothesis_(BandwidthUsage::kBwNormal), + hypothesis_predicted_(BandwidthUsage::kBwNormal), + network_state_predictor_(network_state_predictor) { + RTC_LOG(LS_INFO) + << "Using Trendline filter for delay change estimation with settings " + << settings_.Parser()->Encode() << " and " + << (network_state_predictor_ ? "injected" : "no") + << " network state predictor"; +} + +TrendlineEstimator::~TrendlineEstimator() {} + +void TrendlineEstimator::UpdateTrendline(double recv_delta_ms, + double send_delta_ms, + int64_t send_time_ms, + int64_t arrival_time_ms, + size_t packet_size) { + const double delta_ms = recv_delta_ms - send_delta_ms; + ++num_of_deltas_; + num_of_deltas_ = std::min(num_of_deltas_, kDeltaCounterMax); + if (first_arrival_time_ms_ == -1) + first_arrival_time_ms_ = arrival_time_ms; + + // Exponential backoff filter. 
+ accumulated_delay_ += delta_ms; + BWE_TEST_LOGGING_PLOT(1, "accumulated_delay_ms", arrival_time_ms, + accumulated_delay_); + smoothed_delay_ = smoothing_coef_ * smoothed_delay_ + + (1 - smoothing_coef_) * accumulated_delay_; + BWE_TEST_LOGGING_PLOT(1, "smoothed_delay_ms", arrival_time_ms, + smoothed_delay_); + + // Maintain packet window + delay_hist_.emplace_back( + static_cast(arrival_time_ms - first_arrival_time_ms_), + smoothed_delay_, accumulated_delay_); + if (settings_.enable_sort) { + for (size_t i = delay_hist_.size() - 1; + i > 0 && + delay_hist_[i].arrival_time_ms < delay_hist_[i - 1].arrival_time_ms; + --i) { + std::swap(delay_hist_[i], delay_hist_[i - 1]); + } + } + if (delay_hist_.size() > settings_.window_size) + delay_hist_.pop_front(); + + // Simple linear regression. + double trend = prev_trend_; + if (delay_hist_.size() == settings_.window_size) { + // Update trend_ if it is possible to fit a line to the data. The delay + // trend can be seen as an estimate of (send_rate - capacity)/capacity. + // 0 < trend < 1 -> the delay increases, queues are filling up + // trend == 0 -> the delay does not change + // trend < 0 -> the delay decreases, queues are being emptied + trend = LinearFitSlope(delay_hist_).value_or(trend); + if (settings_.enable_cap) { + absl::optional cap = ComputeSlopeCap(delay_hist_, settings_); + // We only use the cap to filter out overuse detections, not + // to detect additional underuses. 
+ if (trend >= 0 && cap.has_value() && trend > cap.value()) { + trend = cap.value(); + } + } + } + BWE_TEST_LOGGING_PLOT(1, "trendline_slope", arrival_time_ms, trend); + + Detect(trend, send_delta_ms, arrival_time_ms); +} + +void TrendlineEstimator::Update(double recv_delta_ms, + double send_delta_ms, + int64_t send_time_ms, + int64_t arrival_time_ms, + size_t packet_size, + bool calculated_deltas) { + if (calculated_deltas) { + UpdateTrendline(recv_delta_ms, send_delta_ms, send_time_ms, arrival_time_ms, + packet_size); + } + if (network_state_predictor_) { + hypothesis_predicted_ = network_state_predictor_->Update( + send_time_ms, arrival_time_ms, hypothesis_); + } +} + +BandwidthUsage TrendlineEstimator::State() const { + return network_state_predictor_ ? hypothesis_predicted_ : hypothesis_; +} + +void TrendlineEstimator::Detect(double trend, double ts_delta, int64_t now_ms) { + if (num_of_deltas_ < 2) { + hypothesis_ = BandwidthUsage::kBwNormal; + return; + } + const double modified_trend = + std::min(num_of_deltas_, kMinNumDeltas) * trend * threshold_gain_; + prev_modified_trend_ = modified_trend; + BWE_TEST_LOGGING_PLOT(1, "T", now_ms, modified_trend); + BWE_TEST_LOGGING_PLOT(1, "threshold", now_ms, threshold_); + if (modified_trend > threshold_) { + if (time_over_using_ == -1) { + // Initialize the timer. Assume that we've been + // over-using half of the time since the previous + // sample. 
+ time_over_using_ = ts_delta / 2; + } else { + // Increment timer + time_over_using_ += ts_delta; + } + overuse_counter_++; + if (time_over_using_ > overusing_time_threshold_ && overuse_counter_ > 1) { + if (trend >= prev_trend_) { + time_over_using_ = 0; + overuse_counter_ = 0; + hypothesis_ = BandwidthUsage::kBwOverusing; + } + } + } else if (modified_trend < -threshold_) { + time_over_using_ = -1; + overuse_counter_ = 0; + hypothesis_ = BandwidthUsage::kBwUnderusing; + } else { + time_over_using_ = -1; + overuse_counter_ = 0; + hypothesis_ = BandwidthUsage::kBwNormal; + } + prev_trend_ = trend; + UpdateThreshold(modified_trend, now_ms); +} + +void TrendlineEstimator::UpdateThreshold(double modified_trend, + int64_t now_ms) { + if (last_update_ms_ == -1) + last_update_ms_ = now_ms; + + if (fabs(modified_trend) > threshold_ + kMaxAdaptOffsetMs) { + // Avoid adapting the threshold to big latency spikes, caused e.g., + // by a sudden capacity drop. + last_update_ms_ = now_ms; + return; + } + + const double k = fabs(modified_trend) < threshold_ ? k_down_ : k_up_; + const int64_t kMaxTimeDeltaMs = 100; + int64_t time_delta_ms = std::min(now_ms - last_update_ms_, kMaxTimeDeltaMs); + threshold_ += k * (fabs(modified_trend) - threshold_) * time_delta_ms; + threshold_ = rtc::SafeClamp(threshold_, 6.f, 600.f); + last_update_ms_ = now_ms; +} + +} // namespace webrtc diff --git a/modules/congestion_controller/goog_cc/trendline_estimator.h b/modules/congestion_controller/goog_cc/trendline_estimator.h new file mode 100644 index 0000000..2db2903 --- /dev/null +++ b/modules/congestion_controller/goog_cc/trendline_estimator.h @@ -0,0 +1,127 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_CONGESTION_CONTROLLER_GOOG_CC_TRENDLINE_ESTIMATOR_H_ +#define MODULES_CONGESTION_CONTROLLER_GOOG_CC_TRENDLINE_ESTIMATOR_H_ + +#include +#include + +#include +#include +#include + +#include "api/network_state_predictor.h" +#include "api/transport/webrtc_key_value_config.h" +#include "modules/congestion_controller/goog_cc/delay_increase_detector_interface.h" +#include "modules/remote_bitrate_estimator/include/bwe_defines.h" +#include "rtc_base/constructor_magic.h" +#include "rtc_base/experiments/struct_parameters_parser.h" + +namespace webrtc { + +struct TrendlineEstimatorSettings { + static constexpr char kKey[] = "WebRTC-Bwe-TrendlineEstimatorSettings"; + static constexpr unsigned kDefaultTrendlineWindowSize = 20; + + TrendlineEstimatorSettings() = delete; + explicit TrendlineEstimatorSettings( + const WebRtcKeyValueConfig* key_value_config); + + // Sort the packets in the window. Should be redundant, + // but then almost no cost. + bool enable_sort = false; + + // Cap the trendline slope based on the minimum delay seen + // in the beginning_packets and end_packets respectively. + bool enable_cap = false; + unsigned beginning_packets = 7; + unsigned end_packets = 7; + double cap_uncertainty = 0.0; + + // Size (in packets) of the window. + unsigned window_size = kDefaultTrendlineWindowSize; + + std::unique_ptr Parser(); +}; + +class TrendlineEstimator : public DelayIncreaseDetectorInterface { + public: + TrendlineEstimator(const WebRtcKeyValueConfig* key_value_config, + NetworkStatePredictor* network_state_predictor); + + ~TrendlineEstimator() override; + + // Update the estimator with a new sample. The deltas should represent deltas + // between timestamp groups as defined by the InterArrival class. 
+ void Update(double recv_delta_ms, + double send_delta_ms, + int64_t send_time_ms, + int64_t arrival_time_ms, + size_t packet_size, + bool calculated_deltas) override; + + void UpdateTrendline(double recv_delta_ms, + double send_delta_ms, + int64_t send_time_ms, + int64_t arrival_time_ms, + size_t packet_size); + + BandwidthUsage State() const override; + + struct PacketTiming { + PacketTiming(double arrival_time_ms, + double smoothed_delay_ms, + double raw_delay_ms) + : arrival_time_ms(arrival_time_ms), + smoothed_delay_ms(smoothed_delay_ms), + raw_delay_ms(raw_delay_ms) {} + double arrival_time_ms; + double smoothed_delay_ms; + double raw_delay_ms; + }; + + private: + friend class GoogCcStatePrinter; + void Detect(double trend, double ts_delta, int64_t now_ms); + + void UpdateThreshold(double modified_offset, int64_t now_ms); + + // Parameters. + TrendlineEstimatorSettings settings_; + const double smoothing_coef_; + const double threshold_gain_; + // Used by the existing threshold. + int num_of_deltas_; + // Keep the arrival times small by using the change from the first packet. + int64_t first_arrival_time_ms_; + // Exponential backoff filtering. + double accumulated_delay_; + double smoothed_delay_; + // Linear least squares regression. 
+ std::deque delay_hist_; + + const double k_up_; + const double k_down_; + double overusing_time_threshold_; + double threshold_; + double prev_modified_trend_; + int64_t last_update_ms_; + double prev_trend_; + double time_over_using_; + int overuse_counter_; + BandwidthUsage hypothesis_; + BandwidthUsage hypothesis_predicted_; + NetworkStatePredictor* network_state_predictor_; + + RTC_DISALLOW_COPY_AND_ASSIGN(TrendlineEstimator); +}; +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_GOOG_CC_TRENDLINE_ESTIMATOR_H_ diff --git a/modules/congestion_controller/goog_cc/trendline_estimator_unittest.cc b/modules/congestion_controller/goog_cc/trendline_estimator_unittest.cc new file mode 100644 index 0000000..b0195ab --- /dev/null +++ b/modules/congestion_controller/goog_cc/trendline_estimator_unittest.cc @@ -0,0 +1,151 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/congestion_controller/goog_cc/trendline_estimator.h" + +#include +#include +#include + +#include "api/transport/field_trial_based_config.h" +#include "rtc_base/random.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +class PacketTimeGenerator { + public: + PacketTimeGenerator(int64_t initial_clock, double time_between_packets) + : initial_clock_(initial_clock), + time_between_packets_(time_between_packets), + packets_(0) {} + int64_t operator()() { + return initial_clock_ + time_between_packets_ * packets_++; + } + + private: + const int64_t initial_clock_; + const double time_between_packets_; + size_t packets_; +}; + +class TrendlineEstimatorTest : public testing::Test { + public: + TrendlineEstimatorTest() + : send_times(kPacketCount), + recv_times(kPacketCount), + packet_sizes(kPacketCount), + config(), + estimator(&config, nullptr), + count(1) { + std::fill(packet_sizes.begin(), packet_sizes.end(), kPacketSizeBytes); + } + + void RunTestUntilStateChange() { + RTC_DCHECK_EQ(send_times.size(), kPacketCount); + RTC_DCHECK_EQ(recv_times.size(), kPacketCount); + RTC_DCHECK_EQ(packet_sizes.size(), kPacketCount); + RTC_DCHECK_GE(count, 1); + RTC_DCHECK_LT(count, kPacketCount); + + auto initial_state = estimator.State(); + for (; count < kPacketCount; count++) { + double recv_delta = recv_times[count] - recv_times[count - 1]; + double send_delta = send_times[count] - send_times[count - 1]; + estimator.Update(recv_delta, send_delta, send_times[count], + recv_times[count], packet_sizes[count], true); + if (estimator.State() != initial_state) { + return; + } + } + } + + protected: + const size_t kPacketCount = 25; + const size_t kPacketSizeBytes = 1200; + std::vector send_times; + std::vector recv_times; + std::vector packet_sizes; + const FieldTrialBasedConfig config; + TrendlineEstimator estimator; + size_t count; +}; +} // namespace + +TEST_F(TrendlineEstimatorTest, Normal) { + PacketTimeGenerator send_time_generator(123456789 
/*initial clock*/, + 20 /*20 ms between sent packets*/); + std::generate(send_times.begin(), send_times.end(), send_time_generator); + + PacketTimeGenerator recv_time_generator(987654321 /*initial clock*/, + 20 /*delivered at the same pace*/); + std::generate(recv_times.begin(), recv_times.end(), recv_time_generator); + + EXPECT_EQ(estimator.State(), BandwidthUsage::kBwNormal); + RunTestUntilStateChange(); + EXPECT_EQ(estimator.State(), BandwidthUsage::kBwNormal); + EXPECT_EQ(count, kPacketCount); // All packets processed +} + +TEST_F(TrendlineEstimatorTest, Overusing) { + PacketTimeGenerator send_time_generator(123456789 /*initial clock*/, + 20 /*20 ms between sent packets*/); + std::generate(send_times.begin(), send_times.end(), send_time_generator); + + PacketTimeGenerator recv_time_generator(987654321 /*initial clock*/, + 1.1 * 20 /*10% slower delivery*/); + std::generate(recv_times.begin(), recv_times.end(), recv_time_generator); + + EXPECT_EQ(estimator.State(), BandwidthUsage::kBwNormal); + RunTestUntilStateChange(); + EXPECT_EQ(estimator.State(), BandwidthUsage::kBwOverusing); + RunTestUntilStateChange(); + EXPECT_EQ(estimator.State(), BandwidthUsage::kBwOverusing); + EXPECT_EQ(count, kPacketCount); // All packets processed +} + +TEST_F(TrendlineEstimatorTest, Underusing) { + PacketTimeGenerator send_time_generator(123456789 /*initial clock*/, + 20 /*20 ms between sent packets*/); + std::generate(send_times.begin(), send_times.end(), send_time_generator); + + PacketTimeGenerator recv_time_generator(987654321 /*initial clock*/, + 0.85 * 20 /*15% faster delivery*/); + std::generate(recv_times.begin(), recv_times.end(), recv_time_generator); + + EXPECT_EQ(estimator.State(), BandwidthUsage::kBwNormal); + RunTestUntilStateChange(); + EXPECT_EQ(estimator.State(), BandwidthUsage::kBwUnderusing); + RunTestUntilStateChange(); + EXPECT_EQ(estimator.State(), BandwidthUsage::kBwUnderusing); + EXPECT_EQ(count, kPacketCount); // All packets processed +} + 
+TEST_F(TrendlineEstimatorTest, IncludesSmallPacketsByDefault) { + PacketTimeGenerator send_time_generator(123456789 /*initial clock*/, + 20 /*20 ms between sent packets*/); + std::generate(send_times.begin(), send_times.end(), send_time_generator); + + PacketTimeGenerator recv_time_generator(987654321 /*initial clock*/, + 1.1 * 20 /*10% slower delivery*/); + std::generate(recv_times.begin(), recv_times.end(), recv_time_generator); + + std::fill(packet_sizes.begin(), packet_sizes.end(), 100); + + EXPECT_EQ(estimator.State(), BandwidthUsage::kBwNormal); + RunTestUntilStateChange(); + EXPECT_EQ(estimator.State(), BandwidthUsage::kBwOverusing); + RunTestUntilStateChange(); + EXPECT_EQ(estimator.State(), BandwidthUsage::kBwOverusing); + EXPECT_EQ(count, kPacketCount); // All packets processed +} + +} // namespace webrtc diff --git a/modules/congestion_controller/include/receive_side_congestion_controller.h b/modules/congestion_controller/include/receive_side_congestion_controller.h new file mode 100644 index 0000000..6cd8be3 --- /dev/null +++ b/modules/congestion_controller/include/receive_side_congestion_controller.h @@ -0,0 +1,110 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_CONGESTION_CONTROLLER_INCLUDE_RECEIVE_SIDE_CONGESTION_CONTROLLER_H_ +#define MODULES_CONGESTION_CONTROLLER_INCLUDE_RECEIVE_SIDE_CONGESTION_CONTROLLER_H_ + +#include +#include + +#include "api/transport/field_trial_based_config.h" +#include "api/transport/network_control.h" +#include "modules/include/module.h" +#include "modules/remote_bitrate_estimator/remote_estimator_proxy.h" +#include "rtc_base/constructor_magic.h" +#include "rtc_base/synchronization/mutex.h" + +namespace webrtc { +class RemoteBitrateEstimator; +class RemoteBitrateObserver; + +// This class represents the congestion control state for receive +// streams. For send side bandwidth estimation, this is simply +// relaying for each received RTP packet back to the sender. While for +// receive side bandwidth estimation, we do the estimation locally and +// send our results back to the sender. +class ReceiveSideCongestionController : public CallStatsObserver, + public Module { + public: + ReceiveSideCongestionController(Clock* clock, PacketRouter* packet_router); + ReceiveSideCongestionController( + Clock* clock, + PacketRouter* packet_router, + NetworkStateEstimator* network_state_estimator); + + ~ReceiveSideCongestionController() override {} + + virtual void OnReceivedPacket(int64_t arrival_time_ms, + size_t payload_size, + const RTPHeader& header); + + void SetSendPeriodicFeedback(bool send_periodic_feedback); + // TODO(nisse): Delete these methods, design a more specific interface. + virtual RemoteBitrateEstimator* GetRemoteBitrateEstimator(bool send_side_bwe); + virtual const RemoteBitrateEstimator* GetRemoteBitrateEstimator( + bool send_side_bwe) const; + + // Implements CallStatsObserver. + void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override; + + // This is send bitrate, used to control the rate of feedback messages. + void OnBitrateChanged(int bitrate_bps); + + // Implements Module. 
+ int64_t TimeUntilNextProcess() override; + void Process() override; + + private: + class WrappingBitrateEstimator : public RemoteBitrateEstimator { + public: + WrappingBitrateEstimator(RemoteBitrateObserver* observer, Clock* clock); + + ~WrappingBitrateEstimator() override; + + void IncomingPacket(int64_t arrival_time_ms, + size_t payload_size, + const RTPHeader& header) override; + + void Process() override; + + int64_t TimeUntilNextProcess() override; + + void OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) override; + + void RemoveStream(unsigned int ssrc) override; + + bool LatestEstimate(std::vector* ssrcs, + unsigned int* bitrate_bps) const override; + + void SetMinBitrate(int min_bitrate_bps) override; + + private: + void PickEstimatorFromHeader(const RTPHeader& header) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + void PickEstimator() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + RemoteBitrateObserver* observer_; + Clock* const clock_; + mutable Mutex mutex_; + std::unique_ptr rbe_; + bool using_absolute_send_time_; + uint32_t packets_since_absolute_send_time_; + int min_bitrate_bps_; + + RTC_DISALLOW_IMPLICIT_CONSTRUCTORS(WrappingBitrateEstimator); + }; + + const FieldTrialBasedConfig field_trial_config_; + WrappingBitrateEstimator remote_bitrate_estimator_; + RemoteEstimatorProxy remote_estimator_proxy_; +}; + +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_INCLUDE_RECEIVE_SIDE_CONGESTION_CONTROLLER_H_ diff --git a/modules/congestion_controller/pcc/BUILD.gn b/modules/congestion_controller/pcc/BUILD.gn new file mode 100644 index 0000000..2f37876 --- /dev/null +++ b/modules/congestion_controller/pcc/BUILD.gn @@ -0,0 +1,128 @@ +# Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. 
All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("../../../webrtc.gni") + +rtc_library("pcc") { + sources = [ + "pcc_factory.cc", + "pcc_factory.h", + ] + deps = [ + ":pcc_controller", + "../../../api/transport:network_control", + "../../../api/units:time_delta", + "../../../rtc_base:rtc_base_approved", + ] +} + +rtc_library("pcc_controller") { + sources = [ + "pcc_network_controller.cc", + "pcc_network_controller.h", + ] + deps = [ + ":bitrate_controller", + ":monitor_interval", + ":rtt_tracker", + "../../../api/transport:network_control", + "../../../api/units:data_rate", + "../../../api/units:data_size", + "../../../api/units:time_delta", + "../../../api/units:timestamp", + "../../../rtc_base:checks", + "../../../rtc_base:rtc_base_approved", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +rtc_library("monitor_interval") { + sources = [ + "monitor_interval.cc", + "monitor_interval.h", + ] + deps = [ + "../../../api/transport:network_control", + "../../../api/units:data_rate", + "../../../api/units:data_size", + "../../../api/units:time_delta", + "../../../api/units:timestamp", + "../../../rtc_base:rtc_base_approved", + ] +} + +rtc_library("rtt_tracker") { + sources = [ + "rtt_tracker.cc", + "rtt_tracker.h", + ] + deps = [ + "../../../api/transport:network_control", + "../../../api/units:time_delta", + "../../../api/units:timestamp", + "../../../rtc_base:rtc_base_approved", + ] +} + +rtc_library("utility_function") { + sources = [ + "utility_function.cc", + "utility_function.h", + ] + deps = [ + ":monitor_interval", + "../../../api/transport:network_control", + "../../../api/units:data_rate", + "../../../rtc_base:checks", + "../../../rtc_base:rtc_base_approved", + ] +} + +rtc_library("bitrate_controller") { + sources = [ + "bitrate_controller.cc", + "bitrate_controller.h", + ] + deps = [ + ":monitor_interval", + ":utility_function", + "../../../api/transport:network_control", 
+ "../../../api/units:data_rate", + "../../../rtc_base:rtc_base_approved", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] +} + +if (rtc_include_tests) { + rtc_library("pcc_unittests") { + testonly = true + sources = [ + "bitrate_controller_unittest.cc", + "monitor_interval_unittest.cc", + "pcc_network_controller_unittest.cc", + "rtt_tracker_unittest.cc", + "utility_function_unittest.cc", + ] + deps = [ + ":bitrate_controller", + ":monitor_interval", + ":pcc", + ":pcc_controller", + ":rtt_tracker", + ":utility_function", + "../../../api/transport:network_control", + "../../../api/units:data_rate", + "../../../api/units:data_size", + "../../../api/units:time_delta", + "../../../api/units:timestamp", + "../../../rtc_base:rtc_base_approved", + "../../../test:test_support", + "../../../test/scenario", + ] + } +} diff --git a/modules/congestion_controller/pcc/bitrate_controller.cc b/modules/congestion_controller/pcc/bitrate_controller.cc new file mode 100644 index 0000000..16b8e69 --- /dev/null +++ b/modules/congestion_controller/pcc/bitrate_controller.cc @@ -0,0 +1,139 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/congestion_controller/pcc/bitrate_controller.h" + +#include +#include +#include +#include +#include +#include + + +namespace webrtc { +namespace pcc { + +PccBitrateController::PccBitrateController(double initial_conversion_factor, + double initial_dynamic_boundary, + double dynamic_boundary_increment, + double rtt_gradient_coefficient, + double loss_coefficient, + double throughput_coefficient, + double throughput_power, + double rtt_gradient_threshold, + double delay_gradient_negative_bound) + : PccBitrateController(initial_conversion_factor, + initial_dynamic_boundary, + dynamic_boundary_increment, + std::make_unique( + rtt_gradient_coefficient, + loss_coefficient, + throughput_coefficient, + throughput_power, + rtt_gradient_threshold, + delay_gradient_negative_bound)) {} + +PccBitrateController::PccBitrateController( + double initial_conversion_factor, + double initial_dynamic_boundary, + double dynamic_boundary_increment, + std::unique_ptr utility_function) + : consecutive_boundary_adjustments_number_(0), + initial_dynamic_boundary_(initial_dynamic_boundary), + dynamic_boundary_increment_(dynamic_boundary_increment), + utility_function_(std::move(utility_function)), + step_size_adjustments_number_(0), + initial_conversion_factor_(initial_conversion_factor) {} + +PccBitrateController::~PccBitrateController() = default; + +double PccBitrateController::ComputeStepSize(double utility_gradient) { + // Computes number of consecutive step size adjustments. + if (utility_gradient > 0) { + step_size_adjustments_number_ = + std::max(step_size_adjustments_number_ + 1, 1); + } else if (utility_gradient < 0) { + step_size_adjustments_number_ = + std::min(step_size_adjustments_number_ - 1, -1); + } else { + step_size_adjustments_number_ = 0; + } + // Computes step size amplifier. 
+ int64_t step_size_amplifier = 1; + if (std::abs(step_size_adjustments_number_) <= 3) { + step_size_amplifier = + std::max(std::abs(step_size_adjustments_number_), 1); + } else { + step_size_amplifier = 2 * std::abs(step_size_adjustments_number_) - 3; + } + return step_size_amplifier * initial_conversion_factor_; +} + +double PccBitrateController::ApplyDynamicBoundary(double rate_change, + double bitrate) { + double rate_change_abs = std::abs(rate_change); + int64_t rate_change_sign = (rate_change > 0) ? 1 : -1; + if (consecutive_boundary_adjustments_number_ * rate_change_sign < 0) { + consecutive_boundary_adjustments_number_ = 0; + } + double dynamic_change_boundary = + initial_dynamic_boundary_ + + std::abs(consecutive_boundary_adjustments_number_) * + dynamic_boundary_increment_; + double boundary = bitrate * dynamic_change_boundary; + if (rate_change_abs > boundary) { + consecutive_boundary_adjustments_number_ += rate_change_sign; + return boundary * rate_change_sign; + } + // Rate change smaller than boundary. Reset boundary to the smallest possible + // that would allow the change. 
+ while (rate_change_abs <= boundary && + consecutive_boundary_adjustments_number_ * rate_change_sign > 0) { + consecutive_boundary_adjustments_number_ -= rate_change_sign; + dynamic_change_boundary = + initial_dynamic_boundary_ + + std::abs(consecutive_boundary_adjustments_number_) * + dynamic_boundary_increment_; + boundary = bitrate * dynamic_change_boundary; + } + consecutive_boundary_adjustments_number_ += rate_change_sign; + return rate_change; +} + +absl::optional<DataRate> +PccBitrateController::ComputeRateUpdateForSlowStartMode( + const PccMonitorInterval& monitor_interval) { + double utility_value = utility_function_->Compute(monitor_interval); + if (previous_utility_.has_value() && utility_value <= previous_utility_) { + return absl::nullopt; + } + previous_utility_ = utility_value; + return monitor_interval.GetTargetSendingRate(); +} + +DataRate PccBitrateController::ComputeRateUpdateForOnlineLearningMode( + const std::vector<PccMonitorInterval>& intervals, + DataRate bandwidth_estimate) { + double first_utility = utility_function_->Compute(intervals[0]); + double second_utility = utility_function_->Compute(intervals[1]); + double first_bitrate_bps = intervals[0].GetTargetSendingRate().bps(); + double second_bitrate_bps = intervals[1].GetTargetSendingRate().bps(); + double gradient = (first_utility - second_utility) / + (first_bitrate_bps - second_bitrate_bps); + double rate_change_bps = gradient * ComputeStepSize(gradient); // delta_r + rate_change_bps = + ApplyDynamicBoundary(rate_change_bps, bandwidth_estimate.bps()); + return DataRate::BitsPerSec( + std::max(0.0, bandwidth_estimate.bps() + rate_change_bps)); +} + +} // namespace pcc +} // namespace webrtc diff --git a/modules/congestion_controller/pcc/bitrate_controller.h b/modules/congestion_controller/pcc/bitrate_controller.h new file mode 100644 index 0000000..fadeea1 --- /dev/null +++ b/modules/congestion_controller/pcc/bitrate_controller.h @@ -0,0 +1,74 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. 
All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_CONGESTION_CONTROLLER_PCC_BITRATE_CONTROLLER_H_ +#define MODULES_CONGESTION_CONTROLLER_PCC_BITRATE_CONTROLLER_H_ + +#include + +#include +#include + +#include "absl/types/optional.h" +#include "api/units/data_rate.h" +#include "modules/congestion_controller/pcc/monitor_interval.h" +#include "modules/congestion_controller/pcc/utility_function.h" + +namespace webrtc { +namespace pcc { + +class PccBitrateController { + public: + PccBitrateController(double initial_conversion_factor, + double initial_dynamic_boundary, + double dynamic_boundary_increment, + double rtt_gradient_coefficient, + double loss_coefficient, + double throughput_coefficient, + double throughput_power, + double rtt_gradient_threshold, + double delay_gradient_negative_bound); + + PccBitrateController( + double initial_conversion_factor, + double initial_dynamic_boundary, + double dynamic_boundary_increment, + std::unique_ptr utility_function); + + absl::optional ComputeRateUpdateForSlowStartMode( + const PccMonitorInterval& monitor_interval); + + DataRate ComputeRateUpdateForOnlineLearningMode( + const std::vector& block, + DataRate bandwidth_estimate); + + ~PccBitrateController(); + + private: + double ApplyDynamicBoundary(double rate_change, double bitrate); + double ComputeStepSize(double utility_gradient); + + // Dynamic boundary variables: + int64_t consecutive_boundary_adjustments_number_; + const double initial_dynamic_boundary_; + const double dynamic_boundary_increment_; + + const std::unique_ptr utility_function_; + // Step Size variables: + int64_t step_size_adjustments_number_; + const double 
initial_conversion_factor_; + + absl::optional previous_utility_; +}; + +} // namespace pcc +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_PCC_BITRATE_CONTROLLER_H_ diff --git a/modules/congestion_controller/pcc/bitrate_controller_unittest.cc b/modules/congestion_controller/pcc/bitrate_controller_unittest.cc new file mode 100644 index 0000000..957d99b --- /dev/null +++ b/modules/congestion_controller/pcc/bitrate_controller_unittest.cc @@ -0,0 +1,303 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/congestion_controller/pcc/bitrate_controller.h" + +#include +#include + +#include "modules/congestion_controller/pcc/monitor_interval.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace pcc { +namespace test { +namespace { +constexpr double kInitialConversionFactor = 1; +constexpr double kInitialDynamicBoundary = 0.05; +constexpr double kDynamicBoundaryIncrement = 0.1; + +constexpr double kDelayGradientCoefficient = 900; +constexpr double kLossCoefficient = 11.35; +constexpr double kThroughputCoefficient = 500 * 1000; +constexpr double kThroughputPower = 0.99; +constexpr double kDelayGradientThreshold = 0.01; +constexpr double kDelayGradientNegativeBound = 10; + +const DataRate kTargetSendingRate = DataRate::KilobitsPerSec(300); +const double kEpsilon = 0.05; +const Timestamp kStartTime = Timestamp::Micros(0); +const TimeDelta kPacketsDelta = TimeDelta::Millis(1); +const TimeDelta kIntervalDuration = TimeDelta::Millis(1000); +const TimeDelta kDefaultRtt = TimeDelta::Millis(1000); +const DataSize kDefaultDataSize = DataSize::Bytes(100); + 
+std::vector CreatePacketResults( + const std::vector& packets_send_times, + const std::vector& packets_received_times = {}, + const std::vector& packets_sizes = {}) { + std::vector packet_results; + PacketResult packet_result; + SentPacket sent_packet; + for (size_t i = 0; i < packets_send_times.size(); ++i) { + sent_packet.send_time = packets_send_times[i]; + if (packets_sizes.empty()) { + sent_packet.size = kDefaultDataSize; + } else { + sent_packet.size = packets_sizes[i]; + } + packet_result.sent_packet = sent_packet; + if (packets_received_times.empty()) { + packet_result.receive_time = packets_send_times[i] + kDefaultRtt; + } else { + packet_result.receive_time = packets_received_times[i]; + } + packet_results.push_back(packet_result); + } + return packet_results; +} + +class MockUtilityFunction : public PccUtilityFunctionInterface { + public: + MOCK_METHOD(double, + Compute, + (const PccMonitorInterval& monitor_interval), + (const, override)); +}; + +} // namespace + +TEST(PccBitrateControllerTest, IncreaseRateWhenNoChangesForTestBitrates) { + PccBitrateController bitrate_controller( + kInitialConversionFactor, kInitialDynamicBoundary, + kDynamicBoundaryIncrement, kDelayGradientCoefficient, kLossCoefficient, + kThroughputCoefficient, kThroughputPower, kDelayGradientThreshold, + kDelayGradientNegativeBound); + VivaceUtilityFunction utility_function( + kDelayGradientCoefficient, kLossCoefficient, kThroughputCoefficient, + kThroughputPower, kDelayGradientThreshold, kDelayGradientNegativeBound); + std::vector monitor_block{ + PccMonitorInterval(kTargetSendingRate * (1 + kEpsilon), kStartTime, + kIntervalDuration), + PccMonitorInterval(kTargetSendingRate * (1 - kEpsilon), + kStartTime + kIntervalDuration, kIntervalDuration)}; + monitor_block[0].OnPacketsFeedback( + CreatePacketResults({kStartTime + kPacketsDelta, + kStartTime + kIntervalDuration + kPacketsDelta, + kStartTime + 3 * kIntervalDuration}, + {}, {})); + monitor_block[1].OnPacketsFeedback( + 
CreatePacketResults({kStartTime + kPacketsDelta, + kStartTime + kIntervalDuration + kPacketsDelta, + kStartTime + 3 * kIntervalDuration}, + {}, {})); + // For both of the monitor intervals there were no change in rtt gradient + // and in packet loss. Since the only difference is in the sending rate, + // the higher sending rate should be chosen by congestion controller. + EXPECT_GT(bitrate_controller + .ComputeRateUpdateForOnlineLearningMode(monitor_block, + kTargetSendingRate) + .bps(), + kTargetSendingRate.bps()); +} + +TEST(PccBitrateControllerTest, NoChangesWhenUtilityFunctionDoesntChange) { + std::unique_ptr mock_utility_function = + std::make_unique(); + EXPECT_CALL(*mock_utility_function, Compute(::testing::_)) + .Times(2) + .WillOnce(::testing::Return(100)) + .WillOnce(::testing::Return(100)); + + PccBitrateController bitrate_controller( + kInitialConversionFactor, kInitialDynamicBoundary, + kDynamicBoundaryIncrement, std::move(mock_utility_function)); + std::vector monitor_block{ + PccMonitorInterval(kTargetSendingRate * (1 + kEpsilon), kStartTime, + kIntervalDuration), + PccMonitorInterval(kTargetSendingRate * (1 - kEpsilon), + kStartTime + kIntervalDuration, kIntervalDuration)}; + // To complete collecting feedback within monitor intervals. + monitor_block[0].OnPacketsFeedback( + CreatePacketResults({kStartTime + 3 * kIntervalDuration}, {}, {})); + monitor_block[1].OnPacketsFeedback( + CreatePacketResults({kStartTime + 3 * kIntervalDuration}, {}, {})); + // Because we don't have any packets inside of monitor intervals, utility + // function should be zero for both of them and the sending rate should not + // change. 
+ EXPECT_EQ(bitrate_controller + .ComputeRateUpdateForOnlineLearningMode(monitor_block, + kTargetSendingRate) + .bps(), + kTargetSendingRate.bps()); +} + +TEST(PccBitrateControllerTest, NoBoundaryWhenSmallGradient) { + std::unique_ptr mock_utility_function = + std::make_unique(); + constexpr double kFirstMonitorIntervalUtility = 0; + const double kSecondMonitorIntervalUtility = + 2 * kTargetSendingRate.bps() * kEpsilon; + + EXPECT_CALL(*mock_utility_function, Compute(::testing::_)) + .Times(2) + .WillOnce(::testing::Return(kFirstMonitorIntervalUtility)) + .WillOnce(::testing::Return(kSecondMonitorIntervalUtility)); + + PccBitrateController bitrate_controller( + kInitialConversionFactor, kInitialDynamicBoundary, + kDynamicBoundaryIncrement, std::move(mock_utility_function)); + std::vector monitor_block{ + PccMonitorInterval(kTargetSendingRate * (1 + kEpsilon), kStartTime, + kIntervalDuration), + PccMonitorInterval(kTargetSendingRate * (1 - kEpsilon), + kStartTime + kIntervalDuration, kIntervalDuration)}; + // To complete collecting feedback within monitor intervals. + monitor_block[0].OnPacketsFeedback( + CreatePacketResults({kStartTime + 3 * kIntervalDuration}, {}, {})); + monitor_block[1].OnPacketsFeedback( + CreatePacketResults({kStartTime + 3 * kIntervalDuration}, {}, {})); + + double gradient = + (kFirstMonitorIntervalUtility - kSecondMonitorIntervalUtility) / + (kTargetSendingRate.bps() * 2 * kEpsilon); + // When the gradient is small we don't hit the dynamic boundary. 
+ EXPECT_EQ(bitrate_controller + .ComputeRateUpdateForOnlineLearningMode(monitor_block, + kTargetSendingRate) + .bps(), + kTargetSendingRate.bps() + kInitialConversionFactor * gradient); +} + +TEST(PccBitrateControllerTest, FaceBoundaryWhenLargeGradient) { + std::unique_ptr mock_utility_function = + std::make_unique(); + constexpr double kFirstMonitorIntervalUtility = 0; + const double kSecondMonitorIntervalUtility = + 10 * kInitialDynamicBoundary * kTargetSendingRate.bps() * 2 * + kTargetSendingRate.bps() * kEpsilon; + + EXPECT_CALL(*mock_utility_function, Compute(::testing::_)) + .Times(4) + .WillOnce(::testing::Return(kFirstMonitorIntervalUtility)) + .WillOnce(::testing::Return(kSecondMonitorIntervalUtility)) + .WillOnce(::testing::Return(kFirstMonitorIntervalUtility)) + .WillOnce(::testing::Return(kSecondMonitorIntervalUtility)); + + PccBitrateController bitrate_controller( + kInitialConversionFactor, kInitialDynamicBoundary, + kDynamicBoundaryIncrement, std::move(mock_utility_function)); + std::vector monitor_block{ + PccMonitorInterval(kTargetSendingRate * (1 + kEpsilon), kStartTime, + kIntervalDuration), + PccMonitorInterval(kTargetSendingRate * (1 - kEpsilon), + kStartTime + kIntervalDuration, kIntervalDuration)}; + // To complete collecting feedback within monitor intervals. + monitor_block[0].OnPacketsFeedback( + CreatePacketResults({kStartTime + 3 * kIntervalDuration}, {}, {})); + monitor_block[1].OnPacketsFeedback( + CreatePacketResults({kStartTime + 3 * kIntervalDuration}, {}, {})); + // The utility function gradient is too big and we hit the dynamic boundary. + EXPECT_EQ(bitrate_controller.ComputeRateUpdateForOnlineLearningMode( + monitor_block, kTargetSendingRate), + kTargetSendingRate * (1 - kInitialDynamicBoundary)); + // For the second time we hit the dynamic boundary in the same direction, the + // boundary should increase. 
+ EXPECT_EQ(bitrate_controller + .ComputeRateUpdateForOnlineLearningMode(monitor_block, + kTargetSendingRate) + .bps(), + kTargetSendingRate.bps() * + (1 - kInitialDynamicBoundary - kDynamicBoundaryIncrement)); +} + +TEST(PccBitrateControllerTest, SlowStartMode) { + std::unique_ptr mock_utility_function = + std::make_unique(); + constexpr double kFirstUtilityFunction = 1000; + EXPECT_CALL(*mock_utility_function, Compute(::testing::_)) + .Times(4) + // For first 3 calls we expect to stay in the SLOW_START mode and double + // the sending rate since the utility function increases its value. For + // the last call utility function decreases its value, this means that + // we should not double the sending rate and exit SLOW_START mode. + .WillOnce(::testing::Return(kFirstUtilityFunction)) + .WillOnce(::testing::Return(kFirstUtilityFunction + 1)) + .WillOnce(::testing::Return(kFirstUtilityFunction + 2)) + .WillOnce(::testing::Return(kFirstUtilityFunction + 1)); + + PccBitrateController bitrate_controller( + kInitialConversionFactor, kInitialDynamicBoundary, + kDynamicBoundaryIncrement, std::move(mock_utility_function)); + std::vector monitor_block{PccMonitorInterval( + 2 * kTargetSendingRate, kStartTime, kIntervalDuration)}; + // To complete collecting feedback within monitor intervals. 
+ monitor_block[0].OnPacketsFeedback( + CreatePacketResults({kStartTime + 3 * kIntervalDuration}, {}, {})); + EXPECT_EQ( + bitrate_controller.ComputeRateUpdateForSlowStartMode(monitor_block[0]), + kTargetSendingRate * 2); + EXPECT_EQ( + bitrate_controller.ComputeRateUpdateForSlowStartMode(monitor_block[0]), + kTargetSendingRate * 2); + EXPECT_EQ( + bitrate_controller.ComputeRateUpdateForSlowStartMode(monitor_block[0]), + kTargetSendingRate * 2); + EXPECT_EQ( + bitrate_controller.ComputeRateUpdateForSlowStartMode(monitor_block[0]), + absl::nullopt); +} + +TEST(PccBitrateControllerTest, StepSizeIncrease) { + std::unique_ptr mock_utility_function = + std::make_unique(); + constexpr double kFirstMiUtilityFunction = 0; + const double kSecondMiUtilityFunction = + 2 * kTargetSendingRate.bps() * kEpsilon; + + EXPECT_CALL(*mock_utility_function, Compute(::testing::_)) + .Times(4) + .WillOnce(::testing::Return(kFirstMiUtilityFunction)) + .WillOnce(::testing::Return(kSecondMiUtilityFunction)) + .WillOnce(::testing::Return(kFirstMiUtilityFunction)) + .WillOnce(::testing::Return(kSecondMiUtilityFunction)); + std::vector monitor_block{ + PccMonitorInterval(kTargetSendingRate * (1 + kEpsilon), kStartTime, + kIntervalDuration), + PccMonitorInterval(kTargetSendingRate * (1 - kEpsilon), + kStartTime + kIntervalDuration, kIntervalDuration)}; + // To complete collecting feedback within monitor intervals. + monitor_block[0].OnPacketsFeedback( + CreatePacketResults({kStartTime + 3 * kIntervalDuration}, {}, {})); + monitor_block[1].OnPacketsFeedback( + CreatePacketResults({kStartTime + 3 * kIntervalDuration}, {}, {})); + + double gradient = (kFirstMiUtilityFunction - kSecondMiUtilityFunction) / + (kTargetSendingRate.bps() * 2 * kEpsilon); + PccBitrateController bitrate_controller( + kInitialConversionFactor, kInitialDynamicBoundary, + kDynamicBoundaryIncrement, std::move(mock_utility_function)); + // If we are moving in the same direction - the step size should increase. 
+ EXPECT_EQ(bitrate_controller + .ComputeRateUpdateForOnlineLearningMode(monitor_block, + kTargetSendingRate) + .bps(), + kTargetSendingRate.bps() + kInitialConversionFactor * gradient); + EXPECT_EQ(bitrate_controller + .ComputeRateUpdateForOnlineLearningMode(monitor_block, + kTargetSendingRate) + .bps(), + kTargetSendingRate.bps() + 2 * kInitialConversionFactor * gradient); +} + +} // namespace test +} // namespace pcc +} // namespace webrtc diff --git a/modules/congestion_controller/pcc/monitor_interval.cc b/modules/congestion_controller/pcc/monitor_interval.cc new file mode 100644 index 0000000..c8efd5b --- /dev/null +++ b/modules/congestion_controller/pcc/monitor_interval.cc @@ -0,0 +1,138 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/congestion_controller/pcc/monitor_interval.h" + +#include + +#include + +#include "rtc_base/logging.h" + +namespace webrtc { +namespace pcc { + +PccMonitorInterval::PccMonitorInterval(DataRate target_sending_rate, + Timestamp start_time, + TimeDelta duration) + : target_sending_rate_(target_sending_rate), + start_time_(start_time), + interval_duration_(duration), + received_packets_size_(DataSize::Zero()), + feedback_collection_done_(false) {} + +PccMonitorInterval::~PccMonitorInterval() = default; + +PccMonitorInterval::PccMonitorInterval(const PccMonitorInterval& other) = + default; + +void PccMonitorInterval::OnPacketsFeedback( + const std::vector& packets_results) { + for (const PacketResult& packet_result : packets_results) { + if (packet_result.sent_packet.send_time <= start_time_) { + continue; + } + // Here we assume that if some packets are reordered with packets sent + // after the end of the monitor interval, then they are lost. (Otherwise + // it is not clear how long should we wait for packets feedback to arrive). + if (packet_result.sent_packet.send_time > + start_time_ + interval_duration_) { + feedback_collection_done_ = true; + return; + } + if (packet_result.receive_time.IsInfinite()) { + lost_packets_sent_time_.push_back(packet_result.sent_packet.send_time); + } else { + received_packets_.push_back( + {packet_result.receive_time - packet_result.sent_packet.send_time, + packet_result.sent_packet.send_time}); + received_packets_size_ += packet_result.sent_packet.size; + } + } +} + +// For the formula used in computations see formula for "slope" in the second +// method: +// https://www.johndcook.com/blog/2008/10/20/comparing-two-ways-to-fit-a-line-to-data/ +double PccMonitorInterval::ComputeDelayGradient( + double delay_gradient_threshold) const { + // Early return to prevent division by 0 in case all packets are sent at the + // same time. 
+ if (received_packets_.empty() || received_packets_.front().sent_time == + received_packets_.back().sent_time) { + return 0; + } + double sum_times = 0; + double sum_delays = 0; + for (const ReceivedPacket& packet : received_packets_) { + double time_delta_us = + (packet.sent_time - received_packets_[0].sent_time).us(); + double delay = packet.delay.us(); + sum_times += time_delta_us; + sum_delays += delay; + } + double sum_squared_scaled_time_deltas = 0; + double sum_scaled_time_delta_dot_delay = 0; + for (const ReceivedPacket& packet : received_packets_) { + double time_delta_us = + (packet.sent_time - received_packets_[0].sent_time).us(); + double delay = packet.delay.us(); + double scaled_time_delta_us = + time_delta_us - sum_times / received_packets_.size(); + sum_squared_scaled_time_deltas += + scaled_time_delta_us * scaled_time_delta_us; + sum_scaled_time_delta_dot_delay += scaled_time_delta_us * delay; + } + double rtt_gradient = + sum_scaled_time_delta_dot_delay / sum_squared_scaled_time_deltas; + if (std::abs(rtt_gradient) < delay_gradient_threshold) + rtt_gradient = 0; + return rtt_gradient; +} + +bool PccMonitorInterval::IsFeedbackCollectionDone() const { + return feedback_collection_done_; +} + +Timestamp PccMonitorInterval::GetEndTime() const { + return start_time_ + interval_duration_; +} + +double PccMonitorInterval::GetLossRate() const { + size_t packets_lost = lost_packets_sent_time_.size(); + size_t packets_received = received_packets_.size(); + if (packets_lost == 0) + return 0; + return static_cast(packets_lost) / (packets_lost + packets_received); +} + +DataRate PccMonitorInterval::GetTargetSendingRate() const { + return target_sending_rate_; +} + +DataRate PccMonitorInterval::GetTransmittedPacketsRate() const { + if (received_packets_.empty()) { + return target_sending_rate_; + } + Timestamp receive_time_of_first_packet = + received_packets_.front().sent_time + received_packets_.front().delay; + Timestamp receive_time_of_last_packet = + 
received_packets_.back().sent_time + received_packets_.back().delay; + if (receive_time_of_first_packet == receive_time_of_last_packet) { + RTC_LOG(LS_WARNING) + << "All packets in monitor interval were received at the same time."; + return target_sending_rate_; + } + return received_packets_size_ / + (receive_time_of_last_packet - receive_time_of_first_packet); +} + +} // namespace pcc +} // namespace webrtc diff --git a/modules/congestion_controller/pcc/monitor_interval.h b/modules/congestion_controller/pcc/monitor_interval.h new file mode 100644 index 0000000..51bd0f0 --- /dev/null +++ b/modules/congestion_controller/pcc/monitor_interval.h @@ -0,0 +1,71 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_CONGESTION_CONTROLLER_PCC_MONITOR_INTERVAL_H_ +#define MODULES_CONGESTION_CONTROLLER_PCC_MONITOR_INTERVAL_H_ + +#include + +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" + +namespace webrtc { +namespace pcc { + +// PCC divides time into consecutive monitor intervals which are used to test +// consequences for performance of sending at a certain rate. +class PccMonitorInterval { + public: + PccMonitorInterval(DataRate target_sending_rate, + Timestamp start_time, + TimeDelta duration); + ~PccMonitorInterval(); + PccMonitorInterval(const PccMonitorInterval& other); + void OnPacketsFeedback(const std::vector& packets_results); + // Returns true if got complete information about packets. 
+ // Notice, this only happens when received feedback about the first packet + // which were sent after the end of the monitor interval. If such event + // doesn't occur, we don't mind anyway and stay in the same state. + bool IsFeedbackCollectionDone() const; + Timestamp GetEndTime() const; + + double GetLossRate() const; + // Estimates the gradient using linear regression on the 2-dimensional + // dataset (sampled packets delay, time of sampling). + double ComputeDelayGradient(double delay_gradient_threshold) const; + DataRate GetTargetSendingRate() const; + // How fast receiving side gets packets. + DataRate GetTransmittedPacketsRate() const; + + private: + struct ReceivedPacket { + TimeDelta delay; + Timestamp sent_time; + }; + // Target bitrate used to generate and pace the outgoing packets. + // Actually sent bitrate might not match the target exactly. + DataRate target_sending_rate_; + // Start time is not included into interval while end time is included. + Timestamp start_time_; + TimeDelta interval_duration_; + // Vectors below updates while receiving feedback. + std::vector received_packets_; + std::vector lost_packets_sent_time_; + DataSize received_packets_size_; + bool feedback_collection_done_; +}; + +} // namespace pcc +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_PCC_MONITOR_INTERVAL_H_ diff --git a/modules/congestion_controller/pcc/monitor_interval_unittest.cc b/modules/congestion_controller/pcc/monitor_interval_unittest.cc new file mode 100644 index 0000000..aaff57b --- /dev/null +++ b/modules/congestion_controller/pcc/monitor_interval_unittest.cc @@ -0,0 +1,190 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/congestion_controller/pcc/monitor_interval.h" + +#include + +#include "test/gtest.h" + +namespace webrtc { +namespace pcc { +namespace test { +namespace { +const DataRate kTargetSendingRate = DataRate::KilobitsPerSec(300); +const Timestamp kStartTime = Timestamp::Micros(0); +const TimeDelta kPacketsDelta = TimeDelta::Millis(1); +const TimeDelta kIntervalDuration = TimeDelta::Millis(100); +const TimeDelta kDefaultDelay = TimeDelta::Millis(100); +const DataSize kDefaultPacketSize = DataSize::Bytes(100); +constexpr double kDelayGradientThreshold = 0.01; + +std::vector CreatePacketResults( + const std::vector& packets_send_times, + const std::vector& packets_received_times = {}, + const std::vector& packets_sizes = {}) { + std::vector packet_results; + for (size_t i = 0; i < packets_send_times.size(); ++i) { + SentPacket sent_packet; + sent_packet.send_time = packets_send_times[i]; + if (packets_sizes.empty()) { + sent_packet.size = kDefaultPacketSize; + } else { + sent_packet.size = packets_sizes[i]; + } + PacketResult packet_result; + packet_result.sent_packet = sent_packet; + if (packets_received_times.empty()) { + packet_result.receive_time = packets_send_times[i] + kDefaultDelay; + } else { + packet_result.receive_time = packets_received_times[i]; + } + packet_results.push_back(packet_result); + } + return packet_results; +} + +} // namespace + +TEST(PccMonitorIntervalTest, InitialValuesAreEqualToOnesSetInConstructor) { + PccMonitorInterval interval{kTargetSendingRate, kStartTime, + kIntervalDuration}; + EXPECT_EQ(interval.IsFeedbackCollectionDone(), false); + EXPECT_EQ(interval.GetEndTime(), kStartTime + kIntervalDuration); + EXPECT_EQ(interval.GetTargetSendingRate(), kTargetSendingRate); +} + +TEST(PccMonitorIntervalTest, IndicatesDoneWhenFeedbackReceivedAfterInterval) { + PccMonitorInterval interval{kTargetSendingRate, kStartTime, + 
kIntervalDuration}; + interval.OnPacketsFeedback(CreatePacketResults({kStartTime})); + EXPECT_EQ(interval.IsFeedbackCollectionDone(), false); + interval.OnPacketsFeedback( + CreatePacketResults({kStartTime, kStartTime + kIntervalDuration})); + EXPECT_EQ(interval.IsFeedbackCollectionDone(), false); + interval.OnPacketsFeedback(CreatePacketResults( + {kStartTime + kIntervalDuration, kStartTime + 2 * kIntervalDuration})); + EXPECT_EQ(interval.IsFeedbackCollectionDone(), true); +} + +TEST(PccMonitorIntervalTest, LossRateIsOneThirdIfLostOnePacketOutOfThree) { + PccMonitorInterval interval{kTargetSendingRate, kStartTime, + kIntervalDuration}; + std::vector start_times = { + kStartTime, kStartTime + 0.1 * kIntervalDuration, + kStartTime + 0.5 * kIntervalDuration, kStartTime + kIntervalDuration, + kStartTime + 2 * kIntervalDuration}; + std::vector end_times = { + kStartTime + 2 * kIntervalDuration, kStartTime + 2 * kIntervalDuration, + Timestamp::PlusInfinity(), kStartTime + 2 * kIntervalDuration, + kStartTime + 4 * kIntervalDuration}; + std::vector packet_sizes = { + kDefaultPacketSize, 2 * kDefaultPacketSize, 3 * kDefaultPacketSize, + 4 * kDefaultPacketSize, 5 * kDefaultPacketSize}; + std::vector packet_results = + CreatePacketResults(start_times, end_times, packet_sizes); + interval.OnPacketsFeedback(packet_results); + EXPECT_EQ(interval.IsFeedbackCollectionDone(), true); + + EXPECT_DOUBLE_EQ(interval.GetLossRate(), 1. 
/ 3); +} + +TEST(PccMonitorIntervalTest, DelayGradientIsZeroIfNoChangeInPacketDelay) { + PccMonitorInterval monitor_interval(kTargetSendingRate, kStartTime, + kIntervalDuration); + monitor_interval.OnPacketsFeedback(CreatePacketResults( + {kStartTime + kPacketsDelta, kStartTime + 2 * kPacketsDelta, + kStartTime + 3 * kPacketsDelta, kStartTime + 2 * kIntervalDuration}, + {kStartTime + kDefaultDelay, Timestamp::PlusInfinity(), + kStartTime + kDefaultDelay + 2 * kPacketsDelta, + Timestamp::PlusInfinity()}, + {})); + // Delay gradient should be zero, because both received packets have the + // same one way delay. + EXPECT_DOUBLE_EQ( + monitor_interval.ComputeDelayGradient(kDelayGradientThreshold), 0); +} + +TEST(PccMonitorIntervalTest, + DelayGradientIsZeroWhenOnePacketSentInMonitorInterval) { + PccMonitorInterval monitor_interval(kTargetSendingRate, kStartTime, + kIntervalDuration); + monitor_interval.OnPacketsFeedback(CreatePacketResults( + {kStartTime + kPacketsDelta, kStartTime + 2 * kIntervalDuration}, + {kStartTime + kDefaultDelay, kStartTime + 3 * kIntervalDuration}, {})); + // Only one received packet belongs to the monitor_interval, delay gradient + // should be zero in this case. 
+ EXPECT_DOUBLE_EQ( + monitor_interval.ComputeDelayGradient(kDelayGradientThreshold), 0); +} + +TEST(PccMonitorIntervalTest, DelayGradientIsOne) { + PccMonitorInterval monitor_interval(kTargetSendingRate, kStartTime, + kIntervalDuration); + monitor_interval.OnPacketsFeedback(CreatePacketResults( + {kStartTime + kPacketsDelta, kStartTime + 2 * kPacketsDelta, + kStartTime + 3 * kPacketsDelta, kStartTime + 3 * kIntervalDuration}, + {kStartTime + kDefaultDelay, Timestamp::PlusInfinity(), + kStartTime + 4 * kPacketsDelta + kDefaultDelay, + kStartTime + 3 * kIntervalDuration}, + {})); + EXPECT_DOUBLE_EQ( + monitor_interval.ComputeDelayGradient(kDelayGradientThreshold), 1); +} + +TEST(PccMonitorIntervalTest, DelayGradientIsMinusOne) { + PccMonitorInterval monitor_interval(kTargetSendingRate, kStartTime, + kIntervalDuration); + monitor_interval.OnPacketsFeedback(CreatePacketResults( + {kStartTime + kPacketsDelta, kStartTime + 2 * kPacketsDelta, + kStartTime + 5 * kPacketsDelta, kStartTime + 2 * kIntervalDuration}, + {kStartTime + kDefaultDelay, Timestamp::PlusInfinity(), + kStartTime + kDefaultDelay, kStartTime + 3 * kIntervalDuration}, + {})); + EXPECT_DOUBLE_EQ( + monitor_interval.ComputeDelayGradient(kDelayGradientThreshold), -1); +} + +TEST(PccMonitorIntervalTest, + DelayGradientIsZeroIfItSmallerWhenGradientThreshold) { + PccMonitorInterval monitor_interval(kTargetSendingRate, kStartTime, + kIntervalDuration); + monitor_interval.OnPacketsFeedback(CreatePacketResults( + {kStartTime + kPacketsDelta, kStartTime + kPacketsDelta, + kStartTime + 102 * kPacketsDelta, kStartTime + 2 * kIntervalDuration}, + {kStartTime + kDefaultDelay, Timestamp::PlusInfinity(), + kStartTime + kDefaultDelay + kPacketsDelta, + kStartTime + 3 * kIntervalDuration}, + {})); + // Delay gradient is less than 0.01 hence should be treated as zero. 
+ EXPECT_DOUBLE_EQ( + monitor_interval.ComputeDelayGradient(kDelayGradientThreshold), 0); +} + +TEST(PccMonitorIntervalTest, + DelayGradientIsZeroWhenAllPacketsSentAtTheSameTime) { + PccMonitorInterval monitor_interval(kTargetSendingRate, kStartTime, + kIntervalDuration); + monitor_interval.OnPacketsFeedback(CreatePacketResults( + {kStartTime + kPacketsDelta, kStartTime + kPacketsDelta, + kStartTime + kPacketsDelta, kStartTime + 2 * kIntervalDuration}, + {kStartTime + kDefaultDelay, Timestamp::PlusInfinity(), + kStartTime + kDefaultDelay + kPacketsDelta, + kStartTime + 3 * kIntervalDuration}, + {})); + // If all packets were sent at the same time, then delay gradient should be + // zero. + EXPECT_DOUBLE_EQ( + monitor_interval.ComputeDelayGradient(kDelayGradientThreshold), 0); +} + +} // namespace test +} // namespace pcc +} // namespace webrtc diff --git a/modules/congestion_controller/pcc/pcc_factory.cc b/modules/congestion_controller/pcc/pcc_factory.cc new file mode 100644 index 0000000..c35c6e8 --- /dev/null +++ b/modules/congestion_controller/pcc/pcc_factory.cc @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/congestion_controller/pcc/pcc_factory.h" + +#include + +#include "modules/congestion_controller/pcc/pcc_network_controller.h" + +namespace webrtc { + +PccNetworkControllerFactory::PccNetworkControllerFactory() {} + +std::unique_ptr PccNetworkControllerFactory::Create( + NetworkControllerConfig config) { + return std::make_unique(config); +} + +TimeDelta PccNetworkControllerFactory::GetProcessInterval() const { + return TimeDelta::PlusInfinity(); +} + +} // namespace webrtc diff --git a/modules/congestion_controller/pcc/pcc_factory.h b/modules/congestion_controller/pcc/pcc_factory.h new file mode 100644 index 0000000..bb70d7a --- /dev/null +++ b/modules/congestion_controller/pcc/pcc_factory.h @@ -0,0 +1,30 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_CONGESTION_CONTROLLER_PCC_PCC_FACTORY_H_ +#define MODULES_CONGESTION_CONTROLLER_PCC_PCC_FACTORY_H_ + +#include + +#include "api/transport/network_control.h" +#include "api/units/time_delta.h" + +namespace webrtc { + +class PccNetworkControllerFactory : public NetworkControllerFactoryInterface { + public: + PccNetworkControllerFactory(); + std::unique_ptr Create( + NetworkControllerConfig config) override; + TimeDelta GetProcessInterval() const override; +}; +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_PCC_PCC_FACTORY_H_ diff --git a/modules/congestion_controller/pcc/pcc_network_controller.cc b/modules/congestion_controller/pcc/pcc_network_controller.cc new file mode 100644 index 0000000..8653470 --- /dev/null +++ b/modules/congestion_controller/pcc/pcc_network_controller.cc @@ -0,0 +1,391 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/congestion_controller/pcc/pcc_network_controller.h" + +#include + +#include "absl/types/optional.h" +#include "api/units/data_size.h" +#include "rtc_base/checks.h" + +namespace webrtc { +namespace pcc { +namespace { +constexpr int64_t kInitialRttMs = 200; +constexpr int64_t kInitialBandwidthKbps = 300; +constexpr double kMonitorIntervalDurationRatio = 1; +constexpr double kDefaultSamplingStep = 0.05; +constexpr double kTimeoutRatio = 2; +constexpr double kAlphaForRtt = 0.9; +constexpr double kSlowStartModeIncrease = 1.5; + +constexpr double kAlphaForPacketInterval = 0.9; +constexpr int64_t kMinPacketsNumberPerInterval = 20; +const TimeDelta kMinDurationOfMonitorInterval = TimeDelta::Millis(50); +const TimeDelta kStartupDuration = TimeDelta::Millis(500); +constexpr double kMinRateChangeBps = 4000; +constexpr DataRate kMinRateHaveMultiplicativeRateChange = DataRate::BitsPerSec( + static_cast(kMinRateChangeBps / kDefaultSamplingStep)); + +// Bitrate controller constants. +constexpr double kInitialConversionFactor = 5; +constexpr double kInitialDynamicBoundary = 0.1; +constexpr double kDynamicBoundaryIncrement = 0.1; +// Utility function parameters. 
+constexpr double kRttGradientCoefficientBps = 0.005; +constexpr double kLossCoefficientBps = 10; +constexpr double kThroughputCoefficient = 0.001; +constexpr double kThroughputPower = 0.9; +constexpr double kRttGradientThreshold = 0.01; +constexpr double kDelayGradientNegativeBound = 0.1; + +constexpr int64_t kNumberOfPacketsToKeep = 20; +const uint64_t kRandomSeed = 100; +} // namespace + +PccNetworkController::PccNetworkController(NetworkControllerConfig config) + : start_time_(Timestamp::PlusInfinity()), + last_sent_packet_time_(Timestamp::PlusInfinity()), + smoothed_packets_sending_interval_(TimeDelta::Zero()), + mode_(Mode::kStartup), + default_bandwidth_(DataRate::KilobitsPerSec(kInitialBandwidthKbps)), + bandwidth_estimate_(default_bandwidth_), + rtt_tracker_(TimeDelta::Millis(kInitialRttMs), kAlphaForRtt), + monitor_interval_timeout_(TimeDelta::Millis(kInitialRttMs) * + kTimeoutRatio), + monitor_interval_length_strategy_(MonitorIntervalLengthStrategy::kFixed), + monitor_interval_duration_ratio_(kMonitorIntervalDurationRatio), + sampling_step_(kDefaultSamplingStep), + monitor_interval_timeout_ratio_(kTimeoutRatio), + min_packets_number_per_interval_(kMinPacketsNumberPerInterval), + bitrate_controller_(kInitialConversionFactor, + kInitialDynamicBoundary, + kDynamicBoundaryIncrement, + kRttGradientCoefficientBps, + kLossCoefficientBps, + kThroughputCoefficient, + kThroughputPower, + kRttGradientThreshold, + kDelayGradientNegativeBound), + monitor_intervals_duration_(TimeDelta::Zero()), + complete_feedback_monitor_interval_number_(0), + random_generator_(kRandomSeed) { + if (config.constraints.starting_rate) { + default_bandwidth_ = *config.constraints.starting_rate; + bandwidth_estimate_ = default_bandwidth_; + } +} + +PccNetworkController::~PccNetworkController() {} + +NetworkControlUpdate PccNetworkController::CreateRateUpdate( + Timestamp at_time) const { + DataRate sending_rate = DataRate::Zero(); + if (monitor_intervals_.empty() || + 
(monitor_intervals_.size() >= monitor_intervals_bitrates_.size() && + at_time >= monitor_intervals_.back().GetEndTime())) { + sending_rate = bandwidth_estimate_; + } else { + sending_rate = monitor_intervals_.back().GetTargetSendingRate(); + } + // Set up config when sending rate is computed. + NetworkControlUpdate update; + + // Set up target rate to encoder. + TargetTransferRate target_rate_msg; + target_rate_msg.at_time = at_time; + target_rate_msg.network_estimate.at_time = at_time; + target_rate_msg.network_estimate.round_trip_time = rtt_tracker_.GetRtt(); + // TODO(koloskova): Add correct estimate. + target_rate_msg.network_estimate.loss_rate_ratio = 0; + target_rate_msg.network_estimate.bwe_period = + monitor_interval_duration_ratio_ * rtt_tracker_.GetRtt(); + + target_rate_msg.target_rate = sending_rate; + update.target_rate = target_rate_msg; + + // Set up pacing/padding target rate. + PacerConfig pacer_config; + pacer_config.at_time = at_time; + pacer_config.time_window = TimeDelta::Millis(1); + pacer_config.data_window = sending_rate * pacer_config.time_window; + pacer_config.pad_window = sending_rate * pacer_config.time_window; + + update.pacer_config = pacer_config; + return update; +} + +NetworkControlUpdate PccNetworkController::OnSentPacket(SentPacket msg) { + // Start new monitor interval if previous has finished. + // Monitor interval is initialized in OnProcessInterval function. 
+ if (start_time_.IsInfinite()) { + start_time_ = msg.send_time; + monitor_intervals_duration_ = kStartupDuration; + monitor_intervals_bitrates_ = {bandwidth_estimate_}; + monitor_intervals_.emplace_back(bandwidth_estimate_, msg.send_time, + monitor_intervals_duration_); + complete_feedback_monitor_interval_number_ = 0; + } + if (last_sent_packet_time_.IsFinite()) { + smoothed_packets_sending_interval_ = + (msg.send_time - last_sent_packet_time_) * kAlphaForPacketInterval + + (1 - kAlphaForPacketInterval) * smoothed_packets_sending_interval_; + } + last_sent_packet_time_ = msg.send_time; + if (!monitor_intervals_.empty() && + msg.send_time >= monitor_intervals_.back().GetEndTime() && + monitor_intervals_bitrates_.size() > monitor_intervals_.size()) { + // Start new monitor interval. + monitor_intervals_.emplace_back( + monitor_intervals_bitrates_[monitor_intervals_.size()], msg.send_time, + monitor_intervals_duration_); + } + if (IsTimeoutExpired(msg.send_time)) { + DataSize received_size = DataSize::Zero(); + for (size_t i = 1; i < last_received_packets_.size(); ++i) { + received_size += last_received_packets_[i].sent_packet.size; + } + TimeDelta sending_time = TimeDelta::Zero(); + if (last_received_packets_.size() > 0) + sending_time = last_received_packets_.back().receive_time - + last_received_packets_.front().receive_time; + DataRate receiving_rate = bandwidth_estimate_; + if (sending_time > TimeDelta::Zero()) + receiving_rate = received_size / sending_time; + bandwidth_estimate_ = + std::min(bandwidth_estimate_ * 0.5, receiving_rate); + if (mode_ == Mode::kSlowStart) + mode_ = Mode::kOnlineLearning; + } + if (mode_ == Mode::kStartup && + msg.send_time - start_time_ >= kStartupDuration) { + DataSize received_size = DataSize::Zero(); + for (size_t i = 1; i < last_received_packets_.size(); ++i) { + received_size += last_received_packets_[i].sent_packet.size; + } + TimeDelta sending_time = TimeDelta::Zero(); + if (last_received_packets_.size() > 0) + sending_time 
= last_received_packets_.back().receive_time - + last_received_packets_.front().receive_time; + DataRate receiving_rate = bandwidth_estimate_; + if (sending_time > TimeDelta::Zero()) + receiving_rate = received_size / sending_time; + bandwidth_estimate_ = receiving_rate; + monitor_intervals_.clear(); + mode_ = Mode::kSlowStart; + monitor_intervals_duration_ = ComputeMonitorIntervalsDuration(); + monitor_intervals_bitrates_ = {bandwidth_estimate_}; + monitor_intervals_.emplace_back(bandwidth_estimate_, msg.send_time, + monitor_intervals_duration_); + bandwidth_estimate_ = bandwidth_estimate_ * (1 / kSlowStartModeIncrease); + complete_feedback_monitor_interval_number_ = 0; + return CreateRateUpdate(msg.send_time); + } + if (IsFeedbackCollectionDone() || IsTimeoutExpired(msg.send_time)) { + // Creating new monitor intervals. + monitor_intervals_.clear(); + monitor_interval_timeout_ = + rtt_tracker_.GetRtt() * monitor_interval_timeout_ratio_; + monitor_intervals_duration_ = ComputeMonitorIntervalsDuration(); + complete_feedback_monitor_interval_number_ = 0; + // Compute bitrates and start first monitor interval. 
+ if (mode_ == Mode::kSlowStart) { + monitor_intervals_bitrates_ = {kSlowStartModeIncrease * + bandwidth_estimate_}; + monitor_intervals_.emplace_back( + kSlowStartModeIncrease * bandwidth_estimate_, msg.send_time, + monitor_intervals_duration_); + } else { + RTC_DCHECK(mode_ == Mode::kOnlineLearning || mode_ == Mode::kDoubleCheck); + monitor_intervals_.clear(); + int64_t sign = 2 * (random_generator_.Rand(0, 1) % 2) - 1; + RTC_DCHECK_GE(sign, -1); + RTC_DCHECK_LE(sign, 1); + if (bandwidth_estimate_ >= kMinRateHaveMultiplicativeRateChange) { + monitor_intervals_bitrates_ = { + bandwidth_estimate_ * (1 + sign * sampling_step_), + bandwidth_estimate_ * (1 - sign * sampling_step_)}; + } else { + monitor_intervals_bitrates_ = { + DataRate::BitsPerSec(std::max( + bandwidth_estimate_.bps() + sign * kMinRateChangeBps, 0)), + DataRate::BitsPerSec(std::max( + bandwidth_estimate_.bps() - sign * kMinRateChangeBps, 0))}; + } + monitor_intervals_.emplace_back(monitor_intervals_bitrates_[0], + msg.send_time, + monitor_intervals_duration_); + } + } + return CreateRateUpdate(msg.send_time); +} + +TimeDelta PccNetworkController::ComputeMonitorIntervalsDuration() const { + TimeDelta monitor_intervals_duration = TimeDelta::Zero(); + if (monitor_interval_length_strategy_ == + MonitorIntervalLengthStrategy::kAdaptive) { + monitor_intervals_duration = std::max( + rtt_tracker_.GetRtt() * monitor_interval_duration_ratio_, + smoothed_packets_sending_interval_ * min_packets_number_per_interval_); + } else { + RTC_DCHECK(monitor_interval_length_strategy_ == + MonitorIntervalLengthStrategy::kFixed); + monitor_intervals_duration = + smoothed_packets_sending_interval_ * min_packets_number_per_interval_; + } + monitor_intervals_duration = + std::max(kMinDurationOfMonitorInterval, monitor_intervals_duration); + return monitor_intervals_duration; +} + +bool PccNetworkController::IsTimeoutExpired(Timestamp current_time) const { + if (complete_feedback_monitor_interval_number_ >= 
monitor_intervals_.size()) { + return false; + } + return current_time - + monitor_intervals_[complete_feedback_monitor_interval_number_] + .GetEndTime() >= + monitor_interval_timeout_; +} + +bool PccNetworkController::IsFeedbackCollectionDone() const { + return complete_feedback_monitor_interval_number_ >= + monitor_intervals_bitrates_.size(); +} + +NetworkControlUpdate PccNetworkController::OnTransportPacketsFeedback( + TransportPacketsFeedback msg) { + if (msg.packet_feedbacks.empty()) + return NetworkControlUpdate(); + // Save packets to last_received_packets_ array. + for (const PacketResult& packet_result : msg.ReceivedWithSendInfo()) { + last_received_packets_.push_back(packet_result); + } + while (last_received_packets_.size() > kNumberOfPacketsToKeep) { + last_received_packets_.pop_front(); + } + rtt_tracker_.OnPacketsFeedback(msg.PacketsWithFeedback(), msg.feedback_time); + // Skip rate update in case when online learning mode just started, but + // corresponding monitor intervals were not started yet. 
+ if (mode_ == Mode::kOnlineLearning && + monitor_intervals_bitrates_.size() < 2) { + return NetworkControlUpdate(); + } + if (!IsFeedbackCollectionDone() && !monitor_intervals_.empty()) { + while (complete_feedback_monitor_interval_number_ < + monitor_intervals_.size()) { + monitor_intervals_[complete_feedback_monitor_interval_number_] + .OnPacketsFeedback(msg.PacketsWithFeedback()); + if (!monitor_intervals_[complete_feedback_monitor_interval_number_] + .IsFeedbackCollectionDone()) + break; + ++complete_feedback_monitor_interval_number_; + } + } + if (IsFeedbackCollectionDone()) { + if (mode_ == Mode::kDoubleCheck) { + mode_ = Mode::kOnlineLearning; + } else if (NeedDoubleCheckMeasurments()) { + mode_ = Mode::kDoubleCheck; + } + if (mode_ != Mode::kDoubleCheck) + UpdateSendingRateAndMode(); + } + return NetworkControlUpdate(); +} + +bool PccNetworkController::NeedDoubleCheckMeasurments() const { + if (mode_ == Mode::kSlowStart) { + return false; + } + double first_loss_rate = monitor_intervals_[0].GetLossRate(); + double second_loss_rate = monitor_intervals_[1].GetLossRate(); + DataRate first_bitrate = monitor_intervals_[0].GetTargetSendingRate(); + DataRate second_bitrate = monitor_intervals_[1].GetTargetSendingRate(); + if ((first_bitrate.bps() - second_bitrate.bps()) * + (first_loss_rate - second_loss_rate) < + 0) { + return true; + } + return false; +} + +void PccNetworkController::UpdateSendingRateAndMode() { + if (monitor_intervals_.empty() || !IsFeedbackCollectionDone()) { + return; + } + if (mode_ == Mode::kSlowStart) { + DataRate old_bandwidth_estimate = bandwidth_estimate_; + bandwidth_estimate_ = + bitrate_controller_ + .ComputeRateUpdateForSlowStartMode(monitor_intervals_[0]) + .value_or(bandwidth_estimate_); + if (bandwidth_estimate_ <= old_bandwidth_estimate) + mode_ = Mode::kOnlineLearning; + } else { + RTC_DCHECK(mode_ == Mode::kOnlineLearning); + bandwidth_estimate_ = + bitrate_controller_.ComputeRateUpdateForOnlineLearningMode( + 
monitor_intervals_, bandwidth_estimate_); + } +} + +NetworkControlUpdate PccNetworkController::OnNetworkAvailability( + NetworkAvailability msg) { + return NetworkControlUpdate(); +} + +NetworkControlUpdate PccNetworkController::OnNetworkRouteChange( + NetworkRouteChange msg) { + return NetworkControlUpdate(); +} + +NetworkControlUpdate PccNetworkController::OnProcessInterval( + ProcessInterval msg) { + return CreateRateUpdate(msg.at_time); +} + +NetworkControlUpdate PccNetworkController::OnTargetRateConstraints( + TargetRateConstraints msg) { + return NetworkControlUpdate(); +} + +NetworkControlUpdate PccNetworkController::OnRemoteBitrateReport( + RemoteBitrateReport) { + return NetworkControlUpdate(); +} + +NetworkControlUpdate PccNetworkController::OnRoundTripTimeUpdate( + RoundTripTimeUpdate) { + return NetworkControlUpdate(); +} + +NetworkControlUpdate PccNetworkController::OnTransportLossReport( + TransportLossReport) { + return NetworkControlUpdate(); +} + +NetworkControlUpdate PccNetworkController::OnStreamsConfig(StreamsConfig msg) { + return NetworkControlUpdate(); +} + +NetworkControlUpdate PccNetworkController::OnReceivedPacket( + ReceivedPacket msg) { + return NetworkControlUpdate(); +} + +NetworkControlUpdate PccNetworkController::OnNetworkStateEstimate( + NetworkStateEstimate msg) { + return NetworkControlUpdate(); +} + +} // namespace pcc +} // namespace webrtc diff --git a/modules/congestion_controller/pcc/pcc_network_controller.h b/modules/congestion_controller/pcc/pcc_network_controller.h new file mode 100644 index 0000000..e5f65dd --- /dev/null +++ b/modules/congestion_controller/pcc/pcc_network_controller.h @@ -0,0 +1,125 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_CONGESTION_CONTROLLER_PCC_PCC_NETWORK_CONTROLLER_H_ +#define MODULES_CONGESTION_CONTROLLER_PCC_PCC_NETWORK_CONTROLLER_H_ + +#include +#include + +#include +#include + +#include "api/transport/network_control.h" +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/congestion_controller/pcc/bitrate_controller.h" +#include "modules/congestion_controller/pcc/monitor_interval.h" +#include "modules/congestion_controller/pcc/rtt_tracker.h" +#include "rtc_base/random.h" + +namespace webrtc { +namespace pcc { + +// PCC (Performance-oriented Congestion Control) Vivace is a congestion +// control algorithm based on online (convex) optimization in machine learning. +// It divides time into consecutive Monitor Intervals (MI) to test sending +// rates r(1 + eps), r(1 - eps) for the current sending rate r. +// At the end of each MI it computes utility function to transform the +// performance statistics into a numerical value. Then it updates current +// sending rate using gradient ascent to maximize utility function. +class PccNetworkController : public NetworkControllerInterface { + public: + enum class Mode { + kStartup, + // Slow start phase of PCC doubles sending rate each monitor interval. + kSlowStart, + // After getting the first decrease in utility function PCC exits slow start + // and enters the online learning phase. + kOnlineLearning, + // If we got that sending with the lower rate resulted in higher packet + // loss, then the measurements are unreliable and we need to double check + // them. + kDoubleCheck + }; + + enum class MonitorIntervalLengthStrategy { + // Monitor interval length adaptive when it is proportional to packets RTT. 
+ kAdaptive, + // Monitor interval length is fixed when it is equal to the time of sending + // predefined amount of packets (kMinPacketsNumberPerInterval). + kFixed + }; + + explicit PccNetworkController(NetworkControllerConfig config); + ~PccNetworkController() override; + + // NetworkControllerInterface + NetworkControlUpdate OnNetworkAvailability(NetworkAvailability msg) override; + NetworkControlUpdate OnNetworkRouteChange(NetworkRouteChange msg) override; + NetworkControlUpdate OnProcessInterval(ProcessInterval msg) override; + NetworkControlUpdate OnSentPacket(SentPacket msg) override; + NetworkControlUpdate OnTargetRateConstraints( + TargetRateConstraints msg) override; + NetworkControlUpdate OnTransportPacketsFeedback( + TransportPacketsFeedback msg) override; + + // Part of remote bitrate estimation api, not implemented for PCC + NetworkControlUpdate OnStreamsConfig(StreamsConfig msg) override; + NetworkControlUpdate OnRemoteBitrateReport(RemoteBitrateReport msg) override; + NetworkControlUpdate OnRoundTripTimeUpdate(RoundTripTimeUpdate msg) override; + NetworkControlUpdate OnTransportLossReport(TransportLossReport msg) override; + NetworkControlUpdate OnReceivedPacket(ReceivedPacket msg) override; + NetworkControlUpdate OnNetworkStateEstimate( + NetworkStateEstimate msg) override; + + private: + void UpdateSendingRateAndMode(); + NetworkControlUpdate CreateRateUpdate(Timestamp at_time) const; + TimeDelta ComputeMonitorIntervalsDuration() const; + bool NeedDoubleCheckMeasurments() const; + bool IsTimeoutExpired(Timestamp current_time) const; + bool IsFeedbackCollectionDone() const; + + Timestamp start_time_; + Timestamp last_sent_packet_time_; + TimeDelta smoothed_packets_sending_interval_; + Mode mode_; + + // Default value used for initializing bandwidth. + DataRate default_bandwidth_; + // Current estimate r. 
+ DataRate bandwidth_estimate_; + + RttTracker rtt_tracker_; + TimeDelta monitor_interval_timeout_; + const MonitorIntervalLengthStrategy monitor_interval_length_strategy_; + const double monitor_interval_duration_ratio_; + const double sampling_step_; // Epsilon. + const double monitor_interval_timeout_ratio_; + const int64_t min_packets_number_per_interval_; + + PccBitrateController bitrate_controller_; + + std::vector monitor_intervals_; + std::vector monitor_intervals_bitrates_; + TimeDelta monitor_intervals_duration_; + size_t complete_feedback_monitor_interval_number_; + + webrtc::Random random_generator_; + std::deque last_received_packets_; +}; + +} // namespace pcc +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_PCC_PCC_NETWORK_CONTROLLER_H_ diff --git a/modules/congestion_controller/pcc/pcc_network_controller_unittest.cc b/modules/congestion_controller/pcc/pcc_network_controller_unittest.cc new file mode 100644 index 0000000..c98680c --- /dev/null +++ b/modules/congestion_controller/pcc/pcc_network_controller_unittest.cc @@ -0,0 +1,119 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/congestion_controller/pcc/pcc_network_controller.h" + +#include + +#include "modules/congestion_controller/pcc/pcc_factory.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/scenario/scenario.h" + +using ::testing::AllOf; +using ::testing::Field; +using ::testing::Ge; +using ::testing::Le; +using ::testing::Matcher; +using ::testing::Property; + +namespace webrtc { +namespace test { +namespace { + +const DataRate kInitialBitrate = DataRate::KilobitsPerSec(60); +const Timestamp kDefaultStartTime = Timestamp::Millis(10000000); + +constexpr double kDataRateMargin = 0.20; +constexpr double kMinDataRateFactor = 1 - kDataRateMargin; +constexpr double kMaxDataRateFactor = 1 + kDataRateMargin; +inline Matcher TargetRateCloseTo(DataRate rate) { + DataRate min_data_rate = rate * kMinDataRateFactor; + DataRate max_data_rate = rate * kMaxDataRateFactor; + return Field(&TargetTransferRate::target_rate, + AllOf(Ge(min_data_rate), Le(max_data_rate))); +} + +NetworkControllerConfig InitialConfig( + int starting_bandwidth_kbps = kInitialBitrate.kbps(), + int min_data_rate_kbps = 0, + int max_data_rate_kbps = 5 * kInitialBitrate.kbps()) { + NetworkControllerConfig config; + config.constraints.at_time = kDefaultStartTime; + config.constraints.min_data_rate = + DataRate::KilobitsPerSec(min_data_rate_kbps); + config.constraints.max_data_rate = + DataRate::KilobitsPerSec(max_data_rate_kbps); + config.constraints.starting_rate = + DataRate::KilobitsPerSec(starting_bandwidth_kbps); + return config; +} + +ProcessInterval InitialProcessInterval() { + ProcessInterval process_interval; + process_interval.at_time = kDefaultStartTime; + return process_interval; +} + +} // namespace + +TEST(PccNetworkControllerTest, SendsConfigurationOnFirstProcess) { + std::unique_ptr controller_; + controller_.reset(new pcc::PccNetworkController(InitialConfig())); + + NetworkControlUpdate update = + controller_->OnProcessInterval(InitialProcessInterval()); + 
EXPECT_THAT(*update.target_rate, TargetRateCloseTo(kInitialBitrate)); + EXPECT_THAT(*update.pacer_config, + Property(&PacerConfig::data_rate, Ge(kInitialBitrate))); +} + +TEST(PccNetworkControllerTest, UpdatesTargetSendRate) { + PccNetworkControllerFactory factory; + Scenario s("pcc_unit/updates_rate", false); + CallClientConfig config; + config.transport.cc_factory = &factory; + config.transport.rates.min_rate = DataRate::KilobitsPerSec(10); + config.transport.rates.max_rate = DataRate::KilobitsPerSec(1500); + config.transport.rates.start_rate = DataRate::KilobitsPerSec(300); + auto send_net = s.CreateMutableSimulationNode([](NetworkSimulationConfig* c) { + c->bandwidth = DataRate::KilobitsPerSec(500); + c->delay = TimeDelta::Millis(100); + }); + auto ret_net = s.CreateMutableSimulationNode( + [](NetworkSimulationConfig* c) { c->delay = TimeDelta::Millis(100); }); + + auto* client = s.CreateClient("send", config); + auto* route = s.CreateRoutes(client, {send_net->node()}, + s.CreateClient("return", CallClientConfig()), + {ret_net->node()}); + VideoStreamConfig video; + video.stream.use_rtx = false; + s.CreateVideoStream(route->forward(), video); + s.RunFor(TimeDelta::Seconds(30)); + EXPECT_NEAR(client->target_rate().kbps(), 450, 100); + send_net->UpdateConfig([](NetworkSimulationConfig* c) { + c->bandwidth = DataRate::KilobitsPerSec(800); + c->delay = TimeDelta::Millis(100); + }); + s.RunFor(TimeDelta::Seconds(20)); + EXPECT_NEAR(client->target_rate().kbps(), 750, 150); + send_net->UpdateConfig([](NetworkSimulationConfig* c) { + c->bandwidth = DataRate::KilobitsPerSec(200); + c->delay = TimeDelta::Millis(200); + }); + ret_net->UpdateConfig( + [](NetworkSimulationConfig* c) { c->delay = TimeDelta::Millis(200); }); + s.RunFor(TimeDelta::Seconds(35)); + EXPECT_NEAR(client->target_rate().kbps(), 170, 50); +} + +} // namespace test +} // namespace webrtc diff --git a/modules/congestion_controller/pcc/rtt_tracker.cc b/modules/congestion_controller/pcc/rtt_tracker.cc new 
file mode 100644 index 0000000..0814912 --- /dev/null +++ b/modules/congestion_controller/pcc/rtt_tracker.cc @@ -0,0 +1,41 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/congestion_controller/pcc/rtt_tracker.h" + +#include + +namespace webrtc { +namespace pcc { + +RttTracker::RttTracker(TimeDelta initial_rtt, double alpha) + : rtt_estimate_(initial_rtt), alpha_(alpha) {} + +void RttTracker::OnPacketsFeedback( + const std::vector& packet_feedbacks, + Timestamp feedback_received_time) { + TimeDelta packet_rtt = TimeDelta::MinusInfinity(); + for (const PacketResult& packet_result : packet_feedbacks) { + if (packet_result.receive_time.IsInfinite()) + continue; + packet_rtt = std::max( + packet_rtt, + feedback_received_time - packet_result.sent_packet.send_time); + } + if (packet_rtt.IsFinite()) + rtt_estimate_ = (1 - alpha_) * rtt_estimate_ + alpha_ * packet_rtt; +} + +TimeDelta RttTracker::GetRtt() const { + return rtt_estimate_; +} + +} // namespace pcc +} // namespace webrtc diff --git a/modules/congestion_controller/pcc/rtt_tracker.h b/modules/congestion_controller/pcc/rtt_tracker.h new file mode 100644 index 0000000..94033cd --- /dev/null +++ b/modules/congestion_controller/pcc/rtt_tracker.h @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_CONGESTION_CONTROLLER_PCC_RTT_TRACKER_H_ +#define MODULES_CONGESTION_CONTROLLER_PCC_RTT_TRACKER_H_ + +#include + +#include "api/transport/network_types.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" + +namespace webrtc { +namespace pcc { + +class RttTracker { + public: + RttTracker(TimeDelta initial_rtt, double alpha); + // Updates RTT estimate. + void OnPacketsFeedback(const std::vector& packet_feedbacks, + Timestamp feedback_received_time); + TimeDelta GetRtt() const; + + private: + TimeDelta rtt_estimate_; + double alpha_; +}; + +} // namespace pcc +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_PCC_RTT_TRACKER_H_ diff --git a/modules/congestion_controller/pcc/rtt_tracker_unittest.cc b/modules/congestion_controller/pcc/rtt_tracker_unittest.cc new file mode 100644 index 0000000..7d90e86 --- /dev/null +++ b/modules/congestion_controller/pcc/rtt_tracker_unittest.cc @@ -0,0 +1,71 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#include "modules/congestion_controller/pcc/rtt_tracker.h"
+
+#include "test/gtest.h"
+
+namespace webrtc {
+namespace pcc {
+namespace test {
+namespace {
+const TimeDelta kInitialRtt = TimeDelta::Micros(10);
+constexpr double kAlpha = 0.9;
+const Timestamp kStartTime = Timestamp::Seconds(0);
+
+// Builds a PacketResult sent at kStartTime whose feedback implies `rtt`;
+// an infinite rtt produces a lost packet (infinite receive time).
+PacketResult GetPacketWithRtt(TimeDelta rtt) {
+  SentPacket packet;
+  packet.send_time = kStartTime;
+  PacketResult packet_result;
+  packet_result.sent_packet = packet;
+  if (rtt.IsFinite()) {
+    packet_result.receive_time = kStartTime + rtt;
+  } else {
+    packet_result.receive_time = Timestamp::PlusInfinity();
+  }
+  return packet_result;
+}
+}  // namespace
+
+// Feeding samples equal to the initial RTT must leave the estimate unchanged.
+TEST(PccRttTrackerTest, InitialValue) {
+  RttTracker tracker{kInitialRtt, kAlpha};
+  EXPECT_EQ(kInitialRtt, tracker.GetRtt());
+  for (int i = 0; i < 100; ++i) {
+    tracker.OnPacketsFeedback({GetPacketWithRtt(kInitialRtt)},
+                              kStartTime + kInitialRtt);
+  }
+  EXPECT_EQ(kInitialRtt, tracker.GetRtt());
+}
+
+// A lost packet (no receive time) must not move the estimate.
+TEST(PccRttTrackerTest, DoNothingWhenPacketIsLost) {
+  RttTracker tracker{kInitialRtt, kAlpha};
+  tracker.OnPacketsFeedback({GetPacketWithRtt(TimeDelta::PlusInfinity())},
+                            kStartTime + kInitialRtt);
+  EXPECT_EQ(tracker.GetRtt(), kInitialRtt);
+}
+
+// The smoothed estimate must converge towards a new, larger RTT without
+// overshooting it.
+TEST(PccRttTrackerTest, ChangeInRtt) {
+  RttTracker tracker{kInitialRtt, kAlpha};
+  const TimeDelta kNewRtt = TimeDelta::Micros(100);
+  tracker.OnPacketsFeedback({GetPacketWithRtt(kNewRtt)}, kStartTime + kNewRtt);
+  EXPECT_GT(tracker.GetRtt(), kInitialRtt);
+  EXPECT_LE(tracker.GetRtt(), kNewRtt);
+  for (int i = 0; i < 100; ++i) {
+    tracker.OnPacketsFeedback({GetPacketWithRtt(kNewRtt)},
+                              kStartTime + kNewRtt);
+  }
+  const TimeDelta absolute_error = TimeDelta::Micros(1);
+  EXPECT_NEAR(tracker.GetRtt().us(), kNewRtt.us(), absolute_error.us());
+  EXPECT_LE(tracker.GetRtt(), kNewRtt);
+}
+
+}  // namespace test
+}  // namespace pcc
+}  // namespace webrtc
diff --git a/modules/congestion_controller/pcc/utility_function.cc
b/modules/congestion_controller/pcc/utility_function.cc new file mode 100644 index 0000000..006a2fc --- /dev/null +++ b/modules/congestion_controller/pcc/utility_function.cc @@ -0,0 +1,86 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/congestion_controller/pcc/utility_function.h" + +#include +#include + +#include "api/units/data_rate.h" +#include "rtc_base/checks.h" + +namespace webrtc { +namespace pcc { + +VivaceUtilityFunction::VivaceUtilityFunction( + double delay_gradient_coefficient, + double loss_coefficient, + double throughput_coefficient, + double throughput_power, + double delay_gradient_threshold, + double delay_gradient_negative_bound) + : delay_gradient_coefficient_(delay_gradient_coefficient), + loss_coefficient_(loss_coefficient), + throughput_power_(throughput_power), + throughput_coefficient_(throughput_coefficient), + delay_gradient_threshold_(delay_gradient_threshold), + delay_gradient_negative_bound_(delay_gradient_negative_bound) { + RTC_DCHECK_GE(delay_gradient_negative_bound_, 0); +} + +double VivaceUtilityFunction::Compute( + const PccMonitorInterval& monitor_interval) const { + RTC_DCHECK(monitor_interval.IsFeedbackCollectionDone()); + double bitrate = monitor_interval.GetTargetSendingRate().bps(); + double loss_rate = monitor_interval.GetLossRate(); + double rtt_gradient = + monitor_interval.ComputeDelayGradient(delay_gradient_threshold_); + rtt_gradient = std::max(rtt_gradient, -delay_gradient_negative_bound_); + return (throughput_coefficient_ * std::pow(bitrate, throughput_power_)) - + (delay_gradient_coefficient_ * bitrate * rtt_gradient) - + 
(loss_coefficient_ * bitrate * loss_rate); +} + +VivaceUtilityFunction::~VivaceUtilityFunction() = default; + +ModifiedVivaceUtilityFunction::ModifiedVivaceUtilityFunction( + double delay_gradient_coefficient, + double loss_coefficient, + double throughput_coefficient, + double throughput_power, + double delay_gradient_threshold, + double delay_gradient_negative_bound) + : delay_gradient_coefficient_(delay_gradient_coefficient), + loss_coefficient_(loss_coefficient), + throughput_power_(throughput_power), + throughput_coefficient_(throughput_coefficient), + delay_gradient_threshold_(delay_gradient_threshold), + delay_gradient_negative_bound_(delay_gradient_negative_bound) { + RTC_DCHECK_GE(delay_gradient_negative_bound_, 0); +} + +double ModifiedVivaceUtilityFunction::Compute( + const PccMonitorInterval& monitor_interval) const { + RTC_DCHECK(monitor_interval.IsFeedbackCollectionDone()); + double bitrate = monitor_interval.GetTargetSendingRate().bps(); + double loss_rate = monitor_interval.GetLossRate(); + double rtt_gradient = + monitor_interval.ComputeDelayGradient(delay_gradient_threshold_); + rtt_gradient = std::max(rtt_gradient, -delay_gradient_negative_bound_); + return (throughput_coefficient_ * std::pow(bitrate, throughput_power_) * + bitrate) - + (delay_gradient_coefficient_ * bitrate * bitrate * rtt_gradient) - + (loss_coefficient_ * bitrate * bitrate * loss_rate); +} + +ModifiedVivaceUtilityFunction::~ModifiedVivaceUtilityFunction() = default; + +} // namespace pcc +} // namespace webrtc diff --git a/modules/congestion_controller/pcc/utility_function.h b/modules/congestion_controller/pcc/utility_function.h new file mode 100644 index 0000000..98bb074 --- /dev/null +++ b/modules/congestion_controller/pcc/utility_function.h @@ -0,0 +1,78 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_CONGESTION_CONTROLLER_PCC_UTILITY_FUNCTION_H_ +#define MODULES_CONGESTION_CONTROLLER_PCC_UTILITY_FUNCTION_H_ + +#include "modules/congestion_controller/pcc/monitor_interval.h" + +namespace webrtc { +namespace pcc { + +// Utility function is used by PCC to transform the performance statistics +// (sending rate, loss rate, packets latency) gathered at one monitor interval +// into a numerical value. +// https://www.usenix.org/conference/nsdi18/presentation/dong +class PccUtilityFunctionInterface { + public: + virtual double Compute(const PccMonitorInterval& monitor_interval) const = 0; + virtual ~PccUtilityFunctionInterface() = default; +}; + +// The Vivace utility function was suggested in the paper "PCC Vivace: +// Online-Learning Congestion Control", Mo Dong et al. +class VivaceUtilityFunction : public PccUtilityFunctionInterface { + public: + VivaceUtilityFunction(double delay_gradient_coefficient, + double loss_coefficient, + double throughput_coefficient, + double throughput_power, + double delay_gradient_threshold, + double delay_gradient_negative_bound); + double Compute(const PccMonitorInterval& monitor_interval) const override; + ~VivaceUtilityFunction() override; + + private: + const double delay_gradient_coefficient_; + const double loss_coefficient_; + const double throughput_power_; + const double throughput_coefficient_; + const double delay_gradient_threshold_; + const double delay_gradient_negative_bound_; +}; + +// This utility function was obtained by tuning the Vivace utility function. +// The main difference is that the gradient of the modified utility function +// (as well as +// rate updates) scales proportionally to the sending rate which leads to +// better performance in case of a single sender.
+class ModifiedVivaceUtilityFunction : public PccUtilityFunctionInterface { + public: + ModifiedVivaceUtilityFunction(double delay_gradient_coefficient, + double loss_coefficient, + double throughput_coefficient, + double throughput_power, + double delay_gradient_threshold, + double delay_gradient_negative_bound); + double Compute(const PccMonitorInterval& monitor_interval) const override; + ~ModifiedVivaceUtilityFunction() override; + + private: + const double delay_gradient_coefficient_; + const double loss_coefficient_; + const double throughput_power_; + const double throughput_coefficient_; + const double delay_gradient_threshold_; + const double delay_gradient_negative_bound_; +}; + +} // namespace pcc +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_PCC_UTILITY_FUNCTION_H_ diff --git a/modules/congestion_controller/pcc/utility_function_unittest.cc b/modules/congestion_controller/pcc/utility_function_unittest.cc new file mode 100644 index 0000000..19b2d15 --- /dev/null +++ b/modules/congestion_controller/pcc/utility_function_unittest.cc @@ -0,0 +1,113 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/congestion_controller/pcc/utility_function.h" + +#include + +#include +#include +#include + +#include "api/transport/network_types.h" +#include "api/units/data_rate.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "test/gtest.h" + +namespace webrtc { +namespace pcc { +namespace test { +namespace { +constexpr double kLossCoefficient = 11.35; +constexpr double kThroughputPower = 0.9; +constexpr double kThroughputCoefficient = 1; +constexpr double kDelayGradientNegativeBound = 10; + +const Timestamp kStartTime = Timestamp::Micros(0); +const TimeDelta kPacketsDelta = TimeDelta::Millis(1); +const TimeDelta kIntervalDuration = TimeDelta::Millis(100); +const DataRate kSendingBitrate = DataRate::BitsPerSec(1000); + +const DataSize kDefaultDataSize = DataSize::Bytes(100); +const TimeDelta kDefaultDelay = TimeDelta::Millis(100); + +std::vector CreatePacketResults( + const std::vector& packets_send_times, + const std::vector& packets_received_times = {}, + const std::vector& packets_sizes = {}) { + std::vector packet_results; + PacketResult packet_result; + SentPacket sent_packet; + for (size_t i = 0; i < packets_send_times.size(); ++i) { + sent_packet.send_time = packets_send_times[i]; + if (packets_sizes.empty()) { + sent_packet.size = kDefaultDataSize; + } else { + sent_packet.size = packets_sizes[i]; + } + packet_result.sent_packet = sent_packet; + if (packets_received_times.empty()) { + packet_result.receive_time = packets_send_times[i] + kDefaultDelay; + } else { + packet_result.receive_time = packets_received_times[i]; + } + packet_results.push_back(packet_result); + } + return packet_results; +} + +} // namespace + +TEST(PccVivaceUtilityFunctionTest, + UtilityIsThroughputTermIfAllRestCoefficientsAreZero) { + VivaceUtilityFunction utility_function(0, 0, kThroughputCoefficient, + kThroughputPower, 0, + kDelayGradientNegativeBound); + PccMonitorInterval 
monitor_interval(kSendingBitrate, kStartTime, + kIntervalDuration); + monitor_interval.OnPacketsFeedback(CreatePacketResults( + {kStartTime + kPacketsDelta, kStartTime + 2 * kPacketsDelta, + kStartTime + 3 * kPacketsDelta, kStartTime + 2 * kIntervalDuration}, + {kStartTime + kPacketsDelta + kDefaultDelay, Timestamp::PlusInfinity(), + kStartTime + kDefaultDelay + 3 * kPacketsDelta, + Timestamp::PlusInfinity()}, + {kDefaultDataSize, kDefaultDataSize, kDefaultDataSize, + kDefaultDataSize})); + EXPECT_DOUBLE_EQ(utility_function.Compute(monitor_interval), + kThroughputCoefficient * + std::pow(kSendingBitrate.bps(), kThroughputPower)); +} + +TEST(PccVivaceUtilityFunctionTest, + LossTermIsNonZeroIfLossCoefficientIsNonZero) { + VivaceUtilityFunction utility_function( + 0, kLossCoefficient, kThroughputCoefficient, kThroughputPower, 0, + kDelayGradientNegativeBound); + PccMonitorInterval monitor_interval(kSendingBitrate, kStartTime, + kIntervalDuration); + monitor_interval.OnPacketsFeedback(CreatePacketResults( + {kStartTime + kPacketsDelta, kStartTime + 2 * kPacketsDelta, + kStartTime + 5 * kPacketsDelta, kStartTime + 2 * kIntervalDuration}, + {kStartTime + kDefaultDelay, Timestamp::PlusInfinity(), + kStartTime + kDefaultDelay, kStartTime + 3 * kIntervalDuration}, + {})); + // The second packet was lost. + EXPECT_DOUBLE_EQ(utility_function.Compute(monitor_interval), + kThroughputCoefficient * + std::pow(kSendingBitrate.bps(), kThroughputPower) - + kLossCoefficient * kSendingBitrate.bps() * + monitor_interval.GetLossRate()); +} + +} // namespace test +} // namespace pcc +} // namespace webrtc diff --git a/modules/congestion_controller/receive_side_congestion_controller.cc b/modules/congestion_controller/receive_side_congestion_controller.cc new file mode 100644 index 0000000..638cb2d --- /dev/null +++ b/modules/congestion_controller/receive_side_congestion_controller.cc @@ -0,0 +1,189 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/congestion_controller/include/receive_side_congestion_controller.h" + +#include "modules/pacing/packet_router.h" +#include "modules/remote_bitrate_estimator/include/bwe_defines.h" +#include "modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.h" +#include "modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +namespace { +static const uint32_t kTimeOffsetSwitchThreshold = 30; +} // namespace + +ReceiveSideCongestionController::WrappingBitrateEstimator:: + WrappingBitrateEstimator(RemoteBitrateObserver* observer, Clock* clock) + : observer_(observer), + clock_(clock), + rbe_(new RemoteBitrateEstimatorSingleStream(observer_, clock_)), + using_absolute_send_time_(false), + packets_since_absolute_send_time_(0), + min_bitrate_bps_(congestion_controller::GetMinBitrateBps()) {} + +ReceiveSideCongestionController::WrappingBitrateEstimator:: + ~WrappingBitrateEstimator() = default; + +void ReceiveSideCongestionController::WrappingBitrateEstimator::IncomingPacket( + int64_t arrival_time_ms, + size_t payload_size, + const RTPHeader& header) { + MutexLock lock(&mutex_); + PickEstimatorFromHeader(header); + rbe_->IncomingPacket(arrival_time_ms, payload_size, header); +} + +void ReceiveSideCongestionController::WrappingBitrateEstimator::Process() { + MutexLock lock(&mutex_); + rbe_->Process(); +} + +int64_t ReceiveSideCongestionController::WrappingBitrateEstimator:: + TimeUntilNextProcess() { + MutexLock lock(&mutex_); + return rbe_->TimeUntilNextProcess(); +} + +void 
ReceiveSideCongestionController::WrappingBitrateEstimator::OnRttUpdate( + int64_t avg_rtt_ms, + int64_t max_rtt_ms) { + MutexLock lock(&mutex_); + rbe_->OnRttUpdate(avg_rtt_ms, max_rtt_ms); +} + +void ReceiveSideCongestionController::WrappingBitrateEstimator::RemoveStream( + unsigned int ssrc) { + MutexLock lock(&mutex_); + rbe_->RemoveStream(ssrc); +} + +bool ReceiveSideCongestionController::WrappingBitrateEstimator::LatestEstimate( + std::vector* ssrcs, + unsigned int* bitrate_bps) const { + MutexLock lock(&mutex_); + return rbe_->LatestEstimate(ssrcs, bitrate_bps); +} + +void ReceiveSideCongestionController::WrappingBitrateEstimator::SetMinBitrate( + int min_bitrate_bps) { + MutexLock lock(&mutex_); + rbe_->SetMinBitrate(min_bitrate_bps); + min_bitrate_bps_ = min_bitrate_bps; +} + +void ReceiveSideCongestionController::WrappingBitrateEstimator:: + PickEstimatorFromHeader(const RTPHeader& header) { + if (header.extension.hasAbsoluteSendTime) { + // If we see AST in header, switch RBE strategy immediately. + if (!using_absolute_send_time_) { + RTC_LOG(LS_INFO) + << "WrappingBitrateEstimator: Switching to absolute send time RBE."; + using_absolute_send_time_ = true; + PickEstimator(); + } + packets_since_absolute_send_time_ = 0; + } else { + // When we don't see AST, wait for a few packets before going back to TOF. + if (using_absolute_send_time_) { + ++packets_since_absolute_send_time_; + if (packets_since_absolute_send_time_ >= kTimeOffsetSwitchThreshold) { + RTC_LOG(LS_INFO) + << "WrappingBitrateEstimator: Switching to transmission " + "time offset RBE."; + using_absolute_send_time_ = false; + PickEstimator(); + } + } + } +} + +// Instantiate RBE for Time Offset or Absolute Send Time extensions. 
+void ReceiveSideCongestionController::WrappingBitrateEstimator:: + PickEstimator() { + if (using_absolute_send_time_) { + rbe_.reset(new RemoteBitrateEstimatorAbsSendTime(observer_, clock_)); + } else { + rbe_.reset(new RemoteBitrateEstimatorSingleStream(observer_, clock_)); + } + rbe_->SetMinBitrate(min_bitrate_bps_); +} + +ReceiveSideCongestionController::ReceiveSideCongestionController( + Clock* clock, + PacketRouter* packet_router) + : ReceiveSideCongestionController(clock, packet_router, nullptr) {} + +ReceiveSideCongestionController::ReceiveSideCongestionController( + Clock* clock, + PacketRouter* packet_router, + NetworkStateEstimator* network_state_estimator) + : remote_bitrate_estimator_(packet_router, clock), + remote_estimator_proxy_(clock, + packet_router, + &field_trial_config_, + network_state_estimator) {} + +void ReceiveSideCongestionController::OnReceivedPacket( + int64_t arrival_time_ms, + size_t payload_size, + const RTPHeader& header) { + remote_estimator_proxy_.IncomingPacket(arrival_time_ms, payload_size, header); + if (!header.extension.hasTransportSequenceNumber) { + // Receive-side BWE. 
+ remote_bitrate_estimator_.IncomingPacket(arrival_time_ms, payload_size, + header); + } +} + +void ReceiveSideCongestionController::SetSendPeriodicFeedback( + bool send_periodic_feedback) { + remote_estimator_proxy_.SetSendPeriodicFeedback(send_periodic_feedback); +} + +RemoteBitrateEstimator* +ReceiveSideCongestionController::GetRemoteBitrateEstimator(bool send_side_bwe) { + if (send_side_bwe) { + return &remote_estimator_proxy_; + } else { + return &remote_bitrate_estimator_; + } +} + +const RemoteBitrateEstimator* +ReceiveSideCongestionController::GetRemoteBitrateEstimator( + bool send_side_bwe) const { + if (send_side_bwe) { + return &remote_estimator_proxy_; + } else { + return &remote_bitrate_estimator_; + } +} + +void ReceiveSideCongestionController::OnRttUpdate(int64_t avg_rtt_ms, + int64_t max_rtt_ms) { + remote_bitrate_estimator_.OnRttUpdate(avg_rtt_ms, max_rtt_ms); +} + +void ReceiveSideCongestionController::OnBitrateChanged(int bitrate_bps) { + remote_estimator_proxy_.OnBitrateChanged(bitrate_bps); +} + +int64_t ReceiveSideCongestionController::TimeUntilNextProcess() { + return remote_bitrate_estimator_.TimeUntilNextProcess(); +} + +void ReceiveSideCongestionController::Process() { + remote_bitrate_estimator_.Process(); +} + +} // namespace webrtc diff --git a/modules/congestion_controller/receive_side_congestion_controller_unittest.cc b/modules/congestion_controller/receive_side_congestion_controller_unittest.cc new file mode 100644 index 0000000..b584623 --- /dev/null +++ b/modules/congestion_controller/receive_side_congestion_controller_unittest.cc @@ -0,0 +1,119 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. 
All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/congestion_controller/include/receive_side_congestion_controller.h" + +#include "modules/pacing/packet_router.h" +#include "system_wrappers/include/clock.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/scenario/scenario.h" + +using ::testing::_; +using ::testing::AtLeast; +using ::testing::NiceMock; +using ::testing::Return; +using ::testing::SaveArg; +using ::testing::StrictMock; + +namespace webrtc { + +namespace { + +// Helper to convert some time format to resolution used in absolute send time +// header extension, rounded upwards. |t| is the time to convert, in some +// resolution. |denom| is the value to divide |t| by to get whole seconds, +// e.g. |denom| = 1000 if |t| is in milliseconds. +uint32_t AbsSendTime(int64_t t, int64_t denom) { + return (((t << 18) + (denom >> 1)) / denom) & 0x00fffffful; +} + +class MockPacketRouter : public PacketRouter { + public: + MOCK_METHOD(void, + OnReceiveBitrateChanged, + (const std::vector& ssrcs, uint32_t bitrate), + (override)); +}; + +const uint32_t kInitialBitrateBps = 60000; + +} // namespace + +namespace test { + +TEST(ReceiveSideCongestionControllerTest, OnReceivedPacketWithAbsSendTime) { + StrictMock packet_router; + SimulatedClock clock_(123456); + + ReceiveSideCongestionController controller(&clock_, &packet_router); + + size_t payload_size = 1000; + RTPHeader header; + header.ssrc = 0x11eb21c; + header.extension.hasAbsoluteSendTime = true; + + std::vector ssrcs; + EXPECT_CALL(packet_router, OnReceiveBitrateChanged(_, _)) + .WillRepeatedly(SaveArg<0>(&ssrcs)); + + for (int i = 0; i < 10; ++i) { + clock_.AdvanceTimeMilliseconds((1000 * payload_size) / kInitialBitrateBps); + int64_t now_ms = clock_.TimeInMilliseconds(); + header.extension.absoluteSendTime = AbsSendTime(now_ms, 1000); + controller.OnReceivedPacket(now_ms, payload_size, header); + } + + ASSERT_EQ(1u, 
ssrcs.size()); + EXPECT_EQ(header.ssrc, ssrcs[0]); +} + +TEST(ReceiveSideCongestionControllerTest, ConvergesToCapacity) { + Scenario s("recieve_cc_unit/converge"); + NetworkSimulationConfig net_conf; + net_conf.bandwidth = DataRate::KilobitsPerSec(1000); + net_conf.delay = TimeDelta::Millis(50); + auto* client = s.CreateClient("send", [&](CallClientConfig* c) { + c->transport.rates.start_rate = DataRate::KilobitsPerSec(300); + }); + + auto* route = s.CreateRoutes(client, {s.CreateSimulationNode(net_conf)}, + s.CreateClient("return", CallClientConfig()), + {s.CreateSimulationNode(net_conf)}); + VideoStreamConfig video; + video.stream.packet_feedback = false; + s.CreateVideoStream(route->forward(), video); + s.RunFor(TimeDelta::Seconds(30)); + EXPECT_NEAR(client->send_bandwidth().kbps(), 900, 150); +} + +TEST(ReceiveSideCongestionControllerTest, IsFairToTCP) { + Scenario s("recieve_cc_unit/tcp_fairness"); + NetworkSimulationConfig net_conf; + net_conf.bandwidth = DataRate::KilobitsPerSec(1000); + net_conf.delay = TimeDelta::Millis(50); + auto* client = s.CreateClient("send", [&](CallClientConfig* c) { + c->transport.rates.start_rate = DataRate::KilobitsPerSec(1000); + }); + auto send_net = {s.CreateSimulationNode(net_conf)}; + auto ret_net = {s.CreateSimulationNode(net_conf)}; + auto* route = s.CreateRoutes( + client, send_net, s.CreateClient("return", CallClientConfig()), ret_net); + VideoStreamConfig video; + video.stream.packet_feedback = false; + s.CreateVideoStream(route->forward(), video); + s.net()->StartFakeTcpCrossTraffic(send_net, ret_net, FakeTcpConfig()); + s.RunFor(TimeDelta::Seconds(30)); + // For some reason we get outcompeted by TCP here, this should probably be + // fixed and a lower bound should be added to the test. 
+ EXPECT_LT(client->send_bandwidth().kbps(), 750); +} +} // namespace test +} // namespace webrtc diff --git a/modules/congestion_controller/rtp/BUILD.gn b/modules/congestion_controller/rtp/BUILD.gn new file mode 100644 index 0000000..2f97b67 --- /dev/null +++ b/modules/congestion_controller/rtp/BUILD.gn @@ -0,0 +1,98 @@ +# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("../../../webrtc.gni") + +config("bwe_test_logging") { + if (rtc_enable_bwe_test_logging) { + defines = [ "BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=1" ] + } else { + defines = [ "BWE_TEST_LOGGING_COMPILE_TIME_ENABLE=0" ] + } +} + +rtc_library("control_handler") { + visibility = [ "*" ] + sources = [ + "control_handler.cc", + "control_handler.h", + ] + + deps = [ + "../../../api/transport:network_control", + "../../../api/units:data_rate", + "../../../api/units:data_size", + "../../../api/units:time_delta", + "../../../rtc_base:checks", + "../../../rtc_base:safe_minmax", + "../../../rtc_base/synchronization:sequence_checker", + "../../../system_wrappers:field_trial", + "../../pacing", + ] + absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] + + if (!build_with_mozilla) { + deps += [ "../../../rtc_base" ] + } +} +rtc_library("transport_feedback") { + visibility = [ "*" ] + sources = [ + "transport_feedback_adapter.cc", + "transport_feedback_adapter.h", + "transport_feedback_demuxer.cc", + "transport_feedback_demuxer.h", + ] + + deps = [ + "../..:module_api_public", + "../../../api/transport:network_control", + "../../../api/units:data_size", + "../../../api/units:timestamp", + "../../../rtc_base", + "../../../rtc_base:checks", + 
"../../../rtc_base:rtc_base_approved", + "../../../rtc_base/network:sent_packet", + "../../../rtc_base/synchronization:mutex", + "../../../system_wrappers", + "../../../system_wrappers:field_trial", + "../../rtp_rtcp:rtp_rtcp_format", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/algorithm:container", + "//third_party/abseil-cpp/absl/types:optional", + ] +} + +if (rtc_include_tests) { + rtc_library("congestion_controller_unittests") { + testonly = true + + sources = [ + "transport_feedback_adapter_unittest.cc", + "transport_feedback_demuxer_unittest.cc", + ] + deps = [ + ":transport_feedback", + "../:congestion_controller", + "../../../api/transport:network_control", + "../../../logging:mocks", + "../../../rtc_base", + "../../../rtc_base:checks", + "../../../rtc_base:rtc_base_approved", + "../../../rtc_base/network:sent_packet", + "../../../system_wrappers", + "../../../test:field_trial", + "../../../test:test_support", + "../../pacing", + "../../remote_bitrate_estimator", + "../../rtp_rtcp:rtp_rtcp_format", + "//testing/gmock", + ] + } +} diff --git a/modules/congestion_controller/rtp/control_handler.cc b/modules/congestion_controller/rtp/control_handler.cc new file mode 100644 index 0000000..ba77aac --- /dev/null +++ b/modules/congestion_controller/rtp/control_handler.cc @@ -0,0 +1,87 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/congestion_controller/rtp/control_handler.h" + +#include +#include + +#include "api/units/data_rate.h" +#include "rtc_base/logging.h" +#include "rtc_base/numerics/safe_conversions.h" +#include "rtc_base/numerics/safe_minmax.h" +#include "system_wrappers/include/field_trial.h" + +namespace webrtc { +namespace { + +// By default, pacer emergency stops encoder when buffer reaches a high level. +bool IsPacerEmergencyStopDisabled() { + return field_trial::IsEnabled("WebRTC-DisablePacerEmergencyStop"); +} + +} // namespace +CongestionControlHandler::CongestionControlHandler() + : disable_pacer_emergency_stop_(IsPacerEmergencyStopDisabled()) { + sequenced_checker_.Detach(); +} + +CongestionControlHandler::~CongestionControlHandler() {} + +void CongestionControlHandler::SetTargetRate( + TargetTransferRate new_target_rate) { + RTC_DCHECK_RUN_ON(&sequenced_checker_); + RTC_CHECK(new_target_rate.at_time.IsFinite()); + last_incoming_ = new_target_rate; +} + +void CongestionControlHandler::SetNetworkAvailability(bool network_available) { + RTC_DCHECK_RUN_ON(&sequenced_checker_); + network_available_ = network_available; +} + +void CongestionControlHandler::SetPacerQueue(TimeDelta expected_queue_time) { + RTC_DCHECK_RUN_ON(&sequenced_checker_); + pacer_expected_queue_ms_ = expected_queue_time.ms(); +} + +absl::optional CongestionControlHandler::GetUpdate() { + RTC_DCHECK_RUN_ON(&sequenced_checker_); + if (!last_incoming_.has_value()) + return absl::nullopt; + TargetTransferRate new_outgoing = *last_incoming_; + DataRate log_target_rate = new_outgoing.target_rate; + bool pause_encoding = false; + if (!network_available_) { + pause_encoding = true; + } else if (!disable_pacer_emergency_stop_ && + pacer_expected_queue_ms_ > PacedSender::kMaxQueueLengthMs) { + pause_encoding = true; + } + if (pause_encoding) + new_outgoing.target_rate = DataRate::Zero(); + if (!last_reported_ || + last_reported_->target_rate != new_outgoing.target_rate || + 
(!new_outgoing.target_rate.IsZero() && + (last_reported_->network_estimate.loss_rate_ratio != + new_outgoing.network_estimate.loss_rate_ratio || + last_reported_->network_estimate.round_trip_time != + new_outgoing.network_estimate.round_trip_time))) { + if (encoder_paused_in_last_report_ != pause_encoding) + RTC_LOG(LS_INFO) << "Bitrate estimate state changed, BWE: " + << ToString(log_target_rate) << "."; + encoder_paused_in_last_report_ = pause_encoding; + last_reported_ = new_outgoing; + return new_outgoing; + } + return absl::nullopt; +} + +} // namespace webrtc diff --git a/modules/congestion_controller/rtp/control_handler.h b/modules/congestion_controller/rtp/control_handler.h new file mode 100644 index 0000000..9cce0d7 --- /dev/null +++ b/modules/congestion_controller/rtp/control_handler.h @@ -0,0 +1,53 @@ +/* + * Copyright (c) 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_CONGESTION_CONTROLLER_RTP_CONTROL_HANDLER_H_ +#define MODULES_CONGESTION_CONTROLLER_RTP_CONTROL_HANDLER_H_ + +#include + +#include "absl/types/optional.h" +#include "api/transport/network_types.h" +#include "api/units/data_size.h" +#include "api/units/time_delta.h" +#include "modules/pacing/paced_sender.h" +#include "rtc_base/constructor_magic.h" +#include "rtc_base/synchronization/sequence_checker.h" + +namespace webrtc { +// This is used to observe the network controller state and route calls to +// the proper handler. It also keeps cached values for safe asynchronous use. 
+// This makes sure that things running on the worker queue can't access state +// in RtpTransportControllerSend, which would risk causing data race on +// destruction unless members are properly ordered. +class CongestionControlHandler { + public: + CongestionControlHandler(); + ~CongestionControlHandler(); + + void SetTargetRate(TargetTransferRate new_target_rate); + void SetNetworkAvailability(bool network_available); + void SetPacerQueue(TimeDelta expected_queue_time); + absl::optional GetUpdate(); + + private: + absl::optional last_incoming_; + absl::optional last_reported_; + bool network_available_ = true; + bool encoder_paused_in_last_report_ = false; + + const bool disable_pacer_emergency_stop_; + int64_t pacer_expected_queue_ms_ = 0; + + SequenceChecker sequenced_checker_; + RTC_DISALLOW_COPY_AND_ASSIGN(CongestionControlHandler); +}; +} // namespace webrtc +#endif // MODULES_CONGESTION_CONTROLLER_RTP_CONTROL_HANDLER_H_ diff --git a/modules/congestion_controller/rtp/transport_feedback_adapter.cc b/modules/congestion_controller/rtp/transport_feedback_adapter.cc new file mode 100644 index 0000000..87691bf --- /dev/null +++ b/modules/congestion_controller/rtp/transport_feedback_adapter.cc @@ -0,0 +1,276 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/congestion_controller/rtp/transport_feedback_adapter.h" + +#include + +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "api/units/timestamp.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" +#include "rtc_base/checks.h" +#include "rtc_base/logging.h" +#include "system_wrappers/include/field_trial.h" + +namespace webrtc { + +constexpr TimeDelta kSendTimeHistoryWindow = TimeDelta::Seconds(60); + +void InFlightBytesTracker::AddInFlightPacketBytes( + const PacketFeedback& packet) { + RTC_DCHECK(packet.sent.send_time.IsFinite()); + auto it = in_flight_data_.find(packet.network_route); + if (it != in_flight_data_.end()) { + it->second += packet.sent.size; + } else { + in_flight_data_.insert({packet.network_route, packet.sent.size}); + } +} + +void InFlightBytesTracker::RemoveInFlightPacketBytes( + const PacketFeedback& packet) { + if (packet.sent.send_time.IsInfinite()) + return; + auto it = in_flight_data_.find(packet.network_route); + if (it != in_flight_data_.end()) { + RTC_DCHECK_GE(it->second, packet.sent.size); + it->second -= packet.sent.size; + if (it->second.IsZero()) + in_flight_data_.erase(it); + } +} + +DataSize InFlightBytesTracker::GetOutstandingData( + const rtc::NetworkRoute& network_route) const { + auto it = in_flight_data_.find(network_route); + if (it != in_flight_data_.end()) { + return it->second; + } else { + return DataSize::Zero(); + } +} + +// Comparator for consistent map with NetworkRoute as key. 
+bool InFlightBytesTracker::NetworkRouteComparator::operator()( + const rtc::NetworkRoute& a, + const rtc::NetworkRoute& b) const { + if (a.local.network_id() != b.local.network_id()) + return a.local.network_id() < b.local.network_id(); + if (a.remote.network_id() != b.remote.network_id()) + return a.remote.network_id() < b.remote.network_id(); + + if (a.local.adapter_id() != b.local.adapter_id()) + return a.local.adapter_id() < b.local.adapter_id(); + if (a.remote.adapter_id() != b.remote.adapter_id()) + return a.remote.adapter_id() < b.remote.adapter_id(); + + if (a.local.uses_turn() != b.local.uses_turn()) + return a.local.uses_turn() < b.local.uses_turn(); + if (a.remote.uses_turn() != b.remote.uses_turn()) + return a.remote.uses_turn() < b.remote.uses_turn(); + + return a.connected < b.connected; +} + +TransportFeedbackAdapter::TransportFeedbackAdapter() = default; + + +void TransportFeedbackAdapter::AddPacket(const RtpPacketSendInfo& packet_info, + size_t overhead_bytes, + Timestamp creation_time) { + PacketFeedback packet; + packet.creation_time = creation_time; + packet.sent.sequence_number = + seq_num_unwrapper_.Unwrap(packet_info.transport_sequence_number); + packet.sent.size = DataSize::Bytes(packet_info.length + overhead_bytes); + packet.sent.audio = packet_info.packet_type == RtpPacketMediaType::kAudio; + packet.network_route = network_route_; + packet.sent.pacing_info = packet_info.pacing_info; + + while (!history_.empty() && + creation_time - history_.begin()->second.creation_time > + kSendTimeHistoryWindow) { + // TODO(sprang): Warn if erasing (too many) old items? 
+ if (history_.begin()->second.sent.sequence_number > last_ack_seq_num_) + in_flight_.RemoveInFlightPacketBytes(history_.begin()->second); + history_.erase(history_.begin()); + } + history_.insert(std::make_pair(packet.sent.sequence_number, packet)); +} + +absl::optional TransportFeedbackAdapter::ProcessSentPacket( + const rtc::SentPacket& sent_packet) { + auto send_time = Timestamp::Millis(sent_packet.send_time_ms); + // TODO(srte): Only use one way to indicate that packet feedback is used. + if (sent_packet.info.included_in_feedback || sent_packet.packet_id != -1) { + int64_t unwrapped_seq_num = + seq_num_unwrapper_.Unwrap(sent_packet.packet_id); + auto it = history_.find(unwrapped_seq_num); + if (it != history_.end()) { + bool packet_retransmit = it->second.sent.send_time.IsFinite(); + it->second.sent.send_time = send_time; + last_send_time_ = std::max(last_send_time_, send_time); + // TODO(srte): Don't do this on retransmit. + if (!pending_untracked_size_.IsZero()) { + if (send_time < last_untracked_send_time_) + RTC_LOG(LS_WARNING) + << "appending acknowledged data for out of order packet. 
(Diff: " + << ToString(last_untracked_send_time_ - send_time) << " ms.)"; + it->second.sent.prior_unacked_data += pending_untracked_size_; + pending_untracked_size_ = DataSize::Zero(); + } + if (!packet_retransmit) { + if (it->second.sent.sequence_number > last_ack_seq_num_) + in_flight_.AddInFlightPacketBytes(it->second); + it->second.sent.data_in_flight = GetOutstandingData(); + return it->second.sent; + } + } + } else if (sent_packet.info.included_in_allocation) { + if (send_time < last_send_time_) { + RTC_LOG(LS_WARNING) << "ignoring untracked data for out of order packet."; + } + pending_untracked_size_ += + DataSize::Bytes(sent_packet.info.packet_size_bytes); + last_untracked_send_time_ = std::max(last_untracked_send_time_, send_time); + } + return absl::nullopt; +} + +absl::optional +TransportFeedbackAdapter::ProcessTransportFeedback( + const rtcp::TransportFeedback& feedback, + Timestamp feedback_receive_time) { + if (feedback.GetPacketStatusCount() == 0) { + RTC_LOG(LS_INFO) << "Empty transport feedback packet received."; + return absl::nullopt; + } + + TransportPacketsFeedback msg; + msg.feedback_time = feedback_receive_time; + + msg.prior_in_flight = in_flight_.GetOutstandingData(network_route_); + msg.packet_feedbacks = + ProcessTransportFeedbackInner(feedback, feedback_receive_time); + if (msg.packet_feedbacks.empty()) + return absl::nullopt; + + auto it = history_.find(last_ack_seq_num_); + if (it != history_.end()) { + msg.first_unacked_send_time = it->second.sent.send_time; + } + msg.data_in_flight = in_flight_.GetOutstandingData(network_route_); + + return msg; +} + +void TransportFeedbackAdapter::SetNetworkRoute( + const rtc::NetworkRoute& network_route) { + network_route_ = network_route; +} + +DataSize TransportFeedbackAdapter::GetOutstandingData() const { + return in_flight_.GetOutstandingData(network_route_); +} + +std::vector +TransportFeedbackAdapter::ProcessTransportFeedbackInner( + const rtcp::TransportFeedback& feedback, + Timestamp 
feedback_receive_time) { + // Add timestamp deltas to a local time base selected on first packet arrival. + // This won't be the true time base, but makes it easier to manually inspect + // time stamps. + if (last_timestamp_.IsInfinite()) { + current_offset_ = feedback_receive_time; + } else { + // TODO(srte): We shouldn't need to do rounding here. + const TimeDelta delta = feedback.GetBaseDelta(last_timestamp_) + .RoundDownTo(TimeDelta::Millis(1)); + // Protect against assigning current_offset_ negative value. + if (delta < Timestamp::Zero() - current_offset_) { + RTC_LOG(LS_WARNING) << "Unexpected feedback timestamp received."; + current_offset_ = feedback_receive_time; + } else { + current_offset_ += delta; + } + } + last_timestamp_ = feedback.GetBaseTime(); + + std::vector packet_result_vector; + packet_result_vector.reserve(feedback.GetPacketStatusCount()); + + size_t failed_lookups = 0; + size_t ignored = 0; + TimeDelta packet_offset = TimeDelta::Zero(); + for (const auto& packet : feedback.GetAllPackets()) { + int64_t seq_num = seq_num_unwrapper_.Unwrap(packet.sequence_number()); + + if (seq_num > last_ack_seq_num_) { + // Starts at history_.begin() if last_ack_seq_num_ < 0, since any valid + // sequence number is >= 0. + for (auto it = history_.upper_bound(last_ack_seq_num_); + it != history_.upper_bound(seq_num); ++it) { + in_flight_.RemoveInFlightPacketBytes(it->second); + } + last_ack_seq_num_ = seq_num; + } + + auto it = history_.find(seq_num); + if (it == history_.end()) { + ++failed_lookups; + continue; + } + + if (it->second.sent.send_time.IsInfinite()) { + // TODO(srte): Fix the tests that makes this happen and make this a + // DCHECK. 
+ RTC_DLOG(LS_ERROR) + << "Received feedback before packet was indicated as sent"; + continue; + } + + PacketFeedback packet_feedback = it->second; + if (packet.received()) { + packet_offset += packet.delta(); + packet_feedback.receive_time = + current_offset_ + packet_offset.RoundDownTo(TimeDelta::Millis(1)); + // Note: Lost packets are not removed from history because they might be + // reported as received by a later feedback. + history_.erase(it); + } + if (packet_feedback.network_route == network_route_) { + PacketResult result; + result.sent_packet = packet_feedback.sent; + result.receive_time = packet_feedback.receive_time; + packet_result_vector.push_back(result); + } else { + ++ignored; + } + } + + if (failed_lookups > 0) { + RTC_LOG(LS_WARNING) << "Failed to lookup send time for " << failed_lookups + << " packet" << (failed_lookups > 1 ? "s" : "") + << ". Send time history too small?"; + } + if (ignored > 0) { + RTC_LOG(LS_INFO) << "Ignoring " << ignored + << " packets because they were sent on a different route."; + } + + return packet_result_vector; +} + +} // namespace webrtc diff --git a/modules/congestion_controller/rtp/transport_feedback_adapter.h b/modules/congestion_controller/rtp/transport_feedback_adapter.h new file mode 100644 index 0000000..c41a7c6 --- /dev/null +++ b/modules/congestion_controller/rtp/transport_feedback_adapter.h @@ -0,0 +1,101 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_CONGESTION_CONTROLLER_RTP_TRANSPORT_FEEDBACK_ADAPTER_H_ +#define MODULES_CONGESTION_CONTROLLER_RTP_TRANSPORT_FEEDBACK_ADAPTER_H_ + +#include +#include +#include +#include + +#include "api/transport/network_types.h" +#include "modules/include/module_common_types_public.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "rtc_base/network/sent_packet.h" +#include "rtc_base/network_route.h" +#include "rtc_base/thread_annotations.h" +#include "rtc_base/thread_checker.h" + +namespace webrtc { + +struct PacketFeedback { + PacketFeedback() = default; + // Time corresponding to when this object was created. + Timestamp creation_time = Timestamp::MinusInfinity(); + SentPacket sent; + // Time corresponding to when the packet was received. Timestamped with the + // receiver's clock. For unreceived packet, Timestamp::PlusInfinity() is + // used. + Timestamp receive_time = Timestamp::PlusInfinity(); + + // The network route that this packet is associated with. 
+ rtc::NetworkRoute network_route; +}; + +class InFlightBytesTracker { + public: + void AddInFlightPacketBytes(const PacketFeedback& packet); + void RemoveInFlightPacketBytes(const PacketFeedback& packet); + DataSize GetOutstandingData(const rtc::NetworkRoute& network_route) const; + + private: + struct NetworkRouteComparator { + bool operator()(const rtc::NetworkRoute& a, + const rtc::NetworkRoute& b) const; + }; + std::map in_flight_data_; +}; + +class TransportFeedbackAdapter { + public: + TransportFeedbackAdapter(); + + void AddPacket(const RtpPacketSendInfo& packet_info, + size_t overhead_bytes, + Timestamp creation_time); + absl::optional ProcessSentPacket( + const rtc::SentPacket& sent_packet); + + absl::optional ProcessTransportFeedback( + const rtcp::TransportFeedback& feedback, + Timestamp feedback_receive_time); + + void SetNetworkRoute(const rtc::NetworkRoute& network_route); + + DataSize GetOutstandingData() const; + + private: + enum class SendTimeHistoryStatus { kNotAdded, kOk, kDuplicate }; + + std::vector ProcessTransportFeedbackInner( + const rtcp::TransportFeedback& feedback, + Timestamp feedback_receive_time); + + DataSize pending_untracked_size_ = DataSize::Zero(); + Timestamp last_send_time_ = Timestamp::MinusInfinity(); + Timestamp last_untracked_send_time_ = Timestamp::MinusInfinity(); + SequenceNumberUnwrapper seq_num_unwrapper_; + std::map history_; + + // Sequence numbers are never negative, using -1 as it always < a real + // sequence number. 
+ int64_t last_ack_seq_num_ = -1; + InFlightBytesTracker in_flight_; + + Timestamp current_offset_ = Timestamp::MinusInfinity(); + TimeDelta last_timestamp_ = TimeDelta::MinusInfinity(); + + rtc::NetworkRoute network_route_; +}; + +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_RTP_TRANSPORT_FEEDBACK_ADAPTER_H_ diff --git a/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc b/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc new file mode 100644 index 0000000..3849cb3 --- /dev/null +++ b/modules/congestion_controller/rtp/transport_feedback_adapter_unittest.cc @@ -0,0 +1,417 @@ +/* + * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/congestion_controller/rtp/transport_feedback_adapter.h" + +#include +#include +#include + +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" +#include "rtc_base/checks.h" +#include "rtc_base/numerics/safe_conversions.h" +#include "system_wrappers/include/clock.h" +#include "test/field_trial.h" +#include "test/gmock.h" +#include "test/gtest.h" + +using ::testing::_; +using ::testing::Invoke; + +namespace webrtc { +namespace webrtc_cc { + +namespace { +const PacedPacketInfo kPacingInfo0(0, 5, 2000); +const PacedPacketInfo kPacingInfo1(1, 8, 4000); +const PacedPacketInfo kPacingInfo2(2, 14, 7000); +const PacedPacketInfo kPacingInfo3(3, 20, 10000); +const PacedPacketInfo kPacingInfo4(4, 22, 10000); + +void ComparePacketFeedbackVectors(const std::vector& truth, + const std::vector& input) { + ASSERT_EQ(truth.size(), input.size()); + size_t len = truth.size(); + // truth contains the input data for the test, and input is what will be + // sent to the bandwidth estimator. truth.arrival_tims_ms is used to + // populate the transport feedback messages. As these times may be changed + // (because of resolution limits in the packets, and because of the time + // base adjustment performed by the TransportFeedbackAdapter at the first + // packet, the truth[x].arrival_time and input[x].arrival_time may not be + // equal. However, the difference must be the same for all x. 
+ TimeDelta arrival_time_delta = truth[0].receive_time - input[0].receive_time; + for (size_t i = 0; i < len; ++i) { + RTC_CHECK(truth[i].receive_time.IsFinite()); + if (input[i].receive_time.IsFinite()) { + EXPECT_EQ(truth[i].receive_time - input[i].receive_time, + arrival_time_delta); + } + EXPECT_EQ(truth[i].sent_packet.send_time, input[i].sent_packet.send_time); + EXPECT_EQ(truth[i].sent_packet.sequence_number, + input[i].sent_packet.sequence_number); + EXPECT_EQ(truth[i].sent_packet.size, input[i].sent_packet.size); + EXPECT_EQ(truth[i].sent_packet.pacing_info, + input[i].sent_packet.pacing_info); + } +} + +PacketResult CreatePacket(int64_t receive_time_ms, + int64_t send_time_ms, + int64_t sequence_number, + size_t payload_size, + const PacedPacketInfo& pacing_info) { + PacketResult res; + res.receive_time = Timestamp::Millis(receive_time_ms); + res.sent_packet.send_time = Timestamp::Millis(send_time_ms); + res.sent_packet.sequence_number = sequence_number; + res.sent_packet.size = DataSize::Bytes(payload_size); + res.sent_packet.pacing_info = pacing_info; + return res; +} + +} // namespace + +namespace test { + +class MockStreamFeedbackObserver : public webrtc::StreamFeedbackObserver { + public: + MOCK_METHOD(void, + OnPacketFeedbackVector, + (std::vector packet_feedback_vector), + (override)); +}; + +class TransportFeedbackAdapterTest : public ::testing::Test { + public: + TransportFeedbackAdapterTest() : clock_(0) {} + + virtual ~TransportFeedbackAdapterTest() {} + + virtual void SetUp() { adapter_.reset(new TransportFeedbackAdapter()); } + + virtual void TearDown() { adapter_.reset(); } + + protected: + void OnReceivedEstimatedBitrate(uint32_t bitrate) {} + + void OnReceivedRtcpReceiverReport(const ReportBlockList& report_blocks, + int64_t rtt, + int64_t now_ms) {} + + void OnSentPacket(const PacketResult& packet_feedback) { + RtpPacketSendInfo packet_info; + packet_info.ssrc = kSsrc; + packet_info.transport_sequence_number = + 
packet_feedback.sent_packet.sequence_number; + packet_info.rtp_sequence_number = 0; + packet_info.length = packet_feedback.sent_packet.size.bytes(); + packet_info.pacing_info = packet_feedback.sent_packet.pacing_info; + packet_info.packet_type = RtpPacketMediaType::kVideo; + adapter_->AddPacket(RtpPacketSendInfo(packet_info), 0u, + clock_.CurrentTime()); + adapter_->ProcessSentPacket(rtc::SentPacket( + packet_feedback.sent_packet.sequence_number, + packet_feedback.sent_packet.send_time.ms(), rtc::PacketInfo())); + } + + static constexpr uint32_t kSsrc = 8492; + + SimulatedClock clock_; + std::unique_ptr adapter_; +}; + +TEST_F(TransportFeedbackAdapterTest, AdaptsFeedbackAndPopulatesSendTimes) { + std::vector packets; + packets.push_back(CreatePacket(100, 200, 0, 1500, kPacingInfo0)); + packets.push_back(CreatePacket(110, 210, 1, 1500, kPacingInfo0)); + packets.push_back(CreatePacket(120, 220, 2, 1500, kPacingInfo0)); + packets.push_back(CreatePacket(130, 230, 3, 1500, kPacingInfo1)); + packets.push_back(CreatePacket(140, 240, 4, 1500, kPacingInfo1)); + + for (const auto& packet : packets) + OnSentPacket(packet); + + rtcp::TransportFeedback feedback; + feedback.SetBase(packets[0].sent_packet.sequence_number, + packets[0].receive_time.us()); + + for (const auto& packet : packets) { + EXPECT_TRUE(feedback.AddReceivedPacket(packet.sent_packet.sequence_number, + packet.receive_time.us())); + } + + feedback.Build(); + + auto result = + adapter_->ProcessTransportFeedback(feedback, clock_.CurrentTime()); + ComparePacketFeedbackVectors(packets, result->packet_feedbacks); +} + +TEST_F(TransportFeedbackAdapterTest, FeedbackVectorReportsUnreceived) { + std::vector sent_packets = { + CreatePacket(100, 220, 0, 1500, kPacingInfo0), + CreatePacket(110, 210, 1, 1500, kPacingInfo0), + CreatePacket(120, 220, 2, 1500, kPacingInfo0), + CreatePacket(130, 230, 3, 1500, kPacingInfo0), + CreatePacket(140, 240, 4, 1500, kPacingInfo0), + CreatePacket(150, 250, 5, 1500, kPacingInfo0), + 
CreatePacket(160, 260, 6, 1500, kPacingInfo0)}; + + for (const auto& packet : sent_packets) + OnSentPacket(packet); + + // Note: Important to include the last packet, as only unreceived packets in + // between received packets can be inferred. + std::vector received_packets = { + sent_packets[0], sent_packets[2], sent_packets[6]}; + + rtcp::TransportFeedback feedback; + feedback.SetBase(received_packets[0].sent_packet.sequence_number, + received_packets[0].receive_time.us()); + + for (const auto& packet : received_packets) { + EXPECT_TRUE(feedback.AddReceivedPacket(packet.sent_packet.sequence_number, + packet.receive_time.us())); + } + + feedback.Build(); + + auto res = adapter_->ProcessTransportFeedback(feedback, clock_.CurrentTime()); + ComparePacketFeedbackVectors(sent_packets, res->packet_feedbacks); +} + +TEST_F(TransportFeedbackAdapterTest, HandlesDroppedPackets) { + std::vector packets; + packets.push_back(CreatePacket(100, 200, 0, 1500, kPacingInfo0)); + packets.push_back(CreatePacket(110, 210, 1, 1500, kPacingInfo1)); + packets.push_back(CreatePacket(120, 220, 2, 1500, kPacingInfo2)); + packets.push_back(CreatePacket(130, 230, 3, 1500, kPacingInfo3)); + packets.push_back(CreatePacket(140, 240, 4, 1500, kPacingInfo4)); + + const uint16_t kSendSideDropBefore = 1; + const uint16_t kReceiveSideDropAfter = 3; + + for (const auto& packet : packets) { + if (packet.sent_packet.sequence_number >= kSendSideDropBefore) + OnSentPacket(packet); + } + + rtcp::TransportFeedback feedback; + feedback.SetBase(packets[0].sent_packet.sequence_number, + packets[0].receive_time.us()); + + for (const auto& packet : packets) { + if (packet.sent_packet.sequence_number <= kReceiveSideDropAfter) { + EXPECT_TRUE(feedback.AddReceivedPacket(packet.sent_packet.sequence_number, + packet.receive_time.us())); + } + } + + feedback.Build(); + + std::vector expected_packets( + packets.begin() + kSendSideDropBefore, + packets.begin() + kReceiveSideDropAfter + 1); + // Packets that have timed 
out on the send-side have lost the + // information stored on the send-side. And they will not be reported to + // observers since we won't know that they come from the same networks. + + auto res = adapter_->ProcessTransportFeedback(feedback, clock_.CurrentTime()); + ComparePacketFeedbackVectors(expected_packets, res->packet_feedbacks); +} + +TEST_F(TransportFeedbackAdapterTest, SendTimeWrapsBothWays) { + int64_t kHighArrivalTimeMs = rtcp::TransportFeedback::kDeltaScaleFactor * + static_cast(1 << 8) * + static_cast((1 << 23) - 1) / 1000; + std::vector packets; + packets.push_back( + CreatePacket(kHighArrivalTimeMs + 64, 210, 0, 1500, PacedPacketInfo())); + packets.push_back( + CreatePacket(kHighArrivalTimeMs - 64, 210, 1, 1500, PacedPacketInfo())); + packets.push_back( + CreatePacket(kHighArrivalTimeMs, 220, 2, 1500, PacedPacketInfo())); + + for (const auto& packet : packets) + OnSentPacket(packet); + + for (size_t i = 0; i < packets.size(); ++i) { + std::unique_ptr feedback( + new rtcp::TransportFeedback()); + feedback->SetBase(packets[i].sent_packet.sequence_number, + packets[i].receive_time.us()); + + EXPECT_TRUE(feedback->AddReceivedPacket( + packets[i].sent_packet.sequence_number, packets[i].receive_time.us())); + + rtc::Buffer raw_packet = feedback->Build(); + feedback = rtcp::TransportFeedback::ParseFrom(raw_packet.data(), + raw_packet.size()); + + std::vector expected_packets; + expected_packets.push_back(packets[i]); + + auto res = adapter_->ProcessTransportFeedback(*feedback.get(), + clock_.CurrentTime()); + ComparePacketFeedbackVectors(expected_packets, res->packet_feedbacks); + } +} + +TEST_F(TransportFeedbackAdapterTest, HandlesArrivalReordering) { + std::vector packets; + packets.push_back(CreatePacket(120, 200, 0, 1500, kPacingInfo0)); + packets.push_back(CreatePacket(110, 210, 1, 1500, kPacingInfo0)); + packets.push_back(CreatePacket(100, 220, 2, 1500, kPacingInfo0)); + + for (const auto& packet : packets) + OnSentPacket(packet); + + 
rtcp::TransportFeedback feedback; + feedback.SetBase(packets[0].sent_packet.sequence_number, + packets[0].receive_time.us()); + + for (const auto& packet : packets) { + EXPECT_TRUE(feedback.AddReceivedPacket(packet.sent_packet.sequence_number, + packet.receive_time.us())); + } + + feedback.Build(); + + // Adapter keeps the packets ordered by sequence number (which is itself + // assigned by the order of transmission). Reordering by some other criteria, + // eg. arrival time, is up to the observers. + auto res = adapter_->ProcessTransportFeedback(feedback, clock_.CurrentTime()); + ComparePacketFeedbackVectors(packets, res->packet_feedbacks); +} + +TEST_F(TransportFeedbackAdapterTest, TimestampDeltas) { + std::vector sent_packets; + // TODO(srte): Consider using us resolution in the constants. + const TimeDelta kSmallDelta = + TimeDelta::Micros(rtcp::TransportFeedback::kDeltaScaleFactor * 0xFF) + .RoundDownTo(TimeDelta::Millis(1)); + const TimeDelta kLargePositiveDelta = + TimeDelta::Micros(rtcp::TransportFeedback::kDeltaScaleFactor * + std::numeric_limits::max()) + .RoundDownTo(TimeDelta::Millis(1)); + const TimeDelta kLargeNegativeDelta = + TimeDelta::Micros(rtcp::TransportFeedback::kDeltaScaleFactor * + std::numeric_limits::min()) + .RoundDownTo(TimeDelta::Millis(1)); + + PacketResult packet_feedback; + packet_feedback.sent_packet.sequence_number = 1; + packet_feedback.sent_packet.send_time = Timestamp::Millis(100); + packet_feedback.receive_time = Timestamp::Millis(200); + packet_feedback.sent_packet.size = DataSize::Bytes(1500); + sent_packets.push_back(packet_feedback); + + // TODO(srte): This rounding maintains previous behavior, but should ot be + // required. 
+ packet_feedback.sent_packet.send_time += kSmallDelta; + packet_feedback.receive_time += kSmallDelta; + ++packet_feedback.sent_packet.sequence_number; + sent_packets.push_back(packet_feedback); + + packet_feedback.sent_packet.send_time += kLargePositiveDelta; + packet_feedback.receive_time += kLargePositiveDelta; + ++packet_feedback.sent_packet.sequence_number; + sent_packets.push_back(packet_feedback); + + packet_feedback.sent_packet.send_time += kLargeNegativeDelta; + packet_feedback.receive_time += kLargeNegativeDelta; + ++packet_feedback.sent_packet.sequence_number; + sent_packets.push_back(packet_feedback); + + // Too large, delta - will need two feedback messages. + packet_feedback.sent_packet.send_time += + kLargePositiveDelta + TimeDelta::Millis(1); + packet_feedback.receive_time += kLargePositiveDelta + TimeDelta::Millis(1); + ++packet_feedback.sent_packet.sequence_number; + + // Packets will be added to send history. + for (const auto& packet : sent_packets) + OnSentPacket(packet); + OnSentPacket(packet_feedback); + + // Create expected feedback and send into adapter. + std::unique_ptr feedback( + new rtcp::TransportFeedback()); + feedback->SetBase(sent_packets[0].sent_packet.sequence_number, + sent_packets[0].receive_time.us()); + + for (const auto& packet : sent_packets) { + EXPECT_TRUE(feedback->AddReceivedPacket(packet.sent_packet.sequence_number, + packet.receive_time.us())); + } + EXPECT_FALSE( + feedback->AddReceivedPacket(packet_feedback.sent_packet.sequence_number, + packet_feedback.receive_time.us())); + + rtc::Buffer raw_packet = feedback->Build(); + feedback = + rtcp::TransportFeedback::ParseFrom(raw_packet.data(), raw_packet.size()); + + std::vector received_feedback; + + EXPECT_TRUE(feedback.get() != nullptr); + auto res = + adapter_->ProcessTransportFeedback(*feedback.get(), clock_.CurrentTime()); + ComparePacketFeedbackVectors(sent_packets, res->packet_feedbacks); + + // Create a new feedback message and add the trailing item. 
+ feedback.reset(new rtcp::TransportFeedback()); + feedback->SetBase(packet_feedback.sent_packet.sequence_number, + packet_feedback.receive_time.us()); + EXPECT_TRUE( + feedback->AddReceivedPacket(packet_feedback.sent_packet.sequence_number, + packet_feedback.receive_time.us())); + raw_packet = feedback->Build(); + feedback = + rtcp::TransportFeedback::ParseFrom(raw_packet.data(), raw_packet.size()); + + EXPECT_TRUE(feedback.get() != nullptr); + { + auto res = adapter_->ProcessTransportFeedback(*feedback.get(), + clock_.CurrentTime()); + std::vector expected_packets; + expected_packets.push_back(packet_feedback); + ComparePacketFeedbackVectors(expected_packets, res->packet_feedbacks); + } +} + +TEST_F(TransportFeedbackAdapterTest, IgnoreDuplicatePacketSentCalls) { + auto packet = CreatePacket(100, 200, 0, 1500, kPacingInfo0); + + // Add a packet and then mark it as sent. + RtpPacketSendInfo packet_info; + packet_info.ssrc = kSsrc; + packet_info.transport_sequence_number = packet.sent_packet.sequence_number; + packet_info.length = packet.sent_packet.size.bytes(); + packet_info.pacing_info = packet.sent_packet.pacing_info; + packet_info.packet_type = RtpPacketMediaType::kVideo; + adapter_->AddPacket(packet_info, 0u, clock_.CurrentTime()); + absl::optional sent_packet = adapter_->ProcessSentPacket( + rtc::SentPacket(packet.sent_packet.sequence_number, + packet.sent_packet.send_time.ms(), rtc::PacketInfo())); + EXPECT_TRUE(sent_packet.has_value()); + + // Call ProcessSentPacket() again with the same sequence number. This packet + // has already been marked as sent and the call should be ignored. 
+ absl::optional duplicate_packet = adapter_->ProcessSentPacket( + rtc::SentPacket(packet.sent_packet.sequence_number, + packet.sent_packet.send_time.ms(), rtc::PacketInfo())); + EXPECT_FALSE(duplicate_packet.has_value()); +} + +} // namespace test +} // namespace webrtc_cc +} // namespace webrtc diff --git a/modules/congestion_controller/rtp/transport_feedback_demuxer.cc b/modules/congestion_controller/rtp/transport_feedback_demuxer.cc new file mode 100644 index 0000000..c958a1c --- /dev/null +++ b/modules/congestion_controller/rtp/transport_feedback_demuxer.cc @@ -0,0 +1,88 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/congestion_controller/rtp/transport_feedback_demuxer.h" +#include "absl/algorithm/container.h" +#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" + +namespace webrtc { +namespace { +static const size_t kMaxPacketsInHistory = 5000; +} +void TransportFeedbackDemuxer::RegisterStreamFeedbackObserver( + std::vector ssrcs, + StreamFeedbackObserver* observer) { + MutexLock lock(&observers_lock_); + RTC_DCHECK(observer); + RTC_DCHECK(absl::c_find_if(observers_, [=](const auto& pair) { + return pair.second == observer; + }) == observers_.end()); + observers_.push_back({ssrcs, observer}); +} + +void TransportFeedbackDemuxer::DeRegisterStreamFeedbackObserver( + StreamFeedbackObserver* observer) { + MutexLock lock(&observers_lock_); + RTC_DCHECK(observer); + const auto it = absl::c_find_if( + observers_, [=](const auto& pair) { return pair.second == observer; }); + RTC_DCHECK(it != observers_.end()); + observers_.erase(it); +} + +void 
TransportFeedbackDemuxer::AddPacket(const RtpPacketSendInfo& packet_info) { + MutexLock lock(&lock_); + if (packet_info.ssrc != 0) { + StreamFeedbackObserver::StreamPacketInfo info; + info.ssrc = packet_info.ssrc; + info.rtp_sequence_number = packet_info.rtp_sequence_number; + info.received = false; + history_.insert( + {seq_num_unwrapper_.Unwrap(packet_info.transport_sequence_number), + info}); + } + while (history_.size() > kMaxPacketsInHistory) { + history_.erase(history_.begin()); + } +} + +void TransportFeedbackDemuxer::OnTransportFeedback( + const rtcp::TransportFeedback& feedback) { + std::vector stream_feedbacks; + { + MutexLock lock(&lock_); + for (const auto& packet : feedback.GetAllPackets()) { + int64_t seq_num = + seq_num_unwrapper_.UnwrapWithoutUpdate(packet.sequence_number()); + auto it = history_.find(seq_num); + if (it != history_.end()) { + auto packet_info = it->second; + packet_info.received = packet.received(); + stream_feedbacks.push_back(packet_info); + if (packet.received()) + history_.erase(it); + } + } + } + + MutexLock lock(&observers_lock_); + for (auto& observer : observers_) { + std::vector selected_feedback; + for (const auto& packet_info : stream_feedbacks) { + if (absl::c_count(observer.first, packet_info.ssrc) > 0) { + selected_feedback.push_back(packet_info); + } + } + if (!selected_feedback.empty()) { + observer.second->OnPacketFeedbackVector(std::move(selected_feedback)); + } + } +} + +} // namespace webrtc diff --git a/modules/congestion_controller/rtp/transport_feedback_demuxer.h b/modules/congestion_controller/rtp/transport_feedback_demuxer.h new file mode 100644 index 0000000..634a37ea --- /dev/null +++ b/modules/congestion_controller/rtp/transport_feedback_demuxer.h @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_CONGESTION_CONTROLLER_RTP_TRANSPORT_FEEDBACK_DEMUXER_H_ +#define MODULES_CONGESTION_CONTROLLER_RTP_TRANSPORT_FEEDBACK_DEMUXER_H_ + +#include +#include +#include + +#include "modules/include/module_common_types_public.h" +#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "rtc_base/synchronization/mutex.h" + +namespace webrtc { + +class TransportFeedbackDemuxer : public StreamFeedbackProvider { + public: + // Implements StreamFeedbackProvider interface + void RegisterStreamFeedbackObserver( + std::vector ssrcs, + StreamFeedbackObserver* observer) override; + void DeRegisterStreamFeedbackObserver( + StreamFeedbackObserver* observer) override; + void AddPacket(const RtpPacketSendInfo& packet_info); + void OnTransportFeedback(const rtcp::TransportFeedback& feedback); + + private: + Mutex lock_; + SequenceNumberUnwrapper seq_num_unwrapper_ RTC_GUARDED_BY(&lock_); + std::map history_ + RTC_GUARDED_BY(&lock_); + + // Maps a set of ssrcs to corresponding observer. Vectors are used rather than + // set/map to ensure that the processing order is consistent independently of + // the randomized ssrcs. + Mutex observers_lock_; + std::vector, StreamFeedbackObserver*>> + observers_ RTC_GUARDED_BY(&observers_lock_); +}; +} // namespace webrtc + +#endif // MODULES_CONGESTION_CONTROLLER_RTP_TRANSPORT_FEEDBACK_DEMUXER_H_ diff --git a/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc b/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc new file mode 100644 index 0000000..6514a4e --- /dev/null +++ b/modules/congestion_controller/rtp/transport_feedback_demuxer_unittest.cc @@ -0,0 +1,68 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/congestion_controller/rtp/transport_feedback_demuxer.h" + +#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" +#include "test/gmock.h" +#include "test/gtest.h" + +namespace webrtc { +namespace { + +using ::testing::_; +static constexpr uint32_t kSsrc = 8492; + +class MockStreamFeedbackObserver : public webrtc::StreamFeedbackObserver { + public: + MOCK_METHOD(void, + OnPacketFeedbackVector, + (std::vector packet_feedback_vector), + (override)); +}; + +RtpPacketSendInfo CreatePacket(uint32_t ssrc, + int16_t rtp_sequence_number, + int64_t transport_sequence_number) { + RtpPacketSendInfo res; + res.ssrc = ssrc; + res.transport_sequence_number = transport_sequence_number; + res.rtp_sequence_number = rtp_sequence_number; + return res; +} +} // namespace +TEST(TransportFeedbackDemuxerTest, ObserverSanity) { + TransportFeedbackDemuxer demuxer; + MockStreamFeedbackObserver mock; + demuxer.RegisterStreamFeedbackObserver({kSsrc}, &mock); + + demuxer.AddPacket(CreatePacket(kSsrc, 55, 1)); + demuxer.AddPacket(CreatePacket(kSsrc, 56, 2)); + demuxer.AddPacket(CreatePacket(kSsrc, 57, 3)); + + rtcp::TransportFeedback feedback; + feedback.SetBase(1, 1000); + ASSERT_TRUE(feedback.AddReceivedPacket(1, 1000)); + ASSERT_TRUE(feedback.AddReceivedPacket(2, 2000)); + ASSERT_TRUE(feedback.AddReceivedPacket(3, 3000)); + + EXPECT_CALL(mock, OnPacketFeedbackVector(_)).Times(1); + demuxer.OnTransportFeedback(feedback); + + demuxer.DeRegisterStreamFeedbackObserver(&mock); + + demuxer.AddPacket(CreatePacket(kSsrc, 58, 4)); + rtcp::TransportFeedback second_feedback; + second_feedback.SetBase(4, 4000); + 
ASSERT_TRUE(second_feedback.AddReceivedPacket(4, 4000)); + + EXPECT_CALL(mock, OnPacketFeedbackVector(_)).Times(0); + demuxer.OnTransportFeedback(second_feedback); +} +} // namespace webrtc diff --git a/modules/desktop_capture/BUILD.gn b/modules/desktop_capture/BUILD.gn new file mode 100644 index 0000000..b40b743 --- /dev/null +++ b/modules/desktop_capture/BUILD.gn @@ -0,0 +1,548 @@ +# Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. +# +# Use of this source code is governed by a BSD-style license +# that can be found in the LICENSE file in the root of the source +# tree. An additional intellectual property rights grant can be found +# in the file PATENTS. All contributing project authors may +# be found in the AUTHORS file in the root of the source tree. + +import("//build/config/linux/pkg_config.gni") +import("//build/config/ui.gni") +import("//tools/generate_stubs/rules.gni") +import("../../webrtc.gni") + +use_desktop_capture_differ_sse2 = current_cpu == "x86" || current_cpu == "x64" + +config("x11_config") { + if (rtc_use_x11_extensions) { + defines = [ "WEBRTC_USE_X11" ] + } +} + +rtc_library("primitives") { + visibility = [ "*" ] + sources = [ + "desktop_capture_types.h", + "desktop_frame.cc", + "desktop_frame.h", + "desktop_geometry.cc", + "desktop_geometry.h", + "desktop_region.cc", + "desktop_region.h", + "shared_desktop_frame.cc", + "shared_desktop_frame.h", + "shared_memory.cc", + "shared_memory.h", + ] + + deps = [ + "../../api:scoped_refptr", + "../../rtc_base:checks", + "../../rtc_base/system:rtc_export", + ] + + if (!build_with_mozilla) { + deps += [ "../../rtc_base" ] # TODO(kjellander): Cleanup in + # bugs.webrtc.org/3806. 
+ } +} + +if (rtc_include_tests) { + rtc_library("desktop_capture_modules_tests") { + testonly = true + + defines = [] + sources = [] + deps = [ + "../../api:function_view", + "../../api:scoped_refptr", + "../../rtc_base:checks", + ] + if (rtc_desktop_capture_supported) { + deps += [ + ":desktop_capture_mock", + ":primitives", + ":screen_drawer", + "../../rtc_base", + "../../rtc_base:rtc_base_approved", + "../../rtc_base/third_party/base64", + "../../system_wrappers", + "../../test:test_support", + "../../test:video_test_support", + ] + sources += [ + "screen_capturer_integration_test.cc", + "screen_drawer_unittest.cc", + "window_finder_unittest.cc", + ] + public_configs = [ ":x11_config" ] + } + } + + rtc_library("desktop_capture_unittests") { + testonly = true + + defines = [] + sources = [ + "blank_detector_desktop_capturer_wrapper_unittest.cc", + "cropped_desktop_frame_unittest.cc", + "desktop_and_cursor_composer_unittest.cc", + "desktop_capturer_differ_wrapper_unittest.cc", + "desktop_frame_rotation_unittest.cc", + "desktop_frame_unittest.cc", + "desktop_geometry_unittest.cc", + "desktop_region_unittest.cc", + "differ_block_unittest.cc", + "fallback_desktop_capturer_wrapper_unittest.cc", + "mouse_cursor_monitor_unittest.cc", + "rgba_color_unittest.cc", + "test_utils.cc", + "test_utils.h", + "test_utils_unittest.cc", + ] + if (is_win) { + sources += [ + "win/cursor_unittest.cc", + "win/cursor_unittest_resources.h", + "win/cursor_unittest_resources.rc", + "win/screen_capture_utils_unittest.cc", + "win/screen_capturer_win_directx_unittest.cc", + ] + } + deps = [ + ":desktop_capture", + ":desktop_capture_mock", + ":primitives", + "../../rtc_base:checks", + "../../rtc_base:rtc_base_approved", + "../../system_wrappers:cpu_features_api", + "../../test:test_support", + ] + if (rtc_desktop_capture_supported) { + sources += [ + "screen_capturer_helper_unittest.cc", + "screen_capturer_mac_unittest.cc", + "screen_capturer_unittest.cc", + "window_capturer_unittest.cc", + ] 
+ deps += [ ":desktop_capture_mock" ] + public_configs = [ ":x11_config" ] + } + } + + rtc_library("screen_drawer") { + testonly = true + + public_deps = [ ":desktop_capture" ] # no-presubmit-check TODO(webrtc:8603) + + sources = [ + "screen_drawer.cc", + "screen_drawer.h", + ] + + if (is_linux) { + sources += [ "screen_drawer_linux.cc" ] + } + + if (is_mac) { + sources += [ "screen_drawer_mac.cc" ] + } + + if (is_win) { + sources += [ "screen_drawer_win.cc" ] + } + + deps = [ + ":primitives", + "../../api:scoped_refptr", + "../../rtc_base:checks", + "../../rtc_base:rtc_base_approved", + "../../system_wrappers", + ] + + if (is_posix || is_fuchsia) { + sources += [ + "screen_drawer_lock_posix.cc", + "screen_drawer_lock_posix.h", + ] + } + } + + rtc_library("desktop_capture_mock") { + testonly = true + + public_deps = [ ":desktop_capture" ] # no-presubmit-check TODO(webrtc:8603) + + sources = [ + "mock_desktop_capturer_callback.cc", + "mock_desktop_capturer_callback.h", + ] + + deps = [ + ":primitives", + "../../rtc_base:rtc_base_approved", + "../../test:test_support", + ] + } +} + +if (is_linux) { + if (rtc_use_pipewire) { + pkg_config("gio") { + packages = [ + "gio-2.0", + "gio-unix-2.0", + ] + } + + if (rtc_link_pipewire) { + pkg_config("pipewire") { + packages = [ "libpipewire-0.2" ] + } + } else { + # When libpipewire is not directly linked, use stubs to allow for dlopening of + # the binary. 
+ generate_stubs("pipewire_stubs") { + configs = [ "../../:common_config" ] + deps = [ "../../rtc_base" ] + extra_header = "linux/pipewire_stub_header.fragment" + logging_function = "RTC_LOG(LS_VERBOSE)" + logging_include = "rtc_base/logging.h" + output_name = "linux/pipewire_stubs" + path_from_source = "modules/desktop_capture/linux" + sigs = [ "linux/pipewire.sigs" ] + } + } + + config("pipewire_config") { + defines = [ "WEBRTC_USE_PIPEWIRE" ] + if (!rtc_link_pipewire) { + defines += [ "WEBRTC_DLOPEN_PIPEWIRE" ] + } + } + } +} + +rtc_source_set("desktop_capture") { + visibility = [ "*" ] + public_deps = # no-presubmit-check TODO(webrtc:8603) + [ ":desktop_capture_generic" ] + if (is_mac) { + public_deps += [ ":desktop_capture_objc" ] + } +} + +if (is_mac) { + rtc_library("desktop_capture_objc") { + visibility = [ ":desktop_capture" ] + sources = [ + "mac/desktop_configuration.mm", + "mac/desktop_frame_cgimage.h", + "mac/desktop_frame_cgimage.mm", + "mac/desktop_frame_iosurface.h", + "mac/desktop_frame_iosurface.mm", + "mac/desktop_frame_provider.h", + "mac/desktop_frame_provider.mm", + "mac/screen_capturer_mac.h", + "mac/screen_capturer_mac.mm", + "mouse_cursor_monitor_mac.mm", + "screen_capturer_darwin.mm", + "window_capturer_mac.mm", + "window_finder_mac.h", + "window_finder_mac.mm", + ] + deps = [ + ":desktop_capture_generic", + ":primitives", + "../../api:scoped_refptr", + "../../rtc_base", + "../../rtc_base:checks", + "../../rtc_base:rtc_base_approved", + "../../rtc_base/synchronization:rw_lock_wrapper", + "../../rtc_base/system:rtc_export", + "../../sdk:helpers_objc", + ] + frameworks = [ + "AppKit.framework", + "IOKit.framework", + "IOSurface.framework", + ] + } +} + +rtc_library("desktop_capture_generic") { + defines = [] + public_configs = [ ":x11_config" ] + visibility = [ + ":desktop_capture", + ":desktop_capture_objc", + ] + sources = [ + "blank_detector_desktop_capturer_wrapper.cc", + "blank_detector_desktop_capturer_wrapper.h", + 
"capture_result_desktop_capturer_wrapper.cc", + "capture_result_desktop_capturer_wrapper.h", + "cropped_desktop_frame.cc", + "cropped_desktop_frame.h", + "cropping_window_capturer.cc", + "cropping_window_capturer.h", + "desktop_and_cursor_composer.cc", + "desktop_and_cursor_composer.h", + "desktop_capture_options.cc", + "desktop_capture_options.h", + "desktop_capturer.cc", + "desktop_capturer.h", + "desktop_capturer_differ_wrapper.cc", + "desktop_capturer_differ_wrapper.h", + "desktop_capturer_wrapper.cc", + "desktop_capturer_wrapper.h", + "desktop_frame_generator.cc", + "desktop_frame_generator.h", + "desktop_frame_rotation.cc", + "desktop_frame_rotation.h", + "differ_block.cc", + "differ_block.h", + "fake_desktop_capturer.cc", + "fake_desktop_capturer.h", + "fallback_desktop_capturer_wrapper.cc", + "fallback_desktop_capturer_wrapper.h", + "full_screen_application_handler.cc", + "full_screen_application_handler.h", + "full_screen_window_detector.cc", + "full_screen_window_detector.h", + "mouse_cursor.cc", + "mouse_cursor.h", + "mouse_cursor_monitor.h", + "resolution_tracker.cc", + "resolution_tracker.h", + "rgba_color.cc", + "rgba_color.h", + "screen_capture_frame_queue.h", + "screen_capturer_helper.cc", + "screen_capturer_helper.h", + "window_finder.cc", + "window_finder.h", + ] + + if (is_mac) { + sources += [ + "mac/desktop_configuration.h", + "mac/desktop_configuration_monitor.cc", + "mac/desktop_configuration_monitor.h", + "mac/full_screen_mac_application_handler.cc", + "mac/full_screen_mac_application_handler.h", + "mac/window_list_utils.cc", + "mac/window_list_utils.h", + ] + } + + if (build_with_mozilla) { + sources += [ + "desktop_device_info.cc", + "desktop_device_info.h", + ] + if (is_win) { + sources += [ + "app_capturer_win.cc", + "win/desktop_device_info_win.cc", + "win/win_shared.cc", + ] + } + } + + if (rtc_use_x11_extensions || rtc_use_pipewire) { + sources += [ + "mouse_cursor_monitor_linux.cc", + "screen_capturer_linux.cc", + 
"window_capturer_linux.cc", + ] + + if (build_with_mozilla && is_linux) { + sources += [ + "app_capturer_linux.cc", + "linux/app_capturer_x11.cc", + "linux/desktop_device_info_linux.cc", + "linux/desktop_device_info_linux.h", + "linux/shared_x_util.cc", + "linux/shared_x_util.h", + ] + } + } + + if (rtc_use_x11_extensions) { + sources += [ + "linux/mouse_cursor_monitor_x11.cc", + "linux/mouse_cursor_monitor_x11.h", + "linux/screen_capturer_x11.cc", + "linux/screen_capturer_x11.h", + "linux/shared_x_display.cc", + "linux/shared_x_display.h", + "linux/window_capturer_x11.cc", + "linux/window_capturer_x11.h", + "linux/window_finder_x11.cc", + "linux/window_finder_x11.h", + "linux/window_list_utils.cc", + "linux/window_list_utils.h", + "linux/x_atom_cache.cc", + "linux/x_atom_cache.h", + "linux/x_error_trap.cc", + "linux/x_error_trap.h", + "linux/x_server_pixel_buffer.cc", + "linux/x_server_pixel_buffer.h", + "linux/x_window_property.cc", + "linux/x_window_property.h", + ] + libs = [ + "X11", + "Xcomposite", + "Xdamage", + "Xext", + "Xfixes", + "Xrender", + "Xrandr", + "Xtst", + ] + } + + if (!is_win && !is_mac && !rtc_use_x11_extensions && !rtc_use_pipewire) { + sources += [ + "mouse_cursor_monitor_null.cc", + "screen_capturer_null.cc", + "window_capturer_null.cc", + ] + } + + if (is_win) { + sources += [ + "cropping_window_capturer_win.cc", + "desktop_frame_win.cc", + "desktop_frame_win.h", + "mouse_cursor_monitor_win.cc", + "screen_capturer_win.cc", + "win/cursor.cc", + "win/cursor.h", + "win/d3d_device.cc", + "win/d3d_device.h", + "win/desktop.cc", + "win/desktop.h", + "win/display_configuration_monitor.cc", + "win/display_configuration_monitor.h", + "win/dxgi_adapter_duplicator.cc", + "win/dxgi_adapter_duplicator.h", + "win/dxgi_context.cc", + "win/dxgi_context.h", + "win/dxgi_duplicator_controller.cc", + "win/dxgi_duplicator_controller.h", + "win/dxgi_frame.cc", + "win/dxgi_frame.h", + "win/dxgi_output_duplicator.cc", + "win/dxgi_output_duplicator.h", + 
"win/dxgi_texture.cc", + "win/dxgi_texture.h", + "win/dxgi_texture_mapping.cc", + "win/dxgi_texture_mapping.h", + "win/dxgi_texture_staging.cc", + "win/dxgi_texture_staging.h", + "win/full_screen_win_application_handler.cc", + "win/full_screen_win_application_handler.h", + "win/scoped_gdi_object.h", + "win/scoped_thread_desktop.cc", + "win/scoped_thread_desktop.h", + "win/screen_capture_utils.cc", + "win/screen_capture_utils.h", + "win/screen_capturer_win_directx.cc", + "win/screen_capturer_win_directx.h", + "win/screen_capturer_win_gdi.cc", + "win/screen_capturer_win_gdi.h", + "win/screen_capturer_win_magnifier.cc", + "win/screen_capturer_win_magnifier.h", + "win/selected_window_context.cc", + "win/selected_window_context.h", + "win/window_capture_utils.cc", + "win/window_capture_utils.h", + "win/window_capturer_win_gdi.cc", + "win/window_capturer_win_gdi.h", + "win/window_capturer_win_wgc.cc", + "win/window_capturer_win_wgc.h", + "window_capturer_win.cc", + "window_finder_win.cc", + "window_finder_win.h", + ] + libs = [ + "d3d11.lib", + "dxgi.lib", + ] + } + + deps = [ + ":primitives", + "../../api:function_view", + "../../api:refcountedbase", + "../../api:scoped_refptr", + "../../rtc_base", # TODO(kjellander): Cleanup in bugs.webrtc.org/3806. 
+ "../../rtc_base:checks", + "../../rtc_base/synchronization:mutex", + "../../rtc_base/synchronization:rw_lock_wrapper", + "../../rtc_base/system:arch", + "../../rtc_base/system:rtc_export", + "../../system_wrappers", + "../../system_wrappers:cpu_features_api", + "../../system_wrappers:metrics", + ] + absl_deps = [ + "//third_party/abseil-cpp/absl/memory", + "//third_party/abseil-cpp/absl/strings", + ] + + if (rtc_use_x11_extensions) { + deps += [ "../../rtc_base:sanitizer" ] + } + + if (build_with_mozilla) { + deps += [ "../../rtc_base:rtc_base_approved" ] + } else { + deps += [ "//third_party/libyuv" ] + } + + if (use_desktop_capture_differ_sse2) { + deps += [ ":desktop_capture_differ_sse2" ] + } + + if (rtc_use_pipewire) { + sources += [ + "linux/base_capturer_pipewire.cc", + "linux/base_capturer_pipewire.h", + "linux/screen_capturer_pipewire.cc", + "linux/screen_capturer_pipewire.h", + "linux/window_capturer_pipewire.cc", + "linux/window_capturer_pipewire.h", + ] + + configs += [ + ":pipewire_config", + ":gio", + ] + + if (rtc_link_pipewire) { + configs += [ ":pipewire" ] + } else { + deps += [ ":pipewire_stubs" ] + } + } +} + +if (use_desktop_capture_differ_sse2) { + # Have to be compiled as a separate target because it needs to be compiled + # with SSE2 enabled. 
+ rtc_library("desktop_capture_differ_sse2") { + visibility = [ ":*" ] + sources = [ + "differ_vector_sse2.cc", + "differ_vector_sse2.h", + ] + + if (is_posix || is_fuchsia) { + cflags = [ "-msse2" ] + } + } +} diff --git a/modules/desktop_capture/DEPS b/modules/desktop_capture/DEPS new file mode 100644 index 0000000..8c894c4 --- /dev/null +++ b/modules/desktop_capture/DEPS @@ -0,0 +1,19 @@ +include_rules = [ + "+system_wrappers", + "+third_party/libyuv", +] + +specific_include_rules = { + "desktop_frame_cgimage\.h": [ + "+sdk/objc", + ], + "desktop_frame_iosurface\.h": [ + "+sdk/objc", + ], + "desktop_frame_provider\.h": [ + "+sdk/objc", + ], + "screen_capturer_mac\.mm": [ + "+sdk/objc", + ], +} diff --git a/modules/desktop_capture/OWNERS b/modules/desktop_capture/OWNERS new file mode 100644 index 0000000..eaa671c --- /dev/null +++ b/modules/desktop_capture/OWNERS @@ -0,0 +1,2 @@ +jamiewalch@chromium.org +sergeyu@chromium.org diff --git a/modules/desktop_capture/blank_detector_desktop_capturer_wrapper.cc b/modules/desktop_capture/blank_detector_desktop_capturer_wrapper.cc new file mode 100644 index 0000000..ca3a89f --- /dev/null +++ b/modules/desktop_capture/blank_detector_desktop_capturer_wrapper.cc @@ -0,0 +1,123 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#include "modules/desktop_capture/blank_detector_desktop_capturer_wrapper.h"
+
+#include <stdint.h>
+
+#include <utility>
+
+#include "modules/desktop_capture/desktop_geometry.h"
+#include "modules/desktop_capture/desktop_region.h"
+#include "rtc_base/checks.h"
+#include "system_wrappers/include/metrics.h"
+
+namespace webrtc {
+
+BlankDetectorDesktopCapturerWrapper::BlankDetectorDesktopCapturerWrapper(
+    std::unique_ptr<DesktopCapturer> capturer,
+    RgbaColor blank_pixel)
+    : capturer_(std::move(capturer)), blank_pixel_(blank_pixel) {
+  RTC_DCHECK(capturer_);
+}
+
+BlankDetectorDesktopCapturerWrapper::~BlankDetectorDesktopCapturerWrapper() =
+    default;
+
+void BlankDetectorDesktopCapturerWrapper::Start(
+    DesktopCapturer::Callback* callback) {
+  callback_ = callback;
+  capturer_->Start(this);
+}
+
+void BlankDetectorDesktopCapturerWrapper::SetSharedMemoryFactory(
+    std::unique_ptr<SharedMemoryFactory> shared_memory_factory) {
+  capturer_->SetSharedMemoryFactory(std::move(shared_memory_factory));
+}
+
+void BlankDetectorDesktopCapturerWrapper::CaptureFrame() {
+  RTC_DCHECK(callback_);
+  capturer_->CaptureFrame();
+}
+
+void BlankDetectorDesktopCapturerWrapper::SetExcludedWindow(WindowId window) {
+  capturer_->SetExcludedWindow(window);
+}
+
+bool BlankDetectorDesktopCapturerWrapper::GetSourceList(SourceList* sources) {
+  return capturer_->GetSourceList(sources);
+}
+
+bool BlankDetectorDesktopCapturerWrapper::SelectSource(SourceId id) {
+  return capturer_->SelectSource(id);
+}
+
+bool BlankDetectorDesktopCapturerWrapper::FocusOnSelectedSource() {
+  return capturer_->FocusOnSelectedSource();
+}
+
+bool BlankDetectorDesktopCapturerWrapper::IsOccluded(const DesktopVector& pos) {
+  return capturer_->IsOccluded(pos);
+}
+
+void BlankDetectorDesktopCapturerWrapper::OnCaptureResult(
+    Result result,
+    std::unique_ptr<DesktopFrame> frame) {
+  RTC_DCHECK(callback_);
+  if (result != Result::SUCCESS || non_blank_frame_received_) {
+    callback_->OnCaptureResult(result, std::move(frame));
+    return;
+  }
+
+  RTC_DCHECK(frame);
+
+  // If nothing
has been changed in current frame, we do not need to check it
+  // again.
+  if (!frame->updated_region().is_empty() || is_first_frame_) {
+    last_frame_is_blank_ = IsBlankFrame(*frame);
+    is_first_frame_ = false;
+  }
+  RTC_HISTOGRAM_BOOLEAN("WebRTC.DesktopCapture.BlankFrameDetected",
+                        last_frame_is_blank_);
+  if (!last_frame_is_blank_) {
+    non_blank_frame_received_ = true;
+    callback_->OnCaptureResult(Result::SUCCESS, std::move(frame));
+    return;
+  }
+
+  callback_->OnCaptureResult(Result::ERROR_TEMPORARY,
+                             std::unique_ptr<DesktopFrame>());
+}
+
+bool BlankDetectorDesktopCapturerWrapper::IsBlankFrame(
+    const DesktopFrame& frame) const {
+  // We will check 7489 pixels for a frame with 1024 x 768 resolution.
+  for (int i = 0; i < frame.size().width() * frame.size().height(); i += 105) {
+    const int x = i % frame.size().width();
+    const int y = i / frame.size().width();
+    if (!IsBlankPixel(frame, x, y)) {
+      return false;
+    }
+  }
+
+  // We are verifying the pixel in the center as well.
+  return IsBlankPixel(frame, frame.size().width() / 2,
+                      frame.size().height() / 2);
+}
+
+bool BlankDetectorDesktopCapturerWrapper::IsBlankPixel(
+    const DesktopFrame& frame,
+    int x,
+    int y) const {
+  uint8_t* pixel_data = frame.GetFrameDataAtPos(DesktopVector(x, y));
+  return RgbaColor(pixel_data) == blank_pixel_;
+}
+
+}  // namespace webrtc
diff --git a/modules/desktop_capture/blank_detector_desktop_capturer_wrapper.h b/modules/desktop_capture/blank_detector_desktop_capturer_wrapper.h
new file mode 100644
index 0000000..46ba525
--- /dev/null
+++ b/modules/desktop_capture/blank_detector_desktop_capturer_wrapper.h
@@ -0,0 +1,78 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS.
All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_DESKTOP_CAPTURE_BLANK_DETECTOR_DESKTOP_CAPTURER_WRAPPER_H_
+#define MODULES_DESKTOP_CAPTURE_BLANK_DETECTOR_DESKTOP_CAPTURER_WRAPPER_H_
+
+#include <memory>
+
+#include "modules/desktop_capture/desktop_capture_types.h"
+#include "modules/desktop_capture/desktop_capturer.h"
+#include "modules/desktop_capture/desktop_frame.h"
+#include "modules/desktop_capture/rgba_color.h"
+#include "modules/desktop_capture/shared_memory.h"
+
+namespace webrtc {
+
+// A DesktopCapturer wrapper detects the return value of its owned
+// DesktopCapturer implementation. If sampled pixels returned by the
+// DesktopCapturer implementation all equal to the blank pixel, this wrapper
+// returns ERROR_TEMPORARY. If the DesktopCapturer implementation fails for too
+// many times, this wrapper returns ERROR_PERMANENT.
+class BlankDetectorDesktopCapturerWrapper final
+    : public DesktopCapturer,
+      public DesktopCapturer::Callback {
+ public:
+  // Creates BlankDetectorDesktopCapturerWrapper. BlankDesktopCapturerWrapper
+  // takes ownership of |capturer|. The |blank_pixel| is the unmodified color
+  // returned by the |capturer|.
+  BlankDetectorDesktopCapturerWrapper(std::unique_ptr<DesktopCapturer> capturer,
+                                      RgbaColor blank_pixel);
+  ~BlankDetectorDesktopCapturerWrapper() override;
+
+  // DesktopCapturer interface.
+  void Start(DesktopCapturer::Callback* callback) override;
+  void SetSharedMemoryFactory(
+      std::unique_ptr<SharedMemoryFactory> shared_memory_factory) override;
+  void CaptureFrame() override;
+  void SetExcludedWindow(WindowId window) override;
+  bool GetSourceList(SourceList* sources) override;
+  bool SelectSource(SourceId id) override;
+  bool FocusOnSelectedSource() override;
+  bool IsOccluded(const DesktopVector& pos) override;
+
+ private:
+  // DesktopCapturer::Callback interface.
+  void OnCaptureResult(Result result,
+                       std::unique_ptr<DesktopFrame> frame) override;
+
+  bool IsBlankFrame(const DesktopFrame& frame) const;
+
+  // Detects whether pixel at (x, y) equals to |blank_pixel_|.
+  bool IsBlankPixel(const DesktopFrame& frame, int x, int y) const;
+
+  const std::unique_ptr<DesktopCapturer> capturer_;
+  const RgbaColor blank_pixel_;
+
+  // Whether a non-blank frame has been received.
+  bool non_blank_frame_received_ = false;
+
+  // Whether the last frame is blank.
+  bool last_frame_is_blank_ = false;
+
+  // Whether current frame is the first frame.
+  bool is_first_frame_ = true;
+
+  DesktopCapturer::Callback* callback_ = nullptr;
+};
+
+}  // namespace webrtc
+
+#endif  // MODULES_DESKTOP_CAPTURE_BLANK_DETECTOR_DESKTOP_CAPTURER_WRAPPER_H_
diff --git a/modules/desktop_capture/blank_detector_desktop_capturer_wrapper_unittest.cc b/modules/desktop_capture/blank_detector_desktop_capturer_wrapper_unittest.cc
new file mode 100644
index 0000000..25a81ed
--- /dev/null
+++ b/modules/desktop_capture/blank_detector_desktop_capturer_wrapper_unittest.cc
@@ -0,0 +1,165 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/desktop_capture/blank_detector_desktop_capturer_wrapper.h"
+
+#include <memory>
+#include <utility>
+
+#include "modules/desktop_capture/desktop_capturer.h"
+#include "modules/desktop_capture/desktop_frame.h"
+#include "modules/desktop_capture/desktop_frame_generator.h"
+#include "modules/desktop_capture/desktop_geometry.h"
+#include "modules/desktop_capture/desktop_region.h"
+#include "modules/desktop_capture/fake_desktop_capturer.h"
+#include "test/gtest.h"
+
+namespace webrtc {
+
+class BlankDetectorDesktopCapturerWrapperTest
+    : public ::testing::Test,
+      public DesktopCapturer::Callback {
+ public:
+  BlankDetectorDesktopCapturerWrapperTest();
+  ~BlankDetectorDesktopCapturerWrapperTest() override;
+
+ protected:
+  void PerfTest(DesktopCapturer* capturer);
+
+  const int frame_width_ = 1024;
+  const int frame_height_ = 768;
+  std::unique_ptr<BlankDetectorDesktopCapturerWrapper> wrapper_;
+  DesktopCapturer* capturer_ = nullptr;
+  BlackWhiteDesktopFramePainter painter_;
+  int num_frames_captured_ = 0;
+  DesktopCapturer::Result last_result_ = DesktopCapturer::Result::SUCCESS;
+  std::unique_ptr<DesktopFrame> last_frame_;
+
+ private:
+  // DesktopCapturer::Callback interface.
+  void OnCaptureResult(DesktopCapturer::Result result,
+                       std::unique_ptr<DesktopFrame> frame) override;
+
+  PainterDesktopFrameGenerator frame_generator_;
+};
+
+BlankDetectorDesktopCapturerWrapperTest::
+    BlankDetectorDesktopCapturerWrapperTest() {
+  frame_generator_.size()->set(frame_width_, frame_height_);
+  frame_generator_.set_desktop_frame_painter(&painter_);
+  std::unique_ptr<DesktopCapturer> capturer(new FakeDesktopCapturer());
+  FakeDesktopCapturer* fake_capturer =
+      static_cast<FakeDesktopCapturer*>(capturer.get());
+  fake_capturer->set_frame_generator(&frame_generator_);
+  capturer_ = fake_capturer;
+  wrapper_.reset(new BlankDetectorDesktopCapturerWrapper(
+      std::move(capturer), RgbaColor(0, 0, 0, 0)));
+  wrapper_->Start(this);
+}
+
+BlankDetectorDesktopCapturerWrapperTest::
+    ~BlankDetectorDesktopCapturerWrapperTest() = default;
+
+void BlankDetectorDesktopCapturerWrapperTest::OnCaptureResult(
+    DesktopCapturer::Result result,
+    std::unique_ptr<DesktopFrame> frame) {
+  last_result_ = result;
+  last_frame_ = std::move(frame);
+  num_frames_captured_++;
+}
+
+void BlankDetectorDesktopCapturerWrapperTest::PerfTest(
+    DesktopCapturer* capturer) {
+  for (int i = 0; i < 10000; i++) {
+    capturer->CaptureFrame();
+    ASSERT_EQ(num_frames_captured_, i + 1);
+  }
+}
+
+TEST_F(BlankDetectorDesktopCapturerWrapperTest, ShouldDetectBlankFrame) {
+  wrapper_->CaptureFrame();
+  ASSERT_EQ(num_frames_captured_, 1);
+  ASSERT_EQ(last_result_, DesktopCapturer::Result::ERROR_TEMPORARY);
+  ASSERT_FALSE(last_frame_);
+}
+
+TEST_F(BlankDetectorDesktopCapturerWrapperTest, ShouldPassBlankDetection) {
+  painter_.updated_region()->AddRect(DesktopRect::MakeXYWH(0, 0, 100, 100));
+  wrapper_->CaptureFrame();
+  ASSERT_EQ(num_frames_captured_, 1);
+  ASSERT_EQ(last_result_, DesktopCapturer::Result::SUCCESS);
+  ASSERT_TRUE(last_frame_);
+
+  painter_.updated_region()->AddRect(
+      DesktopRect::MakeXYWH(frame_width_ - 100, frame_height_ - 100, 100, 100));
+  wrapper_->CaptureFrame();
+  ASSERT_EQ(num_frames_captured_, 2);
+  ASSERT_EQ(last_result_,
DesktopCapturer::Result::SUCCESS); + ASSERT_TRUE(last_frame_); + + painter_.updated_region()->AddRect( + DesktopRect::MakeXYWH(0, frame_height_ - 100, 100, 100)); + wrapper_->CaptureFrame(); + ASSERT_EQ(num_frames_captured_, 3); + ASSERT_EQ(last_result_, DesktopCapturer::Result::SUCCESS); + ASSERT_TRUE(last_frame_); + + painter_.updated_region()->AddRect( + DesktopRect::MakeXYWH(frame_width_ - 100, 0, 100, 100)); + wrapper_->CaptureFrame(); + ASSERT_EQ(num_frames_captured_, 4); + ASSERT_EQ(last_result_, DesktopCapturer::Result::SUCCESS); + ASSERT_TRUE(last_frame_); + + painter_.updated_region()->AddRect(DesktopRect::MakeXYWH( + (frame_width_ >> 1) - 50, (frame_height_ >> 1) - 50, 100, 100)); + wrapper_->CaptureFrame(); + ASSERT_EQ(num_frames_captured_, 5); + ASSERT_EQ(last_result_, DesktopCapturer::Result::SUCCESS); + ASSERT_TRUE(last_frame_); +} + +TEST_F(BlankDetectorDesktopCapturerWrapperTest, + ShouldNotCheckAfterANonBlankFrameReceived) { + wrapper_->CaptureFrame(); + ASSERT_EQ(num_frames_captured_, 1); + ASSERT_EQ(last_result_, DesktopCapturer::Result::ERROR_TEMPORARY); + ASSERT_FALSE(last_frame_); + + painter_.updated_region()->AddRect( + DesktopRect::MakeXYWH(frame_width_ - 100, 0, 100, 100)); + wrapper_->CaptureFrame(); + ASSERT_EQ(num_frames_captured_, 2); + ASSERT_EQ(last_result_, DesktopCapturer::Result::SUCCESS); + ASSERT_TRUE(last_frame_); + + for (int i = 0; i < 100; i++) { + wrapper_->CaptureFrame(); + ASSERT_EQ(num_frames_captured_, i + 3); + ASSERT_EQ(last_result_, DesktopCapturer::Result::SUCCESS); + ASSERT_TRUE(last_frame_); + } +} + +// There is no perceptible impact by using BlankDetectorDesktopCapturerWrapper. +// i.e. less than 0.2ms per frame. 
+// [ OK ] DISABLED_Performance (10210 ms) +// [ OK ] DISABLED_PerformanceComparison (8791 ms) +TEST_F(BlankDetectorDesktopCapturerWrapperTest, DISABLED_Performance) { + PerfTest(wrapper_.get()); +} + +TEST_F(BlankDetectorDesktopCapturerWrapperTest, + DISABLED_PerformanceComparison) { + capturer_->Start(this); + PerfTest(capturer_); +} + +} // namespace webrtc diff --git a/modules/desktop_capture/capture_result_desktop_capturer_wrapper.cc b/modules/desktop_capture/capture_result_desktop_capturer_wrapper.cc new file mode 100644 index 0000000..e1d4b99 --- /dev/null +++ b/modules/desktop_capture/capture_result_desktop_capturer_wrapper.cc @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#include "modules/desktop_capture/capture_result_desktop_capturer_wrapper.h"
+
+#include <memory>
+#include <utility>
+
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+CaptureResultDesktopCapturerWrapper::CaptureResultDesktopCapturerWrapper(
+    std::unique_ptr<DesktopCapturer> base_capturer,
+    ResultObserver* observer)
+    : DesktopCapturerWrapper(std::move(base_capturer)), observer_(observer) {
+  RTC_DCHECK(observer_);
+}
+
+CaptureResultDesktopCapturerWrapper::~CaptureResultDesktopCapturerWrapper() =
+    default;
+
+void CaptureResultDesktopCapturerWrapper::Start(Callback* callback) {
+  if ((callback_ == nullptr) != (callback == nullptr)) {
+    if (callback) {
+      callback_ = callback;
+      base_capturer_->Start(this);
+    } else {
+      base_capturer_->Start(nullptr);
+    }
+  }
+  callback_ = callback;
+}
+
+void CaptureResultDesktopCapturerWrapper::OnCaptureResult(
+    Result result,
+    std::unique_ptr<DesktopFrame> frame) {
+  observer_->Observe(&result, &frame);
+  callback_->OnCaptureResult(result, std::move(frame));
+}
+
+}  // namespace webrtc
diff --git a/modules/desktop_capture/capture_result_desktop_capturer_wrapper.h b/modules/desktop_capture/capture_result_desktop_capturer_wrapper.h
new file mode 100644
index 0000000..6d1d49a
--- /dev/null
+++ b/modules/desktop_capture/capture_result_desktop_capturer_wrapper.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_DESKTOP_CAPTURE_CAPTURE_RESULT_DESKTOP_CAPTURER_WRAPPER_H_
+#define MODULES_DESKTOP_CAPTURE_CAPTURE_RESULT_DESKTOP_CAPTURER_WRAPPER_H_
+
+#include <memory>
+
+#include "modules/desktop_capture/desktop_capturer.h"
+#include "modules/desktop_capture/desktop_capturer_wrapper.h"
+#include "modules/desktop_capture/desktop_frame.h"
+
+namespace webrtc {
+
+// A DesktopCapturerWrapper implementation to capture the result of
+// |base_capturer|. Derived classes are expected to provide a ResultObserver
+// implementation to observe the DesktopFrame returned by |base_capturer_|.
+class CaptureResultDesktopCapturerWrapper : public DesktopCapturerWrapper,
+                                            public DesktopCapturer::Callback {
+ public:
+  using Callback = DesktopCapturer::Callback;
+
+  // Provides a way to let derived classes or clients to modify the result
+  // returned by |base_capturer_|.
+  class ResultObserver {
+   public:
+    ResultObserver();
+    virtual ~ResultObserver();
+
+    virtual void Observe(Result* result,
+                         std::unique_ptr<DesktopFrame>* frame) = 0;
+  };
+
+  // |observer| must outlive this instance and can be |this|. |observer| is
+  // guaranteed to be executed only after the constructor and before the
+  // destructor.
+  CaptureResultDesktopCapturerWrapper(
+      std::unique_ptr<DesktopCapturer> base_capturer,
+      ResultObserver* observer);
+
+  ~CaptureResultDesktopCapturerWrapper() override;
+
+  // DesktopCapturer implementations.
+  void Start(Callback* callback) final;
+
+ private:
+  // DesktopCapturer::Callback implementation.
+  void OnCaptureResult(Result result,
+                       std::unique_ptr<DesktopFrame> frame) final;
+
+  ResultObserver* const observer_;
+  Callback* callback_ = nullptr;
+};
+
+}  // namespace webrtc
+
+#endif  // MODULES_DESKTOP_CAPTURE_CAPTURE_RESULT_DESKTOP_CAPTURER_WRAPPER_H_
diff --git a/modules/desktop_capture/cropped_desktop_frame.cc b/modules/desktop_capture/cropped_desktop_frame.cc
new file mode 100644
index 0000000..1ab0355
--- /dev/null
+++ b/modules/desktop_capture/cropped_desktop_frame.cc
@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/desktop_capture/cropped_desktop_frame.h"
+
+#include <memory>
+#include <utility>
+
+#include "modules/desktop_capture/desktop_region.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/constructor_magic.h"
+
+namespace webrtc {
+
+// A DesktopFrame that is a sub-rect of another DesktopFrame.
+class CroppedDesktopFrame : public DesktopFrame { + public: + CroppedDesktopFrame(std::unique_ptr frame, + const DesktopRect& rect); + + private: + const std::unique_ptr frame_; + + RTC_DISALLOW_COPY_AND_ASSIGN(CroppedDesktopFrame); +}; + +std::unique_ptr CreateCroppedDesktopFrame( + std::unique_ptr frame, + const DesktopRect& rect) { + RTC_DCHECK(frame); + + if (!DesktopRect::MakeSize(frame->size()).ContainsRect(rect)) { + return nullptr; + } + + if (frame->size().equals(rect.size())) { + return frame; + } + + return std::unique_ptr( + new CroppedDesktopFrame(std::move(frame), rect)); +} + +CroppedDesktopFrame::CroppedDesktopFrame(std::unique_ptr frame, + const DesktopRect& rect) + : DesktopFrame(rect.size(), + frame->stride(), + frame->GetFrameDataAtPos(rect.top_left()), + frame->shared_memory()), + frame_(std::move(frame)) { + MoveFrameInfoFrom(frame_.get()); + set_top_left(frame_->top_left().add(rect.top_left())); + mutable_updated_region()->IntersectWith(rect); + mutable_updated_region()->Translate(-rect.left(), -rect.top()); +} + +} // namespace webrtc diff --git a/modules/desktop_capture/cropped_desktop_frame.h b/modules/desktop_capture/cropped_desktop_frame.h new file mode 100644 index 0000000..bbc3c86 --- /dev/null +++ b/modules/desktop_capture/cropped_desktop_frame.h @@ -0,0 +1,33 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_DESKTOP_CAPTURE_CROPPED_DESKTOP_FRAME_H_ +#define MODULES_DESKTOP_CAPTURE_CROPPED_DESKTOP_FRAME_H_ + +#include + +#include "modules/desktop_capture/desktop_frame.h" +#include "modules/desktop_capture/desktop_geometry.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// Creates a DesktopFrame to contain only the area of |rect| in the original +// |frame|. +// |frame| should not be nullptr. |rect| is in |frame| coordinate, i.e. +// |frame|->top_left() does not impact the area of |rect|. +// Returns nullptr frame if |rect| is not contained by the bounds of |frame|. +std::unique_ptr RTC_EXPORT +CreateCroppedDesktopFrame(std::unique_ptr frame, + const DesktopRect& rect); + +} // namespace webrtc + +#endif // MODULES_DESKTOP_CAPTURE_CROPPED_DESKTOP_FRAME_H_ diff --git a/modules/desktop_capture/cropped_desktop_frame_unittest.cc b/modules/desktop_capture/cropped_desktop_frame_unittest.cc new file mode 100644 index 0000000..c4ccb87 --- /dev/null +++ b/modules/desktop_capture/cropped_desktop_frame_unittest.cc @@ -0,0 +1,110 @@ +/* + * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/desktop_capture/cropped_desktop_frame.h" + +#include +#include + +#include "modules/desktop_capture/desktop_frame.h" +#include "modules/desktop_capture/shared_desktop_frame.h" +#include "test/gtest.h" + +namespace webrtc { + +std::unique_ptr CreateTestFrame() { + return std::make_unique(DesktopSize(10, 20)); +} + +TEST(CroppedDesktopFrameTest, DoNotCreateWrapperIfSizeIsNotChanged) { + std::unique_ptr original = CreateTestFrame(); + // owned by |original| and CroppedDesktopFrame. 
+ DesktopFrame* raw_original = original.get(); + std::unique_ptr cropped = CreateCroppedDesktopFrame( + std::move(original), DesktopRect::MakeWH(10, 20)); + ASSERT_EQ(cropped.get(), raw_original); +} + +TEST(CroppedDesktopFrameTest, ReturnNullptrIfSizeIsNotSufficient) { + ASSERT_EQ(nullptr, CreateCroppedDesktopFrame(CreateTestFrame(), + DesktopRect::MakeWH(11, 10))); +} + +TEST(CroppedDesktopFrameTest, ReturnNullIfCropRegionIsOutOfBounds) { + std::unique_ptr frame = CreateTestFrame(); + frame->set_top_left(DesktopVector(100, 200)); + ASSERT_EQ(nullptr, + CreateCroppedDesktopFrame( + std::move(frame), DesktopRect::MakeLTRB(101, 203, 109, 218))); +} + +TEST(CroppedDesktopFrameTest, CropASubArea) { + std::unique_ptr cropped = CreateCroppedDesktopFrame( + CreateTestFrame(), DesktopRect::MakeLTRB(1, 2, 9, 19)); + ASSERT_EQ(cropped->size().width(), 8); + ASSERT_EQ(cropped->size().height(), 17); + ASSERT_EQ(cropped->top_left().x(), 1); + ASSERT_EQ(cropped->top_left().y(), 2); +} + +TEST(CroppedDesktopFrameTest, SetTopLeft) { + std::unique_ptr frame = CreateTestFrame(); + frame->set_top_left(DesktopVector(100, 200)); + frame = CreateCroppedDesktopFrame(std::move(frame), + DesktopRect::MakeLTRB(1, 3, 9, 18)); + ASSERT_EQ(frame->size().width(), 8); + ASSERT_EQ(frame->size().height(), 15); + ASSERT_EQ(frame->top_left().x(), 101); + ASSERT_EQ(frame->top_left().y(), 203); +} + +TEST(CroppedDesktopFrameTest, InitializedWithZeros) { + std::unique_ptr frame = CreateTestFrame(); + const DesktopVector frame_origin = frame->top_left(); + const DesktopSize frame_size = frame->size(); + std::unique_ptr cropped = CreateCroppedDesktopFrame( + std::move(frame), DesktopRect::MakeOriginSize(frame_origin, frame_size)); + for (int j = 0; j < cropped->size().height(); ++j) { + for (int i = 0; i < cropped->stride(); ++i) { + ASSERT_EQ(cropped->data()[i + j * cropped->stride()], 0); + } + } +} + +TEST(CroppedDesktopFrameTest, IccProfile) { + const uint8_t fake_icc_profile_data_array[] = {0x1a, 
0x00, 0x2b, 0x00, + 0x3c, 0x00, 0x4d}; + const std::vector icc_profile( + fake_icc_profile_data_array, + fake_icc_profile_data_array + sizeof(fake_icc_profile_data_array)); + + std::unique_ptr frame = CreateTestFrame(); + EXPECT_EQ(frame->icc_profile().size(), 0UL); + + frame->set_icc_profile(icc_profile); + EXPECT_EQ(frame->icc_profile().size(), 7UL); + EXPECT_EQ(frame->icc_profile(), icc_profile); + + frame = CreateCroppedDesktopFrame(std::move(frame), + DesktopRect::MakeLTRB(2, 2, 8, 18)); + EXPECT_EQ(frame->icc_profile().size(), 7UL); + EXPECT_EQ(frame->icc_profile(), icc_profile); + + std::unique_ptr shared = + SharedDesktopFrame::Wrap(std::move(frame)); + EXPECT_EQ(shared->icc_profile().size(), 7UL); + EXPECT_EQ(shared->icc_profile(), icc_profile); + + std::unique_ptr shared_other = shared->Share(); + EXPECT_EQ(shared_other->icc_profile().size(), 7UL); + EXPECT_EQ(shared_other->icc_profile(), icc_profile); +} + +} // namespace webrtc diff --git a/modules/desktop_capture/cropping_window_capturer.cc b/modules/desktop_capture/cropping_window_capturer.cc new file mode 100644 index 0000000..bd1ba46 --- /dev/null +++ b/modules/desktop_capture/cropping_window_capturer.cc @@ -0,0 +1,128 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/desktop_capture/cropping_window_capturer.h" + +#include + +#include + +#include "modules/desktop_capture/cropped_desktop_frame.h" +#include "rtc_base/logging.h" + +namespace webrtc { + +CroppingWindowCapturer::CroppingWindowCapturer( + const DesktopCaptureOptions& options) + : options_(options), + callback_(NULL), + window_capturer_(DesktopCapturer::CreateRawWindowCapturer(options)), + selected_window_(kNullWindowId), + excluded_window_(kNullWindowId) {} + +CroppingWindowCapturer::~CroppingWindowCapturer() {} + +void CroppingWindowCapturer::Start(DesktopCapturer::Callback* callback) { + callback_ = callback; + window_capturer_->Start(callback); +} + +void CroppingWindowCapturer::SetSharedMemoryFactory( + std::unique_ptr shared_memory_factory) { + window_capturer_->SetSharedMemoryFactory(std::move(shared_memory_factory)); +} + +void CroppingWindowCapturer::CaptureFrame() { + if (ShouldUseScreenCapturer()) { + if (!screen_capturer_.get()) { + screen_capturer_ = DesktopCapturer::CreateRawScreenCapturer(options_); + if (excluded_window_) { + screen_capturer_->SetExcludedWindow(excluded_window_); + } + screen_capturer_->Start(this); + } + screen_capturer_->CaptureFrame(); + } else { + window_capturer_->CaptureFrame(); + } +} + +void CroppingWindowCapturer::SetExcludedWindow(WindowId window) { + excluded_window_ = window; + if (screen_capturer_.get()) { + screen_capturer_->SetExcludedWindow(window); + } +} + +bool CroppingWindowCapturer::GetSourceList(SourceList* sources) { + return window_capturer_->GetSourceList(sources); +} + +bool CroppingWindowCapturer::SelectSource(SourceId id) { + if (window_capturer_->SelectSource(id)) { + selected_window_ = id; + return true; + } + return false; +} + +bool CroppingWindowCapturer::FocusOnSelectedSource() { + return window_capturer_->FocusOnSelectedSource(); +} + +void CroppingWindowCapturer::OnCaptureResult( + DesktopCapturer::Result result, + std::unique_ptr screen_frame) { + if 
(!ShouldUseScreenCapturer()) { + RTC_LOG(LS_INFO) << "Window no longer on top when ScreenCapturer finishes"; + window_capturer_->CaptureFrame(); + return; + } + + if (result != Result::SUCCESS) { + RTC_LOG(LS_WARNING) << "ScreenCapturer failed to capture a frame"; + callback_->OnCaptureResult(result, nullptr); + return; + } + + DesktopRect window_rect = GetWindowRectInVirtualScreen(); + if (window_rect.is_empty()) { + RTC_LOG(LS_WARNING) << "Window rect is empty"; + callback_->OnCaptureResult(Result::ERROR_TEMPORARY, nullptr); + return; + } + + callback_->OnCaptureResult( + Result::SUCCESS, + CreateCroppedDesktopFrame(std::move(screen_frame), window_rect)); +} + +bool CroppingWindowCapturer::IsOccluded(const DesktopVector& pos) { + // Returns true if either capturer returns true. + if (window_capturer_->IsOccluded(pos)) { + return true; + } + if (screen_capturer_ != nullptr && screen_capturer_->IsOccluded(pos)) { + return true; + } + return false; +} + +#if !defined(WEBRTC_WIN) +// CroppingWindowCapturer is implemented only for windows. On other platforms +// the regular window capturer is used. +// static +std::unique_ptr CroppingWindowCapturer::CreateCapturer( + const DesktopCaptureOptions& options) { + return DesktopCapturer::CreateWindowCapturer(options); +} +#endif + +} // namespace webrtc diff --git a/modules/desktop_capture/cropping_window_capturer.h b/modules/desktop_capture/cropping_window_capturer.h new file mode 100644 index 0000000..272a196 --- /dev/null +++ b/modules/desktop_capture/cropping_window_capturer.h @@ -0,0 +1,84 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_DESKTOP_CAPTURE_CROPPING_WINDOW_CAPTURER_H_ +#define MODULES_DESKTOP_CAPTURE_CROPPING_WINDOW_CAPTURER_H_ + +#include + +#include "modules/desktop_capture/desktop_capture_options.h" +#include "modules/desktop_capture/desktop_capture_types.h" +#include "modules/desktop_capture/desktop_capturer.h" +#include "modules/desktop_capture/desktop_frame.h" +#include "modules/desktop_capture/desktop_geometry.h" +#include "modules/desktop_capture/shared_memory.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// WindowCapturer implementation that uses a screen capturer to capture the +// whole screen and crops the video frame to the window area when the captured +// window is on top. +class RTC_EXPORT CroppingWindowCapturer : public DesktopCapturer, + public DesktopCapturer::Callback { + public: + static std::unique_ptr CreateCapturer( + const DesktopCaptureOptions& options); + + ~CroppingWindowCapturer() override; + + // DesktopCapturer implementation. + void Start(DesktopCapturer::Callback* callback) override; + void SetSharedMemoryFactory( + std::unique_ptr shared_memory_factory) override; + void CaptureFrame() override; + void SetExcludedWindow(WindowId window) override; + bool GetSourceList(SourceList* sources) override; + bool SelectSource(SourceId id) override; + bool FocusOnSelectedSource() override; + bool IsOccluded(const DesktopVector& pos) override; + + // DesktopCapturer::Callback implementation, passed to |screen_capturer_| to + // intercept the capture result. + void OnCaptureResult(DesktopCapturer::Result result, + std::unique_ptr frame) override; + + protected: + explicit CroppingWindowCapturer(const DesktopCaptureOptions& options); + + // The platform implementation should override these methods. + + // Returns true if it is OK to capture the whole screen and crop to the + // selected window, i.e. the selected window is opaque, rectangular, and not + // occluded. 
+ virtual bool ShouldUseScreenCapturer() = 0; + + // Returns the window area relative to the top left of the virtual screen + // within the bounds of the virtual screen. This function should return the + // DesktopRect in full desktop coordinates, i.e. the top-left monitor starts + // from (0, 0). + virtual DesktopRect GetWindowRectInVirtualScreen() = 0; + + WindowId selected_window() const { return selected_window_; } + WindowId excluded_window() const { return excluded_window_; } + DesktopCapturer* window_capturer() const { return window_capturer_.get(); } + + private: + DesktopCaptureOptions options_; + DesktopCapturer::Callback* callback_; + std::unique_ptr window_capturer_; + std::unique_ptr screen_capturer_; + SourceId selected_window_; + WindowId excluded_window_; +}; + +} // namespace webrtc + +#endif // MODULES_DESKTOP_CAPTURE_CROPPING_WINDOW_CAPTURER_H_ diff --git a/modules/desktop_capture/cropping_window_capturer_win.cc b/modules/desktop_capture/cropping_window_capturer_win.cc new file mode 100644 index 0000000..6e53ca3 --- /dev/null +++ b/modules/desktop_capture/cropping_window_capturer_win.cc @@ -0,0 +1,300 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/desktop_capture/cropping_window_capturer.h" +#include "modules/desktop_capture/desktop_capturer_differ_wrapper.h" +#include "modules/desktop_capture/win/screen_capture_utils.h" +#include "modules/desktop_capture/win/selected_window_context.h" +#include "modules/desktop_capture/win/window_capture_utils.h" +#include "rtc_base/logging.h" +#include "rtc_base/trace_event.h" +#include "rtc_base/win32.h" + +namespace webrtc { + +namespace { + +// Used to pass input data for verifying the selected window is on top. +struct TopWindowVerifierContext : public SelectedWindowContext { + TopWindowVerifierContext(HWND selected_window, + HWND excluded_window, + DesktopRect selected_window_rect, + WindowCaptureHelperWin* window_capture_helper) + : SelectedWindowContext(selected_window, + selected_window_rect, + window_capture_helper), + excluded_window(excluded_window) { + RTC_DCHECK_NE(selected_window, excluded_window); + } + + // Determines whether the selected window is on top (not occluded by any + // windows except for those it owns or any excluded window). + bool IsTopWindow() { + if (!IsSelectedWindowValid()) { + return false; + } + + // Enumerate all top-level windows above the selected window in Z-order, + // checking whether any overlaps it. This uses FindWindowEx rather than + // EnumWindows because the latter excludes certain system windows (e.g. the + // Start menu & other taskbar menus) that should be detected here to avoid + // inadvertent capture. + int num_retries = 0; + while (true) { + HWND hwnd = nullptr; + while ((hwnd = FindWindowEx(nullptr, hwnd, nullptr, nullptr))) { + if (hwnd == selected_window()) { + // Windows are enumerated in top-down Z-order, so we can stop + // enumerating upon reaching the selected window & report it's on top. + return true; + } + + // Ignore the excluded window. + if (hwnd == excluded_window) { + continue; + } + + // Ignore windows that aren't visible on the current desktop. 
+ if (!window_capture_helper()->IsWindowVisibleOnCurrentDesktop(hwnd)) { + continue; + } + + // Ignore Chrome notification windows, especially the notification for + // the ongoing window sharing. Notes: + // - This only works with notifications from Chrome, not other Apps. + // - All notifications from Chrome will be ignored. + // - This may cause part or whole of notification window being cropped + // into the capturing of the target window if there is overlapping. + if (window_capture_helper()->IsWindowChromeNotification(hwnd)) { + continue; + } + + // Ignore windows owned by the selected window since we want to capture + // them. + if (IsWindowOwnedBySelectedWindow(hwnd)) { + continue; + } + + // Check whether this window intersects with the selected window. + if (IsWindowOverlappingSelectedWindow(hwnd)) { + // If intersection is not empty, the selected window is not on top. + return false; + } + } + + DWORD lastError = GetLastError(); + if (lastError == ERROR_SUCCESS) { + // The enumeration completed successfully without finding the selected + // window (which may have been closed). + RTC_LOG(LS_WARNING) << "Failed to find selected window (only expected " + "if it was closed)"; + RTC_DCHECK(!IsWindow(selected_window())); + return false; + } else if (lastError == ERROR_INVALID_WINDOW_HANDLE) { + // This error may occur if a window is closed around the time it's + // enumerated; retry the enumeration in this case up to 10 times + // (this should be a rare race & unlikely to recur). + if (++num_retries <= 10) { + RTC_LOG(LS_WARNING) << "Enumeration failed due to race with a window " + "closing; retrying - retry #" + << num_retries; + continue; + } else { + RTC_LOG(LS_ERROR) + << "Exhausted retry allowance around window enumeration failures " + "due to races with windows closing"; + } + } + + // The enumeration failed with an unexpected error (or more repeats of + // an infrequently-expected error than anticipated). 
After logging this & + // firing an assert when enabled, report that the selected window isn't + // topmost to avoid inadvertent capture of other windows. + RTC_LOG(LS_ERROR) << "Failed to enumerate windows: " << lastError; + RTC_DCHECK(false); + return false; + } + } + + const HWND excluded_window; +}; + +class CroppingWindowCapturerWin : public CroppingWindowCapturer { + public: + explicit CroppingWindowCapturerWin(const DesktopCaptureOptions& options) + : CroppingWindowCapturer(options), + full_screen_window_detector_(options.full_screen_window_detector()) {} + + void CaptureFrame() override; + + private: + bool ShouldUseScreenCapturer() override; + DesktopRect GetWindowRectInVirtualScreen() override; + + // Returns either selected by user sourceId or sourceId provided by + // FullScreenWindowDetector + WindowId GetWindowToCapture() const; + + // The region from GetWindowRgn in the desktop coordinate if the region is + // rectangular, or the rect from GetWindowRect if the region is not set. 
+ DesktopRect window_region_rect_; + + WindowCaptureHelperWin window_capture_helper_; + + rtc::scoped_refptr full_screen_window_detector_; +}; + +void CroppingWindowCapturerWin::CaptureFrame() { + DesktopCapturer* win_capturer = window_capturer(); + if (win_capturer) { + // Update the list of available sources and override source to capture if + // FullScreenWindowDetector returns not zero + if (full_screen_window_detector_) { + full_screen_window_detector_->UpdateWindowListIfNeeded( + selected_window(), + [win_capturer](DesktopCapturer::SourceList* sources) { + return win_capturer->GetSourceList(sources); + }); + } + win_capturer->SelectSource(GetWindowToCapture()); + } + + CroppingWindowCapturer::CaptureFrame(); +} + +bool CroppingWindowCapturerWin::ShouldUseScreenCapturer() { + if (!rtc::IsWindows8OrLater() && window_capture_helper_.IsAeroEnabled()) { + return false; + } + + const HWND selected = reinterpret_cast(GetWindowToCapture()); + // Check if the window is visible on current desktop. + if (!window_capture_helper_.IsWindowVisibleOnCurrentDesktop(selected)) { + return false; + } + + // Check if the window is a translucent layered window. + const LONG window_ex_style = GetWindowLong(selected, GWL_EXSTYLE); + if (window_ex_style & WS_EX_LAYERED) { + COLORREF color_ref_key = 0; + BYTE alpha = 0; + DWORD flags = 0; + + // GetLayeredWindowAttributes fails if the window was setup with + // UpdateLayeredWindow. We have no way to know the opacity of the window in + // that case. This happens for Stiky Note (crbug/412726). + if (!GetLayeredWindowAttributes(selected, &color_ref_key, &alpha, &flags)) + return false; + + // UpdateLayeredWindow is the only way to set per-pixel alpha and will cause + // the previous GetLayeredWindowAttributes to fail. So we only need to check + // the window wide color key or alpha. 
+ if ((flags & LWA_COLORKEY) || ((flags & LWA_ALPHA) && (alpha < 255))) { + return false; + } + } + + if (!GetWindowRect(selected, &window_region_rect_)) { + return false; + } + + DesktopRect content_rect; + if (!GetWindowContentRect(selected, &content_rect)) { + return false; + } + + DesktopRect region_rect; + // Get the window region and check if it is rectangular. + const int region_type = + GetWindowRegionTypeWithBoundary(selected, ®ion_rect); + + // Do not use the screen capturer if the region is empty or not rectangular. + if (region_type == COMPLEXREGION || region_type == NULLREGION) { + return false; + } + + if (region_type == SIMPLEREGION) { + // The |region_rect| returned from GetRgnBox() is always in window + // coordinate. + region_rect.Translate(window_region_rect_.left(), + window_region_rect_.top()); + // MSDN: The window region determines the area *within* the window where the + // system permits drawing. + // https://msdn.microsoft.com/en-us/library/windows/desktop/dd144950(v=vs.85).aspx. + // + // |region_rect| should always be inside of |window_region_rect_|. So after + // the intersection, |window_region_rect_| == |region_rect|. If so, what's + // the point of the intersecting operations? Why cannot we directly retrieve + // |window_region_rect_| from GetWindowRegionTypeWithBoundary() function? + // TODO(zijiehe): Figure out the purpose of these intersections. + window_region_rect_.IntersectWith(region_rect); + content_rect.IntersectWith(region_rect); + } + + // Check if the client area is out of the screen area. When the window is + // maximized, only its client area is visible in the screen, the border will + // be hidden. So we are using |content_rect| here. + if (!GetFullscreenRect().ContainsRect(content_rect)) { + return false; + } + + // Check if the window is occluded by any other window, excluding the child + // windows, context menus, and |excluded_window_|. 
+ // |content_rect| is preferred, see the comments on + // IsWindowIntersectWithSelectedWindow(). + TopWindowVerifierContext context(selected, + reinterpret_cast(excluded_window()), + content_rect, &window_capture_helper_); + return context.IsTopWindow(); +} + +DesktopRect CroppingWindowCapturerWin::GetWindowRectInVirtualScreen() { + TRACE_EVENT0("webrtc", + "CroppingWindowCapturerWin::GetWindowRectInVirtualScreen"); + DesktopRect window_rect; + HWND hwnd = reinterpret_cast(GetWindowToCapture()); + if (!GetCroppedWindowRect(hwnd, /*avoid_cropping_border*/ false, &window_rect, + /*original_rect*/ nullptr)) { + RTC_LOG(LS_WARNING) << "Failed to get window info: " << GetLastError(); + return window_rect; + } + window_rect.IntersectWith(window_region_rect_); + + // Convert |window_rect| to be relative to the top-left of the virtual screen. + DesktopRect screen_rect(GetFullscreenRect()); + window_rect.IntersectWith(screen_rect); + window_rect.Translate(-screen_rect.left(), -screen_rect.top()); + return window_rect; +} + +WindowId CroppingWindowCapturerWin::GetWindowToCapture() const { + const auto selected_source = selected_window(); + const auto full_screen_source = + full_screen_window_detector_ + ? full_screen_window_detector_->FindFullScreenWindow(selected_source) + : 0; + return full_screen_source ? 
full_screen_source : selected_source; +} + +} // namespace + +// static +std::unique_ptr CroppingWindowCapturer::CreateCapturer( + const DesktopCaptureOptions& options) { + std::unique_ptr capturer( + new CroppingWindowCapturerWin(options)); + if (capturer && options.detect_updated_region()) { + capturer.reset(new DesktopCapturerDifferWrapper(std::move(capturer))); + } + + return capturer; +} + +} // namespace webrtc diff --git a/modules/desktop_capture/desktop_and_cursor_composer.cc b/modules/desktop_capture/desktop_and_cursor_composer.cc new file mode 100644 index 0000000..f282c1d --- /dev/null +++ b/modules/desktop_capture/desktop_and_cursor_composer.cc @@ -0,0 +1,247 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/desktop_capture/desktop_and_cursor_composer.h" + +#include +#include + +#include +#include + +#include "modules/desktop_capture/desktop_capturer.h" +#include "modules/desktop_capture/desktop_frame.h" +#include "modules/desktop_capture/mouse_cursor.h" +#include "modules/desktop_capture/mouse_cursor_monitor.h" +#include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" + +namespace webrtc { + +namespace { + +// Helper function that blends one image into another. Source image must be +// pre-multiplied with the alpha channel. Destination is assumed to be opaque. 
+void AlphaBlend(uint8_t* dest, + int dest_stride, + const uint8_t* src, + int src_stride, + const DesktopSize& size) { + for (int y = 0; y < size.height(); ++y) { + for (int x = 0; x < size.width(); ++x) { + uint32_t base_alpha = 255 - src[x * DesktopFrame::kBytesPerPixel + 3]; + if (base_alpha == 255) { + continue; + } else if (base_alpha == 0) { + memcpy(dest + x * DesktopFrame::kBytesPerPixel, + src + x * DesktopFrame::kBytesPerPixel, + DesktopFrame::kBytesPerPixel); + } else { + dest[x * DesktopFrame::kBytesPerPixel] = + dest[x * DesktopFrame::kBytesPerPixel] * base_alpha / 255 + + src[x * DesktopFrame::kBytesPerPixel]; + dest[x * DesktopFrame::kBytesPerPixel + 1] = + dest[x * DesktopFrame::kBytesPerPixel + 1] * base_alpha / 255 + + src[x * DesktopFrame::kBytesPerPixel + 1]; + dest[x * DesktopFrame::kBytesPerPixel + 2] = + dest[x * DesktopFrame::kBytesPerPixel + 2] * base_alpha / 255 + + src[x * DesktopFrame::kBytesPerPixel + 2]; + } + } + src += src_stride; + dest += dest_stride; + } +} + +// DesktopFrame wrapper that draws mouse on a frame and restores original +// content before releasing the underlying frame. +class DesktopFrameWithCursor : public DesktopFrame { + public: + // Takes ownership of |frame|. 
+ DesktopFrameWithCursor(std::unique_ptr frame, + const MouseCursor& cursor, + const DesktopVector& position, + const DesktopRect& previous_cursor_rect, + bool cursor_changed); + ~DesktopFrameWithCursor() override; + + DesktopRect cursor_rect() const { return cursor_rect_; } + + private: + const std::unique_ptr original_frame_; + + DesktopVector restore_position_; + std::unique_ptr restore_frame_; + DesktopRect cursor_rect_; + + RTC_DISALLOW_COPY_AND_ASSIGN(DesktopFrameWithCursor); +}; + +DesktopFrameWithCursor::DesktopFrameWithCursor( + std::unique_ptr frame, + const MouseCursor& cursor, + const DesktopVector& position, + const DesktopRect& previous_cursor_rect, + bool cursor_changed) + : DesktopFrame(frame->size(), + frame->stride(), + frame->data(), + frame->shared_memory()), + original_frame_(std::move(frame)) { + MoveFrameInfoFrom(original_frame_.get()); + + DesktopVector image_pos = position.subtract(cursor.hotspot()); + cursor_rect_ = DesktopRect::MakeSize(cursor.image()->size()); + cursor_rect_.Translate(image_pos); + DesktopVector cursor_origin = cursor_rect_.top_left(); + cursor_rect_.IntersectWith(DesktopRect::MakeSize(size())); + + if (!previous_cursor_rect.equals(cursor_rect_)) { + mutable_updated_region()->AddRect(cursor_rect_); + mutable_updated_region()->AddRect(previous_cursor_rect); + } else if (cursor_changed) { + mutable_updated_region()->AddRect(cursor_rect_); + } + + if (cursor_rect_.is_empty()) + return; + + // Copy original screen content under cursor to |restore_frame_|. + restore_position_ = cursor_rect_.top_left(); + restore_frame_.reset(new BasicDesktopFrame(cursor_rect_.size())); + restore_frame_->CopyPixelsFrom(*this, cursor_rect_.top_left(), + DesktopRect::MakeSize(restore_frame_->size())); + + // Blit the cursor. 
+ uint8_t* cursor_rect_data = + reinterpret_cast(data()) + cursor_rect_.top() * stride() + + cursor_rect_.left() * DesktopFrame::kBytesPerPixel; + DesktopVector origin_shift = cursor_rect_.top_left().subtract(cursor_origin); + AlphaBlend(cursor_rect_data, stride(), + cursor.image()->data() + + origin_shift.y() * cursor.image()->stride() + + origin_shift.x() * DesktopFrame::kBytesPerPixel, + cursor.image()->stride(), cursor_rect_.size()); +} + +DesktopFrameWithCursor::~DesktopFrameWithCursor() { + // Restore original content of the frame. + if (restore_frame_) { + DesktopRect target_rect = DesktopRect::MakeSize(restore_frame_->size()); + target_rect.Translate(restore_position_); + CopyPixelsFrom(restore_frame_->data(), restore_frame_->stride(), + target_rect); + } +} + +} // namespace + +DesktopAndCursorComposer::DesktopAndCursorComposer( + std::unique_ptr desktop_capturer, + const DesktopCaptureOptions& options) + : DesktopAndCursorComposer(desktop_capturer.release(), + MouseCursorMonitor::Create(options).release()) {} + +DesktopAndCursorComposer::DesktopAndCursorComposer( + DesktopCapturer* desktop_capturer, + MouseCursorMonitor* mouse_monitor) + : desktop_capturer_(desktop_capturer), mouse_monitor_(mouse_monitor) { + RTC_DCHECK(desktop_capturer_); +} + +DesktopAndCursorComposer::~DesktopAndCursorComposer() = default; + +std::unique_ptr +DesktopAndCursorComposer::CreateWithoutMouseCursorMonitor( + std::unique_ptr desktop_capturer) { + return std::unique_ptr( + new DesktopAndCursorComposer(desktop_capturer.release(), nullptr)); +} + +void DesktopAndCursorComposer::Start(DesktopCapturer::Callback* callback) { + callback_ = callback; + if (mouse_monitor_) + mouse_monitor_->Init(this, MouseCursorMonitor::SHAPE_AND_POSITION); + desktop_capturer_->Start(this); +} + +void DesktopAndCursorComposer::SetSharedMemoryFactory( + std::unique_ptr shared_memory_factory) { + desktop_capturer_->SetSharedMemoryFactory(std::move(shared_memory_factory)); +} + +void 
DesktopAndCursorComposer::CaptureFrame() { + if (mouse_monitor_) + mouse_monitor_->Capture(); + desktop_capturer_->CaptureFrame(); +} + +void DesktopAndCursorComposer::SetExcludedWindow(WindowId window) { + desktop_capturer_->SetExcludedWindow(window); +} + +bool DesktopAndCursorComposer::GetSourceList(SourceList* sources) { + return desktop_capturer_->GetSourceList(sources); +} + +bool DesktopAndCursorComposer::SelectSource(SourceId id) { + return desktop_capturer_->SelectSource(id); +} + +bool DesktopAndCursorComposer::FocusOnSelectedSource() { + return desktop_capturer_->FocusOnSelectedSource(); +} + +bool DesktopAndCursorComposer::IsOccluded(const DesktopVector& pos) { + return desktop_capturer_->IsOccluded(pos); +} + +void DesktopAndCursorComposer::OnCaptureResult( + DesktopCapturer::Result result, + std::unique_ptr frame) { + if (frame && cursor_) { + if (frame->rect().Contains(cursor_position_) && + !desktop_capturer_->IsOccluded(cursor_position_)) { + DesktopVector relative_position = + cursor_position_.subtract(frame->top_left()); +#if defined(WEBRTC_MAC) + // On OSX, the logical(DIP) and physical coordinates are used mixingly. + // For example, the captured cursor has its size in physical pixels(2x) + // and location in logical(DIP) pixels on Retina monitor. This will cause + // problem when the desktop is mixed with Retina and non-Retina monitors. + // So we use DIP pixel for all location info and compensate with the scale + // factor of current frame to the |relative_position|. 
+ const float scale = frame->scale_factor(); + relative_position.set(relative_position.x() * scale, + relative_position.y() * scale); +#endif + auto frame_with_cursor = std::make_unique( + std::move(frame), *cursor_, relative_position, previous_cursor_rect_, + cursor_changed_); + previous_cursor_rect_ = frame_with_cursor->cursor_rect(); + cursor_changed_ = false; + frame = std::move(frame_with_cursor); + } + } + + callback_->OnCaptureResult(result, std::move(frame)); +} + +void DesktopAndCursorComposer::OnMouseCursor(MouseCursor* cursor) { + cursor_changed_ = true; + cursor_.reset(cursor); +} + +void DesktopAndCursorComposer::OnMouseCursorPosition( + const DesktopVector& position) { + cursor_position_ = position; +} + +} // namespace webrtc diff --git a/modules/desktop_capture/desktop_and_cursor_composer.h b/modules/desktop_capture/desktop_and_cursor_composer.h new file mode 100644 index 0000000..8f95721 --- /dev/null +++ b/modules/desktop_capture/desktop_and_cursor_composer.h @@ -0,0 +1,93 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_DESKTOP_CAPTURE_DESKTOP_AND_CURSOR_COMPOSER_H_ +#define MODULES_DESKTOP_CAPTURE_DESKTOP_AND_CURSOR_COMPOSER_H_ + +#include + +#include "modules/desktop_capture/desktop_capture_options.h" +#include "modules/desktop_capture/desktop_capture_types.h" +#include "modules/desktop_capture/desktop_capturer.h" +#include "modules/desktop_capture/desktop_frame.h" +#include "modules/desktop_capture/desktop_geometry.h" +#include "modules/desktop_capture/mouse_cursor.h" +#include "modules/desktop_capture/mouse_cursor_monitor.h" +#include "modules/desktop_capture/shared_memory.h" +#include "rtc_base/constructor_magic.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// A wrapper for DesktopCapturer that also captures mouse using specified +// MouseCursorMonitor and renders it on the generated streams. +class RTC_EXPORT DesktopAndCursorComposer + : public DesktopCapturer, + public DesktopCapturer::Callback, + public MouseCursorMonitor::Callback { + public: + // Creates a new composer that captures mouse cursor using + // MouseCursorMonitor::Create(options) and renders it into the frames + // generated by |desktop_capturer|. + DesktopAndCursorComposer(std::unique_ptr desktop_capturer, + const DesktopCaptureOptions& options); + + ~DesktopAndCursorComposer() override; + + // Creates a new composer that relies on an external source for cursor shape + // and position information via the MouseCursorMonitor::Callback interface. + static std::unique_ptr + CreateWithoutMouseCursorMonitor( + std::unique_ptr desktop_capturer); + + // DesktopCapturer interface. 
+ void Start(DesktopCapturer::Callback* callback) override; + void SetSharedMemoryFactory( + std::unique_ptr shared_memory_factory) override; + void CaptureFrame() override; + void SetExcludedWindow(WindowId window) override; + bool GetSourceList(SourceList* sources) override; + bool SelectSource(SourceId id) override; + bool FocusOnSelectedSource() override; + bool IsOccluded(const DesktopVector& pos) override; + + // MouseCursorMonitor::Callback interface. + void OnMouseCursor(MouseCursor* cursor) override; + void OnMouseCursorPosition(const DesktopVector& position) override; + + private: + // Allows test cases to use a fake MouseCursorMonitor implementation. + friend class DesktopAndCursorComposerTest; + + // Constructor to delegate both deprecated and new constructors and allows + // test cases to use a fake MouseCursorMonitor implementation. + DesktopAndCursorComposer(DesktopCapturer* desktop_capturer, + MouseCursorMonitor* mouse_monitor); + + // DesktopCapturer::Callback interface. + void OnCaptureResult(DesktopCapturer::Result result, + std::unique_ptr frame) override; + + const std::unique_ptr desktop_capturer_; + const std::unique_ptr mouse_monitor_; + + DesktopCapturer::Callback* callback_; + + std::unique_ptr cursor_; + DesktopVector cursor_position_; + DesktopRect previous_cursor_rect_; + bool cursor_changed_ = false; + + RTC_DISALLOW_COPY_AND_ASSIGN(DesktopAndCursorComposer); +}; + +} // namespace webrtc + +#endif // MODULES_DESKTOP_CAPTURE_DESKTOP_AND_CURSOR_COMPOSER_H_ diff --git a/modules/desktop_capture/desktop_and_cursor_composer_unittest.cc b/modules/desktop_capture/desktop_and_cursor_composer_unittest.cc new file mode 100644 index 0000000..c9cb56d --- /dev/null +++ b/modules/desktop_capture/desktop_and_cursor_composer_unittest.cc @@ -0,0 +1,413 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/desktop_capture/desktop_and_cursor_composer.h" + +#include +#include + +#include +#include + +#include "modules/desktop_capture/desktop_capturer.h" +#include "modules/desktop_capture/desktop_frame.h" +#include "modules/desktop_capture/mouse_cursor.h" +#include "modules/desktop_capture/shared_desktop_frame.h" +#include "rtc_base/arraysize.h" +#include "test/gtest.h" + +namespace webrtc { + +namespace { + +const int kScreenWidth = 100; +const int kScreenHeight = 100; +const int kCursorWidth = 10; +const int kCursorHeight = 10; + +const int kTestCursorSize = 3; +const uint32_t kTestCursorData[kTestCursorSize][kTestCursorSize] = { + { + 0xffffffff, + 0x99990000, + 0xaa222222, + }, + { + 0x88008800, + 0xaa0000aa, + 0xaa333333, + }, + { + 0x00000000, + 0xaa0000aa, + 0xaa333333, + }, +}; + +uint32_t GetFakeFramePixelValue(const DesktopVector& p) { + uint32_t r = 100 + p.x(); + uint32_t g = 100 + p.y(); + uint32_t b = 100 + p.x() + p.y(); + return b + (g << 8) + (r << 16) + 0xff000000; +} + +uint32_t GetFramePixel(const DesktopFrame& frame, const DesktopVector& pos) { + return *reinterpret_cast(frame.GetFrameDataAtPos(pos)); +} + +// Blends two pixel values taking into account alpha. 
+uint32_t BlendPixels(uint32_t dest, uint32_t src) { + uint8_t alpha = 255 - ((src & 0xff000000) >> 24); + uint32_t r = + ((dest & 0x00ff0000) >> 16) * alpha / 255 + ((src & 0x00ff0000) >> 16); + uint32_t g = + ((dest & 0x0000ff00) >> 8) * alpha / 255 + ((src & 0x0000ff00) >> 8); + uint32_t b = (dest & 0x000000ff) * alpha / 255 + (src & 0x000000ff); + return b + (g << 8) + (r << 16) + 0xff000000; +} + +DesktopFrame* CreateTestFrame() { + DesktopFrame* frame = + new BasicDesktopFrame(DesktopSize(kScreenWidth, kScreenHeight)); + uint32_t* data = reinterpret_cast(frame->data()); + for (int y = 0; y < kScreenHeight; ++y) { + for (int x = 0; x < kScreenWidth; ++x) { + *(data++) = GetFakeFramePixelValue(DesktopVector(x, y)); + } + } + return frame; +} + +MouseCursor* CreateTestCursor(DesktopVector hotspot) { + std::unique_ptr image( + new BasicDesktopFrame(DesktopSize(kCursorWidth, kCursorHeight))); + uint32_t* data = reinterpret_cast(image->data()); + // Set four pixels near the hotspot and leave all other blank. + for (int y = 0; y < kTestCursorSize; ++y) { + for (int x = 0; x < kTestCursorSize; ++x) { + data[(hotspot.y() + y) * kCursorWidth + (hotspot.x() + x)] = + kTestCursorData[y][x]; + } + } + return new MouseCursor(image.release(), hotspot); +} + +class FakeScreenCapturer : public DesktopCapturer { + public: + FakeScreenCapturer() {} + + void Start(Callback* callback) override { callback_ = callback; } + + void CaptureFrame() override { + callback_->OnCaptureResult( + next_frame_ ? 
Result::SUCCESS : Result::ERROR_TEMPORARY, + std::move(next_frame_)); + } + + void SetNextFrame(std::unique_ptr next_frame) { + next_frame_ = std::move(next_frame); + } + + bool IsOccluded(const DesktopVector& pos) override { return is_occluded_; } + + void set_is_occluded(bool value) { is_occluded_ = value; } + + private: + Callback* callback_ = nullptr; + + std::unique_ptr next_frame_; + bool is_occluded_ = false; +}; + +class FakeMouseMonitor : public MouseCursorMonitor { + public: + FakeMouseMonitor() : changed_(true) {} + + void SetState(CursorState state, const DesktopVector& pos) { + state_ = state; + position_ = pos; + } + + void SetHotspot(const DesktopVector& hotspot) { + if (!hotspot_.equals(hotspot)) + changed_ = true; + hotspot_ = hotspot; + } + + void Init(Callback* callback, Mode mode) override { callback_ = callback; } + + void Capture() override { + if (changed_) { + callback_->OnMouseCursor(CreateTestCursor(hotspot_)); + } + callback_->OnMouseCursorPosition(position_); + } + + private: + Callback* callback_; + CursorState state_; + DesktopVector position_; + DesktopVector hotspot_; + bool changed_; +}; + +void VerifyFrame(const DesktopFrame& frame, + MouseCursorMonitor::CursorState state, + const DesktopVector& pos) { + // Verify that all other pixels are set to their original values. 
+ DesktopRect image_rect = + DesktopRect::MakeWH(kTestCursorSize, kTestCursorSize); + image_rect.Translate(pos); + + for (int y = 0; y < kScreenHeight; ++y) { + for (int x = 0; x < kScreenWidth; ++x) { + DesktopVector p(x, y); + if (state == MouseCursorMonitor::INSIDE && image_rect.Contains(p)) { + EXPECT_EQ(BlendPixels(GetFakeFramePixelValue(p), + kTestCursorData[y - pos.y()][x - pos.x()]), + GetFramePixel(frame, p)); + } else { + EXPECT_EQ(GetFakeFramePixelValue(p), GetFramePixel(frame, p)); + } + } + } +} + +} // namespace + +class DesktopAndCursorComposerTest : public ::testing::Test, + public DesktopCapturer::Callback { + public: + DesktopAndCursorComposerTest(bool include_cursor = true) + : fake_screen_(new FakeScreenCapturer()), + fake_cursor_(include_cursor ? new FakeMouseMonitor() : nullptr), + blender_(fake_screen_, fake_cursor_) { + blender_.Start(this); + } + + // DesktopCapturer::Callback interface + void OnCaptureResult(DesktopCapturer::Result result, + std::unique_ptr frame) override { + frame_ = std::move(frame); + } + + protected: + // Owned by |blender_|. 
+ FakeScreenCapturer* fake_screen_; + FakeMouseMonitor* fake_cursor_; + + DesktopAndCursorComposer blender_; + std::unique_ptr frame_; +}; + +class DesktopAndCursorComposerNoCursorMonitorTest + : public DesktopAndCursorComposerTest { + public: + DesktopAndCursorComposerNoCursorMonitorTest() + : DesktopAndCursorComposerTest(false) {} +}; + +TEST_F(DesktopAndCursorComposerTest, CursorShouldBeIgnoredIfNoFrameCaptured) { + struct { + int x, y; + int hotspot_x, hotspot_y; + bool inside; + } tests[] = { + {0, 0, 0, 0, true}, {50, 50, 0, 0, true}, {100, 50, 0, 0, true}, + {50, 100, 0, 0, true}, {100, 100, 0, 0, true}, {0, 0, 2, 5, true}, + {1, 1, 2, 5, true}, {50, 50, 2, 5, true}, {100, 100, 2, 5, true}, + {0, 0, 5, 2, true}, {50, 50, 5, 2, true}, {100, 100, 5, 2, true}, + {0, 0, 0, 0, false}, + }; + + for (size_t i = 0; i < arraysize(tests); i++) { + SCOPED_TRACE(i); + + DesktopVector hotspot(tests[i].hotspot_x, tests[i].hotspot_y); + fake_cursor_->SetHotspot(hotspot); + + MouseCursorMonitor::CursorState state = tests[i].inside + ? MouseCursorMonitor::INSIDE + : MouseCursorMonitor::OUTSIDE; + DesktopVector pos(tests[i].x, tests[i].y); + fake_cursor_->SetState(state, pos); + + std::unique_ptr frame( + SharedDesktopFrame::Wrap(CreateTestFrame())); + + blender_.CaptureFrame(); + // If capturer captured nothing, then cursor should be ignored, not matter + // its state or position. + EXPECT_EQ(frame_, nullptr); + } +} + +TEST_F(DesktopAndCursorComposerTest, + CursorShouldBeIgnoredIfItIsOutOfDesktopFrame) { + std::unique_ptr frame( + SharedDesktopFrame::Wrap(CreateTestFrame())); + frame->set_top_left(DesktopVector(100, 200)); + // The frame covers (100, 200) - (200, 300). 
+ + struct { + int x; + int y; + } tests[] = { + {0, 0}, {50, 50}, {50, 150}, {100, 150}, {50, 200}, + {99, 200}, {100, 199}, {200, 300}, {200, 299}, {199, 300}, + {-1, -1}, {-10000, -10000}, {10000, 10000}, + }; + for (size_t i = 0; i < arraysize(tests); i++) { + SCOPED_TRACE(i); + + fake_screen_->SetNextFrame(frame->Share()); + // The CursorState is ignored when using absolute cursor position. + fake_cursor_->SetState(MouseCursorMonitor::OUTSIDE, + DesktopVector(tests[i].x, tests[i].y)); + blender_.CaptureFrame(); + VerifyFrame(*frame_, MouseCursorMonitor::OUTSIDE, DesktopVector(0, 0)); + } +} + +TEST_F(DesktopAndCursorComposerTest, IsOccludedShouldBeConsidered) { + std::unique_ptr frame( + SharedDesktopFrame::Wrap(CreateTestFrame())); + frame->set_top_left(DesktopVector(100, 200)); + // The frame covers (100, 200) - (200, 300). + + struct { + int x; + int y; + } tests[] = { + {100, 200}, {101, 200}, {100, 201}, {101, 201}, {150, 250}, {199, 299}, + }; + fake_screen_->set_is_occluded(true); + for (size_t i = 0; i < arraysize(tests); i++) { + SCOPED_TRACE(i); + + fake_screen_->SetNextFrame(frame->Share()); + // The CursorState is ignored when using absolute cursor position. + fake_cursor_->SetState(MouseCursorMonitor::OUTSIDE, + DesktopVector(tests[i].x, tests[i].y)); + blender_.CaptureFrame(); + VerifyFrame(*frame_, MouseCursorMonitor::OUTSIDE, DesktopVector()); + } +} + +TEST_F(DesktopAndCursorComposerTest, CursorIncluded) { + std::unique_ptr frame( + SharedDesktopFrame::Wrap(CreateTestFrame())); + frame->set_top_left(DesktopVector(100, 200)); + // The frame covers (100, 200) - (200, 300). 
+ + struct { + int x; + int y; + } tests[] = { + {100, 200}, {101, 200}, {100, 201}, {101, 201}, {150, 250}, {199, 299}, + }; + for (size_t i = 0; i < arraysize(tests); i++) { + SCOPED_TRACE(i); + + const DesktopVector abs_pos(tests[i].x, tests[i].y); + const DesktopVector rel_pos(abs_pos.subtract(frame->top_left())); + + fake_screen_->SetNextFrame(frame->Share()); + // The CursorState is ignored when using absolute cursor position. + fake_cursor_->SetState(MouseCursorMonitor::OUTSIDE, abs_pos); + blender_.CaptureFrame(); + VerifyFrame(*frame_, MouseCursorMonitor::INSIDE, rel_pos); + + // Verify that the cursor is erased before the frame buffer is returned to + // the screen capturer. + frame_.reset(); + VerifyFrame(*frame, MouseCursorMonitor::OUTSIDE, DesktopVector()); + } +} + +TEST_F(DesktopAndCursorComposerNoCursorMonitorTest, + UpdatedRegionIncludesOldAndNewCursorRectsIfMoved) { + std::unique_ptr frame( + SharedDesktopFrame::Wrap(CreateTestFrame())); + DesktopRect first_cursor_rect; + { + // Block to scope test_cursor, which is invalidated by OnMouseCursor. 
+ MouseCursor* test_cursor = CreateTestCursor(DesktopVector(0, 0)); + first_cursor_rect = DesktopRect::MakeSize(test_cursor->image()->size()); + blender_.OnMouseCursor(test_cursor); + } + blender_.OnMouseCursorPosition(DesktopVector(0, 0)); + fake_screen_->SetNextFrame(frame->Share()); + blender_.CaptureFrame(); + + DesktopVector cursor_move_offset(1, 1); + DesktopRect second_cursor_rect = first_cursor_rect; + second_cursor_rect.Translate(cursor_move_offset); + blender_.OnMouseCursorPosition(cursor_move_offset); + fake_screen_->SetNextFrame(frame->Share()); + blender_.CaptureFrame(); + + EXPECT_TRUE(frame->updated_region().is_empty()); + DesktopRegion expected_region; + expected_region.AddRect(first_cursor_rect); + expected_region.AddRect(second_cursor_rect); + EXPECT_TRUE(frame_->updated_region().Equals(expected_region)); +} + +TEST_F(DesktopAndCursorComposerNoCursorMonitorTest, + UpdatedRegionIncludesOldAndNewCursorRectsIfShapeChanged) { + std::unique_ptr frame( + SharedDesktopFrame::Wrap(CreateTestFrame())); + DesktopRect first_cursor_rect; + { + // Block to scope test_cursor, which is invalidated by OnMouseCursor. + MouseCursor* test_cursor = CreateTestCursor(DesktopVector(0, 0)); + first_cursor_rect = DesktopRect::MakeSize(test_cursor->image()->size()); + blender_.OnMouseCursor(test_cursor); + } + blender_.OnMouseCursorPosition(DesktopVector(0, 0)); + fake_screen_->SetNextFrame(frame->Share()); + blender_.CaptureFrame(); + + // Create a second cursor, the same shape as the first. Since the code doesn't + // compare the cursor pixels, this is sufficient, and avoids needing two test + // cursor bitmaps. 
+ DesktopRect second_cursor_rect; + { + MouseCursor* test_cursor = CreateTestCursor(DesktopVector(0, 0)); + second_cursor_rect = DesktopRect::MakeSize(test_cursor->image()->size()); + blender_.OnMouseCursor(test_cursor); + } + fake_screen_->SetNextFrame(frame->Share()); + blender_.CaptureFrame(); + + EXPECT_TRUE(frame->updated_region().is_empty()); + DesktopRegion expected_region; + expected_region.AddRect(first_cursor_rect); + expected_region.AddRect(second_cursor_rect); + EXPECT_TRUE(frame_->updated_region().Equals(expected_region)); +} + +TEST_F(DesktopAndCursorComposerNoCursorMonitorTest, + UpdatedRegionUnchangedIfCursorUnchanged) { + std::unique_ptr frame( + SharedDesktopFrame::Wrap(CreateTestFrame())); + blender_.OnMouseCursor(CreateTestCursor(DesktopVector(0, 0))); + blender_.OnMouseCursorPosition(DesktopVector(0, 0)); + fake_screen_->SetNextFrame(frame->Share()); + blender_.CaptureFrame(); + fake_screen_->SetNextFrame(frame->Share()); + blender_.CaptureFrame(); + + EXPECT_TRUE(frame->updated_region().is_empty()); + EXPECT_TRUE(frame_->updated_region().is_empty()); +} + +} // namespace webrtc diff --git a/modules/desktop_capture/desktop_capture_options.cc b/modules/desktop_capture/desktop_capture_options.cc new file mode 100644 index 0000000..c89896d --- /dev/null +++ b/modules/desktop_capture/desktop_capture_options.cc @@ -0,0 +1,49 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "modules/desktop_capture/desktop_capture_options.h" +#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) +#include "modules/desktop_capture/mac/full_screen_mac_application_handler.h" +#elif defined(WEBRTC_WIN) +#include "modules/desktop_capture/win/full_screen_win_application_handler.h" +#endif + +namespace webrtc { + +DesktopCaptureOptions::DesktopCaptureOptions() {} +DesktopCaptureOptions::DesktopCaptureOptions( + const DesktopCaptureOptions& options) = default; +DesktopCaptureOptions::DesktopCaptureOptions(DesktopCaptureOptions&& options) = + default; +DesktopCaptureOptions::~DesktopCaptureOptions() {} + +DesktopCaptureOptions& DesktopCaptureOptions::operator=( + const DesktopCaptureOptions& options) = default; +DesktopCaptureOptions& DesktopCaptureOptions::operator=( + DesktopCaptureOptions&& options) = default; + +// static +DesktopCaptureOptions DesktopCaptureOptions::CreateDefault() { + DesktopCaptureOptions result; +#if defined(WEBRTC_USE_X11) + result.set_x_display(SharedXDisplay::CreateDefault()); +#endif +#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) + result.set_configuration_monitor(new DesktopConfigurationMonitor()); + result.set_full_screen_window_detector( + new FullScreenWindowDetector(CreateFullScreenMacApplicationHandler)); +#elif defined(WEBRTC_WIN) + result.set_full_screen_window_detector( + new FullScreenWindowDetector(CreateFullScreenWinApplicationHandler)); +#endif + return result; +} + +} // namespace webrtc diff --git a/modules/desktop_capture/desktop_capture_options.h b/modules/desktop_capture/desktop_capture_options.h new file mode 100644 index 0000000..521c80b --- /dev/null +++ b/modules/desktop_capture/desktop_capture_options.h @@ -0,0 +1,167 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#ifndef MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_OPTIONS_H_ +#define MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_OPTIONS_H_ + +#include "api/scoped_refptr.h" +#include "rtc_base/system/rtc_export.h" + +#if defined(WEBRTC_USE_X11) +#include "modules/desktop_capture/linux/shared_x_display.h" +#endif + +#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) +#include "modules/desktop_capture/mac/desktop_configuration_monitor.h" +#endif + +#include "modules/desktop_capture/full_screen_window_detector.h" + +namespace webrtc { + +// An object that stores initialization parameters for screen and window +// capturers. +class RTC_EXPORT DesktopCaptureOptions { + public: + // Returns instance of DesktopCaptureOptions with default parameters. On Linux + // also initializes X window connection. x_display() will be set to null if + // X11 connection failed (e.g. DISPLAY isn't set). + static DesktopCaptureOptions CreateDefault(); + + DesktopCaptureOptions(); + DesktopCaptureOptions(const DesktopCaptureOptions& options); + DesktopCaptureOptions(DesktopCaptureOptions&& options); + ~DesktopCaptureOptions(); + + DesktopCaptureOptions& operator=(const DesktopCaptureOptions& options); + DesktopCaptureOptions& operator=(DesktopCaptureOptions&& options); + +#if defined(WEBRTC_USE_X11) + SharedXDisplay* x_display() const { return x_display_; } + void set_x_display(rtc::scoped_refptr x_display) { + x_display_ = x_display; + } +#endif + +#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) + // TODO(zijiehe): Remove both DesktopConfigurationMonitor and + // FullScreenChromeWindowDetector out of DesktopCaptureOptions. It's not + // reasonable for external consumers to set these two parameters. 
+ DesktopConfigurationMonitor* configuration_monitor() const { + return configuration_monitor_; + } + // If nullptr is set, ScreenCapturer won't work and WindowCapturer may return + // inaccurate result from IsOccluded() function. + void set_configuration_monitor( + rtc::scoped_refptr m) { + configuration_monitor_ = m; + } + + bool allow_iosurface() const { return allow_iosurface_; } + void set_allow_iosurface(bool allow) { allow_iosurface_ = allow; } +#endif + + FullScreenWindowDetector* full_screen_window_detector() const { + return full_screen_window_detector_; + } + void set_full_screen_window_detector( + rtc::scoped_refptr detector) { + full_screen_window_detector_ = detector; + } + + // Flag indicating that the capturer should use screen change notifications. + // Enables/disables use of XDAMAGE in the X11 capturer. + bool use_update_notifications() const { return use_update_notifications_; } + void set_use_update_notifications(bool use_update_notifications) { + use_update_notifications_ = use_update_notifications; + } + + // Flag indicating if desktop effects (e.g. Aero) should be disabled when the + // capturer is active. Currently used only on Windows. + bool disable_effects() const { return disable_effects_; } + void set_disable_effects(bool disable_effects) { + disable_effects_ = disable_effects; + } + + // Flag that should be set if the consumer uses updated_region() and the + // capturer should try to provide correct updated_region() for the frames it + // generates (e.g. by comparing each frame with the previous one). 
+ bool detect_updated_region() const { return detect_updated_region_; } + void set_detect_updated_region(bool detect_updated_region) { + detect_updated_region_ = detect_updated_region; + } + +#if defined(WEBRTC_WIN) + bool allow_use_magnification_api() const { + return allow_use_magnification_api_; + } + void set_allow_use_magnification_api(bool allow) { + allow_use_magnification_api_ = allow; + } + // Allowing directx based capturer or not, this capturer works on windows 7 + // with platform update / windows 8 or upper. + bool allow_directx_capturer() const { return allow_directx_capturer_; } + void set_allow_directx_capturer(bool enabled) { + allow_directx_capturer_ = enabled; + } + + // Flag that may be set to allow use of the cropping window capturer (which + // captures the screen & crops that to the window region in some cases). An + // advantage of using this is significantly higher capture frame rates than + // capturing the window directly. A disadvantage of using this is the + // possibility of capturing unrelated content (e.g. overlapping windows that + // aren't detected properly, or neighboring regions when moving/resizing the + // captured window). Note: this flag influences the behavior of calls to + // DesktopCapturer::CreateWindowCapturer; calls to + // CroppingWindowCapturer::CreateCapturer ignore the flag (treat it as true). 
+ bool allow_cropping_window_capturer() const { + return allow_cropping_window_capturer_; + } + void set_allow_cropping_window_capturer(bool allow) { + allow_cropping_window_capturer_ = allow; + } +#endif + +#if defined(WEBRTC_USE_PIPEWIRE) + bool allow_pipewire() const { return allow_pipewire_; } + void set_allow_pipewire(bool allow) { allow_pipewire_ = allow; } +#endif + + private: +#if defined(WEBRTC_USE_X11) + rtc::scoped_refptr x_display_; +#endif + +#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) + rtc::scoped_refptr configuration_monitor_; + bool allow_iosurface_ = false; +#endif + + rtc::scoped_refptr full_screen_window_detector_; + +#if defined(WEBRTC_WIN) + bool allow_use_magnification_api_ = false; + bool allow_directx_capturer_ = false; + bool allow_cropping_window_capturer_ = false; +#endif +#if defined(WEBRTC_USE_X11) + bool use_update_notifications_ = false; +#else + bool use_update_notifications_ = true; +#endif + bool disable_effects_ = true; + bool detect_updated_region_ = false; +#if defined(WEBRTC_USE_PIPEWIRE) + bool allow_pipewire_ = false; +#endif +}; + +} // namespace webrtc + +#endif // MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_OPTIONS_H_ diff --git a/modules/desktop_capture/desktop_capture_types.h b/modules/desktop_capture/desktop_capture_types.h new file mode 100644 index 0000000..5031cbf --- /dev/null +++ b/modules/desktop_capture/desktop_capture_types.h @@ -0,0 +1,54 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_TYPES_H_ +#define MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_TYPES_H_ + +#include + +namespace webrtc { + +// Type used to identify windows on the desktop. Values are platform-specific: +// - On Windows: HWND cast to intptr_t. +// - On Linux (with X11): X11 Window (unsigned long) type cast to intptr_t. +// - On OSX: integer window number. +typedef intptr_t WindowId; + +const WindowId kNullWindowId = 0; + +// Type used to identify screens on the desktop. Values are platform-specific: +// - On Windows: integer display device index. +// - On OSX: CGDirectDisplayID cast to intptr_t. +// - On Linux (with X11): TBD. +// On Windows, ScreenId is implementation dependent: sending a ScreenId from one +// implementation to another usually won't work correctly. +typedef intptr_t ScreenId; + +// The screen id corresponds to all screen combined together. +const ScreenId kFullDesktopScreenId = -1; + +const ScreenId kInvalidScreenId = -2; + +// An integer to attach to each DesktopFrame to differentiate the generator of +// the frame. +namespace DesktopCapturerId { +constexpr uint32_t CreateFourCC(char a, char b, char c, char d) { + return ((static_cast(a)) | (static_cast(b) << 8) | + (static_cast(c) << 16) | (static_cast(d) << 24)); +} + +constexpr uint32_t kUnknown = 0; +constexpr uint32_t kScreenCapturerWinGdi = CreateFourCC('G', 'D', 'I', ' '); +constexpr uint32_t kScreenCapturerWinDirectx = CreateFourCC('D', 'X', 'G', 'I'); +} // namespace DesktopCapturerId + +} // namespace webrtc + +#endif // MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURE_TYPES_H_ diff --git a/modules/desktop_capture/desktop_capturer.cc b/modules/desktop_capture/desktop_capturer.cc new file mode 100644 index 0000000..61926a6 --- /dev/null +++ b/modules/desktop_capture/desktop_capturer.cc @@ -0,0 +1,89 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. 
+ * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/desktop_capture/desktop_capturer.h" + +#include +#include + +#include +#include + +#include "modules/desktop_capture/cropping_window_capturer.h" +#include "modules/desktop_capture/desktop_capture_options.h" +#include "modules/desktop_capture/desktop_capturer_differ_wrapper.h" + +namespace webrtc { + +DesktopCapturer::~DesktopCapturer() = default; + +void DesktopCapturer::SetSharedMemoryFactory( + std::unique_ptr shared_memory_factory) {} + +void DesktopCapturer::SetExcludedWindow(WindowId window) {} + +bool DesktopCapturer::GetSourceList(SourceList* sources) { + return true; +} + +bool DesktopCapturer::SelectSource(SourceId id) { + return false; +} + +bool DesktopCapturer::FocusOnSelectedSource() { + return false; +} + +bool DesktopCapturer::IsOccluded(const DesktopVector& pos) { + return false; +} + +// static +std::unique_ptr DesktopCapturer::CreateWindowCapturer( + const DesktopCaptureOptions& options) { +#if defined(WEBRTC_WIN) + if (options.allow_cropping_window_capturer()) { + return CroppingWindowCapturer::CreateCapturer(options); + } +#endif // defined(WEBRTC_WIN) + + std::unique_ptr capturer = CreateRawWindowCapturer(options); + if (capturer && options.detect_updated_region()) { + capturer.reset(new DesktopCapturerDifferWrapper(std::move(capturer))); + } + + return capturer; +} + +// static +std::unique_ptr DesktopCapturer::CreateScreenCapturer( + const DesktopCaptureOptions& options) { + std::unique_ptr capturer = CreateRawScreenCapturer(options); + if (capturer && options.detect_updated_region()) { + capturer.reset(new DesktopCapturerDifferWrapper(std::move(capturer))); + } + + return capturer; +} + 
+#if defined(WEBRTC_USE_PIPEWIRE) || defined(WEBRTC_USE_X11) +bool DesktopCapturer::IsRunningUnderWayland() { + const char* xdg_session_type = getenv("XDG_SESSION_TYPE"); + if (!xdg_session_type || strncmp(xdg_session_type, "wayland", 7) != 0) + return false; + + if (!(getenv("WAYLAND_DISPLAY"))) + return false; + + return true; +} +#endif // defined(WEBRTC_USE_PIPEWIRE) || defined(WEBRTC_USE_X11) + +} // namespace webrtc diff --git a/modules/desktop_capture/desktop_capturer.h b/modules/desktop_capture/desktop_capturer.h new file mode 100644 index 0000000..64339c0 --- /dev/null +++ b/modules/desktop_capture/desktop_capturer.h @@ -0,0 +1,159 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURER_H_ +#define MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURER_H_ + +#include +#include + +#include +#include +#include +#include + +#include "modules/desktop_capture/desktop_capture_types.h" +#include "modules/desktop_capture/desktop_frame.h" +#include "modules/desktop_capture/shared_memory.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +class DesktopCaptureOptions; +class DesktopFrame; + +// Abstract interface for screen and window capturers. +class RTC_EXPORT DesktopCapturer { + public: + enum class Result { + // The frame was captured successfully. + SUCCESS, + + // There was a temporary error. The caller should continue calling + // CaptureFrame(), in the expectation that it will eventually recover. + ERROR_TEMPORARY, + + // Capture has failed and will keep failing if the caller tries calling + // CaptureFrame() again. 
+ ERROR_PERMANENT, + + MAX_VALUE = ERROR_PERMANENT + }; + + // Interface that must be implemented by the DesktopCapturer consumers. + class Callback { + public: + // Called after a frame has been captured. |frame| is not nullptr if and + // only if |result| is SUCCESS. + virtual void OnCaptureResult(Result result, + std::unique_ptr frame) = 0; + + protected: + virtual ~Callback() {} + }; + + typedef intptr_t SourceId; + + static_assert(std::is_same::value, + "SourceId should be a same type as ScreenId."); + + struct Source { + // The unique id to represent a Source of current DesktopCapturer. + SourceId id; + + // Title of the window or screen in UTF-8 encoding, maybe empty. This field + // should not be used to identify a source. + std::string title; + }; + + typedef std::vector SourceList; + + virtual ~DesktopCapturer(); + + // Called at the beginning of a capturing session. |callback| must remain + // valid until capturer is destroyed. + virtual void Start(Callback* callback) = 0; + + // Sets SharedMemoryFactory that will be used to create buffers for the + // captured frames. The factory can be invoked on a thread other than the one + // where CaptureFrame() is called. It will be destroyed on the same thread. + // Shared memory is currently supported only by some DesktopCapturer + // implementations. + virtual void SetSharedMemoryFactory( + std::unique_ptr shared_memory_factory); + + // Captures next frame, and involve callback provided by Start() function. + // Pending capture requests are canceled when DesktopCapturer is deleted. + virtual void CaptureFrame() = 0; + + // Sets the window to be excluded from the captured image in the future + // Capture calls. Used to exclude the screenshare notification window for + // screen capturing. + virtual void SetExcludedWindow(WindowId window); + + // TODO(zijiehe): Following functions should be pure virtual. The default + // implementations are for backward compatibility only. 
Remove default + // implementations once all DesktopCapturer implementations in Chromium have + // implemented these functions. + + // Gets a list of sources current capturer supports. Returns false in case of + // a failure. + // For DesktopCapturer implementations to capture screens, this function + // should return monitors. + // For DesktopCapturer implementations to capture windows, this function + // should only return root windows owned by applications. + virtual bool GetSourceList(SourceList* sources); + + // Selects a source to be captured. Returns false in case of a failure (e.g. + // if there is no source with the specified type and id.) + virtual bool SelectSource(SourceId id); + + // Brings the selected source to the front and sets the input focus on it. + // Returns false in case of a failure or no source has been selected or the + // implementation does not support this functionality. + virtual bool FocusOnSelectedSource(); + + // Returns true if the |pos| on the selected source is covered by other + // elements on the display, and is not visible to the users. + // |pos| is in full desktop coordinates, i.e. the top-left monitor always + // starts from (0, 0). + // The return value if |pos| is out of the scope of the source is undefined. + virtual bool IsOccluded(const DesktopVector& pos); + + // Creates a DesktopCapturer instance which targets to capture windows. + static std::unique_ptr CreateWindowCapturer( + const DesktopCaptureOptions& options); + + // Creates a DesktopCapturer instance which targets to capture screens. + static std::unique_ptr CreateScreenCapturer( + const DesktopCaptureOptions& options); + +#if defined(WEBRTC_USE_PIPEWIRE) || defined(WEBRTC_USE_X11) + static bool IsRunningUnderWayland(); +#endif // defined(WEBRTC_USE_PIPEWIRE) || defined(WEBRTC_USE_X11) + + protected: + // CroppingWindowCapturer needs to create raw capturers without wrappers, so + // the following two functions are protected. 
+ + // Creates a platform specific DesktopCapturer instance which targets to + // capture windows. + static std::unique_ptr CreateRawWindowCapturer( + const DesktopCaptureOptions& options); + + // Creates a platform specific DesktopCapturer instance which targets to + // capture screens. + static std::unique_ptr CreateRawScreenCapturer( + const DesktopCaptureOptions& options); +}; + +} // namespace webrtc + +#endif // MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURER_H_ diff --git a/modules/desktop_capture/desktop_capturer_differ_wrapper.cc b/modules/desktop_capture/desktop_capturer_differ_wrapper.cc new file mode 100644 index 0000000..4e80f30 --- /dev/null +++ b/modules/desktop_capture/desktop_capturer_differ_wrapper.cc @@ -0,0 +1,226 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/desktop_capture/desktop_capturer_differ_wrapper.h" + +#include +#include + +#include + +#include "modules/desktop_capture/desktop_geometry.h" +#include "modules/desktop_capture/desktop_region.h" +#include "modules/desktop_capture/differ_block.h" +#include "rtc_base/checks.h" +#include "rtc_base/time_utils.h" + +namespace webrtc { + +namespace { + +// Returns true if (0, 0) - (|width|, |height|) vector in |old_buffer| and +// |new_buffer| are equal. |width| should be less than 32 +// (defined by kBlockSize), otherwise BlockDifference() should be used. 
+bool PartialBlockDifference(const uint8_t* old_buffer, + const uint8_t* new_buffer, + int width, + int height, + int stride) { + RTC_DCHECK_LT(width, kBlockSize); + const int width_bytes = width * DesktopFrame::kBytesPerPixel; + for (int i = 0; i < height; i++) { + if (memcmp(old_buffer, new_buffer, width_bytes) != 0) { + return true; + } + old_buffer += stride; + new_buffer += stride; + } + return false; +} + +// Compares columns in the range of [|left|, |right|), in a row in the +// range of [|top|, |top| + |height|), starts from |old_buffer| and +// |new_buffer|, and outputs updated regions into |output|. |stride| is the +// DesktopFrame::stride(). +void CompareRow(const uint8_t* old_buffer, + const uint8_t* new_buffer, + const int left, + const int right, + const int top, + const int bottom, + const int stride, + DesktopRegion* const output) { + const int block_x_offset = kBlockSize * DesktopFrame::kBytesPerPixel; + const int width = right - left; + const int height = bottom - top; + const int block_count = (width - 1) / kBlockSize; + const int last_block_width = width - block_count * kBlockSize; + RTC_DCHECK_GT(last_block_width, 0); + RTC_DCHECK_LE(last_block_width, kBlockSize); + + // The first block-column in a continuous dirty area in current block-row. + int first_dirty_x_block = -1; + + // We always need to add dirty area into |output| in the last block, so handle + // it separatedly. + for (int x = 0; x < block_count; x++) { + if (BlockDifference(old_buffer, new_buffer, height, stride)) { + if (first_dirty_x_block == -1) { + // This is the first dirty block in a continuous dirty area. + first_dirty_x_block = x; + } + } else if (first_dirty_x_block != -1) { + // The block on the left is the last dirty block in a continuous + // dirty area. 
+ output->AddRect( + DesktopRect::MakeLTRB(first_dirty_x_block * kBlockSize + left, top, + x * kBlockSize + left, bottom)); + first_dirty_x_block = -1; + } + old_buffer += block_x_offset; + new_buffer += block_x_offset; + } + + bool last_block_diff; + if (last_block_width < kBlockSize) { + // The last one is a partial vector. + last_block_diff = PartialBlockDifference(old_buffer, new_buffer, + last_block_width, height, stride); + } else { + last_block_diff = BlockDifference(old_buffer, new_buffer, height, stride); + } + if (last_block_diff) { + if (first_dirty_x_block == -1) { + first_dirty_x_block = block_count; + } + output->AddRect(DesktopRect::MakeLTRB( + first_dirty_x_block * kBlockSize + left, top, right, bottom)); + } else if (first_dirty_x_block != -1) { + output->AddRect( + DesktopRect::MakeLTRB(first_dirty_x_block * kBlockSize + left, top, + block_count * kBlockSize + left, bottom)); + } +} + +// Compares |rect| area in |old_frame| and |new_frame|, and outputs dirty +// regions into |output|. +void CompareFrames(const DesktopFrame& old_frame, + const DesktopFrame& new_frame, + DesktopRect rect, + DesktopRegion* const output) { + RTC_DCHECK(old_frame.size().equals(new_frame.size())); + RTC_DCHECK_EQ(old_frame.stride(), new_frame.stride()); + rect.IntersectWith(DesktopRect::MakeSize(old_frame.size())); + + const int y_block_count = (rect.height() - 1) / kBlockSize; + const int last_y_block_height = rect.height() - y_block_count * kBlockSize; + // Offset from the start of one block-row to the next. + const int block_y_stride = old_frame.stride() * kBlockSize; + const uint8_t* prev_block_row_start = + old_frame.GetFrameDataAtPos(rect.top_left()); + const uint8_t* curr_block_row_start = + new_frame.GetFrameDataAtPos(rect.top_left()); + + int top = rect.top(); + // The last row may have a different height, so we handle it separately. 
+ for (int y = 0; y < y_block_count; y++) { + CompareRow(prev_block_row_start, curr_block_row_start, rect.left(), + rect.right(), top, top + kBlockSize, old_frame.stride(), output); + top += kBlockSize; + prev_block_row_start += block_y_stride; + curr_block_row_start += block_y_stride; + } + CompareRow(prev_block_row_start, curr_block_row_start, rect.left(), + rect.right(), top, top + last_y_block_height, old_frame.stride(), + output); +} + +} // namespace + +DesktopCapturerDifferWrapper::DesktopCapturerDifferWrapper( + std::unique_ptr base_capturer) + : base_capturer_(std::move(base_capturer)) { + RTC_DCHECK(base_capturer_); +} + +DesktopCapturerDifferWrapper::~DesktopCapturerDifferWrapper() {} + +void DesktopCapturerDifferWrapper::Start(DesktopCapturer::Callback* callback) { + callback_ = callback; + base_capturer_->Start(this); +} + +void DesktopCapturerDifferWrapper::SetSharedMemoryFactory( + std::unique_ptr shared_memory_factory) { + base_capturer_->SetSharedMemoryFactory(std::move(shared_memory_factory)); +} + +void DesktopCapturerDifferWrapper::CaptureFrame() { + base_capturer_->CaptureFrame(); +} + +void DesktopCapturerDifferWrapper::SetExcludedWindow(WindowId window) { + base_capturer_->SetExcludedWindow(window); +} + +bool DesktopCapturerDifferWrapper::GetSourceList(SourceList* sources) { + return base_capturer_->GetSourceList(sources); +} + +bool DesktopCapturerDifferWrapper::SelectSource(SourceId id) { + return base_capturer_->SelectSource(id); +} + +bool DesktopCapturerDifferWrapper::FocusOnSelectedSource() { + return base_capturer_->FocusOnSelectedSource(); +} + +bool DesktopCapturerDifferWrapper::IsOccluded(const DesktopVector& pos) { + return base_capturer_->IsOccluded(pos); +} + +void DesktopCapturerDifferWrapper::OnCaptureResult( + Result result, + std::unique_ptr input_frame) { + int64_t start_time_nanos = rtc::TimeNanos(); + if (!input_frame) { + callback_->OnCaptureResult(result, nullptr); + return; + } + RTC_DCHECK(result == Result::SUCCESS); 
+ + std::unique_ptr frame = + SharedDesktopFrame::Wrap(std::move(input_frame)); + if (last_frame_ && (last_frame_->size().width() != frame->size().width() || + last_frame_->size().height() != frame->size().height() || + last_frame_->stride() != frame->stride())) { + last_frame_.reset(); + } + + if (last_frame_) { + DesktopRegion hints; + hints.Swap(frame->mutable_updated_region()); + for (DesktopRegion::Iterator it(hints); !it.IsAtEnd(); it.Advance()) { + CompareFrames(*last_frame_, *frame, it.rect(), + frame->mutable_updated_region()); + } + } else { + frame->mutable_updated_region()->SetRect( + DesktopRect::MakeSize(frame->size())); + } + last_frame_ = frame->Share(); + + frame->set_capture_time_ms(frame->capture_time_ms() + + (rtc::TimeNanos() - start_time_nanos) / + rtc::kNumNanosecsPerMillisec); + callback_->OnCaptureResult(result, std::move(frame)); +} + +} // namespace webrtc diff --git a/modules/desktop_capture/desktop_capturer_differ_wrapper.h b/modules/desktop_capture/desktop_capturer_differ_wrapper.h new file mode 100644 index 0000000..1f70cef --- /dev/null +++ b/modules/desktop_capture/desktop_capturer_differ_wrapper.h @@ -0,0 +1,68 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURER_DIFFER_WRAPPER_H_ +#define MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURER_DIFFER_WRAPPER_H_ + +#include + +#include "modules/desktop_capture/desktop_capture_types.h" +#include "modules/desktop_capture/desktop_capturer.h" +#include "modules/desktop_capture/desktop_frame.h" +#include "modules/desktop_capture/desktop_geometry.h" +#include "modules/desktop_capture/shared_desktop_frame.h" +#include "modules/desktop_capture/shared_memory.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +// DesktopCapturer wrapper that calculates updated_region() by comparing frames +// content. This class always expects the underlying DesktopCapturer +// implementation returns a superset of updated regions in DestkopFrame. If a +// DesktopCapturer implementation does not know the updated region, it should +// set updated_region() to full frame. +// +// This class marks entire frame as updated if the frame size or frame stride +// has been changed. +class RTC_EXPORT DesktopCapturerDifferWrapper + : public DesktopCapturer, + public DesktopCapturer::Callback { + public: + // Creates a DesktopCapturerDifferWrapper with a DesktopCapturer + // implementation, and takes its ownership. + explicit DesktopCapturerDifferWrapper( + std::unique_ptr base_capturer); + + ~DesktopCapturerDifferWrapper() override; + + // DesktopCapturer interface. + void Start(DesktopCapturer::Callback* callback) override; + void SetSharedMemoryFactory( + std::unique_ptr shared_memory_factory) override; + void CaptureFrame() override; + void SetExcludedWindow(WindowId window) override; + bool GetSourceList(SourceList* screens) override; + bool SelectSource(SourceId id) override; + bool FocusOnSelectedSource() override; + bool IsOccluded(const DesktopVector& pos) override; + + private: + // DesktopCapturer::Callback interface. 
+ void OnCaptureResult(Result result, + std::unique_ptr frame) override; + + const std::unique_ptr base_capturer_; + DesktopCapturer::Callback* callback_; + std::unique_ptr last_frame_; +}; + +} // namespace webrtc + +#endif // MODULES_DESKTOP_CAPTURE_DESKTOP_CAPTURER_DIFFER_WRAPPER_H_ diff --git a/modules/desktop_capture/desktop_capturer_differ_wrapper_unittest.cc b/modules/desktop_capture/desktop_capturer_differ_wrapper_unittest.cc new file mode 100644 index 0000000..d16390d --- /dev/null +++ b/modules/desktop_capture/desktop_capturer_differ_wrapper_unittest.cc @@ -0,0 +1,291 @@ +/* + * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/desktop_capture/desktop_capturer_differ_wrapper.h" + +#include +#include +#include +#include + +#include "modules/desktop_capture/desktop_geometry.h" +#include "modules/desktop_capture/desktop_region.h" +#include "modules/desktop_capture/differ_block.h" +#include "modules/desktop_capture/fake_desktop_capturer.h" +#include "modules/desktop_capture/mock_desktop_capturer_callback.h" +#include "rtc_base/random.h" +#include "rtc_base/time_utils.h" +#include "system_wrappers/include/cpu_features_wrapper.h" +#include "test/gtest.h" + +namespace webrtc { + +namespace { + +// Compares and asserts |frame|.updated_region() equals to |rects|. This +// function does not care about the order of the |rects| and it does not expect +// DesktopRegion to return an exact area for each rectangle in |rects|. +template
'), + '
', + '
', + 'APM QA results ({})'.format( + self._output_filepath), + '
', + ] + + # Tab selectors. + html.append('
') + for tab_index, score_name in enumerate(score_names): + is_active = tab_index == 0 + html.append('' + '{}'.format(tab_index, + ' is-active' if is_active else '', + self._FormatName(score_name))) + html.append('
') + + html.append('
') + html.append('
') + + # Tabs content. + for tab_index, score_name in enumerate(score_names): + html.append('
'.format( + ' is-active' if is_active else '', tab_index)) + html.append('
') + html.append(self._BuildScoreTab(score_name, ('s{}'.format(tab_index),))) + html.append('
') + html.append('
') + + html.append('
') + html.append('