From 1c987716bb05111330dc78a68572352b9fad00e9 Mon Sep 17 00:00:00 2001 From: Ilias Pavlidakis Date: Thu, 19 Sep 2024 21:53:06 +0300 Subject: [PATCH] [Fix]Track orientation issue (#534) --- CHANGELOG.md | 1 + .../StreamDeviceOrientationAdapter.swift | 50 +++++-- .../StreamVideoCaptureHandler.swift | 63 ++++----- StreamVideo.xcodeproj/project.pbxproj | 30 +++- .../ParticipantsGridLayout_Tests.swift | 10 +- .../Mock/MockRTCVideoCapturerDelegate.swift | 22 +++ .../CVPixelBuffer+Convenience.swift | 46 ++++++ .../StreamVideoCaptureHandler_Tests.swift | 131 ++++++++++++++++++ 8 files changed, 300 insertions(+), 53 deletions(-) rename Sources/{StreamVideoSwiftUI => StreamVideo}/Utils/DeviceOrientation/StreamDeviceOrientationAdapter.swift (71%) create mode 100644 StreamVideoTests/Mock/MockRTCVideoCapturerDelegate.swift create mode 100644 StreamVideoTests/Utilities/Extensions/CVPixelBuffer+Convenience.swift create mode 100644 StreamVideoTests/WebRTC/VideoCapturing/StreamVideoCaptureHandler_Tests.swift diff --git a/CHANGELOG.md b/CHANGELOG.md index 0e31074c5..336b544a5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). ### 🔄 Changed - Updated the default sorting for Participants during a call to minimize the movement of already visible tiles [#515](https://github.com/GetStream/stream-video-swift/pull/515) +- **Breaking** The `StreamDeviceOrientation` values now are `.portrait(isUpsideDown: Bool)` & `.landscape(isLeft: Bool)`. [#534](https://github.com/GetStream/stream-video-swift/pull/534) ### 🐞 Fixed - An `MissingPermissions` error was thrown when creating a `StreamVideo` with anonymous user type. [#525](https://github.com/GetStream/stream-video-swift/pull/525) diff --git a/Sources/StreamVideoSwiftUI/Utils/DeviceOrientation/StreamDeviceOrientationAdapter.swift b/Sources/StreamVideo/Utils/DeviceOrientation/StreamDeviceOrientationAdapter.swift similarity index 71% rename from Sources/StreamVideoSwiftUI/Utils/DeviceOrientation/StreamDeviceOrientationAdapter.swift rename to Sources/StreamVideo/Utils/DeviceOrientation/StreamDeviceOrientationAdapter.swift index 2b40d35b9..34f2cf54f 100644 --- a/Sources/StreamVideoSwiftUI/Utils/DeviceOrientation/StreamDeviceOrientationAdapter.swift +++ b/Sources/StreamVideo/Utils/DeviceOrientation/StreamDeviceOrientationAdapter.swift @@ -4,20 +4,46 @@ import Combine import Foundation -import StreamVideo #if canImport(UIKit) import UIKit #endif /// An enumeration representing device orientations: portrait or landscape. public enum StreamDeviceOrientation: Equatable { - case portrait, landscape + case portrait(isUpsideDown: Bool) + case landscape(isLeft: Bool) /// A computed property that indicates whether the orientation is portrait. - public var isPortrait: Bool { self == .portrait } + public var isPortrait: Bool { + switch self { + case .landscape: + return false + case .portrait: + return true + } + } /// A computed property that indicates whether the orientation is landscape. - public var isLandscape: Bool { self == .landscape } + public var isLandscape: Bool { + switch self { + case .landscape: + return true + case .portrait: + return false + } + } + + public var cgOrientation: CGImagePropertyOrientation { + switch self { + /// Handle known portrait orientations + case let .portrait(isUpsideDown): + return isUpsideDown ?.right : .left + + /// Handle known landscape orientations + case let .landscape(isLeft): + return isLeft ? 
.up : .down + } + } } /// An observable object that adapts to device orientation changes. @@ -28,14 +54,18 @@ open class StreamDeviceOrientationAdapter: ObservableObject { public static let defaultProvider: Provider = { #if canImport(UIKit) switch UIDevice.current.orientation { - case .unknown, .portrait, .portraitUpsideDown: - return .portrait - case .landscapeLeft, .landscapeRight: - return .landscape + case .unknown, .portrait: + return .portrait(isUpsideDown: false) + case .portraitUpsideDown: + return .portrait(isUpsideDown: true) + case .landscapeLeft: + return .landscape(isLeft: true) + case .landscapeRight: + return .landscape(isLeft: false) case .faceUp, .faceDown: - return .portrait + return .portrait(isUpsideDown: false) @unknown default: - return .portrait + return .portrait(isUpsideDown: false) } #else return .portrait diff --git a/Sources/StreamVideo/WebRTC/VideoCapturing/StreamVideoCaptureHandler.swift b/Sources/StreamVideo/WebRTC/VideoCapturing/StreamVideoCaptureHandler.swift index 1eeb14fcd..bb4e265ff 100644 --- a/Sources/StreamVideo/WebRTC/VideoCapturing/StreamVideoCaptureHandler.swift +++ b/Sources/StreamVideo/WebRTC/VideoCapturing/StreamVideoCaptureHandler.swift @@ -2,24 +2,28 @@ // Copyright © 2024 Stream.io Inc. All rights reserved. // +import Combine import Foundation @preconcurrency import StreamWebRTC final class StreamVideoCaptureHandler: NSObject, RTCVideoCapturerDelegate { - let source: RTCVideoSource + @Injected(\.orientationAdapter) private var orientationAdapter + + let source: RTCVideoCapturerDelegate let filters: [VideoFilter] let context: CIContext let colorSpace: CGColorSpace var selectedFilter: VideoFilter? - var sceneOrientation: UIInterfaceOrientation = .unknown + var sceneOrientation: StreamDeviceOrientation = .portrait(isUpsideDown: false) var currentCameraPosition: AVCaptureDevice.Position = .front private let handleRotation: Bool private lazy var serialActor = SerialActor() + private var orientationCancellable: AnyCancellable? init( - source: RTCVideoSource, + source: RTCVideoCapturerDelegate, filters: [VideoFilter], handleRotation: Bool = true ) { @@ -29,13 +33,13 @@ final class StreamVideoCaptureHandler: NSObject, RTCVideoCapturerDelegate { context = CIContext(options: [CIContextOption.useSoftwareRenderer: false]) colorSpace = CGColorSpaceCreateDeviceRGB() super.init() - NotificationCenter.default.addObserver( - self, - selector: #selector(updateRotation), - name: UIDevice.orientationDidChangeNotification, - object: nil - ) - updateRotation() + + orientationCancellable = orientationAdapter + .$orientation + .removeDuplicates() + .receive(on: DispatchQueue.main) + .assign(to: \Self.sceneOrientation, onWeak: self) + sceneOrientation = orientationAdapter.orientation } func capturer( @@ -71,32 +75,29 @@ final class StreamVideoCaptureHandler: NSObject, RTCVideoCapturerDelegate { } } - @objc private func updateRotation() { - DispatchQueue.main.async { - self.sceneOrientation = UIApplication.shared.windows.first?.windowScene?.interfaceOrientation ?? 
.unknown - } - } - private func adjustRotation( _ capturer: RTCVideoCapturer, for buffer: RTCCVPixelBuffer?, frame: RTCVideoFrame ) -> RTCVideoFrame { - #if os(macOS) || targetEnvironment(simulator) || targetEnvironment(macCatalyst) + #if os(macOS) || targetEnvironment(macCatalyst) var rotation = RTCVideoRotation._0 #else var rotation = RTCVideoRotation._90 switch sceneOrientation { - case .portrait: - rotation = ._90 - case .portraitUpsideDown: - rotation = ._270 - case .landscapeRight: - rotation = currentCameraPosition == .front ? ._180 : ._0 - case .landscapeLeft: - rotation = currentCameraPosition == .front ? ._0 : ._180 - default: - break + case let .portrait(isUpsideDown): + rotation = isUpsideDown ? ._270 : ._90 + case let .landscape(isLeft): + switch (isLeft, currentCameraPosition == .front) { + case (true, true): + rotation = ._180 + case (true, false): + rotation = ._0 + case (false, true): + rotation = ._0 + case (false, false): + rotation = ._180 + } } #endif if rotation != frame.rotation, let _buffer = buffer ?? frame.buffer as? RTCCVPixelBuffer { @@ -121,14 +122,6 @@ final class StreamVideoCaptureHandler: NSObject, RTCVideoCapturerDelegate { ) ) ?? image } - - deinit { - NotificationCenter.default.removeObserver( - self, - name: UIDevice.orientationDidChangeNotification, - object: nil - ) - } } extension StreamVideoCaptureHandler: @unchecked Sendable {} diff --git a/StreamVideo.xcodeproj/project.pbxproj b/StreamVideo.xcodeproj/project.pbxproj index 484fce661..2f35c9f19 100644 --- a/StreamVideo.xcodeproj/project.pbxproj +++ b/StreamVideo.xcodeproj/project.pbxproj @@ -100,7 +100,6 @@ 402F04A92B70ED8600CA1986 /* StreamCallStatisticsReporter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402F04A62B70ED8600CA1986 /* StreamCallStatisticsReporter.swift */; }; 402F04AA2B70ED8600CA1986 /* Statistics+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402F04A72B70ED8600CA1986 /* Statistics+Convenience.swift */; }; 402F04AB2B70ED8600CA1986 /* StreamCallStatisticsFormatter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402F04A82B70ED8600CA1986 /* StreamCallStatisticsFormatter.swift */; }; - 402F04AE2B714E9B00CA1986 /* StreamDeviceOrientationAdapter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402F04AD2B714E9B00CA1986 /* StreamDeviceOrientationAdapter.swift */; }; 402F04AF2B7245F800CA1986 /* DemoCallView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F446012A9E2C23004BE3DA /* DemoCallView.swift */; }; 402F04B12B724EE500CA1986 /* CallViewModel+Snapshot.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402F04B02B724EE500CA1986 /* CallViewModel+Snapshot.swift */; }; 4030E5A02A9DF5BD003E8CBA /* AppEnvironment.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4030E59F2A9DF5BD003E8CBA /* AppEnvironment.swift */; }; @@ -559,6 +558,12 @@ 40FB15212BF78FA100D5E580 /* Publisher+NextValue.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40FB15202BF78FA100D5E580 /* Publisher+NextValue.swift */; }; 40FBEF492AC30343007CFF17 /* Safari.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40FBEF482AC30343007CFF17 /* Safari.swift */; }; 40FBEF4B2AC30371007CFF17 /* XCUIApplication+KeyboardIntroduction.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40FBEF4A2AC30371007CFF17 /* XCUIApplication+KeyboardIntroduction.swift */; }; + 40FE5EB32C9C73E0006B0881 /* StreamDeviceOrientationAdapter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402F04AD2B714E9B00CA1986 /* StreamDeviceOrientationAdapter.swift */; }; + 40FE5EBB2C9C7D40006B0881 /* 
StreamVideoCaptureHandler_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40FE5EBA2C9C7D40006B0881 /* StreamVideoCaptureHandler_Tests.swift */; }; + 40FE5EBD2C9C82A6006B0881 /* MockRTCVideoCapturerDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40FE5EBC2C9C82A6006B0881 /* MockRTCVideoCapturerDelegate.swift */; }; + 40FE5EBF2C9C82CD006B0881 /* CVPixelBuffer+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40FE5EBE2C9C82CD006B0881 /* CVPixelBuffer+Convenience.swift */; }; + 40FE5EC02C9C82CD006B0881 /* CVPixelBuffer+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40FE5EBE2C9C82CD006B0881 /* CVPixelBuffer+Convenience.swift */; }; + 40FE5EC12C9C82CD006B0881 /* CVPixelBuffer+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40FE5EBE2C9C82CD006B0881 /* CVPixelBuffer+Convenience.swift */; }; 43217A0C2A44A28B002B5857 /* ConnectionErrorEvent.swift in Sources */ = {isa = PBXBuildFile; fileRef = 43217A0B2A44A28B002B5857 /* ConnectionErrorEvent.swift */; }; 4351AEAD2A40588D00D32D0D /* IntegrationTest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4351AEAC2A40588D00D32D0D /* IntegrationTest.swift */; }; 4351AEAF2A40591800D32D0D /* CallCRUDTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4351AEAE2A40591800D32D0D /* CallCRUDTests.swift */; }; @@ -1795,6 +1800,9 @@ 40FB15202BF78FA100D5E580 /* Publisher+NextValue.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Publisher+NextValue.swift"; sourceTree = ""; }; 40FBEF482AC30343007CFF17 /* Safari.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Safari.swift; sourceTree = ""; }; 40FBEF4A2AC30371007CFF17 /* XCUIApplication+KeyboardIntroduction.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "XCUIApplication+KeyboardIntroduction.swift"; sourceTree = ""; }; + 40FE5EBA2C9C7D40006B0881 /* StreamVideoCaptureHandler_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamVideoCaptureHandler_Tests.swift; sourceTree = ""; }; + 40FE5EBC2C9C82A6006B0881 /* MockRTCVideoCapturerDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockRTCVideoCapturerDelegate.swift; sourceTree = ""; }; + 40FE5EBE2C9C82CD006B0881 /* CVPixelBuffer+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CVPixelBuffer+Convenience.swift"; sourceTree = ""; }; 43217A0B2A44A28B002B5857 /* ConnectionErrorEvent.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ConnectionErrorEvent.swift; sourceTree = ""; }; 4351AEAC2A40588D00D32D0D /* IntegrationTest.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = IntegrationTest.swift; sourceTree = ""; }; 4351AEAE2A40591800D32D0D /* CallCRUDTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallCRUDTests.swift; sourceTree = ""; }; @@ -3291,6 +3299,7 @@ 409CA7972BEE21660045F7AA /* Extensions */ = { isa = PBXGroup; children = ( + 40FE5EBE2C9C82CD006B0881 /* CVPixelBuffer+Convenience.swift */, 40382F412C89CF9300C2D00F /* Stream_Video_Sfu_Models_ConnectionQuality+Convenience.swift */, 40382F442C89D00200C2D00F /* Stream_Video_Sfu_Models_Participant+Convenience.swift */, 409CA7982BEE21720045F7AA /* XCTestCase+PredicateFulfillment.swift */, @@ -3991,6 +4000,14 @@ path = UITests; sourceTree = ""; }; + 40FE5EB92C9C7D35006B0881 /* VideoCapturing */ = { + isa = 
PBXGroup; + children = ( + 40FE5EBA2C9C7D40006B0881 /* StreamVideoCaptureHandler_Tests.swift */, + ); + path = VideoCapturing; + sourceTree = ""; + }; 4351AEAB2A40586B00D32D0D /* IntegrationTests */ = { isa = PBXGroup; children = ( @@ -4550,6 +4567,7 @@ 8492B87629081CE700006649 /* Mock */ = { isa = PBXGroup; children = ( + 40FE5EBC2C9C82A6006B0881 /* MockRTCVideoCapturerDelegate.swift */, 40483CB92C9B1E6000B4FCA8 /* MockWebRTCCoordinatorFactory.swift */, 40AF6A3A2C93469000BA2935 /* MockWebSocketClientFactory.swift */, 406B3C5C2C92E37500FC93A1 /* MockInternetConnection.swift */, @@ -4718,6 +4736,7 @@ 40A0E9612B88D3DC0089E8D3 /* UIInterfaceOrientation+CGOrientation.swift */, 40C4DF432C1C261D0035DBC2 /* Publisher+WeakAssign.swift */, 408CE0F22BD905920052EC3A /* Models+Sendable.swift */, + 402F04AC2B714E9B00CA1986 /* DeviceOrientation */, ); path = Utils; sourceTree = ""; @@ -4789,6 +4808,7 @@ 84D6494529E9F2B7002CA428 /* WebRTC */ = { isa = PBXGroup; children = ( + 40FE5EB92C9C7D35006B0881 /* VideoCapturing */, 406B3C072C8F602D00FC93A1 /* v2 */, 40AB34BD2C5D33C800B5B6B3 /* SFU */, 40AB34B72C5D2F4D00B5B6B3 /* Extensions */, @@ -5034,7 +5054,6 @@ 4049CE802BBBF73A003D07D2 /* AsyncImage */, 403FF3E22BA1D2270092CE8A /* StreamPixelBufferRepository */, 40FA12EE2B76AC4B00CE3EC9 /* Extensions */, - 402F04AC2B714E9B00CA1986 /* DeviceOrientation */, 40E1104A2B5A9F5B007DF492 /* Formatters */, 40A9416C2B4D958A006D6965 /* PictureInPicture */, 40AA2EE02AE00179000DCA5C /* ClipCorners.swift */, @@ -6405,6 +6424,7 @@ 8206D8532A5FF3260099F5EC /* SystemEnvironment+Version.swift in Sources */, 84CD12202C73831000056640 /* CallMissedEvent.swift in Sources */, 842B8E1C2A2DFED900863A87 /* AcceptCallResponse.swift in Sources */, + 40FE5EB32C9C73E0006B0881 /* StreamDeviceOrientationAdapter.swift in Sources */, 40C9E45B2C9987CE00802B28 /* Publisher+Sendable.swift in Sources */, 40F18B8E2BEBB65100ADF76E /* View+OptionalPublisher.swift in Sources */, 40C9E4512C9880DB00802B28 /* Unwrap.swift in Sources */, @@ -6523,6 +6543,7 @@ 842747F129EED88800E063AD /* InternetConnection_Tests.swift in Sources */, 40C9E4552C988CE100802B28 /* WebRTCJoinRequestFactory_Tests.swift in Sources */, 84F58B9329EEB53E00010C4C /* EventMiddleware_Mock.swift in Sources */, + 40FE5EBF2C9C82CD006B0881 /* CVPixelBuffer+Convenience.swift in Sources */, 842747EC29EED59000E063AD /* JSONDecoder_Tests.swift in Sources */, 406B3C142C8F870400FC93A1 /* MockActiveCallProvider.swift in Sources */, 841FF5052A5D815700809BBB /* VideoCapturerUtils_Tests.swift in Sources */, @@ -6559,6 +6580,7 @@ 40382F472C89D00200C2D00F /* Stream_Video_Sfu_Models_Participant+Convenience.swift in Sources */, 84DC44982BA3ACC70050290C /* CallStatsReporting_Tests.swift in Sources */, 84F58B7229EE922700010C4C /* WebSocketConnectionState_Tests.swift in Sources */, + 40FE5EBD2C9C82A6006B0881 /* MockRTCVideoCapturerDelegate.swift in Sources */, 40382F3D2C89C11D00C2D00F /* MockRTCPeerConnectionCoordinatorFactory.swift in Sources */, 40AB31262A49838000C270E1 /* EventTests.swift in Sources */, 84F58B7C29EE979F00010C4C /* VirtualTime.swift in Sources */, @@ -6590,6 +6612,7 @@ 8492B87A29081E6600006649 /* StreamVideo_Mock.swift in Sources */, 84D6494729E9F2D0002CA428 /* WebRTCClient_Tests.swift in Sources */, 4013387A2BF248CC007318BD /* MockCall.swift in Sources */, + 40FE5EBB2C9C7D40006B0881 /* StreamVideoCaptureHandler_Tests.swift in Sources */, 40AB34BF2C5D33CF00B5B6B3 /* SFUAdapter_Tests.swift in Sources */, 40F017532BBEF01F00E89FD1 /* RingSettings+Dummy.swift in Sources */, 
40F017732BBEF28600E89FD1 /* CallAcceptedEvent+Dummy.swift in Sources */, @@ -6759,7 +6782,6 @@ 8469593E29BF214700134EA0 /* ViewExtensions.swift in Sources */, 4049CE822BBBF74C003D07D2 /* LegacyAsyncImage.swift in Sources */, 40F0C3A72BC7FAA400AB75AD /* VideoRendererPool.swift in Sources */, - 402F04AE2B714E9B00CA1986 /* StreamDeviceOrientationAdapter.swift in Sources */, 4049CE842BBBF8EF003D07D2 /* StreamAsyncImage.swift in Sources */, 40C7B82C2B612D6000FB9DB2 /* ParticipantsListViewModifier.swift in Sources */, 40245F322BE269E300FCF075 /* StatelessVideoIconView.swift in Sources */, @@ -6812,6 +6834,7 @@ 40245F452BE2746300FCF075 /* CallIngressResponse+Dummy.swift in Sources */, 40245F462BE2746300FCF075 /* GeofenceSettings+Dummy.swift in Sources */, 40245F472BE2746300FCF075 /* CallRejectedEvent+Dummy.swift in Sources */, + 40FE5EC12C9C82CD006B0881 /* CVPixelBuffer+Convenience.swift in Sources */, 40245F482BE2746300FCF075 /* CallParticipantResponse+Dummy.swift in Sources */, 40245F492BE2746300FCF075 /* EgressHLSResponse+Dummy.swift in Sources */, 40245F4A2BE2746300FCF075 /* CallSessionParticipantLeftEvent+Dummy.swift in Sources */, @@ -6913,6 +6936,7 @@ 40382F482C89D03700C2D00F /* Stream_Video_Sfu_Models_ConnectionQuality+Convenience.swift in Sources */, 408CE0F92BD95F1B0052EC3A /* VideoConfig+Dummy.swift in Sources */, 40382F462C89D00200C2D00F /* Stream_Video_Sfu_Models_Participant+Convenience.swift in Sources */, + 40FE5EC02C9C82CD006B0881 /* CVPixelBuffer+Convenience.swift in Sources */, 82E3BA432A0BAE0A001AB93E /* EventBatcher_Mock.swift in Sources */, 8493227E29093A420013C029 /* StreamVideo_Mock.swift in Sources */, 82E3BA4F2A0BAE4E001AB93E /* VirtualTimer.swift in Sources */, diff --git a/StreamVideoSwiftUITests/CallView/ParticipantsGridLayout_Tests.swift b/StreamVideoSwiftUITests/CallView/ParticipantsGridLayout_Tests.swift index ebd016381..42a8eae00 100644 --- a/StreamVideoSwiftUITests/CallView/ParticipantsGridLayout_Tests.swift +++ b/StreamVideoSwiftUITests/CallView/ParticipantsGridLayout_Tests.swift @@ -10,7 +10,7 @@ import XCTest final class ParticipantsGridLayout_Tests: StreamVideoUITestCase { - private var mockedOrientation: StreamDeviceOrientation! = .portrait + private var mockedOrientation: StreamDeviceOrientation! = .portrait(isUpsideDown: false) private lazy var orientationAdapter: StreamDeviceOrientationAdapter! = .init { self.mockedOrientation } private lazy var callController: CallController_Mock! = CallController_Mock( @@ -45,7 +45,7 @@ final class ParticipantsGridLayout_Tests: StreamVideoUITestCase { @MainActor func test_grid_participantWithAudio_snapshot() { - mockedOrientation = .portrait + mockedOrientation = .portrait(isUpsideDown: false) for count in gridParticipants { let layout = ParticipantsGridLayout( @@ -61,7 +61,7 @@ final class ParticipantsGridLayout_Tests: StreamVideoUITestCase { @MainActor func test_grid_participantWithoutAudio_snapshot() { - mockedOrientation = .portrait + mockedOrientation = .portrait(isUpsideDown: false) for count in gridParticipants { let layout = ParticipantsGridLayout( @@ -77,7 +77,7 @@ final class ParticipantsGridLayout_Tests: StreamVideoUITestCase { @MainActor func test_grid_participantsConnectionQuality_snapshot() throws { - mockedOrientation = .portrait + mockedOrientation = .portrait(isUpsideDown: false) for quality in connectionQuality { let count = gridParticipants.last! 
@@ -94,7 +94,7 @@ final class ParticipantsGridLayout_Tests: StreamVideoUITestCase { @MainActor func test_grid_participantsSpeaking_snapshot() { - mockedOrientation = .portrait + mockedOrientation = .portrait(isUpsideDown: false) for count in gridParticipants { let participants = ParticipantFactory.get(count, speaking: true) diff --git a/StreamVideoTests/Mock/MockRTCVideoCapturerDelegate.swift b/StreamVideoTests/Mock/MockRTCVideoCapturerDelegate.swift new file mode 100644 index 000000000..cc57cb78c --- /dev/null +++ b/StreamVideoTests/Mock/MockRTCVideoCapturerDelegate.swift @@ -0,0 +1,22 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +// +// MockRTCVideoCapturerDelegate.swift +// StreamVideo +// +// Created by Ilias Pavlidakis on 19/9/24. +// +import StreamWebRTC + +final class MockRTCVideoCapturerDelegate: NSObject, RTCVideoCapturerDelegate { + private(set) var didCaptureWasCalledWith: (capturer: RTCVideoCapturer, frame: RTCVideoFrame)? + + func capturer( + _ capturer: RTCVideoCapturer, + didCapture frame: RTCVideoFrame + ) { + didCaptureWasCalledWith = (capturer, frame) + } +} diff --git a/StreamVideoTests/Utilities/Extensions/CVPixelBuffer+Convenience.swift b/StreamVideoTests/Utilities/Extensions/CVPixelBuffer+Convenience.swift new file mode 100644 index 000000000..493e16eda --- /dev/null +++ b/StreamVideoTests/Utilities/Extensions/CVPixelBuffer+Convenience.swift @@ -0,0 +1,46 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import CoreVideo +import Foundation +import StreamVideo + +extension CVPixelBuffer { + static func make(bufferSize: CGSize = .init(width: 100, height: 100)) throws -> CVPixelBuffer { + var cvPool: CVPixelBufferPool? + let poolAttributes: [String: Any] = [ + kCVPixelBufferPoolMinimumBufferCountKey as String: 5 + ] + let pixelBufferAttributes: [String: Any] = [ + kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA), + kCVPixelBufferWidthKey as String: Int(bufferSize.width), + kCVPixelBufferHeightKey as String: Int(bufferSize.height), + kCVPixelBufferIOSurfacePropertiesKey as String: [:] + ] + CVPixelBufferPoolCreate( + nil, + poolAttributes as CFDictionary, + pixelBufferAttributes as CFDictionary, + &cvPool + ) + guard let pool = cvPool else { + throw ClientError() + } + + var pixelBuffer: CVPixelBuffer? + let error = CVPixelBufferPoolCreatePixelBuffer( + nil, + pool, + &pixelBuffer + ) + + if error == kCVReturnWouldExceedAllocationThreshold { + throw ClientError("\(kCVReturnWouldExceedAllocationThreshold)") + } else if let pixelBuffer { + return pixelBuffer + } else { + throw ClientError() + } + } +} diff --git a/StreamVideoTests/WebRTC/VideoCapturing/StreamVideoCaptureHandler_Tests.swift b/StreamVideoTests/WebRTC/VideoCapturing/StreamVideoCaptureHandler_Tests.swift new file mode 100644 index 000000000..141dfef1b --- /dev/null +++ b/StreamVideoTests/WebRTC/VideoCapturing/StreamVideoCaptureHandler_Tests.swift @@ -0,0 +1,131 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import Foundation +@testable import StreamVideo +import StreamWebRTC +import XCTest + +final class StreamVideoCaptureHandler_Tests: XCTestCase { + + private lazy var source: MockRTCVideoCapturerDelegate! = .init() + private lazy var subject: StreamVideoCaptureHandler! 
= .init( + source: source, + filters: [] + ) + + // MARK: - Lifecycle + + override func tearDown() { + subject = nil + source = nil + super.tearDown() + } + + // MARK: - capturer(_:didCapture:) + + // MARK: camera: front + + func test_didCapture_orientationPortraitCameraFront_frameHasExpectedOrientation() async throws { + try await assertFrameOrientation( + deviceOrientation: .portrait(isUpsideDown: false), + cameraPosition: .front, + expected: ._90 + ) + } + + func test_didCapture_orientationPortraitUpsideDownCameraFront_frameHasExpectedOrientation() async throws { + try await assertFrameOrientation( + deviceOrientation: .portrait(isUpsideDown: true), + cameraPosition: .front, + expected: ._270 + ) + } + + func test_didCapture_orientationLandscapeLeftCameraFront_frameHasExpectedOrientation() async throws { + try await assertFrameOrientation( + deviceOrientation: .landscape(isLeft: true), + cameraPosition: .front, + expected: ._180 + ) + } + + func test_didCapture_orientationLandscapeRightCameraFront_frameHasExpectedOrientation() async throws { + try await assertFrameOrientation( + deviceOrientation: .landscape(isLeft: false), + cameraPosition: .front, + expected: ._0 + ) + } + + // MARK: camera: back + + func test_didCapture_orientationPortraitCameraBack_frameHasExpectedOrientation() async throws { + try await assertFrameOrientation( + deviceOrientation: .portrait(isUpsideDown: false), + cameraPosition: .back, + expected: ._90 + ) + } + + func test_didCapture_orientationPortraitUpsideDownCameraBack_frameHasExpectedOrientation() async throws { + try await assertFrameOrientation( + deviceOrientation: .portrait(isUpsideDown: true), + cameraPosition: .back, + expected: ._270 + ) + } + + func test_didCapture_orientationLandscapeLeftCameraBack_frameHasExpectedOrientation() async throws { + try await assertFrameOrientation( + deviceOrientation: .landscape(isLeft: true), + cameraPosition: .back, + expected: ._0 + ) + } + + func test_didCapture_orientationLandscapeRightCameraBack_frameHasExpectedOrientation() async throws { + try await assertFrameOrientation( + deviceOrientation: .landscape(isLeft: false), + cameraPosition: .back, + expected: ._180 + ) + } + + // MARK: - Private Helpers + + func assertFrameOrientation( + deviceOrientation: StreamDeviceOrientation, + cameraPosition: AVCaptureDevice.Position, + expected: RTCVideoRotation, + file: StaticString = #file, + line: UInt = #line + ) async throws { + let orientationAdapter = StreamDeviceOrientationAdapter() { deviceOrientation } + InjectedValues[\.orientationAdapter] = orientationAdapter + let capturer: RTCVideoCapturer! = .init() + _ = subject + subject.currentCameraPosition = cameraPosition + let frame = RTCVideoFrame( + buffer: RTCCVPixelBuffer(pixelBuffer: try .make()), + rotation: ._270, + timeStampNs: 0 + ) + + subject.capturer(capturer, didCapture: frame) + + await fulfillment(file: file, line: line) { self.source.didCaptureWasCalledWith != nil } + XCTAssertTrue( + source.didCaptureWasCalledWith?.capturer === capturer, + file: file, + line: line + ) + XCTAssertEqual( + source.didCaptureWasCalledWith?.frame.rotation.rawValue, + expected.rawValue, + file: file, + line: line + ) + } +}
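
For reference, below is a minimal standalone sketch of the orientation-to-rotation mapping this patch introduces (the switch in StreamVideoCaptureHandler.adjustRotation, exercised by StreamVideoCaptureHandler_Tests). It is not SDK code: the DeviceOrientation, VideoRotation, and CameraPosition types are illustrative stand-ins for StreamDeviceOrientation, RTCVideoRotation, and AVCaptureDevice.Position so the example compiles without StreamVideo or StreamWebRTC.

// Standalone sketch, not SDK code. Mirrors how the reworked orientation
// cases (.portrait(isUpsideDown:), .landscape(isLeft:)) map to the rotation
// applied to outgoing capture frames in this patch.

enum DeviceOrientation: Equatable {
    case portrait(isUpsideDown: Bool)
    case landscape(isLeft: Bool)
}

enum VideoRotation: Int {
    case _0 = 0, _90 = 90, _180 = 180, _270 = 270
}

enum CameraPosition { case front, back }

/// Maps a device orientation and camera position to the frame rotation,
/// following the same branches as the patched adjustRotation switch.
func rotation(
    for orientation: DeviceOrientation,
    camera: CameraPosition
) -> VideoRotation {
    switch orientation {
    case let .portrait(isUpsideDown):
        return isUpsideDown ? ._270 : ._90
    case let .landscape(isLeft):
        // Front and back cameras are mirrored relative to each other,
        // so landscape-left on the front camera matches landscape-right
        // on the back camera.
        return (isLeft == (camera == .front)) ? ._180 : ._0
    }
}

// Example: the same pairings asserted by StreamVideoCaptureHandler_Tests.
assert(rotation(for: .portrait(isUpsideDown: false), camera: .front) == ._90)
assert(rotation(for: .landscape(isLeft: true), camera: .front) == ._180)
assert(rotation(for: .landscape(isLeft: true), camera: .back) == ._0)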