diff --git a/CHANGELOG.md b/CHANGELOG.md index d96cc2fc1..2148f69ef 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,7 +4,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). # Upcoming -### 🔄 Changed +### 🐞 Fixed +- AudioSession management issues that were causing audio not being recorded during calls. [#906](https://github.com/GetStream/stream-video-swift/pull/906) # [1.29.1](https://github.com/GetStream/stream-video-swift/releases/tag/1.29.1) _July 25, 2025_ diff --git a/DemoApp/Sources/ViewModifiers/MoreControls/DemoMoreControlsViewModifier.swift b/DemoApp/Sources/ViewModifiers/MoreControls/DemoMoreControlsViewModifier.swift index 43785c9d3..6765a07c5 100644 --- a/DemoApp/Sources/ViewModifiers/MoreControls/DemoMoreControlsViewModifier.swift +++ b/DemoApp/Sources/ViewModifiers/MoreControls/DemoMoreControlsViewModifier.swift @@ -52,6 +52,17 @@ struct DemoMoreControlsViewModifier: ViewModifier { ) } + DemoMoreControlListButtonView( + action: { viewModel.toggleAudioOutput() }, + label: viewModel.callSettings.audioOutputOn ? "Disable audio output" : "Enable audio output" + ) { + Image( + systemName: viewModel.callSettings.audioOutputOn + ? "speaker.fill" + : "speaker.slash" + ) + } + DemoTranscriptionAndClosedCaptionsButtonView(viewModel: viewModel) DemoMoreThermalStateButtonView() diff --git a/Sources/StreamVideo/Call.swift b/Sources/StreamVideo/Call.swift index fec0a8cc9..34735d999 100644 --- a/Sources/StreamVideo/Call.swift +++ b/Sources/StreamVideo/Call.swift @@ -147,6 +147,24 @@ public class Call: @unchecked Sendable, WSEventsSubscriber { notify: Bool = false, callSettings: CallSettings? = nil ) async throws -> JoinCallResponse { + /// Determines the source from which the join action was initiated. + /// + /// This block checks if the `joinSource` has already been set in the current + /// call state. If not, it assigns `.inApp` as the default join source, + /// indicating the call was joined from within the app UI. 
The resolved + /// `JoinSource` value is then used to record how the call was joined, + /// enabling analytics and behavioral branching based on entry point. + let joinSource = await { + if let joinSource = await state.joinSource { + return joinSource + } else { + return await Task { @MainActor in + state.joinSource = .inApp + return .inApp + }.value + } + }() + let result: Any? = stateMachine.withLock { currentStage, transitionHandler in if currentStage.id == .joined, @@ -194,6 +212,7 @@ public class Call: @unchecked Sendable, WSEventsSubscriber { options: options, ring: ring, notify: notify, + source: joinSource, deliverySubject: deliverySubject ) ) @@ -1371,8 +1390,8 @@ public class Call: @unchecked Sendable, WSEventsSubscriber { /// - Parameter policy: A conforming `AudioSessionPolicy` that defines /// the audio session configuration to be applied. /// - Throws: An error if the update fails. - public func updateAudioSessionPolicy(_ policy: AudioSessionPolicy) async throws { - try await callController.updateAudioSessionPolicy(policy) + public func updateAudioSessionPolicy(_ policy: AudioSessionPolicy) async { + await callController.updateAudioSessionPolicy(policy) } /// Adds a proximity policy to manage device proximity behavior during the call. @@ -1473,22 +1492,6 @@ public class Call: @unchecked Sendable, WSEventsSubscriber { ) } - // MARK: - CallKit - - /// Notifies the `Call` instance that CallKit has activated the system audio - /// session. - /// - /// This method should be called when the system activates the `AVAudioSession` - /// as a result of an incoming or outgoing CallKit-managed call. It allows the - /// call to update the provided CallKit AVAudioSession based on the internal CallSettings. - /// - /// - Parameter audioSession: The active `AVAudioSession` instance provided by - /// CallKit. - /// - Throws: An error if the call controller fails to handle the activation. 
- internal func callKitActivated(_ audioSession: AVAudioSessionProtocol) throws { - try callController.callKitActivated(audioSession) - } - internal func didPerform(_ action: WebRTCTrace.CallKitAction) { Task(disposableBag: disposableBag) { [weak callController] in await callController?.didPerform(action) diff --git a/Sources/StreamVideo/CallKit/CallKitService.swift b/Sources/StreamVideo/CallKit/CallKitService.swift index 08c7f4495..1a00fc3ec 100644 --- a/Sources/StreamVideo/CallKit/CallKitService.swift +++ b/Sources/StreamVideo/CallKit/CallKitService.swift @@ -15,6 +15,7 @@ open class CallKitService: NSObject, CXProviderDelegate, @unchecked Sendable { @Injected(\.callCache) private var callCache @Injected(\.uuidFactory) private var uuidFactory @Injected(\.currentDevice) private var currentDevice + @Injected(\.audioStore) private var audioStore private let disposableBag = DisposableBag() /// Represents a call that is being managed by the service. @@ -95,6 +96,13 @@ open class CallKitService: NSObject, CXProviderDelegate, @unchecked Sendable { private var callEndedNotificationCancellable: AnyCancellable? private var ringingTimerCancellable: AnyCancellable? + /// A reducer responsible for handling audio session changes triggered by CallKit. + /// + /// The `callKitAudioReducer` manages updates to the audio session state in + /// response to CallKit events, ensuring proper activation and deactivation + /// of the audio system when calls are handled through CallKit. + private lazy var callKitAudioReducer = CallKitAudioSessionReducer(store: audioStore) + /// Initializes the `CallKitService` instance. 
override public init() { super.init() @@ -164,6 +172,7 @@ open class CallKitService: NSObject, CXProviderDelegate, @unchecked Sendable { return } do { + if streamVideo.state.connection != .connected { let result = await Task(disposableBag: disposableBag) { [weak self] in try await self?.streamVideo?.connect() @@ -392,17 +401,11 @@ open class CallKitService: NSObject, CXProviderDelegate, @unchecked Sendable { subsystems: .callKit ) - if - let active, - let call = callEntry(for: active)?.call { - call.didPerform(.didActivateAudioSession) - - do { - try call.callKitActivated(audioSession) - } catch { - log.error(error, subsystems: .callKit) - } - } + /// Activates the audio session for CallKit. This line notifies the audio store + /// to activate the provided AVAudioSession, ensuring that the app's audio + /// routing and configuration are correctly handled when CallKit takes control + /// of the audio session during a call. + audioStore.dispatch(.callKit(.activate(audioSession))) } public func provider( @@ -421,11 +424,11 @@ open class CallKitService: NSObject, CXProviderDelegate, @unchecked Sendable { """, subsystems: .callKit ) - if - let active, - let call = callEntry(for: active)?.call { - call.didPerform(.didDeactivateAudioSession) - } + + /// Notifies the audio store to deactivate the provided AVAudioSession. + /// This ensures that when CallKit relinquishes control of the audio session, + /// the app's audio routing and configuration are updated appropriately. + audioStore.dispatch(.callKit(.deactivate(audioSession))) } open func provider( @@ -460,6 +463,10 @@ open class CallKitService: NSObject, CXProviderDelegate, @unchecked Sendable { } do { + /// Sets the join source to `.callKit` to indicate that the call was + /// joined via CallKit. This helps with audioSession management. 
+ callToJoinEntry.call.state.joinSource = .callKit + try await callToJoinEntry.call.join(callSettings: callSettings) action.fulfill() } catch { @@ -640,9 +647,16 @@ open class CallKitService: NSObject, CXProviderDelegate, @unchecked Sendable { /// A method that's being called every time the StreamVideo instance is getting updated. /// - Parameter streamVideo: The new StreamVideo instance (nil if none) open func didUpdate(_ streamVideo: StreamVideo?) { + if streamVideo != nil { + audioStore.add(callKitAudioReducer) + } else { + audioStore.remove(callKitAudioReducer) + } + guard currentDevice.deviceType != .simulator else { return } + subscribeToCallEvents() } diff --git a/Sources/StreamVideo/CallState.swift b/Sources/StreamVideo/CallState.swift index 38bb4dde7..1d9ebed21 100644 --- a/Sources/StreamVideo/CallState.swift +++ b/Sources/StreamVideo/CallState.swift @@ -155,7 +155,14 @@ public class CallState: ObservableObject { } } } - + + /// Describes the source from which the join action was triggered for this call. + /// + /// Use this property to determine whether the current call was joined from + /// the app's UI or via a system-level integration such as CallKit. This can + /// help customize logic, analytics, and UI based on how the call was started. + var joinSource: JoinSource? + private var localCallSettingsUpdate = false private var durationCancellable: AnyCancellable? 
private nonisolated let disposableBag = DisposableBag() diff --git a/Sources/StreamVideo/CallStateMachine/Stages/Call+JoiningStage.swift b/Sources/StreamVideo/CallStateMachine/Stages/Call+JoiningStage.swift index 2017dd91a..84e349258 100644 --- a/Sources/StreamVideo/CallStateMachine/Stages/Call+JoiningStage.swift +++ b/Sources/StreamVideo/CallStateMachine/Stages/Call+JoiningStage.swift @@ -122,7 +122,8 @@ extension Call.StateMachine.Stage { callSettings: input.callSettings, options: input.options, ring: input.ring, - notify: input.notify + notify: input.notify, + source: input.source ) if let callSettings = input.callSettings { diff --git a/Sources/StreamVideo/CallStateMachine/Stages/Call+Stage.swift b/Sources/StreamVideo/CallStateMachine/Stages/Call+Stage.swift index 4aa9115e7..5c53a4171 100644 --- a/Sources/StreamVideo/CallStateMachine/Stages/Call+Stage.swift +++ b/Sources/StreamVideo/CallStateMachine/Stages/Call+Stage.swift @@ -30,6 +30,7 @@ extension Call.StateMachine { var options: CreateCallOptions? var ring: Bool var notify: Bool + var source: JoinSource var deliverySubject: PassthroughSubject var currentNumberOfRetries = 0 diff --git a/Sources/StreamVideo/Controllers/CallController.swift b/Sources/StreamVideo/Controllers/CallController.swift index 021ffa4dc..bc55602da 100644 --- a/Sources/StreamVideo/Controllers/CallController.swift +++ b/Sources/StreamVideo/Controllers/CallController.swift @@ -105,24 +105,29 @@ class CallController: @unchecked Sendable { .sinkTask(storeIn: disposableBag) { [weak self] in await self?.didFetch($0) } } - /// Joins a call with the provided information. + /// Joins a call with the provided information and join source. 
+ /// /// - Parameters: - /// - callType: the type of the call - /// - callId: the id of the call - /// - callSettings: the current call settings - /// - videoOptions: configuration options about the video - /// - options: create call options - /// - migratingFrom: if SFU migration is being performed - /// - ring: whether ringing events should be handled - /// - notify: whether uses should be notified about the call - /// - Returns: a newly created `Call`. + /// - callType: The type of the call. + /// - callId: The id of the call. + /// - callSettings: The current call settings. + /// - videoOptions: Configuration options about the video. + /// - options: Create call options. + /// - migratingFrom: If SFU migration is being performed. + /// - ring: Whether ringing events should be handled. + /// - notify: Whether users should be notified about the call. + /// - source: Describes the source from which the join action was triggered. + /// Use this to indicate if the call was joined from in-app UI or + /// via CallKit. + /// - Returns: A newly created `JoinCallResponse`. @discardableResult func joinCall( create: Bool = true, callSettings: CallSettings?, options: CreateCallOptions? 
= nil, ring: Bool = false, - notify: Bool = false + notify: Bool = false, + source: JoinSource ) async throws -> JoinCallResponse { joinCallResponseSubject = .init(nil) @@ -131,7 +136,8 @@ class CallController: @unchecked Sendable { callSettings: callSettings, options: options, ring: ring, - notify: notify + notify: notify, + source: source ) guard @@ -479,8 +485,8 @@ class CallController: @unchecked Sendable { /// /// - Parameter policy: The audio session policy to apply /// - Throws: An error if the policy update fails - func updateAudioSessionPolicy(_ policy: AudioSessionPolicy) async throws { - try await webRTCCoordinator.updateAudioSessionPolicy(policy) + func updateAudioSessionPolicy(_ policy: AudioSessionPolicy) async { + await webRTCCoordinator.updateAudioSessionPolicy(policy) } /// Sets up observation of WebRTC state changes. @@ -501,10 +507,6 @@ class CallController: @unchecked Sendable { .sink { [weak self] in self?.webRTCClientDidUpdateStage($0) } } - internal func callKitActivated(_ audioSession: AVAudioSessionProtocol) throws { - try webRTCCoordinator.callKitActivated(audioSession) - } - // MARK: - Client Capabilities func enableClientCapabilities(_ capabilities: Set) async { diff --git a/Sources/StreamVideo/Models/JoinSource.swift b/Sources/StreamVideo/Models/JoinSource.swift new file mode 100644 index 000000000..fff7f0874 --- /dev/null +++ b/Sources/StreamVideo/Models/JoinSource.swift @@ -0,0 +1,19 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import Foundation + +/// An enumeration that describes the source from which a call was joined. +/// +/// Use `JoinSource` to indicate whether the join action originated from within +/// the app's own UI or through a system-level interface such as CallKit. +/// This helps distinguish the user's entry point and can be used to customize +/// behavior or analytics based on how the call was initiated. +enum JoinSource { + /// Indicates that the call was joined from within the app's UI. 
+ case inApp + + /// Indicates that the call was joined via CallKit integration. + case callKit +} diff --git a/Sources/StreamVideo/StreamVideo.swift b/Sources/StreamVideo/StreamVideo.swift index efd28653f..93482f449 100644 --- a/Sources/StreamVideo/StreamVideo.swift +++ b/Sources/StreamVideo/StreamVideo.swift @@ -16,6 +16,7 @@ public class StreamVideo: ObservableObject, @unchecked Sendable { @Injected(\.callCache) private var callCache @Injected(\.screenProperties) private var screenProperties + @Injected(\.audioStore) private var audioStore private enum DisposableKey: String { case ringEventReceived } diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/StreamCallAudioRecorder.swift b/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/StreamCallAudioRecorder.swift index 493b23148..4c02be3d3 100644 --- a/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/StreamCallAudioRecorder.swift +++ b/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/StreamCallAudioRecorder.swift @@ -15,7 +15,7 @@ open class StreamCallAudioRecorder: @unchecked Sendable { private let processingQueue = OperationQueue(maxConcurrentOperationCount: 1) @Injected(\.activeCallProvider) private var activeCallProvider - @Injected(\.activeCallAudioSession) private var activeCallAudioSession + @Injected(\.audioStore) private var audioStore /// The builder used to create the AVAudioRecorder instance. 
let audioRecorderBuilder: AVAudioRecorderBuilder @@ -38,7 +38,6 @@ open class StreamCallAudioRecorder: @unchecked Sendable { @Atomic private(set) var isRecording: Bool = false { willSet { - activeCallAudioSession?.isRecording = newValue _isRecordingSubject.send(newValue) } } @@ -194,7 +193,7 @@ open class StreamCallAudioRecorder: @unchecked Sendable { private func setUpAudioCaptureIfRequired() async throws -> AVAudioRecorder { guard - await activeCallAudioSession?.requestRecordPermission() == true + await audioStore.requestRecordPermission() == true else { throw ClientError("🎙️Permission denied.") } @@ -219,11 +218,8 @@ open class StreamCallAudioRecorder: @unchecked Sendable { } private func deferSessionActivation() async { - guard let activeCallAudioSession else { - return - } - _ = try? await activeCallAudioSession - .$category + _ = try? await audioStore + .publisher(\.category) .filter { $0 == .playAndRecord } .nextValue(timeout: 1) } diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioSessionConfiguration.swift b/Sources/StreamVideo/Utils/AudioSession/AudioSessionConfiguration.swift index f22c4910c..9f3e4d06a 100644 --- a/Sources/StreamVideo/Utils/AudioSession/AudioSessionConfiguration.swift +++ b/Sources/StreamVideo/Utils/AudioSession/AudioSessionConfiguration.swift @@ -5,8 +5,8 @@ import AVFoundation /// Represents the audio session configuration. -public struct AudioSessionConfiguration: ReflectiveStringConvertible, - Equatable { +public struct AudioSessionConfiguration: ReflectiveStringConvertible, Equatable, Sendable { + var isActive: Bool /// The audio session category. var category: AVAudioSession.Category /// The audio session mode. @@ -18,7 +18,8 @@ public struct AudioSessionConfiguration: ReflectiveStringConvertible, /// Compares two `AudioSessionConfiguration` instances for equality. 
public static func == (lhs: Self, rhs: Self) -> Bool { - lhs.category == rhs.category && + lhs.isActive == rhs.isActive && + lhs.category == rhs.category && lhs.mode == rhs.mode && lhs.options.rawValue == rhs.options.rawValue && lhs.overrideOutputAudioPort?.rawValue == diff --git a/Sources/StreamVideo/Utils/AudioSession/CallAudioSession.swift b/Sources/StreamVideo/Utils/AudioSession/CallAudioSession.swift new file mode 100644 index 000000000..823075626 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/CallAudioSession.swift @@ -0,0 +1,222 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import Combine +import Foundation + +/// `CallAudioSession` manages the audio session for calls, handling configuration, +/// activation, and deactivation. +final class CallAudioSession: @unchecked Sendable { + + @Injected(\.audioStore) private var audioStore + + var currentRoute: AVAudioSessionRouteDescription { audioStore.session.currentRoute } + + private(set) weak var delegate: StreamAudioSessionAdapterDelegate? + private(set) var statsAdapter: WebRTCStatsAdapting? + + /// The current audio session policy used to configure the session. + /// Determines audio behaviour for the call session. + /// Set this property to change how the session is configured. + @Atomic private(set) var policy: AudioSessionPolicy + + private let disposableBag = DisposableBag() + + private var interruptionEffect: RTCAudioStore.InterruptionEffect? + private var routeChangeEffect: RTCAudioStore.RouteChangeEffect? 
+ + init( + policy: AudioSessionPolicy = DefaultAudioSessionPolicy() + ) { + self.policy = policy + + initialAudioSessionConfiguration() + } + + func activate( + callSettingsPublisher: AnyPublisher, + ownCapabilitiesPublisher: AnyPublisher, Never>, + delegate: StreamAudioSessionAdapterDelegate, + statsAdapter: WebRTCStatsAdapting?, + shouldSetActive: Bool + ) { + disposableBag.removeAll() + + self.delegate = delegate + self.statsAdapter = statsAdapter + interruptionEffect = .init(audioStore) + routeChangeEffect = .init( + audioStore, + callSettingsPublisher: callSettingsPublisher, + delegate: delegate + ) + + Publishers + .CombineLatest(callSettingsPublisher, ownCapabilitiesPublisher) + .compactMap { [policy] in policy.configuration(for: $0, ownCapabilities: $1) } + .removeDuplicates() + .sinkTask(storeIn: disposableBag) { [weak self] in await self?.didUpdateConfiguration($0) } + .store(in: disposableBag) + + audioStore.dispatch(.audioSession(.isAudioEnabled(true))) + + if shouldSetActive { + audioStore.dispatch(.audioSession(.isActive(true))) + } + + statsAdapter?.trace(.init(audioSession: traceRepresentation)) + } + + func deactivate() { + guard delegate != nil else { + return + } + + disposableBag.removeAll() + delegate = nil + interruptionEffect = nil + routeChangeEffect = nil + audioStore.dispatch(.audioSession(.isActive(false))) + statsAdapter?.trace(.init(audioSession: traceRepresentation)) + } + + func didUpdatePolicy( + _ policy: AudioSessionPolicy, + callSettings: CallSettings, + ownCapabilities: Set + ) { + self.policy = policy + Task(disposableBag: disposableBag) { [weak self] in + guard let self else { return } + await didUpdateConfiguration( + policy.configuration(for: callSettings, ownCapabilities: ownCapabilities) + ) + } + } + + // MARK: - Private Helpers + + private func didUpdateConfiguration( + _ configuration: AudioSessionConfiguration + ) async { + defer { statsAdapter?.trace(.init(audioSession: traceRepresentation)) } + + guard + 
!Task.isCancelled + else { + return + } + + do { + if configuration.isActive { + try await audioStore.dispatchAsync( + .audioSession( + .setCategory( + configuration.category, + mode: configuration.mode, + options: configuration.options + ) + ) + ) + } + } catch { + log.error( + "Unable to apply configuration category:\(configuration.category) mode:\(configuration.mode) options:\(configuration.options).", + subsystems: .audioSession, + error: error + ) + } + + if configuration.isActive, let overrideOutputAudioPort = configuration.overrideOutputAudioPort { + do { + try await audioStore.dispatchAsync( + .audioSession( + .setOverrideOutputPort(overrideOutputAudioPort) + ) + ) + } catch { + log.error( + "Unable to apply configuration overrideOutputAudioPort:\(overrideOutputAudioPort).", + subsystems: .audioSession, + error: error + ) + } + } + + await handleAudioOutputUpdateIfRequired(configuration) + } + + private func handleAudioOutputUpdateIfRequired( + _ configuration: AudioSessionConfiguration + ) async { + guard + configuration.isActive != audioStore.state.isActive + else { + return + } + do { + try await audioStore.dispatchAsync( + .audioSession( + .setAVAudioSessionActive(configuration.isActive) + ) + ) + } catch { + log.error( + "Failed while applying AudioSession isActive:\(configuration.isActive) in order to match CallSettings.audioOutputOn.", + subsystems: .audioSession, + error: error + ) + } + } + + /// - Important: This method runs whenever a CallAudioSession is created and ensures that + /// the configuration is correct for calling. This is quite important for CallKit as if the category and + /// mode aren't set correctly it won't activate the audioSession. 
+ private func initialAudioSessionConfiguration() { + let state = audioStore.state + let requiresCategoryUpdate = state.category != .playAndRecord + let requiresModeUpdate = state.mode != .voiceChat && state.mode != .videoChat + + guard requiresCategoryUpdate || requiresModeUpdate else { + log.info( + "AudioSession initial configuration isn't required.", + subsystems: .audioSession + ) + return + } + + audioStore.dispatch( + .audioSession( + .setCategory( + .playAndRecord, + mode: .voiceChat, + options: .allowBluetooth + ) + ) + ) + } +} + +extension CallAudioSession { + struct TraceRepresentation: Encodable { + var state: RTCAudioStore.State + var hasDelegate: Bool + var hasInterruptionEffect: Bool + var hasRouteChangeEffect: Bool + var policy: String + + init(_ source: CallAudioSession) { + state = source.audioStore.state + hasDelegate = source.delegate != nil + hasInterruptionEffect = source.interruptionEffect != nil + hasRouteChangeEffect = source.routeChangeEffect != nil + policy = String(describing: source.policy) + } + } + + var traceRepresentation: TraceRepresentation { + .init(self) + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession+RequestRecordPermission.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession+RequestRecordPermission.swift deleted file mode 100644 index 7214bb3b1..000000000 --- a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession+RequestRecordPermission.swift +++ /dev/null @@ -1,18 +0,0 @@ -// -// Copyright © 2025 Stream.io Inc. All rights reserved. -// - -import AVFoundation -import Foundation - -extension AVAudioSession { - /// Asynchronously requests permission to record audio. - /// - Returns: A Boolean indicating whether permission was granted. 
- private func requestRecordPermission() async -> Bool { - await withCheckedContinuation { continuation in - self.requestRecordPermission { result in - continuation.resume(returning: result) - } - } - } -} diff --git a/Sources/StreamVideo/Utils/AudioSession/Policies/DefaultAudioSessionPolicy.swift b/Sources/StreamVideo/Utils/AudioSession/Policies/DefaultAudioSessionPolicy.swift index c93ea5912..85f1674eb 100644 --- a/Sources/StreamVideo/Utils/AudioSession/Policies/DefaultAudioSessionPolicy.swift +++ b/Sources/StreamVideo/Utils/AudioSession/Policies/DefaultAudioSessionPolicy.swift @@ -25,6 +25,7 @@ public struct DefaultAudioSessionPolicy: AudioSessionPolicy { ) -> AudioSessionConfiguration { guard applicationStateAdapter.state == .foreground else { return .init( + isActive: callSettings.audioOutputOn, category: .playAndRecord, mode: callSettings.videoOn ? .videoChat : .voiceChat, options: .playAndRecord( @@ -39,6 +40,7 @@ public struct DefaultAudioSessionPolicy: AudioSessionPolicy { } return .init( + isActive: callSettings.audioOutputOn, category: .playAndRecord, mode: callSettings.videoOn && callSettings.speakerOn ? 
.videoChat : .voiceChat, options: .playAndRecord( diff --git a/Sources/StreamVideo/Utils/AudioSession/Policies/OwnCapabilitiesAudioSessionPolicy.swift b/Sources/StreamVideo/Utils/AudioSession/Policies/OwnCapabilitiesAudioSessionPolicy.swift index 1074d5312..c9995ef1c 100644 --- a/Sources/StreamVideo/Utils/AudioSession/Policies/OwnCapabilitiesAudioSessionPolicy.swift +++ b/Sources/StreamVideo/Utils/AudioSession/Policies/OwnCapabilitiesAudioSessionPolicy.swift @@ -37,6 +37,7 @@ public struct OwnCapabilitiesAudioSessionPolicy: AudioSessionPolicy { ) -> AudioSessionConfiguration { guard ownCapabilities.contains(.sendAudio) else { return .init( + isActive: callSettings.audioOutputOn, category: .playback, mode: .default, options: .playback, @@ -71,6 +72,7 @@ public struct OwnCapabilitiesAudioSessionPolicy: AudioSessionPolicy { : nil return .init( + isActive: callSettings.audioOutputOn, category: category, mode: mode, options: categoryOptions, diff --git a/Sources/StreamVideo/Utils/AudioSession/Protocols/AVAudioSessionProtocol.swift b/Sources/StreamVideo/Utils/AudioSession/Protocols/AVAudioSessionProtocol.swift index e1b7ea448..07b126904 100644 --- a/Sources/StreamVideo/Utils/AudioSession/Protocols/AVAudioSessionProtocol.swift +++ b/Sources/StreamVideo/Utils/AudioSession/Protocols/AVAudioSessionProtocol.swift @@ -25,9 +25,17 @@ protocol AVAudioSessionProtocol { func setOverrideOutputAudioPort( _ port: AVAudioSession.PortOverride ) throws + + /// The method uses a slightly different name to avoid compiler not being able to automatically + /// fulfil the conformance to this protocol. 
+ func setIsActive(_ active: Bool) throws } extension AVAudioSession: AVAudioSessionProtocol { + func setIsActive(_ active: Bool) throws { + try setActive(active) + } + func setCategory( _ category: Category, mode: Mode, diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioSessionDelegatePublisher.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioSessionDelegatePublisher.swift deleted file mode 100644 index 4449470be..000000000 --- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioSessionDelegatePublisher.swift +++ /dev/null @@ -1,207 +0,0 @@ -// -// Copyright © 2025 Stream.io Inc. All rights reserved. -// - -import AVFoundation -import Combine -import StreamWebRTC - -/// Enumeration representing all the events published by the delegate. -enum AudioSessionEvent { - case didBeginInterruption(session: RTCAudioSession) - - case didEndInterruption(session: RTCAudioSession, shouldResumeSession: Bool) - - case didChangeRoute( - session: RTCAudioSession, - reason: AVAudioSession.RouteChangeReason, - previousRoute: AVAudioSessionRouteDescription - ) - - case mediaServerTerminated(session: RTCAudioSession) - - case mediaServerReset(session: RTCAudioSession) - - case didChangeCanPlayOrRecord( - session: RTCAudioSession, - canPlayOrRecord: Bool - ) - - case didStartPlayOrRecord(session: RTCAudioSession) - - case didStopPlayOrRecord(session: RTCAudioSession) - - case didChangeOutputVolume( - audioSession: RTCAudioSession, - outputVolume: Float - ) - - case didDetectPlayoutGlitch( - audioSession: RTCAudioSession, - totalNumberOfGlitches: Int64 - ) - - case willSetActive(audioSession: RTCAudioSession, active: Bool) - - case didSetActive(audioSession: RTCAudioSession, active: Bool) - - case failedToSetActive( - audioSession: RTCAudioSession, - active: Bool, - error: Error - ) - - case audioUnitStartFailedWithError( - audioSession: RTCAudioSession, - error: Error - ) -} - -// MARK: - Delegate Publisher Class - -/// A delegate that publishes all RTCAudioSessionDelegate 
events via a Combine PassthroughSubject. -@objc -final class RTCAudioSessionDelegatePublisher: NSObject, RTCAudioSessionDelegate { - - /// The subject used to publish delegate events. - private let subject = PassthroughSubject() - - /// A public publisher that subscribers can listen to. - var publisher: AnyPublisher { - subject.eraseToAnyPublisher() - } - - // MARK: - RTCAudioSessionDelegate Methods - - func audioSessionDidBeginInterruption(_ session: RTCAudioSession) { - subject.send(.didBeginInterruption(session: session)) - } - - func audioSessionDidEndInterruption( - _ session: RTCAudioSession, - shouldResumeSession: Bool - ) { - subject.send( - .didEndInterruption( - session: session, - shouldResumeSession: shouldResumeSession - ) - ) - } - - func audioSessionDidChangeRoute( - _ session: RTCAudioSession, - reason: AVAudioSession.RouteChangeReason, - previousRoute: AVAudioSessionRouteDescription - ) { - subject.send( - .didChangeRoute( - session: session, - reason: reason, - previousRoute: previousRoute - ) - ) - } - - func audioSessionMediaServerTerminated(_ session: RTCAudioSession) { - subject.send(.mediaServerTerminated(session: session)) - } - - func audioSessionMediaServerReset(_ session: RTCAudioSession) { - subject.send(.mediaServerReset(session: session)) - } - - func audioSession( - _ session: RTCAudioSession, - didChangeCanPlayOrRecord canPlayOrRecord: Bool - ) { - subject.send( - .didChangeCanPlayOrRecord( - session: session, - canPlayOrRecord: canPlayOrRecord - ) - ) - } - - func audioSessionDidStartPlayOrRecord(_ session: RTCAudioSession) { - subject.send(.didStartPlayOrRecord(session: session)) - } - - func audioSessionDidStopPlayOrRecord(_ session: RTCAudioSession) { - subject.send(.didStopPlayOrRecord(session: session)) - } - - func audioSession( - _ audioSession: RTCAudioSession, - didChangeOutputVolume outputVolume: Float - ) { - subject.send( - .didChangeOutputVolume( - audioSession: audioSession, - outputVolume: outputVolume - ) - ) - } - - 
func audioSession( - _ audioSession: RTCAudioSession, - didDetectPlayoutGlitch totalNumberOfGlitches: Int64 - ) { - subject.send( - .didDetectPlayoutGlitch( - audioSession: audioSession, - totalNumberOfGlitches: totalNumberOfGlitches - ) - ) - } - - func audioSession( - _ audioSession: RTCAudioSession, - willSetActive active: Bool - ) { - subject.send( - .willSetActive( - audioSession: audioSession, - active: active - ) - ) - } - - func audioSession( - _ audioSession: RTCAudioSession, - didSetActive active: Bool - ) { - subject.send( - .didSetActive( - audioSession: audioSession, - active: active - ) - ) - } - - func audioSession( - _ audioSession: RTCAudioSession, - failedToSetActive active: Bool, - error: Error - ) { - subject.send( - .failedToSetActive( - audioSession: audioSession, - active: active, - error: error - ) - ) - } - - func audioSession( - _ audioSession: RTCAudioSession, - audioUnitStartFailedWithError error: Error - ) { - subject.send( - .audioUnitStartFailedWithError( - audioSession: audioSession, - error: error - ) - ) - } -} diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction+AudioSession.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction+AudioSession.swift new file mode 100644 index 000000000..16eb7fb9e --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction+AudioSession.swift @@ -0,0 +1,49 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import Foundation + +extension RTCAudioStoreAction { + + /// Enumerates the supported actions for audio session state changes. + /// + /// Use these cases to express updates and configuration changes to the + /// audio session, including activation, interruption, category, output + /// port, and permissions. + enum AudioSession { + /// Activates or deactivates the audio session. 
+ case isActive(Bool) + + /// Sets the interruption state of the audio session. + case isInterrupted(Bool) + + /// Enables or disables audio. + case isAudioEnabled(Bool) + + /// Enables or disables manual audio management. + case useManualAudio(Bool) + + /// Sets the session category, mode, and options. + case setCategory( + AVAudioSession.Category, + mode: AVAudioSession.Mode, + options: AVAudioSession.CategoryOptions + ) + + /// Overrides the output audio port (e.g., speaker, none). + case setOverrideOutputPort(AVAudioSession.PortOverride) + + /// Sets whether system alerts should not interrupt the session. + case setPrefersNoInterruptionsFromSystemAlerts(Bool) + + /// Sets the recording permission state for the session. + case setHasRecordingPermission(Bool) + + /// Used when activating/deactivating audioOutput from CallSettings. + /// - Warning: It has the potential to cause misalignment with the underlying RTCAudioSession. + /// It should be used with caution. + case setAVAudioSessionActive(Bool) + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction+CallKit.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction+CallKit.swift new file mode 100644 index 000000000..98106253e --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction+CallKit.swift @@ -0,0 +1,21 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import Foundation + +extension RTCAudioStoreAction { + + /// An action describing a CallKit-driven change to the AVAudioSession. + /// + /// Use this enum to represent explicit audio session activation and deactivation + /// events that are triggered by CallKit and should be handled by the reducer. + enum CallKit { + /// Indicates that the audio session was activated via CallKit. + case activate(AVAudioSession) + + /// Indicates that the audio session was deactivated via CallKit. 
+ case deactivate(AVAudioSession) + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction+Generic.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction+Generic.swift new file mode 100644 index 000000000..b659553e0 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction+Generic.swift @@ -0,0 +1,16 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import Foundation + +extension RTCAudioStoreAction { + + /// Represents actions that can be performed within the RTCAudioStore to control audio behavior + /// or timing. + enum Generic { + /// An action that introduces a delay for a specified number of seconds before proceeding with + /// the next operation. + case delay(seconds: TimeInterval) + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction.swift new file mode 100644 index 000000000..5299f9ed0 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction.swift @@ -0,0 +1,13 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import Foundation + +enum RTCAudioStoreAction: Sendable { + case generic(RTCAudioStoreAction.Generic) + + case audioSession(RTCAudioStoreAction.AudioSession) + + case callKit(RTCAudioStoreAction.CallKit) +} diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/AudioSessions/AudioSessionProtocol.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/AudioSessions/AudioSessionProtocol.swift new file mode 100644 index 000000000..9feb882a4 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/AudioSessions/AudioSessionProtocol.swift @@ -0,0 +1,51 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. 
+// + +import AVFoundation +import Foundation +import StreamWebRTC + +protocol AudioSessionProtocol: AnyObject { + var avSession: AVAudioSessionProtocol { get } + + var prefersNoInterruptionsFromSystemAlerts: Bool { get } + + func setPrefersNoInterruptionsFromSystemAlerts(_ newValue: Bool) throws + + var isActive: Bool { get } + + func setActive(_ isActive: Bool) throws + + var isAudioEnabled: Bool { get set } + + var useManualAudio: Bool { get set } + + var category: String { get } + + var mode: String { get } + + var categoryOptions: AVAudioSession.CategoryOptions { get } + + var recordPermissionGranted: Bool { get } + + func requestRecordPermission() async -> Bool + + var currentRoute: AVAudioSessionRouteDescription { get } + + func add(_ delegate: RTCAudioSessionDelegate) + + func remove(_ delegate: RTCAudioSessionDelegate) + + func audioSessionDidActivate(_ audioSession: AVAudioSession) + + func audioSessionDidDeactivate(_ audioSession: AVAudioSession) + + func perform( + _ operation: (AudioSessionProtocol) throws -> Void + ) throws + + func overrideOutputAudioPort(_ port: AVAudioSession.PortOverride) throws + + func setConfiguration(_ configuration: RTCAudioSessionConfiguration) throws +} diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/AudioSessions/RTCAudioSession+AudioSessionProtocol.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/AudioSessions/RTCAudioSession+AudioSessionProtocol.swift new file mode 100644 index 000000000..6ce718a9b --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/AudioSessions/RTCAudioSession+AudioSessionProtocol.swift @@ -0,0 +1,51 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. 
+// + +import Foundation +import StreamWebRTC + +extension RTCAudioSession: AudioSessionProtocol { + var avSession: any AVAudioSessionProtocol { + session + } + + var prefersNoInterruptionsFromSystemAlerts: Bool { + if #available(iOS 14.5, *) { + return session.prefersNoInterruptionsFromSystemAlerts + } else { + return false + } + } + + func setPrefersNoInterruptionsFromSystemAlerts(_ newValue: Bool) throws { + guard #available(iOS 14.5, *) else { + return + } + try session.setPrefersNoInterruptionsFromSystemAlerts(newValue) + } + + var recordPermissionGranted: Bool { + if #available(iOS 17.0, *) { + return AVAudioApplication.shared.recordPermission == .granted + } else { + return session.recordPermission == .granted + } + } + + func requestRecordPermission() async -> Bool { + await withCheckedContinuation { continuation in + session.requestRecordPermission { result in + continuation.resume(returning: result) + } + } + } + + func perform( + _ operation: (AudioSessionProtocol) throws -> Void + ) throws { + lockForConfiguration() + defer { unlockForConfiguration() } + try operation(self) + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Effects/RTCAudioStore+InterruptionEffect.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Effects/RTCAudioStore+InterruptionEffect.swift new file mode 100644 index 000000000..38075d0eb --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Effects/RTCAudioStore+InterruptionEffect.swift @@ -0,0 +1,96 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import Foundation +import StreamWebRTC + +extension RTCAudioStore { + + /// Handles AVAudioSession interruptions for `RTCAudioStore`. + /// + /// This class listens for audio session interruption events and updates the `RTCAudioStore` state accordingly. + /// It manages the audio session's interruption state, audio enablement, and session activation. 
+ /// When an interruption begins, it disables audio and marks the session as interrupted. + /// When the interruption ends, it optionally resumes the session by restoring the audio session category, + /// mode, and options, with appropriate delays to ensure smooth recovery. + final class InterruptionEffect: NSObject, RTCAudioSessionDelegate { + + /// The audio session instance used to observe interruption events. + private let session: AudioSessionProtocol + /// A weak reference to the `RTCAudioStore` to dispatch state changes. + private weak var store: RTCAudioStore? + + /// Creates a new `InterruptionEffect` that listens to the given `RTCAudioStore`'s audio session. + /// + /// - Parameter store: The `RTCAudioStore` instance whose session interruptions will be handled. + /// The effect registers itself as a delegate of the store's audio session. + init(_ store: RTCAudioStore) { + session = store.session + self.store = store + super.init() + + session.add(self) + } + + deinit { + session.remove(self) + } + + // MARK: - RTCAudioSessionDelegate + + /// Called when the audio session begins an interruption. + /// + /// Updates the store to indicate the audio session is interrupted and disables audio. + /// - Parameter session: The audio session that began the interruption. + func audioSessionDidBeginInterruption(_ session: RTCAudioSession) { + store?.dispatch(.audioSession(.isInterrupted(true))) + store?.dispatch(.audioSession(.isAudioEnabled(false))) + } + + /// Called when the audio session ends an interruption. + /// + /// Updates the store to indicate the interruption ended. If the session should resume, + /// it disables audio and session activation briefly, then restores the audio session category, + /// mode, and options with delays, before re-enabling audio and activating the session. 
+ /// + /// - Note: The delay is necessary as CallKit and AVAudioSession together are racey and we + /// need to ensure that our configuration will go through without other parts of the app making + /// changes later on. + /// + /// - Parameters: + /// - session: The audio session that ended the interruption. + /// - shouldResumeSession: A Boolean indicating whether the audio session should resume. + func audioSessionDidEndInterruption( + _ session: RTCAudioSession, + shouldResumeSession: Bool + ) { + guard let store else { + return + } + + store.dispatch(.audioSession(.isInterrupted(false))) + if shouldResumeSession { + store.dispatch(.audioSession(.isActive(false))) + store.dispatch(.audioSession(.isAudioEnabled(false))) + + store.dispatch(.generic(.delay(seconds: 0.2))) + + store.dispatch( + .audioSession( + .setCategory( + store.state.category, + mode: store.state.mode, + options: store.state.options + ) + ) + ) + + store.dispatch(.generic(.delay(seconds: 0.2))) + + store.dispatch(.audioSession(.isAudioEnabled(true))) + store.dispatch(.audioSession(.isActive(true))) + } + } + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Effects/RTCAudioStore+RouteChangeEffect.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Effects/RTCAudioStore+RouteChangeEffect.swift new file mode 100644 index 000000000..66894e006 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Effects/RTCAudioStore+RouteChangeEffect.swift @@ -0,0 +1,117 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import Combine +import Foundation +import StreamWebRTC + +extension RTCAudioStore { + + /// An effect handler that listens for audio session route changes and updates call + /// settings as needed. + /// + /// This class observes changes in the audio route (such as switching between speaker, + /// Bluetooth, or headphones) and ensures the app's call settings stay in sync with the + /// current audio configuration. 
+ final class RouteChangeEffect: NSObject, RTCAudioSessionDelegate { + + /// The device being used, injected for device-specific route handling. + @Injected(\.currentDevice) private var currentDevice + + /// The audio session being observed for route changes. + private let session: AudioSessionProtocol + /// The RTCAudioStore being updated on route change events. + private weak var store: RTCAudioStore? + /// Delegate for notifying about call settings changes. + private weak var delegate: StreamAudioSessionAdapterDelegate? + /// Tracks the current call settings subscription. + private var callSettingsCancellable: AnyCancellable? + /// The most recent active call settings for route change comparison. + private var activeCallSettings: CallSettings? + + /// Initializes the effect, sets up the route change observer, and subscribes to call settings. + /// + /// - Parameters: + /// - store: The audio store to update on changes. + /// - callSettingsPublisher: Publishes the latest call settings. + /// - delegate: Delegate for updating call settings in response to route changes. + init( + _ store: RTCAudioStore, + callSettingsPublisher: AnyPublisher, + delegate: StreamAudioSessionAdapterDelegate + ) { + session = store.session + self.store = store + self.delegate = delegate + super.init() + + callSettingsCancellable = callSettingsPublisher + .removeDuplicates() + .sink { [weak self] in self?.activeCallSettings = $0 } + session.add(self) + } + + deinit { + session.remove(self) + } + + // MARK: - RTCAudioSessionDelegate + + /// Handles audio route changes and updates call settings if the speaker state + /// has changed compared to the current configuration. + /// + /// - Parameters: + /// - session: The session where the route change occurred. + /// - reason: The reason for the route change. + /// - previousRoute: The previous audio route before the change. 
+ func audioSessionDidChangeRoute( + _ session: RTCAudioSession, + reason: AVAudioSession.RouteChangeReason, + previousRoute: AVAudioSessionRouteDescription + ) { + guard let activeCallSettings else { + return + } + + /// We rewrite the reference to RTCAudioSession with our internal session in order to allow + /// easier stubbing for tests. That's a safe operation as our internal session is already pointing + /// to the shared RTCAudioSession. + let session = self.session + + guard currentDevice.deviceType == .phone else { + if activeCallSettings.speakerOn != session.currentRoute.isSpeaker { + log.warning( + """ + AudioSession didChangeRoute with speakerOn:\(session.currentRoute.isSpeaker) + while CallSettings have speakerOn:\(activeCallSettings.speakerOn). + We will update CallSettings to match the AudioSession's + current configuration + """, + subsystems: .audioSession + ) + delegate?.audioSessionAdapterDidUpdateCallSettings( + callSettings: activeCallSettings + .withUpdatedSpeakerState(session.currentRoute.isSpeaker) + ) + } + return + } + + switch (activeCallSettings.speakerOn, session.currentRoute.isSpeaker) { + case (true, false): + delegate?.audioSessionAdapterDidUpdateCallSettings( + callSettings: activeCallSettings.withUpdatedSpeakerState(false) + ) + + case (false, true) where session.category == AVAudioSession.Category.playAndRecord.rawValue: + delegate?.audioSessionAdapterDidUpdateCallSettings( + callSettings: activeCallSettings.withUpdatedSpeakerState(true) + ) + + default: + break + } + } + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Middleware/RTCAudioStoreMiddleware.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Middleware/RTCAudioStoreMiddleware.swift new file mode 100644 index 000000000..991b19cd8 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Middleware/RTCAudioStoreMiddleware.swift @@ -0,0 +1,28 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. 
+// + +import Foundation + +/// A middleware protocol for intercepting and handling actions applied to the RTCAudioStore state. +/// Implementers can observe or modify actions as they are processed, enabling custom behavior or side effects. +protocol RTCAudioStoreMiddleware: AnyObject { + + /// Applies an action to the RTCAudioStore state, with context information. + /// + /// - Parameters: + /// - state: The current state of the RTCAudioStore. + /// - action: The action to be applied to the state. + /// - file: The source file from which the action originated. + /// - function: The function from which the action originated. + /// - line: The line number in the source file where the action originated. + /// + /// Use this method to observe or modify actions before they affect the state. + func apply( + state: RTCAudioStore.State, + action: RTCAudioStoreAction, + file: StaticString, + function: StaticString, + line: UInt + ) +} diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/RTCAudioStore.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/RTCAudioStore.swift new file mode 100644 index 000000000..f99768fc1 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/RTCAudioStore.swift @@ -0,0 +1,263 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import Combine +import Foundation +import StreamWebRTC + +/// Stores and manages the audio session state for real-time communication calls. +/// +/// `RTCAudioStore` coordinates actions, state updates, and reducers for audio +/// session control. It centralizes audio configuration, provides state +/// observation, and enables serial action processing to avoid concurrency +/// issues. Use this type to access and manage all call audio state in a +/// thread-safe, observable way. +final class RTCAudioStore: @unchecked Sendable { + + static let shared = RTCAudioStore() + + /// The current state of the audio session. 
+ var state: State { stateSubject.value } + + /// The underlying WebRTC audio session being managed. + let session: AudioSessionProtocol + + private let stateSubject: CurrentValueSubject + private let processingQueue = OperationQueue(maxConcurrentOperationCount: 1) + + @Atomic private(set) var middleware: [RTCAudioStoreMiddleware] = [] + @Atomic private(set) var reducers: [RTCAudioStoreReducer] = [] + + private var logCancellable: AnyCancellable? + + init( + session: AudioSessionProtocol = RTCAudioSession.sharedInstance(), + underlyingQueue: dispatch_queue_t? = .global(qos: .userInteractive) + ) { + self.session = session + + stateSubject = .init( + .init( + isActive: session.isActive, + isInterrupted: false, + prefersNoInterruptionsFromSystemAlerts: session.prefersNoInterruptionsFromSystemAlerts, + isAudioEnabled: session.isAudioEnabled, + useManualAudio: session.useManualAudio, + category: .init(rawValue: session.category), + mode: .init(rawValue: session.mode), + options: session.categoryOptions, + overrideOutputAudioPort: .none, + hasRecordingPermission: session.recordPermissionGranted + ) + ) + processingQueue.underlyingQueue = underlyingQueue + + logCancellable = stateSubject + .log(.debug, subsystems: .audioSession) { "AudioStore state updated to: \($0)" } + .sink { _ in } + + add(RTCAudioSessionReducer(store: self)) + + dispatch(.audioSession(.setPrefersNoInterruptionsFromSystemAlerts(true))) + dispatch(.audioSession(.useManualAudio(true))) + dispatch(.audioSession(.isAudioEnabled(false))) + } + + // MARK: - State Observation + + /// Publishes changes to the specified state property. + /// + /// Use this to observe changes for a specific audio state key path. + func publisher( + _ keyPath: KeyPath + ) -> AnyPublisher { + stateSubject + .map { $0[keyPath: keyPath] } + .removeDuplicates() + .eraseToAnyPublisher() + } + + // MARK: - Reducers + + /// Adds middleware to observe or intercept audio actions. 
+ func add(_ value: T) { + guard middleware.first(where: { $0 === value }) == nil else { + return + } + middleware.append(value) + } + + /// Removes previously added middleware. + func remove(_ value: T) { + middleware = middleware.filter { $0 !== value } + } + + // MARK: - Reducers + + /// Adds a reducer to handle audio session actions. + func add(_ value: T) { + guard reducers.first(where: { $0 === value }) == nil else { + return + } + reducers.append(value) + } + + /// Adds a reducer to handle audio session actions. + func remove(_ value: T) { + reducers = reducers.filter { $0 !== value } + } + + // MARK: - Actions dispatch + + /// Dispatches an audio store action asynchronously and waits for completion. + func dispatchAsync( + _ action: RTCAudioStoreAction, + file: StaticString = #file, + function: StaticString = #function, + line: UInt = #line + ) async throws { + try await processingQueue.addSynchronousTaskOperation { [weak self] in + guard let self else { + return + } + + await applyDelayIfRequired(for: action) + + try perform( + action, + file: file, + function: function, + line: line + ) + } + } + + /// Dispatches an audio store action for processing on the queue. + func dispatch( + _ action: RTCAudioStoreAction, + file: StaticString = #file, + function: StaticString = #function, + line: UInt = #line + ) { + processingQueue.addTaskOperation { [weak self] in + guard let self else { + return + } + + do { + await applyDelayIfRequired(for: action) + + try perform( + action, + file: file, + function: function, + line: line + ) + } catch { + log.error( + error, + subsystems: .audioSession, + functionName: function, + fileName: file, + lineNumber: line + ) + } + } + } + + // MARK: - Helpers + + /// Requests record permission from the user, updating state. 
+ func requestRecordPermission() async -> Bool { + guard + !state.hasRecordingPermission + else { + return true + } + + let result = await session.requestRecordPermission() + dispatch(.audioSession(.setHasRecordingPermission(result))) + return result + } + + // MARK: - Private Helpers + + private func perform( + _ action: RTCAudioStoreAction, + file: StaticString = #file, + function: StaticString = #function, + line: UInt = #line + ) throws { + let state = stateSubject.value + + let middleware = middleware + let reducers = reducers + + middleware.forEach { + $0.apply( + state: state, + action: action, + file: file, + function: function, + line: line + ) + } + + do { + let updatedState = try reducers + .reduce(state) { + try $1.reduce( + state: $0, + action: action, + file: file, + function: function, + line: line + ) + } + + stateSubject.send(updatedState) + + log.debug( + "Completed action: \(action).", + subsystems: .audioSession, + functionName: function, + fileName: file, + lineNumber: line + ) + } catch { + log.error( + "Failed action: \(action).", + subsystems: .audioSession, + error: error, + functionName: function, + fileName: file, + lineNumber: line + ) + throw error + } + } + + /// Delays are important for flows like interruptionEnd where we need to perform multiple operations + /// at once while the same session may be accessed/modified from another part of the app (e.g. CallKit). + private func applyDelayIfRequired(for action: RTCAudioStoreAction) async { + guard + case let .generic(.delay(interval)) = action + else { + return + } + + try? 
await Task.sleep(nanoseconds: UInt64(1_000_000_000 * interval)) + } +} + +extension RTCAudioStore: InjectionKey { + nonisolated(unsafe) static var currentValue: RTCAudioStore = .shared +} + +extension InjectedValues { + var audioStore: RTCAudioStore { + get { Self[RTCAudioStore.self] } + set { Self[RTCAudioStore.self] = newValue } + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Reducers/CallKitAudioSessionReducer.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Reducers/CallKitAudioSessionReducer.swift new file mode 100644 index 000000000..01cba71f2 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Reducers/CallKitAudioSessionReducer.swift @@ -0,0 +1,71 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import Foundation +import StreamWebRTC + +/// A reducer that manages audio session state changes triggered by CallKit. +/// +/// `CallKitAudioSessionReducer` implements the `RTCAudioStoreReducer` protocol +/// and is responsible for updating the audio state in response to CallKit-related +/// actions, such as audio session activation or deactivation. This allows for +/// proper coordination of the WebRTC audio session lifecycle when the system +/// audio session is managed externally by CallKit. +final class CallKitAudioSessionReducer: RTCAudioStoreReducer { + + /// The underlying WebRTC audio session that is managed by this reducer. + private let source: AudioSessionProtocol + + /// Creates a new reducer for handling CallKit-related audio session changes. + /// + /// - Parameter store: The `RTCAudioStore` whose underlying audio session + /// will be managed by this reducer. + init(store: RTCAudioStore) { + source = store.session + } + + // MARK: - RTCAudioStoreReducer + + /// Updates the audio session state based on a CallKit-related action. 
+ /// + /// This method responds to `.callKit` actions from the audio store, updating + /// the state to reflect changes triggered by CallKit, such as activating or + /// deactivating the audio session. The reducer delegates the activation or + /// deactivation to the underlying `RTCAudioSession`. + /// + /// - Parameters: + /// - state: The current audio session state. + /// - action: The audio store action to handle. + /// - file: The file from which the action originated (used for logging). + /// - function: The function from which the action originated (used for logging). + /// - line: The line number from which the action originated (used for logging). + /// - Returns: The updated audio session state after processing the action. + func reduce( + state: RTCAudioStore.State, + action: RTCAudioStoreAction, + file: StaticString, + function: StaticString, + line: UInt + ) throws -> RTCAudioStore.State { + guard + case let .callKit(action) = action + else { + return state + } + + var updatedState = state + + switch action { + case let .activate(audioSession): + source.audioSessionDidActivate(audioSession) + updatedState.isActive = source.isActive + + case let .deactivate(audioSession): + source.audioSessionDidDeactivate(audioSession) + updatedState.isActive = source.isActive + } + + return updatedState + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Reducers/RTCAudioSessionReducer.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Reducers/RTCAudioSessionReducer.swift new file mode 100644 index 000000000..fdc70458f --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Reducers/RTCAudioSessionReducer.swift @@ -0,0 +1,146 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import Foundation +import StreamWebRTC + +/// A reducer responsible for managing changes to the audio session state within the WebRTC context. 
+/// This class listens for audio-related actions and applies corresponding updates to the shared +/// `RTCAudioSession` instance, ensuring the audio session is configured and controlled consistently. +/// It handles activation, interruption, audio enabling, category settings, output port overrides, +/// and permissions, encapsulating the logic for applying these changes safely and atomically. +final class RTCAudioSessionReducer: RTCAudioStoreReducer { + + private let source: AudioSessionProtocol + + /// Initializes the reducer with the audio session owned by the given store. + /// - Parameter store: The `RTCAudioStore` whose audio session this reducer manages. + init(store: RTCAudioStore) { + source = store.session + } + + // MARK: - RTCAudioStoreReducer + + /// Processes an audio-related action and returns the updated audio store state. + /// + /// This method interprets the provided action, performs necessary operations on the underlying + /// `RTCAudioSession`, and returns a new state reflecting any changes. It safely handles session + /// configuration updates and respects current state to avoid redundant operations. + /// + /// - Parameters: + /// - state: The current audio store state. + /// - action: The action to apply to the state. + /// - file: The source file from which the action originated. + /// - function: The function from which the action originated. + /// - line: The line number from which the action originated. + /// - Throws: Rethrows errors from audio session configuration operations. + /// - Returns: The updated audio store state after applying the action. 
+ func reduce( + state: RTCAudioStore.State, + action: RTCAudioStoreAction, + file: StaticString, + function: StaticString, + line: UInt + ) throws -> RTCAudioStore.State { + guard + case let .audioSession(action) = action + else { + return state + } + + var updatedState = state + + switch action { + case let .isActive(value): + guard updatedState.isActive != value else { + break + } + try source.perform { try $0.setActive(value) } + updatedState.isActive = value + + case let .isInterrupted(value): + updatedState.isInterrupted = value + + case let .isAudioEnabled(value): + source.isAudioEnabled = value + updatedState.isAudioEnabled = value + + case let .useManualAudio(value): + source.useManualAudio = value + updatedState.useManualAudio = value + + case let .setCategory(category, mode, options): + try source.perform { + /// We update the `webRTC` default configuration because, the WebRTC audioStack + /// can be restarted for various reasons. When the stack restarts it gets reconfigured + /// with the `webRTC` configuration. If then the configuration is invalid compared + /// to the state we expect we may find ourselves in a difficult to recover situation, + /// as our callSetting may be failing to get applied. + /// By updating the `webRTC` configuration we ensure that the audioStack will + /// start from the last known state in every restart, making things simpler to recover. 
+ let webRTCConfiguration = RTCAudioSessionConfiguration.webRTC() + webRTCConfiguration.category = category.rawValue + webRTCConfiguration.mode = mode.rawValue + webRTCConfiguration.categoryOptions = options + + try $0.setConfiguration(webRTCConfiguration) + RTCAudioSessionConfiguration.setWebRTC(webRTCConfiguration) + } + + updatedState.category = category + updatedState.mode = mode + updatedState.options = options + + case let .setOverrideOutputPort(port): + try source.perform { + try $0.overrideOutputAudioPort(port) + } + + updatedState.overrideOutputAudioPort = port + + case let .setPrefersNoInterruptionsFromSystemAlerts(value): + if #available(iOS 14.5, *) { + try source.perform { + try $0.setPrefersNoInterruptionsFromSystemAlerts(value) + } + + updatedState.prefersNoInterruptionsFromSystemAlerts = value + } + + case let .setHasRecordingPermission(value): + updatedState.hasRecordingPermission = value + + case let .setAVAudioSessionActive(value): + /// In the case where audioOutputOn has changed the order of actions matters + /// When activating we need: + /// 1. activate AVAudioSession + /// 2. set isAudioEnabled = true + /// 3. set RTCAudioSession.isActive = true + /// + /// When deactivating we need: + /// 1. set RTCAudioSession.isActive = false + /// 2. set isAudioEnabled = false + /// 3. deactivate AVAudioSession + /// + /// - Weird behaviour: + /// We ignore the errors in AVAudioSession as in the case of CallKit we may fail to + /// deactivate the call but the following calls will ensure that there is no audio. + try source.perform { + if value { + try? $0.avSession.setIsActive(value) + $0.isAudioEnabled = value + try $0.setActive(value) + } else { + try? $0.setActive(value) + $0.isAudioEnabled = value + try? 
$0.avSession.setIsActive(value) + } + } + updatedState.isActive = value + updatedState.isAudioEnabled = value + } + + return updatedState + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Reducers/RTCAudioStoreReducer.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Reducers/RTCAudioStoreReducer.swift new file mode 100644 index 000000000..27773100f --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Reducers/RTCAudioStoreReducer.swift @@ -0,0 +1,30 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import Foundation + +/// A protocol that defines how to handle state changes in the RTCAudioStore. +/// +/// Implementers of this protocol provide logic to process actions and produce a new state. +/// This is useful for managing audio-related state in a predictable and testable way. +protocol RTCAudioStoreReducer: AnyObject { + + /// Processes an action and returns the updated state of the RTCAudioStore. + /// + /// - Parameters: + /// - state: The current state before the action is applied. + /// - action: The action to be handled which may modify the state. + /// - file: The source file where the action was dispatched (for debugging). + /// - function: The function name where the action was dispatched (for debugging). + /// - line: The line number where the action was dispatched (for debugging). + /// - Throws: An error if the state reduction fails. + /// - Returns: The new state after applying the action. 
+ func reduce( + state: RTCAudioStore.State, + action: RTCAudioStoreAction, + file: StaticString, + function: StaticString, + line: UInt + ) throws -> RTCAudioStore.State +} diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/State/RTCAudioStore+State.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/State/RTCAudioStore+State.swift new file mode 100644 index 000000000..340d27909 --- /dev/null +++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/State/RTCAudioStore+State.swift @@ -0,0 +1,90 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation +import Foundation +import StreamWebRTC + +extension RTCAudioStore { + /// A value type representing the current state of the RTCAudioStore. + /// + /// This struct encapsulates all relevant audio session properties, including + /// activation, interruption, permissions, and AVAudioSession configuration. + /// Properties are explicitly encoded for diagnostics, analytics, or + /// persistence. Non-encodable AVFoundation types are encoded using their + /// string or raw value representations to ensure compatibility. + /// + /// - Note: Properties such as `category`, `mode`, `options`, and + /// `overrideOutputAudioPort` are encoded as their string or raw values. + struct State: Equatable, Encodable { + + /// Indicates if the audio session is currently active. + var isActive: Bool + /// Indicates if the audio session is currently interrupted. + var isInterrupted: Bool + /// If true, prefers no interruptions from system alerts. + var prefersNoInterruptionsFromSystemAlerts: Bool + /// If true, audio is enabled. + var isAudioEnabled: Bool + /// If true, manual audio management is enabled. + var useManualAudio: Bool + /// The AVAudioSession category. Encoded as its string value. + var category: AVAudioSession.Category + /// The AVAudioSession mode. Encoded as its string value. + var mode: AVAudioSession.Mode + /// The AVAudioSession category options. 
Encoded as its raw value. + var options: AVAudioSession.CategoryOptions + /// The AVAudioSession port override. Encoded as its raw value. + var overrideOutputAudioPort: AVAudioSession.PortOverride + /// Indicates if the app has permission to record audio. + var hasRecordingPermission: Bool + + /// The initial default state for the audio store. + static let initial = State( + isActive: false, + isInterrupted: false, + prefersNoInterruptionsFromSystemAlerts: true, + isAudioEnabled: false, + useManualAudio: false, + category: .playAndRecord, + mode: .voiceChat, + options: .allowBluetooth, + overrideOutputAudioPort: .none, + hasRecordingPermission: false + ) + + /// Encodes this state into the given encoder. + /// + /// AVFoundation types are encoded as their string or raw value + /// representations for compatibility. + /// - Parameter encoder: The encoder to write data to. + func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: CodingKeys.self) + try container.encode(isActive, forKey: .isActive) + try container.encode(isInterrupted, forKey: .isInterrupted) + try container.encode(prefersNoInterruptionsFromSystemAlerts, forKey: .prefersNoInterruptionsFromSystemAlerts) + try container.encode(isAudioEnabled, forKey: .isAudioEnabled) + try container.encode(useManualAudio, forKey: .useManualAudio) + try container.encode(category.rawValue, forKey: .category) + try container.encode(mode.rawValue, forKey: .mode) + try container.encode(options.rawValue, forKey: .options) + try container.encode(overrideOutputAudioPort.rawValue, forKey: .overrideOutputAudioPort) + try container.encode(hasRecordingPermission, forKey: .hasRecordingPermission) + } + + /// Coding keys for encoding and decoding the state. 
+ private enum CodingKeys: String, CodingKey { + case isActive + case isInterrupted + case prefersNoInterruptionsFromSystemAlerts + case isAudioEnabled + case useManualAudio + case category + case mode + case options + case overrideOutputAudioPort + case hasRecordingPermission + } + } +} diff --git a/Sources/StreamVideo/Utils/AudioSession/StreamAudioSession.swift b/Sources/StreamVideo/Utils/AudioSession/StreamAudioSession.swift deleted file mode 100644 index 59c982efd..000000000 --- a/Sources/StreamVideo/Utils/AudioSession/StreamAudioSession.swift +++ /dev/null @@ -1,476 +0,0 @@ -// -// Copyright © 2025 Stream.io Inc. All rights reserved. -// - -import AVFoundation -import Combine -import Foundation -import StreamWebRTC - -/// Manages the app’s audio session, handling activation, configuration, -/// and routing to output devices such as speakers and in-ear speakers. -final class StreamAudioSession: @unchecked Sendable, ObservableObject { - - /// The last applied audio session configuration. - private var lastUsedConfiguration: AudioSessionConfiguration? - - /// The current device as is being described by ``UIUserInterfaceIdiom``. - private let currentDevice = CurrentDevice.currentValue - - /// The WebRTC-compatible audio session. - private let audioSession: AudioSessionProtocol - - /// Serial execution queue for processing session updates. - private let processingQueue = OperationQueue(maxConcurrentOperationCount: 1) - - /// A disposable bag holding all observation cancellable. - private let disposableBag = DisposableBag() - - /// The time to wait for recording to be stopped before we attempt to set the category to `.playback` - private let deferExecutionDueToRecordingInterval: TimeInterval = 1 - - /// The current call settings, or `nil` if no active call exists. - @Atomic private(set) var activeCallSettings: CallSettings - - /// The set of the user's own audio capabilities. 
- @Atomic private(set) var ownCapabilities: Set - - /// The policy defining audio session behavior. - @Atomic private(set) var policy: AudioSessionPolicy - - /// Published property to track the audio session category. - @Published private(set) var category: AVAudioSession.Category - - /// Delegate for handling audio session events. - weak var delegate: StreamAudioSessionAdapterDelegate? - - // MARK: - AudioSession State - - /// Indicates whether the session is recording. - @Published var isRecording: Bool = false - - /// Checks if the audio session is currently active. - var isActive: Bool { audioSession.isActive } - - /// Retrieves the current audio route description. - var currentRoute: AVAudioSessionRouteDescription { audioSession.currentRoute } - - private let audioDeviceModule: RTCAudioDeviceModule - - /// Initializes a new `StreamAudioSessionAdapter` instance, configuring - /// the session with default settings and enabling manual audio control - /// for WebRTC. - /// - /// - Parameter callSettings: The settings for the current call. - /// - Parameter ownCapabilities: The set of the user's own audio - /// capabilities. - /// - Parameter policy: The policy defining audio session behavior. - /// - Parameter audioSession: An `AudioSessionProtocol` instance. Defaults - /// to `StreamRTCAudioSession`. - required init( - callSettings: CallSettings = .init(), - ownCapabilities: Set = [], - policy: AudioSessionPolicy = DefaultAudioSessionPolicy(), - audioSession: AudioSessionProtocol = StreamRTCAudioSession(), - audioDeviceModule: RTCAudioDeviceModule - ) { - activeCallSettings = callSettings - self.ownCapabilities = ownCapabilities - self.policy = policy - self.audioSession = audioSession - category = audioSession.category - self.audioDeviceModule = audioDeviceModule - - /// Update the active call's `audioSession` to make available to - /// other components. 
- Self.currentValue = self - - var audioSession = self.audioSession - audioSession.useManualAudio = true - audioSession.isAudioEnabled = true - - audioSession - .eventPublisher - .compactMap { - guard case let .didChangeRoute(session, reason, previousRoute) = $0 else { - return nil - } - return (session, reason, previousRoute) - } - .filter { $0.0.isActive } - .log(.debug, subsystems: .audioSession) { [weak self] session, reason, previousRoute in - """ - AudioSession didChangeRoute reason:\(reason) - - isActive: \(session.isActive) - - isRecording: \(self?.isRecording.description ?? "-") - - category: \(AVAudioSession.Category(rawValue: session.category)) - - mode: \(AVAudioSession.Mode(rawValue: session.mode)) - - categoryOptions: \(session.categoryOptions) - - currentRoute:\(session.currentRoute) - - previousRoute:\(previousRoute) - """ - } - .sink { [weak self] in - self?.audioSessionDidChangeRoute( - $0, - reason: $1, - previousRoute: $2 - ) - } - .store(in: disposableBag) - - if let streamAudioSession = audioSession as? StreamRTCAudioSession { - streamAudioSession - .$state - .map(\.category) - .assign(to: \.category, onWeak: self) - .store(in: disposableBag) - } - } - - /// Removes all observers and resets the active audio session. - nonisolated func dismantle() { - disposableBag.removeAll() - if Self.currentValue === self { - // Reset activeCall audioSession. 
- Self.currentValue = nil - } - } - - func callKitActivated(_ audioSession: AVAudioSessionProtocol) throws { - let configuration = policy.configuration( - for: activeCallSettings, - ownCapabilities: ownCapabilities - ) - - try audioSession.setCategory( - configuration.category, - mode: configuration.mode, - with: configuration.options - ) - - if let overrideOutputAudioPort = configuration.overrideOutputAudioPort { - try audioSession.setOverrideOutputAudioPort(overrideOutputAudioPort) - } else { - try audioSession.setOverrideOutputAudioPort(.none) - } - } - - // MARK: - OwnCapabilities - - /// Updates the audio session with new call settings. - /// - /// - Parameter ownCapabilities: The new set of `OwnCapability` to apply. - func didUpdateOwnCapabilities( - _ ownCapabilities: Set - ) async throws { - self.ownCapabilities = ownCapabilities - try await didUpdate( - callSettings: activeCallSettings, - ownCapabilities: ownCapabilities - ) - } - - // MARK: - CallSettings - - /// Updates the audio session with new call settings. - /// - /// - Parameter settings: The new `CallSettings` to apply. - func didUpdateCallSettings( - _ settings: CallSettings - ) async throws { - activeCallSettings = settings - try await didUpdate( - callSettings: settings, - ownCapabilities: ownCapabilities - ) - } - - // MARK: - Policy - - /// Updates the audio session with a new policy. - /// - /// - Parameter policy: The new `AudioSessionPolicy` to apply. - func didUpdatePolicy( - _ policy: AudioSessionPolicy - ) async throws { - self.policy = policy - try await didUpdate( - callSettings: activeCallSettings, - ownCapabilities: ownCapabilities - ) - } - - // MARK: - Recording - - /// Prepares the audio session for recording. 
- func prepareForRecording() async throws { - guard !activeCallSettings.audioOn else { - return - } - - activeCallSettings = activeCallSettings.withUpdatedAudioState(true) - try await didUpdate( - callSettings: activeCallSettings, - ownCapabilities: ownCapabilities - ) - log.debug( - "AudioSession completed preparation for recording.", - subsystems: .audioSession - ) - } - - /// Requests the record permission from the user. - func requestRecordPermission() async -> Bool { - guard !isRecording else { - return isRecording - } - let result = await audioSession.requestRecordPermission() - log.debug( - "AudioSession completed request for recording permission.", - subsystems: .audioSession - ) - return result - } - - // MARK: - Private helpers - - /// Handles audio route changes, updating the session based on the reason - /// for the change. - /// - /// For cases like `.newDeviceAvailable`, `.override`, - /// `.noSuitableRouteForCategory`, `.routeConfigurationChange`, `.default`, - /// or `.unknown`, the route change is accepted, and the `CallSettings` - /// are updated accordingly, triggering a delegate update. - /// - /// For other cases, the route change is ignored, enforcing the existing - /// `CallSettings`. - /// - /// - Parameters: - /// - session: The `RTCAudioSession` instance. - /// - reason: The reason for the route change. - /// - previousRoute: The previous audio route configuration. 
- private func audioSessionDidChangeRoute( - _ session: RTCAudioSession, - reason: AVAudioSession.RouteChangeReason, - previousRoute: AVAudioSessionRouteDescription - ) { - guard session.isActive else { - return - } - - guard session.category == category.rawValue else { - log.warning( - """ - AudioSession category mismatch between AVAudioSession & SDK: - - AVAudioSession.category: \(AVAudioSession.Category(rawValue: session.category)) - - SDK: \(category) - """, - subsystems: .audioSession - ) - return - } - - guard currentDevice.deviceType == .phone else { - if activeCallSettings.speakerOn != session.currentRoute.isSpeaker { - log.warning( - """ - AudioSession didChangeRoute with speakerOn:\(session.currentRoute.isSpeaker) - while CallSettings have speakerOn:\(activeCallSettings.speakerOn). - We will update CallSettings to match the AudioSession's - current configuration - """, - subsystems: .audioSession - ) - delegate?.audioSessionAdapterDidUpdateCallSettings( - self, - callSettings: activeCallSettings - .withUpdatedSpeakerState(session.currentRoute.isSpeaker) - ) - } - return - } - - switch (activeCallSettings.speakerOn, session.currentRoute.isSpeaker) { - case (true, false): - delegate?.audioSessionAdapterDidUpdateCallSettings( - self, - callSettings: activeCallSettings.withUpdatedSpeakerState(false) - ) - - case (false, true) where session.category == AVAudioSession.Category.playAndRecord.rawValue: - delegate?.audioSessionAdapterDidUpdateCallSettings( - self, - callSettings: activeCallSettings.withUpdatedSpeakerState(true) - ) - - default: - break - } - } - - /// Updates the audio session configuration based on the provided call - /// settings and own capabilities. - /// - /// - Parameters: - /// - callSettings: The current call settings. - /// - ownCapabilities: The set of the user's own audio capabilities. - /// - file: The file where this method is called. - /// - functionName: The name of the function where this method is called. 
- /// - line: The line number where this method is called. - private func didUpdate( - callSettings: CallSettings, - ownCapabilities: Set, - file: StaticString = #file, - functionName: StaticString = #function, - line: UInt = #line - ) async throws { - try await processingQueue.addSynchronousTaskOperation { [weak self] in - guard let self else { - return - } - - let configuration = policy.configuration( - for: callSettings, - ownCapabilities: ownCapabilities - ) - - guard configuration != lastUsedConfiguration else { - return - } - - log.debug( - """ - Will configure AudioSession with - - configuration: \(configuration) - - policy: \(type(of: policy)) - - settings: \(callSettings) - - ownCapabilities:\(ownCapabilities) - """, - subsystems: .audioSession, - functionName: functionName, - fileName: file, - lineNumber: line - ) - - if configuration.category == .playback, isRecording { - log.debug( - "AudioSession is currently recording. Defer execution until recording has stopped.", - subsystems: .audioSession, - functionName: functionName, - fileName: file, - lineNumber: line - ) - await deferExecutionUntilRecordingIsStopped() - } - - if - configuration.overrideOutputAudioPort == nil, - audioSession.category == AVAudioSession.Category.playAndRecord - { - try await audioSession.overrideOutputAudioPort(.none) - } - - do { - try await audioSession.setCategory( - configuration.category, - mode: configuration.mode, - with: configuration.options - ) - } catch { - log.error( - "Failed while setting AudioSession category:\(configuration.category) mode:\(configuration.mode) options:\(configuration.options)", - subsystems: .audioSession, - error: error, - functionName: functionName, - fileName: file, - lineNumber: line - ) - } - - if let overrideOutputAudioPort = configuration.overrideOutputAudioPort { - try await audioSession.overrideOutputAudioPort(overrideOutputAudioPort) - } - - lastUsedConfiguration = configuration - } - } - - /// Defers execution until recording is 
stopped. - private func deferExecutionUntilRecordingIsStopped() async { - do { - _ = try await $isRecording - .filter { $0 == false } - .nextValue(timeout: deferExecutionDueToRecordingInterval) - try await Task.sleep(nanoseconds: 250 * 1_000_000) - } catch { - log.error( - "Defer execution until recording has stopped failed.", - subsystems: .audioSession, - error: error - ) - } - } -} - -extension StreamAudioSession: Encodable { - // MARK: - Codable - - enum CodingKeys: String, CodingKey { - case isActive - case isRecording - case isAudioModuleRecording - case isAudioModulePlaying - case category - case mode - case overrideOutputPort - case useManualAudio - case isAudioEnabled - case hasRecordPermission - case speakerOn - case device - case deviceIsExternal = "device.isExternal" - case deviceIsSpeaker = "device.isSpeaker" - case deviceIsReceiver = "device.isReceiver" - } - - func encode(to encoder: Encoder) throws { - var container = encoder.container(keyedBy: CodingKeys.self) - try container.encode(audioSession.isActive, forKey: .isActive) - try container.encode(isRecording, forKey: .isRecording) - try container.encode(audioDeviceModule.recording, forKey: .isAudioModuleRecording) - try container.encode(audioDeviceModule.playing, forKey: .isAudioModulePlaying) - try container.encode(audioSession.category.rawValue, forKey: .category) - try container.encode(audioSession.mode.rawValue, forKey: .mode) - try container.encode(audioSession.overrideOutputPort.stringValue, forKey: .overrideOutputPort) - try container.encode(audioSession.hasRecordPermission, forKey: .hasRecordPermission) - try container.encode("\(audioSession.currentRoute)", forKey: .device) - try container.encode(audioSession.currentRoute.isExternal, forKey: .deviceIsExternal) - try container.encode(audioSession.currentRoute.isSpeaker, forKey: .deviceIsSpeaker) - try container.encode(audioSession.currentRoute.isReceiver, forKey: .deviceIsReceiver) - - if let rtcAudioSession = audioSession as? 
StreamRTCAudioSession { - try container.encode(rtcAudioSession.useManualAudio, forKey: .useManualAudio) - try container.encode(rtcAudioSession.isAudioEnabled, forKey: .isAudioEnabled) - } - } -} - -/// A key for dependency injection of an `AudioSessionProtocol` instance -/// that represents the active call audio session. -extension StreamAudioSession: InjectionKey { - nonisolated(unsafe) static var currentValue: StreamAudioSession? -} - -extension InjectedValues { - /// The active call's audio session. The value is being set on - /// `StreamAudioSession` `init` / `deinit` - var activeCallAudioSession: StreamAudioSession? { - get { - Self[StreamAudioSession.self] - } - set { - Self[StreamAudioSession.self] = newValue - } - } -} diff --git a/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapterDelegate.swift b/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapterDelegate.swift index 2a5e2f3be..dd2cd67fc 100644 --- a/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapterDelegate.swift +++ b/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapterDelegate.swift @@ -12,7 +12,6 @@ protocol StreamAudioSessionAdapterDelegate: AnyObject { /// - audioSession: The `AudioSession` instance that made the update. /// - callSettings: The updated `CallSettings`. func audioSessionAdapterDidUpdateCallSettings( - _ adapter: StreamAudioSession, callSettings: CallSettings ) } diff --git a/Sources/StreamVideo/Utils/AudioSession/StreamRTCAudioSession.swift b/Sources/StreamVideo/Utils/AudioSession/StreamRTCAudioSession.swift deleted file mode 100644 index c974440cd..000000000 --- a/Sources/StreamVideo/Utils/AudioSession/StreamRTCAudioSession.swift +++ /dev/null @@ -1,272 +0,0 @@ -// -// Copyright © 2025 Stream.io Inc. All rights reserved. -// - -import AVFoundation -import Combine -import Foundation -import StreamWebRTC - -/// A protocol defining an interface for managing an audio session. -/// This allows for dependency injection and easier testing. 
-protocol AudioSessionProtocol { - - /// A publisher that emits audio session events. - var eventPublisher: AnyPublisher { get } - - /// A Boolean value indicating whether the audio session is active. - var isActive: Bool { get } - - /// The current audio route description for the session. - var currentRoute: AVAudioSessionRouteDescription { get } - - var category: AVAudioSession.Category { get } - - var mode: AVAudioSession.Mode { get } - - var overrideOutputPort: AVAudioSession.PortOverride { get } - - /// A Boolean value indicating whether manual audio routing is used. - var useManualAudio: Bool { get set } - - /// A Boolean value indicating whether audio is enabled. - var isAudioEnabled: Bool { get set } - - var hasRecordPermission: Bool { get } - - /// Configures the audio session category and options. - /// - Parameters: - /// - category: The audio category (e.g., `.playAndRecord`). - /// - mode: The audio mode (e.g., `.videoChat`). - /// - categoryOptions: The options for the category (e.g., `.allowBluetooth`). - /// - Throws: An error if setting the category fails. - func setCategory( - _ category: AVAudioSession.Category, - mode: AVAudioSession.Mode, - with categoryOptions: AVAudioSession.CategoryOptions - ) async throws - - /// Activates or deactivates the audio session. - /// - Parameter isActive: Whether to activate the session. - /// - Throws: An error if activation fails. - func setActive(_ isActive: Bool) async throws - - /// Overrides the audio output port (e.g., to speaker). - /// - Parameter port: The output port override. - /// - Throws: An error if overriding fails. - func overrideOutputAudioPort(_ port: AVAudioSession.PortOverride) async throws - - /// Requests permission to record audio from the user. - /// - Returns: `true` if permission was granted, otherwise `false`. 
- func requestRecordPermission() async -> Bool -} - -/// A class implementing the `AudioSessionProtocol` that manages the WebRTC -/// audio session for the application, handling settings and route management. -final class StreamRTCAudioSession: AudioSessionProtocol, @unchecked Sendable, ReflectiveStringConvertible { - - struct State: ReflectiveStringConvertible, Equatable { - var category: AVAudioSession.Category - var mode: AVAudioSession.Mode - var options: AVAudioSession.CategoryOptions - var overrideOutputPort: AVAudioSession.PortOverride = .none - } - - @Published private(set) var state: State - - /// A queue for processing audio session operations asynchronously. - private let processingQueue = OperationQueue(maxConcurrentOperationCount: 1) - - /// The shared instance of `RTCAudioSession` used for WebRTC audio - /// configuration and management. - private let source: RTCAudioSession - private let sourceDelegate: RTCAudioSessionDelegatePublisher = .init() - private let disposableBag = DisposableBag() - - var eventPublisher: AnyPublisher { - sourceDelegate.publisher - } - - /// A Boolean value indicating whether the audio session is currently active. - var isActive: Bool { source.isActive } - - /// The current audio route description for the session. - var currentRoute: AVAudioSessionRouteDescription { source.currentRoute } - - var category: AVAudioSession.Category { state.category } - - var mode: AVAudioSession.Mode { state.mode } - - var overrideOutputPort: AVAudioSession.PortOverride { state.overrideOutputPort } - - /// A Boolean value indicating whether the audio session uses manual - /// audio routing. - var useManualAudio: Bool { - set { source.useManualAudio = newValue } - get { source.useManualAudio } - } - - /// A Boolean value indicating whether audio is enabled for the session. 
- var isAudioEnabled: Bool { - set { source.isAudioEnabled = newValue } - get { source.isAudioEnabled } - } - - var hasRecordPermission: Bool { source.session.recordPermission == .granted } - - // MARK: - Lifecycle - - init() { - let source = RTCAudioSession.sharedInstance() - self.source = source - state = .init( - category: .init(rawValue: source.category), - mode: .init(rawValue: source.mode), - options: source.categoryOptions - ) - source.add(sourceDelegate) - } - - // MARK: - Configuration - - /// Configures the audio category and category options for the session. - /// - Parameters: - /// - category: The audio category, such as `.playAndRecord`. - /// - categoryOptions: Options for the category, including - /// `.allowBluetooth` and `.defaultToSpeaker`. - /// - Throws: An error if setting the category fails. - func setCategory( - _ category: AVAudioSession.Category, - mode: AVAudioSession.Mode, - with categoryOptions: AVAudioSession.CategoryOptions - ) async throws { - try await performOperation { [weak self] in - guard let self else { return } - - let state = self.state - let needsCategoryUpdate = category != state.category - let needsModeUpdate = mode != state.mode - let needsOptionsUpdate = categoryOptions != state.options - - guard needsCategoryUpdate || needsModeUpdate || needsOptionsUpdate else { - return - } - - if needsCategoryUpdate || needsOptionsUpdate { - if needsModeUpdate { - try source.setCategory( - category, - mode: mode, - options: categoryOptions - ) - } else { - try source.setCategory(category, with: categoryOptions) - } - } else if needsModeUpdate { - try source.setMode(mode) - } - - self.state = .init( - category: category, - mode: mode, - options: categoryOptions, - overrideOutputPort: state.overrideOutputPort - ) - - updateWebRTCConfiguration(with: self.state) - - log.debug("AudioSession updated with state \(self.state)", subsystems: .audioSession) - } - } - - /// Activates or deactivates the audio session. 
- /// - Parameter isActive: A Boolean indicating whether the session - /// should be active. - /// - Throws: An error if activation or deactivation fails. - func setActive( - _ isActive: Bool - ) async throws { - try await performOperation { [weak self] in - guard let self, source.isActive != isActive else { - return - } - - try source.setActive(isActive) - } - } - - /// Overrides the audio output port, such as switching to speaker output. - /// - Parameter port: The output port to use, such as `.speaker`. - /// - Throws: An error if overriding the output port fails. - func overrideOutputAudioPort( - _ port: AVAudioSession.PortOverride - ) async throws { - try await performOperation { [weak self] in - guard let self else { - return - } - - guard - state.category == .playAndRecord, - state.overrideOutputPort != port - else { - return - } - - try source.overrideOutputAudioPort(port) - state.overrideOutputPort = port - log.debug("AudioSession updated with state \(self.state)", subsystems: .audioSession) - } - } - - /// Requests permission to record audio from the user. - /// - Returns: A Boolean indicating whether permission was granted. - func requestRecordPermission() async -> Bool { - await withCheckedContinuation { continuation in - AVAudioSession.sharedInstance().requestRecordPermission { result in - continuation.resume(returning: result) - } - } - } - - // MARK: - Private Helpers - - private func performOperation( - _ operation: @Sendable @escaping () async throws -> Void - ) async throws { - try await processingQueue.addSynchronousTaskOperation { [weak self] in - guard let self else { return } - source.lockForConfiguration() - defer { source.unlockForConfiguration() } - try await operation() - } - } - - /// Updates the WebRTC audio session configuration. - /// - /// - Parameter state: The current state of the audio session. 
- /// - /// - Note: This is required to ensure that the WebRTC audio session - /// is configured correctly when the AVAudioSession is updated in - /// order to avoid unexpected changes to the category. - private func updateWebRTCConfiguration(with state: State) { - let webRTCConfiguration = RTCAudioSessionConfiguration.webRTC() - webRTCConfiguration.category = state.category.rawValue - webRTCConfiguration.mode = state.mode.rawValue - webRTCConfiguration.categoryOptions = state.options - RTCAudioSessionConfiguration.setWebRTC(webRTCConfiguration) - } -} - -extension AVAudioSession.PortOverride { - var stringValue: String { - switch self { - case .none: - return "none" - case .speaker: - return "speaker" - @unknown default: - return "unknown" - } - } -} diff --git a/Sources/StreamVideo/Utils/Proximity/Policies/SpeakerProximityPolicy.swift b/Sources/StreamVideo/Utils/Proximity/Policies/SpeakerProximityPolicy.swift index dced21c0d..b846201ab 100644 --- a/Sources/StreamVideo/Utils/Proximity/Policies/SpeakerProximityPolicy.swift +++ b/Sources/StreamVideo/Utils/Proximity/Policies/SpeakerProximityPolicy.swift @@ -11,7 +11,7 @@ import Foundation /// and restores previous speaker state when device moves away. 
public final class SpeakerProximityPolicy: ProximityPolicy, @unchecked Sendable { - @Injected(\.activeCallAudioSession) private var activeCallAudioSession + @Injected(\.audioStore) private var audioStore /// Unique identifier for this policy implementation public static let identifier: ObjectIdentifier = .init("speaker-proximity-policy" as NSString) @@ -36,7 +36,7 @@ public final class SpeakerProximityPolicy: ProximityPolicy, @unchecked Sendable guard let self, let call, - activeCallAudioSession?.currentRoute.isExternal == false + audioStore.session.currentRoute.isExternal == false else { return } diff --git a/Sources/StreamVideo/Utils/StreamAppStateAdapter/StreamAppStateAdapter.swift b/Sources/StreamVideo/Utils/StreamAppStateAdapter/StreamAppStateAdapter.swift index bf8944a73..a1da7f5d0 100644 --- a/Sources/StreamVideo/Utils/StreamAppStateAdapter/StreamAppStateAdapter.swift +++ b/Sources/StreamVideo/Utils/StreamAppStateAdapter/StreamAppStateAdapter.swift @@ -15,13 +15,13 @@ public protocol AppStateProviding: Sendable { } /// Represents the app's state: foreground or background. -public enum ApplicationState: String, Sendable, Equatable { case foreground, background } +public enum ApplicationState: String, Sendable, Equatable { case unknown, foreground, background } /// An adapter that observes the app's state and publishes changes. final class StreamAppStateAdapter: AppStateProviding, ObservableObject, @unchecked Sendable { /// The current state of the app. 
- @Published public private(set) var state: ApplicationState = .foreground + @Published public private(set) var state: ApplicationState = .unknown var statePublisher: AnyPublisher { $state.eraseToAnyPublisher() } private let notificationCenter: NotificationCenter @@ -32,6 +32,12 @@ final class StreamAppStateAdapter: AppStateProviding, ObservableObject, @uncheck init(notificationCenter: NotificationCenter = .default) { self.notificationCenter = notificationCenter setUp() + + statePublisher + .removeDuplicates() + .log(.debug) { "Application state changed to \($0)" } + .sink { _ in } + .store(in: disposableBag) } // MARK: - Private Helpers @@ -49,6 +55,13 @@ final class StreamAppStateAdapter: AppStateProviding, ObservableObject, @uncheck .assign(to: \.state, onWeak: self) .store(in: disposableBag) + notificationCenter + .publisher(for: UIApplication.didBecomeActiveNotification) + .map { _ in ApplicationState.foreground } + .receive(on: DispatchQueue.main) + .assign(to: \.state, onWeak: self) + .store(in: disposableBag) + notificationCenter .publisher(for: UIApplication.didEnterBackgroundNotification) .map { _ in ApplicationState.background } @@ -56,6 +69,17 @@ final class StreamAppStateAdapter: AppStateProviding, ObservableObject, @uncheck .assign(to: \.state, onWeak: self) .store(in: disposableBag) + switch UIApplication.shared.applicationState { + case .active: + state = .foreground + case .inactive: + state = .unknown + case .background: + state = .background + @unknown default: + state = .unknown + } + log.debug("\(type(of: self)) now observes application lifecycle.") } #endif diff --git a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joined.swift b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joined.swift index c0e008955..f4e91c69a 100644 --- a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joined.swift +++ b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joined.swift @@ -29,6 
+29,7 @@ extension WebRTCCoordinator.StateMachine.Stage { @unchecked Sendable { @Injected(\.internetConnectionObserver) private var internetConnectionObserver + @Injected(\.audioStore) private var audioStore private let disposableBag = DisposableBag() private var updateSubscriptionsAdapter: WebRTCUpdateSubscriptionsAdapter? @@ -397,11 +398,6 @@ extension WebRTCCoordinator.StateMachine.Stage { .removeDuplicates() .sinkTask(storeIn: disposableBag) { [weak self] callSettings in guard let self else { return } - - if let statsAdapter = await context.coordinator?.stateAdapter.statsAdapter { - statsAdapter.callSettings = callSettings - } - do { guard let publisher = await context.coordinator?.stateAdapter.publisher diff --git a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joining.swift b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joining.swift index 4bb7a28e0..0656aba35 100644 --- a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joining.swift +++ b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joining.swift @@ -360,6 +360,17 @@ extension WebRTCCoordinator.StateMachine.Stage { try Task.checkCancellation() if !isFastReconnecting { + /// Configures the audio session for the current call using the provided + /// join source. This ensures the session setup reflects whether the + /// join was triggered in-app or via CallKit and applies the correct + /// audio routing and category. + try await coordinator.stateAdapter.configureAudioSession( + source: context.joinSource + ) + + /// Configures all peer connections after the audio session is ready. + /// Ensures signaling, media, and routing are correctly established for + /// all tracks as part of the join process. 
try await coordinator.stateAdapter.configurePeerConnections() // Once our PeerConnection have been created we consume the diff --git a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Leaving.swift b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Leaving.swift index cdd57bebb..66dca0b83 100644 --- a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Leaving.swift +++ b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Leaving.swift @@ -47,7 +47,7 @@ extension WebRTCCoordinator.StateMachine.Stage { from previousStage: WebRTCCoordinator.StateMachine.Stage ) -> Self? { switch previousStage.id { - case .joined, .disconnected: + case .joined, .disconnected, .connecting, .connected: execute() return self default: diff --git a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Stage.swift b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Stage.swift index 56c372e23..413e7ea55 100644 --- a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Stage.swift +++ b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Stage.swift @@ -21,6 +21,7 @@ extension WebRTCCoordinator.StateMachine { var reconnectionStrategy: ReconnectionStrategy = .unknown var disconnectionSource: WebSocketConnectionState.DisconnectionSource? = nil var flowError: Error? + var joinSource: JoinSource? var isRejoiningFromSessionID: String? = nil var migratingFromSFU: String = "" diff --git a/Sources/StreamVideo/WebRTC/v2/Stats/Models/WebRTCTrace.swift b/Sources/StreamVideo/WebRTC/v2/Stats/Models/WebRTCTrace.swift index c4d4b0848..1d9377764 100644 --- a/Sources/StreamVideo/WebRTC/v2/Stats/Models/WebRTCTrace.swift +++ b/Sources/StreamVideo/WebRTC/v2/Stats/Models/WebRTCTrace.swift @@ -134,7 +134,7 @@ extension WebRTCTrace { /// - callSettings: The active call settings. /// - audioSession: The audio session state. 
init( - audioSession: StreamAudioSession + audioSession: CallAudioSession.TraceRepresentation ) { self.init( id: nil, diff --git a/Sources/StreamVideo/WebRTC/v2/Stats/Traces/WebRTCTracesAdapter.swift b/Sources/StreamVideo/WebRTC/v2/Stats/Traces/WebRTCTracesAdapter.swift index bdc0de9a9..0c5ce4758 100644 --- a/Sources/StreamVideo/WebRTC/v2/Stats/Traces/WebRTCTracesAdapter.swift +++ b/Sources/StreamVideo/WebRTC/v2/Stats/Traces/WebRTCTracesAdapter.swift @@ -84,15 +84,6 @@ final class WebRTCTracesAdapter: WebRTCTracing, @unchecked Sendable { didSet { didUpdate(subscriber: subscriber) } } - /// Current call settings for the ongoing session. Updating this triggers a - /// trace update for session-related state. - var callSettings: CallSettings? { - didSet { didUpdate(callSettings) } - } - - /// Audio session in use for this call. Used to enrich trace data with audio state. - var audioSession: StreamAudioSession? - /// Buffers trace events related to publisher/subscriber peer connections. private let peerConnectionBucket: ConsumableBucket /// Buffers trace events related to SFU adapter requests and responses. @@ -337,16 +328,4 @@ final class WebRTCTracesAdapter: WebRTCTracing, @unchecked Sendable { ) ) } - - /// Handles updates to call settings and audio session state. - /// - /// Enriches trace data with the latest call and audio configuration. - private func didUpdate(_ callSettings: CallSettings?) { - guard let audioSession else { - return - } - genericRequestsBucket.append( - .init(audioSession: audioSession) - ) - } } diff --git a/Sources/StreamVideo/WebRTC/v2/Stats/Traces/WebRTCTracing.swift b/Sources/StreamVideo/WebRTC/v2/Stats/Traces/WebRTCTracing.swift index 67ee1ea5c..a4a5b1ba3 100644 --- a/Sources/StreamVideo/WebRTC/v2/Stats/Traces/WebRTCTracing.swift +++ b/Sources/StreamVideo/WebRTC/v2/Stats/Traces/WebRTCTracing.swift @@ -22,12 +22,6 @@ protocol WebRTCTracing: AnyObject, Sendable { /// Subscriber peer connection coordinator. 
var subscriber: RTCPeerConnectionCoordinator? { get set } - /// Current call settings. - var callSettings: CallSettings? { get set } - - /// Audio session reference. - var audioSession: StreamAudioSession? { get set } - /// Adds a trace event to the appropriate bucket. func trace(_ trace: WebRTCTrace) diff --git a/Sources/StreamVideo/WebRTC/v2/Stats/WebRTCStatsAdapter.swift b/Sources/StreamVideo/WebRTC/v2/Stats/WebRTCStatsAdapter.swift index fe134bbd9..d5ec51ba2 100644 --- a/Sources/StreamVideo/WebRTC/v2/Stats/WebRTCStatsAdapter.swift +++ b/Sources/StreamVideo/WebRTC/v2/Stats/WebRTCStatsAdapter.swift @@ -45,17 +45,6 @@ final class WebRTCStatsAdapter: @unchecked Sendable, WebRTCStatsAdapting { didSet { didUpdate(subscriber: subscriber) } } - /// The current call settings associated with this adapter. Setting this - /// updates the trace adapter with the latest session configuration. - var callSettings: CallSettings? { - didSet { traces.callSettings = callSettings } - } - - /// The audio session used in this call. Used for trace enrichment. - var audioSession: StreamAudioSession? { - didSet { traces.audioSession = audioSession } - } - /// The interval at which statistics are reported (in seconds). /// /// Changing this property reschedules the reporting timer. diff --git a/Sources/StreamVideo/WebRTC/v2/Stats/WebRTCStatsAdapting.swift b/Sources/StreamVideo/WebRTC/v2/Stats/WebRTCStatsAdapting.swift index 62c0f4bc8..45a9876dc 100644 --- a/Sources/StreamVideo/WebRTC/v2/Stats/WebRTCStatsAdapting.swift +++ b/Sources/StreamVideo/WebRTC/v2/Stats/WebRTCStatsAdapting.swift @@ -31,17 +31,6 @@ protocol WebRTCStatsAdapting: AnyObject, Sendable { /// subscriber peer connection. var subscriber: RTCPeerConnectionCoordinator? { get set } - /// The current call settings for the session. - /// - /// Updating this property updates the trace adapter and statistics collection - /// with the latest session configuration. - var callSettings: CallSettings? 
{ get set } - - /// The audio session currently used in the call. - /// - /// Used for trace enrichment and reporting audio configuration. - var audioSession: StreamAudioSession? { get set } - /// The interval (in seconds) at which statistics are reported. /// /// Changing this property reschedules the stats reporting timer. diff --git a/Sources/StreamVideo/WebRTC/v2/WebRTCCoordinator.swift b/Sources/StreamVideo/WebRTC/v2/WebRTCCoordinator.swift index a234e63b8..3bbdf2c36 100644 --- a/Sources/StreamVideo/WebRTC/v2/WebRTCCoordinator.swift +++ b/Sources/StreamVideo/WebRTC/v2/WebRTCCoordinator.swift @@ -95,9 +95,11 @@ final class WebRTCCoordinator: @unchecked Sendable { callSettings: CallSettings?, options: CreateCallOptions?, ring: Bool, - notify: Bool + notify: Bool, + source: JoinSource ) async throws { await stateAdapter.set(initialCallSettings: callSettings) + stateMachine.currentStage.context.joinSource = source stateMachine.transition( .connecting( stateMachine.currentStage.context, @@ -428,12 +430,8 @@ final class WebRTCCoordinator: @unchecked Sendable { ) } - func updateAudioSessionPolicy(_ policy: AudioSessionPolicy) async throws { - try await stateAdapter.audioSession.didUpdatePolicy(policy) - } - - func callKitActivated(_ audioSession: AVAudioSessionProtocol) throws { - try stateAdapter.audioSession.callKitActivated(audioSession) + func updateAudioSessionPolicy(_ policy: AudioSessionPolicy) async { + await stateAdapter.set(audioSessionPolicy: policy) } func enableClientCapabilities(_ capabilities: Set) async { diff --git a/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift b/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift index 3b570362d..0083167f9 100644 --- a/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift +++ b/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift @@ -43,7 +43,7 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate { let peerConnectionFactory: PeerConnectionFactory let 
videoCaptureSessionProvider: VideoCaptureSessionProvider let screenShareSessionProvider: ScreenShareSessionProvider - let audioSession: StreamAudioSession + let audioSession: CallAudioSession let trackStorage: WebRTCTrackStorage = .init() /// Published properties that represent different parts of the WebRTC state. @@ -124,15 +124,7 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate { self.rtcPeerConnectionCoordinatorFactory = rtcPeerConnectionCoordinatorFactory self.videoCaptureSessionProvider = videoCaptureSessionProvider self.screenShareSessionProvider = screenShareSessionProvider - self.audioSession = .init(audioDeviceModule: peerConnectionFactory.audioDeviceModule) - - Task { - await configureAudioSession() - } - } - - deinit { - audioSession.dismantle() + self.audioSession = .init() } /// Sets the session ID. @@ -185,7 +177,6 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate { /// Sets the WebRTC stats reporter. func set(statsAdapter value: WebRTCStatsAdapting?) { self.statsAdapter = value - value?.audioSession = audioSession value?.consume(queuedTraces) } @@ -224,6 +215,14 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate { statsAdapter?.isTracingEnabled = value } + func set(audioSessionPolicy: AudioSessionPolicy) { + audioSession.didUpdatePolicy( + audioSessionPolicy, + callSettings: callSettings, + ownCapabilities: ownCapabilities + ) + } + // MARK: - Client Capabilities func enableClientCapabilities(_ capabilities: Set) { @@ -363,6 +362,7 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate { set(anonymousCount: 0) set(participantPins: []) trackStorage.removeAll() + audioSession.deactivate() } /// Cleans up the session for reconnection, clearing adapters and tracks. 
@@ -389,6 +389,7 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate { /// We set the initialCallSettings to the last activated CallSettings, in order to maintain the state /// during reconnects. initialCallSettings = callSettings + audioSession.deactivate() } /// Restores screen sharing if an active session exists. @@ -634,36 +635,21 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate { } } - private func configureAudioSession() { - audioSession.delegate = self - - $callSettings - .removeDuplicates() - .sinkTask(storeIn: disposableBag) { [weak audioSession] in - do { - try await audioSession?.didUpdateCallSettings($0) - } catch { - log.error(error) - } - } - .store(in: disposableBag) - - $ownCapabilities - .removeDuplicates() - .sinkTask(storeIn: disposableBag) { [weak audioSession] in - do { - try await audioSession?.didUpdateOwnCapabilities($0) - } catch { - log.error(error) - } - } - .store(in: disposableBag) + func configureAudioSession(source: JoinSource?) async throws { + audioSession.activate( + callSettingsPublisher: $callSettings.removeDuplicates().eraseToAnyPublisher(), + ownCapabilitiesPublisher: $ownCapabilities.removeDuplicates().eraseToAnyPublisher(), + delegate: self, + statsAdapter: statsAdapter, + /// If we are joining from CallKit the AudioSession will be activated from it and we + /// shouldn't attempt another activation. 
+ shouldSetActive: source != .callKit + ) } // MARK: - AudioSessionDelegate nonisolated func audioSessionAdapterDidUpdateCallSettings( - _ adapter: StreamAudioSession, callSettings: CallSettings ) { Task(disposableBag: disposableBag) { [weak self] in diff --git a/StreamVideo.xcodeproj/project.pbxproj b/StreamVideo.xcodeproj/project.pbxproj index f922f377c..8c1784ad2 100644 --- a/StreamVideo.xcodeproj/project.pbxproj +++ b/StreamVideo.xcodeproj/project.pbxproj @@ -43,10 +43,27 @@ 40149DC32B7E202600473176 /* ParticipantEventViewModifier.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40149DC22B7E202600473176 /* ParticipantEventViewModifier.swift */; }; 40149DCC2B7E814300473176 /* AVAudioRecorderBuilder.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40149DCB2B7E814300473176 /* AVAudioRecorderBuilder.swift */; }; 40149DCE2B7E837A00473176 /* StreamCallAudioRecorder.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40149DCD2B7E837A00473176 /* StreamCallAudioRecorder.swift */; }; - 40149DD02B7E839500473176 /* AVAudioSession+RequestRecordPermission.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40149DCF2B7E839500473176 /* AVAudioSession+RequestRecordPermission.swift */; }; 4014F1032D8C2EBC004E7EFD /* Gleap in Frameworks */ = {isa = PBXBuildFile; productRef = 4014F1022D8C2EBC004E7EFD /* Gleap */; }; 4014F1062D8C2F07004E7EFD /* GleapAdapter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4014F1052D8C2F07004E7EFD /* GleapAdapter.swift */; }; 4014F1072D8C2F07004E7EFD /* GleapAdapter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4014F1052D8C2F07004E7EFD /* GleapAdapter.swift */; }; + 4019A2502E40E08B00CE70A4 /* RTCAudioStore.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A24F2E40E08B00CE70A4 /* RTCAudioStore.swift */; }; + 4019A2542E40E25000CE70A4 /* RTCAudioStoreAction+Generic.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A2532E40E25000CE70A4 /* RTCAudioStoreAction+Generic.swift */; }; + 4019A2572E40E27000CE70A4 /* 
RTCAudioStore+State.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A2562E40E27000CE70A4 /* RTCAudioStore+State.swift */; }; + 4019A25A2E40E2A600CE70A4 /* RTCAudioStoreAction.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A2592E40E2A600CE70A4 /* RTCAudioStoreAction.swift */; }; + 4019A25C2E40E2E800CE70A4 /* RTCAudioStoreReducer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A25B2E40E2E800CE70A4 /* RTCAudioStoreReducer.swift */; }; + 4019A25E2E40E45D00CE70A4 /* RTCAudioSessionReducer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A25D2E40E45D00CE70A4 /* RTCAudioSessionReducer.swift */; }; + 4019A2632E40EB6000CE70A4 /* RTCAudioStoreMiddleware.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A2622E40EB6000CE70A4 /* RTCAudioStoreMiddleware.swift */; }; + 4019A2682E40ED5900CE70A4 /* RTCAudioStore+InterruptionEffect.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A2672E40ED5900CE70A4 /* RTCAudioStore+InterruptionEffect.swift */; }; + 4019A26D2E40F48300CE70A4 /* CallAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A26C2E40F48300CE70A4 /* CallAudioSession.swift */; }; + 4019A26F2E40FC8F00CE70A4 /* RTCAudioStore+RouteChangeEffect.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A26E2E40FC8F00CE70A4 /* RTCAudioStore+RouteChangeEffect.swift */; }; + 4019A2782E42225800CE70A4 /* CallKitAudioSessionReducer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A2772E42225800CE70A4 /* CallKitAudioSessionReducer.swift */; }; + 4019A27A2E42475300CE70A4 /* JoinSource.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A2792E42475300CE70A4 /* JoinSource.swift */; }; + 4019A27C2E43397100CE70A4 /* RTCAudioStoreAction+AudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A27B2E43397100CE70A4 /* RTCAudioStoreAction+AudioSession.swift */; }; + 4019A27E2E43398C00CE70A4 /* RTCAudioStoreAction+CallKit.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A27D2E43398C00CE70A4 /* 
RTCAudioStoreAction+CallKit.swift */; }; + 4019A2802E43529000CE70A4 /* AudioSessionProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A27F2E43529000CE70A4 /* AudioSessionProtocol.swift */; }; + 4019A2832E4352DF00CE70A4 /* RTCAudioSession+AudioSessionProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A2822E4352DF00CE70A4 /* RTCAudioSession+AudioSessionProtocol.swift */; }; + 4019A2872E43565A00CE70A4 /* MockAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A2862E43565A00CE70A4 /* MockAudioSession.swift */; }; + 4019A2892E4357B200CE70A4 /* MockRTCAudioStore.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A2882E4357B200CE70A4 /* MockRTCAudioStore.swift */; }; 401A0F032AB1C1B600BE2DBD /* ThermalStateObserver.swift in Sources */ = {isa = PBXBuildFile; fileRef = 401A0F022AB1C1B600BE2DBD /* ThermalStateObserver.swift */; }; 401A64A52A9DF79E00534ED1 /* StreamChatSwiftUI in Frameworks */ = {isa = PBXBuildFile; productRef = 401A64A42A9DF79E00534ED1 /* StreamChatSwiftUI */; }; 401A64A82A9DF7B400534ED1 /* EffectsLibrary in Frameworks */ = {isa = PBXBuildFile; productRef = 401A64A72A9DF7B400534ED1 /* EffectsLibrary */; }; @@ -266,11 +283,6 @@ 4067F30D2CDA3377002E28BD /* AVAudioSessionRouteChangeReason+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F30C2CDA3377002E28BD /* AVAudioSessionRouteChangeReason+Convenience.swift */; }; 4067F30F2CDA3394002E28BD /* AVAudioSessionCategoryOptions+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F30E2CDA3394002E28BD /* AVAudioSessionCategoryOptions+Convenience.swift */; }; 4067F3132CDA33C6002E28BD /* AVAudioSession.CategoryOptions+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F3122CDA33C4002E28BD /* AVAudioSession.CategoryOptions+Convenience.swift */; }; - 4067F3152CDA4094002E28BD /* StreamRTCAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F3142CDA4094002E28BD /* StreamRTCAudioSession.swift */; }; - 
4067F3172CDA40CC002E28BD /* StreamAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F3162CDA40CC002E28BD /* StreamAudioSession.swift */; }; - 4067F3192CDA469F002E28BD /* MockAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F3182CDA469C002E28BD /* MockAudioSession.swift */; }; - 4067F31C2CDA55D6002E28BD /* StreamRTCAudioSession_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F31B2CDA55D6002E28BD /* StreamRTCAudioSession_Tests.swift */; }; - 4067F31E2CDA5A56002E28BD /* StreamAudioSession_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4067F31D2CDA5A53002E28BD /* StreamAudioSession_Tests.swift */; }; 4069A0042AD985D2009A3A06 /* CallParticipant_Mock.swift in Sources */ = {isa = PBXBuildFile; fileRef = 406303412AD848000091AE77 /* CallParticipant_Mock.swift */; }; 4069A0052AD985D3009A3A06 /* CallParticipant_Mock.swift in Sources */ = {isa = PBXBuildFile; fileRef = 406303412AD848000091AE77 /* CallParticipant_Mock.swift */; }; 406A8E8D2AA1D78C001F598A /* AppEnvironment.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4030E59F2A9DF5BD003E8CBA /* AppEnvironment.swift */; }; @@ -699,6 +711,14 @@ 40D36AE22DDE023800972D75 /* WebRTCStatsCollecting.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D36AE12DDE023800972D75 /* WebRTCStatsCollecting.swift */; }; 40D36AE42DDE02D100972D75 /* MockWebRTCStatsCollector.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D36AE32DDE02D100972D75 /* MockWebRTCStatsCollector.swift */; }; 40D6ADDD2ACDB51C00EF5336 /* VideoRenderer_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D6ADDC2ACDB51C00EF5336 /* VideoRenderer_Tests.swift */; }; + 40D75C522E437FBC000E0438 /* InterruptionEffect_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D75C512E437FBC000E0438 /* InterruptionEffect_Tests.swift */; }; + 40D75C542E438317000E0438 /* RouteChangeEffect_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D75C532E438317000E0438 /* 
RouteChangeEffect_Tests.swift */; }; + 40D75C562E4385FE000E0438 /* MockAVAudioSessionPortDescription.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D75C552E4385FE000E0438 /* MockAVAudioSessionPortDescription.swift */; }; + 40D75C582E438607000E0438 /* MockAVAudioSessionRouteDescription.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D75C572E438607000E0438 /* MockAVAudioSessionRouteDescription.swift */; }; + 40D75C5C2E438633000E0438 /* AVAudioSessionRouteDescription+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D75C5B2E438633000E0438 /* AVAudioSessionRouteDescription+Dummy.swift */; }; + 40D75C5F2E438AC0000E0438 /* CallKitAudioSessionReducer_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D75C5E2E438AC0000E0438 /* CallKitAudioSessionReducer_Tests.swift */; }; + 40D75C612E438BBF000E0438 /* RTCAudioSessionReducer_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D75C602E438BBF000E0438 /* RTCAudioSessionReducer_Tests.swift */; }; + 40D75C632E4396D2000E0438 /* RTCAudioStore_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D75C622E4396D2000E0438 /* RTCAudioStore_Tests.swift */; }; 40D946412AA5ECEF00C8861B /* CodeScanner.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D946402AA5ECEF00C8861B /* CodeScanner.swift */; }; 40D946432AA5F65300C8861B /* DemoQRCodeScannerButton.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D946422AA5F65300C8861B /* DemoQRCodeScannerButton.swift */; }; 40D946452AA5F67E00C8861B /* DemoCallingTopView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D946442AA5F67E00C8861B /* DemoCallingTopView.swift */; }; @@ -736,7 +756,6 @@ 40E363712D0A27640028C52A /* BroadcastCaptureHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363702D0A27640028C52A /* BroadcastCaptureHandler.swift */; }; 40E363752D0A2C6B0028C52A /* CGSize+Adapt.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363742D0A2C6B0028C52A /* CGSize+Adapt.swift */; }; 40E363772D0A2E320028C52A /* 
BroadcastBufferReaderKey.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363762D0A2E320028C52A /* BroadcastBufferReaderKey.swift */; }; - 40E741FA2D54E6F40044C955 /* RTCAudioSessionDelegatePublisher.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E741F92D54E6F40044C955 /* RTCAudioSessionDelegatePublisher.swift */; }; 40E741FF2D553ACD0044C955 /* CurrentDevice.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E741FE2D553ACD0044C955 /* CurrentDevice.swift */; }; 40E7A45B2E29495500E8AB8B /* WebRTCLogger.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E7A4582E29487700E8AB8B /* WebRTCLogger.swift */; }; 40E9B3B12BCD755F00ACF18F /* MemberResponse+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E9B3B02BCD755F00ACF18F /* MemberResponse+Dummy.swift */; }; @@ -787,7 +806,6 @@ 40F1016C2D5A654300C49481 /* DefaultAudioSessionPolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F1016B2D5A654300C49481 /* DefaultAudioSessionPolicy.swift */; }; 40F101792D5CBA2D00C49481 /* XCTAsyncUnwrap.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F101782D5CBA2D00C49481 /* XCTAsyncUnwrap.swift */; }; 40F1017C2D5CE7E600C49481 /* AVAudioSessionCategoryOptions_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F1017B2D5CE7E600C49481 /* AVAudioSessionCategoryOptions_Tests.swift */; }; - 40F1017E2D5CF32E00C49481 /* RTCAudioSessionDelegatePublisher_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F1017D2D5CF32E00C49481 /* RTCAudioSessionDelegatePublisher_Tests.swift */; }; 40F101802D5D078800C49481 /* MockAudioSessionPolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F1017F2D5D078800C49481 /* MockAudioSessionPolicy.swift */; }; 40F161AB2A4C6B5C00846E3E /* ScreenSharingSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F161AA2A4C6B5C00846E3E /* ScreenSharingSession.swift */; }; 40F18B8C2BEBAC4C00ADF76E /* CallEndedViewModifier.swift in Sources */ = {isa = PBXBuildFile; fileRef = 403EFC9E2BDBFE050057C248 
/* CallEndedViewModifier.swift */; }; @@ -1705,8 +1723,25 @@ 40149DC22B7E202600473176 /* ParticipantEventViewModifier.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ParticipantEventViewModifier.swift; sourceTree = ""; }; 40149DCB2B7E814300473176 /* AVAudioRecorderBuilder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AVAudioRecorderBuilder.swift; sourceTree = ""; }; 40149DCD2B7E837A00473176 /* StreamCallAudioRecorder.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamCallAudioRecorder.swift; sourceTree = ""; }; - 40149DCF2B7E839500473176 /* AVAudioSession+RequestRecordPermission.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession+RequestRecordPermission.swift"; sourceTree = ""; }; 4014F1052D8C2F07004E7EFD /* GleapAdapter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = GleapAdapter.swift; sourceTree = ""; }; + 4019A24F2E40E08B00CE70A4 /* RTCAudioStore.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioStore.swift; sourceTree = ""; }; + 4019A2532E40E25000CE70A4 /* RTCAudioStoreAction+Generic.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStoreAction+Generic.swift"; sourceTree = ""; }; + 4019A2562E40E27000CE70A4 /* RTCAudioStore+State.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStore+State.swift"; sourceTree = ""; }; + 4019A2592E40E2A600CE70A4 /* RTCAudioStoreAction.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioStoreAction.swift; sourceTree = ""; }; + 4019A25B2E40E2E800CE70A4 /* RTCAudioStoreReducer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioStoreReducer.swift; sourceTree = ""; }; + 4019A25D2E40E45D00CE70A4 /* RTCAudioSessionReducer.swift */ = {isa = PBXFileReference; 
lastKnownFileType = sourcecode.swift; path = RTCAudioSessionReducer.swift; sourceTree = ""; }; + 4019A2622E40EB6000CE70A4 /* RTCAudioStoreMiddleware.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioStoreMiddleware.swift; sourceTree = ""; }; + 4019A2672E40ED5900CE70A4 /* RTCAudioStore+InterruptionEffect.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStore+InterruptionEffect.swift"; sourceTree = ""; }; + 4019A26C2E40F48300CE70A4 /* CallAudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallAudioSession.swift; sourceTree = ""; }; + 4019A26E2E40FC8F00CE70A4 /* RTCAudioStore+RouteChangeEffect.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStore+RouteChangeEffect.swift"; sourceTree = ""; }; + 4019A2772E42225800CE70A4 /* CallKitAudioSessionReducer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallKitAudioSessionReducer.swift; sourceTree = ""; }; + 4019A2792E42475300CE70A4 /* JoinSource.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = JoinSource.swift; sourceTree = ""; }; + 4019A27B2E43397100CE70A4 /* RTCAudioStoreAction+AudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStoreAction+AudioSession.swift"; sourceTree = ""; }; + 4019A27D2E43398C00CE70A4 /* RTCAudioStoreAction+CallKit.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStoreAction+CallKit.swift"; sourceTree = ""; }; + 4019A27F2E43529000CE70A4 /* AudioSessionProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioSessionProtocol.swift; sourceTree = ""; }; + 4019A2822E4352DF00CE70A4 /* RTCAudioSession+AudioSessionProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioSession+AudioSessionProtocol.swift"; 
sourceTree = ""; }; + 4019A2862E43565A00CE70A4 /* MockAudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockAudioSession.swift; sourceTree = ""; }; + 4019A2882E4357B200CE70A4 /* MockRTCAudioStore.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockRTCAudioStore.swift; sourceTree = ""; }; 401A0F022AB1C1B600BE2DBD /* ThermalStateObserver.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ThermalStateObserver.swift; sourceTree = ""; }; 401A64AA2A9DF7EC00534ED1 /* DemoChatAdapter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DemoChatAdapter.swift; sourceTree = ""; }; 401A64B02A9DF83200534ED1 /* TokenResponse.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TokenResponse.swift; sourceTree = ""; }; @@ -1867,11 +1902,6 @@ 4067F30C2CDA3377002E28BD /* AVAudioSessionRouteChangeReason+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSessionRouteChangeReason+Convenience.swift"; sourceTree = ""; }; 4067F30E2CDA3394002E28BD /* AVAudioSessionCategoryOptions+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSessionCategoryOptions+Convenience.swift"; sourceTree = ""; }; 4067F3122CDA33C4002E28BD /* AVAudioSession.CategoryOptions+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSession.CategoryOptions+Convenience.swift"; sourceTree = ""; }; - 4067F3142CDA4094002E28BD /* StreamRTCAudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamRTCAudioSession.swift; sourceTree = ""; }; - 4067F3162CDA40CC002E28BD /* StreamAudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamAudioSession.swift; sourceTree = ""; }; - 4067F3182CDA469C002E28BD /* MockAudioSession.swift */ = {isa = 
PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockAudioSession.swift; sourceTree = ""; }; - 4067F31B2CDA55D6002E28BD /* StreamRTCAudioSession_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamRTCAudioSession_Tests.swift; sourceTree = ""; }; - 4067F31D2CDA5A53002E28BD /* StreamAudioSession_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamAudioSession_Tests.swift; sourceTree = ""; }; 406AF2002AF3D98F00ED4D0C /* SimulatorScreenCapturer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SimulatorScreenCapturer.swift; sourceTree = ""; }; 406AF2042AF3DE4000ED4D0C /* test.mp4 */ = {isa = PBXFileReference; lastKnownFileType = file; path = test.mp4; sourceTree = ""; }; 406B3BD62C8F331F00FC93A1 /* RTCVideoTrack+Sendable.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCVideoTrack+Sendable.swift"; sourceTree = ""; }; @@ -2213,6 +2243,14 @@ 40D36AE12DDE023800972D75 /* WebRTCStatsCollecting.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WebRTCStatsCollecting.swift; sourceTree = ""; }; 40D36AE32DDE02D100972D75 /* MockWebRTCStatsCollector.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockWebRTCStatsCollector.swift; sourceTree = ""; }; 40D6ADDC2ACDB51C00EF5336 /* VideoRenderer_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = VideoRenderer_Tests.swift; sourceTree = ""; }; + 40D75C512E437FBC000E0438 /* InterruptionEffect_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = InterruptionEffect_Tests.swift; sourceTree = ""; }; + 40D75C532E438317000E0438 /* RouteChangeEffect_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RouteChangeEffect_Tests.swift; sourceTree = ""; }; + 40D75C552E4385FE000E0438 /* MockAVAudioSessionPortDescription.swift */ 
= {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockAVAudioSessionPortDescription.swift; sourceTree = ""; }; + 40D75C572E438607000E0438 /* MockAVAudioSessionRouteDescription.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockAVAudioSessionRouteDescription.swift; sourceTree = ""; }; + 40D75C5B2E438633000E0438 /* AVAudioSessionRouteDescription+Dummy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AVAudioSessionRouteDescription+Dummy.swift"; sourceTree = ""; }; + 40D75C5E2E438AC0000E0438 /* CallKitAudioSessionReducer_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallKitAudioSessionReducer_Tests.swift; sourceTree = ""; }; + 40D75C602E438BBF000E0438 /* RTCAudioSessionReducer_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioSessionReducer_Tests.swift; sourceTree = ""; }; + 40D75C622E4396D2000E0438 /* RTCAudioStore_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioStore_Tests.swift; sourceTree = ""; }; 40D946402AA5ECEF00C8861B /* CodeScanner.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CodeScanner.swift; sourceTree = ""; }; 40D946422AA5F65300C8861B /* DemoQRCodeScannerButton.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DemoQRCodeScannerButton.swift; sourceTree = ""; }; 40D946442AA5F67E00C8861B /* DemoCallingTopView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DemoCallingTopView.swift; sourceTree = ""; }; @@ -2246,7 +2284,6 @@ 40E363702D0A27640028C52A /* BroadcastCaptureHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BroadcastCaptureHandler.swift; sourceTree = ""; }; 40E363742D0A2C6B0028C52A /* CGSize+Adapt.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = 
"CGSize+Adapt.swift"; sourceTree = ""; }; 40E363762D0A2E320028C52A /* BroadcastBufferReaderKey.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BroadcastBufferReaderKey.swift; sourceTree = ""; }; - 40E741F92D54E6F40044C955 /* RTCAudioSessionDelegatePublisher.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioSessionDelegatePublisher.swift; sourceTree = ""; }; 40E741FE2D553ACD0044C955 /* CurrentDevice.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CurrentDevice.swift; sourceTree = ""; }; 40E7A4582E29487700E8AB8B /* WebRTCLogger.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WebRTCLogger.swift; sourceTree = ""; }; 40E9B3B02BCD755F00ACF18F /* MemberResponse+Dummy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "MemberResponse+Dummy.swift"; sourceTree = ""; }; @@ -2294,7 +2331,6 @@ 40F1016B2D5A654300C49481 /* DefaultAudioSessionPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DefaultAudioSessionPolicy.swift; sourceTree = ""; }; 40F101782D5CBA2D00C49481 /* XCTAsyncUnwrap.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = XCTAsyncUnwrap.swift; sourceTree = ""; }; 40F1017B2D5CE7E600C49481 /* AVAudioSessionCategoryOptions_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AVAudioSessionCategoryOptions_Tests.swift; sourceTree = ""; }; - 40F1017D2D5CF32E00C49481 /* RTCAudioSessionDelegatePublisher_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioSessionDelegatePublisher_Tests.swift; sourceTree = ""; }; 40F1017F2D5D078800C49481 /* MockAudioSessionPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockAudioSessionPolicy.swift; sourceTree = ""; }; 40F161AA2A4C6B5C00846E3E /* ScreenSharingSession.swift */ = {isa = 
PBXFileReference; lastKnownFileType = sourcecode.swift; path = ScreenSharingSession.swift; sourceTree = ""; }; 40F18B8D2BEBB65100ADF76E /* View+OptionalPublisher.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "View+OptionalPublisher.swift"; sourceTree = ""; }; @@ -3139,6 +3175,75 @@ path = Gleap; sourceTree = ""; }; + 4019A24E2E40E08200CE70A4 /* RTCAudioStore */ = { + isa = PBXGroup; + children = ( + 4019A2812E4352CB00CE70A4 /* AudioSessions */, + 4019A2642E40ECFA00CE70A4 /* Effects */, + 4019A2552E40E26800CE70A4 /* State */, + 4019A2522E40E22E00CE70A4 /* Actions */, + 4019A2612E40EB4700CE70A4 /* Middleware */, + 4019A2512E40E22300CE70A4 /* Reducers */, + 4019A24F2E40E08B00CE70A4 /* RTCAudioStore.swift */, + ); + path = RTCAudioStore; + sourceTree = ""; + }; + 4019A2512E40E22300CE70A4 /* Reducers */ = { + isa = PBXGroup; + children = ( + 4019A25B2E40E2E800CE70A4 /* RTCAudioStoreReducer.swift */, + 4019A25D2E40E45D00CE70A4 /* RTCAudioSessionReducer.swift */, + 4019A2772E42225800CE70A4 /* CallKitAudioSessionReducer.swift */, + ); + path = Reducers; + sourceTree = ""; + }; + 4019A2522E40E22E00CE70A4 /* Actions */ = { + isa = PBXGroup; + children = ( + 4019A2592E40E2A600CE70A4 /* RTCAudioStoreAction.swift */, + 4019A2532E40E25000CE70A4 /* RTCAudioStoreAction+Generic.swift */, + 4019A27B2E43397100CE70A4 /* RTCAudioStoreAction+AudioSession.swift */, + 4019A27D2E43398C00CE70A4 /* RTCAudioStoreAction+CallKit.swift */, + ); + path = Actions; + sourceTree = ""; + }; + 4019A2552E40E26800CE70A4 /* State */ = { + isa = PBXGroup; + children = ( + 4019A2562E40E27000CE70A4 /* RTCAudioStore+State.swift */, + ); + path = State; + sourceTree = ""; + }; + 4019A2612E40EB4700CE70A4 /* Middleware */ = { + isa = PBXGroup; + children = ( + 4019A2622E40EB6000CE70A4 /* RTCAudioStoreMiddleware.swift */, + ); + path = Middleware; + sourceTree = ""; + }; + 4019A2642E40ECFA00CE70A4 /* Effects */ = { + isa = PBXGroup; + children = ( + 4019A2672E40ED5900CE70A4 /* 
RTCAudioStore+InterruptionEffect.swift */, + 4019A26E2E40FC8F00CE70A4 /* RTCAudioStore+RouteChangeEffect.swift */, + ); + path = Effects; + sourceTree = ""; + }; + 4019A2812E4352CB00CE70A4 /* AudioSessions */ = { + isa = PBXGroup; + children = ( + 4019A27F2E43529000CE70A4 /* AudioSessionProtocol.swift */, + 4019A2822E4352DF00CE70A4 /* RTCAudioSession+AudioSessionProtocol.swift */, + ); + path = AudioSessions; + sourceTree = ""; + }; 401A64A92A9DF7E600534ED1 /* Chat */ = { isa = PBXGroup; children = ( @@ -3813,14 +3918,13 @@ 4067F3062CDA32F0002E28BD /* AudioSession */ = { isa = PBXGroup; children = ( + 4019A24E2E40E08200CE70A4 /* RTCAudioStore */, 40802AE72DD2A7BA00B9F970 /* Protocols */, 40F101632D5A322E00C49481 /* Policies */, 4067F3092CDA330E002E28BD /* Extensions */, 40149DCA2B7E813500473176 /* AudioRecorder */, 40F101692D5A653B00C49481 /* AudioSessionConfiguration.swift */, - 4067F3142CDA4094002E28BD /* StreamRTCAudioSession.swift */, - 4067F3162CDA40CC002E28BD /* StreamAudioSession.swift */, - 40E741F92D54E6F40044C955 /* RTCAudioSessionDelegatePublisher.swift */, + 4019A26C2E40F48300CE70A4 /* CallAudioSession.swift */, 4067F3072CDA32FA002E28BD /* StreamAudioSessionAdapterDelegate.swift */, ); path = AudioSession; @@ -3829,7 +3933,6 @@ 4067F3092CDA330E002E28BD /* Extensions */ = { isa = PBXGroup; children = ( - 40149DCF2B7E839500473176 /* AVAudioSession+RequestRecordPermission.swift */, 4067F3122CDA33C4002E28BD /* AVAudioSession.CategoryOptions+Convenience.swift */, 4067F30E2CDA3394002E28BD /* AVAudioSessionCategoryOptions+Convenience.swift */, 4067F30C2CDA3377002E28BD /* AVAudioSessionRouteChangeReason+Convenience.swift */, @@ -3844,11 +3947,9 @@ 4067F31A2CDA55D1002E28BD /* AudioSession */ = { isa = PBXGroup; children = ( + 40D75C4F2E437FAE000E0438 /* RTCAudioStore */, 40D287392DB12CAA006AD8C7 /* Policies */, 40F1017A2D5CE7E400C49481 /* Extensions */, - 40F1017D2D5CF32E00C49481 /* RTCAudioSessionDelegatePublisher_Tests.swift */, - 4067F31B2CDA55D6002E28BD /* 
StreamRTCAudioSession_Tests.swift */, - 4067F31D2CDA5A53002E28BD /* StreamAudioSession_Tests.swift */, ); path = AudioSession; sourceTree = ""; @@ -4952,6 +5053,34 @@ path = Client; sourceTree = ""; }; + 40D75C4F2E437FAE000E0438 /* RTCAudioStore */ = { + isa = PBXGroup; + children = ( + 40D75C5D2E438A9C000E0438 /* Reducers */, + 40D75C502E437FB8000E0438 /* Effects */, + 40D75C622E4396D2000E0438 /* RTCAudioStore_Tests.swift */, + ); + path = RTCAudioStore; + sourceTree = ""; + }; + 40D75C502E437FB8000E0438 /* Effects */ = { + isa = PBXGroup; + children = ( + 40D75C512E437FBC000E0438 /* InterruptionEffect_Tests.swift */, + 40D75C532E438317000E0438 /* RouteChangeEffect_Tests.swift */, + ); + path = Effects; + sourceTree = ""; + }; + 40D75C5D2E438A9C000E0438 /* Reducers */ = { + isa = PBXGroup; + children = ( + 40D75C5E2E438AC0000E0438 /* CallKitAudioSessionReducer_Tests.swift */, + 40D75C602E438BBF000E0438 /* RTCAudioSessionReducer_Tests.swift */, + ); + path = Reducers; + sourceTree = ""; + }; 40D9463F2AA5ECDC00C8861B /* CodeScanner */ = { isa = PBXGroup; children = ( @@ -5939,6 +6068,7 @@ 8456E6D9287EC46D004E180E /* Models */ = { isa = PBXGroup; children = ( + 4019A2792E42475300CE70A4 /* JoinSource.swift */, 403CA9B32CC7BAF0001A88C2 /* VideoCodec.swift */, 4029E94D2CB8160E00E1D571 /* IncomingVideoQualitySettings.swift */, 40C2B5B92C2C41CF00EC2C2D /* Extensions */, @@ -6085,6 +6215,8 @@ 8492B87629081CE700006649 /* Mock */ = { isa = PBXGroup; children = ( + 40D75C572E438607000E0438 /* MockAVAudioSessionRouteDescription.swift */, + 40D75C552E4385FE000E0438 /* MockAVAudioSessionPortDescription.swift */, 40D36ADF2DDE019F00972D75 /* MockWebRTCStatsReporter.swift */, 40D36AE32DDE02D100972D75 /* MockWebRTCStatsCollector.swift */, 40D36ABD2DDDB77A00972D75 /* MockWebRTCStatsAdapter.swift */, @@ -6096,7 +6228,6 @@ 40B48C482D14E822002C4EAB /* MockStreamVideoCapturer.swift */, 40B48C292D14CF3B002C4EAB /* MockRTCRtpCodecCapability.swift */, 40B48C2F2D14D308002C4EAB /* 
MockRTCRtpEncodingParameters.swift */, - 4067F3182CDA469C002E28BD /* MockAudioSession.swift */, 40802AEA2DD2A92E00B9F970 /* MockAVAudioSession.swift */, 409774AD2CC1979F00E0D3EE /* MockCallController.swift */, 404A81302DA3C5F0001F7FA8 /* MockDefaultAPI.swift */, @@ -6141,6 +6272,9 @@ 40AAD1902D2EF18A00D10330 /* MockCaptureDevice.swift */, 40F1017F2D5D078800C49481 /* MockAudioSessionPolicy.swift */, 40B3E53D2DBBB0AB00DE8F50 /* CurrentDevice+Dummy.swift */, + 40D75C5B2E438633000E0438 /* AVAudioSessionRouteDescription+Dummy.swift */, + 4019A2862E43565A00CE70A4 /* MockAudioSession.swift */, + 4019A2882E4357B200CE70A4 /* MockRTCAudioStore.swift */, ); path = Mock; sourceTree = ""; @@ -7632,7 +7766,6 @@ 8449824E2C738A830029734D /* StopAllRTMPBroadcastsResponse.swift in Sources */, 40E363522D0A11620028C52A /* AVCaptureDevice+OutputFormat.swift in Sources */, 84D2E37729DC856D001D2118 /* CallMemberUpdatedEvent.swift in Sources */, - 40149DD02B7E839500473176 /* AVAudioSession+RequestRecordPermission.swift in Sources */, 40DFA88D2CC10FF3003DCE05 /* Stream_Video_Sfu_Models_AppleThermalState+Convenience.swift in Sources */, 8409465B29AF4EEC007AF5BF /* ListRecordingsResponse.swift in Sources */, 8490DD21298D4ADF007E53D2 /* StreamJsonDecoder.swift in Sources */, @@ -7646,6 +7779,7 @@ 402D0E882D0C94CD00E9B83F /* RTCAudioTrack+Clone.swift in Sources */, 84DC389C29ADFCFD00946713 /* GetOrCreateCallResponse.swift in Sources */, 402D0E8A2D0C94E600E9B83F /* RTCVideoTrack+Clone.swift in Sources */, + 4019A2632E40EB6000CE70A4 /* RTCAudioStoreMiddleware.swift in Sources */, 406B3BD92C8F337000FC93A1 /* MediaAdapting.swift in Sources */, 40E363622D0A1C2E0028C52A /* SimulatorCaptureHandler.swift in Sources */, 84DCA2242A3A0F0D000C3411 /* HTTPClient.swift in Sources */, @@ -7681,6 +7815,7 @@ 4159F1822C86FA41002B94D3 /* UserEventPayload.swift in Sources */, 40BBC4B72C627E59002AEF92 /* VideoMediaAdapter.swift in Sources */, 84D91E9D2C7CB0AA00B163A0 /* CallRtmpBroadcastFailedEvent.swift in 
Sources */, + 4019A25A2E40E2A600CE70A4 /* RTCAudioStoreAction.swift in Sources */, 84A737D028F4716E001A6769 /* models.pb.swift in Sources */, 408721E42E127396006A68CB /* RepeatingTimer.swift in Sources */, 846D16222A52B8D00036CE4C /* MicrophoneManager.swift in Sources */, @@ -7735,6 +7870,7 @@ 40ADB85C2D64B00E00B06AAF /* CGSize+Hashable.swift in Sources */, 40E3633E2D09EF560028C52A /* CMVideoDimensions+DefaultValues.swift in Sources */, 40382F2B2C88B84800C2D00F /* Stream_Video_Sfu_Event_SfuEvent.OneOf_EventPayload+Payload.swift in Sources */, + 4019A27A2E42475300CE70A4 /* JoinSource.swift in Sources */, 40FB8FF82D661E2000F4390A /* String+OpenApiExtensions.swift in Sources */, 84BAD7842A6C01AF00733156 /* BroadcastBufferReader.swift in Sources */, 40034C312CFE168D00A318B1 /* StreamLocaleProvider.swift in Sources */, @@ -7804,6 +7940,7 @@ 4061288B2CF33088007F5CDC /* SupportedPrefix.swift in Sources */, 40BBC4C02C629408002AEF92 /* RTCTemporaryPeerConnection.swift in Sources */, 84B0091B2A4C521100CF1FA7 /* Retries.swift in Sources */, + 4019A2802E43529000CE70A4 /* AudioSessionProtocol.swift in Sources */, 405BFFD22DBB8BE8005B2BE4 /* ProximityManager.swift in Sources */, 84DC38CD29ADFCFD00946713 /* SendEventRequest.swift in Sources */, 84DC38B029ADFCFD00946713 /* MuteUsersRequest.swift in Sources */, @@ -7812,7 +7949,6 @@ 40C4DF492C1C2C210035DBC2 /* Publisher+WeakAssign.swift in Sources */, 4157FF912C9AC9EC0093D839 /* RTMPBroadcastRequest.swift in Sources */, 844982472C738A830029734D /* DeleteRecordingResponse.swift in Sources */, - 4067F3172CDA40CC002E28BD /* StreamAudioSession.swift in Sources */, 40AB34DA2C5D5A7B00B5B6B3 /* WebRTCStatsReporter.swift in Sources */, 408679F72BD12F1000D027E0 /* AudioFilter.swift in Sources */, 8456E6D2287EC343004E180E /* ConsoleLogDestination.swift in Sources */, @@ -7831,7 +7967,6 @@ 40E363712D0A27640028C52A /* BroadcastCaptureHandler.swift in Sources */, 848CCCEB2AB8ED8F002E83A2 /* CallHLSBroadcastingStoppedEvent.swift in Sources */, 
40BBC4C42C638789002AEF92 /* RTCPeerConnectionCoordinator.swift in Sources */, - 4067F3152CDA4094002E28BD /* StreamRTCAudioSession.swift in Sources */, 40BBC4C62C638915002AEF92 /* WebRTCCoordinator.swift in Sources */, 40802AE92DD2A7C700B9F970 /* AVAudioSessionProtocol.swift in Sources */, 841BAA392BD15CDE000C73E4 /* UserSessionStats.swift in Sources */, @@ -7853,6 +7988,7 @@ 40AD64B12DC15BEB0077AE15 /* WebRTCTrace.swift in Sources */, 40034C202CFDABE600A318B1 /* PublishOptions.swift in Sources */, 84DC389329ADFCFD00946713 /* ScreensharingSettingsRequest.swift in Sources */, + 4019A2782E42225800CE70A4 /* CallKitAudioSessionReducer.swift in Sources */, 84EBA4A22A72B81100577297 /* BroadcastBufferConnection.swift in Sources */, 40FB150A2BF74C1300D5E580 /* CallCache.swift in Sources */, 40FB02012BAC8A4A00A1C206 /* CallKitService.swift in Sources */, @@ -7862,6 +7998,7 @@ 84DC389F29ADFCFD00946713 /* JoinCallResponse.swift in Sources */, 84A7E1AE2883E6B300526C98 /* HTTPUtils.swift in Sources */, 4065839D2B877B6500B4F979 /* UIDevice+NeuralEngine.swift in Sources */, + 4019A26F2E40FC8F00CE70A4 /* RTCAudioStore+RouteChangeEffect.swift in Sources */, 40FB15142BF77D9000D5E580 /* Call+Stage.swift in Sources */, 84DC38D229ADFCFD00946713 /* UpdatedCallPermissionsEvent.swift in Sources */, 40429D5B2C779ADB00AC7FFF /* SFUEventAdapter.swift in Sources */, @@ -7909,19 +8046,21 @@ 4028FEA52DC5046F001F9DC3 /* WebRTCStatsAdapter.swift in Sources */, 40FB02032BAC93A800A1C206 /* CallKitAdapter.swift in Sources */, 402F04AB2B70ED8600CA1986 /* StreamCallStatisticsFormatter.swift in Sources */, + 4019A2832E4352DF00CE70A4 /* RTCAudioSession+AudioSessionProtocol.swift in Sources */, 40382F502C8B3DAE00C2D00F /* StreamRTCPeerConnection.swift in Sources */, 40E18AAD2CD51E5900A65C9F /* RecursiveQueue.swift in Sources */, 842B8E242A2DFED900863A87 /* CallSessionParticipantJoinedEvent.swift in Sources */, 40AD64B82DC16AB10077AE15 /* WebRTCTracesAdapter.swift in Sources */, 40BBC4D42C639371002AEF92 /* 
WebRTCCoordinator+Connected.swift in Sources */, 40D36AC02DDDB88200972D75 /* WebRTCStatsAdapting.swift in Sources */, + 4019A25E2E40E45D00CE70A4 /* RTCAudioSessionReducer.swift in Sources */, 848CCCE62AB8ED8F002E83A2 /* BroadcastSettingsResponse.swift in Sources */, + 4019A2502E40E08B00CE70A4 /* RTCAudioStore.swift in Sources */, 40FB8FF62D661DC400F4390A /* Call+Identifiable.swift in Sources */, 4039F0CC2D0241120078159E /* AudioCodec.swift in Sources */, 4028FE982DC4F638001F9DC3 /* ConsumableBucket.swift in Sources */, 4097B3832BF4E37B0057992D /* OnChangeViewModifier_iOS13.swift in Sources */, 84A7E1862883632100526C98 /* ConnectionStatus.swift in Sources */, - 40E741FA2D54E6F40044C955 /* RTCAudioSessionDelegatePublisher.swift in Sources */, 841BAA472BD15CDE000C73E4 /* CallTranscriptionReadyEvent.swift in Sources */, 4012B1922BFCA518006B0031 /* Call+AcceptingStage.swift in Sources */, 841BAA352BD15CDE000C73E4 /* CallTranscriptionStartedEvent.swift in Sources */, @@ -8038,6 +8177,7 @@ 40E741FF2D553ACD0044C955 /* CurrentDevice.swift in Sources */, 84DC389B29ADFCFD00946713 /* PermissionRequestEvent.swift in Sources */, 406B3C432C91E41400FC93A1 /* WebRTCAuthenticator.swift in Sources */, + 4019A2682E40ED5900CE70A4 /* RTCAudioStore+InterruptionEffect.swift in Sources */, 84BAD77A2A6BFEF900733156 /* BroadcastBufferUploader.swift in Sources */, 40C4DF4B2C1C2C330035DBC2 /* ParticipantAutoLeavePolicy.swift in Sources */, 408521E52D661C7600F012B8 /* RawJSON+Double.swift in Sources */, @@ -8049,6 +8189,7 @@ 40A0E9622B88D3DC0089E8D3 /* UIInterfaceOrientation+CGOrientation.swift in Sources */, 842B8E222A2DFED900863A87 /* CallNotificationEvent.swift in Sources */, 408721E82E1273CE006A68CB /* DefaultTimer.swift in Sources */, + 4019A26D2E40F48300CE70A4 /* CallAudioSession.swift in Sources */, 406B3BE12C8F356A00FC93A1 /* RTCIceCandidate+Convenience.swift in Sources */, 845C09842C0DEB5C00F725B3 /* LimitsSettingsRequest.swift in Sources */, 4159F1742C86FA41002B94D3 /* 
MediaPubSubHint.swift in Sources */, @@ -8057,6 +8198,7 @@ 40BBC4DC2C63A4C8002AEF92 /* WebRTCCoordinator+Leaving.swift in Sources */, 40BBC4E62C63A619002AEF92 /* WebRTCCoordinator+Rejoining.swift in Sources */, 8490032229D308A000AD9BB4 /* AudioSettingsRequest.swift in Sources */, + 4019A2572E40E27000CE70A4 /* RTCAudioStore+State.swift in Sources */, 40AB34B62C5D089E00B5B6B3 /* Task+Timeout.swift in Sources */, 40F101682D5A653200C49481 /* AudioSessionPolicy.swift in Sources */, 408721F72E127551006A68CB /* TimerPublisher.swift in Sources */, @@ -8086,6 +8228,7 @@ 84A7E1892883638200526C98 /* WebSocketEngine.swift in Sources */, 40BBC48B2C623C6E002AEF92 /* ICEAdapter.swift in Sources */, 4065838A2B87695500B4F979 /* BlurBackgroundVideoFilter.swift in Sources */, + 4019A2542E40E25000CE70A4 /* RTCAudioStoreAction+Generic.swift in Sources */, 403793C52D35196600C752DF /* StreamAppStateAdapter.swift in Sources */, 84DC38B429ADFCFD00946713 /* ICEServer.swift in Sources */, 4159F1902C86FA41002B94D3 /* PublisherAggregateStats.swift in Sources */, @@ -8095,6 +8238,7 @@ 406583922B877A1600B4F979 /* BackgroundImageFilterProcessor.swift in Sources */, 8490DD23298D5330007E53D2 /* Data+Gzip.swift in Sources */, 84DC38B829ADFCFD00946713 /* UpdateUserPermissionsResponse.swift in Sources */, + 4019A27E2E43398C00CE70A4 /* RTCAudioStoreAction+CallKit.swift in Sources */, 84DC38C029ADFCFD00946713 /* UserRequest.swift in Sources */, 84DC389629ADFCFD00946713 /* EndCallResponse.swift in Sources */, 847BE09C29DADE0100B55D21 /* Call.swift in Sources */, @@ -8161,10 +8305,12 @@ 84FCE4512CE208C400649F86 /* StartClosedCaptionsResponse.swift in Sources */, 841FF51B2A5FED4800809BBB /* SystemEnvironment+XStreamClient.swift in Sources */, 84DC38A329ADFCFD00946713 /* MuteUsersResponse.swift in Sources */, + 4019A27C2E43397100CE70A4 /* RTCAudioStoreAction+AudioSession.swift in Sources */, 84DC38BF29ADFCFD00946713 /* ScreensharingSettings.swift in Sources */, 4092517E2E05AFF000DC0FB3 /* 
MidStereoInformation.swift in Sources */, 843DAB9929E695CF00E0EB63 /* CreateGuestResponse.swift in Sources */, 84DC389229ADFCFD00946713 /* RequestPermissionRequest.swift in Sources */, + 4019A25C2E40E2E800CE70A4 /* RTCAudioStoreReducer.swift in Sources */, 40E7A45B2E29495500E8AB8B /* WebRTCLogger.swift in Sources */, 84C28C922A84D16A00742E33 /* GoLiveRequest.swift in Sources */, 84FC2C1328ACDF3A00181490 /* ProtoModel.swift in Sources */, @@ -8203,7 +8349,6 @@ 40FAAC8E2DDCB5EF007BF93A /* DateMillisecondsSince1970_Tests.swift in Sources */, 40D36AC52DDDF2E600972D75 /* WebRTCTracesAdapter_Tests.swift in Sources */, 40F017472BBEEF5100E89FD1 /* ThumbnailResponse+Dummy.swift in Sources */, - 4067F31E2CDA5A56002E28BD /* StreamAudioSession_Tests.swift in Sources */, 40AF6A412C9356B700BA2935 /* WebRTCCoordinatorStateMachine_MigratingStageTests.swift in Sources */, 408CE0F72BD95EB60052EC3A /* VideoConfig+Dummy.swift in Sources */, 84F58B7029EE914400010C4C /* BackgroundTaskScheduler_Tests.swift in Sources */, @@ -8225,7 +8370,6 @@ 406B3C4C2C91EFA700FC93A1 /* MockCallAuthenticator.swift in Sources */, 40F0174D2BBEEFD500E89FD1 /* TranscriptionSettings+Dummy.swift in Sources */, 406B3C4A2C91EE9700FC93A1 /* MockWebRTCCoordinatorStack.swift in Sources */, - 4067F3192CDA469F002E28BD /* MockAudioSession.swift in Sources */, 40AB34C92C5D3F2E00B5B6B3 /* ParticipantsStats+Dummy.swift in Sources */, 84F58B7629EE92BF00010C4C /* UniqueValues.swift in Sources */, 40B48C512D14F7AE002C4EAB /* SDPParser_Tests.swift in Sources */, @@ -8235,12 +8379,16 @@ 401C1EF22D4945BE00304609 /* ClosedCaptionsAdapterTests.swift in Sources */, 40B48C1D2D14CA25002C4EAB /* Array_PrepareTests.swift in Sources */, 40FAAC912DDCB972007BF93A /* StreamRTCPeerConnection_EventsTests.swift in Sources */, + 40D75C5C2E438633000E0438 /* AVAudioSessionRouteDescription+Dummy.swift in Sources */, 40C9E4592C98B1A900802B28 /* WebRTCStateAdapter_Tests.swift in Sources */, + 4019A2892E4357B200CE70A4 /* MockRTCAudioStore.swift in 
Sources */, 40D36AC22DDDF0F400972D75 /* WebRTCTrackStorage_Tests.swift in Sources */, 40B48C212D14CB1B002C4EAB /* Int_DefaultValuesTests.swift in Sources */, 40F017612BBEF15E00E89FD1 /* CallParticipantResponse+Dummy.swift in Sources */, 40034C2E2CFE15AC00A318B1 /* CallKitRegionBasedAvailabilityPolicy.swift in Sources */, 4045D9DB2DAD57570077A660 /* CallSettingsResponse+SettingsPriorityTests.swift in Sources */, + 40D75C5F2E438AC0000E0438 /* CallKitAudioSessionReducer_Tests.swift in Sources */, + 40D75C582E438607000E0438 /* MockAVAudioSessionRouteDescription.swift in Sources */, 406B3C552C92031000FC93A1 /* WebRTCCoordinatorStateMachine_JoiningStageTests.swift in Sources */, 40C9E4642C99886900802B28 /* WebRTCCoorindator_Tests.swift in Sources */, 40D36ADA2DDE008400972D75 /* MockWebRTCTracesAdapter.swift in Sources */, @@ -8250,6 +8398,7 @@ 842747FA29EEEC5A00E063AD /* EventLogger.swift in Sources */, 843061002D38203D000E14D5 /* SessionSettingsResponse+Dummy.swift in Sources */, 40B48C582D1588DB002C4EAB /* Stream_Video_Sfu_Models_TrackInfo+Dummy.swift in Sources */, + 40D75C612E438BBF000E0438 /* RTCAudioSessionReducer_Tests.swift in Sources */, 402B34C32DCDF98300574663 /* WebRTCUpdateSubscriptionsAdapter_Tests.swift in Sources */, 40B48C282D14CDD5002C4EAB /* StreamVideoSfuModelsCodec_ConvenienceTests.swift in Sources */, 40E9B3B52BCD93F500ACF18F /* Credentials+Dummy.swift in Sources */, @@ -8283,6 +8432,7 @@ 40F017572BBEF07B00E89FD1 /* GeofenceSettings+Dummy.swift in Sources */, 40C9E4532C9888C100802B28 /* WebRTCMigrationStatusObserver_Tests.swift in Sources */, 400C9FC42D9D0BDA00DB26DC /* OperationQueue_TaskOperationTests.swift in Sources */, + 40D75C562E4385FE000E0438 /* MockAVAudioSessionPortDescription.swift in Sources */, 40DE867D2BBEAA8600E88D8A /* CallKitPushNotificationAdapterTests.swift in Sources */, 40483CBA2C9B1E6600B4FCA8 /* MockWebRTCCoordinatorFactory.swift in Sources */, 845E31062A7121D6004DC470 /* BroadcastObserver_Tests.swift in Sources */, @@ -8298,6 
+8448,7 @@ 404A81342DA3CB66001F7FA8 /* CallStateMachine_RejectedStageTests.swift in Sources */, 40B48C342D14D3E6002C4EAB /* StreamVideoSfuSignalTrackSubscriptionDetails_ConvenienceTests.swift in Sources */, 405616F32E0C0E7200442FF2 /* ICEConnectionStateAdapter_Tests.swift in Sources */, + 40D75C542E438317000E0438 /* RouteChangeEffect_Tests.swift in Sources */, 40B48C4F2D14F77B002C4EAB /* SupportedPrefix_Tests.swift in Sources */, 84F58B8129EE9C4900010C4C /* WebSocketPingController_Delegate.swift in Sources */, 400C9FCD2D9D648100DB26DC /* RTCConfiguration_DefaultsTests.swift in Sources */, @@ -8328,7 +8479,6 @@ 842747EC29EED59000E063AD /* JSONDecoder_Tests.swift in Sources */, 406B3C142C8F870400FC93A1 /* MockActiveCallProvider.swift in Sources */, 40C2B5C62C2D7AED00EC2C2D /* RejectionReasonProvider_Tests.swift in Sources */, - 40F1017E2D5CF32E00C49481 /* RTCAudioSessionDelegatePublisher_Tests.swift in Sources */, 841FF5172A5EA7F600809BBB /* CallParticipants_Tests.swift in Sources */, 40F017452BBEEE6D00E89FD1 /* UserResponse+Dummy.swift in Sources */, 40F017422BBEC81C00E89FD1 /* CallKitServiceTests.swift in Sources */, @@ -8342,7 +8492,6 @@ 40C9E4422C943DC000802B28 /* WebRTCCoordinatorStateMachine_ErrorStageTests.swift in Sources */, 842747E529EECD0100E063AD /* ClientError_Tests.swift in Sources */, 40F017552BBEF03E00E89FD1 /* RecordSettingsResponse+Dummy.swift in Sources */, - 4067F31C2CDA55D6002E28BD /* StreamRTCAudioSession_Tests.swift in Sources */, 8446AF912A4D84F4002AB07B /* Retries_Tests.swift in Sources */, 406B3C272C904F7100FC93A1 /* LocalScreenShareMediaAdapter_Tests.swift in Sources */, 84F58B7429EE928400010C4C /* TestError.swift in Sources */, @@ -8409,6 +8558,7 @@ 40FE5EBB2C9C7D40006B0881 /* StreamVideoCaptureHandler_Tests.swift in Sources */, 4029E95C2CB9449900E1D571 /* CallParticipant_TrackSubscriptionTests.swift in Sources */, 40AB34BF2C5D33CF00B5B6B3 /* SFUAdapter_Tests.swift in Sources */, + 4019A2872E43565A00CE70A4 /* MockAudioSession.swift in 
Sources */, 40F017532BBEF01F00E89FD1 /* RingSettings+Dummy.swift in Sources */, 40F017732BBEF28600E89FD1 /* CallAcceptedEvent+Dummy.swift in Sources */, 82E3BA532A0BAF4B001AB93E /* WebSocketClientEnvironment_Mock.swift in Sources */, @@ -8443,6 +8593,7 @@ 4052BF552DBA830D0085AFA5 /* MockAppStateAdapter.swift in Sources */, 40D36ABE2DDDB77A00972D75 /* MockWebRTCStatsAdapter.swift in Sources */, 401338782BF248B9007318BD /* MockStreamVideo.swift in Sources */, + 40D75C632E4396D2000E0438 /* RTCAudioStore_Tests.swift in Sources */, 84D114DA29F092E700BCCB0C /* CallController_Tests.swift in Sources */, 40FAAC8B2DDCB488007BF93A /* MockConsumableBucketItemTransformer.swift in Sources */, 40AF6A432C93585B00BA2935 /* WebRTCCoordinatorStateMachine_MigratedStageTests.swift in Sources */, @@ -8465,6 +8616,7 @@ 40F017512BBEF00500E89FD1 /* ScreensharingSettings+Dummy.swift in Sources */, 40AF6A492C935EB600BA2935 /* WebRTCCoordinatorStateMachine_CleanUpStageTests.swift in Sources */, 403FB14E2BFE18D10047A696 /* StreamStateMachine_Tests.swift in Sources */, + 40D75C522E437FBC000E0438 /* InterruptionEffect_Tests.swift in Sources */, 84F58B8329EE9E6400010C4C /* WebSocketClient_Tests.swift in Sources */, 40F017632BBEF17600E89FD1 /* CallSessionResponse+Dummy.swift in Sources */, 404A812C2DA05539001F7FA8 /* CallStateMachine_ErrorStageTests.swift in Sources */, diff --git a/StreamVideoTests/Call/Call_Tests.swift b/StreamVideoTests/Call/Call_Tests.swift index 791573a38..b2e16644a 100644 --- a/StreamVideoTests/Call/Call_Tests.swift +++ b/StreamVideoTests/Call/Call_Tests.swift @@ -475,7 +475,45 @@ final class Call_Tests: StreamVideoTestCase { XCTAssertEqual(mockCallController.timesCalled(.join), 1) } - + + func test_join_stateContainsJoinSource_joinSourceWasPassedToCallController() async throws { + let mockCallController = MockCallController() + let call = MockCall(.dummy(callController: mockCallController)) + call.stub(for: \.state, with: .init()) + mockCallController.stub(for: .join, with: 
JoinCallResponse.dummy()) + + call.state.joinSource = .callKit + _ = try await call.join() + + XCTAssertEqual( + mockCallController.recordedInputPayload( + (Bool, CallSettings?, CreateCallOptions?, Bool, Bool, JoinSource).self, + for: .join + )?.first?.5, + .callKit + ) + } + + func test_join_stateDoesNotJoinSource_joinSourceDefaultsToInAppAndWasPassedToCallController() async throws { + let mockCallController = MockCallController() + let call = MockCall(.dummy(callController: mockCallController)) + call.stub(for: \.state, with: .init()) + mockCallController.stub(for: .join, with: JoinCallResponse.dummy()) + + call.state.joinSource = nil + _ = try await call.join() + + XCTAssertEqual( + mockCallController.recordedInputPayload( + (Bool, CallSettings?, CreateCallOptions?, Bool, Bool, JoinSource).self, + for: .join + )?.first?.5, + .inApp + ) + } + + // MARK: - updateParticipantsSorting + func test_call_customSorting() async throws { // Given let nameComparator: StreamSortComparator = { diff --git a/StreamVideoTests/CallKit/CallKitServiceTests.swift b/StreamVideoTests/CallKit/CallKitServiceTests.swift index 6a6ea4562..2b8a6bf32 100644 --- a/StreamVideoTests/CallKit/CallKitServiceTests.swift +++ b/StreamVideoTests/CallKit/CallKitServiceTests.swift @@ -19,6 +19,7 @@ final class CallKitServiceTests: XCTestCase, @unchecked Sendable { private var callId: String = String(UUID().uuidString.replacingOccurrences(of: "-", with: "").prefix(10)) private var localizedCallerName: String! = "Test Caller" private var callerId: String! = "test@example.com" + private var mockAudioStore: MockRTCAudioStore! = .init() private lazy var mockedStreamVideo: MockStreamVideo! 
= MockStreamVideo( stubbedProperty: [ MockStreamVideo.propertyKey(for: \.state): MockStreamVideo.State(user: user) @@ -37,6 +38,7 @@ final class CallKitServiceTests: XCTestCase, @unchecked Sendable { override func setUp() { super.setUp() InjectedValues[\.uuidFactory] = uuidFactory + mockAudioStore.makeShared() subject.callController = callController subject.callProvider = callProvider callProvider.setDelegate(subject, queue: nil) @@ -52,9 +54,31 @@ final class CallKitServiceTests: XCTestCase, @unchecked Sendable { mockedStreamVideo = nil localizedCallerName = nil callerId = nil + mockAudioStore = nil super.tearDown() } + // MARK: - didUpdate(streamVideo:) + + func test_didUpdateStreamVideo_streamVideoIsNotNil_callKitReducerWasAdded() async { + subject.streamVideo = mockedStreamVideo + + await fulfillment { + self.mockAudioStore.audioStore.reducers.first { $0 is CallKitAudioSessionReducer } != nil + } + } + + func test_didUpdateStreamVideo_streamVideoIsNotNilInitiallyAndThenBecomesNil_callKitReducerWasRemoved() async { + subject.streamVideo = mockedStreamVideo + + await wait(for: 0.2) + subject.streamVideo = nil + + await fulfillment { + self.mockAudioStore.audioStore.reducers.first { $0 is CallKitAudioSessionReducer } == nil + } + } + // MARK: - reportIncomingCall @MainActor @@ -764,10 +788,18 @@ final class CallKitServiceTests: XCTestCase, @unchecked Sendable { call.state.callSettings = .init(speakerOn: true) let audioSession = AVAudioSession.sharedInstance() + mockAudioStore.session.isActive = true subject.provider(callProvider, didActivate: audioSession) - XCTAssertEqual(call.timesCalled(.callKitActivated), 1) - XCTAssertTrue(call.recordedInputPayload(AVAudioSession.self, for: .callKitActivated)?.first === audioSession) + await fulfillment { self.mockAudioStore.audioStore.state.isActive } + XCTAssertEqual(mockAudioStore.session.timesCalled(.audioSessionDidActivate), 1) + XCTAssertTrue( + mockAudioStore.session.recordedInputPayload( + AVAudioSession.self, + for: 
.audioSessionDidActivate + )?.first === audioSession + ) + XCTAssertTrue(mockAudioStore.audioStore.state.isActive) } // MARK: - Private Helpers diff --git a/StreamVideoTests/CallStateMachine/CallStateMachine/Stages/CallStateMachine_JoiningStageTests.swift b/StreamVideoTests/CallStateMachine/CallStateMachine/Stages/CallStateMachine_JoiningStageTests.swift index 6a2300c58..d5dc2bd0e 100644 --- a/StreamVideoTests/CallStateMachine/CallStateMachine/Stages/CallStateMachine_JoiningStageTests.swift +++ b/StreamVideoTests/CallStateMachine/CallStateMachine/Stages/CallStateMachine_JoiningStageTests.swift @@ -17,6 +17,7 @@ final class StreamCallStateMachineStageJoiningStage_Tests: StreamVideoTestCase, create: true, ring: true, notify: true, + source: .inApp, deliverySubject: .init() ) ) @@ -100,6 +101,7 @@ final class StreamCallStateMachineStageJoiningStage_Tests: StreamVideoTestCase, options: .init(memberIds: [.unique]), ring: true, notify: false, + source: .inApp, deliverySubject: .init(), retryPolicy: .init(maxRetries: 0, delay: { _ in 0 }) ) @@ -122,6 +124,7 @@ final class StreamCallStateMachineStageJoiningStage_Tests: StreamVideoTestCase, options: .init(memberIds: [.unique]), ring: true, notify: false, + source: .inApp, deliverySubject: .init() ) ) @@ -148,6 +151,7 @@ final class StreamCallStateMachineStageJoiningStage_Tests: StreamVideoTestCase, options: .init(memberIds: [.unique]), ring: true, notify: false, + source: .inApp, deliverySubject: .init() ) ) @@ -173,6 +177,7 @@ final class StreamCallStateMachineStageJoiningStage_Tests: StreamVideoTestCase, options: .init(memberIds: [.unique]), ring: true, notify: false, + source: .inApp, deliverySubject: .init() ) ) @@ -198,6 +203,7 @@ final class StreamCallStateMachineStageJoiningStage_Tests: StreamVideoTestCase, options: .init(memberIds: [.unique]), ring: true, notify: false, + source: .inApp, deliverySubject: .init() ) ) @@ -231,6 +237,7 @@ final class StreamCallStateMachineStageJoiningStage_Tests: StreamVideoTestCase, 
options: .init(memberIds: [.unique]), ring: true, notify: false, + source: .inApp, deliverySubject: deliverySubject ) ) @@ -259,6 +266,7 @@ final class StreamCallStateMachineStageJoiningStage_Tests: StreamVideoTestCase, options: .init(memberIds: [.unique]), ring: true, notify: false, + source: .inApp, deliverySubject: .init() ) ) @@ -284,6 +292,7 @@ final class StreamCallStateMachineStageJoiningStage_Tests: StreamVideoTestCase, options: .init(memberIds: [.unique]), ring: true, notify: false, + source: .inApp, deliverySubject: .init(), retryPolicy: .init(maxRetries: 2, delay: { _ in 0 }) ) @@ -321,6 +330,7 @@ final class StreamCallStateMachineStageJoiningStage_Tests: StreamVideoTestCase, options: .init(memberIds: [.unique]), ring: true, notify: false, + source: .inApp, deliverySubject: deliverySubject, retryPolicy: .init(maxRetries: 2, delay: { _ in 0 }) ) @@ -376,7 +386,7 @@ final class StreamCallStateMachineStageJoiningStage_Tests: StreamVideoTestCase, iteration: Int = 0, context: Call.StateMachine.Stage.Context ) throws { - let joinInputType = (Bool, CallSettings?, CreateCallOptions?, Bool, Bool).self + let joinInputType = (Bool, CallSettings?, CreateCallOptions?, Bool, Bool, JoinSource).self let recordedInput = try XCTUnwrap( callController.recordedInputPayload( joinInputType, diff --git a/StreamVideoTests/Controllers/CallController_Tests.swift b/StreamVideoTests/Controllers/CallController_Tests.swift index ab98993b1..9b9006316 100644 --- a/StreamVideoTests/Controllers/CallController_Tests.swift +++ b/StreamVideoTests/Controllers/CallController_Tests.swift @@ -82,7 +82,8 @@ final class CallController_Tests: StreamVideoTestCase, @unchecked Sendable { callSettings: callSettings, options: options, ring: true, - notify: true + notify: true, + source: .callKit ) } } @@ -91,6 +92,7 @@ final class CallController_Tests: StreamVideoTestCase, @unchecked Sendable { XCTAssertEqual(expectedStage.options?.team, options.team) XCTAssertTrue(expectedStage.ring) 
XCTAssertTrue(expectedStage.notify) + XCTAssertEqual(expectedStage.context.joinSource, .callKit) await self.assertEqualAsync( await self .mockWebRTCCoordinatorFactory diff --git a/StreamVideoTests/Mock/AVAudioSessionRouteDescription+Dummy.swift b/StreamVideoTests/Mock/AVAudioSessionRouteDescription+Dummy.swift new file mode 100644 index 000000000..3af412fd3 --- /dev/null +++ b/StreamVideoTests/Mock/AVAudioSessionRouteDescription+Dummy.swift @@ -0,0 +1,17 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation + +extension AVAudioSessionRouteDescription { + static func dummy( + input: AVAudioSession.Port = .builtInMic, + output: AVAudioSession.Port = .builtInReceiver + ) -> AVAudioSessionRouteDescription { + MockAVAudioSessionRouteDescription( + inputs: [MockAVAudioSessionPortDescription(portType: input)], + outputs: [MockAVAudioSessionPortDescription(portType: output)] + ) + } +} diff --git a/StreamVideoTests/Mock/CallController_Mock.swift b/StreamVideoTests/Mock/CallController_Mock.swift index 79497f35c..99fdf7eca 100644 --- a/StreamVideoTests/Mock/CallController_Mock.swift +++ b/StreamVideoTests/Mock/CallController_Mock.swift @@ -18,7 +18,8 @@ class CallController_Mock: CallController, @unchecked Sendable { callSettings: CallSettings?, options: CreateCallOptions? = nil, ring: Bool = false, - notify: Bool = false + notify: Bool = false, + source: JoinSource ) async throws -> JoinCallResponse { mockResponseBuilder.makeJoinCallResponse(cid: super.call?.cId ?? 
"default:\(String.unique)") } diff --git a/StreamVideoTests/Mock/MockAVAudioSession.swift b/StreamVideoTests/Mock/MockAVAudioSession.swift index ffe0323ea..05e797491 100644 --- a/StreamVideoTests/Mock/MockAVAudioSession.swift +++ b/StreamVideoTests/Mock/MockAVAudioSession.swift @@ -18,6 +18,7 @@ final class MockAVAudioSession: AVAudioSessionProtocol, Mockable, @unchecked Sen enum MockFunctionKey: CaseIterable { case setCategory case setOverrideOutputAudioPort + case setIsActive } /// Defines typed payloads passed along with tracked function calls. @@ -29,6 +30,8 @@ final class MockAVAudioSession: AVAudioSessionProtocol, Mockable, @unchecked Sen ) case setOverrideOutputAudioPort(value: AVAudioSession.PortOverride) + case setIsActive(Bool) + // Return an untyped payload for storage in the base Mockable dictionary. var payload: Any { switch self { @@ -37,6 +40,9 @@ final class MockAVAudioSession: AVAudioSessionProtocol, Mockable, @unchecked Sen case let .setOverrideOutputAudioPort(value): return value + + case let .setIsActive(value): + return value } } } @@ -89,6 +95,16 @@ final class MockAVAudioSession: AVAudioSessionProtocol, Mockable, @unchecked Sen } } + func setIsActive(_ active: Bool) throws { + record( + .setIsActive, + input: .setIsActive(active) + ) + if let error = stubbedFunction[.setIsActive] as? Error { + throw error + } + } + // MARK: - Helpers /// Tracks calls to a specific function/property in the mock. diff --git a/StreamVideoTests/Mock/MockAVAudioSessionPortDescription.swift b/StreamVideoTests/Mock/MockAVAudioSessionPortDescription.swift new file mode 100644 index 000000000..748eaa68a --- /dev/null +++ b/StreamVideoTests/Mock/MockAVAudioSessionPortDescription.swift @@ -0,0 +1,58 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. 
+// + +import AVFoundation + +final class MockAVAudioSessionPortDescription: AVAudioSessionPortDescription, @unchecked Sendable { + + var stubPortType: AVAudioSession.Port + override var portType: AVAudioSession.Port { stubPortType } + + var stubPortName: String + override var portName: String { stubPortName } + + var stubUid: String + override var uid: String { stubUid } + + var stubHasHardwareVoiceCallProcessing: Bool + override var hasHardwareVoiceCallProcessing: Bool { stubHasHardwareVoiceCallProcessing } + + var stubIsSpatialAudioEnabled: Bool + override var isSpatialAudioEnabled: Bool { stubIsSpatialAudioEnabled } + + var stubChannels: [AVAudioSessionChannelDescription]? + override var channels: [AVAudioSessionChannelDescription]? { stubChannels } + + var stubDataSources: [AVAudioSessionDataSourceDescription]? + override var dataSources: [AVAudioSessionDataSourceDescription]? { stubDataSources } + + var stubSelectedDataSource: AVAudioSessionDataSourceDescription? + override var selectedDataSource: AVAudioSessionDataSourceDescription? { stubSelectedDataSource } + + var stubPreferredDataSource: AVAudioSessionDataSourceDescription? + override var preferredDataSource: AVAudioSessionDataSourceDescription? { stubPreferredDataSource } + + init( + portType: AVAudioSession.Port, + portName: String = .unique, + uid: String = UUID().uuidString, + hasHardwareVoiceCallProcessing: Bool = false, + isSpatialAudioEnabled: Bool = false, + channels: [AVAudioSessionChannelDescription]? = nil, + dataSources: [AVAudioSessionDataSourceDescription]? = nil, + selectedDataSource: AVAudioSessionDataSourceDescription? = nil, + preferredDataSource: AVAudioSessionDataSourceDescription? 
= nil + ) { + stubPortType = portType + stubPortName = portName + stubUid = uid + stubHasHardwareVoiceCallProcessing = hasHardwareVoiceCallProcessing + stubIsSpatialAudioEnabled = isSpatialAudioEnabled + stubChannels = channels + stubDataSources = dataSources + stubSelectedDataSource = selectedDataSource + stubPreferredDataSource = preferredDataSource + super.init() + } +} diff --git a/StreamVideoTests/Mock/MockAVAudioSessionRouteDescription.swift b/StreamVideoTests/Mock/MockAVAudioSessionRouteDescription.swift new file mode 100644 index 000000000..c30cec333 --- /dev/null +++ b/StreamVideoTests/Mock/MockAVAudioSessionRouteDescription.swift @@ -0,0 +1,23 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import AVFoundation + +final class MockAVAudioSessionRouteDescription: AVAudioSessionRouteDescription, @unchecked Sendable { + + var stubInputs: [AVAudioSessionPortDescription] + var stubOutputs: [AVAudioSessionPortDescription] + + override var inputs: [AVAudioSessionPortDescription] { stubInputs } + override var outputs: [AVAudioSessionPortDescription] { stubOutputs } + + init( + inputs: [AVAudioSessionPortDescription] = [], + outputs: [AVAudioSessionPortDescription] = [] + ) { + stubInputs = inputs + stubOutputs = outputs + super.init() + } +} diff --git a/StreamVideoTests/Mock/MockAudioSession.swift b/StreamVideoTests/Mock/MockAudioSession.swift index 615bcbb92..8f9df0c33 100644 --- a/StreamVideoTests/Mock/MockAudioSession.swift +++ b/StreamVideoTests/Mock/MockAudioSession.swift @@ -3,60 +3,73 @@ // import AVFoundation -import Combine +import Foundation @testable import StreamVideo import StreamWebRTC final class MockAudioSession: AudioSessionProtocol, Mockable, @unchecked Sendable { - // MARK: - Mockable - typealias FunctionKey = MockFunctionKey typealias FunctionInputKey = MockFunctionInputKey - /// Defines the "functions" or property accesses we want to track or stub. 
- enum MockFunctionKey: CaseIterable { - case setCategory + enum MockFunctionKey: Hashable, CaseIterable { + case setPrefersNoInterruptionsFromSystemAlerts + case requestRecordPermission + case addDelegate + case removeDelegate + case audioSessionDidActivate + case audioSessionDidDeactivate case setActive case overrideOutputAudioPort - case requestRecordPermission + case setConfiguration } - /// Defines typed payloads passed along with tracked function calls. enum MockFunctionInputKey: Payloadable { - case setCategory( - category: AVAudioSession.Category, - mode: AVAudioSession.Mode, - options: AVAudioSession.CategoryOptions - ) - case setActive(value: Bool) - case overrideOutputAudioPort(value: AVAudioSession.PortOverride) + case setPrefersNoInterruptionsFromSystemAlerts(Bool) case requestRecordPermission + case addDelegate(RTCAudioSessionDelegate) + case removeDelegate(RTCAudioSessionDelegate) + case audioSessionDidActivate(AVAudioSession) + case audioSessionDidDeactivate(AVAudioSession) + case setActive(Bool) + case overrideOutputAudioPort(AVAudioSession.PortOverride) + case setConfiguration(RTCAudioSessionConfiguration) - // Return an untyped payload for storage in the base Mockable dictionary. 
var payload: Any { switch self { - case let .setCategory(category, mode, options): - return (category, mode, options) - - case let .setActive(value): - return value - - case let .overrideOutputAudioPort(value): + case let .setPrefersNoInterruptionsFromSystemAlerts(value): return value case .requestRecordPermission: return () + + case let .addDelegate(delegate): + return delegate + + case let .removeDelegate(delegate): + return delegate + + case let .audioSessionDidActivate(audioSession): + return audioSession + + case let .audioSessionDidDeactivate(audioSession): + return audioSession + + case let .setActive(isActive): + return isActive + + case let .overrideOutputAudioPort(port): + return port + + case let .setConfiguration(configuration): + return configuration } } } - // MARK: - Mockable Storage - var stubbedProperty: [String: Any] = [:] var stubbedFunction: [FunctionKey: Any] = [:] - @Atomic - var stubbedFunctionInput: [FunctionKey: [FunctionInputKey]] = FunctionKey.allCases - .reduce(into: [FunctionKey: [MockFunctionInputKey]]()) { $0[$1] = [] } + @Atomic var stubbedFunctionInput: [FunctionKey: [MockFunctionInputKey]] = + MockFunctionKey.allCases.reduce(into: [:]) { $0[$1] = [] } func stub(for keyPath: KeyPath, with value: T) { stubbedProperty[propertyKey(for: keyPath)] = value @@ -66,120 +79,96 @@ final class MockAudioSession: AudioSessionProtocol, Mockable, @unchecked Sendabl stubbedFunction[function] = value } - // MARK: - AudioSessionProtocol - - let eventSubject = PassthroughSubject() + // MARK: - Init init() { - stub(for: \.eventPublisher, with: eventSubject.eraseToAnyPublisher()) - stub(for: \.isActive, with: false) - stub(for: \.currentRoute, with: AVAudioSessionRouteDescription()) - stub(for: \.category, with: AVAudioSession.Category.soloAmbient) - stub(for: \.useManualAudio, with: false) - stub(for: \.isAudioEnabled, with: false) + stub(for: .requestRecordPermission, with: false) } - /// Publishes audio session-related events. 
- var eventPublisher: AnyPublisher { - get { self[dynamicMember: \.eventPublisher] } - set { stub(for: \.eventPublisher, with: newValue) } - } + // MARK: - AudioSessionProtocol - /// Indicates whether the audio session is active. - var isActive: Bool { - get { self[dynamicMember: \.isActive] } - set { stub(for: \.isActive, with: newValue) } - } + var avSession: AVAudioSessionProtocol = MockAVAudioSession() - /// The current audio route for the session. - var currentRoute: AVAudioSessionRouteDescription { - get { self[dynamicMember: \.currentRoute] } - set { stub(for: \.currentRoute, with: newValue) } - } + var prefersNoInterruptionsFromSystemAlerts: Bool = false + + func setPrefersNoInterruptionsFromSystemAlerts(_ newValue: Bool) throws { + stubbedFunctionInput[.setPrefersNoInterruptionsFromSystemAlerts]? + .append(.setPrefersNoInterruptionsFromSystemAlerts(newValue)) - /// The current audio session category. - var category: AVAudioSession.Category { - get { self[dynamicMember: \.category] } - set { stub(for: \.category, with: newValue) } + if let error = stubbedFunction[.setPrefersNoInterruptionsFromSystemAlerts] as? Error { + throw error + } } - /// A Boolean value indicating if manual audio routing is used. - var useManualAudio: Bool { - get { self[dynamicMember: \.useManualAudio] } - set { stub(for: \.useManualAudio, with: newValue) } + var isActive: Bool = false + + var isAudioEnabled: Bool = false + + var useManualAudio: Bool = false + + var category: String = "" + + var mode: String = "" + + var categoryOptions: AVAudioSession.CategoryOptions = [] + + var recordPermissionGranted: Bool = false + + func requestRecordPermission() async -> Bool { + stubbedFunctionInput[.requestRecordPermission]? + .append(.requestRecordPermission) + + return stubbedFunction[.requestRecordPermission] as! Bool } - /// A Boolean value indicating if audio is enabled. 
- var isAudioEnabled: Bool { - get { self[dynamicMember: \.isAudioEnabled] } - set { stub(for: \.isAudioEnabled, with: newValue) } + var currentRoute: AVAudioSessionRouteDescription = .init() + + func add(_ delegate: any RTCAudioSessionDelegate) { + stubbedFunctionInput[.addDelegate]? + .append(.addDelegate(delegate)) } - var mode: AVAudioSession.Mode { - get { self[dynamicMember: \.mode] } - set { stub(for: \.mode, with: newValue) } + func remove(_ delegate: any RTCAudioSessionDelegate) { + stubbedFunctionInput[.removeDelegate]? + .append(.removeDelegate(delegate)) } - var overrideOutputPort: AVAudioSession.PortOverride { - get { self[dynamicMember: \.overrideOutputPort] } - set { stub(for: \.overrideOutputPort, with: newValue) } + func audioSessionDidActivate(_ audioSession: AVAudioSession) { + stubbedFunctionInput[.audioSessionDidActivate]? + .append(.audioSessionDidActivate(audioSession)) } - var hasRecordPermission: Bool { - get { self[dynamicMember: \.hasRecordPermission] } - set { stub(for: \.hasRecordPermission, with: newValue) } + func audioSessionDidDeactivate(_ audioSession: AVAudioSession) { + stubbedFunctionInput[.audioSessionDidDeactivate]? + .append(.audioSessionDidDeactivate(audioSession)) } - /// Sets the audio category, mode, and options. - func setCategory( - _ category: AVAudioSession.Category, - mode: AVAudioSession.Mode, - with categoryOptions: AVAudioSession.CategoryOptions - ) async throws { - record(.setCategory, input: .setCategory( - category: category, - mode: mode, - options: categoryOptions - )) - if let error = stubbedFunction[.setCategory] as? Error { - throw error - } + func setActive(_ isActive: Bool) throws { + stubbedFunctionInput[.setActive]? + .append(.setActive(isActive)) } - /// Activates or deactivates the audio session. - func setActive(_ isActive: Bool) async throws { - record(.setActive, input: .setActive(value: isActive)) - if let error = stubbedFunction[.setActive] as? 
Error { - throw error - } + func perform( + _ operation: (any AudioSessionProtocol) throws -> Void + ) throws { + try operation(self) } - /// Overrides the audio output port. - func overrideOutputAudioPort(_ port: AVAudioSession.PortOverride) async throws { - record(.overrideOutputAudioPort, input: .overrideOutputAudioPort(value: port)) + func overrideOutputAudioPort(_ port: AVAudioSession.PortOverride) throws { + stubbedFunctionInput[.overrideOutputAudioPort]? + .append(.overrideOutputAudioPort(port)) + if let error = stubbedFunction[.overrideOutputAudioPort] as? Error { throw error } } - /// Requests permission to record audio. - func requestRecordPermission() async -> Bool { - record(.requestRecordPermission, input: .requestRecordPermission) - return (stubbedFunction[.requestRecordPermission] as? Bool) ?? false - } + func setConfiguration(_ configuration: RTCAudioSessionConfiguration) throws { + stubbedFunctionInput[.setConfiguration]? + .append(.setConfiguration(configuration)) - // MARK: - Helpers - - /// Tracks calls to a specific function/property in the mock. - private func record( - _ function: FunctionKey, - input: FunctionInputKey? = nil - ) { - if let input { - stubbedFunctionInput[function]?.append(input) - } else { - // Still record the call, but with no input - stubbedFunctionInput[function]?.append(contentsOf: []) + if let error = stubbedFunction[.setConfiguration] as? 
Error { + throw error } } } diff --git a/StreamVideoTests/Mock/MockAudioSessionPolicy.swift b/StreamVideoTests/Mock/MockAudioSessionPolicy.swift index 5cbd073c3..c4f23ffcd 100644 --- a/StreamVideoTests/Mock/MockAudioSessionPolicy.swift +++ b/StreamVideoTests/Mock/MockAudioSessionPolicy.swift @@ -42,6 +42,7 @@ final class MockAudioSessionPolicy: Mockable, AudioSessionPolicy, @unchecked Sen stub( for: .configuration, with: AudioSessionConfiguration( + isActive: true, category: .soloAmbient, mode: .default, options: [] diff --git a/StreamVideoTests/Mock/MockCall.swift b/StreamVideoTests/Mock/MockCall.swift index 9e9fa30b1..5bb962841 100644 --- a/StreamVideoTests/Mock/MockCall.swift +++ b/StreamVideoTests/Mock/MockCall.swift @@ -176,12 +176,4 @@ final class MockCall: Call, Mockable, @unchecked Sendable { .updateTrackSize(trackSize: trackSize, participant: participant) ) } - - override func callKitActivated( - _ audioSession: AVAudioSessionProtocol - ) throws { - stubbedFunctionInput[.callKitActivated]?.append( - .callKitActivated(audioSession: audioSession) - ) - } } diff --git a/StreamVideoTests/Mock/MockCallController.swift b/StreamVideoTests/Mock/MockCallController.swift index 71d029146..03803f4cb 100644 --- a/StreamVideoTests/Mock/MockCallController.swift +++ b/StreamVideoTests/Mock/MockCallController.swift @@ -24,7 +24,8 @@ final class MockCallController: CallController, Mockable, @unchecked Sendable { callSettings: CallSettings?, options: CreateCallOptions?, ring: Bool = false, - notify: Bool = false + notify: Bool = false, + source: JoinSource ) case observeWebRTCStateUpdated @@ -39,8 +40,8 @@ final class MockCallController: CallController, Mockable, @unchecked Sendable { switch self { case let .setDisconnectionTimeout(timeout): return timeout - case let .join(create, callSettings, options, ring, notify): - return (create, callSettings, options, ring, notify) + case let .join(create, callSettings, options, ring, notify, source): + return (create, callSettings, 
options, ring, notify, source) case .observeWebRTCStateUpdated: return () case let .changeVideoState(value): @@ -84,7 +85,8 @@ final class MockCallController: CallController, Mockable, @unchecked Sendable { callSettings: CallSettings?, options: CreateCallOptions? = nil, ring: Bool = false, - notify: Bool = false + notify: Bool = false, + source: JoinSource ) async throws -> JoinCallResponse { stubbedFunctionInput[.join]?.append( .join( @@ -92,7 +94,8 @@ final class MockCallController: CallController, Mockable, @unchecked Sendable { callSettings: callSettings, options: options, ring: ring, - notify: notify + notify: notify, + source: source ) ) @@ -106,7 +109,8 @@ final class MockCallController: CallController, Mockable, @unchecked Sendable { callSettings: callSettings, options: options, ring: ring, - notify: notify + notify: notify, + source: source ) } } diff --git a/StreamVideoTests/Mock/MockRTCAudioStore.swift b/StreamVideoTests/Mock/MockRTCAudioStore.swift new file mode 100644 index 000000000..8f946f6c7 --- /dev/null +++ b/StreamVideoTests/Mock/MockRTCAudioStore.swift @@ -0,0 +1,24 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import Foundation +@testable import StreamVideo + +final class MockRTCAudioStore { + + let audioStore: RTCAudioStore + let session: MockAudioSession + + init() { + let session = MockAudioSession() + self.session = session + audioStore = RTCAudioStore(session: session) + } + + /// We call this just before the object that needs to use the mock is about to be created. 
+ func makeShared() { + RTCAudioStore.currentValue = audioStore + InjectedValues[\.audioStore] = audioStore + } +} diff --git a/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorStack.swift b/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorStack.swift index 976bcab44..e416db9d4 100644 --- a/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorStack.swift +++ b/StreamVideoTests/Mock/MockRTCPeerConnectionCoordinatorStack.swift @@ -12,7 +12,7 @@ struct MockRTCPeerConnectionCoordinatorStack: @unchecked Sendable { let peerConnection: MockRTCPeerConnection let peerConnectionFactory: PeerConnectionFactory let mockSFUStack: MockSFUStack - let audioSession: StreamAudioSession + let audioSession: CallAudioSession let spySubject: PassthroughSubject let mockLocalAudioMediaAdapter: MockLocalMediaAdapter let mockLocalVideoMediaAdapter: MockLocalMediaAdapter @@ -35,7 +35,7 @@ struct MockRTCPeerConnectionCoordinatorStack: @unchecked Sendable { peerConnection: MockRTCPeerConnection = .init(), peerConnectionFactory: PeerConnectionFactory = .mock(), mockSFUStack: MockSFUStack = .init(), - audioSession: StreamAudioSession? = nil, + audioSession: CallAudioSession? = nil, spySubject: PassthroughSubject = .init(), mockLocalAudioMediaAdapter: MockLocalMediaAdapter = .init(), mockLocalVideoMediaAdapter: MockLocalMediaAdapter = .init(), @@ -46,7 +46,7 @@ struct MockRTCPeerConnectionCoordinatorStack: @unchecked Sendable { self.peerConnection = peerConnection self.peerConnectionFactory = peerConnectionFactory self.mockSFUStack = mockSFUStack - self.audioSession = audioSession ?? .init(audioDeviceModule: peerConnectionFactory.audioDeviceModule) + self.audioSession = audioSession ?? 
.init() self.spySubject = spySubject self.mockLocalAudioMediaAdapter = mockLocalAudioMediaAdapter self.mockLocalVideoMediaAdapter = mockLocalVideoMediaAdapter diff --git a/StreamVideoTests/Mock/MockWebRTCStatsAdapter.swift b/StreamVideoTests/Mock/MockWebRTCStatsAdapter.swift index 5f2970e9e..fd62f98e7 100644 --- a/StreamVideoTests/Mock/MockWebRTCStatsAdapter.swift +++ b/StreamVideoTests/Mock/MockWebRTCStatsAdapter.swift @@ -77,16 +77,6 @@ final class MockWebRTCStatsAdapter: Mockable, WebRTCStatsAdapting, @unchecked Se set { stub(for: \.subscriber, with: newValue) } } - var callSettings: CallSettings? { - get { self[dynamicMember: \.callSettings] } - set { stub(for: \.callSettings, with: newValue) } - } - - var audioSession: StreamAudioSession? { - get { self[dynamicMember: \.audioSession] } - set { stub(for: \.audioSession, with: newValue) } - } - var deliveryInterval: TimeInterval { get { self[dynamicMember: \.deliveryInterval] } set { stub(for: \.deliveryInterval, with: newValue) } diff --git a/StreamVideoTests/Mock/MockWebRTCTracesAdapter.swift b/StreamVideoTests/Mock/MockWebRTCTracesAdapter.swift index 2e84d4c93..16f8e2104 100644 --- a/StreamVideoTests/Mock/MockWebRTCTracesAdapter.swift +++ b/StreamVideoTests/Mock/MockWebRTCTracesAdapter.swift @@ -83,16 +83,6 @@ final class MockWebRTCTracesAdapter: WebRTCTracing, Mockable, @unchecked Sendabl set { stub(for: \.subscriber, with: newValue) } } - var callSettings: CallSettings? { - get { self[dynamicMember: \.callSettings] } - set { stub(for: \.callSettings, with: newValue) } - } - - var audioSession: StreamAudioSession? 
{ - get { self[dynamicMember: \.audioSession] } - set { stub(for: \.audioSession, with: newValue) } - } - // MARK: - Methods func trace(_ trace: WebRTCTrace) { diff --git a/StreamVideoTests/Utils/AudioSession/RTCAudioSessionDelegatePublisher_Tests.swift b/StreamVideoTests/Utils/AudioSession/RTCAudioSessionDelegatePublisher_Tests.swift deleted file mode 100644 index 20ea96522..000000000 --- a/StreamVideoTests/Utils/AudioSession/RTCAudioSessionDelegatePublisher_Tests.swift +++ /dev/null @@ -1,249 +0,0 @@ -// -// Copyright © 2025 Stream.io Inc. All rights reserved. -// - -import AVFoundation -import Combine -@testable import StreamVideo -import StreamWebRTC -import XCTest - -@MainActor -final class RTCAudioSessionDelegatePublisherTests: XCTestCase, @unchecked Sendable { - private var session: RTCAudioSession! = .sharedInstance() - private var disposableBag: DisposableBag! = .init() - private var subject: RTCAudioSessionDelegatePublisher! = .init() - - override func tearDown() { - Task { @MainActor in - subject = nil - disposableBag.removeAll() - } - super.tearDown() - } - - // MARK: - audioSessionDidBeginInterruption - - func test_audioSessionDidBeginInterruption_givenSession_whenCalled_thenPublishesEvent() { - assertAudioSessionEvent( - subject.audioSessionDidBeginInterruption(session), - validator: { - if case let .didBeginInterruption(receivedSession) = $0 { - XCTAssertEqual(receivedSession, self.session) - } - } - ) - } - - // MARK: - audioSessionDidEndInterruption - - func test_audioSessionDidEndInterruption_givenSessionAndShouldResume_whenCalled_thenPublishesEvent() { - assertAudioSessionEvent( - subject.audioSessionDidEndInterruption(session, shouldResumeSession: true), - validator: { - if case let .didEndInterruption(receivedSession, receivedShouldResume) = $0 { - XCTAssertEqual(receivedSession, self.session) - XCTAssertTrue(receivedShouldResume) - } - } - ) - } - - // MARK: - audioSessionDidChangeRoute - - func 
test_audioSessionDidChangeRoute_givenSessionReasonAndPreviousRoute_whenCalled_thenPublishesEvent() { - let reason: AVAudioSession.RouteChangeReason = .newDeviceAvailable - let previousRoute = AVAudioSessionRouteDescription() - - assertAudioSessionEvent( - subject.audioSessionDidChangeRoute( - session, - reason: reason, - previousRoute: previousRoute - ), - validator: { - if case let .didChangeRoute(receivedSession, receivedReason, receivedPreviousRoute) = $0 { - XCTAssertEqual(receivedSession, self.session) - XCTAssertEqual(receivedReason, reason) - XCTAssertEqual(receivedPreviousRoute, previousRoute) - } - } - ) - } - - // MARK: - audioSessionMediaServerTerminated - - func test_audioSessionMediaServerTerminated_givenSession_whenCalled_thenPublishesEvent() { - assertAudioSessionEvent( - subject.audioSessionMediaServerTerminated(session), - validator: { - if case let .mediaServerTerminated(receivedSession) = $0 { - XCTAssertEqual(receivedSession, self.session) - } - } - ) - } - - // MARK: - audioSessionMediaServerReset - - func test_audioSessionMediaServerReset_givenSession_whenCalled_thenPublishesEvent() { - assertAudioSessionEvent( - subject.audioSessionMediaServerReset(session), - validator: { - if case let .mediaServerReset(receivedSession) = $0 { - XCTAssertEqual(receivedSession, self.session) - } - } - ) - } - - // MARK: - audioSessionDidChangeCanPlayOrRecord - - func test_audioSessionDidChangeCanPlayOrRecord_givenSessionAndCanPlayOrRecord_whenCalled_thenPublishesEvent() { - assertAudioSessionEvent( - subject.audioSession(session, didChangeCanPlayOrRecord: true), - validator: { - if case let .didChangeCanPlayOrRecord(receivedSession, receivedCanPlayOrRecord) = $0 { - XCTAssertEqual(receivedSession, self.session) - XCTAssertTrue(receivedCanPlayOrRecord) - } - } - ) - } - - // MARK: - audioSessionDidStartPlayOrRecord - - func test_audioSessionDidStartPlayOrRecord_givenSession_whenCalled_thenPublishesEvent() { - assertAudioSessionEvent( - 
subject.audioSessionDidStartPlayOrRecord(session), - validator: { - if case let .didStartPlayOrRecord(receivedSession) = $0 { - XCTAssertEqual(receivedSession, self.session) - } - } - ) - } - - // MARK: - audioSessionDidStopPlayOrRecord - - func test_audioSessionDidStopPlayOrRecord_givenSession_whenCalled_thenPublishesEvent() { - assertAudioSessionEvent( - subject.audioSessionDidStopPlayOrRecord(session), - validator: { - if case let .didStopPlayOrRecord(receivedSession) = $0 { - XCTAssertEqual(receivedSession, self.session) - } - } - ) - } - - // MARK: - audioSessionDidChangeOutputVolume - - func test_audioSessionDidChangeOutputVolume_givenSessionAndOutputVolume_whenCalled_thenPublishesEvent() { - assertAudioSessionEvent( - subject.audioSession(session, didChangeOutputVolume: 0.5), - validator: { - if case let .didChangeOutputVolume(receivedSession, receivedOutputVolume) = $0 { - XCTAssertEqual(receivedSession, self.session) - XCTAssertEqual(receivedOutputVolume, 0.5) - } - } - ) - } - - // MARK: - audioSessionDidDetectPlayoutGlitch - - func test_audioSessionDidDetectPlayoutGlitch_givenSessionAndTotalNumberOfGlitches_whenCalled_thenPublishesEvent() { - assertAudioSessionEvent( - subject.audioSession(session, didDetectPlayoutGlitch: 10), - validator: { - if case let .didDetectPlayoutGlitch(receivedSession, receivedTotalNumberOfGlitches) = $0 { - XCTAssertEqual(receivedSession, self.session) - XCTAssertEqual(receivedTotalNumberOfGlitches, 10) - } - } - ) - } - - // MARK: - audioSessionWillSetActive - - func test_audioSessionWillSetActive_givenSessionAndActive_whenCalled_thenPublishesEvent() { - assertAudioSessionEvent( - subject.audioSession(session, willSetActive: true), - validator: { - if case let .willSetActive(receivedSession, receivedActive) = $0 { - XCTAssertEqual(receivedSession, self.session) - XCTAssertTrue(receivedActive) - } - } - ) - } - - // MARK: - audioSessionDidSetActive - - func 
test_audioSessionDidSetActive_givenSessionAndActive_whenCalled_thenPublishesEvent() { - assertAudioSessionEvent( - subject.audioSession(session, didSetActive: true), - validator: { - if case let .didSetActive(receivedSession, receivedActive) = $0 { - XCTAssertEqual(receivedSession, self.session) - XCTAssertTrue(receivedActive) - } - } - ) - } - - // MARK: - audioSessionFailedToSetActive - - func test_audioSessionFailedToSetActive_givenSessionActiveAndError_whenCalled_thenPublishesEvent() { - let error = NSError(domain: "TestError", code: 1, userInfo: nil) - assertAudioSessionEvent( - subject.audioSession(session, failedToSetActive: true, error: error), - validator: { - if case let .failedToSetActive(receivedSession, receivedActive, receivedError) = $0 { - XCTAssertEqual(receivedSession, self.session) - XCTAssertTrue(receivedActive) - XCTAssertEqual(receivedError as NSError, error) - } - } - ) - } - - // MARK: - audioSessionAudioUnitStartFailedWithError - - func test_audioSessionAudioUnitStartFailedWithError_givenSessionAndError_whenCalled_thenPublishesEvent() { - let error = NSError(domain: "TestError", code: 1, userInfo: nil) - assertAudioSessionEvent( - subject.audioSession(session, audioUnitStartFailedWithError: error), - validator: { - if case let .audioUnitStartFailedWithError(receivedSession, receivedError) = $0 { - XCTAssertEqual(receivedSession, self.session) - XCTAssertEqual(receivedError as NSError, error) - } - } - ) - } - - // MARK: - Private helpers - - @MainActor - private func assertAudioSessionEvent( - _ action: @autoclosure () -> Void, - validator: @escaping (AudioSessionEvent) -> Void - ) { - let expectation = self.expectation(description: "AudioSession event received.") - _ = RTCAudioSession.sharedInstance() - - subject - .publisher - .sink { - validator($0) - expectation.fulfill() - } - .store(in: disposableBag) - - action() - - waitForExpectations(timeout: 1, handler: nil) - } -} diff --git 
a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Effects/InterruptionEffect_Tests.swift b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Effects/InterruptionEffect_Tests.swift new file mode 100644 index 000000000..ce26c64ac --- /dev/null +++ b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Effects/InterruptionEffect_Tests.swift @@ -0,0 +1,72 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +@testable import StreamVideo +import StreamWebRTC +import XCTest + +final class InterruptionEffect_Tests: XCTestCase, @unchecked Sendable { + + // MARK: - Properties + + private lazy var store: MockRTCAudioStore! = .init() + private lazy var subject: RTCAudioStore.InterruptionEffect! = .init(store.audioStore) + + // MARK: - Lifecycle + + override func tearDown() { + store = nil + subject = nil + super.tearDown() + } + + // MARK: - init + + func test_init_delegateWasAdded() { + _ = subject + + XCTAssertEqual(store.session.timesCalled(.addDelegate), 1) + } + + // MARK: - audioSessionDidBeginInterruption + + func test_audioSessionDidBeginInterruption_dispatchesIsInterruptedAndDisablesAudio() async { + subject.audioSessionDidBeginInterruption(.sharedInstance()) + + await fulfillment { + self.store.audioStore.state.isInterrupted == true + && self.store.audioStore.state.isAudioEnabled == false + } + } + + // MARK: - audioSessionDidEndInterruption + + func test_audioSessionDidEndInterruption_shouldNotResume_dispatchesIsInterruptedFalseOnly() async { + subject.audioSessionDidBeginInterruption(.sharedInstance()) + + subject.audioSessionDidEndInterruption( + .sharedInstance(), + shouldResumeSession: false + ) + + await fulfillment { self.store.audioStore.state.isInterrupted == false } + XCTAssertFalse(store.audioStore.state.isActive) + XCTAssertFalse(store.audioStore.state.isAudioEnabled) + } + + func test_audioSessionDidEndInterruption_shouldResume_dispatchesExpectedSequence() async { + subject.audioSessionDidBeginInterruption(.sharedInstance()) + + 
subject.audioSessionDidEndInterruption( + .sharedInstance(), + shouldResumeSession: true + ) + + await fulfillment { + self.store.audioStore.state.isInterrupted == false + && self.store.audioStore.state.isActive == true + && self.store.audioStore.state.isAudioEnabled == true + } + } +} diff --git a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Effects/RouteChangeEffect_Tests.swift b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Effects/RouteChangeEffect_Tests.swift new file mode 100644 index 000000000..343bcd4ff --- /dev/null +++ b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Effects/RouteChangeEffect_Tests.swift @@ -0,0 +1,126 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import Combine +@testable import StreamVideo +import StreamWebRTC +import XCTest + +final class RouteChangeEffect_Tests: XCTestCase, @unchecked Sendable { + + // MARK: - Mocks + + final class MockDelegate: StreamAudioSessionAdapterDelegate { + private(set) var updatedCallSettings: CallSettings? + + func audioSessionAdapterDidUpdateCallSettings(callSettings: CallSettings) { + updatedCallSettings = callSettings + } + } + + // MARK: - Properties + + private lazy var store: MockRTCAudioStore! = .init() + private lazy var delegate: MockDelegate! = .init() + private lazy var callSettingsSubject: PassthroughSubject! = .init() + private lazy var subject: RTCAudioStore.RouteChangeEffect! 
= .init( + store.audioStore, + callSettingsPublisher: callSettingsSubject.eraseToAnyPublisher(), + delegate: delegate + ) + + // MARK: - Lifecycle + + override func tearDown() { + subject = nil + delegate = nil + callSettingsSubject = PassthroughSubject() + store = nil + super.tearDown() + } + + // MARK: - init + + func test_init_delegateWasAdded() { + _ = subject + + XCTAssertEqual(store.session.timesCalled(.addDelegate), 1) + } + + // MARK: - audioSessionDidChangeRoute + + func test_routeChange_whenDeviceIsNotPhone_andSpeakerStateDiffers_shouldUpdateDelegate() async { + await assert( + currentDevice: .pad, + activeCallSettings: .init(speakerOn: false), + updatedRoute: .dummy(output: .builtInSpeaker), + expectedCallSettings: .init(speakerOn: true) + ) + } + + func test_routeChange_whenPhone_speakerOnToOff_shouldUpdateDelegate() async { + await assert( + currentDevice: .phone, + activeCallSettings: .init(speakerOn: true), + updatedRoute: .dummy(output: .builtInReceiver), + expectedCallSettings: .init(speakerOn: false) + ) + } + + func test_routeChange_whenPhone_speakerOffToOn_withPlayAndRecord_shouldUpdateDelegate() async { + await assert( + currentDevice: .phone, + activeCallSettings: .init(speakerOn: false), + updatedRoute: .dummy(output: .builtInSpeaker), + expectedCallSettings: .init(speakerOn: true) + ) + } + + func test_routeChange_whenPhone_speakerOffToOn_withPlayback_shouldNotUpdateDelegate() async { + await assert( + currentDevice: .phone, + activeCallSettings: .init(speakerOn: false), + category: .playback, + updatedRoute: .dummy(output: .builtInSpeaker), + expectedCallSettings: nil + ) + } + + func test_routeChange_whenSpeakerStateMatches_shouldNotUpdateDelegate() async { + await assert( + currentDevice: .phone, + activeCallSettings: .init(speakerOn: true), + updatedRoute: .dummy(output: .builtInSpeaker), + expectedCallSettings: nil + ) + } + + // MARK: - Private Helpers + + private func assert( + currentDevice: CurrentDevice.DeviceType, + 
activeCallSettings: CallSettings, + category: AVAudioSession.Category = .playAndRecord, + updatedRoute: AVAudioSessionRouteDescription, + expectedCallSettings: CallSettings? + ) async { + // Given + CurrentDevice.currentValue = .init { currentDevice } + await fulfillment { CurrentDevice.currentValue.deviceType == currentDevice } + _ = subject + callSettingsSubject.send(activeCallSettings) + store.session.category = category.rawValue + store.session.currentRoute = updatedRoute + + // When + subject.audioSessionDidChangeRoute( + .sharedInstance(), + reason: .unknown, + previousRoute: .dummy() + ) + + // Then + XCTAssertEqual(delegate.updatedCallSettings, expectedCallSettings) + } +} diff --git a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/RTCAudioStore_Tests.swift b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/RTCAudioStore_Tests.swift new file mode 100644 index 000000000..2f966ffdf --- /dev/null +++ b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/RTCAudioStore_Tests.swift @@ -0,0 +1,116 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import Combine +@testable import StreamVideo +import StreamWebRTC +import XCTest + +final class RTCAudioStore_Tests: XCTestCase, @unchecked Sendable { + + private final class SpyReducer: RTCAudioStoreReducer, @unchecked Sendable { + var reduceError: Error? + private(set) var reduceWasCalled: (state: RTCAudioStore.State, action: RTCAudioStoreAction, calledAt: Date)? + func reduce( + state: RTCAudioStore.State, + action: RTCAudioStoreAction, + file: StaticString, + function: StaticString, + line: UInt + ) throws -> RTCAudioStore.State { + reduceWasCalled = (state, action, .init()) + guard let reduceError else { + return state + } + throw reduceError + } + } + + private final class SpyMiddleware: RTCAudioStoreMiddleware, @unchecked Sendable { + private(set) var applyWasCalled: (state: RTCAudioStore.State, action: RTCAudioStoreAction, calledAt: Date)? 
+ func apply( + state: RTCAudioStore.State, + action: RTCAudioStoreAction, + file: StaticString, + function: StaticString, + line: UInt + ) { + applyWasCalled = (state, action, .init()) + } + } + + // MARK: - Properties + + private lazy var subject: RTCAudioStore! = .init() + + // MARK: - Lifecycle + + override func tearDown() { + subject = nil + super.tearDown() + } + + // MARK: - init + + func test_init_RTCAudioSessionReducerHasBeenAdded() { + _ = subject + + XCTAssertNotNil(subject.reducers.first(where: { $0 is RTCAudioSessionReducer })) + } + + func test_init_stateWasUpdatedCorrectly() async { + _ = subject + + await fulfillment { + self.subject.state.prefersNoInterruptionsFromSystemAlerts == true + && self.subject.state.useManualAudio == true + && self.subject.state.isAudioEnabled == false + } + } + + // MARK: - dispatch + + func test_dispatch_middlewareWasCalledBeforeReducer() async throws { + let reducer = SpyReducer() + let middleware = SpyMiddleware() + subject.add(reducer) + subject.add(middleware) + + subject.dispatch(.audioSession(.isActive(true))) + await fulfillment { middleware.applyWasCalled != nil && reducer.reduceWasCalled != nil } + + let middlewareWasCalledAt = try XCTUnwrap(middleware.applyWasCalled?.calledAt) + let reducerWasCalledAt = try XCTUnwrap(reducer.reduceWasCalled?.calledAt) + XCTAssertTrue(reducerWasCalledAt.timeIntervalSince(middlewareWasCalledAt) > 0) + } + + // MARK: - dispatchAsync + + func test_dispatchAsync_middlewareWasCalledBeforeReducer() async throws { + let reducer = SpyReducer() + let middleware = SpyMiddleware() + subject.add(reducer) + subject.add(middleware) + + try await subject.dispatchAsync(.audioSession(.isActive(true))) + + let middlewareWasCalledAt = try XCTUnwrap(middleware.applyWasCalled?.calledAt) + let reducerWasCalledAt = try XCTUnwrap(reducer.reduceWasCalled?.calledAt) + XCTAssertTrue(reducerWasCalledAt.timeIntervalSince(middlewareWasCalledAt) > 0) + } + + func 
test_dispatchAsync_reducerThrowsError_rethrowsError() async throws { + let expected = ClientError(.unique) + let reducer = SpyReducer() + reducer.reduceError = expected + subject.add(reducer) + + do { + try await subject.dispatchAsync(.audioSession(.isActive(true))) + XCTFail() + } catch { + XCTAssertEqual((error as? ClientError)?.localizedDescription, expected.localizedDescription) + } + } +} diff --git a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Reducers/CallKitAudioSessionReducer_Tests.swift b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Reducers/CallKitAudioSessionReducer_Tests.swift new file mode 100644 index 000000000..f00186ada --- /dev/null +++ b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Reducers/CallKitAudioSessionReducer_Tests.swift @@ -0,0 +1,84 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. +// + +import Combine +@testable import StreamVideo +import StreamWebRTC +import XCTest + +final class CallKitAudioSessionReducer_Tests: XCTestCase, @unchecked Sendable { + + // MARK: - Properties + + private lazy var store: MockRTCAudioStore! = .init() + private lazy var subject: CallKitAudioSessionReducer! 
= .init( + store: store.audioStore + ) + + // MARK: - Lifecycle + + override func tearDown() { + subject = nil + store = nil + super.tearDown() + } + + // MARK: - reduce + + // MARK: activate + + func test_reduce_callKitAction_activate_audioSessionDidActivateWasCalled() throws { + _ = try subject.reduce( + state: .initial, + action: .callKit(.activate(.sharedInstance())), + file: #file, + function: #function, + line: #line + ) + + XCTAssertEqual(store.session.timesCalled(.audioSessionDidActivate), 1) + } + + func test_reduce_callKitAction_activate_isActiveUpdatedToMatchSessionIsActive() throws { + store.session.isActive = true + + let updatedState = try subject.reduce( + state: .initial, + action: .callKit(.deactivate(.sharedInstance())), + file: #file, + function: #function, + line: #line + ) + + XCTAssertTrue(updatedState.isActive) + } + + // MARK: deactivate + + func test_reduce_callKitAction_deactivate_audioSessionDidDeactivateWasCalled() throws { + _ = try subject.reduce( + state: .initial, + action: .callKit(.deactivate(.sharedInstance())), + file: #file, + function: #function, + line: #line + ) + + XCTAssertEqual(store.session.timesCalled(.audioSessionDidDeactivate), 1) + } + + func test_reduce_callKitAction_deactivate_isActiveUpdatedToMatchSessionIsActive() throws { + store.session.isActive = false + + let updatedState = try subject.reduce( + state: .initial, + action: .callKit(.deactivate(.sharedInstance())), + file: #file, + function: #function, + line: #line + ) + + XCTAssertFalse(updatedState.isActive) + } +} diff --git a/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Reducers/RTCAudioSessionReducer_Tests.swift b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Reducers/RTCAudioSessionReducer_Tests.swift new file mode 100644 index 000000000..3959bbaf9 --- /dev/null +++ b/StreamVideoTests/Utils/AudioSession/RTCAudioStore/Reducers/RTCAudioSessionReducer_Tests.swift @@ -0,0 +1,278 @@ +// +// Copyright © 2025 Stream.io Inc. All rights reserved. 
+// + +import Combine +@testable import StreamVideo +import StreamWebRTC +import XCTest + +final class RTCAudioSessionReducer_Tests: XCTestCase, @unchecked Sendable { + + // MARK: - Properties + + private lazy var store: MockRTCAudioStore! = .init() + private lazy var subject: RTCAudioSessionReducer! = .init( + store: store.audioStore + ) + + // MARK: - Lifecycle + + override func tearDown() { + subject = nil + store = nil + super.tearDown() + } + + // MARK: - reduce + + // MARK: isActive + + func test_reduce_isActive_differentThanCurrentState_setActiveWasCalled() throws { + store.session.isActive = false + _ = try subject.reduce( + state: .initial, + action: .audioSession(.isActive(true)), + file: #file, + function: #function, + line: #line + ) + + XCTAssertEqual(store.session.timesCalled(.setActive), 1) + } + + func test_reduce_isActive_differentThanCurrentState_updatedStateHasIsActiveCorrectlySet() throws { + store.session.isActive = false + + let updatedState = try subject.reduce( + state: .initial, + action: .audioSession(.isActive(true)), + file: #file, + function: #function, + line: #line + ) + + XCTAssertTrue(updatedState.isActive) + } + + // MARK: - isInterrupted + + func test_reduce_isInterrupted_updatedStateWasCorrectlySet() throws { + var state = RTCAudioStore.State.initial + state.isInterrupted = false + + let updatedState = try subject.reduce( + state: state, + action: .audioSession(.isInterrupted(true)), + file: #file, + function: #function, + line: #line + ) + + XCTAssertTrue(updatedState.isInterrupted) + } + + // MARK: isAudioEnabled + + func test_reduce_isAudioEnabled_sessionWasConfiguredCorrectly() throws { + store.session.isAudioEnabled = false + + _ = try subject.reduce( + state: .initial, + action: .audioSession(.isAudioEnabled(true)), + file: #file, + function: #function, + line: #line + ) + + XCTAssertTrue(store.session.isAudioEnabled) + } + + func test_reduce_isAudioEnabled_updatedStateHasIsActiveCorrectlySet() throws { + 
store.session.isAudioEnabled = false + + let updatedState = try subject.reduce( + state: .initial, + action: .audioSession(.isAudioEnabled(true)), + file: #file, + function: #function, + line: #line + ) + + XCTAssertTrue(updatedState.isAudioEnabled) + } + + // MARK: useManualAudio + + func test_reduce_useManualAudio_sessionWasConfiguredCorrectly() throws { + store.session.useManualAudio = false + + _ = try subject.reduce( + state: .initial, + action: .audioSession(.useManualAudio(true)), + file: #file, + function: #function, + line: #line + ) + + XCTAssertTrue(store.session.useManualAudio) + } + + func test_reduce_useManualAudio_updatedStateHasIsActiveCorrectlySet() throws { + store.session.useManualAudio = false + + let updatedState = try subject.reduce( + state: .initial, + action: .audioSession(.useManualAudio(true)), + file: #file, + function: #function, + line: #line + ) + + XCTAssertTrue(updatedState.useManualAudio) + } + + // MARK: - setCategory + + func test_reduce_setCategory_sessionWasConfiguredCorrectly() throws { + _ = try subject.reduce( + state: .initial, + action: .audioSession( + .setCategory( + .playAndRecord, + mode: .videoChat, + options: [ + .allowBluetooth, + .mixWithOthers + ] + ) + ), + file: #file, + function: #function, + line: #line + ) + + XCTAssertEqual(store.session.timesCalled(.setConfiguration), 1) + let input = try XCTUnwrap( + store.session.recordedInputPayload( + RTCAudioSessionConfiguration.self, + for: .setConfiguration + )?.first + ) + XCTAssertEqual(input.category, AVAudioSession.Category.playAndRecord.rawValue) + XCTAssertEqual(input.mode, AVAudioSession.Mode.videoChat.rawValue) + XCTAssertEqual(input.categoryOptions, [.allowBluetooth, .mixWithOthers]) + } + + func test_reduce_setCategory_updatedStateHasIsActiveCorrectlySet() throws { + var state = RTCAudioStore.State.initial + state.category = .ambient + state.mode = .default + state.options = [] + + let updatedState = try subject.reduce( + state: .initial, + action: 
.audioSession( + .setCategory( + .playAndRecord, + mode: .videoChat, + options: [ + .allowBluetooth, + .mixWithOthers + ] + ) + ), + file: #file, + function: #function, + line: #line + ) + + XCTAssertEqual(updatedState.category, .playAndRecord) + XCTAssertEqual(updatedState.mode, .videoChat) + XCTAssertEqual(updatedState.options, [.allowBluetooth, .mixWithOthers]) + } + + // MARK: - setOverrideOutputPort + + func test_reduce_setOverrideOutputPort_sessionWasConfiguredCorrectly() throws { + _ = try subject.reduce( + state: .initial, + action: .audioSession(.setOverrideOutputPort(.speaker)), + file: #file, + function: #function, + line: #line + ) + + XCTAssertEqual(store.session.timesCalled(.overrideOutputAudioPort), 1) + } + + func test_reduce_setOverrideOutputPort_updatedStateHasIsActiveCorrectlySet() throws { + var state = RTCAudioStore.State.initial + state.overrideOutputAudioPort = .none + + let updatedState = try subject.reduce( + state: .initial, + action: .audioSession(.setOverrideOutputPort(.speaker)), + file: #file, + function: #function, + line: #line + ) + + XCTAssertEqual(updatedState.overrideOutputAudioPort, .speaker) + } + + // MARK: - setHasRecordingPermission + + func test_reduce_setHasRecordingPermission_updatedStateWasCorrectlySet() throws { + var state = RTCAudioStore.State.initial + state.hasRecordingPermission = false + + let updatedState = try subject.reduce( + state: state, + action: .audioSession(.setHasRecordingPermission(true)), + file: #file, + function: #function, + line: #line + ) + + XCTAssertTrue(updatedState.hasRecordingPermission) + } + + // MARK: - setAVAudioSessionActive + + func test_reduce_setAVAudioSessionActive_isActiveIsTrue_activatesAVSessionIsAudioEnabledIsTrueSetActiveWasCalled() throws { + var state = RTCAudioStore.State.initial + state.isAudioEnabled = false + state.isActive = false + + let updatedState = try subject.reduce( + state: state, + action: .audioSession(.setAVAudioSessionActive(true)), + file: #file, + function: 
#function, + line: #line + ) + + XCTAssertEqual((store.session.avSession as? MockAVAudioSession)?.timesCalled(.setIsActive), 1) + XCTAssertTrue(updatedState.isAudioEnabled) + XCTAssertTrue(updatedState.isActive) + } + + func test_reduce_setAVAudioSessionActive_isActiveIsFalse_deactivatesAVSessionIsAudioEnabledIsFalseSetActiveWasCalled() throws { + var state = RTCAudioStore.State.initial + state.isAudioEnabled = true + state.isActive = true + + let updatedState = try subject.reduce( + state: state, + action: .audioSession(.setAVAudioSessionActive(false)), + file: #file, + function: #function, + line: #line + ) + + XCTAssertEqual((store.session.avSession as? MockAVAudioSession)?.timesCalled(.setIsActive), 1) + XCTAssertFalse(updatedState.isAudioEnabled) + XCTAssertFalse(updatedState.isActive) + } +} diff --git a/StreamVideoTests/Utils/AudioSession/StreamAudioSession_Tests.swift b/StreamVideoTests/Utils/AudioSession/StreamAudioSession_Tests.swift deleted file mode 100644 index f6da92c8c..000000000 --- a/StreamVideoTests/Utils/AudioSession/StreamAudioSession_Tests.swift +++ /dev/null @@ -1,329 +0,0 @@ -// -// Copyright © 2025 Stream.io Inc. All rights reserved. -// - -import AVFoundation -import Combine -@testable import StreamVideo -import StreamWebRTC -import XCTest - -final class StreamAudioSession_Tests: XCTestCase, @unchecked Sendable { - - private lazy var disposableBag: DisposableBag! = .init() - private lazy var peerConnectionFactory: PeerConnectionFactory! = .mock() - private lazy var mockAudioSession: MockAudioSession! = .init() - private lazy var mockPolicy: MockAudioSessionPolicy! = .init() - private lazy var subject: StreamAudioSession! 
= .init( - policy: mockPolicy, - audioSession: mockAudioSession, - audioDeviceModule: peerConnectionFactory.audioDeviceModule - ) - - override func tearDown() { - subject.dismantle() - subject = nil - disposableBag.removeAll() - mockAudioSession = nil - peerConnectionFactory = nil - mockPolicy = nil - super.tearDown() - } - - // MARK: - init - - func test_init_configuresManualAudioAndEnablesAudioByDefault() throws { - _ = subject - - XCTAssertTrue(mockAudioSession.useManualAudio) - XCTAssertTrue(mockAudioSession.isAudioEnabled) - } - - func test_init_currentValueWasSet() { - _ = subject - - XCTAssertTrue(StreamAudioSession.currentValue === subject) - } - - // MARK: - didUpdateOwnCapabilities(_:) - - func test_didUpdateOwnCapabilities_policyWasCalled() async throws { - let ownCapabilities: Set = [.createCall] - try await assertConfigurationWasCalledOnPolicy({ - try await self.subject.didUpdateOwnCapabilities(ownCapabilities) - }, expectedInput: [(subject.activeCallSettings, ownCapabilities)]) - } - - func test_didUpdateOwnCapabilities_withoutAnyChanges_policyWasCalledTwice() async throws { - let ownCapabilities: Set = [.createCall] - try await assertConfigurationWasCalledOnPolicy({ - try await self.subject.didUpdateOwnCapabilities(ownCapabilities) - try await self.subject.didUpdateOwnCapabilities(ownCapabilities) - }, expectedInput: [ - (subject.activeCallSettings, ownCapabilities), - (subject.activeCallSettings, ownCapabilities) - ]) - } - - // MARK: - didUpdateCallSettings(_:) - - func test_didUpdateCallSettings_policyWasCalled() async throws { - let callSettings = CallSettings(speakerOn: false) - try await assertConfigurationWasCalledOnPolicy({ - try await self.subject.didUpdateCallSettings(callSettings) - }, expectedInput: [(callSettings, [])]) - } - - func test_didUpdateCallSettings_withoutAnyChanges_policyWasCalledTwice() async throws { - let callSettings = CallSettings(speakerOn: false) - try await assertConfigurationWasCalledOnPolicy({ - try await 
self.subject.didUpdateCallSettings(callSettings) - try await self.subject.didUpdateCallSettings(callSettings) - }, expectedInput: [ - (callSettings, []), - (callSettings, []) - ]) - } - - func test_didUpdateCallSettings_policyReturnsNoOverrideOutputPortWithCategoryPlayAndRecord_overrideOutputAudioPortWasCalledWithNone( - ) async throws { - mockPolicy.stub( - for: .configuration, - with: AudioSessionConfiguration( - category: .ambient, - mode: .default, - options: [.allowAirPlay] - ) - ) - mockAudioSession.category = .playAndRecord - - try await subject.didUpdateCallSettings(.init(audioOn: false)) - - XCTAssertEqual(mockAudioSession.timesCalled(.setCategory), 1) - let payload = try XCTUnwrap( - mockAudioSession.recordedInputPayload( - (AVAudioSession.Category, AVAudioSession.Mode, AVAudioSession.CategoryOptions).self, - for: .setCategory - )?.first - ) - XCTAssertEqual(payload.0, .ambient) - XCTAssertEqual(payload.1, .default) - XCTAssertEqual(payload.2, [.allowAirPlay]) - } - - func test_didUpdateCallSettings_policyReturnsConfiguration_audioSessionWasCalledWithExpectedConfiguration() async throws { - mockPolicy.stub( - for: .configuration, - with: AudioSessionConfiguration( - category: .ambient, - mode: .default, - options: [] - ) - ) - mockAudioSession.category = .playAndRecord - - try await subject.didUpdateCallSettings(.init(audioOn: false)) - - XCTAssertEqual(mockAudioSession.timesCalled(.overrideOutputAudioPort), 1) - let payload = try XCTUnwrap( - mockAudioSession - .recordedInputPayload(AVAudioSession.PortOverride.self, for: .overrideOutputAudioPort)?.first - ) - XCTAssertEqual(payload, .none) - } - - func test_didUpdateCallSettings_policyReturnsConfigurationWithOverrideOutputAudioPort_audioSessionWasCalledWithExpectedOverrideOutputAudioPort( - ) async throws { - mockPolicy.stub( - for: .configuration, - with: AudioSessionConfiguration( - category: .playAndRecord, - mode: .default, - options: [], - overrideOutputAudioPort: .speaker - ) - ) - - try await 
subject.didUpdateCallSettings(.init(audioOn: false)) - - XCTAssertEqual(mockAudioSession.timesCalled(.overrideOutputAudioPort), 1) - let payload = try XCTUnwrap( - mockAudioSession - .recordedInputPayload(AVAudioSession.PortOverride.self, for: .overrideOutputAudioPort)?.first - ) - XCTAssertEqual(payload, .speaker) - } - - func test_didUpdateCallSettings_policyReturnsSameConfigurationAsPreviously_audioSessionWasNotCalled() async throws { - mockPolicy.stub( - for: .configuration, - with: AudioSessionConfiguration( - category: .playAndRecord, - mode: .default, - options: [], - overrideOutputAudioPort: .speaker - ) - ) - - try await subject.didUpdateCallSettings(.init(audioOn: false)) - try await subject.didUpdateCallSettings(.init(audioOn: false)) - - XCTAssertEqual(mockAudioSession.timesCalled(.setCategory), 1) - } - - // MARK: - didUpdatePolicy(_:) - - func test_didUpdatePolicy_policyWasCalled() async throws { - try await assertConfigurationWasCalledOnPolicy({ - try await self.subject.didUpdatePolicy(self.mockPolicy) - }, expectedInput: [(subject.activeCallSettings, subject.ownCapabilities)]) - } - - func test_didUpdatePolicy_withoutAnyChanges_policyWasCalledTwice() async throws { - try await assertConfigurationWasCalledOnPolicy({ - try await self.subject.didUpdatePolicy(self.mockPolicy) - try await self.subject.didUpdatePolicy(self.mockPolicy) - }, expectedInput: [ - (subject.activeCallSettings, subject.ownCapabilities), - (subject.activeCallSettings, subject.ownCapabilities) - ]) - } - - // MARK: - prepareForRecording - - func test_prepareForRecording_whenAudioOff_setsAudioOn_andCallsSetCategory() async throws { - subject = .init( - callSettings: .init(audioOn: false), - policy: mockPolicy, - audioSession: mockAudioSession, - audioDeviceModule: peerConnectionFactory.audioDeviceModule - ) - - try await assertConfigurationWasCalledOnPolicy({ - try await self.subject.prepareForRecording() - }, expectedInput: [ - (.init(audioOn: true), subject.ownCapabilities) - ]) - 
XCTAssertTrue(subject.activeCallSettings.audioOn) - } - - func test_prepareForRecording_whenAudioAlreadyOn_doesNotCallSetCategory() async throws { - subject = .init( - callSettings: .init(audioOn: true), - policy: mockPolicy, - audioSession: mockAudioSession, - audioDeviceModule: peerConnectionFactory.audioDeviceModule - ) - - try await subject.prepareForRecording() - - XCTAssertTrue(subject.activeCallSettings.audioOn) - XCTAssertEqual(mockPolicy.timesCalled(.configuration), 0) - } - - // MARK: - requestRecordPermission - - func test_requestRecordPermission_whenNotRecording_callsMockAudioSession() async { - _ = await subject.requestRecordPermission() - - XCTAssertEqual(mockAudioSession.timesCalled(.requestRecordPermission), 1) - } - - func test_requestRecordPermission_whenIsRecording_doesNotCallSession() async { - subject.isRecording = true - _ = await subject.requestRecordPermission() - - XCTAssertEqual(mockAudioSession.timesCalled(.requestRecordPermission), 0) - } - - // MARK: - dismantle - - func test_dismantle_resetsGlobalCurrentValue() { - subject.dismantle() - - XCTAssertNil(StreamAudioSession.currentValue) - } - - // MARK: - callKitActivated - - func test_callKitActivated_configurationWasCalledOnPolicy() async throws { - let mockPolicy = MockAudioSessionPolicy() - try await subject.didUpdatePolicy(mockPolicy) - let audioSession = MockAVAudioSession() - - try subject.callKitActivated(audioSession) - - // The expected value is 2 as the audioSession will call it once - // when we first update the policy. 
- XCTAssertEqual(mockPolicy.timesCalled(.configuration), 2) - } - - func test_callKitActivated_providedAudioSessionSetCategoryWasCalledCorrectly() async throws { - let mockPolicy = MockAudioSessionPolicy() - mockPolicy.stub( - for: .configuration, - with: AudioSessionConfiguration( - category: .playAndRecord, - mode: .voiceChat, - options: .mixWithOthers, - overrideOutputAudioPort: .speaker - ) - ) - try await subject.didUpdatePolicy(mockPolicy) - let audioSession = MockAVAudioSession() - - try subject.callKitActivated(audioSession) - - let request = try XCTUnwrap( - audioSession.recordedInputPayload( - ( - AVAudioSession.Category, - AVAudioSession.Mode, - AVAudioSession.CategoryOptions - ).self, - for: .setCategory - )?.first - ) - XCTAssertEqual(request.0, .playAndRecord) - XCTAssertEqual(request.1, .voiceChat) - XCTAssertTrue(request.2.contains(.mixWithOthers)) - } - - func test_callKitActivated_providedAudioSessionSetOverridePortWasCalledCorrectly() async throws { - let mockPolicy = MockAudioSessionPolicy() - mockPolicy.stub( - for: .configuration, - with: AudioSessionConfiguration( - category: .playAndRecord, - mode: .voiceChat, - options: .mixWithOthers, - overrideOutputAudioPort: .speaker - ) - ) - try await subject.didUpdatePolicy(mockPolicy) - let audioSession = MockAVAudioSession() - - try subject.callKitActivated(audioSession) - - let request = try XCTUnwrap( - audioSession.recordedInputPayload( - AVAudioSession.PortOverride.self, - for: .setOverrideOutputAudioPort - )?.first - ) - XCTAssertEqual(request, .speaker) - } - - // MARK: - Private Helpers - - private func assertConfigurationWasCalledOnPolicy( - _ trigger: @escaping () async throws -> Void, - expectedInput: @autoclosure () -> [(CallSettings, Set)] - ) async throws { - try await trigger() - XCTAssertEqual(mockPolicy.timesCalled(.configuration), expectedInput().endIndex) - let payloads = try XCTUnwrap(mockPolicy.recordedInputPayload((CallSettings, Set).self, for: .configuration)) - 
XCTAssertEqual(payloads.map(\.0), expectedInput().map(\.0)) - XCTAssertEqual(payloads.map(\.1), expectedInput().map(\.1)) - } -} diff --git a/StreamVideoTests/Utils/AudioSession/StreamRTCAudioSession_Tests.swift b/StreamVideoTests/Utils/AudioSession/StreamRTCAudioSession_Tests.swift deleted file mode 100644 index d4eed1240..000000000 --- a/StreamVideoTests/Utils/AudioSession/StreamRTCAudioSession_Tests.swift +++ /dev/null @@ -1,170 +0,0 @@ -// -// Copyright © 2025 Stream.io Inc. All rights reserved. -// - -@testable import StreamVideo -import StreamWebRTC -import XCTest - -import AVFoundation -import Combine -@testable import StreamVideo -import StreamWebRTC -import XCTest - -final class StreamRTCAudioSessionTests: XCTestCase, @unchecked Sendable { - - private lazy var rtcAudioSession: RTCAudioSession! = .sharedInstance() - private lazy var subject: StreamRTCAudioSession! = .init() - private var cancellables: Set! = [] - - override func tearDown() async throws { - cancellables = nil - subject = nil - rtcAudioSession = nil - try await super.tearDown() - } - - // MARK: - Initialization - - func test_init_setsInitialState() { - XCTAssertEqual(subject.state.category.rawValue, rtcAudioSession.category) - XCTAssertEqual(subject.state.mode.rawValue, rtcAudioSession.mode) - XCTAssertEqual(subject.state.options, rtcAudioSession.categoryOptions) - XCTAssertEqual(subject.state.overrideOutputPort, .none) - } - - // MARK: - setCategory - - func test_setCategory_whenNoChangesNeeded_thenDoesNotUpdateState() async throws { - let initialState = subject.state - - try await subject.setCategory( - initialState.category, - mode: initialState.mode, - with: initialState.options - ) - - XCTAssertEqual(subject.state, initialState) - } - - func test_setCategory_whenCategoryChanges_thenUpdatesState() async throws { - let newCategory: AVAudioSession.Category = .playback - let initialState = subject.state - - try await subject.setCategory( - newCategory, - mode: initialState.mode, - with: 
initialState.options - ) - - XCTAssertEqual(subject.state.category, newCategory) - XCTAssertEqual(subject.state.mode, initialState.mode) - XCTAssertEqual(subject.state.options, initialState.options) - } - - func test_setCategory_whenModeChanges_thenUpdatesState() async throws { - let newMode: AVAudioSession.Mode = .videoChat - let initialState = subject.state - - try await subject.setCategory( - initialState.category, - mode: newMode, - with: initialState.options - ) - - XCTAssertEqual(subject.state.category, initialState.category) - XCTAssertEqual(subject.state.mode, newMode) - XCTAssertEqual(subject.state.options, initialState.options) - } - - func test_setCategory_whenOptionsChange_thenUpdatesState() async throws { - let newOptions: AVAudioSession.CategoryOptions = .mixWithOthers - let initialState = subject.state - - try await subject.setCategory( - initialState.category, - mode: initialState.mode, - with: newOptions - ) - - XCTAssertEqual(subject.state.category, initialState.category) - XCTAssertEqual(subject.state.mode, initialState.mode) - XCTAssertEqual(subject.state.options, newOptions) - } - - func test_setCategory_thenUpdatesWebRTCConfiguration() async throws { - let newOptions: AVAudioSession.CategoryOptions = .mixWithOthers - - try await subject.setCategory( - .soloAmbient, - mode: .default, - with: newOptions - ) - - let webRTCConfiguration = RTCAudioSessionConfiguration.webRTC() - XCTAssertEqual(subject.state.category.rawValue, webRTCConfiguration.category) - XCTAssertEqual(subject.state.mode.rawValue, webRTCConfiguration.mode) - XCTAssertEqual(subject.state.options, webRTCConfiguration.categoryOptions) - } - - // MARK: - overrideOutputAudioPort - - func test_overrideOutputAudioPort_whenCategoryIsNotPlayAndRecord_thenDoesNotUpdateState() async throws { - try await subject.setCategory(.playback, mode: .default, with: []) - let initialState = subject.state - - try await subject.overrideOutputAudioPort(.speaker) - - XCTAssertEqual(subject.state, 
initialState) - } - - func test_overrideOutputAudioPort_whenPortIsSameAsCurrent_thenDoesNotUpdateState() async throws { - try await subject.setCategory(.playAndRecord, mode: .default, with: []) - try await subject.overrideOutputAudioPort(.speaker) - let initialState = subject.state - - try await subject.overrideOutputAudioPort(.speaker) - - XCTAssertEqual(subject.state, initialState) - } - - func test_overrideOutputAudioPort_whenValidChange_thenUpdatesState() async throws { - try await subject.setCategory(.playAndRecord, mode: .default, with: []) - - try await subject.overrideOutputAudioPort(.speaker) - - XCTAssertEqual(subject.state.overrideOutputPort, .speaker) - } - - // MARK: - Properties - - func test_isActive_returnsSourceValue() { - XCTAssertEqual(subject.isActive, rtcAudioSession.isActive) - } - - func test_currentRoute_returnsSourceValue() { - XCTAssertEqual(subject.currentRoute.inputs.map(\.portType), rtcAudioSession.currentRoute.inputs.map(\.portType)) - XCTAssertEqual(subject.currentRoute.outputs.map(\.portType), rtcAudioSession.currentRoute.outputs.map(\.portType)) - } - - func test_category_returnsStateCategory() { - XCTAssertEqual(subject.category, subject.state.category) - } - - func test_useManualAudio_whenSet_updatesSourceValue() { - subject.useManualAudio = true - XCTAssertTrue(rtcAudioSession.useManualAudio) - - subject.useManualAudio = false - XCTAssertFalse(rtcAudioSession.useManualAudio) - } - - func test_isAudioEnabled_whenSet_updatesSourceValue() { - subject.isAudioEnabled = true - XCTAssertTrue(rtcAudioSession.isAudioEnabled) - - subject.isAudioEnabled = false - XCTAssertFalse(rtcAudioSession.isAudioEnabled) - } -} diff --git a/StreamVideoTests/Utils/Proximity/Policies/SpeakerProximityPolicy_Tests.swift b/StreamVideoTests/Utils/Proximity/Policies/SpeakerProximityPolicy_Tests.swift index 2b3d7f358..1137419b7 100644 --- a/StreamVideoTests/Utils/Proximity/Policies/SpeakerProximityPolicy_Tests.swift +++ 
b/StreamVideoTests/Utils/Proximity/Policies/SpeakerProximityPolicy_Tests.swift @@ -10,7 +10,6 @@ import XCTest @MainActor final class SpeakerProximityPolicy_Tests: XCTestCase, @unchecked Sendable { - private lazy var mockAudioSession: MockAudioSession! = .init() private lazy var mockCall: MockCall! = .init(.dummy()) private lazy var peerConnectionFactory: PeerConnectionFactory! = .mock() private lazy var subject: SpeakerProximityPolicy! = .init() @@ -19,17 +18,12 @@ final class SpeakerProximityPolicy_Tests: XCTestCase, @unchecked Sendable { try await super.setUp() _ = mockCall await wait(for: 0.25) - StreamAudioSession.currentValue = .init( - audioSession: mockAudioSession, - audioDeviceModule: peerConnectionFactory.audioDeviceModule - ) } override func tearDown() async throws { subject = nil mockCall = nil peerConnectionFactory = nil - mockAudioSession = nil try await super.tearDown() } diff --git a/StreamVideoTests/Utils/StreamCallAudioRecorderTests.swift b/StreamVideoTests/Utils/StreamCallAudioRecorderTests.swift index 5767a0195..deb843b15 100644 --- a/StreamVideoTests/Utils/StreamCallAudioRecorderTests.swift +++ b/StreamVideoTests/Utils/StreamCallAudioRecorderTests.swift @@ -11,11 +11,7 @@ final class StreamAudioRecorderTests: XCTestCase, @unchecked Sendable { private lazy var peerConnectionFactory: PeerConnectionFactory! = .mock() private lazy var builder: AVAudioRecorderBuilder! = .init(cachedResult: mockAudioRecorder) - private lazy var mockAudioSession: MockAudioSession! = .init() - private lazy var audioSession: StreamAudioSession! = .init( - audioSession: mockAudioSession, - audioDeviceModule: peerConnectionFactory.audioDeviceModule - ) + private lazy var mockAudioStore: MockRTCAudioStore! = .init() private lazy var mockActiveCallProvider: MockStreamActiveCallProvider! = .init() private var mockAudioRecorder: MockAudioRecorder! private lazy var subject: StreamCallAudioRecorder! 
= .init(audioRecorderBuilder: builder) @@ -23,7 +19,7 @@ final class StreamAudioRecorderTests: XCTestCase, @unchecked Sendable { override func setUp() async throws { try await super.setUp() StreamActiveCallProviderKey.currentValue = mockActiveCallProvider - _ = audioSession + _ = mockAudioStore mockAudioRecorder = try .init( url: URL(string: "test.wav")!, settings: AVAudioRecorderBuilder.defaultRecordingSettings @@ -32,7 +28,7 @@ final class StreamAudioRecorderTests: XCTestCase, @unchecked Sendable { override func tearDown() { builder = nil - mockAudioSession = nil + mockAudioStore = nil mockActiveCallProvider = nil mockAudioRecorder = nil peerConnectionFactory = nil @@ -72,7 +68,8 @@ final class StreamAudioRecorderTests: XCTestCase, @unchecked Sendable { // MARK: - startRecording func testStartRecording_givenPermissionNotGranted_whenStarted_thenRecordsAndMetersAreNotUpdated() async throws { - mockAudioSession.stub(for: .requestRecordPermission, with: false) + mockAudioStore.makeShared() + mockAudioStore.session.stub(for: .requestRecordPermission, with: false) await setUpHasActiveCall(true) await subject.startRecording() @@ -81,7 +78,8 @@ final class StreamAudioRecorderTests: XCTestCase, @unchecked Sendable { } func testStartRecording_givenPermissionGranted_whenStarted_thenRecordsAndMetersUpdates() async throws { - mockAudioSession.stub(for: .requestRecordPermission, with: true) + mockAudioStore.makeShared() + mockAudioStore.session.stub(for: .requestRecordPermission, with: true) await setUpHasActiveCall(true) await subject.startRecording() @@ -90,7 +88,8 @@ final class StreamAudioRecorderTests: XCTestCase, @unchecked Sendable { } func testStartRecording_givenPermissionGrantedButNoActiveCall_whenStarted_thenRecordsAndMetersWontStart() async throws { - mockAudioSession.stub(for: .requestRecordPermission, with: true) + mockAudioStore.makeShared() + mockAudioStore.session.stub(for: .requestRecordPermission, with: true) await subject.startRecording() @@ -99,7 +98,8 @@ 
final class StreamAudioRecorderTests: XCTestCase, @unchecked Sendable { func testStartRecording_givenPermissionGrantedButNoActiveCall_whenIgnoreActiveCallAndStarted_thenRecordsAndMetersUpdates( ) async throws { - mockAudioSession.stub(for: .requestRecordPermission, with: true) + mockAudioStore.makeShared() + mockAudioStore.session.stub(for: .requestRecordPermission, with: true) await subject.startRecording(ignoreActiveCall: true) @@ -109,7 +109,8 @@ final class StreamAudioRecorderTests: XCTestCase, @unchecked Sendable { // MARK: - stopRecording func testStopRecording_givenRecording_whenStopped_thenStopsRecording() async throws { - mockAudioSession.stub(for: .requestRecordPermission, with: true) + mockAudioStore.makeShared() + mockAudioStore.session.stub(for: .requestRecordPermission, with: true) await setUpHasActiveCall(true) await subject.startRecording() @@ -121,7 +122,8 @@ final class StreamAudioRecorderTests: XCTestCase, @unchecked Sendable { // MARK: - activeCall ended func test_activeCallEnded_givenAnActiveCallAndRecordingTrue_whenActiveCallEnds_thenStopsRecording() async throws { - mockAudioSession.stub(for: .requestRecordPermission, with: true) + mockAudioStore.makeShared() + mockAudioStore.session.stub(for: .requestRecordPermission, with: true) await setUpHasActiveCall(true) await subject.startRecording() @@ -134,7 +136,8 @@ final class StreamAudioRecorderTests: XCTestCase, @unchecked Sendable { func test_activeCallEnded_givenAnActiveCallAndRecordingTrue_whenActiveCallEndsAndAnotherOneStarts_thenStartsRecording( ) async throws { - mockAudioSession.stub(for: .requestRecordPermission, with: true) + mockAudioStore.makeShared() + mockAudioStore.session.stub(for: .requestRecordPermission, with: true) await setUpHasActiveCall(true) await subject.startRecording() await setUpHasActiveCall(false) @@ -154,7 +157,7 @@ final class StreamAudioRecorderTests: XCTestCase, @unchecked Sendable { file: StaticString = #file, line: UInt = #line ) async throws { - let 
audioRecorder = try await XCTAsyncUnwrap(await builder.result) + let audioRecorder = try XCTUnwrap(builder.result) XCTAssertEqual( audioRecorder.isRecording, isRecording, diff --git a/StreamVideoTests/WebRTC/v2/IntegrationTests/WebRTCIntegrationTests.swift b/StreamVideoTests/WebRTC/v2/IntegrationTests/WebRTCIntegrationTests.swift index c6e70810b..df0183113 100644 --- a/StreamVideoTests/WebRTC/v2/IntegrationTests/WebRTCIntegrationTests.swift +++ b/StreamVideoTests/WebRTC/v2/IntegrationTests/WebRTCIntegrationTests.swift @@ -58,7 +58,8 @@ final class WebRTCIntegrationTests: XCTestCase, @unchecked Sendable { callSettings: nil, options: nil, ring: false, - notify: false + notify: false, + source: .inApp ) }, .init { diff --git a/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/AudioMediaAdapter_Tests.swift b/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/AudioMediaAdapter_Tests.swift index 6253578de..bddcdca0c 100644 --- a/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/AudioMediaAdapter_Tests.swift +++ b/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/AudioMediaAdapter_Tests.swift @@ -14,9 +14,6 @@ final class AudioMediaAdapter_Tests: XCTestCase, @unchecked Sendable { private lazy var mockPeerConnection: MockRTCPeerConnection! = .init() private lazy var spySubject: PassthroughSubject! = .init() private lazy var mockMediaAdapter: MockLocalMediaAdapter! = .init() - private lazy var audioSession: StreamAudioSession! = .init( - audioDeviceModule: peerConnectionFactory.audioDeviceModule - ) private lazy var subject: AudioMediaAdapter! 
= .init( sessionID: sessionId, peerConnection: mockPeerConnection, @@ -27,7 +24,6 @@ final class AudioMediaAdapter_Tests: XCTestCase, @unchecked Sendable { override func tearDown() { subject = nil - audioSession = nil spySubject = nil mockPeerConnection = nil peerConnectionFactory = nil diff --git a/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalAudioMediaAdapter_Tests.swift b/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalAudioMediaAdapter_Tests.swift index 6d374430f..f4b553511 100644 --- a/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalAudioMediaAdapter_Tests.swift +++ b/StreamVideoTests/WebRTC/v2/PeerConnection/MediaAdapters/LocalMediaAdapters/LocalAudioMediaAdapter_Tests.swift @@ -17,11 +17,6 @@ final class LocalAudioMediaAdapter_Tests: XCTestCase, @unchecked Sendable { private lazy var peerConnectionFactory: PeerConnectionFactory! = .mock() private lazy var mockPeerConnection: MockRTCPeerConnection! = .init() private lazy var mockSFUStack: MockSFUStack! = .init() - private lazy var audioSession: MockAudioSession! = .init() - private lazy var audioSessionAdapter: StreamAudioSession! = .init( - audioSession: audioSession, - audioDeviceModule: peerConnectionFactory.audioDeviceModule - ) private lazy var spySubject: PassthroughSubject! = .init() private lazy var subject: LocalAudioMediaAdapter! 
= .init( sessionID: sessionId, @@ -37,8 +32,6 @@ final class LocalAudioMediaAdapter_Tests: XCTestCase, @unchecked Sendable { override func tearDown() { subject = nil spySubject = nil - audioSession = nil - audioSessionAdapter = nil mockSFUStack = nil mockPeerConnection = nil peerConnectionFactory = nil diff --git a/StreamVideoTests/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator_Tests.swift b/StreamVideoTests/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator_Tests.swift index 19676948d..26325dd7f 100644 --- a/StreamVideoTests/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator_Tests.swift +++ b/StreamVideoTests/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator_Tests.swift @@ -14,9 +14,6 @@ final class RTCPeerConnectionCoordinator_Tests: XCTestCase, @unchecked Sendable private lazy var mockPeerConnection: MockRTCPeerConnection! = .init() private lazy var peerConnectionFactory: PeerConnectionFactory! = .mock() private lazy var mockSFUStack: MockSFUStack! = .init() - private lazy var audioSession: StreamAudioSession! = .init( - audioDeviceModule: peerConnectionFactory.audioDeviceModule - ) private lazy var spySubject: PassthroughSubject! = .init() private lazy var mockLocalMediaAdapterA: MockLocalMediaAdapter! = .init() private lazy var mockLocalMediaAdapterB: MockLocalMediaAdapter! 
= .init() @@ -79,7 +76,6 @@ final class RTCPeerConnectionCoordinator_Tests: XCTestCase, @unchecked Sendable mockLocalMediaAdapterB = nil mockLocalMediaAdapterC = nil spySubject = nil - audioSession = nil mockSFUStack = nil peerConnectionFactory = nil mockPeerConnection = nil diff --git a/StreamVideoTests/WebRTC/v2/StateMachine/Stages/WebRTCCoordinatorStateMachine_JoinedStageTests.swift b/StreamVideoTests/WebRTC/v2/StateMachine/Stages/WebRTCCoordinatorStateMachine_JoinedStageTests.swift index 57b202eb4..11a242474 100644 --- a/StreamVideoTests/WebRTC/v2/StateMachine/Stages/WebRTCCoordinatorStateMachine_JoinedStageTests.swift +++ b/StreamVideoTests/WebRTC/v2/StateMachine/Stages/WebRTCCoordinatorStateMachine_JoinedStageTests.swift @@ -653,32 +653,6 @@ final class WebRTCCoordinatorStateMachine_JoinedStageTests: XCTestCase, @uncheck ) { _ in } } - func test_transition_callSettingsUpdated_statsAdapterUpdated() async throws { - try XCTSkipIf( - ProcessInfo().operatingSystemVersion.majorVersion == 15, - "https://linear.app/stream/issue/IOS-923" - ) - - let statsAdapter = MockWebRTCStatsAdapter() - await mockCoordinatorStack.coordinator.stateAdapter.set( - statsAdapter: statsAdapter - ) - - let updateCallSettings = CallSettings(audioOn: true, videoOn: true) - - await assertResultAfterTrigger( - trigger: { [mockCoordinatorStack] in - await mockCoordinatorStack? - .coordinator - .stateAdapter - .set(callSettings: updateCallSettings) - } - ) { expectation in - XCTAssertEqual(statsAdapter.callSettings, updateCallSettings) - expectation.fulfill() - } - } - func test_transition_callSettingsUpdated_publisherUpdated() async throws { await mockCoordinatorStack.coordinator.stateAdapter.set( sfuAdapter: mockCoordinatorStack.sfuStack.adapter @@ -690,7 +664,6 @@ final class WebRTCCoordinatorStateMachine_JoinedStageTests: XCTestCase, @uncheck let publisher = await mockCoordinatorStack?.coordinator.stateAdapter.publisher let mockPublisher = try XCTUnwrap(publisher as? 
MockRTCPeerConnectionCoordinator) let updateCallSettings = CallSettings(audioOn: true, videoOn: true) - let audioSession = await mockCoordinatorStack.coordinator.stateAdapter.audioSession await assertResultAfterTrigger( trigger: { [mockCoordinatorStack] in @@ -699,9 +672,8 @@ final class WebRTCCoordinatorStateMachine_JoinedStageTests: XCTestCase, @uncheck .stateAdapter .set(callSettings: updateCallSettings) } - ) { [mockPublisher, audioSession] expectation in + ) { [mockPublisher] expectation in XCTAssertEqual(mockPublisher.timesCalled(.didUpdateCallSettings), 1) - XCTAssertEqual(audioSession.activeCallSettings, updateCallSettings) expectation.fulfill() } } diff --git a/StreamVideoTests/WebRTC/v2/StateMachine/Stages/WebRTCCoordinatorStateMachine_JoiningStageTests.swift b/StreamVideoTests/WebRTC/v2/StateMachine/Stages/WebRTCCoordinatorStateMachine_JoiningStageTests.swift index 5ccf17eae..7b59ec2d1 100644 --- a/StreamVideoTests/WebRTC/v2/StateMachine/Stages/WebRTCCoordinatorStateMachine_JoiningStageTests.swift +++ b/StreamVideoTests/WebRTC/v2/StateMachine/Stages/WebRTCCoordinatorStateMachine_JoiningStageTests.swift @@ -312,6 +312,30 @@ final class WebRTCCoordinatorStateMachine_JoiningStageTests: XCTestCase, @unchec cancellable.cancel() } + func test_transition_fromConnected_configuresAudioSession() async throws { + subject.context.coordinator = mockCoordinatorStack.coordinator + subject.context.reconnectAttempts = 11 + await mockCoordinatorStack + .coordinator + .stateAdapter + .set(sfuAdapter: mockCoordinatorStack.sfuStack.adapter) + mockCoordinatorStack.webRTCAuthenticator.stubbedFunction[.waitForConnect] = Result.success(()) + let cancellable = receiveEvent( + .sfuEvent(.joinResponse(Stream_Video_Sfu_Event_JoinResponse())), + every: 0.3 + ) + + try await assertTransition( + from: .connected, + expectedTarget: .joined, + subject: subject + ) { + let audioSession = await $0.context.coordinator?.stateAdapter.audioSession + XCTAssertNotNil(audioSession?.delegate) + } 
+ cancellable.cancel() + } + func test_transition_fromConnectedSFUConnected_updatesParticipants() async throws { subject.context.coordinator = mockCoordinatorStack.coordinator subject.context.reconnectAttempts = 11 @@ -730,6 +754,30 @@ final class WebRTCCoordinatorStateMachine_JoiningStageTests: XCTestCase, @unchec cancellable.cancel() } + func test_transition_fromConnectedWithRejoin_configuresAudioSession() async throws { + subject.context.coordinator = mockCoordinatorStack.coordinator + subject.context.isRejoiningFromSessionID = .unique + await mockCoordinatorStack + .coordinator + .stateAdapter + .set(sfuAdapter: mockCoordinatorStack.sfuStack.adapter) + mockCoordinatorStack.webRTCAuthenticator.stubbedFunction[.waitForConnect] = Result.success(()) + let cancellable = receiveEvent( + .sfuEvent(.joinResponse(Stream_Video_Sfu_Event_JoinResponse())), + every: 0.3 + ) + + try await assertTransition( + from: .connected, + expectedTarget: .joined, + subject: subject + ) { + let audioSession = await $0.context.coordinator?.stateAdapter.audioSession + XCTAssertNotNil(audioSession?.delegate) + } + cancellable.cancel() + } + func test_transition_fromConnectedWithRejoinSFUConnected_updatesParticipantsAndFiltersOutUserWithPreviousSessionId( ) async throws { subject.context.coordinator = mockCoordinatorStack.coordinator @@ -1357,6 +1405,31 @@ final class WebRTCCoordinatorStateMachine_JoiningStageTests: XCTestCase, @unchec cancellable.cancel() } + func test_transition_fromMigrated_configuresAudioSession() async throws { + subject.context.coordinator = mockCoordinatorStack.coordinator + subject.context.reconnectAttempts = 11 + subject.context.migratingFromSFU = "test-sfu" + await mockCoordinatorStack + .coordinator + .stateAdapter + .set(sfuAdapter: mockCoordinatorStack.sfuStack.adapter) + mockCoordinatorStack.webRTCAuthenticator.stubbedFunction[.waitForConnect] = Result.success(()) + let cancellable = receiveEvent( + 
.sfuEvent(.joinResponse(Stream_Video_Sfu_Event_JoinResponse())), + every: 0.3 + ) + + try await assertTransition( + from: .migrated, + expectedTarget: .joined, + subject: subject + ) { + let audioSession = await $0.context.coordinator?.stateAdapter.audioSession + XCTAssertNotNil(audioSession?.delegate) + } + cancellable.cancel() + } + func test_transition_fromMigratedSFUConnected_updatesParticipants() async throws { subject.context.coordinator = mockCoordinatorStack.coordinator subject.context.reconnectAttempts = 11 diff --git a/StreamVideoTests/WebRTC/v2/StateMachine/Stages/WebRTCCoordinatorStateMachine_LeavingStageTests.swift b/StreamVideoTests/WebRTC/v2/StateMachine/Stages/WebRTCCoordinatorStateMachine_LeavingStageTests.swift index 9ca6156dc..a1237d808 100644 --- a/StreamVideoTests/WebRTC/v2/StateMachine/Stages/WebRTCCoordinatorStateMachine_LeavingStageTests.swift +++ b/StreamVideoTests/WebRTC/v2/StateMachine/Stages/WebRTCCoordinatorStateMachine_LeavingStageTests.swift @@ -16,7 +16,7 @@ final class WebRTCCoordinatorStateMachine_LeavingStageTests: XCTestCase, @unchec .allCases .filter { $0 != subject.id } .map { WebRTCCoordinator.StateMachine.Stage(id: $0, context: .init()) } - private lazy var validStages: Set! = [.joined, .disconnected] + private lazy var validStages: Set! = [.joined, .disconnected, .connected, .connecting] private lazy var subject: WebRTCCoordinator.StateMachine.Stage! = .leaving(.init()) private lazy var mockCoordinatorStack: MockWebRTCCoordinatorStack! 
= .init( videoConfig: Self.videoConfig diff --git a/StreamVideoTests/WebRTC/v2/Stats/Models/WebRTCTrace_Tests.swift b/StreamVideoTests/WebRTC/v2/Stats/Models/WebRTCTrace_Tests.swift index f520a1f25..ca1b462bd 100644 --- a/StreamVideoTests/WebRTC/v2/Stats/Models/WebRTCTrace_Tests.swift +++ b/StreamVideoTests/WebRTC/v2/Stats/Models/WebRTCTrace_Tests.swift @@ -52,10 +52,9 @@ final class WebRTCTrace_Tests: XCTestCase, @unchecked Sendable { } func test_init_getUserMedia() { - let peerConnectionFactory = PeerConnectionFactory.mock() - let audio = StreamAudioSession(audioDeviceModule: peerConnectionFactory.audioDeviceModule) + let audio = CallAudioSession() - let trace = WebRTCTrace(audioSession: audio) + let trace = WebRTCTrace(audioSession: audio.traceRepresentation) XCTAssertNil(trace.id) XCTAssertEqual(trace.tag, "navigator.mediaDevices.getUserMediaOnSuccess") diff --git a/StreamVideoTests/WebRTC/v2/Stats/WebRTCStatsAdapter_Tests.swift b/StreamVideoTests/WebRTC/v2/Stats/WebRTCStatsAdapter_Tests.swift index 7426d9613..09a464ea4 100644 --- a/StreamVideoTests/WebRTC/v2/Stats/WebRTCStatsAdapter_Tests.swift +++ b/StreamVideoTests/WebRTC/v2/Stats/WebRTCStatsAdapter_Tests.swift @@ -51,27 +51,6 @@ final class WebRTCStatsAdapter_Tests: XCTestCase, @unchecked Sendable { XCTAssertTrue(called?.contains(trace) ?? 
false) } - func test_setCallSettings_propagatesToTraces() { - let settings = CallSettings( - audioOn: true, - videoOn: false, - speakerOn: false, - audioOutputOn: true, - cameraPosition: .front - ) - subject.callSettings = settings - XCTAssertEqual(mockTraces.callSettings, settings) - } - - func test_setAudioSession_propagatesToTraces() { - let peerConnectionFactory = PeerConnectionFactory.mock() - let session = StreamAudioSession( - audioDeviceModule: peerConnectionFactory.audioDeviceModule - ) - subject.audioSession = session - XCTAssertTrue(mockTraces.audioSession === session) - } - func test_setPublisher_setsOnCollectorAndTraces() throws { let mockPub = try MockRTCPeerConnectionCoordinator( peerType: .publisher, diff --git a/StreamVideoTests/WebRTC/v2/WebRTCCoorindator_Tests.swift b/StreamVideoTests/WebRTC/v2/WebRTCCoorindator_Tests.swift index e84339f04..4e23d2cb4 100644 --- a/StreamVideoTests/WebRTC/v2/WebRTCCoorindator_Tests.swift +++ b/StreamVideoTests/WebRTC/v2/WebRTCCoorindator_Tests.swift @@ -70,7 +70,8 @@ final class WebRTCCoordinator_Tests: XCTestCase, @unchecked Sendable { callSettings: expectedCallSettings, options: expectedOptions, ring: true, - notify: true + notify: true, + source: .callKit ) } ) { stage in @@ -83,6 +84,7 @@ final class WebRTCCoordinator_Tests: XCTestCase, @unchecked Sendable { XCTAssertEqual(expectedStage.options?.team, expectedOptions.team) XCTAssertTrue(expectedStage.ring) XCTAssertTrue(expectedStage.notify) + XCTAssertEqual(expectedStage.context.joinSource, .callKit) await self.assertEqualAsync( await self.subject.stateAdapter.initialCallSettings, expectedCallSettings @@ -146,7 +148,8 @@ final class WebRTCCoordinator_Tests: XCTestCase, @unchecked Sendable { callSettings: nil, options: nil, ring: true, - notify: true + notify: true, + source: .inApp ) } ) { _ in diff --git a/StreamVideoTests/WebRTC/v2/WebRTCStateAdapter_Tests.swift b/StreamVideoTests/WebRTC/v2/WebRTCStateAdapter_Tests.swift index 8ead1c34d..78e50d153 100644 --- 
a/StreamVideoTests/WebRTC/v2/WebRTCStateAdapter_Tests.swift +++ b/StreamVideoTests/WebRTC/v2/WebRTCStateAdapter_Tests.swift @@ -179,7 +179,6 @@ final class WebRTCStateAdapter_Tests: XCTestCase, @unchecked Sendable { await subject.set(statsAdapter: expected) await assertTrueAsync(await subject.statsAdapter === expected) - await assertTrueAsync(await subject.audioSession === expected.audioSession) } // MARK: - setSFUAdapter @@ -454,18 +453,25 @@ final class WebRTCStateAdapter_Tests: XCTestCase, @unchecked Sendable { XCTAssertEqual(mockPublisher.timesCalled(.beginScreenSharing), 0) } - func test_configurePeerConnections_audioSessionWasConfigured() async throws { + // MARK: - configureAudioSession + + func test_configureAudioSession_audioSessionWasConfigured() async throws { let sfuStack = MockSFUStack() sfuStack.setConnectionState(to: .connected(healthCheckInfo: .init())) - await subject.set(sfuAdapter: sfuStack.adapter) + let statsAdapter = WebRTCStatsAdapter( + sessionID: .unique, + unifiedSessionID: .unique, + isTracingEnabled: true, + trackStorage: await subject.trackStorage + ) + await subject.set(statsAdapter: statsAdapter) let ownCapabilities = Set([OwnCapability.blockUsers]) await subject.set(ownCapabilities: ownCapabilities) - try await subject.configurePeerConnections() + try await subject.configureAudioSession(source: .inApp) - await fulfillment { - await self.subject.audioSession.delegate === self.subject - } + await assertTrueAsync(await subject.audioSession.delegate === subject) + await assertTrueAsync(await subject.audioSession.statsAdapter === statsAdapter) } // MARK: - cleanUp @@ -801,7 +807,6 @@ final class WebRTCStateAdapter_Tests: XCTestCase, @unchecked Sendable { ) subject.audioSessionAdapterDidUpdateCallSettings( - await subject.audioSession, callSettings: updatedCallSettings )