diff --git a/.vscode/tasks.json b/.vscode/tasks.json
index ce2eaca57..2af29487e 100644
--- a/.vscode/tasks.json
+++ b/.vscode/tasks.json
@@ -1,23 +1,6 @@
{
"version": "2.0.0",
"tasks": [
- {
- "type": "shell",
- "label": "Fastlane: Build SwiftUI Demo",
- "command": "bundle exec fastlane build_swiftui_demo",
- "group": {
- "kind": "build",
- "isDefault": true
- }
- },
- {
- "type": "shell",
- "label": "Fastlane: Test StreamVideo",
- "command": "bundle exec fastlane test",
- "group": {
- "kind": "test",
- "isDefault": true
- }
- },
+
]
}
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9f659bbe1..083aa0957 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,12 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
### 🔄 Changed
+# [1.37.0](https://github.com/GetStream/stream-video-swift/releases/tag/1.37.0)
+_November 28, 2025_
+
+### ✅ Added
+- A livestream-focused AudioSessionPolicy that supports stereo playout. [#975](https://github.com/GetStream/stream-video-swift/pull/975)
+
# [1.36.0](https://github.com/GetStream/stream-video-swift/releases/tag/1.36.0)
_November 19, 2025_
diff --git a/DemoApp/Sources/Components/AppEnvironment.swift b/DemoApp/Sources/Components/AppEnvironment.swift
index ea66f24eb..61f4dbabf 100644
--- a/DemoApp/Sources/Components/AppEnvironment.swift
+++ b/DemoApp/Sources/Components/AppEnvironment.swift
@@ -554,7 +554,7 @@ extension AppEnvironment {
extension AppEnvironment {
enum AudioSessionPolicyDebugConfiguration: Hashable, Debuggable, Sendable {
- case `default`, ownCapabilities
+ case `default`, ownCapabilities, livestream
var title: String {
switch self {
@@ -562,6 +562,8 @@ extension AppEnvironment {
return "Default"
case .ownCapabilities:
return "OwnCapabilities"
+ case .livestream:
+ return "Livestream"
}
}
@@ -571,6 +573,8 @@ extension AppEnvironment {
return DefaultAudioSessionPolicy()
case .ownCapabilities:
return OwnCapabilitiesAudioSessionPolicy()
+ case .livestream:
+ return LivestreamAudioSessionPolicy()
}
}
}
@@ -616,7 +620,7 @@ extension AppEnvironment {
}
static var proximityPolicies: Set = {
- [.speaker, .video]
+ [.video, .speaker]
}()
}
@@ -634,6 +638,19 @@ extension ClientCapability: Debuggable {
}
}
+extension Logger.WebRTC.LogMode: Debuggable {
+ var title: String {
+ switch self {
+ case .none:
+ return "None"
+ case .validFilesOnly:
+ return "Valid Files only"
+ case .all:
+ return "All"
+ }
+ }
+}
+
extension String: Debuggable {
var title: String {
self
diff --git a/DemoApp/Sources/Components/MemoryLogDestination/LogQueue.swift b/DemoApp/Sources/Components/MemoryLogDestination/LogQueue.swift
index 76e90d693..c9580d6eb 100644
--- a/DemoApp/Sources/Components/MemoryLogDestination/LogQueue.swift
+++ b/DemoApp/Sources/Components/MemoryLogDestination/LogQueue.swift
@@ -6,7 +6,12 @@ import Foundation
import StreamVideo
enum LogQueue {
- static let queue: Queue<LogDetails> = .init(maxCount: 3000)
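+ /// Keep a larger in-memory log buffer in DEBUG builds for troubleshooting; release builds retain fewer entries.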
+ #if DEBUG
+ private static let queueCapacity = 10000
+ #else
+ private static let queueCapacity = 1000
+ #endif
+ static let queue: Queue<LogDetails> = .init(maxCount: queueCapacity)
static func insert(_ element: LogDetails) { queue.insert(element) }
diff --git a/DemoApp/Sources/Views/Login/DebugMenu.swift b/DemoApp/Sources/Views/Login/DebugMenu.swift
index 2954bc722..9028d4e1c 100644
--- a/DemoApp/Sources/Views/Login/DebugMenu.swift
+++ b/DemoApp/Sources/Views/Login/DebugMenu.swift
@@ -231,7 +231,7 @@ struct DebugMenu: View {
}
makeMenu(
- for: [.default, .ownCapabilities],
+ for: [.default, .ownCapabilities, .livestream],
currentValue: audioSessionPolicy,
label: "AudioSession policy"
) { self.audioSessionPolicy = $0 }
@@ -302,10 +302,10 @@ struct DebugMenu: View {
) { LogConfig.level = $0 }
makeMenu(
- for: [true, false],
- currentValue: LogConfig.webRTCLogsEnabled,
+ for: [.none, .validFilesOnly, .all],
+ currentValue: Logger.WebRTC.mode,
label: "WebRTC Logs"
- ) { LogConfig.webRTCLogsEnabled = $0 }
+ ) { Logger.WebRTC.mode = $0 }
Button {
isLogsViewerVisible = true
diff --git a/Package.swift b/Package.swift
index 9e6fc7881..3759a0bc3 100644
--- a/Package.swift
+++ b/Package.swift
@@ -23,7 +23,7 @@ let package = Package(
],
dependencies: [
.package(url: "https://github.com/apple/swift-protobuf.git", exact: "1.30.0"),
- .package(url: "https://github.com/GetStream/stream-video-swift-webrtc.git", exact: "137.0.43")
+ .package(url: "https://github.com/GetStream/stream-video-swift-webrtc.git", exact: "137.0.52")
],
targets: [
.target(
diff --git a/README.md b/README.md
index 88b05e19b..73ab41b5e 100644
--- a/README.md
+++ b/README.md
@@ -9,10 +9,10 @@
-
+
-
+
diff --git a/Sources/StreamVideo/Call.swift b/Sources/StreamVideo/Call.swift
index 74777880c..bdd94f166 100644
--- a/Sources/StreamVideo/Call.swift
+++ b/Sources/StreamVideo/Call.swift
@@ -174,11 +174,11 @@ public class Call: @unchecked Sendable, WSEventsSubscriber {
currentStage.id == .joining {
return stateMachine
.publisher
- .tryCompactMap {
- switch $0.id {
+ .tryMap { (stage) -> JoinCallResponse? in
+ switch stage.id {
case .joined:
guard
- let stage = $0 as? Call.StateMachine.Stage.JoinedStage
+ let stage = stage as? Call.StateMachine.Stage.JoinedStage
else {
throw ClientError()
}
@@ -190,7 +190,7 @@ public class Call: @unchecked Sendable, WSEventsSubscriber {
}
case .error:
guard
- let stage = $0 as? Call.StateMachine.Stage.ErrorStage
+ let stage = stage as? Call.StateMachine.Stage.ErrorStage
else {
throw ClientError()
}
@@ -201,7 +201,7 @@ public class Call: @unchecked Sendable, WSEventsSubscriber {
}
.eraseToAnyPublisher()
} else {
- let deliverySubject = PassthroughSubject<JoinCallResponse, Error>()
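+ // A CurrentValueSubject replays its latest value, so a subscriber that
+ // attaches after the join has already completed still receives the response.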
+ let deliverySubject = CurrentValueSubject<JoinCallResponse?, Error>(nil)
transitionHandler(
.joining(
self,
@@ -224,8 +224,11 @@ public class Call: @unchecked Sendable, WSEventsSubscriber {
if let joinResponse = result as? JoinCallResponse {
return joinResponse
- } else if let publisher = result as? AnyPublisher<JoinCallResponse, Error> {
- return try await publisher.nextValue(timeout: CallConfiguration.timeout.join)
+ } else if let publisher = result as? AnyPublisher<JoinCallResponse?, Error> {
+ let result = try await publisher
+ .compactMap { $0 }
+ .nextValue(timeout: CallConfiguration.timeout.join)
+ return result
} else {
throw ClientError("Call was unable to join call.")
}
diff --git a/Sources/StreamVideo/CallKit/CallKitService.swift b/Sources/StreamVideo/CallKit/CallKitService.swift
index ab1d3e6b4..182d55a13 100644
--- a/Sources/StreamVideo/CallKit/CallKitService.swift
+++ b/Sources/StreamVideo/CallKit/CallKitService.swift
@@ -11,11 +11,17 @@ import StreamWebRTC
/// Manages CallKit integration for VoIP calls.
open class CallKitService: NSObject, CXProviderDelegate, @unchecked Sendable {
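+ /// A normalised mute/unmute request targeting a specific CallKit call.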
+ struct MuteRequest: Equatable {
+ var callUUID: UUID
+ var isMuted: Bool
+ }
+
@Injected(\.callCache) private var callCache
@Injected(\.uuidFactory) private var uuidFactory
@Injected(\.currentDevice) private var currentDevice
@Injected(\.audioStore) private var audioStore
@Injected(\.permissions) private var permissions
+ @Injected(\.applicationStateAdapter) private var applicationStateAdapter
private let disposableBag = DisposableBag()
/// Represents a call that is being managed by the service.
@@ -91,17 +97,17 @@ open class CallKitService: NSObject, CXProviderDelegate, @unchecked Sendable {
private var _storage: [UUID: CallEntry] = [:]
private let storageAccessQueue: UnfairQueue = .init()
- private var active: UUID? {
- didSet { observeCallSettings(active) }
- }
+ private var active: UUID?
var callCount: Int { storageAccessQueue.sync { _storage.count } }
private var callEndedNotificationCancellable: AnyCancellable?
private var ringingTimerCancellable: AnyCancellable?
- /// Handles audio session changes triggered by CallKit.
- private lazy var callKitAudioReducer = CallKitAudioSessionReducer(store: audioStore)
+ private let muteActionSubject = PassthroughSubject<MuteRequest, Never>()
+ private var muteActionCancellable: AnyCancellable?
+ private let muteProcessingQueue = OperationQueue(maxConcurrentOperationCount: 1)
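+ /// The last microphone mute state that was applied, used to skip duplicate requests.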
+ private var isMuted: Bool?
/// Initialize.
override public init() {
@@ -113,6 +119,18 @@ open class CallKitService: NSObject, CXProviderDelegate, @unchecked Sendable {
.publisher(for: Notification.Name(CallNotification.callEnded))
.compactMap { $0.object as? Call }
.sink { [weak self] in self?.callEnded($0.cId, ringingTimedOut: false) }
+
+ /// - Important:
+ /// Used to debounce the system's attempts to mute/unmute the call. The system appears to
+ /// perform rapid mute/unmute attempts while the call is being joined or moving to the foreground.
+ /// The observation below guards against and normalises those attempts in order to avoid:
+ /// - rapid speaker and mic toggles
+ /// - unnecessary attempts to mute/unmute the mic
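+ /// Duplicates are dropped, and any burst arriving within the debounce window collapses into its final request.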
+ muteActionCancellable = muteActionSubject
+ .removeDuplicates()
+ .filter { [weak self] _ in self?.applicationStateAdapter.state != .foreground }
+ .debounce(for: 0.5, scheduler: DispatchQueue.global(qos: .userInteractive))
+ .sink { [weak self] in self?.performMuteRequest($0) }
}
/// Report an incoming call to CallKit.
@@ -394,6 +412,8 @@ open class CallKitService: NSObject, CXProviderDelegate, @unchecked Sendable {
///
/// of the audio session during a call.
audioStore.dispatch(.callKit(.activate(audioSession)))
+
+ observeCallSettings(active)
}
public func provider(
@@ -463,27 +483,6 @@ open class CallKitService: NSObject, CXProviderDelegate, @unchecked Sendable {
log.error(error, subsystems: .callKit)
action.fail()
}
-
- let callSettings = callToJoinEntry.call.state.callSettings
- do {
- if callSettings.audioOn == false {
- try await requestTransaction(
- CXSetMutedCallAction(
- call: callToJoinEntry.callUUID,
- muted: true
- )
- )
- }
- } catch {
- log.error(
- """
- While joining call id:\(callToJoinEntry.call.cId) we failed to mute the microphone.
- \(callSettings)
- """,
- subsystems: .callKit,
- error: error
- )
- }
}
}
@@ -555,33 +554,23 @@ open class CallKitService: NSObject, CXProviderDelegate, @unchecked Sendable {
action.fail()
return
}
- Task(disposableBag: disposableBag) { [permissions] in
- guard permissions.hasMicrophonePermission else {
- if action.isMuted {
- action.fulfill()
- } else {
- action.fail()
- }
- return
- }
- do {
- if action.isMuted {
- stackEntry.call.didPerform(.performSetMutedCall)
- try await stackEntry.call.microphone.disable()
- } else {
- stackEntry.call.didPerform(.performSetMutedCall)
- try await stackEntry.call.microphone.enable()
- }
- } catch {
- log.error(
- "Unable to perform muteCallAction isMuted:\(action.isMuted).",
- subsystems: .callKit,
- error: error
- )
+ guard permissions.hasMicrophonePermission else {
+ if action.isMuted {
+ action.fulfill()
+ } else {
+ action.fail()
}
- action.fulfill()
+ return
}
+
+ muteActionSubject.send(
+ .init(
+ callUUID: stackEntry.callUUID,
+ isMuted: action.isMuted
+ )
+ )
+ action.fulfill()
}
// MARK: - Helpers
@@ -639,12 +628,6 @@ open class CallKitService: NSObject, CXProviderDelegate, @unchecked Sendable {
- /// Called when `StreamVideo` changes. Adds/removes the audio reducer and
- /// subscribes to events on real devices.
+ /// Called when `StreamVideo` changes. Subscribes to events on real devices.
open func didUpdate(_ streamVideo: StreamVideo?) {
- if streamVideo != nil {
- audioStore.add(callKitAudioReducer)
- } else {
- audioStore.remove(callKitAudioReducer)
- }
-
guard currentDevice.deviceType != .simulator else {
return
}
@@ -796,19 +779,63 @@ open class CallKitService: NSObject, CXProviderDelegate, @unchecked Sendable {
.call
.state
.$callSettings
- .map { !$0.audioOn }
+ .map { $0.audioOn == false }
.removeDuplicates()
.log(.debug, subsystems: .callKit) { "Will perform SetMutedCallAction with muted:\($0). " }
- .sinkTask(storeIn: disposableBag) { [weak self] in
- do {
- try await self?.requestTransaction(CXSetMutedCallAction(call: callUUID, muted: $0))
- } catch {
- log.warning("Unable to apply CallSettings.audioOn:\(!$0).", subsystems: .callKit)
- }
- }
+ .sink { [weak self] in self?.performCallSettingMuteRequest($0, callUUID: callUUID) }
.store(in: disposableBag, key: key)
}
}
+
+ private func performCallSettingMuteRequest(
+ _ muted: Bool,
+ callUUID: UUID
+ ) {
+ muteProcessingQueue.addTaskOperation { [weak self] in
+ guard
+ let self,
+ callUUID == active,
+ isMuted != muted
+ else {
+ return
+ }
+ do {
+ try await requestTransaction(CXSetMutedCallAction(call: callUUID, muted: muted))
+ isMuted = muted
+ } catch {
+ log.warning("Unable to apply CallSettings.audioOn:\(!muted).", subsystems: .callKit)
+ }
+ }
+ }
+
+ private func performMuteRequest(_ request: MuteRequest) {
+ muteProcessingQueue.addTaskOperation { [weak self] in
+ guard
+ let self,
+ request.callUUID == active,
+ isMuted != request.isMuted,
+ let stackEntry = callEntry(for: request.callUUID)
+ else {
+ return
+ }
+
+ do {
+ if request.isMuted {
+ stackEntry.call.didPerform(.performSetMutedCall)
+ try await stackEntry.call.microphone.disable()
+ } else {
+ stackEntry.call.didPerform(.performSetMutedCall)
+ try await stackEntry.call.microphone.enable()
+ }
+ isMuted = request.isMuted
+ } catch {
+ log.error(
+ "Unable to set call uuid:\(request.callUUID) muted:\(request.isMuted) state.",
+ error: error
+ )
+ }
+ }
+ }
}
extension CallKitService: InjectionKey {
diff --git a/Sources/StreamVideo/CallSettings/MicrophoneManager.swift b/Sources/StreamVideo/CallSettings/MicrophoneManager.swift
index 8af75d6dc..c4da8ae52 100644
--- a/Sources/StreamVideo/CallSettings/MicrophoneManager.swift
+++ b/Sources/StreamVideo/CallSettings/MicrophoneManager.swift
@@ -12,35 +12,72 @@ public final class MicrophoneManager: ObservableObject, CallSettingsManager, @un
/// The status of the microphone.
@Published public internal(set) var status: CallSettingsStatus
let state = CallSettingsState()
-
+
init(callController: CallController, initialStatus: CallSettingsStatus) {
self.callController = callController
status = initialStatus
}
-
+
/// Toggles the microphone state.
- public func toggle() async throws {
- try await updateAudioStatus(status.next)
+ public func toggle(
+ file: StaticString = #file,
+ function: StaticString = #function,
+ line: UInt = #line
+ ) async throws {
+ try await updateAudioStatus(
+ status.next,
+ file: file,
+ function: function,
+ line: line
+ )
}
-
+
/// Enables the microphone.
- public func enable() async throws {
- try await updateAudioStatus(.enabled)
+ public func enable(
+ file: StaticString = #file,
+ function: StaticString = #function,
+ line: UInt = #line
+ ) async throws {
+ try await updateAudioStatus(
+ .enabled,
+ file: file,
+ function: function,
+ line: line
+ )
}
-
+
/// Disables the microphone.
- public func disable() async throws {
- try await updateAudioStatus(.disabled)
+ public func disable(
+ file: StaticString = #file,
+ function: StaticString = #function,
+ line: UInt = #line
+ ) async throws {
+ try await updateAudioStatus(
+ .disabled,
+ file: file,
+ function: function,
+ line: line
+ )
}
// MARK: - private
- private func updateAudioStatus(_ status: CallSettingsStatus) async throws {
+ private func updateAudioStatus(
+ _ status: CallSettingsStatus,
+ file: StaticString = #file,
+ function: StaticString = #function,
+ line: UInt = #line
+ ) async throws {
try await updateState(
newState: status.boolValue,
current: self.status.boolValue,
action: { [unowned self] state in
- try await callController.changeAudioState(isEnabled: state)
+ try await callController.changeAudioState(
+ isEnabled: state,
+ file: file,
+ function: function,
+ line: line
+ )
},
onUpdate: { _ in
self.status = status
diff --git a/Sources/StreamVideo/CallState.swift b/Sources/StreamVideo/CallState.swift
index ce44d79ed..1d45c0577 100644
--- a/Sources/StreamVideo/CallState.swift
+++ b/Sources/StreamVideo/CallState.swift
@@ -121,7 +121,7 @@ public class CallState: ObservableObject {
@Published public internal(set) var anonymousParticipantCount: UInt32 = 0
@Published public internal(set) var participantCount: UInt32 = 0
@Published public internal(set) var isInitialized: Bool = false
- @Published public internal(set) var callSettings = CallSettings()
+ @Published public internal(set) var callSettings: CallSettings = .default
@Published public internal(set) var isCurrentUserScreensharing: Bool = false
@Published public internal(set) var duration: TimeInterval = 0
diff --git a/Sources/StreamVideo/CallStateMachine/Stages/Call+Stage.swift b/Sources/StreamVideo/CallStateMachine/Stages/Call+Stage.swift
index 5c53a4171..660eac311 100644
--- a/Sources/StreamVideo/CallStateMachine/Stages/Call+Stage.swift
+++ b/Sources/StreamVideo/CallStateMachine/Stages/Call+Stage.swift
@@ -31,7 +31,7 @@ extension Call.StateMachine {
var ring: Bool
var notify: Bool
var source: JoinSource
- var deliverySubject: PassthroughSubject<JoinCallResponse, Error>
+ var deliverySubject: CurrentValueSubject<JoinCallResponse?, Error>
var currentNumberOfRetries = 0
var retryPolicy: RetryPolicy = .fastAndSimple
diff --git a/Sources/StreamVideo/Controllers/CallController.swift b/Sources/StreamVideo/Controllers/CallController.swift
index e95c0f74f..bc5a05bb1 100644
--- a/Sources/StreamVideo/Controllers/CallController.swift
+++ b/Sources/StreamVideo/Controllers/CallController.swift
@@ -152,8 +152,18 @@ class CallController: @unchecked Sendable {
/// Changes the audio state for the current user.
/// - Parameter isEnabled: whether audio should be enabled.
- func changeAudioState(isEnabled: Bool) async throws {
- await webRTCCoordinator.changeAudioState(isEnabled: isEnabled)
+ func changeAudioState(
+ isEnabled: Bool,
+ file: StaticString = #file,
+ function: StaticString = #function,
+ line: UInt = #line
+ ) async throws {
+ await webRTCCoordinator.changeAudioState(
+ isEnabled: isEnabled,
+ file: file,
+ function: function,
+ line: line
+ )
}
/// Changes the video state for the current user.
diff --git a/Sources/StreamVideo/Errors/Errors.swift b/Sources/StreamVideo/Errors/Errors.swift
index 739c1cf11..857123b30 100644
--- a/Sources/StreamVideo/Errors/Errors.swift
+++ b/Sources/StreamVideo/Errors/Errors.swift
@@ -7,10 +7,11 @@ import Foundation
extension Stream_Video_Sfu_Models_Error: Error, ReflectiveStringConvertible {}
/// A Client error.
-public class ClientError: Error, ReflectiveStringConvertible, @unchecked Sendable {
- public struct Location: Equatable, Sendable {
+public class ClientError: Error, CustomStringConvertible, @unchecked Sendable {
+ public struct Location: Equatable, Sendable, CustomStringConvertible {
public let file: String
public let line: Int
+ public var description: String { "{ file:\(file), line:\(line) }" }
}
/// The file and line number which emitted the error.
@@ -33,7 +34,26 @@ public class ClientError: Error, ReflectiveStringConvertible, @unchecked Sendabl
/// Retrieve the localized description for this error.
public var localizedDescription: String { message ?? errorDescription ?? "" }
-
+
+ public var description: String {
+ var result = "ClientError {"
+ result += " location:\(location)"
+ if let message {
+ result += " message:\(message)"
+ }
+ if let apiError {
+ result += ", apiError:\(apiError)"
+ }
+ if let underlyingError {
+ result += ", underlyingError:\(underlyingError)"
+ }
+ if let errorDescription {
+ result += ", errorDescription:\(errorDescription)"
+ }
+ result += " }"
+ return result
+ }
+
/// A client error based on an external general error.
/// - Parameters:
/// - error: an external error.
diff --git a/Sources/StreamVideo/Generated/SystemEnvironment+Version.swift b/Sources/StreamVideo/Generated/SystemEnvironment+Version.swift
index ea1845004..f04c70441 100644
--- a/Sources/StreamVideo/Generated/SystemEnvironment+Version.swift
+++ b/Sources/StreamVideo/Generated/SystemEnvironment+Version.swift
@@ -7,7 +7,7 @@ import Foundation
extension SystemEnvironment {
/// A Stream Video version.
- public static let version: String = "1.36.0"
+ public static let version: String = "1.37.0"
/// The WebRTC version.
- public static let webRTCVersion: String = "137.0.43"
+ public static let webRTCVersion: String = "137.0.52"
}
diff --git a/Sources/StreamVideo/Info.plist b/Sources/StreamVideo/Info.plist
index 5c985b4ce..12e96635c 100644
--- a/Sources/StreamVideo/Info.plist
+++ b/Sources/StreamVideo/Info.plist
@@ -15,7 +15,7 @@
CFBundlePackageType
$(PRODUCT_BUNDLE_PACKAGE_TYPE)
CFBundleShortVersionString
- 1.36.0
+ 1.37.0
CFBundleVersion
$(CURRENT_PROJECT_VERSION)
NSHumanReadableCopyright
diff --git a/Sources/StreamVideo/Models/CallSettings.swift b/Sources/StreamVideo/Models/CallSettings.swift
index c67a63609..0dd02b309 100644
--- a/Sources/StreamVideo/Models/CallSettings.swift
+++ b/Sources/StreamVideo/Models/CallSettings.swift
@@ -7,6 +7,8 @@ import Foundation
/// Represents the settings for a call.
public final class CallSettings: ObservableObject, Sendable, Equatable, CustomStringConvertible {
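+ /// A shared default-settings instance, used as the initial value for a call's state.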
+ public static let `default` = CallSettings()
+
/// Whether the audio is on for the current user.
public let audioOn: Bool
/// Whether the video is on for the current user.
diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/AudioDeviceModule.swift b/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/AudioDeviceModule.swift
new file mode 100644
index 000000000..5b723122d
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/AudioDeviceModule.swift
@@ -0,0 +1,564 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AudioToolbox
+import AVFAudio
+import AVFoundation
+import Combine
+import Foundation
+import StreamWebRTC
+
+/// Bridges `RTCAudioDeviceModule` callbacks to Combine-based state so the
+/// audio pipeline can stay in sync with application logic.
+final class AudioDeviceModule: NSObject, RTCAudioDeviceModuleDelegate, Encodable, @unchecked Sendable {
+
+ /// Helper constants used across the module.
+ enum Constant {
+ /// WebRTC interfaces return integer result codes. We use this named
+ /// constant to denote a successful operation.
+ static let successResult = 0
+
+ /// Audio pipeline floor in dB that we interpret as silence.
+ static let silenceDB: Float = -160
+ }
+
+ /// Events emitted as the underlying audio engine changes state.
+ enum Event: Equatable, CustomStringConvertible {
+ /// Outbound audio surpassed the silence threshold.
+ case speechActivityStarted
+ /// Outbound audio dropped back to silence.
+ case speechActivityEnded
+ /// A new `AVAudioEngine` instance has been created.
+ case didCreateAudioEngine(AVAudioEngine)
+ /// The engine is about to enable playout/recording paths.
+ case willEnableAudioEngine(AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool)
+ /// The engine is about to start rendering.
+ case willStartAudioEngine(AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool)
+ /// The engine has fully stopped.
+ case didStopAudioEngine(AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool)
+ /// The engine was disabled after stopping.
+ case didDisableAudioEngine(AVAudioEngine, isPlayoutEnabled: Bool, isRecordingEnabled: Bool)
+ /// The engine will be torn down.
+ case willReleaseAudioEngine(AVAudioEngine)
+ /// The input graph is configured with a new source node.
+ case configureInputFromSource(AVAudioEngine, source: AVAudioNode?, destination: AVAudioNode, format: AVAudioFormat)
+ /// The output graph is configured with a destination node.
+ case configureOutputFromSource(AVAudioEngine, source: AVAudioNode, destination: AVAudioNode?, format: AVAudioFormat)
+ /// Voice processing knobs changed.
+ case didUpdateAudioProcessingState(
+ voiceProcessingEnabled: Bool,
+ voiceProcessingBypassed: Bool,
+ voiceProcessingAGCEnabled: Bool,
+ stereoPlayoutEnabled: Bool
+ )
+
+ var description: String {
+ switch self {
+ case .speechActivityStarted:
+ return ".speechActivityStarted"
+
+ case .speechActivityEnded:
+ return ".speechActivityEnded"
+
+ case .didCreateAudioEngine(let engine):
+ return ".didCreateAudioEngine(\(engine))"
+
+ case .willEnableAudioEngine(let engine, let isPlayoutEnabled, let isRecordingEnabled):
+ return ".willEnableAudioEngine(\(engine), isPlayoutEnabled:\(isPlayoutEnabled), isRecordingEnabled:\(isRecordingEnabled))"
+
+ case .willStartAudioEngine(let engine, let isPlayoutEnabled, let isRecordingEnabled):
+ return ".willStartAudioEngine(\(engine), isPlayoutEnabled:\(isPlayoutEnabled), isRecordingEnabled:\(isRecordingEnabled))"
+
+ case .didStopAudioEngine(let engine, let isPlayoutEnabled, let isRecordingEnabled):
+ return ".didStopAudioEngine(\(engine), isPlayoutEnabled:\(isPlayoutEnabled), isRecordingEnabled:\(isRecordingEnabled))"
+
+ case .didDisableAudioEngine(let engine, let isPlayoutEnabled, let isRecordingEnabled):
+ return ".didDisableAudioEngine(\(engine), isPlayoutEnabled:\(isPlayoutEnabled), isRecordingEnabled:\(isRecordingEnabled))"
+
+ case .willReleaseAudioEngine(let engine):
+ return ".willReleaseAudioEngine(\(engine))"
+
+ case .configureInputFromSource(let engine, let source, let destination, let format):
+ return ".configureInputFromSource(\(engine), source:\(source), destination:\(destination), format:\(format))"
+
+ case .configureOutputFromSource(let engine, let source, let destination, let format):
+ return ".configureOutputFromSource(\(engine), source:\(source), destination:\(destination), format:\(format))"
+
+ case let .didUpdateAudioProcessingState(
+ voiceProcessingEnabled,
+ voiceProcessingBypassed,
+ voiceProcessingAGCEnabled,
+ stereoPlayoutEnabled
+ ):
+ return ".didUpdateAudioProcessingState(voiceProcessingEnabled:\(voiceProcessingEnabled), voiceProcessingBypassed:\(voiceProcessingBypassed), voiceProcessingAGCEnabled:\(voiceProcessingAGCEnabled), stereoPlayoutEnabled:\(stereoPlayoutEnabled))"
+ }
+ }
+ }
+
+ /// Tracks whether WebRTC is currently playing back audio.
+ private let isPlayingSubject: CurrentValueSubject<Bool, Never>
+ /// `true` while audio playout is active.
+ var isPlaying: Bool { isPlayingSubject.value }
+ /// Publisher that reflects playout activity changes.
+ var isPlayingPublisher: AnyPublisher<Bool, Never> { isPlayingSubject.eraseToAnyPublisher() }
+
+ /// Tracks whether WebRTC is capturing microphone samples.
+ private let isRecordingSubject: CurrentValueSubject<Bool, Never>
+ /// `true` while audio capture is active.
+ var isRecording: Bool { isRecordingSubject.value }
+ /// Publisher that reflects recording activity changes.
+ var isRecordingPublisher: AnyPublisher<Bool, Never> { isRecordingSubject.eraseToAnyPublisher() }
+
+ /// Tracks whether the microphone is muted at the ADM layer.
+ private let isMicrophoneMutedSubject: CurrentValueSubject<Bool, Never>
+ /// `true` if the microphone is muted.
+ var isMicrophoneMuted: Bool { isMicrophoneMutedSubject.value }
+ /// Publisher that reflects microphone mute changes.
+ var isMicrophoneMutedPublisher: AnyPublisher<Bool, Never> { isMicrophoneMutedSubject.eraseToAnyPublisher() }
+
+ /// Tracks whether stereo playout is configured.
+ private let isStereoPlayoutEnabledSubject: CurrentValueSubject<Bool, Never>
+ /// `true` if stereo playout is available and active.
+ var isStereoPlayoutEnabled: Bool { isStereoPlayoutEnabledSubject.value }
+ /// Publisher emitting stereo playout state.
+ var isStereoPlayoutEnabledPublisher: AnyPublisher<Bool, Never> { isStereoPlayoutEnabledSubject.eraseToAnyPublisher() }
+
+ /// Tracks whether VP processing is currently bypassed.
+ private let isVoiceProcessingBypassedSubject: CurrentValueSubject<Bool, Never>
+ /// `true` if the voice processing unit is bypassed.
+ var isVoiceProcessingBypassed: Bool { isVoiceProcessingBypassedSubject.value }
+ /// Publisher emitting VP bypass changes.
+ var isVoiceProcessingBypassedPublisher: AnyPublisher<Bool, Never> { isVoiceProcessingBypassedSubject.eraseToAnyPublisher() }
+
+ /// Tracks whether voice processing is enabled.
+ private let isVoiceProcessingEnabledSubject: CurrentValueSubject<Bool, Never>
+ /// `true` when Apple VP is active.
+ var isVoiceProcessingEnabled: Bool { isVoiceProcessingEnabledSubject.value }
+ /// Publisher emitting VP enablement changes.
+ var isVoiceProcessingEnabledPublisher: AnyPublisher<Bool, Never> { isVoiceProcessingEnabledSubject.eraseToAnyPublisher() }
+
+ /// Tracks whether automatic gain control is enabled inside VP.
+ private let isVoiceProcessingAGCEnabledSubject: CurrentValueSubject<Bool, Never>
+ /// `true` while AGC is active.
+ var isVoiceProcessingAGCEnabled: Bool { isVoiceProcessingAGCEnabledSubject.value }
+ /// Publisher emitting AGC changes.
+ var isVoiceProcessingAGCEnabledPublisher: AnyPublisher<Bool, Never> { isVoiceProcessingAGCEnabledSubject.eraseToAnyPublisher() }
+
+ /// Observes RMS audio levels (in dB) derived from the input tap.
+ private let audioLevelSubject = CurrentValueSubject<Float, Never>(Constant.silenceDB) // default to silence
+ /// Latest measured audio level.
+ var audioLevel: Float { audioLevelSubject.value }
+ /// Publisher emitting audio level updates.
+ var audioLevelPublisher: AnyPublisher<Float, Never> { audioLevelSubject.eraseToAnyPublisher() }
+
+ /// Wrapper around WebRTC `RTCAudioDeviceModule`.
+ private let source: any RTCAudioDeviceModuleControlling
+ /// Manages Combine subscriptions generated by this module.
+ private let disposableBag: DisposableBag = .init()
+
+ /// Serial queue used to deliver events to observers.
+ private let dispatchQueue: DispatchQueue
+ /// Internal relay that feeds `publisher`.
+ private let subject: PassthroughSubject<Event, Never>
+ /// Object that taps engine nodes and publishes audio level data.
+ private var audioLevelsAdapter: AudioEngineNodeAdapting
+ /// Public stream of `Event` values describing engine transitions.
+ let publisher: AnyPublisher<Event, Never>
+
+ /// Strong reference to the current engine so we can introspect it if needed.
+ private var engine: AVAudioEngine?
+
+ /// Textual diagnostics for logging and debugging.
+ override var description: String {
+ "{ " +
+ "isPlaying:\(isPlaying)" +
+ ", isRecording:\(isRecording)" +
+ ", isMicrophoneMuted:\(isMicrophoneMuted)" +
+ ", isStereoPlayoutEnabled:\(isStereoPlayoutEnabled)" +
+ ", isVoiceProcessingBypassed:\(isVoiceProcessingBypassed)" +
+ ", isVoiceProcessingEnabled:\(isVoiceProcessingEnabled)" +
+ ", isVoiceProcessingAGCEnabled:\(isVoiceProcessingAGCEnabled)" +
+ ", audioLevel:\(audioLevel)" +
+ ", source:\(source)" +
+ " }"
+ }
+
+ /// Creates a module that mirrors the provided WebRTC audio device module.
+ /// - Parameter source: The audio device module implementation to observe.
+ init(
+ _ source: any RTCAudioDeviceModuleControlling,
+ audioLevelsNodeAdapter: AudioEngineNodeAdapting = AudioEngineLevelNodeAdapter()
+ ) {
+ self.source = source
+ self.isPlayingSubject = .init(source.isPlaying)
+ self.isRecordingSubject = .init(source.isRecording)
+ self.isMicrophoneMutedSubject = .init(source.isMicrophoneMuted)
+ self.isStereoPlayoutEnabledSubject = .init(source.isStereoPlayoutEnabled)
+ self.isVoiceProcessingBypassedSubject = .init(source.isVoiceProcessingBypassed)
+ self.isVoiceProcessingEnabledSubject = .init(source.isVoiceProcessingEnabled)
+ self.isVoiceProcessingAGCEnabledSubject = .init(source.isVoiceProcessingAGCEnabled)
+ self.audioLevelsAdapter = audioLevelsNodeAdapter
+
+ let dispatchQueue = DispatchQueue(label: "io.getstream.audiodevicemodule", qos: .userInteractive)
+ let subject = PassthroughSubject<Event, Never>()
+ self.subject = subject
+ self.dispatchQueue = dispatchQueue
+ self.publisher = subject
+ .receive(on: dispatchQueue)
+ .eraseToAnyPublisher()
+ super.init()
+
+ subject
+ .log(.debug, subsystems: .audioSession) { "\($0)" }
+ .sink { _ in }
+ .store(in: disposableBag)
+
+ audioLevelsAdapter.subject = audioLevelSubject
+ source.observer = self
+
+ source.isVoiceProcessingBypassed = true
+ }
+
+ // MARK: - Recording
+
+ /// Reinitializes the ADM, clearing its internal audio graph state.
+ func reset() {
+ _ = source.reset()
+ }
+
+ /// Switches between stereo and mono playout while keeping the recording
+ /// state consistent across reinitializations.
+ /// - Parameter isPreferred: `true` when stereo output should be used.
+ func setStereoPlayoutPreference(_ isPreferred: Bool) {
+ /// - Important: Each mute mode comes with different trade-offs:
+ /// - `.voiceProcessing`: requires VP to be enabled in order to mute/unmute, which
+ /// means that for outputs where VP is disabled (e.g. stereo) we cannot mute/unmute.
+ /// - `.restartEngine`: rebuilds the whole graph and requires explicitly calling
+ /// `initAndStartRecording`.
+ _ = source.setMuteMode(isPreferred ? .inputMixer : .voiceProcessing)
+ /// - Important: We can probably set this one to false when the user doesn't have
+ /// sendAudio capability.
+ _ = source.setRecordingAlwaysPreparedMode(false)
+ source.prefersStereoPlayout = isPreferred
+ }
+
+ /// Starts or stops speaker playout on the ADM.
+ /// - Parameter isActive: `true` to start playout, `false` to stop.
+ /// - Throws: `ClientError` when WebRTC returns a non-zero status.
+ func setPlayout(_ isActive: Bool) throws {
+ guard isActive != isPlaying else {
+ return
+ }
+ if isActive {
+ if source.isPlayoutInitialized {
+ try throwingExecution("Unable to start playout") {
+ source.startPlayout()
+ }
+ } else {
+ try throwingExecution("Unable to initAndStart playout") {
+ source.initAndStartPlayout()
+ }
+ }
+ } else {
+ try throwingExecution("Unable to stop playout") {
+ source.stopPlayout()
+ }
+ }
+ }
+
+ /// Enables or disables recording on the wrapped audio device module.
+ /// - Parameter isEnabled: When `true` recording starts, otherwise stops.
+ /// - Throws: `ClientError` when the underlying module reports a failure.
+ func setRecording(_ isEnabled: Bool) throws {
+ guard isEnabled != isRecording else {
+ return
+ }
+ if isEnabled {
+ if source.isRecordingInitialized {
+ try throwingExecution("Unable to start recording") {
+ source.startRecording()
+ }
+ } else {
+ try throwingExecution("Unable to initAndStart recording") {
+ source.initAndStartRecording()
+ }
+ }
+ } else {
+ try throwingExecution("Unable to stop recording") {
+ source.stopRecording()
+ }
+ }
+
+ isRecordingSubject.send(isEnabled)
+ }
+
+ /// Updates the muted state of the microphone for the wrapped module.
+ /// - Parameter isMuted: `true` to mute the microphone, `false` to unmute.
+ /// - Throws: `ClientError` when the underlying module reports a failure.
+ func setMuted(_ isMuted: Bool) throws {
+ guard isMuted != source.isMicrophoneMuted else {
+ return
+ }
+
+ if !isMuted, !isRecording {
+ try setRecording(true)
+ }
+
+ try throwingExecution("Unable to setMicrophoneMuted:\(isMuted)") {
+ source.setMicrophoneMuted(isMuted)
+ }
+
+ isMicrophoneMutedSubject.send(isMuted)
+ }
+
+ /// Forces the ADM to recompute whether stereo output is supported.
+ func refreshStereoPlayoutState() {
+ source.refreshStereoPlayoutState()
+ }
+
+ // MARK: - RTCAudioDeviceModuleDelegate
+
+ /// Receives speech activity notifications emitted by WebRTC VAD.
+ func audioDeviceModule(
+ _ audioDeviceModule: RTCAudioDeviceModule,
+ didReceiveSpeechActivityEvent speechActivityEvent: RTCSpeechActivityEvent
+ ) {
+ switch speechActivityEvent {
+ case .started:
+ subject.send(.speechActivityStarted)
+ case .ended:
+ subject.send(.speechActivityEnded)
+ @unknown default:
+ break
+ }
+ }
+
+ /// Stores the created engine reference and emits an event so observers can
+ /// hook into the audio graph configuration.
+ func audioDeviceModule(
+ _ audioDeviceModule: RTCAudioDeviceModule,
+ didCreateEngine engine: AVAudioEngine
+ ) -> Int {
+ self.engine = engine
+ subject.send(.didCreateAudioEngine(engine))
+ return Constant.successResult
+ }
+
+ /// Keeps local playback/recording state in sync as WebRTC enables the
+ /// corresponding engine paths.
+ func audioDeviceModule(
+ _ audioDeviceModule: RTCAudioDeviceModule,
+ willEnableEngine engine: AVAudioEngine,
+ isPlayoutEnabled: Bool,
+ isRecordingEnabled: Bool
+ ) -> Int {
+ subject.send(
+ .willEnableAudioEngine(
+ engine,
+ isPlayoutEnabled: isPlayoutEnabled,
+ isRecordingEnabled: isRecordingEnabled
+ )
+ )
+ isPlayingSubject.send(isPlayoutEnabled)
+ isRecordingSubject.send(isRecordingEnabled)
+ return Constant.successResult
+ }
+
+ /// Mirrors state when the engine is about to start running and delivering
+ /// audio samples.
+ func audioDeviceModule(
+ _ audioDeviceModule: RTCAudioDeviceModule,
+ willStartEngine engine: AVAudioEngine,
+ isPlayoutEnabled: Bool,
+ isRecordingEnabled: Bool
+ ) -> Int {
+ subject.send(
+ .willStartAudioEngine(
+ engine,
+ isPlayoutEnabled: isPlayoutEnabled,
+ isRecordingEnabled: isRecordingEnabled
+ )
+ )
+ isPlayingSubject.send(isPlayoutEnabled)
+ isRecordingSubject.send(isRecordingEnabled)
+
+ return Constant.successResult
+ }
+
+ /// Updates state and notifies observers once the engine has completely
+ /// stopped.
+ func audioDeviceModule(
+ _ audioDeviceModule: RTCAudioDeviceModule,
+ didStopEngine engine: AVAudioEngine,
+ isPlayoutEnabled: Bool,
+ isRecordingEnabled: Bool
+ ) -> Int {
+ subject.send(
+ .didStopAudioEngine(
+ engine,
+ isPlayoutEnabled: isPlayoutEnabled,
+ isRecordingEnabled: isRecordingEnabled
+ )
+ )
+ isPlayingSubject.send(isPlayoutEnabled)
+ isRecordingSubject.send(isRecordingEnabled)
+ return Constant.successResult
+ }
+
+ /// Tracks when the engine has been disabled after stopping so clients can
+ /// react (e.g., rebuilding audio graphs).
+ func audioDeviceModule(
+ _ audioDeviceModule: RTCAudioDeviceModule,
+ didDisableEngine engine: AVAudioEngine,
+ isPlayoutEnabled: Bool,
+ isRecordingEnabled: Bool
+ ) -> Int {
+ subject.send(
+ .didDisableAudioEngine(
+ engine,
+ isPlayoutEnabled: isPlayoutEnabled,
+ isRecordingEnabled: isRecordingEnabled
+ )
+ )
+ isPlayingSubject.send(isPlayoutEnabled)
+ isRecordingSubject.send(isRecordingEnabled)
+ return Constant.successResult
+ }
+
+ /// Clears internal references before WebRTC disposes the engine.
+ func audioDeviceModule(
+ _ audioDeviceModule: RTCAudioDeviceModule,
+ willReleaseEngine engine: AVAudioEngine
+ ) -> Int {
+ self.engine = nil
+ subject.send(.willReleaseAudioEngine(engine))
+ audioLevelsAdapter.uninstall(on: 0)
+ return Constant.successResult
+ }
+
+ /// Keeps observers informed when WebRTC sets up the input graph and installs
+ /// an audio level tap to monitor microphone activity.
+ func audioDeviceModule(
+ _ audioDeviceModule: RTCAudioDeviceModule,
+ engine: AVAudioEngine,
+ configureInputFromSource source: AVAudioNode?,
+ toDestination destination: AVAudioNode,
+ format: AVAudioFormat,
+ context: [AnyHashable: Any]
+ ) -> Int {
+ subject.send(
+ .configureInputFromSource(
+ engine,
+ source: source,
+ destination: destination,
+ format: format
+ )
+ )
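+ // With a typical 48 kHz session, the 1024-frame tap buffer below yields
+ // a level reading roughly every 21 ms.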
+ audioLevelsAdapter.installInputTap(
+ on: destination,
+ format: format,
+ bus: 0,
+ bufferSize: 1024
+ )
+ return Constant.successResult
+ }
+
+ /// Emits an event whenever WebRTC reconfigures the output graph.
+ func audioDeviceModule(
+ _ audioDeviceModule: RTCAudioDeviceModule,
+ engine: AVAudioEngine,
+ configureOutputFromSource source: AVAudioNode,
+ toDestination destination: AVAudioNode?,
+ format: AVAudioFormat,
+ context: [AnyHashable: Any]
+ ) -> Int {
+ subject.send(
+ .configureOutputFromSource(
+ engine,
+ source: source,
+ destination: destination,
+ format: format
+ )
+ )
+ return Constant.successResult
+ }
+
+ /// Currently unused: CallKit/RoutePicker own the device selection UX.
+ func audioDeviceModuleDidUpdateDevices(
+ _ audioDeviceModule: RTCAudioDeviceModule
+ ) {
+ // No-op
+ }
+
+ /// Mirrors state changes coming from CallKit/WebRTC voice-processing
+ /// controls so UI can reflect the correct toggles.
+ func audioDeviceModule(
+ _ module: RTCAudioDeviceModule,
+ didUpdateAudioProcessingState state: RTCAudioProcessingState
+ ) {
+ subject.send(
+ .didUpdateAudioProcessingState(
+ voiceProcessingEnabled: state.voiceProcessingEnabled,
+ voiceProcessingBypassed: state.voiceProcessingBypassed,
+ voiceProcessingAGCEnabled: state.voiceProcessingAGCEnabled,
+ stereoPlayoutEnabled: state.stereoPlayoutEnabled
+ )
+ )
+ isVoiceProcessingEnabledSubject.send(state.voiceProcessingEnabled)
+ isVoiceProcessingBypassedSubject.send(state.voiceProcessingBypassed)
+ isVoiceProcessingAGCEnabledSubject.send(state.voiceProcessingAGCEnabled)
+ isStereoPlayoutEnabledSubject.send(state.stereoPlayoutEnabled)
+ }
+
+ /// Mirrors the subset of properties that can be encoded for debugging.
+ private enum CodingKeys: String, CodingKey {
+ case isPlaying
+ case isRecording
+ case isMicrophoneMuted
+ case isStereoPlayoutEnabled
+ case isVoiceProcessingBypassed
+ case isVoiceProcessingEnabled
+ case isVoiceProcessingAGCEnabled
+
+ case audioLevel
+ }
+
+ /// Serializes the module state, primarily for diagnostic payloads.
+ func encode(to encoder: Encoder) throws {
+ var container = encoder.container(keyedBy: CodingKeys.self)
+ try container.encode(isPlaying, forKey: .isPlaying)
+ try container.encode(isRecording, forKey: .isRecording)
+ try container.encode(isMicrophoneMuted, forKey: .isMicrophoneMuted)
+ try container.encode(isStereoPlayoutEnabled, forKey: .isStereoPlayoutEnabled)
+ try container.encode(isVoiceProcessingBypassed, forKey: .isVoiceProcessingBypassed)
+ try container.encode(isVoiceProcessingEnabled, forKey: .isVoiceProcessingEnabled)
+ try container.encode(isVoiceProcessingAGCEnabled, forKey: .isVoiceProcessingAGCEnabled)
+ try container.encode(audioLevel, forKey: .audioLevel)
+ }
+
+ // MARK: - Private helpers
+
+ /// Runs a WebRTC ADM call and translates its integer result into a
+ /// `ClientError` enriched with call-site metadata.
+ private func throwingExecution(
+ _ message: @autoclosure () -> String,
+ file: StaticString = #file,
+ function: StaticString = #function,
+ line: UInt = #line,
+ _ operation: () -> Int
+ ) throws {
+ let result = operation()
+
+ guard result != Constant.successResult else {
+ return
+ }
+
+ throw ClientError(
+ "\(message()) (Error code:\(result))",
+ file,
+ line
+ )
+ }
+}
diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/AudioEngineLevelNodeAdapter.swift b/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/AudioEngineLevelNodeAdapter.swift
new file mode 100644
index 000000000..15bd57b71
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/AudioEngineLevelNodeAdapter.swift
@@ -0,0 +1,122 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import Accelerate
+import AVFoundation
+import Combine
+import Foundation
+
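+/// Abstraction over audio-engine node taps so tests can substitute a fake
+/// level adapter.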
+protocol AudioEngineNodeAdapting {
+
+ var subject: CurrentValueSubject<Float, Never>? { get set }
+
+ func installInputTap(
+ on node: AVAudioNode,
+ format: AVAudioFormat,
+ bus: Int,
+ bufferSize: UInt32
+ )
+
+ func uninstall(on bus: Int)
+}
+
+/// Observes an `AVAudioMixerNode` and publishes decibel readings for UI and
+/// analytics consumers.
+final class AudioEngineLevelNodeAdapter: AudioEngineNodeAdapting {
+
+ enum Constant {
+ // The lower bound of the audio pipeline, in dB, that is considered silence.
+ static let silenceDB: Float = -160
+ }
+
+ var subject: CurrentValueSubject<Float, Never>?
+
+ private var inputTap: AVAudioMixerNode?
+
+ /// Installs a tap on the supplied audio node to monitor input levels.
+ /// - Parameters:
+ /// - node: The node to observe; must be an `AVAudioMixerNode`.
+ /// - format: Audio format expected by the tap.
+ /// - bus: Output bus to observe.
+ /// - bufferSize: Tap buffer size.
+ func installInputTap(
+ on node: AVAudioNode,
+ format: AVAudioFormat,
+ bus: Int = 0,
+ bufferSize: UInt32 = 1024
+ ) {
+ guard let mixer = node as? AVAudioMixerNode, inputTap == nil else { return }
+
+ mixer.installTap(
+ onBus: bus,
+ bufferSize: bufferSize,
+ format: format
+ ) { [weak self] buffer, _ in
+ self?.processInputBuffer(buffer)
+ }
+
+ inputTap = mixer
+ log.debug("Input node installed", subsystems: .audioRecording)
+ }
+
+ /// Removes the tap and resets observed audio levels.
+ /// - Parameter bus: Bus to remove the tap from, defaults to `0`.
+ func uninstall(on bus: Int = 0) {
+ if let mixer = inputTap, mixer.engine != nil {
+ mixer.removeTap(onBus: bus)
+ }
+ subject?.send(Constant.silenceDB)
+ inputTap = nil
+ log.debug("Input node uninstalled", subsystems: .audioRecording)
+ }
+
+ // MARK: - Private Helpers
+
+ /// Processes the PCM buffer produced by the tap and computes a clamped RMS
+ /// value which is forwarded to the publisher.
+ private func processInputBuffer(_ buffer: AVAudioPCMBuffer) {
+ // Safely unwrap the `subject` (used to publish updates) and the
+ // `floatChannelData` (pointer to the interleaved or non-interleaved
+ // channel samples in memory). If either is missing, exit early since
+ // processing cannot continue.
+ guard
+ let subject,
+ let channelData = buffer.floatChannelData
+ else { return }
+
+ // Obtain the total number of frames in the buffer as a vDSP-compatible
+ // length type (`vDSP_Length`). This represents how many samples exist
+ // per channel in the current audio buffer.
+ let frameCount = vDSP_Length(buffer.frameLength)
+
+ // Declare a variable to store the computed RMS (root-mean-square)
+ // amplitude value for the buffer. It will represent the signal's
+ // average power in linear scale (not decibels yet).
+ var rms: Float = 0
+
+ // Use Apple's Accelerate framework to efficiently compute the RMS
+ // (root mean square) of the float samples in the first channel.
+ // - Parameters:
+ // - channelData[0]: Pointer to the first channel’s samples.
+ // - 1: Stride between consecutive elements (every sample).
+ // - &rms: Output variable to store the computed RMS.
+ // - frameCount: Number of samples to process.
+ vDSP_rmsqv(channelData[0], 1, &rms, frameCount)
+
+ // Convert the linear RMS value to decibels using the formula
+ // 20 * log10(rms). To avoid a log of zero (which is undefined),
+ // use `max(rms, Float.ulpOfOne)` to ensure a minimal positive value.
+ let rmsDB = 20 * log10(max(rms, Float.ulpOfOne))
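+
+ // For example, a full-scale sine wave (RMS ≈ 0.707) maps to ≈ -3 dB,
+ // while an all-zero buffer bottoms out near -138 dB thanks to the
+ // ulpOfOne guard.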
+
+ // Clamp the computed decibel value to a reasonable audio level range
+ // between -160 dB (silence) and 0 dB (maximum). This prevents extreme
+ // or invalid values that may occur due to noise or computation errors.
+ let clampedRMS = max(-160.0, min(0.0, Float(rmsDB)))
+
+ // Publish the clamped decibel value to the CurrentValueSubject so that
+ // subscribers (e.g., UI level meters or analytics systems) receive the
+ // updated level reading.
+ subject.send(clampedRMS)
+ }
+}
diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/RTCAudioDeviceModuleControlling.swift b/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/RTCAudioDeviceModuleControlling.swift
new file mode 100644
index 000000000..e64d82028
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/AudioDeviceModule/RTCAudioDeviceModuleControlling.swift
@@ -0,0 +1,47 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import Combine
+import StreamWebRTC
+
+/// Abstraction over `RTCAudioDeviceModule` so tests can provide fakes while
+/// production code continues to rely on the WebRTC-backed implementation.
+protocol RTCAudioDeviceModuleControlling: AnyObject {
+ var observer: RTCAudioDeviceModuleDelegate? { get set }
+ var isPlaying: Bool { get }
+ var isRecording: Bool { get }
+ var isPlayoutInitialized: Bool { get }
+ var isRecordingInitialized: Bool { get }
+ var isMicrophoneMuted: Bool { get }
+ var isStereoPlayoutEnabled: Bool { get }
+ var isVoiceProcessingBypassed: Bool { get set }
+ var isVoiceProcessingEnabled: Bool { get }
+ var isVoiceProcessingAGCEnabled: Bool { get }
+ var prefersStereoPlayout: Bool { get set }
+
+ func reset() -> Int
+ func initAndStartPlayout() -> Int
+ func startPlayout() -> Int
+ func stopPlayout() -> Int
+ func initAndStartRecording() -> Int
+ func setMicrophoneMuted(_ isMuted: Bool) -> Int
+ func startRecording() -> Int
+ func stopRecording() -> Int
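+ /// Relays a CallSettings-driven mute change to CallKit, skipping requests
+ /// for inactive calls or a state that has already been applied.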
+ func refreshStereoPlayoutState()
+ func setMuteMode(_ mode: RTCAudioEngineMuteMode) -> Int
+ func setRecordingAlwaysPreparedMode(_ alwaysPreparedRecording: Bool) -> Int
+}
+
+extension RTCAudioDeviceModule: RTCAudioDeviceModuleControlling {
+ /// Convenience wrapper that mirrors the old `initPlayout` and
+ /// `startPlayout` sequence so the caller can request playout in one call.
+ func initAndStartPlayout() -> Int {
+ let result = initPlayout()
+ if result == 0 {
+ return startPlayout()
+ } else {
+ return result
+ }
+ }
+}
diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/Namespace/Middleware/StreamCallAudioRecorder+AVAudioRecorderMiddleware.swift b/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/Namespace/Middleware/StreamCallAudioRecorder+AVAudioRecorderMiddleware.swift
index 8fee69d2c..1c0552968 100644
--- a/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/Namespace/Middleware/StreamCallAudioRecorder+AVAudioRecorderMiddleware.swift
+++ b/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/Namespace/Middleware/StreamCallAudioRecorder+AVAudioRecorderMiddleware.swift
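+ /// Applies a debounced CallKit mute request to the call's microphone,
+ /// recording the state that was applied.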
@@ -22,20 +22,46 @@ extension StreamCallAudioRecorder.Namespace {
/// ensure thread safety when accessing the recorder instance.
final class AVAudioRecorderMiddleware: Middleware, @unchecked Sendable {
+ /// Tracks which metering backend is active so we can flip between
+ /// `AVAudioRecorder` and the audio device module seamlessly.
+ enum Mode: Equatable {
+ case invalid
+ case audioRecorder(AVAudioRecorder)
+ case audioDeviceModule(AudioDeviceModule)
+ }
+
/// The audio store for managing permissions and session state.
@Injected(\.permissions) private var permissions
+ @Injected(\.audioStore) private var audioStore
- /// Builder for creating and caching the audio recorder instance.
- private var audioRecorder: AVAudioRecorder?
+ private var mode: Mode
/// Serial queue for recorder operations to ensure thread safety.
private let processingQueue = OperationQueue(maxConcurrentOperationCount: 1)
/// Subscription for publishing meter updates at refresh rate.
private var updateMetersCancellable: AnyCancellable?
+ /// Listens for ADM availability and pivots the metering source on the
+ /// fly when stereo playout is enabled.
+ private var audioDeviceModuleCancellable: AnyCancellable?
init(audioRecorder: AVAudioRecorder? = nil) {
- self.audioRecorder = audioRecorder
+ if let audioRecorder {
+ mode = .audioRecorder(audioRecorder)
+ } else if let audioRecorder = try? AVAudioRecorder.build() {
+ mode = .audioRecorder(audioRecorder)
+ } else {
+ mode = .invalid
+ }
+
+ let initialMode = self.mode
+
+ super.init()
+
+ audioDeviceModuleCancellable = audioStore
+ .publisher(\.audioDeviceModule)
+ .receive(on: processingQueue)
+ .sink { [weak self] in self?.didUpdate($0, initialMode: initialMode) }
}
// MARK: - Middleware
@@ -107,79 +133,124 @@ extension StreamCallAudioRecorder.Namespace {
return
}
- if audioRecorder == nil {
- do {
- self.audioRecorder = try AVAudioRecorder.build()
- } catch {
- log.error(error, subsystems: .audioRecording)
- return
- }
+ guard mode != .invalid else {
+ log.warning(
+ "Unable to start meters observation as mode set to .none",
+ subsystems: .audioRecording
+ )
+ return
}
- guard let audioRecorder else {
+ let mode = self.mode
+ stopObservation(for: mode)
+
+ guard await checkRequiredPermissions() else {
+ dispatcher?.dispatch(.setIsRecording(false))
return
}
- if updateMetersCancellable != nil {
- // In order for AVAudioRecorder to keep receive metering updates
- // we need to stop and start everytime there is a change in the
- // AVAudioSession configuration.
- audioRecorder.stop()
- audioRecorder.isMeteringEnabled = false
- }
+ startObservation(for: mode)
+ }
+ }
+
+ /// Stops audio recording and cleans up resources.
+ ///
+ /// This method:
+ /// 1. Stops the active recording
+ /// 2. Disables metering
+ /// 3. Cancels the meter update timer
+ private func stopRecording() {
+ processingQueue.addOperation { [weak self] in
+ guard let self else { return }
+ stopObservation(for: mode)
+ }
+ }
+
+ private func checkRequiredPermissions() async -> Bool {
+ do {
+ return try await permissions.requestMicrophonePermission()
+ } catch {
+ log.error(error, subsystems: .audioRecording)
+ return false
+ }
+ }
- updateMetersCancellable?.cancel()
- updateMetersCancellable = nil
+ private func stopObservation(for mode: Mode) {
+ guard updateMetersCancellable != nil else {
+ return
+ }
- do {
- let hasPermission = try await permissions.requestMicrophonePermission()
- audioRecorder.isMeteringEnabled = true
+ updateMetersCancellable?.cancel()
+ updateMetersCancellable = nil
- guard
- hasPermission,
- audioRecorder.record()
- else {
- dispatcher?.dispatch(.setIsRecording(false))
- audioRecorder.isMeteringEnabled = false
- return
- }
+ switch mode {
+ case .invalid:
+ break
+ case .audioRecorder(let audioRecorder):
+ // In order for AVAudioRecorder to keep receiving metering updates
+ // we need to stop and restart it every time the AVAudioSession
+ // configuration changes.
+ audioRecorder.stop()
+ audioRecorder.isMeteringEnabled = false
+ log.debug("AVAudioRecorder stopped.", subsystems: .audioRecording)
+ case .audioDeviceModule:
+ log.debug("AVAudioDeviceModule audioLevel observation stopped.", subsystems: .audioRecording)
+ }
+ }
+
+ private func startObservation(for mode: Mode) {
+ guard updateMetersCancellable == nil else {
+ return
+ }
+
+ switch mode {
+ case .invalid:
+ break
+
+ case .audioRecorder(let audioRecorder):
+ let isRecording = audioRecorder.record()
+ if isRecording {
+ audioRecorder.isMeteringEnabled = true
updateMetersCancellable = DefaultTimer
.publish(every: ScreenPropertiesAdapter.currentValue.refreshRate)
.map { [weak audioRecorder] _ in audioRecorder?.updateMeters() }
.compactMap { [weak audioRecorder] in audioRecorder?.averagePower(forChannel: 0) }
.sink { [weak self] in self?.dispatcher?.dispatch(.setMeter($0)) }
-
log.debug("AVAudioRecorder started...", subsystems: .audioRecording)
- } catch {
- log.error(error, subsystems: .audioRecording)
+ } else {
+ audioRecorder.isMeteringEnabled = false
+ dispatcher?.dispatch(.setIsRecording(false))
}
+
+ case .audioDeviceModule(let audioDeviceModule):
+ updateMetersCancellable = audioDeviceModule
+ .audioLevelPublisher
+ .log(.debug, subsystems: .audioRecording) { "AVAudioDeviceModule audioLevel observation value:\($0)." }
+ .sink { [weak self] in self?.dispatcher?.dispatch(.setMeter($0)) }
+ log.debug("AVAudioDeviceModule audioLevel observation started...", subsystems: .audioRecording)
}
}
- /// Stops audio recording and cleans up resources.
- ///
- /// This method:
- /// 1. Stops the active recording
- /// 2. Disables metering
- /// 3. Cancels the meter update timer
- private func stopRecording() {
- processingQueue.addOperation { [weak self] in
- guard
- let self,
- updateMetersCancellable != nil,
- let audioRecorder
- else {
- self?.updateMetersCancellable?.cancel()
- self?.updateMetersCancellable = nil
- return
+ private func didUpdate(
+ _ audioDeviceModule: AudioDeviceModule?,
+ initialMode: Mode
+ ) {
+ stopRecording()
+
+ let newMode: Mode = {
+ if let audioDeviceModule {
+ return .audioDeviceModule(audioDeviceModule)
+ } else {
+ return initialMode
}
+ }()
- audioRecorder.stop()
- audioRecorder.isMeteringEnabled = false
- updateMetersCancellable?.cancel()
- updateMetersCancellable = nil
- log.debug("AVAudioRecorder stopped.", subsystems: .audioRecording)
+ processingQueue.addTaskOperation { [weak self] in
+ self?.mode = newMode
+ if self?.state?.shouldRecord == true, self?.state?.isRecording == true {
+ self?.startRecording()
+ }
}
}
}
diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/Namespace/Middleware/StreamCallAudioRecorder+CategoryMiddleware.swift b/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/Namespace/Middleware/StreamCallAudioRecorder+CategoryMiddleware.swift
index 8b05e3497..1f04e3ba7 100644
--- a/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/Namespace/Middleware/StreamCallAudioRecorder+CategoryMiddleware.swift
+++ b/Sources/StreamVideo/Utils/AudioSession/AudioRecorder/Namespace/Middleware/StreamCallAudioRecorder+CategoryMiddleware.swift
@@ -33,7 +33,9 @@ extension StreamCallAudioRecorder.Namespace {
// Monitor for category changes that are incompatible with recording
cancellable = audioStore
- .publisher(\.category)
+ // Observe the derived configuration so system-driven category
+ // changes also stop the local recorder.
+ .publisher(\.audioSessionConfiguration.category)
.filter { $0 != .playAndRecord && $0 != .record }
.sink { [weak self] _ in
// Stop recording when category becomes incompatible
diff --git a/Sources/StreamVideo/Utils/AudioSession/AudioSessionConfiguration.swift b/Sources/StreamVideo/Utils/AudioSession/AudioSessionConfiguration.swift
index 9f3e4d06a..dfb279022 100644
--- a/Sources/StreamVideo/Utils/AudioSession/AudioSessionConfiguration.swift
+++ b/Sources/StreamVideo/Utils/AudioSession/AudioSessionConfiguration.swift
@@ -5,7 +5,7 @@
import AVFoundation
/// Represents the audio session configuration.
-public struct AudioSessionConfiguration: ReflectiveStringConvertible, Equatable, Sendable {
+public struct AudioSessionConfiguration: CustomStringConvertible, Equatable, Sendable {
var isActive: Bool
/// The audio session category.
var category: AVAudioSession.Category
@@ -16,6 +16,17 @@ public struct AudioSessionConfiguration: ReflectiveStringConvertible, Equatable,
/// The audio session port override.
var overrideOutputAudioPort: AVAudioSession.PortOverride?
+ public var description: String {
+ var result = "{ "
+ result += "isActive:\(isActive)"
+ result += ", category:\(category)"
+ result += ", mode:\(mode)"
+ result += ", options:\(options)"
+ result += ", overrideOutputAudioPort:\(overrideOutputAudioPort)"
+ result += " }"
+ return result
+ }
+
/// Compares two `AudioSessionConfiguration` instances for equality.
public static func == (lhs: Self, rhs: Self) -> Bool {
lhs.isActive == rhs.isActive &&
diff --git a/Sources/StreamVideo/Utils/AudioSession/CallAudioSession.swift b/Sources/StreamVideo/Utils/AudioSession/CallAudioSession.swift
index f613ea5bc..5bf9db5c9 100644
--- a/Sources/StreamVideo/Utils/AudioSession/CallAudioSession.swift
+++ b/Sources/StreamVideo/Utils/AudioSession/CallAudioSession.swift
@@ -12,7 +12,34 @@ final class CallAudioSession: @unchecked Sendable {
@Injected(\.audioStore) private var audioStore
- var currentRoute: AVAudioSessionRouteDescription { audioStore.session.currentRoute }
+ /// Bundles the reactive inputs we need to evaluate whenever call
+ /// capabilities or settings change, keeping log context attached.
+ private struct Input {
+ var callSettings: CallSettings
+ var ownCapabilities: Set<OwnCapability>
+ var currentRoute: RTCAudioStore.StoreState.AudioRoute?
+ var file: StaticString
+ var function: StaticString
+ var line: UInt
+
+ init(
+ callSettings: CallSettings,
+ ownCapabilities: Set<OwnCapability>,
+ currentRoute: RTCAudioStore.StoreState.AudioRoute? = nil,
+ file: StaticString = #file,
+ function: StaticString = #function,
+ line: UInt = #line
+ ) {
+ self.callSettings = callSettings
+ self.ownCapabilities = ownCapabilities
+ self.currentRoute = currentRoute
+ self.file = file
+ self.function = function
+ self.line = line
+ }
+ }
+
+ var currentRouteIsExternal: Bool { audioStore.state.currentRoute.isExternal }
private(set) weak var delegate: StreamAudioSessionAdapterDelegate?
private(set) var statsAdapter: WebRTCStatsAdapting?
@@ -23,16 +50,31 @@ final class CallAudioSession: @unchecked Sendable {
@Atomic private(set) var policy: AudioSessionPolicy
private let disposableBag = DisposableBag()
+ private let processingQueue = OperationQueue(maxConcurrentOperationCount: 1)
- private var interruptionEffect: RTCAudioStore.InterruptionEffect?
- private var routeChangeEffect: RTCAudioStore.RouteChangeEffect?
+ /// Serialises policy evaluations so the AVAudioSession only receives one
+ /// configuration at a time even when upstream publishers fire in bursts.
+ private let processingPipeline = PassthroughSubject<Input, Never>()
- init(
- policy: AudioSessionPolicy = DefaultAudioSessionPolicy()
- ) {
+ private var lastAppliedConfiguration: AudioSessionConfiguration?
+ private var lastCallSettings: CallSettings?
+ private var lastOwnCapabilities: Set<OwnCapability>?
+
+ init(policy: AudioSessionPolicy = DefaultAudioSessionPolicy()) {
self.policy = policy
- initialAudioSessionConfiguration()
+ /// - Important: This runs whenever a CallAudioSession is created and ensures that
+ /// the configuration is correct for calling. This is important for CallKit: if the
+ /// category and mode aren't set correctly, it won't activate the audioSession.
+ audioStore.dispatch(
+ .avAudioSession(
+ .setCategoryAndModeAndCategoryOptions(
+ .playAndRecord,
+ mode: .voiceChat,
+ categoryOptions: [.allowBluetoothHFP, .allowBluetoothA2DP]
+ )
+ )
+ )
}
func activate(
@@ -44,39 +86,25 @@ final class CallAudioSession: @unchecked Sendable {
) {
disposableBag.removeAll()
- self.delegate = delegate
- self.statsAdapter = statsAdapter
-
- Publishers
- .CombineLatest(callSettingsPublisher, ownCapabilitiesPublisher)
- .compactMap { [policy] in policy.configuration(for: $0, ownCapabilities: $1) }
- .removeDuplicates()
- // We add a little debounce delay to avoid multiple requests to
- // overwhelm the AVAudioSession. The value has been set empirically
- // and it can be adapter if required.
- .debounce(for: .seconds(0.5), scheduler: DispatchQueue.global(qos: .userInteractive))
- .log(.debug, subsystems: .audioSession) { "Updated configuration: \($0)" }
- .sinkTask(storeIn: disposableBag) { [weak self] in await self?.didUpdateConfiguration($0) }
+ processingPipeline
+ .debounce(for: .milliseconds(250), scheduler: processingQueue)
+ .receive(on: processingQueue)
+ .sink { [weak self] in self?.process($0) }
.store(in: disposableBag)
- audioStore.dispatch(.audioSession(.isAudioEnabled(true)))
+ self.delegate = delegate
+ self.statsAdapter = statsAdapter
- if shouldSetActive {
- audioStore.dispatch(.audioSession(.isActive(true)))
- } else {
- // In this codepath it means that we are being activated from CallKit.
- // As CallKit is taking over the audioSession we perform a quick
- // restart to ensure that our configuration has been activated
- // and respected.
- audioStore.restartAudioSession()
- }
+ // Expose the policy's stereo preference so the audio device module can
+ // reconfigure itself before WebRTC starts playout.
+ audioStore.dispatch(.stereo(.setPlayoutPreferred(policy is LivestreamAudioSessionPolicy)))
- interruptionEffect = .init(audioStore)
- routeChangeEffect = .init(
- audioStore,
+ configureCallSettingsAndCapabilitiesObservation(
callSettingsPublisher: callSettingsPublisher,
- delegate: delegate
+ ownCapabilitiesPublisher: ownCapabilitiesPublisher
)
+ configureCurrentRouteObservation()
+ configureCallOptionsObservation()
statsAdapter?.trace(.init(audioSession: traceRepresentation))
}
@@ -88,9 +116,13 @@ final class CallAudioSession: @unchecked Sendable {
disposableBag.removeAll()
delegate = nil
- interruptionEffect = nil
- routeChangeEffect = nil
- audioStore.dispatch(.audioSession(.isActive(false)))
+
+ audioStore.dispatch([
+ .webRTCAudioSession(.setAudioEnabled(false)),
+ .setAudioDeviceModule(nil),
+ .setActive(false)
+ ])
+
statsAdapter?.trace(.init(audioSession: traceRepresentation))
}
@@ -100,130 +132,197 @@ final class CallAudioSession: @unchecked Sendable {
ownCapabilities: Set<OwnCapability>
) {
self.policy = policy
- Task(disposableBag: disposableBag) { [weak self] in
- guard let self else { return }
- await didUpdateConfiguration(
- policy.configuration(for: callSettings, ownCapabilities: ownCapabilities)
- )
+
+ guard delegate != nil else {
+ return
}
+
+ processingPipeline.send(
+ .init(
+ callSettings: callSettings,
+ ownCapabilities: ownCapabilities,
+ currentRoute: audioStore.state.currentRoute
+ )
+ )
}
// MARK: - Private Helpers
- private func didUpdateConfiguration(
- _ configuration: AudioSessionConfiguration
- ) async {
- defer { statsAdapter?.trace(.init(audioSession: traceRepresentation)) }
-
- guard
- !Task.isCancelled
- else {
- return
- }
+ private func process(
+ _ input: Input
+ ) {
+ log.debug(
+ "⚙️ Processing input:\(input).",
+ functionName: input.function,
+ fileName: input.file,
+ lineNumber: input.line
+ )
+ didUpdate(
+ callSettings: input.callSettings,
+ ownCapabilities: input.ownCapabilities,
+ currentRoute: input.currentRoute ?? audioStore.state.currentRoute,
+ file: input.file,
+ function: input.function,
+ line: input.line
+ )
+ }
- do {
- if configuration.isActive {
- try await audioStore.dispatchAsync(
- .audioSession(
- .setCategory(
- configuration.category,
- mode: configuration.mode,
- options: configuration.options
- )
+ /// Wires call setting and capability updates into the processing queue so
+ /// downstream work always executes serially.
+ private func configureCallSettingsAndCapabilitiesObservation(
+ callSettingsPublisher: AnyPublisher<CallSettings, Never>,
+ ownCapabilitiesPublisher: AnyPublisher<Set<OwnCapability>, Never>
+ ) {
+ Publishers
+ .CombineLatest(callSettingsPublisher, ownCapabilitiesPublisher)
+ .receive(on: processingQueue)
+ .sink { [weak self] in
+ guard let self else {
+ return
+ }
+
+ processingPipeline.send(
+ .init(
+ callSettings: $0,
+ ownCapabilities: $1
)
)
}
- } catch {
- log.error(
- "Unable to apply configuration category:\(configuration.category) mode:\(configuration.mode) options:\(configuration.options).",
- subsystems: .audioSession,
- error: error
- )
- }
+ .store(in: disposableBag)
+ }
- if configuration.isActive, let overrideOutputAudioPort = configuration.overrideOutputAudioPort {
- do {
- try await audioStore.dispatchAsync(
- .audioSession(
- .setOverrideOutputPort(overrideOutputAudioPort)
+ /// Reapplies the last known category options when the system clears them,
+ /// which happens after some CallKit activations.
+ private func configureCallOptionsObservation() {
+ audioStore
+ .publisher(\.audioSessionConfiguration.options)
+ .removeDuplicates()
+ .filter { $0.isEmpty }
+ .receive(on: processingQueue)
+ .compactMap { [weak self] _ in self?.lastAppliedConfiguration?.options }
+ .sink { [weak self] in self?.audioStore.dispatch(.avAudioSession(.setCategoryOptions($0))) }
+ .store(in: disposableBag)
+ }
+
+ /// Keeps the delegate informed of hardware flips while also re-evaluating
+ /// the policy when we detect a reconfiguration-worthy route change.
+ private func configureCurrentRouteObservation() {
+ audioStore
+ .publisher(\.currentRoute)
+ .removeDuplicates()
+ .filter { $0.reason.requiresReconfiguration }
+ .receive(on: processingQueue)
+ .sink { [weak self] in
+ guard let self, let lastCallSettings, let lastOwnCapabilities else { return }
+ if lastCallSettings.speakerOn != $0.isSpeaker {
+ self.delegate?.audioSessionAdapterDidUpdateSpeakerOn(
+ $0.isSpeaker,
+ file: #file,
+ function: #function,
+ line: #line
)
- )
- } catch {
- log.error(
- "Unable to apply configuration overrideOutputAudioPort:\(overrideOutputAudioPort).",
- subsystems: .audioSession,
- error: error
- )
+ } else {
+ processingPipeline.send(
+ .init(
+ callSettings: lastCallSettings,
+ ownCapabilities: lastOwnCapabilities,
+ currentRoute: $0
+ )
+ )
+ }
}
- }
-
- await handleAudioOutputUpdateIfRequired(configuration)
+ .store(in: disposableBag)
}
- private func handleAudioOutputUpdateIfRequired(
- _ configuration: AudioSessionConfiguration
- ) async {
- guard
- configuration.isActive != audioStore.state.isActive
- else {
- return
- }
- do {
- try await audioStore.dispatchAsync(
- .audioSession(
- .setAVAudioSessionActive(configuration.isActive)
- )
- )
- } catch {
- log.error(
- "Failed while to applying AudioSession isActive:\(configuration.isActive) in order to match CallSettings.audioOutputOn.",
- subsystems: .audioSession,
- error: error
- )
- }
+ private func didUpdate(
+ callSettings: CallSettings,
+ ownCapabilities: Set<OwnCapability>,
+ currentRoute: RTCAudioStore.StoreState.AudioRoute,
+ file: StaticString = #file,
+ function: StaticString = #function,
+ line: UInt = #line
+ ) {
+ defer { statsAdapter?.trace(.init(audioSession: traceRepresentation)) }
+
+ applyConfiguration(
+ policy.configuration(
+ for: callSettings,
+ ownCapabilities: ownCapabilities
+ ),
+ callSettings: callSettings,
+ ownCapabilities: ownCapabilities,
+ file: file,
+ function: function,
+ line: line
+ )
}
- /// - Important: This method runs whenever an CallAudioSession is created and ensures that
- /// the configuration is correctly for calling. This is quite important for CallKit as if the category and
- /// mode aren't set correctly it won't activate the audioSession.
- private func initialAudioSessionConfiguration() {
- let state = audioStore.state
- let requiresCategoryUpdate = state.category != .playAndRecord
- let requiresModeUpdate = state.mode != .voiceChat
-
- guard requiresCategoryUpdate || requiresModeUpdate else {
- log.info(
- "AudioSession initial configuration isn't required.",
- subsystems: .audioSession
- )
- return
- }
+ /// Breaks the configuration into store actions so reducers update the
+ /// audio session and our own bookkeeping in a single dispatch.
+ private func applyConfiguration(
+ _ configuration: AudioSessionConfiguration,
+ callSettings: CallSettings,
+ ownCapabilities: Set<OwnCapability>,
+ file: StaticString = #file,
+ function: StaticString = #function,
+ line: UInt = #line
+ ) {
+ log.debug(
+ "CallAudioSession will apply configuration:\(configuration)",
+ subsystems: .audioSession,
+ functionName: function,
+ fileName: file,
+ lineNumber: line
+ )
- audioStore.dispatch(
- .audioSession(
- .setCategory(
- .playAndRecord,
- mode: .voiceChat,
- options: .allowBluetooth
+ var actions: [StoreActionBox] = []
+
+ actions.append(.normal(.setMicrophoneMuted(!callSettings.audioOn || !ownCapabilities.contains(.sendAudio))))
+
+ actions.append(
+ .normal(
+ .avAudioSession(
+ .setCategoryAndModeAndCategoryOptions(
+ configuration.category,
+ mode: configuration.mode,
+ categoryOptions: configuration.options
+ )
)
)
)
+
+ actions.append(contentsOf: [
+ // Setting only audioEnabled doesn't stop audio playout, because if
+ // a new track gets added later WebRTC will try to restart playout.
+ // However, the combination of audioEnabled:false and
+ // AVAudioSession.active:false seems to work.
+ .normal(.webRTCAudioSession(.setAudioEnabled(configuration.isActive))),
+ .normal(.setActive(configuration.isActive)),
+ .normal(.avAudioSession(.setOverrideOutputAudioPort(configuration.overrideOutputAudioPort ?? .none)))
+ ])
+
+ audioStore.dispatch(
+ actions,
+ file: file,
+ function: function,
+ line: line
+ )
+
+ lastAppliedConfiguration = configuration
+ lastCallSettings = callSettings
+ lastOwnCapabilities = ownCapabilities
}
}
extension CallAudioSession {
struct TraceRepresentation: Encodable {
- var state: RTCAudioStore.State
+ var state: RTCAudioStore.StoreState
var hasDelegate: Bool
- var hasInterruptionEffect: Bool
- var hasRouteChangeEffect: Bool
var policy: String
init(_ source: CallAudioSession) {
state = source.audioStore.state
hasDelegate = source.delegate != nil
- hasInterruptionEffect = source.interruptionEffect != nil
- hasRouteChangeEffect = source.routeChangeEffect != nil
policy = String(describing: source.policy)
}
}
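
The serialisation trick used by CallAudioSession is reusable on its own: OperationQueue conforms to Combine's Scheduler, so a queue with maxConcurrentOperationCount 1 can both debounce bursts and guarantee serial delivery. A minimal generic sketch (the SerialPipeline type and its names are illustrative):

import Combine
import Foundation

final class SerialPipeline<Input> {
    private let queue: OperationQueue = {
        let queue = OperationQueue()
        queue.maxConcurrentOperationCount = 1
        return queue
    }()
    private let subject = PassthroughSubject<Input, Never>()
    private var cancellable: AnyCancellable?

    /// Coalesces rapid sends and ensures `process` never runs concurrently.
    init(process: @escaping (Input) -> Void) {
        cancellable = subject
            .debounce(for: .milliseconds(250), scheduler: queue)
            .receive(on: queue)
            .sink { process($0) }
    }

    func send(_ input: Input) { subject.send(input) }
}
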
diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession.CategoryOptions+Convenience.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession.CategoryOptions+Convenience.swift
index 7f14fc7c9..c6afe56e1 100644
--- a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession.CategoryOptions+Convenience.swift
+++ b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession.CategoryOptions+Convenience.swift
@@ -14,11 +14,17 @@ extension AVAudioSession.CategoryOptions {
appIsInForeground: Bool
) -> AVAudioSession.CategoryOptions {
[
- .allowBluetooth,
+ .allowBluetoothHFP,
.allowBluetoothA2DP
]
}
/// Category options for playback.
static let playback: AVAudioSession.CategoryOptions = []
+
+ #if !canImport(AVFoundation, _version: 2360.61.4.11)
+ /// Older SDKs only expose ``allowBluetooth`` so we map the HFP alias to it
+ /// to avoid peppering the codebase with availability checks.
+ public static let allowBluetoothHFP = Self.allowBluetooth
+ #endif
}
diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession.RouteChangeReason+Convenience.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession.RouteChangeReason+Convenience.swift
new file mode 100644
index 000000000..67a4404b6
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSession.RouteChangeReason+Convenience.swift
@@ -0,0 +1,31 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+import Foundation
+
+extension AVAudioSession.RouteChangeReason {
+
+ /// Mirrors the filtering logic used by WebRTC so we ignore redundant
+ /// callbacks such as `categoryChange` that would otherwise spam the store.
+ var isValidRouteChange: Bool {
+ switch self {
+ case .categoryChange, .routeConfigurationChange:
+ return false
+ default:
+ return true
+ }
+ }
+
+ /// Flags reasons that represent real hardware transitions so we can rebuild
+ /// the audio graph when necessary.
+ var requiresReconfiguration: Bool {
+ switch self {
+ case .categoryChange, .override, .wakeFromSleep, .newDeviceAvailable, .oldDeviceUnavailable:
+ return true
+ default:
+ return false
+ }
+ }
+}
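
The same helpers apply outside the store, e.g. when observing AVAudioSession.routeChangeNotification directly. A hedged sketch that surfaces only the reconfiguration-worthy reasons (the function name is illustrative):

import AVFoundation
import Combine

/// Emits only route-change reasons that warrant rebuilding the audio graph,
/// using the `requiresReconfiguration` helper above.
func reconfigurationReasons() -> AnyPublisher<AVAudioSession.RouteChangeReason, Never> {
    NotificationCenter.default
        .publisher(for: AVAudioSession.routeChangeNotification)
        .compactMap { notification -> AVAudioSession.RouteChangeReason? in
            guard
                let rawValue = notification.userInfo?[AVAudioSessionRouteChangeReasonKey] as? UInt
            else {
                return nil
            }
            return AVAudioSession.RouteChangeReason(rawValue: rawValue)
        }
        .filter(\.requiresReconfiguration)
        .eraseToAnyPublisher()
}
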
diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionCategoryOptions+Convenience.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionCategoryOptions+Convenience.swift
index ac753beae..5ea33caf7 100644
--- a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionCategoryOptions+Convenience.swift
+++ b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionCategoryOptions+Convenience.swift
@@ -27,11 +27,19 @@ extension AVAudioSession.CategoryOptions {
options.append(".duckOthers")
}
+ #if canImport(AVFoundation, _version: 2360.61.4.11)
+ // Adds ".allowBluetooth" if present, permitting audio playback through
+ // Bluetooth devices.
+ if contains(.allowBluetoothHFP) {
+ options.append(".allowBluetoothHFP")
+ }
+ #else
// Adds ".allowBluetooth" if present, permitting audio playback through
// Bluetooth devices.
if contains(.allowBluetooth) {
options.append(".allowBluetooth")
}
+ #endif
// Adds ".defaultToSpeaker" if present, enabling speaker output by default.
if contains(.defaultToSpeaker) {
diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionMode+Convenience.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionMode+Convenience.swift
index 2939fb57a..2bd39992f 100644
--- a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionMode+Convenience.swift
+++ b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionMode+Convenience.swift
@@ -11,4 +11,16 @@ extension AVAudioSession.Mode {
public var description: String {
rawValue
}
+
+ /// Indicates whether the mode keeps stereo playout active or if WebRTC
+ /// should fall back to mono because of voice-processing constraints.
+ var supportsStereoPlayout: Bool {
+ switch self {
+ case .videoChat, .voiceChat, .gameChat:
+ return false
+
+ default:
+ return true
+ }
+ }
}
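
This flag pairs naturally with the channel-count request: when the mode permits stereo, ask the session for two output channels, otherwise stay mono. A sketch, assuming the `supportsStereoPlayout` helper above (the function name is illustrative):

import AVFoundation

/// Requests stereo output only when the current mode allows it.
func applyPreferredOutputChannels(on session: AVAudioSession) {
    let channels = session.mode.supportsStereoPlayout
        ? min(2, session.maximumOutputNumberOfChannels)
        : 1
    do {
        try session.setPreferredOutputNumberOfChannels(channels)
    } catch {
        print("Unable to set preferred output channels: \(error)")
    }
}
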
diff --git a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionPortOverride+Convenience.swift b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionPortOverride+Convenience.swift
index 992224d84..79afe073d 100644
--- a/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionPortOverride+Convenience.swift
+++ b/Sources/StreamVideo/Utils/AudioSession/Extensions/AVAudioSessionPortOverride+Convenience.swift
@@ -9,11 +9,11 @@ extension AVAudioSession.PortOverride {
public var description: String {
switch self {
case .none:
- return "None"
+ return ".none"
case .speaker:
- return "Speaker"
+ return ".speaker"
@unknown default:
- return "Unknown"
+ return ".unknown"
}
}
}
diff --git a/Sources/StreamVideo/Utils/AudioSession/Policies/DefaultAudioSessionPolicy.swift b/Sources/StreamVideo/Utils/AudioSession/Policies/DefaultAudioSessionPolicy.swift
index 27476d9ef..80dea145a 100644
--- a/Sources/StreamVideo/Utils/AudioSession/Policies/DefaultAudioSessionPolicy.swift
+++ b/Sources/StreamVideo/Utils/AudioSession/Policies/DefaultAudioSessionPolicy.swift
@@ -28,7 +28,7 @@ public struct DefaultAudioSessionPolicy: AudioSessionPolicy {
category: .playAndRecord,
mode: .voiceChat,
options: [
- .allowBluetooth,
+ .allowBluetoothHFP,
.allowBluetoothA2DP
],
overrideOutputAudioPort: callSettings.speakerOn
diff --git a/Sources/StreamVideo/Utils/AudioSession/Policies/LivestreamAudioSessionPolicy.swift b/Sources/StreamVideo/Utils/AudioSession/Policies/LivestreamAudioSessionPolicy.swift
new file mode 100644
index 000000000..007c21ac2
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/Policies/LivestreamAudioSessionPolicy.swift
@@ -0,0 +1,29 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import Foundation
+
+/// Provides an audio session configuration tailored for livestream calls,
+/// keeping stereo playout active while respecting the local capabilities.
+public struct LivestreamAudioSessionPolicy: AudioSessionPolicy {
+
+ public init() {}
+
+ /// Builds the configuration used when a call toggles livestream mode.
+ /// Stereo playout is preferred (hence the category and options), but the policy falls back to the
+ /// playback category if the current user cannot transmit audio. A2DP is required so that external
+ /// devices can play stereo.
+ public func configuration(
+ for callSettings: CallSettings,
+ ownCapabilities: Set<OwnCapability>
+ ) -> AudioSessionConfiguration {
+ .init(
+ isActive: callSettings.audioOutputOn,
+ category: ownCapabilities.contains(.sendAudio) ? .playAndRecord : .playback,
+ mode: .default,
+ options: .allowBluetoothA2DP,
+ overrideOutputAudioPort: callSettings.speakerOn ? .speaker : nil
+ )
+ }
+}
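
To illustrate the fallback, here is what the policy produces for a listen-only viewer versus a host. The CallSettings arguments shown rely on the initializer's defaults and are illustrative:

let policy = LivestreamAudioSessionPolicy()

// A viewer without the sendAudio capability gets a playback-only session,
// which keeps stereo playout available.
let viewer = policy.configuration(
    for: CallSettings(audioOn: false),
    ownCapabilities: []
)
// viewer.category == .playback, viewer.mode == .default

// A host keeps playAndRecord so the microphone remains usable.
let host = policy.configuration(
    for: CallSettings(audioOn: true),
    ownCapabilities: [.sendAudio]
)
// host.category == .playAndRecord
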
diff --git a/Sources/StreamVideo/Utils/AudioSession/Protocols/AVAudioSessionProtocol.swift b/Sources/StreamVideo/Utils/AudioSession/Protocols/AVAudioSessionProtocol.swift
index 7f51fcf4a..14aff5284 100644
--- a/Sources/StreamVideo/Utils/AudioSession/Protocols/AVAudioSessionProtocol.swift
+++ b/Sources/StreamVideo/Utils/AudioSession/Protocols/AVAudioSessionProtocol.swift
@@ -11,7 +11,7 @@ protocol AVAudioSessionProtocol {
/// - Parameters:
/// - category: The audio category (e.g., `.playAndRecord`).
/// - mode: The audio mode (e.g., `.voiceChat`).
- /// - categoryOptions: The options for the category (e.g., `.allowBluetooth`).
+ /// - categoryOptions: The options for the category (e.g., `.allowBluetoothHFP`).
/// - Throws: An error if setting the category fails.
func setCategory(
_ category: AVAudioSession.Category,
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction+AudioSession.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction+AudioSession.swift
deleted file mode 100644
index 16eb7fb9e..000000000
--- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction+AudioSession.swift
+++ /dev/null
@@ -1,49 +0,0 @@
-//
-// Copyright © 2025 Stream.io Inc. All rights reserved.
-//
-
-import AVFoundation
-import Foundation
-
-extension RTCAudioStoreAction {
-
- /// Enumerates the supported actions for audio session state changes.
- ///
- /// Use these cases to express updates and configuration changes to the
- /// audio session, including activation, interruption, category, output
- /// port, and permissions.
- enum AudioSession {
- /// Activates or deactivates the audio session.
- case isActive(Bool)
-
- /// Sets the interruption state of the audio session.
- case isInterrupted(Bool)
-
- /// Enables or disables audio.
- case isAudioEnabled(Bool)
-
- /// Enables or disables manual audio management.
- case useManualAudio(Bool)
-
- /// Sets the session category, mode, and options.
- case setCategory(
- AVAudioSession.Category,
- mode: AVAudioSession.Mode,
- options: AVAudioSession.CategoryOptions
- )
-
- /// Overrides the output audio port (e.g., speaker, none).
- case setOverrideOutputPort(AVAudioSession.PortOverride)
-
- /// Sets whether system alerts should not interrupt the session.
- case setPrefersNoInterruptionsFromSystemAlerts(Bool)
-
- /// Sets the recording permission state for the session.
- case setHasRecordingPermission(Bool)
-
- /// Used when activating/deactivating audioOutput from CallSettings.
- /// - Warning: It has the potential to cause misalignment with the underline RTCAudioSession.
- /// It should be used with caution.
- case setAVAudioSessionActive(Bool)
- }
-}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction+CallKit.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction+CallKit.swift
deleted file mode 100644
index 98106253e..000000000
--- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction+CallKit.swift
+++ /dev/null
@@ -1,21 +0,0 @@
-//
-// Copyright © 2025 Stream.io Inc. All rights reserved.
-//
-
-import AVFoundation
-import Foundation
-
-extension RTCAudioStoreAction {
-
- /// An action describing a CallKit-driven change to the AVAudioSession.
- ///
- /// Use this enum to represent explicit audio session activation and deactivation
- /// events that are triggered by CallKit and should be handled by the reducer.
- enum CallKit {
- /// Indicates that the audio session was activated via CallKit.
- case activate(AVAudioSession)
-
- /// Indicates that the audio session was deactivated via CallKit.
- case deactivate(AVAudioSession)
- }
-}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction+Generic.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction+Generic.swift
deleted file mode 100644
index b659553e0..000000000
--- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction+Generic.swift
+++ /dev/null
@@ -1,16 +0,0 @@
-//
-// Copyright © 2025 Stream.io Inc. All rights reserved.
-//
-
-import Foundation
-
-extension RTCAudioStoreAction {
-
- /// Represents actions that can be performed within the RTCAudioStore to control audio behavior
- /// or timing.
- enum Generic {
- /// An action that introduces a delay for a specified number of seconds before proceeding with
- /// the next operation.
- case delay(seconds: TimeInterval)
- }
-}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction.swift
deleted file mode 100644
index 4c526fd0c..000000000
--- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Actions/RTCAudioStoreAction.swift
+++ /dev/null
@@ -1,15 +0,0 @@
-//
-// Copyright © 2025 Stream.io Inc. All rights reserved.
-//
-
-import Foundation
-
-indirect enum RTCAudioStoreAction: Sendable {
- case generic(RTCAudioStoreAction.Generic)
-
- case audioSession(RTCAudioStoreAction.AudioSession)
-
- case callKit(RTCAudioStoreAction.CallKit)
-
- case failable(RTCAudioStoreAction)
-}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/AudioSessions/AudioSessionProtocol.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/AudioSessions/AudioSessionProtocol.swift
deleted file mode 100644
index 9feb882a4..000000000
--- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/AudioSessions/AudioSessionProtocol.swift
+++ /dev/null
@@ -1,51 +0,0 @@
-//
-// Copyright © 2025 Stream.io Inc. All rights reserved.
-//
-
-import AVFoundation
-import Foundation
-import StreamWebRTC
-
-protocol AudioSessionProtocol: AnyObject {
- var avSession: AVAudioSessionProtocol { get }
-
- var prefersNoInterruptionsFromSystemAlerts: Bool { get }
-
- func setPrefersNoInterruptionsFromSystemAlerts(_ newValue: Bool) throws
-
- var isActive: Bool { get }
-
- func setActive(_ isActive: Bool) throws
-
- var isAudioEnabled: Bool { get set }
-
- var useManualAudio: Bool { get set }
-
- var category: String { get }
-
- var mode: String { get }
-
- var categoryOptions: AVAudioSession.CategoryOptions { get }
-
- var recordPermissionGranted: Bool { get }
-
- func requestRecordPermission() async -> Bool
-
- var currentRoute: AVAudioSessionRouteDescription { get }
-
- func add(_ delegate: RTCAudioSessionDelegate)
-
- func remove(_ delegate: RTCAudioSessionDelegate)
-
- func audioSessionDidActivate(_ audioSession: AVAudioSession)
-
- func audioSessionDidDeactivate(_ audioSession: AVAudioSession)
-
- func perform(
- _ operation: (AudioSessionProtocol) throws -> Void
- ) throws
-
- func overrideOutputAudioPort(_ port: AVAudioSession.PortOverride) throws
-
- func setConfiguration(_ configuration: RTCAudioSessionConfiguration) throws
-}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/AVAudioSessionObserver.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/AVAudioSessionObserver.swift
new file mode 100644
index 000000000..66d62fea6
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/AVAudioSessionObserver.swift
@@ -0,0 +1,126 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+import Combine
+import Foundation
+
+extension AVAudioSession {
+ /// Captures a stable view of the session so state changes can be diffed
+ /// outside of the AVAudioSession API, which otherwise exposes mutable
+ /// objects.
+ struct Snapshot: Equatable, CustomStringConvertible {
+ var category: AVAudioSession.Category
+ var mode: AVAudioSession.Mode
+ var categoryOptions: AVAudioSession.CategoryOptions
+ var routeSharingPolicy: AVAudioSession.RouteSharingPolicy
+ var availableModes: [AVAudioSession.Mode]
+ var preferredInput: RTCAudioStore.StoreState.AudioRoute.Port?
+ var renderingMode: String
+ var prefersEchoCancelledInput: Bool
+ var isEchoCancelledInputEnabled: Bool
+ var isEchoCancelledInputAvailable: Bool
+ var maximumOutputNumberOfChannels: Int
+ var outputNumberOfChannels: Int
+ var preferredOutputNumberOfChannels: Int
+
+ /// Produces a compact string payload that is easy to log when
+ /// diagnosing audio route transitions.
+ var description: String {
+ var result = "{"
+ result += "category:\(category)"
+ result += ", mode:\(mode)"
+ result += ", categoryOptions:\(categoryOptions)"
+ result += ", routeSharingPolicy:\(routeSharingPolicy)"
+ result += ", availableModes:\(availableModes)"
+ result += ", preferredInput:\(preferredInput)"
+ result += ", renderingMode:\(renderingMode)"
+ result += ", prefersEchoCancelledInput:\(prefersEchoCancelledInput)"
+ result += ", isEchoCancelledInputEnabled:\(isEchoCancelledInputEnabled)"
+ result += ", isEchoCancelledInputAvailable:\(isEchoCancelledInputAvailable)"
+ result += ", maximumOutputNumberOfChannels:\(maximumOutputNumberOfChannels)"
+ result += ", outputNumberOfChannels:\(outputNumberOfChannels)"
+ result += ", preferredOutputNumberOfChannels:\(preferredOutputNumberOfChannels)"
+ result += " }"
+ return result
+ }
+
+ /// Builds a new snapshot by pulling the latest values from the shared
+ /// AVAudioSession instance.
+ init(_ source: AVAudioSession = .sharedInstance()) {
+ self.category = source.category
+ self.mode = source.mode
+ self.categoryOptions = source.categoryOptions
+ self.routeSharingPolicy = source.routeSharingPolicy
+ self.availableModes = source.availableModes
+ self.preferredInput = source.preferredInput.map { .init($0) }
+ #if compiler(>=6.0)
+ if #available(iOS 17.2, *) { self.renderingMode = "\(source.renderingMode)" }
+ else { self.renderingMode = "" }
+ #else
+ self.renderingMode = ""
+ #endif
+
+ #if compiler(>=6.0)
+ if #available(iOS 18.2, *) { self.prefersEchoCancelledInput = source.prefersEchoCancelledInput }
+ else { self.prefersEchoCancelledInput = false }
+ #else
+ self.prefersEchoCancelledInput = false
+ #endif
+
+ #if compiler(>=6.0)
+ if #available(iOS 18.2, *) { self.isEchoCancelledInputEnabled = source.isEchoCancelledInputEnabled }
+ else { self.isEchoCancelledInputEnabled = false }
+ #else
+ self.isEchoCancelledInputEnabled = false
+ #endif
+
+ #if compiler(>=6.0)
+ if #available(iOS 18.2, *) { self.isEchoCancelledInputAvailable = source.isEchoCancelledInputAvailable }
+ else { self.isEchoCancelledInputAvailable = false }
+ #else
+ self.isEchoCancelledInputAvailable = false
+ #endif
+ self.maximumOutputNumberOfChannels = source.maximumOutputNumberOfChannels
+ self.outputNumberOfChannels = source.outputNumberOfChannels
+ self.preferredOutputNumberOfChannels = source.preferredOutputNumberOfChannels
+ }
+ }
+}
+
+/// Polls the shared AVAudioSession on a timer so stores can react using Combine.
+final class AVAudioSessionObserver {
+
+ var publisher: AnyPublisher<AVAudioSession.Snapshot, Never> { subject.eraseToAnyPublisher() }
+
+ private let subject: CurrentValueSubject<AVAudioSession.Snapshot, Never> = .init(.init())
+ private var cancellable: AnyCancellable?
+
+ /// Starts emitting snapshots roughly every 100ms, which is fast enough to
+ /// catch rapid route transitions without adding noticeable overhead.
+ func startObserving() {
+ cancellable = DefaultTimer
+ .publish(every: 0.1)
+ .sink { [weak self] _ in self?.subject.send(.init()) }
+ }
+
+ /// Cancels the observation timer and stops sending snapshot updates.
+ func stopObserving() {
+ cancellable?.cancel()
+ cancellable = nil
+ }
+}
+
+extension AVAudioSessionObserver: InjectionKey {
+ nonisolated(unsafe) static var currentValue: AVAudioSessionObserver = .init()
+}
+
+extension InjectedValues {
+ /// Injects the audio session observer so effects can subscribe without
+ /// hard-coding their own polling logic.
+ var avAudioSessionObserver: AVAudioSessionObserver {
+ get { InjectedValues[AVAudioSessionObserver.self] }
+ set { InjectedValues[AVAudioSessionObserver.self] = newValue }
+ }
+}
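
Because the subject always holds the latest snapshot, pairing the publisher with removeDuplicates() turns the 100ms poll into a change stream. A usage sketch (the SnapshotLogger type is illustrative):

import Combine

final class SnapshotLogger {
    @Injected(\.avAudioSessionObserver) private var observer
    private var cancellable: AnyCancellable?

    /// Logs every distinct session snapshot produced by the poller.
    func start() {
        cancellable = observer.publisher
            .removeDuplicates()
            .sink { print("AVAudioSession changed: \($0)") }
        observer.startObserving()
    }
}
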
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/AudioSessions/AudioSessionProtocol.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/AudioSessions/AudioSessionProtocol.swift
new file mode 100644
index 000000000..b6cb0435e
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/AudioSessions/AudioSessionProtocol.swift
@@ -0,0 +1,88 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+import Foundation
+import StreamWebRTC
+
+/// Abstraction over the WebRTC audio session that lets the store coordinate
+/// audio behaviour without tying tests to the concrete implementation.
+protocol AudioSessionProtocol: AnyObject {
+ var avSession: AVAudioSessionProtocol { get }
+
+ /// Indicates whether the system should suppress interruption alerts while
+ /// the session is active.
+ var prefersNoInterruptionsFromSystemAlerts: Bool { get }
+
+ /// Toggles preference for system interruption suppression.
+ /// - Parameter newValue: `true` to suppress alerts, `false` otherwise.
+ func setPrefersNoInterruptionsFromSystemAlerts(_ newValue: Bool) throws
+
+ var isActive: Bool { get }
+
+ func setActive(_ isActive: Bool) throws
+
+ var isAudioEnabled: Bool { get set }
+
+ var useManualAudio: Bool { get set }
+
+ var category: String { get }
+
+ var mode: String { get }
+
+ var categoryOptions: AVAudioSession.CategoryOptions { get }
+
+ var recordPermissionGranted: Bool { get }
+
+ func requestRecordPermission() async -> Bool
+
+ var currentRoute: AVAudioSessionRouteDescription { get }
+
+ func add(_ delegate: RTCAudioSessionDelegate)
+
+ func remove(_ delegate: RTCAudioSessionDelegate)
+
+ func audioSessionDidActivate(_ audioSession: AVAudioSession)
+
+ func audioSessionDidDeactivate(_ audioSession: AVAudioSession)
+
+ /// Executes an operation while the session lock is held.
+ /// - Parameter operation: Closure that receives a locked `AudioSessionProtocol`.
+ func perform(
+ _ operation: (AudioSessionProtocol) throws -> Void
+ ) throws
+
+ func overrideOutputAudioPort(_ port: AVAudioSession.PortOverride) throws
+
+ func setPreferredOutputNumberOfChannels(_ noOfChannels: Int) throws
+
+ /// Applies the provided configuration to the audio session.
+ /// - Parameter configuration: Desired audio session configuration.
+ func setConfiguration(_ configuration: RTCAudioSessionConfiguration) throws
+
+ /// Applies the provided configuration to the audio session while optionally
+ /// restoring the active state.
+ /// - Parameters:
+ /// - configuration: Desired audio session configuration.
+ /// - active: When `true`, the session should be reactivated after applying
+ /// the configuration.
+ func setConfiguration(
+ _ configuration: RTCAudioSessionConfiguration,
+ active: Bool
+ ) throws
+}
+
+extension AudioSessionProtocol {
+
+ func setConfiguration(
+ _ configuration: RTCAudioSessionConfiguration,
+ active: Bool
+ ) throws {
+ try setConfiguration(configuration)
+
+ guard active else { return }
+
+ try setActive(true)
+ }
+}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/AudioSessions/RTCAudioSession+AudioSessionProtocol.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/AudioSessions/RTCAudioSession+AudioSessionProtocol.swift
similarity index 86%
rename from Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/AudioSessions/RTCAudioSession+AudioSessionProtocol.swift
rename to Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/AudioSessions/RTCAudioSession+AudioSessionProtocol.swift
index 6ce718a9b..1c6a31b84 100644
--- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/AudioSessions/RTCAudioSession+AudioSessionProtocol.swift
+++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/AudioSessions/RTCAudioSession+AudioSessionProtocol.swift
@@ -5,6 +5,8 @@
import Foundation
import StreamWebRTC
+/// Conforms the WebRTC audio session to the lightweight protocol used by the
+/// store so tests can swap the implementation with fakes.
extension RTCAudioSession: AudioSessionProtocol {
var avSession: any AVAudioSessionProtocol {
session
@@ -41,6 +43,7 @@ extension RTCAudioSession: AudioSessionProtocol {
}
}
+ /// Locks the session for configuration while running the supplied closure.
func perform(
_ operation: (AudioSessionProtocol) throws -> Void
) throws {
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/RTCAudioSessionPublisher.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/RTCAudioSessionPublisher.swift
new file mode 100644
index 000000000..3a50a5c9d
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/RTCAudioSessionPublisher.swift
@@ -0,0 +1,73 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import Combine
+import Foundation
+import StreamWebRTC
+
+/// Publishes significant `RTCAudioSessionDelegate` callbacks as Combine
+/// events so middleware can react declaratively.
+final class RTCAudioSessionPublisher: NSObject, RTCAudioSessionDelegate, @unchecked Sendable {
+
+ /// Events emitted when the WebRTC audio session changes state.
+ enum Event: Equatable {
+ case didBeginInterruption
+
+ case didEndInterruption(shouldResumeSession: Bool)
+
+ case didChangeRoute(
+ reason: AVAudioSession.RouteChangeReason,
+ from: AVAudioSessionRouteDescription,
+ to: AVAudioSessionRouteDescription
+ )
+ }
+
+ /// The Combine publisher that emits session events.
+ private(set) lazy var publisher: AnyPublisher<Event, Never> = subject.eraseToAnyPublisher()
+
+ private let source: RTCAudioSession
+ private let subject: PassthroughSubject<Event, Never> = .init()
+
+ /// Creates a publisher for the provided WebRTC audio session.
+ /// - Parameter source: The session to observe.
+ init(_ source: RTCAudioSession) {
+ self.source = source
+ super.init()
+ _ = publisher
+ source.add(self)
+ }
+
+ deinit {
+ source.remove(self)
+ }
+
+ // MARK: - RTCAudioSessionDelegate
+
+ func audioSessionDidBeginInterruption(_ session: RTCAudioSession) {
+ subject.send(.didBeginInterruption)
+ }
+
+ func audioSessionDidEndInterruption(
+ _ session: RTCAudioSession,
+ shouldResumeSession: Bool
+ ) {
+ subject.send(.didEndInterruption(shouldResumeSession: shouldResumeSession))
+ }
+
+ /// Forwards route change notifications and includes the new route in the
+ /// payload.
+ func audioSessionDidChangeRoute(
+ _ session: RTCAudioSession,
+ reason: AVAudioSession.RouteChangeReason,
+ previousRoute: AVAudioSessionRouteDescription
+ ) {
+ subject.send(
+ .didChangeRoute(
+ reason: reason,
+ from: previousRoute,
+ to: session.currentRoute
+ )
+ )
+ }
+}
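
A consumer subscribes once and switches over the event cases; the publisher handles delegate registration and removal itself. A sketch:

import Combine
import StreamWebRTC

let sessionPublisher = RTCAudioSessionPublisher(RTCAudioSession.sharedInstance())
let cancellable = sessionPublisher.publisher.sink { event in
    switch event {
    case .didBeginInterruption:
        // Pause local audio work until the interruption ends.
        break
    case .didEndInterruption(let shouldResumeSession):
        // Optionally restart playout/recording when the system allows it.
        _ = shouldResumeSession
    case .didChangeRoute(let reason, _, let to):
        print("Route changed (\(reason)) -> \(to)")
    }
}
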
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/RTCAudioStore+AVAudioSessionConfigurationValidator.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/RTCAudioStore+AVAudioSessionConfigurationValidator.swift
new file mode 100644
index 000000000..7761cd382
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/RTCAudioStore+AVAudioSessionConfigurationValidator.swift
@@ -0,0 +1,130 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+import Foundation
+
+extension RTCAudioStore.StoreState.AVAudioSessionConfiguration {
+
+ /// Indicates whether the configuration is part of the documented
+ /// allowlist of `AVAudioSession` combinations.
+ var isValid: Bool {
+ Self.validate(
+ category: category,
+ mode: mode,
+ options: options
+ )
+ }
+}
+
+extension RTCAudioStore.StoreState.AVAudioSessionConfiguration {
+
+ private struct AllowedConfiguration {
+ let modes: Set<AVAudioSession.Mode>
+ let options: AVAudioSession.CategoryOptions
+ }
+
+ // Authoritative allow-list per Apple documentation.
+ private static let allowedConfigurations: [AVAudioSession.Category: AllowedConfiguration] = {
+ var map: [AVAudioSession.Category: AllowedConfiguration] = [:]
+
+ func makeModes(_ modes: [AVAudioSession.Mode]) -> Set<AVAudioSession.Mode> {
+ Set(modes)
+ }
+
+ // .playback
+ var playbackModes: Set<AVAudioSession.Mode> = makeModes(
+ [
+ .default,
+ .moviePlayback,
+ .spokenAudio
+ ]
+ )
+ if #available(iOS 15.0, *) { playbackModes.insert(.voicePrompt) }
+ map[.playback] = AllowedConfiguration(
+ modes: playbackModes,
+ options: [
+ .mixWithOthers,
+ .duckOthers,
+ .interruptSpokenAudioAndMixWithOthers,
+ .defaultToSpeaker,
+ .allowBluetoothA2DP
+ ]
+ )
+
+ // .playAndRecord
+ var playAndRecordModes: Set<AVAudioSession.Mode> =
+ makeModes(
+ [
+ .default,
+ .voiceChat,
+ .videoChat,
+ .gameChat,
+ .videoRecording,
+ .measurement,
+ .spokenAudio
+ ]
+ )
+ if #available(iOS 15.0, *) { playAndRecordModes.insert(.voicePrompt) }
+ let playAndRecordOptions: AVAudioSession.CategoryOptions =
+ [
+ .mixWithOthers,
+ .duckOthers,
+ .interruptSpokenAudioAndMixWithOthers,
+ .defaultToSpeaker,
+ .allowBluetoothHFP,
+ .allowBluetoothA2DP
+ ]
+ map[.playAndRecord] = AllowedConfiguration(
+ modes: playAndRecordModes,
+ options: playAndRecordOptions
+ )
+
+ // .record
+ map[.record] = AllowedConfiguration(
+ modes: makeModes([.default, .measurement]),
+ options: [.duckOthers]
+ )
+
+ // .multiRoute
+ let multiRouteOptions: AVAudioSession.CategoryOptions = [.mixWithOthers]
+ map[.multiRoute] = AllowedConfiguration(
+ modes: makeModes([.default, .measurement]),
+ options: multiRouteOptions
+ )
+
+ // .ambient / .soloAmbient
+ let ambientOptions: AVAudioSession.CategoryOptions =
+ [.mixWithOthers, .duckOthers, .interruptSpokenAudioAndMixWithOthers]
+ map[.ambient] = AllowedConfiguration(
+ modes: makeModes([.default]),
+ options: ambientOptions
+ )
+ map[.soloAmbient] = AllowedConfiguration(
+ modes: makeModes([.default]),
+ options: ambientOptions
+ )
+
+ return map
+ }()
+
+ /// Validates a combination of category, mode, and options against the
+ /// allowlist derived from Apple's documentation.
+ private static func validate(
+ category: AVAudioSession.Category,
+ mode: AVAudioSession.Mode,
+ options: AVAudioSession.CategoryOptions
+ ) -> Bool {
+ guard let allowed = allowedConfigurations[category] else {
+ return false
+ }
+ guard allowed.modes.contains(mode) else {
+ return false
+ }
+ guard allowed.options.contains(options) else {
+ return false
+ }
+ return true
+ }
+}
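
Callers can use the flag as a cheap guard before dispatching, rejecting combinations Apple documents as unsupported instead of waiting for AVAudioSession to throw. A hedged sketch (the function name is illustrative):

/// Returns `true` when the configuration is in the documented allowlist,
/// logging and rejecting it otherwise.
func shouldDispatch(
    _ configuration: RTCAudioStore.StoreState.AVAudioSessionConfiguration
) -> Bool {
    guard configuration.isValid else {
        log.warning(
            "Skipping unsupported AVAudioSession combination: \(configuration).",
            subsystems: .audioSession
        )
        return false
    }
    return true
}
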
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/RTCAudioStore+AVAudioSessionEffect.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/RTCAudioStore+AVAudioSessionEffect.swift
new file mode 100644
index 000000000..42af665a9
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/RTCAudioStore+AVAudioSessionEffect.swift
@@ -0,0 +1,73 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+import Combine
+import Foundation
+import StreamWebRTC
+
+extension RTCAudioStore {
+
+ /// Mirrors the system audio session into the store so reducers can keep a
+ /// coherent view of category, mode, and options that were set by other
+ /// actors such as CallKit or Control Center.
+ final class AVAudioSessionEffect: StoreEffect, @unchecked Sendable {
+
+ @Injected(\.avAudioSessionObserver) private var avAudioSessionObserver
+ private let processingQueue = OperationQueue(maxConcurrentOperationCount: 1)
+ private var audioDeviceModuleCancellable: AnyCancellable?
+ private var avAudioSessionObserverCancellable: AnyCancellable?
+
+ override init() {
+ super.init()
+ }
+
+ /// Subscribes to audio device module availability changes and starts
+ /// forwarding snapshots once a module is configured.
+ override func set(
+ statePublisher: AnyPublisher<StoreState, Never>?
+ ) {
+ avAudioSessionObserverCancellable?.cancel()
+ avAudioSessionObserverCancellable = nil
+ audioDeviceModuleCancellable?.cancel()
+ audioDeviceModuleCancellable = nil
+ avAudioSessionObserver.stopObserving()
+
+ guard let statePublisher else {
+ return
+ }
+
+ audioDeviceModuleCancellable = statePublisher
+ .map(\.audioDeviceModule)
+ .removeDuplicates()
+ .compactMap { $0 }
+ .sink { [weak self] in self?.didUpdate($0) }
+ }
+
+ // MARK: - Private Helpers
+
+ private func didUpdate(_ audioDeviceModule: AudioDeviceModule) {
+ avAudioSessionObserverCancellable?.cancel()
+ avAudioSessionObserverCancellable = nil
+ avAudioSessionObserver.stopObserving()
+
+ avAudioSessionObserverCancellable = avAudioSessionObserver
+ .publisher
+ .removeDuplicates()
+ .sink { [weak self] in self?.didUpdate($0) }
+
+ avAudioSessionObserver.startObserving()
+ }
+
+ private func didUpdate(_ state: AVAudioSession.Snapshot) {
+ dispatcher?.dispatch(
+ [
+ .normal(.avAudioSession(.systemSetCategory(state.category))),
+ .normal(.avAudioSession(.systemSetMode(state.mode))),
+ .normal(.avAudioSession(.systemSetCategoryOptions(state.categoryOptions)))
+ ]
+ )
+ }
+ }
+}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Effects/RTCAudioStore+InterruptionEffect.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Effects/RTCAudioStore+InterruptionEffect.swift
deleted file mode 100644
index 7346d6c8f..000000000
--- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Effects/RTCAudioStore+InterruptionEffect.swift
+++ /dev/null
@@ -1,97 +0,0 @@
-//
-// Copyright © 2025 Stream.io Inc. All rights reserved.
-//
-
-import Foundation
-import StreamWebRTC
-
-extension RTCAudioStore {
-
- /// Handles AVAudioSession interruptions for `RTCAudioStore`.
- ///
- /// This class listens for audio session interruption events and updates the `RTCAudioStore` state accordingly.
- /// It manages the audio session's interruption state, audio enablement, and session activation.
- /// When an interruption begins, it disables audio and marks the session as interrupted.
- /// When the interruption ends, it optionally resumes the session by restoring the audio session category,
- /// mode, and options, with appropriate delays to ensure smooth recovery.
- final class InterruptionEffect: NSObject, RTCAudioSessionDelegate, @unchecked Sendable {
-
- /// The audio session instance used to observe interruption events.
- private let session: AudioSessionProtocol
- /// A weak reference to the `RTCAudioStore` to dispatch state changes.
- private weak var store: RTCAudioStore?
- private let disposableBag = DisposableBag()
-
- /// Creates a new `InterruptionEffect` that listens to the given `RTCAudioStore`'s audio session.
- ///
- /// - Parameter store: The `RTCAudioStore` instance whose session interruptions will be handled.
- /// The effect registers itself as a delegate of the store's audio session.
- init(_ store: RTCAudioStore) {
- session = store.session
- self.store = store
- super.init()
-
- session.add(self)
- }
-
- deinit {
- session.remove(self)
- }
-
- // MARK: - RTCAudioSessionDelegate
-
- /// Called when the audio session begins an interruption.
- ///
- /// Updates the store to indicate the audio session is interrupted and disables audio.
- /// - Parameter session: The audio session that began the interruption.
- func audioSessionDidBeginInterruption(_ session: RTCAudioSession) {
- store?.dispatch(.audioSession(.isInterrupted(true)))
- store?.dispatch(.audioSession(.isAudioEnabled(false)))
- }
-
- /// Called when the audio session ends an interruption.
- ///
- /// Updates the store to indicate the interruption ended. If the session should resume,
- /// it disables audio and session activation briefly, then restores the audio session category,
- /// mode, and options with delays, before re-enabling audio and activating the session.
- ///
- /// - Note: The delay is necessary as CallKit and AVAudioSession together are racey and we
- /// need to ensure that our configuration will go through without other parts of the app making
- /// changes later on.
- ///
- /// - Parameters:
- /// - session: The audio session that ended the interruption.
- /// - shouldResumeSession: A Boolean indicating whether the audio session should resume.
- func audioSessionDidEndInterruption(
- _ session: RTCAudioSession,
- shouldResumeSession: Bool
- ) {
- guard let store else {
- return
- }
-
- store.dispatch(.audioSession(.isInterrupted(false)))
- if shouldResumeSession {
- Task(disposableBag: disposableBag) {
- log.debug(
- "AudioSession will restart...",
- subsystems: .audioSession
- )
- do {
- _ = try await store.restartAudioSessionSync()
- log.debug(
- "AudioSession restart completed.",
- subsystems: .audioSession
- )
- } catch {
- log.error(
- "Audio session restart failed.",
- subsystems: .audioSession,
- error: error
- )
- }
- }
- }
- }
- }
-}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Effects/RTCAudioStore+RouteChangeEffect.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Effects/RTCAudioStore+RouteChangeEffect.swift
deleted file mode 100644
index 7876c70ac..000000000
--- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Effects/RTCAudioStore+RouteChangeEffect.swift
+++ /dev/null
@@ -1,117 +0,0 @@
-//
-// Copyright © 2025 Stream.io Inc. All rights reserved.
-//
-
-import Combine
-import Foundation
-import StreamWebRTC
-
-extension RTCAudioStore {
-
- /// An effect handler that listens for audio session route changes and updates call
- /// settings as needed.
- ///
- /// This class observes changes in the audio route (such as switching between speaker,
- /// Bluetooth, or headphones) and ensures the app's call settings stay in sync with the
- /// current audio configuration.
- final class RouteChangeEffect: NSObject, RTCAudioSessionDelegate {
-
- /// The device being used, injected for device-specific route handling.
- @Injected(\.currentDevice) private var currentDevice
-
- /// The audio session being observed for route changes.
- private let session: AudioSessionProtocol
- /// The RTCAudioStore being updated on route change events.
- private weak var store: RTCAudioStore?
- /// Delegate for notifying about call settings changes.
- private weak var delegate: StreamAudioSessionAdapterDelegate?
- /// Tracks the current call settings subscription.
- private var callSettingsCancellable: AnyCancellable?
- /// The most recent active call settings for route change comparison.
- private var activeCallSettings: CallSettings?
-
- /// Initializes the effect, sets up the route change observer, and subscribes to call settings.
- ///
- /// - Parameters:
- /// - store: The audio store to update on changes.
- /// - callSettingsPublisher: Publishes the latest call settings.
- /// - delegate: Delegate for updating call settings in response to route changes.
- init(
- _ store: RTCAudioStore,
- callSettingsPublisher: AnyPublisher<CallSettings, Never>,
- delegate: StreamAudioSessionAdapterDelegate
- ) {
- session = store.session
- self.store = store
- self.delegate = delegate
- super.init()
-
- callSettingsCancellable = callSettingsPublisher
- .removeDuplicates()
- .dropFirst() // We drop the first one as we allow on init the CallAudioSession to configure as expected.
- .sink { [weak self] in self?.activeCallSettings = $0 }
- session.add(self)
- }
-
- deinit {
- session.remove(self)
- }
-
- // MARK: - RTCAudioSessionDelegate
-
- /// Handles audio route changes and updates call settings if the speaker state
- /// has changed compared to the current configuration.
- ///
- /// - Parameters:
- /// - session: The session where the route change occurred.
- /// - reason: The reason for the route change.
- /// - previousRoute: The previous audio route before the change.
- func audioSessionDidChangeRoute(
- _ session: RTCAudioSession,
- reason: AVAudioSession.RouteChangeReason,
- previousRoute: AVAudioSessionRouteDescription
- ) {
- guard let activeCallSettings else {
- return
- }
-
- /// We rewrite the reference to RTCAudioSession with our internal session in order to allow
- /// easier stubbing for tests. That's a safe operation as our internal session is already pointing
- /// to the shared RTCAudioSession.
- let session = self.session
-
- guard currentDevice.deviceType == .phone else {
- if activeCallSettings.speakerOn != session.currentRoute.isSpeaker {
- log.warning(
- """
- AudioSession didChangeRoute with speakerOn:\(session.currentRoute.isSpeaker)
- while CallSettings have speakerOn:\(activeCallSettings.speakerOn).
- We will update CallSettings to match the AudioSession's
- current configuration
- """,
- subsystems: .audioSession
- )
- delegate?.audioSessionAdapterDidUpdateSpeakerOn(
- session.currentRoute.isSpeaker
- )
- }
- return
- }
-
- switch (activeCallSettings.speakerOn, session.currentRoute.isSpeaker) {
- case (true, false):
- delegate?.audioSessionAdapterDidUpdateSpeakerOn(
- false
- )
-
- case (false, true) where session.category == AVAudioSession.Category.playAndRecord.rawValue:
- delegate?.audioSessionAdapterDidUpdateSpeakerOn(
- true
- )
-
- default:
- break
- }
- }
- }
-}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Extensions/RTCAudioStore+RestartAudioSession.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Extensions/RTCAudioStore+RestartAudioSession.swift
deleted file mode 100644
index 8869e7f2c..000000000
--- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Extensions/RTCAudioStore+RestartAudioSession.swift
+++ /dev/null
@@ -1,93 +0,0 @@
-//
-// Copyright © 2025 Stream.io Inc. All rights reserved.
-//
-
-import Foundation
-
-extension RTCAudioStore {
-
- /// Actions used to restart the audio session in a safe order.
- ///
- /// Sequence: deactivate, short delay, reapply category/mode/options,
- /// reapply output port override, short delay, then reactivate.
- private var restartAudioSessionActions: [RTCAudioStoreAction] {
- let state = self.state
- return [
- .audioSession(.isActive(false)),
- .audioSession(.isAudioEnabled(false)),
- .generic(.delay(seconds: 0.2)),
- .audioSession(
- .setCategory(
- state.category,
- mode: state.mode,
- options: state.options
- )
- ),
- .audioSession(
- .setOverrideOutputPort(state.overrideOutputAudioPort)
- ),
- .generic(.delay(seconds: 0.2)),
- .audioSession(.isAudioEnabled(true)),
- .audioSession(.isActive(true))
- ]
- }
-
- /// Restarts the audio session asynchronously using the store's current
- /// configuration.
- ///
- /// The restart sequence deactivates the session, allows a brief settle,
- /// reapplies category, mode and options, reapplies the output port
- /// override, and reactivates the session.
- ///
- /// - Parameters:
- /// - file: Call-site file used for logging context.
- /// - function: Call-site function used for logging context.
- /// - line: Call-site line used for logging context.
- func restartAudioSession(
- file: StaticString = #file,
- function: StaticString = #function,
- line: UInt = #line
- ) {
- log.debug(
- "Store identifier:RTCAudioStore will restart AudioSession asynchronously.",
- subsystems: .audioSession
- )
- dispatch(
- restartAudioSessionActions,
- file: file,
- function: function,
- line: line
- )
- }
-
- /// Restarts the audio session and suspends until completion.
- ///
- /// Mirrors ``restartAudioSession()`` but executes synchronously and
- /// surfaces errors from the underlying audio-session operations.
- ///
- /// - Parameters:
- /// - file: Call-site file used for logging context.
- /// - function: Call-site function used for logging context.
- /// - line: Call-site line used for logging context.
- /// - Throws: Errors thrown by dispatched audio-session actions.
- func restartAudioSessionSync(
- file: StaticString = #file,
- function: StaticString = #function,
- line: UInt = #line
- ) async throws {
- log.debug(
- "Store identifier:RTCAudioStore will restart AudioSession.",
- subsystems: .audioSession
- )
- try await dispatchAsync(
- restartAudioSessionActions,
- file: file,
- function: function,
- line: line
- )
- log.debug(
- "Store identifier:RTCAudioStore did restart AudioSession.",
- subsystems: .audioSession
- )
- }
-}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Middleware/RTCAudioStoreMiddleware.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Middleware/RTCAudioStoreMiddleware.swift
deleted file mode 100644
index 991b19cd8..000000000
--- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Middleware/RTCAudioStoreMiddleware.swift
+++ /dev/null
@@ -1,28 +0,0 @@
-//
-// Copyright © 2025 Stream.io Inc. All rights reserved.
-//
-
-import Foundation
-
-/// A middleware protocol for intercepting and handling actions applied to the RTCAudioStore state.
-/// Implementers can observe or modify actions as they are processed, enabling custom behavior or side effects.
-protocol RTCAudioStoreMiddleware: AnyObject {
-
- /// Applies an action to the RTCAudioStore state, with context information.
- ///
- /// - Parameters:
- /// - state: The current state of the RTCAudioStore.
- /// - action: The action to be applied to the state.
- /// - file: The source file from which the action originated.
- /// - function: The function from which the action originated.
- /// - line: The line number in the source file where the action originated.
- ///
- /// Use this method to observe or modify actions before they affect the state.
- func apply(
- state: RTCAudioStore.State,
- action: RTCAudioStoreAction,
- file: StaticString,
- function: StaticString,
- line: UInt
- )
-}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore+InterruptionsEffect.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore+InterruptionsEffect.swift
new file mode 100644
index 000000000..0b1db6b93
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore+InterruptionsEffect.swift
@@ -0,0 +1,69 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+import Foundation
+import StreamWebRTC
+
+extension RTCAudioStore {
+
+ /// Converts audio session interruption callbacks into store actions so the
+ /// audio pipeline can gracefully pause and resume.
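+    ///
+    /// For example, when an interruption ends with `shouldResumeSession == true`
+    /// and the store still holds an audio device module, the effect dispatches a
+    /// sequence roughly equivalent to the sketch below (illustrative only; the
+    /// exact actions come from `handle(_:)`):
+    ///
+    /// ```swift
+    /// store.dispatch([
+    ///     .setInterrupted(false),
+    ///     .setRecording(false), // stop the stalled capture
+    ///     .setRecording(true),  // restart it fresh
+    ///     .setMicrophoneMuted(previouslyMutedValue) // hypothetical cached value
+    /// ])
+    /// ```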
+ final class InterruptionsEffect: StoreEffect, @unchecked Sendable {
+
+ private let audioSessionObserver: RTCAudioSessionPublisher
+ private let disposableBag = DisposableBag()
+
+ convenience init(_ source: RTCAudioSession) {
+ self.init(.init(source))
+ }
+
+ init(_ audioSessionObserver: RTCAudioSessionPublisher) {
+ self.audioSessionObserver = audioSessionObserver
+ super.init()
+
+ audioSessionObserver
+ .publisher
+ .sink { [weak self] in self?.handle($0) }
+ .store(in: disposableBag)
+ }
+
+ // MARK: - Private Helpers
+
+ /// Handles the underlying audio session events and dispatches the
+ /// appropriate store actions.
+ private func handle(
+ _ event: RTCAudioSessionPublisher.Event
+ ) {
+ switch event {
+ case .didBeginInterruption:
+ dispatcher?.dispatch(.setInterrupted(true))
+
+ case .didEndInterruption(let shouldResumeSession):
+ var actions: [Namespace.Action] = [
+ .setInterrupted(false)
+ ]
+
+ if
+ shouldResumeSession,
+ let state = stateProvider?(),
+ state.audioDeviceModule != nil {
+ let isRecording = state.isRecording
+ let isMicrophoneMuted = state.isMicrophoneMuted
+
+ if isRecording {
+ actions.append(.setRecording(false))
+ actions.append(.setRecording(true))
+ }
+
+ actions.append(.setMicrophoneMuted(isMicrophoneMuted))
+ }
+ dispatcher?.dispatch(actions.map(\.box))
+
+ default:
+ break
+ }
+ }
+ }
+}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore+RouteChangeEffect.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore+RouteChangeEffect.swift
new file mode 100644
index 000000000..22cf6e109
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore+RouteChangeEffect.swift
@@ -0,0 +1,49 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+import Combine
+import Foundation
+import StreamWebRTC
+
+extension RTCAudioStore {
+
+ /// Bridges `RTCAudioSession` route updates into store state so downstream
+ /// features can react to speaker/headset transitions.
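+    ///
+    /// Downstream code can then observe the route through the store, e.g. (an
+    /// illustrative observer; `disposableBag` stands for any cancellable storage):
+    ///
+    /// ```swift
+    /// RTCAudioStore.shared
+    ///     .publisher(\.currentRoute)
+    ///     .sink { route in log.debug("Audio now routed to: \(route)") }
+    ///     .store(in: disposableBag)
+    /// ```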
+ final class RouteChangeEffect: StoreEffect, @unchecked Sendable {
+
+ private let audioSessionObserver: RTCAudioSessionPublisher
+ private let processingQueue = OperationQueue(maxConcurrentOperationCount: 1)
+ private var disposableBag = DisposableBag()
+
+ convenience init(_ source: RTCAudioSession) {
+ self.init(.init(source))
+ }
+
+ init(_ audioSessionObserver: RTCAudioSessionPublisher) {
+ self.audioSessionObserver = audioSessionObserver
+ super.init()
+
+ audioSessionObserver
+ .publisher
+ .compactMap {
+ switch $0 {
+ case let .didChangeRoute(reason, from, to):
+ return (
+ reason,
+ RTCAudioStore.StoreState.AudioRoute(from),
+ RTCAudioStore.StoreState.AudioRoute(to, reason: reason)
+ )
+ default:
+ return nil
+ }
+ }
+ .receive(on: processingQueue)
+ .log(.debug, subsystems: .audioSession) { "AudioRoute updated \($1) → \($2) due to reason:\($0)." }
+ .map { $0.2 }
+ .sink { [weak self] in self?.dispatcher?.dispatch(.setCurrentRoute($0)) }
+ .store(in: disposableBag)
+ }
+ }
+}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore+StereoPlayoutEffect.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore+StereoPlayoutEffect.swift
new file mode 100644
index 000000000..a6a720ca0
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Effects/RTCAudioStore+StereoPlayoutEffect.swift
@@ -0,0 +1,68 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+import Combine
+import Foundation
+
+extension RTCAudioStore {
+
+ /// Observes the audio device module to detect when stereo playout becomes
+ /// available, keeping the store's stereo state aligned with WebRTC.
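+    ///
+    /// In practice this means that shortly after a route settles (e.g. wired
+    /// headphones are plugged in), the effect asks the ADM to re-evaluate stereo
+    /// playout and mirrors the result into
+    /// `state.stereoConfiguration.playout.enabled` via `.stereo(.setPlayoutEnabled(_:))`.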
+ final class StereoPlayoutEffect: StoreEffect, @unchecked Sendable {
+
+ private let processingQueue = OperationQueue(maxConcurrentOperationCount: 1)
+ private let disposableBag = DisposableBag()
+ private var audioDeviceModuleCancellable: AnyCancellable?
+
+ override func set(
+            statePublisher: AnyPublisher<StoreState, Never>?
+ ) {
+ audioDeviceModuleCancellable?.cancel()
+ audioDeviceModuleCancellable = nil
+ processingQueue.cancelAllOperations()
+ disposableBag.removeAll()
+
+ guard let statePublisher else {
+ return
+ }
+
+ audioDeviceModuleCancellable = statePublisher
+ .map(\.audioDeviceModule)
+ .removeDuplicates()
+ .receive(on: processingQueue)
+ .sink { [weak self] in self?.didUpdate(audioDeviceModule: $0, statePublisher: statePublisher) }
+ }
+
+ // MARK: - Private Helpers
+
+ private func didUpdate(
+ audioDeviceModule: AudioDeviceModule?,
+            statePublisher: AnyPublisher<StoreState, Never>
+ ) {
+ disposableBag.removeAll()
+
+ guard let audioDeviceModule else {
+ return
+ }
+
+            /// This is important to support cases (e.g. wired headphones) that do not
+            /// trigger a valid route change in WebRTC, causing the user to join the
+            /// call without stereo and requiring them to either toggle the speaker or
+            /// reconnect their wired headset.
+ statePublisher
+ .map(\.currentRoute)
+ .removeDuplicates()
+ .debounce(for: .seconds(2), scheduler: processingQueue)
+ .sink { [weak audioDeviceModule] _ in audioDeviceModule?.refreshStereoPlayoutState() }
+ .store(in: disposableBag)
+
+ audioDeviceModule
+ .isStereoPlayoutEnabledPublisher
+ .removeDuplicates()
+ .receive(on: processingQueue)
+ .sink { [weak self] in self?.dispatcher?.dispatch(.stereo(.setPlayoutEnabled($0))) }
+ .store(in: disposableBag)
+ }
+ }
+}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Middleware/RTCAudioStore+AudioDeviceModuleMiddleware.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Middleware/RTCAudioStore+AudioDeviceModuleMiddleware.swift
new file mode 100644
index 000000000..e3e06d30b
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Middleware/RTCAudioStore+AudioDeviceModuleMiddleware.swift
@@ -0,0 +1,155 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+import Foundation
+import StreamWebRTC
+
+extension RTCAudioStore {
+
+ /// Keeps the `AudioDeviceModule` in sync with store-driven intent and
+ /// propagates ADM state changes back into the store.
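+    ///
+    /// For example, dispatching `.setMicrophoneMuted(true)` results in a call to
+    /// `audioDeviceModule.setMuted(true)`, while the ADM's own
+    /// `isMicrophoneMutedPublisher` feeds changes back into the store as
+    /// `.setMicrophoneMuted(_:)` actions, keeping both sides converged.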
+ final class AudioDeviceModuleMiddleware: Middleware,
+ @unchecked Sendable {
+
+ private let disposableBag = DisposableBag()
+
+ /// Responds to store actions that require interacting with the ADM or
+ /// listening for its publisher output.
+ override func apply(
+ state: RTCAudioStore.StoreState,
+ action: RTCAudioStore.StoreAction,
+ file: StaticString,
+ function: StaticString,
+ line: UInt
+ ) {
+ switch action {
+ case .setInterrupted(let value):
+ if let audioDeviceModule = state.audioDeviceModule {
+ log.throwing(
+ "Unable to process setInterrupted:\(value).",
+ subsystems: .audioSession
+ ) {
+ try didSetInterrupted(
+ value,
+ state: state,
+ audioDeviceModule: audioDeviceModule
+ )
+ }
+ }
+
+ case .setRecording(let value):
+ if let audioDeviceModule = state.audioDeviceModule {
+ log.throwing(
+ "Unable to process setRecording:\(value).",
+ subsystems: .audioSession
+ ) {
+ try audioDeviceModule.setRecording(value)
+ }
+ }
+
+ case .setMicrophoneMuted(let value):
+ if let audioDeviceModule = state.audioDeviceModule {
+ log.throwing(
+ "Unable to process setMicrophoneMuted:\(value).",
+ subsystems: .audioSession
+ ) {
+ try didSetMicrophoneMuted(
+ value,
+ state: state,
+ audioDeviceModule: audioDeviceModule
+ )
+ }
+ }
+
+ case .setAudioDeviceModule(let value):
+ log.throwing(
+ "Unable to process setAudioDeviceModule:\(value).",
+ subsystems: .audioSession
+ ) {
+ try didSetAudioDeviceModule(
+ value,
+ state: state
+ )
+ }
+
+ case .stereo(.setPlayoutPreferred(let value)):
+ state.audioDeviceModule?.setStereoPlayoutPreference(value)
+
+ case let .webRTCAudioSession(.setAudioEnabled(value)):
+ log.throwing(
+ "Unable to process setPlayout:\(value).",
+ subsystems: .audioSession
+ ) {
+ try state.audioDeviceModule?.setPlayout(value)
+ }
+
+ default:
+ break
+ }
+ }
+
+ // MARK: - Private Helpers
+
+ /// Reacts to interruption updates by suspending or resuming ADM
+ /// recording as needed.
+ private func didSetInterrupted(
+ _ value: Bool,
+ state: RTCAudioStore.StoreState,
+ audioDeviceModule: AudioDeviceModule
+ ) throws {
+ guard
+ !value,
+ state.isActive,
+ state.isRecording
+ else {
+ return
+ }
+
+ // Restart the ADM
+ try audioDeviceModule.setRecording(false)
+ try audioDeviceModule.setRecording(true)
+ }
+
+ /// Applies the store's microphone muted state to the ADM.
+ private func didSetMicrophoneMuted(
+ _ value: Bool,
+ state: RTCAudioStore.StoreState,
+ audioDeviceModule: AudioDeviceModule
+ ) throws {
+ try audioDeviceModule.setMuted(value)
+ }
+
+ /// Handles ADM swapping by wiring up observers and ensuring the previous
+ /// module is stopped.
+ private func didSetAudioDeviceModule(
+ _ audioDeviceModule: AudioDeviceModule?,
+ state: RTCAudioStore.StoreState
+ ) throws {
+ state.audioDeviceModule?.reset()
+
+ disposableBag.removeAll()
+
+ guard let audioDeviceModule else {
+ return
+ }
+
+ audioDeviceModule.setStereoPlayoutPreference(
+ state.stereoConfiguration.playout.preferred
+ )
+
+ audioDeviceModule
+ .isRecordingPublisher
+ .removeDuplicates()
+ .sink { [weak self] in self?.dispatcher?.dispatch(.audioDeviceModuleSetRecording($0)) }
+ .store(in: disposableBag)
+
+ audioDeviceModule
+ .isMicrophoneMutedPublisher
+ .removeDuplicates()
+ .sink { [weak self] in self?.dispatcher?.dispatch(.setMicrophoneMuted($0)) }
+ .store(in: disposableBag)
+ }
+ }
+}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+Action.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+Action.swift
new file mode 100644
index 000000000..80097e0ca
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+Action.swift
@@ -0,0 +1,183 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+import Foundation
+
+extension RTCAudioStore {
+
+    /// Actions that drive the audio session state machine.
+    ///
+    /// Use these to update the store's cached audio state or to trigger side
+    /// effects in the reducers and middleware that manage the session.
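+    ///
+    /// A minimal dispatch sketch (assuming access to the shared store):
+    ///
+    /// ```swift
+    /// let store = RTCAudioStore.shared
+    /// store.dispatch(.setMicrophoneMuted(true))
+    /// store.dispatch(.avAudioSession(.setMode(.videoChat)))
+    /// ```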
+ public enum StoreAction: Sendable, Equatable, StoreActionBoxProtocol, CustomStringConvertible {
+
+ enum StereoAction: Equatable, Sendable, CustomStringConvertible {
+ case setPlayoutPreferred(Bool)
+ case setPlayoutEnabled(Bool)
+
+ var description: String {
+ switch self {
+ case .setPlayoutPreferred(let value):
+ return ".setPlayoutPreferred(\(value))"
+
+ case .setPlayoutEnabled(let value):
+ return ".setPlayoutEnabled(\(value))"
+ }
+ }
+ }
+
+ enum AVAudioSessionAction: Equatable, Sendable, CustomStringConvertible {
+ case systemSetCategory(AVAudioSession.Category)
+ case setCategory(AVAudioSession.Category)
+ case systemSetMode(AVAudioSession.Mode)
+ case setMode(AVAudioSession.Mode)
+ case systemSetCategoryOptions(AVAudioSession.CategoryOptions)
+ case setCategoryOptions(AVAudioSession.CategoryOptions)
+
+ case setCategoryAndMode(AVAudioSession.Category, mode: AVAudioSession.Mode)
+ case setCategoryAndCategoryOptions(
+ AVAudioSession.Category,
+ categoryOptions: AVAudioSession.CategoryOptions
+ )
+ case setModeAndCategoryOptions(
+ AVAudioSession.Mode,
+ categoryOptions: AVAudioSession.CategoryOptions
+ )
+ case setCategoryAndModeAndCategoryOptions(
+ AVAudioSession.Category,
+ mode: AVAudioSession.Mode,
+ categoryOptions: AVAudioSession.CategoryOptions
+ )
+ case setOverrideOutputAudioPort(AVAudioSession.PortOverride)
+
+ var description: String {
+ switch self {
+ case .systemSetCategory(let category):
+ return ".systemSetCategory(\(category))"
+
+ case .setCategory(let category):
+ return ".setCategory(\(category))"
+
+ case .systemSetMode(let mode):
+ return ".systemSetMode(\(mode))"
+
+ case .setMode(let mode):
+ return ".setMode(\(mode))"
+
+ case .systemSetCategoryOptions(let categoryOptions):
+ return ".systemSetCategoryOptions(\(categoryOptions))"
+
+ case .setCategoryOptions(let categoryOptions):
+ return ".setCategoryOptions(\(categoryOptions))"
+
+ case .setCategoryAndMode(let category, let mode):
+ return ".setCategoryAndMode(\(category), mode:\(mode))"
+
+ case .setCategoryAndCategoryOptions(let category, let categoryOptions):
+ return ".setCategoryAndCategoryOptions(\(category), categoryOptions:\(categoryOptions))"
+
+ case .setModeAndCategoryOptions(let mode, let categoryOptions):
+ return ".setModeAndCategoryOptions(\(mode), categoryOptions:\(categoryOptions))"
+
+ case .setCategoryAndModeAndCategoryOptions(let category, let mode, let categoryOptions):
+ return ".setModeAndCategoryOptions(\(category), mode:\(mode), categoryOptions:\(categoryOptions))"
+
+ case .setOverrideOutputAudioPort(let portOverride):
+ return ".setOverrideOutputAudioPort(\(portOverride))"
+ }
+ }
+ }
+
+ enum WebRTCAudioSessionAction: Equatable, Sendable, CustomStringConvertible {
+ case setAudioEnabled(Bool)
+ case setUseManualAudio(Bool)
+ case setPrefersNoInterruptionsFromSystemAlerts(Bool)
+
+ var description: String {
+ switch self {
+ case .setAudioEnabled(let value):
+ return ".setAudioEnabled(\(value))"
+
+ case .setUseManualAudio(let value):
+ return ".setUseManualAudio(\(value))"
+
+ case .setPrefersNoInterruptionsFromSystemAlerts(let value):
+ return ".setPrefersNoInterruptionsFromSystemAlerts(\(value))"
+ }
+ }
+ }
+
+ enum CallKitAction: Equatable, Sendable, CustomStringConvertible {
+ case activate(AVAudioSession)
+ case deactivate(AVAudioSession)
+
+ var description: String {
+ switch self {
+ case .activate(let value):
+ return ".activate(\(value))"
+
+ case .deactivate(let value):
+ return ".deactivate(\(value))"
+ }
+ }
+ }
+
+ case setActive(Bool)
+ case setInterrupted(Bool)
+ case setRecording(Bool)
+ /// Used to signal from ADM to the store that the recording state has changed.
+ case audioDeviceModuleSetRecording(Bool)
+ case setMicrophoneMuted(Bool)
+ case setHasRecordingPermission(Bool)
+
+ case setAudioDeviceModule(AudioDeviceModule?)
+ case setCurrentRoute(RTCAudioStore.StoreState.AudioRoute)
+
+ case avAudioSession(AVAudioSessionAction)
+ case webRTCAudioSession(WebRTCAudioSessionAction)
+ case stereo(StereoAction)
+ case callKit(CallKitAction)
+
+ var description: String {
+ switch self {
+ case .setActive(let value):
+ return ".setActive(\(value))"
+
+ case .setInterrupted(let value):
+ return ".setInterrupted(\(value))"
+
+ case .setRecording(let value):
+ return ".setRecording(\(value))"
+
+ case .audioDeviceModuleSetRecording(let value):
+ return ".audioDeviceModuleSetRecording(\(value))"
+
+ case .setMicrophoneMuted(let value):
+ return ".setMicrophoneMuted(\(value))"
+
+ case .setHasRecordingPermission(let value):
+ return ".setHasRecordingPermission(\(value))"
+
+ case .setAudioDeviceModule(let value):
+ return ".setAudioDeviceModule(\(value))"
+
+ case .setCurrentRoute(let value):
+ return ".setCurrentRoute(\(value))"
+
+ case .avAudioSession(let value):
+ return ".avAudioSession(\(value))"
+
+ case .webRTCAudioSession(let value):
+ return ".webRTCAudioSession(\(value))"
+
+ case .stereo(let value):
+ return ".stereo(\(value))"
+
+ case .callKit(let value):
+ return ".callKit(\(value))"
+ }
+ }
+ }
+}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+Coordinator.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+Coordinator.swift
new file mode 100644
index 000000000..55e31d5db
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+Coordinator.swift
@@ -0,0 +1,139 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import Foundation
+
+extension RTCAudioStore {
+
+ /// Skips redundant store work by evaluating whether an action would mutate
+ /// the current state before allowing reducers to run.
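+    ///
+    /// For example, dispatching `.setActive(true)` while `state.isActive` is
+    /// already `true` is filtered out here, so reducers and middleware never run
+    /// for no-op actions.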
+ final class Coordinator: StoreCoordinator, @unchecked Sendable {
+ /// Returns `true` when reducers should execute for the given action and
+ /// state combination.
+ override func shouldExecute(
+ action: StoreAction,
+ state: StoreState
+ ) -> Bool {
+ switch action {
+ case let .setActive(value):
+ return value != state.isActive
+
+ case let .setInterrupted(value):
+ return value != state.isInterrupted
+
+ case let .setRecording(value):
+ return value != state.isRecording
+
+ case let .audioDeviceModuleSetRecording(value):
+ return value != state.isRecording
+
+ case let .setMicrophoneMuted(value):
+ return value != state.isMicrophoneMuted
+
+ case let .setHasRecordingPermission(value):
+ return value != state.hasRecordingPermission
+
+ case let .setAudioDeviceModule(value):
+ return value !== state.audioDeviceModule
+
+ case let .setCurrentRoute(value):
+ return value != state.currentRoute
+
+ case let .avAudioSession(value):
+ return shouldExecute(
+ action: value,
+ state: state.audioSessionConfiguration
+ )
+
+ case let .webRTCAudioSession(value):
+ return shouldExecute(
+ action: value,
+ state: state.webRTCAudioSessionConfiguration
+ )
+
+ case .callKit:
+ return true
+
+ case let .stereo(value):
+ return shouldExecute(
+ action: value,
+ state: state.stereoConfiguration
+ )
+ }
+ }
+
+ // MARK: - Private Helpers
+
+ /// Determines if an AVAudioSession action would alter the configuration.
+ private func shouldExecute(
+ action: StoreAction.AVAudioSessionAction,
+ state: StoreState.AVAudioSessionConfiguration
+ ) -> Bool {
+ switch action {
+ case let .systemSetCategory(value):
+ return value != state.category
+
+ case let .systemSetMode(value):
+ return value != state.mode
+
+ case let .systemSetCategoryOptions(value):
+ return value != state.options
+
+ case let .setCategory(value):
+ return value != state.category
+
+ case let .setMode(value):
+ return value != state.mode
+
+ case let .setCategoryOptions(value):
+ return value != state.options
+
+ case let .setCategoryAndMode(category, mode):
+ return category != state.category || mode != state.mode
+
+ case let .setCategoryAndCategoryOptions(category, categoryOptions):
+ return category != state.category || categoryOptions != state.options
+
+ case let .setModeAndCategoryOptions(mode, categoryOptions):
+ return mode != state.mode || categoryOptions != state.options
+
+ case let .setCategoryAndModeAndCategoryOptions(category, mode, categoryOptions):
+ return category != state.category || mode != state.mode || categoryOptions != state.options
+
+ case let .setOverrideOutputAudioPort(value):
+ return value != state.overrideOutputAudioPort
+ }
+ }
+
+ /// Determines if a WebRTC action would change the tracked configuration.
+ private func shouldExecute(
+ action: StoreAction.WebRTCAudioSessionAction,
+ state: StoreState.WebRTCAudioSessionConfiguration
+ ) -> Bool {
+ switch action {
+ case let .setAudioEnabled(value):
+ return value != state.isAudioEnabled
+
+ case let .setUseManualAudio(value):
+ return value != state.useManualAudio
+
+ case let .setPrefersNoInterruptionsFromSystemAlerts(value):
+ return value != state.prefersNoInterruptionsFromSystemAlerts
+ }
+ }
+
+        /// Determines if a stereo action would change the stereo configuration.
+        private func shouldExecute(
+            action: StoreAction.StereoAction,
+            state: StoreState.StereoConfiguration
+        ) -> Bool {
+            switch action {
+            case let .setPlayoutPreferred(value):
+                return state.playout.preferred != value
+
+            case let .setPlayoutEnabled(value):
+                return state.playout.enabled != value
+            }
+        }
+ }
+}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+Namespace.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+Namespace.swift
new file mode 100644
index 000000000..103e289c1
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+Namespace.swift
@@ -0,0 +1,51 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import Foundation
+import StreamWebRTC
+
+extension RTCAudioStore {
+
+    /// Namespace that defines the store configuration for audio session
+    /// management.
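+    ///
+    /// The store is assembled from these pieces roughly as follows, mirroring
+    /// the wiring in `RTCAudioStore.init` (`initialState` stands for a fully
+    /// populated `StoreState`):
+    ///
+    /// ```swift
+    /// let store = Namespace.store(
+    ///     initialState: initialState,
+    ///     reducers: Namespace.reducers(audioSession: audioSession),
+    ///     middleware: Namespace.middleware(audioSession: audioSession),
+    ///     effects: Namespace.effects(audioSession: audioSession)
+    /// )
+    /// ```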
+ enum Namespace: StoreNamespace {
+ typealias State = StoreState
+
+ typealias Action = StoreAction
+
+ static let identifier: String = "io.getstream.audio.store"
+
+ static func reducers(audioSession: RTCAudioSession) -> [Reducer] {
+ [
+ DefaultReducer(audioSession),
+ AVAudioSessionReducer(audioSession),
+ WebRTCAudioSessionReducer(audioSession),
+ CallKitReducer(audioSession)
+ ]
+ }
+
+ static func middleware(audioSession: RTCAudioSession) -> [Middleware] {
+ [
+ AudioDeviceModuleMiddleware()
+ ]
+ }
+
+        static func effects(audioSession: RTCAudioSession) -> Set<StoreEffect<Namespace>> {
+ [
+ InterruptionsEffect(audioSession),
+ StereoPlayoutEffect(),
+ RouteChangeEffect(audioSession),
+ AVAudioSessionEffect()
+ ]
+ }
+
+ static func logger() -> StoreLogger {
+ .init(logSkipped: false)
+ }
+
+ static func coordinator() -> StoreCoordinator {
+ Coordinator()
+ }
+ }
+}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+State.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+State.swift
new file mode 100644
index 000000000..a90c8b201
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/RTCAudioStore+State.swift
@@ -0,0 +1,320 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+import Foundation
+
+extension RTCAudioStore {
+
+    /// The state container for the call's audio session state.
+ struct StoreState: CustomStringConvertible, Encodable, Hashable, Sendable {
+
+ struct StereoConfiguration: CustomStringConvertible, Encodable, Hashable, Sendable {
+ struct Playout: CustomStringConvertible, Encodable, Hashable, Sendable {
+ var preferred: Bool
+ var enabled: Bool
+
+ var description: String { "{ preferred:\(preferred), enabled:\(enabled) }" }
+ }
+
+ var playout: Playout
+
+ var description: String {
+ "{ playout:\(playout) }"
+ }
+ }
+
+ struct AVAudioSessionConfiguration: CustomStringConvertible, Encodable, Hashable, Sendable {
+ var category: AVAudioSession.Category
+ /// The AVAudioSession mode. Encoded as its string value.
+ var mode: AVAudioSession.Mode
+ /// The AVAudioSession category options. Encoded as its raw value.
+ var options: AVAudioSession.CategoryOptions
+ /// The AVAudioSession port override. Encoded as its raw value.
+ var overrideOutputAudioPort: AVAudioSession.PortOverride
+
+ var description: String {
+ " { " +
+ "category:\(category), " +
+ "mode:\(mode), " +
+ "options:\(options), " +
+ "overrideOutputAudioPort:\(overrideOutputAudioPort)" +
+ " }"
+ }
+
+ static func == (
+ lhs: AVAudioSessionConfiguration,
+ rhs: AVAudioSessionConfiguration
+ ) -> Bool {
+ lhs.category == rhs.category
+ && lhs.mode == rhs.mode
+ && lhs.options == rhs.options
+ && lhs.overrideOutputAudioPort == rhs.overrideOutputAudioPort
+ }
+
+ private enum CodingKeys: String, CodingKey {
+ case category
+ case mode
+ case options
+ case overrideOutputAudioPort
+ }
+
+ func encode(to encoder: Encoder) throws {
+ var container = encoder.container(keyedBy: CodingKeys.self)
+ try container.encode(category.rawValue, forKey: .category)
+ try container.encode(mode.rawValue, forKey: .mode)
+ try container.encode(options.rawValue, forKey: .options)
+ try container.encode(
+ overrideOutputAudioPort.rawValue,
+ forKey: .overrideOutputAudioPort
+ )
+ }
+
+ init(
+ category: AVAudioSession.Category,
+ mode: AVAudioSession.Mode,
+ options: AVAudioSession.CategoryOptions,
+ overrideOutputAudioPort: AVAudioSession.PortOverride
+ ) {
+ self.category = category
+ self.mode = mode
+ self.options = options
+ self.overrideOutputAudioPort = overrideOutputAudioPort
+ }
+
+ func hash(into hasher: inout Hasher) {
+ hasher.combine(category.rawValue)
+ hasher.combine(mode.rawValue)
+ hasher.combine(options.rawValue)
+ hasher.combine(overrideOutputAudioPort.rawValue)
+ }
+ }
+
+ struct WebRTCAudioSessionConfiguration: CustomStringConvertible, Encodable, Hashable, Sendable {
+ /// If true, audio is enabled.
+ var isAudioEnabled: Bool
+ /// If true, manual audio management is enabled.
+ var useManualAudio: Bool
+ var prefersNoInterruptionsFromSystemAlerts: Bool
+
+ var description: String {
+ " { " +
+ "isAudioEnabled:\(isAudioEnabled)" +
+ ", useManualAudio:\(useManualAudio)" +
+ ", prefersNoInterruptionsFromSystemAlerts:\(prefersNoInterruptionsFromSystemAlerts)" +
+ " }"
+ }
+ }
+
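+        /// A value-type snapshot of an `AVAudioSessionRouteDescription`,
+        /// capturing inputs, outputs, and the change reason in an equatable,
+        /// encodable form.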
+ struct AudioRoute: Hashable, CustomStringConvertible, Encodable, Sendable {
+
+ struct Port: Hashable, CustomStringConvertible, Encodable, Sendable {
+            private static let externalPorts: Set<AVAudioSession.Port> = [
+ .bluetoothA2DP, .bluetoothLE, .bluetoothHFP, .carAudio, .headphones
+ ]
+
+ private enum CodingKeys: String, CodingKey {
+ case type
+ case name
+ case id
+ }
+
+ var type: String
+ var name: String
+ var id: String
+
+ var isExternal: Bool
+ var isSpeaker: Bool
+ var isReceiver: Bool
+ var channels: Int
+
+ let source: AVAudioSessionPortDescription?
+
+ var description: String {
+ " { id:\(id), name:\(name), type:\(type) }"
+ }
+
+ init(_ source: AVAudioSessionPortDescription) {
+ self.type = source.portType.rawValue
+ self.name = source.portName
+ self.id = source.uid
+ self.isExternal = Self.externalPorts.contains(source.portType)
+ self.isSpeaker = source.portType == .builtInSpeaker
+ self.isReceiver = source.portType == .builtInReceiver
+                self.channels = source.channels?.count ?? 0
+ self.source = source
+ }
+
+ init(
+ type: String,
+ name: String,
+ id: String,
+ isExternal: Bool,
+ isSpeaker: Bool,
+ isReceiver: Bool,
+ channels: Int
+ ) {
+ self.type = type
+ self.name = name
+ self.id = id
+ self.isExternal = isExternal
+ self.isSpeaker = isSpeaker
+ self.isReceiver = isReceiver
+ self.channels = channels
+ self.source = nil
+ }
+ }
+
+ let inputs: [Port]
+ let outputs: [Port]
+ let reason: AVAudioSession.RouteChangeReason
+
+ var isExternal: Bool
+ var isSpeaker: Bool
+ var isReceiver: Bool
+
+ var supportsStereoOutput: Bool
+ var supportsStereoInput: Bool
+
+ var description: String {
+ var result = "{ "
+ result += "inputs:\(inputs)"
+ result += ", outputs:\(outputs)"
+ result += ", reason:\(reason)"
+ result += ", supportsStereoInput:\(supportsStereoInput)"
+ result += ", supportsStereoOutput:\(supportsStereoOutput)"
+ result += " }"
+ return result
+ }
+
+ init(
+ _ source: AVAudioSessionRouteDescription,
+ reason: AVAudioSession.RouteChangeReason = .unknown
+ ) {
+ self.init(
+ inputs: source.inputs.map(Port.init),
+ outputs: source.outputs.map(Port.init),
+ reason: reason
+ )
+ }
+
+ init(
+ inputs: [Port],
+ outputs: [Port],
+ reason: AVAudioSession.RouteChangeReason = .unknown
+ ) {
+ self.inputs = inputs
+ self.outputs = outputs
+ self.reason = reason
+            self.isExternal = outputs.contains { $0.isExternal }
+            self.isSpeaker = outputs.contains { $0.isSpeaker }
+            self.isReceiver = outputs.contains { $0.isReceiver }
+            self.supportsStereoInput = inputs.contains { $0.channels > 1 }
+            self.supportsStereoOutput = outputs.contains { $0.channels > 1 }
+ }
+
+ static let empty = AudioRoute(inputs: [], outputs: [])
+ }
+
+ var isActive: Bool
+ var isInterrupted: Bool
+ var isRecording: Bool
+ var isMicrophoneMuted: Bool
+ var hasRecordingPermission: Bool
+
+ var audioDeviceModule: AudioDeviceModule?
+ var currentRoute: AudioRoute
+
+ var audioSessionConfiguration: AVAudioSessionConfiguration
+ var webRTCAudioSessionConfiguration: WebRTCAudioSessionConfiguration
+ var stereoConfiguration: StereoConfiguration
+
+ var description: String {
+ " { " +
+ "isActive:\(isActive)" +
+ ", isInterrupted:\(isInterrupted)" +
+ ", isRecording:\(isRecording)" +
+ ", isMicrophoneMuted:\(isMicrophoneMuted)" +
+ ", hasRecordingPermission:\(hasRecordingPermission)" +
+ ", audioSessionConfiguration:\(audioSessionConfiguration)" +
+ ", webRTCAudioSessionConfiguration:\(webRTCAudioSessionConfiguration)" +
+ ", stereoConfiguration:\(stereoConfiguration)" +
+ ", audioDeviceModule:\(audioDeviceModule)" +
+ ", currentRoute:\(currentRoute)" +
+ " }"
+ }
+
+ private enum CodingKeys: String, CodingKey {
+ case isActive
+ case isInterrupted
+ case isRecording
+ case isMicrophoneMuted
+ case hasRecordingPermission
+ case audioSessionConfiguration
+ case webRTCAudioSessionConfiguration
+ case stereoConfiguration
+ case audioDeviceModule
+ case currentRoute
+ }
+
+ func encode(to encoder: Encoder) throws {
+ var container = encoder.container(keyedBy: CodingKeys.self)
+ try container.encode(isActive, forKey: .isActive)
+ try container.encode(isInterrupted, forKey: .isInterrupted)
+ try container.encode(isRecording, forKey: .isRecording)
+ try container.encode(isMicrophoneMuted, forKey: .isMicrophoneMuted)
+ try container.encode(
+ hasRecordingPermission,
+ forKey: .hasRecordingPermission
+ )
+ try container.encode(
+ audioSessionConfiguration,
+ forKey: .audioSessionConfiguration
+ )
+ try container.encode(
+ webRTCAudioSessionConfiguration,
+ forKey: .webRTCAudioSessionConfiguration
+ )
+ try container.encode(
+ stereoConfiguration,
+ forKey: .stereoConfiguration
+ )
+ try container.encodeIfPresent(
+ audioDeviceModule,
+ forKey: .audioDeviceModule
+ )
+ try container.encode(currentRoute, forKey: .currentRoute)
+ }
+
+ static func == (lhs: StoreState, rhs: StoreState) -> Bool {
+ lhs.isActive == rhs.isActive
+ && lhs.isInterrupted == rhs.isInterrupted
+ && lhs.isRecording == rhs.isRecording
+ && lhs.isMicrophoneMuted == rhs.isMicrophoneMuted
+ && lhs.hasRecordingPermission == rhs.hasRecordingPermission
+ && lhs.audioSessionConfiguration == rhs.audioSessionConfiguration
+ && lhs.webRTCAudioSessionConfiguration == rhs.webRTCAudioSessionConfiguration
+ && lhs.stereoConfiguration == rhs.stereoConfiguration
+ && lhs.audioDeviceModule === rhs.audioDeviceModule
+ && lhs.currentRoute == rhs.currentRoute
+ }
+
+ func hash(into hasher: inout Hasher) {
+ hasher.combine(isActive)
+ hasher.combine(isInterrupted)
+ hasher.combine(isRecording)
+ hasher.combine(isMicrophoneMuted)
+ hasher.combine(hasRecordingPermission)
+ hasher.combine(audioSessionConfiguration)
+ hasher.combine(webRTCAudioSessionConfiguration)
+ hasher.combine(stereoConfiguration)
+ if let audioDeviceModule {
+ hasher.combine(ObjectIdentifier(audioDeviceModule))
+ } else {
+ hasher.combine(0 as UInt8)
+ }
+ hasher.combine(currentRoute)
+ }
+ }
+}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore+AVAudioSessionReducer.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore+AVAudioSessionReducer.swift
new file mode 100644
index 000000000..09fc0ecbf
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore+AVAudioSessionReducer.swift
@@ -0,0 +1,240 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+import Foundation
+import StreamWebRTC
+
+extension RTCAudioStore.Namespace {
+
+ /// Applies `AVAudioSession` specific actions to both the live WebRTC session
+ /// and the store state, keeping them aligned.
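+    ///
+    /// For instance, `.avAudioSession(.setCategory(.playAndRecord))` applies the
+    /// category to the live session first and, only if that succeeds, records it
+    /// in `state.audioSessionConfiguration` and resets the output port override.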
+ final class AVAudioSessionReducer: Reducer, @unchecked Sendable {
+
+ private let source: AudioSessionProtocol
+
+ init(_ source: AudioSessionProtocol) {
+ self.source = source
+ }
+
+ /// Handles `StoreAction.avAudioSession` cases by mutating the session and
+ /// returning an updated state snapshot.
+ override func reduce(
+ state: State,
+ action: Action,
+ file: StaticString,
+ function: StaticString,
+ line: UInt
+ ) async throws -> State {
+ var updatedState = state
+
+ if case let .setCurrentRoute(value) = action {
+ updatedState.audioSessionConfiguration.overrideOutputAudioPort = value.isSpeaker ? .speaker : .none
+ }
+
+ guard case let .avAudioSession(action) = action else {
+ return updatedState
+ }
+
+ switch action {
+ case let .systemSetCategory(value):
+ updatedState.audioSessionConfiguration.category = value
+
+ case let .systemSetMode(value):
+ updatedState.audioSessionConfiguration.mode = value
+
+ case let .systemSetCategoryOptions(value):
+ updatedState.audioSessionConfiguration.options = value
+
+ case let .setCategory(value):
+ try performUpdate(
+ state: state.audioSessionConfiguration,
+ category: value,
+ mode: state.audioSessionConfiguration.mode,
+ categoryOptions: state.audioSessionConfiguration.options
+ )
+ updatedState.audioSessionConfiguration.category = value
+ updatedState.audioSessionConfiguration.overrideOutputAudioPort = .none
+
+ case let .setMode(value):
+ try performUpdate(
+ state: state.audioSessionConfiguration,
+ category: state.audioSessionConfiguration.category,
+ mode: value,
+ categoryOptions: state.audioSessionConfiguration.options
+ )
+ updatedState.audioSessionConfiguration.mode = value
+ updatedState.audioSessionConfiguration.overrideOutputAudioPort = .none
+
+ case let .setCategoryOptions(value):
+ try performUpdate(
+ state: state.audioSessionConfiguration,
+ category: state.audioSessionConfiguration.category,
+ mode: state.audioSessionConfiguration.mode,
+ categoryOptions: value
+ )
+ updatedState.audioSessionConfiguration.options = value
+ updatedState.audioSessionConfiguration.overrideOutputAudioPort = .none
+
+ case let .setCategoryAndMode(category, mode):
+ try performUpdate(
+ state: state.audioSessionConfiguration,
+ category: category,
+ mode: mode,
+ categoryOptions: state.audioSessionConfiguration.options
+ )
+ updatedState.audioSessionConfiguration.category = category
+ updatedState.audioSessionConfiguration.mode = mode
+ updatedState.audioSessionConfiguration.overrideOutputAudioPort = .none
+
+ case let .setCategoryAndCategoryOptions(category, categoryOptions):
+ try performUpdate(
+ state: state.audioSessionConfiguration,
+ category: category,
+ mode: state.audioSessionConfiguration.mode,
+ categoryOptions: categoryOptions
+ )
+ updatedState.audioSessionConfiguration.category = category
+ updatedState.audioSessionConfiguration.options = categoryOptions
+ updatedState.audioSessionConfiguration.overrideOutputAudioPort = .none
+
+ case let .setModeAndCategoryOptions(mode, categoryOptions):
+ try performUpdate(
+ state: state.audioSessionConfiguration,
+ category: state.audioSessionConfiguration.category,
+ mode: mode,
+ categoryOptions: categoryOptions
+ )
+ updatedState.audioSessionConfiguration.mode = mode
+ updatedState.audioSessionConfiguration.options = categoryOptions
+ updatedState.audioSessionConfiguration.overrideOutputAudioPort = .none
+
+ case let .setCategoryAndModeAndCategoryOptions(category, mode, categoryOptions):
+ try performUpdate(
+ state: state.audioSessionConfiguration,
+ category: category,
+ mode: mode,
+ categoryOptions: categoryOptions
+ )
+ updatedState.audioSessionConfiguration.category = category
+ updatedState.audioSessionConfiguration.mode = mode
+ updatedState.audioSessionConfiguration.options = categoryOptions
+ updatedState.audioSessionConfiguration.overrideOutputAudioPort = .none
+
+ case let .setOverrideOutputAudioPort(value):
+ if state.audioSessionConfiguration.category == .playAndRecord {
+ try source.perform {
+ try $0.overrideOutputAudioPort(value)
+ }
+ updatedState.audioSessionConfiguration.overrideOutputAudioPort = value
+ } else {
+ updatedState = try await setDefaultToSpeaker(
+ state: state,
+ speakerOn: value == .speaker
+ )
+ }
+ }
+
+ return updatedState
+ }
+
+ // MARK: - Private Helpers
+
+        /// Ensures the requested configuration is valid, applies it to the
+        /// session, and refreshes the WebRTC default configuration.
+ private func performUpdate(
+ state: State.AVAudioSessionConfiguration,
+ category: AVAudioSession.Category,
+ mode: AVAudioSession.Mode,
+ categoryOptions: AVAudioSession.CategoryOptions
+ ) throws {
+ guard
+ state.category != category
+ || state.mode != mode
+ || state.options != categoryOptions
+ else {
+ log.debug(
+ "AVAudioSession configuration didn't change category:\(category), mode:\(mode), categoryOptions:\(categoryOptions).",
+ subsystems: .audioSession
+ )
+ return
+ }
+
+ guard
+ State.AVAudioSessionConfiguration(
+ category: category,
+ mode: mode,
+ options: categoryOptions,
+ overrideOutputAudioPort: state.overrideOutputAudioPort
+ ).isValid
+ else {
+ throw ClientError(
+ "Invalid AVAudioSession configuration category:\(category) mode:\(mode) options:\(categoryOptions)."
+ )
+ }
+
+ let requiresRestart = source.isActive
+
+ let webRTCConfiguration = RTCAudioSessionConfiguration.webRTC()
+ webRTCConfiguration.category = category.rawValue
+ webRTCConfiguration.mode = mode.rawValue
+ webRTCConfiguration.categoryOptions = categoryOptions
+
+ try source.perform { session in
+ if requiresRestart {
+ try session.setActive(false)
+ }
+
+ try session.setConfiguration(
+ webRTCConfiguration,
+ active: requiresRestart
+ )
+ }
+
+        /// We update the `webRTC` default configuration because the WebRTC audio
+        /// stack can be restarted for various reasons. When the stack restarts, it
+        /// is reconfigured with the `webRTC` configuration. If that configuration
+        /// no longer matches the state we expect, we may find ourselves in a
+        /// situation that is difficult to recover from, with our callSettings
+        /// failing to apply. By updating the `webRTC` configuration we ensure the
+        /// audio stack starts from the last known state on every restart, making
+        /// recovery simpler.
+ RTCAudioSessionConfiguration.setWebRTC(webRTCConfiguration)
+ }
+
+ /// Updates the `defaultToSpeaker` option to mirror a requested override.
+ private func setDefaultToSpeaker(
+ state: State,
+ speakerOn: Bool
+ ) async throws -> State {
+ var categoryOptions = source.categoryOptions
+ let defaultToSpeakerExists = categoryOptions.contains(.defaultToSpeaker)
+
+ var didUpdate = false
+ switch (speakerOn, defaultToSpeakerExists) {
+ case (true, false):
+ categoryOptions.insert(.defaultToSpeaker)
+ didUpdate = true
+
+ case (false, true):
+ categoryOptions.remove(.defaultToSpeaker)
+ didUpdate = true
+
+ default:
+ break
+ }
+
+ guard didUpdate else {
+ return state
+ }
+
+ return try await reduce(
+ state: state,
+ action: .avAudioSession(.setCategoryOptions(categoryOptions)),
+ file: #file,
+ function: #function,
+ line: #line
+ )
+ }
+ }
+}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore+CallKitReducer.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore+CallKitReducer.swift
new file mode 100644
index 000000000..0971d972f
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore+CallKitReducer.swift
@@ -0,0 +1,48 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import Foundation
+import StreamWebRTC
+
+extension RTCAudioStore.Namespace {
+
+ /// Updates store state in response to CallKit activation events so it stays
+ /// aligned with `RTCAudioSession`.
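+    ///
+    /// A minimal sketch of the expected call site (a hypothetical
+    /// `CXProviderDelegate`; the SDK performs this wiring internally):
+    ///
+    /// ```swift
+    /// func provider(_ provider: CXProvider, didActivate audioSession: AVAudioSession) {
+    ///     RTCAudioStore.shared.dispatch(.callKit(.activate(audioSession)))
+    /// }
+    ///
+    /// func provider(_ provider: CXProvider, didDeactivate audioSession: AVAudioSession) {
+    ///     RTCAudioStore.shared.dispatch(.callKit(.deactivate(audioSession)))
+    /// }
+    /// ```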
+ final class CallKitReducer: Reducer, @unchecked Sendable {
+
+ private let source: AudioSessionProtocol
+
+ init(_ source: AudioSessionProtocol) {
+ self.source = source
+ }
+
+ /// Applies CallKit actions by forwarding the callbacks to the WebRTC
+ /// session and returning the updated activity flag.
+ override func reduce(
+ state: State,
+ action: Action,
+ file: StaticString,
+ function: StaticString,
+ line: UInt
+ ) async throws -> State {
+ guard case let .callKit(action) = action else {
+ return state
+ }
+
+ var updatedState = state
+
+ switch action {
+ case let .activate(audioSession):
+ source.audioSessionDidActivate(audioSession)
+ updatedState.isActive = source.isActive
+
+ case let .deactivate(audioSession):
+ source.audioSessionDidDeactivate(audioSession)
+ updatedState.isActive = source.isActive
+ }
+
+ return updatedState
+ }
+ }
+}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore+DefaultReducer.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore+DefaultReducer.swift
new file mode 100644
index 000000000..8e05fc4c5
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore+DefaultReducer.swift
@@ -0,0 +1,96 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import Foundation
+import StreamWebRTC
+
+extension RTCAudioStore.Namespace {
+
+ /// Handles simple state mutations that do not require direct WebRTC calls
+ /// beyond what is already encoded in the action.
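+    ///
+    /// For example, `.setAudioDeviceModule(nil)` also resets the recording, mute,
+    /// and stereo flags so the state never references a torn-down module.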
+ final class DefaultReducer: Reducer, @unchecked Sendable {
+
+ private let source: AudioSessionProtocol
+
+ init(_ source: AudioSessionProtocol) {
+ self.source = source
+ super.init()
+ }
+
+ /// Applies non-specialised store actions, mutating the state and
+ /// performing lightweight side effects where needed.
+ override func reduce(
+ state: State,
+ action: Action,
+ file: StaticString,
+ function: StaticString,
+ line: UInt
+ ) async throws -> State {
+ var updatedState = state
+
+ switch action {
+ case let .setActive(value):
+ if value != source.isActive {
+ try source.perform {
+ try $0.setActive(value)
+ try $0.avSession.setIsActive(value)
+ }
+ }
+ updatedState.isActive = value
+ try updatedState.audioDeviceModule?.setPlayout(value)
+
+ case let .setInterrupted(value):
+ updatedState.isInterrupted = value
+
+ case let .setRecording(value):
+ updatedState.isRecording = value
+
+ case let .audioDeviceModuleSetRecording(value):
+ updatedState.isRecording = value
+
+ case let .setMicrophoneMuted(value):
+ updatedState.isMicrophoneMuted = value
+
+ case let .setHasRecordingPermission(value):
+ updatedState.hasRecordingPermission = value
+
+ case let .setAudioDeviceModule(value):
+ updatedState.audioDeviceModule = value
+ if value == nil {
+ updatedState.isRecording = false
+ updatedState.isMicrophoneMuted = true
+ updatedState.stereoConfiguration = .init(
+ playout: .init(
+ preferred: false,
+ enabled: false
+ )
+ )
+ }
+
+ case let .setCurrentRoute(value):
+ updatedState.currentRoute = value
+
+ case let .stereo(.setPlayoutPreferred(value)):
+ updatedState.stereoConfiguration.playout.preferred = value
+
+ case let .stereo(.setPlayoutEnabled(value)):
+ updatedState.stereoConfiguration.playout.enabled = value
+
+ case .avAudioSession:
+ break
+
+ case .webRTCAudioSession:
+ break
+
+ case .stereo:
+ break
+
+ case .callKit:
+ break
+ }
+
+ return updatedState
+ }
+ }
+}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore+WebRTCAudioSessionReducer.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore+WebRTCAudioSessionReducer.swift
new file mode 100644
index 000000000..2d976f0d2
--- /dev/null
+++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Namespace/Reducers/RTCAudioStore+WebRTCAudioSessionReducer.swift
@@ -0,0 +1,54 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import Foundation
+import StreamWebRTC
+
+extension RTCAudioStore.Namespace {
+
+ /// Synchronises WebRTC-specific knobs (manual audio, interruptions) with
+ /// the underlying session.
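+    ///
+    /// Note that `.setPrefersNoInterruptionsFromSystemAlerts` only takes effect
+    /// on iOS 14.5 and later; on older systems neither the session nor the store
+    /// state is modified.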
+ final class WebRTCAudioSessionReducer: Reducer, @unchecked Sendable {
+
+ private let source: AudioSessionProtocol
+
+ init(_ source: AudioSessionProtocol) {
+ self.source = source
+ }
+
+ /// Applies `.webRTCAudioSession` actions to both the store and the
+ /// WebRTC session instance.
+ override func reduce(
+ state: State,
+ action: Action,
+ file: StaticString,
+ function: StaticString,
+ line: UInt
+ ) async throws -> State {
+ guard case let .webRTCAudioSession(action) = action else {
+ return state
+ }
+
+ var updatedState = state
+
+ switch action {
+ case let .setAudioEnabled(value):
+ source.isAudioEnabled = value
+ updatedState.webRTCAudioSessionConfiguration.isAudioEnabled = value
+
+ case let .setUseManualAudio(value):
+ source.useManualAudio = value
+ updatedState.webRTCAudioSessionConfiguration.useManualAudio = value
+
+ case let .setPrefersNoInterruptionsFromSystemAlerts(value):
+ if #available(iOS 14.5, *) {
+ try source.setPrefersNoInterruptionsFromSystemAlerts(value)
+ updatedState.webRTCAudioSessionConfiguration.prefersNoInterruptionsFromSystemAlerts = value
+ }
+ }
+
+ return updatedState
+ }
+ }
+}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/RTCAudioStore.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/RTCAudioStore.swift
index d74b3a49a..1e3e32ab4 100644
--- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/RTCAudioStore.swift
+++ b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/RTCAudioStore.swift
@@ -6,291 +6,127 @@ import Combine
import Foundation
import StreamWebRTC
-/// Stores and manages the audio session state for real-time communication calls.
-///
-/// `RTCAudioStore` coordinates actions, state updates, and reducers for audio
-/// session control. It centralizes audio configuration, provides state
-/// observation, and enables serial action processing to avoid concurrency
-/// issues. Use this type to access and manage all call audio state in a
-/// thread-safe, observable way.
+/// Redux-style store that keeps WebRTC, CallKit, and app audio state aligned
+/// while exposing Combine publishers to observers.
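+///
+/// A typical interaction sketch (illustrative call sites):
+///
+/// ```swift
+/// let audioStore = RTCAudioStore.shared
+/// audioStore.dispatch(.setActive(true))
+/// let cancellable = audioStore
+///     .publisher(\.isActive)
+///     .sink { log.debug("Audio session active: \($0)") }
+/// ```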
final class RTCAudioStore: @unchecked Sendable {
- static let shared = RTCAudioStore()
-
- /// The current state of the audio session.
- var state: State { stateSubject.value }
+    private let store: Store<Namespace>
- /// The underlying WebRTC audio session being managed.
- let session: AudioSessionProtocol
-
-    private let stateSubject: CurrentValueSubject<State, Never>
- private let processingQueue = OperationQueue(maxConcurrentOperationCount: 1)
+ /// Shared instance used by the dependency injection container.
+ static let shared = RTCAudioStore()
- @Atomic private(set) var middleware: [RTCAudioStoreMiddleware] = []
- @Atomic private(set) var reducers: [RTCAudioStoreReducer] = []
+ var state: Namespace.State { store.state }
+ private let audioSession: RTCAudioSession
+ /// Creates a store backed by the provided WebRTC audio session instance.
+ /// - Parameter audioSession: The underlying WebRTC audio session.
init(
- session: AudioSessionProtocol = RTCAudioSession.sharedInstance(),
- underlyingQueue: dispatch_queue_t? = .global(qos: .userInteractive)
+ audioSession: RTCAudioSession = .sharedInstance()
) {
- self.session = session
-
- stateSubject = .init(
- .init(
- isActive: session.isActive,
+ self.audioSession = audioSession
+ self.store = Namespace.store(
+ initialState: .init(
+ isActive: false,
isInterrupted: false,
- prefersNoInterruptionsFromSystemAlerts: session.prefersNoInterruptionsFromSystemAlerts,
- isAudioEnabled: session.isAudioEnabled,
- useManualAudio: session.useManualAudio,
- category: .init(rawValue: session.category),
- mode: .init(rawValue: session.mode),
- options: session.categoryOptions,
- overrideOutputAudioPort: .none,
- hasRecordingPermission: session.recordPermissionGranted
- )
+ isRecording: false,
+ isMicrophoneMuted: true,
+ hasRecordingPermission: false,
+ audioDeviceModule: nil,
+ currentRoute: .init(audioSession.currentRoute),
+ audioSessionConfiguration: .init(
+ category: .soloAmbient,
+ mode: .default,
+ options: [],
+ overrideOutputAudioPort: .none
+ ),
+ webRTCAudioSessionConfiguration: .init(
+ isAudioEnabled: false,
+ useManualAudio: false,
+ prefersNoInterruptionsFromSystemAlerts: false
+ ),
+ stereoConfiguration: .init(
+ playout: .init(
+ preferred: false,
+ enabled: false
+ )
+ )
+ ),
+ reducers: Namespace.reducers(audioSession: audioSession),
+ middleware: Namespace.middleware(audioSession: audioSession),
+ effects: Namespace.effects(audioSession: audioSession)
)
- processingQueue.underlyingQueue = underlyingQueue
-
- add(RTCAudioSessionReducer(store: self))
- dispatch(.audioSession(.setPrefersNoInterruptionsFromSystemAlerts(true)))
- dispatch(.audioSession(.useManualAudio(true)))
- dispatch(.audioSession(.isAudioEnabled(false)))
+ store.dispatch([
+ .normal(.webRTCAudioSession(.setPrefersNoInterruptionsFromSystemAlerts(true))),
+ .normal(.webRTCAudioSession(.setUseManualAudio(true))),
+ .normal(.webRTCAudioSession(.setAudioEnabled(false)))
+ ])
}
- // MARK: - State Observation
+ // MARK: - Observation
- /// Publishes changes to the specified state property.
- ///
- /// Use this to observe changes for a specific audio state key path.
-    func publisher<Value: Equatable>(
-        _ keyPath: KeyPath<State, Value>
-    ) -> AnyPublisher<Value, Never> {
- stateSubject
- .map { $0[keyPath: keyPath] }
- .removeDuplicates()
- .eraseToAnyPublisher()
+ func add(_ middleware: Middleware) {
+ store.add(middleware)
}
- // MARK: - Reducers
-
- /// Adds middleware to observe or intercept audio actions.
-    func add<T: RTCAudioStoreMiddleware>(_ value: T) {
- guard middleware.first(where: { $0 === value }) == nil else {
- return
- }
- middleware.append(value)
- }
-
- /// Removes previously added middleware.
-    func remove<T: RTCAudioStoreMiddleware>(_ value: T) {
- middleware = middleware.filter { $0 !== value }
- }
-
- // MARK: - Reducers
-
- /// Adds a reducer to handle audio session actions.
-    func add<T: RTCAudioStoreReducer>(_ value: T) {
- guard reducers.first(where: { $0 === value }) == nil else {
- return
- }
- reducers.append(value)
- }
-
-    /// Removes a previously added reducer.
-    func remove<T: RTCAudioStoreReducer>(_ value: T) {
- reducers = reducers.filter { $0 !== value }
+ /// Emits values when the provided key path changes within the store state.
+ /// - Parameter keyPath: The state value to observe.
+ /// - Returns: A publisher of distinct values for the key path.
+    func publisher<Value: Equatable>(
+        _ keyPath: KeyPath<Namespace.State, Value>
+    ) -> AnyPublisher<Value, Never> {
+ store.publisher(keyPath)
}
- // MARK: - Actions dispatch
-
- /// Dispatches an audio store action asynchronously and waits for completion.
- func dispatchAsync(
- _ actions: [RTCAudioStoreAction],
- file: StaticString = #file,
- function: StaticString = #function,
- line: UInt = #line
- ) async throws {
- try await processingQueue.addSynchronousTaskOperation { [weak self] in
- guard let self else {
- return
- }
-
- for action in actions {
- await applyDelayIfRequired(for: action)
-
- if case let .failable(nestedAction) = action {
- do {
- try perform(
- nestedAction,
- file: file,
- function: function,
- line: line
- )
- } catch {
- log.warning(
- "RTCAudioStore action:\(nestedAction) failed with error:\(error).",
- functionName: function,
- fileName: file,
- lineNumber: line
- )
- }
- } else {
- try perform(
- action,
- file: file,
- function: function,
- line: line
- )
- }
- }
- }
- }
+ // MARK: - Dispatch
- /// Dispatches an audio store action asynchronously and waits for completion.
- func dispatchAsync(
- _ action: RTCAudioStoreAction,
+    /// Dispatches boxed actions, preserving call-site metadata for tracing.
+    @discardableResult
+ func dispatch(
+ _ actions: [StoreActionBox],
file: StaticString = #file,
function: StaticString = #function,
line: UInt = #line
- ) async throws {
- try await dispatchAsync(
- [action],
+ ) -> StoreTask {
+ store.dispatch(
+ actions,
file: file,
function: function,
line: line
)
}
+    /// Dispatches a sequence of namespace actions to the underlying store.
+    @discardableResult
func dispatch(
- _ actions: [RTCAudioStoreAction],
+ _ actions: [Namespace.Action],
file: StaticString = #file,
function: StaticString = #function,
line: UInt = #line
- ) {
- processingQueue.addTaskOperation { [weak self] in
- guard let self else {
- return
- }
-
- for action in actions {
- do {
- await applyDelayIfRequired(for: action)
-
- if case let .failable(nestedAction) = action {
- do {
- try perform(
- nestedAction,
- file: file,
- function: function,
- line: line
- )
- } catch {
- log.warning(
- "RTCAudioStore action:\(nestedAction) failed with error:\(error).",
- functionName: function,
- fileName: file,
- lineNumber: line
- )
- }
- } else {
- try perform(
- action,
- file: file,
- function: function,
- line: line
- )
- }
- } catch {
- log.error(
- error,
- subsystems: .audioSession,
- functionName: function,
- fileName: file,
- lineNumber: line
- )
- }
- }
- }
+ ) -> StoreTask {
+ store.dispatch(
+ actions,
+ file: file,
+ function: function,
+ line: line
+ )
}
- /// Dispatches an audio store action for processing on the queue.
+    /// Dispatches a single action by boxing it before forwarding to the
+    /// underlying store implementation.
+    @discardableResult
func dispatch(
- _ action: RTCAudioStoreAction,
- file: StaticString = #file,
- function: StaticString = #function,
- line: UInt = #line
- ) {
- dispatch([action], file: file, function: function, line: line)
- }
-
- // MARK: - Private Helpers
-
- private func perform(
- _ action: RTCAudioStoreAction,
+ _ action: Namespace.Action,
file: StaticString = #file,
function: StaticString = #function,
line: UInt = #line
- ) throws {
- let state = stateSubject.value
-
- let middleware = middleware
- let reducers = reducers
-
- middleware.forEach {
- $0.apply(
- state: state,
- action: action,
- file: file,
- function: function,
- line: line
- )
- }
-
- do {
- let updatedState = try reducers
- .reduce(state) {
- try $1.reduce(
- state: $0,
- action: action,
- file: file,
- function: function,
- line: line
- )
- }
-
- stateSubject.send(updatedState)
-
- log.debug(
- "Store identifier:RTCAudioStore completed action:\(action) state:\(updatedState).",
- subsystems: .audioSession,
- functionName: function,
- fileName: file,
- lineNumber: line
- )
- } catch {
- log.error(
- "Store identifier:RTCAudioStore failed to apply action:\(action) state:\(state).",
- subsystems: .audioSession,
- error: error,
- functionName: function,
- fileName: file,
- lineNumber: line
- )
- throw error
- }
- }
-
- /// Delays are important for flows like interruptionEnd where we need to perform multiple operations
- /// at once while the same session may be accessed/modified from another part of the app (e.g. CallKit).
- private func applyDelayIfRequired(for action: RTCAudioStoreAction) async {
- guard
- case let .generic(.delay(interval)) = action
- else {
- return
- }
-
- try? await Task.sleep(nanoseconds: UInt64(1_000_000_000 * interval))
+ ) -> StoreTask {
+ store.dispatch(
+ [action],
+ file: file,
+ function: function,
+ line: line
+ )
}
}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Reducers/CallKitAudioSessionReducer.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Reducers/CallKitAudioSessionReducer.swift
deleted file mode 100644
index 01cba71f2..000000000
--- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Reducers/CallKitAudioSessionReducer.swift
+++ /dev/null
@@ -1,71 +0,0 @@
-//
-// Copyright © 2025 Stream.io Inc. All rights reserved.
-//
-
-import Foundation
-import StreamWebRTC
-
-/// A reducer that manages audio session state changes triggered by CallKit.
-///
-/// `CallKitAudioSessionReducer` implements the `RTCAudioStoreReducer` protocol
-/// and is responsible for updating the audio state in response to CallKit-related
-/// actions, such as audio session activation or deactivation. This allows for
-/// proper coordination of the WebRTC audio session lifecycle when the system
-/// audio session is managed externally by CallKit.
-final class CallKitAudioSessionReducer: RTCAudioStoreReducer {
-
- /// The underlying WebRTC audio session that is managed by this reducer.
- private let source: AudioSessionProtocol
-
- /// Creates a new reducer for handling CallKit-related audio session changes.
- ///
- /// - Parameter source: The `RTCAudioSession` instance to manage. Defaults to
- /// the shared singleton instance.
- init(store: RTCAudioStore) {
- source = store.session
- }
-
- // MARK: - RTCAudioStoreReducer
-
- /// Updates the audio session state based on a CallKit-related action.
- ///
- /// This method responds to `.callKit` actions from the audio store, updating
- /// the state to reflect changes triggered by CallKit, such as activating or
- /// deactivating the audio session. The reducer delegates the activation or
- /// deactivation to the underlying `RTCAudioSession`.
- ///
- /// - Parameters:
- /// - state: The current audio session state.
- /// - action: The audio store action to handle.
- /// - file: The file from which the action originated (used for logging).
- /// - function: The function from which the action originated (used for logging).
- /// - line: The line number from which the action originated (used for logging).
- /// - Returns: The updated audio session state after processing the action.
- func reduce(
- state: RTCAudioStore.State,
- action: RTCAudioStoreAction,
- file: StaticString,
- function: StaticString,
- line: UInt
- ) throws -> RTCAudioStore.State {
- guard
- case let .callKit(action) = action
- else {
- return state
- }
-
- var updatedState = state
-
- switch action {
- case let .activate(audioSession):
- source.audioSessionDidActivate(audioSession)
- updatedState.isActive = source.isActive
-
- case let .deactivate(audioSession):
- source.audioSessionDidDeactivate(audioSession)
- updatedState.isActive = source.isActive
- }
-
- return updatedState
- }
-}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Reducers/RTCAudioSessionReducer.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Reducers/RTCAudioSessionReducer.swift
deleted file mode 100644
index fdc70458f..000000000
--- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Reducers/RTCAudioSessionReducer.swift
+++ /dev/null
@@ -1,146 +0,0 @@
-//
-// Copyright © 2025 Stream.io Inc. All rights reserved.
-//
-
-import Foundation
-import StreamWebRTC
-
-/// A reducer responsible for managing changes to the audio session state within the WebRTC context.
-/// This class listens for audio-related actions and applies corresponding updates to the shared
-/// `RTCAudioSession` instance, ensuring the audio session is configured and controlled consistently.
-/// It handles activation, interruption, audio enabling, category settings, output port overrides,
-/// and permissions, encapsulating the logic for applying these changes safely and atomically.
-final class RTCAudioSessionReducer: RTCAudioStoreReducer {
-
- private let source: AudioSessionProtocol
-
- /// Initializes the reducer with a given `RTCAudioSession` source.
- /// - Parameter source: The audio session instance to manage. Defaults to the shared singleton.
- init(store: RTCAudioStore) {
- source = store.session
- }
-
- // MARK: - RTCAudioStoreReducer
-
- /// Processes an audio-related action and returns the updated audio store state.
- ///
- /// This method interprets the provided action, performs necessary operations on the underlying
- /// `RTCAudioSession`, and returns a new state reflecting any changes. It safely handles session
- /// configuration updates and respects current state to avoid redundant operations.
- ///
- /// - Parameters:
- /// - state: The current audio store state.
- /// - action: The action to apply to the state.
- /// - file: The source file from which the action originated.
- /// - function: The function from which the action originated.
- /// - line: The line number from which the action originated.
- /// - Throws: Rethrows errors from audio session configuration operations.
- /// - Returns: The updated audio store state after applying the action.
- func reduce(
- state: RTCAudioStore.State,
- action: RTCAudioStoreAction,
- file: StaticString,
- function: StaticString,
- line: UInt
- ) throws -> RTCAudioStore.State {
- guard
- case let .audioSession(action) = action
- else {
- return state
- }
-
- var updatedState = state
-
- switch action {
- case let .isActive(value):
- guard updatedState.isActive != value else {
- break
- }
- try source.perform { try $0.setActive(value) }
- updatedState.isActive = value
-
- case let .isInterrupted(value):
- updatedState.isInterrupted = value
-
- case let .isAudioEnabled(value):
- source.isAudioEnabled = value
- updatedState.isAudioEnabled = value
-
- case let .useManualAudio(value):
- source.useManualAudio = value
- updatedState.useManualAudio = value
-
- case let .setCategory(category, mode, options):
- try source.perform {
- /// We update the `webRTC` default configuration because, the WebRTC audioStack
- /// can be restarted for various reasons. When the stack restarts it gets reconfigured
- /// with the `webRTC` configuration. If then the configuration is invalid compared
- /// to the state we expect we may find ourselves in a difficult to recover situation,
- /// as our callSetting may be failing to get applied.
- /// By updating the `webRTC` configuration we ensure that the audioStack will
- /// start from the last known state in every restart, making things simpler to recover.
- let webRTCConfiguration = RTCAudioSessionConfiguration.webRTC()
- webRTCConfiguration.category = category.rawValue
- webRTCConfiguration.mode = mode.rawValue
- webRTCConfiguration.categoryOptions = options
-
- try $0.setConfiguration(webRTCConfiguration)
- RTCAudioSessionConfiguration.setWebRTC(webRTCConfiguration)
- }
-
- updatedState.category = category
- updatedState.mode = mode
- updatedState.options = options
-
- case let .setOverrideOutputPort(port):
- try source.perform {
- try $0.overrideOutputAudioPort(port)
- }
-
- updatedState.overrideOutputAudioPort = port
-
- case let .setPrefersNoInterruptionsFromSystemAlerts(value):
- if #available(iOS 14.5, *) {
- try source.perform {
- try $0.setPrefersNoInterruptionsFromSystemAlerts(value)
- }
-
- updatedState.prefersNoInterruptionsFromSystemAlerts = value
- }
-
- case let .setHasRecordingPermission(value):
- updatedState.hasRecordingPermission = value
-
- case let .setAVAudioSessionActive(value):
- /// In the case where audioOutputOn has changed the order of actions matters
- /// When activating we need:
- /// 1. activate AVAudioSession
- /// 2. set isAudioEnabled = true
- /// 3. set RTCAudioSession.isActive = true
- ///
- /// When deactivating we need:
- /// 1. set RTCAudioSession.isActive = false
- /// 2. set isAudioEnabled = false
- /// 3. deactivate AVAudioSession
- ///
- /// - Weird behaviour:
- /// We ignore the errors in AVAudioSession as in the case of CallKit we may fail to
- /// deactivate the call but the following calls will ensure that there is no audio.
- try source.perform {
- if value {
- try? $0.avSession.setIsActive(value)
- $0.isAudioEnabled = value
- try $0.setActive(value)
- } else {
- try? $0.setActive(value)
- $0.isAudioEnabled = value
- try? $0.avSession.setIsActive(value)
- }
- }
- updatedState.isActive = value
- updatedState.isAudioEnabled = value
- }
-
- return updatedState
- }
-}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Reducers/RTCAudioStoreReducer.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Reducers/RTCAudioStoreReducer.swift
deleted file mode 100644
index 27773100f..000000000
--- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Reducers/RTCAudioStoreReducer.swift
+++ /dev/null
@@ -1,30 +0,0 @@
-//
-// Copyright © 2025 Stream.io Inc. All rights reserved.
-//
-
-import Foundation
-
-/// A protocol that defines how to handle state changes in the RTCAudioStore.
-///
-/// Implementers of this protocol provide logic to process actions and produce a new state.
-/// This is useful for managing audio-related state in a predictable and testable way.
-protocol RTCAudioStoreReducer: AnyObject {
-
- /// Processes an action and returns the updated state of the RTCAudioStore.
- ///
- /// - Parameters:
- /// - state: The current state before the action is applied.
- /// - action: The action to be handled which may modify the state.
- /// - file: The source file where the action was dispatched (for debugging).
- /// - function: The function name where the action was dispatched (for debugging).
- /// - line: The line number where the action was dispatched (for debugging).
- /// - Throws: An error if the state reduction fails.
- /// - Returns: The new state after applying the action.
- func reduce(
- state: RTCAudioStore.State,
- action: RTCAudioStoreAction,
- file: StaticString,
- function: StaticString,
- line: UInt
- ) throws -> RTCAudioStore.State
-}
diff --git a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/State/RTCAudioStore+State.swift b/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/State/RTCAudioStore+State.swift
deleted file mode 100644
index 340d27909..000000000
--- a/Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/State/RTCAudioStore+State.swift
+++ /dev/null
@@ -1,90 +0,0 @@
-//
-// Copyright © 2025 Stream.io Inc. All rights reserved.
-//
-
-import AVFoundation
-import Foundation
-import StreamWebRTC
-
-extension RTCAudioStore {
- /// A value type representing the current state of the RTCAudioStore.
- ///
- /// This struct encapsulates all relevant audio session properties, including
- /// activation, interruption, permissions, and AVAudioSession configuration.
- /// Properties are explicitly encoded for diagnostics, analytics, or
- /// persistence. Non-encodable AVFoundation types are encoded using their
- /// string or raw value representations to ensure compatibility.
- ///
- /// - Note: Properties such as `category`, `mode`, `options`, and
- /// `overrideOutputAudioPort` are encoded as their string or raw values.
- struct State: Equatable, Encodable {
-
- /// Indicates if the audio session is currently active.
- var isActive: Bool
- /// Indicates if the audio session is currently interrupted.
- var isInterrupted: Bool
- /// If true, prefers no interruptions from system alerts.
- var prefersNoInterruptionsFromSystemAlerts: Bool
- /// If true, audio is enabled.
- var isAudioEnabled: Bool
- /// If true, manual audio management is enabled.
- var useManualAudio: Bool
- /// The AVAudioSession category. Encoded as its string value.
- var category: AVAudioSession.Category
- /// The AVAudioSession mode. Encoded as its string value.
- var mode: AVAudioSession.Mode
- /// The AVAudioSession category options. Encoded as its raw value.
- var options: AVAudioSession.CategoryOptions
- /// The AVAudioSession port override. Encoded as its raw value.
- var overrideOutputAudioPort: AVAudioSession.PortOverride
- /// Indicates if the app has permission to record audio.
- var hasRecordingPermission: Bool
-
- /// The initial default state for the audio store.
- static let initial = State(
- isActive: false,
- isInterrupted: false,
- prefersNoInterruptionsFromSystemAlerts: true,
- isAudioEnabled: false,
- useManualAudio: false,
- category: .playAndRecord,
- mode: .voiceChat,
- options: .allowBluetooth,
- overrideOutputAudioPort: .none,
- hasRecordingPermission: false
- )
-
- /// Encodes this state into the given encoder.
- ///
- /// AVFoundation types are encoded as their string or raw value
- /// representations for compatibility.
- /// - Parameter encoder: The encoder to write data to.
- func encode(to encoder: Encoder) throws {
- var container = encoder.container(keyedBy: CodingKeys.self)
- try container.encode(isActive, forKey: .isActive)
- try container.encode(isInterrupted, forKey: .isInterrupted)
- try container.encode(prefersNoInterruptionsFromSystemAlerts, forKey: .prefersNoInterruptionsFromSystemAlerts)
- try container.encode(isAudioEnabled, forKey: .isAudioEnabled)
- try container.encode(useManualAudio, forKey: .useManualAudio)
- try container.encode(category.rawValue, forKey: .category)
- try container.encode(mode.rawValue, forKey: .mode)
- try container.encode(options.rawValue, forKey: .options)
- try container.encode(overrideOutputAudioPort.rawValue, forKey: .overrideOutputAudioPort)
- try container.encode(hasRecordingPermission, forKey: .hasRecordingPermission)
- }
-
- /// Coding keys for encoding and decoding the state.
- private enum CodingKeys: String, CodingKey {
- case isActive
- case isInterrupted
- case prefersNoInterruptionsFromSystemAlerts
- case isAudioEnabled
- case useManualAudio
- case category
- case mode
- case options
- case overrideOutputAudioPort
- case hasRecordingPermission
- }
- }
-}
diff --git a/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapterDelegate.swift b/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapterDelegate.swift
index d51906136..ed62582b0 100644
--- a/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapterDelegate.swift
+++ b/Sources/StreamVideo/Utils/AudioSession/StreamAudioSessionAdapterDelegate.swift
@@ -12,6 +12,9 @@ protocol StreamAudioSessionAdapterDelegate: AnyObject {
/// - audioSession: The `AudioSession` instance that made the update.
/// - callSettings: The updated `CallSettings`.
func audioSessionAdapterDidUpdateSpeakerOn(
- _ speakerOn: Bool
+ _ speakerOn: Bool,
+ file: StaticString,
+ function: StaticString,
+ line: UInt
)
}
diff --git a/Sources/StreamVideo/Utils/CustomStringInterpolation/String.StringInterpolation+Nil.swift b/Sources/StreamVideo/Utils/CustomStringInterpolation/String.StringInterpolation+Nil.swift
index 0f0536309..3f64e9612 100644
--- a/Sources/StreamVideo/Utils/CustomStringInterpolation/String.StringInterpolation+Nil.swift
+++ b/Sources/StreamVideo/Utils/CustomStringInterpolation/String.StringInterpolation+Nil.swift
@@ -5,7 +5,19 @@
import Foundation
extension String.StringInterpolation {
+ /// Appends a textual representation of an optional, replacing `nil` with
+ /// the literal string `"nil"`.
mutating func appendInterpolation<T: CustomStringConvertible>(_ value: T?) {
appendInterpolation(value ?? "nil" as CustomStringConvertible)
}
+
+ /// Appends object references using `CustomStringConvertible` when
+ /// available, otherwise falls back to the memory address.
+ mutating func appendInterpolation<T: AnyObject>(_ value: T) {
+ if let convertible = value as? CustomStringConvertible {
+ appendInterpolation(convertible)
+ } else {
+ appendInterpolation("\(Unmanaged.passUnretained(value).toOpaque())")
+ }
+ }
}
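For reference, a self-contained version of the object-interpolation fallback that can be exercised outside the SDK; the `object:` label exists only to avoid overload ambiguity in this standalone sketch:

```swift
import Foundation

extension String.StringInterpolation {
    mutating func appendInterpolation<T: AnyObject>(object value: T) {
        if let convertible = value as? CustomStringConvertible {
            appendLiteral(convertible.description)
        } else {
            // Fall back to the instance's memory address.
            appendLiteral("\(Unmanaged.passUnretained(value).toOpaque())")
        }
    }
}

final class Opaque {}
final class Described: CustomStringConvertible {
    var description: String { "Described()" }
}

print("\(object: Described())") // "Described()"
print("\(object: Opaque())")    // e.g. "0x0000600000204080"
```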
diff --git a/Sources/StreamVideo/Utils/Logger/Logger+ThrowingExecution.swift b/Sources/StreamVideo/Utils/Logger/Logger+ThrowingExecution.swift
new file mode 100644
index 000000000..54e4377c6
--- /dev/null
+++ b/Sources/StreamVideo/Utils/Logger/Logger+ThrowingExecution.swift
@@ -0,0 +1,32 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import Foundation
+
+extension Logger {
+
+ /// Executes a throwing operation and routes any failures to the logging
+ /// backend using the supplied metadata.
+ func throwing(
+ _ message: @autoclosure () -> String = "",
+ subsystems: LogSubsystem,
+ file: StaticString = #file,
+ function: StaticString = #function,
+ line: UInt = #line,
+ _ operation: () throws -> Void
+ ) {
+ do {
+ try operation()
+ } catch {
+ self.error(
+ message(),
+ subsystems: subsystems,
+ error: error,
+ functionName: function,
+ fileName: file,
+ lineNumber: line
+ )
+ }
+ }
+}
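An illustrative call site for the helper, using the SDK's global `log` handle and the `.audioSession` subsystem seen elsewhere in this diff; the session call itself is only an example. Failures are logged with the supplied metadata rather than rethrown:

```swift
import AVFoundation

log.throwing(
    "Failed to activate the audio session",
    subsystems: .audioSession
) {
    try AVAudioSession.sharedInstance().setActive(true)
}
```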
diff --git a/Sources/StreamVideo/Utils/Logger/Logger+WebRTC.swift b/Sources/StreamVideo/Utils/Logger/Logger+WebRTC.swift
new file mode 100644
index 000000000..4ff3291c1
--- /dev/null
+++ b/Sources/StreamVideo/Utils/Logger/Logger+WebRTC.swift
@@ -0,0 +1,121 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import Foundation
+import StreamWebRTC
+
+extension Logger {
+
+ public enum WebRTC {
+ public enum LogMode { case none, validFilesOnly, all }
+
+ public nonisolated(unsafe) static var mode: LogMode = .all {
+ didSet { RTCLogger.default.didUpdate(mode: mode) }
+ }
+
+ nonisolated(unsafe) static var severity: RTCLoggingSeverity = .init(LogConfig.level) {
+ didSet { RTCLogger.default.didUpdate(severity: severity) }
+ }
+
+ enum ValidFile: String {
+ case audioEngineDevice = "audio_engine_device.mm"
+ }
+
+ nonisolated(unsafe) static var validFiles: [ValidFile] = [
+ .audioEngineDevice
+ ]
+ }
+}
+
+extension RTCLoggingSeverity {
+
+ init(_ logLevel: LogLevel) {
+ switch logLevel {
+ case .debug:
+ self = .verbose
+ case .info:
+ self = .info
+ case .warning:
+ self = .warning
+ case .error:
+ self = .error
+ }
+ }
+}
+
+extension Logger.WebRTC {
+ final class RTCLogger: @unchecked Sendable {
+ static let `default` = RTCLogger()
+
+ private let logger = RTCCallbackLogger()
+ private var isRunning = false
+ private let processingQueue = OperationQueue(maxConcurrentOperationCount: 1)
+
+ private init() {
+ didUpdate(mode: mode)
+ }
+
+ func didUpdate(severity: RTCLoggingSeverity) {
+ processingQueue.addOperation { [weak self] in
+ self?.logger.severity = severity
+ }
+ }
+
+ func didUpdate(mode: LogMode) {
+ processingQueue.addOperation { [weak self] in
+ guard let self else {
+ return
+ }
+
+ guard mode != .none else {
+ return
+ }
+
+ guard !self.isRunning else {
+ return
+ }
+
+ logger.start { [weak self] in self?.process($0) }
+
+ self.isRunning = true
+ }
+ }
+
+ private func process(_ message: String) {
+ let trimmedMessage = message.trimmingCharacters(
+ in: .whitespacesAndNewlines
+ )
+
+ switch severity {
+ case .none, .verbose:
+ if isMessageFromValidFile(trimmedMessage) {
+ log.debug(trimmedMessage, subsystems: .webRTCInternal)
+ }
+ case .info:
+ if isMessageFromValidFile(trimmedMessage) {
+ log.info(trimmedMessage, subsystems: .webRTCInternal)
+ }
+ case .warning:
+ log.warning(trimmedMessage, subsystems: .webRTCInternal)
+ case .error:
+ log.error(trimmedMessage, subsystems: .webRTCInternal)
+ @unknown default:
+ log.debug(trimmedMessage, subsystems: .webRTCInternal)
+ }
+ }
+
+ private func isMessageFromValidFile(_ message: String) -> Bool {
+ guard mode == .validFilesOnly, !validFiles.isEmpty else {
+ return true
+ }
+
+ for validFile in validFiles {
+ if message.contains(validFile.rawValue) {
+ return true
+ }
+ }
+ return false
+ }
+ }
+}
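From app configuration code, the new mode can be tightened to the allow-list. Note that `process(_:)` above forwards warnings and errors regardless of the file filter, so the allow-list only gates verbose and info lines. A sketch:

```swift
// Mirrored into Logger.WebRTC.severity via the didSet added to
// LogConfig.level in the next file of this diff.
LogConfig.level = .debug

// Only surface verbose/info WebRTC lines originating from
// audio_engine_device.mm, per the ValidFile allow-list above.
Logger.WebRTC.mode = .validFilesOnly
```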
diff --git a/Sources/StreamVideo/Utils/Logger/Logger.swift b/Sources/StreamVideo/Utils/Logger/Logger.swift
index 72b4ddfd3..60832558b 100644
--- a/Sources/StreamVideo/Utils/Logger/Logger.swift
+++ b/Sources/StreamVideo/Utils/Logger/Logger.swift
@@ -151,6 +151,7 @@ public enum LogConfig {
public nonisolated(unsafe) static var level: LogLevel = .error {
didSet {
invalidateLogger()
+ Logger.WebRTC.severity = .init(level)
}
}
@@ -298,8 +299,8 @@ public enum LogConfig {
}
public static var webRTCLogsEnabled: Bool {
- get { WebRTCLogger.default.enabled }
- set { WebRTCLogger.default.enabled = newValue }
+ get { Logger.WebRTC.mode != .none }
+ set { Logger.WebRTC.mode = newValue ? .all : .none }
}
/// Invalidates the current logger instance so it can be recreated.
diff --git a/Sources/StreamVideo/Utils/Logger/WebRTCLogger.swift b/Sources/StreamVideo/Utils/Logger/WebRTCLogger.swift
deleted file mode 100644
index 3d248740f..000000000
--- a/Sources/StreamVideo/Utils/Logger/WebRTCLogger.swift
+++ /dev/null
@@ -1,50 +0,0 @@
-//
-// Copyright © 2025 Stream.io Inc. All rights reserved.
-//
-
-import Foundation
-import OSLog
-import StreamWebRTC
-
-final class WebRTCLogger: @unchecked Sendable {
-
- static let `default` = WebRTCLogger()
-
- var enabled: Bool = false {
- didSet { didUpdate(enabled) }
- }
-
- var severity: RTCLoggingSeverity = .error {
- didSet { webRTCLogger.severity = severity }
- }
-
- private let webRTCLogger: RTCCallbackLogger = .init()
-
- private init() {
- webRTCLogger.severity = .verbose
- }
-
- private func didUpdate(_ enabled: Bool) {
- guard enabled else {
- webRTCLogger.stop()
- return
- }
- webRTCLogger.start { message, severity in
- let trimmedMessage = message.trimmingCharacters(
- in: .whitespacesAndNewlines
- )
- switch severity {
- case .none, .verbose:
- log.debug(trimmedMessage, subsystems: .webRTCInternal)
- case .info:
- log.info(trimmedMessage, subsystems: .webRTCInternal)
- case .warning:
- log.warning(trimmedMessage, subsystems: .webRTCInternal)
- case .error:
- log.error(trimmedMessage, subsystems: .webRTCInternal)
- @unknown default:
- log.debug(trimmedMessage, subsystems: .webRTCInternal)
- }
- }
- }
-}
diff --git a/Sources/StreamVideo/Utils/PermissionsStore/PermissionsStore.swift b/Sources/StreamVideo/Utils/PermissionsStore/PermissionsStore.swift
index 12e3f66ad..4ff6ec4d4 100644
--- a/Sources/StreamVideo/Utils/PermissionsStore/PermissionsStore.swift
+++ b/Sources/StreamVideo/Utils/PermissionsStore/PermissionsStore.swift
@@ -67,7 +67,9 @@ public final class PermissionStore: ObservableObject, @unchecked Sendable {
$hasMicrophonePermission
.removeDuplicates()
- .sink { [weak self] in self?.audioStore.dispatch(.audioSession(.setHasRecordingPermission($0))) }
+ .sink { [weak self] in
+ self?.audioStore.dispatch(.setHasRecordingPermission($0))
+ }
.store(in: disposableBag)
}
diff --git a/Sources/StreamVideo/Utils/Proximity/Policies/SpeakerProximityPolicy.swift b/Sources/StreamVideo/Utils/Proximity/Policies/SpeakerProximityPolicy.swift
index b846201ab..69b8af661 100644
--- a/Sources/StreamVideo/Utils/Proximity/Policies/SpeakerProximityPolicy.swift
+++ b/Sources/StreamVideo/Utils/Proximity/Policies/SpeakerProximityPolicy.swift
@@ -36,7 +36,7 @@ public final class SpeakerProximityPolicy: ProximityPolicy, @unchecked Sendable
guard
let self,
let call,
- audioStore.session.currentRoute.isExternal == false
+ audioStore.state.currentRoute.isExternal == false
else {
return
}
diff --git a/Sources/StreamVideo/Utils/RetriableTask.swift b/Sources/StreamVideo/Utils/RetriableTask.swift
new file mode 100644
index 000000000..fe593dfce
--- /dev/null
+++ b/Sources/StreamVideo/Utils/RetriableTask.swift
@@ -0,0 +1,52 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import Foundation
+
+/// A helper that retries synchronous operations a fixed number of times.
+enum RetriableTask {
+ /// Runs the provided throwing operation up to the requested number of iterations.
+ /// The call stops as soon as the operation succeeds, or rethrows the last error
+ /// if all attempts fail.
+ /// - Parameters:
+ /// - iterations: Maximum number of times the operation should be executed.
+ /// - operation: The work item to execute repeatedly until it succeeds.
+ /// - Throws: The final error thrown by `operation` if it never succeeds.
+ static func run(
+ iterations: Int,
+ operation: () throws -> Void
+ ) throws {
+ try execute(
+ currentIteration: 0,
+ iterations: iterations,
+ operation: operation
+ )
+ }
+
+ /// Recursively executes the operation, incrementing the iteration until
+ /// the maximum is reached or the call succeeds.
+ private static func execute(
+ currentIteration: Int,
+ iterations: Int,
+ operation: () throws -> Void
+ ) throws {
+ do {
+ return try operation()
+ } catch {
+ if currentIteration < iterations - 1 {
+ do {
+ return try execute(
+ currentIteration: currentIteration + 1,
+ iterations: iterations,
+ operation: operation
+ )
+ } catch {
+ throw error
+ }
+ } else {
+ throw error
+ }
+ }
+ }
+}
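A small self-contained check of the retry semantics: the closure runs at most `iterations` times, stops on the first success, and only the final error escapes:

```swift
enum FlakyError: Error { case notReady }

var attempts = 0
do {
    try RetriableTask.run(iterations: 3) {
        attempts += 1
        if attempts < 3 { throw FlakyError.notReady }
    }
} catch {
    // Reached only if all three attempts had thrown.
}
// attempts == 3: the first two calls threw, the third succeeded.
```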
diff --git a/Sources/StreamVideo/Utils/Store/Reducer.swift b/Sources/StreamVideo/Utils/Store/Reducer.swift
index a1ce504ea..8fa662e0b 100644
--- a/Sources/StreamVideo/Utils/Store/Reducer.swift
+++ b/Sources/StreamVideo/Utils/Store/Reducer.swift
@@ -51,6 +51,16 @@ import Foundation
/// state. They are executed in sequence, with each reducer receiving the
/// state produced by the previous one.
class Reducer<Namespace: StoreNamespace>: @unchecked Sendable {
+ /// Closure for dispatching new actions to the store.
+ ///
+ /// Use this to trigger additional actions in response to the current
+ /// action. The dispatcher is automatically set when the reducer is
+ /// added to a store.
+ ///
+ /// - Warning: Avoid creating infinite loops by dispatching actions
+ /// that trigger the same reducer repeatedly.
+ var dispatcher: Store<Namespace>.Dispatcher?
+
/// Processes an action to produce a new state.
///
/// Override this method to implement state transformation logic. The
diff --git a/Sources/StreamVideo/Utils/Store/Store.swift b/Sources/StreamVideo/Utils/Store/Store.swift
index 223b24e49..db8f0878d 100644
--- a/Sources/StreamVideo/Utils/Store/Store.swift
+++ b/Sources/StreamVideo/Utils/Store/Store.swift
@@ -51,6 +51,8 @@ final class Store<Namespace: StoreNamespace>: @unchecked Sendable {
/// For observing changes, use ``publisher(_:)`` instead.
var state: Namespace.State { stateSubject.value }
+ let statePublisher: AnyPublisher<Namespace.State, Never>
+
/// Unique identifier for this store instance.
private let identifier: String
@@ -59,7 +61,10 @@ final class Store<Namespace: StoreNamespace>: @unchecked Sendable {
/// Executor that processes actions through the pipeline.
private let executor: StoreExecutor<Namespace>
-
+
+ /// Coordinator that can skip redundant actions before execution.
+ private let coordinator: StoreCoordinator<Namespace>
+
/// Publisher that holds and emits the current state.
private let stateSubject: CurrentValueSubject<Namespace.State, Never>
@@ -72,6 +77,8 @@ final class Store<Namespace: StoreNamespace>: @unchecked Sendable {
/// Array of middleware that handle side effects.
private var middleware: [Middleware<Namespace>]
+ private var effects: Set<StoreEffect<Namespace>>
+
/// Initializes a new store with the specified configuration.
///
/// - Parameters:
@@ -81,22 +88,31 @@ final class Store<Namespace: StoreNamespace>: @unchecked Sendable {
/// - middleware: Array of middleware for side effects.
/// - logger: Logger for recording store operations.
/// - executor: Executor for processing the action pipeline.
+ /// - coordinator: Coordinator that validates actions before execution.
init(
identifier: String,
initialState: Namespace.State,
reducers: [Reducer<Namespace>],
middleware: [Middleware<Namespace>],
+ effects: Set<StoreEffect<Namespace>>,
logger: StoreLogger<Namespace>,
- executor: StoreExecutor<Namespace>
+ executor: StoreExecutor<Namespace>,
+ coordinator: StoreCoordinator<Namespace>
) {
self.identifier = identifier
- stateSubject = .init(initialState)
- self.reducers = reducers
+ let stateSubject = CurrentValueSubject<Namespace.State, Never>(initialState)
+ self.stateSubject = stateSubject
+ self.statePublisher = stateSubject.eraseToAnyPublisher()
+ self.reducers = []
self.middleware = []
+ self.effects = []
self.logger = logger
self.executor = executor
+ self.coordinator = coordinator
+ reducers.forEach { add($0) }
middleware.forEach { add($0) }
+ effects.forEach { add($0) }
}
// MARK: - Middleware Management
@@ -158,6 +174,7 @@ final class Store<Namespace: StoreNamespace>: @unchecked Sendable {
return
}
reducers.append(value)
+ value.dispatcher = .init(self)
}
}
@@ -172,6 +189,45 @@ final class Store<Namespace: StoreNamespace>: @unchecked Sendable {
return
}
reducers = reducers.filter { $0 !== value }
+ value.dispatcher = nil
+ }
+ }
+
+ // MARK: - Effects Management
+
+ /// Adds an effect to respond to state changes.
+ ///
+ /// Effects are executed every time the store's state gets updated.
+ ///
+ /// - Parameter value: The effect to add.
+ func add<T: StoreEffect<Namespace>>(_ value: T) {
+ processingQueue.addOperation { [weak self] in
+ guard
+ let self
+ else {
+ return
+ }
+ effects.insert(value)
+ value.dispatcher = .init(self)
+ value.set(statePublisher: statePublisher)
+ value.stateProvider = { [weak self] in self?.state }
+ }
+ }
+
+ /// Removes a previously added effect.
+ ///
+ /// - Parameter value: The effect to remove.
+ func remove<T: StoreEffect<Namespace>>(_ value: T) {
+ processingQueue.addOperation { [weak self] in
+ guard
+ let self
+ else {
+ return
+ }
+ effects.remove(value)
+ value.dispatcher = nil
+ value.set(statePublisher: nil)
+ value.stateProvider = nil
}
}
@@ -241,17 +297,17 @@ final class Store<Namespace: StoreNamespace>: @unchecked Sendable {
/// logger.error("Action failed: \(error)")
/// }
/// ```
-
+ ///
+ /// - Returns: A ``StoreTask`` that can be awaited or ignored for
+ /// fire-and-forget semantics.
@discardableResult
- /// - Returns: A ``StoreTask`` that can be awaited for completion
- /// or ignored for fire-and-forget semantics.
func dispatch(
_ actions: [StoreActionBox],
file: StaticString = #file,
function: StaticString = #function,
line: UInt = #line
) -> StoreTask<Namespace> {
- let task = StoreTask(executor: executor)
+ let task = StoreTask(executor: executor, coordinator: coordinator)
processingQueue.addTaskOperation { [weak self] in
guard let self else {
return
@@ -272,9 +328,13 @@ final class Store<Namespace: StoreNamespace>: @unchecked Sendable {
return task
}
+ /// Dispatches a single boxed action asynchronously.
+ ///
+ /// Wraps the action in an array and forwards to
+ /// ``dispatch(_:file:function:line:)``.
+ ///
+ /// - Returns: A ``StoreTask`` that can be awaited or ignored.
@discardableResult
- /// - Returns: A ``StoreTask`` that can be awaited for completion
- /// or ignored for fire-and-forget semantics.
func dispatch(
_ action: StoreActionBox,
file: StaticString = #file,
@@ -289,9 +349,13 @@ final class Store<Namespace: StoreNamespace>: @unchecked Sendable {
)
}
+ /// Dispatches multiple unboxed actions asynchronously.
+ ///
+ /// Actions are boxed automatically before being forwarded to
+ /// ``dispatch(_:file:function:line:)``.
+ ///
+ /// - Returns: A ``StoreTask`` that can be awaited or ignored.
@discardableResult
- /// - Returns: A ``StoreTask`` that can be awaited for completion
- /// or ignored for fire-and-forget semantics.
func dispatch(
_ actions: [Namespace.Action],
file: StaticString = #file,
@@ -306,9 +370,13 @@ final class Store<Namespace: StoreNamespace>: @unchecked Sendable {
)
}
+ /// Dispatches a single unboxed action asynchronously.
+ ///
+ /// The action is boxed automatically and forwarded to
+ /// ``dispatch(_:file:function:line:)``.
+ ///
+ /// - Returns: A ``StoreTask`` that can be awaited or ignored.
@discardableResult
- /// - Returns: A ``StoreTask`` that can be awaited for completion
- /// or ignored for fire-and-forget semantics.
func dispatch(
_ action: Namespace.Action,
file: StaticString = #file,
diff --git a/Sources/StreamVideo/Utils/Store/StoreCoordinator.swift b/Sources/StreamVideo/Utils/Store/StoreCoordinator.swift
new file mode 100644
index 000000000..29f2e5198
--- /dev/null
+++ b/Sources/StreamVideo/Utils/Store/StoreCoordinator.swift
@@ -0,0 +1,33 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import Foundation
+
+/// Coordinates store actions to prevent redundant state transitions.
+///
+/// The coordinator evaluates an action against the current state before the
+/// store processes it.
+/// Implementations can override ``shouldExecute(action:state:)``
+/// to skip actions that would not yield a different state,
+/// reducing unnecessary work along the pipeline.
+class StoreCoordinator<Namespace: StoreNamespace>: @unchecked Sendable {
+
+ /// Determines whether an action should run for the provided state snapshot.
+ ///
+ /// This default implementation always executes the action.
+ /// Subclasses can override the method to run diffing logic or other
+ /// heuristics that detect state changes and return `false` when the action
+ /// can be safely skipped.
+ ///
+ /// - Parameters:
+ /// - action: The action that is about to be dispatched.
+ /// - state: The current state before the action runs.
+ /// - Returns: `true` to process the action; `false` to skip it.
+ func shouldExecute(
+ action: Namespace.Action,
+ state: Namespace.State
+ ) -> Bool {
+ true
+ }
+}
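A sketch of a namespace-specific coordinator. `MyNamespace`, `.setMuted`, and `isMuted` are hypothetical stand-ins (a fuller namespace sketch follows StoreNamespace.swift below), not SDK API:

```swift
final class DeduplicatingCoordinator: StoreCoordinator<MyNamespace> {
    // Skip actions that would write a value the state already holds.
    override func shouldExecute(
        action: MyNamespace.Action,
        state: MyNamespace.State
    ) -> Bool {
        switch action {
        case let .setMuted(value):
            return state.isMuted != value
        }
    }
}
```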
diff --git a/Sources/StreamVideo/Utils/Store/StoreEffect.swift b/Sources/StreamVideo/Utils/Store/StoreEffect.swift
new file mode 100644
index 000000000..636db57af
--- /dev/null
+++ b/Sources/StreamVideo/Utils/Store/StoreEffect.swift
@@ -0,0 +1,51 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import Combine
+import Foundation
+
+/// Base type for async side-effects that observe the store and can dispatch
+/// follow-up actions without touching reducers directly.
+class StoreEffect<Namespace: StoreNamespace>: @unchecked Sendable, Hashable {
+ private lazy var identifier = "store.\(type(of: self))"
+
+ /// Closure for dispatching new actions to the store.
+ ///
+ /// Use this to trigger additional actions in response to the current
+ /// action. The dispatcher is automatically set when the effect is
+ /// added to a store.
+ ///
+ /// - Warning: Avoid creating infinite loops by dispatching actions
+ /// that trigger the same effect repeatedly.
+ var dispatcher: Store<Namespace>.Dispatcher?
+
+ /// Closure for accessing the current store state.
+ ///
+ /// This provider is automatically set when the effect is added to
+ /// a store. It returns the current state at the time of access.
+ var stateProvider: (() -> Namespace.State?)?
+
+ /// The current store state, if available.
+ ///
+ /// Returns `nil` if the effect hasn't been added to a store yet.
+ /// Use this property to make decisions based on the current state.
+ var state: Namespace.State? { stateProvider?() }
+
+ /// Supplies the state publisher once the effect is attached to a store,
+ /// giving subclasses a hook to start or stop their observations.
+ func set(statePublisher: AnyPublisher<Namespace.State, Never>?) {
+ // No-op
+ }
+
+ func hash(into hasher: inout Hasher) {
+ hasher.combine(identifier)
+ }
+
+ static func == (
+ lhs: StoreEffect<Namespace>,
+ rhs: StoreEffect<Namespace>
+ ) -> Bool {
+ lhs.identifier == rhs.identifier && lhs === rhs
+ }
+}
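A sketch of an effect subclass built on the same hypothetical `MyNamespace`; the `set(statePublisher:)` hook is where observation starts and stops. The exact `Dispatcher` call surface is not shown in this diff, so the reaction is left as a comment:

```swift
import Combine

final class MuteObservationEffect: StoreEffect<MyNamespace> {
    private var cancellable: AnyCancellable?

    override func set(statePublisher: AnyPublisher<MyNamespace.State, Never>?) {
        guard let statePublisher else {
            cancellable = nil // Detached from the store: stop observing.
            return
        }
        cancellable = statePublisher
            .map(\.isMuted)
            .removeDuplicates()
            .sink { isMuted in
                // React here; follow-up actions would go through
                // `dispatcher`, which the store injects in add(_:).
                _ = isMuted
            }
    }
}
```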
diff --git a/Sources/StreamVideo/Utils/Store/StoreLogger.swift b/Sources/StreamVideo/Utils/Store/StoreLogger.swift
index 35b6b1b15..87b31feaf 100644
--- a/Sources/StreamVideo/Utils/Store/StoreLogger.swift
+++ b/Sources/StreamVideo/Utils/Store/StoreLogger.swift
@@ -46,17 +46,30 @@ class StoreLogger<Namespace: StoreNamespace> {
/// aggregation tools.
let logSubsystem: LogSubsystem
+ /// Aggregated metrics recorded for dispatched actions.
+ ///
+ /// Statistics are enabled in DEBUG builds to help monitor action
+ /// throughput.
let statistics: StoreStatistics<Namespace> = .init()
+ let logSkipped: Bool
+
/// Initializes a new store logger.
///
/// - Parameter logSubsystem: The subsystem for categorizing logs.
/// Defaults to `.other`.
- init(logSubsystem: LogSubsystem = .other) {
+ init(
+ logSubsystem: LogSubsystem = .other,
+ logSkipped: Bool = true
+ ) {
self.logSubsystem = logSubsystem
+ self.logSkipped = logSkipped
#if DEBUG
- statistics.enable(interval: 60) { [weak self] in self?.report($0, interval: $1) }
+ statistics.enable(interval: 60) {
+ [weak self] numberOfActions, interval in
+ self?.report(numberOfActions, interval: interval)
+ }
#endif
}
@@ -82,7 +95,41 @@ class StoreLogger<Namespace: StoreNamespace> {
) {
defer { statistics.record(action) }
log.debug(
- "Store identifier:\(identifier) completed action:\(action) state:\(state).",
+ "Store identifier:\(identifier) completed action:\(action) "
+ + "state:\(state).",
+ subsystems: logSubsystem,
+ functionName: function,
+ fileName: file,
+ lineNumber: line
+ )
+ }
+
+ /// Called when an action is skipped by the coordinator.
+ ///
+ /// Override to customize logging or metrics for redundant actions
+ /// that do not require processing.
+ ///
+ /// - Parameters:
+ /// - identifier: The store's unique identifier.
+ /// - action: The action that was skipped.
+ /// - state: The snapshot used when making the decision.
+ /// - file: Source file where the action was dispatched.
+ /// - function: Function where the action was dispatched.
+ /// - line: Line number where the action was dispatched.
+ func didSkip(
+ identifier: String,
+ action: Namespace.Action,
+ state: Namespace.State,
+ file: StaticString,
+ function: StaticString,
+ line: UInt
+ ) {
+ defer { statistics.record(action) }
+
+ guard logSkipped else { return }
+
+ log.debug(
+ "Store identifier:\(identifier) skipped action:\(action).",
subsystems: logSubsystem,
functionName: function,
fileName: file,
@@ -121,12 +168,21 @@ class StoreLogger<Namespace: StoreNamespace> {
)
}
+ /// Reports aggregated statistics for the store.
+ ///
+ /// This hook is invoked on a timer when statistics tracking is
+ /// enabled. Override to forward metrics or customize formatting.
+ ///
+ /// - Parameters:
+ /// - numberOfActions: Count of actions recorded in the interval.
+ /// - interval: The time window for the reported statistics.
func report(
_ numberOfActions: Int,
interval: TimeInterval
) {
log.debug(
- "Store identifier:\(Namespace.identifier) performs \(numberOfActions) per \(interval) seconds.",
+ "Store identifier:\(Namespace.identifier) performs "
+ + "\(numberOfActions) per \(interval) seconds.",
subsystems: logSubsystem
)
}
diff --git a/Sources/StreamVideo/Utils/Store/StoreNamespace.swift b/Sources/StreamVideo/Utils/Store/StoreNamespace.swift
index b959b80bf..c9813eb9b 100644
--- a/Sources/StreamVideo/Utils/Store/StoreNamespace.swift
+++ b/Sources/StreamVideo/Utils/Store/StoreNamespace.swift
@@ -74,6 +74,8 @@ protocol StoreNamespace: Sendable {
/// - Returns: Array of middleware for this store.
static func middleware() -> [Middleware<Self>]
+ static func effects() -> Set<StoreEffect<Self>>
+
/// Creates the logger for this store.
///
/// Override to provide custom logging behavior.
@@ -89,20 +91,34 @@ protocol StoreNamespace: Sendable {
/// - Returns: An executor instance for this store.
static func executor() -> StoreExecutor<Self>
+ /// Creates the coordinator for evaluating actions before execution.
+ ///
+ /// Override to provide custom logic that skips redundant actions.
+ ///
+ /// - Returns: A coordinator instance for this store.
+ static func coordinator() -> StoreCoordinator<Self>
+
/// Creates a configured store instance.
///
/// This method assembles all components into a functioning store.
/// The default implementation should work for most cases.
///
- /// - Parameter initialState: The initial state for the store.
- ///
+ /// - Parameters:
+ /// - initialState: The initial state for the store.
+ /// - reducers: Reducers used to transform state.
+ /// - middleware: Middleware that handle side effects.
+ /// - logger: Logger responsible for diagnostics.
+ /// - executor: Executor that runs the action pipeline.
+ /// - coordinator: Coordinator that can skip redundant actions.
/// - Returns: A fully configured store instance.
static func store(
initialState: State,
reducers: [Reducer<Self>],
middleware: [Middleware<Self>],
+ effects: Set<StoreEffect<Self>>,
logger: StoreLogger<Self>,
- executor: StoreExecutor<Self>
+ executor: StoreExecutor<Self>,
+ coordinator: StoreCoordinator<Self>
) -> Store<Self>
}
@@ -116,12 +132,17 @@ extension StoreNamespace {
/// Default implementation returns empty array.
static func middleware() -> [Middleware<Self>] { [] }
+ static func effects() -> Set<StoreEffect<Self>> { [] }
+
/// Default implementation returns basic logger.
static func logger() -> StoreLogger<Self> { .init() }
/// Default implementation returns basic executor.
static func executor() -> StoreExecutor<Self> { .init() }
+ /// Default implementation returns a coordinator with no skip logic.
+ static func coordinator() -> StoreCoordinator<Self> { .init() }
+
/// Default implementation creates a store with all components.
///
/// This implementation:
@@ -131,20 +152,25 @@ extension StoreNamespace {
/// 4. Adds middleware from `middleware()`
/// 5. Uses logger from `logger()`
/// 6. Uses executor from `executor()`
+ /// 7. Uses coordinator from `coordinator()`
static func store(
initialState: State,
reducers: [Reducer<Self>] = Self.reducers(),
middleware: [Middleware<Self>] = Self.middleware(),
+ effects: Set<StoreEffect<Self>> = Self.effects(),
logger: StoreLogger<Self> = Self.logger(),
- executor: StoreExecutor<Self> = Self.executor()
+ executor: StoreExecutor<Self> = Self.executor(),
+ coordinator: StoreCoordinator<Self> = Self.coordinator()
) -> Store<Self> {
.init(
identifier: Self.identifier,
initialState: initialState,
reducers: reducers,
middleware: middleware,
+ effects: effects,
logger: logger,
- executor: executor
+ executor: executor,
+ coordinator: coordinator
)
}
}
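Pulling the new hooks together, an illustrative conformance; every `My*` name is hypothetical, and `reducers()` is provided explicitly since no default for it is visible in this diff:

```swift
enum MyNamespace: StoreNamespace {
    static let identifier = "demo.mute-store"

    struct State: Equatable { var isMuted = false }
    enum Action { case setMuted(Bool) }

    static func reducers() -> [Reducer<Self>] { [] }
    static func coordinator() -> StoreCoordinator<Self> { DeduplicatingCoordinator() }
    static func effects() -> Set<StoreEffect<Self>> { [MuteObservationEffect()] }
}

// let store = MyNamespace.store(initialState: .init())
```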
diff --git a/Sources/StreamVideo/Utils/Store/StoreStatistics.swift b/Sources/StreamVideo/Utils/Store/StoreStatistics.swift
index 98e5d5940..76e83a511 100644
--- a/Sources/StreamVideo/Utils/Store/StoreStatistics.swift
+++ b/Sources/StreamVideo/Utils/Store/StoreStatistics.swift
@@ -10,7 +10,7 @@ final class StoreStatistics<Namespace: StoreNamespace> {
typealias Reporter = (Int, TimeInterval) -> Void
private let processingQueue = UnfairQueue()
- private var actions: [Namespace.Action] = []
+ private var actions: [String] = []
private var cancellable: AnyCancellable?
private var interval: TimeInterval = 0
@@ -31,7 +31,7 @@ final class StoreStatistics {
}
func record(_ action: Namespace.Action) {
- processingQueue.sync { actions.append(action) }
+ processingQueue.sync { actions.append("\(action)") }
}
private func flush() {
diff --git a/Sources/StreamVideo/Utils/Store/StoreTask.swift b/Sources/StreamVideo/Utils/Store/StoreTask.swift
index 5ae03cade..658274bef 100644
--- a/Sources/StreamVideo/Utils/Store/StoreTask.swift
+++ b/Sources/StreamVideo/Utils/Store/StoreTask.swift
@@ -5,10 +5,10 @@
import Combine
import Foundation
-/// A lightweight handle for a single dispatched store action.
+/// A lightweight handle for dispatched store actions.
///
-/// `StoreTask` coordinates the execution of one action via
-/// ``StoreExecutor`` and exposes a way to await the result. Callers can
+/// `StoreTask` coordinates the execution of one or more actions via
+/// ``StoreExecutor`` and ``StoreCoordinator``. Callers can
/// dispatch-and-forget using `run(...)` and optionally await completion
/// or failure later with ``result()``.
///
@@ -22,27 +22,30 @@ final class StoreTask<Namespace: StoreNamespace>: Sendable {
private enum State { case idle, running, completed, failed(Error) }
private let executor: StoreExecutor<Namespace>
+ private let coordinator: StoreCoordinator<Namespace>
private let resultSubject: CurrentValueSubject<State, Never> = .init(.idle)
init(
- executor: StoreExecutor<Namespace>
+ executor: StoreExecutor<Namespace>,
+ coordinator: StoreCoordinator<Namespace>
) {
self.executor = executor
+ self.coordinator = coordinator
}
// MARK: - Execution
- /// Executes the given action through the store pipeline.
+ /// Executes the given actions through the store pipeline.
///
/// The task transitions to `.running`, delegates to the
- /// ``StoreExecutor`` and records completion or failure. Errors are
- /// captured and can be retrieved by awaiting ``result()``.
+ /// ``StoreExecutor`` and ``StoreCoordinator``, and records completion
+ /// or failure. Errors are captured and can be retrieved by awaiting
+ /// ``result()``.
///
/// - Parameters:
/// - identifier: Store identifier for logging context.
/// - state: Current state snapshot before processing.
- /// - action: Action to execute.
- /// - delay: Optional before/after delays.
+ /// - actions: Actions to execute, each optionally delayed.
/// - reducers: Reducers to apply in order.
/// - middleware: Middleware for side effects.
/// - logger: Logger used for diagnostics.
@@ -64,11 +67,28 @@ final class StoreTask: Sendable {
) async {
resultSubject.send(.running)
do {
- var workingState = state
+ var updatedState = state
for action in actions {
- workingState = try await executor.run(
+ guard
+ coordinator.shouldExecute(
+ action: action.wrappedValue,
+ state: updatedState
+ )
+ else {
+ logger.didSkip(
+ identifier: identifier,
+ action: action.wrappedValue,
+ state: updatedState,
+ file: file,
+ function: function,
+ line: line
+ )
+ continue
+ }
+
+ updatedState = try await executor.run(
identifier: identifier,
- state: workingState,
+ state: updatedState,
action: action,
reducers: reducers,
middleware: middleware,
diff --git a/Sources/StreamVideo/Utils/Swift6Migration/Encodable+Retroactive.swift b/Sources/StreamVideo/Utils/Swift6Migration/Encodable+Retroactive.swift
index e2833ae4e..8017746f0 100644
--- a/Sources/StreamVideo/Utils/Swift6Migration/Encodable+Retroactive.swift
+++ b/Sources/StreamVideo/Utils/Swift6Migration/Encodable+Retroactive.swift
@@ -19,6 +19,7 @@ extension RTCSessionDescription: @retroactive Encodable {}
extension RTCConfiguration: @retroactive Encodable {}
extension RTCIceServer: @retroactive Encodable {}
extension RTCCryptoOptions: @retroactive Encodable {}
+extension AVAudioSession.RouteChangeReason: @retroactive Encodable {}
#else
extension RTCSignalingState: Encodable {}
extension RTCMediaStream: Encodable {}
@@ -33,6 +34,7 @@ extension RTCSessionDescription: Encodable {}
extension RTCConfiguration: Encodable {}
extension RTCIceServer: Encodable {}
extension RTCCryptoOptions: Encodable {}
+extension AVAudioSession.RouteChangeReason: Encodable {}
#endif
extension RTCSignalingState {
diff --git a/Sources/StreamVideo/WebRTC/DefaultRTCMediaConstraints.swift b/Sources/StreamVideo/WebRTC/DefaultRTCMediaConstraints.swift
index 8a251b938..47c8f735f 100644
--- a/Sources/StreamVideo/WebRTC/DefaultRTCMediaConstraints.swift
+++ b/Sources/StreamVideo/WebRTC/DefaultRTCMediaConstraints.swift
@@ -9,7 +9,17 @@ extension RTCMediaConstraints {
nonisolated(unsafe) static let defaultConstraints = RTCMediaConstraints(
mandatoryConstraints: nil,
- optionalConstraints: ["DtlsSrtpKeyAgreement": kRTCMediaConstraintsValueTrue]
+ optionalConstraints: [
+ "DtlsSrtpKeyAgreement": kRTCMediaConstraintsValueTrue,
+ /// Added support for Google's media constraints to improve transmitted audio
+ /// https://github.com/GetStream/react-native-webrtc/pull/20/commits/6476119456005dc35ba00e9bf4d4c4124c6066e8
+ "googAutoGainControl": kRTCMediaConstraintsValueTrue,
+ "googNoiseSuppression": kRTCMediaConstraintsValueTrue,
+ "googEchoCancellation": kRTCMediaConstraintsValueTrue,
+ "googHighpassFilter": kRTCMediaConstraintsValueTrue,
+ "googTypingNoiseDetection": kRTCMediaConstraintsValueTrue,
+ "googAudioMirroring": kRTCMediaConstraintsValueFalse
+ ]
)
nonisolated(unsafe) static let iceRestartConstraints = RTCMediaConstraints(
diff --git a/Sources/StreamVideo/WebRTC/PeerConnectionFactory.swift b/Sources/StreamVideo/WebRTC/PeerConnectionFactory.swift
index 39feab1f8..5305f545c 100644
--- a/Sources/StreamVideo/WebRTC/PeerConnectionFactory.swift
+++ b/Sources/StreamVideo/WebRTC/PeerConnectionFactory.swift
@@ -20,7 +20,7 @@ final class PeerConnectionFactory: @unchecked Sendable {
)
let decoderFactory = RTCDefaultVideoDecoderFactory()
return RTCPeerConnectionFactory(
- audioDeviceModuleType: .platformDefault,
+ audioDeviceModuleType: .audioEngine,
bypassVoiceProcessing: false,
encoderFactory: encoderFactory,
decoderFactory: decoderFactory,
@@ -44,34 +44,33 @@ final class PeerConnectionFactory: @unchecked Sendable {
defaultDecoder.supportedCodecs()
}
- var audioDeviceModule: RTCAudioDeviceModule { factory.audioDeviceModule }
+ private(set) lazy var audioDeviceModule: AudioDeviceModule = .init(factory.audioDeviceModule)
/// Creates or retrieves a PeerConnectionFactory instance for a given
/// audio processing module.
/// - Parameter audioProcessingModule: The RTCAudioProcessingModule to use.
/// - Returns: A PeerConnectionFactory instance.
static func build(
- audioProcessingModule: RTCAudioProcessingModule
+ audioProcessingModule: RTCAudioProcessingModule,
+ audioDeviceModuleSource: RTCAudioDeviceModuleControlling? = nil
) -> PeerConnectionFactory {
- if let factory = PeerConnectionFactoryStorage.shared.factory(
- for: audioProcessingModule
- ) {
- return factory
- } else {
- return .init(audioProcessingModule)
- }
+ return .init(audioProcessingModule, audioDeviceModuleSource: audioDeviceModuleSource)
}
/// Private initializer to ensure instances are created through the `build` method.
/// - Parameter audioProcessingModule: The RTCAudioProcessingModule to use.
- private init(_ audioProcessingModule: RTCAudioProcessingModule) {
+ private init(
+ _ audioProcessingModule: RTCAudioProcessingModule,
+ audioDeviceModuleSource: RTCAudioDeviceModuleControlling?
+ ) {
self.audioProcessingModule = audioProcessingModule
_ = factory
- PeerConnectionFactoryStorage.shared.store(self, for: audioProcessingModule)
- }
-
- deinit {
- PeerConnectionFactoryStorage.shared.remove(for: audioProcessingModule)
+
+ if let audioDeviceModuleSource {
+ audioDeviceModule = .init(audioDeviceModuleSource)
+ } else {
+ _ = audioDeviceModule
+ }
}
// MARK: - Builders
@@ -212,56 +211,3 @@ final class PeerConnectionFactory: @unchecked Sendable {
.baseline(for: videoCodec)
}
}
-
-/// A thread-safe storage class for managing PeerConnectionFactory instances.
-final class PeerConnectionFactoryStorage: @unchecked Sendable {
- /// Shared singleton instance of PeerConnectionFactoryStorage.
- static let shared = PeerConnectionFactoryStorage()
-
- /// Dictionary to store PeerConnectionFactory instances, keyed by module address.
- private var storage: [String: PeerConnectionFactory] = [:]
-
- /// Queue to ensure thread-safe access to the storage.
- private let queue = UnfairQueue()
-
- /// Stores a PeerConnectionFactory instance for a given RTCAudioProcessingModule.
- /// - Parameters:
- /// - factory: The PeerConnectionFactory to store.
- /// - module: The RTCAudioProcessingModule associated with the factory.
- func store(
- _ factory: PeerConnectionFactory,
- for module: RTCAudioProcessingModule
- ) {
- queue.sync {
- storage[key(for: module)] = factory
- }
- }
-
- /// Retrieves a PeerConnectionFactory instance for a given RTCAudioProcessingModule.
- /// - Parameter module: The RTCAudioProcessingModule to lookup.
- /// - Returns: The associated PeerConnectionFactory, if found.
- func factory(for module: RTCAudioProcessingModule) -> PeerConnectionFactory? {
- queue.sync {
- storage[key(for: module)]
- }
- }
-
- /// Removes a PeerConnectionFactory instance for a given RTCAudioProcessingModule.
- /// If the storage becomes empty after removal, it cleans up SSL.
- /// - Parameter module: The RTCAudioProcessingModule to remove.
- func remove(for module: RTCAudioProcessingModule) {
- queue.sync {
- storage[key(for: module)] = nil
- if storage.isEmpty {
- /// SSL cleanUp should only occur when no factory is active. During tests where
- /// factories are being created on demand this is causing failures. The storage ensures
- /// that only when there is no other factory the SSL will be cleaned up.
- RTCCleanupSSL()
- }
- }
- }
-
- private func key(for object: AnyObject) -> String {
- "\(Unmanaged.passUnretained(object).toOpaque())"
- }
-}
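With the storage singleton gone, `build` now always returns a fresh factory, and the new parameter lets tests inject a custom audio device module source. A hedged sketch; `MockAudioDeviceModule` is hypothetical and its conformance body is elided because the protocol's requirements are not part of this diff:

```swift
import StreamWebRTC

final class MockAudioDeviceModule: RTCAudioDeviceModuleControlling { /* … */ }

let factory = PeerConnectionFactory.build(
    audioProcessingModule: RTCDefaultAudioProcessingModule(),
    audioDeviceModuleSource: MockAudioDeviceModule()
)
```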
diff --git a/Sources/StreamVideo/WebRTC/v2/Extensions/CallParticipant+Convenience.swift b/Sources/StreamVideo/WebRTC/v2/Extensions/CallParticipant+Convenience.swift
index ae2067e53..787b053fd 100644
--- a/Sources/StreamVideo/WebRTC/v2/Extensions/CallParticipant+Convenience.swift
+++ b/Sources/StreamVideo/WebRTC/v2/Extensions/CallParticipant+Convenience.swift
@@ -56,6 +56,16 @@ extension CallParticipant {
type: .screenShare
)
)
+
+ /// We subscribe to screenShareAudio any time a user is screen sharing. In the
+ /// future this should be driven by events indicating whether audio is published.
+ result.append(
+ .init(
+ for: userId,
+ sessionId: sessionId,
+ type: .screenShareAudio
+ )
+ )
}
return result
diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator.swift
index 40149b5e8..869cebe4a 100644
--- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator.swift
+++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/RTCPeerConnectionCoordinator.swift
@@ -53,6 +53,8 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable {
private var callSettings: CallSettings
+ private let mungeSubscriberStereo = true
+
/// A publisher that we use to observe setUp status. Once the setUp has been completed we expect
/// a `true` value to be sent. After that, any subsequent observations will rely on the `currentValue`
/// to know that the setUp completed, without having to wait for it.
@@ -808,8 +810,21 @@ class RTCPeerConnectionCoordinator: @unchecked Sendable {
)
)
- let answer = try await createAnswer()
- try await setLocalDescription(answer)
+ var answer = try await createAnswer()
+ if mungeSubscriberStereo {
+ let munger = SDPParser()
+ let visitor = StereoEnableVisitor()
+ munger.registerVisitor(visitor)
+ await munger.parse(sdp: answer.sdp)
+ let munged = visitor.applyStereoUpdates(to: answer.sdp)
+ let mungedAnswer = RTCSessionDescription(type: answer.type, sdp: munged)
+ try await setLocalDescription(mungedAnswer)
+ log.debug("Munged Subscriber offer: \(mungedAnswer)", subsystems: subsystem)
+
+ answer = mungedAnswer
+ } else {
+ try await setLocalDescription(answer)
+ }
try await sfuAdapter.sendAnswer(
sessionDescription: answer.sdp,
diff --git a/Sources/StreamVideo/WebRTC/v2/SDP Parsing/Parser/Visitors/StereoEnableVisitor.swift b/Sources/StreamVideo/WebRTC/v2/SDP Parsing/Parser/Visitors/StereoEnableVisitor.swift
index 5ac0261e2..8176fb985 100644
--- a/Sources/StreamVideo/WebRTC/v2/SDP Parsing/Parser/Visitors/StereoEnableVisitor.swift
+++ b/Sources/StreamVideo/WebRTC/v2/SDP Parsing/Parser/Visitors/StereoEnableVisitor.swift
@@ -14,8 +14,18 @@ final class StereoEnableVisitor: SDPLineVisitor {
case foundOpus(mid: String, payload: String)
}
+ private enum Constants {
+ static let fmtpDelimiter: Character = ";"
+ static let keyValueSeparator: Character = "="
+ static let stereoParameters: [(key: String, value: String)] = [
+ ("stereo", "1"),
+ ("sprop-stereo", "1")
+ ]
+ }
+
private var state: State = .idle
private(set) var found: [String: MidStereoInformation] = [:]
+ private(set) var fmtpLineReplacements: [String: String] = [:]
/// Prefixes handled by this visitor: mid, rtpmap, and fmtp lines.
var supportedPrefixes: Set<SupportedPrefix> {
@@ -43,6 +53,7 @@ final class StereoEnableVisitor: SDPLineVisitor {
state = .foundOpus(mid: mid, payload: String(parts[0]))
case let (_, .foundOpus(mid, codecPayload)) where line.hasPrefix(SupportedPrefix.fmtp.rawValue):
+ let originalLine = line
let parts = line
.replacingOccurrences(of: SupportedPrefix.fmtp.rawValue, with: "")
.split(separator: " ", maxSplits: 1)
@@ -55,18 +66,23 @@ final class StereoEnableVisitor: SDPLineVisitor {
let payload = String(parts[0])
let config = String(parts[1])
- guard
- payload == codecPayload,
- config.contains("stereo=1")
- else {
+ guard payload == codecPayload else {
state = .idle
return
}
+ let (updatedConfig, didMutate) = ensureStereoConfiguration(in: config)
+ if didMutate {
+ let updatedLine = "\(SupportedPrefix.fmtp.rawValue)\(payload) \(updatedConfig)"
+ fmtpLineReplacements[originalLine] = updatedLine
+ } else {
+ fmtpLineReplacements.removeValue(forKey: originalLine)
+ }
+
found[mid] = .init(
mid: mid,
codecPayload: codecPayload,
- isStereoEnabled: true
+ isStereoEnabled: updatedConfig.contains("stereo=1")
)
state = .idle
@@ -74,4 +90,77 @@ final class StereoEnableVisitor: SDPLineVisitor {
break
}
}
+
+ /// Applies the computed stereo updates to the provided SDP, returning a new SDP string.
+ /// - Parameter sdp: The original SDP string.
+ /// - Returns: The SDP string with stereo parameters enforced where required.
+ func applyStereoUpdates(to sdp: String) -> String {
+ guard fmtpLineReplacements.isEmpty == false else { return sdp }
+
+ let delimiter = "\r\n"
+ var lines = sdp.components(separatedBy: delimiter)
+
+ for index in lines.indices {
+ let line = lines[index]
+ if let replacement = fmtpLineReplacements[line] {
+ lines[index] = replacement
+ }
+ }
+
+ return lines.joined(separator: delimiter)
+ }
+
+ /// Resets the internal state allowing the visitor to be reused.
+ func reset() {
+ state = .idle
+ found.removeAll()
+ fmtpLineReplacements.removeAll()
+ }
+
+ private func ensureStereoConfiguration(in config: String) -> (String, Bool) {
+ let components = config
+ .split(separator: Constants.fmtpDelimiter)
+ .map { $0.trimmingCharacters(in: .whitespaces) }
+ .filter { !$0.isEmpty }
+
+ var order: [String] = []
+ var values: [String: String] = [:]
+
+ for component in components {
+ let keyValue = component.split(separator: Constants.keyValueSeparator, maxSplits: 1)
+ let key = keyValue[0].trimmingCharacters(in: .whitespaces)
+ let value = keyValue.count > 1
+ ? keyValue[1].trimmingCharacters(in: .whitespaces)
+ : ""
+
+ if values[key] == nil {
+ order.append(key)
+ }
+ values[key] = value
+ }
+
+ var didMutate = false
+
+ for (key, value) in Constants.stereoParameters {
+ if let existing = values[key] {
+ if existing != value {
+ values[key] = value
+ didMutate = true
+ }
+ } else {
+ values[key] = value
+ order.append(key)
+ didMutate = true
+ }
+ }
+
+ let updatedConfig = order.map { key -> String in
+ guard let value = values[key], value.isEmpty == false else {
+ return key
+ }
+ return "\(key)=\(value)"
+ }.joined(separator: String(Constants.fmtpDelimiter))
+
+ return (updatedConfig, didMutate)
+ }
}
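End to end, the visitor records fmtp replacements while parsing and `applyStereoUpdates(to:)` rewrites them afterwards. A sketch over a minimal Opus media section, run from an async context; the exact line shapes are assumptions based on the prefixes this visitor registers:

```swift
let sdp = [
    "a=mid:0",
    "a=rtpmap:111 opus/48000/2",
    "a=fmtp:111 minptime=10;useinbandfec=1"
].joined(separator: "\r\n")

let visitor = StereoEnableVisitor()
let parser = SDPParser()
parser.registerVisitor(visitor)
await parser.parse(sdp: sdp)

let munged = visitor.applyStereoUpdates(to: sdp)
// The fmtp line now also carries stereo=1;sprop-stereo=1.
```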
diff --git a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joining.swift b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joining.swift
index 37114964e..6268e0f12 100644
--- a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joining.swift
+++ b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Joining.swift
@@ -359,18 +359,26 @@ extension WebRTCCoordinator.StateMachine.Stage {
try Task.checkCancellation()
if !isFastReconnecting {
- /// Configures the audio session for the current call using the provided
- /// join source. This ensures the session setup reflects whether the
- /// join was triggered in-app or via CallKit and applies the correct
- /// audio routing and category.
- try await coordinator.stateAdapter.configureAudioSession(
- source: context.joinSource
- )
+ try await withThrowingTaskGroup(of: Void.self) { [context] group in
+ group.addTask { [context] in
+ /// Configures the audio session for the current call using the provided
+ /// join source. This ensures the session setup reflects whether the
+ /// join was triggered in-app or via CallKit and applies the correct
+ /// audio routing and category.
+ try await coordinator.stateAdapter.configureAudioSession(
+ source: context.joinSource
+ )
+ }
- /// Configures all peer connections after the audio session is ready.
- /// Ensures signaling, media, and routing are correctly established for
- /// all tracks as part of the join process.
- try await coordinator.stateAdapter.configurePeerConnections()
+ group.addTask {
+                            /// Configures all peer connections concurrently with
+                            /// the audio session setup. Ensures signaling, media,
+                            /// and routing are correctly established for all tracks
+                            /// as part of the join process.
+ try await coordinator.stateAdapter.configurePeerConnections()
+ }
+
+ try await group.waitForAll()
+ }
            // Once our PeerConnections have been created we consume the
// eventBucket we created above in order to re-apply any event
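
The hunk above replaces two sequential awaits with a throwing task group, so audio session configuration and peer connection setup now run concurrently. A minimal sketch of the pattern, detached from the SDK types: both tasks start immediately, and waitForAll() rethrows the first failure, so a failing step still fails the join.

    func runConcurrently(
        _ first: @escaping @Sendable () async throws -> Void,
        _ second: @escaping @Sendable () async throws -> Void
    ) async throws {
        try await withThrowingTaskGroup(of: Void.self) { group in
            group.addTask { try await first() }
            group.addTask { try await second() }
            // Rethrows the first error; the remaining task is cancelled on exit.
            try await group.waitForAll()
        }
    }

One consequence worth noting: the two steps no longer have an ordering guarantee, so any dependency between audio session activation and peer connection setup has to be handled inside the steps themselves.
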
diff --git a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Stage.swift b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Stage.swift
index 309a8b1c3..179718eb7 100644
--- a/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Stage.swift
+++ b/Sources/StreamVideo/WebRTC/v2/StateMachine/Stages/WebRTCCoordinator+Stage.swift
@@ -9,7 +9,7 @@ extension WebRTCCoordinator.StateMachine {
class Stage: StreamStateMachineStage, @unchecked Sendable {
/// Context holding the state and dependencies for the stage.
- struct Context {
+ struct Context: @unchecked Sendable {
weak var coordinator: WebRTCCoordinator?
var authenticator: WebRTCAuthenticating = WebRTCAuthenticator()
var sfuEventObserver: SFUEventAdapter?
diff --git a/Sources/StreamVideo/WebRTC/v2/WebRTCAuthenticator.swift b/Sources/StreamVideo/WebRTC/v2/WebRTCAuthenticator.swift
index c4a481884..e9e8a5ded 100644
--- a/Sources/StreamVideo/WebRTC/v2/WebRTCAuthenticator.swift
+++ b/Sources/StreamVideo/WebRTC/v2/WebRTCAuthenticator.swift
@@ -38,6 +38,8 @@ protocol WebRTCAuthenticating {
/// Concrete implementation of WebRTCAuthenticating.
struct WebRTCAuthenticator: WebRTCAuthenticating {
+ @Injected(\.audioStore) private var audioStore
+
/// Authenticates the WebRTC connection.
/// - Parameters:
/// - coordinator: The WebRTC coordinator.
@@ -90,9 +92,7 @@ struct WebRTCAuthenticator: WebRTCAuthenticating {
let remoteCallSettings = CallSettings(response.call.settings)
let callSettings = {
var result = initialCallSettings ?? remoteCallSettings
- if
- coordinator.stateAdapter.audioSession.currentRoute.isExternal,
- result.speakerOn {
+ if audioStore.state.currentRoute.isExternal, result.speakerOn {
result = result.withUpdatedSpeakerState(false)
}
return result
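
The settings derivation above reduces to a small pure rule; a hypothetical standalone version with simplified types:

    // If the active route is already external (headset, Bluetooth, CarPlay),
    // a requested speakerOn=true is downgraded so the hardware route wins.
    func resolveSpeakerOn(requested: Bool, routeIsExternal: Bool) -> Bool {
        routeIsExternal ? false : requested
    }
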
diff --git a/Sources/StreamVideo/WebRTC/v2/WebRTCCoordinator.swift b/Sources/StreamVideo/WebRTC/v2/WebRTCCoordinator.swift
index 0afb4cb10..969cf7d0a 100644
--- a/Sources/StreamVideo/WebRTC/v2/WebRTCCoordinator.swift
+++ b/Sources/StreamVideo/WebRTC/v2/WebRTCCoordinator.swift
@@ -135,9 +135,25 @@ final class WebRTCCoordinator: @unchecked Sendable {
/// Changes the audio state (enabled/disabled) for the call.
///
/// - Parameter isEnabled: Whether the audio should be enabled.
- func changeAudioState(isEnabled: Bool) async {
+ func changeAudioState(
+ isEnabled: Bool,
+ file: StaticString = #file,
+ function: StaticString = #function,
+ line: UInt = #line
+ ) async {
await stateAdapter
- .enqueueCallSettings { $0.withUpdatedAudioState(isEnabled) }
+ .enqueueCallSettings(
+ functionName: function,
+ fileName: file,
+ lineNumber: line
+ ) {
+ $0.withUpdatedAudioState(
+ isEnabled,
+ file: file,
+ function: function,
+ line: line
+ )
+ }
}
/// Changes the video state (enabled/disabled) for the call.
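
The change above threads the original call site through to downstream logging via defaulted #file/#function/#line parameters. A minimal sketch of the idiom, assuming nothing about the SDK's log API:

    func setAudioEnabled(
        _ isEnabled: Bool,
        file: StaticString = #file,
        function: StaticString = #function,
        line: UInt = #line
    ) {
        // Defaulted literals capture the caller's location, so the log points
        // at the API consumer rather than at this wrapper.
        print("audioEnabled=\(isEnabled) [\(file):\(line) \(function)]")
    }
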
diff --git a/Sources/StreamVideo/WebRTC/v2/WebRTCPermissionsAdapter.swift b/Sources/StreamVideo/WebRTC/v2/WebRTCPermissionsAdapter.swift
index 7e9777cf8..46d92a188 100644
--- a/Sources/StreamVideo/WebRTC/v2/WebRTCPermissionsAdapter.swift
+++ b/Sources/StreamVideo/WebRTC/v2/WebRTCPermissionsAdapter.swift
@@ -138,11 +138,11 @@ final class WebRTCPermissionsAdapter: @unchecked Sendable {
}
var updatedCallSettings = callSettings
- if callSettings.audioOn, !permissions.hasMicrophonePermission {
+ if callSettings.audioOn, permissions.state.microphonePermission != .granted {
updatedCallSettings = updatedCallSettings.withUpdatedAudioState(false)
}
- if callSettings.videoOn, !permissions.hasCameraPermission {
+ if callSettings.videoOn, permissions.state.cameraPermission != .granted {
updatedCallSettings = updatedCallSettings.withUpdatedVideoState(false)
}
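
The gating above boils down to: a capability stays enabled only while its permission is explicitly granted. A minimal stand-in; the enum mirrors the .granted comparison in the diff and is an assumption about the real permission type:

    enum PermissionStatus { case granted, denied, undetermined }

    func gate(
        audioOn: Bool,
        videoOn: Bool,
        microphone: PermissionStatus,
        camera: PermissionStatus
    ) -> (audioOn: Bool, videoOn: Bool) {
        // Anything short of .granted (denied or undetermined) disables the capability.
        (audioOn && microphone == .granted, videoOn && camera == .granted)
    }
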
diff --git a/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift b/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift
index c7e3eff9e..88d409d43 100644
--- a/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift
+++ b/Sources/StreamVideo/WebRTC/v2/WebRTCStateAdapter.swift
@@ -35,6 +35,7 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate, W
}
@Injected(\.permissions) private var permissions
+ @Injected(\.audioStore) private var audioStore
// Properties for user, API key, call ID, video configuration, and factories.
let unifiedSessionId: String = UUID().uuidString
@@ -51,7 +52,7 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate, W
/// Published properties that represent different parts of the WebRTC state.
@Published private(set) var sessionID: String = UUID().uuidString
@Published private(set) var token: String = ""
- @Published private(set) var callSettings: CallSettings = .init()
+ @Published private(set) var callSettings: CallSettings = .default
@Published private(set) var audioSettings: AudioSettings = .init()
/// Published property to track video options and update them.
@@ -116,14 +117,50 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate, W
rtcPeerConnectionCoordinatorFactory: RTCPeerConnectionCoordinatorProviding,
videoCaptureSessionProvider: VideoCaptureSessionProvider = .init(),
screenShareSessionProvider: ScreenShareSessionProvider = .init()
+ ) {
+ self.init(
+ user: user,
+ apiKey: apiKey,
+ callCid: callCid,
+ videoConfig: videoConfig,
+ peerConnectionFactory: PeerConnectionFactory.build(
+ audioProcessingModule: videoConfig.audioProcessingModule
+ ),
+ rtcPeerConnectionCoordinatorFactory: rtcPeerConnectionCoordinatorFactory,
+ videoCaptureSessionProvider: videoCaptureSessionProvider,
+ screenShareSessionProvider: screenShareSessionProvider
+ )
+ }
+
+ /// Initializes the WebRTC state adapter with user details and connection
+ /// configurations.
+ ///
+ /// - Parameters:
+ /// - user: The user participating in the call.
+ /// - apiKey: The API key for authenticating WebRTC calls.
+ /// - callCid: The call identifier (callCid).
+ /// - videoConfig: Configuration for video settings.
+    ///   - peerConnectionFactory: The factory used to construct peer connections
+    ///   and the audio session.
+ /// - rtcPeerConnectionCoordinatorFactory: Factory for peer connection
+ /// creation.
+ /// - videoCaptureSessionProvider: Provides sessions for video capturing.
+ /// - screenShareSessionProvider: Provides sessions for screen sharing.
+ init(
+ user: User,
+ apiKey: String,
+ callCid: String,
+ videoConfig: VideoConfig,
+ peerConnectionFactory: PeerConnectionFactory,
+ rtcPeerConnectionCoordinatorFactory: RTCPeerConnectionCoordinatorProviding,
+ videoCaptureSessionProvider: VideoCaptureSessionProvider = .init(),
+ screenShareSessionProvider: ScreenShareSessionProvider = .init()
) {
self.user = user
self.apiKey = apiKey
self.callCid = callCid
self.videoConfig = videoConfig
- let peerConnectionFactory = PeerConnectionFactory.build(
- audioProcessingModule: videoConfig.audioProcessingModule
- )
self.peerConnectionFactory = peerConnectionFactory
self.rtcPeerConnectionCoordinatorFactory = rtcPeerConnectionCoordinatorFactory
self.videoCaptureSessionProvider = videoCaptureSessionProvider
@@ -509,6 +546,13 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate, W
}
await set(callSettings: updatedCallSettings)
+ log.debug(
+ "CallSettings updated \(currentCallSettings) -> \(updatedCallSettings)",
+ subsystems: .webRTC,
+ functionName: functionName,
+ fileName: fileName,
+ lineNumber: lineNumber
+ )
guard
let publisher = await self.publisher
@@ -672,6 +716,10 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate, W
}
func configureAudioSession(source: JoinSource?) async throws {
+ try await audioStore.dispatch([
+ .setAudioDeviceModule(peerConnectionFactory.audioDeviceModule)
+ ]).result()
+
audioSession.activate(
callSettingsPublisher: $callSettings.removeDuplicates().eraseToAnyPublisher(),
ownCapabilitiesPublisher: $ownCapabilities.removeDuplicates().eraseToAnyPublisher(),
@@ -700,19 +748,32 @@ actor WebRTCStateAdapter: ObservableObject, StreamAudioSessionAdapterDelegate, W
// MARK: - AudioSessionDelegate
- nonisolated func audioSessionAdapterDidUpdateSpeakerOn(_ speakerOn: Bool) {
+ nonisolated func audioSessionAdapterDidUpdateSpeakerOn(
+ _ speakerOn: Bool,
+ file: StaticString,
+ function: StaticString,
+ line: UInt
+ ) {
Task(disposableBag: disposableBag) { [weak self] in
guard let self else {
return
}
- await self.enqueueCallSettings {
+ await self.enqueueCallSettings(
+ functionName: function,
+ fileName: file,
+ lineNumber: line
+ ) {
$0.withUpdatedSpeakerState(speakerOn)
}
- log.debug(
- "AudioSession delegated updated speakerOn:\(speakerOn).",
- subsystems: .audioSession
- )
}
+ log.debug(
+            "AudioSession delegate updated speakerOn:\(speakerOn).",
+ subsystems: .audioSession,
+ functionName: function,
+ fileName: file,
+ lineNumber: line
+ )
}
// MARK: - WebRTCPermissionsAdapterDelegate
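
The initializer split above is a standard injection seam: the convenience initializer keeps existing call sites building the real PeerConnectionFactory, while the designated one lets tests inject a mock. A reduced sketch of the shape, with placeholder types:

    struct Factory { /* placeholder for PeerConnectionFactory */ }

    final class Adapter {
        let factory: Factory

        // Production call sites keep the old signature; the real factory is built here.
        convenience init() {
            self.init(factory: Factory())
        }

        // Tests inject a mock through the designated initializer.
        init(factory: Factory) {
            self.factory = factory
        }
    }
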
diff --git a/Sources/StreamVideoSwiftUI/CallView/CallControls/Stateless/StatelessMicrophoneIconView.swift b/Sources/StreamVideoSwiftUI/CallView/CallControls/Stateless/StatelessMicrophoneIconView.swift
index 0189e1f56..50ad3aa74 100644
--- a/Sources/StreamVideoSwiftUI/CallView/CallControls/Stateless/StatelessMicrophoneIconView.swift
+++ b/Sources/StreamVideoSwiftUI/CallView/CallControls/Stateless/StatelessMicrophoneIconView.swift
@@ -40,7 +40,7 @@ public struct StatelessMicrophoneIconView: View {
@MainActor
public init(
call: Call?,
- callSettings: CallSettings = .init(),
+ callSettings: CallSettings = .default,
size: CGFloat = 44,
controlStyle: ToggleControlStyle = .init(
enabled: .init(icon: Appearance.default.images.micTurnOn, iconStyle: .transparent),
diff --git a/Sources/StreamVideoSwiftUI/CallView/CallControls/Stateless/StatelessVideoIconView.swift b/Sources/StreamVideoSwiftUI/CallView/CallControls/Stateless/StatelessVideoIconView.swift
index 5cab591c4..2e58befb5 100644
--- a/Sources/StreamVideoSwiftUI/CallView/CallControls/Stateless/StatelessVideoIconView.swift
+++ b/Sources/StreamVideoSwiftUI/CallView/CallControls/Stateless/StatelessVideoIconView.swift
@@ -39,7 +39,7 @@ public struct StatelessVideoIconView: View {
/// - actionHandler: An optional closure to handle button tap actions.
public init(
call: Call?,
- callSettings: CallSettings = .init(),
+ callSettings: CallSettings = .default,
size: CGFloat = 44,
controlStyle: ToggleControlStyle = .init(
enabled: .init(icon: Appearance.default.images.videoTurnOn, iconStyle: .transparent),
diff --git a/Sources/StreamVideoSwiftUI/CallViewModel.swift b/Sources/StreamVideoSwiftUI/CallViewModel.swift
index c72384d0c..250992bf2 100644
--- a/Sources/StreamVideoSwiftUI/CallViewModel.swift
+++ b/Sources/StreamVideoSwiftUI/CallViewModel.swift
@@ -247,7 +247,7 @@ open class CallViewModel: ObservableObject {
callSettings: CallSettings? = nil
) {
self.participantsLayout = participantsLayout
- self.callSettings = callSettings ?? CallSettings()
+ self.callSettings = callSettings ?? .default
localCallSettingsChange = callSettings != nil
subscribeToCallEvents()
@@ -785,7 +785,7 @@ open class CallViewModel: ObservableObject {
// Reset the CallSettings so that the next Call will be joined
// with either new overrides or the values provided from the API.
- callSettings = .init()
+ callSettings = .default
localCallSettingsChange = false
}
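
The .default spelling adopted across this diff presumably wraps the previous .init(); a plausible definition, which is an assumption rather than something shown in the diff:

    extension CallSettings {
        // Hypothetical: a shared default mirroring the old `.init()` call sites.
        static let `default` = CallSettings()
    }
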
diff --git a/Sources/StreamVideoSwiftUI/Info.plist b/Sources/StreamVideoSwiftUI/Info.plist
index 5c985b4ce..12e96635c 100644
--- a/Sources/StreamVideoSwiftUI/Info.plist
+++ b/Sources/StreamVideoSwiftUI/Info.plist
@@ -15,7 +15,7 @@
CFBundlePackageType
$(PRODUCT_BUNDLE_PACKAGE_TYPE)
CFBundleShortVersionString
- 1.36.0
+ 1.37.0
CFBundleVersion
$(CURRENT_PROJECT_VERSION)
NSHumanReadableCopyright
diff --git a/Sources/StreamVideoUIKit/Info.plist b/Sources/StreamVideoUIKit/Info.plist
index 5c985b4ce..12e96635c 100644
--- a/Sources/StreamVideoUIKit/Info.plist
+++ b/Sources/StreamVideoUIKit/Info.plist
@@ -15,7 +15,7 @@
CFBundlePackageType
$(PRODUCT_BUNDLE_PACKAGE_TYPE)
CFBundleShortVersionString
- 1.36.0
+ 1.37.0
CFBundleVersion
$(CURRENT_PROJECT_VERSION)
NSHumanReadableCopyright
diff --git a/StreamVideo-XCFramework.podspec b/StreamVideo-XCFramework.podspec
index 433096e55..c97790d0e 100644
--- a/StreamVideo-XCFramework.podspec
+++ b/StreamVideo-XCFramework.podspec
@@ -1,6 +1,6 @@
Pod::Spec.new do |spec|
spec.name = 'StreamVideo-XCFramework'
- spec.version = '1.36.0'
+ spec.version = '1.37.0'
spec.summary = 'StreamVideo iOS Video Client'
spec.description = 'StreamVideo is the official Swift client for Stream Video, a service for building video applications.'
@@ -24,7 +24,7 @@ Pod::Spec.new do |spec|
spec.prepare_command = <<-CMD
mkdir -p Frameworks/
- curl -sL "https://github.com/GetStream/stream-video-swift-webrtc/releases/download/137.0.43/StreamWebRTC.xcframework.zip" -o Frameworks/StreamWebRTC.zip
+ curl -sL "https://github.com/GetStream/stream-video-swift-webrtc/releases/download/137.0.52/StreamWebRTC.xcframework.zip" -o Frameworks/StreamWebRTC.zip
unzip -o Frameworks/StreamWebRTC.zip -d Frameworks/
rm Frameworks/StreamWebRTC.zip
CMD
diff --git a/StreamVideo.podspec b/StreamVideo.podspec
index 95083a646..8fdd01d4d 100644
--- a/StreamVideo.podspec
+++ b/StreamVideo.podspec
@@ -1,6 +1,6 @@
Pod::Spec.new do |spec|
spec.name = 'StreamVideo'
- spec.version = '1.36.0'
+ spec.version = '1.37.0'
spec.summary = 'StreamVideo iOS Video Client'
spec.description = 'StreamVideo is the official Swift client for Stream Video, a service for building video applications.'
@@ -25,7 +25,7 @@ Pod::Spec.new do |spec|
spec.prepare_command = <<-CMD
mkdir -p Frameworks/
- curl -sL "https://github.com/GetStream/stream-video-swift-webrtc/releases/download/137.0.43/StreamWebRTC.xcframework.zip" -o Frameworks/StreamWebRTC.zip
+ curl -sL "https://github.com/GetStream/stream-video-swift-webrtc/releases/download/137.0.52/StreamWebRTC.xcframework.zip" -o Frameworks/StreamWebRTC.zip
unzip -o Frameworks/StreamWebRTC.zip -d Frameworks/
rm Frameworks/StreamWebRTC.zip
CMD
diff --git a/StreamVideo.xcodeproj/project.pbxproj b/StreamVideo.xcodeproj/project.pbxproj
index fcf701949..8a5325bf0 100644
--- a/StreamVideo.xcodeproj/project.pbxproj
+++ b/StreamVideo.xcodeproj/project.pbxproj
@@ -69,20 +69,8 @@
40151F9E2E74466400326540 /* AudioProcessingStore+DefaultReducer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40151F9D2E74466400326540 /* AudioProcessingStore+DefaultReducer.swift */; };
40151FA02E7446FC00326540 /* AudioProcessingStore+CapturedChannelsMiddleware.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40151F9F2E7446FC00326540 /* AudioProcessingStore+CapturedChannelsMiddleware.swift */; };
40151FA22E74481100326540 /* AudioProcessingStore+AudioFilterMiddleware.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40151FA12E74481100326540 /* AudioProcessingStore+AudioFilterMiddleware.swift */; };
- 4019A2502E40E08B00CE70A4 /* RTCAudioStore.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A24F2E40E08B00CE70A4 /* RTCAudioStore.swift */; };
- 4019A2542E40E25000CE70A4 /* RTCAudioStoreAction+Generic.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A2532E40E25000CE70A4 /* RTCAudioStoreAction+Generic.swift */; };
- 4019A2572E40E27000CE70A4 /* RTCAudioStore+State.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A2562E40E27000CE70A4 /* RTCAudioStore+State.swift */; };
- 4019A25A2E40E2A600CE70A4 /* RTCAudioStoreAction.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A2592E40E2A600CE70A4 /* RTCAudioStoreAction.swift */; };
- 4019A25C2E40E2E800CE70A4 /* RTCAudioStoreReducer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A25B2E40E2E800CE70A4 /* RTCAudioStoreReducer.swift */; };
- 4019A25E2E40E45D00CE70A4 /* RTCAudioSessionReducer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A25D2E40E45D00CE70A4 /* RTCAudioSessionReducer.swift */; };
- 4019A2632E40EB6000CE70A4 /* RTCAudioStoreMiddleware.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A2622E40EB6000CE70A4 /* RTCAudioStoreMiddleware.swift */; };
- 4019A2682E40ED5900CE70A4 /* RTCAudioStore+InterruptionEffect.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A2672E40ED5900CE70A4 /* RTCAudioStore+InterruptionEffect.swift */; };
4019A26D2E40F48300CE70A4 /* CallAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A26C2E40F48300CE70A4 /* CallAudioSession.swift */; };
- 4019A26F2E40FC8F00CE70A4 /* RTCAudioStore+RouteChangeEffect.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A26E2E40FC8F00CE70A4 /* RTCAudioStore+RouteChangeEffect.swift */; };
- 4019A2782E42225800CE70A4 /* CallKitAudioSessionReducer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A2772E42225800CE70A4 /* CallKitAudioSessionReducer.swift */; };
4019A27A2E42475300CE70A4 /* JoinSource.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A2792E42475300CE70A4 /* JoinSource.swift */; };
- 4019A27C2E43397100CE70A4 /* RTCAudioStoreAction+AudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A27B2E43397100CE70A4 /* RTCAudioStoreAction+AudioSession.swift */; };
- 4019A27E2E43398C00CE70A4 /* RTCAudioStoreAction+CallKit.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A27D2E43398C00CE70A4 /* RTCAudioStoreAction+CallKit.swift */; };
4019A2802E43529000CE70A4 /* AudioSessionProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A27F2E43529000CE70A4 /* AudioSessionProtocol.swift */; };
4019A2832E4352DF00CE70A4 /* RTCAudioSession+AudioSessionProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A2822E4352DF00CE70A4 /* RTCAudioSession+AudioSessionProtocol.swift */; };
4019A2872E43565A00CE70A4 /* MockAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4019A2862E43565A00CE70A4 /* MockAudioSession.swift */; };
@@ -146,7 +134,6 @@
40245F652BE27B2000FCF075 /* StatelessAudioOutputIconView_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40245F642BE27B2000FCF075 /* StatelessAudioOutputIconView_Tests.swift */; };
40245F672BE27B8400FCF075 /* StatelessSpeakerIconView_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40245F662BE27B8400FCF075 /* StatelessSpeakerIconView_Tests.swift */; };
40245F692BE27CCB00FCF075 /* StatelessParticipantsListButton_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40245F682BE27CCB00FCF075 /* StatelessParticipantsListButton_Tests.swift */; };
- 4026BEEA2EA79FD400360AD0 /* CallFlow_PerformanceTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4026BEE92EA79FD400360AD0 /* CallFlow_PerformanceTests.swift */; };
402778832BD13C62002F4399 /* NoiseCancellationFilter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402778822BD13C62002F4399 /* NoiseCancellationFilter.swift */; };
4028FE982DC4F638001F9DC3 /* ConsumableBucket.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4028FE972DC4F638001F9DC3 /* ConsumableBucket.swift */; };
4028FE9A2DC4FC8E001F9DC3 /* ConsumableBucketItemTransformer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4028FE992DC4FC8E001F9DC3 /* ConsumableBucketItemTransformer.swift */; };
@@ -181,12 +168,12 @@
402C544B2B6B9FF000672BFB /* CallButtonView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4091460A2B690AA4007F3C17 /* CallButtonView.swift */; };
402C545B2B6BE50500672BFB /* MockStreamStatistics.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402C545A2B6BE50500672BFB /* MockStreamStatistics.swift */; };
402C545D2B6BE5E200672BFB /* StreamCallStatisticsFormatter_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402C545C2B6BE5E200672BFB /* StreamCallStatisticsFormatter_Tests.swift */; };
+ 402C5C5F2ECB3C2D0096F212 /* LivestreamAudioSessionPolicy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402C5C5E2ECB3C2D0096F212 /* LivestreamAudioSessionPolicy.swift */; };
+ 402C5C612ECB96D30096F212 /* AVAudioSessionObserver.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402C5C602ECB96D30096F212 /* AVAudioSessionObserver.swift */; };
+ 402C5C632ECB988A0096F212 /* RTCAudioStore+AVAudioSessionEffect.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402C5C622ECB988A0096F212 /* RTCAudioStore+AVAudioSessionEffect.swift */; };
402D0E882D0C94CD00E9B83F /* RTCAudioTrack+Clone.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402D0E872D0C94CD00E9B83F /* RTCAudioTrack+Clone.swift */; };
402D0E8A2D0C94E600E9B83F /* RTCVideoTrack+Clone.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402D0E892D0C94E600E9B83F /* RTCVideoTrack+Clone.swift */; };
402D0E8C2D0C94F900E9B83F /* CallSettings+Audio.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402D0E8B2D0C94F900E9B83F /* CallSettings+Audio.swift */; };
- 402E69A22EA65FF90082F7FA /* BatteryStoreObservationMiddleware_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402E69A02EA65FF90082F7FA /* BatteryStoreObservationMiddleware_Tests.swift */; };
- 402E69A32EA65FF90082F7FA /* BatteryStore_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402E699E2EA65FF90082F7FA /* BatteryStore_Tests.swift */; };
- 402E69A42EA65FF90082F7FA /* BatteryStoreDefaultReducer_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402E699F2EA65FF90082F7FA /* BatteryStoreDefaultReducer_Tests.swift */; };
402EE1302AA8861B00312632 /* DemoChatViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402EE12F2AA8861B00312632 /* DemoChatViewModel.swift */; };
402F04A92B70ED8600CA1986 /* StreamCallStatisticsReporter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402F04A62B70ED8600CA1986 /* StreamCallStatisticsReporter.swift */; };
402F04AA2B70ED8600CA1986 /* Statistics+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 402F04A72B70ED8600CA1986 /* Statistics+Convenience.swift */; };
@@ -223,6 +210,8 @@
40382F472C89D00200C2D00F /* Stream_Video_Sfu_Models_Participant+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40382F442C89D00200C2D00F /* Stream_Video_Sfu_Models_Participant+Convenience.swift */; };
40382F482C89D03700C2D00F /* Stream_Video_Sfu_Models_ConnectionQuality+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40382F412C89CF9300C2D00F /* Stream_Video_Sfu_Models_ConnectionQuality+Convenience.swift */; };
40382F502C8B3DAE00C2D00F /* StreamRTCPeerConnection.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40382F4F2C8B3DA800C2D00F /* StreamRTCPeerConnection.swift */; };
+ 4039088D2EC2311A00B19FA1 /* StoreEffect.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4039088C2EC2311A00B19FA1 /* StoreEffect.swift */; };
+ 403908AC2EC2A99C00B19FA1 /* RTCAudioStore+StereoPlayoutEffect.swift in Sources */ = {isa = PBXBuildFile; fileRef = 403908AB2EC2A99C00B19FA1 /* RTCAudioStore+StereoPlayoutEffect.swift */; };
4039F0C02D0099E40078159E /* RTCRtpCodecCapability+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4039F0BF2D0099E40078159E /* RTCRtpCodecCapability+Convenience.swift */; };
4039F0CA2D0222E40078159E /* Stream_Video_Sfu_Models_PublishOption+VideoLayers.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4039F0C92D0222E40078159E /* Stream_Video_Sfu_Models_PublishOption+VideoLayers.swift */; };
4039F0CC2D0241120078159E /* AudioCodec.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4039F0CB2D0241120078159E /* AudioCodec.swift */; };
@@ -280,6 +269,7 @@
404A81342DA3CB66001F7FA8 /* CallStateMachine_RejectedStageTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 403FB1612BFE22A40047A696 /* CallStateMachine_RejectedStageTests.swift */; };
404A81362DA3CBF0001F7FA8 /* CallConfigurationTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 404A81352DA3CBF0001F7FA8 /* CallConfigurationTests.swift */; };
404A81382DA3CC0C001F7FA8 /* CallConfiguration.swift in Sources */ = {isa = PBXBuildFile; fileRef = 404A81372DA3CC0C001F7FA8 /* CallConfiguration.swift */; };
+ 404B546B2ED06D8C009378F2 /* RetriableTask.swift in Sources */ = {isa = PBXBuildFile; fileRef = 404B546A2ED06D8C009378F2 /* RetriableTask.swift */; };
404C27CB2BF2552800DF2937 /* XCTestCase+PredicateFulfillment.swift in Sources */ = {isa = PBXBuildFile; fileRef = 409CA7982BEE21720045F7AA /* XCTestCase+PredicateFulfillment.swift */; };
404C27CC2BF2552900DF2937 /* XCTestCase+PredicateFulfillment.swift in Sources */ = {isa = PBXBuildFile; fileRef = 409CA7982BEE21720045F7AA /* XCTestCase+PredicateFulfillment.swift */; };
404CAEE72B8F48F6007087BC /* DemoBackgroundEffectSelector.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0E95F2B88ABC80089E8D3 /* DemoBackgroundEffectSelector.swift */; };
@@ -487,7 +477,6 @@
4097B3832BF4E37B0057992D /* OnChangeViewModifier_iOS13.swift in Sources */ = {isa = PBXBuildFile; fileRef = 4097B3822BF4E37B0057992D /* OnChangeViewModifier_iOS13.swift */; };
40986C3A2CCB6D2F00510F88 /* RTCRtpEncodingParameters_Test.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40986C392CCB6D2F00510F88 /* RTCRtpEncodingParameters_Test.swift */; };
40986C3C2CCB6E4B00510F88 /* RTCRtpTransceiverInit_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40986C3B2CCB6E4B00510F88 /* RTCRtpTransceiverInit_Tests.swift */; };
- 40986C3E2CD1148F00510F88 /* AudioSession_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40986C3D2CD1148F00510F88 /* AudioSession_Tests.swift */; };
409AF6E62DAFAC4700EE7BF6 /* PictureInPictureReconnectionView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 409AF6E52DAFAC4700EE7BF6 /* PictureInPictureReconnectionView.swift */; };
409AF6E82DAFC80200EE7BF6 /* PictureInPictureContent.swift in Sources */ = {isa = PBXBuildFile; fileRef = 409AF6E72DAFC80200EE7BF6 /* PictureInPictureContent.swift */; };
409AF6EA2DAFE1B000EE7BF6 /* PictureInPictureContentProviderTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 409AF6E92DAFE1B000EE7BF6 /* PictureInPictureContentProviderTests.swift */; };
@@ -508,15 +497,7 @@
40A0E9602B88ABC80089E8D3 /* DemoBackgroundEffectSelector.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0E95F2B88ABC80089E8D3 /* DemoBackgroundEffectSelector.swift */; };
40A0E9622B88D3DC0089E8D3 /* UIInterfaceOrientation+CGOrientation.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0E9612B88D3DC0089E8D3 /* UIInterfaceOrientation+CGOrientation.swift */; };
40A0E9682B88E04D0089E8D3 /* CIImage_Resize_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0E9672B88E04D0089E8D3 /* CIImage_Resize_Tests.swift */; };
- 40A0FFB12EA63CB900F39D8F /* BatteryStore.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0FFB02EA63CB900F39D8F /* BatteryStore.swift */; };
- 40A0FFB42EA63D3C00F39D8F /* BatteryStore+Namespace.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0FFB32EA63D3C00F39D8F /* BatteryStore+Namespace.swift */; };
- 40A0FFB62EA63D8F00F39D8F /* BatteryStore+State.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0FFB52EA63D8F00F39D8F /* BatteryStore+State.swift */; };
- 40A0FFB82EA63D9700F39D8F /* BatteryStore+Action.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0FFB72EA63D9700F39D8F /* BatteryStore+Action.swift */; };
- 40A0FFBB2EA63E9A00F39D8F /* BatteryStore+DefaultReducer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0FFBA2EA63E9A00F39D8F /* BatteryStore+DefaultReducer.swift */; };
- 40A0FFBE2EA63FE500F39D8F /* BatteryStore+ObservationMiddleware.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0FFBD2EA63FE500F39D8F /* BatteryStore+ObservationMiddleware.swift */; };
40A0FFC02EA6418000F39D8F /* Sequence+AsyncReduce.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A0FFBF2EA6418000F39D8F /* Sequence+AsyncReduce.swift */; };
- 40A317E82EB504C900733948 /* ModerationBlurViewModifier.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A317E72EB504C900733948 /* ModerationBlurViewModifier.swift */; };
- 40A317EB2EB5081500733948 /* ModerationWarningViewModifier.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A317EA2EB5081500733948 /* ModerationWarningViewModifier.swift */; };
40A7C5B52E099B4600EEDF9C /* ParticipantEventResetAdapter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A7C5B22E099B1000EEDF9C /* ParticipantEventResetAdapter.swift */; };
40A7C5B82E099D6200EEDF9C /* ParticipantEventResetAdapter_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A7C5B72E099D6200EEDF9C /* ParticipantEventResetAdapter_Tests.swift */; };
40A9416E2B4D959F006D6965 /* StreamPictureInPictureAdapter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40A9416D2B4D959F006D6965 /* StreamPictureInPictureAdapter.swift */; };
@@ -677,7 +658,24 @@
40B575D42DCCECE800F489B8 /* MockAVPictureInPictureController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B575D22DCCECDA00F489B8 /* MockAVPictureInPictureController.swift */; };
40B575D82DCCF00200F489B8 /* StreamPictureInPictureControllerProtocol.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B575D52DCCEFB500F489B8 /* StreamPictureInPictureControllerProtocol.swift */; };
40B713692A275F1400D1FE67 /* AppState.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8456E6C5287EB55F004E180E /* AppState.swift */; };
- 40BAD0B32EA7CE3200CCD3D7 /* StreamWebRTC in Frameworks */ = {isa = PBXBuildFile; productRef = 40BAD0B22EA7CE3200CCD3D7 /* StreamWebRTC */; };
+ 40B8FFA72EC393A80061E3F6 /* StreamWebRTC in Frameworks */ = {isa = PBXBuildFile; productRef = 40B8FFA62EC393A80061E3F6 /* StreamWebRTC */; };
+ 40B8FFA92EC393B50061E3F6 /* StreamWebRTC in Frameworks */ = {isa = PBXBuildFile; productRef = 40B8FFA82EC393B50061E3F6 /* StreamWebRTC */; };
+ 40B8FFAB2EC393BB0061E3F6 /* StreamWebRTC in Frameworks */ = {isa = PBXBuildFile; productRef = 40B8FFAA2EC393BB0061E3F6 /* StreamWebRTC */; };
+ 40B8FFB62EC3949F0061E3F6 /* BatteryStore.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFB42EC3949F0061E3F6 /* BatteryStore.swift */; };
+ 40B8FFB72EC3949F0061E3F6 /* BatteryStore+Action.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFB02EC3949F0061E3F6 /* BatteryStore+Action.swift */; };
+ 40B8FFB82EC3949F0061E3F6 /* BatteryStore+Namespace.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFB12EC3949F0061E3F6 /* BatteryStore+Namespace.swift */; };
+ 40B8FFB92EC3949F0061E3F6 /* BatteryStore+State.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFB22EC3949F0061E3F6 /* BatteryStore+State.swift */; };
+ 40B8FFBA2EC3949F0061E3F6 /* BatteryStore+ObservationMiddleware.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFAC2EC3949F0061E3F6 /* BatteryStore+ObservationMiddleware.swift */; };
+ 40B8FFBB2EC3949F0061E3F6 /* BatteryStore+DefaultReducer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFAE2EC3949F0061E3F6 /* BatteryStore+DefaultReducer.swift */; };
+ 40B8FFC02EC394AA0061E3F6 /* CallModerationBlurEvent.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFBC2EC394AA0061E3F6 /* CallModerationBlurEvent.swift */; };
+ 40B8FFC12EC394AA0061E3F6 /* RingCallRequest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFBE2EC394AA0061E3F6 /* RingCallRequest.swift */; };
+ 40B8FFC22EC394AA0061E3F6 /* CallModerationWarningEvent.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFBD2EC394AA0061E3F6 /* CallModerationWarningEvent.swift */; };
+ 40B8FFC32EC394AA0061E3F6 /* RingCallResponse.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFBF2EC394AA0061E3F6 /* RingCallResponse.swift */; };
+ 40B8FFC72EC394C50061E3F6 /* ModerationWarningViewModifier.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFC52EC394C50061E3F6 /* ModerationWarningViewModifier.swift */; };
+ 40B8FFC82EC394C50061E3F6 /* ModerationBlurViewModifier.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFC42EC394C50061E3F6 /* ModerationBlurViewModifier.swift */; };
+ 40B8FFCD2EC394D30061E3F6 /* BatteryStoreDefaultReducer_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFCA2EC394D30061E3F6 /* BatteryStoreDefaultReducer_Tests.swift */; };
+ 40B8FFCE2EC394D30061E3F6 /* BatteryStore_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFC92EC394D30061E3F6 /* BatteryStore_Tests.swift */; };
+ 40B8FFCF2EC394D30061E3F6 /* BatteryStoreObservationMiddleware_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B8FFCB2EC394D30061E3F6 /* BatteryStoreObservationMiddleware_Tests.swift */; };
40BBC4792C6227DC002AEF92 /* DemoNoiseCancellationButtonView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40BBC4782C6227DC002AEF92 /* DemoNoiseCancellationButtonView.swift */; };
40BBC47C2C6227F1002AEF92 /* View+PresentDemoMoreMenu.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40BBC47B2C6227F1002AEF92 /* View+PresentDemoMoreMenu.swift */; };
40BBC47E2C62287F002AEF92 /* DemoReconnectionButtonView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40BBC47D2C62287F002AEF92 /* DemoReconnectionButtonView.swift */; };
@@ -741,7 +739,6 @@
40C4E8352E60BC6300FC29BC /* CallKitMissingPermissionPolicy_EndCallTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40C4E8342E60BC6300FC29BC /* CallKitMissingPermissionPolicy_EndCallTests.swift */; };
40C4E83F2E65B6E300FC29BC /* MockDefaultAPIEndpoints.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40C4E83E2E65B6E300FC29BC /* MockDefaultAPIEndpoints.swift */; };
40C4E8402E65B74400FC29BC /* MockDefaultAPIEndpoints.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40C4E83E2E65B6E300FC29BC /* MockDefaultAPIEndpoints.swift */; };
- 40C4E85D2E6999A500FC29BC /* RTCAudioStore+RestartAudioSession.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40C4E85C2E6999A500FC29BC /* RTCAudioStore+RestartAudioSession.swift */; };
40C4E85F2E69B5C100FC29BC /* ParticipantSource.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40C4E85E2E69B5C100FC29BC /* ParticipantSource.swift */; };
40C689182C64DDC70054528A /* Publisher+TaskSink.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40C689172C64DDC70054528A /* Publisher+TaskSink.swift */; };
40C708D62D8D729500D3501F /* Gleap in Frameworks */ = {isa = PBXBuildFile; productRef = 40C708D52D8D729500D3501F /* Gleap */; };
@@ -814,13 +811,9 @@
40D36AE22DDE023800972D75 /* WebRTCStatsCollecting.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D36AE12DDE023800972D75 /* WebRTCStatsCollecting.swift */; };
40D36AE42DDE02D100972D75 /* MockWebRTCStatsCollector.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D36AE32DDE02D100972D75 /* MockWebRTCStatsCollector.swift */; };
40D6ADDD2ACDB51C00EF5336 /* VideoRenderer_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D6ADDC2ACDB51C00EF5336 /* VideoRenderer_Tests.swift */; };
- 40D75C522E437FBC000E0438 /* InterruptionEffect_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D75C512E437FBC000E0438 /* InterruptionEffect_Tests.swift */; };
- 40D75C542E438317000E0438 /* RouteChangeEffect_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D75C532E438317000E0438 /* RouteChangeEffect_Tests.swift */; };
40D75C562E4385FE000E0438 /* MockAVAudioSessionPortDescription.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D75C552E4385FE000E0438 /* MockAVAudioSessionPortDescription.swift */; };
40D75C582E438607000E0438 /* MockAVAudioSessionRouteDescription.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D75C572E438607000E0438 /* MockAVAudioSessionRouteDescription.swift */; };
40D75C5C2E438633000E0438 /* AVAudioSessionRouteDescription+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D75C5B2E438633000E0438 /* AVAudioSessionRouteDescription+Dummy.swift */; };
- 40D75C5F2E438AC0000E0438 /* CallKitAudioSessionReducer_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D75C5E2E438AC0000E0438 /* CallKitAudioSessionReducer_Tests.swift */; };
- 40D75C612E438BBF000E0438 /* RTCAudioSessionReducer_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D75C602E438BBF000E0438 /* RTCAudioSessionReducer_Tests.swift */; };
40D75C632E4396D2000E0438 /* RTCAudioStore_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D75C622E4396D2000E0438 /* RTCAudioStore_Tests.swift */; };
40D75C652E44F5CE000E0438 /* CameraInterruptionsHandler.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D75C642E44F5CE000E0438 /* CameraInterruptionsHandler.swift */; };
40D946412AA5ECEF00C8861B /* CodeScanner.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40D946402AA5ECEF00C8861B /* CodeScanner.swift */; };
@@ -839,6 +832,22 @@
40E18AAF2CD51E9400A65C9F /* LockQueuing.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E18AAE2CD51E8E00A65C9F /* LockQueuing.swift */; };
40E18AB22CD51FC100A65C9F /* UnfairQueueTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E18AB12CD51FC100A65C9F /* UnfairQueueTests.swift */; };
40E18AB42CD522F700A65C9F /* RecursiveQueueTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E18AB32CD522F700A65C9F /* RecursiveQueueTests.swift */; };
+ 40E1C8972EA0F73000AC3647 /* StoreCoordinator.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C8962EA0F73000AC3647 /* StoreCoordinator.swift */; };
+ 40E1C8992EA1080100AC3647 /* Logger+ThrowingExecution.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C8982EA1080100AC3647 /* Logger+ThrowingExecution.swift */; };
+ 40E1C89B2EA1156600AC3647 /* RTCAudioDeviceModuleControlling.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C89A2EA1156600AC3647 /* RTCAudioDeviceModuleControlling.swift */; };
+ 40E1C89D2EA115AB00AC3647 /* MockRTCAudioDeviceModule.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C89C2EA115AB00AC3647 /* MockRTCAudioDeviceModule.swift */; };
+ 40E1C8A02EA1176C00AC3647 /* AudioDeviceModule_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C89F2EA1176C00AC3647 /* AudioDeviceModule_Tests.swift */; };
+ 40E1C8A22EA13C9700AC3647 /* MockAudioEngineNodeAdapter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C8A12EA13C9700AC3647 /* MockAudioEngineNodeAdapter.swift */; };
+ 40E1C8A52EA14D0500AC3647 /* RTCAudioSessionPublisher_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C8A42EA14D0500AC3647 /* RTCAudioSessionPublisher_Tests.swift */; };
+ 40E1C8A72EA1517400AC3647 /* RTCAudioStore_AVAudioSessionConfigurationValidatorTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C8A62EA1517400AC3647 /* RTCAudioStore_AVAudioSessionConfigurationValidatorTests.swift */; };
+ 40E1C8AB2EA1561D00AC3647 /* RTCAudioStore_CoordinatorTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C8A92EA1561D00AC3647 /* RTCAudioStore_CoordinatorTests.swift */; };
+ 40E1C8AF2EA157FD00AC3647 /* RTCAudioStore_WebRTCAudioSessionReducerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C8AD2EA157FD00AC3647 /* RTCAudioStore_WebRTCAudioSessionReducerTests.swift */; };
+ 40E1C8B12EA15A9200AC3647 /* RTCAudioStore_DefaultReducerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C8B02EA15A9200AC3647 /* RTCAudioStore_DefaultReducerTests.swift */; };
+ 40E1C8B32EA18C8400AC3647 /* RTCAudioStore_CallKitReducerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C8B22EA18C8400AC3647 /* RTCAudioStore_CallKitReducerTests.swift */; };
+ 40E1C8B62EA18E4D00AC3647 /* RTCAudioStore_AVAudioSessionReducerTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C8B52EA18E4D00AC3647 /* RTCAudioStore_AVAudioSessionReducerTests.swift */; };
+ 40E1C8BA2EA1946300AC3647 /* RTCAudioStore_InterruptionsEffectTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C8B92EA1946300AC3647 /* RTCAudioStore_InterruptionsEffectTests.swift */; };
+ 40E1C8BC2EA195E000AC3647 /* RTCAudioStore_AudioDeviceModuleMiddlewareTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C8BB2EA195E000AC3647 /* RTCAudioStore_AudioDeviceModuleMiddlewareTests.swift */; };
+ 40E1C8BF2EA1992500AC3647 /* CallAudioSession_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E1C8BD2EA1992500AC3647 /* CallAudioSession_Tests.swift */; };
40E3632E2D09DBFA0028C52A /* Int+DefaultValues.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E3632D2D09DBFA0028C52A /* Int+DefaultValues.swift */; };
40E363312D09DC650028C52A /* CGSize+DefaultValues.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363302D09DC650028C52A /* CGSize+DefaultValues.swift */; };
40E363362D09E4C80028C52A /* Stream_Video_Sfu_Models_VideoQuality+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363352D09E4C80028C52A /* Stream_Video_Sfu_Models_VideoQuality+Convenience.swift */; };
@@ -861,13 +870,41 @@
40E363752D0A2C6B0028C52A /* CGSize+Adapt.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363742D0A2C6B0028C52A /* CGSize+Adapt.swift */; };
40E363772D0A2E320028C52A /* BroadcastBufferReaderKey.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E363762D0A2E320028C52A /* BroadcastBufferReaderKey.swift */; };
40E741FF2D553ACD0044C955 /* CurrentDevice.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E741FE2D553ACD0044C955 /* CurrentDevice.swift */; };
- 40E7A45B2E29495500E8AB8B /* WebRTCLogger.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E7A4582E29487700E8AB8B /* WebRTCLogger.swift */; };
40E9B3B12BCD755F00ACF18F /* MemberResponse+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E9B3B02BCD755F00ACF18F /* MemberResponse+Dummy.swift */; };
40E9B3B32BCD93AE00ACF18F /* JoinCallResponse+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E9B3B22BCD93AE00ACF18F /* JoinCallResponse+Dummy.swift */; };
40E9B3B52BCD93F500ACF18F /* Credentials+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E9B3B42BCD93F500ACF18F /* Credentials+Dummy.swift */; };
40E9B3B72BCD941600ACF18F /* SFUResponse+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40E9B3B62BCD941600ACF18F /* SFUResponse+Dummy.swift */; };
+ 40ED20E92EC64B78004FF2F0 /* AVAudioSession.RouteChangeReason+Convenience.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40ED20E82EC64B78004FF2F0 /* AVAudioSession.RouteChangeReason+Convenience.swift */; };
40ED6D4B2B14F0E600FB5F69 /* Launch Screen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 40ED6D4A2B14F0E600FB5F69 /* Launch Screen.storyboard */; };
40EDA17C2C13792D00583A65 /* View+AlertWithTextField.swift in Sources */ = {isa = PBXBuildFile; fileRef = 408937902C134305000EEB69 /* View+AlertWithTextField.swift */; };
+ 40EE9D2B2E969F010000EA92 /* AudioDeviceModule.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D282E969F010000EA92 /* AudioDeviceModule.swift */; };
+ 40EE9D2C2E969F010000EA92 /* AudioEngineLevelNodeAdapter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D292E969F010000EA92 /* AudioEngineLevelNodeAdapter.swift */; };
+ 40EE9D352E97B3370000EA92 /* RTCAudioStore.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D342E97B3370000EA92 /* RTCAudioStore.swift */; };
+ 40EE9D3E2E97B38F0000EA92 /* RTCAudioStore+Namespace.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D3D2E97B38F0000EA92 /* RTCAudioStore+Namespace.swift */; };
+ 40EE9D402E97B3970000EA92 /* RTCAudioStore+State.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D3F2E97B3970000EA92 /* RTCAudioStore+State.swift */; };
+ 40EE9D422E97B39E0000EA92 /* RTCAudioStore+Action.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D412E97B39E0000EA92 /* RTCAudioStore+Action.swift */; };
+ 40EE9D462E97BC940000EA92 /* RTCAudioStore+DefaultReducer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D452E97BC940000EA92 /* RTCAudioStore+DefaultReducer.swift */; };
+ 40EE9D482E97BCA50000EA92 /* RTCAudioStore+AVAudioSessionReducer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D472E97BCA50000EA92 /* RTCAudioStore+AVAudioSessionReducer.swift */; };
+ 40EE9D4A2E97BCB20000EA92 /* RTCAudioStore+WebRTCAudioSessionReducer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D492E97BCB20000EA92 /* RTCAudioStore+WebRTCAudioSessionReducer.swift */; };
+ 40EE9D4D2E97C3910000EA92 /* RTCAudioStore+AVAudioSessionConfigurationValidator.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D4C2E97C3910000EA92 /* RTCAudioStore+AVAudioSessionConfigurationValidator.swift */; };
+ 40EE9D4F2E97C7500000EA92 /* RTCAudioStore+CallKitReducer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D4E2E97C7500000EA92 /* RTCAudioStore+CallKitReducer.swift */; };
+ 40EE9D512E97C7C60000EA92 /* RTCAudioStore+RouteChangeEffect.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D502E97C7C60000EA92 /* RTCAudioStore+RouteChangeEffect.swift */; };
+ 40EE9D532E97C8B70000EA92 /* RTCAudioSessionPublisher.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D522E97C8B70000EA92 /* RTCAudioSessionPublisher.swift */; };
+ 40EE9D552E97CA7F0000EA92 /* RTCAudioStore+InterruptionsEffect.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D542E97CA7F0000EA92 /* RTCAudioStore+InterruptionsEffect.swift */; };
+ 40EE9D572E97FC980000EA92 /* RTCAudioStore+AudioDeviceModuleMiddleware.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D562E97FC980000EA92 /* RTCAudioStore+AudioDeviceModuleMiddleware.swift */; };
+ 40EE9D5B2E991C6A0000EA92 /* RTCAudioStore+Coordinator.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EE9D5A2E991C6A0000EA92 /* RTCAudioStore+Coordinator.swift */; };
+ 40EF61A32ED876FF00ED1F04 /* AudioEngineLevelNodeAdapter_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61A22ED876FF00ED1F04 /* AudioEngineLevelNodeAdapter_Tests.swift */; };
+ 40EF61A52ED87E7D00ED1F04 /* LivestreamAudioSessionPolicyTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61A42ED87E7D00ED1F04 /* LivestreamAudioSessionPolicyTests.swift */; };
+ 40EF61AA2ED8801600ED1F04 /* AVAudioSessionObserver_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61A62ED8801600ED1F04 /* AVAudioSessionObserver_Tests.swift */; };
+ 40EF61AB2ED8801600ED1F04 /* RTCAudioStore_AVAudioSessionConfigurationValidatorTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61A82ED8801600ED1F04 /* RTCAudioStore_AVAudioSessionConfigurationValidatorTests.swift */; };
+ 40EF61AC2ED8801600ED1F04 /* RTCAudioSessionPublisher_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61A72ED8801600ED1F04 /* RTCAudioSessionPublisher_Tests.swift */; };
+ 40EF61AE2ED881BE00ED1F04 /* RTCAudioStore_AVAudioSessionEffectTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61AD2ED881BE00ED1F04 /* RTCAudioStore_AVAudioSessionEffectTests.swift */; };
+ 40EF61B02ED8849A00ED1F04 /* RTCAudioStore_RouteChangeEffectTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61AF2ED8849A00ED1F04 /* RTCAudioStore_RouteChangeEffectTests.swift */; };
+ 40EF61B22ED8859500ED1F04 /* RTCAudioStore_StereoPlayoutEffectTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61B12ED8859500ED1F04 /* RTCAudioStore_StereoPlayoutEffectTests.swift */; };
+ 40EF61B72ED8917100ED1F04 /* RTCAudioStoreState+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61B62ED8917100ED1F04 /* RTCAudioStoreState+Dummy.swift */; };
+ 40EF61B82ED8917100ED1F04 /* RTCAudioStoreState+Dummy.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61B62ED8917100ED1F04 /* RTCAudioStoreState+Dummy.swift */; };
+ 40EF61BA2ED893A400ED1F04 /* MockStoreDispatcher.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61B92ED893A400ED1F04 /* MockStoreDispatcher.swift */; };
+ 40EF61BE2ED8B01300ED1F04 /* Logger+WebRTC.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40EF61BD2ED8B01300ED1F04 /* Logger+WebRTC.swift */; };
40F017392BBEAF6400E89FD1 /* MockCallKitService.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F017382BBEAF6400E89FD1 /* MockCallKitService.swift */; };
40F0173B2BBEB1A900E89FD1 /* CallKitAdapterTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F0173A2BBEB1A900E89FD1 /* CallKitAdapterTests.swift */; };
40F0173E2BBEB86800E89FD1 /* TestsAuthenticationProvider.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40F0173D2BBEB86800E89FD1 /* TestsAuthenticationProvider.swift */; };
@@ -1087,12 +1124,6 @@
82E3BA552A0BAF4B001AB93E /* WebSocketClientEnvironment_Mock.swift in Sources */ = {isa = PBXBuildFile; fileRef = 82E3BA522A0BAF4B001AB93E /* WebSocketClientEnvironment_Mock.swift */; };
82E3BA562A0BAF64001AB93E /* WebSocketEngine_Mock.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84F58B8A29EEACAD00010C4C /* WebSocketEngine_Mock.swift */; };
82E3BA572A0BAF65001AB93E /* WebSocketEngine_Mock.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84F58B8A29EEACAD00010C4C /* WebSocketEngine_Mock.swift */; };
- 82EB8F572B0277730038B5A2 /* StreamWebRTC in Frameworks */ = {isa = PBXBuildFile; productRef = 82EB8F562B0277730038B5A2 /* StreamWebRTC */; };
- 82EB8F592B0277E70038B5A2 /* StreamWebRTC in Frameworks */ = {isa = PBXBuildFile; productRef = 82EB8F582B0277E70038B5A2 /* StreamWebRTC */; };
- 82EB8F5B2B0277EC0038B5A2 /* StreamWebRTC in Frameworks */ = {isa = PBXBuildFile; productRef = 82EB8F5A2B0277EC0038B5A2 /* StreamWebRTC */; };
- 82EB8F5D2B0277F10038B5A2 /* StreamWebRTC in Frameworks */ = {isa = PBXBuildFile; productRef = 82EB8F5C2B0277F10038B5A2 /* StreamWebRTC */; };
- 82EB8F5F2B0277F60038B5A2 /* StreamWebRTC in Frameworks */ = {isa = PBXBuildFile; productRef = 82EB8F5E2B0277F60038B5A2 /* StreamWebRTC */; };
- 82EB8F612B0277FB0038B5A2 /* StreamWebRTC in Frameworks */ = {isa = PBXBuildFile; productRef = 82EB8F602B0277FB0038B5A2 /* StreamWebRTC */; };
82FB89372A702A9200AC16A1 /* Authentication_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 82FB89362A702A9200AC16A1 /* Authentication_Tests.swift */; };
82FF40B52A17C6C200B4D95E /* CallControlsView_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 82FF40B42A17C6C200B4D95E /* CallControlsView_Tests.swift */; };
82FF40B72A17C6CD00B4D95E /* ReconnectionView_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 82FF40B62A17C6CD00B4D95E /* ReconnectionView_Tests.swift */; };
@@ -1138,8 +1169,6 @@
8414081129F284A800FF2D7C /* AssertJSONEqual.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8414081029F284A800FF2D7C /* AssertJSONEqual.swift */; };
8414081329F28B5700FF2D7C /* RTCConfiguration_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8414081229F28B5600FF2D7C /* RTCConfiguration_Tests.swift */; };
8414081529F28FFC00FF2D7C /* CallSettings_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8414081429F28FFC00FF2D7C /* CallSettings_Tests.swift */; };
- 841457372EBE5BF100D0D034 /* RingCallResponse.swift in Sources */ = {isa = PBXBuildFile; fileRef = 841457362EBE5BF100D0D034 /* RingCallResponse.swift */; };
- 841457382EBE5BF100D0D034 /* RingCallRequest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 841457352EBE5BF100D0D034 /* RingCallRequest.swift */; };
8415D3E1290B2AF2006E53CB /* outgoing.m4a in Resources */ = {isa = PBXBuildFile; fileRef = 8415D3E0290B2AF2006E53CB /* outgoing.m4a */; };
8415D3E3290BC882006E53CB /* Sounds.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8415D3E2290BC882006E53CB /* Sounds.swift */; };
841947982886D9CD0007B36E /* BundleExtensions.swift in Sources */ = {isa = PBXBuildFile; fileRef = 841947972886D9CD0007B36E /* BundleExtensions.swift */; };
@@ -1315,8 +1344,6 @@
845C09952C10A7D700F725B3 /* SessionTimer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 845C09902C0E0B7600F725B3 /* SessionTimer.swift */; };
845C09972C11AAA200F725B3 /* RejectCallRequest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 845C09962C11AAA100F725B3 /* RejectCallRequest.swift */; };
845E31062A7121D6004DC470 /* BroadcastObserver_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 845E31052A7121D6004DC470 /* BroadcastObserver_Tests.swift */; };
- 8464FBA92EB3832000933768 /* CallModerationBlurEvent.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8464FBA72EB3832000933768 /* CallModerationBlurEvent.swift */; };
- 8464FBAA2EB3832000933768 /* CallModerationWarningEvent.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8464FBA82EB3832000933768 /* CallModerationWarningEvent.swift */; };
8468821328DFA448003BA9EE /* UnsecureRepository.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8468821228DFA448003BA9EE /* UnsecureRepository.swift */; };
8469593229BB3D7500134EA0 /* SignalServer_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8469593129BB3D7500134EA0 /* SignalServer_Tests.swift */; };
8469593429BB5CE200134EA0 /* HTTPConfig.swift in Sources */ = {isa = PBXBuildFile; fileRef = 8469593329BB5CE200134EA0 /* HTTPConfig.swift */; };
@@ -1867,20 +1894,8 @@
40151F9D2E74466400326540 /* AudioProcessingStore+DefaultReducer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AudioProcessingStore+DefaultReducer.swift"; sourceTree = ""; };
40151F9F2E7446FC00326540 /* AudioProcessingStore+CapturedChannelsMiddleware.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AudioProcessingStore+CapturedChannelsMiddleware.swift"; sourceTree = ""; };
40151FA12E74481100326540 /* AudioProcessingStore+AudioFilterMiddleware.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "AudioProcessingStore+AudioFilterMiddleware.swift"; sourceTree = ""; };
- 4019A24F2E40E08B00CE70A4 /* RTCAudioStore.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioStore.swift; sourceTree = ""; };
- 4019A2532E40E25000CE70A4 /* RTCAudioStoreAction+Generic.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStoreAction+Generic.swift"; sourceTree = ""; };
- 4019A2562E40E27000CE70A4 /* RTCAudioStore+State.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStore+State.swift"; sourceTree = ""; };
- 4019A2592E40E2A600CE70A4 /* RTCAudioStoreAction.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioStoreAction.swift; sourceTree = ""; };
- 4019A25B2E40E2E800CE70A4 /* RTCAudioStoreReducer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioStoreReducer.swift; sourceTree = ""; };
- 4019A25D2E40E45D00CE70A4 /* RTCAudioSessionReducer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioSessionReducer.swift; sourceTree = ""; };
- 4019A2622E40EB6000CE70A4 /* RTCAudioStoreMiddleware.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCAudioStoreMiddleware.swift; sourceTree = ""; };
- 4019A2672E40ED5900CE70A4 /* RTCAudioStore+InterruptionEffect.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStore+InterruptionEffect.swift"; sourceTree = ""; };
4019A26C2E40F48300CE70A4 /* CallAudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallAudioSession.swift; sourceTree = ""; };
- 4019A26E2E40FC8F00CE70A4 /* RTCAudioStore+RouteChangeEffect.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStore+RouteChangeEffect.swift"; sourceTree = ""; };
- 4019A2772E42225800CE70A4 /* CallKitAudioSessionReducer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallKitAudioSessionReducer.swift; sourceTree = ""; };
4019A2792E42475300CE70A4 /* JoinSource.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = JoinSource.swift; sourceTree = ""; };
- 4019A27B2E43397100CE70A4 /* RTCAudioStoreAction+AudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStoreAction+AudioSession.swift"; sourceTree = ""; };
- 4019A27D2E43398C00CE70A4 /* RTCAudioStoreAction+CallKit.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStoreAction+CallKit.swift"; sourceTree = ""; };
4019A27F2E43529000CE70A4 /* AudioSessionProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioSessionProtocol.swift; sourceTree = ""; };
4019A2822E4352DF00CE70A4 /* RTCAudioSession+AudioSessionProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioSession+AudioSessionProtocol.swift"; sourceTree = ""; };
4019A2862E43565A00CE70A4 /* MockAudioSession.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockAudioSession.swift; sourceTree = ""; };
@@ -1914,7 +1929,6 @@
40245F642BE27B2000FCF075 /* StatelessAudioOutputIconView_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StatelessAudioOutputIconView_Tests.swift; sourceTree = "<group>"; };
40245F662BE27B8400FCF075 /* StatelessSpeakerIconView_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StatelessSpeakerIconView_Tests.swift; sourceTree = "<group>"; };
40245F682BE27CCB00FCF075 /* StatelessParticipantsListButton_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StatelessParticipantsListButton_Tests.swift; sourceTree = "<group>"; };
- 4026BEE92EA79FD400360AD0 /* CallFlow_PerformanceTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallFlow_PerformanceTests.swift; sourceTree = "<group>"; };
402778822BD13C62002F4399 /* NoiseCancellationFilter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = NoiseCancellationFilter.swift; sourceTree = "<group>"; };
4028FE972DC4F638001F9DC3 /* ConsumableBucket.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConsumableBucket.swift; sourceTree = "<group>"; };
4028FE992DC4FC8E001F9DC3 /* ConsumableBucketItemTransformer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ConsumableBucketItemTransformer.swift; sourceTree = "<group>"; };
@@ -1939,12 +1953,12 @@
402C2A402DCE184400E60006 /* Combine+Sendable.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Combine+Sendable.swift"; sourceTree = "<group>"; };
402C545A2B6BE50500672BFB /* MockStreamStatistics.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockStreamStatistics.swift; sourceTree = "<group>"; };
402C545C2B6BE5E200672BFB /* StreamCallStatisticsFormatter_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamCallStatisticsFormatter_Tests.swift; sourceTree = "<group>"; };
+ 402C5C5E2ECB3C2D0096F212 /* LivestreamAudioSessionPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LivestreamAudioSessionPolicy.swift; sourceTree = "<group>"; };
+ 402C5C602ECB96D30096F212 /* AVAudioSessionObserver.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AVAudioSessionObserver.swift; sourceTree = "<group>"; };
+ 402C5C622ECB988A0096F212 /* RTCAudioStore+AVAudioSessionEffect.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; name = "RTCAudioStore+AVAudioSessionEffect.swift"; path = "Sources/StreamVideo/Utils/AudioSession/RTCAudioStore/Components/RTCAudioStore+AVAudioSessionEffect.swift"; sourceTree = SOURCE_ROOT; };
402D0E872D0C94CD00E9B83F /* RTCAudioTrack+Clone.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioTrack+Clone.swift"; sourceTree = "<group>"; };
402D0E892D0C94E600E9B83F /* RTCVideoTrack+Clone.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCVideoTrack+Clone.swift"; sourceTree = "<group>"; };
402D0E8B2D0C94F900E9B83F /* CallSettings+Audio.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CallSettings+Audio.swift"; sourceTree = "<group>"; };
- 402E699E2EA65FF90082F7FA /* BatteryStore_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BatteryStore_Tests.swift; sourceTree = "<group>"; };
- 402E699F2EA65FF90082F7FA /* BatteryStoreDefaultReducer_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BatteryStoreDefaultReducer_Tests.swift; sourceTree = "<group>"; };
- 402E69A02EA65FF90082F7FA /* BatteryStoreObservationMiddleware_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BatteryStoreObservationMiddleware_Tests.swift; sourceTree = "<group>"; };
402EE12F2AA8861B00312632 /* DemoChatViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DemoChatViewModel.swift; sourceTree = "<group>"; };
402F04A62B70ED8600CA1986 /* StreamCallStatisticsReporter.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = StreamCallStatisticsReporter.swift; sourceTree = "<group>"; };
402F04A72B70ED8600CA1986 /* Statistics+Convenience.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "Statistics+Convenience.swift"; sourceTree = "<group>"; };
@@ -1971,6 +1985,8 @@
40382F412C89CF9300C2D00F /* Stream_Video_Sfu_Models_ConnectionQuality+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Stream_Video_Sfu_Models_ConnectionQuality+Convenience.swift"; sourceTree = "<group>"; };
40382F442C89D00200C2D00F /* Stream_Video_Sfu_Models_Participant+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Stream_Video_Sfu_Models_Participant+Convenience.swift"; sourceTree = "<group>"; };
40382F4F2C8B3DA800C2D00F /* StreamRTCPeerConnection.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamRTCPeerConnection.swift; sourceTree = "<group>"; };
+ 4039088C2EC2311A00B19FA1 /* StoreEffect.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StoreEffect.swift; sourceTree = "<group>"; };
+ 403908AB2EC2A99C00B19FA1 /* RTCAudioStore+StereoPlayoutEffect.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCAudioStore+StereoPlayoutEffect.swift"; sourceTree = "<group>"; };
4039F0BF2D0099E40078159E /* RTCRtpCodecCapability+Convenience.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "RTCRtpCodecCapability+Convenience.swift"; sourceTree = "<group>"; };
4039F0C92D0222E40078159E /* Stream_Video_Sfu_Models_PublishOption+VideoLayers.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Stream_Video_Sfu_Models_PublishOption+VideoLayers.swift"; sourceTree = "<group>"; };
4039F0CB2D0241120078159E /* AudioCodec.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioCodec.swift; sourceTree = "<group>"; };
@@ -2025,6 +2041,7 @@
404A81302DA3C5F0001F7FA8 /* MockDefaultAPI.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockDefaultAPI.swift; sourceTree = "<group>"; };
404A81352DA3CBF0001F7FA8 /* CallConfigurationTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallConfigurationTests.swift; sourceTree = "<group>"; };
404A81372DA3CC0C001F7FA8 /* CallConfiguration.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallConfiguration.swift; sourceTree = "<group>"; };
+ 404B546A2ED06D8C009378F2 /* RetriableTask.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RetriableTask.swift; sourceTree = "<group>"; };
4050725F2E5F49D5003D2109 /* CallKitMissingPermissionPolicy.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallKitMissingPermissionPolicy.swift; sourceTree = "<group>"; };
405072612E5F4CCA003D2109 /* CallKitMissingPermissionPolicyProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallKitMissingPermissionPolicyProtocol.swift; sourceTree = "<group>"; };
405072642E5F4CDD003D2109 /* CallKitMissingPermissionPolicy+NoOp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "CallKitMissingPermissionPolicy+NoOp.swift"; sourceTree = "<group>"; };
@@ -2190,7 +2207,6 @@
4097B3822BF4E37B0057992D /* OnChangeViewModifier_iOS13.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = OnChangeViewModifier_iOS13.swift; sourceTree = "<group>"; };
40986C392CCB6D2F00510F88 /* RTCRtpEncodingParameters_Test.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCRtpEncodingParameters_Test.swift; sourceTree = "<group>"; };
40986C3B2CCB6E4B00510F88 /* RTCRtpTransceiverInit_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTCRtpTransceiverInit_Tests.swift; sourceTree = "<group>"; };
- 40986C3D2CD1148F00510F88 /* AudioSession_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioSession_Tests.swift; sourceTree = "<group>"; };
409AF6E52DAFAC4700EE7BF6 /* PictureInPictureReconnectionView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PictureInPictureReconnectionView.swift; sourceTree = "<group>"; };
409AF6E72DAFC80200EE7BF6 /* PictureInPictureContent.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PictureInPictureContent.swift; sourceTree = "<group>"; };
409AF6E92DAFE1B000EE7BF6 /* PictureInPictureContentProviderTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PictureInPictureContentProviderTests.swift; sourceTree = "<group>"; };
@@ -2207,15 +2223,7 @@
40A0E95F2B88ABC80089E8D3 /* DemoBackgroundEffectSelector.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DemoBackgroundEffectSelector.swift; sourceTree = "<group>"; };
40A0E9612B88D3DC0089E8D3 /* UIInterfaceOrientation+CGOrientation.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "UIInterfaceOrientation+CGOrientation.swift"; sourceTree = "<group>"; };
40A0E9672B88E04D0089E8D3 /* CIImage_Resize_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CIImage_Resize_Tests.swift; sourceTree = "<group>"; };
- 40A0FFB02EA63CB900F39D8F /* BatteryStore.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BatteryStore.swift; sourceTree = "<group>"; };
- 40A0FFB32EA63D3C00F39D8F /* BatteryStore+Namespace.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "BatteryStore+Namespace.swift"; sourceTree = "<group>"; };
- 40A0FFB52EA63D8F00F39D8F /* BatteryStore+State.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "BatteryStore+State.swift"; sourceTree = "<group>"; };
- 40A0FFB72EA63D9700F39D8F /* BatteryStore+Action.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "BatteryStore+Action.swift"; sourceTree = "<group>"; };
- 40A0FFBA2EA63E9A00F39D8F /* BatteryStore+DefaultReducer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "BatteryStore+DefaultReducer.swift"; sourceTree = "<group>"; };
- 40A0FFBD2EA63FE500F39D8F /* BatteryStore+ObservationMiddleware.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "BatteryStore+ObservationMiddleware.swift"; sourceTree = "<group>"; };
40A0FFBF2EA6418000F39D8F /* Sequence+AsyncReduce.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Sequence+AsyncReduce.swift"; sourceTree = "<group>"; };
- 40A317E72EB504C900733948 /* ModerationBlurViewModifier.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ModerationBlurViewModifier.swift; sourceTree = "<group>"; };
- 40A317EA2EB5081500733948 /* ModerationWarningViewModifier.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ModerationWarningViewModifier.swift; sourceTree = "<group>"; };
40A7C5B22E099B1000EEDF9C /* ParticipantEventResetAdapter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ParticipantEventResetAdapter.swift; sourceTree = "<group>"; };
40A7C5B72E099D6200EEDF9C /* ParticipantEventResetAdapter_Tests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ParticipantEventResetAdapter_Tests.swift; sourceTree = "<group>"; };
40A9416D2B4D959F006D6965 /* StreamPictureInPictureAdapter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamPictureInPictureAdapter.swift; sourceTree = "<group>"; };
@@ -2331,6 +2339,21 @@
40B575CF2DCCEBA900F489B8 /* PictureInPictureEnforcedStopAdapterTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PictureInPictureEnforcedStopAdapterTests.swift; sourceTree = "<group>"; };
40B575D22DCCECDA00F489B8 /* MockAVPictureInPictureController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MockAVPictureInPictureController.swift; sourceTree = "<group>"; };
40B575D52DCCEFB500F489B8 /* StreamPictureInPictureControllerProtocol.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamPictureInPictureControllerProtocol.swift; sourceTree = "<group>"; };
+ 40B8FFAC2EC3949F0061E3F6 /* BatteryStore+ObservationMiddleware.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "BatteryStore+ObservationMiddleware.swift"; sourceTree = "<group>"; };
+ 40B8FFAE2EC3949F0061E3F6 /* BatteryStore+DefaultReducer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "BatteryStore+DefaultReducer.swift"; sourceTree = "<group>"; };
+ 40B8FFB02EC3949F0061E3F6 /* BatteryStore+Action.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "BatteryStore+Action.swift"; sourceTree = "<group>"; };
+ 40B8FFB12EC3949F0061E3F6 /* BatteryStore+Namespace.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "BatteryStore+Namespace.swift"; sourceTree = "<group>"; };
+ 40B8FFB22EC3949F0061E3F6 /* BatteryStore+State.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "BatteryStore+State.swift"; sourceTree = "<group>"; };
+ 40B8FFB42EC3949F0061E3F6 /* BatteryStore.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BatteryStore.swift; sourceTree = "<group>"; };
+ 40B8FFBC2EC394AA0061E3F6 /* CallModerationBlurEvent.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallModerationBlurEvent.swift; sourceTree = "<group>"; };
+ 40B8FFBD2EC394AA0061E3F6 /* CallModerationWarningEvent.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CallModerationWarningEvent.swift; sourceTree = "<group>"; };
+ 40B8FFBE2EC394AA0061E3F6 /* RingCallRequest.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RingCallRequest.swift; sourceTree = "<group>"; };