Skip to content

Commit 017ea2d

Browse files
authored
Fixes audio renderer adapter (#807)
#784 broke `LocalAudioTrack.add(audioRenderer: AudioRenderer)`; this PR restores that behavior.
1 parent 70d3dde commit 017ea2d

File tree

5 files changed

+105
-53
lines changed

5 files changed

+105
-53
lines changed

Sources/LiveKit/Audio/Manager/AudioManager.swift

Lines changed: 13 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -117,9 +117,17 @@ public class AudioManager: Loggable {
117117

118118
// MARK: - AudioProcessingModule
119119

120-
private lazy var capturePostProcessingDelegateAdapter = AudioCustomProcessingDelegateAdapter(label: "capturePost")
121-
122-
private lazy var renderPreProcessingDelegateAdapter = AudioCustomProcessingDelegateAdapter(label: "renderPre")
120+
private lazy var capturePostProcessingDelegateAdapter = AudioCustomProcessingDelegateAdapter(
121+
label: "capturePost",
122+
rtcDelegateGetter: { RTC.audioProcessingModule.capturePostProcessingDelegate },
123+
rtcDelegateSetter: { RTC.audioProcessingModule.capturePostProcessingDelegate = $0 }
124+
)
125+
126+
private lazy var renderPreProcessingDelegateAdapter = AudioCustomProcessingDelegateAdapter(
127+
label: "renderPre",
128+
rtcDelegateGetter: { RTC.audioProcessingModule.renderPreProcessingDelegate },
129+
rtcDelegateSetter: { RTC.audioProcessingModule.renderPreProcessingDelegate = $0 }
130+
)
123131

124132
let capturePostProcessingDelegateSubject = CurrentValueSubject<AudioCustomProcessingDelegate?, Never>(nil)
125133

@@ -128,15 +136,7 @@ public class AudioManager: Loggable {
128136
/// - Note: If you only need to observe the buffer (rather than modify it), use ``add(localAudioRenderer:)`` instead
129137
public var capturePostProcessingDelegate: AudioCustomProcessingDelegate? {
130138
didSet {
131-
if let capturePostProcessingDelegate {
132-
// Clear WebRTC delegate first - this triggers audioProcessingRelease() on the old target
133-
RTC.audioProcessingModule.capturePostProcessingDelegate = nil
134-
capturePostProcessingDelegateAdapter.set(target: capturePostProcessingDelegate)
135-
RTC.audioProcessingModule.capturePostProcessingDelegate = capturePostProcessingDelegateAdapter
136-
} else {
137-
RTC.audioProcessingModule.capturePostProcessingDelegate = nil
138-
capturePostProcessingDelegateAdapter.set(target: nil)
139-
}
139+
capturePostProcessingDelegateAdapter.set(target: capturePostProcessingDelegate, oldTarget: oldValue)
140140
capturePostProcessingDelegateSubject.send(capturePostProcessingDelegate)
141141
}
142142
}
@@ -147,15 +147,7 @@ public class AudioManager: Loggable {
147147
/// - Note: If you need to observe the buffer for individual tracks, use ``RemoteAudioTrack/add(audioRenderer:)`` instead
148148
public var renderPreProcessingDelegate: AudioCustomProcessingDelegate? {
149149
didSet {
150-
if let renderPreProcessingDelegate {
151-
// Clear WebRTC delegate first - this triggers release() on the old target
152-
RTC.audioProcessingModule.renderPreProcessingDelegate = nil
153-
renderPreProcessingDelegateAdapter.set(target: renderPreProcessingDelegate)
154-
RTC.audioProcessingModule.renderPreProcessingDelegate = renderPreProcessingDelegateAdapter
155-
} else {
156-
RTC.audioProcessingModule.renderPreProcessingDelegate = nil
157-
renderPreProcessingDelegateAdapter.set(target: nil)
158-
}
150+
renderPreProcessingDelegateAdapter.set(target: renderPreProcessingDelegate, oldTarget: oldValue)
159151
}
160152
}
161153

Sources/LiveKit/Protocols/AudioCustomProcessingDelegate.swift

Lines changed: 39 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -57,16 +57,53 @@ class AudioCustomProcessingDelegateAdapter: MulticastDelegate<AudioRenderer>, @u
5757

5858
private var _state = StateSync(State())
5959

60-
func set(target: AudioCustomProcessingDelegate?) {
60+
private let rtcDelegateGetter: () -> LKRTCAudioCustomProcessingDelegate?
61+
private let rtcDelegateSetter: (LKRTCAudioCustomProcessingDelegate?) -> Void
62+
63+
func set(target: AudioCustomProcessingDelegate?, oldTarget: AudioCustomProcessingDelegate? = nil) {
64+
// Clear WebRTC delegate first if there's an old target - this triggers audioProcessingRelease() on it
65+
if oldTarget != nil {
66+
rtcDelegateSetter(nil)
67+
}
6168
_state.mutate { $0.target = target }
69+
updateRTCConnection()
6270
}
6371

64-
init(label: String) {
72+
init(label: String,
73+
rtcDelegateGetter: @escaping () -> LKRTCAudioCustomProcessingDelegate?,
74+
rtcDelegateSetter: @escaping (LKRTCAudioCustomProcessingDelegate?) -> Void)
75+
{
6576
self.label = label
77+
self.rtcDelegateGetter = rtcDelegateGetter
78+
self.rtcDelegateSetter = rtcDelegateSetter
6679
super.init(label: "AudioCustomProcessingDelegateAdapter.\(label)")
6780
log("label: \(label)")
6881
}
6982

83+
// Override add/remove to manage RTC connection
84+
override func add(delegate: AudioRenderer) {
85+
super.add(delegate: delegate)
86+
updateRTCConnection()
87+
}
88+
89+
override func remove(delegate: AudioRenderer) {
90+
super.remove(delegate: delegate)
91+
updateRTCConnection()
92+
}
93+
94+
private func updateRTCConnection() {
95+
let shouldBeConnected = target != nil || isDelegatesNotEmpty
96+
let isConnected = rtcDelegateGetter() === self
97+
98+
if shouldBeConnected, !isConnected {
99+
// Connect
100+
rtcDelegateSetter(self)
101+
} else if !shouldBeConnected, isConnected {
102+
// Disconnect
103+
rtcDelegateSetter(nil)
104+
}
105+
}
106+
70107
// MARK: - AudioCustomProcessingDelegate
71108

72109
func audioProcessingInitialize(sampleRate sampleRateHz: Int, channels: Int) {

Tests/LiveKitAudioTests/AudioProcessingLifecycle.swift

Lines changed: 51 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -66,11 +66,7 @@ class AudioProcessingLifecycle: LKTestCase {
6666
let room1 = rooms[0]
6767
// Publish mic
6868
try await room1.localParticipant.setMicrophone(enabled: true)
69-
do {
70-
// 1 secs...
71-
let ns = UInt64(1 * 1_000_000_000)
72-
try await Task.sleep(nanoseconds: ns)
73-
}
69+
await self.sleep(forSeconds: 1)
7470

7571
// Verify processorA was initialized and received audio
7672
let stateA = processorA._state.copy()
@@ -79,11 +75,7 @@ class AudioProcessingLifecycle: LKTestCase {
7975

8076
// Switch to processorB
8177
AudioManager.shared.capturePostProcessingDelegate = processorB
82-
do {
83-
// 1 secs...
84-
let ns = UInt64(1 * 1_000_000_000)
85-
try await Task.sleep(nanoseconds: ns)
86-
}
78+
await self.sleep(forSeconds: 1)
8779

8880
// Verify processorA was released
8981
let stateA2 = processorA._state.copy()
@@ -102,4 +94,53 @@ class AudioProcessingLifecycle: LKTestCase {
10294
let stateB2 = processorB._state.copy()
10395
XCTAssertTrue(stateB2.entries.contains(.release), "Processor B should have been released")
10496
}
97+
98+
func testLocalAudioTrackRendererAPI() async throws {
99+
try await withRooms([RoomTestingOptions(canPublish: true)]) { rooms in
100+
let room1 = rooms[0]
101+
102+
// Create a test renderer
103+
let renderer = TestAudioRenderer()
104+
105+
// Publish microphone
106+
try await room1.localParticipant.setMicrophone(enabled: true)
107+
108+
// Get the local audio track
109+
guard let localAudioTrack = room1.localParticipant.audioTracks.first?.track as? LocalAudioTrack else {
110+
XCTFail("No local audio track found")
111+
return
112+
}
113+
114+
// Add renderer via LocalAudioTrack extension method
115+
localAudioTrack.add(audioRenderer: renderer)
116+
117+
// Wait for audio to flow
118+
await self.sleep(forSeconds: 1)
119+
120+
// Verify renderer received audio
121+
let count = renderer.renderCount.copy()
122+
XCTAssertGreaterThan(count, 0, "Renderer should have received audio buffers via LocalAudioTrack.add()")
123+
124+
// Remove renderer
125+
localAudioTrack.remove(audioRenderer: renderer)
126+
127+
// Reset count
128+
renderer.renderCount.mutate { $0 = 0 }
129+
130+
// Wait a bit
131+
await self.sleep(forSeconds: 1)
132+
133+
// Verify no more audio is received
134+
let countAfterRemove = renderer.renderCount.copy()
135+
XCTAssertEqual(countAfterRemove, 0, "Renderer should not receive audio after removal")
136+
}
137+
}
138+
}
139+
140+
private class TestAudioRenderer: AudioRenderer, @unchecked Sendable {
141+
let renderCount = StateSync<Int>(0)
142+
143+
func render(pcmBuffer _: AVAudioPCMBuffer) {
144+
renderCount.mutate { $0 += 1 }
145+
}
105146
}

Tests/LiveKitAudioTests/LocalAudioTrackRecorderTests.swift

Lines changed: 0 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -165,26 +165,6 @@ class LocalAudioTrackRecorderTests: LKTestCase {
165165
XCTAssertGreaterThan(dataCount2, 0, "Should have received audio data from recorder2")
166166
}
167167

168-
func testStartingTwice() async throws {
169-
let localTrack = LocalAudioTrack.createTrack(options: .noProcessing)
170-
171-
let recorder = LocalAudioTrackRecorder(
172-
track: localTrack,
173-
format: .pcmFormatInt16,
174-
sampleRate: 48000
175-
)
176-
177-
for await _ in try await recorder.start().prefix(10) {
178-
// swiftformat:disable hoistAwait
179-
await XCTAssertThrowsErrorAsync(try await recorder.start())
180-
recorder.stop()
181-
}
182-
183-
_ = try await recorder.start()
184-
185-
recorder.stop()
186-
}
187-
188168
func testObjCCompatibility() async throws {
189169
let localTrack = LocalAudioTrack.createTrack(options: .noProcessing)
190170

Tests/LiveKitCoreTests/Token/TokenSourceTests.swift

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,9 @@
1515
*/
1616

1717
@testable import LiveKit
18+
#if canImport(LiveKitTestSupport)
1819
import LiveKitTestSupport
20+
#endif
1921

2022
class TokenSourceTests: LKTestCase {
2123
actor MockValidJWTSource: TokenSourceConfigurable {

0 commit comments

Comments
 (0)