Skip to content

Commit 4a44ee9

Browse files
committed
Merge remote-tracking branch 'upstream/main' into broadcast-ipc
2 parents 3f13b29 + 76b6deb commit 4a44ee9

File tree

11 files changed

+269
-45
lines changed

LiveKitClient.podspec

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
Pod::Spec.new do |spec|
22
spec.name = "LiveKitClient"
3-
spec.version = "2.1.0"
3+
spec.version = "2.1.1"
44
spec.summary = "LiveKit Swift Client SDK. Easily build live audio or video experiences into your mobile app, game or website."
55
spec.homepage = "https://github.com/livekit/client-sdk-swift"
66
spec.license = {:type => "Apache 2.0", :file => "LICENSE"}
@@ -10,7 +10,7 @@ Pod::Spec.new do |spec|
1010
spec.osx.deployment_target = "10.15"
1111

1212
spec.swift_versions = ["5.7"]
13-
spec.source = {:git => "https://github.com/livekit/client-sdk-swift.git", :tag => "2.1.0"}
13+
spec.source = {:git => "https://github.com/livekit/client-sdk-swift.git", :tag => "2.1.1"}
1414

1515
spec.source_files = "Sources/**/*"
1616

Package.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ let package = Package(
1818
],
1919
dependencies: [
2020
// LK-Prefixed Dynamic WebRTC XCFramework
21-
.package(url: "https://github.com/livekit/webrtc-xcframework.git", exact: "125.6422.16"),
21+
.package(url: "https://github.com/livekit/webrtc-xcframework.git", exact: "125.6422.18"),
2222
.package(url: "https://github.com/apple/swift-protobuf.git", from: "1.26.0"),
2323
.package(url: "https://github.com/apple/swift-log.git", from: "1.5.4"),
2424
// Only used for DocC generation

[email protected]

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ let package = Package(
2020
],
2121
dependencies: [
2222
// LK-Prefixed Dynamic WebRTC XCFramework
23-
.package(url: "https://github.com/livekit/webrtc-xcframework.git", exact: "125.6422.16"),
23+
.package(url: "https://github.com/livekit/webrtc-xcframework.git", exact: "125.6422.18"),
2424
.package(url: "https://github.com/apple/swift-protobuf.git", from: "1.26.0"),
2525
.package(url: "https://github.com/apple/swift-log.git", from: "1.5.4"),
2626
// Only used for DocC generation

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ Add the dependency and also to your target
3838
let package = Package(
3939
...
4040
dependencies: [
41-
.package(name: "LiveKit", url: "https://github.com/livekit/client-sdk-swift.git", .upToNextMajor("2.1.0")),
41+
.package(name: "LiveKit", url: "https://github.com/livekit/client-sdk-swift.git", .upToNextMajor("2.1.1")),
4242
],
4343
targets: [
4444
.target(

Sources/LiveKit/Core/RPC.swift

Lines changed: 11 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -22,19 +22,19 @@ import Foundation
2222
/// serialized and sent across the wire. The sender will receive an equivalent error on the other side.
2323
///
2424
/// Built-in types are included but developers may use any message string, with a max length of 256 bytes.
25-
struct RpcError: Error {
25+
public struct RpcError: Error {
2626
/// The error code of the RPC call. Error codes 1001-1999 are reserved for built-in errors.
2727
///
2828
/// See `RpcError.BuiltInError` for built-in error information.
29-
let code: Int
29+
public let code: Int
3030

3131
/// A message to include. Strings over 256 bytes will be truncated.
32-
let message: String
32+
public let message: String
3333

3434
/// An optional data payload. Must be smaller than 15KB in size, or else will be truncated.
35-
let data: String
35+
public let data: String
3636

37-
enum BuiltInError {
37+
public enum BuiltInError {
3838
case applicationError
3939
case connectionTimeout
4040
case responseTimeout
@@ -47,7 +47,7 @@ struct RpcError: Error {
4747
case unsupportedServer
4848
case unsupportedVersion
4949

50-
var code: Int {
50+
public var code: Int {
5151
switch self {
5252
case .applicationError: return 1500
5353
case .connectionTimeout: return 1501
@@ -63,7 +63,7 @@ struct RpcError: Error {
6363
}
6464
}
6565

66-
var message: String {
66+
public var message: String {
6767
switch self {
6868
case .applicationError: return "Application error in method handler"
6969
case .connectionTimeout: return "Connection timeout"
@@ -124,16 +124,16 @@ public typealias RpcHandler = (RpcInvocationData) async throws -> String
124124

125125
public struct RpcInvocationData {
126126
/// A unique identifier for this RPC request
127-
let requestId: String
127+
public let requestId: String
128128

129129
/// The identity of the RemoteParticipant who initiated the RPC call
130-
let callerIdentity: Participant.Identity
130+
public let callerIdentity: Participant.Identity
131131

132132
/// The data sent by the caller (as a string)
133-
let payload: String
133+
public let payload: String
134134

135135
/// The maximum time available to return a response
136-
let responseTimeout: TimeInterval
136+
public let responseTimeout: TimeInterval
137137
}
138138

139139
struct PendingRpcResponse {

Sources/LiveKit/LiveKit.swift

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@ let logger = Logger(label: "LiveKitSDK")
3939
@objc
4040
public class LiveKitSDK: NSObject {
4141
@objc(sdkVersion)
42-
public static let version = "2.1.0"
42+
public static let version = "2.1.1"
4343

4444
@objc
4545
public static func setLoggerStandardOutput() {

Sources/LiveKit/Track/AudioManager.swift

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -241,6 +241,9 @@ public class AudioManager: Loggable {
241241
/// Starts mic input to the SDK even without any ``Room`` or a connection.
242242
/// Audio buffers will flow into ``LocalAudioTrack/add(audioRenderer:)`` and ``capturePostProcessingDelegate``.
243243
public func startLocalRecording() {
244+
// Always unmute APM if muted by last session.
245+
RTC.audioProcessingModule.isMuted = false
246+
// Start recording on the ADM.
244247
RTC.audioDeviceModule.initAndStartRecording()
245248
}
246249

@@ -255,8 +258,33 @@ public class AudioManager: Loggable {
255258
_state.mutate { $0.engineObservers = engineObservers }
256259
}
257260

261+
/// Set to `true` to enable legacy mic mute mode.
262+
///
263+
/// - Default: Uses `AVAudioEngine`'s `isVoiceProcessingInputMuted` internally.
264+
/// This is fast, and muted speaker detection works. However, iOS will play a sound effect.
265+
/// - Legacy: Restarts the internal `AVAudioEngine` without mic input when muted.
266+
/// This is slower, and muted speaker detection does not work. No sound effect is played.
267+
public var isLegacyMuteMode: Bool {
268+
get { RTC.audioDeviceModule.muteMode == .restartEngine }
269+
set { RTC.audioDeviceModule.muteMode = newValue ? .restartEngine : .voiceProcessing }
270+
}
271+
258272
// MARK: - For testing
259273

274+
var isEngineRunning: Bool {
275+
RTC.audioDeviceModule.isEngineRunning
276+
}
277+
278+
var isMicrophoneMuted: Bool {
279+
get { RTC.audioDeviceModule.isMicrophoneMuted }
280+
set { RTC.audioDeviceModule.isMicrophoneMuted = newValue }
281+
}
282+
283+
var engineState: RTCAudioEngineState {
284+
get { RTC.audioDeviceModule.engineState }
285+
set { RTC.audioDeviceModule.engineState = newValue }
286+
}
287+
260288
var isPlayoutInitialized: Bool {
261289
RTC.audioDeviceModule.isPlayoutInitialized
262290
}

Tests/LiveKitTests/AudioEngineTests.swift

Lines changed: 48 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -106,32 +106,55 @@ class AudioEngineTests: XCTestCase {
106106
}
107107

108108
// Test start generating local audio buffer without joining to room.
109-
func testPrejoinLocalAudioBuffer() async throws {
110-
// Set up expectation...
111-
let didReceiveAudioFrame = expectation(description: "Did receive audio frame")
112-
didReceiveAudioFrame.assertForOverFulfill = false
113-
114-
// Start watching for audio frame...
115-
let audioFrameWatcher = AudioTrackWatcher(id: "notifier01") { _ in
116-
didReceiveAudioFrame.fulfill()
109+
func testPreconnectAudioBuffer() async throws {
110+
print("Setting recording always prepared mode...")
111+
AudioManager.shared.isRecordingAlwaysPrepared = true
112+
113+
var counter = 0
114+
// Executes 10 times by default.
115+
measure {
116+
counter += 1
117+
print("Measuring attempt \(counter)...")
118+
// Set up expectation...
119+
let didReceiveAudioFrame = expectation(description: "Did receive audio frame")
120+
didReceiveAudioFrame.assertForOverFulfill = false
121+
122+
let didConnectToRoom = expectation(description: "Did connect to room")
123+
didConnectToRoom.assertForOverFulfill = false
124+
125+
// Create an audio frame watcher...
126+
let audioFrameWatcher = AudioTrackWatcher(id: "notifier01") { _ in
127+
didReceiveAudioFrame.fulfill()
128+
}
129+
130+
let localMicTrack = LocalAudioTrack.createTrack()
131+
// Attach audio frame watcher...
132+
localMicTrack.add(audioRenderer: audioFrameWatcher)
133+
134+
Task.detached {
135+
print("Starting local recording...")
136+
AudioManager.shared.startLocalRecording()
137+
}
138+
139+
// Wait for audio frame...
140+
print("Waiting for first audio frame...")
141+
// await fulfillment(of: [didReceiveAudioFrame], timeout: 10)
142+
wait(for: [didReceiveAudioFrame], timeout: 30)
143+
144+
Task.detached {
145+
print("Connecting to room...")
146+
try await self.withRooms([RoomTestingOptions(canPublish: true)]) { rooms in
147+
print("Publishing mic...")
148+
try await rooms[0].localParticipant.setMicrophone(enabled: true)
149+
didConnectToRoom.fulfill()
150+
}
151+
}
152+
153+
print("Waiting for room to connect & disconnect...")
154+
wait(for: [didConnectToRoom], timeout: 30)
155+
156+
localMicTrack.remove(audioRenderer: audioFrameWatcher)
117157
}
118-
119-
let localMicTrack = LocalAudioTrack.createTrack()
120-
// Attach audio frame watcher...
121-
localMicTrack.add(audioRenderer: audioFrameWatcher)
122-
123-
Task.detached {
124-
print("Starting audio track in 3 seconds...")
125-
try? await Task.sleep(nanoseconds: 3 * 1_000_000_000)
126-
AudioManager.shared.startLocalRecording()
127-
}
128-
129-
// Wait for audio frame...
130-
print("Waiting for first audio frame...")
131-
await fulfillment(of: [didReceiveAudioFrame], timeout: 10)
132-
133-
// Remove audio frame watcher...
134-
localMicTrack.remove(audioRenderer: audioFrameWatcher)
135158
}
136159

137160
// Test the manual rendering mode (no-device mode) of AVAudioEngine based AudioDeviceModule.

0 commit comments

Comments (0)