Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

VPAU improvements #505

Draft
wants to merge 6 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion Package.swift
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ let package = Package(
],
dependencies: [
// LK-Prefixed Dynamic WebRTC XCFramework
.package(url: "https://github.com/livekit/webrtc-xcframework.git", exact: "125.6422.11"),
.package(url: "https://github.com/livekit/webrtc-xcframework.git", exact: "125.6422.11-exp.2"),
.package(url: "https://github.com/apple/swift-protobuf.git", from: "1.26.0"),
.package(url: "https://github.com/apple/swift-log.git", from: "1.5.4"),
// Only used for DocC generation
Expand Down
2 changes: 1 addition & 1 deletion [email protected]
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ let package = Package(
],
dependencies: [
// LK-Prefixed Dynamic WebRTC XCFramework
.package(url: "https://github.com/livekit/webrtc-xcframework.git", exact: "125.6422.11"),
.package(url: "https://github.com/livekit/webrtc-xcframework.git", exact: "125.6422.11-exp.2"),
.package(url: "https://github.com/apple/swift-protobuf.git", from: "1.26.0"),
.package(url: "https://github.com/apple/swift-log.git", from: "1.5.4"),
// Only used for DocC generation
Expand Down
120 changes: 41 additions & 79 deletions Sources/LiveKit/Track/AudioManager.swift
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,27 @@ public class LKAudioBuffer: NSObject {

// Audio Session Configuration related
public class AudioManager: Loggable {
#if os(iOS)
/// Observes callbacks from WebRTC's shared `LKRTCAudioSession` (iOS only).
/// This type only logs session lifecycle events and re-applies the desired
/// WebRTC session configuration right before the audio unit initializes.
/// NOTE(review): an instance appears to be added/removed on
/// `LKRTCAudioSession.sharedInstance()` elsewhere in this file — confirm
/// registration lifetime against `AudioManager.init`/`deinit`.
class AudioSessionDelegateObserver: NSObject, Loggable, LKRTCAudioSessionDelegate {
// Session started playing or recording; trace log only, no side effects.
func audioSessionDidStartPlayOrRecord(_: LKRTCAudioSession) {
log()
}

/// Invoked just before WebRTC initializes its audio unit.
/// Overwrites the global WebRTC session configuration so the audio unit
/// comes up with `.playAndRecord` / `.videoChat` and Bluetooth/AirPlay
/// routing options, regardless of `isRecord`.
func audioSession(_: LKRTCAudioSession, audioUnitWillInitialize isRecord: Bool) {
log("isRecord: \(isRecord)")
let config = LKRTCAudioSessionConfiguration.webRTC()
config.category = AVAudioSession.Category.playAndRecord.rawValue
config.categoryOptions = [.allowBluetooth, .allowBluetoothA2DP, .allowAirPlay]
config.mode = AVAudioSession.Mode.videoChat.rawValue
// Persist as the configuration WebRTC will use for its audio unit.
LKRTCAudioSessionConfiguration.setWebRTC(config)
}

// Session stopped playing or recording; trace log only, no side effects.
func audioSessionDidStopPlayOrRecord(_: LKRTCAudioSession) {
log()
}
}
#endif

// MARK: - Public

#if compiler(>=6.0)
Expand All @@ -68,6 +89,7 @@ public class AudioManager: Loggable {
#endif

public typealias DeviceUpdateFunc = (_ audioManager: AudioManager) -> Void
public typealias OnSpeechUpdate = (_ audioManager: AudioManager, _ event: Int) -> Void

#if os(iOS) || os(visionOS) || os(tvOS)

Expand Down Expand Up @@ -208,13 +230,22 @@ public class AudioManager: Loggable {

public var onDeviceUpdate: DeviceUpdateFunc? {
didSet {
RTC.audioDeviceModule.setDevicesUpdatedHandler { [weak self] in
RTC.audioDeviceModule.setDevicesDidUpdateCallback { [weak self] in
guard let self else { return }
self.onDeviceUpdate?(self)
}
}
}

/// Callback fired when the audio device module reports a speech-activity
/// event. Assigning this property (re-)registers the handler on
/// `RTC.audioDeviceModule`; the event's raw integer value is forwarded
/// unchanged to the caller-supplied closure.
/// NOTE(review): the mapping of `Int` values to speech-activity states is
/// defined by the ADM's event enum in the webrtc-xcframework — confirm there.
public var onSpeechEvent: OnSpeechUpdate? {
didSet {
// [weak self] avoids a retain cycle between the ADM callback and AudioManager.
RTC.audioDeviceModule.setSpeechActivityCallback { [weak self] event in
guard let self else { return }
self.onSpeechEvent?(self, event.rawValue)
}
}
}

// MARK: - Internal

enum `Type` {
Expand All @@ -226,98 +257,29 @@ public class AudioManager: Loggable {

// MARK: - Private

private let _configureRunner = SerialRunnerActor<Void>()

#if os(iOS) || os(visionOS) || os(tvOS)
private func _asyncConfigure(newState: State, oldState: State) async throws {
try await _configureRunner.run {
self.log("\(oldState) -> \(newState)")
let configureFunc = newState.customConfigureFunc ?? self.defaultConfigureAudioSessionFunc
configureFunc(newState, oldState)
}
}
#endif

func trackDidStart(_ type: Type) async throws {
let (newState, oldState) = state.mutate { state in
let oldState = state
state.mutate { state in
if type == .local { state.localTracksCount += 1 }
if type == .remote { state.remoteTracksCount += 1 }
return (state, oldState)
}
#if os(iOS) || os(visionOS) || os(tvOS)
try await _asyncConfigure(newState: newState, oldState: oldState)
#endif
}

func trackDidStop(_ type: Type) async throws {
let (newState, oldState) = state.mutate { state in
let oldState = state
state.mutate { state in
if type == .local { state.localTracksCount = max(state.localTracksCount - 1, 0) }
if type == .remote { state.remoteTracksCount = max(state.remoteTracksCount - 1, 0) }
return (state, oldState)
}
#if os(iOS) || os(visionOS) || os(tvOS)
try await _asyncConfigure(newState: newState, oldState: oldState)
#endif
}

#if os(iOS) || os(visionOS) || os(tvOS)
/// The default implementation when audio session configuration is requested by the SDK.
/// Configure the `RTCAudioSession` of `WebRTC` framework.
///
/// > Note: It is recommended to use `RTCAudioSessionConfiguration.webRTC()` to obtain an instance of `RTCAudioSessionConfiguration` instead of instantiating directly.
///
/// - Parameters:
/// - configuration: A configured RTCAudioSessionConfiguration
/// - setActive: passing true/false will call `AVAudioSession.setActive` internally
public func defaultConfigureAudioSessionFunc(newState: State, oldState: State) {
// Lazily computed config
let computeConfiguration: (() -> AudioSessionConfiguration) = {
switch newState.trackState {
case .none:
// Use .soloAmbient configuration
return .soloAmbient
case .remoteOnly where newState.isSpeakerOutputPreferred:
// Use .playback configuration with spoken audio
return .playback
default:
// Use .playAndRecord configuration
return newState.isSpeakerOutputPreferred ? .playAndRecordSpeaker : .playAndRecordReceiver
}
}

let configuration = newState.sessionConfiguration ?? computeConfiguration()
#if os(iOS)
let _audioSessionDelegateObserver = AudioSessionDelegateObserver()

var setActive: Bool?
if newState.trackState != .none, oldState.trackState == .none {
// activate audio session when there is any local/remote audio track
setActive = true
} else if newState.trackState == .none, oldState.trackState != .none {
// deactivate audio session when there are no more local/remote audio tracks
setActive = false
}

let session = LKRTCAudioSession.sharedInstance()
// Check if needs setConfiguration
guard configuration != session.toAudioSessionConfiguration() else {
log("Skipping configure audio session, no changes")
return
}

session.lockForConfiguration()
defer { session.unlockForConfiguration() }
init() {
LKRTCAudioSession.sharedInstance().add(_audioSessionDelegateObserver)
}

do {
log("Configuring audio session: \(String(describing: configuration))")
if let setActive {
try session.setConfiguration(configuration.toRTCType(), active: setActive)
} else {
try session.setConfiguration(configuration.toRTCType())
}
} catch {
log("Failed to configure audio session with error: \(error)", .error)
}
deinit {
LKRTCAudioSession.sharedInstance().remove(_audioSessionDelegateObserver)
}
#endif
}
Expand Down
Loading