AVAudioEngine version #526

Draft
wants to merge 5 commits into base: main
Changes from 4 commits
2 changes: 1 addition & 1 deletion Package.swift
@@ -18,7 +18,7 @@ let package = Package(
     ],
     dependencies: [
         // LK-Prefixed Dynamic WebRTC XCFramework
-        .package(url: "https://github.com/livekit/webrtc-xcframework.git", exact: "125.6422.11"),
+        .package(url: "https://github.com/livekit/webrtc-xcframework.git", exact: "125.6422.12-exp.1"),
         .package(url: "https://github.com/apple/swift-protobuf.git", from: "1.26.0"),
         .package(url: "https://github.com/apple/swift-log.git", from: "1.5.4"),
         // Only used for DocC generation
2 changes: 1 addition & 1 deletion [email protected]
@@ -20,7 +20,7 @@ let package = Package(
     ],
     dependencies: [
         // LK-Prefixed Dynamic WebRTC XCFramework
-        .package(url: "https://github.com/livekit/webrtc-xcframework.git", exact: "125.6422.11"),
+        .package(url: "https://github.com/livekit/webrtc-xcframework.git", exact: "125.6422.12-exp.1"),
         .package(url: "https://github.com/apple/swift-protobuf.git", from: "1.26.0"),
         .package(url: "https://github.com/apple/swift-log.git", from: "1.5.4"),
         // Only used for DocC generation
2 changes: 2 additions & 0 deletions Sources/LiveKit/Core/RTC.swift
@@ -83,6 +83,8 @@ class RTC {
     static let audioSenderCapabilities = peerConnectionFactory.rtpSenderCapabilities(forKind: kRTCMediaStreamTrackKindAudio)

     static let peerConnectionFactory: LKRTCPeerConnectionFactory = {
+        RTCSetMinDebugLogLevel(.verbose)
+
         logger.log("Initializing SSL...", type: Room.self)

         RTCInitializeSSL()
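
Side note: the added RTCSetMinDebugLogLevel(.verbose) call turns on full native WebRTC logging. A minimal sketch of how this could be gated to debug builds — not part of this PR, and assuming the LK-prefixed framework is imported as LiveKitWebRTC and exposes the standard RTCSetMinDebugLogLevel(_:) API:

import LiveKitWebRTC

// Hypothetical sketch: keep verbose native WebRTC logs out of release builds.
func configureNativeWebRTCLogging() {
    #if DEBUG
    RTCSetMinDebugLogLevel(.verbose) // full native logs while debugging the AVAudioEngine path
    #else
    RTCSetMinDebugLogLevel(.warning) // warnings and errors only in release builds
    #endif
}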
119 changes: 40 additions & 79 deletions Sources/LiveKit/Track/AudioManager.swift
@@ -59,6 +59,29 @@ public class LKAudioBuffer: NSObject {

 // Audio Session Configuration related
 public class AudioManager: Loggable {
+    #if os(iOS)
+    class AudioSessionDelegateObserver: NSObject, Loggable, LKRTCAudioSessionDelegate {
+        func audioSessionDidStartPlayOrRecord(_: LKRTCAudioSession) {
+            log()
+        }
+
+        func audioSession(_: LKRTCAudioSession, audioEngineWillUpdateStateWithOutputEnabled isOutputEnabled: Bool, inputEnabled isInputEnabled: Bool) {
+            log("isOutputEnabled: \(isOutputEnabled), isInputEnabled: \(isInputEnabled)")
+
+            // Configure audio session
+            let config = LKRTCAudioSessionConfiguration.webRTC()
+            config.category = AVAudioSession.Category.playAndRecord.rawValue
+            config.categoryOptions = [.allowBluetooth, .allowBluetoothA2DP, .allowAirPlay]
+            config.mode = AVAudioSession.Mode.videoChat.rawValue
Comment on lines +74 to +75
My 2c: it might be worth allowing end users to customize these options. Our current setup requires .defaultToSpeaker, and we also did not allow AirPlay, specifically because of issues with noise cancellation.

(THANK YOU for going down the AVAudioEngine rabbit hole. I know it's a huge one)

Member Author
Yes, I will allow devs to customize the session. 🙂
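
A rough sketch of what such a customization hook could look like (illustration only; the ConfigurableAudioSessionObserver class and the customizeConfiguration property are assumed names, not the final API — only the LKRTC* calls already used in this diff are taken as given):

#if os(iOS)
import AVFoundation
import LiveKitWebRTC

// Hypothetical sketch of the customization discussed above; not part of this PR.
final class ConfigurableAudioSessionObserver: NSObject, LKRTCAudioSessionDelegate {
    /// App-provided hook to adjust the configuration before it is handed to WebRTC. (Assumed name.)
    var customizeConfiguration: ((LKRTCAudioSessionConfiguration) -> Void)?

    func audioSession(_: LKRTCAudioSession, audioEngineWillUpdateStateWithOutputEnabled isOutputEnabled: Bool, inputEnabled isInputEnabled: Bool) {
        let config = LKRTCAudioSessionConfiguration.webRTC()
        config.category = AVAudioSession.Category.playAndRecord.rawValue
        config.categoryOptions = [.allowBluetooth, .allowBluetoothA2DP]
        config.mode = AVAudioSession.Mode.videoChat.rawValue

        // The app can adjust the configuration here, e.g. insert .defaultToSpeaker
        // or re-enable .allowAirPlay, before it is applied.
        customizeConfiguration?(config)

        LKRTCAudioSessionConfiguration.setWebRTC(config)
    }
}
#endif

An app would then set observer.customizeConfiguration = { $0.categoryOptions.insert(.defaultToSpeaker) } (or similar) before registering the observer with LKRTCAudioSession.sharedInstance().add(...).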

+            LKRTCAudioSessionConfiguration.setWebRTC(config)
+        }
+
+        func audioSessionDidStopPlayOrRecord(_: LKRTCAudioSession) {
+            log()
+        }
+    }
+    #endif
+
     // MARK: - Public

     #if compiler(>=6.0)
@@ -189,11 +212,13 @@ public class AudioManager: Loggable {
     public let defaultInputDevice = AudioDevice(ioDevice: LKRTCIODevice.defaultDevice(with: .input))

     public var outputDevices: [AudioDevice] {
-        RTC.audioDeviceModule.outputDevices.map { AudioDevice(ioDevice: $0) }
+        []
+        // RTC.audioDeviceModule.outputDevices.map { AudioDevice(ioDevice: $0) }
     }

     public var inputDevices: [AudioDevice] {
-        RTC.audioDeviceModule.inputDevices.map { AudioDevice(ioDevice: $0) }
+        []
+        // RTC.audioDeviceModule.inputDevices.map { AudioDevice(ioDevice: $0) }
     }

     public var outputDevice: AudioDevice {
@@ -224,102 +249,38 @@ public class AudioManager: Loggable {

     let state = StateSync(State())

-    // MARK: - Private
-
-    private let _configureRunner = SerialRunnerActor<Void>()
+    #if os(iOS)
+    let _audioSessionDelegateObserver = AudioSessionDelegateObserver()
+    init() {
+        LKRTCAudioSession.sharedInstance().add(_audioSessionDelegateObserver)
+    }

-    #if os(iOS) || os(visionOS) || os(tvOS)
-    private func _asyncConfigure(newState: State, oldState: State) async throws {
-        try await _configureRunner.run {
-            self.log("\(oldState) -> \(newState)")
-            let configureFunc = newState.customConfigureFunc ?? self.defaultConfigureAudioSessionFunc
Contributor
Make sure you also remove the customConfigureFunc property. It seems like this will need to be a breaking change, so we'll want to ensure this release goes out as 2.1.x and includes notes about the change.

Member Author

Yes, this is not intended to merge as-is; I'll need to figure out backward compatibility.
The track counting approach was not a good idea. Instead, this version gets a direct callback from the audio device module when session configuration is required.
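
For reference, a sketch of the kind of app-side override that removing customConfigureFunc would break. Illustration only, not from this PR: the (State, State) -> Void closure shape and trackState come from the removed code in this diff; exposing the hook as AudioManager.shared.customConfigureFunc is an assumption and may be spelled differently.

import AVFoundation
import LiveKit

// Hypothetical example of current app-side usage that this change would affect.
func installCustomAudioSessionHandling() {
    AudioManager.shared.customConfigureFunc = { newState, _ in
        // The app takes over AVAudioSession configuration for each track-state change.
        let session = AVAudioSession.sharedInstance()
        try? session.setCategory(.playAndRecord, mode: .voiceChat, options: [.defaultToSpeaker, .allowBluetooth])
        try? session.setActive(newState.trackState != .none)
    }
}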

-            configureFunc(newState, oldState)
-        }
+    deinit {
+        LKRTCAudioSession.sharedInstance().remove(_audioSessionDelegateObserver)
+    }
     #endif

+    // MARK: - Private
+
+    private let _configureRunner = SerialRunnerActor<Void>()

     func trackDidStart(_ type: Type) async throws {
-        let (newState, oldState) = state.mutate { state in
+        state.mutate { state in
-            let oldState = state
             if type == .local { state.localTracksCount += 1 }
             if type == .remote { state.remoteTracksCount += 1 }
-            return (state, oldState)
         }
-        #if os(iOS) || os(visionOS) || os(tvOS)
-        try await _asyncConfigure(newState: newState, oldState: oldState)
-        #endif
     }

     func trackDidStop(_ type: Type) async throws {
-        let (newState, oldState) = state.mutate { state in
+        state.mutate { state in
-            let oldState = state
             if type == .local { state.localTracksCount = max(state.localTracksCount - 1, 0) }
             if type == .remote { state.remoteTracksCount = max(state.remoteTracksCount - 1, 0) }
-            return (state, oldState)
         }
-        #if os(iOS) || os(visionOS) || os(tvOS)
-        try await _asyncConfigure(newState: newState, oldState: oldState)
-        #endif
     }

-    #if os(iOS) || os(visionOS) || os(tvOS)
-    /// The default implementation when audio session configuration is requested by the SDK.
-    /// Configure the `RTCAudioSession` of `WebRTC` framework.
-    ///
-    /// > Note: It is recommended to use `RTCAudioSessionConfiguration.webRTC()` to obtain an instance of `RTCAudioSessionConfiguration` instead of instantiating directly.
-    ///
-    /// - Parameters:
-    ///   - configuration: A configured RTCAudioSessionConfiguration
-    ///   - setActive: passing true/false will call `AVAudioSession.setActive` internally
-    public func defaultConfigureAudioSessionFunc(newState: State, oldState: State) {
-        // Lazily computed config
-        let computeConfiguration: (() -> AudioSessionConfiguration) = {
-            switch newState.trackState {
-            case .none:
-                // Use .soloAmbient configuration
-                return .soloAmbient
-            case .remoteOnly where newState.isSpeakerOutputPreferred:
-                // Use .playback configuration with spoken audio
-                return .playback
-            default:
-                // Use .playAndRecord configuration
-                return newState.isSpeakerOutputPreferred ? .playAndRecordSpeaker : .playAndRecordReceiver
-            }
-        }
-
-        let configuration = newState.sessionConfiguration ?? computeConfiguration()
-
-        var setActive: Bool?
-        if newState.trackState != .none, oldState.trackState == .none {
-            // activate audio session when there is any local/remote audio track
-            setActive = true
-        } else if newState.trackState == .none, oldState.trackState != .none {
-            // deactivate audio session when there are no more local/remote audio tracks
-            setActive = false
-        }
-
-        let session = LKRTCAudioSession.sharedInstance()
-        // Check if needs setConfiguration
-        guard configuration != session.toAudioSessionConfiguration() else {
-            log("Skipping configure audio session, no changes")
-            return
-        }
-
-        session.lockForConfiguration()
-        defer { session.unlockForConfiguration() }
-
-        do {
-            log("Configuring audio session: \(String(describing: configuration))")
-            if let setActive {
-                try session.setConfiguration(configuration.toRTCType(), active: setActive)
-            } else {
-                try session.setConfiguration(configuration.toRTCType())
-            }
-        } catch {
-            log("Failed to configure audio session with error: \(error)", .error)
-        }
-    }
-    #endif
 }

 public extension AudioManager {