-
Notifications
You must be signed in to change notification settings - Fork 106
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
AVAudioEngine version #526
base: main
Are you sure you want to change the base?
Changes from 4 commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -59,6 +59,29 @@ public class LKAudioBuffer: NSObject { | |
|
||
// Audio Session Configuration related | ||
public class AudioManager: Loggable { | ||
#if os(iOS) | ||
class AudioSessionDelegateObserver: NSObject, Loggable, LKRTCAudioSessionDelegate { | ||
func audioSessionDidStartPlayOrRecord(_: LKRTCAudioSession) { | ||
log() | ||
} | ||
|
||
func audioSession(_: LKRTCAudioSession, audioEngineWillUpdateStateWithOutputEnabled isOutputEnabled: Bool, inputEnabled isInputEnabled: Bool) { | ||
log("isOutputEnabled: \(isOutputEnabled), isInputEnabled: \(isInputEnabled)") | ||
|
||
// Configure audio session | ||
let config = LKRTCAudioSessionConfiguration.webRTC() | ||
config.category = AVAudioSession.Category.playAndRecord.rawValue | ||
config.categoryOptions = [.allowBluetooth, .allowBluetoothA2DP, .allowAirPlay] | ||
config.mode = AVAudioSession.Mode.videoChat.rawValue | ||
LKRTCAudioSessionConfiguration.setWebRTC(config) | ||
} | ||
|
||
func audioSessionDidStopPlayOrRecord(_: LKRTCAudioSession) { | ||
log() | ||
} | ||
} | ||
#endif | ||
|
||
// MARK: - Public | ||
|
||
#if compiler(>=6.0) | ||
|
@@ -189,11 +212,13 @@ public class AudioManager: Loggable { | |
public let defaultInputDevice = AudioDevice(ioDevice: LKRTCIODevice.defaultDevice(with: .input)) | ||
|
||
public var outputDevices: [AudioDevice] { | ||
RTC.audioDeviceModule.outputDevices.map { AudioDevice(ioDevice: $0) } | ||
[] | ||
// RTC.audioDeviceModule.outputDevices.map { AudioDevice(ioDevice: $0) } | ||
} | ||
|
||
public var inputDevices: [AudioDevice] { | ||
RTC.audioDeviceModule.inputDevices.map { AudioDevice(ioDevice: $0) } | ||
[] | ||
// RTC.audioDeviceModule.inputDevices.map { AudioDevice(ioDevice: $0) } | ||
} | ||
|
||
public var outputDevice: AudioDevice { | ||
|
@@ -224,102 +249,38 @@ public class AudioManager: Loggable { | |
|
||
let state = StateSync(State()) | ||
|
||
// MARK: - Private | ||
|
||
private let _configureRunner = SerialRunnerActor<Void>() | ||
#if os(iOS) | ||
let _audioSessionDelegateObserver = AudioSessionDelegateObserver() | ||
init() { | ||
LKRTCAudioSession.sharedInstance().add(_audioSessionDelegateObserver) | ||
} | ||
|
||
#if os(iOS) || os(visionOS) || os(tvOS) | ||
private func _asyncConfigure(newState: State, oldState: State) async throws { | ||
try await _configureRunner.run { | ||
self.log("\(oldState) -> \(newState)") | ||
let configureFunc = newState.customConfigureFunc ?? self.defaultConfigureAudioSessionFunc | ||
let configureFunc = newState.customConfigureFunc ?? self.defaultConfigureAudioSessionFunc | ||
[Review comment] There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. — "make sure you also remove the …"
[Author reply] There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. — "Yes, this is not intended to merge as-is; I'll need to figure out backward compatibility." |
||
configureFunc(newState, oldState) | ||
} | ||
deinit { | ||
LKRTCAudioSession.sharedInstance().remove(_audioSessionDelegateObserver) | ||
} | ||
#endif | ||
|
||
// MARK: - Private | ||
|
||
private let _configureRunner = SerialRunnerActor<Void>() | ||
|
||
func trackDidStart(_ type: Type) async throws { | ||
let (newState, oldState) = state.mutate { state in | ||
state.mutate { state in | ||
let oldState = state | ||
if type == .local { state.localTracksCount += 1 } | ||
if type == .remote { state.remoteTracksCount += 1 } | ||
return (state, oldState) | ||
} | ||
#if os(iOS) || os(visionOS) || os(tvOS) | ||
try await _asyncConfigure(newState: newState, oldState: oldState) | ||
#endif | ||
} | ||
|
||
func trackDidStop(_ type: Type) async throws { | ||
let (newState, oldState) = state.mutate { state in | ||
state.mutate { state in | ||
let oldState = state | ||
if type == .local { state.localTracksCount = max(state.localTracksCount - 1, 0) } | ||
if type == .remote { state.remoteTracksCount = max(state.remoteTracksCount - 1, 0) } | ||
return (state, oldState) | ||
} | ||
#if os(iOS) || os(visionOS) || os(tvOS) | ||
try await _asyncConfigure(newState: newState, oldState: oldState) | ||
#endif | ||
} | ||
|
||
#if os(iOS) || os(visionOS) || os(tvOS) | ||
/// The default implementation when audio session configuration is requested by the SDK. | ||
/// Configure the `RTCAudioSession` of `WebRTC` framework. | ||
/// | ||
/// > Note: It is recommended to use `RTCAudioSessionConfiguration.webRTC()` to obtain an instance of `RTCAudioSessionConfiguration` instead of instantiating directly. | ||
/// | ||
/// - Parameters: | ||
/// - configuration: A configured RTCAudioSessionConfiguration | ||
/// - setActive: passing true/false will call `AVAudioSession.setActive` internally | ||
public func defaultConfigureAudioSessionFunc(newState: State, oldState: State) { | ||
// Lazily computed config | ||
let computeConfiguration: (() -> AudioSessionConfiguration) = { | ||
switch newState.trackState { | ||
case .none: | ||
// Use .soloAmbient configuration | ||
return .soloAmbient | ||
case .remoteOnly where newState.isSpeakerOutputPreferred: | ||
// Use .playback configuration with spoken audio | ||
return .playback | ||
default: | ||
// Use .playAndRecord configuration | ||
return newState.isSpeakerOutputPreferred ? .playAndRecordSpeaker : .playAndRecordReceiver | ||
} | ||
} | ||
|
||
let configuration = newState.sessionConfiguration ?? computeConfiguration() | ||
|
||
var setActive: Bool? | ||
if newState.trackState != .none, oldState.trackState == .none { | ||
// activate audio session when there is any local/remote audio track | ||
setActive = true | ||
} else if newState.trackState == .none, oldState.trackState != .none { | ||
// deactivate audio session when there are no more local/remote audio tracks | ||
setActive = false | ||
} | ||
|
||
let session = LKRTCAudioSession.sharedInstance() | ||
// Check if needs setConfiguration | ||
guard configuration != session.toAudioSessionConfiguration() else { | ||
log("Skipping configure audio session, no changes") | ||
return | ||
} | ||
|
||
session.lockForConfiguration() | ||
defer { session.unlockForConfiguration() } | ||
|
||
do { | ||
log("Configuring audio session: \(String(describing: configuration))") | ||
if let setActive { | ||
try session.setConfiguration(configuration.toRTCType(), active: setActive) | ||
} else { | ||
try session.setConfiguration(configuration.toRTCType()) | ||
} | ||
} catch { | ||
log("Failed to configure audio session with error: \(error)", .error) | ||
} | ||
} | ||
#endif | ||
} | ||
|
||
public extension AudioManager { | ||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
My 2c: it might be worth allowing end-users to customize these options. Our current setup requires
.defaultToSpeaker
. We also did not allow AirPlay, specifically because of issues with noise cancellation. (THANK YOU for going down the AVAudioEngine rabbit hole — I know it's a huge one.)
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Yes, I will allow devs to customize the session. 🙂