From 70fcedd5a536c0d4edd0576488550355bf5b1bc7 Mon Sep 17 00:00:00 2001
From: cloudwebrtc
Date: Sat, 14 Dec 2024 13:37:05 +0800
Subject: [PATCH] feat: add TrackProcessor support.

---
 lib/livekit_client.dart          |  1 +
 lib/src/events.dart              | 15 +++++++++
 lib/src/participant/local.dart   |  9 ++++++
 lib/src/track/local/audio.dart   |  9 +++++-
 lib/src/track/local/local.dart   | 54 ++++++++++++++++++++++++++++++++
 lib/src/track/local/video.dart   | 18 ++++++-----
 lib/src/track/options.dart       |  9 ++++++
 lib/src/track/processor.dart     | 42 +++++++++++++++++++++++++
 lib/src/track/processor_web.dart | 14 +++++++++
 lib/src/track/track.dart         |  1 +
 10 files changed, 164 insertions(+), 8 deletions(-)
 create mode 100644 lib/src/track/processor.dart
 create mode 100644 lib/src/track/processor_web.dart

diff --git a/lib/livekit_client.dart b/lib/livekit_client.dart
index d9a71cc1..d656f04a 100644
--- a/lib/livekit_client.dart
+++ b/lib/livekit_client.dart
@@ -44,6 +44,7 @@ export 'src/track/remote/audio.dart';
 export 'src/track/remote/remote.dart';
 export 'src/track/remote/video.dart';
 export 'src/track/track.dart';
+export 'src/track/processor.dart';
 export 'src/types/other.dart';
 export 'src/types/participant_permissions.dart';
 export 'src/types/video_dimensions.dart';
diff --git a/lib/src/events.dart b/lib/src/events.dart
index cc193540..4ad44264 100644
--- a/lib/src/events.dart
+++ b/lib/src/events.dart
@@ -12,6 +12,8 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+import 'package:livekit_client/src/track/processor.dart';
+
 import 'core/engine.dart';
 import 'core/room.dart';
 import 'core/signal_client.dart';
@@ -578,3 +580,16 @@ class AudioVisualizerEvent with TrackEvent {
   String toString() => '${runtimeType}'
       'track: ${track})';
 }
+
+class TrackProcessorUpdateEvent with TrackEvent {
+  final Track track;
+  final TrackProcessor? processor;
+  const TrackProcessorUpdateEvent({
+    required this.track,
+    this.processor,
+  });
+
+  @override
+  String toString() => '${runtimeType}'
+      'track: ${track})';
+}
diff --git a/lib/src/participant/local.dart b/lib/src/participant/local.dart
index 9b328272..1731ff27 100644
--- a/lib/src/participant/local.dart
+++ b/lib/src/participant/local.dart
@@ -115,6 +115,8 @@ class LocalParticipant extends Participant<LocalTrackPublication> {
     // did publish
     await track.onPublish();
 
+    await track.processor?.onPublish(room);
+
     await room.applyAudioSpeakerSettings();
 
     var listener = track.createListener();
@@ -330,6 +332,7 @@ class LocalParticipant extends Participant<LocalTrackPublication> {
 
     // did publish
     await track.onPublish();
+    await track.processor?.onPublish(room);
 
     var listener = track.createListener();
     listener.on((TrackEndedEvent event) {
@@ -384,6 +387,12 @@ class LocalParticipant extends Participant<LocalTrackPublication> {
 
     // did unpublish
     await track.onUnpublish();
+
+    if (track.processor != null) {
+      await track.processor?.onUnpublish();
+      await track.stopProcessor();
+    }
+
     await room.applyAudioSpeakerSettings();
   }
 
diff --git a/lib/src/track/local/audio.dart b/lib/src/track/local/audio.dart
index 90b91dd5..971716b9 100644
--- a/lib/src/track/local/audio.dart
+++ b/lib/src/track/local/audio.dart
@@ -16,6 +16,7 @@ import 'dart:async';
 
 import 'package:collection/collection.dart';
 import 'package:flutter_webrtc/flutter_webrtc.dart' as rtc;
+import 'package:livekit_client/src/track/processor.dart';
 import 'package:meta/meta.dart';
 
 import '../../events.dart';
@@ -136,12 +137,18 @@ class LocalAudioTrack extends LocalTrack
     options ??= const AudioCaptureOptions();
     final stream = await LocalTrack.createStream(options);
 
-    return LocalAudioTrack(
+    var track = LocalAudioTrack(
       TrackSource.microphone,
       stream,
       stream.getAudioTracks().first,
       options,
       enableVisualizer: enableVisualizer,
     );
+
+    if (options.processor != null) {
+      await track.setProcessor(options.processor);
+    }
+
+    return track;
   }
 }
diff --git a/lib/src/track/local/local.dart b/lib/src/track/local/local.dart
index b64c62e8..17cc10fa 100644
--- a/lib/src/track/local/local.dart
+++ b/lib/src/track/local/local.dart
@@ -31,6 +31,7 @@ import '../../support/native.dart';
 import '../../support/platform.dart';
 import '../../types/other.dart';
 import '../options.dart';
+import '../processor.dart';
 import '../remote/audio.dart';
 import '../remote/video.dart';
 import '../track.dart';
@@ -119,6 +120,10 @@
 
   bool _stopped = false;
 
+  TrackProcessor? _processor;
+
+  TrackProcessor? get processor => _processor;
+
   LocalTrack(
     TrackType kind,
     TrackSource source,
@@ -253,6 +258,8 @@
     final newStream = await LocalTrack.createStream(currentOptions);
     final newTrack = newStream.getTracks().first;
 
+    await stopProcessor();
+
     // replace track on sender
     try {
       await sender?.replaceTrack(newTrack);
@@ -267,6 +274,10 @@
     // set new stream & track to this object
     updateMediaStreamAndTrack(newStream, newTrack);
 
+    if (_processor != null) {
+      await setProcessor(_processor!);
+    }
+
     // mark as started
     await start();
 
@@ -277,6 +288,49 @@
     ));
   }
 
+  Future<void> setProcessor(TrackProcessor? processor) async {
+    if (processor == null) {
+      return;
+    }
+
+    if (_processor != null) {
+      await stopProcessor();
+    }
+
+    _processor = processor;
+
+    var processorOptions = ProcessorOptions(
+      kind: kind,
+      track: mediaStreamTrack,
+    );
+
+    await _processor!.init(processorOptions);
+
+    logger.fine('processor initialized');
+
+    events.emit(TrackProcessorUpdateEvent(track: this, processor: _processor));
+  }
+
+  @internal
+  Future<void> stopProcessor({bool keepElement = false}) async {
+    if (_processor == null) return;
+
+    logger.fine('stopping processor');
+    await _processor?.processedTrack?.stop();
+    await _processor?.destroy();
+    _processor = null;
+
+    if (!keepElement) {
+      // processorElement?.remove();
+      // processorElement = null;
+    }
+    // apply original track constraints in case the processor changed them
+    //await this._mediaStreamTrack.applyConstraints(this._constraints);
+    // force re-setting of the mediaStreamTrack on the sender
+    //await this.setMediaStreamTrack(this._mediaStreamTrack, true);
+    events.emit(TrackProcessorUpdateEvent(track: this));
+  }
+
   @internal
   @mustCallSuper
   Future<bool> onPublish() async {
diff --git a/lib/src/track/local/video.dart b/lib/src/track/local/video.dart
index b7c0fc20..babef889 100644
--- a/lib/src/track/local/video.dart
+++ b/lib/src/track/local/video.dart
@@ -28,6 +28,7 @@ import '../../stats/stats.dart';
 import '../../support/platform.dart';
 import '../../types/other.dart';
 import '../options.dart';
+import '../processor.dart';
 import 'audio.dart';
 import 'local.dart';
 
@@ -162,12 +163,9 @@
   }
 
   // Private constructor
-  LocalVideoTrack._(
-    TrackSource source,
-    rtc.MediaStream stream,
-    rtc.MediaStreamTrack track,
-    this.currentOptions,
-  ) : super(
+  LocalVideoTrack._(TrackSource source, rtc.MediaStream stream,
+      rtc.MediaStreamTrack track, this.currentOptions)
+      : super(
           TrackType.VIDEO,
           source,
           stream,
@@ -181,12 +179,18 @@
     options ??= const CameraCaptureOptions();
     final stream = await LocalTrack.createStream(options);
 
-    return LocalVideoTrack._(
+    var track = LocalVideoTrack._(
       TrackSource.camera,
       stream,
       stream.getVideoTracks().first,
       options,
     );
+
+    if (options.processor != null) {
+      await track.setProcessor(options.processor);
+    }
+
+    return track;
   }
 
   /// Creates a LocalVideoTrack from the display.
diff --git a/lib/src/track/options.dart b/lib/src/track/options.dart
index e1f3a945..4e502510 100644
--- a/lib/src/track/options.dart
+++ b/lib/src/track/options.dart
@@ -18,6 +18,7 @@ import '../support/platform.dart';
 import '../track/local/audio.dart';
 import '../track/local/video.dart';
 import '../types/video_parameters.dart';
+import 'processor.dart';
 
 /// A type that represents front or back of the camera.
 enum CameraPosition {
@@ -202,10 +203,14 @@
   // Limit the maximum frameRate of the capture device.
   final double? maxFrameRate;
 
+  /// A processor to apply to the video track.
+  final TrackProcessor? processor;
+
   const VideoCaptureOptions({
     this.params = VideoParametersPresets.h540_169,
     this.deviceId,
     this.maxFrameRate,
+    this.processor,
   });
 
   @override
@@ -250,6 +255,9 @@
   /// set to false to only toggle enabled instead of stop/replaceTrack for muting
   final bool stopAudioCaptureOnMute;
 
+  /// A processor to apply to the audio track.
+  final TrackProcessor? processor;
+
   const AudioCaptureOptions({
     this.deviceId,
     this.noiseSuppression = true,
@@ -258,6 +266,7 @@
     this.highPassFilter = false,
     this.typingNoiseDetection = true,
     this.stopAudioCaptureOnMute = true,
+    this.processor,
   });
 
   @override
diff --git a/lib/src/track/processor.dart b/lib/src/track/processor.dart
new file mode 100644
index 00000000..ca1225ce
--- /dev/null
+++ b/lib/src/track/processor.dart
@@ -0,0 +1,42 @@
+import 'package:flutter_webrtc/flutter_webrtc.dart';
+
+import '../core/room.dart';
+import '../types/other.dart';
+
+class ProcessorOptions<T extends TrackType> {
+  T kind;
+  MediaStreamTrack track;
+  ProcessorOptions({
+    required this.kind,
+    required this.track,
+  });
+}
+
+class AudioProcessorOptions extends ProcessorOptions<TrackType> {
+  AudioProcessorOptions({
+    required MediaStreamTrack track,
+  }) : super(kind: TrackType.AUDIO, track: track);
+}
+
+class VideoProcessorOptions extends ProcessorOptions<TrackType> {
+  VideoProcessorOptions({
+    required MediaStreamTrack track,
+  }) : super(kind: TrackType.VIDEO, track: track);
+}
+
+abstract class TrackProcessor<T extends ProcessorOptions> {
+  String get name;
+
+  Future<void> init(T options);
+
+  Future<void> restart();
+
+  Future<void> destroy();
+
+  /// for flutter web only
+  MediaStreamTrack? get processedTrack;
+
+  Future<void> onPublish(Room room);
+
+  Future<void> onUnpublish();
+}
diff --git a/lib/src/track/processor_web.dart b/lib/src/track/processor_web.dart
new file mode 100644
index 00000000..795d2e07
--- /dev/null
+++ b/lib/src/track/processor_web.dart
@@ -0,0 +1,14 @@
+import 'package:web/web.dart';
+
+import 'processor.dart';
+
+class AudioProcessorOptionsWeb extends AudioProcessorOptions {
+  AudioProcessorOptionsWeb({
+    this.audioElement,
+    this.audioContext,
+    required super.track,
+  });
+
+  HTMLAudioElement? audioElement;
+  AudioContext? audioContext;
+}
diff --git a/lib/src/track/track.dart b/lib/src/track/track.dart
index cbddaabc..d15ffa25 100644
--- a/lib/src/track/track.dart
+++ b/lib/src/track/track.dart
@@ -15,6 +15,7 @@
 import 'dart:async';
 
 import 'package:flutter_webrtc/flutter_webrtc.dart' as rtc;
+import 'package:livekit_client/src/track/processor.dart';
 import 'package:meta/meta.dart';
 import 'package:uuid/uuid.dart';
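
A minimal sketch of a custom processor written against the TrackProcessor interface introduced by this patch. The class name LoggingAudioProcessor and its log-only behavior are hypothetical placeholders; a real processor would transform or analyze the captured track (noise filtering, for example) and, on web, expose the result through processedTrack.

import 'package:flutter_webrtc/flutter_webrtc.dart';
import 'package:livekit_client/livekit_client.dart';

/// Hypothetical processor that only logs lifecycle calls.
class LoggingAudioProcessor extends TrackProcessor<AudioProcessorOptions> {
  @override
  String get name => 'logging-audio-processor';

  /// This sketch produces no processed track (the hook is web-only).
  @override
  MediaStreamTrack? get processedTrack => null;

  @override
  Future<void> init(AudioProcessorOptions options) async {
    // setProcessor() passes the track's kind and raw MediaStreamTrack here.
    print('init: kind=${options.kind}, track=${options.track.id}');
  }

  @override
  Future<void> restart() async {
    // Re-create any internal pipeline if the implementation needs it.
    print('restart');
  }

  @override
  Future<void> onPublish(Room room) async {
    // LocalParticipant calls this after the track is published.
    print('published to room: ${room.name}');
  }

  @override
  Future<void> onUnpublish() async {
    print('unpublished');
  }

  @override
  Future<void> destroy() async {
    // stopProcessor() calls this; release anything allocated in init().
    print('destroy');
  }
}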
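
A usage sketch continuing the example above, assuming an already-connected Room. The processor can be attached at capture time through AudioCaptureOptions.processor or later with setProcessor(); both paths emit TrackProcessorUpdateEvent on the track's event stream. The helper name publishWithProcessor is illustrative only.

Future<void> publishWithProcessor(Room room) async {
  // Attach the processor at capture time; LocalAudioTrack.create() calls
  // setProcessor() internally when options.processor is set.
  final track = await LocalAudioTrack.create(
    AudioCaptureOptions(processor: LoggingAudioProcessor()),
  );

  // The processor's onPublish(room) is invoked once the track is published.
  await room.localParticipant?.publishAudioTrack(track);

  // Observe processor changes on the track's event stream.
  final listener = track.createListener();
  listener.on((TrackProcessorUpdateEvent event) {
    print('processor changed: ${event.processor?.name ?? "removed"}');
  });

  // A processor can also be swapped on a live track; the previous one is
  // stopped and destroyed before the new one is initialized.
  await track.setProcessor(LoggingAudioProcessor());
}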