From 5bbcefbf0d8be59025fef8111253a8a0baaf6001 Mon Sep 17 00:00:00 2001 From: Zita Szupera Date: Tue, 5 Sep 2023 12:18:42 +0200 Subject: [PATCH] feat: new device api remote mutes (#988) ## In this PR 1. Remote mutes If a user is muted remotely, the camera/audio device manager's state is now properly updated. There are two kinds of remote mutes: - Soft mute: you're muted but you can unmute yourself - Hard mute: your permission is taken away The device manager is updated in both of these cases. @santhoshvai this was the [missing feature that prevented you from using the device manager status in the participant SDK component](https://github.com/GetStream/stream-video-js/pull/958). 2. `MediaStream` release is called in RN The `mediaStream.release()` call was missing in some cases; this is now fixed. 3. Throw error in case of an RPC error So far, the RPC calls didn't reject the promise if an error happened; this is now fixed. 4. Add logs to device managers 5. Update location hint request timeout (unrelated to device API) 6. Stop publishing on remote soft mute So far the `Call` didn't stop publishing on remote soft mutes; this is now fixed. --------- Co-authored-by: Vishal Narkhede --- packages/client/src/Call.ts | 84 +++++++++++++++++-- packages/client/src/StreamSfuClient.ts | 4 + .../src/coordinator/connection/location.ts | 4 +- packages/client/src/devices/CameraManager.ts | 15 ++-- .../src/devices/InputMediaDeviceManager.ts | 70 +++++++++++++--- .../client/src/devices/MicrophoneManager.ts | 11 +-- .../__tests__/InputMediaDeviceManager.test.ts | 17 ++-- .../client/src/devices/__tests__/mocks.ts | 4 + .../__tests__/DynascaleManager.test.ts | 3 + packages/client/src/rtc/Publisher.ts | 42 ++++++++-- .../src/rtc/__tests__/Publisher.test.ts | 1 + sample-apps/client/ts-quickstart/.env-example | 1 + sample-apps/client/ts-quickstart/src/main.ts | 7 +- 13 files changed, 208 insertions(+), 55 deletions(-) diff --git a/packages/client/src/Call.ts b/packages/client/src/Call.ts index 1780639988..a170b4662b 100644 --- a/packages/client/src/Call.ts +++ b/packages/client/src/Call.ts @@ -277,6 +277,37 @@ export class Call { this.camera = new CameraManager(this); this.microphone = new MicrophoneManager(this); + + this.state.localParticipant$.subscribe(async (p) => { + // Mute via device manager + // If integrator doesn't use device manager, we mute using stopPublish + if ( + !p?.publishedTracks.includes(TrackType.VIDEO) && + this.publisher?.isPublishing(TrackType.VIDEO) + ) { + this.logger( + 'info', + `Local participant's video track is muted remotely`, + ); + await this.camera.disable(); + if (this.publisher.isPublishing(TrackType.VIDEO)) { + this.stopPublish(TrackType.VIDEO); + } + } + if ( + !p?.publishedTracks.includes(TrackType.AUDIO) && + this.publisher?.isPublishing(TrackType.AUDIO) + ) { + this.logger( + 'info', + `Local participant's audio track is muted remotely`, + ); + await this.microphone.disable(); + if (this.publisher.isPublishing(TrackType.AUDIO)) { + this.stopPublish(TrackType.AUDIO); + } + } + }); this.speaker = new SpeakerManager(); } @@ -309,10 +340,50 @@ export class Call { const hasPermission = this.permissionsContext.hasPermission( permission as OwnCapability, ); - if (!hasPermission && this.publisher.isPublishing(trackType)) { - this.stopPublish(trackType).catch((err) => { - this.logger('error', `Error stopping publish ${trackType}`, err); - }); + if ( + !hasPermission && + (this.publisher.isPublishing(trackType) || + this.publisher.isLive(trackType)) + ) { + // Stop tracks, then notify device manager +
this.stopPublish(trackType) + .catch((err) => { + this.logger( + 'error', + `Error stopping publish ${trackType}`, + err, + ); + }) + .then(() => { + if ( + trackType === TrackType.VIDEO && + this.camera.state.status === 'enabled' + ) { + this.camera + .disable() + .catch((err) => + this.logger( + 'error', + `Error disabling camera after permission revoked`, + err, + ), + ); + } + if ( + trackType === TrackType.AUDIO && + this.microphone.state.status === 'enabled' + ) { + this.microphone + .disable() + .catch((err) => + this.logger( + 'error', + `Error disabling microphone after permission revoked`, + err, + ), + ); + } + }); } } }), @@ -1112,7 +1183,10 @@ export class Call { * @param stopTrack if `true` the track will be stopped, else it will be just disabled */ stopPublish = async (trackType: TrackType, stopTrack: boolean = true) => { - this.logger('info', `stopPublish ${TrackType[trackType]}`); + this.logger( + 'info', + `stopPublish ${TrackType[trackType]}, stop tracks: ${stopTrack}`, + ); await this.publisher?.unpublishStream(trackType, stopTrack); }; diff --git a/packages/client/src/StreamSfuClient.ts b/packages/client/src/StreamSfuClient.ts index 7d60465292..abc004b8d0 100644 --- a/packages/client/src/StreamSfuClient.ts +++ b/packages/client/src/StreamSfuClient.ts @@ -396,5 +396,9 @@ const retryable = async ( retryAttempt < MAX_RETRIES ); + if (rpcCallResult.response.error) { + throw rpcCallResult.response.error; + } + return rpcCallResult; }; diff --git a/packages/client/src/coordinator/connection/location.ts b/packages/client/src/coordinator/connection/location.ts index df4636a0de..5dfe43ee25 100644 --- a/packages/client/src/coordinator/connection/location.ts +++ b/packages/client/src/coordinator/connection/location.ts @@ -5,7 +5,7 @@ const HINT_URL = `https://hint.stream-io-video.com/`; export const getLocationHint = async ( hintUrl: string = HINT_URL, - timeout: number = 1500, + timeout: number = 2000, ) => { const abortController = new AbortController(); const timeoutId = setTimeout(() => abortController.abort(), timeout); @@ -18,7 +18,7 @@ export const getLocationHint = async ( logger('debug', `Location header: ${awsPop}`); return awsPop.substring(0, 3); // AMS1-P2 -> AMS } catch (e) { - logger('error', `Failed to get location hint from ${HINT_URL}`, e); + logger('warn', `Failed to get location hint from ${HINT_URL}`, e); return 'ERR'; } finally { clearTimeout(timeoutId); diff --git a/packages/client/src/devices/CameraManager.ts b/packages/client/src/devices/CameraManager.ts index e39e814680..6ebf73aa4a 100644 --- a/packages/client/src/devices/CameraManager.ts +++ b/packages/client/src/devices/CameraManager.ts @@ -12,7 +12,7 @@ export class CameraManager extends InputMediaDeviceManager { }; constructor(call: Call) { - super(call, new CameraManagerState()); + super(call, new CameraManagerState(), TrackType.VIDEO); } /** @@ -59,6 +59,10 @@ export class CameraManager extends InputMediaDeviceManager { height !== this.targetResolution.height ) await this.applySettingsToStream(); + this.logger( + 'debug', + `${width}x${height} target resolution applied to media stream`, + ); } } @@ -85,12 +89,7 @@ export class CameraManager extends InputMediaDeviceManager { return this.call.stopPublish(TrackType.VIDEO, stopTracks); } - protected muteTracks(): void { - this.state.mediaStream - ?.getVideoTracks() - .forEach((t) => (t.enabled = false)); - } - protected unmuteTracks(): void { - this.state.mediaStream?.getVideoTracks().forEach((t) => (t.enabled = true)); + protected getTrack() { + return
this.state.mediaStream?.getVideoTracks()[0]; } } diff --git a/packages/client/src/devices/InputMediaDeviceManager.ts b/packages/client/src/devices/InputMediaDeviceManager.ts index e21c1848bf..f63f11960c 100644 --- a/packages/client/src/devices/InputMediaDeviceManager.ts +++ b/packages/client/src/devices/InputMediaDeviceManager.ts @@ -2,8 +2,10 @@ import { Observable } from 'rxjs'; import { Call } from '../Call'; import { CallingState } from '../store'; import { InputMediaDeviceManagerState } from './InputMediaDeviceManagerState'; -import { disposeOfMediaStream } from './devices'; import { isReactNative } from '../helpers/platforms'; +import { Logger } from '../coordinator/connection/types'; +import { getLogger } from '../logger'; +import { TrackType } from '../gen/video/sfu/models/models'; export abstract class InputMediaDeviceManager< T extends InputMediaDeviceManagerState, @@ -16,7 +18,15 @@ export abstract class InputMediaDeviceManager< * @internal */ disablePromise?: Promise<void>; - constructor(protected readonly call: Call, public readonly state: T) {} + logger: Logger; + + constructor( + protected readonly call: Call, + public readonly state: T, + protected readonly trackType: TrackType, + ) { + this.logger = getLogger([`${TrackType[trackType].toLowerCase()} manager`]); + } /** * Lists the available audio/video devices @@ -129,32 +139,68 @@ protected abstract stopPublishStream(stopTracks: boolean): Promise<void>; - protected abstract muteTracks(): void; - - protected abstract unmuteTracks(): void; + protected abstract getTrack(): undefined | MediaStreamTrack; private async muteStream(stopTracks: boolean = true) { if (!this.state.mediaStream) { return; } + this.logger('debug', `${stopTracks ? 'Stopping' : 'Disabling'} stream`); if (this.call.state.callingState === CallingState.JOINED) { await this.stopPublishStream(stopTracks); - } else if (this.state.mediaStream) { - stopTracks - ? disposeOfMediaStream(this.state.mediaStream) - : this.muteTracks(); } - if (stopTracks) { + this.muteLocalStream(stopTracks); + if (this.getTrack()?.readyState === 'ended') { + // @ts-expect-error release() is present in react-native-webrtc and must be called to dispose the stream + if (typeof this.state.mediaStream.release === 'function') { + // @ts-expect-error + this.state.mediaStream.release(); + } this.state.setMediaStream(undefined); } } + private muteTrack() { + const track = this.getTrack(); + if (!track || !track.enabled) { + return; + } + track.enabled = false; + } + + private unmuteTrack() { + const track = this.getTrack(); + if (!track || track.enabled) { + return; + } + track.enabled = true; + } + + private stopTrack() { + const track = this.getTrack(); + if (!track || track.readyState === 'ended') { + return; + } + track.stop(); + } + + private muteLocalStream(stopTracks: boolean) { + if (!this.state.mediaStream) { + return; + } + stopTracks ?
this.stopTrack() : this.muteTrack(); + } + private async unmuteStream() { + this.logger('debug', 'Starting stream'); let stream: MediaStream; - if (this.state.mediaStream) { + if (this.state.mediaStream && this.getTrack()?.readyState === 'live') { stream = this.state.mediaStream; - this.unmuteTracks(); + this.unmuteTrack(); } else { + if (this.state.mediaStream) { + this.stopTrack(); + } const constraints = { deviceId: this.state.selectedDevice }; stream = await this.getStream(constraints); } diff --git a/packages/client/src/devices/MicrophoneManager.ts b/packages/client/src/devices/MicrophoneManager.ts index f4b0c210fe..91b6271479 100644 --- a/packages/client/src/devices/MicrophoneManager.ts +++ b/packages/client/src/devices/MicrophoneManager.ts @@ -7,7 +7,7 @@ import { TrackType } from '../gen/video/sfu/models/models'; export class MicrophoneManager extends InputMediaDeviceManager<MicrophoneManagerState> { constructor(call: Call) { - super(call, new MicrophoneManagerState()); + super(call, new MicrophoneManagerState(), TrackType.AUDIO); } protected getDevices(): Observable<MediaDeviceInfo[]> { @@ -25,12 +25,7 @@ export class MicrophoneManager extends InputMediaDeviceManager<MicrophoneManagerState> - protected muteTracks(): void { - this.state.mediaStream - ?.getAudioTracks() - .forEach((t) => (t.enabled = false)); - } - protected unmuteTracks(): void { - this.state.mediaStream?.getAudioTracks().forEach((t) => (t.enabled = true)); + protected getTrack() { + return this.state.mediaStream?.getAudioTracks()[0]; } } diff --git a/packages/client/src/devices/__tests__/InputMediaDeviceManager.test.ts b/packages/client/src/devices/__tests__/InputMediaDeviceManager.test.ts index c71d60e8ec..d9378eef94 100644 --- a/packages/client/src/devices/__tests__/InputMediaDeviceManager.test.ts +++ b/packages/client/src/devices/__tests__/InputMediaDeviceManager.test.ts @@ -7,14 +7,7 @@ import { mockCall, mockVideoDevices, mockVideoStream } from './mocks'; import { InputMediaDeviceManager } from '../InputMediaDeviceManager'; import { InputMediaDeviceManagerState } from '../InputMediaDeviceManagerState'; import { of } from 'rxjs'; -import { disposeOfMediaStream } from '../devices'; - -vi.mock('../devices.ts', () => { console.log('MOCKING devices'); return { disposeOfMediaStream: vi.fn(), }; -}); +import { TrackType } from '../../gen/video/sfu/models/models'; vi.mock('../../Call.ts', () => { console.log('MOCKING Call'); @@ -32,11 +25,10 @@ class TestInputMediaDeviceManager extends InputMediaDeviceManager<TestInputMediaDeviceManagerState> { public getStream = vi.fn(() => Promise.resolve(mockVideoStream())); public publishStream = vi.fn(); public stopPublishStream = vi.fn(); - public muteTracks = vi.fn(); - public unmuteTracks = vi.fn(); + public getTrack = () => this.state.mediaStream!.getVideoTracks()[0]; constructor(call: Call) { - super(call, new TestInputMediaDeviceManagerState()); + super(call, new TestInputMediaDeviceManagerState(), TrackType.VIDEO); } } @@ -135,11 +127,12 @@ describe('InputMediaDeviceManager.test', () => { it('select device when status is enabled', async () => { await manager.enable(); const prevStream = manager.state.mediaStream; + vi.spyOn(prevStream!.getVideoTracks()[0], 'stop'); const deviceId = mockVideoDevices[1].deviceId; await manager.select(deviceId); - expect(disposeOfMediaStream).toHaveBeenCalledWith(prevStream); + expect(prevStream!.getVideoTracks()[0].stop).toHaveBeenCalledWith(); }); it('select device when status is enabled and in call', async () => { diff --git a/packages/client/src/devices/__tests__/mocks.ts b/packages/client/src/devices/__tests__/mocks.ts index 401bc410e6..846e6b3b86 100644 --- a/packages/client/src/devices/__tests__/mocks.ts +++
b/packages/client/src/devices/__tests__/mocks.ts @@ -93,6 +93,10 @@ export const mockVideoStream = () => { height: 720, }), enabled: true, + readyState: 'live', + stop: () => { + track.readyState = 'ended'; + }, }; return { getVideoTracks: () => [track], diff --git a/packages/client/src/helpers/__tests__/DynascaleManager.test.ts b/packages/client/src/helpers/__tests__/DynascaleManager.test.ts index d2788f4485..cf459d1f37 100644 --- a/packages/client/src/helpers/__tests__/DynascaleManager.test.ts +++ b/packages/client/src/helpers/__tests__/DynascaleManager.test.ts @@ -53,6 +53,7 @@ describe('DynascaleManager', () => { call.state.updateOrAddParticipant('session-id', { userId: 'user-id', sessionId: 'session-id', + publishedTracks: [], }); const element = document.createElement('div'); @@ -113,6 +114,7 @@ describe('DynascaleManager', () => { call.state.updateOrAddParticipant('session-id', { userId: 'user-id', sessionId: 'session-id', + publishedTracks: [], }); // @ts-ignore @@ -120,6+122,7 @@ describe('DynascaleManager', () => { userId: 'user-id-local', sessionId: 'session-id-local', isLocalParticipant: true, + publishedTracks: [], }); const cleanup = dynascaleManager.bindAudioElement( diff --git a/packages/client/src/rtc/Publisher.ts b/packages/client/src/rtc/Publisher.ts index e829941d38..3d10585852 100644 --- a/packages/client/src/rtc/Publisher.ts +++ b/packages/client/src/rtc/Publisher.ts @@ -253,6 +253,9 @@ export class Publisher { // by an external factor as permission revokes, device disconnected, etc. // keep in mind that `track.stop()` doesn't trigger this event. track.addEventListener('ended', handleTrackEnded); + if (!track.enabled) { + track.enabled = true; + } transceiver = this.pc.addTransceiver(track, { direction: 'sendonly', @@ -310,17 +313,24 @@ export class Publisher { if ( transceiver && transceiver.sender.track && - transceiver.sender.track.readyState === 'live' + (stopTrack + ? transceiver.sender.track.readyState === 'live' + : transceiver.sender.track.enabled) ) { stopTrack ? transceiver.sender.track.stop() : (transceiver.sender.track.enabled = false); - return this.notifyTrackMuteStateChanged( - undefined, - transceiver.sender.track, - trackType, - true, - ); + // We don't need to notify SFU if unpublishing in response to remote soft mute + if (!this.state.localParticipant?.publishedTracks.includes(trackType)) { + return; + } else { + return this.notifyTrackMuteStateChanged( + undefined, + transceiver.sender.track, + trackType, + true, + ); + } } }; @@ -330,6 +340,24 @@ export class Publisher { * @param trackType the track type to check. */ isPublishing = (trackType: TrackType): boolean => { + const transceiverForTrackType = this.transceiverRegistry[trackType]; + if (transceiverForTrackType && transceiverForTrackType.sender) { + const sender = transceiverForTrackType.sender; + return ( + !!sender.track && + sender.track.readyState === 'live' && + sender.track.enabled + ); + } + return false; + }; + + /** + * Returns true if the given track type is currently live + * + * @param trackType the track type to check.
+ */ + isLive = (trackType: TrackType): boolean => { const transceiverForTrackType = this.transceiverRegistry[trackType]; if (transceiverForTrackType && transceiverForTrackType.sender) { const sender = transceiverForTrackType.sender; diff --git a/packages/client/src/rtc/__tests__/Publisher.test.ts b/packages/client/src/rtc/__tests__/Publisher.test.ts index a12e7231ef..ceb662c3e8 100644 --- a/packages/client/src/rtc/__tests__/Publisher.test.ts +++ b/packages/client/src/rtc/__tests__/Publisher.test.ts @@ -180,6 +180,7 @@ describe('Publisher', () => { expect(state.localParticipant?.videoDeviceId).toEqual('test-device-id'); expect(state.localParticipant?.publishedTracks).toContain(TrackType.VIDEO); + expect(track.enabled).toBe(true); expect(state.localParticipant?.videoStream).toEqual(mediaStream); expect(transceiver.setCodecPreferences).toHaveBeenCalled(); expect(sfuClient.updateMuteState).toHaveBeenCalledWith( diff --git a/sample-apps/client/ts-quickstart/.env-example b/sample-apps/client/ts-quickstart/.env-example index 3ef370dfa0..b2bca63cc0 100644 --- a/sample-apps/client/ts-quickstart/.env-example +++ b/sample-apps/client/ts-quickstart/.env-example @@ -1,3 +1,4 @@ VITE_STREAM_API_KEY= VITE_STREAM_USER_TOKEN= VITE_STREAM_CALL_ID= # optional +VITE_STREAM_LOG_LEVEL=warn # optional diff --git a/sample-apps/client/ts-quickstart/src/main.ts b/sample-apps/client/ts-quickstart/src/main.ts index 9570ae872e..4b9ad65eb4 100644 --- a/sample-apps/client/ts-quickstart/src/main.ts +++ b/sample-apps/client/ts-quickstart/src/main.ts @@ -31,7 +31,12 @@ const callId = import.meta.env.VITE_STREAM_CALL_ID || (new Date().getTime() + Math.round(Math.random() * 100)).toString(); -const client = new StreamVideoClient({ apiKey, token, user }); +const client = new StreamVideoClient({ + apiKey, + token, + user, + options: { logLevel: import.meta.env.VITE_STREAM_LOG_LEVEL }, +}); const call = client.call('default', callId); call.join({ create: true }).then(async () => { // render mic and camera controls
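Usage note (not part of the patch above): the practical upshot of change 1 is that integrators can drive their UI purely from the device manager state, because a remote soft or hard mute now flips `call.camera.state.status` / `call.microphone.state.status` to `'disabled'`. A minimal sketch of that wiring, assuming the `status$` observable exposed on the device manager state, placeholder credentials, and a hypothetical `#camera-toggle` button:

```ts
import { StreamVideoClient } from '@stream-io/video-client';

// Placeholder credentials; supply them the same way the ts-quickstart sample does.
const client = new StreamVideoClient({
  apiKey: 'REPLACE_WITH_API_KEY',
  token: 'REPLACE_WITH_TOKEN',
  user: { id: 'example-user' },
});
const call = client.call('default', 'example-call-id');

// Hypothetical button element, used only for illustration.
const cameraButton = document.querySelector<HTMLButtonElement>('#camera-toggle')!;

call.join({ create: true }).then(() => {
  // With this PR, a remote soft or hard mute updates the camera manager state,
  // so subscribing to it is enough to keep the button label correct.
  call.camera.state.status$.subscribe((status) => {
    cameraButton.textContent =
      status === 'enabled' ? 'Turn off camera' : 'Turn on camera';
  });

  cameraButton.addEventListener('click', async () => {
    if (call.camera.state.status === 'enabled') {
      await call.camera.disable();
    } else {
      // After a hard mute, the send-video permission must be granted again
      // before enabling the camera will succeed.
      await call.camera.enable();
    }
  });
});
```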