From b77c4afff2a539a5e4b80c7a4ee83e9587eed102 Mon Sep 17 00:00:00 2001 From: Timo Date: Fri, 6 Dec 2024 18:12:51 +0100 Subject: [PATCH] refactor - still should not work without a fixing upstream LK: https://github.com/livekit/components-js/pull/1042 https://github.com/livekit/components-js/pull/1043 --- src/App.tsx | 54 ++++++++----- src/livekit/TrackProcessorContext.tsx | 111 ++++++++++++++++++++++++++ src/livekit/useLiveKit.ts | 90 +++++---------------- src/room/LobbyView.tsx | 60 +++++--------- src/settings/SettingsModal.tsx | 30 +++---- yarn.lock | 16 ++-- 6 files changed, 204 insertions(+), 157 deletions(-) create mode 100644 src/livekit/TrackProcessorContext.tsx diff --git a/src/App.tsx b/src/App.tsx index 8d841dba7..5d96877b0 100644 --- a/src/App.tsx +++ b/src/App.tsx @@ -28,6 +28,7 @@ import { Initializer } from "./initializer"; import { MediaDevicesProvider } from "./livekit/MediaDevicesContext"; import { widget } from "./widget"; import { useTheme } from "./useTheme"; +import { ProcessorProvider } from "./livekit/TrackProcessorContext"; const SentryRoute = Sentry.withSentryRouting(Route); @@ -82,27 +83,25 @@ export const App: FC = ({ history }) => { {loaded ? ( - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + ) : ( @@ -113,3 +112,16 @@ export const App: FC = ({ history }) => { ); }; + +const Providers: FC<{ + children: JSX.Element; +}> = ({ children }) => { + // We use this to stack all used providers to not make the App component to verbose + return ( + + + {children} + + + ); +}; diff --git a/src/livekit/TrackProcessorContext.tsx b/src/livekit/TrackProcessorContext.tsx new file mode 100644 index 000000000..c2bc38267 --- /dev/null +++ b/src/livekit/TrackProcessorContext.tsx @@ -0,0 +1,111 @@ +/* +Copyright 2024 New Vector Ltd. + +SPDX-License-Identifier: AGPL-3.0-only +Please see LICENSE in the repository root for full details. +*/ + +import { + BackgroundBlur as backgroundBlur, + BackgroundOptions, + ProcessorWrapper, +} from "@livekit/track-processors"; +import { + createContext, + FC, + useCallback, + useContext, + useEffect, + useRef, + useState, +} from "react"; +import { logger } from "matrix-js-sdk/src/logger"; +import { LocalVideoTrack } from "livekit-client"; + +import { + backgroundBlur as backgroundBlurSettings, + useSetting, +} from "../settings/settings"; + +type ProcessorState = { + supported: boolean | undefined; + processor: undefined | ProcessorWrapper; + /** + * Call this method to try to initialize a processor. + * This only needs to happen if supported is undefined. + * If the backgroundBlur setting is set to true this does not need to be called + * and the processorState.supported will update automatically to the correct value. 
+ */ + checkSupported: () => void; +}; +const ProcessorContext = createContext(undefined); + +export const useTrackProcessor = (): ProcessorState | undefined => + useContext(ProcessorContext); + +export const useTrackProcessorSync = ( + videoTrack: LocalVideoTrack | null, +): void => { + const { processor } = useTrackProcessor() || {}; + useEffect(() => { + if (processor && !videoTrack?.getProcessor()) { + void videoTrack?.setProcessor(processor); + } + if (!processor && videoTrack?.getProcessor()) { + void videoTrack?.stopProcessor(); + } + }, [processor, videoTrack]); +}; + +interface Props { + children: JSX.Element; +} +export const ProcessorProvider: FC = ({ children }) => { + // The setting the user wants to have + const [blurActivated] = useSetting(backgroundBlurSettings); + + // If `ProcessorState.supported` is undefined the user can activate that we want + // to have it at least checked (this is useful to show the settings menu properly) + // We dont want to try initializing the blur if the user is not even looking at the setting + const [shouldCheckSupport, setShouldCheckSupport] = useState(blurActivated); + + // Cache the processor so we only need to initialize it once. + const blur = useRef | undefined>( + undefined, + ); + + const checkSupported = useCallback(() => { + setShouldCheckSupport(true); + }, []); + // This is the actual state exposed through the context + const [processorState, setProcessorState] = useState(() => ({ + supported: false, + processor: undefined, + checkSupported, + })); + + useEffect(() => { + if (!shouldCheckSupport) return; + try { + if (!blur.current) blur.current = backgroundBlur(15, { delegate: "GPU" }); + setProcessorState({ + checkSupported, + supported: true, + processor: blurActivated ? blur.current : undefined, + }); + } catch (e) { + setProcessorState({ + checkSupported, + supported: false, + processor: undefined, + }); + logger.error("disable background blur", e); + } + }, [blurActivated, checkSupported, shouldCheckSupport]); + + return ( + + {children} + + ); +}; diff --git a/src/livekit/useLiveKit.ts b/src/livekit/useLiveKit.ts index 85f3178b2..4836141e2 100644 --- a/src/livekit/useLiveKit.ts +++ b/src/livekit/useLiveKit.ts @@ -9,9 +9,8 @@ import { ConnectionState, E2EEOptions, ExternalE2EEKeyProvider, - LocalTrackPublication, + LocalVideoTrack, Room, - RoomEvent, RoomOptions, Track, } from "livekit-client"; @@ -19,7 +18,6 @@ import { useEffect, useMemo, useRef } from "react"; import E2EEWorker from "livekit-client/e2ee-worker?worker"; import { logger } from "matrix-js-sdk/src/logger"; import { MatrixRTCSession } from "matrix-js-sdk/src/matrixrtc/MatrixRTCSession"; -import { BackgroundBlur as backgroundBlur } from "@livekit/track-processors"; import { defaultLiveKitOptions } from "./options"; import { SFUConfig } from "./openIDSFU"; @@ -29,7 +27,6 @@ import { MediaDevices, useMediaDevices, } from "./MediaDevicesContext"; -import { backgroundBlur as backgroundBlurSettings } from "../settings/settings"; import { ECConnectionState, useECConnectionState, @@ -37,7 +34,11 @@ import { import { MatrixKeyProvider } from "../e2ee/matrixKeyProvider"; import { E2eeType } from "../e2ee/e2eeType"; import { EncryptionSystem } from "../e2ee/sharedKeyManagement"; -import { useSetting } from "../settings/settings"; +import { + useTrackProcessor, + useTrackProcessorSync, +} from "./TrackProcessorContext"; +import { useInitial } from "../useInitial"; interface UseLivekitResult { livekitRoom?: Room; @@ -83,22 +84,16 @@ export function useLiveKit( const 
initialMuteStates = useRef(muteStates); const devices = useMediaDevices(); const initialDevices = useRef(devices); - const blur = useMemo(() => { - let b = undefined; - try { - b = backgroundBlur(15, { delegate: "GPU" }); - } catch (e) { - logger.error("disable background blur", e); - } - return b; - }, []); + + const { processor } = useTrackProcessor() || {}; + const initialProcessor = useInitial(() => processor); const roomOptions = useMemo( (): RoomOptions => ({ ...defaultLiveKitOptions, videoCaptureDefaults: { ...defaultLiveKitOptions.videoCaptureDefaults, deviceId: initialDevices.current.videoInput.selectedId, - processor: blur, + processor: initialProcessor, }, audioCaptureDefaults: { ...defaultLiveKitOptions.audioCaptureDefaults, @@ -109,7 +104,7 @@ export function useLiveKit( }, e2ee: e2eeOptions, }), - [blur, e2eeOptions], + [e2eeOptions, initialProcessor], ); // Store if audio/video are currently updating. If to prohibit unnecessary calls @@ -134,6 +129,15 @@ export function useLiveKit( return r; }, [roomOptions, e2eeSystem]); + const videoTrack = useMemo( + () => + Array.from(room.localParticipant.videoTrackPublications.values()).find( + (v) => v.source === Track.Source.Camera, + )?.track as LocalVideoTrack | null, + [room.localParticipant.videoTrackPublications], + ); + useTrackProcessorSync(videoTrack); + const connectionState = useECConnectionState( { deviceId: initialDevices.current.audioInput.selectedId, @@ -143,58 +147,6 @@ export function useLiveKit( sfuConfig, ); - const [showBackgroundBlur] = useSetting(backgroundBlurSettings); - const videoTrackPromise = useRef< - undefined | Promise - >(undefined); - - useEffect(() => { - // Don't even try if we cannot blur on this platform - if (!blur) return; - if (!room || videoTrackPromise.current) return; - const update = async (): Promise => { - let publishCallback: undefined | ((track: LocalTrackPublication) => void); - videoTrackPromise.current = new Promise< - LocalTrackPublication | undefined - >((resolve) => { - const videoTrack = Array.from( - room.localParticipant.videoTrackPublications.values(), - ).find((v) => v.source === Track.Source.Camera); - if (videoTrack) { - resolve(videoTrack); - } - publishCallback = (videoTrack: LocalTrackPublication): void => { - if (videoTrack.source === Track.Source.Camera) { - resolve(videoTrack); - } - }; - room.on(RoomEvent.LocalTrackPublished, publishCallback); - }); - - const videoTrack = await videoTrackPromise.current; - - if (publishCallback) - room.off(RoomEvent.LocalTrackPublished, publishCallback); - - if (videoTrack !== undefined) { - if ( - showBackgroundBlur && - videoTrack.track?.getProcessor()?.name !== "background-blur" - ) { - logger.info("Blur: set blur"); - - void videoTrack.track?.setProcessor(blur); - } else if ( - videoTrack.track?.getProcessor()?.name === "background-blur" - ) { - void videoTrack.track?.stopProcessor(); - } - } - videoTrackPromise.current = undefined; - }; - void update(); - }, [blur, room, showBackgroundBlur]); - useEffect(() => { // Sync the requested mute states with LiveKit's mute states. 
We do it this // way around rather than using LiveKit as the source of truth, so that the @@ -261,6 +213,7 @@ export function useLiveKit( audioMuteUpdating.current = true; trackPublication = await participant.setMicrophoneEnabled( buttonEnabled.current.audio, + room.options.audioCaptureDefaults, ); audioMuteUpdating.current = false; break; @@ -268,6 +221,7 @@ export function useLiveKit( videoMuteUpdating.current = true; trackPublication = await participant.setCameraEnabled( buttonEnabled.current.video, + room.options.videoCaptureDefaults, ); videoMuteUpdating.current = false; break; diff --git a/src/room/LobbyView.tsx b/src/room/LobbyView.tsx index 3626f82ec..545eff099 100644 --- a/src/room/LobbyView.tsx +++ b/src/room/LobbyView.tsx @@ -13,10 +13,13 @@ import classNames from "classnames"; import { useHistory } from "react-router-dom"; import { logger } from "matrix-js-sdk/src/logger"; import { usePreviewTracks } from "@livekit/components-react"; -import { LocalVideoTrack, Track } from "livekit-client"; +import { + CreateLocalTracksOptions, + LocalVideoTrack, + Track, +} from "livekit-client"; import { useObservable } from "observable-hooks"; import { map } from "rxjs"; -import { BackgroundBlur as backgroundBlur } from "@livekit/track-processors"; import inCallStyles from "./InCallView.module.css"; import styles from "./LobbyView.module.css"; @@ -33,14 +36,16 @@ import { VideoButton, } from "../button/Button"; import { SettingsModal, defaultSettingsTab } from "../settings/SettingsModal"; -import { backgroundBlur as backgroundBlurSettings } from "../settings/settings"; import { useMediaQuery } from "../useMediaQuery"; import { E2eeType } from "../e2ee/e2eeType"; import { Link } from "../button/Link"; import { useMediaDevices } from "../livekit/MediaDevicesContext"; import { useInitial } from "../useInitial"; import { useSwitchCamera as useShowSwitchCamera } from "./useSwitchCamera"; -import { useSetting } from "../settings/settings"; +import { + useTrackProcessor, + useTrackProcessorSync, +} from "../livekit/TrackProcessorContext"; interface Props { client: MatrixClient; @@ -111,20 +116,10 @@ export const LobbyView: FC = ({ muteStates.audio.enabled && { deviceId: devices.audioInput.selectedId }, ); - const blur = useMemo(() => { - let b = undefined; - try { - b = backgroundBlur(15, { delegate: "GPU" }); - } catch (e) { - logger.error( - "disable background blur because its not supported by the platform.", - e, - ); - } - return b; - }, []); + const { processor } = useTrackProcessor() || {}; - const localTrackOptions = useMemo( + const initialProcessor = useInitial(() => processor); + const localTrackOptions = useMemo( () => ({ // The only reason we request audio here is to get the audio permission // request over with at the same time. But changing the audio settings @@ -135,14 +130,14 @@ export const LobbyView: FC = ({ audio: Object.assign({}, initialAudioOptions), video: muteStates.video.enabled && { deviceId: devices.videoInput.selectedId, - // It should be possible to set a processor here: - // processor: blur, + processor: initialProcessor, }, }), [ initialAudioOptions, muteStates.video.enabled, devices.videoInput.selectedId, + initialProcessor, ], ); @@ -157,28 +152,11 @@ export const LobbyView: FC = ({ const tracks = usePreviewTracks(localTrackOptions, onError); - const videoTrack = useMemo( - () => - (tracks?.find((t) => t.kind === Track.Kind.Video) ?? 
- null) as LocalVideoTrack | null, - [tracks], - ); - - const [showBackgroundBlur] = useSetting(backgroundBlurSettings); - - useEffect(() => { - // Fon't even try if we cannot blur on this platform - if (!blur) return; - const updateBlur = async (showBlur: boolean): Promise => { - if (showBlur && !videoTrack?.getProcessor()) { - await videoTrack?.setProcessor(blur); - } else { - await videoTrack?.stopProcessor(); - } - }; - if (videoTrack) void updateBlur(showBackgroundBlur); - }, [videoTrack, showBackgroundBlur, blur]); - + const videoTrack = useMemo(() => { + const track = tracks?.find((t) => t.kind === Track.Kind.Video); + return track as LocalVideoTrack | null; + }, [tracks]); + useTrackProcessorSync(videoTrack); const showSwitchCamera = useShowSwitchCamera( useObservable( (inputs) => inputs.pipe(map(([video]) => video)), diff --git a/src/settings/SettingsModal.tsx b/src/settings/SettingsModal.tsx index 4e9da3591..11d73f4d3 100644 --- a/src/settings/SettingsModal.tsx +++ b/src/settings/SettingsModal.tsx @@ -5,12 +5,10 @@ SPDX-License-Identifier: AGPL-3.0-only Please see LICENSE in the repository root for full details. */ -import { ChangeEvent, FC, ReactNode, useCallback, useState } from "react"; +import { ChangeEvent, FC, ReactNode, useCallback, useEffect, useState } from "react"; import { Trans, useTranslation } from "react-i18next"; import { MatrixClient } from "matrix-js-sdk/src/matrix"; import { Root as Form, Separator, Text } from "@vector-im/compound-web"; -import { BackgroundBlur as backgroundBlur } from "@livekit/track-processors"; -import { logger } from "matrix-js-sdk/src/logger"; import { Modal } from "../Modal"; import styles from "./SettingsModal.module.css"; @@ -36,6 +34,7 @@ import { isFirefox } from "../Platform"; import { PreferencesSettingsTab } from "./PreferencesSettingsTab"; import { Slider } from "../Slider"; import { DeviceSelection } from "./DeviceSelection"; +import { useTrackProcessor } from "../livekit/TrackProcessorContext"; type SettingsTab = | "audio" @@ -75,18 +74,11 @@ export const SettingsModal: FC = ({ // Generate a `Checkbox` input to turn blur on or off. const BlurCheckbox: React.FC = (): ReactNode => { - const [blur, setBlur] = useSetting(backgroundBlurSetting); - let canBlur = true; - try { - backgroundBlur(15); - } catch (e) { - logger.debug( - "Cannot blur, so we do not show the option in settings. error: ", - e, - ); - canBlur = false; - setBlur(false); - } + const { supported, checkSupported } = useTrackProcessor() || {}; + useEffect(() => checkSupported?.(), [checkSupported]); + + const [blurActive, setBlurActive] = useSetting(backgroundBlurSetting); + return ( <>
        <h4>
          {t("settings.background_blur_header")}
        </h4>
@@ -96,12 +88,12 @@ export const SettingsModal: FC = ({
             id="activateBackgroundBlur"
             label={t("settings.background_blur_label")}
             description={
-              canBlur ? "" : t("settings.blur_not_supported_by_browser")
+              supported ? "" : t("settings.blur_not_supported_by_browser")
             }
             type="checkbox"
-            checked={blur}
-            onChange={(b): void => setBlur(b.target.checked)}
-            disabled={!canBlur}
+            checked={!!blurActive}
+            onChange={(b): void => setBlurActive(b.target.checked)}
+            disabled={!supported}
           />
diff --git a/yarn.lock b/yarn.lock
index 1a456433a..b4fc53aa4 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -1805,10 +1805,10 @@
   resolved "https://registry.yarnpkg.com/@livekit/mutex/-/mutex-1.0.0.tgz#9493102d92ff75dfb0445eccc46c7c7ac189d385"
   integrity sha512-aiUhoThBNF9UyGTxEURFzJLhhPLIVTnQiEVMjRhPnfHNKLfo2JY9xovHKIus7B78UD5hsP6DlgpmAsjrz4U0Iw==

-"@livekit/protocol@1.24.0":
-  version "1.24.0"
-  resolved "https://registry.yarnpkg.com/@livekit/protocol/-/protocol-1.24.0.tgz#b23acab25c11027bf26c1b42f9b782682f2da585"
-  integrity sha512-9dCsqnkMn7lvbI4NGh18zhLDsrXyUcpS++TEFgEk5Xv1WM3R2kT3EzqgL1P/mr3jaabM6rJ8wZA/KJLuQNpF5w==
+"@livekit/protocol@1.29.3":
+  version "1.29.3"
+  resolved "https://registry.yarnpkg.com/@livekit/protocol/-/protocol-1.29.3.tgz#486ce215c0c591ad64036d9b13c7e28f5417cf03"
+  integrity sha512-5La/pm2LsSeCbm7xNe/TvHGYu7uVwDpLrlycpgo5nzofGq/TH67255vS8ni/1Y7vrFuAI8VYG/s42mcC1UF6tQ==
   dependencies:
     "@bufbuild/protobuf" "^1.10.0"

@@ -6125,12 +6125,12 @@ lines-and-columns@^1.1.6:
   integrity sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==

 livekit-client@^2.5.7:
-  version "2.7.0"
-  resolved "https://registry.yarnpkg.com/livekit-client/-/livekit-client-2.7.0.tgz#d7a80aff4ad335dd093b0c90d0d715466539651a"
-  integrity sha512-4vjfSReFNAUD+2oLUz9qFRWztJaI/+AexpOmCgizNsPYpvvqgAvEGxapnhuAug9uP7JVYaKPXaTCq90MWZoDHg==
+  version "2.7.3"
+  resolved "https://registry.yarnpkg.com/livekit-client/-/livekit-client-2.7.3.tgz#70a5f5016f3f50b1282f4b9090aa17a39f8bde09"
+  integrity sha512-oHEmUTFjIJARi5R87PsobZx8y2HCSUwla3Nu71EqDOAMnNY9aoGMLsJVao5Y+v1TSk71rgRm991fihgxtbg5xw==
   dependencies:
     "@livekit/mutex" "1.0.0"
     "@livekit/protocol" "1.29.3"
     events "^3.3.0"
     loglevel "^1.8.0"
     sdp-transform "^2.14.1"
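
Reviewer illustration (not part of the patch): a minimal sketch of how a component is expected to consume the context added in src/livekit/TrackProcessorContext.tsx, assuming only the exports visible in the diff above (ProcessorProvider, useTrackProcessor, useTrackProcessorSync). It has to be rendered underneath a ProcessorProvider; the BlurStatus component name, its videoTrack prop, and the import path are hypothetical.

// BlurStatus.tsx: hypothetical consumer of the new TrackProcessorContext.
import { FC, useEffect } from "react";
import { LocalVideoTrack } from "livekit-client";

import {
  useTrackProcessor,
  useTrackProcessorSync,
} from "./livekit/TrackProcessorContext";

export const BlurStatus: FC<{ videoTrack: LocalVideoTrack | null }> = ({
  videoTrack,
}) => {
  // Returns undefined when rendered outside of a ProcessorProvider, so the
  // component degrades gracefully.
  const { supported, checkSupported } = useTrackProcessor() ?? {};

  // Ask the provider to probe blur support once; it only does real work while
  // `supported` is still undefined (same pattern as the SettingsModal change).
  useEffect(() => checkSupported?.(), [checkSupported]);

  // Attach or detach the shared blur processor whenever the backgroundBlur
  // setting flips, without recreating the track.
  useTrackProcessorSync(videoTrack);

  if (supported === undefined) return <p>Checking blur support...</p>;
  return <p>{supported ? "Background blur available" : "Background blur not supported"}</p>;
};

Design note: keeping the ProcessorWrapper inside the provider means the lobby preview track (LobbyView) and the in-call room (useLiveKit) reuse one cached blur pipeline instead of constructing their own, which is what the removed per-component backgroundBlur(15, { delegate: "GPU" }) calls used to do.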