(false);
+
+ const [errorDetails, setErrorDetails] = React.useState<
+ MediaPermissionsError | undefined
+ >();
+
+ // Create wrapper refs to access values even during setTimeout
+ // https://github.com/facebook/react/issues/14010
+ const showDialogRef = useRef(showDialog);
+ showDialogRef.current = showDialog;
+ const audioAllowedRef = useRef(audioAllowed);
+ audioAllowedRef.current = audioAllowed;
+
+ React.useEffect(() => {
+ checkMediaPermissions();
+ }, []);
+
+ React.useEffect(() => {
+ console.log('audio allowed permission changed: ', audioAllowed);
+ if (audioAllowed) {
+ // set the default devices
+ // MediaManager.findMediaDevices();
+ }
+ }, [audioAllowed]);
+
+ const checkForExplanationDialog = () => {
+ if (
+ (!audioAllowedRef.current) &&
+ showDialogRef.current === null
+ )
+ setShowDialog(DialogType.explanation);
+ };
+
+ const checkMediaPermissions = () => {
+ // TODO: listen to if there is a change on the audio/video piece?
+
+ requestMediaPermissions({
+ audio: true,
+ video: false,
+ })
+ .then(() => {
+ setAudioAllowed(true);
+ setShowDialog(null);
+ })
+ .catch((error: MediaPermissionsError) => {
+ console.log('MediaVerification: ', error);
+ if (
+ error.type ===
+ MediaPermissionsErrorType.SystemPermissionDenied
+ ) {
+ // the browser itself is blocked from the microphone at the OS level
+ setShowDialog(DialogType.systemDenied);
+ } else if (
+ error.type ===
+ MediaPermissionsErrorType.UserPermissionDenied
+ ) {
+ // the user denied this site's permission prompt in the browser
+ setShowDialog(DialogType.userDenied);
+ } else if (
+ error.type ===
+ MediaPermissionsErrorType.CouldNotStartVideoSource
+ ) {
+ // most likely another app or browser tab is already using the mic/cam (mostly Windows)
+ setShowDialog(DialogType.trackError);
+ }
+ setErrorDetails(error);
+ });
+
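+ // If the permission request has neither resolved nor rejected shortly after
+ // mounting, the browser's native prompt is most likely still open, so show
+ // the explanation dialog behind it.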
+ setTimeout(() => {
+ checkForExplanationDialog();
+ }, 500);
+ };
+
+ const _renderTryAgain = (text?: string) => {
+ return (
+ // NOTE: Button and the Dialog*/Typography/Link elements in this file are assumed to be @mui/material components
+ <div>
+ <Button
+ onClick={() => {
+ if (browser.getBrowserName() === 'Safari') {
+ // If on Safari, rechecking permissions results in glitches so just refresh the page
+ window.location.reload();
+ } else {
+ checkMediaPermissions();
+ }
+ }}
+ color="primary"
+ style={{ float: 'right' }}
+ >
+ {text ? text : 'Retry'}
+ </Button>
+ </div>
+ );
+ };
+
+ const _renderErrorMessage = () => {
+ if (!errorDetails) return null;
+ return (
+ // exact error markup assumed: surface the raw error name/message for debugging
+ <Typography variant="caption" style={{ color: 'red' }}>
+ {errorDetails.name}: {errorDetails.message}
+ </Typography>
+ );
+ };
+
+ const _renderExplanationDialog = () => {
+ return (
+ <div>
+ <DialogTitle>
+ Allow App to use your microphone
+ </DialogTitle>
+ <DialogContent>
+ <Typography>
+ This voice analysis app needs access to your microphone so the AI can hear you.
+ </Typography>
+ </DialogContent>
+ </div>
+ );
+ };
+
+ const _renderUserDeniedDialog = () => {
+ return (
+ <div>
+ <DialogTitle>
+ Microphone is blocked
+ </DialogTitle>
+ <DialogContent>
+ App requires access to your microphone.{' '}
+ {browser.getBrowserName() !== 'Safari' && (
+ <Typography>
+ Click the microphone blocked icon{' '}
+ in your browser's address bar.
+ </Typography>
+ )}
+ {_renderErrorMessage()}
+ {_renderTryAgain()}
+ </DialogContent>
+ </div>
+ );
+ };
+
+ const _renderSystemDeniedDialog = () => {
+ const settingsDataByOS = {
+ macOS: {
+ name: 'System Preferences',
+ link: 'x-apple.systempreferences:com.apple.preference.security?Privacy_Microphone',
+ },
+ };
+
+ return (
+ <div>
+ <DialogTitle>
+ Can't use your microphone
+ </DialogTitle>
+ <DialogContent>
+ <Typography>
+ Your browser might not have access to your
+ microphone. To fix this problem, open{' '}
+ {
+ // @ts-ignore
+ settingsDataByOS[browser.getOSName()] ? (
+ <Link
+ onClick={() => {
+ window.open(
+ // @ts-ignore
+ settingsDataByOS[browser.getOSName()]
+ .link,
+ '_blank',
+ );
+ }}
+ >
+ {
+ // @ts-ignore
+ settingsDataByOS[browser.getOSName()].name
+ }
+ </Link>
+ ) : (
+ 'Settings'
+ )
+ }
+ .
+ </Typography>
+ {_renderErrorMessage()}
+ {_renderTryAgain()}
+ </DialogContent>
+ </div>
+ );
+ };
+
+ const _renderTrackErrorDialog = () => {
+ return (
+ <div>
+ <DialogTitle>
+ Can't start your microphone
+ </DialogTitle>
+ <DialogContent>
+ <Typography>
+ Another application (Zoom, Webex) or browser tab (Google
+ Meet, Messenger Video) might already be using your
+ microphone. Please close it before proceeding.
+ </Typography>
+ {_renderErrorMessage()}
+ {_renderTryAgain()}
+ </DialogContent>
+ </div>
+ );
+ };
+
+ const _renderDialogContent = () => {
+ switch (showDialog) {
+ case DialogType.explanation:
+ return _renderExplanationDialog();
+ case DialogType.systemDenied:
+ return _renderSystemDeniedDialog();
+ case DialogType.userDenied:
+ return _renderUserDeniedDialog();
+ case DialogType.trackError:
+ return _renderTrackErrorDialog();
+ }
+ };
+ return (
+ // the open flag is assumed: show the dialog whenever a DialogType is set
+ <Dialog
+ open={showDialog !== null}
+ onClose={() => {
+ setShowDialog(null);
+ }}
+ >
+ {showDialog && _renderDialogContent()}
+ {_renderTryAgain()}
+ </Dialog>
+ );
+};
+
+export default MediaVerification;
\ No newline at end of file
diff --git a/src/components/Note.tsx b/src/components/Note.tsx
new file mode 100644
index 0000000..e77fe3e
--- /dev/null
+++ b/src/components/Note.tsx
@@ -0,0 +1,53 @@
+import { FC, useState, useEffect } from "react";
+import { Text } from '@mantine/core';
+import { useAudioAnalyser } from '../contexts/AudioAnalyserContext';
+import { calculateFrequency, calculateNote, toDecimals } from "../utils/sound";
+
+const Note: FC = () => {
+ const { analyser } = useAudioAnalyser();
+ const [lastFrequency, setLastFrequency] = useState("");
+ const [note, setNote] = useState("");
+
+ useEffect(() => {
+ if (!analyser) {
+ return;
+ }
+
+ let raf: number;
+
+ const data = new Float32Array(analyser.frequencyBinCount);
+
+ const draw = () => {
+ raf = requestAnimationFrame(draw);
+ analyser.getFloatFrequencyData(data);
+
+ const frequency = calculateFrequency(data);
+
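+ // Only update the readout when the loudest bin is below 1.1 kHz;
+ // a frequency of 0 (no peak detected yet) is skipped as well.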
+ if (frequency && frequency < 1100) {
+ setNote(calculateNote(frequency) || "");
+ setLastFrequency(toDecimals(frequency, 1));
+ }
+ };
+ draw();
+
+ return () => {
+ cancelAnimationFrame(raf);
+ }
+
+ }, [analyser]);
+
+ if (!analyser || !note || note === 'undefined-Infinity') {
+ return null;
+ }
+ return (
+ <>
+ <Text>{note}</Text>
+ <Text>{lastFrequency} Hz</Text>
+ </>
+ );
+};
+
+export default Note;
\ No newline at end of file
diff --git a/src/contexts/AudioAnalyserContext.tsx b/src/contexts/AudioAnalyserContext.tsx
new file mode 100644
index 0000000..100a44d
--- /dev/null
+++ b/src/contexts/AudioAnalyserContext.tsx
@@ -0,0 +1,61 @@
+import React, { createContext, FunctionComponent, useEffect, useState, useContext } from 'react';
+import { useInputAudio } from './InputAudioContext';
+
+interface AudioAnalyserContextValue {
+ analyser: AnalyserNode | undefined;
+}
+
+const AudioAnalyserContext = createContext<AudioAnalyserContextValue>({
+ analyser: undefined,
+});
+
+export const useAudioAnalyser = () => useContext(AudioAnalyserContext);
+
+interface Props {
+ children: React.ReactNode;
+}
+
+export const AudioAnalyserProvider: FunctionComponent<Props> = ({ children }) => {
+ const [analyser, setAnalyser] = useState<AnalyserNode>();
+ const { source } = useInputAudio();
+
+ useEffect(() => {
+ if (source) {
+ const analyserNode = source.context.createAnalyser();
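+ // fftSize 256 -> frequencyBinCount 128, so each getFloatFrequencyData()
+ // call fills a 128-value array; smoothingTimeConstant averages successive
+ // frames to steady the readout.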
+ analyserNode.smoothingTimeConstant = 0.4;
+ analyserNode.maxDecibels = 0;
+ analyserNode.minDecibels = -100;
+ analyserNode.fftSize = 256;
+ source.connect(analyserNode);
+ setAnalyser(analyserNode);
+ }
+ }, [source]);
+
+ useEffect(() => {
+ if (analyser && source) {
+ source.connect(analyser);
+ }
+
+ if (!source) {
+ if (analyser) {
+ analyser.disconnect();
+ setAnalyser(undefined);
+ }
+ }
+
+ return () => {
+ if (analyser) {
+ analyser.disconnect();
+ setAnalyser(undefined);
+ }
+ }
+ }, [analyser, source])
+
+ return (
+ <AudioAnalyserContext.Provider value={{ analyser }}>
+ {children}
+ </AudioAnalyserContext.Provider>
+ )
+}
+
+export default AudioAnalyserContext;
\ No newline at end of file
diff --git a/src/contexts/InputAudioContext.tsx b/src/contexts/InputAudioContext.tsx
new file mode 100644
index 0000000..f0f8bc5
--- /dev/null
+++ b/src/contexts/InputAudioContext.tsx
@@ -0,0 +1,66 @@
+import React, { createContext, useContext, FunctionComponent, useEffect, useState, useCallback } from 'react'
+import { useMediaStream } from './MediaStreamContext';
+
+interface InputAudioContextValue {
+ audioCtx: AudioContext | undefined;
+ source: AudioNode | undefined;
+}
+
+const InputAudioContext = createContext<InputAudioContextValue>({
+ audioCtx: undefined,
+ source: undefined,
+});
+
+export const useInputAudio = () => useContext(InputAudioContext);
+
+interface Props {
+ children: React.ReactNode;
+}
+
+export const InputAudioProvider: FunctionComponent<Props> = ({ children }) => {
+ const [context, setContext] = useState<AudioContext>();
+ const [source, setSource] = useState<AudioNode>();
+ const { stream } = useMediaStream();
+
+ const stop = useCallback(async () => {
+ try {
+ if (context) {
+ await context.close();
+ setContext(undefined);
+ }
+ if (source) {
+ source.disconnect();
+ setSource(undefined);
+ }
+ } catch(e) {
+ let error = e as Error;
+ console.error(error.name, error.message);
+ }
+ }, [context, source]);
+
+ useEffect(() => {
+ if (stream) {
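+ // Wrap the MediaStream in a Web Audio source node so downstream
+ // consumers (e.g. the analyser) can tap the microphone signal.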
+ const audioCtx = new AudioContext();
+ setSource(audioCtx.createMediaStreamSource(stream));
+ setContext(audioCtx);
+ }
+ }, [stream]);
+
+ useEffect(() => {
+ if (!stream) {
+ stop();
+ }
+
+ return () => {
+ stop();
+ }
+ }, [stream, stop]);
+
+ return (
+ <InputAudioContext.Provider value={{ audioCtx: context, source }}>
+ {children}
+ </InputAudioContext.Provider>
+ )
+}
+
+export default InputAudioContext;
\ No newline at end of file
diff --git a/src/contexts/MediaStreamContext.tsx b/src/contexts/MediaStreamContext.tsx
new file mode 100644
index 0000000..9515c56
--- /dev/null
+++ b/src/contexts/MediaStreamContext.tsx
@@ -0,0 +1,52 @@
+import React, { createContext, FunctionComponent, useCallback, useContext, useState, useEffect } from 'react';
+
+interface MediaStreamContextValue {
+ stream: MediaStream | undefined;
+ start: () => void;
+ stop: () => void;
+}
+
+const MediaStreamContext = createContext<MediaStreamContextValue>({
+ stream: undefined,
+ start: () => {},
+ stop: () => {},
+});
+
+export const useMediaStream = () => useContext(MediaStreamContext);
+
+interface Props {
+ audio: boolean;
+ video: boolean;
+ children: React.ReactNode;
+}
+export const MediaStreamProvider: FunctionComponent<Props> = ({ children, audio, video }) => {
+ const [stream, setStream] = useState<MediaStream>();
+
+ useEffect(() => {
+ return () => {
+ if (stream) {
+ stream.getTracks().forEach(track => track.stop());
+ }
+ }
+ }, [stream]);
+
+ const start = useCallback(async () => {
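+ // getUserMedia() shows the browser permission prompt (if needed) and
+ // rejects when the user denies access; in that case `stream` stays unset.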
+ const mediaStream = await navigator.mediaDevices.getUserMedia({ audio, video });
+ setStream(mediaStream);
+ }, [audio, video]);
+
+ const stop = useCallback(() => {
+ if (stream) {
+ stream.getTracks().forEach(track => track.stop());
+ setStream(undefined);
+ }
+ }, [stream]);
+
+ return (
+ <MediaStreamContext.Provider value={{ stream, start, stop }}>
+ {children}
+ </MediaStreamContext.Provider>
+ );
+}
+
+export default MediaStreamContext;
\ No newline at end of file
diff --git a/src/contexts/index.ts b/src/contexts/index.ts
new file mode 100644
index 0000000..6c9f432
--- /dev/null
+++ b/src/contexts/index.ts
@@ -0,0 +1,3 @@
+export * from './AudioAnalyserContext';
+export * from './InputAudioContext';
+export * from './MediaStreamContext';
diff --git a/src/index.css b/src/index.css
new file mode 100644
index 0000000..409ca5c
--- /dev/null
+++ b/src/index.css
@@ -0,0 +1,70 @@
+:root {
+ font-family: Inter, Avenir, Helvetica, Arial, sans-serif;
+ font-size: 16px;
+ line-height: 24px;
+ font-weight: 400;
+
+ color-scheme: light dark;
+ color: rgba(255, 255, 255, 0.87);
+ background-color: #242424;
+
+ font-synthesis: none;
+ text-rendering: optimizeLegibility;
+ -webkit-font-smoothing: antialiased;
+ -moz-osx-font-smoothing: grayscale;
+ -webkit-text-size-adjust: 100%;
+}
+
+a {
+ font-weight: 500;
+ color: #646cff;
+ text-decoration: inherit;
+}
+a:hover {
+ color: #535bf2;
+}
+
+body {
+ margin: 0;
+ display: flex;
+ place-items: center;
+ min-width: 320px;
+ min-height: 100vh;
+}
+
+h1 {
+ font-size: 3.2em;
+ line-height: 1.1;
+}
+
+button {
+ border-radius: 8px;
+ border: 1px solid transparent;
+ padding: 0.6em 1.2em;
+ font-size: 1.5em;
+ font-weight: 500;
+ font-family: inherit;
+ background-color: #1a1a1a;
+ cursor: pointer;
+ transition: border-color 0.25s;
+}
+button:hover {
+ border-color: #646cff;
+}
+button:focus,
+button:focus-visible {
+ outline: 4px auto -webkit-focus-ring-color;
+}
+
+@media (prefers-color-scheme: light) {
+ :root {
+ color: #213547;
+ background-color: #ffffff;
+ }
+ a:hover {
+ color: #747bff;
+ }
+ button {
+ background-color: #f9f9f9;
+ }
+}
diff --git a/src/main.tsx b/src/main.tsx
new file mode 100644
index 0000000..733f473
--- /dev/null
+++ b/src/main.tsx
@@ -0,0 +1,17 @@
+import React from 'react'
+import ReactDOM from 'react-dom/client'
+import App from './App'
+import './index.css'
+import * as contexts from './contexts';
+
+ReactDOM.createRoot(document.getElementById('root') as HTMLElement).render(
+ <React.StrictMode>
+ {/* assumed wiring: audio-only capture feeding the input-audio source and the analyser */}
+ <contexts.MediaStreamProvider audio video={false}>
+ <contexts.InputAudioProvider>
+ <contexts.AudioAnalyserProvider>
+ <App />
+ </contexts.AudioAnalyserProvider>
+ </contexts.InputAudioProvider>
+ </contexts.MediaStreamProvider>
+ </React.StrictMode>,
+)
diff --git a/src/utils/sound.ts b/src/utils/sound.ts
new file mode 100644
index 0000000..5997f16
--- /dev/null
+++ b/src/utils/sound.ts
@@ -0,0 +1,77 @@
+// const notes = ['A', 'B', 'C', 'D', 'E', 'F', 'G'];
+const notes = ['C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B'];
+
+const A4 = 440;
+const C0 = Math.round(A4 * Math.pow(2, -4.75)); // 16
+
+interface calculateFrequencyOptions {
+ rate?: number;
+}
+
+// Code by fritzvd (signaltohertz) - https://github.com/fritzvd/signaltohertz
+// Changes: function name
+const calculateFrequency = (frequencies: Float32Array, options: calculateFrequencyOptions = {}) => {
+ const { rate = 22050 / 1024 } = options; // defaults in audioContext.
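+ // rate is the width of one FFT bin in Hz (sampleRate / fftSize); the default
+ // matches a 44.1 kHz context with fftSize 2048 (22050 Hz Nyquist over 1024 bins).
+ // Callers using a different fftSize should pass rate = sampleRate / fftSize.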
+
+ let maxI = 0;
+ let max = frequencies[0];
+
+ for (let i = 0; frequencies.length > i; i++) {
+ const oldmax = max;
+ const newmax = Math.max(max, frequencies[i]);
+ if (oldmax != newmax) {
+ max = newmax;
+ maxI = i;
+ }
+ }
+ return maxI * rate;
+};
+
+// Calculate amount of steps away from C0
+const calculateSemiTone = (frequency: number) => {
+ const semiTonesAway = 12 * Math.log2(frequency / C0);
+ return semiTonesAway;
+};
+
+// Uses C0 as base
+const calculateOctave = (semiTonesAway: number) => {
+ const octave = Math.floor(semiTonesAway / 12);
+ return octave;
+};
+
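+// Difference between two pitches in cents (1/100 of a semitone); positive
+// when lastFrequency is higher than currentFrequency.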
+const calculateCents = (currentFrequency: number, lastFrequency: number) => {
+ const cents = 1200 * Math.log2(lastFrequency / currentFrequency);
+ return cents;
+};
+
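+// Map a frequency to a note name, e.g. 440 Hz: 12 * log2(440 / 16) ≈ 57.4
+// semitones above C0, so octave 4 and pitch-class index 9 -> "A4".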
+const calculateNote = (frequency: number) => {
+ const semiTone = calculateSemiTone(frequency);
+ const octave = calculateOctave(semiTone);
+ const notePosition = Math.floor(semiTone % 12);
+ const note = notes[notePosition] + String(octave);
+ return note;
+};
+
+const toDecimals = (number: number, decimals: number) => {
+ const fixedNumber = number.toFixed(decimals);
+ return fixedNumber;
+};
+
+// Handle error
+const throwError = (err: string) => {
+ throw new Error(`Something went wrong: ${err}`);
+};
+
+const logError = (err: string) => {
+ console.error(err);
+};
+
+export {
+ calculateFrequency,
+ calculateSemiTone,
+ calculateCents,
+ calculateNote,
+ toDecimals,
+ throwError,
+ logError,
+};
\ No newline at end of file
diff --git a/src/utils/useInterval.ts b/src/utils/useInterval.ts
new file mode 100644
index 0000000..a2e6528
--- /dev/null
+++ b/src/utils/useInterval.ts
@@ -0,0 +1,27 @@
+import { useEffect, useRef } from 'react'
+
+import { useIsomorphicLayoutEffect } from 'usehooks-ts'
+
+function useInterval(callback: () => void, delay: number | null) {
+ const savedCallback = useRef(callback)
+
+ // Remember the latest callback if it changes.
+ useIsomorphicLayoutEffect(() => {
+ savedCallback.current = callback
+ }, [callback])
+
+ // Set up the interval.
+ useEffect(() => {
+ // Don't schedule if no delay is specified.
+ // Note: 0 is a valid value for delay.
+ if (!delay && delay !== 0) {
+ return
+ }
+
+ const id = setInterval(() => savedCallback.current(), delay)
+
+ return () => clearInterval(id)
+ }, [delay])
+}
+
+export default useInterval
\ No newline at end of file
diff --git a/src/vite-env.d.ts b/src/vite-env.d.ts
new file mode 100644
index 0000000..11f02fe
--- /dev/null
+++ b/src/vite-env.d.ts
@@ -0,0 +1 @@
+/// <reference types="vite/client" />
diff --git a/tsconfig.json b/tsconfig.json
new file mode 100644
index 0000000..3d0a51a
--- /dev/null
+++ b/tsconfig.json
@@ -0,0 +1,21 @@
+{
+ "compilerOptions": {
+ "target": "ESNext",
+ "useDefineForClassFields": true,
+ "lib": ["DOM", "DOM.Iterable", "ESNext"],
+ "allowJs": false,
+ "skipLibCheck": true,
+ "esModuleInterop": false,
+ "allowSyntheticDefaultImports": true,
+ "strict": true,
+ "forceConsistentCasingInFileNames": true,
+ "module": "ESNext",
+ "moduleResolution": "Node",
+ "resolveJsonModule": true,
+ "isolatedModules": true,
+ "noEmit": true,
+ "jsx": "react-jsx"
+ },
+ "include": ["src"],
+ "references": [{ "path": "./tsconfig.node.json" }]
+}
diff --git a/tsconfig.node.json b/tsconfig.node.json
new file mode 100644
index 0000000..9d31e2a
--- /dev/null
+++ b/tsconfig.node.json
@@ -0,0 +1,9 @@
+{
+ "compilerOptions": {
+ "composite": true,
+ "module": "ESNext",
+ "moduleResolution": "Node",
+ "allowSyntheticDefaultImports": true
+ },
+ "include": ["vite.config.ts"]
+}
diff --git a/vite.config.ts b/vite.config.ts
new file mode 100644
index 0000000..bfc2f1b
--- /dev/null
+++ b/vite.config.ts
@@ -0,0 +1,10 @@
+import { defineConfig } from 'vite'
+import react from '@vitejs/plugin-react'
+
+// https://vitejs.dev/config/
+export default defineConfig({
+ plugins: [react()],
+ server: {
+ port: 3000,
+ }
+})