visualize wave
Kvadratni committed Dec 20, 2024
1 parent 0cac75a commit 20deb97
Showing 2 changed files with 195 additions and 131 deletions.
240 changes: 134 additions & 106 deletions ui/desktop/src/components/AudioRecorder.tsx
@@ -5,15 +5,132 @@ import { getApiUrl } from "../config";
import WaveSurfer from 'wavesurfer.js';
import RecordPlugin from 'wavesurfer.js/dist/plugins/record.esm.js';

interface AudioRecorderProps {
onTranscription: (text: string) => void;
}

export function AudioRecorder({ onTranscription }: AudioRecorderProps) {
const [isRecording, setIsRecording] = useState(false);
const [progress, setProgress] = useState('00:00');
// Separate button component
export const AudioButton = ({
isRecording,
onClick,
}: {
isRecording: boolean;
onClick: () => void;
}) => (
<Button
type="button"
size="icon"
variant="ghost"
onClick={onClick}
className={`text-indigo-600 dark:text-indigo-300 hover:text-indigo-700 dark:hover:text-indigo-200 hover:bg-indigo-100 dark:hover:bg-indigo-800 flex-shrink-0`}
>
{isRecording ? <Square size={20} /> : <Mic size={20} />}
</Button>
);

// Separate waveform component with its own state management
export const AudioWaveform = React.forwardRef<
HTMLDivElement,
{
isRecording: boolean;
onRecordEnd?: (blob: Blob) => void;

[CI check failure, GitHub Actions / build, line 32: 'Blob' is not defined]
className?: string;
}
>(({ isRecording, onRecordEnd, className = '' }, ref) => {
const wavesurferRef = useRef<WaveSurfer | null>(null);
const recordPluginRef = useRef<any>(null);
const [progress, setProgress] = useState('00:00');

const handleRecordProgress = useCallback((time: number) => {
const minutes = Math.floor((time % 3600000) / 60000);
const seconds = Math.floor((time % 60000) / 1000);
const formattedTime = [minutes, seconds]
.map(v => v < 10 ? '0' + v : v)
.join(':');
setProgress(formattedTime);
}, []);

useEffect(() => {
const container = ref as React.RefObject<HTMLDivElement>;
if (!container.current) return;

const wavesurfer = WaveSurfer.create({
container: container.current,
waveColor: 'rgb(99, 102, 241)', // Indigo-600
progressColor: 'rgb(79, 70, 229)', // Indigo-700
height: 26,
barWidth: 2,
barGap: 1,
barRadius: 1,
normalize: true,
minPxPerSec: 50, // Increase this value to make the waveform wider
});

const recordPlugin = wavesurfer.registerPlugin(
RecordPlugin.create({
renderRecordedAudio: false,
scrollingWaveform: false,
continuousWaveform: true,
continuousWaveformDuration: 30,
})
);

if (onRecordEnd) {
recordPlugin.on('record-end', onRecordEnd);
}
recordPlugin.on('record-progress', handleRecordProgress);

wavesurferRef.current = wavesurfer;
recordPluginRef.current = recordPlugin;

return () => {
wavesurfer.destroy();
wavesurferRef.current = null;
recordPluginRef.current = null;
};
}, [ref, onRecordEnd, handleRecordProgress]);

useEffect(() => {
const recordPlugin = recordPluginRef.current;
if (!recordPlugin) return;

const handleRecording = async () => {
if (isRecording) {
try {
await recordPlugin.startRecording();
} catch (err) {
console.error('Failed to start recording:', err);
}
} else {
try {
if (recordPlugin.isRecording()) {
await recordPlugin.stopRecording();
setProgress('00:00');
}
} catch (err) {
console.error('Failed to stop recording:', err);
}
}
};

handleRecording();
}, [isRecording]);

return (
<div
className={`flex-grow transition-all duration-200 ${
isRecording ? 'opacity-100 h-[26px]' : 'opacity-0 h-0'
} ${className}`}
>
<div ref={ref} className="w-full h-full" />
</div>
);
});

AudioWaveform.displayName = 'AudioWaveform';

// Main AudioRecorder component that combines both
export function AudioRecorder({ onTranscription, containerClassName }: {
onTranscription: (text: string) => void;
containerClassName?: string;
}) {
const [isRecording, setIsRecording] = useState(false);
const micContainerRef = useRef<HTMLDivElement>(null);

const handleRecordEnd = useCallback(async (blob: Blob) => {

[CI check failure, GitHub Actions / build, line 136: 'Blob' is not defined]
@@ -43,108 +160,19 @@ export function AudioRecorder({ onTranscription }: AudioRecorderProps) {
}
}, [onTranscription]);

const handleRecordProgress = useCallback((time: number) => {
const minutes = Math.floor((time % 3600000) / 60000);
const seconds = Math.floor((time % 60000) / 1000);
const formattedTime = [minutes, seconds]
.map(v => v < 10 ? '0' + v : v)
.join(':');
setProgress(formattedTime);
const handleToggleRecording = useCallback(() => {
setIsRecording(prev => !prev);
}, []);

useEffect(() => {
let wavesurfer: WaveSurfer | null = null;
let recordPlugin: any = null;

const initializeWaveSurfer = () => {
if (!micContainerRef.current) return;

// Create new WaveSurfer instance
wavesurfer = WaveSurfer.create({
container: micContainerRef.current,
waveColor: 'rgb(99, 102, 241)', // Indigo-600
progressColor: 'rgb(79, 70, 229)', // Indigo-700
height: 40,
});

// Initialize Record plugin
recordPlugin = wavesurfer.registerPlugin(
RecordPlugin.create({
renderRecordedAudio: false,
scrollingWaveform: false,
continuousWaveform: true,
continuousWaveformDuration: 30,
})
);

// Set up event handlers
recordPlugin.on('record-end', handleRecordEnd);
recordPlugin.on('record-progress', handleRecordProgress);

// Store references
wavesurferRef.current = wavesurfer;
recordPluginRef.current = recordPlugin;
};

initializeWaveSurfer();

// Cleanup
return () => {
if (wavesurfer) {
wavesurfer.destroy();
}
wavesurferRef.current = null;
recordPluginRef.current = null;
};
}, [handleRecordEnd, handleRecordProgress]);

const startRecording = async () => {
console.log('Attempting to start recording...');
try {
if (!recordPluginRef.current) {
console.error('Record plugin not initialized');
return;
}

await recordPluginRef.current.startRecording();
console.log('Recording started!');
setIsRecording(true);
} catch (err) {
console.error('Failed to start recording:', err);
}
};

const stopRecording = async () => {
if (!recordPluginRef.current || !isRecording) return;

console.log('Stopping recording...');
try {
await recordPluginRef.current.stopRecording();
setIsRecording(false);
setProgress('00:00');
} catch (err) {
console.error('Failed to stop recording:', err);
}
};

return (
<div className="flex flex-col gap-2">
<div className="flex items-center gap-2">
<div
ref={micContainerRef}
className={`waveform transition-opacity duration-200 ${isRecording ? 'opacity-100'
: 'opacity-0'}`}
/>
<Button
type="button"
size="icon"
variant="ghost"
onClick={isRecording ? stopRecording : startRecording}
className={`text-indigo-600 dark:text-indigo-300 hover:text-indigo-700 dark:hover:text-indigo-200 hover:bg-indigo-100 dark:hover:bg-indigo-800`}
>
{isRecording ? <Square size={20}/> : <Mic size={20}/>}
</Button>
</div>
<div className={`flex items-center gap-2 w-full ${containerClassName || ''}`}>
<AudioWaveform
ref={micContainerRef}
isRecording={isRecording}
onRecordEnd={handleRecordEnd}
className="flex-grow"
/>
<AudioButton isRecording={isRecording} onClick={handleToggleRecording} />
</div>
);
}
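
For orientation (an added note, not part of the diff): this commit splits the old monolithic recorder into three exports: AudioButton, AudioWaveform, and the composed AudioRecorder. A minimal usage sketch of the composed component follows; the ChatFooter host and its props are hypothetical, for illustration only.

import React from 'react';
import { AudioRecorder } from './AudioRecorder';

// Hypothetical host component; only AudioRecorder comes from this commit.
function ChatFooter({ onText }: { onText: (text: string) => void }) {
  return (
    <AudioRecorder
      onTranscription={onText}   // called with the transcribed text on success
      containerClassName="px-2"  // optional layout hook introduced by this commit
    />
  );
}

export default ChatFooter;

Consumers that need finer control can instead compose AudioButton and AudioWaveform directly, as Input.tsx does below.
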
86 changes: 61 additions & 25 deletions ui/desktop/src/components/Input.tsx
@@ -3,7 +3,8 @@ import { Button } from './ui/button';
import Send from './ui/Send';
import Stop from './ui/Stop';
import { Paperclip } from 'lucide-react';
import { AudioRecorder } from './AudioRecorder';
import { getApiUrl } from "../config";
import { AudioButton, AudioWaveform } from './AudioRecorder';

interface InputProps {
handleSubmit: (e: React.FormEvent) => void;
@@ -27,7 +28,9 @@ export default function Input({
onStop
}: InputProps) {
const [value, setValue] = useState('');
const [isRecording, setIsRecording] = useState(false);
const textAreaRef = useRef<HTMLTextAreaElement>(null);
const waveformRef = useRef<HTMLDivElement>(null);

useEffect(() => {
if (textAreaRef.current && !disabled) {
@@ -81,36 +84,69 @@
}
};

const handleTranscription = (text: string) => {
if(text != undefined) {
setValue(text);
textAreaRef.current?.focus();
const handleRecordEnd = async (blob: Blob) => {

[CI check failure, GitHub Actions / build, line 87: 'Blob' is not defined]
try {
console.log('Recording completed, size:', blob.size, 'type:', blob.type);
const formData = new FormData();

[CI check failure, GitHub Actions / build, line 90: 'FormData' is not defined]
formData.append('audio', blob, 'audio.webm');

const response = await fetch(getApiUrl('/transcribe'), {
method: 'POST',
body: formData,
});

if (!response.ok) {
throw new Error('Transcription failed');
}

const result = await response.json();
console.log('Received response:', result);
if (result.success) {
setValue(result.text);
textAreaRef.current?.focus();
} else {
console.error('Transcription error:', result.error);
}
} catch (err) {
console.error('Transcription error:', err);
}
};

return (
<form onSubmit={onFormSubmit} className="flex relative bg-white dark:bg-gray-800 h-auto px-[16px] pr-[68px] py-[1rem]">
<textarea
autoFocus
id="dynamic-textarea"
placeholder="What should goose do?"
value={value}
onChange={handleChange}
onKeyDown={handleKeyDown}
disabled={disabled}
ref={textAreaRef}
rows={1}
style={{
minHeight: `${minHeight}px`,
maxHeight: `${maxHeight}px`,
overflowY: 'auto'
}}
className={`w-full outline-none border-none focus:ring-0 bg-transparent p-0 text-14 resize-none ${
disabled ? 'cursor-not-allowed opacity-50' : ''
}`}
/>
<div className="relative flex-grow">
<textarea
autoFocus
id="dynamic-textarea"
placeholder="What should goose do?"
value={value}
onChange={handleChange}
onKeyDown={handleKeyDown}
disabled={disabled}
ref={textAreaRef}
rows={1}
style={{
minHeight: `${minHeight}px`,
maxHeight: `${maxHeight}px`,
overflowY: 'auto'
}}
className={`w-full outline-none border-none focus:ring-0 bg-transparent p-0 text-14 resize-none ${
disabled ? 'cursor-not-allowed opacity-50' : ''
}
${!isRecording ? 'opacity-100' : 'opacity-0'}`}
/>
<AudioWaveform
ref={waveformRef}
isRecording={isRecording}
onRecordEnd={handleRecordEnd}
className="absolute left-0 right-0 bottom-0 z-5 overflow-hidden w-5/6"
/>
</div>
<div className="absolute right-[68px] top-1/2 -translate-y-1/2 flex items-center gap-2">
<AudioRecorder onTranscription={handleTranscription} />
<AudioButton
isRecording={isRecording}
onClick={() => setIsRecording(!isRecording)}
/>
<Button
type="button"
size="icon"
[diff truncated]
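
A note on the CI failures flagged in the annotations above: ESLint's no-undef rule reports the browser globals Blob and FormData as undefined, which usually means the lint setup is not configured for a browser environment. One possible fix, assuming the project uses ESLint's classic (non-flat) config where inline environment comments apply, is a directive at the top of each affected file:

/* eslint-env browser */
// Or declare only the globals these components use:
/* global Blob, FormData */

// With either directive in place, the calls from the diff lint cleanly:
const blob: Blob = new Blob(['audio bytes'], { type: 'audio/webm' });
const form = new FormData();
form.append('audio', blob, 'audio.webm');

Alternatively, setting env: { browser: true } in the project's ESLint config covers every file at once.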
