Release 1.1.0: Add Import Audio Files feature

- New Import tab with drag-and-drop support for audio files
- Support for 8 formats: MP3, MP4, WAV, M4A, FLAC, OGG, AAC, WMA
- File metadata display (duration, size, format)
- Editable meeting titles
- Progress tracking with visual indicators
- Smart template selection
- Auto-navigation to Transcription view
- Updated README with BlackHole requirement and Teams config
- Added get_audio_metadata Rust command
- Version bump to 1.1.0
This commit is contained in:
michael.borak
2026-01-21 09:08:56 +01:00
parent 79f509951c
commit a06e473e85
12 changed files with 1041 additions and 171 deletions

View File

@@ -7,6 +7,7 @@ import TranscriptionView from "./components/TranscriptionView";
import Tabs from "./components/Tabs";
import MeetingsView from "./components/MeetingsView";
import HistoryView from "./components/HistoryView";
import Import from "./components/Import";
import ToastContainer, { ToastMessage, ToastType } from "./components/ui/Toast";
export interface PromptTemplate {
@@ -24,8 +25,8 @@ export interface EmailTemplate {
}
function App() {
const [view, setView] = useState<'recorder' | 'settings' | 'transcription' | 'meetings' | 'history'>('recorder');
const [lastTab, setLastTab] = useState<'recorder' | 'transcription' | 'meetings' | 'history'>('recorder');
const [view, setView] = useState<'recorder' | 'settings' | 'transcription' | 'meetings' | 'history' | 'import'>('recorder');
const [lastTab, setLastTab] = useState<'recorder' | 'transcription' | 'meetings' | 'history' | 'import'>('recorder');
// Auto-start recording state to handle "Join & Record" transition
@@ -311,6 +312,14 @@ Thanks!`
}
};
const handleRenameHistory = (id: string, newSubject: string) => {
const newHistory = history.map(item =>
item.id === id ? { ...item, subject: newSubject } : item
);
setHistory(newHistory);
localStorage.setItem('infomaniak_history', JSON.stringify(newHistory));
};
const handleDeleteHistory = (id: string) => {
const newHistory = history.filter(item => item.id !== id);
setHistory(newHistory);
@@ -343,7 +352,7 @@ Thanks!`
</div>
<Tabs
currentTab={view as 'recorder' | 'transcription' | 'meetings' | 'history'}
currentTab={view as 'recorder' | 'transcription' | 'meetings' | 'history' | 'import'}
onTabChange={(t) => setView(t)}
/>
</div>
@@ -410,6 +419,7 @@ Thanks!`
history={history}
onLoad={handleLoadHistory}
onDelete={handleDeleteHistory}
onRename={handleRenameHistory}
/>
)}
@@ -429,6 +439,23 @@ Thanks!`
/>
)}
{view === 'import' && (
<Import
apiKey={apiKey}
productId={productId}
prompts={prompts}
selectedModel={selectedModel}
onSaveToHistory={handleSaveToHistory}
onComplete={() => setView('transcription')}
addToast={addToast}
setTranscription={setTranscription}
setSummary={setSummary}
/>
)}
{view === 'settings' && (

View File

@@ -1,4 +1,5 @@
import { FileText, Trash2, Calendar } from 'lucide-react';
import { FileText, Trash2, Calendar, Pencil, Check, X } from 'lucide-react';
import { useState } from 'react';
interface HistoryItem {
id: string;
@@ -13,9 +14,30 @@ interface HistoryViewProps {
history: HistoryItem[];
onLoad: (item: HistoryItem) => void;
onDelete: (id: string) => void;
onRename: (id: string, newSubject: string) => void;
}
export default function HistoryView({ history, onLoad, onDelete }: HistoryViewProps) {
export default function HistoryView({ history, onLoad, onDelete, onRename }: HistoryViewProps) {
const [editingId, setEditingId] = useState<string | null>(null);
const [editValue, setEditValue] = useState("");
const startEditing = (item: HistoryItem) => {
setEditingId(item.id);
setEditValue(item.subject || "Untitled Recording");
};
const saveEdit = () => {
if (editingId && editValue.trim()) {
onRename(editingId, editValue.trim());
setEditingId(null);
}
};
const cancelEdit = () => {
setEditingId(null);
setEditValue("");
};
return (
<div className="flex flex-col w-full h-full bg-background p-6">
<h1 className="text-2xl font-bold mb-6 flex items-center gap-2">
@@ -33,26 +55,58 @@ export default function HistoryView({ history, onLoad, onDelete }: HistoryViewPr
{history.map(item => (
<div key={item.id} className="bg-card border border-border rounded-xl p-4 hover:shadow-md transition-all group">
<div className="flex justify-between items-start">
<div
className="flex-1 cursor-pointer"
onClick={() => onLoad(item)}
>
<h3 className="text-lg font-semibold group-hover:text-primary transition-colors mb-1">
{item.subject || "Untitled Recording"}
</h3>
<div className="flex items-center gap-2 text-xs text-muted-foreground mb-2">
<div className="flex-1">
{editingId === item.id ? (
<div className="flex items-center gap-2 mb-2" onClick={(e) => e.stopPropagation()}>
<input
autoFocus
type="text"
className="flex-1 bg-background border border-input rounded px-2 py-1 text-sm font-semibold focus:outline-none focus:ring-1 focus:ring-ring"
value={editValue}
onChange={(e) => setEditValue(e.target.value)}
onKeyDown={(e) => {
if (e.key === 'Enter') saveEdit();
if (e.key === 'Escape') cancelEdit();
}}
/>
<button onClick={saveEdit} className="p-1 text-green-500 hover:bg-green-500/10 rounded">
<Check size={16} />
</button>
<button onClick={cancelEdit} className="p-1 text-muted-foreground hover:bg-muted rounded">
<X size={16} />
</button>
</div>
) : (
<div
className="cursor-pointer"
onClick={() => onLoad(item)}
>
<h3 className="text-lg font-semibold group-hover:text-primary transition-colors mb-1 flex items-center gap-2">
{item.subject || "Untitled Recording"}
<button
onClick={(e) => { e.stopPropagation(); startEditing(item); }}
className="opacity-0 group-hover:opacity-100 text-muted-foreground hover:text-foreground p-1 rounded hover:bg-muted transition-all"
title="Rename"
>
<Pencil size={14} />
</button>
</h3>
</div>
)}
<div className="flex items-center gap-2 text-xs text-muted-foreground mb-2" onClick={() => !editingId && onLoad(item)}>
<Calendar size={12} />
{item.date}
{item.filename && <span className="bg-secondary px-1.5 py-0.5 rounded text-[10px] font-mono">{item.filename}</span>}
</div>
<p className="text-sm text-foreground/70 line-clamp-2">
<p className="text-sm text-foreground/70 line-clamp-2 cursor-pointer" onClick={() => !editingId && onLoad(item)}>
{item.summary ? item.summary.substring(0, 150) + "..." : "No summary available."}
</p>
</div>
<button
onClick={(e) => { e.stopPropagation(); onDelete(item.id); }}
className="text-muted-foreground hover:text-destructive p-2 rounded-lg hover:bg-destructive/10 transition-colors opacity-0 group-hover:opacity-100"
className="text-muted-foreground hover:text-destructive p-2 rounded-lg hover:bg-destructive/10 transition-colors opacity-0 group-hover:opacity-100 ml-2"
title="Delete"
>
<Trash2 size={18} />

385
src/components/Import.tsx Normal file
View File

@@ -0,0 +1,385 @@
import React, { useState, useCallback } from 'react';
import { Upload, FileAudio, X, Check, Loader2 } from 'lucide-react';
import { invoke } from "@tauri-apps/api/core";
import { open } from '@tauri-apps/plugin-dialog';
import logo from '../assets/logo.png';
// Summarization template; `keywords` drive the smart auto-selection below.
interface PromptTemplate {
  id: string;
  name: string;
  content: string;
  keywords?: string[];
}
// Props for the Import view, all supplied by App.tsx.
interface ImportProps {
  apiKey: string;
  productId: string;
  prompts: PromptTemplate[];
  selectedModel: string;
  // Persists a finished (transcription, summary) pair into history.
  onSaveToHistory: (transcription: string, summary: string) => void;
  onComplete: () => void; // Navigate to Transcription view
  addToast: (msg: string, type: 'success' | 'error' | 'info', duration?: number) => void;
  setTranscription: (text: string) => void;
  setSummary: (text: string) => void;
}
// Shape returned by the `get_audio_metadata` Rust command.
interface AudioMetadata {
  duration: number; // seconds — presumably; confirm against the Rust side
  size: number;     // bytes
  format: string;   // e.g. "mp3"
}
// Linear pipeline states for the import flow.
type ProcessingStage = 'idle' | 'validating' | 'transcribing' | 'summarizing' | 'complete';
// Lowercase extensions accepted by validation and the file-picker filter.
const SUPPORTED_FORMATS = ['mp3', 'mp4', 'm4a', 'wav', 'flac', 'ogg', 'aac', 'wma'];
/** Strip directory components (either '/' or '\\') and the extension from a path. */
const extractFilename = (path: string): string => {
  const segments = path.split(/[/\\]/);
  const filename = segments[segments.length - 1];
  return filename.replace(/\.[^/.]+$/, ''); // Remove extension
};

/** Format a duration in seconds as "m:ss". */
const formatDuration = (seconds: number): string => {
  const mins = Math.floor(seconds / 60);
  const secs = Math.floor(seconds % 60);
  return `${mins}:${secs.toString().padStart(2, '0')}`;
};

/** Format a byte count as KB below 1 MiB, MB otherwise. */
const formatSize = (bytes: number): string => {
  if (bytes < 1024 * 1024) {
    return `${(bytes / 1024).toFixed(1)} KB`;
  }
  return `${(bytes / (1024 * 1024)).toFixed(1)} MB`;
};

/** True when the path's extension is one of SUPPORTED_FORMATS (case-insensitive). */
const isSupportedFormat = (filePath: string): boolean => {
  const extension = filePath.split('.').pop()?.toLowerCase();
  return !!extension && SUPPORTED_FORMATS.includes(extension);
};

/**
 * Import view: lets the user pick or drag-and-drop an audio/video file,
 * shows its metadata, then runs transcription + summarization through the
 * Tauri backend, saves the result to history, and navigates to the
 * Transcription view when done.
 */
const Import: React.FC<ImportProps> = ({
  apiKey,
  productId,
  prompts,
  selectedModel,
  onSaveToHistory,
  onComplete,
  addToast,
  setTranscription,
  setSummary
}) => {
  const [isDragging, setIsDragging] = useState(false);
  const [selectedFile, setSelectedFile] = useState<string | null>(null);
  const [metadata, setMetadata] = useState<AudioMetadata | null>(null);
  // Editable title defaulted from the filename.
  // NOTE(review): the title is currently not forwarded to onSaveToHistory —
  // confirm whether history entries should use it as their subject.
  const [meetingTitle, setMeetingTitle] = useState('');
  const [stage, setStage] = useState<ProcessingStage>('idle');
  const [selectedPromptId, setSelectedPromptId] = useState<string>('');

  // Default the template selector to the first available prompt.
  React.useEffect(() => {
    if (prompts.length > 0 && !selectedPromptId) {
      setSelectedPromptId(prompts[0].id);
    }
  }, [prompts, selectedPromptId]);

  /** Validate the chosen file, load its metadata, and prime the form. */
  const handleFileSelect = useCallback(async (filePath: string) => {
    if (!isSupportedFormat(filePath)) {
      addToast(`Unsupported format. Supported: ${SUPPORTED_FORMATS.join(', ').toUpperCase()}`, 'error', 5000);
      return;
    }
    setStage('validating');
    setSelectedFile(filePath);
    setMeetingTitle(extractFilename(filePath));
    try {
      // Backed by the get_audio_metadata Rust command.
      const meta = await invoke<AudioMetadata>('get_audio_metadata', { filePath });
      setMetadata(meta);
      setStage('idle');
      addToast('File loaded successfully', 'success', 2000);
    } catch (e) {
      console.error('Metadata error:', e);
      // Metadata is cosmetic only — allow processing to continue without it.
      setMetadata(null);
      setStage('idle');
    }
  }, [addToast]);

  // Deps fixed: the original passed [] and so froze the first render's
  // closures over handleFileSelect/addToast.
  const handleDrop = useCallback((e: React.DragEvent) => {
    e.preventDefault();
    setIsDragging(false);
    const files = Array.from(e.dataTransfer.files);
    if (files.length > 0) {
      // NOTE(review): relies on Tauri exposing `path` on the DOM File object —
      // confirm this holds for the Tauri version in use; typed cast replaces
      // the previous @ts-ignore.
      const filePath = (files[0] as File & { path?: string }).path;
      if (filePath) {
        handleFileSelect(filePath);
      } else {
        addToast('Failed to get file path', 'error');
      }
    }
  }, [addToast, handleFileSelect]);

  /** Open the native file picker, filtered to the supported extensions. */
  const handleManualSelect = async () => {
    try {
      const selected = await open({
        multiple: false,
        filters: [{
          name: 'Audio/Video',
          extensions: SUPPORTED_FORMATS
        }]
      });
      if (selected && typeof selected === 'string') {
        handleFileSelect(selected);
      }
    } catch (e) {
      console.error('File picker error:', e);
      addToast('Failed to open file picker', 'error');
    }
  };

  /**
   * Run the full pipeline: transcribe, smart-select a template by keyword
   * matches, summarize, save to history, then navigate away.
   */
  const handleProcess = async () => {
    if (!selectedFile) return;
    if (!apiKey || !productId) {
      addToast('Please configure API key in Settings', 'error');
      return;
    }
    try {
      setStage('transcribing');
      const transText = await invoke<string>('transcribe_audio', {
        filePath: selectedFile,
        apiKey,
        productId
      });
      // Bail out BEFORE publishing to shared state when nothing was heard
      // (the original set an empty transcription first, then returned).
      if (!transText || transText.trim().length === 0) {
        addToast('No speech detected in file', 'error');
        setStage('idle');
        return;
      }
      setTranscription(transText);
      // Smart template selection: pick the prompt whose keywords appear in
      // the transcript most often (mirrors the logic in Recorder.tsx).
      let activePrompt = prompts.find(p => p.id === selectedPromptId);
      const lowerText = transText.toLowerCase();
      let bestMatchId = selectedPromptId;
      let maxMatches = 0;
      for (const p of prompts) {
        if (!p.keywords) continue;
        let matches = 0;
        for (const kw of p.keywords) {
          if (lowerText.includes(kw.toLowerCase())) {
            matches++;
          }
        }
        if (matches > maxMatches) {
          maxMatches = matches;
          bestMatchId = p.id;
        }
      }
      if (bestMatchId !== selectedPromptId) {
        const newPrompt = prompts.find(p => p.id === bestMatchId);
        if (newPrompt) {
          addToast(`Smart Select: Switched to "${newPrompt.name}"`, 'info', 3000);
          activePrompt = newPrompt;
        }
      }
      const promptContent = activePrompt ? activePrompt.content : "Summarize this.";
      setStage('summarizing');
      const sumText = await invoke<string>('summarize_text', {
        text: transText,
        apiKey,
        productId,
        prompt: promptContent,
        model: selectedModel
      });
      setSummary(sumText);
      // Save to history
      onSaveToHistory(transText, sumText);
      setStage('complete');
      addToast('Import complete!', 'success', 3000);
      // Brief pause so the user sees the "Complete!" state before navigating.
      setTimeout(() => {
        onComplete();
      }, 1000);
    } catch (e) {
      console.error('Processing error:', e);
      // Surface a readable message rather than "[object Object]".
      addToast(`Error: ${e instanceof Error ? e.message : String(e)}`, 'error');
      setStage('idle');
    }
  };

  /** Clear the selection and return the view to its initial state. */
  const handleReset = () => {
    setSelectedFile(null);
    setMetadata(null);
    setMeetingTitle('');
    setStage('idle');
  };

  /** Icon/text/color for the current pipeline stage, or null when idle. */
  const getStageInfo = () => {
    switch (stage) {
      case 'validating': return { icon: Loader2, text: 'Validating file...', color: 'text-blue-500' };
      case 'transcribing': return { icon: Loader2, text: 'Transcribing audio...', color: 'text-purple-500' };
      case 'summarizing': return { icon: Loader2, text: 'Generating summary...', color: 'text-green-500' };
      case 'complete': return { icon: Check, text: 'Complete!', color: 'text-green-500' };
      default: return null;
    }
  };

  const stageInfo = getStageInfo();
  const isProcessing = stage !== 'idle' && stage !== 'complete';

  return (
    <div className="flex flex-col w-full h-full bg-background relative">
      {/* Header */}
      <div className="w-full flex justify-center items-center p-4 shrink-0">
        <img src={logo} alt="Logo" className="h-10 object-contain" />
      </div>
      {/* Main Content */}
      <div className="flex-1 overflow-y-auto px-6 pb-6 flex flex-col items-center">
        <h1 className="text-xl font-bold mb-2 text-foreground">Import Audio File</h1>
        <p className="text-muted-foreground mb-6 text-center text-sm">
          Upload a recording for transcription and summarization
        </p>
        {/* Drag & Drop Zone */}
        <div
          onDragOver={(e) => { e.preventDefault(); setIsDragging(true); }}
          onDragLeave={() => setIsDragging(false)}
          onDrop={handleDrop}
          className={`w-full max-w-md border-2 border-dashed rounded-lg p-8 mb-6 transition-all ${isDragging
            ? 'border-primary bg-primary/5 scale-105'
            : selectedFile
              ? 'border-green-500 bg-green-500/5'
              : 'border-border bg-secondary/30 hover:border-primary/50'
            }`}
        >
          <div className="flex flex-col items-center justify-center gap-4">
            {selectedFile ? (
              <>
                <FileAudio size={48} className="text-green-500" />
                <div className="text-center">
                  <p className="font-semibold text-foreground">{meetingTitle}</p>
                  {metadata && (
                    <p className="text-xs text-muted-foreground mt-1">
                      {formatDuration(metadata.duration)} {formatSize(metadata.size)} {metadata.format.toUpperCase()}
                    </p>
                  )}
                </div>
                <button
                  onClick={handleReset}
                  className="text-xs text-muted-foreground hover:text-foreground flex items-center gap-1"
                >
                  <X size={14} /> Change file
                </button>
              </>
            ) : (
              <>
                <Upload size={48} className="text-muted-foreground" />
                <div className="text-center">
                  <p className="font-semibold text-foreground">Drag & Drop audio file</p>
                  <p className="text-xs text-muted-foreground mt-1">
                    or click below to browse
                  </p>
                </div>
                <button
                  onClick={handleManualSelect}
                  disabled={isProcessing}
                  className="px-4 py-2 bg-primary text-primary-foreground rounded-lg hover:bg-primary/90 disabled:opacity-50 text-sm font-semibold transition-all"
                >
                  Select File
                </button>
                <p className="text-xs text-muted-foreground">
                  Supported: MP3, MP4, WAV, M4A, FLAC, OGG, AAC, WMA
                </p>
              </>
            )}
          </div>
        </div>
        {/* Configuration Section */}
        {selectedFile && (
          <div className="w-full max-w-md space-y-4">
            {/* Meeting Title */}
            <div>
              <label className="text-xs font-semibold text-muted-foreground uppercase tracking-wider block mb-1">
                Meeting Title
              </label>
              <input
                type="text"
                value={meetingTitle}
                onChange={(e) => setMeetingTitle(e.target.value)}
                disabled={isProcessing}
                className="w-full p-2 text-sm bg-secondary rounded border border-border outline-none focus:ring-2 focus:ring-primary disabled:opacity-50"
                placeholder="Enter meeting title..."
              />
            </div>
            {/* AI Template */}
            <div>
              <label className="text-xs font-semibold text-muted-foreground uppercase tracking-wider block mb-1">
                AI Template
              </label>
              <select
                value={selectedPromptId}
                onChange={(e) => setSelectedPromptId(e.target.value)}
                disabled={isProcessing || prompts.length === 0}
                className="w-full p-2 text-sm bg-secondary rounded border border-border outline-none focus:ring-2 focus:ring-primary disabled:opacity-50"
              >
                {prompts.map(p => (
                  <option key={p.id} value={p.id}>{p.name}</option>
                ))}
                {prompts.length === 0 && <option value="">No templates</option>}
              </select>
            </div>
            {/* Process Button */}
            <button
              onClick={handleProcess}
              disabled={!selectedFile || isProcessing || !apiKey}
              className="w-full py-3 text-base font-semibold bg-primary text-primary-foreground rounded-lg hover:bg-primary/90 disabled:opacity-50 disabled:cursor-not-allowed transition-all shadow-md hover:shadow-lg flex items-center justify-center gap-2"
            >
              {isProcessing ? (
                <>
                  <Loader2 size={20} className="animate-spin" />
                  Processing...
                </>
              ) : (
                <>
                  <Upload size={20} />
                  Transcribe & Summarize
                </>
              )}
            </button>
            {/* Progress Indicator */}
            {stageInfo && (
              <div className="flex items-center justify-center gap-2 p-3 bg-secondary/50 rounded-lg border border-border">
                <stageInfo.icon size={16} className={`${stageInfo.color} ${stage !== 'complete' ? 'animate-spin' : ''}`} />
                <span className={`text-sm font-medium ${stageInfo.color}`}>
                  {stageInfo.text}
                </span>
              </div>
            )}
          </div>
        )}
      </div>
    </div>
  );
};
export default Import;

View File

@@ -1,4 +1,4 @@
import React, { useState, useEffect } from 'react';
import React, { useState, useEffect, useRef } from 'react';
import { Mic, Square, Users, Headphones } from 'lucide-react';
import { invoke } from "@tauri-apps/api/core";
import { listen } from '@tauri-apps/api/event';
@@ -58,6 +58,10 @@ const Recorder: React.FC<RecorderProps> = ({
const [isRecording, setIsRecording] = useState(false);
const [isStopping, setIsStopping] = useState(false); // New lock state
const [isPaused, setIsPaused] = useState(false);
const [isWaiting, setIsWaiting] = useState(false); // New state for Auto-Start
const [autoStartEnabled, setAutoStartEnabled] = useState(false); // Toggle state
const [status, setStatus] = useState<string>('Ready to record');
const [selectedDevice, setSelectedDevice] = useState<string>('');
const [selectedPromptId, setSelectedPromptId] = useState<string>('');
@@ -149,19 +153,33 @@ const Recorder: React.FC<RecorderProps> = ({
const startRecording = async (deviceIdOverride?: string) => {
try {
setStatus('Starting...');
setStatus('Starting...');
// Check override or state
const targetDeviceId = deviceIdOverride || selectedDevice;
// Tauri converts camelCase invoke-argument keys (e.g. customFilename) to the
// snake_case parameter names expected by the Rust command, so camelCase is safe here.
await invoke('start_recording', { deviceId: targetDeviceId, savePath: savePath || null, customFilename: props.recordingSubject || null });
await invoke('start_recording', {
deviceId: targetDeviceId,
savePath: savePath || null,
customFilename: props.recordingSubject || null,
waitForSpeech: autoStartEnabled // Pass the toggle state
});
setIsRecording(true);
setIsPaused(false);
setTranscription('');
setSummary('');
setStatus('Recording...');
addToast('Recording started', 'success', 2000);
if (autoStartEnabled) {
setIsWaiting(true);
setStatus('Waiting for audio...');
addToast('Standing by for audio...', 'info', 3000);
} else {
setIsWaiting(false);
setStatus('Recording...');
addToast('Recording started', 'success', 2000);
}
} catch (e) {
console.error(e);
setStatus(`Error: ${e}`);
@@ -170,43 +188,83 @@ const Recorder: React.FC<RecorderProps> = ({
}
};
// VAD & Auto-Stop Logic
useEffect(() => {
let unlisten: () => void;
// Refs for interval access to avoid dependency cycles
const lastSpeechTimeRef = useRef<number>(Date.now());
const isStoppingRef = useRef(false);
const setupListener = async () => {
unlisten = await listen<{ is_speech: boolean, probability: number }>('vad-event', (event) => {
// Update refs when state changes
useEffect(() => {
lastSpeechTimeRef.current = lastSpeechTime;
}, [lastSpeechTime]);
useEffect(() => {
isStoppingRef.current = isStopping;
}, [isStopping]);
// 1. Event Listeners Effect (Run ONCE when recording starts)
useEffect(() => {
let unlistenVAD: () => void;
let unlistenTrigger: () => void;
const setupListeners = async () => {
if (!isRecording) return;
console.log("Setting up VAD listeners...");
// VAD Event Listener
unlistenVAD = await listen<{ is_speech: boolean, probability: number }>('vad-event', (event) => {
if (event.payload.is_speech) {
setLastSpeechTime(Date.now());
lastSpeechTimeRef.current = Date.now(); // Update ref immediately
setSilenceDuration(0);
}
});
// Auto-Start Trigger Listener
unlistenTrigger = await listen('auto-recording-triggered', () => {
console.log("Auto-Start Triggered from Backend!");
// Only trigger if we are actually waiting
setIsWaiting((prev) => {
if (prev) {
addToast("Audio detected! Recording started.", 'success', 4000);
return false;
}
return prev;
});
setStatus('Recording (Auto-Started)...');
setLastSpeechTime(Date.now());
});
};
if (isRecording && !isPaused) {
setupListener();
setLastSpeechTime(Date.now()); // Reset on start
if (isRecording) {
setupListeners();
}
const interval = setInterval(() => {
if (isRecording && !isPaused) {
const diff = (Date.now() - lastSpeechTime) / 1000;
setSilenceDuration(diff);
return () => {
// Cleanup listeners
if (unlistenVAD) unlistenVAD();
if (unlistenTrigger) unlistenTrigger();
};
}, [isRecording, addToast]); // Dependencies for listener setup
// Auto-stop after 30 seconds of silence
if (diff > 30 && !isStopping) { // Check lock
console.log("Auto-stopping due to silence");
addToast("Auto-stopping (Silence detected)", "info", 3000);
stopRecording();
}
// Auto-Stop Interval Effect
useEffect(() => {
if (!isRecording || isPaused || isWaiting) return;
const interval = setInterval(() => {
const now = Date.now();
const diff = (now - lastSpeechTimeRef.current) / 1000;
setSilenceDuration(diff);
// Auto-stop after 30 seconds of silence
if (diff > 30 && !isStoppingRef.current) {
console.log("Auto-stopping due to silence");
addToast("Auto-stopping (Silence detected)", "info", 3000);
stopRecording();
}
}, 1000);
return () => {
if (unlisten) unlisten();
clearInterval(interval);
};
}, [isRecording, isPaused, lastSpeechTime]);
return () => clearInterval(interval);
}, [isRecording, isPaused, isWaiting, addToast]); // Dependencies for interval lifecycle
// Handle Auto Start Prop
useEffect(() => {
@@ -273,6 +331,7 @@ const Recorder: React.FC<RecorderProps> = ({
try {
setIsRecording(false);
setIsPaused(false);
setIsWaiting(false); // Reset waiting state
setStatus('Processing...');
const filePath = await invoke<string>('stop_recording');
@@ -357,6 +416,8 @@ const Recorder: React.FC<RecorderProps> = ({
}
};
return (
<div className="flex flex-col w-full h-full bg-background relative">
{/* Fixed Header - Reduced padding */}
@@ -367,9 +428,9 @@ const Recorder: React.FC<RecorderProps> = ({
{/* Scrollable Content - Reduced spacing */}
<div className="flex-1 overflow-y-auto px-6 pb-6 flex flex-col items-center">
<div className="mb-4 relative shrink-0">
<div className={`w-24 h-24 rounded-full flex items-center justify-center transition-all duration-300 ${isRecording ? (isPaused ? 'bg-yellow-500/10' : 'bg-red-500/10 animate-pulse') : 'bg-secondary'}`}>
<div className={`w-24 h-24 rounded-full flex items-center justify-center transition-all duration-300 ${isRecording ? (isWaiting ? 'bg-blue-500/20' : isPaused ? 'bg-yellow-500/10' : 'bg-red-500/10 animate-pulse') : 'bg-secondary'}`}>
{isRecording ? (
<div className={`w-16 h-16 rounded-full flex items-center justify-center shadow-[0_0_20px_rgba(239,68,68,0.5)] ${isPaused ? 'bg-yellow-500' : 'bg-red-500'}`}>
<div className={`w-16 h-16 rounded-full flex items-center justify-center shadow-[0_0_20px_rgba(239,68,68,0.5)] ${isWaiting ? 'bg-blue-500 animate-pulse' : isPaused ? 'bg-yellow-500' : 'bg-red-500'}`}>
<Mic size={32} className="text-white animate-bounce" />
</div>
) : (
@@ -381,12 +442,12 @@ const Recorder: React.FC<RecorderProps> = ({
</div>
<h1 className="text-xl font-bold mb-1 text-foreground">
{isRecording ? (isPaused ? 'Paused' : 'Listening...') : 'Ready to Record'}
{isRecording ? (isWaiting ? 'Waiting for Audio...' : isPaused ? 'Paused' : 'Listening...') : 'Ready to Record'}
</h1>
<p className="text-muted-foreground mb-4 text-center text-xs h-5">
{status}
{isRecording && !isPaused && silenceDuration > 10 && (
{isRecording && !isPaused && !isWaiting && silenceDuration > 10 && (
<span className="block text-xs text-yellow-500 mt-0.5 opacity-80">
Silence detected: {Math.floor(silenceDuration)}s
</span>
@@ -395,30 +456,46 @@ const Recorder: React.FC<RecorderProps> = ({
<div className="w-full max-w-sm space-y-3 mb-4 shrink-0">
{!isRecording ? (
<button
onClick={() => startRecording()}
disabled={!apiKey || !productId}
className="w-full py-3 text-base font-semibold bg-primary text-primary-foreground rounded-lg hover:bg-primary/90 disabled:opacity-50 disabled:cursor-not-allowed transition-all shadow-md hover:shadow-lg"
>
{!apiKey ? 'Configure API Key First' : 'Start Recording'}
</button>
<>
<button
onClick={() => startRecording()}
disabled={!apiKey || !productId}
className="w-full py-3 text-base font-semibold bg-primary text-primary-foreground rounded-lg hover:bg-primary/90 disabled:opacity-50 disabled:cursor-not-allowed transition-all shadow-md hover:shadow-lg"
>
{!apiKey ? 'Configure API Key First' : (autoStartEnabled ? 'Standby (Auto-Start)' : 'Start Recording')}
</button>
<div className="flex items-center justify-center gap-2 mt-2">
<label className="flex items-center gap-2 cursor-pointer select-none">
<input
type="checkbox"
checked={autoStartEnabled}
onChange={(e) => setAutoStartEnabled(e.target.checked)}
className="w-4 h-4 accent-primary rounded cursor-pointer"
/>
<span className="text-xs text-muted-foreground font-medium">Auto-start when audio detected</span>
</label>
</div>
</>
) : (
<div className="flex gap-2 w-full">
<button
onClick={togglePause}
className={`flex-1 py-4 text-lg font-semibold rounded-lg transition-all shadow-md hover:shadow-lg flex items-center justify-center gap-2 ${isPaused
? 'bg-blue-600 text-white hover:bg-blue-700'
: 'bg-yellow-500 text-white hover:bg-yellow-600'
}`}
>
{isPaused ? 'Resume' : 'Pause'}
</button>
{/* In Waiting mode, we can only Stop (Cancel) */}
{!isWaiting && (
<button
onClick={togglePause}
className={`flex-1 py-4 text-lg font-semibold rounded-lg transition-all shadow-md hover:shadow-lg flex items-center justify-center gap-2 ${isPaused
? 'bg-blue-600 text-white hover:bg-blue-700'
: 'bg-yellow-500 text-white hover:bg-yellow-600'
}`}
>
{isPaused ? 'Resume' : 'Pause'}
</button>
)}
<button
onClick={stopRecording}
className="flex-1 py-4 text-lg font-semibold bg-destructive text-destructive-foreground rounded-lg hover:bg-destructive/90 transition-all shadow-md hover:shadow-lg flex items-center justify-center gap-2"
>
<Square size={20} fill="currentColor" />
Stop
{isWaiting ? 'Cancel' : 'Stop'}
</button>
</div>
)}

View File

@@ -1,9 +1,9 @@
import React from 'react';
import { Mic, FileText, Calendar } from 'lucide-react';
import { Mic, FileText, Calendar, Upload } from 'lucide-react';
interface TabsProps {
currentTab: 'recorder' | 'transcription' | 'settings' | 'meetings' | 'history';
onTabChange: (tab: 'recorder' | 'transcription' | 'settings' | 'meetings' | 'history') => void;
currentTab: 'recorder' | 'transcription' | 'settings' | 'meetings' | 'history' | 'import';
onTabChange: (tab: 'recorder' | 'transcription' | 'settings' | 'meetings' | 'history' | 'import') => void;
}
const Tabs: React.FC<TabsProps> = ({ currentTab, onTabChange }) => {
@@ -16,6 +16,13 @@ const Tabs: React.FC<TabsProps> = ({ currentTab, onTabChange }) => {
<Mic size={16} />
Recording
</button>
<button
onClick={() => onTabChange('import')}
className={`flex items-center gap-2 px-4 py-2 rounded-lg text-sm font-medium transition-colors ${currentTab === 'import' ? 'bg-secondary text-foreground' : 'text-muted-foreground hover:text-foreground hover:bg-secondary/50'}`}
>
<Upload size={16} />
Import
</button>
<button
onClick={() => onTabChange('transcription')}
className={`flex items-center gap-2 px-4 py-2 rounded-lg text-sm font-medium transition-colors ${currentTab === 'transcription' ? 'bg-secondary text-foreground' : 'text-muted-foreground hover:text-foreground hover:bg-secondary/50'}`}