flutter_whisper_api 1.0.0
Add to your pubspec.yaml: flutter_whisper_api: ^1.0.0
A Flutter package for seamless integration with OpenAI's Whisper API, providing speech-to-text conversion together with audio recording capabilities.
import 'package:flutter/material.dart';
import 'package:flutter_whisper_api/flutter_whisper_api.dart';
import 'dart:io';
/// Entry point: boots the example application.
void main() => runApp(const MyApp());
/// Root widget of the example app.
///
/// Configures the [MaterialApp] (title, Material 3 theme) and shows
/// [WhisperExampleScreen] as the home route.
class MyApp extends StatelessWidget {
  const MyApp({super.key});

  @override
  Widget build(BuildContext context) {
    final appTheme = ThemeData(
      primarySwatch: Colors.blue,
      useMaterial3: true,
    );
    return MaterialApp(
      title: 'Flutter Whisper API Example',
      theme: appTheme,
      home: const WhisperExampleScreen(),
    );
  }
}
/// Stateful screen hosting the recording and transcription demo UI.
class WhisperExampleScreen extends StatefulWidget {
  const WhisperExampleScreen({super.key});

  @override
  State<WhisperExampleScreen> createState() {
    return _WhisperExampleScreenState();
  }
}
/// State for [WhisperExampleScreen].
///
/// Records audio with a [WhisperRecorder], shows a live amplitude meter,
/// and sends the finished recording to OpenAI's Whisper API via a
/// [WhisperClient] built from the user-supplied API key.
class _WhisperExampleScreenState extends State<WhisperExampleScreen> {
  late WhisperRecorder _recorder;
  WhisperClient? _client;
  String _transcribedText = '';
  bool _isRecording = false;
  bool _isTranscribing = false;
  // Latest amplitude sample (null when not recording); drives the meter.
  double? _currentAmplitude;
  final _apiKeyController = TextEditingController();

  @override
  void initState() {
    super.initState();
    _recorder = WhisperRecorder();
    // FIX: rebuild when the API key text changes. The record button's
    // enabled state reads _apiKeyController.text, but editing a TextField
    // does not rebuild this State by itself — without this listener the
    // button stayed disabled until some unrelated rebuild occurred.
    _apiKeyController.addListener(_onApiKeyChanged);
    _checkPermissions();
  }

  /// Triggers a rebuild so widgets derived from the API key text update.
  void _onApiKeyChanged() {
    if (mounted) setState(() {});
  }

  /// Requests microphone permission if it has not been granted yet.
  Future<void> _checkPermissions() async {
    final hasPermission = await _recorder.hasPermission();
    if (!hasPermission) {
      await _recorder.requestPermission();
    }
  }

  /// (Re)creates the Whisper client from the current API key field.
  ///
  /// No-op when the field is empty; an existing client is kept in that case.
  void _initializeClient() {
    if (_apiKeyController.text.isNotEmpty) {
      _client = WhisperClient(apiKey: _apiKeyController.text);
    }
  }

  /// Starts a recording session and begins amplitude monitoring.
  Future<void> _startRecording() async {
    try {
      _initializeClient();
      setState(() {
        _isRecording = true;
        _transcribedText = '';
      });
      await _recorder.startRecording(quality: WhisperAudioQuality.medium);
      // Fire-and-forget: the loop exits on its own when recording stops
      // or the widget is unmounted.
      _monitorAmplitude();
    } catch (e) {
      _showError('Failed to start recording: $e');
      // FIX: guard setState after an await — the State may have been
      // disposed while startRecording was in flight.
      if (!mounted) return;
      setState(() {
        _isRecording = false;
      });
    }
  }

  /// Stops recording and, if a client is configured, transcribes the file.
  Future<void> _stopRecording() async {
    try {
      final file = await _recorder.stopRecording();
      if (!mounted) return; // FIX: setState-after-await guard.
      setState(() {
        _isRecording = false;
        _currentAmplitude = null;
      });
      if (file != null && _client != null) {
        await _transcribeAudio(file);
      }
    } catch (e) {
      _showError('Failed to stop recording: $e');
      if (!mounted) return;
      setState(() {
        _isRecording = false;
      });
    }
  }

  /// Sends [audioFile] to the Whisper API and stores the transcription.
  Future<void> _transcribeAudio(File audioFile) async {
    if (_client == null) {
      _showError('Please enter your OpenAI API key first');
      return;
    }
    setState(() {
      _isTranscribing = true;
    });
    try {
      final request = WhisperRequest(
        audioFile: audioFile,
        language: 'en', // You can make this configurable
      );
      final response = await _client!.transcribe(request);
      if (!mounted) return; // FIX: setState-after-await guard.
      setState(() {
        _transcribedText = response.text;
        _isTranscribing = false;
      });
    } catch (e) {
      _showError('Failed to transcribe audio: $e');
      if (!mounted) return;
      setState(() {
        _isTranscribing = false;
      });
    }
  }

  /// Polls the recorder amplitude every 100 ms while recording.
  ///
  /// FIX: previously declared `void async` with a `while (_isRecording)`
  /// condition only — the loop could keep calling [WhisperRecorder.getAmplitude]
  /// on a disposed recorder after the widget was torn down. The loop now
  /// also exits when the State is unmounted, and [dispose] clears
  /// [_isRecording] before disposing the recorder.
  Future<void> _monitorAmplitude() async {
    while (mounted && _isRecording) {
      final amplitude = await _recorder.getAmplitude();
      if (mounted && _isRecording) {
        setState(() {
          _currentAmplitude = amplitude;
        });
      }
      await Future.delayed(const Duration(milliseconds: 100));
    }
  }

  /// Shows [message] in a red snackbar.
  void _showError(String message) {
    // FIX: every caller invokes this after an await; without this guard,
    // using a disposed State's context would throw.
    if (!mounted) return;
    ScaffoldMessenger.of(context).showSnackBar(
      SnackBar(content: Text(message), backgroundColor: Colors.red),
    );
  }

  @override
  void dispose() {
    // FIX: stop the amplitude polling loop before tearing anything down.
    _isRecording = false;
    _apiKeyController.removeListener(_onApiKeyChanged);
    _recorder.dispose();
    _client?.dispose();
    _apiKeyController.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: const Text('Flutter Whisper API Example'),
        backgroundColor: Theme.of(context).colorScheme.inversePrimary,
      ),
      body: Padding(
        padding: const EdgeInsets.all(16.0),
        child: Column(
          children: [
            // API Key Input
            Card(
              child: Padding(
                padding: const EdgeInsets.all(16.0),
                child: Column(
                  crossAxisAlignment: CrossAxisAlignment.start,
                  children: [
                    const Text(
                      'OpenAI API Key',
                      style: TextStyle(
                        fontSize: 16,
                        fontWeight: FontWeight.bold,
                      ),
                    ),
                    const SizedBox(height: 8),
                    TextField(
                      controller: _apiKeyController,
                      decoration: const InputDecoration(
                        hintText: 'Enter your OpenAI API key',
                        border: OutlineInputBorder(),
                      ),
                      obscureText: true,
                    ),
                    const SizedBox(height: 8),
                    const Text(
                      'Get your API key from: https://platform.openai.com/api-keys',
                      style: TextStyle(fontSize: 12, color: Colors.grey),
                    ),
                  ],
                ),
              ),
            ),
            const SizedBox(height: 20),
            // Recording Controls
            Card(
              child: Padding(
                padding: const EdgeInsets.all(16.0),
                child: Column(
                  children: [
                    // Amplitude Indicator
                    if (_isRecording && _currentAmplitude != null)
                      Column(
                        children: [
                          const Text('Recording...'),
                          const SizedBox(height: 8),
                          LinearProgressIndicator(
                            // FIX: clamp — scaling by 2 for visibility can
                            // push the value past the 0..1 progress range.
                            value: (_currentAmplitude! * 2).clamp(0.0, 1.0),
                            backgroundColor: Colors.grey[300],
                            valueColor: AlwaysStoppedAnimation<Color>(
                              Colors.green.withValues(alpha: 0.8),
                            ),
                          ),
                          const SizedBox(height: 16),
                        ],
                      ),
                    // Record Button (disabled until an API key is entered)
                    ElevatedButton.icon(
                      onPressed: _apiKeyController.text.isEmpty
                          ? null
                          : _isRecording
                              ? _stopRecording
                              : _startRecording,
                      icon: Icon(_isRecording ? Icons.stop : Icons.mic),
                      label: Text(
                        _isRecording ? 'Stop Recording' : 'Start Recording',
                      ),
                      style: ElevatedButton.styleFrom(
                        backgroundColor: _isRecording
                            ? Colors.red
                            : Colors.blue,
                        foregroundColor: Colors.white,
                        padding: const EdgeInsets.symmetric(
                          horizontal: 24,
                          vertical: 12,
                        ),
                      ),
                    ),
                  ],
                ),
              ),
            ),
            const SizedBox(height: 20),
            // Transcription Result
            Expanded(
              child: Card(
                child: Padding(
                  padding: const EdgeInsets.all(16.0),
                  child: Column(
                    crossAxisAlignment: CrossAxisAlignment.start,
                    children: [
                      Row(
                        children: [
                          const Text(
                            'Transcription',
                            style: TextStyle(
                              fontSize: 16,
                              fontWeight: FontWeight.bold,
                            ),
                          ),
                          if (_isTranscribing)
                            const Padding(
                              padding: EdgeInsets.only(left: 8.0),
                              child: SizedBox(
                                width: 16,
                                height: 16,
                                child: CircularProgressIndicator(
                                  strokeWidth: 2,
                                ),
                              ),
                            ),
                        ],
                      ),
                      const SizedBox(height: 16),
                      Expanded(
                        child: Container(
                          width: double.infinity,
                          decoration: BoxDecoration(
                            border: Border.all(color: Colors.grey[300]!),
                            borderRadius: BorderRadius.circular(8),
                          ),
                          padding: const EdgeInsets.all(12),
                          child: SingleChildScrollView(
                            child: Text(
                              _transcribedText.isEmpty
                                  ? 'Transcribed text will appear here...'
                                  : _transcribedText,
                              style: TextStyle(
                                fontSize: 14,
                                color: _transcribedText.isEmpty
                                    ? Colors.grey
                                    : Colors.black,
                              ),
                            ),
                          ),
                        ),
                      ),
                    ],
                  ),
                ),
              ),
            ),
          ],
        ),
      ),
    );
  }
}