// tasq/lib/services/ai_service.dart
import 'dart:convert';
import 'package:flutter_dotenv/flutter_dotenv.dart';
import 'package:google_generative_ai/google_generative_ai.dart';
import 'package:http/http.dart' as http;
/// Unified AI text-enhancement service.
///
/// Tries Gemini (free-tier flash/lite models) first, with automatic
/// 429-retry across all discovered models. If every Gemini model fails
/// (quota exhausted or any unrecoverable error) it seamlessly falls back
/// to the DeepSeek API.
///
/// Usage:
/// ```dart
/// final result = await AiService().enhanceText(
///   myText,
///   promptInstruction: 'Fix grammar and translate to English …',
/// );
/// ```
class AiService {
  static final AiService _instance = AiService._internal();

  /// Returns the process-wide singleton instance.
  factory AiService() => _instance;

  late final String _geminiApiKey;
  late final String _deepseekApiKey;

  /// Cached Gemini model IDs (flash / lite, generateContent-capable).
  List<String> _geminiModels = [];

  /// Reads both API keys from the environment; throws if either is missing
  /// so a misconfigured build fails loudly at first use.
  AiService._internal() {
    final gKey = dotenv.env['GEMINI_API_KEY'];
    if (gKey == null || gKey.isEmpty) {
      throw Exception('GEMINI_API_KEY not found in .env');
    }
    _geminiApiKey = gKey;

    final dsKey = dotenv.env['DEEPSEEK_API_KEY'];
    if (dsKey == null || dsKey.isEmpty) {
      throw Exception('DEEPSEEK_API_KEY not found in .env');
    }
    _deepseekApiKey = dsKey;
  }

  // ---------------------------------------------------------------------------
  // Public API
  // ---------------------------------------------------------------------------

  /// Fixes spelling / grammar, improves clarity, and translates [text] to
  /// professional English.
  ///
  /// Supply [promptInstruction] to give the model field-specific context
  /// (e.g. "This is an IT helpdesk ticket subject …"). If omitted a
  /// sensible generic instruction is used.
  ///
  /// Order of preference:
  ///   1. Gemini flash / lite models (auto-retry on 429 across all models)
  ///   2. DeepSeek `deepseek-chat` (fallback on total Gemini failure)
  ///
  /// [onFallbackToDeepSeek] is called (from the same isolate) just before
  /// switching to the DeepSeek provider, so callers can update UI accordingly.
  ///
  /// This method never throws — if both providers fail it returns [text]
  /// unchanged.
  Future<String> enhanceText(
    String text, {
    String? promptInstruction,
    void Function()? onFallbackToDeepSeek,
  }) async {
    // Nothing to enhance — avoid a pointless (and billable) API round-trip.
    if (text.trim().isEmpty) return text;

    final instruction = promptInstruction ??
        'Fix spelling and grammar, improve clarity, and translate to '
            'professional English. Return ONLY the improved text, '
            'no explanations:';
    final prompt = '$instruction\n\n"$text"';

    // --- 1. Try Gemini ---
    try {
      return await _geminiGenerate(prompt, fallback: text);
    } catch (_) {
      // All Gemini models failed — fall through to DeepSeek.
      onFallbackToDeepSeek?.call();
    }

    // --- 2. Fallback: DeepSeek ---
    try {
      return await _deepseekGenerate(prompt, fallback: text);
    } catch (_) {
      // Both providers failed — return original text unchanged.
      return text;
    }
  }

  // ---------------------------------------------------------------------------
  // Gemini
  // ---------------------------------------------------------------------------

  /// Discovers generateContent-capable flash/lite Gemini models, caching the
  /// result for the lifetime of the singleton. Falls back to a hard-coded
  /// list when discovery fails or yields nothing.
  Future<List<String>> _getGeminiModels() async {
    if (_geminiModels.isNotEmpty) return _geminiModels;
    try {
      final uri = Uri.parse(
        'https://generativelanguage.googleapis.com/v1beta/models'
        '?key=$_geminiApiKey',
      );
      final res = await http.get(uri);
      if (res.statusCode == 200) {
        // Decode the raw bytes as UTF-8 explicitly: package:http falls back
        // to Latin-1 when the response omits a charset, which would garble
        // any non-ASCII characters in the JSON payload.
        final data =
            jsonDecode(utf8.decode(res.bodyBytes)) as Map<String, dynamic>;
        final rawModels = (data['models'] as List<dynamic>?) ?? [];
        final discovered = <String>[];
        for (final m in rawModels) {
          final fullName = m['name'] as String? ?? '';
          final lower = fullName.toLowerCase();
          final methods =
              (m['supportedGenerationMethods'] as List<dynamic>?) ?? [];
          if (methods.contains('generateContent') &&
              (lower.contains('flash') || lower.contains('lite'))) {
            // The API returns names like "models/gemini-2.5-flash"; the SDK
            // wants the bare model ID.
            final id = fullName.startsWith('models/')
                ? fullName.substring('models/'.length)
                : fullName;
            discovered.add(id);
          }
        }
        // Descending lexicographic sort → newest major versions first.
        discovered.sort((a, b) => b.compareTo(a));
        _geminiModels = discovered;
      }
    } catch (_) {
      // Fall through to hard-coded list.
    }
    if (_geminiModels.isEmpty) {
      _geminiModels = [
        'gemini-2.5-flash-lite',
        'gemini-2.5-flash',
        'gemini-2.0-flash',
        'gemini-1.5-flash',
      ];
    }
    return _geminiModels;
  }

  /// Runs [prompt] against each cached Gemini model in order, returning the
  /// first non-empty response (trimmed). Returns [fallback] when a model
  /// answers with null/empty text; throws only when every model errors out.
  Future<String> _geminiGenerate(
    String prompt, {
    required String fallback,
  }) async {
    final models = await _getGeminiModels();
    Object? lastError;
    for (final modelId in models) {
      try {
        final model = GenerativeModel(model: modelId, apiKey: _geminiApiKey);
        final response = await model.generateContent([Content.text(prompt)]);
        // Treat a null or whitespace-only answer as "no enhancement" and
        // return the caller's original text (mirrors the DeepSeek path,
        // which also trims).
        final out = response.text?.trim();
        return (out == null || out.isEmpty) ? fallback : out;
      } catch (e) {
        lastError = e;
        // Try the next model regardless of error type.
      }
    }
    throw Exception('All Gemini models failed. Last error: $lastError');
  }

  // ---------------------------------------------------------------------------
  // DeepSeek (OpenAI-compatible REST)
  // ---------------------------------------------------------------------------

  /// Sends [prompt] to DeepSeek's OpenAI-compatible chat endpoint.
  /// Returns the trimmed completion, [fallback] when the response carries no
  /// content, and throws on any non-200 status.
  Future<String> _deepseekGenerate(
    String prompt, {
    required String fallback,
  }) async {
    const url = 'https://api.deepseek.com/chat/completions';
    final body = jsonEncode({
      'model': 'deepseek-chat',
      'messages': [
        {'role': 'user', 'content': prompt},
      ],
    });
    final res = await http.post(
      Uri.parse(url),
      headers: {
        'Content-Type': 'application/json',
        'Authorization': 'Bearer $_deepseekApiKey',
      },
      body: body,
    );
    if (res.statusCode == 200) {
      // Decode bytes as UTF-8 explicitly — see note in _getGeminiModels.
      final data =
          jsonDecode(utf8.decode(res.bodyBytes)) as Map<String, dynamic>;
      final choices = data['choices'] as List<dynamic>?;
      final content = choices?.firstOrNull?['message']?['content'] as String?;
      return content?.trim() ?? fallback;
    }
    throw Exception(
      'DeepSeek request failed (HTTP ${res.statusCode}): ${res.body}',
    );
  }
}