<?php

declare(strict_types=1);

namespace App\Http\Controllers;

use App\Http\Requests\SwitchModelRequest;
use App\Services\AIService;
use Illuminate\Http\JsonResponse;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\Log;
class AIModelController extends Controller
{
    /**
     * AI service used to list, switch, and test model providers.
     */
    protected AIService $aiService;

    /**
     * Constructor.
     *
     * Accepts an optional AIService so the container (or tests) can inject
     * one; falls back to direct construction for backward compatibility with
     * the original zero-argument instantiation.
     */
    public function __construct(?AIService $aiService = null)
    {
        $this->aiService = $aiService ?? new AIService();
    }

    /**
     * List every available AI service together with its selectable models,
     * the currently configured model per service, and overall service status.
     */
    public function getModels(): JsonResponse
    {
        try {
            $services = $this->aiService->getAvailableServices();
            $models = [];

            // Single source of truth for the per-service model catalogue.
            // (Previously this array was duplicated inline here AND in
            // getModelConfigs(), guaranteeing eventual drift.)
            $modelConfigs = $this->getModelConfigs();

            foreach ($services as $service) {
                $models[$service] = [
                    'service' => $service,
                    'display_name' => $this->getServiceDisplayName($service),
                    'models' => $modelConfigs[$service] ?? [],
                    'current_model' => config("ai.services.{$service}.model", ''),
                ];
            }

            // Currently active service.
            $currentService = $this->aiService->getCurrentService();

            // Per-service health information.
            $serviceStatus = $this->aiService->getServiceStatus();

            return response()->json([
                'code' => 200,
                'data' => [
                    'models' => $models,
                    'current_service' => $currentService,
                    'service_status' => $serviceStatus,
                    'default_service' => config('ai.default'),
                ],
                'message' => 'success'
            ]);
        } catch (\Exception $e) {
            Log::error('获取模型列表失败: ' . $e->getMessage());

            return response()->json([
                'code' => 500,
                'message' => '获取模型列表失败: ' . $e->getMessage()
            ], 500);
        }
    }

    /**
     * Switch the active AI service/model.
     *
     * Validates that the service exists and that the model belongs to that
     * service's catalogue, switches, runs a connection test against the new
     * target, and records an operation log entry on success.
     */
    public function switchModel(SwitchModelRequest $request): JsonResponse
    {
        try {
            $service = $request->input('service');
            $model = $request->input('model');

            // Reject unknown services early (strict comparison: request input
            // is a string and so are the service identifiers).
            $availableServices = $this->aiService->getAvailableServices();
            if (!in_array($service, $availableServices, true)) {
                return response()->json([
                    'code' => 400,
                    'message' => '不支持的服务: ' . $service
                ], 400);
            }

            // The model must belong to the chosen service's catalogue.
            $modelConfigs = $this->getModelConfigs();
            $serviceModels = array_column($modelConfigs[$service] ?? [], 'id');

            if (!in_array($model, $serviceModels, true)) {
                return response()->json([
                    'code' => 400,
                    'message' => '服务 ' . $service . ' 不支持模型: ' . $model
                ], 400);
            }

            // Activate the requested service.
            $this->aiService->setService($service);

            // NOTE(review): this only switches in-memory state; persisting the
            // selection (config/DB) was left as a TODO in the original code.
            $currentService = $this->aiService->getCurrentService();

            // Verify the new target is reachable before reporting success.
            $testResult = $this->aiService->testConnection();

            if (!$testResult['success']) {
                return response()->json([
                    'code' => 503,
                    'message' => '模型切换失败: ' . ($testResult['error'] ?? '连接测试失败'),
                    'data' => $testResult
                ], 503);
            }

            // Audit trail for the switch operation.
            \App\Models\OperationLog::log([
                'module' => 'AI助手',
                'action' => '切换模型',
                'method' => 'POST',
                'path' => 'api/ai/models/switch',
                'request_data' => ['service' => $service, 'model' => $model],
                'response_data' => $testResult,
                'remark' => '切换AI模型到 ' . $service . '/' . $model,
            ]);

            return response()->json([
                'code' => 200,
                'data' => [
                    'service' => $currentService,
                    'model' => $model,
                    'test_result' => $testResult,
                    'message' => '模型切换成功'
                ],
                'message' => '模型切换成功'
            ]);
        } catch (\Exception $e) {
            Log::error('切换模型失败: ' . $e->getMessage());

            return response()->json([
                'code' => 500,
                'message' => '切换模型失败: ' . $e->getMessage()
            ], 500);
        }
    }

    /**
     * Static catalogue of the models each service supports.
     *
     * Now the ONLY copy of this data — getModels() reuses it.
     *
     * @return array<string, list<array{id: string, name: string, description: string}>>
     */
    private function getModelConfigs(): array
    {
        return [
            'openai' => [
                ['id' => 'gpt-4', 'name' => 'GPT-4', 'description' => 'OpenAI最强大的模型'],
                ['id' => 'gpt-4-turbo', 'name' => 'GPT-4 Turbo', 'description' => 'GPT-4的优化版本'],
                ['id' => 'gpt-3.5-turbo', 'name' => 'GPT-3.5 Turbo', 'description' => '性价比高的通用模型'],
                ['id' => 'gpt-3.5-turbo-instruct', 'name' => 'GPT-3.5 Instruct', 'description' => '指令优化版本'],
            ],
            'azure_openai' => [
                ['id' => 'gpt-4', 'name' => 'GPT-4 (Azure)', 'description' => 'Azure托管的GPT-4'],
                ['id' => 'gpt-35-turbo', 'name' => 'GPT-3.5 Turbo (Azure)', 'description' => 'Azure托管的GPT-3.5'],
            ],
            'anthropic' => [
                ['id' => 'claude-3-opus-20240229', 'name' => 'Claude 3 Opus', 'description' => 'Anthropic最强大的模型'],
                ['id' => 'claude-3-sonnet-20240229', 'name' => 'Claude 3 Sonnet', 'description' => '平衡性能与成本'],
                ['id' => 'claude-3-haiku-20240307', 'name' => 'Claude 3 Haiku', 'description' => '快速且经济的模型'],
            ],
            'aliyun_qwen' => [
                ['id' => 'qwen-max', 'name' => '通义千问 Max', 'description' => '阿里云最强大的模型'],
                ['id' => 'qwen-plus', 'name' => '通义千问 Plus', 'description' => '高性能版本'],
                ['id' => 'qwen-turbo', 'name' => '通义千问 Turbo', 'description' => '快速响应版本'],
                ['id' => 'qwen-7b-chat', 'name' => '通义千问 7B', 'description' => '轻量级版本'],
                ['id' => 'qwen-14b-chat', 'name' => '通义千问 14B', 'description' => '中等规模版本'],
            ],
            'local' => [
                ['id' => 'local-model', 'name' => '本地模型', 'description' => '本地部署的模型'],
            ],
        ];
    }

    /**
     * Human-readable display name for a service identifier.
     *
     * Falls back to the raw identifier for unknown services.
     */
    private function getServiceDisplayName(string $service): string
    {
        $names = [
            'openai' => 'OpenAI',
            'azure_openai' => 'Azure OpenAI',
            'anthropic' => 'Anthropic Claude',
            'aliyun_qwen' => '阿里云通义千问',
            'local' => '本地模型',
        ];

        return $names[$service] ?? $service;
    }

    /**
     * Describe the currently active model: service, configured parameters,
     * and a live online/offline status from a connection probe.
     */
    public function getCurrentModel(): JsonResponse
    {
        try {
            $currentService = $this->aiService->getCurrentService();
            $serviceConfig = config("ai.services.{$currentService}", []);

            $modelInfo = [
                'service' => $currentService,
                'service_display_name' => $this->getServiceDisplayName($currentService),
                'model' => $serviceConfig['model'] ?? '',
                'max_tokens' => $serviceConfig['max_tokens'] ?? 2000,
                'temperature' => $serviceConfig['temperature'] ?? 0.7,
                'timeout' => $serviceConfig['timeout'] ?? 30,
            ];

            // Live connection probe for the active service.
            $testResult = $this->aiService->testConnection();
            $modelInfo['status'] = $testResult['success'] ? 'online' : 'offline';
            // Guard against a missing 'message' key — other call sites treat
            // testConnection() keys (e.g. 'error') as optional.
            $modelInfo['test_message'] = $testResult['message'] ?? '';

            return response()->json([
                'code' => 200,
                'data' => $modelInfo,
                'message' => 'success'
            ]);
        } catch (\Exception $e) {
            Log::error('获取当前模型失败: ' . $e->getMessage());

            return response()->json([
                'code' => 500,
                'message' => '获取当前模型失败: ' . $e->getMessage()
            ], 500);
        }
    }

    /**
     * Test connectivity of a model/service.
     *
     * Optionally switches to the requested service first (only if it is in
     * the available list); always returns HTTP 200 with the probe result.
     */
    public function testModel(Request $request): JsonResponse
    {
        try {
            $service = $request->input('service', $this->aiService->getCurrentService());
            $model = $request->input('model', '');

            // If a different service was requested, switch to it — but only
            // when it is actually available (unknown services are ignored).
            if ($service && $service !== $this->aiService->getCurrentService()) {
                $availableServices = $this->aiService->getAvailableServices();
                if (in_array($service, $availableServices, true)) {
                    $this->aiService->setService($service);
                }
            }

            // NOTE(review): a requested model is accepted but not applied —
            // per-model config updates were left unimplemented in the original.
            if ($model) {
                // Placeholder for updating the model configuration.
            }

            $testResult = $this->aiService->testConnection();

            return response()->json([
                'code' => 200,
                'data' => $testResult,
                'message' => $testResult['success'] ? '连接测试成功' : '连接测试失败'
            ]);
        } catch (\Exception $e) {
            Log::error('测试模型连接失败: ' . $e->getMessage());

            return response()->json([
                'code' => 500,
                'message' => '测试模型连接失败: ' . $e->getMessage()
            ], 500);
        }
    }

    /**
     * Model-usage statistics.
     *
     * Currently returns simulated data; the 'period' input is accepted but
     * not yet used — it is reserved for the real database-backed query.
     */
    public function getModelStatistics(Request $request): JsonResponse
    {
        try {
            // day, week, month — TODO: use once real statistics are wired up.
            $period = $request->input('period', 'day');

            // Simulated data until a database-backed implementation exists.
            $statistics = [
                'total_requests' => 1250,
                'successful_requests' => 1200,
                'failed_requests' => 50,
                'total_tokens' => 1250000,
                'avg_response_time' => '1.2秒',
                'popular_models' => [
                    ['model' => 'gpt-3.5-turbo', 'count' => 800, 'percentage' => 64],
                    ['model' => 'qwen-max', 'count' => 300, 'percentage' => 24],
                    ['model' => 'claude-3-haiku', 'count' => 150, 'percentage' => 12],
                ],
                'usage_by_hour' => $this->generateHourlyUsage(),
            ];

            return response()->json([
                'code' => 200,
                'data' => $statistics,
                'message' => 'success'
            ]);
        } catch (\Exception $e) {
            Log::error('获取模型统计失败: ' . $e->getMessage());

            return response()->json([
                'code' => 500,
                'message' => '获取模型统计失败: ' . $e->getMessage()
            ], 500);
        }
    }

    /**
     * Simulated per-hour usage figures for all 24 hours of a day.
     *
     * @return list<array{hour: string, requests: int, tokens: int}>
     */
    private function generateHourlyUsage(): array
    {
        $usage = [];
        for ($i = 0; $i < 24; $i++) {
            $usage[] = [
                'hour' => sprintf('%02d:00', $i),
                // random_int over rand(): CSPRNG, and throws instead of
                // silently misbehaving on bad ranges.
                'requests' => random_int(10, 100),
                'tokens' => random_int(1000, 10000),
            ];
        }
        return $usage;
    }

    /**
     * Static model recommendations plus selection guidance, alongside the
     * currently configured model for context.
     */
    public function getModelRecommendations(): JsonResponse
    {
        try {
            $currentService = $this->aiService->getCurrentService();
            $serviceConfig = config("ai.services.{$currentService}", []);
            $currentModel = $serviceConfig['model'] ?? '';

            $recommendations = [
                'current_model' => $currentModel,
                'recommendations' => [
                    [
                        'model' => 'gpt-3.5-turbo',
                        'service' => 'openai',
                        'reason' => '性价比高,响应速度快,适合日常对话',
                        'cost_per_1k_tokens' => 0.002,
                        'max_tokens' => 4096,
                    ],
                    [
                        'model' => 'qwen-max',
                        'service' => 'aliyun_qwen',
                        'reason' => '中文理解能力强,适合中文场景',
                        'cost_per_1k_tokens' => 0.004,
                        'max_tokens' => 6000,
                    ],
                    [
                        'model' => 'claude-3-haiku',
                        'service' => 'anthropic',
                        'reason' => '快速且经济,适合简单任务',
                        'cost_per_1k_tokens' => 0.00025,
                        'max_tokens' => 4096,
                    ],
                ],
                'selection_criteria' => [
                    '对于简单对话和日常任务,推荐使用 gpt-3.5-turbo',
                    '对于中文内容和复杂分析,推荐使用 qwen-max',
                    '对于需要快速响应的简单任务,推荐使用 claude-3-haiku',
                    '对于需要最高质量的复杂任务,考虑使用 gpt-4 或 claude-3-opus',
                ],
            ];

            return response()->json([
                'code' => 200,
                'data' => $recommendations,
                'message' => 'success'
            ]);
        } catch (\Exception $e) {
            Log::error('获取模型推荐失败: ' . $e->getMessage());

            return response()->json([
                'code' => 500,
                'message' => '获取模型推荐失败: ' . $e->getMessage()
            ], 500);
        }
    }
}