添加API密钥管理功能,包括获取、更新和验证API密钥的接口,增强了对Mathpix和其他API的支持;优化了设置面板的用户体验,改进了API密钥的状态显示和编辑功能,确保用户能够方便地管理和验证API密钥。

This commit is contained in:
Zylan
2025-04-02 21:56:38 +08:00
parent cebfaba26c
commit a46b8ac229
10 changed files with 2516 additions and 476 deletions

View File

@@ -5,7 +5,9 @@ from .base import BaseModel
class AlibabaModel(BaseModel):
def __init__(self, api_key: str, temperature: float = 0.7, system_prompt: str = None, language: str = None, model_name: str = None):
self.model_name = model_name or "QVQ-Max-2025-03-25" # 默认使用QVQ-Max模型
# 如果没有提供模型名称,才使用默认值
self.model_name = model_name if model_name else "QVQ-Max-2025-03-25"
print(f"初始化阿里巴巴模型: {self.model_name}")
# 在super().__init__之前设置model_name，这样get_default_system_prompt能使用它
super().__init__(api_key, temperature, system_prompt, language)
@@ -36,22 +38,35 @@ class AlibabaModel(BaseModel):
"qwen-vl-max-latest": "qwen-vl-max", # 修正为正确的API标识符
}
print(f"模型名称: {self.model_name}")
# 从模型映射表中获取模型标识符,如果不存在则使用默认值
model_id = model_mapping.get(self.model_name)
if model_id:
print(f"从映射表中获取到模型标识符: {model_id}")
return model_id
# 如果没有精确匹配,检查是否包含特定前缀
if self.model_name and "qwen-vl" in self.model_name:
if "max" in self.model_name:
if self.model_name and "qwen-vl" in self.model_name.lower():
if "max" in self.model_name.lower():
print(f"识别为qwen-vl-max模型")
return "qwen-vl-max"
elif "plus" in self.model_name:
elif "plus" in self.model_name.lower():
print(f"识别为qwen-vl-plus模型")
return "qwen-vl-plus"
elif "lite" in self.model_name:
elif "lite" in self.model_name.lower():
print(f"识别为qwen-vl-lite模型")
return "qwen-vl-lite"
print(f"默认使用qwen-vl-max模型")
return "qwen-vl-max" # 默认使用最强版本
# 如果包含QVQ或alibaba关键词，默认使用qvq-max
if self.model_name and ("qvq" in self.model_name.lower() or "alibaba" in self.model_name.lower()):
print(f"识别为QVQ模型使用qvq-max")
return "qvq-max"
# 最后的默认值
print(f"警告:无法识别的模型名称 {self.model_name}默认使用qvq-max")
return "qvq-max"
def analyze_text(self, text: str, proxies: dict = None) -> Generator[dict, None, None]:
@@ -107,7 +122,8 @@ class AlibabaModel(BaseModel):
is_answering = False
# 检查是否为通义千问VL模型，不支持reasoning_content
is_qwen_vl = "qwen-vl" in self.get_model_identifier()
is_qwen_vl = "qwen-vl" in self.get_model_identifier().lower()
print(f"分析文本使用模型标识符: {self.get_model_identifier()}, 是否为千问VL模型: {is_qwen_vl}")
for chunk in response:
if not chunk.choices:
@@ -237,7 +253,8 @@ class AlibabaModel(BaseModel):
is_answering = False
# 检查是否为通义千问VL模型，不支持reasoning_content
is_qwen_vl = "qwen-vl" in self.get_model_identifier()
is_qwen_vl = "qwen-vl" in self.get_model_identifier().lower()
print(f"分析图像使用模型标识符: {self.get_model_identifier()}, 是否为千问VL模型: {is_qwen_vl}")
for chunk in response:
if not chunk.choices:

View File

@@ -56,7 +56,7 @@ class AnthropicModel(BaseModel):
# 处理推理配置
if hasattr(self, 'reasoning_config') and self.reasoning_config:
# 如果设置了extended reasoning
if self.reasoning_config.get('reasoning_depth') == 'extended':
if self.reasoning_config.get('reasoning_depth') == 'extended':
think_budget = self.reasoning_config.get('think_budget', max_tokens // 2)
payload['thinking'] = {
'type': 'enabled',
@@ -64,7 +64,6 @@ class AnthropicModel(BaseModel):
}
# 如果设置了instant模式
elif self.reasoning_config.get('speed_mode') == 'instant':
payload['speed_mode'] = 'instant'
# 确保当使用speed_mode时不包含thinking参数
if 'thinking' in payload:
del payload['thinking']

View File

@@ -23,8 +23,22 @@ class DeepSeekModel(BaseModel):
# 通过模型名称来确定实际的API调用标识符
if self.model_name == "deepseek-chat":
return "deepseek-chat"
# deepseek-reasoner是默认的推理模型名称
return "deepseek-reasoner"
# 如果是deepseek-reasoner或包含reasoner的模型名称，返回推理模型标识符
if "reasoner" in self.model_name.lower():
return "deepseek-reasoner"
# 对于deepseek-chat也返回对应的模型名称
if "chat" in self.model_name.lower() or self.model_name == "deepseek-chat":
return "deepseek-chat"
# 根据配置中的模型ID来确定实际的模型类型
if self.model_name == "deepseek-reasoner":
return "deepseek-reasoner"
elif self.model_name == "deepseek-chat":
return "deepseek-chat"
# 默认使用deepseek-chat作为API标识符
print(f"未知的DeepSeek模型名称: {self.model_name}使用deepseek-chat作为默认值")
return "deepseek-chat"
def analyze_text(self, text: str, proxies: dict = None) -> Generator[dict, None, None]:
"""Stream DeepSeek's response for text analysis"""
@@ -75,10 +89,10 @@ class DeepSeekModel(BaseModel):
}
# 只有非推理模型才设置temperature参数
if not self.model_name.endswith('reasoner') and self.temperature is not None:
if not self.get_model_identifier().endswith('reasoner') and self.temperature is not None:
params["temperature"] = self.temperature
print(f"调用DeepSeek API: {self.get_model_identifier()}, 是否设置温度: {not self.model_name.endswith('reasoner')}")
print(f"调用DeepSeek API: {self.get_model_identifier()}, 是否设置温度: {not self.get_model_identifier().endswith('reasoner')}, 温度值: {self.temperature if not self.get_model_identifier().endswith('reasoner') else 'N/A'}")
response = client.chat.completions.create(**params)
@@ -253,7 +267,7 @@ class DeepSeekModel(BaseModel):
}
# 只有非推理模型才设置temperature参数
if not self.model_name.endswith('reasoner') and self.temperature is not None:
if not self.get_model_identifier().endswith('reasoner') and self.temperature is not None:
params["temperature"] = self.temperature
response = client.chat.completions.create(**params)

View File

@@ -98,36 +98,26 @@ class ModelFactory:
@classmethod
def create_model(cls, model_name: str, api_key: str, temperature: float = 0.7, system_prompt: str = None, language: str = None) -> BaseModel:
"""
Create and return an instance of the specified model.
Create a model instance based on the model name.
Args:
model_name: The identifier of the model to create
api_key: The API key for the model
temperature: Optional temperature parameter for response generation
system_prompt: Optional custom system prompt
language: Optional language preference for responses
model_name: The identifier for the model
api_key: The API key for the model service
temperature: The temperature to use for generation
system_prompt: The system prompt to use
language: The preferred language for responses
Returns:
An instance of the specified model
Raises:
ValueError: If the model_name is not recognized
A model instance
"""
model_info = cls._models.get(model_name)
if not model_info:
if model_name not in cls._models:
raise ValueError(f"Unknown model: {model_name}")
model_info = cls._models[model_name]
model_class = model_info['class']
# 对于Mathpix模型不传递language参数
if model_name == 'mathpix':
return model_class(
api_key=api_key,
temperature=temperature,
system_prompt=system_prompt
)
else:
# 对于所有其他模型传递model_name参数
# 对于DeepSeek模型需要传递正确的模型名称
if 'deepseek' in model_name.lower():
return model_class(
api_key=api_key,
temperature=temperature,
@@ -135,6 +125,30 @@ class ModelFactory:
language=language,
model_name=model_name
)
# 对于阿里巴巴模型,也需要传递正确的模型名称
elif 'qwen' in model_name.lower() or 'qvq' in model_name.lower() or 'alibaba' in model_name.lower():
return model_class(
api_key=api_key,
temperature=temperature,
system_prompt=system_prompt,
language=language,
model_name=model_name
)
# 对于Mathpix模型不传递language参数
elif model_name == 'mathpix':
return model_class(
api_key=api_key,
temperature=temperature,
system_prompt=system_prompt
)
else:
# 其他模型仅传递标准参数
return model_class(
api_key=api_key,
temperature=temperature,
system_prompt=system_prompt,
language=language
)
@classmethod
def get_available_models(cls) -> list[Dict[str, Any]]: