Add custom endpoint settings
@@ -12,24 +12,31 @@ class AIService:
        - OpenAI (gpt-3.5-turbo, gpt-4, etc.)
        - Anthropic Claude (claude-3-opus, claude-3-sonnet, etc.)
        - Google Gemini (gemini-pro, etc.)
        - Custom endpoints (OpenAI-compatible APIs)
        - Any other model supported by LiteLLM
        """
        # Read the AI configuration
        self.model = os.getenv('AI_MODEL', 'gpt-3.5-turbo')
        self.api_key = os.getenv('AI_API_KEY', os.getenv('OPENAI_API_KEY'))  # backwards-compatible with the old config
        self.api_base = os.getenv('AI_API_BASE')  # optional: custom API endpoint
        self.custom_llm_provider = os.getenv('AI_CUSTOM_PROVIDER')  # optional: API format of the custom endpoint (e.g. 'openai')
        self.temperature = float(os.getenv('AI_TEMPERATURE', '0.7'))
        self.max_tokens = int(os.getenv('AI_MAX_TOKENS', '500'))

        # Export environment variables for LiteLLM
        if self.api_key:
            # When a custom endpoint is used, prefer OPENAI_API_KEY (OpenAI-compatible endpoints)
            if self.api_base and self.custom_llm_provider == 'openai':
                os.environ['OPENAI_API_KEY'] = self.api_key
            # Otherwise set the environment variable that matches the model type
            elif self.model.startswith('gpt-'):
                os.environ['OPENAI_API_KEY'] = self.api_key
            elif self.model.startswith('claude-'):
                os.environ['ANTHROPIC_API_KEY'] = self.api_key
            elif self.model.startswith('gemini-'):
                # If no custom provider is set, use the default Gemini API key
                if not self.custom_llm_provider:
                    os.environ['GEMINI_API_KEY'] = self.api_key
            # LiteLLM handles API keys for other models automatically

    def polish_description(self, description):
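The hunk above is driven entirely by environment variables. A minimal sketch of how a custom OpenAI-compatible endpoint might be wired up before constructing the service, assuming placeholder values; the base URL, key, and import path below are illustrative, not values from this repository:

    import os

    # Hypothetical values for a self-hosted OpenAI-compatible gateway; adjust to your deployment.
    os.environ['AI_MODEL'] = 'gpt-3.5-turbo'                # model name the endpoint expects
    os.environ['AI_API_KEY'] = 'sk-example'                 # key accepted by the custom endpoint (placeholder)
    os.environ['AI_API_BASE'] = 'http://localhost:8000/v1'  # OpenAI-compatible base URL (placeholder)
    os.environ['AI_CUSTOM_PROVIDER'] = 'openai'             # tell LiteLLM to speak the OpenAI protocol

    # from ai_service import AIService   # import path is an assumption
    # service = AIService()              # __init__ above picks up the variables set here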
@@ -67,6 +74,10 @@ class AIService:
        if self.api_base:
            kwargs["api_base"] = self.api_base

        # If a custom provider is set (for OpenAI-compatible custom endpoints)
        if self.custom_llm_provider:
            kwargs["custom_llm_provider"] = self.custom_llm_provider

        # Call through the unified LiteLLM interface
        response = completion(**kwargs)
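For reference, a minimal sketch of the call that the kwargs built in this hunk end up producing, using LiteLLM's completion directly; the endpoint URL, key, and message content are placeholders, and the response is read OpenAI-style:

    from litellm import completion

    # Placeholder endpoint and key; custom_llm_provider='openai' routes the request
    # through LiteLLM's OpenAI-compatible adapter at the given api_base.
    kwargs = {
        "model": "gpt-3.5-turbo",
        "messages": [{"role": "user", "content": "Polish this description: ..."}],
        "temperature": 0.7,
        "max_tokens": 500,
        "api_base": "http://localhost:8000/v1",
        "custom_llm_provider": "openai",
        "api_key": "sk-example",
    }

    response = completion(**kwargs)
    print(response.choices[0].message.content)  # response follows the OpenAI-style schema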