# hyf-backend/th_agenter/core/new_agent.py

"""LLM工厂类用于创建和管理LLM实例"""
from typing import Optional
from langchain_openai import ChatOpenAI
from langgraph.prebuilt import create_react_agent
from loguru import logger
from requests import Session
from .config import get_settings
async def new_llm(session: Optional[Session] = None, model: Optional[str] = None,
                  temperature: Optional[float] = None,
                  streaming: bool = False) -> ChatOpenAI:
    """Create a ChatOpenAI LLM instance from the application settings.

    Args:
        session: Optional requests session forwarded to the settings lookup.
        model: Optional model name. Its prefix selects the provider
            ('deepseek', 'doubao', 'glm', 'moonshot'); if omitted — or the
            prefix matches no known provider — the current default config
            from settings is used unchanged.
        temperature: Optional sampling temperature; overrides the configured
            default when given.
        streaming: Whether to enable streaming responses (default False).

    Returns:
        A configured ChatOpenAI instance.
    """
    settings = get_settings()
    llm_config = await settings.llm.get_current_config(session)
    if model:
        # Dispatch on the model-name prefix to the provider-specific
        # settings group; unknown prefixes deliberately fall through to
        # the default config loaded above.
        if model.startswith('deepseek'):
            llm_config['model'] = settings.llm.deepseek_model
            llm_config['api_key'] = settings.llm.deepseek_api_key
            llm_config['base_url'] = settings.llm.deepseek_base_url
        elif model.startswith('doubao'):
            llm_config['model'] = settings.llm.doubao_model
            llm_config['api_key'] = settings.llm.doubao_api_key
            llm_config['base_url'] = settings.llm.doubao_base_url
        elif model.startswith('glm'):
            # 'glm' prefix maps to the Zhipu provider settings.
            llm_config['model'] = settings.llm.zhipu_model
            llm_config['api_key'] = settings.llm.zhipu_api_key
            llm_config['base_url'] = settings.llm.zhipu_base_url
        elif model.startswith('moonshot'):
            llm_config['model'] = settings.llm.moonshot_model
            llm_config['api_key'] = settings.llm.moonshot_api_key
            llm_config['base_url'] = settings.llm.moonshot_base_url
    return ChatOpenAI(
        model=llm_config['model'],
        api_key=llm_config['api_key'],
        base_url=llm_config['base_url'],
        # An explicit temperature argument wins over the configured default.
        temperature=temperature if temperature is not None else llm_config['temperature'],
        max_tokens=llm_config['max_tokens'],
        streaming=streaming,
    )
async def new_agent(session: Optional[Session] = None, model: Optional[str] = None,
                    temperature: Optional[float] = None,
                    streaming: bool = False):
    """Create a ReAct agent backed by a freshly built LLM.

    Builds a ChatOpenAI instance via :func:`new_llm` and wraps it in a
    LangGraph ReAct agent with an empty tool list.

    Args:
        session: Optional requests session forwarded to the settings lookup.
        model: Optional model name; prefix selects the provider
            (see :func:`new_llm`).
        temperature: Optional sampling temperature override.
        streaming: Whether to enable streaming responses (default False).

    Returns:
        The agent produced by ``create_react_agent`` (a compiled LangGraph
        graph, not a ChatOpenAI instance — the original annotation was wrong).
    """
    llm = await new_llm(session, model, temperature, streaming)
    # No tools are registered yet; pass an empty list.
    return create_react_agent(llm, [])