from langchain_openai import ChatOpenAI
from loguru import logger

from DrGraph.utils.Constant import Constant
from LLM.llm_model_base import LLM_Model_Base


class Chat_LLM(LLM_Model_Base):
    """Chat-mode LLM wrapper around langchain's ChatOpenAI.

    Configures the base model metadata (name/mode) and holds a ChatOpenAI
    client built from the model name and temperature passed to the base class.
    """

    def __init__(self, model_name: str = "gpt-4o-mini", temperature: float = 0.7):
        super().__init__(model_name, temperature)
        self.name = '聊天模型'
        self.mode = Constant.LLM_MODE_CHAT
        # self.model_name / self.temperature are set by the base-class __init__.
        self.llmModel = ChatOpenAI(
            model_name=self.model_name,
            temperature=self.temperature,
        )

    # Returns the raw message object so it can be displayed in the chatbot.
    def invoke(self, prompt: str):
        """Render the prompt through the template and invoke the chat model.

        :param prompt: raw user prompt text.
        :return: the model's response message (as returned by ChatOpenAI.invoke).
        :raises Exception: re-raises whatever the underlying model call raised,
            after logging it. (The original code swallowed the exception and
            then hit an UnboundLocalError on ``return response``.)
        """
        prompt_template_value = self.buildPromptTemplateValue(
            prompt=prompt,
            methodType=Constant.LLM_PROMPT_TEMPLATE_METHOD_INVOKE,
            valueType=Constant.LLM_PROMPT_VALUE_MESSAGES)
        try:
            # Keep the try body minimal: only the call that can fail.
            response = self.llmModel.invoke(prompt_template_value)
        except Exception:
            # logger.exception records the traceback; re-raise so callers
            # see the real failure instead of an unbound-local crash.
            logger.exception(f"{self.name} >>> model invocation failed")
            raise
        logger.info(f"{self.name} >>> 2. 助手回复: {type(response)}\n{response}")
        return response