# hxf/backend/th_agenter/llm/llm_model_ollama.py
from loguru import logger
from DrGraph.utils.Constant import Constant
from LLM.llm_model_base import LLM_Model_Base
from langchain_ollama import ChatOllama
class Chat_Ollama(LLM_Model_Base):
    """LLM wrapper for a privately hosted Ollama server.

    Builds a ``langchain_ollama.ChatOllama`` client against a local Ollama
    endpoint and exposes the base class's invoke contract.
    """

    def __init__(self, base_url: str = "http://127.0.0.1:11434",
                 model_name: str = "OxW/Qwen3-0.6B-GGUF:latest",
                 temperature: float = 0.7):
        """Create the Ollama-backed chat model.

        Args:
            base_url: HTTP endpoint of the Ollama server.
            model_name: Ollama model tag to load.
            temperature: Sampling temperature passed to the model.
        """
        super().__init__(model_name, temperature)
        self.name = '私有化Ollama模型'
        self.base_url = base_url
        self.llmModel = ChatOllama(
            base_url=self.base_url,
            model=model_name,
            temperature=temperature,
        )
        self.mode = Constant.LLM_MODE_LOCAL_OLLAMA

    def invoke(self, prompt: str):
        """Send *prompt* to the model and return its response.

        Args:
            prompt: User prompt text.

        Returns:
            The model's response object, or ``None`` if the call failed
            (the error is logged, not re-raised).
        """
        prompt_template_value = self.buildPromptTemplateValue(
            prompt=prompt,
            methodType=Constant.LLM_PROMPT_TEMPLATE_METHOD_INVOKE,
            valueType=Constant.LLM_PROMPT_VALUE_MESSAGES)
        # Bug fix: pre-bind response. Previously, if llmModel.invoke raised,
        # the except branch left `response` unbound and `return response`
        # crashed with UnboundLocalError, masking the original error.
        response = None
        try:
            response = self.llmModel.invoke(prompt_template_value)
            logger.info(f"{self.name} >>> 2. 助手回复: {type(response)}\n{response}")
        except Exception as e:
            # logger.exception records the full traceback, not just str(e).
            logger.exception(e)
        return response