Compare commits

...

1 Commits
main ... jcq

Author SHA1 Message Date
karljiang118440 260fe77ad0 update 2025-12-17 19:26:36 +08:00
418 changed files with 28136 additions and 10462 deletions

View File

@ -1,90 +0,0 @@
# ========================================
# 大模型配置 (支持OpenAI协议的第三方服务)
# ========================================
# 可选择的提供商: openai, deepseek, doubao, zhipu, moonshot
LLM_PROVIDER=doubao
# Embedding模型配置
# 可选择的提供商: openai, deepseek, doubao, zhipu, moonshot
EMBEDDING_PROVIDER=zhipu
# OpenAI配置
OPENAI_API_KEY=your-openai-api-key
OPENAI_BASE_URL=https://api.openai.com/v1
OPENAI_MODEL=gpt-3.5-turbo
# DeepSeek配置
DEEPSEEK_API_KEY=your-deepseek-api-key
DEEPSEEK_BASE_URL=https://api.deepseek.com/v1
DEEPSEEK_MODEL=deepseek-chat
# 豆包(字节跳动)配置
DOUBAO_API_KEY=your-doubao-api-key
DOUBAO_BASE_URL=https://ark.cn-beijing.volces.com/api/v3
DOUBAO_MODEL=doubao-1-5-pro-32k-250115
# 智谱AI配置
ZHIPU_API_KEY=your-zhipu-api-key
ZHIPU_BASE_URL=https://open.bigmodel.cn/api/paas/v4
ZHIPU_MODEL=glm-4
ZHIPU_EMBEDDING_MODEL=embedding-3
# 月之暗面配置
MOONSHOT_API_KEY=your-moonshot-api-key
MOONSHOT_BASE_URL=https://api.moonshot.cn/v1
MOONSHOT_MODEL=moonshot-v1-8k
MOONSHOT_EMBEDDING_MODEL=moonshot-embedding
# Embedding模型配置
OPENAI_EMBEDDING_MODEL=text-embedding-ada-002
DEEPSEEK_EMBEDDING_MODEL=deepseek-embedding
DOUBAO_EMBEDDING_MODEL=doubao-embedding
# 工具API配置
## tavilySearch api
TAVILY_API_KEY=your-tavily-api-key
## 心知天气api
WEATHER_API_KEY=your_xinzhi_api_key
# ========================================
# 应用配置
# ========================================
# 后端应用配置
APP_NAME=TH-Agenter
APP_VERSION=0.1.0
DEBUG=true
ENVIRONMENT=development
HOST=0.0.0.0
PORT=8000
# 前端应用配置
VITE_API_BASE_URL=http://localhost:8000/api
VITE_APP_TITLE=TH-Agenter
VITE_APP_VERSION=1.0.0
VITE_ENABLE_MOCK=false
VITE_UPLOAD_MAX_SIZE=10485760
VITE_SUPPORTED_FILE_TYPES=pdf,txt,md,doc,docx,ppt,pptx,xls,xlsx
# ========================================
# 安全配置
# ========================================
SECRET_KEY=your-secret-key-here-change-in-production
ALGORITHM=HS256
ACCESS_TOKEN_EXPIRE_MINUTES=300
# ========================================
# 数据库配置
# ========================================
# 数据库URL配置
DATABASE_URL=sqlite:///./TH-Agenter.db
# DATABASE_URL=postgresql://iagent:iagent@118.196.30.45:5432/iagent  # SECURITY: real credentials and a public IP are committed here — rotate the password and keep real values out of version control
# ========================================
# 向量数据库配置
# ========================================
VECTOR_DB_TYPE=pgvector
# SECURITY NOTE: a real public host and live credentials are committed below.
# Rotate this password and replace these with placeholders (like the API-key
# sections above) before publishing this file.
PGVECTOR_HOST=118.196.30.45
PGVECTOR_PORT=5432
PGVECTOR_DATABASE=iagent
PGVECTOR_USER=iagent
PGVECTOR_PASSWORD=iagent

View File

@ -1,89 +0,0 @@
# ========================================
# 大模型配置 (支持OpenAI协议的第三方服务)
# ========================================
# 可选择的提供商: openai, deepseek, doubao, zhipu, moonshot
LLM_PROVIDER=doubao
# Embedding模型配置
# 可选择的提供商: openai, deepseek, doubao, zhipu, moonshot
EMBEDDING_PROVIDER=zhipu
# OpenAI配置
OPENAI_API_KEY=your-openai-api-key
OPENAI_BASE_URL=https://api.openai.com/v1
OPENAI_MODEL=gpt-3.5-turbo
# DeepSeek配置
DEEPSEEK_API_KEY=your-deepseek-api-key
DEEPSEEK_BASE_URL=https://api.deepseek.com/v1
DEEPSEEK_MODEL=deepseek-chat
# 豆包(字节跳动)配置
DOUBAO_API_KEY=your-doubao-api-key
DOUBAO_BASE_URL=https://ark.cn-beijing.volces.com/api/v3
DOUBAO_MODEL=doubao-1-5-pro-32k-250115
# 智谱AI配置
ZHIPU_API_KEY=your-zhipu-api-key
ZHIPU_BASE_URL=https://open.bigmodel.cn/api/paas/v4
ZHIPU_MODEL=glm-4
ZHIPU_EMBEDDING_MODEL=embedding-3
# 月之暗面配置
MOONSHOT_API_KEY=your-moonshot-api-key
MOONSHOT_BASE_URL=https://api.moonshot.cn/v1
MOONSHOT_MODEL=moonshot-v1-8k
MOONSHOT_EMBEDDING_MODEL=moonshot-embedding
# Embedding模型配置
OPENAI_EMBEDDING_MODEL=text-embedding-ada-002
DEEPSEEK_EMBEDDING_MODEL=deepseek-embedding
DOUBAO_EMBEDDING_MODEL=doubao-embedding
# 工具API配置
## tavilySearch api
TAVILY_API_KEY=your-tavily-api-key
## 心知天气api
WEATHER_API_KEY=your_xinzhi_api_key
# ========================================
# 应用配置
# ========================================
# 后端应用配置
APP_NAME=TH-Agenter
APP_VERSION=0.1.0
DEBUG=true
ENVIRONMENT=development
HOST=0.0.0.0
PORT=8000
# 前端应用配置
VITE_API_BASE_URL=http://localhost:8000/api
VITE_APP_TITLE=TH-Agenter
VITE_APP_VERSION=1.0.0
VITE_ENABLE_MOCK=false
VITE_UPLOAD_MAX_SIZE=10485760
VITE_SUPPORTED_FILE_TYPES=pdf,txt,md,doc,docx,ppt,pptx,xls,xlsx
# ========================================
# 安全配置
# ========================================
SECRET_KEY=your-secret-key-here-change-in-production
ALGORITHM=HS256
ACCESS_TOKEN_EXPIRE_MINUTES=300
# ========================================
# 数据库配置
# ========================================
# SECURITY NOTE: this URL embeds live credentials and a public IP in a file
# under version control — rotate the password and use a placeholder instead.
DATABASE_URL=postgresql://iagent:iagent@118.196.30.45:5432/iagent
# ========================================
# 向量数据库配置
# ========================================
VECTOR_DB_TYPE=pgvector
PGVECTOR_HOST=localhost
PGVECTOR_PORT=5432
PGVECTOR_DATABASE=mydb
PGVECTOR_USER=myuser
PGVECTOR_PASSWORD=mypassword

179
backend/.gitignore vendored
View File

@ -1,179 +0,0 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control. See https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
# VS Code
.vscode/*
!.vscode/extensions.json
# Logs
logs/
*.log
webIOs/output/logs/
# OS generated files
Thumbs.db
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
# FastAPI specific
*.pyc
uvicorn*.log

Binary file not shown.

View File

@ -1,147 +0,0 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts.
# this is typically a path given in POSIX (e.g. forward slashes)
# format, relative to the token %(here)s which refers to the location of this
# ini file
script_location = %(here)s/alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory. for multiple paths, the path separator
# is defined by "path_separator" below.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the tzdata library which can be installed by adding
# `alembic[tz]` to the pip requirements.
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to <script_location>/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "path_separator"
# below.
# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions
# path_separator; This indicates what character is used to split lists of file
# paths, including version_locations and prepend_sys_path within configparser
# files such as alembic.ini.
# The default rendered in new alembic.ini files is "os", which uses os.pathsep
# to provide os-dependent path splitting.
#
# Note that in order to support legacy alembic.ini files, this default does NOT
# take place if path_separator is not present in alembic.ini. If this
# option is omitted entirely, fallback logic is as follows:
#
# 1. Parsing of the version_locations option falls back to using the legacy
# "version_path_separator" key, which if absent then falls back to the legacy
# behavior of splitting on spaces and/or commas.
# 2. Parsing of the prepend_sys_path option falls back to the legacy
# behavior of splitting on spaces, commas, or colons.
#
# Valid values for path_separator are:
#
# path_separator = :
# path_separator = ;
# path_separator = space
# path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
path_separator = os
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
# database URL. This is consumed by the user-maintained env.py script only.
# other means of configuring database URLs may be customized within the env.py
# file.
# sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the module runner, against the "ruff" module
# hooks = ruff
# ruff.type = module
# ruff.module = ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
# Alternatively, use the exec runner to execute a binary found on your PATH
# hooks = ruff
# ruff.type = exec
# ruff.executable = ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
# Logging configuration. This is also consumed by the user-maintained
# env.py script only.
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARNING
handlers = console
qualname =
[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

View File

@ -1,4 +0,0 @@
Generic single-database configuration with an async dbapi.
alembic revision --autogenerate -m "init"
alembic upgrade head

View File

@ -1,101 +0,0 @@
import asyncio, os
from logging.config import fileConfig
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
from dotenv import load_dotenv
load_dotenv()
database_url = os.getenv("DATABASE_URL")
if not database_url:
raise ValueError("环境变量DATABASE_URL未设置")
config.set_main_option("sqlalchemy.url", database_url)
# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
from th_agenter.db import Base
# from th_agenter.models import User, Conversation, Message, KnowledgeBase, Document, AgentConfig, ExcelFile, Role, UserRole, LLMConfig, Workflow, WorkflowExecution, NodeExecution, DatabaseConfig, TableMetadata
target_metadata = Base.metadata
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url,
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
)
with context.begin_transaction():
context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
context.configure(connection=connection, target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()
async def run_async_migrations() -> None:
"""In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = async_engine_from_config(
config.get_section(config.config_ini_section, {}),
prefix="sqlalchemy.",
poolclass=pool.NullPool,
)
async with connectable.connect() as connection:
await connection.run_sync(do_run_migrations)
await connectable.dispose()
def run_migrations_online() -> None:
"""Run migrations in 'online' mode."""
asyncio.run(run_async_migrations())
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()

View File

@ -1,28 +0,0 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
"""Upgrade schema."""
${upgrades if upgrades else "pass"}
def downgrade() -> None:
"""Downgrade schema."""
${downgrades if downgrades else "pass"}

View File

@ -1,359 +0,0 @@
"""init
Revision ID: 1ea5548d641d
Revises:
Create Date: 2025-12-13 13:47:07.838600
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '1ea5548d641d'
down_revision: Union[str, Sequence[str], None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('agent_configs',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=100), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('enabled_tools', sa.JSON(), nullable=False),
sa.Column('max_iterations', sa.Integer(), nullable=False),
sa.Column('temperature', sa.String(length=10), nullable=False),
sa.Column('system_message', sa.Text(), nullable=True),
sa.Column('verbose', sa.Boolean(), nullable=False),
sa.Column('model_name', sa.String(length=100), nullable=False),
sa.Column('max_tokens', sa.Integer(), nullable=False),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('is_default', sa.Boolean(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('created_by', sa.Integer(), nullable=True),
sa.Column('updated_by', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_agent_configs'))
)
op.create_index(op.f('ix_agent_configs_id'), 'agent_configs', ['id'], unique=False)
op.create_index(op.f('ix_agent_configs_name'), 'agent_configs', ['name'], unique=False)
op.create_table('conversations',
sa.Column('title', sa.String(length=200), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('knowledge_base_id', sa.Integer(), nullable=True),
sa.Column('system_prompt', sa.Text(), nullable=True),
sa.Column('model_name', sa.String(length=100), nullable=False),
sa.Column('temperature', sa.String(length=10), nullable=False),
sa.Column('max_tokens', sa.Integer(), nullable=False),
sa.Column('is_archived', sa.Boolean(), nullable=False),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('created_by', sa.Integer(), nullable=True),
sa.Column('updated_by', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_conversations'))
)
op.create_index(op.f('ix_conversations_id'), 'conversations', ['id'], unique=False)
op.create_table('database_configs',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=100), nullable=False),
sa.Column('db_type', sa.String(length=20), nullable=False),
sa.Column('host', sa.String(length=255), nullable=False),
sa.Column('port', sa.Integer(), nullable=False),
sa.Column('database', sa.String(length=100), nullable=False),
sa.Column('username', sa.String(length=100), nullable=False),
sa.Column('password', sa.Text(), nullable=False),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('is_default', sa.Boolean(), nullable=False),
sa.Column('connection_params', sa.JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('created_by', sa.Integer(), nullable=True),
sa.Column('updated_by', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_database_configs')),
sa.UniqueConstraint('db_type', name=op.f('uq_database_configs_db_type'))
)
op.create_index(op.f('ix_database_configs_id'), 'database_configs', ['id'], unique=False)
op.create_table('documents',
sa.Column('knowledge_base_id', sa.Integer(), nullable=False),
sa.Column('filename', sa.String(length=255), nullable=False),
sa.Column('original_filename', sa.String(length=255), nullable=False),
sa.Column('file_path', sa.String(length=500), nullable=False),
sa.Column('file_size', sa.Integer(), nullable=False),
sa.Column('file_type', sa.String(length=50), nullable=False),
sa.Column('mime_type', sa.String(length=100), nullable=True),
sa.Column('is_processed', sa.Boolean(), nullable=False),
sa.Column('processing_error', sa.Text(), nullable=True),
sa.Column('content', sa.Text(), nullable=True),
sa.Column('doc_metadata', sa.JSON(), nullable=True),
sa.Column('chunk_count', sa.Integer(), nullable=False),
sa.Column('embedding_model', sa.String(length=100), nullable=True),
sa.Column('vector_ids', sa.JSON(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('created_by', sa.Integer(), nullable=True),
sa.Column('updated_by', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_documents'))
)
op.create_index(op.f('ix_documents_id'), 'documents', ['id'], unique=False)
op.create_table('excel_files',
sa.Column('original_filename', sa.String(length=255), nullable=False),
sa.Column('file_path', sa.String(length=500), nullable=False),
sa.Column('file_size', sa.Integer(), nullable=False),
sa.Column('file_type', sa.String(length=50), nullable=False),
sa.Column('sheet_names', sa.JSON(), nullable=False),
sa.Column('default_sheet', sa.String(length=100), nullable=True),
sa.Column('columns_info', sa.JSON(), nullable=False),
sa.Column('preview_data', sa.JSON(), nullable=False),
sa.Column('data_types', sa.JSON(), nullable=True),
sa.Column('total_rows', sa.JSON(), nullable=True),
sa.Column('total_columns', sa.JSON(), nullable=True),
sa.Column('is_processed', sa.Boolean(), nullable=False),
sa.Column('processing_error', sa.Text(), nullable=True),
sa.Column('last_accessed', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('created_by', sa.Integer(), nullable=True),
sa.Column('updated_by', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_excel_files'))
)
op.create_index(op.f('ix_excel_files_id'), 'excel_files', ['id'], unique=False)
op.create_table('knowledge_bases',
sa.Column('name', sa.String(length=100), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('embedding_model', sa.String(length=100), nullable=False),
sa.Column('chunk_size', sa.Integer(), nullable=False),
sa.Column('chunk_overlap', sa.Integer(), nullable=False),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('vector_db_type', sa.String(length=50), nullable=False),
sa.Column('collection_name', sa.String(length=100), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('created_by', sa.Integer(), nullable=True),
sa.Column('updated_by', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_knowledge_bases'))
)
op.create_index(op.f('ix_knowledge_bases_id'), 'knowledge_bases', ['id'], unique=False)
op.create_index(op.f('ix_knowledge_bases_name'), 'knowledge_bases', ['name'], unique=False)
op.create_table('llm_configs',
sa.Column('name', sa.String(length=100), nullable=False),
sa.Column('provider', sa.String(length=50), nullable=False),
sa.Column('model_name', sa.String(length=100), nullable=False),
sa.Column('api_key', sa.String(length=500), nullable=False),
sa.Column('base_url', sa.String(length=200), nullable=True),
sa.Column('max_tokens', sa.Integer(), nullable=False),
sa.Column('temperature', sa.Float(), nullable=False),
sa.Column('top_p', sa.Float(), nullable=False),
sa.Column('frequency_penalty', sa.Float(), nullable=False),
sa.Column('presence_penalty', sa.Float(), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('is_default', sa.Boolean(), nullable=False),
sa.Column('is_embedding', sa.Boolean(), nullable=False),
sa.Column('extra_config', sa.JSON(), nullable=True),
sa.Column('usage_count', sa.Integer(), nullable=False),
sa.Column('last_used_at', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('created_by', sa.Integer(), nullable=True),
sa.Column('updated_by', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_llm_configs'))
)
op.create_index(op.f('ix_llm_configs_id'), 'llm_configs', ['id'], unique=False)
op.create_index(op.f('ix_llm_configs_name'), 'llm_configs', ['name'], unique=False)
op.create_index(op.f('ix_llm_configs_provider'), 'llm_configs', ['provider'], unique=False)
op.create_table('messages',
sa.Column('conversation_id', sa.Integer(), nullable=False),
sa.Column('role', sa.Enum('USER', 'ASSISTANT', 'SYSTEM', name='messagerole'), nullable=False),
sa.Column('content', sa.Text(), nullable=False),
sa.Column('message_type', sa.Enum('TEXT', 'IMAGE', 'FILE', 'AUDIO', name='messagetype'), nullable=False),
sa.Column('message_metadata', sa.JSON(), nullable=True),
sa.Column('context_documents', sa.JSON(), nullable=True),
sa.Column('prompt_tokens', sa.Integer(), nullable=True),
sa.Column('completion_tokens', sa.Integer(), nullable=True),
sa.Column('total_tokens', sa.Integer(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('created_by', sa.Integer(), nullable=True),
sa.Column('updated_by', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_messages'))
)
op.create_index(op.f('ix_messages_id'), 'messages', ['id'], unique=False)
op.create_table('roles',
sa.Column('name', sa.String(length=100), nullable=False),
sa.Column('code', sa.String(length=100), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('is_system', sa.Boolean(), nullable=False),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('created_by', sa.Integer(), nullable=True),
sa.Column('updated_by', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_roles'))
)
op.create_index(op.f('ix_roles_code'), 'roles', ['code'], unique=True)
op.create_index(op.f('ix_roles_id'), 'roles', ['id'], unique=False)
op.create_index(op.f('ix_roles_name'), 'roles', ['name'], unique=True)
op.create_table('table_metadata',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('table_name', sa.String(length=100), nullable=False),
sa.Column('table_schema', sa.String(length=50), nullable=False),
sa.Column('table_type', sa.String(length=20), nullable=False),
sa.Column('table_comment', sa.Text(), nullable=True),
sa.Column('database_config_id', sa.Integer(), nullable=True),
sa.Column('columns_info', sa.JSON(), nullable=False),
sa.Column('primary_keys', sa.JSON(), nullable=True),
sa.Column('foreign_keys', sa.JSON(), nullable=True),
sa.Column('indexes', sa.JSON(), nullable=True),
sa.Column('sample_data', sa.JSON(), nullable=True),
sa.Column('row_count', sa.Integer(), nullable=False),
sa.Column('is_enabled_for_qa', sa.Boolean(), nullable=False),
sa.Column('qa_description', sa.Text(), nullable=True),
sa.Column('business_context', sa.Text(), nullable=True),
sa.Column('last_synced_at', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('created_by', sa.Integer(), nullable=True),
sa.Column('updated_by', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_table_metadata'))
)
op.create_index(op.f('ix_table_metadata_id'), 'table_metadata', ['id'], unique=False)
op.create_index(op.f('ix_table_metadata_table_name'), 'table_metadata', ['table_name'], unique=False)
op.create_table('users',
sa.Column('username', sa.String(length=50), nullable=False),
sa.Column('email', sa.String(length=100), nullable=False),
sa.Column('hashed_password', sa.String(length=255), nullable=False),
sa.Column('full_name', sa.String(length=100), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('avatar_url', sa.String(length=255), nullable=True),
sa.Column('bio', sa.Text(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('created_by', sa.Integer(), nullable=True),
sa.Column('updated_by', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id', name=op.f('pk_users'))
)
op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True)
op.create_index(op.f('ix_users_id'), 'users', ['id'], unique=False)
op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=True)
op.create_table('user_roles',
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('role_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['role_id'], ['roles.id'], name=op.f('fk_user_roles_role_id_roles')),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], name=op.f('fk_user_roles_user_id_users')),
sa.PrimaryKeyConstraint('user_id', 'role_id', name=op.f('pk_user_roles'))
)
op.create_table('workflows',
sa.Column('name', sa.String(length=100), nullable=False, comment='工作流名称'),
sa.Column('description', sa.Text(), nullable=True, comment='工作流描述'),
sa.Column('status', sa.Enum('DRAFT', 'PUBLISHED', 'ARCHIVED', name='workflowstatus'), nullable=False, comment='工作流状态'),
sa.Column('is_active', sa.Boolean(), nullable=False, comment='是否激活'),
sa.Column('definition', sa.JSON(), nullable=False, comment='工作流定义'),
sa.Column('version', sa.String(length=20), nullable=False, comment='版本号'),
sa.Column('owner_id', sa.Integer(), nullable=False, comment='所有者ID'),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('created_by', sa.Integer(), nullable=True),
sa.Column('updated_by', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['owner_id'], ['users.id'], name=op.f('fk_workflows_owner_id_users')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_workflows'))
)
op.create_index(op.f('ix_workflows_id'), 'workflows', ['id'], unique=False)
op.create_table('workflow_executions',
sa.Column('workflow_id', sa.Integer(), nullable=False, comment='工作流ID'),
sa.Column('status', sa.Enum('PENDING', 'RUNNING', 'COMPLETED', 'FAILED', 'CANCELLED', name='executionstatus'), nullable=False, comment='执行状态'),
sa.Column('input_data', sa.JSON(), nullable=True, comment='输入数据'),
sa.Column('output_data', sa.JSON(), nullable=True, comment='输出数据'),
sa.Column('started_at', sa.String(length=50), nullable=True, comment='开始时间'),
sa.Column('completed_at', sa.String(length=50), nullable=True, comment='完成时间'),
sa.Column('error_message', sa.Text(), nullable=True, comment='错误信息'),
sa.Column('executor_id', sa.Integer(), nullable=False, comment='执行者ID'),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('created_by', sa.Integer(), nullable=True),
sa.Column('updated_by', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['executor_id'], ['users.id'], name=op.f('fk_workflow_executions_executor_id_users')),
sa.ForeignKeyConstraint(['workflow_id'], ['workflows.id'], name=op.f('fk_workflow_executions_workflow_id_workflows')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_workflow_executions'))
)
op.create_index(op.f('ix_workflow_executions_id'), 'workflow_executions', ['id'], unique=False)
op.create_table('node_executions',
sa.Column('workflow_execution_id', sa.Integer(), nullable=False, comment='工作流执行ID'),
sa.Column('node_id', sa.String(length=50), nullable=False, comment='节点ID'),
sa.Column('node_type', sa.Enum('START', 'END', 'LLM', 'CONDITION', 'LOOP', 'CODE', 'HTTP', 'TOOL', name='nodetype'), nullable=False, comment='节点类型'),
sa.Column('node_name', sa.String(length=100), nullable=False, comment='节点名称'),
sa.Column('status', sa.Enum('PENDING', 'RUNNING', 'COMPLETED', 'FAILED', 'CANCELLED', name='executionstatus'), nullable=False, comment='执行状态'),
sa.Column('input_data', sa.JSON(), nullable=True, comment='输入数据'),
sa.Column('output_data', sa.JSON(), nullable=True, comment='输出数据'),
sa.Column('started_at', sa.String(length=50), nullable=True, comment='开始时间'),
sa.Column('completed_at', sa.String(length=50), nullable=True, comment='完成时间'),
sa.Column('duration_ms', sa.Integer(), nullable=True, comment='执行时长(毫秒)'),
sa.Column('error_message', sa.Text(), nullable=True, comment='错误信息'),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('created_by', sa.Integer(), nullable=True),
sa.Column('updated_by', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['workflow_execution_id'], ['workflow_executions.id'], name=op.f('fk_node_executions_workflow_execution_id_workflow_executions')),
sa.PrimaryKeyConstraint('id', name=op.f('pk_node_executions'))
)
op.create_index(op.f('ix_node_executions_id'), 'node_executions', ['id'], unique=False)
# ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema.

    Reverses the corresponding upgrade(): drops every table (and its
    indexes) that upgrade() created. Tables are dropped child-first so
    each foreign key (e.g. node_executions -> workflow_executions ->
    workflows, user_roles -> users/roles) is removed before the table
    it references.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_node_executions_id'), table_name='node_executions')
    op.drop_table('node_executions')
    op.drop_index(op.f('ix_workflow_executions_id'), table_name='workflow_executions')
    op.drop_table('workflow_executions')
    op.drop_index(op.f('ix_workflows_id'), table_name='workflows')
    op.drop_table('workflows')
    # Association table (no own index beyond the composite PK).
    op.drop_table('user_roles')
    op.drop_index(op.f('ix_users_username'), table_name='users')
    op.drop_index(op.f('ix_users_id'), table_name='users')
    op.drop_index(op.f('ix_users_email'), table_name='users')
    op.drop_table('users')
    op.drop_index(op.f('ix_table_metadata_table_name'), table_name='table_metadata')
    op.drop_index(op.f('ix_table_metadata_id'), table_name='table_metadata')
    op.drop_table('table_metadata')
    op.drop_index(op.f('ix_roles_name'), table_name='roles')
    op.drop_index(op.f('ix_roles_id'), table_name='roles')
    op.drop_index(op.f('ix_roles_code'), table_name='roles')
    op.drop_table('roles')
    op.drop_index(op.f('ix_messages_id'), table_name='messages')
    op.drop_table('messages')
    op.drop_index(op.f('ix_llm_configs_provider'), table_name='llm_configs')
    op.drop_index(op.f('ix_llm_configs_name'), table_name='llm_configs')
    op.drop_index(op.f('ix_llm_configs_id'), table_name='llm_configs')
    op.drop_table('llm_configs')
    op.drop_index(op.f('ix_knowledge_bases_name'), table_name='knowledge_bases')
    op.drop_index(op.f('ix_knowledge_bases_id'), table_name='knowledge_bases')
    op.drop_table('knowledge_bases')
    op.drop_index(op.f('ix_excel_files_id'), table_name='excel_files')
    op.drop_table('excel_files')
    op.drop_index(op.f('ix_documents_id'), table_name='documents')
    op.drop_table('documents')
    op.drop_index(op.f('ix_database_configs_id'), table_name='database_configs')
    op.drop_table('database_configs')
    op.drop_index(op.f('ix_conversations_id'), table_name='conversations')
    op.drop_table('conversations')
    op.drop_index(op.f('ix_agent_configs_name'), table_name='agent_configs')
    op.drop_index(op.f('ix_agent_configs_id'), table_name='agent_configs')
    op.drop_table('agent_configs')
    # ### end Alembic commands ###

View File

@ -1,66 +0,0 @@
import re
import traceback
from loguru import logger
class DrGraphSession:
    """Per-session logger wrapper.

    Prefixes every message with the session id and annotates each log
    entry with the caller's source position, resolved by walking the
    Python call stack a configurable number of frames up.
    """

    def __init__(self, stepIndex: int, msg: str, session_id: str):
        """Record step/session context and immediately log *msg*.

        The log level is chosen from keywords embedded in *msg*
        (警告/WARNING, 异常/EXCEPTION, 成功/SUCCESS, 开始/START,
        失败/ERROR; anything else is INFO). An optional ";-N" suffix in
        *msg* adjusts the stack depth used to report the caller position.
        """
        logger.info(f"DrGraphSession.__init__: stepIndex={stepIndex}, msg={msg}, session_id={session_id}")
        self.stepIndex = stepIndex
        self.session_id = session_id
        match = re.search(r";(-\d+)", msg)
        level = -3
        # FIX: `value` was never initialized before use — every call to
        # __init__ raised NameError. The dispatch below operates on the
        # message with any ";-N" depth marker stripped out.
        value = msg
        if match:
            level = int(match.group(1))
            value = value.replace(f";{level}", "")
            level = -3 + level
        if "警告" in value or value.startswith("WARNING"):
            self.log_warning(f"{self.stepIndex} 步 - {value}", level=level)
        elif "异常" in value or value.startswith("EXCEPTION"):
            self.log_exception(f"{self.stepIndex} 步 - {value}", level=level)
        elif "成功" in value or value.startswith("SUCCESS"):
            self.log_success(f"{self.stepIndex} 步 - {value}", level=level)
        elif "开始" in value or value.startswith("START"):
            self.log_success(f"{self.stepIndex} 步 - {value}", level=level)
        elif "失败" in value or value.startswith("ERROR"):
            self.log_error(f"{self.stepIndex} 步 - {value}", level=level)
        else:
            self.log_info(f"{self.stepIndex} 步 - {value}", level=level)

    def log_prefix(self) -> str:
        """Get log prefix with session ID and desc."""
        return f"〖Session{self.session_id}"

    def parse_source_pos(self, level: int) -> str:
        """Return "file:line in func" for the stack frame at *level*.

        *level* is a negative index into the current call stack; if the
        frame line cannot be parsed, the raw traceback line is returned.
        """
        pos = (traceback.format_stack())[level - 1].strip().split('\n')[0]
        match = re.search(r"File \"(.+?)\", line (\d+), in (\w+)", pos)
        if match:
            # Strip the local project root so logged paths stay short.
            # NOTE(review): hardcoded Windows dev path — harmless elsewhere
            # (replace is a no-op) but worth centralizing in config.
            file = match.group(1).replace("F:\\DrGraph_Python\\FastAPI\\", "")
            pos = f"{file}:{match.group(2)} in {match.group(3)}"
        return pos

    def log_info(self, msg: str, level: int = -2):
        """Log info message with session ID."""
        pos = self.parse_source_pos(level)
        logger.info(f"{self.log_prefix()} {msg} >>> @ {pos}")

    def log_success(self, msg: str, level: int = -2):
        """Log success message with session ID."""
        pos = self.parse_source_pos(level)
        logger.success(f"{self.log_prefix()} {msg} >>> @ {pos}")

    def log_warning(self, msg: str, level: int = -2):
        """Log warning message with session ID."""
        pos = self.parse_source_pos(level)
        logger.warning(f"{self.log_prefix()} {msg} >>> @ {pos}")

    def log_error(self, msg: str, level: int = -2):
        """Log error message with session ID."""
        pos = self.parse_source_pos(level)
        logger.error(f"{self.log_prefix()} {msg} >>> @ {pos}")

    def log_exception(self, msg: str, level: int = -2):
        """Log exception message with session ID."""
        pos = self.parse_source_pos(level)
        logger.exception(f"{self.log_prefix()} {msg} >>> @ {pos}")

View File

@ -1,9 +0,0 @@
import jwt
import inspect

# Diagnostic script: report which `jwt` module is actually installed
# (PyJWT vs. a conflicting package) and whether it exposes __version__.
print(f"jwt module path: {inspect.getfile(jwt)}")
print(f"jwt module attributes: {dir(jwt)}")

_version = getattr(jwt, "__version__", None)
if _version is not None:
    print(f"jwt module __version__: {_version}")
else:
    print("jwt module has no __version__ attribute")

View File

@ -11,7 +11,7 @@ app:
file:
upload_dir: "./data/uploads"
max_size: 10485760 # 10MB
allowed_extensions: [".txt", ".pdf", ".docx", ".md"]
allowed_extensions: [".txt", ".pdf", ".docx", ".doc", ".md"]
chunk_size: 1000
chunk_overlap: 200
semantic_splitter_enabled: true # 启用语义分割器
@ -46,4 +46,11 @@ cors:
chat:
max_history_length: 10
system_prompt: "你是一个有用的AI助手请根据提供的上下文信息回答用户的问题。"
max_response_tokens: 1000
max_response_tokens: 1000
# Redis Configuration
redis:
host: "localhost"
port: 6379
db: 0
password: null

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,40 @@
2025.10.13-2025.10.17
1、项目支撑
1.1)边缘计算项目评估支撑 -- 提供算法确定提供算法模型100%
1.2)陕西交控算法训练 ,待提供素材;
1.3)水利标书文件算法部分补充100%
1.4)溧宁高速算法优化,已确定问题,待排期处理。
1.5)外购数据集评估100%
2、算法开发
2.1) yolov11算法 的环境搭建,测试,训练完成 下一步将其移植到AILIB2框架优化现有算法。100%
2.2)公路违停算法优化:标注车道线数据集并训练,已完成 下一步测试算法效果。80%
3、DSP优化
3.1)自测功能中,下周完成转测 70%。
[2025-11-14 11:28:16] 执行命令: train data=/home/thsw2/jcq/projects/yolov11/tuohengTraintask/Datasets/jcq/003_CarRoadLane.yaml model=/home/thsw2/jcq/projects/yolov11/weights/yolo11n.pt epochs=220 batch=4 imgsz=1280 workers=4 project=003_CarRoadLane name=003_CarRoadLane_N_1114-1100 optimizer=SGD lr0=0.01
[2025-11-14 11:28:16] 完整命令: cd /home/thsw2/jcq/projects/yolov11/ultralytics-main/ultralytics && echo '===== 训练任务信息 =====' > 003_CarRoadLane_N_1114-1100.log && echo '运行名称: 003_CarRoadLane_N_1114-1100' >> 003_CarRoadLane_N_1114-1100.log && echo '训练时间: $(date)' >> 003_CarRoadLane_N_1114-1100.log && echo 'YOLO版本: yolo11' >> 003_CarRoadLane_N_1114-1100.log && echo '' >> 003_CarRoadLane_N_1114-1100.log && echo 'project_path: /home/thsw2/jcq/projects/yolov11/ultralytics-main/ultralytics' >> 003_CarRoadLane_N_1114-1100.log && echo 'data_file: /home/thsw2/jcq/projects/yolov11/tuohengTraintask/Datasets/jcq/003_CarRoadLane.yaml' >> 003_CarRoadLane_N_1114-1100.log && echo 'model_file: /home/thsw2/jcq/projects/yolov11/weights/yolo11n.pt' >> 003_CarRoadLane_N_1114-1100.log && echo 'epochs: 220' >> 003_CarRoadLane_N_1114-1100.log && echo 'batch_size: 4' >> 003_CarRoadLane_N_1114-1100.log && echo 'img_size: 1280' >> 003_CarRoadLane_N_1114-1100.log && echo 'workers: 4' >> 003_CarRoadLane_N_1114-1100.log && echo 'device: False' >> 003_CarRoadLane_N_1114-1100.log && echo 'project: 003_CarRoadLane' >> 003_CarRoadLane_N_1114-1100.log && echo 'name: 003_CarRoadLane_N_1114-1100' >> 003_CarRoadLane_N_1114-1100.log && echo 'exist_ok: False' >> 003_CarRoadLane_N_1114-1100.log && echo 'resume: False' >> 003_CarRoadLane_N_1114-1100.log && echo 'optimizer: SGD' >> 003_CarRoadLane_N_1114-1100.log && echo 'lr0: 0.01' >> 003_CarRoadLane_N_1114-1100.log && echo '' >> 003_CarRoadLane_N_1114-1100.log && echo '===== 开始训练 =====' >> 003_CarRoadLane_N_1114-1100.log && CUDA_VISIBLE_DEVICES=0 nohup /home/thsw2/miniconda3/envs/jcq_yolo11/bin/yolo train data=/home/thsw2/jcq/projects/yolov11/tuohengTraintask/Datasets/jcq/003_CarRoadLane.yaml model=/home/thsw2/jcq/projects/yolov11/weights/yolo11n.pt epochs=220 batch=4 imgsz=1280 workers=4 project=003_CarRoadLane name=003_CarRoadLane_N_1114-1100 optimizer=SGD lr0=0.01 >> 003_CarRoadLane_N_1114-1100.log 2>&1 & echo $!
[2025-11-14 11:28:34] 输出: 994531
yolo train data=/home/thsw2/jcq/projects/yolov11/tuohengTraintask/Datasets/jcq/003_CarRoadLane.yaml model=/home/thsw2/jcq/projects/yolov11/weights/yolo11n.pt epochs=220 batch=4 imgsz=1280 workers=4 project=003_CarRoadLane name=003_CarRoadLane_N_1114-1100 optimizer=SGD lr0=0.01

View File

@ -0,0 +1,40 @@
2025.10.13-2025.10.17
1、项目支撑
1.1)边缘计算项目评估支撑 -- 提供算法确定提供算法模型100%
1.2)陕西交控算法训练 ,待提供素材;
1.3水利标书文件算法部分补充100%
1.4)溧宁高速算法优化,已确定问题,待排期处理。
1.5外购数据集评估100%
2、算法开发
2.1) yolov11算法 的环境搭建,测试,训练完成 下一步将其移植到AILIB2框架优化现有算法。100%
2.2)公路违停算法优化:标注车道线数据集并训练,已完成 下一步测试算法效果。80%
3、DSP优化
3.1)自测功能中,下周完成转测 70%。
[2025-11-14 11:28:16] 执行命令: train data=/home/thsw2/jcq/projects/yolov11/tuohengTraintask/Datasets/jcq/003_CarRoadLane.yaml model=/home/thsw2/jcq/projects/yolov11/weights/yolo11n.pt epochs=220 batch=4 imgsz=1280 workers=4 project=003_CarRoadLane name=003_CarRoadLane_N_1114-1100 optimizer=SGD lr0=0.01
[2025-11-14 11:28:16] 完整命令: cd /home/thsw2/jcq/projects/yolov11/ultralytics-main/ultralytics && echo '===== 训练任务信息 =====' > 003_CarRoadLane_N_1114-1100.log && echo '运行名称: 003_CarRoadLane_N_1114-1100' >> 003_CarRoadLane_N_1114-1100.log && echo '训练时间: $(date)' >> 003_CarRoadLane_N_1114-1100.log && echo 'YOLO版本: yolo11' >> 003_CarRoadLane_N_1114-1100.log && echo '' >> 003_CarRoadLane_N_1114-1100.log && echo 'project_path: /home/thsw2/jcq/projects/yolov11/ultralytics-main/ultralytics' >> 003_CarRoadLane_N_1114-1100.log && echo 'data_file: /home/thsw2/jcq/projects/yolov11/tuohengTraintask/Datasets/jcq/003_CarRoadLane.yaml' >> 003_CarRoadLane_N_1114-1100.log && echo 'model_file: /home/thsw2/jcq/projects/yolov11/weights/yolo11n.pt' >> 003_CarRoadLane_N_1114-1100.log && echo 'epochs: 220' >> 003_CarRoadLane_N_1114-1100.log && echo 'batch_size: 4' >> 003_CarRoadLane_N_1114-1100.log && echo 'img_size: 1280' >> 003_CarRoadLane_N_1114-1100.log && echo 'workers: 4' >> 003_CarRoadLane_N_1114-1100.log && echo 'device: False' >> 003_CarRoadLane_N_1114-1100.log && echo 'project: 003_CarRoadLane' >> 003_CarRoadLane_N_1114-1100.log && echo 'name: 003_CarRoadLane_N_1114-1100' >> 003_CarRoadLane_N_1114-1100.log && echo 'exist_ok: False' >> 003_CarRoadLane_N_1114-1100.log && echo 'resume: False' >> 003_CarRoadLane_N_1114-1100.log && echo 'optimizer: SGD' >> 003_CarRoadLane_N_1114-1100.log && echo 'lr0: 0.01' >> 003_CarRoadLane_N_1114-1100.log && echo '' >> 003_CarRoadLane_N_1114-1100.log && echo '===== 开始训练 =====' >> 003_CarRoadLane_N_1114-1100.log && CUDA_VISIBLE_DEVICES=0 nohup /home/thsw2/miniconda3/envs/jcq_yolo11/bin/yolo train data=/home/thsw2/jcq/projects/yolov11/tuohengTraintask/Datasets/jcq/003_CarRoadLane.yaml model=/home/thsw2/jcq/projects/yolov11/weights/yolo11n.pt epochs=220 batch=4 imgsz=1280 workers=4 project=003_CarRoadLane name=003_CarRoadLane_N_1114-1100 optimizer=SGD lr0=0.01 >> 003_CarRoadLane_N_1114-1100.log 2>&1 & echo $!
[2025-11-14 11:28:34] 输出: 994531
yolo train data=/home/thsw2/jcq/projects/yolov11/tuohengTraintask/Datasets/jcq/003_CarRoadLane.yaml model=/home/thsw2/jcq/projects/yolov11/weights/yolo11n.pt epochs=220 batch=4 imgsz=1280 workers=4 project=003_CarRoadLane name=003_CarRoadLane_N_1114-1100 optimizer=SGD lr0=0.01

View File

@ -0,0 +1,40 @@
2025.10.13-2025.10.17
1、项目支撑
1.1)边缘计算项目评估支撑 -- 提供算法确定提供算法模型100%
1.2)陕西交控算法训练 ,待提供素材;
1.3水利标书文件算法部分补充100%
1.4)溧宁高速算法优化,已确定问题,待排期处理。
1.5外购数据集评估100%
2、算法开发
2.1) yolov11算法 的环境搭建,测试,训练完成 下一步将其移植到AILIB2框架优化现有算法。100%
2.2)公路违停算法优化:标注车道线数据集并训练,已完成 下一步测试算法效果。80%
3、DSP优化
3.1)自测功能中,下周完成转测 70%。
[2025-11-14 11:28:16] 执行命令: train data=/home/thsw2/jcq/projects/yolov11/tuohengTraintask/Datasets/jcq/003_CarRoadLane.yaml model=/home/thsw2/jcq/projects/yolov11/weights/yolo11n.pt epochs=220 batch=4 imgsz=1280 workers=4 project=003_CarRoadLane name=003_CarRoadLane_N_1114-1100 optimizer=SGD lr0=0.01
[2025-11-14 11:28:16] 完整命令: cd /home/thsw2/jcq/projects/yolov11/ultralytics-main/ultralytics && echo '===== 训练任务信息 =====' > 003_CarRoadLane_N_1114-1100.log && echo '运行名称: 003_CarRoadLane_N_1114-1100' >> 003_CarRoadLane_N_1114-1100.log && echo '训练时间: $(date)' >> 003_CarRoadLane_N_1114-1100.log && echo 'YOLO版本: yolo11' >> 003_CarRoadLane_N_1114-1100.log && echo '' >> 003_CarRoadLane_N_1114-1100.log && echo 'project_path: /home/thsw2/jcq/projects/yolov11/ultralytics-main/ultralytics' >> 003_CarRoadLane_N_1114-1100.log && echo 'data_file: /home/thsw2/jcq/projects/yolov11/tuohengTraintask/Datasets/jcq/003_CarRoadLane.yaml' >> 003_CarRoadLane_N_1114-1100.log && echo 'model_file: /home/thsw2/jcq/projects/yolov11/weights/yolo11n.pt' >> 003_CarRoadLane_N_1114-1100.log && echo 'epochs: 220' >> 003_CarRoadLane_N_1114-1100.log && echo 'batch_size: 4' >> 003_CarRoadLane_N_1114-1100.log && echo 'img_size: 1280' >> 003_CarRoadLane_N_1114-1100.log && echo 'workers: 4' >> 003_CarRoadLane_N_1114-1100.log && echo 'device: False' >> 003_CarRoadLane_N_1114-1100.log && echo 'project: 003_CarRoadLane' >> 003_CarRoadLane_N_1114-1100.log && echo 'name: 003_CarRoadLane_N_1114-1100' >> 003_CarRoadLane_N_1114-1100.log && echo 'exist_ok: False' >> 003_CarRoadLane_N_1114-1100.log && echo 'resume: False' >> 003_CarRoadLane_N_1114-1100.log && echo 'optimizer: SGD' >> 003_CarRoadLane_N_1114-1100.log && echo 'lr0: 0.01' >> 003_CarRoadLane_N_1114-1100.log && echo '' >> 003_CarRoadLane_N_1114-1100.log && echo '===== 开始训练 =====' >> 003_CarRoadLane_N_1114-1100.log && CUDA_VISIBLE_DEVICES=0 nohup /home/thsw2/miniconda3/envs/jcq_yolo11/bin/yolo train data=/home/thsw2/jcq/projects/yolov11/tuohengTraintask/Datasets/jcq/003_CarRoadLane.yaml model=/home/thsw2/jcq/projects/yolov11/weights/yolo11n.pt epochs=220 batch=4 imgsz=1280 workers=4 project=003_CarRoadLane name=003_CarRoadLane_N_1114-1100 optimizer=SGD lr0=0.01 >> 003_CarRoadLane_N_1114-1100.log 2>&1 & echo $!
[2025-11-14 11:28:34] 输出: 994531
yolo train data=/home/thsw2/jcq/projects/yolov11/tuohengTraintask/Datasets/jcq/003_CarRoadLane.yaml model=/home/thsw2/jcq/projects/yolov11/weights/yolo11n.pt epochs=220 batch=4 imgsz=1280 workers=4 project=003_CarRoadLane name=003_CarRoadLane_N_1114-1100 optimizer=SGD lr0=0.01

View File

@ -0,0 +1,40 @@
2025.10.13-2025.10.17
1、项目支撑
1.1)边缘计算项目评估支撑 -- 提供算法确定提供算法模型100%
1.2)陕西交控算法训练 ,待提供素材;
1.3水利标书文件算法部分补充100%
1.4)溧宁高速算法优化,已确定问题,待排期处理。
1.5外购数据集评估100%
2、算法开发
2.1) yolov11算法 的环境搭建,测试,训练完成 下一步将其移植到AILIB2框架优化现有算法。100%
2.2)公路违停算法优化:标注车道线数据集并训练,已完成 下一步测试算法效果。80%
3、DSP优化
3.1)自测功能中,下周完成转测 70%。
[2025-11-14 11:28:16] 执行命令: train data=/home/thsw2/jcq/projects/yolov11/tuohengTraintask/Datasets/jcq/003_CarRoadLane.yaml model=/home/thsw2/jcq/projects/yolov11/weights/yolo11n.pt epochs=220 batch=4 imgsz=1280 workers=4 project=003_CarRoadLane name=003_CarRoadLane_N_1114-1100 optimizer=SGD lr0=0.01
[2025-11-14 11:28:16] 完整命令: cd /home/thsw2/jcq/projects/yolov11/ultralytics-main/ultralytics && echo '===== 训练任务信息 =====' > 003_CarRoadLane_N_1114-1100.log && echo '运行名称: 003_CarRoadLane_N_1114-1100' >> 003_CarRoadLane_N_1114-1100.log && echo '训练时间: $(date)' >> 003_CarRoadLane_N_1114-1100.log && echo 'YOLO版本: yolo11' >> 003_CarRoadLane_N_1114-1100.log && echo '' >> 003_CarRoadLane_N_1114-1100.log && echo 'project_path: /home/thsw2/jcq/projects/yolov11/ultralytics-main/ultralytics' >> 003_CarRoadLane_N_1114-1100.log && echo 'data_file: /home/thsw2/jcq/projects/yolov11/tuohengTraintask/Datasets/jcq/003_CarRoadLane.yaml' >> 003_CarRoadLane_N_1114-1100.log && echo 'model_file: /home/thsw2/jcq/projects/yolov11/weights/yolo11n.pt' >> 003_CarRoadLane_N_1114-1100.log && echo 'epochs: 220' >> 003_CarRoadLane_N_1114-1100.log && echo 'batch_size: 4' >> 003_CarRoadLane_N_1114-1100.log && echo 'img_size: 1280' >> 003_CarRoadLane_N_1114-1100.log && echo 'workers: 4' >> 003_CarRoadLane_N_1114-1100.log && echo 'device: False' >> 003_CarRoadLane_N_1114-1100.log && echo 'project: 003_CarRoadLane' >> 003_CarRoadLane_N_1114-1100.log && echo 'name: 003_CarRoadLane_N_1114-1100' >> 003_CarRoadLane_N_1114-1100.log && echo 'exist_ok: False' >> 003_CarRoadLane_N_1114-1100.log && echo 'resume: False' >> 003_CarRoadLane_N_1114-1100.log && echo 'optimizer: SGD' >> 003_CarRoadLane_N_1114-1100.log && echo 'lr0: 0.01' >> 003_CarRoadLane_N_1114-1100.log && echo '' >> 003_CarRoadLane_N_1114-1100.log && echo '===== 开始训练 =====' >> 003_CarRoadLane_N_1114-1100.log && CUDA_VISIBLE_DEVICES=0 nohup /home/thsw2/miniconda3/envs/jcq_yolo11/bin/yolo train data=/home/thsw2/jcq/projects/yolov11/tuohengTraintask/Datasets/jcq/003_CarRoadLane.yaml model=/home/thsw2/jcq/projects/yolov11/weights/yolo11n.pt epochs=220 batch=4 imgsz=1280 workers=4 project=003_CarRoadLane name=003_CarRoadLane_N_1114-1100 optimizer=SGD lr0=0.01 >> 003_CarRoadLane_N_1114-1100.log 2>&1 & echo $!
[2025-11-14 11:28:34] 输出: 994531
yolo train data=/home/thsw2/jcq/projects/yolov11/tuohengTraintask/Datasets/jcq/003_CarRoadLane.yaml model=/home/thsw2/jcq/projects/yolov11/weights/yolo11n.pt epochs=220 batch=4 imgsz=1280 workers=4 project=003_CarRoadLane name=003_CarRoadLane_N_1114-1100 optimizer=SGD lr0=0.01

View File

@ -0,0 +1,40 @@
2025.10.13-2025.10.17
1、项目支撑
1.1)边缘计算项目评估支撑 -- 提供算法确定提供算法模型100%
1.2)陕西交控算法训练 ,待提供素材;
1.3水利标书文件算法部分补充100%
1.4)溧宁高速算法优化,已确定问题,待排期处理。
1.5外购数据集评估100%
2、算法开发
2.1) yolov11算法 的环境搭建,测试,训练完成 下一步将其移植到AILIB2框架优化现有算法。100%
2.2)公路违停算法优化:标注车道线数据集并训练,已完成 下一步测试算法效果。80%
3、DSP优化
3.1)自测功能中,下周完成转测 70%。
[2025-11-14 11:28:16] 执行命令: train data=/home/thsw2/jcq/projects/yolov11/tuohengTraintask/Datasets/jcq/003_CarRoadLane.yaml model=/home/thsw2/jcq/projects/yolov11/weights/yolo11n.pt epochs=220 batch=4 imgsz=1280 workers=4 project=003_CarRoadLane name=003_CarRoadLane_N_1114-1100 optimizer=SGD lr0=0.01
[2025-11-14 11:28:16] 完整命令: cd /home/thsw2/jcq/projects/yolov11/ultralytics-main/ultralytics && echo '===== 训练任务信息 =====' > 003_CarRoadLane_N_1114-1100.log && echo '运行名称: 003_CarRoadLane_N_1114-1100' >> 003_CarRoadLane_N_1114-1100.log && echo '训练时间: $(date)' >> 003_CarRoadLane_N_1114-1100.log && echo 'YOLO版本: yolo11' >> 003_CarRoadLane_N_1114-1100.log && echo '' >> 003_CarRoadLane_N_1114-1100.log && echo 'project_path: /home/thsw2/jcq/projects/yolov11/ultralytics-main/ultralytics' >> 003_CarRoadLane_N_1114-1100.log && echo 'data_file: /home/thsw2/jcq/projects/yolov11/tuohengTraintask/Datasets/jcq/003_CarRoadLane.yaml' >> 003_CarRoadLane_N_1114-1100.log && echo 'model_file: /home/thsw2/jcq/projects/yolov11/weights/yolo11n.pt' >> 003_CarRoadLane_N_1114-1100.log && echo 'epochs: 220' >> 003_CarRoadLane_N_1114-1100.log && echo 'batch_size: 4' >> 003_CarRoadLane_N_1114-1100.log && echo 'img_size: 1280' >> 003_CarRoadLane_N_1114-1100.log && echo 'workers: 4' >> 003_CarRoadLane_N_1114-1100.log && echo 'device: False' >> 003_CarRoadLane_N_1114-1100.log && echo 'project: 003_CarRoadLane' >> 003_CarRoadLane_N_1114-1100.log && echo 'name: 003_CarRoadLane_N_1114-1100' >> 003_CarRoadLane_N_1114-1100.log && echo 'exist_ok: False' >> 003_CarRoadLane_N_1114-1100.log && echo 'resume: False' >> 003_CarRoadLane_N_1114-1100.log && echo 'optimizer: SGD' >> 003_CarRoadLane_N_1114-1100.log && echo 'lr0: 0.01' >> 003_CarRoadLane_N_1114-1100.log && echo '' >> 003_CarRoadLane_N_1114-1100.log && echo '===== 开始训练 =====' >> 003_CarRoadLane_N_1114-1100.log && CUDA_VISIBLE_DEVICES=0 nohup /home/thsw2/miniconda3/envs/jcq_yolo11/bin/yolo train data=/home/thsw2/jcq/projects/yolov11/tuohengTraintask/Datasets/jcq/003_CarRoadLane.yaml model=/home/thsw2/jcq/projects/yolov11/weights/yolo11n.pt epochs=220 batch=4 imgsz=1280 workers=4 project=003_CarRoadLane name=003_CarRoadLane_N_1114-1100 optimizer=SGD lr0=0.01 >> 003_CarRoadLane_N_1114-1100.log 2>&1 & echo $!
[2025-11-14 11:28:34] 输出: 994531
yolo train data=/home/thsw2/jcq/projects/yolov11/tuohengTraintask/Datasets/jcq/003_CarRoadLane.yaml model=/home/thsw2/jcq/projects/yolov11/weights/yolo11n.pt epochs=220 batch=4 imgsz=1280 workers=4 project=003_CarRoadLane name=003_CarRoadLane_N_1114-1100 optimizer=SGD lr0=0.01

View File

@ -1,17 +0,0 @@
# Local development database: Postgres 16 with the pgvector extension
# (vector similarity search for embeddings).
services:
  db:
    image: pgvector/pgvector:pg16
    container_name: pgvector-db
    environment:
      POSTGRES_USER: drgraph
      # NOTE(review): credentials are hardcoded — presumably local-dev
      # only; move them to an .env file before any shared deployment.
      POSTGRES_PASSWORD: yingping
      POSTGRES_DB: th_agenter
    ports:
      - "5432:5432"
    volumes:
      # Named volume so data survives container recreation.
      - pgdata:/var/lib/postgresql/data
    restart: unless-stopped

volumes:
  pgdata:

# Connect to the running container:
# docker exec -it pgvector-db psql -U drgraph -d th_agenter

View File

@ -1,157 +0,0 @@
# uvicorn main:app --host 0.0.0.0 --port 8000 --reload
from fastapi import FastAPI
from os.path import dirname, realpath
from dotenv import load_dotenv
load_dotenv()
from utils.util_log import init_logger
from loguru import logger
base_dir: str = dirname(realpath(__file__))
init_logger(base_dir)
from th_agenter.api.routes import router
from contextlib import asynccontextmanager
from starlette.exceptions import HTTPException as StarletteHTTPException
from fastapi.exceptions import RequestValidationError
from fastapi.responses import JSONResponse
from fastapi.staticfiles import StaticFiles
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan manager.

    Per FastAPI's lifespan protocol: code before `yield` runs once at
    startup, code after runs once at shutdown. Currently both phases
    only emit a log line.
    """
    logger.info("[生命周期] - Starting up TH Agenter application...")
    yield
    # Shutdown
    logger.info("[生命周期] - Shutting down TH Agenter application...")
def setup_exception_handlers(app: FastAPI) -> None:
    """Setup global exception handlers.

    Registers, from most to least specific: ChatAgentException (domain
    errors), Starlette HTTP exceptions, request-validation errors, and
    a catch-all for anything unhandled.
    """
    # Import custom exceptions and handlers (function-local to avoid
    # import cycles at module load).
    from utils.util_exceptions import ChatAgentException, chat_agent_exception_handler

    @app.exception_handler(ChatAgentException)
    async def custom_chat_agent_exception_handler(request, exc):
        return await chat_agent_exception_handler(request, exc)

    @app.exception_handler(StarletteHTTPException)
    async def http_exception_handler(request, exc):
        from utils.util_exceptions import HxfErrorResponse
        logger.exception(f"HTTP Exception: {exc.status_code} - {exc.detail} - {request.method} {request.url}")
        return HxfErrorResponse(exc.status_code, exc.detail)

    def make_json_serializable(obj):
        """Recursively convert *obj* into JSON-serializable primitives."""
        if obj is None or isinstance(obj, (str, int, float, bool)):
            return obj
        elif isinstance(obj, bytes):
            return obj.decode('utf-8')
        elif isinstance(obj, Exception):
            # Simplified: ValueError is already an Exception, so the old
            # (ValueError, Exception) tuple was redundant.
            return str(obj)
        elif isinstance(obj, dict):
            return {k: make_json_serializable(v) for k, v in obj.items()}
        elif isinstance(obj, (list, tuple)):
            return [make_json_serializable(item) for item in obj]
        else:
            # For any other object, fall back to its string representation.
            return str(obj)

    @app.exception_handler(RequestValidationError)
    async def validation_exception_handler(request, exc):
        # Convert any non-serializable objects to strings in error details.
        try:
            errors = make_json_serializable(exc.errors())
        except Exception as e:
            # Fallback: if even our conversion fails, use a simple error message.
            errors = [{"type": "serialization_error", "msg": f"Error processing validation details: {str(e)}"}]
        # FIX: the same errors were previously logged twice back-to-back.
        logger.exception(f"Request Validation Error: {errors}")
        return JSONResponse(
            status_code=422,
            content={
                "error": {
                    "type": "validation_error",
                    "message": "Request validation failed",
                    "details": errors
                }
            }
        )

    @app.exception_handler(Exception)
    async def general_exception_handler(request, exc):
        # FIX: loguru has no stdlib-style `exc_info` kwarg — extra kwargs
        # are passed to str.format() on the message and raise if str(exc)
        # contains braces. Attach the traceback via opt(exception=...).
        logger.opt(exception=exc).error(f"Unhandled exception: {exc}")
        return JSONResponse(
            status_code=500,
            content={
                "error": {
                    "type": "internal_error",
                    "message": "Internal server error"
                }
            }
        )
def create_app() -> FastAPI:
    """Create and configure FastAPI application.

    Assembly order: load settings, build the app, mount static files,
    install middleware, install exception handlers, register routes.
    """
    from th_agenter.core.config import get_settings
    settings = get_settings()
    # Create FastAPI app with project metadata and the lifespan hook above.
    app = FastAPI(
        title=settings.app_name,
        version=settings.app_version,
        description="基于Vue的第一个聊天智能体应用使用FastAPI后端由TH Agenter修改",
        debug=settings.debug,
        lifespan=lifespan,
    )
    # Serve files from ./static at /static. NOTE(review): StaticFiles
    # raises at startup if the directory is missing — presumably created
    # by deployment; confirm.
    app.mount("/static", StaticFiles(directory="static"), name="th_agenter_static")
    # Add middleware (CORS etc. — defined in th_agenter.core.app).
    from th_agenter.core.app import setup_middleware
    setup_middleware(app, settings)
    # Add exception handlers
    setup_exception_handlers(app)
    add_router(app)
    return app
def add_router(app: FastAPI) -> None:
    """Add default routers to the FastAPI application."""

    @app.get("/")
    def read_root():
        # Minimal liveness endpoint at the application root.
        logger.info("Hello World")
        return {"Hello": "World"}

    # All API endpoints are grouped under the /api prefix.
    app.include_router(router, prefix="/api")
# Module-level ASGI application instance, served as `uvicorn main:app`.
app = create_app()
# from utils.util_test import test_db
# test_db()

Binary file not shown.

Before

Width:  |  Height:  |  Size: 4.9 KiB

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -1 +1 @@
"""API module for TH Agenter."""
"""API module for TH-Agenter."""

View File

@ -1 +1 @@
"""API endpoints for TH Agenter."""
"""API endpoints for TH-Agenter."""

Some files were not shown because too many files have changed in this diff Show More