修改devops部署流程

This commit is contained in:
孙小云 2026-01-10 15:15:45 +08:00
parent 7f9f792023
commit 18b049d6c0
14 changed files with 1238 additions and 821 deletions

View File

@ -3,195 +3,9 @@
# 全局分支配置(所有仓库统一使用此分支)
global_branch: main
# Git 仓库配置
repositories:
# 认证服务
- name: ruoyi-auth
url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-auth.git
branch: main
path: ruoyi-auth
type: java
deploy_script: deploy-java.sh
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/auth/jar
docker_service: ruoyi-auth
# 网关服务
- name: ruoyi-gateway
url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-gateway.git
branch: main
path: ruoyi-gateway
type: java
deploy_script: deploy-java.sh
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/gateway/jar
docker_service: ruoyi-gateway
# 前端UI
- name: ruoyi-ui
url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-ui.git
branch: main
path: ruoyi-ui
type: nodejs
deploy_script: deploy-ui.sh
build_commands:
- npm install
- npm run build:prod
artifact_path: dist
docker_path: docker/nginx/html/dist
docker_service: ruoyi-nginx
# 系统服务
- name: ruoyi-system
url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-system.git
branch: main
path: ruoyi-modules/ruoyi-system
type: java
deploy_script: deploy-java.sh
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/modules/system/jar
docker_service: ruoyi-modules-system
# 文件服务
- name: ruoyi-file
url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-file.git
branch: main
path: ruoyi-modules/ruoyi-file
type: java
deploy_script: deploy-java.sh
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/modules/file/jar
docker_service: ruoyi-modules-file
# 代码生成
- name: ruoyi-gen
url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-gen.git
branch: main
path: ruoyi-modules/ruoyi-gen
type: java
deploy_script: deploy-java.sh
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/modules/gen/jar
docker_service: ruoyi-modules-gen
# 定时任务
- name: ruoyi-job
url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-job.git
branch: main
path: ruoyi-modules/ruoyi-job
type: java
deploy_script: deploy-java.sh
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/modules/job/jar
docker_service: ruoyi-modules-job
# 监控服务
- name: ruoyi-monitor
url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-visual.git
branch: main
path: ruoyi-visual/ruoyi-monitor
type: java
deploy_script: deploy-java.sh
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/visual/monitor/jar
docker_service: ruoyi-visual-monitor
# 设备服务
- name: tuoheng-device
url: http://th.local.t-aaron.com:13000/THENG/a-tuoheng-device.git
branch: main
path: ruoyi-modules/tuoheng-device
type: java
deploy_script: deploy-java.sh
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/modules/device/jar
docker_service: tuoheng-modules-device
# 审批服务
- name: tuoheng-approval
url: http://th.local.t-aaron.com:13000/THENG/a-tuoheng-approval.git
branch: main
path: ruoyi-modules/tuoheng-approval
type: java
deploy_script: deploy-java.sh
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/modules/approval/jar
docker_service: tuoheng-modules-approval
# 航线服务
- name: tuoheng-airline
url: http://th.local.t-aaron.com:13000/THENG/a-tuoheng-airline.git
branch: main
path: ruoyi-modules/tuoheng-airline
type: java
deploy_script: deploy-java.sh
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/modules/airline/jar
docker_service: tuoheng-modules-airline
# 任务服务
- name: tuoheng-task
url: http://th.local.t-aaron.com:13000/THENG/a-tuoheng-task.git
branch: main
path: ruoyi-modules/tuoheng-task
type: java
deploy_script: deploy-java.sh
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/modules/task/jar
docker_service: tuoheng-modules-task
# FMS服务
- name: tuoheng-fms
url: http://th.local.t-aaron.com:13000/THENG/a-tuoheng-fms.git
branch: main
path: ruoyi-modules/tuoheng-fms
type: java
deploy_script: deploy-java.sh
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/modules/fms/jar
docker_service: tuoheng-modules-fms
# 媒体服务
- name: tuoheng-media
url: http://th.local.t-aaron.com:13000/THENG/a-tuoheng-media.git
branch: main
path: ruoyi-modules/tuoheng-media
type: java
deploy_script: deploy-java.sh
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/modules/media/jar
docker_service: tuoheng-modules-media
# 主仓库配置
main_repository:
url: http://th.local.t-aaron.com:13000/THENG/a-cloud-all.git
branch: main
runtime_path: ./runtime
# 监听配置
@ -205,10 +19,8 @@ deploy:
# 日志配置
logging:
level: DEBUG # DEBUG, INFO, WARNING, ERROR - 改为 DEBUG 可以看到更详细的日志
file: .devops/logs/devops.log
max_size: 10485760 # 10MB
backup_count: 5
# 基础设施服务配置(只部署一次)
infrastructure:
@ -217,15 +29,169 @@ infrastructure:
pre_deploy_commands:
- cp sql/ry_20250523.sql docker/mysql/db/
- cp sql/ry_config_20250902.sql docker/mysql/db/
deployed_flag: .devops/.deployed_mysql
wait_time: 30 # MySQL 需要更长时间初始化
- name: ruoyi-redis
docker_service: ruoyi-redis
deployed_flag: .devops/.deployed_redis
wait_time: 10 # Redis 启动较快
- name: ruoyi-nacos
docker_service: ruoyi-nacos
deployed_flag: .devops/.deployed_nacos
wait_time: 20 # Nacos 需要等待 MySQL 就绪
# Git 仓库配置
repositories:
# 认证服务
- name: ruoyi-auth
url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-auth.git
path: ruoyi-auth
type: java
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/auth/jar
docker_service: ruoyi-auth
# 网关服务
- name: ruoyi-gateway
url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-gateway.git
path: ruoyi-gateway
type: java
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/gateway/jar
docker_service: ruoyi-gateway
# 前端UI
- name: ruoyi-ui
url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-ui.git
path: ruoyi-ui
type: nodejs
build_commands:
- npm install
- npm run build:prod
artifact_path: dist
docker_path: docker/nginx/html/dist
docker_service: ruoyi-nginx
# 系统服务
- name: ruoyi-system
url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-system.git
path: ruoyi-modules/ruoyi-system
type: java
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/modules/system/jar
docker_service: ruoyi-modules-system
# 文件服务
- name: ruoyi-file
url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-file.git
path: ruoyi-modules/ruoyi-file
type: java
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/modules/file/jar
docker_service: ruoyi-modules-file
# 代码生成
- name: ruoyi-gen
url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-gen.git
path: ruoyi-modules/ruoyi-gen
type: java
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/modules/gen/jar
docker_service: ruoyi-modules-gen
# 定时任务
- name: ruoyi-job
url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-job.git
path: ruoyi-modules/ruoyi-job
type: java
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/modules/job/jar
docker_service: ruoyi-modules-job
# 监控服务
- name: ruoyi-monitor
url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-visual.git
path: ruoyi-visual/ruoyi-monitor
type: java
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/visual/monitor/jar
docker_service: ruoyi-visual-monitor
# 设备服务
- name: tuoheng-device
url: http://th.local.t-aaron.com:13000/THENG/a-tuoheng-device.git
path: ruoyi-modules/tuoheng-device
type: java
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/modules/device/jar
docker_service: tuoheng-modules-device
# 审批服务
- name: tuoheng-approval
url: http://th.local.t-aaron.com:13000/THENG/a-tuoheng-approval.git
path: ruoyi-modules/tuoheng-approval
type: java
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/modules/approval/jar
docker_service: tuoheng-modules-approval
# 航线服务
- name: tuoheng-airline
url: http://th.local.t-aaron.com:13000/THENG/a-tuoheng-airline.git
path: ruoyi-modules/tuoheng-airline
type: java
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/modules/airline/jar
docker_service: tuoheng-modules-airline
# 任务服务
- name: tuoheng-task
url: http://th.local.t-aaron.com:13000/THENG/a-tuoheng-task.git
path: ruoyi-modules/tuoheng-task
type: java
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/modules/task/jar
docker_service: tuoheng-modules-task
# FMS服务
- name: tuoheng-fms
url: http://th.local.t-aaron.com:13000/THENG/a-tuoheng-fms.git
path: ruoyi-modules/tuoheng-fms
type: java
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/modules/fms/jar
docker_service: tuoheng-modules-fms
# 媒体服务
- name: tuoheng-media
url: http://th.local.t-aaron.com:13000/THENG/a-tuoheng-media.git
path: ruoyi-modules/tuoheng-media
type: java
build_commands:
- mvn clean package -DskipTests
artifact_path: target/*.jar
docker_path: docker/ruoyi/modules/media/jar
docker_service: tuoheng-modules-media

200
.devops/config.yaml.bak Normal file
View File

@ -0,0 +1,200 @@
# DevOps automated-deployment configuration file
# Global branch setting (every repository uses this branch)
global_branch: main
# Git repository configuration
repositories:
  # Authentication service
  - name: ruoyi-auth
    url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-auth.git
    path: ruoyi-auth
    type: java
    build_commands:
      - mvn clean package -DskipTests
    artifact_path: target/*.jar
    docker_path: docker/ruoyi/auth/jar
    docker_service: ruoyi-auth
  # Gateway service
  - name: ruoyi-gateway
    url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-gateway.git
    path: ruoyi-gateway
    type: java
    build_commands:
      - mvn clean package -DskipTests
    artifact_path: target/*.jar
    docker_path: docker/ruoyi/gateway/jar
    docker_service: ruoyi-gateway
  # Front-end UI
  - name: ruoyi-ui
    url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-ui.git
    path: ruoyi-ui
    type: nodejs
    build_commands:
      - npm install
      - npm run build:prod
    artifact_path: dist
    docker_path: docker/nginx/html/dist
    docker_service: ruoyi-nginx
  # System service
  - name: ruoyi-system
    url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-system.git
    path: ruoyi-modules/ruoyi-system
    type: java
    build_commands:
      - mvn clean package -DskipTests
    artifact_path: target/*.jar
    docker_path: docker/ruoyi/modules/system/jar
    docker_service: ruoyi-modules-system
  # File service
  - name: ruoyi-file
    url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-file.git
    path: ruoyi-modules/ruoyi-file
    type: java
    build_commands:
      - mvn clean package -DskipTests
    artifact_path: target/*.jar
    docker_path: docker/ruoyi/modules/file/jar
    docker_service: ruoyi-modules-file
  # Code generator
  - name: ruoyi-gen
    url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-gen.git
    path: ruoyi-modules/ruoyi-gen
    type: java
    build_commands:
      - mvn clean package -DskipTests
    artifact_path: target/*.jar
    docker_path: docker/ruoyi/modules/gen/jar
    docker_service: ruoyi-modules-gen
  # Scheduled-job service
  - name: ruoyi-job
    url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-job.git
    path: ruoyi-modules/ruoyi-job
    type: java
    build_commands:
      - mvn clean package -DskipTests
    artifact_path: target/*.jar
    docker_path: docker/ruoyi/modules/job/jar
    docker_service: ruoyi-modules-job
  # Monitoring service
  - name: ruoyi-monitor
    url: http://th.local.t-aaron.com:13000/THENG/a-ruoyi-visual.git
    path: ruoyi-visual/ruoyi-monitor
    type: java
    build_commands:
      - mvn clean package -DskipTests
    artifact_path: target/*.jar
    docker_path: docker/ruoyi/visual/monitor/jar
    docker_service: ruoyi-visual-monitor
  # Device service
  - name: tuoheng-device
    url: http://th.local.t-aaron.com:13000/THENG/a-tuoheng-device.git
    path: ruoyi-modules/tuoheng-device
    type: java
    build_commands:
      - mvn clean package -DskipTests
    artifact_path: target/*.jar
    docker_path: docker/ruoyi/modules/device/jar
    docker_service: tuoheng-modules-device
  # Approval service
  - name: tuoheng-approval
    url: http://th.local.t-aaron.com:13000/THENG/a-tuoheng-approval.git
    path: ruoyi-modules/tuoheng-approval
    type: java
    build_commands:
      - mvn clean package -DskipTests
    artifact_path: target/*.jar
    docker_path: docker/ruoyi/modules/approval/jar
    docker_service: tuoheng-modules-approval
  # Flight-route (airline) service
  - name: tuoheng-airline
    url: http://th.local.t-aaron.com:13000/THENG/a-tuoheng-airline.git
    path: ruoyi-modules/tuoheng-airline
    type: java
    build_commands:
      - mvn clean package -DskipTests
    artifact_path: target/*.jar
    docker_path: docker/ruoyi/modules/airline/jar
    docker_service: tuoheng-modules-airline
  # Task service
  - name: tuoheng-task
    url: http://th.local.t-aaron.com:13000/THENG/a-tuoheng-task.git
    path: ruoyi-modules/tuoheng-task
    type: java
    build_commands:
      - mvn clean package -DskipTests
    artifact_path: target/*.jar
    docker_path: docker/ruoyi/modules/task/jar
    docker_service: tuoheng-modules-task
  # FMS service
  - name: tuoheng-fms
    url: http://th.local.t-aaron.com:13000/THENG/a-tuoheng-fms.git
    path: ruoyi-modules/tuoheng-fms
    type: java
    build_commands:
      - mvn clean package -DskipTests
    artifact_path: target/*.jar
    docker_path: docker/ruoyi/modules/fms/jar
    docker_service: tuoheng-modules-fms
  # Media service
  - name: tuoheng-media
    url: http://th.local.t-aaron.com:13000/THENG/a-tuoheng-media.git
    path: ruoyi-modules/tuoheng-media
    type: java
    build_commands:
      - mvn clean package -DskipTests
    artifact_path: target/*.jar
    docker_path: docker/ruoyi/modules/media/jar
    docker_service: tuoheng-modules-media
# Main (aggregate) repository configuration
main_repository:
  url: http://th.local.t-aaron.com:13000/THENG/a-cloud-all.git
  runtime_path: ./runtime
# Polling / watch configuration
monitor:
  poll_interval: 10 # polling interval in seconds
  enabled_repos: [] # empty list watches every repository; otherwise list names
# Deployment configuration
deploy:
  docker_compose_path: ./docker/docker-compose.yml
# Logging configuration
logging:
  file: .devops/logs/devops.log
  max_size: 10485760 # 10MB
# Infrastructure services (deployed only once; flag files record completion)
infrastructure:
  - name: ruoyi-mysql
    docker_service: ruoyi-mysql
    pre_deploy_commands:
      - cp sql/ry_20250523.sql docker/mysql/db/
      - cp sql/ry_config_20250902.sql docker/mysql/db/
    deployed_flag: .devops/.deployed_mysql
    wait_time: 30 # MySQL needs the longest time to initialize
  - name: ruoyi-redis
    docker_service: ruoyi-redis
    deployed_flag: .devops/.deployed_redis
    wait_time: 10 # Redis starts quickly
  - name: ruoyi-nacos
    docker_service: ruoyi-nacos
    deployed_flag: .devops/.deployed_nacos
    wait_time: 20 # Nacos must wait for MySQL to be ready

View File

@ -1,393 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
部署执行器
负责执行具体的部署任务
"""
import glob
import logging
import os
import shutil
import subprocess
import sys
import time
from pathlib import Path
class Deployer:
"""部署执行器"""
def __init__(self, config):
"""初始化部署器"""
self.config = config
self.logger = logging.getLogger('Deployer')
# 获取项目根目录(.devops 的父目录)
project_root = Path(__file__).parent.parent.resolve()
# 将 runtime_path 转换为绝对路径
runtime_path = config['main_repository']['runtime_path']
if not Path(runtime_path).is_absolute():
self.runtime_path = project_root / runtime_path
else:
self.runtime_path = Path(runtime_path)
self.main_repo_url = config['main_repository']['url']
# 使用全局分支配置
self.global_branch = config.get('global_branch', 'main')
self.main_repo_branch = self.global_branch
self.logger.info(f"项目根目录: {project_root}")
self.logger.info(f"Runtime 目录: {self.runtime_path}")
self.logger.info(f"全局分支: {self.global_branch}")
def run_command(self, cmd, cwd=None, timeout=600):
"""执行命令"""
cwd_str = str(cwd) if cwd else "当前目录"
self.logger.info(f"执行目录: {cwd_str}")
self.logger.info(f"执行命令: {cmd}")
try:
result = subprocess.run(
cmd,
shell=True,
cwd=cwd,
capture_output=True,
text=True,
timeout=timeout
)
# 始终输出标准输出(如果有)
if result.stdout:
# 限制输出长度,避免日志过大
stdout_lines = result.stdout.strip().split('\n')
if len(stdout_lines) > 50:
self.logger.info(f"标准输出 (前30行):\n" + '\n'.join(stdout_lines[:30]))
self.logger.info(f"... (省略 {len(stdout_lines) - 50} 行)")
self.logger.info(f"标准输出 (后20行):\n" + '\n'.join(stdout_lines[-20:]))
else:
self.logger.info(f"标准输出:\n{result.stdout.strip()}")
if result.returncode != 0:
self.logger.error(f"命令执行失败 (退出码: {result.returncode})")
if result.stderr:
# 限制错误输出长度
stderr_lines = result.stderr.strip().split('\n')
if len(stderr_lines) > 50:
self.logger.error(f"错误输出 (前30行):\n" + '\n'.join(stderr_lines[:30]))
self.logger.error(f"... (省略 {len(stderr_lines) - 50} 行)")
self.logger.error(f"错误输出 (后20行):\n" + '\n'.join(stderr_lines[-20:]))
else:
self.logger.error(f"错误输出:\n{result.stderr.strip()}")
return False
self.logger.info("命令执行成功")
return True
except subprocess.TimeoutExpired:
self.logger.error(f"命令执行超时 (超时时间: {timeout}秒)")
return False
except Exception as e:
self.logger.error(f"命令执行异常: {e}")
return False
def ensure_main_repo(self):
"""确保主仓库存在并是最新的"""
# 克隆到 runtime/a-cloud-all 目录
repo_path = self.runtime_path / 'a-cloud-all'
# 检查是否是有效的 Git 仓库
if not (repo_path / '.git').exists():
self.logger.info("主仓库不存在,开始克隆...")
# 确保 runtime 目录存在
self.runtime_path.mkdir(parents=True, exist_ok=True)
# 克隆到 runtime/a-cloud-all 目录
cmd = f"git clone --recurse-submodules {self.main_repo_url} a-cloud-all"
if not self.run_command(cmd, cwd=self.runtime_path):
self.logger.error("克隆主仓库失败")
return False
self.logger.info("主仓库克隆成功")
else:
self.logger.info("主仓库已存在,更新代码...")
# 切换到配置的主分支
self.logger.info(f"切换到主分支: {self.main_repo_branch}")
if not self.run_command(f"git checkout {self.main_repo_branch}", cwd=repo_path):
return False
# 拉取主仓库最新代码
self.logger.info("拉取主仓库最新代码...")
if not self.run_command("git pull", cwd=repo_path):
return False
# 更新所有子模块到全局配置的分支
if not self.update_all_submodules(repo_path):
return False
self.logger.info("主仓库更新成功")
return True
def update_all_submodules(self, repo_path):
"""更新所有子模块到全局配置的分支"""
self.logger.info(f"更新所有子模块到分支: {self.global_branch}")
# 使用 git submodule foreach 批量更新所有子模块到全局分支
cmd = f"git submodule foreach 'git checkout {self.global_branch} && git pull'"
if not self.run_command(cmd, cwd=repo_path, timeout=600):
self.logger.warning("批量更新子模块失败")
return False
return True
def update_submodule(self, repo_config):
"""更新指定的子模块"""
repo_path = self.runtime_path / 'a-cloud-all'
submodule_path = repo_path / repo_config['path']
self.logger.info(f"更新子模块: {repo_config['name']}")
# 进入子模块目录
if not submodule_path.exists():
self.logger.error(f"子模块目录不存在: {submodule_path}")
return False
# 切换到全局配置的分支
if not self.run_command(f"git checkout {self.global_branch}", cwd=submodule_path):
return False
# 拉取最新代码
if not self.run_command(f"git pull origin {self.global_branch}", cwd=submodule_path):
return False
self.logger.info(f"子模块更新成功: {repo_config['name']}")
return True
def build_project(self, repo_config):
"""构建项目"""
repo_path = self.runtime_path / 'a-cloud-all'
self.logger.info(f"开始构建: {repo_config['name']}")
# 根据项目类型选择执行目录
if repo_config['type'] == 'nodejs':
# Node.js 项目在子模块目录执行
build_dir = repo_path / repo_config['path']
self.logger.info(f"Node.js 项目,在子模块目录执行构建")
else:
# Java 项目在主仓库根目录执行
build_dir = repo_path
self.logger.info(f"Java 项目,在主仓库根目录执行构建")
# 执行构建命令
for cmd in repo_config['build_commands']:
self.logger.info(f"执行构建命令: {cmd}")
if not self.run_command(cmd, cwd=build_dir, timeout=1800):
self.logger.error(f"构建失败: {cmd}")
return False
self.logger.info(f"构建成功: {repo_config['name']}")
return True
def copy_artifacts(self, repo_config):
"""复制构建产物到 docker 目录"""
repo_path = self.runtime_path / 'a-cloud-all'
submodule_path = repo_path / repo_config['path']
self.logger.info(f"复制构建产物: {repo_config['name']}")
# 获取构建产物路径
artifact_pattern = submodule_path / repo_config['artifact_path']
artifacts = glob.glob(str(artifact_pattern))
if not artifacts:
self.logger.error(f"未找到构建产物: {artifact_pattern}")
return False
# 目标目录
docker_path = repo_path / repo_config['docker_path']
docker_path.mkdir(parents=True, exist_ok=True)
# 复制文件
for artifact in artifacts:
artifact_path = Path(artifact)
if artifact_path.is_file():
dest = docker_path / artifact_path.name
shutil.copy2(artifact, dest)
self.logger.info(f"复制文件: {artifact_path.name}")
elif artifact_path.is_dir():
# 如果是目录(如 dist清空目标目录后复制
if docker_path.exists():
for item in docker_path.iterdir():
if item.name != '.gitkeep':
if item.is_dir():
shutil.rmtree(item)
else:
item.unlink()
shutil.copytree(artifact, docker_path, dirs_exist_ok=True)
self.logger.info(f"复制目录: {artifact_path.name}")
self.logger.info("构建产物复制完成")
return True
def run_deploy_script(self, repo_config):
"""执行部署脚本"""
repo_path = self.runtime_path / 'a-cloud-all'
script_name = repo_config['deploy_script']
script_path = repo_path / '.devops' / 'scripts' / script_name
if not script_path.exists():
self.logger.error(f"部署脚本不存在: {script_path}")
return False
self.logger.info(f"执行部署脚本: {script_name}")
# 准备脚本参数
docker_service = repo_config.get('docker_service', '')
docker_compose_path = self.config['deploy']['docker_compose_path']
# 执行脚本
cmd = f"bash {script_path} {repo_config['name']} {docker_service} {docker_compose_path}"
if not self.run_command(cmd, cwd=repo_path, timeout=600):
self.logger.error("部署脚本执行失败")
return False
self.logger.info("部署脚本执行成功")
return True
def commit_submodule_update(self, repo_config):
"""提交子模块更新到主仓库"""
if not self.config['deploy'].get('auto_commit', False):
self.logger.info("自动提交已禁用,跳过")
return True
repo_path = self.runtime_path / 'a-cloud-all'
self.logger.info("提交子模块更新到主仓库")
# 添加子模块更改
submodule_path = repo_config['path']
if not self.run_command(f"git add {submodule_path}", cwd=repo_path):
return False
# 检查是否有更改
result = subprocess.run(
"git diff --cached --quiet",
shell=True,
cwd=repo_path
)
if result.returncode == 0:
self.logger.info("没有需要提交的更改")
return True
# 提交更改
commit_msg = self.config['deploy']['commit_message'].format(
repo_name=repo_config['name']
)
if not self.run_command(f'git commit -m "{commit_msg}"', cwd=repo_path):
return False
# 推送前先拉取远程最新代码
self.logger.info("推送前先拉取远程最新代码...")
if not self.run_command(f"git pull --rebase origin {self.main_repo_branch}", cwd=repo_path):
self.logger.warning("拉取远程代码失败,尝试直接推送")
# 推送到远程
if not self.run_command(f"git push origin {self.main_repo_branch}", cwd=repo_path):
self.logger.warning("推送失败,但部署已完成")
return True
self.logger.info("子模块更新已提交并推送")
return True
def deploy(self, repo_config):
    """Run the full deployment pipeline for one repository.

    Pipeline: ensure main repo -> infrastructure -> update submodule
    -> build -> copy artifacts -> run deploy script. Returns True on
    success, False on the first failing step or any exception.
    """
    self.logger.info(f"=" * 60)
    self.logger.info(f"开始部署: {repo_config['name']}")
    self.logger.info(f"=" * 60)
    try:
        # 1. Make sure the main repository exists and is current.
        if not self.ensure_main_repo():
            return False
        # 2. Deploy infrastructure services (first run only).
        if not self.deploy_infrastructure():
            return False
        # 3. Update the target submodule.
        if not self.update_submodule(repo_config):
            return False
        # 4. Build the project.
        if not self.build_project(repo_config):
            return False
        # 5. Copy build artifacts into the docker context.
        if not self.copy_artifacts(repo_config):
            return False
        # 6. Execute the deploy script.
        if not self.run_deploy_script(repo_config):
            return False
        self.logger.info(f"部署完成: {repo_config['name']}")
        return True
    except Exception as e:
        self.logger.error(f"部署过程中发生异常: {e}", exc_info=True)
        return False
def deploy_infrastructure(self):
"""部署基础设施服务(只部署一次)"""
if 'infrastructure' not in self.config:
return True
repo_path = self.runtime_path / 'a-cloud-all'
for infra in self.config['infrastructure']:
name = infra['name']
deployed_flag = repo_path / infra['deployed_flag']
# 检查是否已部署
if deployed_flag.exists():
self.logger.info(f"基础设施 {name} 已部署,跳过")
continue
self.logger.info(f"部署基础设施: {name}")
# 执行预部署命令
if 'pre_deploy_commands' in infra:
for cmd in infra['pre_deploy_commands']:
if not self.run_command(cmd, cwd=repo_path):
self.logger.error(f"预部署命令失败: {cmd}")
return False
# 部署服务
docker_service = infra['docker_service']
docker_dir = repo_path / 'docker'
cmd = f"docker-compose build --no-cache {docker_service} && docker-compose up -d {docker_service}"
if not self.run_command(cmd, cwd=docker_dir, timeout=1800):
self.logger.error(f"部署失败: {name}")
return False
# 等待服务启动(特别是 MySQL 和 Redis
wait_time = infra.get('wait_time', 10)
self.logger.info(f"等待 {name} 启动完成 ({wait_time} 秒)...")
import time
time.sleep(wait_time)
# 创建部署标记
deployed_flag.parent.mkdir(parents=True, exist_ok=True)
deployed_flag.touch()
self.logger.info(f"基础设施部署完成: {name}")
return True

View File

@ -9,7 +9,6 @@ import os
import sys
import time
import yaml
import logging
import subprocess
from datetime import datetime
from pathlib import Path
@ -17,7 +16,10 @@ from pathlib import Path
# 添加当前目录到 Python 路径
sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))
from deployer import Deployer
# 导入自定义模块
from scripts.log import Logger
from scripts import docker, maven, npm
from scripts.init import mysql, redis, nacos
class GitMonitor:
@ -26,55 +28,66 @@ class GitMonitor:
def __init__(self, config_path='.devops/config.yaml'):
"""初始化监听器"""
self.config_path = config_path
self.config = self._load_config()
self._setup_logging()
self.deployer = Deployer(self.config)
self.last_commits = {} # 存储每个仓库的最后一次提交 hash
self.config = None
self.last_commits = {}
self.global_branch = 'main'
self.project_root = None
self.runtime_path = None
# 读取全局分支配置
self.global_branch = self.config.get('global_branch', 'main')
# 初始化
self._print_startup_banner()
self._load_config()
self._init_paths()
self.logger.info("Git 监听器初始化完成")
self.logger.info(f"监听分支: {self.global_branch}")
def _print_startup_banner(self):
"""打印启动横幅"""
print("\n")
Logger.separator()
print(" RuoYi Cloud DevOps 自动化部署系统")
Logger.separator()
print(f"启动时间: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
Logger.separator()
print("\n")
def _load_config(self):
"""加载配置文件"""
with open(self.config_path, 'r', encoding='utf-8') as f:
return yaml.safe_load(f)
Logger.info(f"[步骤 1/3] 读取配置文件: {self.config_path}")
try:
with open(self.config_path, 'r', encoding='utf-8') as f:
self.config = yaml.safe_load(f)
def _setup_logging(self):
"""设置日志"""
log_config = self.config.get('logging', {})
log_level = getattr(logging, log_config.get('level', 'INFO'))
log_file = log_config.get('file', '.devops/logs/devops.log')
self.global_branch = self.config.get('global_branch', 'main')
# 确保日志目录存在
os.makedirs(os.path.dirname(log_file), exist_ok=True)
# 初始化日志配置
log_config = self.config.get('logging', {})
log_file = log_config.get('file', '.devops/logs/devops.log')
max_size = log_config.get('max_size', 10485760)
Logger.init(log_file=log_file, max_size=max_size)
# 配置日志格式
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
Logger.info(f"✓ 配置加载成功 - 全局分支: {self.global_branch}")
Logger.info(f"✓ 日志配置 - 文件: {log_file}, 最大大小: {max_size} 字节")
except Exception as e:
Logger.error(f"配置加载失败: {e}")
sys.exit(1)
# 文件处理器
file_handler = logging.FileHandler(log_file, encoding='utf-8')
file_handler.setFormatter(formatter)
file_handler.setLevel(log_level)
def _init_paths(self):
"""初始化路径"""
Logger.info("[步骤 2/3] 初始化路径")
try:
self.project_root = Path(__file__).parent.parent.resolve()
runtime_path = self.config['main_repository']['runtime_path']
# 控制台处理器
console_handler = logging.StreamHandler()
console_handler.setFormatter(formatter)
console_handler.setLevel(log_level)
if not Path(runtime_path).is_absolute():
self.runtime_path = self.project_root / runtime_path
else:
self.runtime_path = Path(runtime_path)
# 配置根 logger让所有子 logger 都能输出
root_logger = logging.getLogger()
root_logger.setLevel(log_level)
root_logger.addHandler(file_handler)
root_logger.addHandler(console_handler)
# 配置当前 logger
self.logger = logging.getLogger('GitMonitor')
self.logger.setLevel(log_level)
Logger.info(f"✓ 路径初始化成功")
Logger.info(f" 项目根目录: {self.project_root}")
Logger.info(f" Runtime 目录: {self.runtime_path}")
except Exception as e:
Logger.error(f"路径初始化失败: {e}")
sys.exit(1)
def get_remote_commit(self, repo_url, branch):
"""获取远程仓库的最新提交 hash"""
@ -84,11 +97,10 @@ class GitMonitor:
cmd, shell=True, capture_output=True, text=True, timeout=30
)
if result.returncode == 0 and result.stdout:
commit_hash = result.stdout.split()[0]
return commit_hash
return result.stdout.split()[0]
return None
except Exception as e:
self.logger.error(f"获取远程提交失败 {repo_url}: {e}")
Logger.error(f"获取远程提交失败 {repo_url}: {e}")
return None
def check_repository(self, repo_config):
@ -96,76 +108,198 @@ class GitMonitor:
repo_name = repo_config['name']
repo_url = repo_config['url']
self.logger.debug(f"检查仓库: {repo_name} (分支: {self.global_branch})")
# 获取最新提交(使用全局分支配置)
current_commit = self.get_remote_commit(repo_url, self.global_branch)
if not current_commit:
self.logger.warning(f"无法获取 {repo_name} 的最新提交")
return False
# 检查是否有新提交
last_commit = self.last_commits.get(repo_name)
if last_commit is None:
# 首次检查,记录当前提交
self.last_commits[repo_name] = current_commit
self.logger.info(f"初始化 {repo_name} 提交记录: {current_commit[:8]}")
Logger.info(f"初始化 {repo_name} 提交记录: {current_commit[:8]}")
return False
if current_commit != last_commit:
self.logger.info(
f"检测到 {repo_name} 有新提交: {last_commit[:8]} -> {current_commit[:8]}"
)
Logger.info(f"检测到 {repo_name} 新提交: {last_commit[:8]} -> {current_commit[:8]}")
self.last_commits[repo_name] = current_commit
return True
return False
def get_enabled_repos(self):
"""获取需要监听的仓库列表"""
enabled = self.config['monitor'].get('enabled_repos', [])
all_repos = self.config['repositories']
def update_main_repo(self):
"""更新主仓库和所有子模块"""
repo_path = self.runtime_path / 'a-cloud-all'
main_repo_url = self.config['main_repository']['url']
if not enabled:
# 空列表表示监听所有仓库
return all_repos
Logger.separator()
Logger.info("更新主仓库和子模块")
Logger.separator()
# 只返回启用的仓库
return [repo for repo in all_repos if repo['name'] in enabled]
# 检查主仓库是否存在
if not (repo_path / '.git').exists():
Logger.info("主仓库不存在,开始克隆...")
self.runtime_path.mkdir(parents=True, exist_ok=True)
cmd = f"git clone --recurse-submodules {main_repo_url} a-cloud-all"
result = subprocess.run(cmd, shell=True, cwd=self.runtime_path, capture_output=True, text=True)
if result.returncode != 0:
Logger.error("克隆主仓库失败")
return False
Logger.info("主仓库克隆成功")
else:
Logger.info("主仓库已存在,更新代码...")
# 切换到主分支
cmd = f"git checkout {self.global_branch}"
subprocess.run(cmd, shell=True, cwd=repo_path, capture_output=True)
# 拉取最新代码
cmd = "git pull"
result = subprocess.run(cmd, shell=True, cwd=repo_path, capture_output=True, text=True)
if result.returncode != 0:
Logger.error("拉取主仓库失败")
return False
# 更新所有子模块
cmd = f"git submodule foreach 'git checkout {self.global_branch} && git pull'"
result = subprocess.run(cmd, shell=True, cwd=repo_path, capture_output=True, text=True)
if result.returncode != 0:
Logger.error("更新子模块失败")
return False
Logger.info("主仓库和子模块更新成功")
return True
def init_infrastructure(self):
"""初始化基础设施MySQL、Redis、Nacos"""
repo_path = self.runtime_path / 'a-cloud-all'
# 检查是否已初始化
mysql_flag = repo_path / '.devops' / '.deployed_mysql'
redis_flag = repo_path / '.devops' / '.deployed_redis'
nacos_flag = repo_path / '.devops' / '.deployed_nacos'
# 初始化 MySQL
if not mysql_flag.exists():
Logger.info("初始化 MySQL...")
# 从配置文件中获取 MySQL 的预部署命令
infra_config = self.config.get('infrastructure', [])
mysql_config = next((item for item in infra_config if item['name'] == 'ruoyi-mysql'), None)
pre_deploy_commands = mysql_config.get('pre_deploy_commands', []) if mysql_config else []
if mysql.init_mysql(repo_path, pre_deploy_commands):
mysql_flag.parent.mkdir(parents=True, exist_ok=True)
mysql_flag.touch()
Logger.info("等待 MySQL 启动30秒...")
time.sleep(30)
else:
return False
# 初始化 Redis
if not redis_flag.exists():
Logger.info("初始化 Redis...")
if redis.init_redis(repo_path):
redis_flag.touch()
Logger.info("等待 Redis 启动10秒...")
time.sleep(10)
else:
return False
# 初始化 Nacos
if not nacos_flag.exists():
Logger.info("初始化 Nacos...")
if nacos.init_nacos(repo_path):
nacos_flag.touch()
Logger.info("等待 Nacos 启动20秒...")
time.sleep(20)
else:
return False
return True
def deploy(self, repo_config):
    """Run the full deployment pipeline for one repository.

    Steps: refresh the main repo and submodules, ensure the infrastructure
    is up, build the project (Maven for ``java``, NPM for ``nodejs``),
    then rebuild and restart the matching Docker Compose service.

    Args:
        repo_config: dict with keys ``name``, ``type``, ``path``,
            ``build_commands``, ``artifact_path``, ``docker_path``,
            ``docker_service``.

    Returns:
        bool: True on success, False on any failure.
    """
    repo_path = self.runtime_path / 'a-cloud-all'
    Logger.separator()
    Logger.info(f"开始部署: {repo_config['name']}")
    Logger.separator()
    try:
        # 1. Update main repository and submodules.
        if not self.update_main_repo():
            return False
        # 2. Make sure MySQL / Redis / Nacos are running.
        if not self.init_infrastructure():
            return False
        # 3. Build according to project type.
        repo_type = repo_config['type']
        commands = ' && '.join(repo_config['build_commands'])
        target_dir = repo_path / repo_config['docker_path']
        if repo_type == 'java':
            # Maven runs at the aggregate root; the artifact sits under the module path.
            source_path = repo_config['path'] + '/' + repo_config['artifact_path']
            if not maven.run_maven(repo_path, commands, source_path, target_dir):
                return False
        elif repo_type == 'nodejs':
            work_dir = repo_path / repo_config['path']
            if not npm.run_npm(work_dir, commands, repo_config['artifact_path'], target_dir):
                return False
        else:
            # Fail fast on an unknown type: otherwise we would deploy a stale artifact.
            Logger.error(f"未知的项目类型: {repo_type}")
            return False
        # 4. Rebuild the image and restart the service via docker-compose.
        compose_dir = repo_path / 'docker'
        if not docker.run_docker_compose(compose_dir, repo_config['docker_service']):
            return False
        Logger.info(f"部署完成: {repo_config['name']}")
        return True
    except Exception as e:
        Logger.error(f"部署异常: {e}")
        return False
def run_once(self):
    """Poll every configured repository once and deploy on new commits.

    A failure in one repository is logged and does not stop the others.
    """
    # NOTE: the previous version was a bad merge — it fetched `repos` twice
    # (the second assignment shadowed the first), logged each event through
    # two different logger APIs, and contained two competing deploy calls.
    repos = self.config.get('repositories', [])
    Logger.info(f"开始检查 {len(repos)} 个仓库...")
    for repo_config in repos:
        try:
            if self.check_repository(repo_config):
                # New commit detected: trigger a deployment.
                Logger.info(f"触发部署: {repo_config['name']}")
                if self.deploy(repo_config):
                    Logger.info(f"✓ 部署成功: {repo_config['name']}")
                else:
                    Logger.error(f"✗ 部署失败: {repo_config['name']}")
        except Exception as e:
            # Keep scanning the remaining repositories.
            Logger.error(f"处理仓库异常 {repo_config['name']}: {e}")
def run(self):
    """Run forever, polling at the configured interval until Ctrl+C.

    Reads ``monitor.poll_interval`` (seconds) from the loaded config.
    """
    # NOTE: merge residue removed — every message was previously emitted
    # twice, once via self.logger and once via Logger.
    poll_interval = self.config['monitor']['poll_interval']
    Logger.info(f"开始持续监听,轮询间隔: {poll_interval}")
    Logger.info("按 Ctrl+C 停止监听\n")
    try:
        while True:
            self.run_once()
            time.sleep(poll_interval)
    except KeyboardInterrupt:
        Logger.info("\n收到停止信号,退出监听")
    except Exception as e:
        Logger.error(f"监听异常: {e}")
def main():
@ -173,16 +307,8 @@ def main():
import argparse
parser = argparse.ArgumentParser(description='Git 仓库监听器')
parser.add_argument(
'--config',
default='.devops/config.yaml',
help='配置文件路径'
)
parser.add_argument(
'--once',
action='store_true',
help='只执行一次检查,不持续监听'
)
parser.add_argument('--config', default='.devops/config.yaml', help='配置文件路径')
parser.add_argument('--once', action='store_true', help='只执行一次检查')
args = parser.parse_args()
@ -196,3 +322,4 @@ def main():
if __name__ == '__main__':
main()

View File

@ -1,61 +0,0 @@
#!/bin/bash
# Common function library for the deploy scripts:
# colored logging, command/daemon checks, and service health waiting.

# ANSI colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Log helpers: colored level tag followed by the message.
log_info() {
    echo -e "${GREEN}[INFO]${NC} $1"
}
log_warn() {
    echo -e "${YELLOW}[WARN]${NC} $1"
}
log_error() {
    echo -e "${RED}[ERROR]${NC} $1"
}

# check_command NAME -> 0 if NAME is on PATH, 1 otherwise.
check_command() {
    # "$1" quoted: unquoted it would word-split / glob on unusual names.
    if ! command -v "$1" &> /dev/null; then
        log_error "命令不存在: $1"
        return 1
    fi
    return 0
}

# check_docker -> 0 if the Docker daemon answers, 1 otherwise.
check_docker() {
    if ! docker info &> /dev/null; then
        log_error "Docker 未运行"
        return 1
    fi
    return 0
}

# wait_for_healthy SERVICE [MAX_WAIT]
# Poll docker-compose every 2s until SERVICE reports "Up (healthy)"
# or MAX_WAIT seconds (default 60) have elapsed.
wait_for_healthy() {
    local service="$1"
    local max_wait="${2:-60}"
    local count=0
    log_info "等待服务健康检查: $service"
    while [ "$count" -lt "$max_wait" ]; do
        if docker-compose ps "$service" | grep -q "Up (healthy)"; then
            log_info "服务已就绪: $service"
            return 0
        fi
        sleep 2
        count=$((count + 2))
    done
    log_warn "服务健康检查超时: $service"
    return 1
}

View File

@ -1,44 +0,0 @@
#!/bin/bash
# Deploy a Java service. The jar has already been staged into the docker
# tree by deployer.py; this script only rebuilds and restarts the container.
# Args: $1=service name, $2=docker service name, $3=docker-compose path (accepted, unused here)
set -e  # abort on the first failing command

SERVICE_NAME="$1"
DOCKER_SERVICE="$2"
DOCKER_COMPOSE_PATH="$3"

banner() {
    echo "=========================================="
}

banner
echo "部署 Java 服务: $SERVICE_NAME"
echo "Docker 服务: $DOCKER_SERVICE"
banner

# Both the service name and the docker service name are mandatory.
if [ -z "$SERVICE_NAME" ] || [ -z "$DOCKER_SERVICE" ]; then
    echo "错误: 缺少必要参数"
    echo "用法: $0 <服务名称> <docker服务名> <docker-compose路径>"
    exit 1
fi

# Resolve the project root relative to this script's own location.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
echo "项目根目录: $PROJECT_ROOT"

# All compose commands run from the docker directory.
cd "$PROJECT_ROOT/docker"

echo "重新构建 Docker 镜像并启动服务..."
docker-compose build --no-cache "$DOCKER_SERVICE" && docker-compose up -d "$DOCKER_SERVICE"

echo "等待服务启动..."
sleep 5

echo "检查服务状态..."
docker-compose ps "$DOCKER_SERVICE"

banner
echo "部署完成: $SERVICE_NAME"
banner

View File

@ -1,44 +0,0 @@
#!/bin/bash
# Deploy the UI frontend. The dist directory has already been copied into
# docker/nginx/html/dist by deployer.py; this script rebuilds and restarts nginx.
# Args: $1=service name, $2=docker service name, $3=docker-compose path (accepted, unused here)
set -e  # abort on the first failing command

SERVICE_NAME="$1"
DOCKER_SERVICE="$2"
DOCKER_COMPOSE_PATH="$3"

banner() {
    echo "=========================================="
}

banner
echo "部署前端服务: $SERVICE_NAME"
echo "Docker 服务: $DOCKER_SERVICE"
banner

# Both the service name and the docker service name are mandatory.
if [ -z "$SERVICE_NAME" ] || [ -z "$DOCKER_SERVICE" ]; then
    echo "错误: 缺少必要参数"
    echo "用法: $0 <服务名称> <docker服务名> <docker-compose路径>"
    exit 1
fi

# Resolve the project root relative to this script's own location.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
echo "项目根目录: $PROJECT_ROOT"

# All compose commands run from the docker directory.
cd "$PROJECT_ROOT/docker"

echo "重新构建 Docker 镜像并启动服务..."
docker-compose build --no-cache "$DOCKER_SERVICE" && docker-compose up -d "$DOCKER_SERVICE"

echo "等待服务启动..."
sleep 3

echo "检查服务状态..."
docker-compose ps "$DOCKER_SERVICE"

banner
echo "部署完成: $SERVICE_NAME"
banner

85
.devops/scripts/docker.py Normal file
View File

@ -0,0 +1,85 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Docker Compose 部署模块
"""
import os
import subprocess
from pathlib import Path
from .log import Logger
def run_docker_compose(compose_dir, service_name):
    """Rebuild the image for *service_name* and restart it via docker-compose.

    Args:
        compose_dir: directory containing ``docker-compose.yml``.
        service_name: compose service to build and start.

    Returns:
        bool: True on success, False on any failure.
    """
    try:
        compose_dir = Path(compose_dir).resolve()
        Logger.separator()
        Logger.info("开始 Docker 部署")
        Logger.separator()
        Logger.info(f"执行目录: {compose_dir}")
        Logger.info(f"服务名称: {service_name}")
        if not compose_dir.exists():
            Logger.error(f"目录不存在: {compose_dir}")
            return False
        compose_file = compose_dir / "docker-compose.yml"
        if not compose_file.exists():
            Logger.error(f"docker-compose.yml 不存在: {compose_file}")
            return False
        # Run both steps with argument lists (shell=False) so a service name
        # containing shell metacharacters is never interpreted by a shell.
        steps = [
            (["docker-compose", "build", "--no-cache", service_name],
             f"镜像构建失败: {service_name}", f"镜像构建成功: {service_name}"),
            (["docker-compose", "up", "-d", service_name],
             f"服务启动失败: {service_name}", f"服务启动成功: {service_name}"),
        ]
        for cmd, fail_msg, ok_msg in steps:
            Logger.info(f"执行命令: {' '.join(cmd)}")
            result = subprocess.run(cmd, cwd=compose_dir, capture_output=True, text=True)
            if result.returncode != 0:
                Logger.error(fail_msg)
                if result.stderr:
                    Logger.error(f"错误信息: {result.stderr}")
                return False
            Logger.info(ok_msg)
        Logger.info(f"Docker 部署完成: {service_name}")
        return True
    except Exception as e:
        Logger.error(f"Docker 部署异常: {e}")
        return False

View File

@ -0,0 +1,128 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
MySQL 初始化模块
"""
import os
import subprocess
import shutil
from pathlib import Path
import sys
# 添加父目录到路径
sys.path.insert(0, str(Path(__file__).parent.parent))
from log import Logger
def init_mysql(project_root, pre_deploy_commands=None):
    """Stage SQL init scripts and start the MySQL container.

    Args:
        project_root: repository root directory.
        pre_deploy_commands: optional list of ``cp sql/x.sql docker/mysql/db/``
            style commands from the config; when given, the SQL source files
            are parsed out of them, otherwise a built-in default list is used.

    Returns:
        bool: True on success, False otherwise.
    """
    try:
        project_root = Path(project_root).resolve()
        Logger.separator()
        Logger.info("开始初始化 MySQL")
        Logger.separator()
        Logger.info(f"项目根目录: {project_root}")
        sql_dir = project_root / "sql"
        target_dir = project_root / "docker" / "mysql" / "db"
        docker_dir = project_root / "docker"
        Logger.info("复制 SQL 脚本到 MySQL 初始化目录")
        Logger.info(f"源目录: {sql_dir}")
        Logger.info(f"目标目录: {target_dir}")
        target_dir.mkdir(parents=True, exist_ok=True)
        # Work out which SQL files to copy, then copy them with one loop.
        if pre_deploy_commands:
            # Each command looks like: cp sql/xxx.sql docker/mysql/db/
            sources = []
            for cmd in pre_deploy_commands:
                if cmd.startswith('cp ') and '.sql' in cmd:
                    parts = cmd.split()
                    if len(parts) >= 2:
                        sources.append(project_root / parts[1])
        else:
            # Default SQL file list (backward compatible).
            sources = [sql_dir / name for name in ("ry_20250523.sql", "ry_config_20250902.sql")]
        for source_file in sources:
            if source_file.exists():
                Logger.info(f"复制文件: {source_file.name}")
                shutil.copy2(source_file, target_dir / source_file.name)
            else:
                Logger.warn(f"SQL 文件不存在: {source_file}")
        Logger.info("目标目录内容:")
        for item in target_dir.iterdir():
            Logger.info(f" - {item.name}")
        # Build and start the MySQL container (argv lists: no shell involved).
        Logger.separator()
        Logger.info("构建 MySQL 镜像")
        Logger.separator()
        Logger.info(f"执行目录: {docker_dir}")
        Logger.info("执行命令: docker-compose build --no-cache ruoyi-mysql")
        result = subprocess.run(
            ["docker-compose", "build", "--no-cache", "ruoyi-mysql"],
            cwd=docker_dir,
            capture_output=True,
            text=True,
        )
        if result.returncode != 0:
            Logger.error("MySQL 镜像构建失败")
            if result.stderr:
                Logger.error(f"错误信息: {result.stderr}")
            return False
        Logger.info("MySQL 镜像构建成功")
        Logger.info("执行命令: docker-compose up -d ruoyi-mysql")
        result = subprocess.run(
            ["docker-compose", "up", "-d", "ruoyi-mysql"],
            cwd=docker_dir,
            capture_output=True,
            text=True,
        )
        if result.returncode != 0:
            Logger.error("MySQL 容器启动失败")
            if result.stderr:
                Logger.error(f"错误信息: {result.stderr}")
            return False
        Logger.info("MySQL 容器启动成功")
        Logger.info("MySQL 初始化完成")
        return True
    except Exception as e:
        Logger.error(f"MySQL 初始化异常: {e}")
        return False

View File

@ -0,0 +1,77 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Nacos 初始化模块
"""
import subprocess
from pathlib import Path
import sys
# 添加父目录到路径
sys.path.insert(0, str(Path(__file__).parent.parent))
from log import Logger
def init_nacos(project_root):
    """Build and start the Nacos container via docker-compose.

    Args:
        project_root: repository root directory (must contain ``docker/``).

    Returns:
        bool: True on success, False otherwise.
    """
    try:
        project_root = Path(project_root).resolve()
        Logger.separator()
        Logger.info("开始初始化 Nacos")
        Logger.separator()
        Logger.info(f"项目根目录: {project_root}")
        docker_dir = project_root / "docker"
        Logger.info(f"执行目录: {docker_dir}")
        # Build, then start; argv lists (shell=False) avoid going through a shell.
        steps = [
            (["docker-compose", "build", "--no-cache", "ruoyi-nacos"],
             "Nacos 镜像构建失败", "Nacos 镜像构建成功"),
            (["docker-compose", "up", "-d", "ruoyi-nacos"],
             "Nacos 容器启动失败", "Nacos 容器启动成功"),
        ]
        for cmd, fail_msg, ok_msg in steps:
            Logger.info(f"执行命令: {' '.join(cmd)}")
            result = subprocess.run(cmd, cwd=docker_dir, capture_output=True, text=True)
            if result.returncode != 0:
                Logger.error(fail_msg)
                if result.stderr:
                    Logger.error(f"错误信息: {result.stderr}")
                return False
            Logger.info(ok_msg)
        Logger.info("Nacos 初始化完成")
        return True
    except Exception as e:
        Logger.error(f"Nacos 初始化异常: {e}")
        return False

View File

@ -0,0 +1,77 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Redis 初始化模块
"""
import subprocess
from pathlib import Path
import sys
# 添加父目录到路径
sys.path.insert(0, str(Path(__file__).parent.parent))
from log import Logger
def init_redis(project_root):
    """Build and start the Redis container via docker-compose.

    Args:
        project_root: repository root directory (must contain ``docker/``).

    Returns:
        bool: True on success, False otherwise.
    """
    try:
        project_root = Path(project_root).resolve()
        Logger.separator()
        Logger.info("开始初始化 Redis")
        Logger.separator()
        Logger.info(f"项目根目录: {project_root}")
        docker_dir = project_root / "docker"
        Logger.info(f"执行目录: {docker_dir}")
        # Build, then start; argv lists (shell=False) avoid going through a shell.
        steps = [
            (["docker-compose", "build", "--no-cache", "ruoyi-redis"],
             "Redis 镜像构建失败", "Redis 镜像构建成功"),
            (["docker-compose", "up", "-d", "ruoyi-redis"],
             "Redis 容器启动失败", "Redis 容器启动成功"),
        ]
        for cmd, fail_msg, ok_msg in steps:
            Logger.info(f"执行命令: {' '.join(cmd)}")
            result = subprocess.run(cmd, cwd=docker_dir, capture_output=True, text=True)
            if result.returncode != 0:
                Logger.error(fail_msg)
                if result.stderr:
                    Logger.error(f"错误信息: {result.stderr}")
                return False
            Logger.info(ok_msg)
        Logger.info("Redis 初始化完成")
        return True
    except Exception as e:
        Logger.error(f"Redis 初始化异常: {e}")
        return False

105
.devops/scripts/log.py Normal file
View File

@ -0,0 +1,105 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
日志管理模块
提供统一的日志输出功能支持控制台和文件输出
"""
import os
from datetime import datetime
from pathlib import Path
class Logger:
    """Unified logger: colored lines to the console, plain lines to an
    optional file, with simple size-based rotation (one ``.log.1`` backup)."""

    # ANSI color codes
    RED = '\033[0;31m'
    GREEN = '\033[0;32m'
    YELLOW = '\033[1;33m'
    BLUE = '\033[0;34m'
    NC = '\033[0m'  # No Color

    # Runtime configuration (class-level state, configured via init()).
    _log_file = None        # Path of the log file, or None for console-only
    _max_size = 10485760    # rotate when the file reaches 10 MB
    _initialized = False

    @classmethod
    def init(cls, log_file=None, max_size=None):
        """Configure the log file path and maximum size in bytes.

        Creates the log directory if needed. Both arguments are optional;
        without a file the logger writes to the console only.
        """
        if log_file:
            cls._log_file = Path(log_file)
            cls._log_file.parent.mkdir(parents=True, exist_ok=True)
        if max_size:
            cls._max_size = max_size
        cls._initialized = True

    @classmethod
    def _rotate_log(cls):
        """Rotate the log file to ``*.log.1`` once it reaches the size cap."""
        log_file = cls._log_file
        if log_file is None or not log_file.exists():
            return
        if log_file.stat().st_size >= cls._max_size:
            backup = log_file.with_suffix('.log.1')
            if backup.exists():
                backup.unlink()  # keep a single backup generation
            log_file.rename(backup)

    @classmethod
    def _write_log(cls, level, message):
        """Emit one record: colored to the console, plain to the file."""
        timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        colors = {
            'INFO': cls.GREEN,
            'ERROR': cls.RED,
            'WARN': cls.YELLOW,
            'DEBUG': cls.BLUE,
        }
        print(f"{colors.get(level, cls.NC)}[{level}]{cls.NC} {timestamp} - {message}")
        if cls._log_file:
            cls._rotate_log()
            with open(cls._log_file, 'a', encoding='utf-8') as fh:
                fh.write(f"[{level}] {timestamp} - {message}\n")

    @classmethod
    def info(cls, message):
        """Log an informational message."""
        cls._write_log('INFO', message)

    @classmethod
    def error(cls, message):
        """Log an error message."""
        cls._write_log('ERROR', message)

    @classmethod
    def warn(cls, message):
        """Log a warning message."""
        cls._write_log('WARN', message)

    @classmethod
    def debug(cls, message):
        """Log a debug message."""
        cls._write_log('DEBUG', message)

    @staticmethod
    def separator():
        """Print a 60-character separator line (console only)."""
        print("=" * 60)

92
.devops/scripts/maven.py Normal file
View File

@ -0,0 +1,92 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Maven 打包和复制模块
"""
import os
import subprocess
import glob
from pathlib import Path
from .log import Logger
def run_maven(work_dir, maven_commands, source_path, target_dir):
    """Run a Maven build and copy the produced artifacts.

    Args:
        work_dir: directory in which to run the Maven command.
        maven_commands: shell command string (may chain with ``&&``),
            hence shell=True below.
        source_path: artifact path relative to work_dir; glob patterns allowed.
        target_dir: destination directory for the artifacts.

    Returns:
        bool: True on success, False otherwise.
    """
    import shutil  # hoisted: was re-imported on every loop iteration
    try:
        work_dir = Path(work_dir).resolve()
        Logger.separator()
        Logger.info("开始 Maven 打包")
        Logger.separator()
        Logger.info(f"执行目录: {work_dir}")
        Logger.info(f"Maven 命令: {maven_commands}")
        if not work_dir.exists():
            Logger.error(f"目录不存在: {work_dir}")
            return False
        # Run the build; the command string may contain && so a shell is required.
        Logger.info(f"执行命令: {maven_commands}")
        result = subprocess.run(
            maven_commands,
            shell=True,
            cwd=work_dir,
            capture_output=True,
            text=True,
        )
        if result.returncode != 0:
            Logger.error("Maven 打包失败")
            if result.stderr:
                Logger.error(f"错误信息: {result.stderr}")
            return False
        Logger.info("Maven 打包成功")
        # Copy the build artifacts into the docker tree.
        Logger.separator()
        Logger.info("开始复制构建产物")
        Logger.separator()
        source_full_path = work_dir / source_path
        Logger.info(f"源路径: {source_full_path}")
        Logger.info(f"目标目录: {target_dir}")
        Path(target_dir).mkdir(parents=True, exist_ok=True)
        files = glob.glob(str(source_full_path))
        if not files:
            Logger.error(f"未找到构建产物: {source_full_path}")
            return False
        for file in files:
            file_path = Path(file)
            Logger.info(f"复制文件: {file_path.name}")
            shutil.copy2(file, Path(target_dir) / file_path.name)
        Logger.info("构建产物复制成功")
        Logger.info("Maven 打包和复制完成")
        return True
    except Exception as e:
        Logger.error(f"Maven 打包异常: {e}")
        return False

102
.devops/scripts/npm.py Normal file
View File

@ -0,0 +1,102 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
NPM 打包和复制模块
"""
import os
import subprocess
import shutil
from pathlib import Path
from .log import Logger
def run_npm(work_dir, npm_commands, source_dir, target_dir):
    """Run an NPM build and sync the build output into the docker tree.

    Args:
        work_dir: directory in which to run the npm commands.
        npm_commands: shell command string (may chain with ``&&``).
        source_dir: build output directory, relative to work_dir.
        target_dir: destination directory; existing contents (except the
            ``.gitkeep`` placeholder) are removed before copying.

    Returns:
        bool: True on success, False otherwise.
    """
    try:
        build_dir = Path(work_dir).resolve()
        Logger.separator()
        Logger.info("开始 NPM 打包")
        Logger.separator()
        Logger.info(f"执行目录: {build_dir}")
        Logger.info(f"NPM 命令: {npm_commands}")
        if not build_dir.exists():
            Logger.error(f"目录不存在: {build_dir}")
            return False
        # Run the build; the command string may contain && so shell=True is needed.
        Logger.info(f"执行命令: {npm_commands}")
        proc = subprocess.run(
            npm_commands,
            shell=True,
            cwd=build_dir,
            capture_output=True,
            text=True,
        )
        if proc.returncode != 0:
            Logger.error("NPM 打包失败")
            if proc.stderr:
                Logger.error(f"错误信息: {proc.stderr}")
            return False
        Logger.info("NPM 打包成功")
        # Sync the build output.
        Logger.separator()
        Logger.info("开始复制构建产物")
        Logger.separator()
        dist_dir = build_dir / source_dir
        Logger.info(f"源目录: {dist_dir}")
        Logger.info(f"目标目录: {target_dir}")
        if not dist_dir.exists():
            Logger.error(f"源目录不存在: {dist_dir}")
            return False
        dest_root = Path(target_dir)
        if not dest_root.exists():
            dest_root.mkdir(parents=True, exist_ok=True)
        else:
            # Wipe the previous output but keep the .gitkeep placeholder.
            Logger.info(f"清空目标目录: {target_dir}")
            for entry in dest_root.iterdir():
                if entry.name == '.gitkeep':
                    continue
                if entry.is_dir():
                    shutil.rmtree(entry)
                else:
                    entry.unlink()
        Logger.info("复制目录内容...")
        for entry in dist_dir.iterdir():
            destination = dest_root / entry.name
            if entry.is_dir():
                shutil.copytree(entry, destination, dirs_exist_ok=True)
            else:
                shutil.copy2(entry, destination)
        Logger.info("构建产物复制成功")
        Logger.info("NPM 打包和复制完成")
        return True
    except Exception as e:
        Logger.error(f"NPM 打包异常: {e}")
        return False