# a-cloud-all/docker/docker-compose.yml
# NOTE: the top-level `version` key is obsolete in Compose v2 (ignored with a
# warning) but kept here for backward compatibility with Compose v1 tooling.
version: '3.8'
services:
ruoyi-nacos:
container_name: ruoyi-nacos
image: nacos-runtime
build:
context: ./nacos
environment:
- MODE=standalone
- TZ=Asia/Shanghai
volumes:
- ./nacos/logs/:/home/nacos/logs
- ./nacos/conf/application.properties:/home/nacos/conf/application.properties
ports:
- "8848:8848"
- "9848:9848"
- "9849:9849"
depends_on:
- ruoyi-mysql
ruoyi-mysql:
container_name: ruoyi-mysql
image: mysql-runtime
build:
context: ./mysql
ports:
- "3306:3306"
volumes:
- ./mysql/conf:/etc/mysql/conf.d
- ./mysql/logs:/logs
- ./mysql/data:/var/lib/mysql
command: [
'mysqld',
'--innodb-buffer-pool-size=80M',
'--character-set-server=utf8mb4',
'--collation-server=utf8mb4_unicode_ci',
'--default-time-zone=+8:00',
'--lower-case-table-names=1'
]
environment:
TZ: Asia/Shanghai
MYSQL_DATABASE: 'ry-cloud'
MYSQL_ROOT_PASSWORD: password
ruoyi-redis:
container_name: ruoyi-redis
image: redis-runtime
build:
context: ./redis
environment:
- TZ=Asia/Shanghai
ports:
- "6379:6379"
volumes:
- ./redis/conf/redis.conf:/home/ruoyi/redis/redis.conf
command: redis-server /home/ruoyi/redis/redis.conf
ruoyi-minio:
container_name: ruoyi-minio
image: registry.t-aaron.com/minio/minio:RELEASE.2024-12-18T13-15-44Z
environment:
- MINIO_ROOT_USER=minioadmin
- MINIO_ROOT_PASSWORD=minioadmin
- TZ=Asia/Shanghai
ports:
- "9000:9000"
- "9001:9001"
volumes:
- ./minio/data:/data
- ./minio/config:/root/.minio
command: server /data --console-address ":9001"
restart: unless-stopped
ruoyi-nginx:
container_name: ruoyi-nginx
image: nginx-runtime
build:
context: ./nginx
environment:
- TZ=Asia/Shanghai
ports:
- "8899:80"
volumes:
- ./nginx/html/dist:/home/ruoyi/projects/ruoyi-ui
- ./nginx/conf/nginx.conf:/etc/nginx/nginx.conf
- ./nginx/logs:/var/log/nginx
- ./nginx/conf.d:/etc/nginx/conf.d
depends_on:
- ruoyi-gateway
links:
- ruoyi-gateway
ruoyi-hyf:
container_name: ruoyi-hyf
image: hyf-runtime
build:
context: ./a_th_web
environment:
- TZ=Asia/Shanghai
ports:
- "9988:80"
volumes:
- ./a_th_web/html/dist:/home/ruoyi/projects/ruoyi-ui
- ./a_th_web/conf/nginx.conf:/etc/nginx/nginx.conf
- ./a_th_web/logs:/var/log/nginx
- ./a_th_web/conf.d:/etc/nginx/conf.d
depends_on:
- ruoyi-gateway
links:
- ruoyi-gateway
ruoyi-hxf:
container_name: ruoyi-hxf
image: hxf-runtime
build:
context: ./b_th_web
environment:
- TZ=Asia/Shanghai
ports:
- "9898:80"
volumes:
- ./b_th_web/html/dist:/home/ruoyi/projects/ruoyi-ui
- ./b_th_web/conf/nginx.conf:/etc/nginx/nginx.conf
- ./b_th_web/logs:/var/log/nginx
- ./b_th_web/conf.d:/etc/nginx/conf.d
depends_on:
- ruoyi-gateway
links:
- ruoyi-gateway
ruoyi-gateway:
container_name: ruoyi-gateway
image: gateway-runtime
build:
context: ./ruoyi/gateway
dockerfile: dockerfile
environment:
- TZ=Asia/Shanghai
ports:
- "8080:8080"
depends_on:
- ruoyi-redis
links:
- ruoyi-redis
ruoyi-auth:
container_name: ruoyi-auth
image: ruoyi-auth-runtime
build:
context: ./ruoyi/auth
dockerfile: dockerfile
environment:
- TZ=Asia/Shanghai
ports:
- "9200:9200"
depends_on:
- ruoyi-redis
links:
- ruoyi-redis
ruoyi-modules-system:
container_name: ruoyi-modules-system
image: ruoyi-modules-system-runtime
build:
context: ./ruoyi/modules/system
dockerfile: dockerfile
environment:
- TZ=Asia/Shanghai
ports:
- "9201:9201"
depends_on:
- ruoyi-redis
- ruoyi-mysql
links:
- ruoyi-redis
- ruoyi-mysql
ruoyi-modules-gen:
container_name: ruoyi-modules-gen
image: ruoyi-modules-gen-runtime
build:
context: ./ruoyi/modules/gen
dockerfile: dockerfile
environment:
- TZ=Asia/Shanghai
ports:
- "9202:9202"
depends_on:
- ruoyi-mysql
links:
- ruoyi-mysql
ruoyi-modules-job:
container_name: ruoyi-modules-job
image: ruoyi-modules-job-runtime
build:
context: ./ruoyi/modules/job
dockerfile: dockerfile
environment:
- TZ=Asia/Shanghai
ports:
- "9203:9203"
depends_on:
- ruoyi-mysql
links:
- ruoyi-mysql
ruoyi-modules-file:
container_name: ruoyi-modules-file
image: ruoyi-modules-file-runtime
build:
context: ./ruoyi/modules/file
dockerfile: dockerfile
environment:
- TZ=Asia/Shanghai
ports:
- "9300:9300"
volumes:
- ./ruoyi/uploadPath:/home/ruoyi/uploadPath
ruoyi-visual-monitor:
container_name: ruoyi-visual-monitor
image: ruoyi-visual-monitor-runtime
build:
context: ./ruoyi/visual/monitor
dockerfile: dockerfile
environment:
- TZ=Asia/Shanghai
ports:
- "9100:9100"
tuoheng-modules-approval:
container_name: tuoheng-modules-approval
image: tuoheng-modules-approval-runtime
build:
context: ./ruoyi/modules/approval
dockerfile: dockerfile
environment:
- TZ=Asia/Shanghai
ports:
- "9212:9212"
depends_on:
- ruoyi-redis
- ruoyi-mysql
links:
- ruoyi-redis
- ruoyi-mysql
tuoheng-modules-device:
container_name: tuoheng-modules-device
image: tuoheng-modules-device-runtime
build:
context: ./ruoyi/modules/device
dockerfile: dockerfile
environment:
- TZ=Asia/Shanghai
ports:
- "9210:9210"
depends_on:
- ruoyi-redis
- ruoyi-mysql
links:
- ruoyi-redis
- ruoyi-mysql
tuoheng-modules-airline:
container_name: tuoheng-modules-airline
image: tuoheng-modules-airline-runtime
build:
context: ./ruoyi/modules/airline
dockerfile: dockerfile
environment:
- TZ=Asia/Shanghai
ports:
- "9211:9211"
depends_on:
- ruoyi-redis
- ruoyi-mysql
links:
- ruoyi-redis
- ruoyi-mysql
tuoheng-modules-task:
container_name: tuoheng-modules-task
image: tuoheng-modules-task-runtime
build:
context: ./ruoyi/modules/task
dockerfile: dockerfile
environment:
- TZ=Asia/Shanghai
ports:
- "9215:9215"
depends_on:
- ruoyi-redis
- ruoyi-mysql
links:
- ruoyi-redis
- ruoyi-mysql
tuoheng-modules-fms:
container_name: tuoheng-modules-fms
image: tuoheng-modules-fms-runtime
build:
context: ./ruoyi/modules/fms
dockerfile: dockerfile
environment:
- TZ=Asia/Shanghai
ports:
- "9213:9213"
depends_on:
- ruoyi-redis
- ruoyi-mysql
links:
- ruoyi-redis
- ruoyi-mysql
tuoheng-modules-media:
container_name: tuoheng-modules-media
image: tuoheng-modules-media-runtime
build:
context: ./ruoyi/modules/media
dockerfile: dockerfile
environment:
- TZ=Asia/Shanghai
ports:
- "9214:9214"
depends_on:
- ruoyi-redis
- ruoyi-mysql
links:
- ruoyi-redis
- ruoyi-mysql
# ============================================================================
# WVP-PRO 视频管理平台
# ============================================================================
# WVP是基于GB28181协议的视频管理平台使用ZLMediaKit作为流媒体服务器
# 端口说明:
# - 18080:18978 → WVP的HTTP API端口外部通过18080访问
# - 5060:5060/udp → SIP信令端口用于GB28181设备注册和控制
# ============================================================================
wvp-pro:
container_name: wvp-pro
image: wvp-pro-runtime
build:
context: ./wvp/wvpjar
dockerfile: dockerfile
environment:
- TZ=Asia/Shanghai
# Redis 配置
- REDIS_HOST=ruoyi-redis
- REDIS_PORT=6379
# MySQL 数据库配置
- DATABASE_HOST=ruoyi-mysql
- DATABASE_PORT=3306
- DATABASE_USER=ylcx
- DATABASE_PASSWORD=Tuoheng@2025
# SIP 配置
- SIP_ShowIP=127.0.0.1
- SIP_Port=5060
- SIP_Domain=3502000000
- SIP_Id=35020000002000000001
- SIP_Password=wvp_sip_password
# ========== ZLM 媒体服务器配置 ==========
# ZLM_HOST: WVP调用ZLM API时使用的地址容器内部服务名
# 用途wvp-pro通过 http://zlmediakit:80/index/api/xxx 调用ZLM的RESTful API
- ZLM_HOST=zlmediakit
# ZLM_HOOK_HOST: ZLM回调WVP时使用的地址容器内部服务名
# 用途zlmediakit通过 http://wvp-pro:18978/index/hook/on_publish 回调WVP
- ZLM_HOOK_HOST=wvp-pro
# ZLM_SERCERT: ZLM的API密钥必须与zlmediakit/config.ini中的api.secret一致
- ZLM_SERCERT=fgVdaI75GcSBPeSBvg8NL7aRrlkCtGPv
# ========== 流媒体地址配置 ==========
# Stream_IP: 生成播放地址时使用的IP宿主机外网IP或域名
# 用途WVP生成的播放地址格式为 http://45.120.103.238:9090/live/123.live.flv
# 对应application.yml中的media.stream-ip配置
- Stream_IP=45.120.103.238
# SDP_IP: WVP在国标信令中使用的IP
# 用途GB28181设备通过此IP与WVP进行媒体流传输
# 对应application.yml中的media.sdp-ip配置
- SDP_IP=45.120.103.238
# ========== 流媒体端口配置(宿主机外部端口) ==========
# 以下端口用于生成客户端播放地址必须与zlmediakit容器的端口映射一致
#
# MediaHttp: HTTP播放端口对应zlmediakit的9090:80映射
# 用途生成HTTP-FLV/HLS/TS/RTC播放地址
# 示例http://45.120.103.238:9090/live/123.live.flv
# 对应application.yml中的media.flv-port和media.ws-flv-port配置
- MediaHttp=9090
# MediaHttps: HTTPS播放端口对应zlmediakit的8443:443映射
# 用途生成HTTPS-FLV/HLS/TS播放地址
# 示例https://45.120.103.238:8443/live/123.live.flv
# 对应application.yml中的media.flv-ssl-port和media.ws-flv-ssl-port配置
- MediaHttps=8443
# MediaRtp: RTP代理端口对应zlmediakit的10000:10000映射
# 用途GB28181设备的RTP流传输
# 对应application.yml中的media.rtp-proxy-port配置
# 对应zlmediakit config.ini中的rtp_proxy.port=10000
- MediaRtp=10000
# MediaRtmp: RTMP推流端口对应zlmediakit的1935:1935映射
# 用途OBS等推流工具推流地址
# 示例rtmp://45.120.103.238:1935/live/123
# 对应application.yml中的media.rtmp-port配置
# 对应zlmediakit config.ini中的rtmp.port=1935
- MediaRtmp=1935
# MediaRtsp: RTSP推流/拉流端口对应zlmediakit的8554:554映射
# 用途RTSP协议的推流和拉流
# 示例rtsp://45.120.103.238:8554/live/123
# 对应application.yml中的media.rtsp-port配置
# 对应zlmediakit config.ini中的rtsp.port=554
- MediaRtsp=8554
# 录像配置
- RecordPushLive=false
- RecordSip=false
ports:
- "18080:18978"
- "5060:5060/udp"
# - "6379:6379"
volumes:
- ./wvp/logs:/home/ruoyi/logs
depends_on:
- ruoyi-redis
- ruoyi-mysql
- zlmediakit
links:
- ruoyi-redis
- ruoyi-mysql
- zlmediakit
restart: unless-stopped
# ============================================================================
# ZLMediaKit 流媒体服务器
# ============================================================================
# ZLMediaKit是高性能的流媒体服务器支持RTMP/RTSP/HLS/HTTP-FLV等多种协议
#
# 端口映射说明(格式:宿主机端口:容器内部端口):
# 1. API访问端口
# - 9090:80 → HTTP API端口WVP通过容器内部的80端口访问ZLM API
# - 8443:443 → HTTPS API端口通常不启用
#
# 2. 客户端播放端口:
# - 9090:80 → HTTP-FLV/HLS/TS/RTC播放客户端通过9090端口播放
# - 8443:443 → HTTPS播放端口
#
# 3. 推流/拉流协议端口:
# - 1935:1935 → RTMP推流端口OBS推流地址rtmp://IP:1935/live/stream
# - 8554:554 → RTSP推流/拉流端口
# - 10000:10000 → RTP代理端口TCP/UDP用于GB28181设备
# - 8000:8000/udp → WebRTC UDP端口
# - 9900:9000/udp → WebRTC UDP端口注意避免与minio 9000端口冲突
#
# 配置文件映射:
# - ./zlmediakit/config.ini → /opt/media/conf/config.ini
# 重要配置项:
# * general.mediaServerId=polaris必须与数据库wvp_media_server表的id字段一致
# * api.secret=fgVdaI75GcSBPeSBvg8NL7aRrlkCtGPv必须与ZLM_SERCERT一致
# * http.port=80容器内部HTTP端口
# * rtmp.port=1935RTMP端口
# * rtsp.port=554RTSP端口
# * rtp_proxy.port=10000RTP代理端口
# ============================================================================
zlmediakit:
container_name: zlmediakit
image: registry.t-aaron.com/zlmediakit/zlmediakit:Release.latest
environment:
- TZ=Asia/Shanghai
ports:
# RTMP推流端口容器内部1935映射到宿主机1935
# OBS推流地址rtmp://45.120.103.238:1935/live/streamId
# 对应config.ini中的rtmp.port=1935
- "1935:1935"
# HTTP端口容器内部80映射到宿主机9090
# 用途1WVP通过http://zlmediakit:80访问ZLM API
# 用途2客户端通过http://45.120.103.238:9090播放HTTP-FLV/HLS/TS
# 对应config.ini中的http.port=80
- "9090:80"
# HTTPS端口容器内部443映射到宿主机8443
# 客户端通过https://45.120.103.238:8443播放
# 对应config.ini中的http.sslport=443
- "8443:443"
# RTSP端口容器内部554映射到宿主机8554
# RTSP地址rtsp://45.120.103.238:8554/live/streamId
# 对应config.ini中的rtsp.port=554
- "8554:554"
# RTP代理端口TCP容器内部10000映射到宿主机10000
# 用于GB28181设备的RTP流传输
# 对应config.ini中的rtp_proxy.port=10000
- "10000:10000"
# RTP代理端口UDP容器内部10000映射到宿主机10000
- "10000:10000/udp"
# WebRTC UDP端口容器内部8000映射到宿主机8000
# 对应config.ini中的rtc.port=8000
- "8000:8000/udp"
# WebRTC UDP端口容器内部9000映射到宿主机9900
# 注意宿主机使用9900避免与minio的9000端口冲突
- "9900:9000/udp"
volumes:
- ./zlmediakit/config.ini:/opt/media/conf/config.ini
restart: unless-stopped
wvp-web:
container_name: wvp-web
image: wvp-web-runtime
build:
context: ./wvp/web
environment:
- TZ=Asia/Shanghai
ports:
- "28181:80"
volumes:
- ./wvp/web/html/dist:/home/ruoyi/projects/wvp-ui
- ./wvp/web/conf/nginx.conf:/etc/nginx/nginx.conf
- ./wvp/web/logs:/var/log/nginx
- ./wvp/web/conf.d:/etc/nginx/conf.d
depends_on:
- wvp-pro
links:
- wvp-pro
pgvector-db:
container_name: hyf-pgvector-db
image: registry.t-aaron.com/pgvector/pgvector:pg16
environment:
POSTGRES_USER: drgraph
POSTGRES_PASSWORD: yingping
POSTGRES_DB: th_agenter
TZ: Asia/Shanghai
ports:
- "5433:5432"
volumes:
- ./hyf_backend/pgdata:/var/lib/postgresql/data
- ./hyf_backend/initdb:/docker-entrypoint-initdb.d
restart: unless-stopped
healthcheck:
test: ["CMD-SHELL", "pg_isready -U drgraph -d th_agenter"]
interval: 5s
timeout: 5s
retries: 5
hyf-backend:
container_name: hyf-backend
image: hyf-backend-runtime
build:
context: ./hyf_backend
dockerfile: dockerfile
environment:
- TZ=Asia/Shanghai
- DATABASE_URL=postgresql+asyncpg://drgraph:yingping@pgvector-db:5432/th_agenter
ports:
- "8800:8000"
volumes:
- ./hyf_backend/data/uploads:/app/data/uploads
- ./hyf_backend/data/chroma:/app/data/chroma
- ./hyf_backend/logs:/app/webIOs/output/logs
depends_on:
pgvector-db:
condition: service_healthy
restart: unless-stopped