-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: compose.yml
More file actions
155 lines (146 loc) · 4.74 KB
/
compose.yml
File metadata and controls
155 lines (146 loc) · 4.74 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
# Docker Compose stack for the LLM module:
#   llmdb        — PostgreSQL backing store (shared by llmcore and llmtelemetry)
#   llmcore      — core HTTP/gRPC service
#   llmworker    — worker connecting back to llmcore
#   llmtelemetry — telemetry/Telegram reporter
#   llmmcp       — MCP bridge in front of llmcore
#   ollama*      — optional local Ollama instances, enabled via profiles
services:
  # PostgreSQL 16; initialised from ./db/init, data persisted to ./data/postgres.
  llmdb:
    image: postgres:16
    container_name: llmdb
    labels:
      - "ns.module=llm"
      - "ns.component=db"
      - "ns.db_owner=llm"
    environment:
      POSTGRES_DB: ${POSTGRES_DB}
      POSTGRES_USER: ${POSTGRES_USER}
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
    ports:
      # Host port precedence: PORT_DB_LLM, then POSTGRES_PORT, then 5435.
      - "${PORT_DB_LLM:-${POSTGRES_PORT:-5435}}:5432"
    volumes:
      - ./data/postgres:/var/lib/postgresql/data
      - ./db/init:/docker-entrypoint-initdb.d:ro

  # Core service: exposes HTTP (8080 in-container) and gRPC; talks to llmdb.
  llmcore:
    build: ./core
    container_name: llmcore
    labels:
      - "ns.module=llm"
      - "ns.component=core"
    environment:
      CORE_HTTP_ADDR: ${CORE_HTTP_HOST:-0.0.0.0}:${CORE_HTTP_PORT:-${PORT_HTTP_LLMCORE:-8080}}
      CORE_GRPC_ADDR: ${CORE_GRPC_HOST:-0.0.0.0}:${CORE_GRPC_PORT:-${PORT_GRPC_LLMCORE:-9090}}
      CORE_VERSION: ${LLM_MCP_VERSION}
      DISCOVERY_INTERVAL: ${DISCOVERY_INTERVAL}
      DEVICE_MAX_CONCURRENCY: ${DEVICE_MAX_CONCURRENCY}
      STRICT_MODEL_LIMITS: ${STRICT_MODEL_LIMITS}
      DEVICE_LIMITS_INTERVAL: ${DEVICE_LIMITS_INTERVAL}
      DEVICE_LIMITS_JSON: ${DEVICE_LIMITS_JSON}
      DEVICE_LIMITS_FILE: ${DEVICE_LIMITS_FILE}
      OLLAMA_PORTS: ${OLLAMA_PORTS:-11434}
      DB_DSN: postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@llmdb:5432/${POSTGRES_DB}?sslmode=disable
      OPENAI_API_KEY: ${OPENAI_API_KEY}
      OPENROUTER_API_KEY: ${OPENROUTER_API_KEY}
    volumes:
      # Host Tailscale binary and socket mounted read-only so the container
      # can query the host tailscaled — requires Tailscale on the host.
      - /usr/bin/tailscale:/usr/bin/tailscale:ro
      - /var/run/tailscale/tailscaled.sock:/var/run/tailscale/tailscaled.sock:ro
    ports:
      # Host port precedence: PORT_HTTP_LLMCORE, then CORE_HTTP_PORT, then 8080.
      - "${PORT_HTTP_LLMCORE:-${CORE_HTTP_PORT:-8080}}:8080"
    depends_on:
      - llmdb

  # Worker: connects to llmcore over gRPC/HTTP; can reach a host-side Ollama
  # through host.docker.internal.
  llmworker:
    build: ./worker
    container_name: llmworker
    labels:
      - "ns.module=llm"
      - "ns.component=worker"
    environment:
      CORE_GRPC_ADDR: llmcore:${CORE_GRPC_PORT:-${PORT_GRPC_LLMCORE:-9090}}
      CORE_HTTP_URL: http://llmcore:8080
      OLLAMA_BASE_URL: ${OLLAMA_BASE_URL}
      OPENAI_API_KEY: ${OPENAI_API_KEY}
      OPENROUTER_API_KEY: ${OPENROUTER_API_KEY}
    extra_hosts:
      - "host.docker.internal:host-gateway"
    depends_on:
      - llmcore

  # Telemetry reporter: reads from llmdb, posts to Telegram either directly
  # or via an MCP gateway (TELEGRAM_USE_MCP, with direct fallback).
  llmtelemetry:
    build: ./telemetry
    container_name: llmtelemetry
    labels:
      - "ns.module=llm"
      - "ns.component=telemetry"
    environment:
      DB_DSN: postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@llmdb:5432/${POSTGRES_DB}?sslmode=disable
      TELEGRAM_BOT_TOKEN: ${TELEGRAM_BOT_TOKEN}
      TELEGRAM_CHAT_ID: ${TELEGRAM_CHAT_ID}
      TELEGRAM_USE_MCP: ${TELEGRAM_USE_MCP:-0}
      TELEGRAM_MCP_BASE_URL: ${TELEGRAM_MCP_BASE_URL:-http://tgapi:8000}
      TELEGRAM_MCP_BOT_ID: ${TELEGRAM_MCP_BOT_ID}
      TELEGRAM_MCP_CHAT_ID: ${TELEGRAM_MCP_CHAT_ID}
      TELEGRAM_MCP_FALLBACK_DIRECT: ${TELEGRAM_MCP_FALLBACK_DIRECT:-1}
      TELEGRAM_UPDATE_INTERVAL: ${TELEGRAM_UPDATE_INTERVAL}
      TELEMETRY_RUNNING_LIMIT: ${TELEMETRY_RUNNING_LIMIT}
      TELEMETRY_DEVICE_LIMIT: ${TELEMETRY_DEVICE_LIMIT}
      TELEMETRY_MAX_LEN: ${TELEMETRY_MAX_LEN}
    depends_on:
      - llmcore

  # MCP bridge: exposes llmcore over the MCP protocol.
  llmmcp:
    build: ./mcp
    container_name: llmmcp
    labels:
      - "ns.module=llm"
      - "ns.component=mcp"
    environment:
      CORE_GRPC_ADDR: llmcore:${CORE_GRPC_PORT:-${PORT_GRPC_LLMCORE:-9090}}
      CORE_HTTP_URL: http://llmcore:8080
      MCP_HTTP_ADDR: ${MCP_HTTP_HOST:-0.0.0.0}:${MCP_HTTP_PORT:-${PORT_MCP_LLM:-3333}}
      PROTO_PATH: /app/proto/llm.proto
    ports:
      # Host port precedence: PORT_MCP_LLM, then MCP_HTTP_PORT, then 3333.
      - "${PORT_MCP_LLM:-${MCP_HTTP_PORT:-3333}}:3333"
    depends_on:
      - llmcore

  # === Ollama services (optional, enabled via profiles) ===
  # Single Ollama: docker compose --profile ollama up
  ollama:
    image: ollama/ollama:latest
    container_name: ollama
    labels:
      - "ns.module=llm"
      - "ns.component=ollama"
    environment:
      OLLAMA_HOST: "0.0.0.0"
    ports:
      - "${OLLAMA_PORT_1:-11434}:11434"
    volumes:
      - ollama_data:/root/.ollama
    restart: unless-stopped
    # Included by both the single and multi profiles.
    profiles: [ollama, ollama-multi]

  # Second Ollama: docker compose --profile ollama-multi up
  ollama-2:
    image: ollama/ollama:latest
    container_name: ollama-2
    labels:
      - "ns.module=llm"
      - "ns.component=ollama"
    environment:
      OLLAMA_HOST: "0.0.0.0"
    ports:
      - "${OLLAMA_PORT_2:-11435}:11434"
    volumes:
      - ollama2_data:/root/.ollama
    restart: unless-stopped
    profiles: [ollama-multi]

  # Third Ollama: docker compose --profile ollama-multi up
  ollama-3:
    image: ollama/ollama:latest
    container_name: ollama-3
    labels:
      - "ns.module=llm"
      - "ns.component=ollama"
    environment:
      OLLAMA_HOST: "0.0.0.0"
    ports:
      - "${OLLAMA_PORT_3:-11436}:11434"
    volumes:
      - ollama3_data:/root/.ollama
    restart: unless-stopped
    profiles: [ollama-multi]

# Named volumes persisting Ollama model caches across container recreation.
volumes:
  ollama_data:
  ollama2_data:
  ollama3_data: