docker-compose.yml
version: '3.8'

networks:
  default:
    ipam:
      driver: default
      config:
        - subnet: 192.168.0.0/24

services:
  ollama:
    container_name: 'ollama'
    restart: unless-stopped
    build:
      context: .
      dockerfile: ./ollama.Dockerfile
    volumes:
      # Persist models and Ollama state on the host
      - ./ollama:/root/.ollama
    env_file:
      - .env
    ports:
      - '${LLM_PORT}:11434'
    # Pass the GPU through to this container
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: 1
              capabilities: [gpu]

  ollama_webui:
    container_name: 'ollama-webui'
    restart: unless-stopped
    build:
      context: .
      dockerfile: ./ollama-webui.Dockerfile
    volumes:
      # Persist web UI data (accounts, chat history) on the host
      - ./ollama-webui:/app/backend/data
    ports:
      - '${WEBUI_PORT}:8080'
    env_file:
      - .env
    environment:
      # Point the web UI at the Ollama API over the internal compose network
      - OLLAMA_API_BASE_URL=http://ollama:11434/api
    depends_on:
      - ollama
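
# Both services read their host port mappings from an .env file next to this
# compose file (see env_file above). A minimal sketch of that file, with
# illustrative values only -- the actual ports are your choice:
#
#   LLM_PORT=11434
#   WEBUI_PORT=3000
#
# The stack can then be built and started with:
#
#   docker compose up -d --build
#
# Note that the GPU reservation (driver: nvidia) requires the NVIDIA
# Container Toolkit to be installed on the host.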