---
# Open WebUI frontend for an Ollama instance running on the Docker host.
# Ollama is expected to be listening on the host at port 11434.
services:
  ollama-webui:
    image: ghcr.io/open-webui/open-webui:main
    container_name: ollama-webui
    restart: unless-stopped
    ports:
      # Host 8080 -> container 8080 (Open WebUI's default listen port).
      - "8080:8080"
    extra_hosts:
      # Make the Docker host reachable as host.docker.internal
      # (needed on Linux; Docker Desktop provides this name natively).
      - "host.docker.internal:host-gateway"
    volumes:
      # Bind mount: persistent app data (accounts, chats, settings) is
      # stored in ./ollama-webui on the host, not in a named volume.
      - ./ollama-webui:/app/backend/data
    environment:
      # Point the UI at the Ollama API on the Docker host.
      - OLLAMA_BASE_URL=http://host.docker.internal:11434
# 以前書いた記事 (previously written article):
# https://www.kixking.xyz/2026/01/macollama-openwebui-docker-composegpu.html