version: '3.7'

services:
  webui:
    image: "ghcr.io/open-webui/open-webui:${UI_IMAGE_TAG:-main}"
    container_name: ollama-open-webui
    volumes:
      - "${HOST_UI_DATA_DIR:-./data/open-webui}:/app/backend/data"
    depends_on:
      - ollama
    ports:
      - "${HTTP_UI_BIND_ADDR:-127.0.0.1}:${HTTP_UI_BIND_PORT:-3000}:8080"
    environment:
      - "OLLAMA_API_BASE_URL=http://ollama:11434/api"
      - 'WEBUI_SECRET_KEY=${UI_SECRET_KEY:-changeme}'
    #extra_hosts:
    #  - host.docker.internal:host-gateway
    restart: unless-stopped

  ollama:
    image: "ollama/ollama:${OLLAMA_IMAGE_TAG:-latest}"
    container_name: ollama
    #user: "${HOST_USER:-1000}"
    volumes:
      - "${HOST_OLLAMA_DATA_DIR:-./data}:/root/.ollama"
    ports:
      - "${HTTP_OLLAMA_BIND_ADDR:-127.0.0.1}:${HTTP_OLLAMA_BIND_PORT:-11434}:11434/tcp"
    env_file: .env
    restart: unless-stopped
    logging:
      driver: "json-file"
      options:
        max-size: "${LOG_MAX_SIZE:-5m}"
        max-file: "${LOG_MAX_FILE:-5}"
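
# A minimal sketch of a matching .env file, kept here as comments so this
# compose file stays valid YAML. Every value simply mirrors the defaults
# already declared above; override only what you need. Because the ollama
# service declares `env_file: .env`, Compose expects that file to exist
# (an empty file is enough).
#
#   UI_IMAGE_TAG=main
#   OLLAMA_IMAGE_TAG=latest
#   HOST_UI_DATA_DIR=./data/open-webui
#   HOST_OLLAMA_DATA_DIR=./data
#   HTTP_UI_BIND_ADDR=127.0.0.1
#   HTTP_UI_BIND_PORT=3000
#   HTTP_OLLAMA_BIND_ADDR=127.0.0.1
#   HTTP_OLLAMA_BIND_PORT=11434
#   UI_SECRET_KEY=changeme
#   LOG_MAX_SIZE=5m
#   LOG_MAX_FILE=5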