Merge pull request 'ollama. Stack added (ollama + open-webui).' (#78) from feature_ollama into master

Reviewed-on: #78
Alexey Skobkin 2024-02-23 23:59:24 +00:00
commit f0f12fba83
5 changed files with 55 additions and 0 deletions

README.md

@@ -60,6 +60,7 @@ Not every stack is tested to fully work.
| Metube | ✅ | `alexta69/metube` | Web GUI for yt-dlp. | [Github](https://github.com/alexta69/metube) |
| Murmur (Mumble server) | ✅ | `registry.gitlab.com/skobkin/docker-murmur` | Mumble VoIP server (custom build) | [Website](https://www.mumble.info), [Github](https://github.com/mumble-voip/mumble) |
| NextCloud | ❌ Unfinished | `nextcloud` | File management, synchronization, management and GTD platform. | [Website](https://nextcloud.com), [Github](https://github.com/nextcloud/server) |
| Ollama | ✅ | `ollama/ollama` | Toolkit for easily running LLMs locally. | [Website](https://ollama.com), [Github](https://github.com/ollama/ollama) |
| Open Streaming Platform | ✅ | `deamos/openstreamingplatform` | Live streaming platform. | [Website](https://openstreamingplatform.com), [Gitlab](https://gitlab.com/osp-group/flask-nginx-rtmp-manager) |
| OpenVPN | ✅ | `kylemanna/openvpn` | OpenVPN server with some management toolkit. | [Website](https://openvpn.net), [Image Github](https://www.github.com/kylemanna/docker-openvpn) |
| Owncast | ✅ | `gabekangas/owncast` | Live streaming platform with federation support. | [Website](https://owncast.online), [Github](https://github.com/owncast/owncast) |
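
For context on the Ollama row added above: once the stack introduced by this change is running, the Ollama HTTP API listens on the host address and port bound in the compose file below (127.0.0.1:11434 with the defaults). A minimal smoke test, assuming curl is available and at least one model has been pulled, might look like this ("llama2" is only an example model name):

curl http://127.0.0.1:11434/api/tags
curl http://127.0.0.1:11434/api/generate -d '{"model": "llama2", "prompt": "Hello", "stream": false}'

The first call lists whatever models are present locally; the second runs a one-off completion without streaming.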

16
ollama/.env.dist Normal file

@@ -0,0 +1,16 @@
# see https://hub.docker.com/r/ollama/ollama
OLLAMA_IMAGE_TAG=latest
UI_IMAGE_TAG=main
#HOST_USER=1000
HOST_OLLAMA_DATA_DIR=./data/ollama
HTTP_OLLAMA_BIND_ADDR=127.0.0.1
HTTP_OLLAMA_BIND_PORT=11434
HOST_UI_DATA_DIR=./data/open-webui
HTTP_UI_BIND_ADDR=127.0.0.1
HTTP_UI_BIND_PORT=8010
UI_SECRET_KEY=changeme
LOG_MAX_SIZE=5m
LOG_MAX_FILE=5
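
As with the other stacks in this repository, the dist file is presumably meant to be copied to `.env` and adjusted before first start; at the very least the `changeme` placeholder should be replaced, since `UI_SECRET_KEY` is passed to Open WebUI as its session secret. One possible sequence (the openssl call is just one way to generate a random value):

cp .env.dist .env
# replace the placeholder secret before exposing the UI
sed -i "s/^UI_SECRET_KEY=.*/UI_SECRET_KEY=$(openssl rand -hex 32)/" .env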

2
ollama/data/ollama/.gitignore vendored Normal file

@@ -0,0 +1,2 @@
/*
!/.gitignore

2
ollama/data/open-webui/.gitignore vendored Normal file

@@ -0,0 +1,2 @@
/*
!/.gitignore

34
ollama/docker-compose.yml Normal file

@@ -0,0 +1,34 @@
version: '3.7'
services:
  webui:
    image: "ghcr.io/open-webui/open-webui:${UI_IMAGE_TAG:-main}"
    container_name: ollama-open-webui
    volumes:
      - "${HOST_UI_DATA_DIR:-./data/open-webui}:/app/backend/data"
    depends_on:
      - ollama
    ports:
      - "${HTTP_UI_BIND_ADDR:-127.0.0.1}:${HTTP_UI_BIND_PORT:-3000}:8080"
    environment:
      - "OLLAMA_API_BASE_URL=http://ollama:11434/api"
      - 'WEBUI_SECRET_KEY=${UI_SECRET_KEY:-changeme}'
    #extra_hosts:
    #  - host.docker.internal:host-gateway
    restart: unless-stopped

  ollama:
    image: "ollama/ollama:${OLLAMA_IMAGE_TAG:-latest}"
    container_name: ollama
    #user: "${HOST_USER:-1000}"
    volumes:
      - "${HOST_OLLAMA_DATA_DIR:-./data}:/root/.ollama"
    ports:
      - "${HTTP_OLLAMA_BIND_ADDR:-127.0.0.1}:${HTTP_OLLAMA_BIND_PORT:-11434}:11434/tcp"
    env_file: .env
    restart: unless-stopped
    logging:
      driver: "json-file"
      options:
        max-size: "${LOG_MAX_SIZE:-5m}"
        max-file: "${LOG_MAX_FILE:-5}"
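
A possible bring-up sequence for the stack defined above (the container name comes from the compose file; the model name is only an example):

docker compose up -d
# pull a model into the ollama data directory
docker exec -it ollama ollama pull llama2

Open WebUI is then reachable at the address bound by HTTP_UI_BIND_ADDR:HTTP_UI_BIND_PORT (127.0.0.1:8010 with the .env.dist values) and talks to the ollama service over the compose network via OLLAMA_API_BASE_URL.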