From 0b23ca2cfcdb504fdfedc0ce329e2ba8d5613dac Mon Sep 17 00:00:00 2001
From: whitney
Date: Wed, 22 Jan 2025 16:08:26 -0800
Subject: [PATCH] Add openwebui-ollama as chat

---
 Makefile                | 48 -----------------------------------------
 chat/README.md          | 30 ++++++++++++++++++++++++++
 chat/config.json        | 12 +++++++++++
 chat/docker-compose.yml | 25 +++++++++++++++++++++
 4 files changed, 67 insertions(+), 48 deletions(-)
 delete mode 100644 Makefile
 create mode 100644 chat/README.md
 create mode 100644 chat/config.json
 create mode 100644 chat/docker-compose.yml

diff --git a/Makefile b/Makefile
deleted file mode 100644
index 9fce9a4..0000000
--- a/Makefile
+++ /dev/null
@@ -1,48 +0,0 @@
-MODULES = traefik code-server covid19 gitea grafana homepage minecraft nordvpn photoprism plex
-DOCKER_COMPOSE_DOWN = docker-compose down
-DOCKER_PRUNE = docker system prune -f
-DOCKER_COMPOSE_UP = docker-compose up -d
-DOCKER_COMPOSE_BUILD = docker-compose build
-DOCKER_START = $(DOCKER_COMPOSE_DOWN) && $(DOCKER_PRUNE) && $(DOCKER_COMPOSE_UP)
-DOCKER_BUILD = $(DOCKER_COMPOSE_DOWN) && $(DOCKER_PRUNE) && $(DOCKER_COMPOSE_BUILD) && $(DOCKER_COMPOSE_UP)
-DOCKER_NETWORK_CREATE = docker network create
-
-.PHONY: $(MODULES)
-
-all: network $(MODULES)
-
-network:
-	$(DOCKER_NETWORK_CREATE) traefik
-
-code-server:
-	cd code-server && $(DOCKER_BUILD)
-
-covid19:
-	cd covid19/covidapp-repo && docker build --tag="kr/covidapp" .
-	cd covid19 && $(DOCKER_START)
-	@echo "Setup complete, see README.md for instructions on seeding database."
-
-gitea:
-	cd gitea && $(DOCKER_BUILD)
-
-grafana: minecraft
-	cd grafana && $(DOCKER_COMPOSE_BUILD) && $(DOCKER_COMPOSE_UP)
-
-homepage:
-	cd homepage && $(DOCKER_BUILD)
-
-minecraft:
-	cd minecraft && $(DOCKER_COMPOSE_UP)
-
-nordvpn:
-	cd nordvpn && $(DOCKER_BUILD)
-
-photoprism:
-	cd photoprism && $(DOCKER_BUILD)
-
-plex:
-	cd plex && $(DOCKER_BUILD)
-
-traefik:
-	cd traefik && $(DOCKER_COMPOSE_UP)
-
diff --git a/chat/README.md b/chat/README.md
new file mode 100644
index 0000000..1d9ee6d
--- /dev/null
+++ b/chat/README.md
@@ -0,0 +1,30 @@
+# Chat
+
+# Implementations
+
+## Deepseek Stack using Ollama & OpenWebUI
+
+### Ollama (Manually)
+Start the Ollama server first:
+```
+ollama serve
+```
+(The default port is 11434.)
+Then run your model (run just this command if you only want to test a model):
+```
+ollama run deepseek-coder
+```
+Stop the server by stopping the ollama service:
+```
+systemctl stop ollama
+```
+
+### Or just run everything with Docker
+```
+docker compose down && docker system prune -af && docker compose build && docker compose up -d && docker logs -f openwebui
+```
+
+#### Notes on first-time setup
+1. Create an admin account.
+2. Wait for the UI to load; it can legitimately take a long time (around 10 minutes) with my card.
+
diff --git a/chat/config.json b/chat/config.json
new file mode 100644
index 0000000..49eb848
--- /dev/null
+++ b/chat/config.json
@@ -0,0 +1,12 @@
+{
+  "modelBackend": "ollama",
+  "ollamaConfig": {
+    "host": "http://host.docker.internal",
+    "port": 11434,
+    "defaultModel": "deepseek-coder"
+  },
+  "otherSettings": {
+    "enableStreaming": true,
+    "timeout": 60
+  }
+}
diff --git a/chat/docker-compose.yml b/chat/docker-compose.yml
new file mode 100644
index 0000000..b047070
--- /dev/null
+++ b/chat/docker-compose.yml
@@ -0,0 +1,25 @@
+networks:
+  traefik:
+    external: true
+
+services:
+  openwebui:
+    image: ghcr.io/open-webui/open-webui:ollama
+    container_name: openwebui
+    networks:
+      - traefik
+    environment:
+      - TZ=UTC
+      - OPENWEBUI_MODEL_PATH=/data/models
+    restart: unless-stopped
+    volumes:
+      - openwebui_data:/data
+    labels:
+      - traefik.http.routers.chat.rule=Host(`chat.whitney.rip`)
+      - traefik.http.routers.chat.tls=true
+      - traefik.http.routers.chat.tls.certresolver=lets-encrypt
+      - traefik.http.services.chat.loadbalancer.server.port=8080
+
+volumes:
+  openwebui_data:
+
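
Note (not part of the patch): the README and chat/config.json both assume an Ollama backend listening on port 11434 and serving `deepseek-coder`. A minimal sanity check along those assumptions, run before pointing OpenWebUI at the backend, might look like:

```
# Pull the model the config expects (name taken from chat/config.json).
ollama pull deepseek-coder

# Confirm the Ollama API answers on the default port and lists the model.
curl http://localhost:11434/api/tags

# One-off, non-streaming generation to verify the model actually responds.
curl http://localhost:11434/api/generate \
  -d '{"model": "deepseek-coder", "prompt": "hello", "stream": false}'
```

If these checks pass on the host but the containerized UI still cannot reach the backend, keep in mind that `host.docker.internal` (used in chat/config.json) does not resolve inside containers on Linux unless Docker is told to map it, for example with an `extra_hosts: ["host.docker.internal:host-gateway"]` entry in the compose service.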