Mirror of https://github.com/runyanjake/olomana.git (synced 2025-10-04 21:27:29 -07:00)
Add openwebui-ollama as chat

commit 0b23ca2cfc (parent 9db21bceec)
Makefile (deleted, 48 lines)
@@ -1,48 +0,0 @@
MODULES = traefik code-server covid19 gitea grafana homepage minecraft nordvpn photoprism plex
DOCKER_COMPOSE_DOWN = docker-compose down
DOCKER_PRUNE = docker system prune -f
DOCKER_COMPOSE_UP = docker-compose up -d
DOCKER_COMPOSE_BUILD = docker-compose build
DOCKER_START = $(DOCKER_COMPOSE_DOWN) && $(DOCKER_PRUNE) && $(DOCKER_COMPOSE_UP)
DOCKER_BUILD = $(DOCKER_COMPOSE_DOWN) && $(DOCKER_PRUNE) && $(DOCKER_COMPOSE_BUILD) && $(DOCKER_COMPOSE_UP)
DOCKER_NETWORK_CREATE = docker network create

.PHONY: $(MODULES)

all: network $(MODULES)

network:
	$(DOCKER_NETWORK_CREATE) traefik

code-server:
	cd code-server && $(DOCKER_BUILD)

covid19:
	cd covid19/covidapp-repo && docker build --tag="kr/covidapp" .
	cd covid19 && $(DOCKER_START)
	@echo "Setup complete, see README.md for instructions on seeding database."

gitea:
	cd gitea && $(DOCKER_BUILD)

grafana: minecraft
	cd grafana && $(DOCKER_COMPOSE_BUILD) && $(DOCKER_COMPOSE_UP)

homepage:
	cd homepage && $(DOCKER_BUILD)

minecraft:
	cd minecraft && $(DOCKER_COMPOSE_UP)

nordvpn:
	cd nordvpn && $(DOCKER_BUILD)

photoprism:
	cd photoprism && $(DOCKER_BUILD)

plex:
	cd plex && $(DOCKER_BUILD)

traefik:
	cd traefik && $(DOCKER_COMPOSE_UP)
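For reference, the removed Makefile was driven per module from the repository root; typical invocations would have looked along the lines of:
```
# Create the shared traefik network and (re)build/start every module:
make all

# Or rebuild and restart a single service, e.g. gitea:
make gitea
```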
chat/README.md (new file, 30 lines)
@@ -0,0 +1,30 @@
# Chat

# Implementations

## Deepseek Stack using Ollama & OpenWebUI

### Ollama (Manually)
Start a server by first running:
```
ollama serve
```
(The default port is 11434.)
Then run your model (run just this step to test a model):
```
ollama run deepseek-coder
```
Stop the server by stopping the ollama service:
```
systemctl stop ollama
```
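Once `ollama serve` is up, a quick sanity check from another shell is roughly the following (a sketch, assuming the default port and that `deepseek-coder` has already been pulled):
```
# List the models the local Ollama server knows about:
curl http://localhost:11434/api/tags

# Request a single non-streamed completion from deepseek-coder:
curl http://localhost:11434/api/generate \
  -d '{"model": "deepseek-coder", "prompt": "Write a hello world in Go.", "stream": false}'
```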

### Or just run everything with Docker
```
docker compose down && docker system prune -af && docker compose build && docker compose up -d && docker logs -f openwebui
```
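Instead of (or after) tailing the logs, the deployment can be sanity-checked roughly like this (a sketch; `openwebui` is the container name from the compose file below, 8080 is OpenWebUI's internal port, and it assumes `curl` exists inside the image):
```
# Confirm the container is running and attached to the external traefik network:
docker ps --filter name=openwebui
docker network inspect traefik | grep openwebui

# Hit the UI directly inside the container, bypassing Traefik:
docker exec openwebui curl -s -o /dev/null -w "%{http_code}\n" http://localhost:8080
```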

#### Notes on first-time setup
1. Create an admin account.
2. Wait a moment for the UI to load; it can take a long time (legitimately around 10 minutes) with my card.
chat/config.json (new file, 12 lines)
@@ -0,0 +1,12 @@
{
  "modelBackend": "ollama",
  "ollamaConfig": {
    "host": "http://host.docker.internal",
    "port": 11434,
    "defaultModel": "deepseek-coder"
  },
  "otherSettings": {
    "enableStreaming": true,
    "timeout": 60
  }
}
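Since this config points at `host.docker.internal:11434`, it is worth confirming that address is actually reachable from inside the container; a rough check (assuming Ollama is running on the host and `curl` is available in the image) could be:
```
# From inside the openwebui container, confirm the host's Ollama API answers:
docker exec openwebui curl -s http://host.docker.internal:11434/api/version

# And that the defaultModel from config.json has been pulled:
docker exec openwebui curl -s http://host.docker.internal:11434/api/tags
```
Note that on Linux hosts `host.docker.internal` usually only resolves if the compose service adds an `extra_hosts: ["host.docker.internal:host-gateway"]` entry.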
chat/docker-compose.yml (new file, 25 lines)
@@ -0,0 +1,25 @@
networks:
  traefik:
    external: true

services:
  openwebui:
    image: ghcr.io/open-webui/open-webui:ollama
    container_name: openwebui
    networks:
      - traefik
    environment:
      - TZ=UTC
      - OPENWEBUI_MODEL_PATH=/data/models
    restart: unless-stopped
    volumes:
      - openwebui_data:/data
    labels:
      - traefik.http.routers.chat.rule=Host(`chat.whitney.rip`)
      - traefik.http.routers.chat.tls=true
      - traefik.http.routers.chat.tls.certresolver=lets-encrypt
      - traefik.http.services.chat.loadbalancer.server.port=8080

volumes:
  openwebui_data:
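After `docker compose up -d`, a quick way to confirm Traefik picked up the `chat` router is something like the following (a sketch, assuming Traefik terminates TLS for `chat.whitney.rip` on this same host):
```
# The container should appear on the external traefik network:
docker network inspect traefik --format '{{range .Containers}}{{.Name}} {{end}}'

# Request the UI through Traefik, forcing the hostname to resolve locally:
curl -skI https://chat.whitney.rip --resolve chat.whitney.rip:443:127.0.0.1
```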