olomana/ai/openwebui/config.json

{
"modelBackend": "ollama",
"ollamaConfig": {
"host": "http://host.docker.internal",
"port": 11434,
"defaultModel": "deepseek-coder"
},
"otherSettings": {
"enableStreaming": true,
"timeout": 60
}
}
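
A minimal, hypothetical Python sketch (not part of this repo) of how a script might consume this config: it reads ollamaConfig, calls Ollama's standard GET /api/tags endpoint to confirm the backend is reachable, and checks that defaultModel has been pulled. The relative "config.json" path and the warning text are assumptions for illustration.

# Sketch only: verify the Ollama backend described in config.json is reachable
# and that the configured default model is available locally.
import json
import urllib.request

with open("config.json") as f:
    cfg = json.load(f)

ollama = cfg["ollamaConfig"]
base_url = f'{ollama["host"]}:{ollama["port"]}'

# Ollama's /api/tags returns {"models": [{"name": "deepseek-coder:latest", ...}, ...]}
with urllib.request.urlopen(f"{base_url}/api/tags",
                            timeout=cfg["otherSettings"]["timeout"]) as resp:
    models = [m["name"] for m in json.load(resp)["models"]]

default = ollama["defaultModel"]
if any(name.startswith(default) for name in models):
    print(f'Ollama reachable at {base_url}; default model "{default}" is available.')
else:
    print(f'Warning: "{default}" not found locally; try: ollama pull {default}')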