# Changelog (from commit message):
# - Fix endpoints config: use the `custom` array format (not an `ollama` key),
#   per the LibreChat docs.
# - Add required JWT_SECRET, JWT_REFRESH_SECRET, CREDS_KEY, and CREDS_IV
#   environment variables.
# - Update config version to 1.2.8.
# CERTifAI LibreChat Configuration
# Ollama backend for self-hosted LLM inference.
---
# Quoted so no tool ever retypes it; LibreChat expects the config schema version
# as a string.
version: "1.2.8"

cache: true

registration:
  socialLogins:
    - openid

interface:
  privacyPolicy:
    externalUrl: "http://localhost:8000/privacy"
  termsOfService:
    externalUrl: "http://localhost:8000/impressum"
  endpointsMenu: true
  modelSelect: true
  parameters: true

endpoints:
  custom:
    # Self-hosted Ollama exposed through its OpenAI-compatible API.
    - name: "Ollama"
      # NOTE(review): Ollama does not validate this key, but LibreChat requires
      # a non-empty value for custom endpoints — confirm against deployment.
      apiKey: "ollama"
      # host.docker.internal reaches the host's Ollama daemon from inside the
      # LibreChat container.
      baseURL: "http://host.docker.internal:11434/v1/"
      models:
        default:
          - "llama3.1:8b"
          - "qwen3:30b-a3b"
        # Also query the backend at runtime for any additional models.
        fetch: true
      titleConvo: true
      titleModel: "current_model"
      summarize: false
      summaryModel: "current_model"
      forcePrompt: false
      modelDisplayLabel: "CERTifAI Ollama"
      # Parameters to strip from requests — presumably unsupported by Ollama's
      # OpenAI-compatible endpoint; verify against the Ollama API docs.
      dropParams:
        - stop
        - user
        - frequency_penalty
        - presence_penalty