feat: use librechat instead of own chat (#14)
All checks were successful
CI / Format (push) Successful in 2s
CI / Clippy (push) Successful in 2m48s
CI / Security Audit (push) Successful in 1m44s
CI / Tests (push) Successful in 4m11s
CI / Deploy (push) Successful in 4s

Co-authored-by: Sharang Parnerkar <parnerkarsharang@gmail.com>
Reviewed-on: #14
This commit was merged in pull request #14.
This commit is contained in:
2026-02-24 10:45:41 +00:00
parent d814e22f9d
commit 208450e618
33 changed files with 968 additions and 2124 deletions

40
librechat/librechat.yaml Normal file
View File

@@ -0,0 +1,40 @@
---
# CERTifAI LibreChat Configuration
# Ollama backend for self-hosted LLM inference.
#
# Structure follows the LibreChat custom-endpoint schema:
#   registration.socialLogins  — allowed auth providers
#   interface.*                — UI toggles and legal links
#   endpoints.custom[]         — OpenAI-compatible backends (Ollama here)

# Config schema version — quoted: "1.2.8" is a string, not a number.
version: "1.2.8"
cache: true

registration:
  socialLogins:
    - openid

interface:
  privacyPolicy:
    externalUrl: "https://dash-dev.meghsakha.com/privacy"
  termsOfService:
    # NOTE(review): points at /impressum, not a ToS page — confirm intended.
    externalUrl: "https://dash-dev.meghsakha.com/impressum"
  endpointsMenu: true
  modelSelect: true
  parameters: true

endpoints:
  custom:
    # Ollama exposed via its OpenAI-compatible /v1 API.
    - name: "Ollama"
      # Ollama ignores the API key, but LibreChat requires a non-empty value.
      apiKey: "ollama"
      baseURL: "https://mac-mini-von-benjamin-2:11434/v1/"
      models:
        default:
          - "llama3.1:8b"
          - "qwen3:30b-a3b"
        # Also query the backend for its currently available models.
        fetch: true
      titleConvo: true
      titleModel: "current_model"
      summarize: false
      summaryModel: "current_model"
      forcePrompt: false
      modelDisplayLabel: "CERTifAI Ollama"
      # Parameters Ollama's OpenAI-compat layer does not support — strip
      # them from outgoing requests instead of erroring.
      dropParams:
        - "stop"
        - "user"
        - "frequency_penalty"
        - "presence_penalty"