---
# CERTifAI LibreChat Configuration
# Ollama backend for self-hosted LLM inference.
# NOTE(review): this file was previously collapsed onto one line beginning
# with '#', which made the ENTIRE config a YAML comment (i.e. an empty
# config as far as LibreChat is concerned). Reconstructed as block YAML.

# Config schema version (string — quoted so no parser retypes it).
version: "1.2.8"

# Enable server-side caching of the parsed config.
cache: true

registration:
  # Allow sign-up via the configured OpenID provider only.
  socialLogins:
    - openid

interface:
  privacyPolicy:
    externalUrl: "https://dash-dev.meghsakha.com/privacy"
  termsOfService:
    externalUrl: "https://dash-dev.meghsakha.com/impressum"
  # UI toggles: endpoint menu, model selector, and parameter panel.
  endpointsMenu: true
  modelSelect: true
  parameters: true

endpoints:
  custom:
    - name: "Ollama"
      # Ollama ignores the API key but LibreChat requires a non-empty value.
      apiKey: "ollama"
      # OpenAI-compatible endpoint exposed by the Ollama server.
      # NOTE(review): hostname 'mac-mini-von-benjamin-2' looks like a LAN
      # name and the scheme is https on the default Ollama port (11434,
      # normally plain http) — confirm TLS termination is actually in place.
      baseURL: "https://mac-mini-von-benjamin-2:11434/v1/"
      models:
        # Quoted: model tags contain ':' which is YAML-sensitive in
        # plain scalars.
        default:
          - "llama3.1:8b"
          - "qwen3:30b-a3b"
        # Also fetch the live model list from the Ollama server.
        fetch: true
      # Generate conversation titles using whichever model the chat uses.
      titleConvo: true
      titleModel: "current_model"
      summarize: false
      summaryModel: "current_model"
      forcePrompt: false
      modelDisplayLabel: "CERTifAI Ollama"
      # Strip OpenAI-only parameters that Ollama's API rejects/ignores.
      dropParams:
        - stop
        - user
        - frequency_penalty
        - presence_penalty