diff --git a/docker-compose.yml b/docker-compose.yml
index a86dccb..813e75f 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -69,6 +69,11 @@ services:
       ALLOW_REGISTRATION: "false"
       ALLOW_SOCIAL_LOGIN: "true"
       ALLOW_SOCIAL_REGISTRATION: "true"
+      # JWT / encryption secrets (required by LibreChat) — values come from .env.
+      CREDS_KEY: "${CREDS_KEY:?set CREDS_KEY in .env}"
+      CREDS_IV: "${CREDS_IV:?set CREDS_IV in .env}"
+      JWT_SECRET: "${JWT_SECRET:?set JWT_SECRET in .env}"
+      JWT_REFRESH_SECRET: "${JWT_REFRESH_SECRET:?set JWT_REFRESH_SECRET in .env}"
       # App settings
       APP_TITLE: CERTifAI Chat
       CUSTOM_FOOTER: CERTifAI - Sovereign GenAI Infrastructure
diff --git a/librechat/librechat.yaml b/librechat/librechat.yaml
index f79d033..eeb0013 100644
--- a/librechat/librechat.yaml
+++ b/librechat/librechat.yaml
@@ -1,6 +1,6 @@
 # CERTifAI LibreChat Configuration
 # Ollama backend for self-hosted LLM inference.
-version: 1.2.1
+version: 1.2.8
 
 cache: true
 
@@ -18,18 +18,23 @@ interface:
   parameters: true
 
 endpoints:
-  ollama:
-    titleModel: "current_model"
-    # Use the Docker host network alias when running inside compose.
-    # Override OLLAMA_URL in .env for external Ollama instances.
-    url: "http://host.docker.internal:11434"
-    models:
-      fetch: true
-    summarize: true
-    forcePrompt: false
-    dropParams:
-      - stop
-      - user
-      - frequency_penalty
-      - presence_penalty
-    modelDisplayLabel: "CERTifAI Ollama"
+  custom:
+    - name: "Ollama"
+      apiKey: "ollama"
+      baseURL: "http://host.docker.internal:11434/v1/"
+      models:
+        default:
+          - "llama3.1:8b"
+          - "qwen3:30b-a3b"
+        fetch: true
+      titleConvo: true
+      titleModel: "current_model"
+      summarize: false
+      summaryModel: "current_model"
+      forcePrompt: false
+      modelDisplayLabel: "CERTifAI Ollama"
+      dropParams:
+        - stop
+        - user
+        - frequency_penalty
+        - presence_penalty