This repository has been archived on 2026-02-15. You can view files and clone it, but you cannot open issues, create pull requests, or push commits.
Files
breakpilot-pwa/ai-compliance-sdk/internal/llm/anthropic_adapter.go
Benjamin Admin 21a844cb8a fix: Restore all files lost during destructive rebase
A previous `git pull --rebase origin main` dropped 177 local commits,
losing 3400+ files across admin-v2, backend, studio-v2, website,
klausur-service, and many other services. The partial restore attempt
(660295e2) only recovered some files.

This commit restores all missing files from pre-rebase ref 98933f5e
while preserving post-rebase additions (night-scheduler, night-mode UI,
NightModeWidget dashboard integration).

Restored features include:
- AI Module Sidebar (FAB), OCR Labeling, OCR Compare
- GPU Dashboard, RAG Pipeline, Magic Help
- Klausur-Korrektur (8 files), Abitur-Archiv (5+ files)
- Companion, Zeugnisse-Crawler, Screen Flow
- Full backend, studio-v2, website, klausur-service
- All compliance SDKs, agent-core, voice-service
- CI/CD configs, documentation, scripts

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-09 09:51:32 +01:00

251 lines
6.2 KiB
Go

package llm
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"time"
)
// AnthropicAdapter implements the Provider interface for Anthropic API
type AnthropicAdapter struct {
	apiKey       string       // sent as the "x-api-key" header; empty means the adapter is unconfigured
	baseURL      string       // API root, set to "https://api.anthropic.com" by NewAnthropicAdapter
	defaultModel string       // model used when a request does not name one
	httpClient   *http.Client // shared client; NewAnthropicAdapter gives it a 5-minute timeout
}
// NewAnthropicAdapter creates a new Anthropic adapter configured with the
// given API key and default model. The HTTP client uses a generous
// 5-minute timeout to accommodate long-running completions.
func NewAnthropicAdapter(apiKey, defaultModel string) *AnthropicAdapter {
	client := &http.Client{Timeout: 5 * time.Minute}
	adapter := &AnthropicAdapter{
		apiKey:       apiKey,
		baseURL:      "https://api.anthropic.com",
		defaultModel: defaultModel,
		httpClient:   client,
	}
	return adapter
}
// Name returns the provider name for this adapter.
func (a *AnthropicAdapter) Name() string { return ProviderAnthropic }
// IsAvailable reports whether the adapter is usable. Anthropic exposes no
// cheap health-check endpoint, so this only verifies that an API key has
// been configured; it never touches the network.
func (a *AnthropicAdapter) IsAvailable(ctx context.Context) bool {
	return a.apiKey != ""
}
// ListModels returns the Anthropic models this adapter knows about.
// Anthropic provides no model-listing endpoint, so the catalog is
// hard-coded; all entries share a 200k context window and chat capability.
func (a *AnthropicAdapter) ListModels(ctx context.Context) ([]Model, error) {
	catalog := []struct {
		id, name, desc string
	}{
		{"claude-3-opus-20240229", "Claude 3 Opus", "Most powerful model for complex tasks"},
		{"claude-3-sonnet-20240229", "Claude 3 Sonnet", "Balanced performance and speed"},
		{"claude-3-haiku-20240307", "Claude 3 Haiku", "Fast and efficient"},
		{"claude-3-5-sonnet-20240620", "Claude 3.5 Sonnet", "Latest and most capable model"},
	}
	models := make([]Model, 0, len(catalog))
	for _, entry := range catalog {
		models = append(models, Model{
			ID:           entry.id,
			Name:         entry.name,
			Provider:     ProviderAnthropic,
			Description:  entry.desc,
			ContextSize:  200000,
			Capabilities: []string{"chat"},
		})
	}
	return models, nil
}
// Complete performs text completion by delegating to Chat: the Anthropic
// API is chat-only, so the prompt is wrapped in a single user message and
// the chat response is mapped back onto a CompletionResponse.
func (a *AnthropicAdapter) Complete(ctx context.Context, req *CompletionRequest) (*CompletionResponse, error) {
	chatResp, err := a.Chat(ctx, &ChatRequest{
		Model:       req.Model,
		Messages:    []Message{{Role: "user", Content: req.Prompt}},
		MaxTokens:   req.MaxTokens,
		Temperature: req.Temperature,
		TopP:        req.TopP,
		Stop:        req.Stop,
	})
	if err != nil {
		return nil, err
	}
	return &CompletionResponse{
		ID:           chatResp.ID,
		Model:        chatResp.Model,
		Provider:     chatResp.Provider,
		Text:         chatResp.Message.Content,
		FinishReason: chatResp.FinishReason,
		Usage:        chatResp.Usage,
		Duration:     chatResp.Duration,
	}, nil
}
// Chat performs chat completion against the Anthropic /v1/messages API.
//
// It extracts any system message into the top-level "system" field (as the
// Messages API requires), applies the adapter's default model and a 4096
// max-token fallback, sends the request, and maps the response — including
// concatenated text content blocks and token usage — into a ChatResponse.
// Returns an error if no API key is configured, the request fails, the API
// responds with a non-200 status, or the response body cannot be decoded.
func (a *AnthropicAdapter) Chat(ctx context.Context, req *ChatRequest) (*ChatResponse, error) {
	if a.apiKey == "" {
		return nil, fmt.Errorf("anthropic API key not configured")
	}
	model := req.Model
	if model == "" {
		model = a.defaultModel
	}
	start := time.Now()
	// Extract system message if present: Anthropic takes it as a separate
	// top-level field rather than as a message with role "system".
	// NOTE(review): if multiple system messages are supplied, only the last
	// one is kept — confirm callers never send more than one.
	var systemMessage string
	var messages []map[string]string
	for _, m := range req.Messages {
		if m.Role == "system" {
			systemMessage = m.Content
		} else {
			messages = append(messages, map[string]string{
				"role":    m.Role,
				"content": m.Content,
			})
		}
	}
	// max_tokens is mandatory for the Messages API; fall back to 4096.
	maxTokens := req.MaxTokens
	if maxTokens == 0 {
		maxTokens = 4096
	}
	anthropicReq := map[string]any{
		"model":      model,
		"messages":   messages,
		"max_tokens": maxTokens,
	}
	if systemMessage != "" {
		anthropicReq["system"] = systemMessage
	}
	// Optional sampling parameters are only sent when explicitly positive,
	// letting the API apply its own defaults otherwise (a zero temperature
	// therefore cannot be forced through this adapter).
	if req.Temperature > 0 {
		anthropicReq["temperature"] = req.Temperature
	}
	if req.TopP > 0 {
		anthropicReq["top_p"] = req.TopP
	}
	if len(req.Stop) > 0 {
		anthropicReq["stop_sequences"] = req.Stop
	}
	body, err := json.Marshal(anthropicReq)
	if err != nil {
		return nil, err
	}
	httpReq, err := http.NewRequestWithContext(ctx, "POST", a.baseURL+"/v1/messages", bytes.NewReader(body))
	if err != nil {
		return nil, err
	}
	httpReq.Header.Set("Content-Type", "application/json")
	httpReq.Header.Set("x-api-key", a.apiKey)
	// Pinned API version required by Anthropic on every request.
	httpReq.Header.Set("anthropic-version", "2023-06-01")
	resp, err := a.httpClient.Do(httpReq)
	if err != nil {
		return nil, fmt.Errorf("anthropic request failed: %w", err)
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		// Best effort: include the raw error body in the returned error;
		// a ReadAll failure here just yields an empty detail string.
		bodyBytes, _ := io.ReadAll(resp.Body)
		return nil, fmt.Errorf("anthropic error (%d): %s", resp.StatusCode, string(bodyBytes))
	}
	// Minimal projection of the Messages API response shape.
	var result struct {
		ID      string `json:"id"`
		Type    string `json:"type"`
		Role    string `json:"role"`
		Content []struct {
			Type string `json:"type"`
			Text string `json:"text"`
		} `json:"content"`
		Model        string `json:"model"`
		StopReason   string `json:"stop_reason"`
		StopSequence string `json:"stop_sequence,omitempty"`
		Usage        struct {
			InputTokens  int `json:"input_tokens"`
			OutputTokens int `json:"output_tokens"`
		} `json:"usage"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		return nil, fmt.Errorf("failed to decode response: %w", err)
	}
	duration := time.Since(start)
	// Extract text from content blocks; non-text blocks are skipped.
	var responseText string
	for _, block := range result.Content {
		if block.Type == "text" {
			responseText += block.Text
		}
	}
	return &ChatResponse{
		ID:       result.ID,
		Model:    result.Model,
		Provider: ProviderAnthropic,
		Message: Message{
			Role:    "assistant",
			Content: responseText,
		},
		FinishReason: result.StopReason,
		Usage: UsageStats{
			PromptTokens:     result.Usage.InputTokens,
			CompletionTokens: result.Usage.OutputTokens,
			TotalTokens:      result.Usage.InputTokens + result.Usage.OutputTokens,
		},
		Duration: duration,
	}, nil
}
// Embed always fails: Anthropic has no native embeddings endpoint, so
// callers needing embeddings must use a different provider.
func (a *AnthropicAdapter) Embed(ctx context.Context, req *EmbedRequest) (*EmbedResponse, error) {
	return nil, fmt.Errorf("anthropic does not support embeddings - use Ollama or OpenAI")
}