feat(iace): LLM-gestuetzte Failure Mode Erkennung
Build + Deploy / build-admin-compliance (push) Successful in 1m42s
Build + Deploy / build-backend-compliance (push) Successful in 15s
Build + Deploy / build-ai-sdk (push) Successful in 9s
Build + Deploy / build-developer-portal (push) Successful in 11s
Build + Deploy / build-tts (push) Successful in 18s
Build + Deploy / build-document-crawler (push) Successful in 10s
Build + Deploy / build-dsms-gateway (push) Successful in 14s
Build + Deploy / build-dsms-node (push) Successful in 12s
CI / branch-name (push) Has been skipped
CI / guardrail-integrity (push) Has been skipped
CI / loc-budget (push) Failing after 14s
CI / secret-scan (push) Has been skipped
CI / go-lint (push) Has been skipped
CI / python-lint (push) Has been skipped
CI / nodejs-lint (push) Has been skipped
CI / nodejs-build (push) Successful in 2m32s
CI / dep-audit (push) Has been skipped
CI / sbom-scan (push) Has been skipped
CI / test-go (push) Successful in 41s
CI / test-python-backend (push) Successful in 37s
CI / test-python-document-crawler (push) Successful in 25s
CI / test-python-dsms-gateway (push) Successful in 21s
CI / validate-canonical-controls (push) Successful in 13s
Build + Deploy / trigger-orca (push) Successful in 2m25s
Build + Deploy / build-admin-compliance (push) Successful in 1m42s
Build + Deploy / build-backend-compliance (push) Successful in 15s
Build + Deploy / build-ai-sdk (push) Successful in 9s
Build + Deploy / build-developer-portal (push) Successful in 11s
Build + Deploy / build-tts (push) Successful in 18s
Build + Deploy / build-document-crawler (push) Successful in 10s
Build + Deploy / build-dsms-gateway (push) Successful in 14s
Build + Deploy / build-dsms-node (push) Successful in 12s
CI / branch-name (push) Has been skipped
CI / guardrail-integrity (push) Has been skipped
CI / loc-budget (push) Failing after 14s
CI / secret-scan (push) Has been skipped
CI / go-lint (push) Has been skipped
CI / python-lint (push) Has been skipped
CI / nodejs-lint (push) Has been skipped
CI / nodejs-build (push) Successful in 2m32s
CI / dep-audit (push) Has been skipped
CI / sbom-scan (push) Has been skipped
CI / test-go (push) Successful in 41s
CI / test-python-backend (push) Successful in 37s
CI / test-python-document-crawler (push) Successful in 25s
CI / test-python-dsms-gateway (push) Successful in 21s
CI / validate-canonical-controls (push) Successful in 13s
Build + Deploy / trigger-orca (push) Successful in 2m25s
POST /projects/:id/components/:cid/suggest-fms - Baut FMEA-Experten-Prompt aus Komponentenname + Maschinenkontext - LLM antwortet mit 5 FMs als JSON (Mode, Effect, S/O/D) - Fallback auf Bibliotheks-FMs wenn LLM nicht verfuegbar - Nutzt ProviderRegistry (Ollama primary, Anthropic fallback) Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -29,6 +29,7 @@ type IACEHandler struct {
|
||||
ragClient *ucca.LegalRAGClient
|
||||
techFileGen *iace.TechFileGenerator
|
||||
exporter *iace.DocumentExporter
|
||||
llmRegistry *llm.ProviderRegistry
|
||||
}
|
||||
|
||||
// NewIACEHandler creates a new IACEHandler with all required dependencies.
|
||||
@@ -42,6 +43,7 @@ func NewIACEHandler(store *iace.Store, providerRegistry *llm.ProviderRegistry) *
|
||||
ragClient: ragClient,
|
||||
techFileGen: iace.NewTechFileGenerator(providerRegistry, ragClient, store),
|
||||
exporter: iace.NewDocumentExporter(),
|
||||
llmRegistry: providerRegistry,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,10 +1,14 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/breakpilot/ai-compliance-sdk/internal/iace"
|
||||
"github.com/breakpilot/ai-compliance-sdk/internal/llm"
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/google/uuid"
|
||||
)
|
||||
@@ -79,3 +83,140 @@ func (h *IACEHandler) ExportFMEA(c *gin.Context) {
|
||||
c.Header("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filename))
|
||||
c.Data(http.StatusOK, "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", xlsxBytes)
|
||||
}
|
||||
|
||||
// SuggestFailureModes handles POST /projects/:id/components/:cid/suggest-fms
|
||||
// Uses LLM to suggest failure modes for a specific component.
|
||||
func (h *IACEHandler) SuggestFailureModes(c *gin.Context) {
|
||||
projectID, err := uuid.Parse(c.Param("id"))
|
||||
if err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid project ID"})
|
||||
return
|
||||
}
|
||||
componentID, err := uuid.Parse(c.Param("cid"))
|
||||
if err != nil {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"error": "invalid component ID"})
|
||||
return
|
||||
}
|
||||
|
||||
ctx := c.Request.Context()
|
||||
project, err := h.store.GetProject(ctx, projectID)
|
||||
if err != nil || project == nil {
|
||||
c.JSON(http.StatusNotFound, gin.H{"error": "project not found"})
|
||||
return
|
||||
}
|
||||
|
||||
comp, err := h.store.GetComponent(ctx, componentID)
|
||||
if err != nil || comp == nil {
|
||||
c.JSON(http.StatusNotFound, gin.H{"error": "component not found"})
|
||||
return
|
||||
}
|
||||
|
||||
// Build LLM prompt
|
||||
prompt := fmt.Sprintf(
|
||||
`Du bist ein FMEA-Experte (Fehlermoeglich- und Einflussanalyse) nach AIAG-VDA.
|
||||
Fuer die Komponente "%s" (Typ: %s) in der Maschine "%s" (%s):
|
||||
|
||||
Nenne die 5 wichtigsten Failure Modes. Fuer jeden:
|
||||
- mode: Kurzbezeichnung der Fehlerart
|
||||
- name_de: Deutsche Beschreibung
|
||||
- effect: Systemauswirkung
|
||||
- severity: Schwere 1-10 (10=katastrophal)
|
||||
- occurrence: Auftretenswahrscheinlichkeit 1-10 (10=sehr haeufig)
|
||||
- detection: Entdeckbarkeit 1-10 (10=nicht erkennbar)
|
||||
|
||||
Antworte NUR mit einem JSON-Array, keine Erklaerungen:
|
||||
[{"mode":"...","name_de":"...","effect":"...","severity":N,"occurrence":N,"detection":N}]`,
|
||||
comp.Name, comp.ComponentType, project.MachineName, project.MachineType)
|
||||
|
||||
// Try LLM
|
||||
suggestions, err := callLLMForFMs(ctx, h.llmRegistry, prompt)
|
||||
if err != nil {
|
||||
// Fallback: return library FMs for this component type
|
||||
allFMs := iace.GetFailureModeLibrary()
|
||||
var fallback []iace.FailureModeEntry
|
||||
for _, fm := range allFMs {
|
||||
if fm.ComponentType == string(comp.ComponentType) && len(fallback) < 5 {
|
||||
fallback = append(fallback, fm)
|
||||
}
|
||||
}
|
||||
c.JSON(http.StatusOK, gin.H{
|
||||
"suggestions": fallback,
|
||||
"source": "library_fallback",
|
||||
"total": len(fallback),
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
c.JSON(http.StatusOK, gin.H{
|
||||
"suggestions": suggestions,
|
||||
"source": "llm",
|
||||
"total": len(suggestions),
|
||||
})
|
||||
}
|
||||
|
||||
func callLLMForFMs(ctx context.Context, registry *llm.ProviderRegistry, prompt string) ([]iace.FailureModeEntry, error) {
|
||||
if registry == nil {
|
||||
return nil, fmt.Errorf("no LLM registry")
|
||||
}
|
||||
|
||||
provider, err := registry.GetAvailable(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("no LLM provider available: %w", err)
|
||||
}
|
||||
|
||||
resp, err := provider.Chat(ctx, &llm.ChatRequest{
|
||||
Messages: []llm.Message{
|
||||
{Role: "user", Content: prompt},
|
||||
},
|
||||
Temperature: 0.3,
|
||||
MaxTokens: 1000,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("LLM call failed: %w", err)
|
||||
}
|
||||
|
||||
// Parse JSON from response
|
||||
content := strings.TrimSpace(resp.Message.Content)
|
||||
// Strip markdown code fences if present
|
||||
content = strings.TrimPrefix(content, "```json")
|
||||
content = strings.TrimPrefix(content, "```")
|
||||
content = strings.TrimSuffix(content, "```")
|
||||
content = strings.TrimSpace(content)
|
||||
|
||||
var rawFMs []struct {
|
||||
Mode string `json:"mode"`
|
||||
NameDE string `json:"name_de"`
|
||||
Effect string `json:"effect"`
|
||||
Severity int `json:"severity"`
|
||||
Occurrence int `json:"occurrence"`
|
||||
Detection int `json:"detection"`
|
||||
}
|
||||
if err := json.Unmarshal([]byte(content), &rawFMs); err != nil {
|
||||
return nil, fmt.Errorf("failed to parse LLM response: %w", err)
|
||||
}
|
||||
|
||||
var result []iace.FailureModeEntry
|
||||
for i, fm := range rawFMs {
|
||||
result = append(result, iace.FailureModeEntry{
|
||||
ID: fmt.Sprintf("LLM-%03d", i+1),
|
||||
ComponentType: "llm_suggested",
|
||||
Mode: fm.Mode,
|
||||
NameDE: fm.NameDE,
|
||||
Effect: fm.Effect,
|
||||
DefaultSeverity: clamp(fm.Severity, 1, 10),
|
||||
DefaultOccurrence: clamp(fm.Occurrence, 1, 10),
|
||||
DefaultDetection: clamp(fm.Detection, 1, 10),
|
||||
})
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// clamp restricts v to the inclusive range [min, max].
func clamp(v, min, max int) int {
	switch {
	case v < min:
		return min
	case v > max:
		return max
	default:
		return v
	}
}
|
||||
|
||||
@@ -394,6 +394,7 @@ func registerIACERoutes(v1 *gin.RouterGroup, h *handlers.IACEHandler) {
|
||||
iaceRoutes.POST("/projects/:id/parse-narrative", h.ParseNarrative)
|
||||
iaceRoutes.POST("/projects/:id/delta-analysis", h.DeltaAnalysis)
|
||||
iaceRoutes.GET("/projects/:id/fmea/export", h.ExportFMEA)
|
||||
iaceRoutes.POST("/projects/:id/components/:cid/suggest-fms", h.SuggestFailureModes)
|
||||
iaceRoutes.POST("/projects/:id/apply-patterns", h.ApplyPatternResults)
|
||||
iaceRoutes.POST("/projects/:id/hazards/:hid/suggest-measures", h.SuggestMeasuresForHazard)
|
||||
iaceRoutes.POST("/projects/:id/mitigations/:mid/suggest-evidence", h.SuggestEvidenceForMitigation)
|
||||
|
||||
Reference in New Issue
Block a user