fix: Restore all files lost during destructive rebase

A previous `git pull --rebase origin main` dropped 177 local commits,
losing 3400+ files across admin-v2, backend, studio-v2, website,
klausur-service, and many other services. The partial restore attempt
(660295e2) only recovered some files.

This commit restores all missing files from pre-rebase ref 98933f5e
while preserving post-rebase additions (night-scheduler, night-mode UI,
NightModeWidget dashboard integration).

Restored features include:
- AI Module Sidebar (FAB), OCR Labeling, OCR Compare
- GPU Dashboard, RAG Pipeline, Magic Help
- Klausur-Korrektur (8 files), Abitur-Archiv (5+ files)
- Companion, Zeugnisse-Crawler, Screen Flow
- Full backend, studio-v2, website, klausur-service
- All compliance SDKs, agent-core, voice-service
- CI/CD configs, documentation, scripts

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Benjamin Admin
2026-02-09 09:51:32 +01:00
parent f7487ee240
commit bfdaf63ba9
2009 changed files with 749983 additions and 1731 deletions

View File

@@ -0,0 +1,123 @@
"""Initial Classroom Engine Tables (Feature f13/f15)
Erstellt die Tabellen fuer:
- lesson_sessions: Unterrichtsstunden
- lesson_phase_history: Phasen-Verlauf
- teacher_settings: Lehrer-Einstellungen
Revision ID: 001
Revises: None
Create Date: 2026-01-15 12:00:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '001'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
# Enum-Typ fuer Unterrichtsphasen
lesson_phase_enum = postgresql.ENUM(
'not_started',
'einstieg',
'erarbeitung',
'sicherung',
'transfer',
'reflexion',
'ended',
name='lessonphaseenum',
create_type=False
)
def upgrade() -> None:
    """Create the classroom engine tables and the lesson-phase enum type."""
    # Create the enum type first; checkfirst keeps the migration re-runnable
    # against a database where the type already exists.
    lesson_phase_enum.create(op.get_bind(), checkfirst=True)
    # lesson_sessions table
    op.create_table(
        'lesson_sessions',
        sa.Column('id', sa.String(36), primary_key=True),
        sa.Column('teacher_id', sa.String(100), nullable=False, index=True),
        sa.Column('class_id', sa.String(50), nullable=False, index=True),
        sa.Column('subject', sa.String(100), nullable=False),
        sa.Column('topic', sa.String(500), nullable=True),
        sa.Column(
            'current_phase',
            lesson_phase_enum,
            nullable=False,
            server_default='not_started'
        ),
        sa.Column('is_paused', sa.Boolean(), default=False),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
        sa.Column('lesson_started_at', sa.DateTime(), nullable=True),
        sa.Column('lesson_ended_at', sa.DateTime(), nullable=True),
        sa.Column('phase_started_at', sa.DateTime(), nullable=True),
        sa.Column('pause_started_at', sa.DateTime(), nullable=True),
        sa.Column('total_paused_seconds', sa.Integer(), default=0),
        sa.Column('phase_durations', sa.JSON(), default=dict),
        sa.Column('phase_history', sa.JSON(), default=list),
        sa.Column('notes', sa.Text(), default=''),
        sa.Column('homework', sa.Text(), default=''),
    )
    # lesson_phase_history table (child of lesson_sessions, cascade delete)
    op.create_table(
        'lesson_phase_history',
        sa.Column('id', sa.String(36), primary_key=True),
        sa.Column(
            'session_id',
            sa.String(36),
            sa.ForeignKey('lesson_sessions.id', ondelete='CASCADE'),
            nullable=False,
            index=True
        ),
        sa.Column('phase', lesson_phase_enum, nullable=False),
        sa.Column('started_at', sa.DateTime(), nullable=False),
        sa.Column('ended_at', sa.DateTime(), nullable=True),
        sa.Column('duration_seconds', sa.Integer(), nullable=True),
        sa.Column('was_extended', sa.Boolean(), default=False),
        sa.Column('extension_minutes', sa.Integer(), default=0),
        sa.Column('pause_count', sa.Integer(), default=0),
        sa.Column('total_pause_seconds', sa.Integer(), default=0),
    )
    # teacher_settings table (one row per teacher)
    op.create_table(
        'teacher_settings',
        sa.Column('id', sa.String(36), primary_key=True),
        sa.Column(
            'teacher_id',
            sa.String(100),
            unique=True,
            nullable=False,
            index=True
        ),
        sa.Column('default_phase_durations', sa.JSON(), default=dict),
        sa.Column('audio_enabled', sa.Boolean(), default=True),
        sa.Column('high_contrast', sa.Boolean(), default=False),
        sa.Column('show_statistics', sa.Boolean(), default=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
        sa.Column(
            'updated_at',
            sa.DateTime(),
            server_default=sa.func.now(),
            onupdate=sa.func.now()
        ),
    )
def downgrade() -> None:
    """Drop the classroom engine tables, children first, then the enum type."""
    # lesson_phase_history references lesson_sessions, so it is removed
    # before its parent table.
    for table_name in ('teacher_settings', 'lesson_phase_history', 'lesson_sessions'):
        op.drop_table(table_name)
    # Remove the enum type once no column uses it any longer.
    lesson_phase_enum.drop(op.get_bind(), checkfirst=True)

View File

@@ -0,0 +1,52 @@
"""Add Lesson Templates Table (Feature f37)
Erstellt die lesson_templates Tabelle fuer wiederverwendbare
Stundenkonfigurationen.
Revision ID: 002
Revises: 001
Create Date: 2026-01-15 14:00:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '002'
down_revision: Union[str, None] = '001'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create lesson_templates plus a partial index for public templates."""
    op.create_table(
        'lesson_templates',
        sa.Column('id', sa.String(36), primary_key=True),
        sa.Column('teacher_id', sa.String(100), nullable=False, index=True),
        sa.Column('name', sa.String(200), nullable=False),
        sa.Column('description', sa.Text(), default=''),
        sa.Column('subject', sa.String(100), default=''),
        sa.Column('grade_level', sa.String(50), default=''),
        sa.Column('phase_durations', sa.JSON(), default=dict),
        sa.Column('default_topic', sa.String(500), default=''),
        sa.Column('default_notes', sa.Text(), default=''),
        sa.Column('is_public', sa.Boolean(), default=False),
        sa.Column('usage_count', sa.Integer(), default=0),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now(), onupdate=sa.func.now()),
    )
    # Index for browsing public templates by popularity. PostgreSQL-only
    # partial index: only rows with is_public = true are indexed.
    op.create_index(
        'ix_lesson_templates_public',
        'lesson_templates',
        ['is_public', 'usage_count'],
        postgresql_where=sa.text('is_public = true')
    )
def downgrade() -> None:
    """Drop the public-templates index and then the lesson_templates table."""
    op.drop_index('ix_lesson_templates_public', table_name='lesson_templates')
    op.drop_table('lesson_templates')

View File

@@ -0,0 +1,56 @@
"""Add Homework Assignments Table (Feature f20)
Erstellt die homework_assignments Tabelle fuer das
Hausaufgaben-Tracking.
Revision ID: 003
Revises: 002
Create Date: 2026-01-15 16:00:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '003'
down_revision: Union[str, None] = '002'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create homework_assignments plus an index for pending homework."""
    op.create_table(
        'homework_assignments',
        sa.Column('id', sa.String(36), primary_key=True),
        sa.Column('teacher_id', sa.String(100), nullable=False, index=True),
        sa.Column('class_id', sa.String(50), nullable=False, index=True),
        sa.Column('subject', sa.String(100), nullable=False),
        sa.Column('title', sa.String(300), nullable=False),
        sa.Column('description', sa.Text(), default=''),
        sa.Column('session_id', sa.String(36), sa.ForeignKey('lesson_sessions.id'), nullable=True, index=True),
        sa.Column('due_date', sa.DateTime(), nullable=True, index=True),
        sa.Column(
            'status',
            # sa.Enum creates the PostgreSQL type implicitly on table
            # creation; downgrade() removes it with an explicit DROP TYPE.
            sa.Enum('assigned', 'in_progress', 'completed', 'overdue', name='homeworkstatusenum'),
            default='assigned',
            nullable=False
        ),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now(), onupdate=sa.func.now()),
    )
    # Index for "upcoming homework" queries (per teacher, by status/due date).
    op.create_index(
        'ix_homework_pending',
        'homework_assignments',
        ['teacher_id', 'status', 'due_date'],
    )
def downgrade() -> None:
    """Drop homework_assignments, its index, and the status enum type."""
    op.drop_index('ix_homework_pending', table_name='homework_assignments')
    op.drop_table('homework_assignments')
    # The enum type was created implicitly by sa.Enum in upgrade(), so it
    # must be removed explicitly here.
    op.execute("DROP TYPE IF EXISTS homeworkstatusenum")

View File

@@ -0,0 +1,69 @@
"""Add Phase Materials Table (Feature f19)
Erstellt die phase_materials Tabelle fuer die
Material-Verknuepfung an Unterrichtsphasen.
Revision ID: 004
Revises: 003
Create Date: 2026-01-15 17:00:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '004'
down_revision: Union[str, None] = '003'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create phase_materials plus search and public-materials indexes."""
    op.create_table(
        'phase_materials',
        sa.Column('id', sa.String(36), primary_key=True),
        sa.Column('teacher_id', sa.String(100), nullable=False, index=True),
        sa.Column('title', sa.String(300), nullable=False),
        sa.Column(
            'material_type',
            # sa.Enum creates the PostgreSQL type implicitly; downgrade()
            # drops it with an explicit DROP TYPE.
            sa.Enum('document', 'link', 'video', 'image', 'worksheet', 'presentation', 'other',
                    name='materialtypeenum'),
            default='document',
            nullable=False
        ),
        sa.Column('url', sa.String(2000), nullable=True),
        sa.Column('description', sa.Text(), default=''),
        sa.Column('phase', sa.String(50), nullable=True, index=True),
        sa.Column('subject', sa.String(100), default=''),
        sa.Column('grade_level', sa.String(50), default=''),
        sa.Column('tags', sa.JSON(), default=list),
        sa.Column('is_public', sa.Boolean(), default=False),
        sa.Column('usage_count', sa.Integer(), default=0),
        sa.Column('session_id', sa.String(36), sa.ForeignKey('lesson_sessions.id'), nullable=True, index=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now(), onupdate=sa.func.now()),
    )
    # Index for phase-based search (per teacher, by phase and subject).
    op.create_index(
        'ix_phase_materials_search',
        'phase_materials',
        ['teacher_id', 'phase', 'subject'],
    )
    # PostgreSQL-only partial index for browsing public materials.
    op.create_index(
        'ix_phase_materials_public',
        'phase_materials',
        ['is_public', 'usage_count'],
        postgresql_where=sa.text('is_public = true')
    )
def downgrade() -> None:
    """Drop phase_materials, its indexes, and the material-type enum."""
    # Indexes go before the table they belong to.
    for index_name in ('ix_phase_materials_public', 'ix_phase_materials_search'):
        op.drop_index(index_name, table_name='phase_materials')
    op.drop_table('phase_materials')
    # The enum type was created implicitly by sa.Enum in upgrade().
    op.execute("DROP TYPE IF EXISTS materialtypeenum")

View File

@@ -0,0 +1,40 @@
"""Add Lesson Reflections Table (Phase 5: Analytics)
Erstellt die lesson_reflections Tabelle fuer
Post-Lesson Reflexionen.
Revision ID: 005
Revises: 004
Create Date: 2026-01-15 18:00:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '005'
down_revision: Union[str, None] = '004'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create lesson_reflections (one reflection per lesson session)."""
    op.create_table(
        'lesson_reflections',
        sa.Column('id', sa.String(36), primary_key=True),
        # unique=True enforces at most one reflection per session.
        sa.Column('session_id', sa.String(36), sa.ForeignKey('lesson_sessions.id'), nullable=False, unique=True, index=True),
        sa.Column('teacher_id', sa.String(100), nullable=False, index=True),
        sa.Column('notes', sa.Text(), default=''),
        sa.Column('overall_rating', sa.Integer(), nullable=True),
        sa.Column('what_worked', sa.JSON(), default=list),
        sa.Column('improvements', sa.JSON(), default=list),
        sa.Column('notes_for_next_lesson', sa.Text(), default=''),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now(), onupdate=sa.func.now()),
    )
def downgrade() -> None:
    """Drop the lesson_reflections table created in upgrade()."""
    op.drop_table('lesson_reflections')

View File

@@ -0,0 +1,45 @@
"""Add teacher_feedback table for Phase 7.
Revision ID: 006_teacher_feedback
Revises: 005_lesson_reflections
Create Date: 2026-01-15 19:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers
revision = '006_teacher_feedback'
down_revision = '005_lesson_reflections'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create teacher_feedback table."""
    op.create_table(
        'teacher_feedback',
        sa.Column('id', sa.String(36), primary_key=True),
        sa.Column('teacher_id', sa.String(100), nullable=False, index=True),
        sa.Column('teacher_name', sa.String(200), default=''),
        sa.Column('teacher_email', sa.String(200), default=''),
        sa.Column('title', sa.String(500), nullable=False),
        sa.Column('description', sa.Text, nullable=False),
        sa.Column('feedback_type', sa.String(50), nullable=False, default='improvement'),
        sa.Column('priority', sa.String(50), nullable=False, default='medium'),
        sa.Column('status', sa.String(50), nullable=False, default='new', index=True),
        sa.Column('related_feature', sa.String(50), nullable=True),
        # Context captured from the UI at submit time.
        sa.Column('context_url', sa.String(500), default=''),
        sa.Column('context_phase', sa.String(50), default=''),
        sa.Column('context_session_id', sa.String(36), nullable=True),
        sa.Column('user_agent', sa.String(500), default=''),
        # Optional staff response to the feedback.
        sa.Column('response', sa.Text, default=''),
        sa.Column('responded_at', sa.DateTime, nullable=True),
        sa.Column('responded_by', sa.String(100), nullable=True),
        # NOTE(review): default= (Python-side) rather than server_default=,
        # unlike migrations 001-005 — timestamps are set by the ORM here.
        sa.Column('created_at', sa.DateTime, default=sa.func.now()),
        sa.Column('updated_at', sa.DateTime, default=sa.func.now(), onupdate=sa.func.now()),
    )
def downgrade() -> None:
    """Drop the teacher_feedback table created in upgrade()."""
    op.drop_table('teacher_feedback')

View File

@@ -0,0 +1,111 @@
"""Add teacher_contexts, schoolyear_events, recurring_routines tables for Phase 8.
Schuljahres-Begleiter Erweiterung mit 2-Schichten-Modell:
- Makro-Ebene: 7 Schuljahres-Phasen
- Mikro-Ebene: Events, Routinen, Arbeitsmodi
Revision ID: 007_teacher_context
Revises: 006_teacher_feedback
Create Date: 2026-01-15 20:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers
revision = '007_teacher_context'
down_revision = '006_teacher_feedback'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create teacher_contexts, schoolyear_events, recurring_routines tables."""
    # Teacher context - macro state per teacher (one row per teacher).
    op.create_table(
        'teacher_contexts',
        sa.Column('id', sa.String(36), primary_key=True),
        sa.Column('teacher_id', sa.String(100), unique=True, nullable=False, index=True),
        # School context
        sa.Column('federal_state', sa.String(10), default='BY'),
        sa.Column('school_type', sa.String(50), default='gymnasium'),
        # School year
        sa.Column('schoolyear', sa.String(20), default='2024-2025'),
        sa.Column('schoolyear_start', sa.DateTime, nullable=True),
        # Macro phase
        sa.Column('macro_phase', sa.String(50), nullable=False, default='onboarding'),
        sa.Column('current_week', sa.Integer, default=1),
        # Derived flags
        sa.Column('is_exam_period', sa.Boolean, default=False),
        sa.Column('is_before_holidays', sa.Boolean, default=False),
        # Onboarding status
        sa.Column('onboarding_completed', sa.Boolean, default=False),
        sa.Column('has_classes', sa.Boolean, default=False),
        sa.Column('has_schedule', sa.Boolean, default=False),
        # Metadata
        sa.Column('created_at', sa.DateTime, default=sa.func.now()),
        sa.Column('updated_at', sa.DateTime, default=sa.func.now(), onupdate=sa.func.now()),
    )
    # Schoolyear events - one-off events (exams, parents' evenings, etc.)
    op.create_table(
        'schoolyear_events',
        sa.Column('id', sa.String(36), primary_key=True),
        sa.Column('teacher_id', sa.String(100), nullable=False, index=True),
        # Event details
        sa.Column('event_type', sa.String(50), nullable=False, default='other'),
        sa.Column('title', sa.String(300), nullable=False),
        sa.Column('description', sa.Text, default=''),
        # Time range
        sa.Column('start_date', sa.DateTime, nullable=False, index=True),
        sa.Column('end_date', sa.DateTime, nullable=True),
        # Assignment
        sa.Column('class_id', sa.String(100), nullable=True, index=True),
        sa.Column('subject', sa.String(100), nullable=True),
        # Status
        sa.Column('status', sa.String(50), nullable=False, default='planned', index=True),
        # Anticipation flags
        sa.Column('needs_preparation', sa.Boolean, default=True),
        sa.Column('preparation_done', sa.Boolean, default=False),
        sa.Column('reminder_days_before', sa.Integer, default=7),
        # Flexible metadata
        sa.Column('extra_data', sa.JSON, default=dict),
        # Timestamps
        sa.Column('created_at', sa.DateTime, default=sa.func.now()),
        sa.Column('updated_at', sa.DateTime, default=sa.func.now(), onupdate=sa.func.now()),
    )
    # Recurring routines - repeating activities
    op.create_table(
        'recurring_routines',
        sa.Column('id', sa.String(36), primary_key=True),
        sa.Column('teacher_id', sa.String(100), nullable=False, index=True),
        # Routine details
        sa.Column('routine_type', sa.String(50), nullable=False, default='other'),
        sa.Column('title', sa.String(300), nullable=False),
        sa.Column('description', sa.Text, default=''),
        # Recurrence
        sa.Column('recurrence_pattern', sa.String(50), nullable=False, default='weekly'),
        sa.Column('day_of_week', sa.Integer, nullable=True),  # 0=Mon, 6=Sun
        sa.Column('day_of_month', sa.Integer, nullable=True),  # 1-31
        sa.Column('time_of_day', sa.Time, nullable=True),
        # Duration
        sa.Column('duration_minutes', sa.Integer, default=60),
        # Active?
        sa.Column('is_active', sa.Boolean, default=True),
        # Validity window
        sa.Column('valid_from', sa.DateTime, nullable=True),
        sa.Column('valid_until', sa.DateTime, nullable=True),
        # Metadata
        sa.Column('extra_data', sa.JSON, default=dict),
        # Timestamps
        sa.Column('created_at', sa.DateTime, default=sa.func.now()),
        sa.Column('updated_at', sa.DateTime, default=sa.func.now(), onupdate=sa.func.now()),
    )
def downgrade() -> None:
    """Drop Phase 8 tables."""
    # No FKs between these tables; order mirrors the reverse of upgrade().
    for table_name in ('recurring_routines', 'schoolyear_events', 'teacher_contexts'):
        op.drop_table(table_name)

View File

@@ -0,0 +1,255 @@
"""Alerts Agent Tables
Erstellt die Tabellen für:
- alert_topics: Feed-Quellen (Google Alerts, RSS)
- alert_items: Einzelne Alerts/Artikel
- alert_rules: Filterregeln
- alert_profiles: Nutzer-Profile für Relevanz-Scoring
Revision ID: 008
Revises: 007
Create Date: 2026-01-15 21:00:00
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '008'
down_revision: Union[str, None] = '007'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
# Enum-Typen
alert_source_enum = postgresql.ENUM(
'google_alerts_rss',
'google_alerts_email',
'rss_feed',
'webhook',
'manual',
name='alertsourceenum',
create_type=False
)
alert_status_enum = postgresql.ENUM(
'new',
'processed',
'duplicate',
'scored',
'reviewed',
'archived',
name='alertstatusenum',
create_type=False
)
relevance_decision_enum = postgresql.ENUM(
'KEEP',
'DROP',
'REVIEW',
name='relevancedecisionenum',
create_type=False
)
feed_type_enum = postgresql.ENUM(
'rss',
'email',
'webhook',
name='feedtypeenum',
create_type=False
)
rule_action_enum = postgresql.ENUM(
'keep',
'drop',
'tag',
'email',
'webhook',
'slack',
name='ruleactionenum',
create_type=False
)
def upgrade() -> None:
    """Create the alert_* tables and their supporting enum types/indexes."""
    # Create the enum types first; checkfirst keeps this idempotent.
    alert_source_enum.create(op.get_bind(), checkfirst=True)
    alert_status_enum.create(op.get_bind(), checkfirst=True)
    relevance_decision_enum.create(op.get_bind(), checkfirst=True)
    feed_type_enum.create(op.get_bind(), checkfirst=True)
    rule_action_enum.create(op.get_bind(), checkfirst=True)
    # alert_topics table (one row per feed source)
    op.create_table(
        'alert_topics',
        sa.Column('id', sa.String(36), primary_key=True),
        sa.Column('user_id', sa.String(36), nullable=True, index=True),
        sa.Column('name', sa.String(255), nullable=False),
        sa.Column('description', sa.Text(), default=''),
        sa.Column('feed_url', sa.String(2000), nullable=True),
        sa.Column(
            'feed_type',
            feed_type_enum,
            nullable=False,
            server_default='rss'
        ),
        sa.Column('is_active', sa.Boolean(), default=True, index=True),
        sa.Column('fetch_interval_minutes', sa.Integer(), default=60),
        sa.Column('last_fetched_at', sa.DateTime(), nullable=True),
        sa.Column('last_fetch_error', sa.Text(), nullable=True),
        # Aggregate fetch statistics per topic.
        sa.Column('total_items_fetched', sa.Integer(), default=0),
        sa.Column('items_kept', sa.Integer(), default=0),
        sa.Column('items_dropped', sa.Integer(), default=0),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
        sa.Column(
            'updated_at',
            sa.DateTime(),
            server_default=sa.func.now(),
            onupdate=sa.func.now()
        ),
    )
    # alert_items table (individual fetched articles, cascade on topic delete)
    op.create_table(
        'alert_items',
        sa.Column('id', sa.String(36), primary_key=True),
        sa.Column(
            'topic_id',
            sa.String(36),
            sa.ForeignKey('alert_topics.id', ondelete='CASCADE'),
            nullable=False,
            index=True
        ),
        sa.Column('title', sa.Text(), nullable=False),
        sa.Column('url', sa.String(2000), nullable=False),
        sa.Column('snippet', sa.Text(), default=''),
        sa.Column('article_text', sa.Text(), nullable=True),
        sa.Column('lang', sa.String(10), default='de'),
        sa.Column('published_at', sa.DateTime(), nullable=True, index=True),
        sa.Column('fetched_at', sa.DateTime(), server_default=sa.func.now(), index=True),
        sa.Column('processed_at', sa.DateTime(), nullable=True),
        sa.Column(
            'source',
            alert_source_enum,
            nullable=False,
            server_default='google_alerts_rss'
        ),
        # Deduplication: url_hash is the unique key; content_hash and
        # canonical_url support near-duplicate detection.
        sa.Column('url_hash', sa.String(64), unique=True, nullable=False, index=True),
        sa.Column('content_hash', sa.String(64), nullable=True),
        sa.Column('canonical_url', sa.String(2000), nullable=True),
        sa.Column(
            'status',
            alert_status_enum,
            nullable=False,
            server_default='new',
            index=True
        ),
        sa.Column('cluster_id', sa.String(36), nullable=True),
        # Relevance scoring output.
        sa.Column('relevance_score', sa.Float(), nullable=True),
        sa.Column('relevance_decision', relevance_decision_enum, nullable=True, index=True),
        sa.Column('relevance_reasons', sa.JSON(), default=list),
        sa.Column('relevance_summary', sa.Text(), nullable=True),
        sa.Column('scored_by_model', sa.String(100), nullable=True),
        sa.Column('scored_at', sa.DateTime(), nullable=True),
        # Manual user feedback.
        sa.Column('user_marked_relevant', sa.Boolean(), nullable=True),
        sa.Column('user_tags', sa.JSON(), default=list),
        sa.Column('user_notes', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
        sa.Column(
            'updated_at',
            sa.DateTime(),
            server_default=sa.func.now(),
            onupdate=sa.func.now()
        ),
    )
    # Composite indexes for the common alert_items queries.
    op.create_index(
        'ix_alert_items_topic_status',
        'alert_items',
        ['topic_id', 'status']
    )
    op.create_index(
        'ix_alert_items_topic_decision',
        'alert_items',
        ['topic_id', 'relevance_decision']
    )
    # alert_rules table (filter rules, optionally scoped to a topic)
    op.create_table(
        'alert_rules',
        sa.Column('id', sa.String(36), primary_key=True),
        sa.Column(
            'topic_id',
            sa.String(36),
            sa.ForeignKey('alert_topics.id', ondelete='CASCADE'),
            nullable=True,
            index=True
        ),
        sa.Column('user_id', sa.String(36), nullable=True, index=True),
        sa.Column('name', sa.String(255), nullable=False),
        sa.Column('description', sa.Text(), default=''),
        sa.Column('conditions', sa.JSON(), nullable=False, default=list),
        sa.Column(
            'action_type',
            rule_action_enum,
            nullable=False,
            server_default='keep'
        ),
        sa.Column('action_config', sa.JSON(), default=dict),
        sa.Column('priority', sa.Integer(), default=0, index=True),
        sa.Column('is_active', sa.Boolean(), default=True, index=True),
        sa.Column('match_count', sa.Integer(), default=0),
        sa.Column('last_matched_at', sa.DateTime(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
        sa.Column(
            'updated_at',
            sa.DateTime(),
            server_default=sa.func.now(),
            onupdate=sa.func.now()
        ),
    )
    # alert_profiles table (per-user relevance-scoring profile)
    op.create_table(
        'alert_profiles',
        sa.Column('id', sa.String(36), primary_key=True),
        sa.Column('user_id', sa.String(36), unique=True, nullable=True, index=True),
        sa.Column('name', sa.String(255), default='Default'),
        sa.Column('priorities', sa.JSON(), default=list),
        sa.Column('exclusions', sa.JSON(), default=list),
        sa.Column('positive_examples', sa.JSON(), default=list),
        sa.Column('negative_examples', sa.JSON(), default=list),
        sa.Column('policies', sa.JSON(), default=dict),
        # Aggregate scoring statistics.
        sa.Column('total_scored', sa.Integer(), default=0),
        sa.Column('total_kept', sa.Integer(), default=0),
        sa.Column('total_dropped', sa.Integer(), default=0),
        sa.Column('accuracy_estimate', sa.Float(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
        sa.Column(
            'updated_at',
            sa.DateTime(),
            server_default=sa.func.now(),
            onupdate=sa.func.now()
        ),
    )
def downgrade() -> None:
    """Drop the alert tables (children before parents) and the enum types."""
    op.drop_table('alert_profiles')
    op.drop_table('alert_rules')
    # Explicit indexes on alert_items go before the table itself.
    for index_name in ('ix_alert_items_topic_decision', 'ix_alert_items_topic_status'):
        op.drop_index(index_name, 'alert_items')
    op.drop_table('alert_items')
    op.drop_table('alert_topics')
    # Enum types can only go once no column uses them any longer.
    for enum_type in (
        rule_action_enum,
        feed_type_enum,
        relevance_decision_enum,
        alert_status_enum,
        alert_source_enum,
    ):
        enum_type.drop(op.get_bind(), checkfirst=True)

View File

@@ -0,0 +1,143 @@
"""Test Registry Tables - Persistente Test-Speicherung
Revision ID: 009
Revises: 008
Create Date: 2026-02-02 10:00:00.000000
Erstellt Tabellen fuer:
- test_runs: Historie aller Test-Durchlaeufe
- test_results: Einzelne Test-Ergebnisse pro Run
- failed_tests_backlog: Persistenter Backlog fuer zu fixende Tests
- test_fixes_history: Historie aller Fix-Versuche
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers
revision = '009'
down_revision = '008'
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the test-registry tables, their indexes, and the stats table."""
    # test_runs: every test run is persisted.
    op.create_table(
        'test_runs',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('run_id', sa.String(50), unique=True, nullable=False),
        sa.Column('service', sa.String(100), nullable=False),
        sa.Column('framework', sa.String(50), nullable=False),
        sa.Column('started_at', sa.DateTime(), nullable=False),
        sa.Column('completed_at', sa.DateTime(), nullable=True),
        sa.Column('status', sa.String(20), nullable=False),  # queued, running, completed, failed
        sa.Column('total_tests', sa.Integer(), default=0),
        sa.Column('passed_tests', sa.Integer(), default=0),
        sa.Column('failed_tests', sa.Integer(), default=0),
        sa.Column('skipped_tests', sa.Integer(), default=0),
        sa.Column('duration_seconds', sa.Float(), default=0),
        sa.Column('git_commit', sa.String(40), nullable=True),
        sa.Column('git_branch', sa.String(100), nullable=True),
        sa.Column('triggered_by', sa.String(50), nullable=True),  # manual, ci, schedule
        sa.Column('output', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
        sa.PrimaryKeyConstraint('id')
    )
    # Indexes for test_runs.
    op.create_index('idx_test_runs_service', 'test_runs', ['service'])
    op.create_index('idx_test_runs_started_at', 'test_runs', ['started_at'])
    op.create_index('idx_test_runs_run_id', 'test_runs', ['run_id'])
    # test_results: individual test results per run (cascade on run delete).
    op.create_table(
        'test_results',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('run_id', sa.String(50), sa.ForeignKey('test_runs.run_id', ondelete='CASCADE'), nullable=False),
        sa.Column('test_name', sa.String(500), nullable=False),
        sa.Column('test_file', sa.String(500), nullable=True),
        sa.Column('line_number', sa.Integer(), nullable=True),
        sa.Column('status', sa.String(20), nullable=False),  # passed, failed, skipped, error
        sa.Column('duration_ms', sa.Float(), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('error_type', sa.String(100), nullable=True),
        sa.Column('output', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
        sa.PrimaryKeyConstraint('id')
    )
    # Indexes for test_results.
    op.create_index('idx_test_results_run_id', 'test_results', ['run_id'])
    op.create_index('idx_test_results_status', 'test_results', ['status'])
    op.create_index('idx_test_results_test_name', 'test_results', ['test_name'])
    # failed_tests_backlog: persistent backlog of tests awaiting fixes,
    # unique per (test_name, service).
    op.create_table(
        'failed_tests_backlog',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('test_name', sa.String(500), nullable=False),
        sa.Column('test_file', sa.String(500), nullable=True),
        sa.Column('service', sa.String(100), nullable=False),
        sa.Column('framework', sa.String(50), nullable=True),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('error_type', sa.String(100), nullable=True),
        sa.Column('first_failed_at', sa.DateTime(), nullable=False),
        sa.Column('last_failed_at', sa.DateTime(), nullable=False),
        sa.Column('failure_count', sa.Integer(), default=1),
        sa.Column('status', sa.String(30), default='open'),  # open, in_progress, fixed, wont_fix, flaky
        sa.Column('priority', sa.String(20), default='medium'),  # critical, high, medium, low
        sa.Column('assigned_to', sa.String(100), nullable=True),
        sa.Column('fix_suggestion', sa.Text(), nullable=True),
        sa.Column('notes', sa.Text(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now(), onupdate=sa.func.now()),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('test_name', 'service', name='uq_backlog_test_service')
    )
    # Indexes for failed_tests_backlog.
    op.create_index('idx_backlog_status', 'failed_tests_backlog', ['status'])
    op.create_index('idx_backlog_service', 'failed_tests_backlog', ['service'])
    op.create_index('idx_backlog_priority', 'failed_tests_backlog', ['priority'])
    # test_fixes_history: history of all fix attempts (cascade on backlog delete).
    op.create_table(
        'test_fixes_history',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('backlog_id', sa.Integer(), sa.ForeignKey('failed_tests_backlog.id', ondelete='CASCADE'), nullable=False),
        sa.Column('fix_type', sa.String(50), nullable=True),  # manual, auto_claude, auto_script
        sa.Column('fix_description', sa.Text(), nullable=True),
        sa.Column('commit_hash', sa.String(40), nullable=True),
        sa.Column('success', sa.Boolean(), nullable=True),
        sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()),
        sa.PrimaryKeyConstraint('id')
    )
    # Index for test_fixes_history.
    op.create_index('idx_fixes_backlog_id', 'test_fixes_history', ['backlog_id'])
    # Aggregated per-service test stats (for fast dashboard queries).
    op.create_table(
        'test_service_stats',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('service', sa.String(100), unique=True, nullable=False),
        sa.Column('total_tests', sa.Integer(), default=0),
        sa.Column('passed_tests', sa.Integer(), default=0),
        sa.Column('failed_tests', sa.Integer(), default=0),
        sa.Column('skipped_tests', sa.Integer(), default=0),
        sa.Column('pass_rate', sa.Float(), default=0.0),
        sa.Column('last_run_id', sa.String(50), nullable=True),
        sa.Column('last_run_at', sa.DateTime(), nullable=True),
        sa.Column('last_status', sa.String(20), nullable=True),
        sa.Column('updated_at', sa.DateTime(), server_default=sa.func.now(), onupdate=sa.func.now()),
        sa.PrimaryKeyConstraint('id')
    )
def downgrade() -> None:
    """Drop all test-registry tables in reverse dependency order."""
    # test_fixes_history and test_results hold FKs into the backlog/runs
    # tables, so children are removed before their parents.
    for table_name in (
        'test_service_stats',
        'test_fixes_history',
        'failed_tests_backlog',
        'test_results',
        'test_runs',
    ):
        op.drop_table(table_name)

View File