fix: Restore all files lost during destructive rebase
A previous `git pull --rebase origin main` dropped 177 local commits,
losing 3400+ files across admin-v2, backend, studio-v2, website,
klausur-service, and many other services. The partial restore attempt
(660295e2) only recovered some files.
This commit restores all missing files from pre-rebase ref 98933f5e
while preserving post-rebase additions (night-scheduler, night-mode UI,
NightModeWidget dashboard integration).
Restored features include:
- AI Module Sidebar (FAB), OCR Labeling, OCR Compare
- GPU Dashboard, RAG Pipeline, Magic Help
- Klausur-Korrektur (8 files), Abitur-Archiv (5+ files)
- Companion, Zeugnisse-Crawler, Screen Flow
- Full backend, studio-v2, website, klausur-service
- All compliance SDKs, agent-core, voice-service
- CI/CD configs, documentation, scripts
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
246
admin-v2/app/api/education/abitur-archiv/route.ts
Normal file
246
admin-v2/app/api/education/abitur-archiv/route.ts
Normal file
@@ -0,0 +1,246 @@
|
||||
import { NextRequest, NextResponse } from 'next/server'
|
||||
|
||||
/**
|
||||
* Abitur-Archiv API Route
|
||||
* Extends abitur-docs with theme search and enhanced filtering
|
||||
*/
|
||||
|
||||
// Base URL of the backend service; falls back to the local dev server when unset.
const BACKEND_URL = process.env.BACKEND_URL || 'http://localhost:8000'
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
try {
|
||||
const { searchParams } = new URL(request.url)
|
||||
|
||||
// Check for theme/semantic search
|
||||
const thema = searchParams.get('thema')
|
||||
|
||||
if (thema) {
|
||||
// Use semantic search endpoint
|
||||
return await handleSemanticSearch(thema, searchParams)
|
||||
}
|
||||
|
||||
// Forward all query params to backend abitur-docs
|
||||
const queryString = searchParams.toString()
|
||||
const url = `${BACKEND_URL}/api/abitur-docs/${queryString ? `?${queryString}` : ''}`
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
// Return mock data for development if backend is not available
|
||||
if (response.status === 404 || response.status === 502) {
|
||||
return NextResponse.json(getMockDocuments(searchParams))
|
||||
}
|
||||
throw new Error(`Backend responded with ${response.status}`)
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
// If backend returns empty, use mock data for demo
|
||||
if (data.documents && Array.isArray(data.documents) && data.documents.length === 0 && data.total === 0) {
|
||||
return NextResponse.json(getMockDocuments(searchParams))
|
||||
}
|
||||
|
||||
// Enhance response with theme information
|
||||
return NextResponse.json({
|
||||
...data,
|
||||
themes: extractThemes(data.documents || [])
|
||||
})
|
||||
} catch (error) {
|
||||
console.error('Abitur-Archiv error:', error)
|
||||
return NextResponse.json(getMockDocuments(new URL(request.url).searchParams))
|
||||
}
|
||||
}
|
||||
|
||||
async function handleSemanticSearch(thema: string, searchParams: URLSearchParams) {
|
||||
try {
|
||||
// Try to call RAG search endpoint
|
||||
const url = `${BACKEND_URL}/api/rag/search`
|
||||
const response = await fetch(url, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
query: thema,
|
||||
collection: 'abitur_documents',
|
||||
limit: parseInt(searchParams.get('limit') || '20'),
|
||||
filters: {
|
||||
fach: searchParams.get('fach') || undefined,
|
||||
jahr: searchParams.get('jahr') ? parseInt(searchParams.get('jahr')!) : undefined,
|
||||
bundesland: searchParams.get('bundesland') || undefined,
|
||||
niveau: searchParams.get('niveau') || undefined,
|
||||
typ: searchParams.get('typ') || undefined,
|
||||
}
|
||||
}),
|
||||
})
|
||||
|
||||
if (response.ok) {
|
||||
const data = await response.json()
|
||||
return NextResponse.json({
|
||||
documents: data.results || [],
|
||||
total: data.total || 0,
|
||||
page: 1,
|
||||
limit: parseInt(searchParams.get('limit') || '20'),
|
||||
total_pages: 1,
|
||||
search_query: thema
|
||||
})
|
||||
}
|
||||
} catch (error) {
|
||||
console.log('RAG search not available, falling back to mock')
|
||||
}
|
||||
|
||||
// Fallback to filtered mock data
|
||||
return NextResponse.json(getMockDocumentsWithTheme(thema, searchParams))
|
||||
}
|
||||
|
||||
function getMockDocuments(searchParams: URLSearchParams) {
|
||||
const page = parseInt(searchParams.get('page') || '1')
|
||||
const limit = parseInt(searchParams.get('limit') || '20')
|
||||
const fach = searchParams.get('fach')
|
||||
const jahr = searchParams.get('jahr')
|
||||
const bundesland = searchParams.get('bundesland')
|
||||
const niveau = searchParams.get('niveau')
|
||||
const typ = searchParams.get('typ')
|
||||
|
||||
// Generate mock documents
|
||||
const allDocs = generateMockDocs()
|
||||
|
||||
// Apply filters
|
||||
let filtered = allDocs
|
||||
if (fach) filtered = filtered.filter(d => d.fach === fach)
|
||||
if (jahr) filtered = filtered.filter(d => d.jahr === parseInt(jahr))
|
||||
if (bundesland) filtered = filtered.filter(d => d.bundesland === bundesland)
|
||||
if (niveau) filtered = filtered.filter(d => d.niveau === niveau)
|
||||
if (typ) filtered = filtered.filter(d => d.typ === typ)
|
||||
|
||||
// Paginate
|
||||
const start = (page - 1) * limit
|
||||
const docs = filtered.slice(start, start + limit)
|
||||
|
||||
return {
|
||||
documents: docs,
|
||||
total: filtered.length,
|
||||
page,
|
||||
limit,
|
||||
total_pages: Math.ceil(filtered.length / limit),
|
||||
themes: extractThemes(docs)
|
||||
}
|
||||
}
|
||||
|
||||
function getMockDocumentsWithTheme(thema: string, searchParams: URLSearchParams) {
|
||||
const limit = parseInt(searchParams.get('limit') || '20')
|
||||
const allDocs = generateMockDocs()
|
||||
|
||||
// Simple theme matching (in production this would be semantic search)
|
||||
const themaLower = thema.toLowerCase()
|
||||
let filtered = allDocs
|
||||
|
||||
// Match theme to aufgabentyp keywords
|
||||
if (themaLower.includes('gedicht')) {
|
||||
filtered = filtered.filter(d => d.themes?.includes('gedichtanalyse'))
|
||||
} else if (themaLower.includes('drama')) {
|
||||
filtered = filtered.filter(d => d.themes?.includes('dramenanalyse'))
|
||||
} else if (themaLower.includes('prosa') || themaLower.includes('roman')) {
|
||||
filtered = filtered.filter(d => d.themes?.includes('prosaanalyse'))
|
||||
} else if (themaLower.includes('eroerterung')) {
|
||||
filtered = filtered.filter(d => d.themes?.includes('eroerterung'))
|
||||
} else if (themaLower.includes('text') || themaLower.includes('analyse')) {
|
||||
filtered = filtered.filter(d => d.themes?.includes('textanalyse'))
|
||||
}
|
||||
|
||||
// Apply additional filters
|
||||
const fach = searchParams.get('fach')
|
||||
const jahr = searchParams.get('jahr')
|
||||
if (fach) filtered = filtered.filter(d => d.fach === fach)
|
||||
if (jahr) filtered = filtered.filter(d => d.jahr === parseInt(jahr))
|
||||
|
||||
return {
|
||||
documents: filtered.slice(0, limit),
|
||||
total: filtered.length,
|
||||
page: 1,
|
||||
limit,
|
||||
total_pages: Math.ceil(filtered.length / limit),
|
||||
search_query: thema,
|
||||
themes: extractThemes(filtered)
|
||||
}
|
||||
}
|
||||
|
||||
function generateMockDocs() {
|
||||
const faecher = ['deutsch', 'englisch']
|
||||
const jahre = [2021, 2022, 2023, 2024, 2025]
|
||||
const niveaus: Array<'eA' | 'gA'> = ['eA', 'gA']
|
||||
const typen: Array<'aufgabe' | 'erwartungshorizont'> = ['aufgabe', 'erwartungshorizont']
|
||||
const aufgabentypen = [
|
||||
{ nummer: 'I', themes: ['textanalyse', 'sachtext'] },
|
||||
{ nummer: 'II', themes: ['gedichtanalyse', 'lyrik'] },
|
||||
{ nummer: 'III', themes: ['prosaanalyse', 'epik'] },
|
||||
]
|
||||
|
||||
const docs = []
|
||||
let id = 1
|
||||
|
||||
for (const jahr of jahre) {
|
||||
for (const fach of faecher) {
|
||||
for (const niveau of niveaus) {
|
||||
for (const aufgabe of aufgabentypen) {
|
||||
for (const typ of typen) {
|
||||
const suffix = typ === 'erwartungshorizont' ? '_EWH' : ''
|
||||
const dateiname = `${jahr}_${capitalize(fach)}_${niveau}_Aufgabe_${aufgabe.nummer}${suffix}.pdf`
|
||||
|
||||
docs.push({
|
||||
id: `doc-${id++}`,
|
||||
dateiname,
|
||||
original_dateiname: dateiname,
|
||||
bundesland: 'niedersachsen',
|
||||
fach,
|
||||
jahr,
|
||||
niveau,
|
||||
typ,
|
||||
aufgaben_nummer: aufgabe.nummer,
|
||||
themes: aufgabe.themes,
|
||||
status: 'indexed' as const,
|
||||
confidence: 0.92 + Math.random() * 0.08,
|
||||
file_path: `/api/education/abitur-archiv/file/${dateiname}`,
|
||||
file_size: Math.floor(Math.random() * 500000) + 100000,
|
||||
indexed: true,
|
||||
vector_ids: [`vec-${id}-1`, `vec-${id}-2`],
|
||||
created_at: new Date(Date.now() - Math.random() * 365 * 24 * 60 * 60 * 1000).toISOString(),
|
||||
updated_at: new Date().toISOString(),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return docs
|
||||
}
|
||||
|
||||
function extractThemes(documents: any[]) {
|
||||
const themeCounts = new Map<string, number>()
|
||||
|
||||
for (const doc of documents) {
|
||||
const themes = doc.themes || []
|
||||
for (const theme of themes) {
|
||||
themeCounts.set(theme, (themeCounts.get(theme) || 0) + 1)
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(themeCounts.entries())
|
||||
.map(([label, count]) => ({
|
||||
label: capitalize(label),
|
||||
count,
|
||||
aufgabentyp: label,
|
||||
}))
|
||||
.sort((a, b) => b.count - a.count)
|
||||
.slice(0, 10)
|
||||
}
|
||||
|
||||
function capitalize(str: string): string {
|
||||
return str.charAt(0).toUpperCase() + str.slice(1)
|
||||
}
|
||||
105
admin-v2/app/api/education/abitur-archiv/suggest/route.ts
Normal file
105
admin-v2/app/api/education/abitur-archiv/suggest/route.ts
Normal file
@@ -0,0 +1,105 @@
|
||||
import { NextRequest, NextResponse } from 'next/server'
|
||||
|
||||
/**
|
||||
* Theme Suggestions API for Abitur-Archiv
|
||||
* Returns autocomplete suggestions for semantic search
|
||||
*/
|
||||
|
||||
// Base URL of the backend service; falls back to the local dev server when unset.
const BACKEND_URL = process.env.BACKEND_URL || 'http://localhost:8000'
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
try {
|
||||
const { searchParams } = new URL(request.url)
|
||||
const query = searchParams.get('q') || ''
|
||||
|
||||
if (query.length < 2) {
|
||||
return NextResponse.json({ suggestions: [], query })
|
||||
}
|
||||
|
||||
// Try to get suggestions from backend
|
||||
try {
|
||||
const url = `${BACKEND_URL}/api/abitur-archiv/suggest?q=${encodeURIComponent(query)}`
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
})
|
||||
|
||||
if (response.ok) {
|
||||
const data = await response.json()
|
||||
return NextResponse.json(data)
|
||||
}
|
||||
} catch (error) {
|
||||
console.log('Backend suggest not available, using static suggestions')
|
||||
}
|
||||
|
||||
// Fallback to static suggestions
|
||||
return NextResponse.json({
|
||||
suggestions: getStaticSuggestions(query),
|
||||
query
|
||||
})
|
||||
} catch (error) {
|
||||
console.error('Suggest error:', error)
|
||||
return NextResponse.json({ suggestions: [], query: '' })
|
||||
}
|
||||
}
|
||||
|
||||
function getStaticSuggestions(query: string) {
|
||||
const allSuggestions = [
|
||||
// Textanalyse
|
||||
{ label: 'Textanalyse', count: 45, aufgabentyp: 'textanalyse', kategorie: 'Analyse' },
|
||||
{ label: 'Textanalyse Sachtext', count: 28, aufgabentyp: 'textanalyse_pragmatisch', kategorie: 'Analyse' },
|
||||
{ label: 'Textanalyse Rede', count: 12, aufgabentyp: 'textanalyse_rede', kategorie: 'Analyse' },
|
||||
{ label: 'Textanalyse Kommentar', count: 8, aufgabentyp: 'textanalyse_kommentar', kategorie: 'Analyse' },
|
||||
|
||||
// Gedichtanalyse
|
||||
{ label: 'Gedichtanalyse', count: 38, aufgabentyp: 'gedichtanalyse', kategorie: 'Lyrik' },
|
||||
{ label: 'Gedichtanalyse Romantik', count: 15, aufgabentyp: 'gedichtanalyse', zeitraum: 'Romantik', kategorie: 'Lyrik' },
|
||||
{ label: 'Gedichtanalyse Expressionismus', count: 12, aufgabentyp: 'gedichtanalyse', zeitraum: 'Expressionismus', kategorie: 'Lyrik' },
|
||||
{ label: 'Gedichtanalyse Barock', count: 8, aufgabentyp: 'gedichtanalyse', zeitraum: 'Barock', kategorie: 'Lyrik' },
|
||||
{ label: 'Gedichtanalyse Klassik', count: 10, aufgabentyp: 'gedichtanalyse', zeitraum: 'Klassik', kategorie: 'Lyrik' },
|
||||
{ label: 'Gedichtanalyse Moderne', count: 14, aufgabentyp: 'gedichtanalyse', zeitraum: 'Moderne', kategorie: 'Lyrik' },
|
||||
{ label: 'Gedichtvergleich', count: 18, aufgabentyp: 'gedichtvergleich', kategorie: 'Lyrik' },
|
||||
|
||||
// Dramenanalyse
|
||||
{ label: 'Dramenanalyse', count: 28, aufgabentyp: 'dramenanalyse', kategorie: 'Drama' },
|
||||
{ label: 'Dramenanalyse Faust', count: 14, aufgabentyp: 'dramenanalyse', kategorie: 'Drama' },
|
||||
{ label: 'Dramenanalyse Woyzeck', count: 8, aufgabentyp: 'dramenanalyse', kategorie: 'Drama' },
|
||||
{ label: 'Episches Theater Brecht', count: 10, aufgabentyp: 'dramenanalyse', kategorie: 'Drama' },
|
||||
{ label: 'Szenenanalyse', count: 22, aufgabentyp: 'szenenanalyse', kategorie: 'Drama' },
|
||||
|
||||
// Prosaanalyse
|
||||
{ label: 'Prosaanalyse', count: 25, aufgabentyp: 'prosaanalyse', kategorie: 'Epik' },
|
||||
{ label: 'Romananalyse', count: 18, aufgabentyp: 'prosaanalyse', kategorie: 'Epik' },
|
||||
{ label: 'Kurzgeschichte', count: 20, aufgabentyp: 'prosaanalyse', kategorie: 'Epik' },
|
||||
{ label: 'Novelle', count: 12, aufgabentyp: 'prosaanalyse', kategorie: 'Epik' },
|
||||
{ label: 'Erzaehlung', count: 15, aufgabentyp: 'prosaanalyse', kategorie: 'Epik' },
|
||||
|
||||
// Eroerterung
|
||||
{ label: 'Eroerterung', count: 32, aufgabentyp: 'eroerterung', kategorie: 'Argumentation' },
|
||||
{ label: 'Eroerterung textgebunden', count: 18, aufgabentyp: 'eroerterung_textgebunden', kategorie: 'Argumentation' },
|
||||
{ label: 'Eroerterung materialgestuetzt', count: 14, aufgabentyp: 'eroerterung_materialgestuetzt', kategorie: 'Argumentation' },
|
||||
{ label: 'Stellungnahme', count: 10, aufgabentyp: 'stellungnahme', kategorie: 'Argumentation' },
|
||||
|
||||
// Sprachreflexion
|
||||
{ label: 'Sprachreflexion', count: 15, aufgabentyp: 'sprachreflexion', kategorie: 'Sprache' },
|
||||
{ label: 'Sprachwandel', count: 8, aufgabentyp: 'sprachreflexion', kategorie: 'Sprache' },
|
||||
{ label: 'Sprachkritik', count: 6, aufgabentyp: 'sprachreflexion', kategorie: 'Sprache' },
|
||||
{ label: 'Kommunikation', count: 10, aufgabentyp: 'kommunikation', kategorie: 'Sprache' },
|
||||
|
||||
// Vergleich
|
||||
{ label: 'Vergleichende Analyse', count: 20, aufgabentyp: 'vergleich', kategorie: 'Vergleich' },
|
||||
{ label: 'Epochenvergleich', count: 12, aufgabentyp: 'epochenvergleich', kategorie: 'Vergleich' },
|
||||
]
|
||||
|
||||
const queryLower = query.toLowerCase()
|
||||
|
||||
// Filter suggestions based on query
|
||||
return allSuggestions
|
||||
.filter(s =>
|
||||
s.label.toLowerCase().includes(queryLower) ||
|
||||
s.aufgabentyp.toLowerCase().includes(queryLower) ||
|
||||
(s.zeitraum && s.zeitraum.toLowerCase().includes(queryLower)) ||
|
||||
s.kategorie.toLowerCase().includes(queryLower)
|
||||
)
|
||||
.slice(0, 8)
|
||||
}
|
||||
139
admin-v2/app/api/education/abitur-docs/route.ts
Normal file
139
admin-v2/app/api/education/abitur-docs/route.ts
Normal file
@@ -0,0 +1,139 @@
|
||||
import { NextRequest, NextResponse } from 'next/server'
|
||||
|
||||
/**
|
||||
* Proxy to backend /api/abitur-docs
|
||||
* Lists and manages Abitur documents (NiBiS, etc.)
|
||||
*/
|
||||
|
||||
// Base URL of the backend service; falls back to the local dev server when unset.
const BACKEND_URL = process.env.BACKEND_URL || 'http://localhost:8000'
|
||||
|
||||
export async function GET(request: NextRequest) {
|
||||
try {
|
||||
const { searchParams } = new URL(request.url)
|
||||
|
||||
// Forward all query params to backend
|
||||
const queryString = searchParams.toString()
|
||||
const url = `${BACKEND_URL}/api/abitur-docs/${queryString ? `?${queryString}` : ''}`
|
||||
|
||||
const response = await fetch(url, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
// Return mock data for development if backend is not available
|
||||
if (response.status === 404 || response.status === 502) {
|
||||
return NextResponse.json(getMockDocuments(searchParams))
|
||||
}
|
||||
throw new Error(`Backend responded with ${response.status}`)
|
||||
}
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
// If backend returns empty array, use mock data for demo purposes
|
||||
// (Backend uses in-memory storage which is lost on restart)
|
||||
if (Array.isArray(data) && data.length === 0) {
|
||||
console.log('Backend returned empty array, using mock data')
|
||||
return NextResponse.json(getMockDocuments(searchParams))
|
||||
}
|
||||
|
||||
// Handle paginated response with empty documents
|
||||
if (data.documents && Array.isArray(data.documents) && data.documents.length === 0 && data.total === 0) {
|
||||
console.log('Backend returned empty documents, using mock data')
|
||||
return NextResponse.json(getMockDocuments(searchParams))
|
||||
}
|
||||
|
||||
return NextResponse.json(data)
|
||||
} catch (error) {
|
||||
console.error('Abitur docs list error:', error)
|
||||
// Return mock data for development
|
||||
return NextResponse.json(getMockDocuments(new URL(request.url).searchParams))
|
||||
}
|
||||
}
|
||||
|
||||
function getMockDocuments(searchParams: URLSearchParams) {
|
||||
const page = parseInt(searchParams.get('page') || '1')
|
||||
const limit = parseInt(searchParams.get('limit') || '20')
|
||||
const fach = searchParams.get('fach')
|
||||
const jahr = searchParams.get('jahr')
|
||||
const bundesland = searchParams.get('bundesland')
|
||||
|
||||
// Generate mock documents
|
||||
const allDocs = generateMockDocs()
|
||||
|
||||
// Apply filters
|
||||
let filtered = allDocs
|
||||
if (fach) {
|
||||
filtered = filtered.filter(d => d.fach === fach)
|
||||
}
|
||||
if (jahr) {
|
||||
filtered = filtered.filter(d => d.jahr === parseInt(jahr))
|
||||
}
|
||||
if (bundesland) {
|
||||
filtered = filtered.filter(d => d.bundesland === bundesland)
|
||||
}
|
||||
|
||||
// Paginate
|
||||
const start = (page - 1) * limit
|
||||
const docs = filtered.slice(start, start + limit)
|
||||
|
||||
return {
|
||||
documents: docs,
|
||||
total: filtered.length,
|
||||
page,
|
||||
limit,
|
||||
total_pages: Math.ceil(filtered.length / limit),
|
||||
}
|
||||
}
|
||||
|
||||
function generateMockDocs() {
|
||||
const faecher = ['deutsch', 'mathematik', 'englisch', 'biologie', 'physik', 'chemie', 'geschichte']
|
||||
const jahre = [2024, 2025]
|
||||
const niveaus = ['eA', 'gA']
|
||||
const typen = ['aufgabe', 'erwartungshorizont']
|
||||
const nummern = ['I', 'II', 'III']
|
||||
|
||||
const docs = []
|
||||
let id = 1
|
||||
|
||||
for (const jahr of jahre) {
|
||||
for (const fach of faecher) {
|
||||
for (const niveau of niveaus) {
|
||||
for (const nummer of nummern) {
|
||||
for (const typ of typen) {
|
||||
const suffix = typ === 'erwartungshorizont' ? '_EWH' : ''
|
||||
const dateiname = `${jahr}_${capitalize(fach)}_${niveau}_${nummer}${suffix}.pdf`
|
||||
|
||||
docs.push({
|
||||
id: `doc-${id++}`,
|
||||
dateiname,
|
||||
original_dateiname: dateiname,
|
||||
bundesland: 'niedersachsen',
|
||||
fach,
|
||||
jahr,
|
||||
niveau,
|
||||
typ,
|
||||
aufgaben_nummer: nummer,
|
||||
status: 'indexed',
|
||||
confidence: 0.95,
|
||||
file_path: `/tmp/abitur-docs/${dateiname}`,
|
||||
file_size: Math.floor(Math.random() * 500000) + 100000,
|
||||
indexed: true,
|
||||
vector_ids: [`vec-${id}-1`, `vec-${id}-2`],
|
||||
created_at: new Date(Date.now() - Math.random() * 30 * 24 * 60 * 60 * 1000).toISOString(),
|
||||
updated_at: new Date().toISOString(),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return docs
|
||||
}
|
||||
|
||||
function capitalize(str: string): string {
|
||||
return str.charAt(0).toUpperCase() + str.slice(1)
|
||||
}
|
||||
Reference in New Issue
Block a user