Add custom word entry + language pair support for learning units
Some checks failed
CI / go-lint (push) Has been skipped
CI / python-lint (push) Has been skipped
CI / nodejs-lint (push) Has been skipped
CI / test-go-school (push) Successful in 31s
CI / test-go-edu-search (push) Successful in 31s
CI / test-python-klausur (push) Failing after 2m29s
CI / test-python-agent-core (push) Successful in 24s
CI / test-nodejs-website (push) Successful in 22s
Some checks failed
CI / go-lint (push) Has been skipped
CI / python-lint (push) Has been skipped
CI / nodejs-lint (push) Has been skipped
CI / test-go-school (push) Successful in 31s
CI / test-go-edu-search (push) Successful in 31s
CI / test-python-klausur (push) Failing after 2m29s
CI / test-python-agent-core (push) Successful in 24s
CI / test-nodejs-website (push) Successful in 22s
- New UnitBuilder component with language pair selector (DE⇄EN, ES, FR, etc.) - Manual word entry form with auto-suggest from Kaikki dictionary (6M words) - "No results" prompt to add multi-word terms (e.g. "schottisches Hochland") - New backend endpoint GET /vocabulary/lookup-translation (any→any via EN hub) - Updated POST /vocabulary/units: accepts custom_words + source_lang/target_lang - Split unit endpoints into vocabulary/unit_api.py (500 LOC budget) Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -22,11 +22,6 @@ from .db import (
|
||||
get_all_pos,
|
||||
VocabularyWord,
|
||||
)
|
||||
from units.learning import (
|
||||
LearningUnitCreate,
|
||||
create_learning_unit,
|
||||
get_learning_unit,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -239,130 +234,7 @@ async def api_tts(text: str = Query("", min_length=1), lang: str = Query("de")):
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class CreateUnitFromWordsPayload(BaseModel):
    """Request body for ``POST /units``: build a learning unit from word IDs."""

    # Display title of the new learning unit.
    title: str
    # IDs of vocabulary words to include; the handler rejects an empty list.
    word_ids: List[str]
    # Optional grade level (e.g. "5-8"); the handler falls back to "5-8".
    grade: Optional[str] = None
    # Unit language code; defaults to German.
    language: Optional[str] = "de"
def _vocab_qa_item(index: int, w) -> dict:
    """Map one vocabulary word to a fresh Leitner QA item (flashcards/type trainer)."""
    return {
        "id": f"qa_{index + 1}",
        "question": w.english,
        "answer": w.german,
        "question_type": "knowledge",
        "key_terms": [w.english],
        "difficulty": w.difficulty,
        "source_hint": w.part_of_speech,
        # Fresh Leitner scheduling state: box 0, never reviewed.
        "leitner_box": 0,
        "correct_count": 0,
        "incorrect_count": 0,
        "last_seen": None,
        "next_review": None,
        # Extra fields for enhanced flashcards
        "ipa_en": w.ipa_en,
        "ipa_de": w.ipa_de,
        "syllables_en": w.syllables_en,
        "syllables_de": w.syllables_de,
        "example_en": w.example_en,
        "example_de": w.example_de,
        "image_url": w.image_url,
        "audio_url_en": w.audio_url_en,
        "audio_url_de": w.audio_url_de,
        "part_of_speech": w.part_of_speech,
        "translations": w.translations,
    }


@router.post("/units")
async def api_create_unit_from_words(payload: CreateUnitFromWordsPayload):
    """Create a learning unit from selected vocabulary word IDs.

    Fetches full word details, creates a LearningUnit in the
    learning_units system, and stores the vocabulary data as two JSON
    files next to the unit (``<unit_id>_vocab.json`` for generators and
    ``<unit_id>_qa.json`` for flashcards/type trainer).

    Raises:
        HTTPException: 400 if no word IDs were supplied; 404 if none of
            the supplied IDs resolve to a stored word.
    """
    import os

    if not payload.word_ids:
        raise HTTPException(status_code=400, detail="Keine Woerter ausgewaehlt")

    # Fetch all selected words; IDs that do not resolve are silently skipped.
    words = []
    for wid in payload.word_ids:
        word = await get_word(wid)
        if word:
            words.append(word)

    if not words:
        raise HTTPException(status_code=404, detail="Keine der Woerter gefunden")

    # Create the learning-unit shell in the learning_units system.
    lu = create_learning_unit(LearningUnitCreate(
        title=payload.title,
        topic="Vocabulary",
        grade_level=payload.grade or "5-8",
        language=payload.language or "de",
        status="raw",
    ))

    analysis_dir = os.path.expanduser("~/Arbeitsblaetter/Lerneinheiten")
    os.makedirs(analysis_dir, exist_ok=True)

    def _dump_json(filename: str, obj) -> None:
        # UTF-8 JSON without ASCII escaping so German umlauts stay readable.
        with open(os.path.join(analysis_dir, filename), "w", encoding="utf-8") as f:
            json.dump(obj, f, ensure_ascii=False, indent=2)

    # Save vocabulary data as analysis JSON for generators.
    _dump_json(
        f"{lu.id}_vocab.json",
        {"words": [w.to_dict() for w in words], "title": payload.title},
    )

    # Also save as QA items for flashcards/type trainer.
    qa_items = [_vocab_qa_item(i, w) for i, w in enumerate(words)]
    _dump_json(f"{lu.id}_qa.json", {
        "qa_items": qa_items,
        "metadata": {
            "subject": "English Vocabulary",
            "grade_level": payload.grade or "5-8",
            "source_title": payload.title,
            "total_questions": len(qa_items),
        },
    })

    # Auto-enrich words with images (Wikipedia + emoji fallback).
    # Deliberately best-effort: unit creation must not fail on enrichment errors.
    try:
        from services.image_service import enrich_words_with_images
        await enrich_words_with_images(payload.word_ids)
    except Exception as e:
        logger.warning(f"Image enrichment failed (non-critical): {e}")

    logger.info(f"Created vocab unit {lu.id} with {len(words)} words")

    return {
        "unit_id": lu.id,
        "title": payload.title,
        "word_count": len(words),
        "status": "created",
    }
@router.get("/units/{unit_id}")
async def api_get_unit_words(unit_id: str):
    """Return the stored title and word list for a learning unit.

    Reads ``<unit_id>_vocab.json`` from the Lerneinheiten directory;
    responds 404 when no vocab file exists for the given unit.
    """
    import os

    base_dir = os.path.expanduser("~/Arbeitsblaetter/Lerneinheiten")
    vocab_path = os.path.join(base_dir, f"{unit_id}_vocab.json")

    if not os.path.exists(vocab_path):
        raise HTTPException(status_code=404, detail="Unit nicht gefunden")

    with open(vocab_path, "r", encoding="utf-8") as fh:
        stored = json.load(fh)

    return {
        "unit_id": unit_id,
        "title": stored.get("title", ""),
        "words": stored.get("words", []),
    }
# Unit creation and translation lookup moved to vocabulary/unit_api.py
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
Reference in New Issue
Block a user