fix: vocab_entries für column_text Sub-Sessions generieren
Some checks failed
CI / go-lint (push) Has been skipped
CI / python-lint (push) Has been skipped
CI / nodejs-lint (push) Has been skipped
CI / test-go-school (push) Successful in 32s
CI / test-go-edu-search (push) Successful in 31s
CI / test-python-klausur (push) Failing after 2m8s
CI / test-python-agent-core (push) Successful in 21s
CI / test-nodejs-website (push) Successful in 23s
Some checks failed
CI / go-lint (push) Has been skipped
CI / python-lint (push) Has been skipped
CI / nodejs-lint (push) Has been skipped
CI / test-go-school (push) Successful in 32s
CI / test-go-edu-search (push) Successful in 31s
CI / test-python-klausur (push) Failing after 2m8s
CI / test-python-agent-core (push) Successful in 21s
CI / test-nodejs-website (push) Successful in 23s
_cells_to_vocab_entries wurde nur bei is_vocab (column_en/column_de) aufgerufen. Für Sub-Sessions mit column_text wurden keine Einträge erzeugt, daher blieb die Korrektur-Tabelle leer. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -2020,9 +2020,10 @@ async def detect_words(
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
# For vocab layout: map cells 1:1 to vocab entries (row→entry).
|
# For vocab layout or single-column (box sub-sessions): map cells 1:1
|
||||||
# No content shuffling — each cell stays at its detected position.
|
# to vocab entries (row→entry).
|
||||||
if is_vocab:
|
has_text_col = 'column_text' in col_types
|
||||||
|
if is_vocab or has_text_col:
|
||||||
entries = _cells_to_vocab_entries(cells, columns_meta)
|
entries = _cells_to_vocab_entries(cells, columns_meta)
|
||||||
entries = _fix_phonetic_brackets(entries, pronunciation=pronunciation)
|
entries = _fix_phonetic_brackets(entries, pronunciation=pronunciation)
|
||||||
word_result["vocab_entries"] = entries
|
word_result["vocab_entries"] = entries
|
||||||
@@ -2169,7 +2170,8 @@ async def _word_batch_stream_generator(
|
|||||||
}
|
}
|
||||||
|
|
||||||
vocab_entries = None
|
vocab_entries = None
|
||||||
if is_vocab:
|
has_text_col = 'column_text' in col_types
|
||||||
|
if is_vocab or has_text_col:
|
||||||
entries = _cells_to_vocab_entries(cells, columns_meta)
|
entries = _cells_to_vocab_entries(cells, columns_meta)
|
||||||
entries = _fix_phonetic_brackets(entries, pronunciation=pronunciation)
|
entries = _fix_phonetic_brackets(entries, pronunciation=pronunciation)
|
||||||
word_result["vocab_entries"] = entries
|
word_result["vocab_entries"] = entries
|
||||||
@@ -2308,10 +2310,11 @@ async def _word_stream_generator(
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
# For vocab layout: map cells 1:1 to vocab entries (row→entry).
|
# For vocab layout or single-column (box sub-sessions): map cells 1:1
|
||||||
# No content shuffling — each cell stays at its detected position.
|
# to vocab entries (row→entry).
|
||||||
vocab_entries = None
|
vocab_entries = None
|
||||||
if is_vocab:
|
has_text_col = 'column_text' in col_types
|
||||||
|
if is_vocab or has_text_col:
|
||||||
entries = _cells_to_vocab_entries(all_cells, columns_meta)
|
entries = _cells_to_vocab_entries(all_cells, columns_meta)
|
||||||
entries = _fix_character_confusion(entries)
|
entries = _fix_character_confusion(entries)
|
||||||
entries = _fix_phonetic_brackets(entries, pronunciation=pronunciation)
|
entries = _fix_phonetic_brackets(entries, pronunciation=pronunciation)
|
||||||
@@ -3980,7 +3983,8 @@ async def run_auto(session_id: str, req: RunAutoRequest, request: Request):
|
|||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
if is_vocab:
|
has_text_col = 'column_text' in col_types
|
||||||
|
if is_vocab or has_text_col:
|
||||||
entries = _cells_to_vocab_entries(cells, columns_meta)
|
entries = _cells_to_vocab_entries(cells, columns_meta)
|
||||||
entries = _fix_character_confusion(entries)
|
entries = _fix_character_confusion(entries)
|
||||||
entries = _fix_phonetic_brackets(entries, pronunciation=req.pronunciation)
|
entries = _fix_phonetic_brackets(entries, pronunciation=req.pronunciation)
|
||||||
|
|||||||
Reference in New Issue
Block a user