Fix bugs, N+1 queries, and wire settings in persian-tutor

- Replace inline __import__("datetime").timedelta hack with proper import
- Remove unused import random in anki_export.py
- Add error handling for Claude CLI subprocess failures in ai.py
- Fix hardcoded absolute path in stt.py with relative Path resolution
- Fix N+1 DB queries in vocab.get_flashcard_batch and dashboard.get_category_breakdown
  by adding db.get_all_word_progress() batch query
- Wire Ollama model and Whisper size settings to actually update config
  via ai.set_ollama_model() and stt.set_whisper_size()

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
dl92
2026-02-08 15:40:24 +00:00
parent 8b5eb8797f
commit 3a8705ece8
7 changed files with 57 additions and 12 deletions

View File

@@ -6,9 +6,18 @@ import ollama
DEFAULT_OLLAMA_MODEL = "qwen2.5:7b"
_ollama_model = DEFAULT_OLLAMA_MODEL
def ask_ollama(prompt, system=None, model=DEFAULT_OLLAMA_MODEL):
def set_ollama_model(model):
    """Switch the default model that subsequent fast Ollama queries will use.

    Updates the module-level ``_ollama_model`` so that ``ask_ollama`` /
    ``chat_ollama`` calls made without an explicit ``model=`` pick it up.
    """
    global _ollama_model
    _ollama_model = model
def ask_ollama(prompt, system=None, model=None):
"""Query Ollama with an optional system prompt."""
model = model or _ollama_model
messages = []
if system:
messages.append({"role": "system", "content": system})
@@ -24,6 +33,8 @@ def ask_claude(prompt):
capture_output=True,
text=True,
)
if result.returncode != 0:
raise RuntimeError(f"Claude CLI failed (exit {result.returncode}): {result.stderr.strip()}")
return result.stdout.strip()
@@ -34,8 +45,9 @@ def ask(prompt, system=None, quality="fast"):
return ask_ollama(prompt, system=system)
def chat_ollama(messages, system=None, model=DEFAULT_OLLAMA_MODEL):
def chat_ollama(messages, system=None, model=None):
"""Multi-turn conversation with Ollama."""
model = model or _ollama_model
all_messages = []
if system:
all_messages.append({"role": "system", "content": system})