diff --git a/main.js b/main.js
index 975cbec..576e070 100644
--- a/main.js
+++ b/main.js
@@ -226,11 +226,12 @@ ${JSON.stringify(commits, null, 2)}
 
 // ---- 3. LLM Streaming Call ----
 async function streamLLMCommitMessages(prompt, onDataChunk) {
+  const selectedModel = await window.settingsAPI.getCommitModel?.() ?? 'qwen2.5-coder:32b';
   const response = await fetch('http://localhost:11434/api/generate', {
     method: 'POST',
     headers: { 'Content-Type': 'application/json' },
     body: JSON.stringify({
-      model: 'qwen2.5-coder:32b', // ggf. Modell anpassen
+      model: selectedModel, // ggf. Modell anpassen
      prompt: prompt,
      stream: true
    })