Add app settings management and update API URL handling
This commit is contained in:
88
backend/app_settings.py
Normal file
88
backend/app_settings.py
Normal file
@@ -0,0 +1,88 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict
|
||||
|
||||
|
||||
APP_NAME = "Heimgeist"
|
||||
DEFAULT_BACKEND_API_URL = "http://127.0.0.1:8000"
|
||||
DEFAULT_OLLAMA_API_URL = "http://127.0.0.1:11434"
|
||||
DEFAULT_SETTINGS: Dict[str, Any] = {
|
||||
"backendApiUrl": DEFAULT_BACKEND_API_URL,
|
||||
"ollamaApiUrl": DEFAULT_OLLAMA_API_URL,
|
||||
}
|
||||
|
||||
|
||||
def _default_settings_dir() -> Path:
    """Return the platform-appropriate per-user config directory for the app.

    macOS uses ~/Library/Application Support, Windows uses %APPDATA%
    (falling back to ~/AppData/Roaming), and everything else follows the
    XDG convention (~/.config unless XDG_CONFIG_HOME is set).
    """
    if sys.platform == "darwin":
        return Path.home() / "Library" / "Application Support" / APP_NAME
    if os.name == "nt":
        appdata = os.getenv("APPDATA")
        base = Path(appdata) if appdata else Path.home() / "AppData" / "Roaming"
        return base / APP_NAME
    xdg_base = os.getenv("XDG_CONFIG_HOME", str(Path.home() / ".config"))
    return Path(xdg_base) / APP_NAME
|
||||
|
||||
|
||||
def settings_path() -> Path:
    """Resolve the settings file location.

    The HEIMGEIST_SETTINGS_FILE environment variable, when set, overrides
    the default platform config directory (useful for tests and portable
    installs). `~` in the override is expanded.
    """
    override = os.getenv("HEIMGEIST_SETTINGS_FILE")
    if override:
        return Path(override).expanduser()
    return _default_settings_dir() / "settings.json"
|
||||
|
||||
|
||||
def _looks_like_ollama_url(value: Any) -> bool:
|
||||
if not isinstance(value, str):
|
||||
return False
|
||||
|
||||
trimmed = value.strip()
|
||||
if not trimmed:
|
||||
return False
|
||||
|
||||
if ":11434" in trimmed:
|
||||
return True
|
||||
|
||||
return trimmed.rstrip("/").endswith("/api")
|
||||
|
||||
|
||||
def _normalize_url(value: Any, fallback: str) -> str:
|
||||
if not isinstance(value, str):
|
||||
return fallback
|
||||
|
||||
trimmed = value.strip().rstrip("/")
|
||||
return trimmed or fallback
|
||||
|
||||
|
||||
def load_app_settings() -> Dict[str, Any]:
    """Load persisted app settings merged over ``DEFAULT_SETTINGS``.

    Best-effort: a missing, unreadable, or malformed settings file never
    raises — defaults are used instead.

    Legacy migration: older files stored a single ``ollamaApiUrl`` that
    could name either service. When ``backendApiUrl`` is absent, that URL
    is routed to whichever key it looks like it belongs to (see
    ``_looks_like_ollama_url``) and the other key gets its default.

    Returns:
        Dict with at least ``backendApiUrl`` and ``ollamaApiUrl``, both
        normalized (no trailing slash, never empty), plus any other keys
        found in the file.
    """
    path = settings_path()
    try:
        raw = json.loads(path.read_text(encoding="utf-8"))
    except Exception:
        # Missing file, bad JSON, permission error, etc. — fall back to
        # defaults rather than crashing at startup. (FileNotFoundError is
        # covered here too; a separate arm with the same body is redundant.)
        raw = {}

    if not isinstance(raw, dict):
        raw = {}

    settings = {**DEFAULT_SETTINGS, **raw}
    legacy_url = raw.get("ollamaApiUrl")
    if "backendApiUrl" not in raw and isinstance(legacy_url, str):
        # Legacy single-URL config: decide which service the URL names.
        if _looks_like_ollama_url(legacy_url):
            settings["backendApiUrl"] = DEFAULT_BACKEND_API_URL
            settings["ollamaApiUrl"] = _normalize_url(legacy_url, DEFAULT_OLLAMA_API_URL)
        else:
            settings["backendApiUrl"] = _normalize_url(legacy_url, DEFAULT_BACKEND_API_URL)
            settings["ollamaApiUrl"] = DEFAULT_OLLAMA_API_URL
    else:
        settings["backendApiUrl"] = _normalize_url(settings.get("backendApiUrl"), DEFAULT_BACKEND_API_URL)
        settings["ollamaApiUrl"] = _normalize_url(settings.get("ollamaApiUrl"), DEFAULT_OLLAMA_API_URL)

    return settings
|
||||
|
||||
|
||||
def get_ollama_api_url() -> str:
    """Convenience accessor for the normalized Ollama base URL."""
    return _normalize_url(load_app_settings().get("ollamaApiUrl"), DEFAULT_OLLAMA_API_URL)
|
||||
@@ -3,11 +3,12 @@ import httpx
|
||||
import json
|
||||
from typing import Dict, Any, List, AsyncGenerator
|
||||
|
||||
OLLAMA_URL = "http://127.0.0.1:11434"
|
||||
from .app_settings import get_ollama_api_url
|
||||
|
||||
async def list_models() -> Dict[str, Any]:
|
||||
ollama_url = get_ollama_api_url()
|
||||
async with httpx.AsyncClient(timeout=30.0) as client:
|
||||
r = await client.get(f"{OLLAMA_URL}/api/tags")
|
||||
r = await client.get(f"{ollama_url}/api/tags")
|
||||
r.raise_for_status()
|
||||
data = r.json()
|
||||
# Normalize to a simple list of names
|
||||
@@ -15,13 +16,14 @@ async def list_models() -> Dict[str, Any]:
|
||||
return {"models": models}
|
||||
|
||||
async def chat(model: str, messages: List[Dict[str, str]]) -> str:
|
||||
ollama_url = get_ollama_api_url()
|
||||
payload = {
|
||||
"model": model,
|
||||
"messages": messages,
|
||||
"stream": False
|
||||
}
|
||||
async with httpx.AsyncClient(timeout=600.0) as client:
|
||||
r = await client.post(f"{OLLAMA_URL}/api/chat", json=payload)
|
||||
r = await client.post(f"{ollama_url}/api/chat", json=payload)
|
||||
r.raise_for_status()
|
||||
data = r.json()
|
||||
# Ollama returns full conversation; pick last message content
|
||||
@@ -35,13 +37,14 @@ async def chat(model: str, messages: List[Dict[str, str]]) -> str:
|
||||
return data.get("content", "")
|
||||
|
||||
async def chat_stream(model: str, messages: List[Dict[str, str]]) -> AsyncGenerator[str, None]:
|
||||
ollama_url = get_ollama_api_url()
|
||||
payload = {
|
||||
"model": model,
|
||||
"messages": messages,
|
||||
"stream": True
|
||||
}
|
||||
async with httpx.AsyncClient(timeout=600.0) as client:
|
||||
async with client.stream("POST", f"{OLLAMA_URL}/api/chat", json=payload) as r:
|
||||
async with client.stream("POST", f"{ollama_url}/api/chat", json=payload) as r:
|
||||
r.raise_for_status()
|
||||
async for line in r.aiter_lines():
|
||||
if line:
|
||||
|
||||
@@ -6,19 +6,61 @@ const fs = require('fs')
|
||||
let mainWindow
|
||||
let settingsWindow = null
|
||||
|
||||
// Service endpoint defaults, mirrored by backend/app_settings.py.
const DEFAULT_BACKEND_API_URL = 'http://127.0.0.1:8000'
const DEFAULT_OLLAMA_API_URL = 'http://127.0.0.1:11434'
// HEIMGEIST_SETTINGS_FILE overrides the per-user location (tests / portable installs).
const settingsFilePath = process.env.HEIMGEIST_SETTINGS_FILE || path.join(app.getPath('userData'), 'settings.json')
let appSettings = {}
// UI zoom bounds applied to the renderer.
const DEFAULT_UI_SCALE = 1
const MIN_UI_SCALE = 0.7
const MAX_UI_SCALE = 1.3
|
||||
|
||||
// Settings used when no settings file exists or keys are missing.
// (The stale pre-migration `ollamaApiUrl: 'http://127.0.0.1:8000'` entry is
// gone — it pointed Ollama at the backend port.)
const defaultSettings = {
  backendApiUrl: DEFAULT_BACKEND_API_URL,
  ollamaApiUrl: DEFAULT_OLLAMA_API_URL,
  colorScheme: 'Default',
  uiScale: DEFAULT_UI_SCALE,
  chatModel: 'llama3',
}
|
||||
|
||||
// Heuristic used during settings migration: the default Ollama port (11434)
// or a bare "/api" path marks a URL as naming the Ollama server rather than
// the Heimgeist backend.
function looksLikeOllamaUrl(value) {
  if (typeof value !== 'string') {
    return false
  }
  let parsed
  try {
    parsed = new URL(value)
  } catch (_error) {
    // Not a parseable URL — can't claim it's Ollama.
    return false
  }
  return parsed.port === '11434' || /^\/api\/?$/i.test(parsed.pathname || '')
}
|
||||
|
||||
// Merge stored settings over defaults and migrate legacy single-URL configs.
// Returns { nextSettings, migrated }; `migrated` signals the caller should
// persist the rewritten settings.
function migrateSettings(rawSettings) {
  const source = rawSettings && typeof rawSettings === 'object' ? rawSettings : {}
  const nextSettings = { ...defaultSettings, ...source }
  let migrated = false

  // Older files stored only `ollamaApiUrl`, which could name either service.
  if (!Object.prototype.hasOwnProperty.call(source, 'backendApiUrl') && typeof source.ollamaApiUrl === 'string') {
    if (looksLikeOllamaUrl(source.ollamaApiUrl)) {
      nextSettings.backendApiUrl = DEFAULT_BACKEND_API_URL
      nextSettings.ollamaApiUrl = source.ollamaApiUrl
    } else {
      nextSettings.backendApiUrl = source.ollamaApiUrl
      nextSettings.ollamaApiUrl = DEFAULT_OLLAMA_API_URL
    }
    migrated = true
  }

  // Normalize like the Python backend's _normalize_url: trim, drop trailing
  // slashes, and fall back to the defaults when the result is empty, so a
  // blank entry never yields an unusable '' base URL.
  nextSettings.backendApiUrl = String(nextSettings.backendApiUrl || '').trim().replace(/\/+$/, '') || DEFAULT_BACKEND_API_URL
  nextSettings.ollamaApiUrl = String(nextSettings.ollamaApiUrl || '').trim().replace(/\/+$/, '') || DEFAULT_OLLAMA_API_URL

  return { nextSettings, migrated }
}
|
||||
|
||||
function normalizeUiScale(value) {
|
||||
const numericValue = Number(value)
|
||||
if (!Number.isFinite(numericValue)) {
|
||||
@@ -44,7 +86,11 @@ function loadSettings() {
|
||||
try {
|
||||
if (fs.existsSync(settingsFilePath)) {
|
||||
const data = fs.readFileSync(settingsFilePath, 'utf8')
|
||||
appSettings = { ...defaultSettings, ...JSON.parse(data) }
|
||||
const { nextSettings, migrated } = migrateSettings(JSON.parse(data))
|
||||
appSettings = nextSettings
|
||||
if (migrated) {
|
||||
saveSettings()
|
||||
}
|
||||
} else {
|
||||
appSettings = { ...defaultSettings }
|
||||
saveSettings()
|
||||
|
||||
@@ -1,10 +1,18 @@
|
||||
import React, { useState, useEffect } from 'react';
|
||||
|
||||
const API_URL_KEY = 'ollamaApiUrl';
|
||||
const BACKEND_API_URL_KEY = 'backendApiUrl';
|
||||
const OLLAMA_API_URL_KEY = 'ollamaApiUrl';
|
||||
const MODEL_KEY = 'chatModel';
|
||||
const STREAM_KEY = 'streamOutput';
|
||||
const DEFAULT_BACKEND_API_URL = 'http://127.0.0.1:8000';
|
||||
const DEFAULT_OLLAMA_API_URL = 'http://127.0.0.1:11434';
|
||||
|
||||
// Pick the backend base URL: prefer the explicit key, fall back to the
// legacy single-URL key, then the built-in default.
function resolveBackendApiUrl(settings) {
  const { backendApiUrl, ollamaApiUrl } = settings
  return backendApiUrl || ollamaApiUrl || DEFAULT_BACKEND_API_URL;
}
|
||||
|
||||
export default function GeneralSettings({ onModelChange, onStreamOutputChange }) {
|
||||
const [backendApiUrl, setBackendApiUrl] = useState('');
|
||||
const [ollamaApiUrl, setOllamaApiUrl] = useState('');
|
||||
const [models, setModels] = useState([]);
|
||||
const [selectedModel, setSelectedModel] = useState('');
|
||||
@@ -12,6 +20,7 @@ export default function GeneralSettings({ onModelChange, onStreamOutputChange })
|
||||
|
||||
useEffect(() => {
|
||||
window.electronAPI.getSettings().then(settings => {
|
||||
setBackendApiUrl(resolveBackendApiUrl(settings));
|
||||
setOllamaApiUrl(settings.ollamaApiUrl);
|
||||
setSelectedModel(settings.chatModel || '');
|
||||
setStreamOutput(settings.streamOutput || false);
|
||||
@@ -19,8 +28,8 @@ export default function GeneralSettings({ onModelChange, onStreamOutputChange })
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
if (ollamaApiUrl) {
|
||||
fetch(ollamaApiUrl + '/models')
|
||||
if (backendApiUrl) {
|
||||
fetch(backendApiUrl + '/models')
|
||||
.then(r => r.json())
|
||||
.then(data => {
|
||||
const names = data.models?.map(m => m.name) || [];
|
||||
@@ -34,12 +43,18 @@ export default function GeneralSettings({ onModelChange, onStreamOutputChange })
|
||||
})
|
||||
.catch(err => console.error('Failed to load models', err));
|
||||
}
|
||||
}, [ollamaApiUrl, selectedModel]); // Depend on selectedModel to re-evaluate default selection
|
||||
}, [backendApiUrl, selectedModel]); // Depend on selectedModel to re-evaluate default selection
|
||||
|
||||
const handleUrlChange = (e) => {
|
||||
const handleBackendUrlChange = (e) => {
|
||||
const newUrl = e.target.value;
|
||||
setBackendApiUrl(newUrl);
|
||||
window.electronAPI.setSetting(BACKEND_API_URL_KEY, newUrl);
|
||||
};
|
||||
|
||||
const handleOllamaUrlChange = (e) => {
|
||||
const newUrl = e.target.value;
|
||||
setOllamaApiUrl(newUrl);
|
||||
window.electronAPI.setSetting(API_URL_KEY, newUrl);
|
||||
window.electronAPI.setSetting(OLLAMA_API_URL_KEY, newUrl);
|
||||
};
|
||||
|
||||
const handleModelChange = (e) => {
|
||||
@@ -63,14 +78,26 @@ export default function GeneralSettings({ onModelChange, onStreamOutputChange })
|
||||
return (
|
||||
<div className="settings-content-panel">
|
||||
<div className="setting-section">
|
||||
<h3>Ollama API URL</h3>
|
||||
<h3>Heimgeist Backend URL</h3>
|
||||
<input
|
||||
type="text"
|
||||
className="input"
|
||||
value={backendApiUrl}
|
||||
onChange={handleBackendUrlChange}
|
||||
placeholder={`e.g., ${DEFAULT_BACKEND_API_URL}`}
|
||||
/>
|
||||
<p className="setting-description">Internal UI requests like chats, sessions, and databases go to this URL.</p>
|
||||
</div>
|
||||
<div className="setting-section">
|
||||
<h3>Ollama URL</h3>
|
||||
<input
|
||||
type="text"
|
||||
className="input"
|
||||
value={ollamaApiUrl}
|
||||
onChange={handleUrlChange}
|
||||
placeholder="e.g., http://localhost:11434"
|
||||
onChange={handleOllamaUrlChange}
|
||||
placeholder={`e.g., ${DEFAULT_OLLAMA_API_URL}`}
|
||||
/>
|
||||
<p className="setting-description">Heimgeist uses this URL to talk to Ollama for models and chat generation.</p>
|
||||
</div>
|
||||
<div className="setting-section">
|
||||
<h3>Chat Model</h3>
|
||||
|
||||
Reference in New Issue
Block a user