Update Ollama model selection

This commit is contained in:
MacRimi
2026-03-19 10:08:08 +01:00
parent 10f8735f55
commit f2210946c2
2 changed files with 124 additions and 5 deletions

View File

@@ -15,7 +15,7 @@ import {
Bell, BellOff, Send, CheckCircle2, XCircle, Loader2,
AlertTriangle, Info, Settings2, Zap, Eye, EyeOff,
Trash2, ChevronDown, ChevronUp, ChevronRight, TestTube2, Mail, Webhook,
Copy, Server, Shield, ExternalLink
Copy, Server, Shield, ExternalLink, RefreshCw
} from "lucide-react"
interface ChannelConfig {
@@ -247,6 +247,8 @@ export function NotificationSettings() {
const [showProviderInfo, setShowProviderInfo] = useState(false)
const [testingAI, setTestingAI] = useState(false)
const [aiTestResult, setAiTestResult] = useState<{ success: boolean; message: string; model?: string } | null>(null)
const [ollamaModels, setOllamaModels] = useState<string[]>([])
const [loadingOllamaModels, setLoadingOllamaModels] = useState(false)
const [webhookSetup, setWebhookSetup] = useState<{
status: "idle" | "running" | "success" | "failed"
fallback_commands: string[]
@@ -594,6 +596,37 @@ export function NotificationSettings() {
}
}
// Fetch the list of available models from an Ollama server and store it in
// component state. If the currently-selected model is missing from the
// fetched list, auto-select the first available one.
//
// FIX: the original read `config.ai_model` directly, forcing it into the
// useCallback dependency array — selecting a model recreated this callback
// and re-triggered the fetch effect (a redundant round-trip per selection).
// The inclusion check now happens inside the functional updater, so the
// callback identity is stable. (Assumes `updateConfig` and `fetchApi` are
// stable across renders, as the original dependency list already did.)
const fetchOllamaModels = useCallback(async (url: string) => {
  if (!url) return
  setLoadingOllamaModels(true)
  try {
    const data = await fetchApi<{ success: boolean; models: string[]; message: string }>("/api/notifications/ollama-models", {
      method: "POST",
      body: JSON.stringify({ ollama_url: url }),
    })
    if (data.success) {
      setOllamaModels(data.models)
      // Auto-select the first model only when the current one is unavailable.
      if (data.models.length > 0) {
        updateConfig(p => (data.models.includes(p.ai_model) ? p : { ...p, ai_model: data.models[0] }))
      }
    } else {
      setOllamaModels([])
    }
  } catch {
    // Network/parse failure: clear the list so the UI shows "No models found".
    setOllamaModels([])
  } finally {
    setLoadingOllamaModels(false)
  }
}, [])
// Auto-load the Ollama model list whenever the provider is "ollama" and a
// server URL is set; also re-runs when the fetch callback's identity changes.
// NOTE(review): fires on every keystroke while the URL field is edited —
// consider whether debouncing is wanted; confirm against the edit-mode flow.
useEffect(() => {
if (config.ai_provider === 'ollama' && config.ai_ollama_url) {
fetchOllamaModels(config.ai_ollama_url)
}
}, [config.ai_provider, config.ai_ollama_url, fetchOllamaModels])
const handleTestAI = async () => {
setTestingAI(true)
setAiTestResult(null)
@@ -1449,12 +1482,47 @@ export function NotificationSettings() {
</div>
)}
{/* Model (read-only display) */}
{/* Model - selector for Ollama, read-only for others */}
<div className="space-y-2">
<Label className="text-xs sm:text-sm text-foreground/80">Model</Label>
<div className="h-9 px-3 flex items-center rounded-md border border-border bg-muted/50 text-sm font-mono text-muted-foreground">
{AI_PROVIDERS.find(p => p.value === config.ai_provider)?.model || "default"}
</div>
{config.ai_provider === "ollama" ? (
<div className="flex items-center gap-2">
<Select
value={config.ai_model || ""}
onValueChange={v => updateConfig(p => ({ ...p, ai_model: v }))}
disabled={!editMode || loadingOllamaModels}
>
<SelectTrigger className="h-9 text-sm font-mono flex-1">
<SelectValue placeholder={loadingOllamaModels ? "Loading models..." : "Select model"}>
{config.ai_model || (loadingOllamaModels ? "Loading..." : "Select model")}
</SelectValue>
</SelectTrigger>
<SelectContent>
{ollamaModels.length > 0 ? (
ollamaModels.map(m => (
<SelectItem key={m} value={m} className="font-mono">{m}</SelectItem>
))
) : (
<SelectItem value="_none" disabled className="text-muted-foreground">
{loadingOllamaModels ? "Loading models..." : "No models found"}
</SelectItem>
)}
</SelectContent>
</Select>
<button
onClick={() => fetchOllamaModels(config.ai_ollama_url)}
disabled={!editMode || loadingOllamaModels}
className="h-9 w-9 flex items-center justify-center rounded-md border border-border hover:bg-muted transition-colors shrink-0 disabled:opacity-50"
title="Refresh models"
>
<RefreshCw className={`h-4 w-4 ${loadingOllamaModels ? 'animate-spin' : ''}`} />
</button>
</div>
) : (
<div className="h-9 px-3 flex items-center rounded-md border border-border bg-muted/50 text-sm font-mono text-muted-foreground">
{AI_PROVIDERS.find(p => p.value === config.ai_provider)?.model || "default"}
</div>
)}
</div>
{/* Language selector */}

View File

@@ -101,6 +101,57 @@ def test_notification():
return jsonify({'error': str(e)}), 500
@notification_bp.route('/api/notifications/ollama-models', methods=['POST'])
def get_ollama_models():
    """Fetch available models from an Ollama server.

    Request body:
        {"ollama_url": "http://localhost:11434"}

    Returns JSON:
        {"success": bool, "models": [str, ...], "message": str}

    Model names are returned with their tag suffix stripped
    ("llama3:8b" -> "llama3"), de-duplicated and sorted.
    """
    try:
        import urllib.request
        import urllib.error
        from urllib.parse import urlparse

        data = request.get_json() or {}
        ollama_url = data.get('ollama_url', 'http://localhost:11434')

        # SECURITY: the URL comes from the request body; restrict it to
        # http/https so this endpoint cannot be abused to read local files
        # (file://) or reach other schemes through urllib (SSRF hardening).
        if urlparse(ollama_url).scheme not in ('http', 'https'):
            return jsonify({
                'success': False,
                'models': [],
                'message': 'Invalid Ollama URL: only http/https are allowed'
            })

        url = f"{ollama_url.rstrip('/')}/api/tags"
        req = urllib.request.Request(url, method='GET')
        req.add_header('User-Agent', 'ProxMenux-Monitor/1.1')

        with urllib.request.urlopen(req, timeout=10) as resp:
            result = json.loads(resp.read().decode('utf-8'))

        # Strip the tag suffix, drop entries without a name, de-dup and sort.
        models = sorted({
            m.get('name', '').split(':')[0]
            for m in result.get('models', [])
            if m.get('name')
        })
        return jsonify({
            'success': True,
            'models': models,
            'message': f'Found {len(models)} models'
        })
    except urllib.error.URLError as e:
        # Covers HTTPError too (it subclasses URLError); .reason holds cause.
        return jsonify({
            'success': False,
            'models': [],
            'message': f'Cannot connect to Ollama: {str(e.reason)}'
        })
    except Exception as e:
        # Includes malformed JSON from the server and timeouts.
        return jsonify({
            'success': False,
            'models': [],
            'message': f'Error: {str(e)}'
        })
@notification_bp.route('/api/notifications/test-ai', methods=['POST'])
def test_ai_connection():
"""Test AI provider connection and configuration.