Update notification service

This commit is contained in:
MacRimi
2026-03-20 23:21:00 +01:00
parent 2f4ea02544
commit 900c7154b6
4 changed files with 167 additions and 26 deletions

View File

@@ -659,7 +659,7 @@ export function NotificationSettings() {
setLoadingProviderModels(true)
try {
const data = await fetchApi<{ success: boolean; models: string[]; message: string }>("/api/notifications/provider-models", {
const data = await fetchApi<{ success: boolean; models: string[]; recommended: string; message: string }>("/api/notifications/provider-models", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
@@ -671,10 +671,15 @@ export function NotificationSettings() {
})
if (data.success && data.models && data.models.length > 0) {
setProviderModels(data.models)
// Auto-select first model if current selection is empty or not in the list
// Auto-select recommended model if current selection is empty or not in the list
updateConfig(prev => {
if (!prev.ai_model || !data.models.includes(prev.ai_model)) {
return { ...prev, ai_model: data.models[0] }
const modelToSelect = data.recommended || data.models[0]
return {
...prev,
ai_model: modelToSelect,
ai_models: { ...prev.ai_models, [provider]: modelToSelect }
}
}
return prev
})

View File

@@ -0,0 +1,64 @@
{
"_description": "Verified AI models for ProxMenux notifications. Only models listed here will be shown to users. Models are tested to work with the chat/completions API format.",
"_updated": "2026-03-20",
"groq": {
"models": [
"llama-3.3-70b-versatile",
"llama-3.1-70b-versatile",
"llama-3.1-8b-instant",
"llama3-70b-8192",
"llama3-8b-8192",
"mixtral-8x7b-32768",
"gemma2-9b-it"
],
"recommended": "llama-3.3-70b-versatile"
},
"gemini": {
"models": [
"gemini-2.5-flash-lite",
"gemini-flash-lite-latest"
],
"recommended": "gemini-2.5-flash-lite"
},
"openai": {
"models": [
"gpt-4.1-mini",
"gpt-4o-mini"
],
"recommended": "gpt-4o-mini"
},
"anthropic": {
"models": [
"claude-3-5-haiku-latest",
"claude-3-5-sonnet-latest",
"claude-3-opus-latest"
],
"recommended": "claude-3-5-haiku-latest"
},
"openrouter": {
"models": [
"meta-llama/llama-3.3-70b-instruct",
"meta-llama/llama-3.1-70b-instruct",
"meta-llama/llama-3.1-8b-instruct",
"anthropic/claude-3.5-haiku",
"anthropic/claude-3.5-sonnet",
"google/gemini-2.5-flash-lite",
"openai/gpt-4o-mini",
"mistralai/mistral-7b-instruct",
"mistralai/mixtral-8x7b-instruct"
],
"recommended": "meta-llama/llama-3.3-70b-instruct"
},
"ollama": {
"_note": "Ollama models are local, we don't filter them. User manages their own models.",
"models": [],
"recommended": ""
}
}

View File

@@ -102,9 +102,25 @@ def test_notification():
return jsonify({'error': str(e)}), 500
def load_verified_models():
    """Load the verified AI models whitelist from config/verified_ai_models.json.

    The file maps each provider name to {"models": [...], "recommended": str}.
    Loading is best-effort: a missing, unreadable, or corrupt file must never
    break the notification routes, so any failure yields an empty dict
    (callers treat {} as "no filtering available").

    Returns:
        dict: Parsed JSON config, or {} when the file is absent or unreadable.
    """
    try:
        config_path = Path(__file__).parent.parent / 'config' / 'verified_ai_models.json'
        if config_path.exists():
            # Explicit encoding: the JSON may contain non-ASCII text in notes
            # or model names; the platform default encoding is not reliable.
            with open(config_path, 'r', encoding='utf-8') as f:
                return json.load(f)
    except Exception as e:
        # Deliberate broad catch: log and fall through to the safe default.
        print(f"[flask_notification_routes] Failed to load verified models: {e}")
    return {}
@notification_bp.route('/api/notifications/provider-models', methods=['POST'])
def get_provider_models():
"""Fetch available models from any AI provider.
"""Fetch available models from AI provider, filtered by verified models list.
Only returns models that:
1. Are available from the provider's API
2. Are in our verified_ai_models.json list (tested to work)
Request body:
{
@@ -118,6 +134,7 @@ def get_provider_models():
{
"success": true/false,
"models": ["model1", "model2", ...],
"recommended": "recommended-model",
"message": "status message"
}
"""
@@ -131,7 +148,13 @@ def get_provider_models():
if not provider:
return jsonify({'success': False, 'models': [], 'message': 'Provider not specified'})
# Handle Ollama separately (local, no API key)
# Load verified models config
verified_config = load_verified_models()
provider_config = verified_config.get(provider, {})
verified_models = set(provider_config.get('models', []))
recommended = provider_config.get('recommended', '')
# Handle Ollama separately (local, no filtering)
if provider == 'ollama':
import urllib.request
import urllib.error
@@ -147,25 +170,25 @@ def get_provider_models():
return jsonify({
'success': True,
'models': models,
'message': f'Found {len(models)} models'
'recommended': models[0] if models else '',
'message': f'Found {len(models)} local models'
})
# Handle Anthropic - no models list API, return known models
# Handle Anthropic - no models list API, return verified models directly
if provider == 'anthropic':
# Anthropic doesn't have a models list endpoint
# Return the known stable aliases that auto-update
models = [
models = list(verified_models) if verified_models else [
'claude-3-5-haiku-latest',
'claude-3-5-sonnet-latest',
'claude-3-opus-latest',
]
return jsonify({
'success': True,
'models': models,
'message': 'Anthropic uses stable aliases that auto-update'
'models': sorted(models),
'recommended': recommended or models[0],
'message': f'{len(models)} verified models'
})
# For other providers, use the provider's list_models method
# For other providers, fetch from API and filter by verified list
if not api_key:
return jsonify({'success': False, 'models': [], 'message': 'API key required'})
@@ -180,21 +203,51 @@ def get_provider_models():
if not ai_provider:
return jsonify({'success': False, 'models': [], 'message': f'Unknown provider: {provider}'})
models = ai_provider.list_models()
# Get all models from provider API
api_models = ai_provider.list_models()
if not models:
if not api_models:
# API failed, fall back to verified list only
if verified_models:
models = sorted(verified_models)
return jsonify({
'success': True,
'models': models,
'recommended': recommended or models[0],
'message': f'{len(models)} verified models (API unavailable)'
})
return jsonify({
'success': False,
'models': [],
'message': 'Could not retrieve models. Check your API key.'
})
# Sort and return
models = sorted(models)
# Filter: only models that are BOTH in API and verified list
if verified_models:
api_models_set = set(api_models)
filtered_models = [m for m in verified_models if m in api_models_set]
if not filtered_models:
# No intersection - maybe verified list is outdated
# Return verified list anyway (will fail on use if truly unavailable)
filtered_models = list(verified_models)
# Sort with recommended first
def sort_key(m):
if m == recommended:
return (0, m)
return (1, m)
models = sorted(filtered_models, key=sort_key)
else:
# No verified list for this provider, return all from API
models = sorted(api_models)
return jsonify({
'success': True,
'models': models,
'message': f'Found {len(models)} models'
'recommended': recommended if recommended in models else (models[0] if models else ''),
'message': f'{len(models)} verified models available'
})
except Exception as e:

View File

@@ -1688,17 +1688,37 @@ class NotificationManager:
return {'checked': False, 'migrated': False, 'message': 'No API key configured'}
try:
# Load verified models from config
verified_models = []
recommended_model = ''
try:
config_path = Path(__file__).parent.parent / 'config' / 'verified_ai_models.json'
if config_path.exists():
with open(config_path, 'r') as f:
verified_config = json.load(f)
provider_config = verified_config.get(provider_name, {})
verified_models = provider_config.get('models', [])
recommended_model = provider_config.get('recommended', '')
except Exception as e:
print(f"[NotificationManager] Failed to load verified models: {e}")
from ai_providers import get_provider
provider = get_provider(provider_name, api_key=api_key, model=current_model)
if not provider:
return {'checked': False, 'migrated': False, 'message': f'Unknown provider: {provider_name}'}
# Get available models
available_models = provider.list_models()
# Get available models from API
api_models = provider.list_models()
if not available_models:
# Can't verify (provider doesn't support listing or API error)
# Combine: use verified models that are also in API (or all verified if API fails)
if api_models and verified_models:
available_models = [m for m in verified_models if m in api_models]
elif verified_models:
available_models = verified_models
elif api_models:
available_models = api_models
else:
return {'checked': True, 'migrated': False, 'message': 'Could not retrieve model list'}
# Check if current model is available
@@ -1710,11 +1730,10 @@ class NotificationManager:
'message': f'Model {current_model} is available'
}
# Model not available - find best fallback
recommended = provider.get_recommended_model()
# Model not available - use recommended or first available
recommended = recommended_model if recommended_model in available_models else (available_models[0] if available_models else '')
if recommended == current_model:
# No better option found
if not recommended or recommended == current_model:
return {
'checked': True,
'migrated': False,