mirror of
https://github.com/MacRimi/ProxMenux.git
synced 2026-04-06 04:13:48 +00:00
67 lines
1.8 KiB
JSON
67 lines
1.8 KiB
JSON
{
  "_description": "Verified AI models for ProxMenux notifications. Only models listed here will be shown to users. Models are tested to work with the chat/completions API format.",
  "_updated": "2026-03-20",
  "groq": {
    "models": [
      "llama-3.3-70b-versatile",
      "llama-3.1-70b-versatile",
      "llama-3.1-8b-instant",
      "llama3-70b-8192",
      "llama3-8b-8192",
      "mixtral-8x7b-32768",
      "gemma2-9b-it"
    ],
    "recommended": "llama-3.3-70b-versatile"
  },
  "gemini": {
    "models": [
      "gemini-2.5-flash",
      "gemini-2.5-flash-lite",
      "gemini-2.5-pro"
    ],
    "recommended": "gemini-2.5-flash",
    "_note": "gemini-2.5-flash-lite is cheaper but may struggle with complex prompts. Use with simple/custom prompts.",
    "_deprecated": ["gemini-2.0-flash", "gemini-2.0-flash-lite", "gemini-1.5-flash", "gemini-1.0-pro", "gemini-pro"]
  },
  "openai": {
    "models": [
      "gpt-4.1-mini",
      "gpt-4o-mini"
    ],
    "recommended": "gpt-4o-mini"
  },
  "anthropic": {
    "models": [
      "claude-3-5-haiku-latest",
      "claude-3-5-sonnet-latest",
      "claude-3-opus-latest"
    ],
    "recommended": "claude-3-5-haiku-latest"
  },
  "openrouter": {
    "models": [
      "meta-llama/llama-3.3-70b-instruct",
      "meta-llama/llama-3.1-70b-instruct",
      "meta-llama/llama-3.1-8b-instruct",
      "anthropic/claude-3.5-haiku",
      "anthropic/claude-3.5-sonnet",
      "google/gemini-2.5-flash-lite",
      "openai/gpt-4o-mini",
      "mistralai/mistral-7b-instruct",
      "mistralai/mixtral-8x7b-instruct"
    ],
    "recommended": "meta-llama/llama-3.3-70b-instruct"
  },
  "ollama": {
    "_note": "Ollama models are local, we don't filter them. User manages their own models.",
    "models": [],
    "recommended": ""
  }
}