diff options
Diffstat (limited to 'g4f/models.py')
-rw-r--r-- | g4f/models.py | 1217 |
1 files changed, 943 insertions, 274 deletions
diff --git a/g4f/models.py b/g4f/models.py index e9016561..32a12d10 100644 --- a/g4f/models.py +++ b/g4f/models.py @@ -4,36 +4,67 @@ from dataclasses import dataclass from .Provider import IterListProvider, ProviderType from .Provider import ( - AI365VIP, - Bing, - Blackbox, - Chatgpt4o, - ChatgptFree, - DDG, - DeepInfra, - DeepInfraImage, - FreeChatgpt, - FreeGpt, - Gemini, - GeminiPro, - GeminiProChat, - GigaChat, - HuggingChat, - HuggingFace, - Koala, - Liaobots, - MetaAI, - OpenaiChat, - PerplexityLabs, - Pi, - Pizzagpt, - Reka, - Replicate, - ReplicateHome, - Vercel, - You, + Ai4Chat, + AIChatFree, + AiMathGPT, + Airforce, + Allyfy, + AmigoChat, + Bing, + Blackbox, + ChatGpt, + Chatgpt4Online, + ChatGptEs, + ChatgptFree, + ChatHub, + ChatifyAI, + Cloudflare, + DarkAI, + DDG, + DeepInfra, + DeepInfraChat, + Editee, + Free2GPT, + FreeChatgpt, + FreeGpt, + FreeNetfly, + Gemini, + GeminiPro, + GizAI, + GigaChat, + GPROChat, + HuggingChat, + HuggingFace, + Koala, + Liaobots, + MagickPen, + MetaAI, + NexraBing, + NexraBlackbox, + NexraChatGPT, + NexraDallE, + NexraDallE2, + NexraEmi, + NexraFluxPro, + NexraGeminiPro, + NexraMidjourney, + NexraQwen, + NexraSD15, + NexraSDLora, + NexraSDTurbo, + OpenaiChat, + PerplexityLabs, + Pi, + Pizzagpt, + Reka, + Replicate, + ReplicateHome, + RubiksAI, + TeachAnything, + Upstage, ) + @dataclass(unsafe_hash=True) class Model: """ @@ -53,120 +84,91 @@ class Model: """Returns a list of all model names.""" return _all_models + +### Default ### default = Model( name = "", base_provider = "", best_provider = IterListProvider([ - Bing, - You, - OpenaiChat, - FreeChatgpt, - AI365VIP, - Chatgpt4o, DDG, - ChatgptFree, - Koala, - Pizzagpt, - ]) -) - -# GPT-3.5 too, but all providers supports long requests and responses -gpt_35_long = Model( - name = 'gpt-3.5-turbo', - base_provider = 'openai', - best_provider = IterListProvider([ - FreeGpt, - You, - OpenaiChat, - Koala, - ChatgptFree, FreeChatgpt, - DDG, - AI365VIP, + HuggingChat, 
Pizzagpt, + ReplicateHome, + Upstage, + Blackbox, + Free2GPT, + MagickPen, + DeepInfraChat, + Airforce, + ChatHub, + ChatGptEs, + AmigoChat, + ChatifyAI, + Cloudflare, + Editee, + AiMathGPT, ]) ) + + ############ ### Text ### ############ ### OpenAI ### -### GPT-3.5 / GPT-4 ### +# gpt-3 +gpt_3 = Model( + name = 'gpt-3', + base_provider = 'OpenAI', + best_provider = NexraChatGPT +) + # gpt-3.5 gpt_35_turbo = Model( name = 'gpt-3.5-turbo', - base_provider = 'openai', - best_provider = IterListProvider([ - FreeGpt, - You, - Koala, - OpenaiChat, - ChatgptFree, - FreeChatgpt, - DDG, - AI365VIP, - Pizzagpt, - ]) + base_provider = 'OpenAI', + best_provider = IterListProvider([Allyfy, NexraChatGPT, Airforce, DarkAI, Liaobots]) ) -gpt_35_turbo_16k = Model( - name = 'gpt-3.5-turbo-16k', - base_provider = 'openai', - best_provider = gpt_35_long.best_provider +# gpt-4 +gpt_4o = Model( + name = 'gpt-4o', + base_provider = 'OpenAI', + best_provider = IterListProvider([NexraChatGPT, Blackbox, ChatGptEs, AmigoChat, DarkAI, Editee, GizAI, Airforce, Liaobots, OpenaiChat]) ) -gpt_35_turbo_16k_0613 = Model( - name = 'gpt-3.5-turbo-16k-0613', - base_provider = 'openai', - best_provider = gpt_35_long.best_provider +gpt_4o_mini = Model( + name = 'gpt-4o-mini', + base_provider = 'OpenAI', + best_provider = IterListProvider([DDG, ChatGptEs, FreeNetfly, Pizzagpt, MagickPen, AmigoChat, RubiksAI, Liaobots, Airforce, GizAI, ChatgptFree, Koala, OpenaiChat, ChatGpt]) ) -gpt_35_turbo_0613 = Model( - name = 'gpt-3.5-turbo-0613', - base_provider = 'openai', - best_provider = gpt_35_turbo.best_provider +gpt_4_turbo = Model( + name = 'gpt-4-turbo', + base_provider = 'OpenAI', + best_provider = IterListProvider([Liaobots, Airforce, Bing]) ) -# gpt-4 gpt_4 = Model( name = 'gpt-4', - base_provider = 'openai', - best_provider = IterListProvider([ - Bing, Liaobots, - ]) + base_provider = 'OpenAI', + best_provider = IterListProvider([Chatgpt4Online, Ai4Chat, NexraBing, NexraChatGPT, Airforce, 
Bing, OpenaiChat, gpt_4_turbo.best_provider, gpt_4o.best_provider, gpt_4o_mini.best_provider]) ) -gpt_4_0613 = Model( - name = 'gpt-4-0613', - base_provider = 'openai', - best_provider = gpt_4.best_provider +# o1 +o1 = Model( + name = 'o1', + base_provider = 'OpenAI', + best_provider = AmigoChat ) -gpt_4_32k = Model( - name = 'gpt-4-32k', - base_provider = 'openai', - best_provider = gpt_4.best_provider -) - -gpt_4_32k_0613 = Model( - name = 'gpt-4-32k-0613', - base_provider = 'openai', - best_provider = gpt_4.best_provider -) - -gpt_4_turbo = Model( - name = 'gpt-4-turbo', - base_provider = 'openai', - best_provider = Bing -) - -gpt_4o = Model( - name = 'gpt-4o', - base_provider = 'openai', - best_provider = IterListProvider([ - You, Liaobots, Chatgpt4o, AI365VIP - ]) +o1_mini = Model( + name = 'o1-mini', + base_provider = 'OpenAI', + best_provider = IterListProvider([AmigoChat, GizAI]) ) @@ -180,131 +182,257 @@ gigachat = Model( ### Meta ### meta = Model( - name = "meta", - base_provider = "meta", + name = "meta-ai", + base_provider = "Meta", best_provider = MetaAI ) -llama_2_70b_chat = Model( - name = "meta/llama-2-70b-chat", - base_provider = "meta", - best_provider = IterListProvider([ReplicateHome]) +# llama 2 +llama_2_7b = Model( + name = "llama-2-7b", + base_provider = "Meta Llama", + best_provider = Cloudflare ) -llama3_8b_instruct = Model( - name = "meta-llama/Meta-Llama-3-8B-Instruct", - base_provider = "meta", - best_provider = IterListProvider([DeepInfra, PerplexityLabs, Replicate]) +llama_2_13b = Model( + name = "llama-2-13b", + base_provider = "Meta Llama", + best_provider = Airforce ) -llama3_70b_instruct = Model( - name = "meta-llama/Meta-Llama-3-70B-Instruct", - base_provider = "meta", - best_provider = IterListProvider([DeepInfra, PerplexityLabs, Replicate, HuggingChat, DDG]) +# llama 3 +llama_3_8b = Model( + name = "llama-3-8b", + base_provider = "Meta Llama", + best_provider = IterListProvider([Cloudflare, Airforce, DeepInfra, Replicate]) ) 
-codellama_34b_instruct = Model( - name = "codellama/CodeLlama-34b-Instruct-hf", - base_provider = "meta", - best_provider = HuggingChat +llama_3_70b = Model( + name = "llama-3-70b", + base_provider = "Meta Llama", + best_provider = IterListProvider([ReplicateHome, Airforce, DeepInfra, Replicate]) ) -codellama_70b_instruct = Model( - name = "codellama/CodeLlama-70b-Instruct-hf", - base_provider = "meta", - best_provider = IterListProvider([DeepInfra]) +# llama 3.1 +llama_3_1_8b = Model( + name = "llama-3.1-8b", + base_provider = "Meta Llama", + best_provider = IterListProvider([Blackbox, DeepInfraChat, ChatHub, Cloudflare, Airforce, GizAI, PerplexityLabs]) +) + +llama_3_1_70b = Model( + name = "llama-3.1-70b", + base_provider = "Meta Llama", + best_provider = IterListProvider([DDG, HuggingChat, Blackbox, FreeGpt, TeachAnything, Free2GPT, DeepInfraChat, DarkAI, Airforce, AiMathGPT, RubiksAI, GizAI, HuggingFace, PerplexityLabs]) +) + +llama_3_1_405b = Model( + name = "llama-3.1-405b", + base_provider = "Meta Llama", + best_provider = IterListProvider([DeepInfraChat, Blackbox, AmigoChat, DarkAI, Airforce]) +) + +# llama 3.2 +llama_3_2_1b = Model( + name = "llama-3.2-1b", + base_provider = "Meta Llama", + best_provider = Cloudflare +) + +llama_3_2_3b = Model( + name = "llama-3.2-3b", + base_provider = "Meta Llama", + best_provider = Cloudflare +) + +llama_3_2_11b = Model( + name = "llama-3.2-11b", + base_provider = "Meta Llama", + best_provider = IterListProvider([Cloudflare, HuggingChat, HuggingFace]) +) + +llama_3_2_90b = Model( + name = "llama-3.2-90b", + base_provider = "Meta Llama", + best_provider = IterListProvider([AmigoChat, Airforce]) +) + + +# llamaguard +llamaguard_7b = Model( + name = "llamaguard-7b", + base_provider = "Meta Llama", + best_provider = Airforce +) + +llamaguard_2_8b = Model( + name = "llamaguard-2-8b", + base_provider = "Meta Llama", + best_provider = Airforce ) ### Mistral ### +mistral_7b = Model( + name = "mistral-7b", + base_provider = 
"Mistral", + best_provider = IterListProvider([DeepInfraChat, Cloudflare, Airforce, DeepInfra]) +) + mixtral_8x7b = Model( - name = "mistralai/Mixtral-8x7B-Instruct-v0.1", - base_provider = "huggingface", - best_provider = IterListProvider([DeepInfra, HuggingFace, PerplexityLabs, HuggingChat, DDG]) + name = "mixtral-8x7b", + base_provider = "Mistral", + best_provider = IterListProvider([DDG, ReplicateHome, DeepInfraChat, ChatHub, Airforce, DeepInfra]) ) -mistral_7b_v02 = Model( - name = "mistralai/Mistral-7B-Instruct-v0.2", - base_provider = "huggingface", - best_provider = IterListProvider([DeepInfra, HuggingFace, HuggingChat, ReplicateHome]) +mixtral_8x22b = Model( + name = "mixtral-8x22b", + base_provider = "Mistral", + best_provider = IterListProvider([DeepInfraChat, Airforce]) +) + +mistral_nemo = Model( + name = "mistral-nemo", + base_provider = "Mistral", + best_provider = IterListProvider([HuggingChat, HuggingFace]) +) + +mistral_large = Model( + name = "mistral-large", + base_provider = "Mistral", + best_provider = IterListProvider([Editee, GizAI]) ) ### NousResearch ### -Nous_Hermes_2_Mixtral_8x7B_DPO = Model( - name = "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO", +mixtral_8x7b_dpo = Model( + name = "mixtral-8x7b-dpo", base_provider = "NousResearch", - best_provider = IterListProvider([HuggingFace, HuggingChat]) + best_provider = Airforce ) +yi_34b = Model( + name = "yi-34b", + base_provider = "NousResearch", + best_provider = Airforce +) -### 01-ai ### -Yi_1_5_34B_Chat = Model( - name = "01-ai/Yi-1.5-34B-Chat", - base_provider = "01-ai", - best_provider = IterListProvider([HuggingFace, HuggingChat]) +hermes_3 = Model( + name = "hermes-3", + base_provider = "NousResearch", + best_provider = IterListProvider([HuggingChat, HuggingFace]) ) ### Microsoft ### -Phi_3_mini_4k_instruct = Model( - name = "microsoft/Phi-3-mini-4k-instruct", +phi_2 = Model( + name = "phi-2", base_provider = "Microsoft", - best_provider = IterListProvider([HuggingFace, HuggingChat]) 
+ best_provider = Cloudflare ) +phi_3_medium_4k = Model( + name = "phi-3-medium-4k", + base_provider = "Microsoft", + best_provider = DeepInfraChat +) -### Google ### +phi_3_5_mini = Model( + name = "phi-3.5-mini", + base_provider = "Microsoft", + best_provider = IterListProvider([HuggingChat, HuggingFace]) +) + +### Google DeepMind ### # gemini +gemini_pro = Model( + name = 'gemini-pro', + base_provider = 'Google DeepMind', + best_provider = IterListProvider([GeminiPro, Blackbox, AIChatFree, GPROChat, NexraGeminiPro, AmigoChat, Editee, GizAI, Airforce, Liaobots]) +) + +gemini_flash = Model( + name = 'gemini-flash', + base_provider = 'Google DeepMind', + best_provider = IterListProvider([Blackbox, GizAI, Airforce, Liaobots]) +) + gemini = Model( name = 'gemini', - base_provider = 'Google', + base_provider = 'Google DeepMind', best_provider = Gemini ) -gemini_pro = Model( - name = 'gemini-pro', +# gemma +gemma_2b_9b = Model( + name = 'gemma-2b-9b', base_provider = 'Google', - best_provider = IterListProvider([GeminiPro, You, GeminiProChat]) + best_provider = Airforce ) -# gemma -gemma_2_9b_it = Model( - name = 'gemma-2-9b-it', +gemma_2b_27b = Model( + name = 'gemma-2b-27b', + base_provider = 'Google', + best_provider = IterListProvider([DeepInfraChat, Airforce]) +) + +gemma_2b = Model( + name = 'gemma-2b', + base_provider = 'Google', + best_provider = IterListProvider([ReplicateHome, Airforce]) +) + +gemma_7b = Model( + name = 'gemma-7b', + base_provider = 'Google', + best_provider = Cloudflare +) + +# gemma 2 +gemma_2_27b = Model( + name = 'gemma-2-27b', base_provider = 'Google', - best_provider = IterListProvider([PerplexityLabs]) + best_provider = Airforce ) -gemma_2_27b_it = Model( - name = 'gemma-2-27b-it', +gemma_2 = Model( + name = 'gemma-2', base_provider = 'Google', - best_provider = IterListProvider([PerplexityLabs]) + best_provider = ChatHub ) ### Anthropic ### -claude_v2 = Model( - name = 'claude-v2', - base_provider = 'anthropic', - best_provider = 
IterListProvider([Vercel]) +claude_2_1 = Model( + name = 'claude-2.1', + base_provider = 'Anthropic', + best_provider = Liaobots ) +# claude 3 claude_3_opus = Model( name = 'claude-3-opus', - base_provider = 'anthropic', - best_provider = You + base_provider = 'Anthropic', + best_provider = IterListProvider([Airforce, Liaobots]) ) claude_3_sonnet = Model( name = 'claude-3-sonnet', - base_provider = 'anthropic', - best_provider = You + base_provider = 'Anthropic', + best_provider = IterListProvider([Airforce, Liaobots]) ) claude_3_haiku = Model( name = 'claude-3-haiku', - base_provider = 'anthropic', - best_provider = IterListProvider([DDG, AI365VIP]) + base_provider = 'Anthropic', + best_provider = IterListProvider([DDG, Airforce, GizAI, Liaobots]) +) + +# claude 3.5 +claude_3_5_sonnet = Model( + name = 'claude-3.5-sonnet', + base_provider = 'Anthropic', + best_provider = IterListProvider([Blackbox, Editee, AmigoChat, Airforce, GizAI, Liaobots]) ) @@ -316,66 +444,453 @@ reka_core = Model( ) -### NVIDIA ### -nemotron_4_340b_instruct = Model( - name = 'nemotron-4-340b-instruct', - base_provider = 'NVIDIA', - best_provider = IterListProvider([PerplexityLabs]) +### Blackbox AI ### +blackboxai = Model( + name = 'blackboxai', + base_provider = 'Blackbox AI', + best_provider = IterListProvider([Blackbox, NexraBlackbox]) ) - -### Blackbox ### -blackbox = Model( - name = 'blackbox', - base_provider = 'Blackbox', +blackboxai_pro = Model( + name = 'blackboxai-pro', + base_provider = 'Blackbox AI', best_provider = Blackbox ) ### Databricks ### dbrx_instruct = Model( - name = 'databricks/dbrx-instruct', + name = 'dbrx-instruct', base_provider = 'Databricks', - best_provider = IterListProvider([DeepInfra]) + best_provider = IterListProvider([Airforce, DeepInfra]) ) ### CohereForAI ### command_r_plus = Model( - name = 'CohereForAI/c4ai-command-r-plus', + name = 'command-r-plus', base_provider = 'CohereForAI', - best_provider = IterListProvider([HuggingChat]) + best_provider = 
HuggingChat ) -### Other ### +### iFlytek ### +sparkdesk_v1_1 = Model( + name = 'sparkdesk-v1.1', + base_provider = 'iFlytek', + best_provider = FreeChatgpt +) + + +### Qwen ### +# qwen 1 +qwen_1_5_0_5b = Model( + name = 'qwen-1.5-0.5b', + base_provider = 'Qwen', + best_provider = Cloudflare +) + +qwen_1_5_7b = Model( + name = 'qwen-1.5-7b', + base_provider = 'Qwen', + best_provider = IterListProvider([Cloudflare, Airforce]) +) + +qwen_1_5_14b = Model( + name = 'qwen-1.5-14b', + base_provider = 'Qwen', + best_provider = IterListProvider([FreeChatgpt, Cloudflare, Airforce]) +) + +qwen_1_5_72b = Model( + name = 'qwen-1.5-72b', + base_provider = 'Qwen', + best_provider = Airforce +) + +qwen_1_5_110b = Model( + name = 'qwen-1.5-110b', + base_provider = 'Qwen', + best_provider = Airforce +) + +qwen_1_5_1_8b = Model( + name = 'qwen-1.5-1.8b', + base_provider = 'Qwen', + best_provider = Airforce +) + +# qwen 2 +qwen_2_72b = Model( + name = 'qwen-2-72b', + base_provider = 'Qwen', + best_provider = IterListProvider([DeepInfraChat, HuggingChat, Airforce, HuggingFace]) +) + +qwen = Model( + name = 'qwen', + base_provider = 'Qwen', + best_provider = NexraQwen +) + + +### Zhipu AI ### +glm_3_6b = Model( + name = 'glm-3-6b', + base_provider = 'Zhipu AI', + best_provider = FreeChatgpt +) + +glm_4_9b = Model( + name = 'glm-4-9B', + base_provider = 'Zhipu AI', + best_provider = FreeChatgpt +) + + +### 01-ai ### +yi_1_5_9b = Model( + name = 'yi-1.5-9b', + base_provider = '01-ai', + best_provider = FreeChatgpt +) + +### Upstage ### +solar_1_mini = Model( + name = 'solar-1-mini', + base_provider = 'Upstage', + best_provider = Upstage +) + +solar_10_7b = Model( + name = 'solar-10-7b', + base_provider = 'Upstage', + best_provider = Airforce +) + +solar_pro = Model( + name = 'solar-pro', + base_provider = 'Upstage', + best_provider = Upstage +) + + +### Inflection ### pi = Model( name = 'pi', - base_provider = 'inflection', + base_provider = 'Inflection', best_provider = Pi ) +### 
DeepSeek ### +deepseek = Model( + name = 'deepseek', + base_provider = 'DeepSeek', + best_provider = Airforce +) + +### WizardLM ### +wizardlm_2_7b = Model( + name = 'wizardlm-2-7b', + base_provider = 'WizardLM', + best_provider = DeepInfraChat +) + +wizardlm_2_8x22b = Model( + name = 'wizardlm-2-8x22b', + base_provider = 'WizardLM', + best_provider = IterListProvider([DeepInfraChat, Airforce]) +) + +### Yorickvp ### +llava_13b = Model( + name = 'llava-13b', + base_provider = 'Yorickvp', + best_provider = ReplicateHome +) + + +### OpenBMB ### +minicpm_llama_3_v2_5 = Model( + name = 'minicpm-llama-3-v2.5', + base_provider = 'OpenBMB', + best_provider = DeepInfraChat +) + + +### Lzlv ### +lzlv_70b = Model( + name = 'lzlv-70b', + base_provider = 'Lzlv', + best_provider = DeepInfraChat +) + + +### OpenChat ### +openchat_3_5 = Model( + name = 'openchat-3.5', + base_provider = 'OpenChat', + best_provider = Cloudflare +) + +openchat_3_6_8b = Model( + name = 'openchat-3.6-8b', + base_provider = 'OpenChat', + best_provider = DeepInfraChat +) + + +### Phind ### +phind_codellama_34b_v2 = Model( + name = 'phind-codellama-34b-v2', + base_provider = 'Phind', + best_provider = DeepInfraChat +) + + +### Cognitive Computations ### +dolphin_2_9_1_llama_3_70b = Model( + name = 'dolphin-2.9.1-llama-3-70b', + base_provider = 'Cognitive Computations', + best_provider = DeepInfraChat +) + + +### x.ai ### +grok_2 = Model( + name = 'grok-2', + base_provider = 'x.ai', + best_provider = Liaobots +) + +grok_2_mini = Model( + name = 'grok-2-mini', + base_provider = 'x.ai', + best_provider = Liaobots +) + + +### Perplexity AI ### +sonar_online = Model( + name = 'sonar-online', + base_provider = 'Perplexity AI', + best_provider = IterListProvider([ChatHub, PerplexityLabs]) +) + +sonar_chat = Model( + name = 'sonar-chat', + base_provider = 'Perplexity AI', + best_provider = PerplexityLabs +) + + +### Gryphe ### +mythomax_l2_13b = Model( + name = 'mythomax-l2-13b', + base_provider = 'Gryphe', + 
best_provider = Airforce +) + + +### Pawan ### +cosmosrp = Model( + name = 'cosmosrp', + base_provider = 'Pawan', + best_provider = Airforce +) + + +### TheBloke ### +german_7b = Model( + name = 'german-7b', + base_provider = 'TheBloke', + best_provider = Cloudflare +) + + +### Tinyllama ### +tinyllama_1_1b = Model( + name = 'tinyllama-1.1b', + base_provider = 'Tinyllama', + best_provider = Cloudflare +) + + +### Fblgit ### +cybertron_7b = Model( + name = 'cybertron-7b', + base_provider = 'Fblgit', + best_provider = Cloudflare +) + +### Nvidia ### +nemotron_70b = Model( + name = 'nemotron-70b', + base_provider = 'Nvidia', + best_provider = IterListProvider([HuggingChat, HuggingFace]) +) + + ############# ### Image ### ############# ### Stability AI ### -sdxl = Model( - name = 'stability-ai/sdxl', +sdxl_turbo = Model( + name = 'sdxl-turbo', + base_provider = 'Stability AI', + best_provider = NexraSDTurbo + +) + +sdxl_lora = Model( + name = 'sdxl-lora', base_provider = 'Stability AI', - best_provider = IterListProvider([ReplicateHome, DeepInfraImage]) + best_provider = NexraSDLora ) -### AI Forever ### -kandinsky_2_2 = Model( - name = 'ai-forever/kandinsky-2.2', - base_provider = 'AI Forever', +sdxl = Model( + name = 'sdxl', + base_provider = 'Stability AI', best_provider = IterListProvider([ReplicateHome]) ) +sd_1_5 = Model( + name = 'sd-1.5', + base_provider = 'Stability AI', + best_provider = IterListProvider([NexraSD15, GizAI]) + +) + +sd_3 = Model( + name = 'sd-3', + base_provider = 'Stability AI', + best_provider = ReplicateHome + +) + +sd_3_5 = Model( + name = 'sd-3.5', + base_provider = 'Stability AI', + best_provider = GizAI + +) + +### Playground ### +playground_v2_5 = Model( + name = 'playground-v2.5', + base_provider = 'Playground AI', + best_provider = ReplicateHome + +) + + +### Flux AI ### +flux = Model( + name = 'flux', + base_provider = 'Flux AI', + best_provider = IterListProvider([Airforce, Blackbox]) + +) + +flux_pro = Model( + name = 'flux-pro', 
+ base_provider = 'Flux AI', + best_provider = IterListProvider([NexraFluxPro, AmigoChat]) + +) + +flux_realism = Model( + name = 'flux-realism', + base_provider = 'Flux AI', + best_provider = IterListProvider([Airforce, AmigoChat]) + +) + +flux_anime = Model( + name = 'flux-anime', + base_provider = 'Flux AI', + best_provider = Airforce + +) + +flux_3d = Model( + name = 'flux-3d', + base_provider = 'Flux AI', + best_provider = Airforce + +) + +flux_disney = Model( + name = 'flux-disney', + base_provider = 'Flux AI', + best_provider = Airforce + +) + +flux_pixel = Model( + name = 'flux-pixel', + base_provider = 'Flux AI', + best_provider = Airforce + +) + +flux_4o = Model( + name = 'flux-4o', + base_provider = 'Flux AI', + best_provider = Airforce + +) + +flux_schnell = Model( + name = 'flux-schnell', + base_provider = 'Flux AI', + best_provider = IterListProvider([ReplicateHome, GizAI]) + +) + + +### OpenAI ### +dalle_2 = Model( + name = 'dalle-2', + base_provider = 'OpenAI', + best_provider = NexraDallE2 + +) + +dalle = Model( + name = 'dalle', + base_provider = 'OpenAI', + best_provider = NexraDallE + +) + +### Midjourney ### +midjourney = Model( + name = 'midjourney', + base_provider = 'Midjourney', + best_provider = NexraMidjourney + +) + +### Other ### +emi = Model( + name = 'emi', + base_provider = '', + best_provider = NexraEmi + +) + +any_dark = Model( + name = 'any-dark', + base_provider = '', + best_provider = Airforce + +) + class ModelUtils: """ Utility class for mapping string identifiers to Model instances. 
@@ -385,113 +900,267 @@ class ModelUtils: """ convert: dict[str, Model] = { - ############ - ### Text ### - ############ - - ### OpenAI ### - ### GPT-3.5 / GPT-4 ### - # gpt-3.5 - 'gpt-3.5-turbo' : gpt_35_turbo, - 'gpt-3.5-turbo-0613' : gpt_35_turbo_0613, - 'gpt-3.5-turbo-16k' : gpt_35_turbo_16k, - 'gpt-3.5-turbo-16k-0613' : gpt_35_turbo_16k_0613, - 'gpt-3.5-long': gpt_35_long, +############ +### Text ### +############ + +### OpenAI ### +# gpt-3 +'gpt-3': gpt_3, + +# gpt-3.5 +'gpt-3.5-turbo': gpt_35_turbo, - # gpt-4 - 'gpt-4o' : gpt_4o, - 'gpt-4' : gpt_4, - 'gpt-4-0613' : gpt_4_0613, - 'gpt-4-32k' : gpt_4_32k, - 'gpt-4-32k-0613' : gpt_4_32k_0613, - 'gpt-4-turbo' : gpt_4_turbo, - - - ### Meta ### - "meta-ai": meta, +# gpt-4 +'gpt-4o': gpt_4o, +'gpt-4o-mini': gpt_4o_mini, +'gpt-4': gpt_4, +'gpt-4-turbo': gpt_4_turbo, + +# o1 +'o1': o1, +'o1-mini': o1_mini, + - 'llama-2-70b-chat': llama_2_70b_chat, - 'llama3-8b': llama3_8b_instruct, # alias - 'llama3-70b': llama3_70b_instruct, # alias - 'llama3-8b-instruct' : llama3_8b_instruct, - 'llama3-70b-instruct': llama3_70b_instruct, +### Meta ### +"meta-ai": meta, - 'codellama-34b-instruct': codellama_34b_instruct, - 'codellama-70b-instruct': codellama_70b_instruct, +# llama-2 +'llama-2-7b': llama_2_7b, +'llama-2-13b': llama_2_13b, +# llama-3 +'llama-3-8b': llama_3_8b, +'llama-3-70b': llama_3_70b, + +# llama-3.1 +'llama-3.1-8b': llama_3_1_8b, +'llama-3.1-70b': llama_3_1_70b, +'llama-3.1-405b': llama_3_1_405b, + +# llama-3.2 +'llama-3.2-1b': llama_3_2_1b, +'llama-3.2-3b': llama_3_2_3b, +'llama-3.2-11b': llama_3_2_11b, +'llama-3.2-90b': llama_3_2_90b, + +# llamaguard +'llamaguard-7b': llamaguard_7b, +'llamaguard-2-8b': llamaguard_2_8b, + + +### Mistral ### +'mistral-7b': mistral_7b, +'mixtral-8x7b': mixtral_8x7b, +'mixtral-8x22b': mixtral_8x22b, +'mistral-nemo': mistral_nemo, +'mistral-large': mistral_large, + + +### NousResearch ### +'mixtral-8x7b-dpo': mixtral_8x7b_dpo, +'hermes-3': hermes_3, + +'yi-34b': yi_34b, + + +### 
Microsoft ### +'phi-2': phi_2, +'phi-3-medium-4k': phi_3_medium_4k, +'phi-3.5-mini': phi_3_5_mini, + +### Google ### +# gemini +'gemini': gemini, +'gemini-pro': gemini_pro, +'gemini-flash': gemini_flash, + +# gemma +'gemma-2b': gemma_2b, +'gemma-2b-9b': gemma_2b_9b, +'gemma-2b-27b': gemma_2b_27b, +'gemma-7b': gemma_7b, + +# gemma-2 +'gemma-2': gemma_2, +'gemma-2-27b': gemma_2_27b, + + +### Anthropic ### +'claude-2.1': claude_2_1, + +# claude 3 +'claude-3-opus': claude_3_opus, +'claude-3-sonnet': claude_3_sonnet, +'claude-3-haiku': claude_3_haiku, - ### Mistral (Opensource) ### - 'mixtral-8x7b': mixtral_8x7b, - 'mistral-7b-v02': mistral_7b_v02, +# claude 3.5 +'claude-3.5-sonnet': claude_3_5_sonnet, + + +### Reka AI ### +'reka-core': reka_core, + +### Blackbox AI ### +'blackboxai': blackboxai, +'blackboxai-pro': blackboxai_pro, - ### NousResearch ### - 'Nous-Hermes-2-Mixtral-8x7B-DPO': Nous_Hermes_2_Mixtral_8x7B_DPO, + +### CohereForAI ### +'command-r+': command_r_plus, + + +### Databricks ### +'dbrx-instruct': dbrx_instruct, - ### 01-ai ### - 'Yi-1.5-34B-Chat': Yi_1_5_34B_Chat, - - - ### Microsoft ### - 'Phi-3-mini-4k-instruct': Phi_3_mini_4k_instruct, +### GigaChat ### +'gigachat': gigachat, + + +### iFlytek ### +'sparkdesk-v1.1': sparkdesk_v1_1, + + +### Qwen ### +'qwen': qwen, +'qwen-1.5-0.5b': qwen_1_5_0_5b, +'qwen-1.5-7b': qwen_1_5_7b, +'qwen-1.5-14b': qwen_1_5_14b, +'qwen-1.5-72b': qwen_1_5_72b, +'qwen-1.5-110b': qwen_1_5_110b, +'qwen-1.5-1.8b': qwen_1_5_1_8b, +'qwen-2-72b': qwen_2_72b, + + +### Zhipu AI ### +'glm-3-6b': glm_3_6b, +'glm-4-9b': glm_4_9b, + + +### 01-ai ### +'yi-1.5-9b': yi_1_5_9b, + + +### Upstage ### +'solar-mini': solar_1_mini, +'solar-10-7b': solar_10_7b, +'solar-pro': solar_pro, + +### Inflection ### +'pi': pi, - ### Google ### - # gemini - 'gemini': gemini, - 'gemini-pro': gemini_pro, +### DeepSeek ### +'deepseek': deepseek, + - # gemma - 'gemma-2-9b-it': gemma_2_9b_it, - 'gemma-2-27b-it': gemma_2_27b_it, +### Yorickvp ### +'llava-13b': 
llava_13b, - ### Anthropic ### - 'claude-v2': claude_v2, - 'claude-3-opus': claude_3_opus, - 'claude-3-sonnet': claude_3_sonnet, - 'claude-3-haiku': claude_3_haiku, +### WizardLM ### +'wizardlm-2-7b': wizardlm_2_7b, +'wizardlm-2-8x22b': wizardlm_2_8x22b, + + +### OpenBMB ### +'minicpm-llama-3-v2.5': minicpm_llama_3_v2_5, + + +### Lzlv ### +'lzlv-70b': lzlv_70b, + + +### OpenChat ### +'openchat-3.5': openchat_3_5, +'openchat-3.6-8b': openchat_3_6_8b, - ### Reka AI ### - 'reka': reka_core, +### Phind ### +'phind-codellama-34b-v2': phind_codellama_34b_v2, + + +### Cognitive Computations ### +'dolphin-2.9.1-llama-3-70b': dolphin_2_9_1_llama_3_70b, + + +### x.ai ### +'grok-2': grok_2, +'grok-2-mini': grok_2_mini, + + +### Perplexity AI ### +'sonar-online': sonar_online, +'sonar-chat': sonar_chat, + +### Gryphe ### +'mythomax-l2-13b': mythomax_l2_13b, - ### NVIDIA ### - 'nemotron-4-340b-instruct': nemotron_4_340b_instruct, - - - ### Blackbox ### - 'blackbox': blackbox, - - - ### CohereForAI ### - 'command-r+': command_r_plus, + +### Pawan ### +'cosmosrp': cosmosrp, - ### Databricks ### - 'dbrx-instruct': dbrx_instruct, +### TheBloke ### +'german-7b': german_7b, + + +### Tinyllama ### +'tinyllama-1.1b': tinyllama_1_1b, - ### GigaChat ### - 'gigachat': gigachat, +### Fblgit ### +'cybertron-7b': cybertron_7b, - # Other - 'pi': pi, +### Nvidia ### +'nemotron-70b': nemotron_70b, - ############# - ### Image ### - ############# - - ### Stability AI ### - 'sdxl': sdxl, +############# +### Image ### +############# + +### Stability AI ### +'sdxl': sdxl, +'sdxl-lora': sdxl_lora, +'sdxl-turbo': sdxl_turbo, +'sd-1.5': sd_1_5, +'sd-3': sd_3, +'sd-3.5': sd_3_5, + - ### AI Forever ### - 'kandinsky-2.2': kandinsky_2_2, +### Playground ### +'playground-v2.5': playground_v2_5, + + +### Flux AI ### +'flux': flux, +'flux-pro': flux_pro, +'flux-realism': flux_realism, +'flux-anime': flux_anime, +'flux-3d': flux_3d, +'flux-disney': flux_disney, +'flux-pixel': flux_pixel, +'flux-4o': flux_4o, 
+'flux-schnell': flux_schnell, + + +### OpenAI ### +'dalle': dalle, +'dalle-2': dalle_2, + +### Midjourney ### +'midjourney': midjourney, + + +### Other ### +'emi': emi, +'any-dark': any_dark, } _all_models = list(ModelUtils.convert.keys()) |