author    | kqlio67 <kqlio67@users.noreply.github.com> | 2024-11-09 22:44:52 +0100
committer | kqlio67 <kqlio67@users.noreply.github.com> | 2024-11-09 22:44:52 +0100
commit    | 3a15957d221a5e532ffe888e79a1fc2245c0fde1 (patch)
tree      | 735297d8f8e5e5713aed4baf53c8ec694e3cb969
parent    | Update (g4f/gui/client/index.html) (diff)
-rw-r--r-- | docs/providers-and-models.md          |   4
-rw-r--r-- | g4f/Provider/airforce/AirforceChat.py | 261
-rw-r--r-- | g4f/models.py                         | 244
3 files changed, 71 insertions(+), 438 deletions(-)
diff --git a/docs/providers-and-models.md b/docs/providers-and-models.md
index 0a253475..dc29eb23 100644
--- a/docs/providers-and-models.md
+++ b/docs/providers-and-models.md
@@ -19,8 +19,8 @@ This document provides an overview of various AI providers and models, including
 |----------|-------------|--------------|---------------|--------|--------|------|
 |[ai4chat.co](https://www.ai4chat.co)|`g4f.Provider.Ai4Chat`|`gpt-4`|❌|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
 |[aichatfree.info](https://aichatfree.info)|`g4f.Provider.AIChatFree`|`gemini-pro`|❌|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
-|[api.airforce](https://api.airforce)|`g4f.Provider.AiMathGPT`|`llama-3.1-70b`|❌|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
-|[api.airforce](https://api.airforce)|`g4f.Provider.Airforce`|`claude-3-haiku, claude-3-sonnet, claude-3-opus, gpt-4, gpt-4-turbo, gpt-4o-mini, gpt-3.5-turbo, llama-3-70b, llama-3-8b, llama-2-13b, llama-3.1-405b, llama-3.1-70b, llama-3.1-8b, llamaguard-2-8b, llamaguard-7b, llama-3.2-90b, llamaguard-3-8b, llama-3.2-11b, llamaguard-3-11b, llama-3.2-3b, llama-3.2-1b, llama-2-7b, mixtral-8x7b, mixtral-8x22b, mythomax-13b, openchat-3.5, qwen-2-72b, qwen-2-5-7b, qwen-2-5-72b, gemma-2b, gemma-2-9b, gemma-2b-27b, gemini-flash, gemini-pro, dbrx-instruct, deepseek-coder, hermes-2-dpo, hermes-2, openhermes-2.5, wizardlm-2-8x22b, phi-2, solar-10-7b, cosmosrp, lfm-40b, german-7b, zephyr-7b`|`flux, flux-realism', flux-anime, flux-3d, flux-disney, flux-pixel, flux-4o, any-dark, sdxl`|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
+|[aimathgpt.forit.ai](https://aimathgpt.forit.ai)|`g4f.Provider.AiMathGPT`|`llama-3.1-70b`|❌|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
+|[api.airforce](https://api.airforce)|`g4f.Provider.Airforce`|`gpt-4o, gpt-4o-mini, gpt-4-turbo, llama-2-7b, llama-3.1-8b, llama-3.1-70b, hermes-2-pro, hermes-2-dpo, phi-2, deepseek-coder, openchat-3.5, openhermes-2.5, cosmosrp, lfm-40b, german-7b, zephyr-7b, neural-7b`|`flux, flux-realism', flux-anime, flux-3d, flux-disney, flux-pixel, flux-4o, any-dark, sdxl`|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
 |[aiuncensored.info](https://www.aiuncensored.info)|`g4f.Provider.AIUncensored`|✔|✔|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
 |[allyfy.chat](https://allyfy.chat/)|`g4f.Provider.Allyfy`|`gpt-3.5-turbo`|❌|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
 |[bing.com](https://bing.com/chat)|`g4f.Provider.Bing`|`gpt-4`|✔|`gpt-4-vision`|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌+✔|
diff --git a/g4f/Provider/airforce/AirforceChat.py b/g4f/Provider/airforce/AirforceChat.py
index 8affbe5c..63a0460f 100644
--- a/g4f/Provider/airforce/AirforceChat.py
+++ b/g4f/Provider/airforce/AirforceChat.py
@@ -3,6 +3,7 @@ import re
 from aiohttp import ClientSession
 import json
 from typing import List
+import requests
 
 from ...typing import AsyncResult, Messages
 from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
@@ -51,258 +52,50 @@ class AirforceChat(AsyncGeneratorProvider, ProviderModelMixin):
     supports_system_message = True
     supports_message_history = True
 
-    default_model = 'llama-3-70b-chat'
-    text_models = [
-        # anthropic
-        'claude-3-haiku-20240307',
-        'claude-3-sonnet-20240229',
-        'claude-3-5-sonnet-20240620',
-        'claude-3-5-sonnet-20241022',
-        'claude-3-opus-20240229',
-
-        # openai
-        'chatgpt-4o-latest',
-        'gpt-4',
-        'gpt-4-turbo',
-        'gpt-4o-2024-05-13',
-        'gpt-4o-mini-2024-07-18',
-        'gpt-4o-mini',
-        'gpt-4o-2024-08-06',
-        'gpt-3.5-turbo',
-        'gpt-3.5-turbo-0125',
-        'gpt-3.5-turbo-1106',
-        'gpt-4o',
-        'gpt-4-turbo-2024-04-09',
-        'gpt-4-0125-preview',
-        'gpt-4-1106-preview',
-
-        # meta-llama
-        default_model,
-        'llama-3-70b-chat-turbo',
-        'llama-3-8b-chat',
-        'llama-3-8b-chat-turbo',
-        'llama-3-70b-chat-lite',
-        'llama-3-8b-chat-lite',
-        'llama-2-13b-chat',
-        'llama-3.1-405b-turbo',
-        'llama-3.1-70b-turbo',
-        'llama-3.1-8b-turbo',
-        'LlamaGuard-2-8b',
-        'llamaguard-7b',
-        'Llama-Vision-Free',
-        'Llama-Guard-7b',
-        'Llama-3.2-90B-Vision-Instruct-Turbo',
-        'Meta-Llama-Guard-3-8B',
-        'Llama-3.2-11B-Vision-Instruct-Turbo',
-        'Llama-Guard-3-11B-Vision-Turbo',
-        'Llama-3.2-3B-Instruct-Turbo',
-        'Llama-3.2-1B-Instruct-Turbo',
-        'llama-2-7b-chat-int8',
-        'llama-2-7b-chat-fp16',
-        'Llama 3.1 405B Instruct',
-        'Llama 3.1 70B Instruct',
-        'Llama 3.1 8B Instruct',
-
-        # mistral-ai
-        'Mixtral-8x7B-Instruct-v0.1',
-        'Mixtral-8x22B-Instruct-v0.1',
-        'Mistral-7B-Instruct-v0.1',
-        'Mistral-7B-Instruct-v0.2',
-        'Mistral-7B-Instruct-v0.3',
-
-        # Gryphe
-        'MythoMax-L2-13b-Lite',
-        'MythoMax-L2-13b',
-
-        # openchat
-        'openchat-3.5-0106',
-
-        # qwen
-        #'Qwen1.5-72B-Chat', # Empty answer
-        #'Qwen1.5-110B-Chat', # Empty answer
-        'Qwen2-72B-Instruct',
-        'Qwen2.5-7B-Instruct-Turbo',
-        'Qwen2.5-72B-Instruct-Turbo',
-
-        # google
-        'gemma-2b-it',
-        'gemma-2-9b-it',
-        'gemma-2-27b-it',
-
-        # gemini
-        'gemini-1.5-flash',
-        'gemini-1.5-pro',
-
-        # databricks
-        'dbrx-instruct',
-
-        # deepseek-ai
-        'deepseek-coder-6.7b-base',
-        'deepseek-coder-6.7b-instruct',
-        'deepseek-math-7b-instruct',
-
-        # NousResearch
-        'deepseek-math-7b-instruct',
-        'Nous-Hermes-2-Mixtral-8x7B-DPO',
-        'hermes-2-pro-mistral-7b',
-
-        # teknium
-        'openhermes-2.5-mistral-7b',
-
-        # microsoft
-        'WizardLM-2-8x22B',
-        'phi-2',
-
-        # upstage
-        'SOLAR-10.7B-Instruct-v1.0',
-
-        # pawan
-        'cosmosrp',
-
-        # liquid
-        'lfm-40b-moe',
-
-        # DiscoResearch
-        'discolm-german-7b-v1',
-
-        # tiiuae
-        'falcon-7b-instruct',
-
-        # defog
-        'sqlcoder-7b-2',
-
-        # tinyllama
-        'tinyllama-1.1b-chat',
-
-        # HuggingFaceH4
-        'zephyr-7b-beta',
-    ]
+    default_model = 'llama-3.1-70b-chat'
+    response = requests.get('https://api.airforce/models')
+    data = response.json()
+
+    text_models = [model['id'] for model in data['data']]
 
     models = [*text_models]
 
     model_aliases = {
-        # anthropic
-        "claude-3-haiku": "claude-3-haiku-20240307",
-        "claude-3-sonnet": "claude-3-sonnet-20240229",
-        "claude-3.5-sonnet": "claude-3-5-sonnet-20240620",
-        "claude-3.5-sonnet": "claude-3-5-sonnet-20241022",
-        "claude-3-opus": "claude-3-opus-20240229",
-
-        # openai
-        "gpt-4o": "chatgpt-4o-latest",
-        #"gpt-4": "gpt-4",
-        #"gpt-4-turbo": "gpt-4-turbo",
-        "gpt-4o": "gpt-4o-2024-05-13",
-        "gpt-4o-mini": "gpt-4o-mini-2024-07-18",
-        #"gpt-4o-mini": "gpt-4o-mini",
-        "gpt-4o": "gpt-4o-2024-08-06",
-        "gpt-3.5-turbo": "gpt-3.5-turbo",
-        "gpt-3.5-turbo": "gpt-3.5-turbo-0125",
-        "gpt-3.5-turbo": "gpt-3.5-turbo-1106",
-        #"gpt-4o": "gpt-4o",
-        "gpt-4-turbo": "gpt-4-turbo-2024-04-09",
-        "gpt-4": "gpt-4-0125-preview",
-        "gpt-4": "gpt-4-1106-preview",
-
-        # meta-llama
-        "llama-3-70b": "llama-3-70b-chat",
-        "llama-3-8b": "llama-3-8b-chat",
-        "llama-3-8b": "llama-3-8b-chat-turbo",
-        "llama-3-70b": "llama-3-70b-chat-lite",
-        "llama-3-8b": "llama-3-8b-chat-lite",
-        "llama-2-13b": "llama-2-13b-chat",
-        "llama-3.1-405b": "llama-3.1-405b-turbo",
-        "llama-3.1-70b": "llama-3.1-70b-turbo",
-        "llama-3.1-8b": "llama-3.1-8b-turbo",
-        "llamaguard-2-8b": "LlamaGuard-2-8b",
-        "llamaguard-7b": "llamaguard-7b",
-        #"llama_vision_free": "Llama-Vision-Free", # Unknown
-        "llamaguard-7b": "Llama-Guard-7b",
-        "llama-3.2-90b": "Llama-3.2-90B-Vision-Instruct-Turbo",
-        "llamaguard-3-8b": "Meta-Llama-Guard-3-8B",
-        "llama-3.2-11b": "Llama-3.2-11B-Vision-Instruct-Turbo",
-        "llamaguard-3-11b": "Llama-Guard-3-11B-Vision-Turbo",
-        "llama-3.2-3b": "Llama-3.2-3B-Instruct-Turbo",
-        "llama-3.2-1b": "Llama-3.2-1B-Instruct-Turbo",
-        "llama-2-7b": "llama-2-7b-chat-int8",
-        "llama-2-7b": "llama-2-7b-chat-fp16",
-        "llama-3.1-405b": "Llama 3.1 405B Instruct",
-        "llama-3.1-70b": "Llama 3.1 70B Instruct",
-        "llama-3.1-8b": "Llama 3.1 8B Instruct",
-
-        # mistral-ai
-        "mixtral-8x7b": "Mixtral-8x7B-Instruct-v0.1",
-        "mixtral-8x22b": "Mixtral-8x22B-Instruct-v0.1",
-        "mixtral-8x7b": "Mistral-7B-Instruct-v0.1",
-        "mixtral-8x7b": "Mistral-7B-Instruct-v0.2",
-        "mixtral-8x7b": "Mistral-7B-Instruct-v0.3",
-
-        # Gryphe
-        "mythomax-13b": "MythoMax-L2-13b-Lite",
-        "mythomax-13b": "MythoMax-L2-13b",
-
         # openchat
         "openchat-3.5": "openchat-3.5-0106",
-
-        # qwen
-        #"qwen-1.5-72b": "Qwen1.5-72B-Chat", # Empty answer
-        #"qwen-1.5-110b": "Qwen1.5-110B-Chat", # Empty answer
-        "qwen-2-72b": "Qwen2-72B-Instruct",
-        "qwen-2-5-7b": "Qwen2.5-7B-Instruct-Turbo",
-        "qwen-2-5-72b": "Qwen2.5-72B-Instruct-Turbo",
-
-        # google
-        "gemma-2b": "gemma-2b-it",
-        "gemma-2-9b": "gemma-2-9b-it",
-        "gemma-2b-27b": "gemma-2-27b-it",
-
-        # gemini
-        "gemini-flash": "gemini-1.5-flash",
-        "gemini-pro": "gemini-1.5-pro",
-
-        # databricks
-        "dbrx-instruct": "dbrx-instruct",
-
+
         # deepseek-ai
-        #"deepseek-coder": "deepseek-coder-6.7b-base",
         "deepseek-coder": "deepseek-coder-6.7b-instruct",
-        #"deepseek-math": "deepseek-math-7b-instruct",
-
+
         # NousResearch
-        #"deepseek-math": "deepseek-math-7b-instruct",
         "hermes-2-dpo": "Nous-Hermes-2-Mixtral-8x7B-DPO",
-        "hermes-2": "hermes-2-pro-mistral-7b",
-
+        "hermes-2-pro": "hermes-2-pro-mistral-7b",
+
         # teknium
         "openhermes-2.5": "openhermes-2.5-mistral-7b",
-
-        # microsoft
-        "wizardlm-2-8x22b": "WizardLM-2-8x22B",
-        #"phi-2": "phi-2",
-
-        # upstage
-        "solar-10-7b": "SOLAR-10.7B-Instruct-v1.0",
-
-        # pawan
-        #"cosmosrp": "cosmosrp",
-
+
         # liquid
         "lfm-40b": "lfm-40b-moe",
-
+
         # DiscoResearch
         "german-7b": "discolm-german-7b-v1",
-
-        # tiiuae
-        #"falcon-7b": "falcon-7b-instruct",
-
-        # defog
-        #"sqlcoder-7b": "sqlcoder-7b-2",
-
-        # tinyllama
-        #"tinyllama-1b": "tinyllama-1.1b-chat",
-
+
+        # meta-llama
+        "llama-2-7b": "llama-2-7b-chat-int8",
+        "llama-2-7b": "llama-2-7b-chat-fp16",
+        "llama-3.1-70b": "llama-3.1-70b-chat",
+        "llama-3.1-8b": "llama-3.1-8b-chat",
+        "llama-3.1-70b": "llama-3.1-70b-turbo",
+        "llama-3.1-8b": "llama-3.1-8b-turbo",
+
+        # inferless
+        "neural-7b": "neural-chat-7b-v3-1",
+
         # HuggingFaceH4
         "zephyr-7b": "zephyr-7b-beta",
+
+        # llmplayground.net
+        #"any-uncensored": "any-uncensored",
     }
 
     @classmethod
diff --git a/g4f/models.py b/g4f/models.py
index 8788ab77..ec0ebd32 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -98,32 +98,32 @@ default = Model(
 gpt_35_turbo = Model(
     name = 'gpt-3.5-turbo',
     base_provider = 'OpenAI',
-    best_provider = IterListProvider([DarkAI, Airforce, Liaobots, Allyfy])
+    best_provider = IterListProvider([DarkAI, Liaobots, Allyfy])
 )
 
 # gpt-4
 gpt_4o = Model(
     name = 'gpt-4o',
     base_provider = 'OpenAI',
-    best_provider = IterListProvider([Blackbox, ChatGptEs, DarkAI, Airforce, ChatGpt, Liaobots, OpenaiChat])
+    best_provider = IterListProvider([Blackbox, ChatGptEs, DarkAI, ChatGpt, Airforce, Liaobots, OpenaiChat])
 )
 
 gpt_4o_mini = Model(
     name = 'gpt-4o-mini',
     base_provider = 'OpenAI',
-    best_provider = IterListProvider([DDG, ChatGptEs, FreeNetfly, Pizzagpt, MagickPen, RubiksAI, Liaobots, ChatGpt, Airforce, OpenaiChat])
+    best_provider = IterListProvider([DDG, ChatGptEs, FreeNetfly, Pizzagpt, MagickPen, ChatGpt, Airforce, RubiksAI, Liaobots, OpenaiChat])
 )
 
 gpt_4_turbo = Model(
     name = 'gpt-4-turbo',
     base_provider = 'OpenAI',
-    best_provider = IterListProvider([Liaobots, Airforce, ChatGpt, Bing])
+    best_provider = IterListProvider([ChatGpt, Airforce, Liaobots, Bing])
 )
 
 gpt_4 = Model(
     name = 'gpt-4',
     base_provider = 'OpenAI',
-    best_provider = IterListProvider([Chatgpt4Online, Ai4Chat, ChatGpt, Airforce, Bing, OpenaiChat, gpt_4_turbo.best_provider, gpt_4o.best_provider, gpt_4o_mini.best_provider])
+    best_provider = IterListProvider([Chatgpt4Online, ChatGpt, Bing, OpenaiChat, gpt_4_turbo.best_provider, gpt_4o.best_provider, gpt_4o_mini.best_provider])
 )
 
 # o1
@@ -161,24 +161,17 @@ llama_2_7b = Model(
     base_provider = "Meta Llama",
     best_provider = IterListProvider([Cloudflare, Airforce])
 )
-
-llama_2_13b = Model(
-    name = "llama-2-13b",
-    base_provider = "Meta Llama",
-    best_provider = Airforce
-)
-
 # llama 3
 llama_3_8b = Model(
     name = "llama-3-8b",
     base_provider = "Meta Llama",
-    best_provider = IterListProvider([Cloudflare, Airforce])
+    best_provider = IterListProvider([Cloudflare])
 )
 
 llama_3_70b = Model(
     name = "llama-3-70b",
     base_provider = "Meta Llama",
-    best_provider = IterListProvider([ReplicateHome, Airforce])
+    best_provider = IterListProvider([ReplicateHome])
 )
 
 # llama 3.1
@@ -191,84 +184,39 @@ llama_3_1_8b = Model(
 llama_3_1_70b = Model(
     name = "llama-3.1-70b",
     base_provider = "Meta Llama",
-    best_provider = IterListProvider([DDG, DeepInfraChat, Blackbox, TeachAnything, DarkAI, AiMathGPT, RubiksAI, Airforce, HuggingChat, HuggingFace, PerplexityLabs])
+    best_provider = IterListProvider([DDG, DeepInfraChat, Blackbox, TeachAnything, DarkAI, AiMathGPT, Airforce, RubiksAI, HuggingChat, HuggingFace, PerplexityLabs])
 )
 
 llama_3_1_405b = Model(
     name = "llama-3.1-405b",
     base_provider = "Meta Llama",
-    best_provider = IterListProvider([Blackbox, DarkAI, Airforce])
+    best_provider = IterListProvider([Blackbox, DarkAI])
 )
 
 # llama 3.2
 llama_3_2_1b = Model(
     name = "llama-3.2-1b",
     base_provider = "Meta Llama",
-    best_provider = IterListProvider([Cloudflare, Airforce])
-)
-
-llama_3_2_3b = Model(
-    name = "llama-3.2-3b",
-    base_provider = "Meta Llama",
-    best_provider = IterListProvider([Airforce])
+    best_provider = IterListProvider([Cloudflare])
 )
 
 llama_3_2_11b = Model(
     name = "llama-3.2-11b",
     base_provider = "Meta Llama",
-    best_provider = IterListProvider([HuggingChat, Airforce, HuggingFace])
-)
-
-llama_3_2_90b = Model(
-    name = "llama-3.2-90b",
-    base_provider = "Meta Llama",
-    best_provider = IterListProvider([Airforce])
-)
-
-
-# llamaguard
-llamaguard_7b = Model(
-    name = "llamaguard-7b",
-    base_provider = "Meta Llama",
-    best_provider = Airforce
-)
-
-llamaguard_2_8b = Model(
-    name = "llamaguard-2-8b",
-    base_provider = "Meta Llama",
-    best_provider = Airforce
-)
-
-llamaguard_3_8b = Model(
-    name = "llamaguard-3-8b",
-    base_provider = "Meta Llama",
-    best_provider = Airforce
-)
-
-llamaguard_3_11b = Model(
-    name = "llamaguard-3-11b",
-    base_provider = "Meta Llama",
-    best_provider = Airforce
+    best_provider = IterListProvider([HuggingChat, HuggingFace])
 )
 
-
 ### Mistral ###
 mistral_7b = Model(
     name = "mistral-7b",
     base_provider = "Mistral",
-    best_provider = IterListProvider([Free2GPT, Airforce])
+    best_provider = IterListProvider([Free2GPT])
 )
 
 mixtral_8x7b = Model(
     name = "mixtral-8x7b",
     base_provider = "Mistral",
-    best_provider = IterListProvider([DDG, ReplicateHome, Airforce])
-)
-
-mixtral_8x22b = Model(
-    name = "mixtral-8x22b",
-    base_provider = "Mistral",
-    best_provider = IterListProvider([Airforce])
+    best_provider = IterListProvider([DDG, ReplicateHome])
 )
 
 mistral_nemo = Model(
@@ -279,8 +227,8 @@ mistral_nemo = Model(
 
 
 ### NousResearch ###
-hermes_2 = Model(
-    name = "hermes-2",
+hermes_2_pro = Model(
+    name = "hermes-2-pro",
     base_provider = "NousResearch",
     best_provider = Airforce
 )
@@ -305,12 +253,6 @@ phi_2 = Model(
     best_provider = IterListProvider([Cloudflare, Airforce])
 )
 
-phi_3_medium_4k = Model(
-    name = "phi-3-medium-4k",
-    base_provider = "Microsoft",
-    best_provider = None
-)
-
 phi_3_5_mini = Model(
     name = "phi-3.5-mini",
     base_provider = "Microsoft",
@@ -322,13 +264,13 @@ phi_3_5_mini = Model(
 gemini_pro = Model(
     name = 'gemini-pro',
     base_provider = 'Google DeepMind',
-    best_provider = IterListProvider([GeminiPro, Blackbox, AIChatFree, FreeGpt, Airforce, Liaobots])
+    best_provider = IterListProvider([GeminiPro, Blackbox, AIChatFree, FreeGpt, Liaobots])
 )
 
 gemini_flash = Model(
     name = 'gemini-flash',
     base_provider = 'Google DeepMind',
-    best_provider = IterListProvider([Blackbox, GizAI, Airforce, Liaobots])
+    best_provider = IterListProvider([Blackbox, GizAI, Liaobots])
 )
 
 gemini = Model(
@@ -341,13 +283,7 @@ gemini = Model(
 gemma_2b = Model(
     name = 'gemma-2b',
     base_provider = 'Google',
-    best_provider = IterListProvider([ReplicateHome, Airforce])
-)
-
-gemma_2b_27b = Model(
-    name = 'gemma-2b-27b',
-    base_provider = 'Google',
-    best_provider = IterListProvider([Airforce])
+    best_provider = IterListProvider([ReplicateHome])
 )
 
 gemma_7b = Model(
@@ -356,13 +292,6 @@ gemma_7b = Model(
     best_provider = Cloudflare
 )
 
-# gemma 2
-gemma_2_9b = Model(
-    name = 'gemma-2-9b',
-    base_provider = 'Google',
-    best_provider = Airforce
-)
-
 
 ### Anthropic ###
 claude_2_1 = Model(
@@ -419,15 +348,6 @@ blackboxai_pro = Model(
     best_provider = Blackbox
 )
 
-
-### Databricks ###
-dbrx_instruct = Model(
-    name = 'dbrx-instruct',
-    base_provider = 'Databricks',
-    best_provider = IterListProvider([Airforce])
-)
-
-
 ### CohereForAI ###
 command_r_plus = Model(
     name = 'command-r-plus',
@@ -466,28 +386,10 @@ qwen_1_5_14b = Model(
 qwen_2_72b = Model(
     name = 'qwen-2-72b',
     base_provider = 'Qwen',
-    best_provider = IterListProvider([DeepInfraChat, HuggingChat, Airforce, HuggingFace])
-)
-
-qwen_2_5_7b = Model(
-    name = 'qwen-2-5-7b',
-    base_provider = 'Qwen',
-    best_provider = Airforce
-)
-
-qwen_2_5_72b = Model(
-    name = 'qwen-2-5-72b',
-    base_provider = 'Qwen',
-    best_provider = Airforce
+    best_provider = IterListProvider([DeepInfraChat, HuggingChat, HuggingFace])
 )
 
 ### Upstage ###
-solar_10_7b = Model(
-    name = 'solar-10-7b',
-    base_provider = 'Upstage',
-    best_provider = Airforce
-)
-
 solar_mini = Model(
     name = 'solar-mini',
     base_provider = 'Upstage',
@@ -519,7 +421,7 @@ deepseek_coder = Model(
 wizardlm_2_8x22b = Model(
     name = 'wizardlm-2-8x22b',
     base_provider = 'WizardLM',
-    best_provider = IterListProvider([DeepInfraChat, Airforce])
+    best_provider = IterListProvider([DeepInfraChat])
 )
 
 ### Yorickvp ###
@@ -529,44 +431,11 @@ llava_13b = Model(
     best_provider = ReplicateHome
 )
 
-
-### OpenBMB ###
-minicpm_llama_3_v2_5 = Model(
-    name = 'minicpm-llama-3-v2.5',
-    base_provider = 'OpenBMB',
-    best_provider = None
-)
-
-
-### Lzlv ###
-lzlv_70b = Model(
-    name = 'lzlv-70b',
-    base_provider = 'Lzlv',
-    best_provider = None
-)
-
-
 ### OpenChat ###
-openchat_3_6_8b = Model(
-    name = 'openchat-3.6-8b',
+openchat_3_5 = Model(
+    name = 'openchat-3.5',
     base_provider = 'OpenChat',
-    best_provider = None
-)
-
-
-### Phind ###
-phind_codellama_34b_v2 = Model(
-    name = 'phind-codellama-34b-v2',
-    base_provider = 'Phind',
-    best_provider = None
-)
-
-
-### Cognitive Computations ###
-dolphin_2_9_1_llama_3_70b = Model(
-    name = 'dolphin-2.9.1-llama-3-70b',
-    base_provider = 'Cognitive Computations',
-    best_provider = None
+    best_provider = Airforce
 )
 
 
@@ -650,6 +519,13 @@ zephyr_7b = Model(
     best_provider = Airforce
 )
 
+### Inferless ###
+neural_7b = Model(
+    name = 'neural-7b',
+    base_provider = 'inferless',
+    best_provider = Airforce
+)
+
 
 #############
@@ -660,7 +536,7 @@ zephyr_7b = Model(
 sdxl = Model(
     name = 'sdxl',
     base_provider = 'Stability AI',
-    best_provider = IterListProvider([ReplicateHome, Airforce])
+    best_provider = IterListProvider([ReplicateHome])
 )
 
 
@@ -740,7 +616,7 @@ flux_4o = Model(
 flux_schnell = Model(
     name = 'flux-schnell',
     base_provider = 'Flux AI',
-    best_provider = IterListProvider([ReplicateHome])
+    best_provider = ReplicateHome
 )
 
 
@@ -786,7 +662,6 @@ class ModelUtils:
 
 # llama-2
 'llama-2-7b': llama_2_7b,
-'llama-2-13b': llama_2_13b,
 
 # llama-3
 'llama-3-8b': llama_3_8b,
@@ -799,33 +674,23 @@ class ModelUtils:
 
 # llama-3.2
 'llama-3.2-1b': llama_3_2_1b,
-'llama-3.2-3b': llama_3_2_3b,
 'llama-3.2-11b': llama_3_2_11b,
-'llama-3.2-90b': llama_3_2_90b,
-
-# llamaguard
-'llamaguard-7b': llamaguard_7b,
-'llamaguard-2-8b': llamaguard_2_8b,
-'llamaguard-3-8b': llamaguard_3_8b,
-'llamaguard-3-11b': llamaguard_3_11b,
-
+
 ### Mistral ###
 'mistral-7b': mistral_7b,
 'mixtral-8x7b': mixtral_8x7b,
-'mixtral-8x22b': mixtral_8x22b,
 'mistral-nemo': mistral_nemo,
 
 ### NousResearch ###
-'hermes-2': hermes_2,
+'hermes-2-pro': hermes_2_pro,
 'hermes-2-dpo': hermes_2_dpo,
 'hermes-3': hermes_3,
 
 ### Microsoft ###
 'phi-2': phi_2,
-'phi_3_medium-4k': phi_3_medium_4k,
 'phi-3.5-mini': phi_3_5_mini,
@@ -837,12 +702,8 @@ class ModelUtils:
 
 # gemma
 'gemma-2b': gemma_2b,
-'gemma-2b-27b': gemma_2b_27b,
 'gemma-7b': gemma_7b,
 
-# gemma-2
-'gemma-2-9b': gemma_2_9b,
-
 
 ### Anthropic ###
 'claude-2.1': claude_2_1,
@@ -868,10 +729,6 @@ class ModelUtils:
 
 ### CohereForAI ###
 'command-r+': command_r_plus,
-
-### Databricks ###
-'dbrx-instruct': dbrx_instruct,
-
 
 ### GigaChat ###
 'gigachat': gigachat,
@@ -887,14 +744,9 @@ class ModelUtils:
 
 # qwen 2
 'qwen-2-72b': qwen_2_72b,
-
-# qwen 2-5
-'qwen-2-5-7b': qwen_2_5_7b,
-'qwen-2-5-72b': qwen_2_5_72b,
-
+
 ### Upstage ###
-'solar-10-7b': solar_10_7b,
 'solar-mini': solar_mini,
 'solar-pro': solar_pro,
@@ -913,27 +765,11 @@ class ModelUtils:
 
 ### WizardLM ###
 'wizardlm-2-8x22b': wizardlm_2_8x22b,
-
-
-### OpenBMB ###
-'minicpm-llama-3-v2.5': minicpm_llama_3_v2_5,
-
-
-### Lzlv ###
-'lzlv-70b': lzlv_70b,
-
+
 ### OpenChat ###
-'openchat-3.6-8b': openchat_3_6_8b,
-
-
-### Phind ###
-'phind-codellama-34b-v2': phind_codellama_34b_v2,
-
-
-### Cognitive Computations ###
-'dolphin-2.9.1-llama-3-70b': dolphin_2_9_1_llama_3_70b,
-
+'openchat-3.5': openchat_3_5,
+
 ### x.ai ###
 'grok-2': grok_2,
@@ -972,6 +808,10 @@ class ModelUtils:
 
 ### HuggingFaceH4 ###
 'zephyr-7b': zephyr_7b,
+
+
+### Inferless ###
+'neural-7b': neural_7b,
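The AirforceChat change above replaces the hardcoded text_models list with a list fetched at runtime from https://api.airforce/models and parsed as [model['id'] for model in data['data']]. Below is a minimal standalone sketch of that pattern; the fetch_text_models helper, the timeout, and the error handling are illustrative additions that are not part of the commit, which issues the requests.get directly in the class body and therefore at import time.

import requests

# Endpoint used by the new AirforceChat code in this commit.
AIRFORCE_MODELS_URL = "https://api.airforce/models"

def fetch_text_models(url: str = AIRFORCE_MODELS_URL, timeout: float = 10.0) -> list[str]:
    """Return the provider's model ids at runtime instead of hardcoding them.

    Assumes an OpenAI-style payload, {"data": [{"id": "llama-3.1-70b-chat"}, ...]},
    which is the shape the class-level parsing in AirforceChat.py relies on.
    """
    response = requests.get(url, timeout=timeout)
    response.raise_for_status()
    data = response.json()
    return [model["id"] for model in data["data"]]

if __name__ == "__main__":
    try:
        print(fetch_text_models())
    except requests.RequestException as exc:
        # The commit performs the request unguarded in the class body, so a
        # network failure there would surface as an error at import time.
        print(f"Could not fetch the model list: {exc}")

Fetching the list lazily (or caching the result), as sketched here, keeps the provider's models in sync with the API without making module import depend on a live network call.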