From a6c64b7d4ed04a6265e3dff26aa4eee2daade4dd Mon Sep 17 00:00:00 2001 From: kqlio67 <166700875+kqlio67@users.noreply.github.com> Date: Sun, 26 Jan 2025 16:01:08 +0000 Subject: Updates for Blackbox and DeepInfraChat providers new models (#2608) * Fixed the model name in the Blackbox.py provider in the vision_models list, the DeepSeek-R1 model * Update g4f/Provider/Blackbox.py * Update docs/providers-and-models.md * Update g4f/models.py g4f/Provider/DeepInfraChat.py docs/providers-and-models.md --------- Co-authored-by: kqlio67 <> --- g4f/Provider/Blackbox.py | 11 +++++------ g4f/Provider/DeepInfraChat.py | 2 ++ 2 files changed, 7 insertions(+), 6 deletions(-) (limited to 'g4f/Provider') diff --git a/g4f/Provider/Blackbox.py b/g4f/Provider/Blackbox.py index d6b4ad6c..7e3cf40f 100644 --- a/g4f/Provider/Blackbox.py +++ b/g4f/Provider/Blackbox.py @@ -38,20 +38,19 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin): default_vision_model = default_model default_image_model = 'ImageGeneration' image_models = [default_image_model, "ImageGeneration2"] - vision_models = [default_vision_model, 'gpt-4o', 'gemini-pro', 'gemini-1.5-flash', 'llama-3.1-8b', 'llama-3.1-70b', 'llama-3.1-405b', 'deepseek-r1'] + vision_models = [default_vision_model, 'gpt-4o', 'gemini-pro', 'gemini-1.5-flash', 'llama-3.1-8b', 'llama-3.1-70b', 'llama-3.1-405b'] - userSelectedModel = ['gpt-4o', 'gemini-pro', 'claude-sonnet-3.5', 'blackboxai-pro'] + userSelectedModel = ['gpt-4o', 'gemini-pro', 'claude-sonnet-3.5', 'deepseek-r1', 'deepseek-v3', 'blackboxai-pro'] agentMode = { 'ImageGeneration': {'mode': True, 'id': "ImageGenerationLV45LJp", 'name': "Image Generation"}, # 'Meta-Llama-3.3-70B-Instruct-Turbo': {'mode': True, 'id': "meta-llama/Llama-3.3-70B-Instruct-Turbo", 'name': "Meta-Llama-3.3-70B-Instruct-Turbo"}, - 'Mistral-(7B)-Instruct-v0.': {'mode': True, 'id': "mistralai/Mistral-7B-Instruct-v0.2", 'name': "Mistral-(7B)-Instruct-v0.2"}, + 'Mistral-(7B)-Instruct-v0.2': {'mode': True, 'id': 
"mistralai/Mistral-7B-Instruct-v0.2", 'name': "Mistral-(7B)-Instruct-v0.2"}, 'DeepSeek-LLM-Chat-(67B)': {'mode': True, 'id': "deepseek-ai/deepseek-llm-67b-chat", 'name': "DeepSeek-LLM-Chat-(67B)"}, 'DBRX-Instruct': {'mode': True, 'id': "databricks/dbrx-instruct", 'name': "DBRX-Instruct"}, 'Qwen-QwQ-32B-Preview': {'mode': True, 'id': "Qwen/QwQ-32B-Preview", 'name': "Qwen-QwQ-32B-Preview"}, 'Nous-Hermes-2-Mixtral-8x7B-DPO': {'mode': True, 'id': "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO", 'name': "Nous-Hermes-2-Mixtral-8x7B-DPO"}, - 'DeepSeek-R1': {'mode': True, 'id': "deepseek-reasoner", 'name': "DeepSeek-R1"} } trendingAgentMode = { @@ -106,12 +105,12 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin): "gemini-1.5-pro": "gemini-pro", "claude-3.5-sonnet": "claude-sonnet-3.5", "llama-3.3-70b": "Meta-Llama-3.3-70B-Instruct-Turbo", - "mixtral-7b": "Mistral-(7B)-Instruct-v0.", + "mixtral-7b": "Mistral-(7B)-Instruct-v0.2", "deepseek-chat": "DeepSeek-LLM-Chat-(67B)", "dbrx-instruct": "DBRX-Instruct", "qwq-32b": "Qwen-QwQ-32B-Preview", "hermes-2-dpo": "Nous-Hermes-2-Mixtral-8x7B-DPO", - "deepseek-r1": "DeepSeek-R1", + "deepseek-chat": "deepseek-v3", ### image ### "flux": "ImageGeneration", diff --git a/g4f/Provider/DeepInfraChat.py b/g4f/Provider/DeepInfraChat.py index 7aa1fd09..f384a3ff 100644 --- a/g4f/Provider/DeepInfraChat.py +++ b/g4f/Provider/DeepInfraChat.py @@ -14,6 +14,7 @@ class DeepInfraChat(OpenaiTemplate): 'meta-llama/Meta-Llama-3.1-8B-Instruct', default_model, 'meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo', + 'deepseek-ai/DeepSeek-V3', 'Qwen/QwQ-32B-Preview', 'microsoft/WizardLM-2-8x22B', 'microsoft/WizardLM-2-7B', @@ -26,6 +27,7 @@ class DeepInfraChat(OpenaiTemplate): "llama-3.1-8b": "meta-llama/Meta-Llama-3.1-8B-Instruct", "llama-3.3-70b": "meta-llama/Llama-3.3-70B-Instruct-Turbo", "llama-3.1-70b": "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo", + "deepseek-chat": "deepseek-ai/DeepSeek-V3", "qwq-32b": "Qwen/QwQ-32B-Preview", "wizardlm-2-8x22b": 
"microsoft/WizardLM-2-8x22B", "wizardlm-2-7b": "microsoft/WizardLM-2-7B", -- cgit v1.2.3