From e4c4e7b5badb3c0185d223e65330167d663d4489 Mon Sep 17 00:00:00 2001 From: kqlio67 <166700875+kqlio67@users.noreply.github.com> Date: Sat, 25 Jan 2025 14:22:17 +0000 Subject: Update provider capabilities and model support (#2600) * Update provider capabilities and model support - Update provider documentation with latest model support - Remove deprecated models and update model counts - Add new model variants and fix formatting - Update provider class labels for better clarity - Add support for new models including DeepSeek-R1 and sd-turbo - Clean up unused model aliases and improve code organization Key changes: - Update Blackbox vision capabilities - Remove legacy models (midijourney, unity, rtist) - Add flux variants and update provider counts - Set explicit provider labels - Update model aliases and mappings - Add new model support in multiple providers * Update g4f/models.py * Update docs/providers-and-models.md g4f/models.py g4f/Provider/Blackbox.py --------- Co-authored-by: kqlio67 <> --- g4f/Provider/PollinationsAI.py | 35 +++++++++++++++++++++++------------ 1 file changed, 23 insertions(+), 12 deletions(-) (limited to 'g4f/Provider/PollinationsAI.py') diff --git a/g4f/Provider/PollinationsAI.py b/g4f/Provider/PollinationsAI.py index 3263fcbc..a715acf1 100644 --- a/g4f/Provider/PollinationsAI.py +++ b/g4f/Provider/PollinationsAI.py @@ -38,24 +38,35 @@ class PollinationsAI(AsyncGeneratorProvider, ProviderModelMixin): default_model = "openai" default_image_model = "flux" default_vision_model = "gpt-4o" - extra_image_models = ["midjourney", "dall-e-3", "flux-pro", "flux-realism", "flux-cablyai", "flux-anime", "flux-3d"] + extra_image_models = ["midjourney", "dall-e-3", "flux-pro"] vision_models = [default_vision_model, "gpt-4o-mini"] - extra_text_models = [*vision_models, "claude", "claude-email", "karma", "command-r", "llamalight", "mistral-large", "sur", "sur-mistral", "any-dark"] + extra_text_models = ["claude", "claude-email", "deepseek-reasoner"] + 
vision_models model_aliases = { - "qwen-2-72b": "qwen", + ### Text Models ### + "gpt-4o-mini": "openai", + "gpt-4": "openai-large", + "gpt-4o": "openai-large", + "qwen-2.5-72b": "qwen", "qwen-2.5-coder-32b": "qwen-coder", "llama-3.3-70b": "llama", "mistral-nemo": "mistral", - #"": "karma", - #"": "sur-mistral", - "gpt-4": "searchgpt", - "claude-3.5-haiku": "claude-hybridspace", - "claude-3.5-sonnet": "claude-email", - "gpt-4": "claude", + #"mistral-nemo": "unity", # bug with image url response + #"gpt-4o-mini": "midijourney", # bug with the answer + "gpt-4o-mini": "rtist", + "gpt-4o": "searchgpt", + #"mistral-nemo": "evil", + "gpt-4o-mini": "p1", "deepseek-chat": "deepseek", - "llama-3.1-8b": "llamalight", + "deepseek-chat": "claude-hybridspace", + "llama-3.1-8b": "llamalight", + "gpt-4o-vision": "gpt-4o", + "gpt-4o-mini-vision": "gpt-4o-mini", + "gpt-4o-mini": "claude", + "deepseek-chat": "claude-email", + "deepseek-r1": "deepseek-reasoner", + ### Image Models ### - "sd-turbo": "turbo", + "sdxl-turbo": "turbo", } text_models = [] @@ -244,4 +255,4 @@ class PollinationsAI(AsyncGeneratorProvider, ProviderModelMixin): break except json.JSONDecodeError: yield decoded_chunk.strip() - continue \ No newline at end of file + continue -- cgit v1.2.3