author    Tekky <98614666+xtekky@users.noreply.github.com>  2024-09-16 00:37:30 +0200
committer GitHub <noreply@github.com>  2024-09-16 00:37:30 +0200
commit    cc80f2d3159ca0b6f6bfa2c36c4be87bc96209b2 (patch)
tree      25ef52b8c5496ca22ce8eaa82bc885313962ba29 /g4f/Provider/HuggingFace.py
parent    Merge pull request #2207 from kqlio67/main (diff)
parent    refactor(ReplicateHome): update model handling and API interaction (diff)
Diffstat (limited to 'g4f/Provider/HuggingFace.py')
-rw-r--r--  g4f/Provider/HuggingFace.py  |  27
1 file changed, 5 insertions(+), 22 deletions(-)
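The diff below removes the hard-coded model list and alias table from the HuggingFace provider and reuses the ones defined on HuggingChat, so both providers resolve model names from a single place. As a rough illustration of that pattern, here is a minimal, self-contained sketch; the HuggingChat attribute values and the get_model body are placeholders assumed for the example, not the project's actual implementation.

# Minimal sketch of the delegation pattern introduced by this commit.
# The HuggingChat values and the get_model logic are illustrative placeholders.

class HuggingChat:
    default_model = "meta-llama/Meta-Llama-3.1-70B-Instruct"            # placeholder
    models = [default_model, "mistralai/Mistral-7B-Instruct-v0.3"]      # placeholder
    model_aliases = {"mistral-7b": "mistralai/Mistral-7B-Instruct-v0.3"}

class HuggingFace:
    # Reference HuggingChat's class attributes instead of duplicating the
    # metadata, so both providers stay in sync automatically.
    default_model = HuggingChat.default_model
    models = HuggingChat.models
    model_aliases = HuggingChat.model_aliases

    @classmethod
    def get_model(cls, model: str) -> str:
        # Assumed resolution order: exact model id, then alias, then default.
        if model in cls.models:
            return model
        if model in cls.model_aliases:
            return cls.model_aliases[model]
        return cls.default_model

print(HuggingFace.get_model("mistral-7b"))  # mistralai/Mistral-7B-Instruct-v0.3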
diff --git a/g4f/Provider/HuggingFace.py b/g4f/Provider/HuggingFace.py
index 74957862..586e5f5f 100644
--- a/g4f/Provider/HuggingFace.py
+++ b/g4f/Provider/HuggingFace.py
@@ -9,33 +9,16 @@ from .helper import get_connector
 from ..errors import RateLimitError, ModelNotFoundError
 from ..requests.raise_for_status import raise_for_status
+from .HuggingChat import HuggingChat
+
 class HuggingFace(AsyncGeneratorProvider, ProviderModelMixin):
     url = "https://huggingface.co/chat"
     working = True
     needs_auth = True
     supports_message_history = True
-    default_model = "meta-llama/Meta-Llama-3.1-70B-Instruct"
-    models = [
-        'meta-llama/Meta-Llama-3.1-70B-Instruct',
-        'meta-llama/Meta-Llama-3.1-405B-Instruct-FP8',
-        'CohereForAI/c4ai-command-r-plus',
-        'mistralai/Mixtral-8x7B-Instruct-v0.1',
-        'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
-        '01-ai/Yi-1.5-34B-Chat',
-        'mistralai/Mistral-7B-Instruct-v0.3',
-        'microsoft/Phi-3-mini-4k-instruct',
-    ]
-
-    model_aliases = {
-        "llama-3.1-70b": "meta-llama/Meta-Llama-3.1-70B-Instruct",
-        "llama-3.1-405b": "meta-llama/Meta-Llama-3.1-405B-Instruct-FP8",
-        "command-r-plus": "CohereForAI/c4ai-command-r-plus",
-        "mixtral-8x7b": "mistralai/Mixtral-8x7B-Instruct-v0.1",
-        "mixtral-8x7b": "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
-        "yi-1.5-34b": "01-ai/Yi-1.5-34B-Chat",
-        "mistral-7b": "mistralai/Mistral-7B-Instruct-v0.3",
-        "phi-3-mini-4k": "microsoft/Phi-3-mini-4k-instruct",
-    }
+    default_model = HuggingChat.default_model
+    models = HuggingChat.models
+    model_aliases = HuggingChat.model_aliases
     @classmethod
     def get_model(cls, model: str) -> str: