From c1e6276414632761b5ed6a98f61ca4cb7286e582 Mon Sep 17 00:00:00 2001
From: kqlio67
Date: Sat, 9 Nov 2024 09:48:34 +0200
Subject: Update (g4f/Provider/)

---
 g4f/Provider/local/Ollama.py             |   4 +-
 g4f/Provider/needs_auth/DeepInfra.py     |   4 +-
 g4f/Provider/needs_auth/Groq.py          |   4 +-
 g4f/Provider/needs_auth/Openai.py        | 124 ------------------------------
 g4f/Provider/needs_auth/OpenaiAPI.py     | 124 ++++++++++++++++++++++++++++++
 g4f/Provider/needs_auth/PerplexityApi.py |   4 +-
 g4f/Provider/needs_auth/ThebApi.py       |   6 +-
 g4f/Provider/needs_auth/__init__.py      |   2 +-
 8 files changed, 136 insertions(+), 136 deletions(-)
 delete mode 100644 g4f/Provider/needs_auth/Openai.py
 create mode 100644 g4f/Provider/needs_auth/OpenaiAPI.py

diff --git a/g4f/Provider/local/Ollama.py b/g4f/Provider/local/Ollama.py
index c503a46a..de68a218 100644
--- a/g4f/Provider/local/Ollama.py
+++ b/g4f/Provider/local/Ollama.py
@@ -3,10 +3,10 @@ from __future__ import annotations
 import requests
 import os
 
-from ..needs_auth.Openai import Openai
+from ..needs_auth.OpenaiAPI import OpenaiAPI
 from ...typing import AsyncResult, Messages
 
-class Ollama(Openai):
+class Ollama(OpenaiAPI):
     label = "Ollama"
     url = "https://ollama.com"
     needs_auth = False
diff --git a/g4f/Provider/needs_auth/DeepInfra.py b/g4f/Provider/needs_auth/DeepInfra.py
index ebe5bfbf..35e7ca7f 100644
--- a/g4f/Provider/needs_auth/DeepInfra.py
+++ b/g4f/Provider/needs_auth/DeepInfra.py
@@ -2,9 +2,9 @@ from __future__ import annotations
 
 import requests
 from ...typing import AsyncResult, Messages
-from .Openai import Openai
+from .OpenaiAPI import OpenaiAPI
 
-class DeepInfra(Openai):
+class DeepInfra(OpenaiAPI):
     label = "DeepInfra"
     url = "https://deepinfra.com"
     working = True
diff --git a/g4f/Provider/needs_auth/Groq.py b/g4f/Provider/needs_auth/Groq.py
index 027d98bf..943fc81a 100644
--- a/g4f/Provider/needs_auth/Groq.py
+++ b/g4f/Provider/needs_auth/Groq.py
@@ -1,9 +1,9 @@
 from __future__ import annotations
 
-from .Openai import Openai
+from .OpenaiAPI import OpenaiAPI
 from ...typing import AsyncResult, Messages
 
-class Groq(Openai):
+class Groq(OpenaiAPI):
     label = "Groq"
     url = "https://console.groq.com/playground"
     working = True
diff --git a/g4f/Provider/needs_auth/Openai.py b/g4f/Provider/needs_auth/Openai.py
deleted file mode 100644
index 382ebada..00000000
--- a/g4f/Provider/needs_auth/Openai.py
+++ /dev/null
@@ -1,124 +0,0 @@
-from __future__ import annotations
-
-import json
-
-from ..helper import filter_none
-from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin, FinishReason
-from ...typing import Union, Optional, AsyncResult, Messages, ImageType
-from ...requests import StreamSession, raise_for_status
-from ...errors import MissingAuthError, ResponseError
-from ...image import to_data_uri
-
-class Openai(AsyncGeneratorProvider, ProviderModelMixin):
-    label = "OpenAI API"
-    url = "https://platform.openai.com"
-    working = True
-    needs_auth = True
-    supports_message_history = True
-    supports_system_message = True
-    default_model = ""
-
-    @classmethod
-    async def create_async_generator(
-        cls,
-        model: str,
-        messages: Messages,
-        proxy: str = None,
-        timeout: int = 120,
-        image: ImageType = None,
-        api_key: str = None,
-        api_base: str = "https://api.openai.com/v1",
-        temperature: float = None,
-        max_tokens: int = None,
-        top_p: float = None,
-        stop: Union[str, list[str]] = None,
-        stream: bool = False,
-        headers: dict = None,
-        extra_data: dict = {},
-        **kwargs
-    ) -> AsyncResult:
-        if cls.needs_auth and api_key is None:
-            raise MissingAuthError('Add a "api_key"')
-        if image is not None:
-            if not model and hasattr(cls, "default_vision_model"):
-                model = cls.default_vision_model
-            messages[-1]["content"] = [
-                {
-                    "type": "image_url",
-                    "image_url": {"url": to_data_uri(image)}
-                },
-                {
-                    "type": "text",
-                    "text": messages[-1]["content"]
-                }
-            ]
-        async with StreamSession(
-            proxies={"all": proxy},
-            headers=cls.get_headers(stream, api_key, headers),
-            timeout=timeout
-        ) as session:
-            data = filter_none(
-                messages=messages,
-                model=cls.get_model(model),
-                temperature=temperature,
-                max_tokens=max_tokens,
-                top_p=top_p,
-                stop=stop,
-                stream=stream,
-                **extra_data
-            )
-            async with session.post(f"{api_base.rstrip('/')}/chat/completions", json=data) as response:
-                await raise_for_status(response)
-                if not stream:
-                    data = await response.json()
-                    cls.raise_error(data)
-                    choice = data["choices"][0]
-                    if "content" in choice["message"]:
-                        yield choice["message"]["content"].strip()
-                    finish = cls.read_finish_reason(choice)
-                    if finish is not None:
-                        yield finish
-                else:
-                    first = True
-                    async for line in response.iter_lines():
-                        if line.startswith(b"data: "):
-                            chunk = line[6:]
-                            if chunk == b"[DONE]":
-                                break
-                            data = json.loads(chunk)
-                            cls.raise_error(data)
-                            choice = data["choices"][0]
-                            if "content" in choice["delta"] and choice["delta"]["content"]:
-                                delta = choice["delta"]["content"]
-                                if first:
-                                    delta = delta.lstrip()
-                                if delta:
-                                    first = False
-                                    yield delta
-                            finish = cls.read_finish_reason(choice)
-                            if finish is not None:
-                                yield finish
-
-    @staticmethod
-    def read_finish_reason(choice: dict) -> Optional[FinishReason]:
-        if "finish_reason" in choice and choice["finish_reason"] is not None:
-            return FinishReason(choice["finish_reason"])
-
-    @staticmethod
-    def raise_error(data: dict):
-        if "error_message" in data:
-            raise ResponseError(data["error_message"])
-        elif "error" in data:
-            raise ResponseError(f'Error {data["error"]["code"]}: {data["error"]["message"]}')
-
-    @classmethod
-    def get_headers(cls, stream: bool, api_key: str = None, headers: dict = None) -> dict:
-        return {
-            "Accept": "text/event-stream" if stream else "application/json",
-            "Content-Type": "application/json",
-            **(
-                {"Authorization": f"Bearer {api_key}"}
-                if api_key is not None else {}
-            ),
-            **({} if headers is None else headers)
-        }
diff --git a/g4f/Provider/needs_auth/OpenaiAPI.py b/g4f/Provider/needs_auth/OpenaiAPI.py
new file mode 100644
index 00000000..116b5f6f
--- /dev/null
+++ b/g4f/Provider/needs_auth/OpenaiAPI.py
@@ -0,0 +1,124 @@
+from __future__ import annotations
+
+import json
+
+from ..helper import filter_none
+from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin, FinishReason
+from ...typing import Union, Optional, AsyncResult, Messages, ImageType
+from ...requests import StreamSession, raise_for_status
+from ...errors import MissingAuthError, ResponseError
+from ...image import to_data_uri
+
+class OpenaiAPI(AsyncGeneratorProvider, ProviderModelMixin):
+    label = "OpenAI API"
+    url = "https://platform.openai.com"
+    working = True
+    needs_auth = True
+    supports_message_history = True
+    supports_system_message = True
+    default_model = ""
+
+    @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        proxy: str = None,
+        timeout: int = 120,
+        image: ImageType = None,
+        api_key: str = None,
+        api_base: str = "https://api.openai.com/v1",
+        temperature: float = None,
+        max_tokens: int = None,
+        top_p: float = None,
+        stop: Union[str, list[str]] = None,
+        stream: bool = False,
+        headers: dict = None,
+        extra_data: dict = {},
+        **kwargs
+    ) -> AsyncResult:
+        if cls.needs_auth and api_key is None:
+            raise MissingAuthError('Add a "api_key"')
+        if image is not None:
+            if not model and hasattr(cls, "default_vision_model"):
+                model = cls.default_vision_model
+            messages[-1]["content"] = [
+                {
+                    "type": "image_url",
+                    "image_url": {"url": to_data_uri(image)}
+                },
+                {
+                    "type": "text",
+                    "text": messages[-1]["content"]
+                }
+            ]
+        async with StreamSession(
+            proxies={"all": proxy},
+            headers=cls.get_headers(stream, api_key, headers),
+            timeout=timeout
+        ) as session:
+            data = filter_none(
+                messages=messages,
+                model=cls.get_model(model),
+                temperature=temperature,
+                max_tokens=max_tokens,
+                top_p=top_p,
+                stop=stop,
+                stream=stream,
+                **extra_data
+            )
+            async with session.post(f"{api_base.rstrip('/')}/chat/completions", json=data) as response:
+                await raise_for_status(response)
+                if not stream:
+                    data = await response.json()
+                    cls.raise_error(data)
+                    choice = data["choices"][0]
+                    if "content" in choice["message"]:
+                        yield choice["message"]["content"].strip()
+                    finish = cls.read_finish_reason(choice)
+                    if finish is not None:
+                        yield finish
+                else:
+                    first = True
+                    async for line in response.iter_lines():
+                        if line.startswith(b"data: "):
+                            chunk = line[6:]
+                            if chunk == b"[DONE]":
+                                break
+                            data = json.loads(chunk)
+                            cls.raise_error(data)
+                            choice = data["choices"][0]
+                            if "content" in choice["delta"] and choice["delta"]["content"]:
+                                delta = choice["delta"]["content"]
+                                if first:
+                                    delta = delta.lstrip()
+                                if delta:
+                                    first = False
+                                    yield delta
+                            finish = cls.read_finish_reason(choice)
+                            if finish is not None:
+                                yield finish
+
+    @staticmethod
+    def read_finish_reason(choice: dict) -> Optional[FinishReason]:
+        if "finish_reason" in choice and choice["finish_reason"] is not None:
+            return FinishReason(choice["finish_reason"])
+
+    @staticmethod
+    def raise_error(data: dict):
+        if "error_message" in data:
+            raise ResponseError(data["error_message"])
+        elif "error" in data:
+            raise ResponseError(f'Error {data["error"]["code"]}: {data["error"]["message"]}')
+
+    @classmethod
+    def get_headers(cls, stream: bool, api_key: str = None, headers: dict = None) -> dict:
+        return {
+            "Accept": "text/event-stream" if stream else "application/json",
+            "Content-Type": "application/json",
+            **(
+                {"Authorization": f"Bearer {api_key}"}
+                if api_key is not None else {}
+            ),
+            **({} if headers is None else headers)
+        }
diff --git a/g4f/Provider/needs_auth/PerplexityApi.py b/g4f/Provider/needs_auth/PerplexityApi.py
index 3ee65b30..85d7cc98 100644
--- a/g4f/Provider/needs_auth/PerplexityApi.py
+++ b/g4f/Provider/needs_auth/PerplexityApi.py
@@ -1,9 +1,9 @@
 from __future__ import annotations
 
-from .Openai import Openai
+from .OpenaiAPI import OpenaiAPI
 from ...typing import AsyncResult, Messages
 
-class PerplexityApi(Openai):
+class PerplexityApi(OpenaiAPI):
     label = "Perplexity API"
     url = "https://www.perplexity.ai"
     working = True
diff --git a/g4f/Provider/needs_auth/ThebApi.py b/g4f/Provider/needs_auth/ThebApi.py
index 22fc62ed..2006f7ad 100644
--- a/g4f/Provider/needs_auth/ThebApi.py
+++ b/g4f/Provider/needs_auth/ThebApi.py
@@ -1,7 +1,7 @@
 from __future__ import annotations
 
 from ...typing import CreateResult, Messages
-from .Openai import Openai
+from .OpenaiAPI import OpenaiAPI
 
 models = {
     "theb-ai": "TheB.AI",
@@ -27,7 +27,7 @@ models = {
     "qwen-7b-chat": "Qwen 7B"
 }
 
-class ThebApi(Openai):
+class ThebApi(OpenaiAPI):
     label = "TheB.AI API"
     url = "https://theb.ai"
     working = True
@@ -58,4 +58,4 @@ class ThebApi(Openai):
                 "top_p": top_p,
             }
         }
-        return super().create_async_generator(model, messages, api_base=api_base, extra_data=data, **kwargs)
\ No newline at end of file
+        return super().create_async_generator(model, messages, api_base=api_base, extra_data=data, **kwargs)
diff --git a/g4f/Provider/needs_auth/__init__.py b/g4f/Provider/needs_auth/__init__.py
index e979f86d..26c50c0a 100644
--- a/g4f/Provider/needs_auth/__init__.py
+++ b/g4f/Provider/needs_auth/__init__.py
@@ -11,7 +11,7 @@ from .GeminiPro import GeminiPro
 from .Groq import Groq
 from .HuggingFace import HuggingFace
 from .MetaAI import MetaAI
-from .Openai import Openai
+from .OpenaiAPI import OpenaiAPI
 from .OpenaiChat import OpenaiChat
 from .PerplexityApi import PerplexityApi
 from .Poe import Poe
-- 
cgit v1.2.3
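
Note for downstream code: the only visible change in this patch is the base-class rename, so custom providers now import and subclass OpenaiAPI instead of Openai, exactly as the updated Ollama, DeepInfra, Groq, PerplexityApi, and ThebApi modules show. The following minimal sketch mirrors that pattern against the renamed base, assuming the g4f package from this repository is importable; the ExampleAPI class, its URLs, and its default model are placeholders invented for illustration and are not part of the patch.

from __future__ import annotations

from g4f.Provider.needs_auth.OpenaiAPI import OpenaiAPI
from g4f.typing import AsyncResult, Messages

class ExampleAPI(OpenaiAPI):
    # Hypothetical OpenAI-compatible provider; label, url, and api_base are placeholders.
    label = "Example OpenAI-compatible API"
    url = "https://api.example.invalid"
    working = True
    needs_auth = True
    default_model = "example-model"

    @classmethod
    def create_async_generator(
        cls,
        model: str,
        messages: Messages,
        api_base: str = "https://api.example.invalid/v1",  # placeholder endpoint
        **kwargs
    ) -> AsyncResult:
        # Delegate to the renamed OpenaiAPI base class, as ThebApi does above.
        return super().create_async_generator(model, messages, api_base=api_base, **kwargs)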