| field     | value |
|-----------|-------|
| author    | Heiner Lohaus <heiner.lohaus@netformic.com>, 2023-09-05 17:27:24 +0200 |
| committer | Heiner Lohaus <heiner.lohaus@netformic.com>, 2023-09-05 17:27:24 +0200 |
| commit    | 5ca47b44b2b42abb4f48163c17500b5ee67ab28f (patch) |
| tree      | b8fba4bde73d59c05857459eac41b25347d65c8e /g4f/Provider/OpenaiChat.py |
| parent    | Merge pull request #869 from ahobsonsayers/add-console-script (diff) |
Diffstat

| mode       | file                       | lines |
|------------|----------------------------|-------|
| -rw-r--r-- | g4f/Provider/OpenaiChat.py | 104   |

1 file changed, 56 insertions(+), 48 deletions(-)
```diff
diff --git a/g4f/Provider/OpenaiChat.py b/g4f/Provider/OpenaiChat.py
index f2d1ed6f..c93977ec 100644
--- a/g4f/Provider/OpenaiChat.py
+++ b/g4f/Provider/OpenaiChat.py
@@ -1,67 +1,82 @@
 from __future__ import annotations
 
-has_module = True
-try:
-    from revChatGPT.V1 import AsyncChatbot
-except ImportError:
-    has_module = False
-
+from curl_cffi.requests import AsyncSession
+import uuid
 import json
 
-from httpx import AsyncClient
-
+from .base_provider import AsyncProvider, get_cookies, format_prompt
 from ..typing import AsyncGenerator
-from .base_provider import AsyncGeneratorProvider, format_prompt, get_cookies
 
 
-class OpenaiChat(AsyncGeneratorProvider):
+class OpenaiChat(AsyncProvider):
     url = "https://chat.openai.com"
     needs_auth = True
-    working = has_module
+    working = True
     supports_gpt_35_turbo = True
-    supports_gpt_4 = True
-    supports_stream = True
     _access_token = None
 
     @classmethod
-    async def create_async_generator(
+    async def create_async(
         cls,
         model: str,
         messages: list[dict[str, str]],
         proxy: str = None,
-        access_token: str = _access_token,
+        access_token: str = None,
         cookies: dict = None,
         **kwargs: dict
     ) -> AsyncGenerator:
-
-        config = {"access_token": access_token, "model": model}
+        proxies = None
         if proxy:
             if "://" not in proxy:
                 proxy = f"http://{proxy}"
-            config["proxy"] = proxy
+            proxies = {
+                "http": proxy,
+                "https": proxy
+            }
+        if not access_token:
+            access_token = await cls.get_access_token(cookies)
+        headers = {
+            "Accept": "text/event-stream",
+            "Authorization": f"Bearer {access_token}",
+        }
+        async with AsyncSession(proxies=proxies, headers=headers, impersonate="chrome107") as session:
+            messages = [
+                {
+                    "id": str(uuid.uuid4()),
+                    "author": {"role": "user"},
+                    "content": {"content_type": "text", "parts": [format_prompt(messages)]},
+                },
+            ]
+            data = {
+                "action": "next",
+                "messages": messages,
+                "conversation_id": None,
+                "parent_message_id": str(uuid.uuid4()),
+                "model": "text-davinci-002-render-sha",
+                "history_and_training_disabled": True,
+            }
+            response = await session.post("https://chat.openai.com/backend-api/conversation", json=data)
+            response.raise_for_status()
+            last_message = None
+            for line in response.content.decode().splitlines():
+                if line.startswith("data: "):
+                    line = line[6:]
+                    if line != "[DONE]":
+                        line = json.loads(line)
+                        if "message" in line:
+                            last_message = line["message"]["content"]["parts"][0]
+            return last_message
 
-        bot = AsyncChatbot(
-            config=config
-        )
-
-        if not access_token:
+    @classmethod
+    async def get_access_token(cls, cookies: dict = None, proxies: dict = None):
+        if not cls._access_token:
             cookies = cookies if cookies else get_cookies("chat.openai.com")
-            cls._access_token = await get_access_token(bot.session, cookies)
-            bot.set_access_token(cls._access_token)
-
-        returned = None
-        async for message in bot.ask(format_prompt(messages)):
-            message = message["message"]
-            if returned:
-                if message.startswith(returned):
-                    new = message[len(returned):]
-                    if new:
-                        yield new
-            else:
-                yield message
-            returned = message
-
-        await bot.delete_conversation(bot.conversation_id)
+            async with AsyncSession(proxies=proxies, cookies=cookies, impersonate="chrome107") as session:
+                response = await session.get("https://chat.openai.com/api/auth/session")
+                response.raise_for_status()
+                cls._access_token = response.json()["accessToken"]
+        return cls._access_token
 
     @classmethod
@@ -72,15 +87,8 @@ class OpenaiChat(AsyncGeneratorProvider):
             ("messages", "list[dict[str, str]]"),
             ("stream", "bool"),
             ("proxy", "str"),
+            ("access_token", "str"),
+            ("cookies", "dict[str, str]")
         ]
         param = ", ".join([": ".join(p) for p in params])
-        return f"g4f.provider.{cls.__name__} supports: ({param})"
-
-
-async def get_access_token(session: AsyncClient, cookies: dict):
-    response = await session.get("https://chat.openai.com/api/auth/session", cookies=cookies)
-    response.raise_for_status()
-    try:
-        return response.json()["accessToken"]
-    except json.decoder.JSONDecodeError:
-        raise RuntimeError(f"Response: {response.text}")
\ No newline at end of file
+        return f"g4f.provider.{cls.__name__} supports: ({param})"
\ No newline at end of file
```
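For context, below is a minimal sketch of how the reworked provider might be invoked after this commit. The diff only covers `g4f/Provider/OpenaiChat.py` itself, so the import path, the `model` value, and the example message are assumptions inferred from the new `create_async` signature, not code taken from the repository.

```python
import asyncio

# Assumed import path; only the provider module itself appears in the diff.
from g4f.Provider.OpenaiChat import OpenaiChat


async def main():
    # After this commit the class derives from AsyncProvider, so it returns
    # the completed reply as one string instead of yielding streamed chunks.
    reply = await OpenaiChat.create_async(
        model="gpt-3.5-turbo",          # ignored: the request hardcodes text-davinci-002-render-sha
        messages=[{"role": "user", "content": "Hello!"}],
        access_token=None,               # if omitted, get_access_token() falls back to
                                         # browser cookies for chat.openai.com
        proxy=None,
    )
    print(reply)


asyncio.run(main())
```

Note the behavioural change implied by the base-class swap: callers that previously iterated over `create_async_generator` for incremental output now receive a single awaited result, and authentication moved from revChatGPT into the class-level `get_access_token` helper.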