author | Tekky <98614666+xtekky@users.noreply.github.com> | 2023-10-03 23:04:10 +0200 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-10-03 23:04:10 +0200 |
commit | d9e527eb327e6e991435332b20f15b30c596ba0d (patch) | |
tree | 0616d74ff183e905cc81e97c44326f73d06c5b93 /g4f | |
parent | ~ (diff) | |
parent | Add streaming in openai chat (diff) | |
download | gpt4free-d9e527eb327e6e991435332b20f15b30c596ba0d.tar gpt4free-d9e527eb327e6e991435332b20f15b30c596ba0d.tar.gz gpt4free-d9e527eb327e6e991435332b20f15b30c596ba0d.tar.bz2 gpt4free-d9e527eb327e6e991435332b20f15b30c596ba0d.tar.lz gpt4free-d9e527eb327e6e991435332b20f15b30c596ba0d.tar.xz gpt4free-d9e527eb327e6e991435332b20f15b30c596ba0d.tar.zst gpt4free-d9e527eb327e6e991435332b20f15b30c596ba0d.zip |
Diffstat (limited to 'g4f')
-rw-r--r-- | g4f/Provider/Bing.py | 78
-rw-r--r-- | g4f/Provider/OpenaiChat.py | 73
-rw-r--r-- | g4f/Provider/helper.py | 26
3 files changed, 120 insertions, 57 deletions
diff --git a/g4f/Provider/Bing.py b/g4f/Provider/Bing.py
index e4e56519..940b3a0b 100644
--- a/g4f/Provider/Bing.py
+++ b/g4f/Provider/Bing.py
@@ -6,8 +6,12 @@ import os
 import urllib.parse
 from aiohttp import ClientSession, ClientTimeout
 from ..typing import AsyncGenerator
-from .base_provider import AsyncGeneratorProvider, get_cookies
+from .base_provider import AsyncGeneratorProvider
+
+class Tones():
+    creative = "Creative"
+    balanced = "Balanced"
+    precise = "Precise"
 
 class Bing(AsyncGeneratorProvider):
     url = "https://bing.com/chat"
@@ -16,12 +20,13 @@ class Bing(AsyncGeneratorProvider):
 
     @staticmethod
     def create_async_generator(
-        model: str,
-        messages: list[dict[str, str]],
-        cookies: dict = None, **kwargs) -> AsyncGenerator:
+        model: str,
+        messages: list[dict[str, str]],
+        cookies: dict = None,
+        tone: str = Tones.creative,
+        **kwargs
+    ) -> AsyncGenerator:
-        if not cookies:
-            cookies = get_cookies(".bing.com")
 
         if len(messages) < 2:
             prompt = messages[0]["content"]
             context = None
@@ -38,7 +43,7 @@ class Bing(AsyncGeneratorProvider):
             'SRCHUSR'    : '',
             'SRCHHPGUSR' : '',
         }
-        return stream_generate(prompt, context, cookies)
+        return stream_generate(prompt, tone, context, cookies)
 
 def create_context(messages: list[dict[str, str]]):
     context = "".join(f"[{message['role']}](#message)\n{message['content']}\n\n" for message in messages)
@@ -64,6 +69,14 @@ async def create_conversation(session: ClientSession) -> Conversation:
 
     return Conversation(conversationId, clientId, conversationSignature)
 
+async def retry_conversation(session: ClientSession) -> Conversation:
+    for _ in range(5):
+        try:
+            return await create_conversation(session)
+        except:
+            session.cookie_jar.clear()
+    return await create_conversation(session)
+
 async def list_conversations(session: ClientSession) -> list:
     url = "https://www.bing.com/turing/conversation/chats"
     async with session.get(url) as response:
@@ -162,34 +175,32 @@ class Defaults:
         'x-forwarded-for': ip_address,
     }
 
-    optionsSets = {
-        "optionsSets": [
-            'saharasugg',
-            'enablenewsfc',
-            'clgalileo',
-            'gencontentv3',
-            "nlu_direct_response_filter",
-            "deepleo",
-            "disable_emoji_spoken_text",
-            "responsible_ai_policy_235",
-            "enablemm",
-            "h3precise"
-            "dtappid",
-            "cricinfo",
-            "cricinfov2",
-            "dv3sugg",
-            "nojbfedge"
-        ]
-    }
+    optionsSets = [
+        'saharasugg',
+        'enablenewsfc',
+        'clgalileo',
+        'gencontentv3',
+        "nlu_direct_response_filter",
+        "deepleo",
+        "disable_emoji_spoken_text",
+        "responsible_ai_policy_235",
+        "enablemm",
+        "h3precise"
+        "dtappid",
+        "cricinfo",
+        "cricinfov2",
+        "dv3sugg",
+        "nojbfedge"
+    ]
 
 def format_message(msg: dict) -> str:
     return json.dumps(msg, ensure_ascii=False) + Defaults.delimiter
 
-def create_message(conversation: Conversation, prompt: str, context: str=None) -> str:
+def create_message(conversation: Conversation, prompt: str, tone: str, context: str=None) -> str:
     struct = {
         'arguments': [
             {
-                **Defaults.optionsSets,
+                'optionsSets': Defaults.optionsSets,
                 'source': 'cib',
                 'allowedMessageTypes': Defaults.allowedMessageTypes,
                 'sliceIds': Defaults.sliceIds,
@@ -201,6 +212,7 @@ def create_message(conversation: Conversation, prompt: str, context: str=None) -> str:
                 'text': prompt,
                 'messageType': 'Chat'
             },
+            'tone': tone,
             'conversationSignature': conversation.conversationSignature,
             'participant': {
                 'id': conversation.clientId
@@ -225,15 +237,16 @@ def create_message(conversation: Conversation, prompt: str, context: str=None) -> str:
 
 async def stream_generate(
         prompt: str,
+        tone: str,
         context: str=None,
-        cookies: dict=None
+        cookies: dict=None,
     ):
     async with ClientSession(
         timeout=ClientTimeout(total=900),
         cookies=cookies,
         headers=Defaults.headers,
     ) as session:
-        conversation = await create_conversation(session)
+        conversation = await retry_conversation(session)
         try:
             async with session.ws_connect(
                 'wss://sydney.bing.com/sydney/ChatHub',
@@ -243,7 +256,7 @@ async def stream_generate(
 
                 await wss.send_str(format_message({'protocol': 'json', 'version': 1}))
                 msg = await wss.receive(timeout=900)
-                await wss.send_str(create_message(conversation, prompt, context))
+                await wss.send_str(create_message(conversation, prompt, tone, context))
 
                 response_txt = ''
                 returned_text = ''
@@ -281,7 +294,6 @@ async def stream_generate(
                         result = response['item']['result']
                         if result.get('error'):
                             raise Exception(f"{result['value']}: {result['message']}")
-                        final = True
-                        break
+                        return
         finally:
             await delete_conversation(session, conversation)
\ No newline at end of file
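To make the intent of the Bing.py changes above concrete, here is a minimal usage sketch (not part of the commit): it assumes the g4f package layout shown in the diff, and that valid .bing.com cookies are supplied by the caller now that the automatic get_cookies fallback has been removed. The cookie value and model name are illustrative placeholders.

import asyncio
from g4f.Provider.Bing import Bing, Tones

async def demo():
    # `tone` is the new keyword introduced above; it defaults to Tones.creative.
    # `cookies` must now be passed explicitly (placeholder value below).
    async for chunk in Bing.create_async_generator(
        model="gpt-4",
        messages=[{"role": "user", "content": "Hello, Bing!"}],
        tone=Tones.precise,
        cookies={"_U": "<your .bing.com _U cookie>"},
    ):
        print(chunk, end="", flush=True)

asyncio.run(demo())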
diff --git a/g4f/Provider/OpenaiChat.py b/g4f/Provider/OpenaiChat.py
index f7dc8298..fbd26d7c 100644
--- a/g4f/Provider/OpenaiChat.py
+++ b/g4f/Provider/OpenaiChat.py
@@ -1,14 +1,14 @@
 from __future__ import annotations
 
-from curl_cffi.requests import AsyncSession
 import uuid
 import json
 
-from .base_provider import AsyncProvider, get_cookies, format_prompt
+from .base_provider import AsyncGeneratorProvider
+from .helper import get_browser, get_cookies, format_prompt
 from ..typing import AsyncGenerator
+from ..requests import StreamSession
 
-
-class OpenaiChat(AsyncProvider):
+class OpenaiChat(AsyncGeneratorProvider):
     url = "https://chat.openai.com"
     needs_auth = True
     working = True
@@ -16,7 +16,7 @@ class OpenaiChat(AsyncProvider):
     _access_token = None
 
     @classmethod
-    async def create_async(
+    async def create_async_generator(
         cls,
         model: str,
         messages: list[dict[str, str]],
@@ -32,7 +32,7 @@ class OpenaiChat(AsyncProvider):
            "Accept": "text/event-stream",
            "Authorization": f"Bearer {access_token}",
         }
-        async with AsyncSession(proxies=proxies, headers=headers, impersonate="chrome107") as session:
+        async with StreamSession(proxies=proxies, headers=headers, impersonate="chrome107") as session:
             messages = [
                 {
                     "id": str(uuid.uuid4()),
@@ -48,31 +48,58 @@ class OpenaiChat(AsyncProvider):
                 "model": "text-davinci-002-render-sha",
                 "history_and_training_disabled": True,
             }
-            response = await session.post("https://chat.openai.com/backend-api/conversation", json=data)
-            response.raise_for_status()
-            last_message = None
-            for line in response.content.decode().splitlines():
-                if line.startswith("data: "):
-                    line = line[6:]
-                    if line == "[DONE]":
-                        break
-                    line = json.loads(line)
-                    if "message" in line:
-                        last_message = line["message"]["content"]["parts"][0]
-            return last_message
+            async with session.post(f"{cls.url}/backend-api/conversation", json=data) as response:
+                response.raise_for_status()
+                last_message = ""
+                async for line in response.iter_lines():
+                    if line.startswith(b"data: "):
+                        line = line[6:]
+                        if line == b"[DONE]":
+                            break
+                        line = json.loads(line)
+                        if "message" in line and not line["message"]["end_turn"]:
+                            new_message = line["message"]["content"]["parts"][0]
+                            yield new_message[len(last_message):]
+                            last_message = new_message
+
+    @classmethod
+    def fetch_access_token(cls) -> str:
+        try:
+            from selenium.webdriver.common.by import By
+            from selenium.webdriver.support.ui import WebDriverWait
+            from selenium.webdriver.support import expected_conditions as EC
+        except ImportError:
+            return
+        driver = get_browser()
+        if not driver:
+            return
+
+        driver.get(f"{cls.url}/")
+        try:
+            WebDriverWait(driver, 1200).until(
+                EC.presence_of_element_located((By.ID, "prompt-textarea"))
+            )
+            javascript = "return (await (await fetch('/api/auth/session')).json())['accessToken']"
+            return driver.execute_script(javascript)
+        finally:
+            driver.quit()
 
     @classmethod
     async def get_access_token(cls, cookies: dict = None, proxies: dict = None) -> str:
         if not cls._access_token:
             cookies = cookies if cookies else get_cookies("chat.openai.com")
-            async with AsyncSession(proxies=proxies, cookies=cookies, impersonate="chrome107") as session:
-                response = await session.get("https://chat.openai.com/api/auth/session")
-                response.raise_for_status()
-                cls._access_token = response.json()["accessToken"]
+            async with StreamSession(proxies=proxies, cookies=cookies, impersonate="chrome107") as session:
+                async with session.get(f"{cls.url}/api/auth/session") as response:
+                    response.raise_for_status()
+                    auth = await response.json()
+                    if "accessToken" in auth:
+                        cls._access_token = auth["accessToken"]
+            cls._access_token = cls.fetch_access_token()
+            if not cls._access_token:
+                raise RuntimeError("Missing access token")
         return cls._access_token
 
-
     @classmethod
     @property
     def params(cls):
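The OpenaiChat.py change turns the one-shot create_async into a streaming create_async_generator. Below is a rough sketch of the calling side under that new interface; the access_token keyword is an assumption here, since the diff only shows an access_token variable ending up in the Authorization header, and the prompt and model name are illustrative.

import asyncio
from g4f.Provider.OpenaiChat import OpenaiChat

async def demo():
    # Each iteration now yields only the newly generated text;
    # the provider slices off the part it has already emitted.
    async for delta in OpenaiChat.create_async_generator(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "Stream me a short haiku."}],
        access_token="<your ChatGPT access token>",  # assumed keyword, see note above
    ):
        print(delta, end="", flush=True)

asyncio.run(demo())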
diff --git a/g4f/Provider/helper.py b/g4f/Provider/helper.py
index 234cdaa1..8f09239a 100644
--- a/g4f/Provider/helper.py
+++ b/g4f/Provider/helper.py
@@ -56,4 +56,28 @@ def format_prompt(messages: list[dict[str, str]], add_special_tokens=False):
         )
         return f"{formatted}\nAssistant:"
     else:
-        return messages[0]["content"]
\ No newline at end of file
+        return messages[0]["content"]
+
+
+def get_browser(user_data_dir: str = None):
+    try:
+        from undetected_chromedriver import Chrome
+    except ImportError:
+        return None
+
+    def get_user_data_dir():
+        dirs = [
+            '~/.config/google-chrome/Default',
+            '~/.var/app/com.google.Chrome/config/google-chrome/Default',
+            '%LOCALAPPDATA%\\Google\\Chrome\\User Data\\Default',
+            '~/Library/Application Support/Google/Chrome/Default',
+        ]
+        from os import path
+        for dir in dirs:
+            dir = path.expandvars(dir)
+            if path.exists(dir):
+                return dir
+    if not user_data_dir:
+        user_data_dir = get_user_data_dir()
+
+    return Chrome(user_data_dir=user_data_dir)
\ No newline at end of file
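Finally, a small sketch of how the new get_browser helper could be exercised on its own (assumes undetected-chromedriver and a local Chrome profile are available; the profile-path fallbacks mirror the list in the diff, and the URL is illustrative):

from g4f.Provider.helper import get_browser

driver = get_browser()  # or get_browser("/path/to/chrome/profile") to pin a profile
if driver is None:
    # undetected_chromedriver is not installed; callers such as
    # OpenaiChat.fetch_access_token treat this as "no token available".
    print("undetected_chromedriver not available")
else:
    try:
        driver.get("https://chat.openai.com/")
    finally:
        driver.quit()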