From 126496d3cacd06a4fa8cbb4e5bde417ce6bb5b4a Mon Sep 17 00:00:00 2001
From: Heiner Lohaus
Date: Fri, 25 Aug 2023 06:41:32 +0200
Subject: Add OpenaiChat and Hugchat Provider

Add tests for providers with auth
Improve async support / 2x faster
Shared get_cookies by domain function
---
 g4f/Provider/OpenaiChat.py | 74 ++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 74 insertions(+)
 create mode 100644 g4f/Provider/OpenaiChat.py

(limited to 'g4f/Provider/OpenaiChat.py')

diff --git a/g4f/Provider/OpenaiChat.py b/g4f/Provider/OpenaiChat.py
new file mode 100644
index 00000000..cca258b3
--- /dev/null
+++ b/g4f/Provider/OpenaiChat.py
@@ -0,0 +1,74 @@
+has_module = True
+try:
+    from revChatGPT.V1 import AsyncChatbot
+except ImportError:
+    has_module = False
+from .base_provider import AsyncGeneratorProvider, get_cookies
+from ..typing import AsyncGenerator
+
+class OpenaiChat(AsyncGeneratorProvider):
+    url = "https://chat.openai.com"
+    needs_auth = True
+    working = has_module
+    supports_gpt_35_turbo = True
+    supports_gpt_4 = True
+    supports_stream = True
+
+    @classmethod
+    async def create_async_generator(
+        cls,
+        model: str,
+        messages: list[dict[str, str]],
+        proxy: str = None,
+        access_token: str = None,
+        cookies: dict = None,
+        **kwargs
+    ) -> AsyncGenerator:
+
+        config = {"access_token": access_token, "model": model}
+        if proxy:
+            if "://" not in proxy:
+                proxy = f"http://{proxy}"
+            config["proxy"] = proxy
+
+        bot = AsyncChatbot(
+            config=config
+        )
+
+        if not access_token:
+            cookies = cookies if cookies else get_cookies("chat.openai.com")
+            response = await bot.session.get("https://chat.openai.com/api/auth/session", cookies=cookies)
+            access_token = response.json()["accessToken"]
+            bot.set_access_token(access_token)
+
+        if len(messages) > 1:
+            formatted = "\n".join(
+                ["%s: %s" % ((message["role"]).capitalize(), message["content"]) for message in messages]
+            )
+            prompt = f"{formatted}\nAssistant:"
+        else:
+            prompt = messages.pop()["content"]
+
+        returned = None
+        async for message in bot.ask(prompt):
+            message = message["message"]
+            if returned:
+                if message.startswith(returned):
+                    new = message[len(returned):]
+                    if new:
+                        yield new
+            else:
+                yield message
+            returned = message
+
+    @classmethod
+    @property
+    def params(cls):
+        params = [
+            ("model", "str"),
+            ("messages", "list[dict[str, str]]"),
+            ("stream", "bool"),
+            ("proxy", "str"),
+        ]
+        param = ", ".join([": ".join(p) for p in params])
+        return f"g4f.provider.{cls.__name__} supports: ({param})"
--
cgit v1.2.3
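Since create_async_generator above is an async generator function, a caller iterates its result directly with `async for` inside an event loop. A minimal usage sketch, not part of the patch: the model string, the placeholder token, and per-chunk printing are illustrative assumptions, and it presumes g4f and revChatGPT are installed.

    import asyncio

    from g4f.Provider.OpenaiChat import OpenaiChat

    async def main() -> None:
        # The provider yields the reply incrementally, one text delta per iteration.
        async for chunk in OpenaiChat.create_async_generator(
            model="gpt-3.5-turbo",                            # assumed model name
            messages=[{"role": "user", "content": "Hello"}],
            access_token="<your ChatGPT access token>",       # placeholder credential
        ):
            print(chunk, end="", flush=True)

    asyncio.run(main())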
From efd75a11b871d61ac31b0e274acdfb33daba361d Mon Sep 17 00:00:00 2001
From: abc <98614666+xtekky@users.noreply.github.com>
Date: Sun, 27 Aug 2023 17:37:44 +0200
Subject: ~ | code styling
---
 g4f/Provider/OpenaiChat.py | 17 +++++++++--------
 1 file changed, 9 insertions(+), 8 deletions(-)

(limited to 'g4f/Provider/OpenaiChat.py')

diff --git a/g4f/Provider/OpenaiChat.py b/g4f/Provider/OpenaiChat.py
index cca258b3..c023c898 100644
--- a/g4f/Provider/OpenaiChat.py
+++ b/g4f/Provider/OpenaiChat.py
@@ -3,16 +3,17 @@ try:
     from revChatGPT.V1 import AsyncChatbot
 except ImportError:
     has_module = False
+
 from .base_provider import AsyncGeneratorProvider, get_cookies
-from ..typing import AsyncGenerator
+from ..typing       import AsyncGenerator

 class OpenaiChat(AsyncGeneratorProvider):
-    url = "https://chat.openai.com"
-    needs_auth = True
-    working = has_module
+    url                   = "https://chat.openai.com"
+    needs_auth            = True
+    working               = has_module
     supports_gpt_35_turbo = True
-    supports_gpt_4 = True
-    supports_stream = True
+    supports_gpt_4        = True
+    supports_stream       = True

     @classmethod
     async def create_async_generator(
@@ -36,8 +37,8 @@ class OpenaiChat(AsyncGeneratorProvider):
         )

         if not access_token:
-            cookies = cookies if cookies else get_cookies("chat.openai.com")
-            response = await bot.session.get("https://chat.openai.com/api/auth/session", cookies=cookies)
+            cookies      = cookies if cookies else get_cookies("chat.openai.com")
+            response     = await bot.session.get("https://chat.openai.com/api/auth/session", cookies=cookies)
             access_token = response.json()["accessToken"]
             bot.set_access_token(access_token)

--
cgit v1.2.3

From 7294abc890c377d75c6c8c932620c2e2c8b3f0f9 Mon Sep 17 00:00:00 2001
From: Heiner Lohaus
Date: Mon, 28 Aug 2023 01:43:45 +0200
Subject: Add async support for H2o

Add format_prompt helper
Fix create_completion in AsyncGeneratorProvider
Move get_cookies from constructor to function
Add own HuggingChat implementation
Remove needs_auth from Liaobots
Add static cache for access_token in OpenaiChat
Add OpenAssistant provider
Support stream and async in You
Support async and add userId in Yqcloud
Add log_time module
---
 g4f/Provider/OpenaiChat.py | 41 ++++++++++++++++++++++++-----------------
 1 file changed, 24 insertions(+), 17 deletions(-)

(limited to 'g4f/Provider/OpenaiChat.py')

diff --git a/g4f/Provider/OpenaiChat.py b/g4f/Provider/OpenaiChat.py
index c023c898..9ca0cd58 100644
--- a/g4f/Provider/OpenaiChat.py
+++ b/g4f/Provider/OpenaiChat.py
@@ -4,8 +4,11 @@ try:
 except ImportError:
     has_module = False

-from .base_provider import AsyncGeneratorProvider, get_cookies
-from ..typing       import AsyncGenerator
+from .base_provider import AsyncGeneratorProvider, get_cookies, format_prompt
+from ..typing import AsyncGenerator
+from httpx import AsyncClient
+import json
+

 class OpenaiChat(AsyncGeneratorProvider):
     url                   = "https://chat.openai.com"
@@ -14,6 +17,7 @@ class OpenaiChat(AsyncGeneratorProvider):
     supports_gpt_35_turbo = True
     supports_gpt_4        = True
     supports_stream       = True
+    _access_token         = None

     @classmethod
     async def create_async_generator(
@@ -21,9 +25,9 @@ class OpenaiChat(AsyncGeneratorProvider):
         model: str,
         messages: list[dict[str, str]],
         proxy: str = None,
-        access_token: str = None,
+        access_token: str = _access_token,
         cookies: dict = None,
-        **kwargs
+        **kwargs: dict
     ) -> AsyncGenerator:

         config = {"access_token": access_token, "model": model}
@@ -37,21 +41,12 @@ class OpenaiChat(AsyncGeneratorProvider):
         )

         if not access_token:
-            cookies      = cookies if cookies else get_cookies("chat.openai.com")
-            response     = await bot.session.get("https://chat.openai.com/api/auth/session", cookies=cookies)
-            access_token = response.json()["accessToken"]
-            bot.set_access_token(access_token)
-
-        if len(messages) > 1:
-            formatted = "\n".join(
-                ["%s: %s" % ((message["role"]).capitalize(), message["content"]) for message in messages]
-            )
-            prompt = f"{formatted}\nAssistant:"
-        else:
-            prompt = messages.pop()["content"]
+            cookies = cookies if cookies else get_cookies("chat.openai.com")
+            cls._access_token = await get_access_token(bot.session, cookies)
+            bot.set_access_token(cls._access_token)

         returned = None
-        async for message in bot.ask(prompt):
+        async for message in bot.ask(format_prompt(messages)):
             message = message["message"]
             if returned:
                 if message.startswith(returned):
@@ -61,6 +56,9 @@ class OpenaiChat(AsyncGeneratorProvider):
             else:
                 yield message
             returned = message
+
+        await bot.delete_conversation(bot.conversation_id)
+

     @classmethod
     @property
@@ -73,3 +71,12 @@ class OpenaiChat(AsyncGeneratorProvider):
         ]
         param = ", ".join([": ".join(p) for p in params])
         return f"g4f.provider.{cls.__name__} supports: ({param})"
+
+
+async def get_access_token(session: AsyncClient, cookies: dict):
+    response = await session.get("https://chat.openai.com/api/auth/session", cookies=cookies)
+    response.raise_for_status()
+    try:
+        return response.json()["accessToken"]
+    except json.decoder.JSONDecodeError:
+        raise RuntimeError(f"Response: {response.text}")
\ No newline at end of file
--
cgit v1.2.3
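The format_prompt helper pulled in above lives in base_provider and is not shown in this patch. A plausible minimal sketch, mirroring the inline formatting that this commit removes from create_async_generator (the real helper may differ in signature and details):

    def format_prompt(messages: list[dict[str, str]]) -> str:
        # A single message is passed through unchanged.
        if len(messages) <= 1:
            return messages[-1]["content"]
        # Otherwise render one "Role: content" line per message and cue the
        # assistant to answer, as the deleted block above did.
        formatted = "\n".join(
            f"{message['role'].capitalize()}: {message['content']}" for message in messages
        )
        return f"{formatted}\nAssistant:"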
From 901595b10f08972ee3ac5fc08c346dbb561a7d62 Mon Sep 17 00:00:00 2001
From: msi-JunXiang
Date: Sun, 3 Sep 2023 16:26:26 +0800
Subject: type hints

Use `from __future__ import annotations` to avoid `dict` and `list`
causing "TypeError: 'type' object is not subscriptable".

Refer to the following Stack Overflow discussions for more information:
1. https://stackoverflow.com/questions/75202610/typeerror-type-object-is-not-subscriptable-python
2. https://stackoverflow.com/questions/59101121/type-hint-for-a-dict-gives-typeerror-type-object-is-not-subscriptable
---
 g4f/Provider/OpenaiChat.py | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

(limited to 'g4f/Provider/OpenaiChat.py')

diff --git a/g4f/Provider/OpenaiChat.py b/g4f/Provider/OpenaiChat.py
index 9ca0cd58..f2d1ed6f 100644
--- a/g4f/Provider/OpenaiChat.py
+++ b/g4f/Provider/OpenaiChat.py
@@ -1,14 +1,18 @@
+from __future__ import annotations
+
 has_module = True
 try:
     from revChatGPT.V1 import AsyncChatbot
 except ImportError:
     has_module = False

-from .base_provider import AsyncGeneratorProvider, get_cookies, format_prompt
-from ..typing import AsyncGenerator
-from httpx import AsyncClient
 import json

+from httpx import AsyncClient
+
+from ..typing import AsyncGenerator
+from .base_provider import AsyncGeneratorProvider, format_prompt, get_cookies
+

 class OpenaiChat(AsyncGeneratorProvider):
     url                   = "https://chat.openai.com"
--
cgit v1.2.3
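The error quoted in this last commit message comes from evaluating built-in generics at definition time: on Python 3.8, `list[dict[str, str]]` raises "TypeError: 'type' object is not subscriptable", because subscripting the built-in types only became legal in 3.9 (PEP 585). With the `__future__` import, annotations are stored as strings and not evaluated at definition time, so the same hints work on older interpreters. A small stand-alone illustration (the function is hypothetical, not part of the patch):

    from __future__ import annotations  # must precede all other statements

    # With postponed evaluation, this definition also works on Python 3.7/3.8;
    # without the import, the annotation below raises
    #   TypeError: 'type' object is not subscriptable
    # on interpreters older than 3.9.
    def last_user_message(messages: list[dict[str, str]]) -> str:
        return messages[-1]["content"]

    print(last_user_message([{"role": "user", "content": "Hello"}]))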