Diffstat (limited to 'g4f/Provider/needs_auth/OpenaiChat.py')
-rw-r--r--  g4f/Provider/needs_auth/OpenaiChat.py  46
1 file changed, 25 insertions, 21 deletions
diff --git a/g4f/Provider/needs_auth/OpenaiChat.py b/g4f/Provider/needs_auth/OpenaiChat.py
index 36b8bd3c..515230f0 100644
--- a/g4f/Provider/needs_auth/OpenaiChat.py
+++ b/g4f/Provider/needs_auth/OpenaiChat.py
@@ -29,6 +29,7 @@ from ...requests.aiohttp import StreamSession
from ...image import to_image, to_bytes, ImageResponse, ImageRequest
from ...errors import MissingAuthError, ResponseError
from ...providers.conversation import BaseConversation
+from ..helper import format_cookies
from ..openai.har_file import getArkoseAndAccessToken, NoValidHarFileError
from ... import debug
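Note on the new import above: format_cookies replaces the private _format_cookies helper that is removed further down. A minimal sketch of the assumed behavior (the real helper lives in g4f/Provider/helper.py; this is not copied from it):

def format_cookies(cookies: dict) -> str:
    # Join cookies into a single Cookie header value. Filtering of the
    # access_token cookie now happens in _create_request_args, not here.
    return "; ".join(f"{k}={v}" for k, v in cookies.items())

print(format_cookies({"oai-did": "abc", "_puid": "xyz"}))
# -> oai-did=abc; _puid=xyz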
@@ -43,8 +44,14 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
supports_message_history = True
supports_system_message = True
default_model = None
+ default_vision_model = "gpt-4-vision"
models = ["gpt-3.5-turbo", "gpt-4", "gpt-4-gizmo"]
- model_aliases = {"text-davinci-002-render-sha": "gpt-3.5-turbo", "": "gpt-3.5-turbo", "gpt-4-turbo-preview": "gpt-4"}
+ model_aliases = {
+ "text-davinci-002-render-sha": "gpt-3.5-turbo",
+ "": "gpt-3.5-turbo",
+ "gpt-4-turbo-preview": "gpt-4",
+ "dall-e": "gpt-4",
+ }
_api_key: str = None
_headers: dict = None
_cookies: Cookies = None
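Note on the widened alias table above: requests for "dall-e" are now routed to the gpt-4 backend model. A minimal sketch of how alias lookup is expected to resolve names (get_model is provided by ProviderModelMixin; this mirrors it rather than reproducing the actual implementation):

model_aliases = {
    "text-davinci-002-render-sha": "gpt-3.5-turbo",
    "": "gpt-3.5-turbo",
    "gpt-4-turbo-preview": "gpt-4",
    "dall-e": "gpt-4",
}

def resolve(model: str) -> str:
    # Fall back to the requested name when no alias is defined.
    return model_aliases.get(model, model)

assert resolve("dall-e") == "gpt-4"
assert resolve("") == "gpt-3.5-turbo"
assert resolve("gpt-4") == "gpt-4"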
@@ -334,9 +341,8 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
Raises:
RuntimeError: If an error occurs during processing.
"""
-
async with StreamSession(
- proxies={"all": proxy},
+ proxy=proxy,
impersonate="chrome",
timeout=timeout
) as session:
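Usage sketch for the changed constructor argument above: a single proxy URL now replaces the proxies={"all": ...} mapping. The proxy address and target URL below are placeholders:

import asyncio
from g4f.requests.aiohttp import StreamSession

async def main():
    # proxy is passed as one string rather than a proxies mapping
    async with StreamSession(proxy="http://127.0.0.1:8080",
                             impersonate="chrome", timeout=120) as session:
        async with session.get("https://chatgpt.com/") as response:
            print(response.status)

asyncio.run(main())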
@@ -358,26 +364,27 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
api_key = cls._api_key = None
cls._create_request_args()
if debug.logging:
- print("OpenaiChat: Load default_model failed")
+ print("OpenaiChat: Load default model failed")
print(f"{e.__class__.__name__}: {e}")
arkose_token = None
if cls.default_model is None:
+ error = None
try:
- arkose_token, api_key, cookies = await getArkoseAndAccessToken(proxy)
- cls._create_request_args(cookies)
+ arkose_token, api_key, cookies, headers = await getArkoseAndAccessToken(proxy)
+ cls._create_request_args(cookies, headers)
cls._set_api_key(api_key)
except NoValidHarFileError as e:
- ...
+ error = e
if cls._api_key is None:
await cls.nodriver_access_token()
if cls._api_key is None and cls.needs_auth:
- raise e
+ raise error
cls.default_model = cls.get_model(await cls.get_default_model(session, cls._headers))
async with session.post(
f"{cls.url}/backend-anon/sentinel/chat-requirements"
- if not cls._api_key else
+ if cls._api_key is None else
f"{cls.url}/backend-api/sentinel/chat-requirements",
json={"conversation_mode_kind": "primary_assistant"},
headers=cls._headers
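The error-handling change in the hunk above matters because Python clears an "except ... as e" variable when the except block ends, so the old "raise e" could hit an unbound name; the new code stores the exception and re-raises it only if every fallback fails. A self-contained illustration of the pattern (fetch_from_har and fetch_with_browser are hypothetical stand-ins, not g4f functions):

def fetch_from_har() -> str | None:
    raise FileNotFoundError("no .har file with an access token found")

def fetch_with_browser() -> str | None:
    return None  # pretend the browser fallback also failed

def get_access_token() -> str | None:
    error: Exception | None = None
    token: str | None = None
    try:
        token = fetch_from_har()
    except FileNotFoundError as e:
        error = e                     # remember the failure, do not raise yet
    if token is None:
        token = fetch_with_browser()  # fallback path
    if token is None and error is not None:
        raise error                   # every path failed: surface the original error
    return token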
@@ -393,8 +400,8 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
print(f'Arkose: {need_arkose} Turnstile: {data["turnstile"]["required"]}')
if need_arkose and arkose_token is None:
- arkose_token, api_key, cookies = await getArkoseAndAccessToken(proxy)
- cls._create_request_args(cookies)
+ arkose_token, api_key, cookies, headers = await getArkoseAndAccessToken(proxy)
+ cls._create_request_args(cookies, headers)
cls._set_api_key(api_key)
if arkose_token is None:
raise MissingAuthError("No arkose token found in .har file")
@@ -406,7 +413,8 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
print("OpenaiChat: Upload image failed")
print(f"{e.__class__.__name__}: {e}")
- model = cls.get_model(model).replace("gpt-3.5-turbo", "text-davinci-002-render-sha")
+ model = cls.get_model(model)
+ model = "text-davinci-002-render-sha" if model == "gpt-3.5-turbo" else model
if conversation is None:
conversation = Conversation(conversation_id, str(uuid.uuid4()) if parent_id is None else parent_id)
else:
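Switching from str.replace() to an exact comparison in the hunk above avoids rewriting model names that merely contain "gpt-3.5-turbo" as a prefix. A small demonstration of the difference:

def old_map(model: str) -> str:
    return model.replace("gpt-3.5-turbo", "text-davinci-002-render-sha")

def new_map(model: str) -> str:
    return "text-davinci-002-render-sha" if model == "gpt-3.5-turbo" else model

print(old_map("gpt-3.5-turbo-16k"))   # text-davinci-002-render-sha-16k (mangled)
print(new_map("gpt-3.5-turbo-16k"))   # gpt-3.5-turbo-16k (left alone)
print(new_map("gpt-3.5-turbo"))       # text-davinci-002-render-sha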
@@ -613,7 +621,7 @@ this.fetch = async (url, options) => {
cookies[c.name] = c.value
user_agent = await page.evaluate("window.navigator.userAgent")
await page.close()
- cls._create_request_args(cookies, user_agent)
+ cls._create_request_args(cookies, user_agent=user_agent)
cls._set_api_key(api_key)
@classmethod
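The nodriver path above now passes user_agent by keyword because the reworked _create_request_args takes headers as its second parameter, so a positional second argument would bind to headers instead. A tiny standalone demonstration (simplified signature, not the class method itself):

def create_request_args(cookies=None, headers=None, user_agent=None):
    return {"cookies": cookies, "headers": headers, "user_agent": user_agent}

print(create_request_args({"oai-did": "abc"}, "Mozilla/5.0"))
# user agent ends up under "headers"
print(create_request_args({"oai-did": "abc"}, user_agent="Mozilla/5.0"))
# user agent ends up where intended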
@@ -667,16 +675,12 @@ this.fetch = async (url, options) => {
"oai-language": "en-US",
}
- @staticmethod
- def _format_cookies(cookies: Cookies):
- return "; ".join(f"{k}={v}" for k, v in cookies.items() if k != "access_token")
-
@classmethod
- def _create_request_args(cls, cookies: Cookies = None, user_agent: str = None):
- cls._headers = cls.get_default_headers()
+ def _create_request_args(cls, cookies: Cookies = None, headers: dict = None, user_agent: str = None):
+ cls._headers = cls.get_default_headers() if headers is None else headers
if user_agent is not None:
cls._headers["user-agent"] = user_agent
- cls._cookies = {} if cookies is None else cookies
+ cls._cookies = {} if cookies is None else {k: v for k, v in cookies.items() if k != "access_token"}
cls._update_cookie_header()
@classmethod
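Usage sketch of the reworked _create_request_args shown above (cookie values and headers are placeholders): headers captured from the .har file are reused when given, the access_token cookie is dropped, and the cookie header is rebuilt through format_cookies.

from g4f.Provider.needs_auth.OpenaiChat import OpenaiChat

cookies = {"oai-did": "abc", "access_token": "secret"}
har_headers = {"user-agent": "Mozilla/5.0", "oai-language": "en-US"}

OpenaiChat._create_request_args(cookies, har_headers)
assert "access_token" not in OpenaiChat._cookies
assert "secret" not in OpenaiChat._headers["cookie"]
print(OpenaiChat._headers["cookie"])   # oai-did=abc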
@@ -693,7 +697,7 @@ this.fetch = async (url, options) => {
@classmethod
def _update_cookie_header(cls):
- cls._headers["cookie"] = cls._format_cookies(cls._cookies)
+ cls._headers["cookie"] = format_cookies(cls._cookies)
class Conversation(BaseConversation):
"""