path: root/unfinished/openai
author     t.me/xtekky <98614666+xtekky@users.noreply.github.com>  2023-04-27 14:29:39 +0200
committer  GitHub <noreply@github.com>                             2023-04-27 14:29:39 +0200
commit     a5b4d8b10c49021949aa9ce950fa182b54c71bc4 (patch)
tree       997609545bc922d63081a49ee356b747b4ecce68 /unfinished/openai
parent     Merge pull request #197 from AymaneHrouch/update_syspath (diff)
parent     Merge branch 'main' into main (diff)
Diffstat (limited to '')
-rw-r--r--  unfinished/openai/__ini__.py  140
1 file changed, 77 insertions, 63 deletions
diff --git a/unfinished/openai/__ini__.py b/unfinished/openai/__ini__.py
index b24184c6..71ec4623 100644
--- a/unfinished/openai/__ini__.py
+++ b/unfinished/openai/__ini__.py
@@ -1,72 +1,86 @@
-# experimental, needs chat.openai.com to be loaded with cf_clearance on browser ( can be closed after )
-
+# Import required libraries
from tls_client import Session
-from uuid import uuid4
-
+from uuid import uuid4
from browser_cookie3 import chrome
-def session_auth(client):
-    headers = {
-        'authority': 'chat.openai.com',
-        'accept': '*/*',
-        'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
-        'cache-control': 'no-cache',
-        'pragma': 'no-cache',
-        'referer': 'https://chat.openai.com/chat',
-        'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
-        'sec-ch-ua-mobile': '?0',
-        'sec-ch-ua-platform': '"macOS"',
-        'sec-fetch-dest': 'empty',
-        'sec-fetch-mode': 'cors',
-        'sec-fetch-site': 'same-origin',
-        'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
-    }
-    return client.get('https://chat.openai.com/api/auth/session', headers=headers).json()
+class OpenAIChat:
+    def __init__(self):
+        self.client = Session(client_identifier='chrome110')
+        self._load_cookies()
+        self._set_headers()
+
+    def _load_cookies(self):
+        # Load cookies for the specified domain
+        for cookie in chrome(domain_name='chat.openai.com'):
+            self.client.cookies[cookie.name] = cookie.value
+
+    def _set_headers(self):
+        # Set headers for the client
+        self.client.headers = {
+            'authority': 'chat.openai.com',
+            'accept': 'text/event-stream',
+            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
+            'authorization': 'Bearer ' + self.session_auth()['accessToken'],
+            'cache-control': 'no-cache',
+            'content-type': 'application/json',
+            'origin': 'https://chat.openai.com',
+            'pragma': 'no-cache',
+            'referer': 'https://chat.openai.com/chat',
+            'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
+            'sec-ch-ua-mobile': '?0',
+            'sec-ch-ua-platform': '"macOS"',
+            'sec-fetch-dest': 'empty',
+            'sec-fetch-mode': 'cors',
+            'sec-fetch-site': 'same-origin',
+            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
+        }
+
+    def session_auth(self):
+        headers = {
+            'authority': 'chat.openai.com',
+            'accept': '*/*',
+            'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
+            'cache-control': 'no-cache',
+            'pragma': 'no-cache',
+            'referer': 'https://chat.openai.com/chat',
+            'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
+            'sec-ch-ua-mobile': '?0',
+            'sec-ch-ua-platform': '"macOS"',
+            'sec-fetch-dest': 'empty',
+            'sec-fetch-mode': 'cors',
+            'sec-fetch-site': 'same-origin',
+            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
+        }
-client = Session(client_identifier='chrome110')
+        return self.client.get('https://chat.openai.com/api/auth/session', headers=headers).json()
-for cookie in chrome(domain_name='chat.openai.com'):
-    client.cookies[cookie.name] = cookie.value
+    def send_message(self, message):
+        response = self.client.post('https://chat.openai.com/backend-api/conversation', json={
+            'action': 'next',
+            'messages': [
+                {
+                    'id': str(uuid4()),
+                    'author': {
+                        'role': 'user',
+                    },
+                    'content': {
+                        'content_type': 'text',
+                        'parts': [
+                            message,
+                        ],
+                    },
+                },
+            ],
+            'parent_message_id': '9b4682f7-977c-4c8a-b5e6-9713e73dfe01',
+            'model': 'text-davinci-002-render-sha',
+            'timezone_offset_min': -120,
+        })
-client.headers = {
-    'authority': 'chat.openai.com',
-    'accept': 'text/event-stream',
-    'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
-    'authorization': 'Bearer ' + session_auth(client)['accessToken'],
-    'cache-control': 'no-cache',
-    'content-type': 'application/json',
-    'origin': 'https://chat.openai.com',
-    'pragma': 'no-cache',
-    'referer': 'https://chat.openai.com/chat',
-    'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
-    'sec-ch-ua-mobile': '?0',
-    'sec-ch-ua-platform': '"macOS"',
-    'sec-fetch-dest': 'empty',
-    'sec-fetch-mode': 'cors',
-    'sec-fetch-site': 'same-origin',
-    'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
-}
+        return response.text
-response = client.post('https://chat.openai.com/backend-api/conversation', json = {
-    'action': 'next',
-    'messages': [
-        {
-            'id': str(uuid4()),
-            'author': {
-                'role': 'user',
-            },
-            'content': {
-                'content_type': 'text',
-                'parts': [
-                    'hello world',
-                ],
-            },
-        },
-    ],
-    'parent_message_id': '9b4682f7-977c-4c8a-b5e6-9713e73dfe01',
-    'model': 'text-davinci-002-render-sha',
-    'timezone_offset_min': -120,
-})
-print(response.text)
\ No newline at end of file
+if __name__ == "__main__":
+    chat = OpenAIChat()
+    response = chat.send_message("hello world")
+    print(response)