-rw-r--r--  gpt4free/__init__.py  9
-rw-r--r--  gpt4free/usesless/README.md (renamed from unfinished/usesless/README.md)  0
-rw-r--r--  gpt4free/usesless/__init__.py (renamed from unfinished/usesless/__init__.py)  4
-rw-r--r--  gui/streamlit_chat_app.py  1
-rw-r--r--  testing/theb_test.py (renamed from test.py)  0
-rw-r--r--  testing/useless_test.py  27
-rw-r--r--  unfinished/chatpdf/__init__.py  87
7 files changed, 87 insertions, 41 deletions
diff --git a/gpt4free/__init__.py b/gpt4free/__init__.py
index 5336c825..b4742b64 100644
--- a/gpt4free/__init__.py
+++ b/gpt4free/__init__.py
@@ -5,6 +5,7 @@ from gpt4free import forefront
 from gpt4free import quora
 from gpt4free import theb
 from gpt4free import you
+from gpt4free import usesless
 class Provider(Enum):
@@ -15,6 +16,7 @@ class Provider(Enum):
     ForeFront = 'fore_front'
     Theb = 'theb'
     CoCalc = 'cocalc'
+    UseLess = 'useless'
 class Completion:
@@ -22,6 +24,7 @@ class Completion:
     @staticmethod
     def create(provider: Provider, prompt: str, **kwargs) -> str:
+
         """
         Invokes the given provider with given prompt and addition arguments and returns the string response
@@ -40,8 +43,14 @@ class Completion:
             return Completion.__theb_service(prompt, **kwargs)
         elif provider == Provider.CoCalc:
             return Completion.__cocalc_service(prompt, **kwargs)
+        elif provider == Provider.UseLess:
+            return Completion.__useless_service(prompt, **kwargs)
         else:
             raise Exception('Provider not exist, Please try again')
+
+    @staticmethod
+    def __useless_service(prompt: str, **kwargs) -> str:
+        return usesless.Completion.create(prompt = prompt, **kwargs)
     @staticmethod
     def __you_service(prompt: str, **kwargs) -> str:
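
The new UseLess branch simply forwards the prompt and any extra keyword arguments to the relocated gpt4free.usesless module, so the added dispatch path can be exercised like the minimal sketch below (same pattern as the new testing/useless_test.py further down; note that, judging by the usesless hunk and the new test, the returned value is in practice a dict with 'text' and 'id' keys despite the -> str annotation on __useless_service):

    import gpt4free

    # parentMessageId is passed through **kwargs to usesless.Completion.create
    req = gpt4free.Completion.create(provider=gpt4free.Provider.UseLess,
                                     prompt="Hello there", parentMessageId="")
    print(req["text"], req["id"])
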
diff --git a/unfinished/usesless/README.md b/gpt4free/usesless/README.md
index 13e9df8c..13e9df8c 100644
--- a/unfinished/usesless/README.md
+++ b/gpt4free/usesless/README.md
diff --git a/unfinished/usesless/__init__.py b/gpt4free/usesless/__init__.py
index 6f9a47ef..6029009d 100644
--- a/unfinished/usesless/__init__.py
+++ b/gpt4free/usesless/__init__.py
@@ -23,6 +23,8 @@ class Completion:
         temperature: float = 1,
         model: str = "gpt-3.5-turbo",
     ):
+        print(parentMessageId, prompt)
+
         json_data = {
             "openaiKey": "",
             "prompt": prompt,
@@ -40,12 +42,14 @@ class Completion:
         url = "https://ai.usesless.com/api/chat-process"
         request = requests.post(url, headers=Completion.headers, json=json_data)
         content = request.content
+
         response = Completion.__response_to_json(content)
         return response
     @classmethod
     def __response_to_json(cls, text) -> dict:
         text = str(text.decode("utf-8"))
+
         split_text = text.rsplit("\n", 1)[1]
         to_json = json.loads(split_text)
         return to_json
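
The chat-process endpoint evidently returns its body as newline-separated chunks, and __response_to_json keeps only the part after the last newline (rsplit("\n", 1)[1]) before parsing it as JSON. A small sketch of that parsing step, using a hypothetical two-chunk body (the real chunk layout is not shown in this diff):

    import json

    raw = b'{"text": "Hel"}\n{"id": "abc123", "text": "Hello!"}'  # assumed chunk layout
    last_chunk = raw.decode("utf-8").rsplit("\n", 1)[1]           # same split as __response_to_json
    final = json.loads(last_chunk)
    print(final["text"], final["id"])                             # keys consumed by testing/useless_test.py
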
diff --git a/gui/streamlit_chat_app.py b/gui/streamlit_chat_app.py
index fc5c8d8e..6abc9caf 100644
--- a/gui/streamlit_chat_app.py
+++ b/gui/streamlit_chat_app.py
@@ -11,7 +11,6 @@ import pickle
 conversations_file = "conversations.pkl"
-
 def load_conversations():
     try:
         with open(conversations_file, "rb") as f:
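
For context, load_conversations reads the pickled chat history back from conversations.pkl; only its opening lines appear in this hunk. A hedged sketch of the full round trip, assuming the rest of the real function falls back to an empty list and that a save counterpart exists elsewhere in the file (both assumptions, not shown in this diff):

    import pickle

    conversations_file = "conversations.pkl"

    def load_conversations():
        try:
            with open(conversations_file, "rb") as f:
                return pickle.load(f)
        except FileNotFoundError:
            return []  # assumption: missing file means no saved history

    def save_conversations(conversations):
        # hypothetical counterpart, not part of this hunk
        with open(conversations_file, "wb") as f:
            pickle.dump(conversations, f)
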
diff --git a/test.py b/testing/theb_test.py
index 0fd2ec8b..0fd2ec8b 100644
--- a/test.py
+++ b/testing/theb_test.py
diff --git a/testing/useless_test.py b/testing/useless_test.py
new file mode 100644
index 00000000..9b613aac
--- /dev/null
+++ b/testing/useless_test.py
@@ -0,0 +1,27 @@
+from gpt4free import usesless
+
+message_id = ""
+while True:
+    prompt = input("Question: ")
+    if prompt == "!stop":
+        break
+
+    req = usesless.Completion.create(prompt=prompt, parentMessageId=message_id)
+
+    print(f"Answer: {req['text']}")
+    message_id = req["id"]
+
+
+import gpt4free
+
+message_id = ""
+while True:
+    prompt = input("Question: ")
+    if prompt == "!stop":
+        break
+
+    req = gpt4free.Completion.create(provider = gpt4free.Provider.UseLess,
+                                     prompt=prompt, parentMessageId=message_id)
+
+    print(f"Answer: {req['text']}")
+    message_id = req["id"]
\ No newline at end of file
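
Both loops in the new test drive the same conversation API, first directly through usesless.Completion.create and then through the gpt4free.Completion.create dispatcher, feeding each response's id back as parentMessageId so the thread continues until the user types "!stop". A non-interactive variant of the same pattern (a sketch; the 'text' and 'id' keys are taken from the test above):

    from gpt4free import usesless

    message_id = ""
    for prompt in ["Hi, who are you?", "And what can you do?"]:  # canned prompts instead of input()
        req = usesless.Completion.create(prompt=prompt, parentMessageId=message_id)
        print(f"Q: {prompt}\nA: {req['text']}\n")
        message_id = req["id"]  # thread the id back to keep conversation context
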
diff --git a/unfinished/chatpdf/__init__.py b/unfinished/chatpdf/__init__.py
index 4c9d2d3e..30dc1d3e 100644
--- a/unfinished/chatpdf/__init__.py
+++ b/unfinished/chatpdf/__init__.py
@@ -1,59 +1,66 @@
import requests
import json
+from queue import Queue, Empty
+from threading import Thread
+from json import loads
+from re import findall
+
+
class Completion:
def request(prompt: str):
'''TODO: some sort of authentication + upload PDF from URL or local file
- Then you should get the atoken and chat ID
- '''
-
+ Then you should get the atoken and chat ID
+ '''
+
token = "your_token_here"
chat_id = "your_chat_id_here"
url = "https://chat-pr4yueoqha-ue.a.run.app/"
payload = json.dumps({
- "v": 2,
- "chatSession": {
- "type": "join",
- "chatId": chat_id
- },
- "history": [
- {
- "id": "VNsSyJIq_0",
- "author": "p_if2GPSfyN8hjDoA7unYe",
- "msg": "<start>",
- "time": 1682672009270
- },
- {
- "id": "Zk8DRUtx_6",
- "author": "uplaceholder",
- "msg": prompt,
- "time": 1682672181339
- }
- ]
- })
-
+ "v": 2,
+ "chatSession": {
+ "type": "join",
+ "chatId": chat_id
+ },
+ "history": [
+ {
+ "id": "VNsSyJIq_0",
+ "author": "p_if2GPSfyN8hjDoA7unYe",
+ "msg": "<start>",
+ "time": 1682672009270
+ },
+ {
+ "id": "Zk8DRUtx_6",
+ "author": "uplaceholder",
+ "msg": prompt,
+ "time": 1682672181339
+ }
+ ]
+ })
+
# TODO: fix headers, use random user-agent, streaming response, etc
headers = {
- 'authority': 'chat-pr4yueoqha-ue.a.run.app',
- 'accept': '*/*',
- 'accept-language': 'en-US,en;q=0.9',
- 'atoken': token,
- 'content-type': 'application/json',
- 'origin': 'https://www.chatpdf.com',
- 'referer': 'https://www.chatpdf.com/',
- 'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
- 'sec-ch-ua-mobile': '?0',
- 'sec-ch-ua-platform': '"Windows"',
- 'sec-fetch-dest': 'empty',
- 'sec-fetch-mode': 'cors',
- 'sec-fetch-site': 'cross-site',
- 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'
- }
+ 'authority': 'chat-pr4yueoqha-ue.a.run.app',
+ 'accept': '*/*',
+ 'accept-language': 'en-US,en;q=0.9',
+ 'atoken': token,
+ 'content-type': 'application/json',
+ 'origin': 'https://www.chatpdf.com',
+ 'referer': 'https://www.chatpdf.com/',
+ 'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
+ 'sec-ch-ua-mobile': '?0',
+ 'sec-ch-ua-platform': '"Windows"',
+ 'sec-fetch-dest': 'empty',
+ 'sec-fetch-mode': 'cors',
+ 'sec-fetch-site': 'cross-site',
+ 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36'
+ }
- response = requests.request("POST", url, headers=headers, data=payload).text
+ response = requests.request(
+ "POST", url, headers=headers, data=payload).text
Completion.stream_completed = True
return {'response': response}
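
The chatpdf hunk is almost entirely a re-indentation pass; the new imports (Queue, Thread, loads, findall) and the existing "streaming response" TODO suggest streamed output is planned but not yet wired up. As it stands, Completion.request POSTs one JSON payload with placeholder credentials, which the condensed, self-contained sketch below reproduces (token, chat_id, and the example question are placeholders, exactly as in the unfinished module):

    import json
    import requests

    token = "your_token_here"      # atoken obtained after uploading a PDF (placeholder)
    chat_id = "your_chat_id_here"  # chat session id (placeholder)

    payload = json.dumps({
        "v": 2,
        "chatSession": {"type": "join", "chatId": chat_id},
        "history": [
            {"id": "VNsSyJIq_0", "author": "p_if2GPSfyN8hjDoA7unYe", "msg": "<start>", "time": 1682672009270},
            {"id": "Zk8DRUtx_6", "author": "uplaceholder", "msg": "What is this PDF about?", "time": 1682672181339},
        ],
    })
    headers = {
        "atoken": token,
        "content-type": "application/json",
        "origin": "https://www.chatpdf.com",
        "referer": "https://www.chatpdf.com/",
    }

    # Same endpoint and payload the module builds; the full browser-style headers are omitted here.
    response = requests.post("https://chat-pr4yueoqha-ue.a.run.app/", headers=headers, data=payload)
    print(response.text)
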