Diffstat (limited to 'g4f/Provider/Theb.py')
-rw-r--r-- | g4f/Provider/Theb.py | 86 |
1 file changed, 42 insertions, 44 deletions
diff --git a/g4f/Provider/Theb.py b/g4f/Provider/Theb.py
index 09c94c24..9a47bb89 100644
--- a/g4f/Provider/Theb.py
+++ b/g4f/Provider/Theb.py
@@ -1,74 +1,72 @@
-import json,random,requests
-# from curl_cffi import requests
-from ..typing import Any, CreateResult
+import json, random, requests
+
+from ..typing import Any, CreateResult
 from .base_provider import BaseProvider
 
 
 class Theb(BaseProvider):
-    url = "https://theb.ai"
-    working = True
-    supports_stream = True
-    supports_gpt_35_turbo = True
-    needs_auth = True
+    url                   = "https://theb.ai"
+    working               = True
+    supports_stream       = True
+    supports_gpt_35_turbo = True
+    needs_auth            = True
 
     @staticmethod
     def create_completion(
         model: str,
         messages: list[dict[str, str]],
-        stream: bool,
-        **kwargs: Any,
-    ) -> CreateResult:
+        stream: bool, **kwargs: Any) -> CreateResult:
+
         conversation = ''
         for message in messages:
             conversation += '%s: %s\n' % (message['role'], message['content'])
-
         conversation += 'assistant: '
+
         auth = kwargs.get("auth", {
             "bearer_token":"free",
             "org_id":"theb",
         })
+
         bearer_token = auth["bearer_token"]
-        org_id = auth["org_id"]
+        org_id       = auth["org_id"]
+
         headers = {
-            'authority': 'beta.theb.ai',
-            'accept': 'text/event-stream',
-            'accept-language': 'id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7',
-            'authorization': 'Bearer '+bearer_token,
-            'content-type': 'application/json',
-            'origin': 'https://beta.theb.ai',
-            'referer': 'https://beta.theb.ai/home',
-            'sec-ch-ua': '"Chromium";v="116", "Not)A;Brand";v="24", "Google Chrome";v="116"',
-            'sec-ch-ua-mobile': '?0',
+            'authority'         : 'beta.theb.ai',
+            'accept'            : 'text/event-stream',
+            'accept-language'   : 'id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7',
+            'authorization'     : 'Bearer '+bearer_token,
+            'content-type'      : 'application/json',
+            'origin'            : 'https://beta.theb.ai',
+            'referer'           : 'https://beta.theb.ai/home',
+            'sec-ch-ua'         : '"Chromium";v="116", "Not)A;Brand";v="24", "Google Chrome";v="116"',
+            'sec-ch-ua-mobile'  : '?0',
             'sec-ch-ua-platform': '"Windows"',
-            'sec-fetch-dest': 'empty',
-            'sec-fetch-mode': 'cors',
-            'sec-fetch-site': 'same-origin',
-            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36',
-            'x-ai-model': 'ee8d4f29cb7047f78cbe84313ed6ace8',
+            'sec-fetch-dest'    : 'empty',
+            'sec-fetch-mode'    : 'cors',
+            'sec-fetch-site'    : 'same-origin',
+            'user-agent'        : 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36',
+            'x-ai-model'        : 'ee8d4f29cb7047f78cbe84313ed6ace8',
         }
-        # generate 10 random number
-        # 0.1 - 0.9
+
         req_rand = random.randint(100000000, 9999999999)
 
         json_data: dict[str, Any] = {
-            "text": conversation,
-            "category": "04f58f64a4aa4191a957b47290fee864",
-            "model": "ee8d4f29cb7047f78cbe84313ed6ace8",
+            "text"        : conversation,
+            "category"    : "04f58f64a4aa4191a957b47290fee864",
+            "model"       : "ee8d4f29cb7047f78cbe84313ed6ace8",
             "model_params": {
-                "system_prompt": "You are ChatGPT, a large language model trained by OpenAI, based on the GPT-3.5 architecture.\nKnowledge cutoff: 2021-09\nCurrent date: {{YYYY-MM-DD}}",
-                "temperature": kwargs.get("temperature", 1),
-                "top_p": kwargs.get("top_p", 1),
-                "frequency_penalty": kwargs.get("frequency_penalty", 0),
-                "presence_penalty": kwargs.get("presence_penalty", 0),
-                "long_term_memory": "auto"
+                "system_prompt"     : "You are ChatGPT, a large language model trained by OpenAI, based on the GPT-3.5 architecture.\nKnowledge cutoff: 2021-09\nCurrent date: {{YYYY-MM-DD}}",
+                "temperature"       : kwargs.get("temperature", 1),
+                "top_p"             : kwargs.get("top_p", 1),
+                "frequency_penalty" : kwargs.get("frequency_penalty", 0),
+                "presence_penalty"  : kwargs.get("presence_penalty", 0),
+                "long_term_memory"  : "auto"
            }
         }
-        response = requests.post(
-            "https://beta.theb.ai/api/conversation?org_id="+org_id+"&req_rand="+str(req_rand),
-            headers=headers,
-            json=json_data,
-            stream=True,
-        )
+
+        response = requests.post(f"https://beta.theb.ai/api/conversation?org_id={org_id}&req_rand={req_rand}",
+            headers=headers, json=json_data, stream=True)
+
         response.raise_for_status()
         content = ""
         next_content = ""