From 0b5d1d3d18736899d5665af846a2eb88873bc8f0 Mon Sep 17 00:00:00 2001
From: Bagus Indrayana
Date: Thu, 17 Aug 2023 23:36:33 +0800
Subject: add provider

---
 g4f/Provider/Wuguokai.py | 65 ++++++++++++++++++++++++++++++++++++++++++++++++
 g4f/Provider/__init__.py |  4 ++-
 2 files changed, 68 insertions(+), 1 deletion(-)
 create mode 100644 g4f/Provider/Wuguokai.py

(limited to 'g4f/Provider')

diff --git a/g4f/Provider/Wuguokai.py b/g4f/Provider/Wuguokai.py
new file mode 100644
index 00000000..906283ad
--- /dev/null
+++ b/g4f/Provider/Wuguokai.py
@@ -0,0 +1,65 @@
+import random, requests, json
+from ..typing import Any, CreateResult
+from .base_provider import BaseProvider
+
+
+class Wuguokai(BaseProvider):
+    url = 'https://chat.wuguokai.xyz'
+    supports_gpt_35_turbo = True
+    supports_stream = False
+    needs_auth = False
+    working = True
+
+    @staticmethod
+    def create_completion(
+        model: str,
+        messages: list[dict[str, str]],
+        stream: bool,
+        **kwargs: Any,
+    ) -> CreateResult:
+        base = ''
+        for message in messages:
+            base += '%s: %s\n' % (message['role'], message['content'])
+        base += 'assistant:'
+
+        headers = {
+            'authority': 'ai-api.wuguokai.xyz',
+            'accept': 'application/json, text/plain, */*',
+            'accept-language': 'id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7',
+            'content-type': 'application/json',
+            'origin': 'https://chat.wuguokai.xyz',
+            'referer': 'https://chat.wuguokai.xyz/',
+            'sec-ch-ua': '"Not.A/Brand";v="8", "Chromium";v="114", "Google Chrome";v="114"',
+            'sec-ch-ua-mobile': '?0',
+            'sec-ch-ua-platform': '"Windows"',
+            'sec-fetch-dest': 'empty',
+            'sec-fetch-mode': 'cors',
+            'sec-fetch-site': 'same-site',
+            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36'
+        }
+        data = {
+            "prompt": base,
+            "options": {},
+            "userId": f"#/chat/{random.randint(1, 99999999)}",
+            "usingContext": True
+        }
+        response = requests.post("https://ai-api20.wuguokai.xyz/api/chat-process", headers=headers, data=json.dumps(data), proxies=kwargs['proxy'] if 'proxy' in kwargs else {})
+        _split = response.text.split("> 若回答失败请重试或多刷新几次界面后重试")
+        if response.status_code == 200:
+            if len(_split) > 1:
+                yield _split[1].strip()
+            else:
+                yield _split[0].strip()
+        else:
+            raise Exception(f"Error: {response.status_code} {response.reason}")
+
+    @classmethod
+    @property
+    def params(cls):
+        params = [
+            ("model", "str"),
+            ("messages", "list[dict[str, str]]"),
+            ("stream", "bool")
+        ]
+        param = ", ".join([": ".join(p) for p in params])
+        return f"g4f.provider.{cls.__name__} supports: ({param})"
\ No newline at end of file
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index 81d1ad64..432ae672 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -25,6 +25,7 @@ from .You import You
 from .Yqcloud import Yqcloud
 from .Equing import Equing
 from .FastGpt import FastGpt
+from .Wuguokai import Wuguokai
 
 __all__ = [
     "BaseProvider",
@@ -53,5 +54,6 @@ __all__ = [
     "You",
     "Yqcloud",
     "Equing",
-    "FastGpt"
+    "FastGpt",
+    "Wuguokai"
 ]
--
cgit v1.2.3
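
For reference, a minimal usage sketch of the provider added by this patch (not part of the commit itself). It assumes the g4f.ChatCompletion.create entry point and the explicit provider= selection that the repository exposed around the time of this commit, and that extra keyword arguments such as proxy are forwarded to the provider's create_completion; treat it as an illustration rather than a tested snippet.

    # Usage sketch, not part of the patch. Assumes g4f.ChatCompletion.create and
    # provider selection as exposed by the g4f package at the time of this commit.
    import g4f
    from g4f.Provider import Wuguokai

    response = g4f.ChatCompletion.create(
        model="gpt-3.5-turbo",   # the provider only advertises gpt-3.5-turbo support
        provider=Wuguokai,       # force the newly added provider
        messages=[{"role": "user", "content": "Hello, who are you?"}],
        stream=False,            # the provider sets supports_stream = False
        # proxy={"https": "http://127.0.0.1:8080"},  # optional; the provider passes it to requests.post as proxies
    )
    print(response)

Note that the provider flattens the whole message history into one "role: content" prompt ending in "assistant:", and splits the reply on the Chinese notice "> 若回答失败请重试或多刷新几次界面后重试" ("if the answer fails, retry or refresh the page a few times"), yielding whatever follows it.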