summaryrefslogtreecommitdiffstats
path: root/g4f/Provider/GetGpt.py
diff options
context:
space:
mode:
authorHeiner Lohaus <heiner@lohaus.eu>2023-10-04 07:20:51 +0200
committerHeiner Lohaus <heiner@lohaus.eu>2023-10-04 07:20:51 +0200
commit6a61cf811655fa87dbcb196025cc0b6040502293 (patch)
tree0e299a24d90095013854d04f9bf13617eebb8f6c /g4f/Provider/GetGpt.py
parentUse custom user dir (diff)
downloadgpt4free-6a61cf811655fa87dbcb196025cc0b6040502293.tar
gpt4free-6a61cf811655fa87dbcb196025cc0b6040502293.tar.gz
gpt4free-6a61cf811655fa87dbcb196025cc0b6040502293.tar.bz2
gpt4free-6a61cf811655fa87dbcb196025cc0b6040502293.tar.lz
gpt4free-6a61cf811655fa87dbcb196025cc0b6040502293.tar.xz
gpt4free-6a61cf811655fa87dbcb196025cc0b6040502293.tar.zst
gpt4free-6a61cf811655fa87dbcb196025cc0b6040502293.zip
Diffstat (limited to 'g4f/Provider/GetGpt.py')
-rw-r--r--g4f/Provider/GetGpt.py88
1 file changed, 0 insertions, 88 deletions
diff --git a/g4f/Provider/GetGpt.py b/g4f/Provider/GetGpt.py
deleted file mode 100644
index b96efaac..00000000
--- a/g4f/Provider/GetGpt.py
+++ /dev/null
@@ -1,88 +0,0 @@
-from __future__ import annotations
-
-import json
-import os
-import uuid
-
-import requests
-from Crypto.Cipher import AES
-
-from ..typing import Any, CreateResult
-from .base_provider import BaseProvider
-
-
class GetGpt(BaseProvider):
    """Provider backed by chat.getgpt.world (currently flagged as not working)."""

    url = 'https://chat.getgpt.world/'
    supports_stream = True
    working = False
    supports_gpt_35_turbo = True

    @staticmethod
    def create_completion(
        model: str,
        messages: list[dict[str, str]],
        stream: bool, **kwargs: Any) -> CreateResult:
        """Yield content chunks streamed from the GetGpt chat endpoint.

        The request payload is JSON-serialized, encrypted client-side via
        `_encrypt`, and posted as a `signature` field. The server replies
        with SSE-style `data: {...}` lines that are decoded incrementally.
        """
        headers = {
            'Content-Type' : 'application/json',
            'Referer'      : 'https://chat.getgpt.world/',
            'user-agent'   : 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/114.0.0.0 Safari/537.36',
        }

        # Model is pinned server-side to gpt-3.5-turbo; sampling knobs fall
        # back to the endpoint's defaults when not supplied by the caller.
        payload = {
            'messages'          : messages,
            'frequency_penalty' : kwargs.get('frequency_penalty', 0),
            'max_tokens'        : kwargs.get('max_tokens', 4000),
            'model'             : 'gpt-3.5-turbo',
            'presence_penalty'  : kwargs.get('presence_penalty', 0),
            'temperature'       : kwargs.get('temperature', 1),
            'top_p'             : kwargs.get('top_p', 1),
            'stream'            : True,
            'uuid'              : str(uuid.uuid4())
        }

        response = requests.post(
            'https://chat.getgpt.world/api/chat/stream',
            headers=headers,
            json={'signature': _encrypt(json.dumps(payload))},
            stream=True,
        )
        response.raise_for_status()

        for raw_line in response.iter_lines():
            # Skip keep-alives and lines without a delta payload.
            if b'content' not in raw_line:
                continue
            event = json.loads(raw_line.decode('utf-8').split('data: ')[1])
            yield event['choices'][0]['delta']['content']

    @classmethod
    @property
    def params(cls):
        """Human-readable summary of the keyword arguments this provider accepts."""
        accepted = [
            ('model', 'str'),
            ('messages', 'list[dict[str, str]]'),
            ('stream', 'bool'),
            ('temperature', 'float'),
            ('presence_penalty', 'int'),
            ('frequency_penalty', 'int'),
            ('top_p', 'int'),
            ('max_tokens', 'int'),
        ]
        described = ', '.join(f'{name}: {kind}' for name, kind in accepted)
        return f'g4f.provider.{cls.__name__} supports: ({described})'
-
-
def _encrypt(e: str):
    """Encrypt *e* with AES-128-CBC under a fresh random key and IV.

    Both key and IV are 16 ASCII hex characters (8 random bytes, hex-encoded).
    The return value is the hex ciphertext with the key and IV strings
    appended, so the receiver can strip them off the tail to decrypt.
    """
    key = os.urandom(8).hex().encode('utf-8')  # 16 bytes -> valid AES-128 key
    iv = os.urandom(8).hex().encode('utf-8')   # 16 bytes -> valid CBC IV
    plaintext = e.encode('utf-8')

    encrypted = AES.new(key, AES.MODE_CBC, iv).encrypt(_pad_data(plaintext))
    return encrypted.hex() + key.decode('utf-8') + iv.decode('utf-8')
-
-
def _pad_data(data: bytes) -> bytes:
    """Apply PKCS#7 padding so len(result) is a multiple of the AES block size.

    A full block of padding is appended when the input is already aligned,
    as PKCS#7 requires.
    """
    pad_len = AES.block_size - (len(data) % AES.block_size)
    return data + bytes([pad_len]) * pad_len