path: root/g4f/Provider/unfinished/TalkAi.py
author    Heiner Lohaus <hlohaus@users.noreply.github.com>  2023-11-18 04:38:31 +0100
committer Heiner Lohaus <hlohaus@users.noreply.github.com>  2023-11-18 04:38:31 +0100
commit    cadc507fad2fee59b23d1d8e73c472c077f468fc (patch)
tree      42b1c0c0ccf5e3aa7a843a3766d4d19b6a7ade8e /g4f/Provider/unfinished/TalkAi.py
parent    Improve providers (diff)
Diffstat (limited to 'g4f/Provider/unfinished/TalkAi.py')
-rw-r--r--  g4f/Provider/unfinished/TalkAi.py  59
1 file changed, 0 insertions, 59 deletions
diff --git a/g4f/Provider/unfinished/TalkAi.py b/g4f/Provider/unfinished/TalkAi.py
deleted file mode 100644
index b27d51c0..00000000
--- a/g4f/Provider/unfinished/TalkAi.py
+++ /dev/null
@@ -1,59 +0,0 @@
-from __future__ import annotations
-
-from aiohttp import ClientSession
-
-from ...typing import AsyncResult, Messages
-from ..base_provider import AsyncGeneratorProvider
-
-
-class TalkAi(AsyncGeneratorProvider):
-    url = "https://talkai.info"
-    supports_gpt_35_turbo = True
-
-    @classmethod
-    async def create_async_generator(
-        cls,
-        model: str,
-        messages: Messages,
-        proxy: str = None,
-        **kwargs
-    ) -> AsyncResult:
-        if not model:
-            model = "gpt-3.5-turbo"
-        headers = {
-            "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/118.0",
-            "Accept": "application/json",
-            "Accept-Language": "de,en-US;q=0.7,en;q=0.3",
-            "Accept-Encoding": "gzip, deflate, br",
-            "Referer": f"{cls.url}/de/chat/",
-            "content-type": "application/json",
-            "Origin": cls.url,
-            "Connection": "keep-alive",
-            "Sec-Fetch-Dest": "empty",
-            "Sec-Fetch-Mode": "cors",
-            "Sec-Fetch-Site": "same-origin",
-            "Pragma": "no-cache",
-            "Cache-Control": "no-cache"
-        }
-        async with ClientSession(headers=headers) as session:
-            history = [{
-                "content": message["content"],
-                "from": "you" if message["role"] == "user" else "chatGPT"
-            } for message in messages]
-            data = {
-                "type": "chat",
-                "message": messages[-1]["content"],
-                "messagesHistory": history,
-                "model": model,
-                "max_tokens": 256,
-                "temperature": 1,
-                "top_p": 1,
-                "presence_penalty": 0,
-                "frequency_penalty": 0,
-                **kwargs
-            }
-            async with session.post(f"{cls.url}/de/chat/send2/", json=data, proxy=proxy) as response:
-                response.raise_for_status()
-                async for chunk in response.content:
-                    if chunk:
-                        yield chunk.decode()
\ No newline at end of file
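
For reference, a minimal sketch of how the deleted provider would have been driven, assuming the module at g4f/Provider/unfinished/TalkAi.py were restored locally. The class name and the create_async_generator classmethod are taken from the diff above; the example messages and the wrapper script are illustrative only.

import asyncio

async def main() -> None:
    # Hypothetical: assumes the deleted module has been restored locally.
    from g4f.Provider.unfinished.TalkAi import TalkAi

    messages = [{"role": "user", "content": "Hello"}]
    # create_async_generator is an async generator: it streams the decoded
    # response chunks from talkai.info as they arrive.
    async for chunk in TalkAi.create_async_generator("gpt-3.5-turbo", messages):
        print(chunk, end="", flush=True)

if __name__ == "__main__":
    asyncio.run(main())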