path: root/g4f/Provider/DeepInfra.py
author     abc <98614666+xtekky@users.noreply.github.com>  2023-11-24 15:16:00 +0100
committer  abc <98614666+xtekky@users.noreply.github.com>  2023-11-24 15:16:00 +0100
commit     10a38324582d34a2514ae0be64bc3a03774bfd77 (patch)
tree       5e931be07a2c791112d99092e6f66bc172e0e1ce /g4f/Provider/DeepInfra.py
parent     ~ | g4f `v-0.1.9.0` (diff)
Diffstat (limited to 'g4f/Provider/DeepInfra.py')
-rw-r--r--  g4f/Provider/DeepInfra.py  112
1 file changed, 55 insertions(+), 57 deletions(-)
diff --git a/g4f/Provider/DeepInfra.py b/g4f/Provider/DeepInfra.py
index da6333ad..754439c1 100644
--- a/g4f/Provider/DeepInfra.py
+++ b/g4f/Provider/DeepInfra.py
@@ -1,64 +1,62 @@
 from __future__ import annotations
-import json
-from aiohttp import ClientSession
+import requests, json
+from ..typing import CreateResult, Messages
+from .base_provider import BaseProvider
-from ..typing import AsyncResult, Messages
-from .base_provider import AsyncGeneratorProvider
+class DeepInfra(BaseProvider):
+    url: str = "https://deepinfra.com"
+    working: bool = True
+    supports_stream: bool = True
+    supports_message_history: bool = True
-
-class DeepInfra(AsyncGeneratorProvider):
-    url = "https://deepinfra.com"
-    supports_message_history = True
-    working = True
-
-    @classmethod
-    async def create_async_generator(
-        cls,
-        model: str,
-        messages: Messages,
-        proxy: str = None,
-        **kwargs
-    ) -> AsyncResult:
-        if not model:
-            model = "meta-llama/Llama-2-70b-chat-hf"
+    @staticmethod
+    def create_completion(model: str,
+                          messages: Messages,
+                          stream: bool,
+                          **kwargs) -> CreateResult:
+
         headers = {
-            "User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:109.0) Gecko/20100101 Firefox/118.0",
-            "Accept": "text/event-stream",
-            "Accept-Language": "de,en-US;q=0.7,en;q=0.3",
-            "Accept-Encoding": "gzip, deflate, br",
-            "Referer": f"{cls.url}/",
-            "Content-Type": "application/json",
-            "X-Deepinfra-Source": "web-page",
-            "Origin": cls.url,
-            "Connection": "keep-alive",
-            "Sec-Fetch-Dest": "empty",
-            "Sec-Fetch-Mode": "cors",
-            "Sec-Fetch-Site": "same-site",
-            "Pragma": "no-cache",
-            "Cache-Control": "no-cache",
+            'Accept-Language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
+            'Cache-Control': 'no-cache',
+            'Connection': 'keep-alive',
+            'Content-Type': 'application/json',
+            'Origin': 'https://deepinfra.com',
+            'Pragma': 'no-cache',
+            'Referer': 'https://deepinfra.com/',
+            'Sec-Fetch-Dest': 'empty',
+            'Sec-Fetch-Mode': 'cors',
+            'Sec-Fetch-Site': 'same-site',
+            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36',
+            'X-Deepinfra-Source': 'web-embed',
+            'accept': 'text/event-stream',
+            'sec-ch-ua': '"Google Chrome";v="119", "Chromium";v="119", "Not?A_Brand";v="24"',
+            'sec-ch-ua-mobile': '?0',
+            'sec-ch-ua-platform': '"macOS"',
         }
-        async with ClientSession(headers=headers) as session:
-            data = {
-                "model": model,
-                "messages": messages,
-                "stream": True,
-            }
-            async with session.post(
-                "https://api.deepinfra.com/v1/openai/chat/completions",
-                json=data,
-                proxy=proxy
-            ) as response:
-                response.raise_for_status()
-                first = True
-                async for line in response.content:
-                    if line.startswith(b"data: [DONE]"):
-                        break
-                    elif line.startswith(b"data: "):
-                        chunk = json.loads(line[6:])["choices"][0]["delta"].get("content")
+
+        json_data = json.dumps({
+            'model' : 'meta-llama/Llama-2-70b-chat-hf',
+            'messages': messages,
+            'stream' : True}, separators=(',', ':'))
+
+        response = requests.post('https://api.deepinfra.com/v1/openai/chat/completions',
+                                 headers=headers, data=json_data, stream=True)
+
+        response.raise_for_status()
+        first = True
+
+        for line in response.iter_content(chunk_size=1024):
+            if line.startswith(b"data: [DONE]"):
+                break
+
+            elif line.startswith(b"data: "):
+                chunk = json.loads(line[6:])["choices"][0]["delta"].get("content")
+
+                if chunk:
+                    if first:
+                        chunk = chunk.lstrip()
                     if chunk:
-                            if first:
-                                chunk = chunk.lstrip()
-                            if chunk:
-                                first = False
-                                yield chunk
\ No newline at end of file
+                        first = False
+
+                        yield (chunk)
\ No newline at end of file
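
For context, the patch swaps the aiohttp-based async generator for a blocking requests client that parses DeepInfra's OpenAI-compatible server-sent-event stream by hand. Below is a minimal standalone sketch of that same streaming loop. The endpoint, payload shape, and the "data:" / "[DONE]" framing are taken from the patch; passing the model argument through (the committed code hardcodes Llama-2-70b) and reading with iter_lines() instead of iter_content(chunk_size=1024) (fixed-size byte chunks can split a data: record in half) are this sketch's own choices, not part of the commit.

    # Sketch only: stream one completion from DeepInfra's OpenAI-compatible API.
    # Endpoint and SSE framing come from the patch above; the `model` pass-through
    # and iter_lines() are assumptions of this sketch, not what the commit ships.
    from __future__ import annotations

    import json
    from typing import Iterator

    import requests

    API_URL = "https://api.deepinfra.com/v1/openai/chat/completions"

    def stream_completion(messages: list[dict],
                          model: str = "meta-llama/Llama-2-70b-chat-hf") -> Iterator[str]:
        response = requests.post(
            API_URL,
            headers={"Content-Type": "application/json", "Accept": "text/event-stream"},
            json={"model": model, "messages": messages, "stream": True},
            stream=True,  # keep the connection open and read the body incrementally
        )
        response.raise_for_status()

        first = True
        for line in response.iter_lines():  # yields one SSE record per iteration
            if line.startswith(b"data: [DONE]"):
                break
            if line.startswith(b"data: "):
                delta = json.loads(line[6:])["choices"][0]["delta"].get("content")
                if delta:
                    if first:
                        delta = delta.lstrip()  # trim leading whitespace off the first token
                    if delta:
                        first = False
                        yield delta

The browser-style headers the patch sends (User-Agent, sec-ch-ua, X-Deepinfra-Source, and so on) are omitted here for brevity; the public endpoint may require some of them in practice.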
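
And a sketch of how the rewritten provider would typically be driven from g4f's synchronous entry point, assuming the ChatCompletion API of the v0.1.9.x release line; with stream=True, create() returns a generator fed by DeepInfra.create_completion. Note that because this commit hardcodes the model in the request body, the model passed here would still be served by Llama-2-70b.

    # Usage sketch, assuming g4f's ChatCompletion API of this release line.
    import g4f
    from g4f.Provider import DeepInfra

    for chunk in g4f.ChatCompletion.create(
        model="meta-llama/Llama-2-70b-chat-hf",
        provider=DeepInfra,
        messages=[{"role": "user", "content": "Hello"}],
        stream=True,  # dispatches to DeepInfra.create_completion, which yields str chunks
    ):
        print(chunk, end="", flush=True)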