path: root/g4f/Provider/needs_auth/DeepInfra.py
author     kqlio67 <kqlio67@users.noreply.github.com>  2024-11-06 13:10:19 +0100
committer  kqlio67 <kqlio67@users.noreply.github.com>  2024-11-06 13:10:19 +0100
commit     087a4d684c456ca93e2689083074ed909974e929 (patch)
tree       696cc7511290f8a9165cc01e812d18f173a938be  /g4f/Provider/needs_auth/DeepInfra.py
parent     Update (g4f/gui/client/index.html) (diff)
Diffstat (limited to 'g4f/Provider/needs_auth/DeepInfra.py')
-rw-r--r--  g4f/Provider/needs_auth/DeepInfra.py  58
1 file changed, 58 insertions(+), 0 deletions(-)
diff --git a/g4f/Provider/needs_auth/DeepInfra.py b/g4f/Provider/needs_auth/DeepInfra.py
new file mode 100644
index 00000000..ebe5bfbf
--- /dev/null
+++ b/g4f/Provider/needs_auth/DeepInfra.py
@@ -0,0 +1,58 @@
+from __future__ import annotations
+
+import requests
+from ...typing import AsyncResult, Messages
+from .Openai import Openai
+
+class DeepInfra(Openai):
+    label = "DeepInfra"
+    url = "https://deepinfra.com"
+    working = True
+    needs_auth = True
+    supports_stream = True
+    supports_message_history = True
+    default_model = "meta-llama/Meta-Llama-3.1-70B-Instruct"
+
+    @classmethod
+    def get_models(cls):
+        if not cls.models:
+            url = 'https://api.deepinfra.com/models/featured'
+            models = requests.get(url).json()
+            cls.models = [model['model_name'] for model in models if model["type"] == "text-generation"]
+        return cls.models
+
+    @classmethod
+    def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        stream: bool,
+        api_base: str = "https://api.deepinfra.com/v1/openai",
+        temperature: float = 0.7,
+        max_tokens: int = 1028,
+        **kwargs
+    ) -> AsyncResult:
+        headers = {
+            'Accept-Encoding': 'gzip, deflate, br',
+            'Accept-Language': 'en-US',
+            'Connection': 'keep-alive',
+            'Origin': 'https://deepinfra.com',
+            'Referer': 'https://deepinfra.com/',
+            'Sec-Fetch-Dest': 'empty',
+            'Sec-Fetch-Mode': 'cors',
+            'Sec-Fetch-Site': 'same-site',
+            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/119.0.0.0 Safari/537.36',
+            'X-Deepinfra-Source': 'web-embed',
+            'sec-ch-ua': '"Google Chrome";v="119", "Chromium";v="119", "Not?A_Brand";v="24"',
+            'sec-ch-ua-mobile': '?0',
+            'sec-ch-ua-platform': '"macOS"',
+        }
+        return super().create_async_generator(
+            model, messages,
+            stream=stream,
+            api_base=api_base,
+            temperature=temperature,
+            max_tokens=max_tokens,
+            headers=headers,
+            **kwargs
+        )
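
For context, a minimal standalone sketch (not part of the commit) of what the new provider wires together: the featured-models listing that get_models() filters, and a plain non-streaming request against the same OpenAI-compatible api_base the provider forwards to its Openai base class. The request body and response shape are assumed to follow the standard OpenAI chat-completions schema, which the diff only implies via the base class; API_KEY is a hypothetical placeholder (needs_auth = True).

# Standalone sketch; assumptions are marked in the comments below.
import requests

API_BASE = "https://api.deepinfra.com/v1/openai"
API_KEY = "YOUR_DEEPINFRA_API_KEY"  # hypothetical placeholder, required since needs_auth = True

# Mirror DeepInfra.get_models(): the featured-models endpoint is assumed to
# return a JSON list of objects with "model_name" and "type" fields.
featured = requests.get("https://api.deepinfra.com/models/featured").json()
text_models = [m["model_name"] for m in featured if m["type"] == "text-generation"]
print(text_models[:5])

# A plain, non-streaming chat completion against the same api_base, using the
# standard OpenAI chat-completions schema (assumed, not shown in the diff).
resp = requests.post(
    f"{API_BASE}/chat/completions",
    headers={"Authorization": f"Bearer {API_KEY}"},
    json={
        "model": "meta-llama/Meta-Llama-3.1-70B-Instruct",
        "temperature": 0.7,
        "max_tokens": 1028,
        "messages": [{"role": "user", "content": "Say hello."}],
    },
)
print(resp.json()["choices"][0]["message"]["content"])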