path: root/g4f/Provider/DeepInfra.py
author     H Lohaus <hlohaus@users.noreply.github.com>    2024-04-22 01:35:07 +0200
committer  GitHub <noreply@github.com>                    2024-04-22 01:35:07 +0200
commit     4b4d1f08b5c75c8c8932b5edfbb0d020f8e029a7 (patch)
tree       6ed0cfc6cd53a3ab32565d6199a929ac1ea6ad80 /g4f/Provider/DeepInfra.py
parent     Merge pull request #1869 from hlohaus/carst (diff)
parent     Add vision models to readme (diff)
Diffstat (limited to 'g4f/Provider/DeepInfra.py')
-rw-r--r--   g4f/Provider/DeepInfra.py   33
1 file changed, 22 insertions(+), 11 deletions(-)
diff --git a/g4f/Provider/DeepInfra.py b/g4f/Provider/DeepInfra.py
index 971424b7..35ff84a1 100644
--- a/g4f/Provider/DeepInfra.py
+++ b/g4f/Provider/DeepInfra.py
@@ -1,17 +1,22 @@
from __future__ import annotations
import requests
-from ..typing import AsyncResult, Messages
+from ..typing import AsyncResult, Messages, ImageType
+from ..image import to_data_uri
from .needs_auth.Openai import Openai
class DeepInfra(Openai):
label = "DeepInfra"
url = "https://deepinfra.com"
working = True
- needs_auth = False
+ has_auth = True
supports_stream = True
supports_message_history = True
- default_model = 'HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1'
+ default_model = "meta-llama/Meta-Llama-3-70b-instruct"
+ default_vision_model = "llava-hf/llava-1.5-7b-hf"
+ model_aliases = {
+ 'mixtral-8x22b': 'HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1'
+ }
@classmethod
def get_models(cls):
@@ -27,19 +32,12 @@ class DeepInfra(Openai):
model: str,
messages: Messages,
stream: bool,
+ image: ImageType = None,
api_base: str = "https://api.deepinfra.com/v1/openai",
temperature: float = 0.7,
max_tokens: int = 1028,
**kwargs
) -> AsyncResult:
-
- if not '/' in model:
- models = {
- 'mixtral-8x22b': 'HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1',
- 'dbrx-instruct': 'databricks/dbrx-instruct',
- }
- model = models.get(model, model)
-
headers = {
'Accept-Encoding': 'gzip, deflate, br',
'Accept-Language': 'en-US',
@@ -55,6 +53,19 @@ class DeepInfra(Openai):
'sec-ch-ua-mobile': '?0',
'sec-ch-ua-platform': '"macOS"',
}
+ if image is not None:
+ if not model:
+ model = cls.default_vision_model
+ messages[-1]["content"] = [
+ {
+ "type": "image_url",
+ "image_url": {"url": to_data_uri(image)}
+ },
+ {
+ "type": "text",
+ "text": messages[-1]["content"]
+ }
+ ]
return super().create_async_generator(
model, messages,
stream=stream,
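
For context, a minimal sketch of what the new image path does: the patch rewrites the last message's plain-text content into an image_url/text content pair, using a data URI built from the raw image bytes, before delegating to the OpenAI-compatible endpoint. The helper names below (to_data_uri_sketch, attach_image) and the photo.jpg file are illustrative stand-ins, not part of g4f; the real provider uses g4f's own to_data_uri helper and receives the image via the image= keyword of create_async_generator.

import base64
from typing import Any

def to_data_uri_sketch(image_bytes: bytes, mime: str = "image/jpeg") -> str:
    # Simplified stand-in for g4f's to_data_uri helper: embed the raw image
    # bytes as a base64 data: URI, the format the OpenAI-style API accepts.
    return f"data:{mime};base64,{base64.b64encode(image_bytes).decode()}"

def attach_image(messages: list[dict[str, Any]], image_bytes: bytes) -> list[dict[str, Any]]:
    # Mirror the patch's transformation: replace the last message's plain-text
    # content with a two-part list holding an image_url entry and the original text.
    messages[-1]["content"] = [
        {"type": "image_url", "image_url": {"url": to_data_uri_sketch(image_bytes)}},
        {"type": "text", "text": messages[-1]["content"]},
    ]
    return messages

if __name__ == "__main__":
    msgs = [{"role": "user", "content": "What is in this picture?"}]
    with open("photo.jpg", "rb") as f:  # hypothetical local image file
        msgs = attach_image(msgs, f.read())
    print(msgs[-1]["content"][0]["image_url"]["url"][:40])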