summaryrefslogtreecommitdiffstats
path: root/g4f/Provider
diff options
context:
space:
mode:
authorabc <98614666+xtekky@users.noreply.github.com>2024-04-13 03:46:20 +0200
committerabc <98614666+xtekky@users.noreply.github.com>2024-04-13 03:46:20 +0200
commited8afc20e82b285cbaf5c222fa1e05d8d2651151 (patch)
treeb50e4c6a6fcf7a618565bf9a9bddab4165e5c50f /g4f/Provider
parentimprove models list for gpt-3.5-turbo (diff)
downloadgpt4free-ed8afc20e82b285cbaf5c222fa1e05d8d2651151.tar
gpt4free-ed8afc20e82b285cbaf5c222fa1e05d8d2651151.tar.gz
gpt4free-ed8afc20e82b285cbaf5c222fa1e05d8d2651151.tar.bz2
gpt4free-ed8afc20e82b285cbaf5c222fa1e05d8d2651151.tar.lz
gpt4free-ed8afc20e82b285cbaf5c222fa1e05d8d2651151.tar.xz
gpt4free-ed8afc20e82b285cbaf5c222fa1e05d8d2651151.tar.zst
gpt4free-ed8afc20e82b285cbaf5c222fa1e05d8d2651151.zip
Diffstat (limited to '')
-rw-r--r--g4f/Provider/DeepInfra.py10
-rw-r--r--g4f/Provider/needs_auth/Openai.py1
2 files changed, 10 insertions, 1 deletion
diff --git a/g4f/Provider/DeepInfra.py b/g4f/Provider/DeepInfra.py
index 68aaf8b9..971424b7 100644
--- a/g4f/Provider/DeepInfra.py
+++ b/g4f/Provider/DeepInfra.py
@@ -11,7 +11,7 @@ class DeepInfra(Openai):
needs_auth = False
supports_stream = True
supports_message_history = True
- default_model = 'meta-llama/Llama-2-70b-chat-hf'
+ default_model = 'HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1'
@classmethod
def get_models(cls):
@@ -32,6 +32,14 @@ class DeepInfra(Openai):
max_tokens: int = 1028,
**kwargs
) -> AsyncResult:
+
+ if not '/' in model:
+ models = {
+ 'mixtral-8x22b': 'HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1',
+ 'dbrx-instruct': 'databricks/dbrx-instruct',
+ }
+ model = models.get(model, model)
+
headers = {
'Accept-Encoding': 'gzip, deflate, br',
'Accept-Language': 'en-US',
diff --git a/g4f/Provider/needs_auth/Openai.py b/g4f/Provider/needs_auth/Openai.py
index 81ba5981..80318f6d 100644
--- a/g4f/Provider/needs_auth/Openai.py
+++ b/g4f/Provider/needs_auth/Openai.py
@@ -51,6 +51,7 @@ class Openai(AsyncGeneratorProvider, ProviderModelMixin):
stream=stream,
**extra_data
)
+
async with session.post(f"{api_base.rstrip('/')}/chat/completions", json=data) as response:
await raise_for_status(response)
if not stream: