From ed8afc20e82b285cbaf5c222fa1e05d8d2651151 Mon Sep 17 00:00:00 2001
From: abc <98614666+xtekky@users.noreply.github.com>
Date: Sat, 13 Apr 2024 02:46:20 +0100
Subject: add more models mixtral 8x22b

---
 g4f/Provider/DeepInfra.py         | 10 +++++++++-
 g4f/Provider/needs_auth/Openai.py |  1 +
 2 files changed, 10 insertions(+), 1 deletion(-)

(limited to 'g4f/Provider')

diff --git a/g4f/Provider/DeepInfra.py b/g4f/Provider/DeepInfra.py
index 68aaf8b9..971424b7 100644
--- a/g4f/Provider/DeepInfra.py
+++ b/g4f/Provider/DeepInfra.py
@@ -11,7 +11,7 @@ class DeepInfra(Openai):
     needs_auth = False
     supports_stream = True
     supports_message_history = True
-    default_model = 'meta-llama/Llama-2-70b-chat-hf'
+    default_model = 'HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1'
 
     @classmethod
     def get_models(cls):
@@ -32,6 +32,14 @@ class DeepInfra(Openai):
         max_tokens: int = 1028,
         **kwargs
     ) -> AsyncResult:
+
+        if not '/' in model:
+            models = {
+                'mixtral-8x22b': 'HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1',
+                'dbrx-instruct': 'databricks/dbrx-instruct',
+            }
+            model = models.get(model, model)
+
         headers = {
             'Accept-Encoding': 'gzip, deflate, br',
             'Accept-Language': 'en-US',
diff --git a/g4f/Provider/needs_auth/Openai.py b/g4f/Provider/needs_auth/Openai.py
index 81ba5981..80318f6d 100644
--- a/g4f/Provider/needs_auth/Openai.py
+++ b/g4f/Provider/needs_auth/Openai.py
@@ -51,6 +51,7 @@ class Openai(AsyncGeneratorProvider, ProviderModelMixin):
             stream=stream,
             **extra_data
         )
+
         async with session.post(f"{api_base.rstrip('/')}/chat/completions", json=data) as response:
             await raise_for_status(response)
             if not stream:
--
cgit v1.2.3
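
The DeepInfra hunk above resolves short aliases ('mixtral-8x22b', 'dbrx-instruct') to full DeepInfra model IDs whenever the requested name contains no '/'. A minimal standalone sketch of that lookup pattern follows; the resolve_model helper name is an illustration and not part of the patch, while the alias table mirrors the one introduced in create_async_generator.

# Sketch of the alias resolution added to DeepInfra.create_async_generator.
# resolve_model is a hypothetical helper; the alias table mirrors the patch.
ALIASES = {
    'mixtral-8x22b': 'HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1',
    'dbrx-instruct': 'databricks/dbrx-instruct',
}

def resolve_model(model: str) -> str:
    # Names containing '/' are assumed to already be full DeepInfra model IDs.
    if '/' not in model:
        # dict.get falls back to the original name for unknown aliases.
        return ALIASES.get(model, model)
    return model

assert resolve_model('mixtral-8x22b') == 'HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1'
assert resolve_model('databricks/dbrx-instruct') == 'databricks/dbrx-instruct'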