From 2fad27b2c5ce6b05591d2921c7bafefa2de7c9b5 Mon Sep 17 00:00:00 2001
From: abc <98614666+xtekky@users.noreply.github.com>
Date: Sat, 13 Apr 2024 03:09:11 +0100
Subject: new gpt-4 beating opensource models

---
 g4f/Provider/HuggingChat.py | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/g4f/Provider/HuggingChat.py b/g4f/Provider/HuggingChat.py
index b80795fe..882edb78 100644
--- a/g4f/Provider/HuggingChat.py
+++ b/g4f/Provider/HuggingChat.py
@@ -14,13 +14,12 @@ class HuggingChat(AsyncGeneratorProvider, ProviderModelMixin):
     working = True
     default_model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
     models = [
-        "mistralai/Mixtral-8x7B-Instruct-v0.1",
-        "google/gemma-7b-it",
-        "meta-llama/Llama-2-70b-chat-hf",
-        "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO",
-        "codellama/CodeLlama-34b-Instruct-hf",
-        "mistralai/Mistral-7B-Instruct-v0.2",
-        "openchat/openchat-3.5-0106",
+        "HuggingFaceH4/zephyr-orpo-141b-A35b-v0.1",
+        'CohereForAI/c4ai-command-r-plus',
+        'mistralai/Mixtral-8x7B-Instruct-v0.1',
+        'google/gemma-1.1-7b-it',
+        'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
+        'mistralai/Mistral-7B-Instruct-v0.2'
     ]
     model_aliases = {
         "openchat/openchat_3.5": "openchat/openchat-3.5-0106",
@@ -48,6 +47,7 @@ class HuggingChat(AsyncGeneratorProvider, ProviderModelMixin):
         **kwargs
     ) -> AsyncResult:
         options = {"model": cls.get_model(model)}
+
         system_prompt = "\n".join([message["content"] for message in messages if message["role"] == "system"])
         if system_prompt:
             options["preprompt"] = system_prompt
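
Note: the following is a minimal usage sketch, not part of the commit, showing how the updated provider might be driven through g4f's ChatCompletion API. The chosen model, prompts, and cookie caveat are illustrative assumptions; any entry from the updated models list above should work the same way, and the "system" message is collected by the provider and forwarded to HuggingChat as the "preprompt" option (see the hunk at line 47 above).

    import g4f
    from g4f.Provider import HuggingChat

    messages = [
        # The provider joins all "system" messages into system_prompt and
        # sends it as the "preprompt" option.
        {"role": "system", "content": "You are a concise assistant."},
        {"role": "user", "content": "Summarize ORPO training in one sentence."},
    ]

    # Depending on the setup, valid huggingface.co cookies may be required
    # for the HuggingChat backend to accept the request.
    response = g4f.ChatCompletion.create(
        model="CohereForAI/c4ai-command-r-plus",  # one of the newly added models
        provider=HuggingChat,
        messages=messages,
    )
    print(response)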