From 0850c81118e72246c1aa030dc3f7495b3a0fbb54 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Eien=20Mojiki=20=F0=9F=8D=95?= <95165750+eienmojiki206@users.noreply.github.com>
Date: Fri, 7 Jun 2024 12:56:50 +0700
Subject: Update `HuggingChat` provider

- Temporarily fix NUL characters appearing in response tokens
- Add the new model `01-ai/Yi-1.5-34B-Chat`, which works well with Japanese

---
 g4f/Provider/HuggingChat.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/g4f/Provider/HuggingChat.py b/g4f/Provider/HuggingChat.py
index ac324390..cf2a89da 100644
--- a/g4f/Provider/HuggingChat.py
+++ b/g4f/Provider/HuggingChat.py
@@ -23,7 +23,8 @@ class HuggingChat(AsyncGeneratorProvider, ProviderModelMixin):
         'NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO',
         'mistralai/Mistral-7B-Instruct-v0.2',
         'meta-llama/Meta-Llama-3-70B-Instruct',
-        'microsoft/Phi-3-mini-4k-instruct'
+        'microsoft/Phi-3-mini-4k-instruct',
+        '01-ai/Yi-1.5-34B-Chat'
     ]
     model_aliases = {
         "mistralai/Mistral-7B-Instruct-v0.1": "mistralai/Mistral-7B-Instruct-v0.2"
@@ -102,7 +103,7 @@ class HuggingChat(AsyncGeneratorProvider, ProviderModelMixin):
             elif line["type"] == "stream":
                 token = line["token"]
                 if first_token:
-                    token = token.lstrip()
+                    token = token.lstrip().replace('\u0000', '')
                     first_token = False
                 yield token
             elif line["type"] == "finalAnswer":
-- 
cgit v1.2.3
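
Editor's sketch (not part of the patch): a minimal illustration of the clean-up the second hunk applies to the first streamed token. The helper name `clean_first_token` is hypothetical and does not exist in HuggingChat.py; it only mirrors the patched expression.

    def clean_first_token(token: str) -> str:
        # Mirror the patched logic: strip leading whitespace, then drop any
        # NUL characters that the provider currently emits in the first token.
        return token.lstrip().replace('\u0000', '')

    # Example: a first token prefixed with whitespace and a NUL character.
    assert clean_first_token(' \u0000Hello') == 'Hello'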