From 5fd118f3c9d3d3a932695a3b413d8926d8ad58c0 Mon Sep 17 00:00:00 2001
From: PD <56485898+pratham-darooka@users.noreply.github.com>
Date: Fri, 19 Apr 2024 12:57:33 +0530
Subject: Added Meta llama-3 support! (#1856)

* Added Meta llama-3 support!

Decided to change llama2.py to llama.py to hold all the llama family models.

* updated HuggingChat provider

* Update FlowGpt.py
---
 g4f/Provider/FlowGpt.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

(limited to 'g4f/Provider/FlowGpt.py')

diff --git a/g4f/Provider/FlowGpt.py b/g4f/Provider/FlowGpt.py
index 7edd6f19..6c2aa046 100644
--- a/g4f/Provider/FlowGpt.py
+++ b/g4f/Provider/FlowGpt.py
@@ -99,4 +99,4 @@ class FlowGpt(AsyncGeneratorProvider, ProviderModelMixin):
                     if "event" not in message:
                         continue
                     if message["event"] == "text":
-                        yield message["data"]
\ No newline at end of file
+                        yield message["data"]
-- 
cgit v1.2.3
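
For context, a minimal sketch of how the llama-3 models added by this commit might be requested through g4f's classic ChatCompletion interface. The model identifier "llama-3-70b-instruct" is an assumption for illustration; the actual names are registered by the renamed llama.py and the g4f model list, so check there for the exact strings.

```python
# Hypothetical usage sketch, not part of this patch.
# Assumes the model name "llama-3-70b-instruct"; verify the real
# identifier in g4f/models.py after this commit.
import g4f

response = g4f.ChatCompletion.create(
    model="llama-3-70b-instruct",
    messages=[{"role": "user", "content": "Hello, Llama 3!"}],
)
print(response)
```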