From b8a3db526ce25932658131fcb96a3576f43672b1 Mon Sep 17 00:00:00 2001
From: hdsz25
Date: Sat, 28 Oct 2023 23:58:36 +0800
Subject: Update backend.py, index.html, requirements.txt (#1180)

* Update backend.py

Use the model received from the user's selection in the web interface
instead of a hardcoded one.

* Update index.html

Add Llama2 as a provider selection and include the model selection for
Llama2: llama2-70b, llama2-13b, llama2-7b.

* Update requirements.txt

Add asgiref to enable async views for Flask in the API; without it Flask
raises "RuntimeError: Install Flask with the 'async' extra in order to use
async views".
---
 g4f/gui/client/html/index.html | 10 +++++-----
 g4f/gui/server/backend.py      |  2 +-
 requirements.txt               |  3 ++-
 3 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/g4f/gui/client/html/index.html b/g4f/gui/client/html/index.html
index 9dea5fe6..66534a51 100644
--- a/g4f/gui/client/html/index.html
+++ b/g4f/gui/client/html/index.html
@@ -130,9 +130,9 @@
-
-
-
+
+
+
@@ -188,7 +188,7 @@
-
+
@@ -203,4 +203,4 @@
-
\ No newline at end of file
+
diff --git a/g4f/gui/server/backend.py b/g4f/gui/server/backend.py
index 304b9fc8..3d7bfedc 100644
--- a/g4f/gui/server/backend.py
+++ b/g4f/gui/server/backend.py
@@ -56,7 +56,7 @@ class Backend_Api:
 
         def stream():
             yield from g4f.ChatCompletion.create(
-                model=g4f.models.gpt_35_long,
+                model=model,
                 provider=get_provider(provider),
                 messages=messages,
                 stream=True,
diff --git a/requirements.txt b/requirements.txt
index 857cc324..ed0d6e4b 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -18,4 +18,5 @@ loguru
 tiktoken
 pillow
 platformdirs
-numpy
\ No newline at end of file
+numpy
+asgiref
--
cgit v1.2.3
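
For reference, a minimal sketch of the pattern the backend.py change applies: the
handler reads the model chosen in the web UI from the request body and forwards it
to g4f.ChatCompletion.create instead of hardcoding g4f.models.gpt_35_long. The route,
the request field names, and the get_provider helper below are assumptions for
illustration; only model, provider, messages, get_provider and the
ChatCompletion.create call itself appear in the diff above.

    # Hypothetical, simplified Flask handler; the route and request field names
    # are assumed for illustration and may differ from the real g4f GUI backend.
    import g4f
    from flask import Flask, Response, request

    app = Flask(__name__)

    def get_provider(name):
        # Assumed helper: resolve a provider name sent by the UI, or None for auto-selection.
        return getattr(g4f.Provider, name, None) if name else None

    @app.route("/api/conversation", methods=["POST"])
    def conversation():
        data = request.json
        model = data.get("model", "gpt-3.5-turbo")   # model picked in the UI dropdown
        provider = data.get("provider")              # e.g. "Llama2"
        messages = data.get("messages", [])

        def stream():
            # The commit's fix: forward the user's chosen model instead of
            # hardcoding g4f.models.gpt_35_long.
            yield from g4f.ChatCompletion.create(
                model=model,
                provider=get_provider(provider),
                messages=messages,
                stream=True,
            )

        return Response(stream(), mimetype="text/event-stream")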
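
On the asgiref addition: Flask 2.x only accepts "async def" views when its optional
async support (which depends on asgiref) is installed, and otherwise raises the
RuntimeError quoted in the commit message. A minimal sketch of an async view under
that assumption:

    # Requires asgiref, e.g. via "pip install asgiref" or "pip install flask[async]";
    # without it, Flask raises:
    # RuntimeError: Install Flask with the 'async' extra in order to use async views.
    import asyncio
    from flask import Flask

    app = Flask(__name__)

    @app.route("/ping")
    async def ping():
        # Flask wraps this coroutine with asgiref's async_to_sync under the hood.
        await asyncio.sleep(0.1)   # stand-in for an awaited provider call
        return {"status": "ok"}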