From c74a6943a8bc03a212643e430a1b873da89f81a6 Mon Sep 17 00:00:00 2001
From: kqlio67
Date: Mon, 11 Nov 2024 10:36:56 +0200
Subject: Update (g4f/Provider/Cloudflare.py)

---
 g4f/Provider/Cloudflare.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/g4f/Provider/Cloudflare.py b/g4f/Provider/Cloudflare.py
index 2443f616..34d7c585 100644
--- a/g4f/Provider/Cloudflare.py
+++ b/g4f/Provider/Cloudflare.py
@@ -21,10 +21,7 @@ class Cloudflare(AsyncGeneratorProvider, ProviderModelMixin):
     supports_message_history = True
 
     default_model = '@cf/meta/llama-3.1-8b-instruct-awq'
-    models = [
-        '@cf/tiiuae/falcon-7b-instruct', # Specific answer
-
-
+    models = [
         '@hf/google/gemma-7b-it',
         '@cf/meta/llama-2-7b-chat-fp16',
 
@@ -120,9 +117,12 @@ class Cloudflare(AsyncGeneratorProvider, ProviderModelMixin):
 
         scraper = cloudscraper.create_scraper()
 
+
+        prompt = messages[-1]['content']
+
         data = {
             "messages": [
-                {"role": "user", "content": format_prompt(messages)}
+                {"role": "user", "content": prompt}
             ],
             "lora": None,
             "model": model,
@@ -147,7 +147,7 @@ class Cloudflare(AsyncGeneratorProvider, ProviderModelMixin):
 
         response.raise_for_status()
 
-        skip_tokens = ["", "", "[DONE]", "<|endoftext|>", "<|end|>"]
+        skip_tokens = ["", "", "", "[DONE]", "<|endoftext|>", "<|end|>"]
 
         filtered_response = ""
         for line in response.iter_lines():
-- 
cgit v1.2.3
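
A minimal sketch of the behavioural effect of this patch, assuming only the payload keys visible in the hunks above (every other name here is illustrative, not part of the provider): the request body now carries only the last message's content instead of the full history produced by format_prompt(messages).

    # sketch.py - illustrative only, not the provider's actual implementation
    def build_payload(messages: list[dict], model: str) -> dict:
        # After this patch, only the most recent message's content is forwarded.
        prompt = messages[-1]['content']
        return {
            "messages": [{"role": "user", "content": prompt}],
            "lora": None,    # key shown in the hunk context
            "model": model,  # key shown in the hunk context
        }

    # Usage with a hypothetical conversation history:
    history = [
        {"role": "user", "content": "Hello"},
        {"role": "assistant", "content": "Hi, how can I help?"},
        {"role": "user", "content": "Summarize this change."},
    ]
    print(build_payload(history, "@cf/meta/llama-3.1-8b-instruct-awq")["messages"])
    # -> [{'role': 'user', 'content': 'Summarize this change.'}]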