author     H Lohaus <hlohaus@users.noreply.github.com>   2024-02-28 09:48:57 +0100
committer  GitHub <noreply@github.com>                   2024-02-28 09:48:57 +0100
commit     96db520ff030cd0beae8b469876013b8f18b793a (patch)
tree       0d2d6cf85371cc454279bd454ae851ca0fee930a /g4f/Provider/FlowGpt.py
parent     Merge pull request #1635 from hlohaus/flow (diff)
parent     Add websocket support in OpenaiChat (diff)
Diffstat (limited to 'g4f/Provider/FlowGpt.py')
-rw-r--r--   g4f/Provider/FlowGpt.py   8
1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/g4f/Provider/FlowGpt.py b/g4f/Provider/FlowGpt.py
index 39192bf9..b466a2e6 100644
--- a/g4f/Provider/FlowGpt.py
+++ b/g4f/Provider/FlowGpt.py
@@ -51,12 +51,16 @@ class FlowGpt(AsyncGeneratorProvider, ProviderModelMixin):
"TE": "trailers"
}
async with ClientSession(headers=headers) as session:
+ history = [message for message in messages[:-1] if message["role"] != "system"]
+ system_message = "\n".join([message["content"] for message in messages if message["role"] == "system"])
+ if not system_message:
+ system_message = "You are helpful assistant. Follow the user's instructions carefully."
data = {
"model": model,
"nsfw": False,
"question": messages[-1]["content"],
- "history": [{"role": "assistant", "content": "Hello, how can I help you today?"}, *messages[:-1]],
- "system": kwargs.get("system_message", "You are helpful assistant. Follow the user's instructions carefully."),
+ "history": [{"role": "assistant", "content": "Hello, how can I help you today?"}, *history],
+ "system": system_message,
"temperature": kwargs.get("temperature", 0.7),
"promptId": f"model-{model}",
"documentIds": [],