Diffstat
-rw-r--r--  g4f/Provider/FlowGpt.py  7
1 file changed, 3 insertions(+), 4 deletions(-)
diff --git a/g4f/Provider/FlowGpt.py b/g4f/Provider/FlowGpt.py
index 6c2aa046..1a45997b 100644
--- a/g4f/Provider/FlowGpt.py
+++ b/g4f/Provider/FlowGpt.py
@@ -12,8 +12,7 @@ from ..requests.raise_for_status import raise_for_status
 class FlowGpt(AsyncGeneratorProvider, ProviderModelMixin):
     url = "https://flowgpt.com/chat"
-    working = True
-    supports_gpt_35_turbo = True
+    working = False
     supports_message_history = True
     supports_system_message = True
     default_model = "gpt-3.5-turbo"
@@ -30,7 +29,7 @@ class FlowGpt(AsyncGeneratorProvider, ProviderModelMixin):
         "pygmalion-13b",
         "chronos-hermes-13b",
         "Mixtral-8x7B",
-        "Dolphin-2.6-8x7B"
+        "Dolphin-2.6-8x7B",
     ]
     model_aliases = {
         "gemini": "google-gemini",
@@ -91,7 +90,7 @@ class FlowGpt(AsyncGeneratorProvider, ProviderModelMixin):
             "generateImage": False,
             "generateAudio": False
         }
-        async with session.post("https://backend-k8s.flowgpt.com/v2/chat-anonymous-encrypted", json=data, proxy=proxy) as response:
+        async with session.post("https://prod-backend-k8s.flowgpt.com/v3/chat-anonymous", json=data, proxy=proxy) as response:
             await raise_for_status(response)
             async for chunk in response.content:
                 if chunk.strip():
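For context only, a minimal standalone sketch (not part of this commit) of how the new v3 endpoint could be exercised with plain aiohttp. The payload is reduced to the two fields visible in the last hunk; the real provider sends additional fields (model, question, history, and so on), and the probe() helper name is purely illustrative.

# Standalone sketch, not part of this diff: probe the new v3 endpoint directly.
# Payload fields beyond the two visible in the hunk are omitted; the actual
# provider sends more (model, question, history, ...).
import asyncio
from aiohttp import ClientSession

async def probe(proxy: str | None = None) -> None:
    data = {
        "generateImage": False,
        "generateAudio": False,
    }
    async with ClientSession() as session:
        async with session.post(
            "https://prod-backend-k8s.flowgpt.com/v3/chat-anonymous",
            json=data,
            proxy=proxy,
        ) as response:
            response.raise_for_status()
            # The endpoint streams its reply; print raw chunks as they arrive.
            async for chunk in response.content:
                if chunk.strip():
                    print(chunk.decode(errors="replace"))

if __name__ == "__main__":
    asyncio.run(probe())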