author      Heiner Lohaus <hlohaus@users.noreply.github.com>    2024-03-28 11:36:25 +0100
committer   Heiner Lohaus <hlohaus@users.noreply.github.com>    2024-03-28 11:36:25 +0100
commit      03fd5ac99a828bd2637cf5be43a98157113527fb (patch)
tree        0af06918fddf0b6f3881e91b4e237fd85b088e77 /g4f/Provider
parent      Fix attr conversation_id not found (diff)
Diffstat (limited to 'g4f/Provider')
-rw-r--r--   g4f/Provider/needs_auth/OpenaiChat.py   11
1 file changed, 5 insertions, 6 deletions
diff --git a/g4f/Provider/needs_auth/OpenaiChat.py b/g4f/Provider/needs_auth/OpenaiChat.py
index 331d524a..396d73dd 100644
--- a/g4f/Provider/needs_auth/OpenaiChat.py
+++ b/g4f/Provider/needs_auth/OpenaiChat.py
@@ -389,19 +389,17 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
                     print(f"{e.__class__.__name__}: {e}")
             model = cls.get_model(model).replace("gpt-3.5-turbo", "text-davinci-002-render-sha")
-            fields = Conversation() if conversation is None else copy(conversation)
+            fields = Conversation(conversation_id, parent_id) if conversation is None else copy(conversation)
             fields.finish_reason = None
             while fields.finish_reason is None:
-                conversation_id = fields.conversation_id if hasattr(fields, "conversation_id") else conversation_id
-                parent_id = fields.message_id if hasattr(fields, "message_id") else parent_id
                 websocket_request_id = str(uuid.uuid4())
                 data = {
                     "action": action,
                     "conversation_mode": {"kind": "primary_assistant"},
                     "force_paragen": False,
                     "force_rate_limit": False,
-                    "conversation_id": conversation_id,
-                    "parent_message_id": parent_id,
+                    "conversation_id": fields.conversation_id,
+                    "parent_message_id": fields.message_id,
                     "model": model,
                     "history_and_training_disabled": history_disabled and not auto_continue and not return_conversation,
                     "websocket_request_id": websocket_request_id
@@ -425,6 +423,7 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
                     await raise_for_status(response)
                     async for chunk in cls.iter_messages_chunk(response.iter_lines(), session, fields):
                         if return_conversation:
+                            history_disabled = False
                             return_conversation = False
                             yield fields
                         yield chunk
@@ -432,7 +431,7 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
                     break
                 action = "continue"
                 await asyncio.sleep(5)
-            if history_disabled and auto_continue and not return_conversation:
+            if history_disabled and auto_continue:
                 await cls.delete_conversation(session, cls._headers, fields.conversation_id)

     @staticmethod
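
Note on the change above: instead of re-deriving conversation_id and parent_id on every loop iteration with hasattr checks, the Conversation state is now seeded once via Conversation(conversation_id, parent_id) and the request payload reads fields.conversation_id / fields.message_id directly; the patch also clears history_disabled once the conversation has been yielded back to the caller, so a returned conversation is no longer deleted after auto-continue. The sketch below illustrates that pattern only; the Conversation stand-in and the build_request helper are assumptions for illustration (field names come from the diff), not the module's real API.

import uuid

class Conversation:
    """Illustrative stand-in for the provider's conversation state."""
    def __init__(self, conversation_id: str = None, message_id: str = None):
        self.conversation_id = conversation_id   # None until the backend assigns one
        self.message_id = message_id             # sent as parent_message_id
        self.finish_reason = None                # request loop runs until this is set

def build_request(fields: Conversation, model: str, action: str = "next") -> dict:
    # After the patch the payload reads straight from the conversation object,
    # with no separate conversation_id/parent_id bookkeeping per iteration.
    return {
        "action": action,
        "conversation_id": fields.conversation_id,
        "parent_message_id": fields.message_id,
        "model": model,
        "websocket_request_id": str(uuid.uuid4()),
    }

# Usage: seed the state once, then reuse it for "next" and "continue" requests.
fields = Conversation(None, str(uuid.uuid4()))
print(build_request(fields, "text-davinci-002-render-sha"))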