Diffstat (limited to 'g4f/Provider')
 -rw-r--r--  g4f/Provider/nexra/NexraBing.py       | 3
 -rw-r--r--  g4f/Provider/nexra/NexraBlackbox.py   | 6
 -rw-r--r--  g4f/Provider/nexra/NexraChatGPT.py    | 3
 -rw-r--r--  g4f/Provider/nexra/NexraChatGptV2.py  | 3
 -rw-r--r--  g4f/Provider/nexra/NexraChatGptWeb.py | 3
5 files changed, 12 insertions(+), 6 deletions(-)
diff --git a/g4f/Provider/nexra/NexraBing.py b/g4f/Provider/nexra/NexraBing.py
index 755bedd5..b7e8f73a 100644
--- a/g4f/Provider/nexra/NexraBing.py
+++ b/g4f/Provider/nexra/NexraBing.py
@@ -38,6 +38,7 @@ class NexraBing(AbstractProvider, ProviderModelMixin):
model: str,
messages: Messages,
stream: bool,
+ markdown: bool = False,
**kwargs
) -> CreateResult:
model = cls.get_model(model)
@@ -54,7 +55,7 @@ class NexraBing(AbstractProvider, ProviderModelMixin):
}
],
"conversation_style": model,
- "markdown": False,
+ "markdown": markdown,
"stream": stream,
"model": "Bing"
}
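Across all five providers the previously hard-coded "markdown": False in the request payload becomes a markdown keyword argument that defaults to False, so existing callers keep the old behavior. A minimal usage sketch for NexraBing follows; the create_completion name comes from the AbstractProvider base class, and the model alias is a placeholder, not something confirmed by this diff.

from g4f.Provider.nexra import NexraBing

messages = [{"role": "user", "content": "Summarize unified diffs in one line."}]

# Hedged sketch: the model alias is assumed (check the class's model list),
# and markdown is the new keyword introduced by this change.
for chunk in NexraBing.create_completion(
    model="Bing (Balanced)",  # assumed alias
    messages=messages,
    stream=True,
    markdown=True,            # new keyword
):
    print(chunk, end="")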
diff --git a/g4f/Provider/nexra/NexraBlackbox.py b/g4f/Provider/nexra/NexraBlackbox.py
index 1b316803..cbe26584 100644
--- a/g4f/Provider/nexra/NexraBlackbox.py
+++ b/g4f/Provider/nexra/NexraBlackbox.py
@@ -33,6 +33,8 @@ class NexraBlackbox(AbstractProvider, ProviderModelMixin):
model: str,
messages: Messages,
stream: bool,
+ markdown: bool = False,
+ websearch: bool = False,
**kwargs
) -> CreateResult:
model = cls.get_model(model)
@@ -48,9 +50,9 @@ class NexraBlackbox(AbstractProvider, ProviderModelMixin):
"content": format_prompt(messages)
}
],
- "websearch": False,
+ "websearch": websearch,
"stream": stream,
- "markdown": False,
+ "markdown": markdown,
"model": model
}
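NexraBlackbox additionally gains a websearch flag alongside markdown, both defaulting to False so the prior payload is preserved. A hedged sketch of passing the new flags through the high-level API, which forwards extra keyword arguments to the selected provider; the "blackbox" model id is an assumption.

import g4f
from g4f.Provider.nexra import NexraBlackbox

# Extra kwargs such as websearch/markdown are forwarded to the provider's
# create_completion; "blackbox" as the model id is an assumption.
response = g4f.ChatCompletion.create(
    model="blackbox",
    provider=NexraBlackbox,
    messages=[{"role": "user", "content": "What changed in this diff?"}],
    stream=False,
    websearch=True,   # new keyword
    markdown=False,   # new keyword
)
print(response)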
diff --git a/g4f/Provider/nexra/NexraChatGPT.py b/g4f/Provider/nexra/NexraChatGPT.py
index b9592aac..4039c17e 100644
--- a/g4f/Provider/nexra/NexraChatGPT.py
+++ b/g4f/Provider/nexra/NexraChatGPT.py
@@ -56,6 +56,7 @@ class NexraChatGPT(AbstractProvider, ProviderModelMixin):
cls,
model: str,
messages: Messages,
+ markdown: bool = False,
**kwargs
) -> CreateResult:
model = cls.get_model(model)
@@ -68,7 +69,7 @@ class NexraChatGPT(AbstractProvider, ProviderModelMixin):
"messages": [],
"prompt": format_prompt(messages),
"model": model,
- "markdown": False
+ "markdown": markdown
}
response = requests.post(cls.api_endpoint, headers=headers, json=data)
diff --git a/g4f/Provider/nexra/NexraChatGptV2.py b/g4f/Provider/nexra/NexraChatGptV2.py
index ed40f070..98e98008 100644
--- a/g4f/Provider/nexra/NexraChatGptV2.py
+++ b/g4f/Provider/nexra/NexraChatGptV2.py
@@ -33,6 +33,7 @@ class NexraChatGptV2(AbstractProvider, ProviderModelMixin):
model: str,
messages: Messages,
stream: bool,
+ markdown: bool = False,
**kwargs
) -> CreateResult:
model = cls.get_model(model)
@@ -49,7 +50,7 @@ class NexraChatGptV2(AbstractProvider, ProviderModelMixin):
}
],
"stream": stream,
- "markdown": False,
+ "markdown": markdown,
"model": model
}
diff --git a/g4f/Provider/nexra/NexraChatGptWeb.py b/g4f/Provider/nexra/NexraChatGptWeb.py
index 653c8904..258ce7f5 100644
--- a/g4f/Provider/nexra/NexraChatGptWeb.py
+++ b/g4f/Provider/nexra/NexraChatGptWeb.py
@@ -31,6 +31,7 @@ class NexraChatGptWeb(AbstractProvider, ProviderModelMixin):
cls,
model: str,
messages: Messages,
+ markdown: bool = False,
**kwargs
) -> CreateResult:
model = cls.get_model(model)
@@ -42,7 +43,7 @@ class NexraChatGptWeb(AbstractProvider, ProviderModelMixin):
data = {
"prompt": format_prompt(messages),
- "markdown": False
+ "markdown": markdown
}
response = requests.post(api_endpoint, headers=headers, json=data)
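NexraChatGPT, NexraChatGptV2, and NexraChatGptWeb follow the same pattern: the markdown flag becomes a caller-supplied keyword that defaults to False and is written into the JSON payload. A minimal sketch for the non-streaming NexraChatGPT variant; the "gpt-4" alias and the direct classmethod call are assumptions.

from g4f.Provider.nexra import NexraChatGPT

# Direct call to the provider; "gpt-4" as the model alias is an assumption.
result = NexraChatGPT.create_completion(
    model="gpt-4",
    messages=[{"role": "user", "content": "Reply with a markdown list."}],
    markdown=True,   # new keyword
)
for chunk in result:
    print(chunk, end="")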