summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorHeiner Lohaus <hlohaus@users.noreply.github.com>2024-11-18 15:41:45 +0100
committerHeiner Lohaus <hlohaus@users.noreply.github.com>2024-11-18 15:41:45 +0100
commitf1ef23285ae6e63d5fb28f5e271fec0e40ad65f2 (patch)
tree78134cfed60c8facbcccf200e3cbdb65fba35d85
parentUpdate Blackbox.py (diff)
downloadgpt4free-f1ef23285ae6e63d5fb28f5e271fec0e40ad65f2.tar
gpt4free-f1ef23285ae6e63d5fb28f5e271fec0e40ad65f2.tar.gz
gpt4free-f1ef23285ae6e63d5fb28f5e271fec0e40ad65f2.tar.bz2
gpt4free-f1ef23285ae6e63d5fb28f5e271fec0e40ad65f2.tar.lz
gpt4free-f1ef23285ae6e63d5fb28f5e271fec0e40ad65f2.tar.xz
gpt4free-f1ef23285ae6e63d5fb28f5e271fec0e40ad65f2.tar.zst
gpt4free-f1ef23285ae6e63d5fb28f5e271fec0e40ad65f2.zip
-rw-r--r--README.md2
-rw-r--r--g4f/Provider/Blackbox.py65
-rw-r--r--g4f/Provider/Copilot.py87
-rw-r--r--g4f/Provider/DeepInfraChat.py2
-rw-r--r--g4f/Provider/__init__.py1
-rw-r--r--g4f/Provider/airforce/AirforceChat.py10
-rw-r--r--g4f/Provider/needs_auth/OpenaiChat.py16
-rw-r--r--g4f/gui/client/index.html1
-rw-r--r--g4f/gui/client/static/js/chat.v1.js5
-rw-r--r--g4f/gui/server/api.py21
-rw-r--r--g4f/requests/__init__.py2
11 files changed, 157 insertions, 55 deletions
diff --git a/README.md b/README.md
index 671fd908..cfaaadc0 100644
--- a/README.md
+++ b/README.md
@@ -21,7 +21,7 @@
> <sup><strong>Stats:</strong></sup> [![Downloads](https://static.pepy.tech/badge/g4f)](https://pepy.tech/project/g4f) [![Downloads](https://static.pepy.tech/badge/g4f/month)](https://pepy.tech/project/g4f)
```sh
-pip install -U g4f
+pip install -U g4f[all]
```
```sh
diff --git a/g4f/Provider/Blackbox.py b/g4f/Provider/Blackbox.py
index 75abb183..97466c04 100644
--- a/g4f/Provider/Blackbox.py
+++ b/g4f/Provider/Blackbox.py
@@ -10,6 +10,7 @@ import aiohttp
from ..typing import AsyncResult, Messages, ImageType
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
from ..image import ImageResponse, to_data_uri
+from .helper import get_random_string
class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
label = "Blackbox AI"
@@ -22,11 +23,13 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
_last_validated_value = None
default_model = 'blackboxai'
+ default_vision_model = default_model
default_image_model = 'generate_image'
image_models = [default_image_model, 'repomap']
text_models = [default_model, 'gpt-4o', 'gemini-pro', 'claude-sonnet-3.5', 'blackboxai-pro']
+ vision_models = [default_model, 'gpt-4o', 'gemini-pro', 'blackboxai-pro']
agentMode = {
- 'Image Generation': {'mode': True, 'id': "ImageGenerationLV45LJp", 'name': "Image Generation"},
+ default_image_model: {'mode': True, 'id': "ImageGenerationLV45LJp", 'name': "Image Generation"},
}
trendingAgentMode = {
"gemini-1.5-flash": {'mode': True, 'id': 'Gemini'},
@@ -111,11 +114,6 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
return cls._last_validated_value
- @staticmethod
- def generate_id(length=7):
- characters = string.ascii_letters + string.digits
- return ''.join(random.choice(characters) for _ in range(length))
-
@classmethod
def add_prefix_to_messages(cls, messages: Messages, model: str) -> Messages:
prefix = cls.model_prefixes.get(model, "")
@@ -143,12 +141,12 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
**kwargs
) -> AsyncResult:
model = cls.get_model(model)
- message_id = cls.generate_id()
- messages_with_prefix = cls.add_prefix_to_messages(messages, model)
+ message_id = get_random_string(7)
+ messages = cls.add_prefix_to_messages(messages, model)
validated_value = await cls.fetch_validated()
if image is not None:
- messages_with_prefix[-1]['data'] = {
+ messages[-1]['data'] = {
'fileText': '',
'imageBase64': to_data_uri(image),
'title': image_name
@@ -171,9 +169,9 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
'sec-fetch-site': 'same-origin',
'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/130.0.0.0 Safari/537.36'
}
-
+
data = {
- "messages": messages_with_prefix,
+ "messages": messages,
"id": message_id,
"previewToken": None,
"userId": None,
@@ -200,27 +198,24 @@ class Blackbox(AsyncGeneratorProvider, ProviderModelMixin):
async with ClientSession(headers=headers) as session:
async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
response.raise_for_status()
- response_text = await response.text()
-
- if model in cls.image_models:
- image_matches = re.findall(r'!\[.*?\]\((https?://[^\)]+)\)', response_text)
- if image_matches:
- image_url = image_matches[0]
- image_response = ImageResponse(images=[image_url], alt="Generated Image")
- yield image_response
- return
-
- response_text = re.sub(r'Generated by BLACKBOX.AI, try unlimited chat https://www.blackbox.ai', '', response_text, flags=re.DOTALL)
-
- json_match = re.search(r'\$~~~\$(.*?)\$~~~\$', response_text, re.DOTALL)
- if json_match:
- search_results = json.loads(json_match.group(1))
- answer = response_text.split('$~~~$')[-1].strip()
-
- formatted_response = f"{answer}\n\n**Source:**"
- for i, result in enumerate(search_results, 1):
- formatted_response += f"\n{i}. {result['title']}: {result['link']}"
-
- yield formatted_response
- else:
- yield response_text.strip()
+ async for chunk in response.content.iter_any():
+ text_chunk = chunk.decode(errors="ignore")
+ if model in cls.image_models:
+ image_matches = re.findall(r'!\[.*?\]\((https?://[^\)]+)\)', text_chunk)
+ if image_matches:
+ image_url = image_matches[0]
+ image_response = ImageResponse(images=[image_url])
+ yield image_response
+ continue
+
+ text_chunk = re.sub(r'Generated by BLACKBOX.AI, try unlimited chat https://www.blackbox.ai', '', text_chunk, flags=re.DOTALL)
+ json_match = re.search(r'\$~~~\$(.*?)\$~~~\$', text_chunk, re.DOTALL)
+ if json_match:
+ search_results = json.loads(json_match.group(1))
+ answer = text_chunk.split('$~~~$')[-1].strip()
+ formatted_response = f"{answer}\n\n**Source:**"
+ for i, result in enumerate(search_results, 1):
+ formatted_response += f"\n{i}. {result['title']}: {result['link']}"
+ yield formatted_response
+ else:
+ yield text_chunk.strip()
diff --git a/g4f/Provider/Copilot.py b/g4f/Provider/Copilot.py
new file mode 100644
index 00000000..ddfed4a8
--- /dev/null
+++ b/g4f/Provider/Copilot.py
@@ -0,0 +1,87 @@
+from __future__ import annotations
+
+import json
+from http.cookiejar import CookieJar
+try:
+ from curl_cffi.requests import Session, CurlWsFlag
+ has_curl_cffi = True
+except ImportError:
+ has_curl_cffi = False
+
+from .base_provider import AbstractProvider, BaseConversation
+from .helper import format_prompt
+from ..typing import CreateResult, Messages
+from ..errors import MissingRequirementsError
+from ..requests.raise_for_status import raise_for_status
+from .. import debug
+
+class Conversation(BaseConversation):
+ conversation_id: str
+ cookie_jar: CookieJar
+
+ def __init__(self, conversation_id: str, cookie_jar: CookieJar):
+ self.conversation_id = conversation_id
+ self.cookie_jar = cookie_jar
+
+class Copilot(AbstractProvider):
+ label = "Microsoft Copilot"
+ url = "https://copilot.microsoft.com"
+ working = True
+ supports_stream = True
+
+ websocket_url = "wss://copilot.microsoft.com/c/api/chat?api-version=2"
+ conversation_url = f"{url}/c/api/conversations"
+
+ @classmethod
+ def create_completion(
+ cls,
+ model: str,
+ messages: Messages,
+ stream: bool = False,
+ proxy: str = None,
+ timeout: int = 900,
+ conversation: Conversation = None,
+ return_conversation: bool = False,
+ **kwargs
+ ) -> CreateResult:
+ if not has_curl_cffi:
+            raise MissingRequirementsError('Install or update "curl_cffi" package | pip install -U curl_cffi')
+
+ cookies = conversation.cookie_jar if conversation is not None else None
+ with Session(timeout=timeout, proxy=proxy, impersonate="chrome", cookies=cookies) as session:
+ response = session.get(f"{cls.url}/")
+ raise_for_status(response)
+ if conversation is None:
+ response = session.post(cls.conversation_url)
+ raise_for_status(response)
+ conversation_id = response.json().get("id")
+ if return_conversation:
+ yield Conversation(conversation_id, session.cookies.jar)
+ prompt = format_prompt(messages)
+ if debug.logging:
+ print(f"Copilot: Created conversation: {conversation_id}")
+ else:
+ conversation_id = conversation.conversation_id
+ prompt = messages[-1]["content"]
+ if debug.logging:
+ print(f"Copilot: Use conversation: {conversation_id}")
+
+ wss = session.ws_connect(cls.websocket_url)
+ wss.send(json.dumps({
+ "event": "send",
+ "conversationId": conversation_id,
+ "content": [{
+ "type": "text",
+ "text": prompt,
+ }],
+ "mode": "chat"
+ }).encode(), CurlWsFlag.TEXT)
+ while True:
+ try:
+ msg = json.loads(wss.recv()[0])
+ except:
+ break
+ if msg.get("event") == "appendText":
+ yield msg.get("text")
+ elif msg.get("event") in ["done", "partCompleted"]:
+ break \ No newline at end of file
diff --git a/g4f/Provider/DeepInfraChat.py b/g4f/Provider/DeepInfraChat.py
index 5c668599..d8cb072a 100644
--- a/g4f/Provider/DeepInfraChat.py
+++ b/g4f/Provider/DeepInfraChat.py
@@ -4,10 +4,8 @@ from aiohttp import ClientSession
import json
from ..typing import AsyncResult, Messages, ImageType
-from ..image import to_data_uri
from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
-
class DeepInfraChat(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://deepinfra.com/chat"
api_endpoint = "https://api.deepinfra.com/v1/openai/chat/completions"
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index 8a162baf..faf9979e 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -19,6 +19,7 @@ from .Blackbox import Blackbox
from .ChatGpt import ChatGpt
from .ChatGptEs import ChatGptEs
from .Cloudflare import Cloudflare
+from .Copilot import Copilot
from .DarkAI import DarkAI
from .DDG import DDG
from .DeepInfraChat import DeepInfraChat
diff --git a/g4f/Provider/airforce/AirforceChat.py b/g4f/Provider/airforce/AirforceChat.py
index cec911a3..e94dd0a8 100644
--- a/g4f/Provider/airforce/AirforceChat.py
+++ b/g4f/Provider/airforce/AirforceChat.py
@@ -50,11 +50,13 @@ class AirforceChat(AsyncGeneratorProvider, ProviderModelMixin):
supports_message_history = True
default_model = 'llama-3.1-70b-chat'
- response = requests.get('https://api.airforce/models')
- data = response.json()
- text_models = [model['id'] for model in data['data']]
- models = [*text_models]
+ @classmethod
+ def get_models(cls) -> list:
+ if not cls.models:
+ response = requests.get('https://api.airforce/models')
+ data = response.json()
+ cls.models = [model['id'] for model in data['data']]
model_aliases = {
# openchat
diff --git a/g4f/Provider/needs_auth/OpenaiChat.py b/g4f/Provider/needs_auth/OpenaiChat.py
index 43444699..13e15f1d 100644
--- a/g4f/Provider/needs_auth/OpenaiChat.py
+++ b/g4f/Provider/needs_auth/OpenaiChat.py
@@ -6,6 +6,7 @@ import uuid
import json
import base64
import time
+import requests
from aiohttp import ClientWebSocketResponse
from copy import copy
@@ -62,7 +63,8 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
supports_system_message = True
default_model = "auto"
default_vision_model = "gpt-4o"
- models = ["auto", "gpt-4o-mini", "gpt-4o", "gpt-4", "gpt-4-gizmo"]
+ fallback_models = ["auto", "gpt-4", "gpt-4o", "gpt-4o-mini", "gpt-4o-canmore", "o1-preview", "o1-mini"]
+ vision_models = fallback_models
_api_key: str = None
_headers: dict = None
@@ -70,6 +72,18 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
_expires: int = None
@classmethod
+ def get_models(cls):
+ if not cls.models:
+ try:
+ response = requests.get(f"{cls.url}/backend-anon/models")
+ response.raise_for_status()
+ data = response.json()
+ cls.models = [model.get("slug") for model in data.get("models")]
+ except Exception:
+ cls.models = cls.fallback_models
+ return cls.models
+
+ @classmethod
async def create(
cls,
prompt: str = None,
diff --git a/g4f/gui/client/index.html b/g4f/gui/client/index.html
index e650d7e0..63e47b3f 100644
--- a/g4f/gui/client/index.html
+++ b/g4f/gui/client/index.html
@@ -245,6 +245,7 @@
<select name="provider" id="provider">
<option value="">Provider: Auto</option>
<option value="OpenaiChat">OpenAI ChatGPT</option>
+ <option value="Copilot">Microsoft Copilot</option>
<option value="ChatGpt">ChatGpt</option>
<option value="Gemini">Gemini</option>
<option value="MetaAI">Meta AI</option>
diff --git a/g4f/gui/client/static/js/chat.v1.js b/g4f/gui/client/static/js/chat.v1.js
index 42ddb129..580cbf77 100644
--- a/g4f/gui/client/static/js/chat.v1.js
+++ b/g4f/gui/client/static/js/chat.v1.js
@@ -1367,7 +1367,8 @@ async function load_provider_models(providerIndex=null) {
modelProvider.classList.remove("hidden");
models.forEach((model) => {
let option = document.createElement('option');
- option.value = option.text = model.model;
+ option.value = model.model;
+ option.text = `${model.model}${model.image ? " (Image Generation)" : ""}${model.vision ? " (Image Upload)" : ""}`;
option.selected = model.default;
modelProvider.appendChild(option);
});
@@ -1381,7 +1382,7 @@ providerSelect.addEventListener("change", () => load_provider_models());
function save_storage() {
let filename = `chat ${new Date().toLocaleString()}.json`.replaceAll(":", "-");
let data = {"options": {"g4f": ""}};
- for (let i = 0; i < appStorage.length; i++){
+    for (let i = 0; i < appStorage.length; i++) {
let key = appStorage.key(i);
let item = appStorage.getItem(key);
if (key.startsWith("conversation:")) {
diff --git a/g4f/gui/server/api.py b/g4f/gui/server/api.py
index f03d2048..ed8454c3 100644
--- a/g4f/gui/server/api.py
+++ b/g4f/gui/server/api.py
@@ -42,7 +42,12 @@ class Api:
provider: ProviderType = __map__[provider]
if issubclass(provider, ProviderModelMixin):
return [
- {"model": model, "default": model == provider.default_model}
+ {
+ "model": model,
+ "default": model == provider.default_model,
+ "vision": getattr(provider, "default_vision_model", None) == model or model in getattr(provider, "vision_models", []),
+ "image": model in getattr(provider, "image_models", []),
+ }
for model in provider.get_models()
]
return []
@@ -65,7 +70,7 @@ class Api:
"url": parent.url,
"label": parent.label if hasattr(parent, "label") else None,
"image_model": model,
- "vision_model": parent.default_vision_model if hasattr(parent, "default_vision_model") else None
+ "vision_model": getattr(parent, "default_vision_model", None)
})
index.append(parent.__name__)
elif hasattr(provider, "default_vision_model") and provider.__name__ not in index:
@@ -82,13 +87,11 @@ class Api:
@staticmethod
def get_providers() -> list[str]:
return {
- provider.__name__: (
- provider.label if hasattr(provider, "label") else provider.__name__
- ) + (
- " (WebDriver)" if "webdriver" in provider.get_parameters() else ""
- ) + (
- " (Auth)" if provider.needs_auth else ""
- )
+ provider.__name__: (provider.label if hasattr(provider, "label") else provider.__name__)
+ + (" (Image Generation)" if hasattr(provider, "image_models") else "")
+ + (" (Image Upload)" if getattr(provider, "default_vision_model", None) else "")
+ + (" (WebDriver)" if "webdriver" in provider.get_parameters() else "")
+ + (" (Auth)" if provider.needs_auth else "")
for provider in __providers__
if provider.working
}
diff --git a/g4f/requests/__init__.py b/g4f/requests/__init__.py
index a8c0e286..89e0b4ea 100644
--- a/g4f/requests/__init__.py
+++ b/g4f/requests/__init__.py
@@ -109,7 +109,7 @@ def get_args_from_browser(
def get_session_from_browser(url: str, webdriver: WebDriver = None, proxy: str = None, timeout: int = 120) -> Session:
if not has_curl_cffi:
- raise MissingRequirementsError('Install "curl_cffi" package')
+ raise MissingRequirementsError('Install "curl_cffi" package | pip install -U curl_cffi')
args = get_args_from_browser(url, webdriver, proxy, timeout)
return Session(
**args,