author     kqlio67 <kqlio67@users.noreply.github.com>  2024-10-19 12:21:19 +0200
committer  kqlio67 <kqlio67@users.noreply.github.com>  2024-10-19 12:21:19 +0200
commit     d7573a003934f1bc569ccb08602ab8203361669d (patch)
tree       1ed3343d1dfca7c546028e92891f5e176ff8ce2c
parent     feat(g4f/Provider/Blackbox.py): enhance async generator with image processing (diff)
Diffstat (limited to '')
-rw-r--r--  g4f/Provider/AI365VIP.py                                                                               2
-rw-r--r--  g4f/Provider/Ai4Chat.py                                                                                1
-rw-r--r--  g4f/Provider/AiChatOnline.py                                                                           1
-rw-r--r--  g4f/Provider/AiChats.py                                                                                1
-rw-r--r--  g4f/Provider/Airforce.py                                                                               4
-rw-r--r--  g4f/Provider/Allyfy.py                                                                                 1
-rw-r--r--  g4f/Provider/Bing.py                                                                                   1
-rw-r--r--  g4f/Provider/ChatGptEs.py                                                                              1
-rw-r--r--  g4f/Provider/Chatgpt4Online.py                                                                         1
-rw-r--r--  g4f/Provider/Chatgpt4o.py                                                                              1
-rw-r--r--  g4f/Provider/ChatgptFree.py                                                                            1
-rw-r--r--  g4f/Provider/DDG.py                                                                                    1
-rw-r--r--  g4f/Provider/DarkAI.py                                                                                 2
-rw-r--r--  g4f/Provider/Editee.py                                                                                 1
-rw-r--r--  g4f/Provider/FlowGpt.py                                                                                1
-rw-r--r--  g4f/Provider/FreeNetfly.py                                                                             2
-rw-r--r--  g4f/Provider/Koala.py                                                                                  1
-rw-r--r--  g4f/Provider/Liaobots.py                                                                               1
-rw-r--r--  g4f/Provider/MagickPen.py                                                                              1
-rw-r--r--  g4f/Provider/Nexra.py                                                                                  66
-rw-r--r--  g4f/Provider/Pizzagpt.py                                                                               1
-rw-r--r--  g4f/Provider/Prodia.py                                                                                 3
-rw-r--r--  g4f/Provider/RubiksAI.py                                                                               1
-rw-r--r--  g4f/Provider/You.py                                                                                    2
-rw-r--r--  g4f/Provider/__init__.py                                                                               12
-rw-r--r--  g4f/Provider/gigachat/GigaChat.py (renamed from g4f/Provider/GigaChat.py)                              10
-rw-r--r--  g4f/Provider/gigachat/__init__.py                                                                      2
-rw-r--r--  g4f/Provider/gigachat/russian_trusted_root_ca_pem.crt (renamed from g4f/Provider/gigachat_crt/russian_trusted_root_ca_pem.crt)  0
28 files changed, 16 insertions, 106 deletions
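
Most of the diff below removes the per-provider `supports_gpt_35_turbo` / `supports_gpt_4` boolean class flags. A minimal sketch of how a caller could check model support from the `models` / `default_model` attributes that remain on the provider classes; the `supports_model` helper is hypothetical and not part of this commit:

    # Hypothetical helper (not in this commit): infer support from the provider's
    # model list instead of the removed supports_gpt_* boolean flags.
    from g4f.Provider import AI365VIP

    def supports_model(provider, model_name: str) -> bool:
        models = getattr(provider, "models", [])
        return model_name in models or model_name == getattr(provider, "default_model", None)

    print(supports_model(AI365VIP, "gpt-3.5-turbo"))  # True, per the class definition below
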
diff --git a/g4f/Provider/AI365VIP.py b/g4f/Provider/AI365VIP.py
index 154cbd34..c7ebf6b5 100644
--- a/g4f/Provider/AI365VIP.py
+++ b/g4f/Provider/AI365VIP.py
@@ -11,8 +11,6 @@ class AI365VIP(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://chat.ai365vip.com"
api_endpoint = "/api/chat"
working = True
- supports_gpt_35_turbo = True
- supports_gpt_4 = True
default_model = 'gpt-3.5-turbo'
models = [
'gpt-3.5-turbo',
diff --git a/g4f/Provider/Ai4Chat.py b/g4f/Provider/Ai4Chat.py
index 81633b7a..4daf1b4a 100644
--- a/g4f/Provider/Ai4Chat.py
+++ b/g4f/Provider/Ai4Chat.py
@@ -12,7 +12,6 @@ class Ai4Chat(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://www.ai4chat.co"
api_endpoint = "https://www.ai4chat.co/generate-response"
working = True
- supports_gpt_4 = False
supports_stream = False
supports_system_message = True
supports_message_history = True
diff --git a/g4f/Provider/AiChatOnline.py b/g4f/Provider/AiChatOnline.py
index 40f77105..26aacef6 100644
--- a/g4f/Provider/AiChatOnline.py
+++ b/g4f/Provider/AiChatOnline.py
@@ -12,7 +12,6 @@ class AiChatOnline(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://aichatonlineorg.erweima.ai"
api_endpoint = "/aichatonline/api/chat/gpt"
working = True
- supports_gpt_4 = True
default_model = 'gpt-4o-mini'
@classmethod
diff --git a/g4f/Provider/AiChats.py b/g4f/Provider/AiChats.py
index 10127d4f..08492e24 100644
--- a/g4f/Provider/AiChats.py
+++ b/g4f/Provider/AiChats.py
@@ -12,7 +12,6 @@ class AiChats(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://ai-chats.org"
api_endpoint = "https://ai-chats.org/chat/send2/"
working = True
- supports_gpt_4 = True
supports_message_history = True
default_model = 'gpt-4'
models = ['gpt-4', 'dalle']
diff --git a/g4f/Provider/Airforce.py b/g4f/Provider/Airforce.py
index e7907cec..ac2b48fa 100644
--- a/g4f/Provider/Airforce.py
+++ b/g4f/Provider/Airforce.py
@@ -17,9 +17,7 @@ class Airforce(AsyncGeneratorProvider, ProviderModelMixin):
working = True
default_model = 'llama-3-70b-chat'
-
- supports_gpt_35_turbo = True
- supports_gpt_4 = True
+
supports_stream = True
supports_system_message = True
supports_message_history = True
diff --git a/g4f/Provider/Allyfy.py b/g4f/Provider/Allyfy.py
index eb202a4f..bf607df4 100644
--- a/g4f/Provider/Allyfy.py
+++ b/g4f/Provider/Allyfy.py
@@ -12,7 +12,6 @@ class Allyfy(AsyncGeneratorProvider):
url = "https://allyfy.chat"
api_endpoint = "https://chatbot.allyfy.chat/api/v1/message/stream/super/chat"
working = True
- supports_gpt_35_turbo = True
@classmethod
async def create_async_generator(
diff --git a/g4f/Provider/Bing.py b/g4f/Provider/Bing.py
index 4056f9ff..f04b1a54 100644
--- a/g4f/Provider/Bing.py
+++ b/g4f/Provider/Bing.py
@@ -37,7 +37,6 @@ class Bing(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://bing.com/chat"
working = True
supports_message_history = True
- supports_gpt_4 = True
default_model = "Balanced"
default_vision_model = "gpt-4-vision"
models = [getattr(Tones, key) for key in Tones.__dict__ if not key.startswith("__")]
diff --git a/g4f/Provider/ChatGptEs.py b/g4f/Provider/ChatGptEs.py
index 0e7062e5..a060ecb1 100644
--- a/g4f/Provider/ChatGptEs.py
+++ b/g4f/Provider/ChatGptEs.py
@@ -13,7 +13,6 @@ class ChatGptEs(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://chatgpt.es"
api_endpoint = "https://chatgpt.es/wp-admin/admin-ajax.php"
working = True
- supports_gpt_4 = True
supports_stream = True
supports_system_message = True
supports_message_history = True
diff --git a/g4f/Provider/Chatgpt4Online.py b/g4f/Provider/Chatgpt4Online.py
index 8c058fdc..74241253 100644
--- a/g4f/Provider/Chatgpt4Online.py
+++ b/g4f/Provider/Chatgpt4Online.py
@@ -12,7 +12,6 @@ class Chatgpt4Online(AsyncGeneratorProvider):
url = "https://chatgpt4online.org"
api_endpoint = "/wp-json/mwai-ui/v1/chats/submit"
working = True
- supports_gpt_4 = True
async def get_nonce(headers: dict) -> str:
async with ClientSession(headers=headers) as session:
diff --git a/g4f/Provider/Chatgpt4o.py b/g4f/Provider/Chatgpt4o.py
index d38afb7d..7730fc84 100644
--- a/g4f/Provider/Chatgpt4o.py
+++ b/g4f/Provider/Chatgpt4o.py
@@ -9,7 +9,6 @@ from .helper import format_prompt
class Chatgpt4o(AsyncProvider, ProviderModelMixin):
url = "https://chatgpt4o.one"
- supports_gpt_4 = True
working = True
_post_id = None
_nonce = None
diff --git a/g4f/Provider/ChatgptFree.py b/g4f/Provider/ChatgptFree.py
index 95efa865..d2837594 100644
--- a/g4f/Provider/ChatgptFree.py
+++ b/g4f/Provider/ChatgptFree.py
@@ -10,7 +10,6 @@ from .helper import format_prompt
class ChatgptFree(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://chatgptfree.ai"
- supports_gpt_4 = True
working = True
_post_id = None
_nonce = None
diff --git a/g4f/Provider/DDG.py b/g4f/Provider/DDG.py
index 1eae7b39..43cc39c0 100644
--- a/g4f/Provider/DDG.py
+++ b/g4f/Provider/DDG.py
@@ -13,7 +13,6 @@ class DDG(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://duckduckgo.com"
api_endpoint = "https://duckduckgo.com/duckchat/v1/chat"
working = True
- supports_gpt_4 = True
supports_stream = True
supports_system_message = True
supports_message_history = True
diff --git a/g4f/Provider/DarkAI.py b/g4f/Provider/DarkAI.py
index d5bd86a5..6ffb615e 100644
--- a/g4f/Provider/DarkAI.py
+++ b/g4f/Provider/DarkAI.py
@@ -12,8 +12,6 @@ class DarkAI(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://www.aiuncensored.info"
api_endpoint = "https://darkai.foundation/chat"
working = True
- supports_gpt_35_turbo = True
- supports_gpt_4 = True
supports_stream = True
supports_system_message = True
supports_message_history = True
diff --git a/g4f/Provider/Editee.py b/g4f/Provider/Editee.py
index 6d297169..8ac2324a 100644
--- a/g4f/Provider/Editee.py
+++ b/g4f/Provider/Editee.py
@@ -11,7 +11,6 @@ class Editee(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://editee.com"
api_endpoint = "https://editee.com/submit/chatgptfree"
working = True
- supports_gpt_4 = True
supports_stream = True
supports_system_message = True
supports_message_history = True
diff --git a/g4f/Provider/FlowGpt.py b/g4f/Provider/FlowGpt.py
index d510eabe..1a45997b 100644
--- a/g4f/Provider/FlowGpt.py
+++ b/g4f/Provider/FlowGpt.py
@@ -13,7 +13,6 @@ from ..requests.raise_for_status import raise_for_status
class FlowGpt(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://flowgpt.com/chat"
working = False
- supports_gpt_35_turbo = True
supports_message_history = True
supports_system_message = True
default_model = "gpt-3.5-turbo"
diff --git a/g4f/Provider/FreeNetfly.py b/g4f/Provider/FreeNetfly.py
index d0543176..ada5d51a 100644
--- a/g4f/Provider/FreeNetfly.py
+++ b/g4f/Provider/FreeNetfly.py
@@ -13,8 +13,6 @@ class FreeNetfly(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://free.netfly.top"
api_endpoint = "/api/openai/v1/chat/completions"
working = True
- supports_gpt_35_turbo = True
- supports_gpt_4 = True
default_model = 'gpt-3.5-turbo'
models = [
'gpt-3.5-turbo',
diff --git a/g4f/Provider/Koala.py b/g4f/Provider/Koala.py
index 14e533df..0dd76b71 100644
--- a/g4f/Provider/Koala.py
+++ b/g4f/Provider/Koala.py
@@ -14,7 +14,6 @@ class Koala(AsyncGeneratorProvider, ProviderModelMixin):
api_endpoint = "https://koala.sh/api/gpt/"
working = True
supports_message_history = True
- supports_gpt_4 = True
default_model = 'gpt-4o-mini'
@classmethod
diff --git a/g4f/Provider/Liaobots.py b/g4f/Provider/Liaobots.py
index 00c54600..56f765de 100644
--- a/g4f/Provider/Liaobots.py
+++ b/g4f/Provider/Liaobots.py
@@ -170,7 +170,6 @@ class Liaobots(AsyncGeneratorProvider, ProviderModelMixin):
working = True
supports_message_history = True
supports_system_message = True
- supports_gpt_4 = True
default_model = "gpt-3.5-turbo"
models = list(models.keys())
diff --git a/g4f/Provider/MagickPen.py b/g4f/Provider/MagickPen.py
index c15a59f5..7f1751dd 100644
--- a/g4f/Provider/MagickPen.py
+++ b/g4f/Provider/MagickPen.py
@@ -14,7 +14,6 @@ class MagickPen(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://magickpen.com"
api_endpoint = "https://api.magickpen.com/ask"
working = True
- supports_gpt_4 = True
supports_stream = True
supports_system_message = True
supports_message_history = True
diff --git a/g4f/Provider/Nexra.py b/g4f/Provider/Nexra.py
deleted file mode 100644
index 5fcdd242..00000000
--- a/g4f/Provider/Nexra.py
+++ /dev/null
@@ -1,66 +0,0 @@
-from __future__ import annotations
-
-from aiohttp import ClientSession
-import json
-
-from ..typing import AsyncResult, Messages
-from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
-from ..image import ImageResponse
-
-
-class Nexra(AsyncGeneratorProvider, ProviderModelMixin):
- label = "Nexra Animagine XL"
- url = "https://nexra.aryahcr.cc/documentation/midjourney/en"
- api_endpoint = "https://nexra.aryahcr.cc/api/image/complements"
- working = True
-
- default_model = 'animagine-xl'
- models = [default_model]
-
- @classmethod
- def get_model(cls, model: str) -> str:
- return cls.default_model
-
- @classmethod
- async def create_async_generator(
- cls,
- model: str,
- messages: Messages,
- proxy: str = None,
- response: str = "url", # base64 or url
- **kwargs
- ) -> AsyncResult:
- # Retrieve the correct model to use
- model = cls.get_model(model)
-
- # Format the prompt from the messages
- prompt = messages[0]['content']
-
- headers = {
- "Content-Type": "application/json"
- }
- payload = {
- "prompt": prompt,
- "model": model,
- "response": response
- }
-
- async with ClientSession(headers=headers) as session:
- async with session.post(cls.api_endpoint, json=payload, proxy=proxy) as response:
- response.raise_for_status()
- text_data = await response.text()
-
- try:
- # Parse the JSON response
- json_start = text_data.find('{')
- json_data = text_data[json_start:]
- data = json.loads(json_data)
-
- # Check if the response contains images
- if 'images' in data and len(data['images']) > 0:
- image_url = data['images'][0]
- yield ImageResponse(image_url, prompt)
- else:
- yield ImageResponse("No images found in the response.", prompt)
- except json.JSONDecodeError:
- yield ImageResponse("Failed to parse JSON. Response might not be in JSON format.", prompt)
diff --git a/g4f/Provider/Pizzagpt.py b/g4f/Provider/Pizzagpt.py
index 47cb135c..6513bd34 100644
--- a/g4f/Provider/Pizzagpt.py
+++ b/g4f/Provider/Pizzagpt.py
@@ -12,7 +12,6 @@ class Pizzagpt(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://www.pizzagpt.it"
api_endpoint = "/api/chatx-completion"
working = True
- supports_gpt_4 = True
default_model = 'gpt-4o-mini'
@classmethod
diff --git a/g4f/Provider/Prodia.py b/g4f/Provider/Prodia.py
index f953064e..543a8b19 100644
--- a/g4f/Provider/Prodia.py
+++ b/g4f/Provider/Prodia.py
@@ -14,7 +14,7 @@ class Prodia(AsyncGeneratorProvider, ProviderModelMixin):
working = True
default_model = 'absolutereality_v181.safetensors [3d9d4d2b]'
- models = [
+ image_models = [
'3Guofeng3_v34.safetensors [50f420de]',
'absolutereality_V16.safetensors [37db0fc3]',
default_model,
@@ -81,6 +81,7 @@ class Prodia(AsyncGeneratorProvider, ProviderModelMixin):
'timeless-1.0.ckpt [7c4971d4]',
'toonyou_beta6.safetensors [980f6b15]',
]
+ models = [*image_models]
@classmethod
def get_model(cls, model: str) -> str:
diff --git a/g4f/Provider/RubiksAI.py b/g4f/Provider/RubiksAI.py
index 184322c8..7e76d558 100644
--- a/g4f/Provider/RubiksAI.py
+++ b/g4f/Provider/RubiksAI.py
@@ -19,7 +19,6 @@ class RubiksAI(AsyncGeneratorProvider, ProviderModelMixin):
url = "https://rubiks.ai"
api_endpoint = "https://rubiks.ai/search/api.php"
working = True
- supports_gpt_4 = True
supports_stream = True
supports_system_message = True
supports_message_history = True
diff --git a/g4f/Provider/You.py b/g4f/Provider/You.py
index af8aab0e..02735038 100644
--- a/g4f/Provider/You.py
+++ b/g4f/Provider/You.py
@@ -17,8 +17,6 @@ class You(AsyncGeneratorProvider, ProviderModelMixin):
label = "You.com"
url = "https://you.com"
working = True
- supports_gpt_35_turbo = True
- supports_gpt_4 = True
default_model = "gpt-4o-mini"
default_vision_model = "agent"
image_models = ["dall-e"]
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index c794dd0b..8f36606b 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -5,11 +5,12 @@ from ..providers.retry_provider import RetryProvider, IterListProvider
from ..providers.base_provider import AsyncProvider, AsyncGeneratorProvider
from ..providers.create_images import CreateImagesProvider
-from .deprecated import *
-from .selenium import *
-from .needs_auth import *
+from .deprecated import *
+from .selenium import *
+from .needs_auth import *
-from .nexra import *
+from .gigachat import *
+from .nexra import *
from .Ai4Chat import Ai4Chat
from .AI365VIP import AI365VIP
@@ -46,7 +47,6 @@ from .FreeChatgpt import FreeChatgpt
from .FreeGpt import FreeGpt
from .FreeNetfly import FreeNetfly
from .GeminiPro import GeminiPro
-from .GigaChat import GigaChat
from .GPROChat import GPROChat
from .HuggingChat import HuggingChat
from .HuggingFace import HuggingFace
@@ -55,7 +55,7 @@ from .Liaobots import Liaobots
from .Local import Local
from .MagickPen import MagickPen
from .MetaAI import MetaAI
-#from .MetaAIAccount import MetaAIAccount
+#from .MetaAIAccount import MetaAIAccount
from .Ollama import Ollama
from .PerplexityLabs import PerplexityLabs
from .Pi import Pi
diff --git a/g4f/Provider/GigaChat.py b/g4f/Provider/gigachat/GigaChat.py
index 8ba07b43..b1b293e3 100644
--- a/g4f/Provider/GigaChat.py
+++ b/g4f/Provider/gigachat/GigaChat.py
@@ -9,10 +9,10 @@ import json
from aiohttp import ClientSession, TCPConnector, BaseConnector
from g4f.requests import raise_for_status
-from ..typing import AsyncResult, Messages
-from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
-from ..errors import MissingAuthError
-from .helper import get_connector
+from ...typing import AsyncResult, Messages
+from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
+from ...errors import MissingAuthError
+from ..helper import get_connector
access_token = ""
token_expires_at = 0
@@ -45,7 +45,7 @@ class GigaChat(AsyncGeneratorProvider, ProviderModelMixin):
if not api_key:
raise MissingAuthError('Missing "api_key"')
- cafile = os.path.join(os.path.dirname(__file__), "gigachat_crt/russian_trusted_root_ca_pem.crt")
+ cafile = os.path.join(os.path.dirname(__file__), "russian_trusted_root_ca_pem.crt")
ssl_context = ssl.create_default_context(cafile=cafile) if os.path.exists(cafile) else None
if connector is None and ssl_context is not None:
connector = TCPConnector(ssl_context=ssl_context)
diff --git a/g4f/Provider/gigachat/__init__.py b/g4f/Provider/gigachat/__init__.py
new file mode 100644
index 00000000..c9853742
--- /dev/null
+++ b/g4f/Provider/gigachat/__init__.py
@@ -0,0 +1,2 @@
+from .GigaChat import GigaChat
+
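
The GigaChat provider and its trusted-root certificate move into the new `g4f/Provider/gigachat/` package, and `g4f/Provider/__init__.py` now re-exports it via `from .gigachat import *`. A minimal usage sketch, assuming the package is installed as `g4f`:

    # Both import paths should resolve after this commit:
    from g4f.Provider import GigaChat            # unchanged public path, via the star re-export
    from g4f.Provider.gigachat import GigaChat   # new package location
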
diff --git a/g4f/Provider/gigachat_crt/russian_trusted_root_ca_pem.crt b/g4f/Provider/gigachat/russian_trusted_root_ca_pem.crt
index 4c143a21..4c143a21 100644
--- a/g4f/Provider/gigachat_crt/russian_trusted_root_ca_pem.crt
+++ b/g4f/Provider/gigachat/russian_trusted_root_ca_pem.crt