author     kqlio67 <kqlio67@users.noreply.github.com>  2024-10-22 11:50:16 +0200
committer  kqlio67 <kqlio67@users.noreply.github.com>  2024-10-22 11:50:16 +0200
commit     df89e58d5049db563253dfa0ae6b75af40f58675 (patch)
tree       a5bfcb8b6c8aadce8b010306fa6c79e978b536bf /g4f
parent     Restored provider (g4f/Provider/nexra/NexraDallE2.py) (diff)
Diffstat (limited to 'g4f')
-rw-r--r--  g4f/Provider/nexra/NexraDalleMini.py  66
-rw-r--r--  g4f/Provider/nexra/NexraLLaMA31.py    91
-rw-r--r--  g4f/Provider/nexra/__init__.py         2
3 files changed, 0 insertions, 159 deletions
diff --git a/g4f/Provider/nexra/NexraDalleMini.py b/g4f/Provider/nexra/NexraDalleMini.py
deleted file mode 100644
index 92dd5343..00000000
--- a/g4f/Provider/nexra/NexraDalleMini.py
+++ /dev/null
@@ -1,66 +0,0 @@
-from __future__ import annotations
-
-from aiohttp import ClientSession
-import json
-
-from ...typing import AsyncResult, Messages
-from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
-from ...image import ImageResponse
-
-
-class NexraDalleMini(AsyncGeneratorProvider, ProviderModelMixin):
- label = "Nexra DALL-E Mini"
- url = "https://nexra.aryahcr.cc/documentation/dall-e/en"
- api_endpoint = "https://nexra.aryahcr.cc/api/image/complements"
- working = False
-
- default_model = 'dalle-mini'
- models = [default_model]
-
- @classmethod
- def get_model(cls, model: str) -> str:
- return cls.default_model
-
- @classmethod
- async def create_async_generator(
- cls,
- model: str,
- messages: Messages,
- proxy: str = None,
- response: str = "url", # base64 or url
- **kwargs
- ) -> AsyncResult:
- # Retrieve the correct model to use
- model = cls.get_model(model)
-
- # Format the prompt from the messages
- prompt = messages[0]['content']
-
- headers = {
- "Content-Type": "application/json"
- }
- payload = {
- "prompt": prompt,
- "model": model,
- "response": response
- }
-
- async with ClientSession(headers=headers) as session:
- async with session.post(cls.api_endpoint, json=payload, proxy=proxy) as response:
- response.raise_for_status()
- text_data = await response.text()
-
- try:
- # Parse the JSON response
- json_start = text_data.find('{')
- json_data = text_data[json_start:]
- data = json.loads(json_data)
-
- # Check if the response contains images
- if 'images' in data and len(data['images']) > 0:
- image_url = data['images'][0]
- yield ImageResponse(image_url, prompt)
- else:
- yield ImageResponse("No images found in the response.", prompt)
- except json.JSONDecodeError:
- yield ImageResponse("Failed to parse JSON. Response might not be in JSON format.", prompt)
diff --git a/g4f/Provider/nexra/NexraLLaMA31.py b/g4f/Provider/nexra/NexraLLaMA31.py
deleted file mode 100644
index 53c30720..00000000
--- a/g4f/Provider/nexra/NexraLLaMA31.py
+++ /dev/null
@@ -1,91 +0,0 @@
-from __future__ import annotations
-
-from aiohttp import ClientSession
-import json
-
-from ...typing import AsyncResult, Messages
-from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin
-from ..helper import format_prompt
-
-
-class NexraLLaMA31(AsyncGeneratorProvider, ProviderModelMixin):
- label = "Nexra LLaMA 3.1"
- url = "https://nexra.aryahcr.cc/documentation/llama-3.1/en"
- api_endpoint = "https://nexra.aryahcr.cc/api/chat/complements"
- working = False
- supports_stream = True
-
- default_model = 'llama-3.1'
- models = [default_model]
- model_aliases = {
- "llama-3.1-8b": "llama-3.1",
- }
-
- @classmethod
- def get_model(cls, model: str) -> str:
- if model in cls.models:
- return model
- elif model in cls.model_aliases:
- return cls.model_aliases.get(model, cls.default_model)
- else:
- return cls.default_model
-
- @classmethod
- async def create_async_generator(
- cls,
- model: str,
- messages: Messages,
- proxy: str = None,
- stream: bool = False,
- markdown: bool = False,
- **kwargs
- ) -> AsyncResult:
- model = cls.get_model(model)
-
- headers = {
- "Content-Type": "application/json"
- }
-
- async with ClientSession(headers=headers) as session:
- prompt = format_prompt(messages)
- data = {
- "messages": [
- {
- "role": "user",
- "content": prompt
- }
- ],
- "stream": stream,
- "markdown": markdown,
- "model": model
- }
-
- async with session.post(f"{cls.api_endpoint}", json=data, proxy=proxy) as response:
- response.raise_for_status()
-
- if stream:
- # Streamed response handling
- collected_message = ""
- async for chunk in response.content.iter_any():
- if chunk:
- decoded_chunk = chunk.decode().strip().split("\x1e")
- for part in decoded_chunk:
- if part:
- message_data = json.loads(part)
-
- # Collect messages until 'finish': true
- if 'message' in message_data and message_data['message']:
- collected_message = message_data['message']
-
- # When finish is true, yield the final collected message
- if message_data.get('finish', False):
- yield collected_message
- return
- else:
- # Non-streamed response handling
- response_data = await response.json(content_type=None)
-
- # Yield the message directly from the response
- if 'message' in response_data and response_data['message']:
- yield response_data['message']
- return
diff --git a/g4f/Provider/nexra/__init__.py b/g4f/Provider/nexra/__init__.py
index c2e6b2f6..32b159d1 100644
--- a/g4f/Provider/nexra/__init__.py
+++ b/g4f/Provider/nexra/__init__.py
@@ -6,11 +6,9 @@ from .NexraChatGptV2 import NexraChatGptV2
from .NexraChatGptWeb import NexraChatGptWeb
from .NexraDallE import NexraDallE
from .NexraDallE2 import NexraDallE2
-from .NexraDalleMini import NexraDalleMini
from .NexraEmi import NexraEmi
from .NexraFluxPro import NexraFluxPro
from .NexraGeminiPro import NexraGeminiPro
-from .NexraLLaMA31 import NexraLLaMA31
from .NexraMidjourney import NexraMidjourney
from .NexraProdiaAI import NexraProdiaAI
from .NexraQwen import NexraQwen