author     kqlio67 <kqlio67@users.noreply.github.com>   2024-11-11 19:21:03 +0100
committer  kqlio67 <kqlio67@users.noreply.github.com>   2024-11-11 19:21:03 +0100
commit     82b8c22b0b90590b7aae2685852910193a0f379d (patch)
tree       720fb61b345f5c6dff761e7f7f87459b942f72bb
parent     feat(g4f/Provider/airforce/AirforceImage.py): Dynamically fetch image models from API (diff)
-rw-r--r--  docs/providers-and-models.md            2
-rw-r--r--  g4f/Provider/Airforce.py                2
-rw-r--r--  g4f/Provider/airforce/AirforceChat.py   8
-rw-r--r--  g4f/models.py                          14
4 files changed, 9 insertions, 17 deletions
diff --git a/docs/providers-and-models.md b/docs/providers-and-models.md
index dc29eb23..2a53cb22 100644
--- a/docs/providers-and-models.md
+++ b/docs/providers-and-models.md
@@ -20,7 +20,7 @@ This document provides an overview of various AI providers and models, including
|[ai4chat.co](https://www.ai4chat.co)|`g4f.Provider.Ai4Chat`|`gpt-4`|❌|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
|[aichatfree.info](https://aichatfree.info)|`g4f.Provider.AIChatFree`|`gemini-pro`|❌|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
|[aimathgpt.forit.ai](https://aimathgpt.forit.ai)|`g4f.Provider.AiMathGPT`|`llama-3.1-70b`|❌|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
-|[api.airforce](https://api.airforce)|`g4f.Provider.Airforce`|`gpt-4o, gpt-4o-mini, gpt-4-turbo, llama-2-7b, llama-3.1-8b, llama-3.1-70b, hermes-2-pro, hermes-2-dpo, phi-2, deepseek-coder, openchat-3.5, openhermes-2.5, cosmosrp, lfm-40b, german-7b, zephyr-7b, neural-7b`|`flux, flux-realism', flux-anime, flux-3d, flux-disney, flux-pixel, flux-4o, any-dark, sdxl`|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
+|[api.airforce](https://api.airforce)|`g4f.Provider.Airforce`|`gpt-4o, gpt-4o-mini, gpt-4-turbo, llama-2-7b, llama-3.1-8b, llama-3.1-70b, hermes-2-pro, hermes-2-dpo, phi-2, deepseek-coder, openchat-3.5, openhermes-2.5, lfm-40b, german-7b, zephyr-7b, neural-7b`|`flux, flux-realism', flux-anime, flux-3d, flux-disney, flux-pixel, flux-4o, any-dark, sdxl`|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
|[aiuncensored.info](https://www.aiuncensored.info)|`g4f.Provider.AIUncensored`|✔|✔|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
|[allyfy.chat](https://allyfy.chat/)|`g4f.Provider.Allyfy`|`gpt-3.5-turbo`|❌|❌|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌|
|[bing.com](https://bing.com/chat)|`g4f.Provider.Bing`|`gpt-4`|✔|`gpt-4-vision`|✔|![Active](https://img.shields.io/badge/Active-brightgreen)|❌+✔|
diff --git a/g4f/Provider/Airforce.py b/g4f/Provider/Airforce.py
index 8ea0a174..c7ae44c0 100644
--- a/g4f/Provider/Airforce.py
+++ b/g4f/Provider/Airforce.py
@@ -20,7 +20,7 @@ class Airforce(AsyncGeneratorProvider, ProviderModelMixin):
supports_message_history = AirforceChat.supports_message_history
default_model = AirforceChat.default_model
- models = [*AirforceChat.text_models, *AirforceImage.image_models]
+ models = [*AirforceChat.models, *AirforceImage.models]
model_aliases = {
**AirforceChat.model_aliases,
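
The Airforce.py change above switches the aggregate provider from the old `text_models`/`image_models` attributes to the `models` lists now exposed by both sub-providers. A minimal sketch of that composition pattern follows; everything except the patched `models = [...]` line is a placeholder, since the real sub-provider classes build their model lists dynamically from the api.airforce endpoints.

```python
# Minimal sketch of the composition pattern used by the patched line; the
# class bodies are placeholders, not the real provider implementations.

class AirforceChat:
    default_model = "llama-3.1-70b-chat"                 # placeholder value
    models = ["llama-3.1-70b-chat", "gpt-4o-mini"]       # placeholder entries
    model_aliases = {"llama-3.1-70b": "llama-3.1-70b-chat"}

class AirforceImage:
    models = ["flux", "sdxl"]                            # placeholder entries

class Airforce:
    default_model = AirforceChat.default_model
    # Same list-unpacking idiom as the patched line in Airforce.py:
    models = [*AirforceChat.models, *AirforceImage.models]
    model_aliases = {**AirforceChat.model_aliases}

print(Airforce.models)
# ['llama-3.1-70b-chat', 'gpt-4o-mini', 'flux', 'sdxl']
```
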
diff --git a/g4f/Provider/airforce/AirforceChat.py b/g4f/Provider/airforce/AirforceChat.py
index fc375270..cec911a3 100644
--- a/g4f/Provider/airforce/AirforceChat.py
+++ b/g4f/Provider/airforce/AirforceChat.py
@@ -1,8 +1,8 @@
from __future__ import annotations
import re
import json
-from aiohttp import ClientSession
import requests
+from aiohttp import ClientSession
from typing import List
from ...typing import AsyncResult, Messages
@@ -21,7 +21,11 @@ def clean_response(text: str) -> str:
]
for pattern in patterns:
text = re.sub(pattern, '', text)
- return text.strip()
+
+ # Remove the <|im_end|> token if present
+ text = text.replace("<|im_end|>", "").strip()
+
+ return text
def split_message(message: str, max_length: int = 1000) -> List[str]:
"""Splits the message into chunks of a given length (max_length)"""
diff --git a/g4f/models.py b/g4f/models.py
index 3b82270e..87a076a8 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -463,15 +463,6 @@ openhermes_2_5 = Model(
best_provider = Airforce
)
-
-### Pawan ###
-cosmosrp = Model(
- name = 'cosmosrp',
- base_provider = 'Pawan',
- best_provider = Airforce
-)
-
-
### Liquid ###
lfm_40b = Model(
name = 'lfm-40b',
@@ -666,6 +657,7 @@ class ModelUtils:
### Microsoft ###
+'phi-2': phi_2,
'phi-3.5-mini': phi_3_5_mini,
@@ -764,10 +756,6 @@ class ModelUtils:
### Teknium ###
'openhermes-2.5': openhermes_2_5,
-
-### Pawan ###
-'cosmosrp': cosmosrp,
-
### Liquid ###
'lfm-40b': lfm_40b,
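
The models.py hunks drop the Pawan-based `cosmosrp` model together with its `ModelUtils` alias and register the existing `phi_2` model under the `'phi-2'` key. A simplified sketch of the registry shape these edits touch, limited to entries visible in the diff (the `base_provider` strings follow the section headers in the hunks), with a reduced `Model` dataclass standing in for the real class:

```python
from dataclasses import dataclass

@dataclass
class Model:
    name: str
    base_provider: str
    best_provider: object = None

# Entries mirrored from the hunks above; the real table is much larger.
phi_2 = Model(name='phi-2', base_provider='Microsoft')
phi_3_5_mini = Model(name='phi-3.5-mini', base_provider='Microsoft')
openhermes_2_5 = Model(name='openhermes-2.5', base_provider='Teknium')
lfm_40b = Model(name='lfm-40b', base_provider='Liquid')

class ModelUtils:
    convert: dict[str, Model] = {
        'phi-2': phi_2,                 # alias added by this commit
        'phi-3.5-mini': phi_3_5_mini,
        'openhermes-2.5': openhermes_2_5,
        # 'cosmosrp' removed together with its Model definition
        'lfm-40b': lfm_40b,
    }

print(ModelUtils.convert['phi-2'].base_provider)   # Microsoft
print('cosmosrp' in ModelUtils.convert)            # False after this commit
```
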