 README.md                |  1 -
 etc/examples/ecosia.py   | 18 ------------------
 g4f/Provider/Ecosia.py   | 47 -----------------------------------------------
 g4f/Provider/__init__.py |  1 -
 g4f/models.py            |  3 ---
 5 files changed, 0 insertions(+), 70 deletions(-)
diff --git a/README.md b/README.md
index bf8c398f..9c53a97f 100644
--- a/README.md
+++ b/README.md
@@ -352,7 +352,6 @@ While we wait for gpt-5, here is a list of new models that are at least better t
| [chatgptx.de](https://chatgptx.de) | `g4f.Provider.ChatgptX` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ |
| [f1.cnote.top](https://f1.cnote.top) | `g4f.Provider.Cnote` | ✔️ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
| [duckduckgo.com](https://duckduckgo.com/duckchat) | `g4f.Provider.DuckDuckGo` | ✔️ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
-| [ecosia.org](https://www.ecosia.org) | `g4f.Provider.Ecosia` | ✔️ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
| [feedough.com](https://www.feedough.com) | `g4f.Provider.Feedough` | ✔️ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
| [flowgpt.com](https://flowgpt.com/chat) | `g4f.Provider.FlowGpt` | ✔️ | ❌ | ✔️ | ![Unknown](https://img.shields.io/badge/Unknown-grey) | ❌ |
| [freegptsnav.aifree.site](https://freegptsnav.aifree.site) | `g4f.Provider.FreeGpt` | ✔️ | ❌ | ✔️ | ![Active](https://img.shields.io/badge/Active-brightgreen) | ❌ |
diff --git a/etc/examples/ecosia.py b/etc/examples/ecosia.py
deleted file mode 100644
index 5a2ae520..00000000
--- a/etc/examples/ecosia.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import asyncio
-import g4f
-from g4f.client import AsyncClient
-
-async def main():
- client = AsyncClient(
- provider=g4f.Provider.Ecosia,
- )
- async for chunk in client.chat.completions.create(
- [{"role": "user", "content": "happy dogs on work. write some lines"}],
- g4f.models.default,
- stream=True,
- green=True,
- ):
- print(chunk.choices[0].delta.content or "", end="")
- print(f"\nwith {chunk.model}")
-
-asyncio.run(main()) \ No newline at end of file
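
Note on the deleted example: the extra green=True keyword was simply absorbed by the provider's **kwargs and had no effect; the eco ("green") backend was selected by model name, not by a flag (see the sp=eco branch in the provider below). A minimal sketch of what the intended call likely looked like while the provider still existed, reusing the now-deleted API for illustration only:

import asyncio
import g4f
from g4f.client import AsyncClient

async def main():
    client = AsyncClient(provider=g4f.Provider.Ecosia)
    # Passing "green" as the model name (it was listed in Ecosia.models)
    # routed the request to the eco backend; a green=True kwarg was ignored.
    async for chunk in client.chat.completions.create(
        [{"role": "user", "content": "happy dogs on work. write some lines"}],
        "green",
        stream=True,
    ):
        print(chunk.choices[0].delta.content or "", end="")

asyncio.run(main())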
diff --git a/g4f/Provider/Ecosia.py b/g4f/Provider/Ecosia.py
deleted file mode 100644
index 231412aa..00000000
--- a/g4f/Provider/Ecosia.py
+++ /dev/null
@@ -1,47 +0,0 @@
-
-from __future__ import annotations
-
-import base64
-import json
-from aiohttp import ClientSession, BaseConnector
-
-from ..typing import AsyncResult, Messages
-from ..requests.raise_for_status import raise_for_status
-from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
-from .helper import get_connector
-
-class Ecosia(AsyncGeneratorProvider, ProviderModelMixin):
- url = "https://www.ecosia.org"
- working = True
- supports_gpt_35_turbo = True
- default_model = "gpt-3.5-turbo-0125"
- models = [default_model, "green"]
- model_aliases = {"gpt-3.5-turbo": default_model}
-
- @classmethod
- async def create_async_generator(
- cls,
- model: str,
- messages: Messages,
- connector: BaseConnector = None,
- proxy: str = None,
- **kwargs
- ) -> AsyncResult:
- model = cls.get_model(model)
- headers = {
- "authority": "api.ecosia.org",
- "accept": "*/*",
- "origin": cls.url,
- "referer": f"{cls.url}/",
- "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.114 Safari/537.36",
- }
- async with ClientSession(headers=headers, connector=get_connector(connector, proxy)) as session:
- data = {
- "messages": base64.b64encode(json.dumps(messages).encode()).decode()
- }
- api_url = f"https://api.ecosia.org/v2/chat/?sp={'eco' if model == 'green' else 'productivity'}"
- async with session.post(api_url, json=data) as response:
- await raise_for_status(response)
- async for chunk in response.content.iter_any():
- if chunk:
- yield chunk.decode(errors="ignore") \ No newline at end of file
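
The whole removed provider boils down to a single POST: the message list is JSON-serialized, base64-encoded, and sent to api.ecosia.org, with the sp query parameter switching between the "productivity" and "eco" backends. A self-contained sketch of that request, with the endpoint and payload shape taken from the deleted code above (the service may no longer accept it):

import asyncio
import base64
import json
from aiohttp import ClientSession

async def ecosia_chat(messages: list, green: bool = False) -> str:
    # Payload shape from the deleted provider: base64-encoded JSON message list.
    payload = {"messages": base64.b64encode(json.dumps(messages).encode()).decode()}
    sp = "eco" if green else "productivity"
    headers = {
        "accept": "*/*",
        "origin": "https://www.ecosia.org",
        "referer": "https://www.ecosia.org/",
    }
    chunks = []
    async with ClientSession(headers=headers) as session:
        async with session.post(f"https://api.ecosia.org/v2/chat/?sp={sp}", json=payload) as response:
            response.raise_for_status()
            # The API streams the reply; collect raw chunks as they arrive.
            async for chunk in response.content.iter_any():
                if chunk:
                    chunks.append(chunk.decode(errors="ignore"))
    return "".join(chunks)

print(asyncio.run(ecosia_chat([{"role": "user", "content": "hello"}])))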
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index e60e1310..3aeabaaf 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -26,7 +26,6 @@ from .Cohere import Cohere
from .DeepInfra import DeepInfra
from .DeepInfraImage import DeepInfraImage
from .DuckDuckGo import DuckDuckGo
-from .Ecosia import Ecosia
from .Feedough import Feedough
from .FlowGpt import FlowGpt
from .FreeChatgpt import FreeChatgpt
diff --git a/g4f/models.py b/g4f/models.py
index 40de22ba..d031797e 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -12,7 +12,6 @@ from .Provider import (
Cnote,
DeepInfra,
DuckDuckGo,
- Ecosia,
Feedough,
FreeGpt,
Gemini,
@@ -59,7 +58,6 @@ default = Model(
ChatgptAi,
You,
OpenaiChat,
- Ecosia,
])
)
@@ -73,7 +71,6 @@ gpt_35_long = Model(
ChatgptNext,
OpenaiChat,
Koala,
- Ecosia,
DuckDuckGo,
])
)
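
With Ecosia dropped from both the default and gpt_35_long best-provider lists, code that pinned it explicitly has to move to another entry. A minimal migration sketch, assuming DuckDuckGo (still present in both lists above) keeps working:

import asyncio
import g4f
from g4f.client import AsyncClient

async def main():
    # DuckDuckGo remains in the best-provider lists after this commit.
    client = AsyncClient(provider=g4f.Provider.DuckDuckGo)
    async for chunk in client.chat.completions.create(
        [{"role": "user", "content": "happy dogs on work. write some lines"}],
        g4f.models.default,
        stream=True,
    ):
        print(chunk.choices[0].delta.content or "", end="")

asyncio.run(main())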