author     H Lohaus <hlohaus@users.noreply.github.com>  2024-04-10 08:14:50 +0200
committer  GitHub <noreply@github.com>                   2024-04-10 08:14:50 +0200
commit     00951eb79114adf74ad1a3f1ce596e9e0fa932bf (patch)
tree       fea75e7745d69b09d91b0003e5dbf12b77380223 /g4f/Provider/needs_auth
parent     Update Dockerfile (diff)
Diffstat (limited to 'g4f/Provider/needs_auth')
-rw-r--r--  g4f/Provider/needs_auth/OpenRouter.py  31
-rw-r--r--  g4f/Provider/needs_auth/Openai.py      13
-rw-r--r--  g4f/Provider/needs_auth/OpenaiChat.py   2
-rw-r--r--  g4f/Provider/needs_auth/__init__.py     3
4 files changed, 37 insertions, 12 deletions
diff --git a/g4f/Provider/needs_auth/OpenRouter.py b/g4f/Provider/needs_auth/OpenRouter.py
new file mode 100644
index 00000000..e5f87076
--- /dev/null
+++ b/g4f/Provider/needs_auth/OpenRouter.py
@@ -0,0 +1,31 @@
+from __future__ import annotations
+
+import requests
+
+from .Openai import Openai
+from ...typing import AsyncResult, Messages
+
+class OpenRouter(Openai):
+    url = "https://openrouter.ai"
+    working = True
+    default_model = "openrouter/auto"
+
+    @classmethod
+    def get_models(cls):
+        if not cls.models:
+            url = 'https://openrouter.ai/api/v1/models'
+            models = requests.get(url).json()["data"]
+            cls.models = [model['id'] for model in models]
+        return cls.models
+
+    @classmethod
+    def create_async_generator(
+        cls,
+        model: str,
+        messages: Messages,
+        api_base: str = "https://openrouter.ai/api/v1",
+        **kwargs
+    ) -> AsyncResult:
+        return super().create_async_generator(
+            model, messages, api_base=api_base, **kwargs
+        )
\ No newline at end of file
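A minimal usage sketch for the new provider (not part of the commit). It assumes the class is importable from the path added above and that the Openai base class accepts an api_key keyword, which this diff does not show:

import asyncio

from g4f.Provider.needs_auth.OpenRouter import OpenRouter

async def main():
    # get_models() fetches the OpenRouter model list once and caches it on the class
    print(OpenRouter.get_models()[:5])

    # create_async_generator() defers to the Openai base class with the OpenRouter
    # api_base; api_key handling is assumed to live in that base class
    messages = [{"role": "user", "content": "Hello"}]
    async for chunk in OpenRouter.create_async_generator(
        "openrouter/auto", messages, api_key="sk-or-..."
    ):
        print(chunk, end="")

asyncio.run(main())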
diff --git a/g4f/Provider/needs_auth/Openai.py b/g4f/Provider/needs_auth/Openai.py
index 6cd2cf86..ea09e950 100644
--- a/g4f/Provider/needs_auth/Openai.py
+++ b/g4f/Provider/needs_auth/Openai.py
@@ -2,10 +2,10 @@ from __future__ import annotations
import json
+from ..helper import filter_none
from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin, FinishReason
from ...typing import Union, Optional, AsyncResult, Messages
-from ...requests.raise_for_status import raise_for_status
-from ...requests import StreamSession
+from ...requests import StreamSession, raise_for_status
from ...errors import MissingAuthError, ResponseError
class Openai(AsyncGeneratorProvider, ProviderModelMixin):
@@ -98,11 +98,4 @@ class Openai(AsyncGeneratorProvider, ProviderModelMixin):
                else {}
            ),
            **({} if headers is None else headers)
-        }
-
-def filter_none(**kwargs) -> dict:
-    return {
-        key: value
-        for key, value in kwargs.items()
-        if value is not None
-    }
\ No newline at end of file
+        }
\ No newline at end of file
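The filter_none helper removed above is not deleted outright: the new import at the top of this file pulls it from ..helper, a module outside this diffstat. A small sketch of what it does, taken from the removed definition:

# Drops keyword arguments whose value is None, so unset options never end up
# in the request payload.
def filter_none(**kwargs) -> dict:
    return {
        key: value
        for key, value in kwargs.items()
        if value is not None
    }

# Example: None-valued options simply disappear from the resulting dict.
data = {"model": "gpt-4", **filter_none(temperature=None, max_tokens=100)}
assert data == {"model": "gpt-4", "max_tokens": 100}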
diff --git a/g4f/Provider/needs_auth/OpenaiChat.py b/g4f/Provider/needs_auth/OpenaiChat.py
index 64e3aeac..7491725f 100644
--- a/g4f/Provider/needs_auth/OpenaiChat.py
+++ b/g4f/Provider/needs_auth/OpenaiChat.py
@@ -334,7 +334,7 @@ class OpenaiChat(AsyncGeneratorProvider, ProviderModelMixin):
            RuntimeError: If an error occurs during processing.
        """
        async with StreamSession(
-            proxies={"https": proxy},
+            proxies={"all": proxy},
            impersonate="chrome",
            timeout=timeout
        ) as session:
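The one-line change above widens proxy routing from HTTPS-only to all schemes. A sketch of the two mappings (assuming StreamSession passes the proxies dict through to its underlying HTTP client unchanged, which this diff does not show):

proxy = "http://127.0.0.1:8080"  # hypothetical proxy URL

# before: only https:// requests were routed through the proxy
proxies_before = {"https": proxy}

# after: the "all" key routes requests of every scheme through the proxy
proxies_after = {"all": proxy}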
diff --git a/g4f/Provider/needs_auth/__init__.py b/g4f/Provider/needs_auth/__init__.py
index 581335e1..7b793223 100644
--- a/g4f/Provider/needs_auth/__init__.py
+++ b/g4f/Provider/needs_auth/__init__.py
@@ -5,4 +5,5 @@ from .ThebApi import ThebApi
from .OpenaiChat import OpenaiChat
from .Poe import Poe
from .Openai import Openai
-from .Groq import Groq
\ No newline at end of file
+from .Groq import Groq
+from .OpenRouter import OpenRouter
\ No newline at end of file