summary refs log tree commit diff stats
path: root/g4f/Provider
diff options
context:
space:
mode:
authorHeiner Lohaus <hlohaus@users.noreply.github.com>2024-05-05 23:38:31 +0200
committerHeiner Lohaus <hlohaus@users.noreply.github.com>2024-05-05 23:38:31 +0200
commit8fcf618bbe164e6c6f3e0df5e6c9809f35a50e6c (patch)
treed956ccf9116d4f4db0157c5025dcfbb2a4804487 /g4f/Provider
parentAdd AppConfig class, update readme (diff)
download gpt4free-8fcf618bbe164e6c6f3e0df5e6c9809f35a50e6c.tar
gpt4free-8fcf618bbe164e6c6f3e0df5e6c9809f35a50e6c.tar.gz
gpt4free-8fcf618bbe164e6c6f3e0df5e6c9809f35a50e6c.tar.bz2
gpt4free-8fcf618bbe164e6c6f3e0df5e6c9809f35a50e6c.tar.lz
gpt4free-8fcf618bbe164e6c6f3e0df5e6c9809f35a50e6c.tar.xz
gpt4free-8fcf618bbe164e6c6f3e0df5e6c9809f35a50e6c.tar.zst
gpt4free-8fcf618bbe164e6c6f3e0df5e6c9809f35a50e6c.zip
Diffstat (limited to 'g4f/Provider')
-rw-r--r--g4f/Provider/Bing.py7
-rw-r--r--g4f/Provider/DeepInfra.py17
-rw-r--r--g4f/Provider/Ollama.py33
-rw-r--r--g4f/Provider/__init__.py1
-rw-r--r--g4f/Provider/needs_auth/Openai.py21
5 files changed, 57 insertions, 22 deletions
diff --git a/g4f/Provider/Bing.py b/g4f/Provider/Bing.py
index a9eb78e6..7e64a617 100644
--- a/g4f/Provider/Bing.py
+++ b/g4f/Provider/Bing.py
@@ -457,10 +457,13 @@ async def stream_generate(
returned_text = ''
message_id = None
while do_read:
- msg = await wss.receive_str()
+ try:
+ msg = await wss.receive_str()
+ except TypeError:
+ continue
objects = msg.split(Defaults.delimiter)
for obj in objects:
- if obj is None or not obj:
+ if not obj:
continue
try:
response = json.loads(obj)
diff --git a/g4f/Provider/DeepInfra.py b/g4f/Provider/DeepInfra.py
index a74601e8..9691539e 100644
--- a/g4f/Provider/DeepInfra.py
+++ b/g4f/Provider/DeepInfra.py
@@ -1,8 +1,7 @@
from __future__ import annotations
import requests
-from ..typing import AsyncResult, Messages, ImageType
-from ..image import to_data_uri
+from ..typing import AsyncResult, Messages
from .needs_auth.Openai import Openai
class DeepInfra(Openai):
@@ -33,7 +32,6 @@ class DeepInfra(Openai):
model: str,
messages: Messages,
stream: bool,
- image: ImageType = None,
api_base: str = "https://api.deepinfra.com/v1/openai",
temperature: float = 0.7,
max_tokens: int = 1028,
@@ -54,19 +52,6 @@ class DeepInfra(Openai):
'sec-ch-ua-mobile': '?0',
'sec-ch-ua-platform': '"macOS"',
}
- if image is not None:
- if not model:
- model = cls.default_vision_model
- messages[-1]["content"] = [
- {
- "type": "image_url",
- "image_url": {"url": to_data_uri(image)}
- },
- {
- "type": "text",
- "text": messages[-1]["content"]
- }
- ]
return super().create_async_generator(
model, messages,
stream=stream,
diff --git a/g4f/Provider/Ollama.py b/g4f/Provider/Ollama.py
new file mode 100644
index 00000000..a44aaacd
--- /dev/null
+++ b/g4f/Provider/Ollama.py
@@ -0,0 +1,33 @@
+from __future__ import annotations
+
+import requests
+
+from .needs_auth.Openai import Openai
+from ..typing import AsyncResult, Messages
+
+class Ollama(Openai):
+ label = "Ollama"
+ url = "https://ollama.com"
+ needs_auth = False
+ working = True
+
+ @classmethod
+ def get_models(cls):
+ if not cls.models:
+ url = 'http://127.0.0.1:11434/api/tags'
+ models = requests.get(url).json()["models"]
+ cls.models = [model['name'] for model in models]
+ cls.default_model = cls.models[0]
+ return cls.models
+
+ @classmethod
+ def create_async_generator(
+ cls,
+ model: str,
+ messages: Messages,
+ api_base: str = "http://localhost:11434/v1",
+ **kwargs
+ ) -> AsyncResult:
+ return super().create_async_generator(
+ model, messages, api_base=api_base, **kwargs
+ ) \ No newline at end of file
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index 4c0e3f32..d0c0d8b6 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -43,6 +43,7 @@ from .Llama import Llama
from .Local import Local
from .MetaAI import MetaAI
from .MetaAIAccount import MetaAIAccount
+from .Ollama import Ollama
from .PerplexityLabs import PerplexityLabs
from .Pi import Pi
from .Replicate import Replicate
diff --git a/g4f/Provider/needs_auth/Openai.py b/g4f/Provider/needs_auth/Openai.py
index f73c1011..9da6bad8 100644
--- a/g4f/Provider/needs_auth/Openai.py
+++ b/g4f/Provider/needs_auth/Openai.py
@@ -4,9 +4,10 @@ import json
from ..helper import filter_none
from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin, FinishReason
-from ...typing import Union, Optional, AsyncResult, Messages
+from ...typing import Union, Optional, AsyncResult, Messages, ImageType
from ...requests import StreamSession, raise_for_status
from ...errors import MissingAuthError, ResponseError
+from ...image import to_data_uri
class Openai(AsyncGeneratorProvider, ProviderModelMixin):
label = "OpenAI API"
@@ -23,6 +24,7 @@ class Openai(AsyncGeneratorProvider, ProviderModelMixin):
messages: Messages,
proxy: str = None,
timeout: int = 120,
+ image: ImageType = None,
api_key: str = None,
api_base: str = "https://api.openai.com/v1",
temperature: float = None,
@@ -36,6 +38,19 @@ class Openai(AsyncGeneratorProvider, ProviderModelMixin):
) -> AsyncResult:
if cls.needs_auth and api_key is None:
raise MissingAuthError('Add a "api_key"')
+ if image is not None:
+ if not model and hasattr(cls, "default_vision_model"):
+ model = cls.default_vision_model
+ messages[-1]["content"] = [
+ {
+ "type": "image_url",
+ "image_url": {"url": to_data_uri(image)}
+ },
+ {
+ "type": "text",
+ "text": messages[-1]["content"]
+ }
+ ]
async with StreamSession(
proxies={"all": proxy},
headers=cls.get_headers(stream, api_key, headers),
@@ -51,7 +66,6 @@ class Openai(AsyncGeneratorProvider, ProviderModelMixin):
stream=stream,
**extra_data
)
-
async with session.post(f"{api_base.rstrip('/')}/chat/completions", json=data) as response:
await raise_for_status(response)
if not stream:
@@ -103,8 +117,7 @@ class Openai(AsyncGeneratorProvider, ProviderModelMixin):
"Content-Type": "application/json",
**(
{"Authorization": f"Bearer {api_key}"}
- if cls.needs_auth and api_key is not None
- else {}
+ if api_key is not None else {}
),
**({} if headers is None else headers)
} \ No newline at end of file