author     H Lohaus <hlohaus@users.noreply.github.com>   2024-05-05 23:51:40 +0200
committer  GitHub <noreply@github.com>                   2024-05-05 23:51:40 +0200
commit     1d02a06456f2a05bd88782949ba28c9244fa1e6a (patch)
tree       7d2505b4220f7b83adfa5cb81f076ed779176067
parent     Merge pull request #1923 from Hydra566/patch-2 (diff)
parent     Add Ollama provider, Add vision support to Openai (diff)
download   gpt4free-0.3.0.9 (tar, tar.gz, tar.bz2, tar.lz, tar.xz, tar.zst, zip)
-rw-r--r--  g4f/Provider/Bing.py               |  7
-rw-r--r--  g4f/Provider/DeepInfra.py          | 17
-rw-r--r--  g4f/Provider/Ollama.py             | 33
-rw-r--r--  g4f/Provider/__init__.py           |  1
-rw-r--r--  g4f/Provider/needs_auth/Openai.py  | 21
-rw-r--r--  g4f/api/__init__.py                |  2
-rw-r--r--  g4f/cli.py                         | 21
-rw-r--r--  g4f/requests/__init__.py           |  2
8 files changed, 72 insertions, 32 deletions
diff --git a/g4f/Provider/Bing.py b/g4f/Provider/Bing.py
index a9eb78e6..7e64a617 100644
--- a/g4f/Provider/Bing.py
+++ b/g4f/Provider/Bing.py
@@ -457,10 +457,13 @@ async def stream_generate(
returned_text = ''
message_id = None
while do_read:
- msg = await wss.receive_str()
+ try:
+ msg = await wss.receive_str()
+ except TypeError:
+ continue
objects = msg.split(Defaults.delimiter)
for obj in objects:
- if obj is None or not obj:
+ if not obj:
continue
try:
response = json.loads(obj)
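Two small hardening changes in this hunk: a websocket frame that is not text no longer aborts the stream loop (the TypeError from receive_str() is caught and the loop simply continues), and the old guard "if obj is None or not obj" collapses to "if not obj", since "not" already treats None as falsy. A quick check of that equivalence in a Python shell:

    >>> not None, not ""
    (True, True)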
diff --git a/g4f/Provider/DeepInfra.py b/g4f/Provider/DeepInfra.py
index a74601e8..9691539e 100644
--- a/g4f/Provider/DeepInfra.py
+++ b/g4f/Provider/DeepInfra.py
@@ -1,8 +1,7 @@
from __future__ import annotations
import requests
-from ..typing import AsyncResult, Messages, ImageType
-from ..image import to_data_uri
+from ..typing import AsyncResult, Messages
from .needs_auth.Openai import Openai
class DeepInfra(Openai):
@@ -33,7 +32,6 @@ class DeepInfra(Openai):
model: str,
messages: Messages,
stream: bool,
- image: ImageType = None,
api_base: str = "https://api.deepinfra.com/v1/openai",
temperature: float = 0.7,
max_tokens: int = 1028,
@@ -54,19 +52,6 @@ class DeepInfra(Openai):
'sec-ch-ua-mobile': '?0',
'sec-ch-ua-platform': '"macOS"',
}
- if image is not None:
- if not model:
- model = cls.default_vision_model
- messages[-1]["content"] = [
- {
- "type": "image_url",
- "image_url": {"url": to_data_uri(image)}
- },
- {
- "type": "text",
- "text": messages[-1]["content"]
- }
- ]
return super().create_async_generator(
model, messages,
stream=stream,
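The removed image-handling block is not lost: the same image-to-content-parts conversion now lives in the shared Openai base class (see the Openai.py hunk below), so DeepInfra inherits it through create_async_generator. A rough sketch of a direct provider call under that assumption (unauthenticated use and raw bytes being accepted as ImageType are assumptions here, not guarantees):

    import asyncio
    from g4f.Provider import DeepInfra

    async def main():
        with open("photo.jpg", "rb") as f:          # illustrative local file
            image = f.read()
        # An empty model falls back to cls.default_vision_model in the base class.
        async for chunk in DeepInfra.create_async_generator(
            model="",
            messages=[{"role": "user", "content": "Describe this image."}],
            stream=True,
            image=image,
        ):
            if isinstance(chunk, str):              # skip FinishReason objects
                print(chunk, end="")

    asyncio.run(main())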
diff --git a/g4f/Provider/Ollama.py b/g4f/Provider/Ollama.py
new file mode 100644
index 00000000..a44aaacd
--- /dev/null
+++ b/g4f/Provider/Ollama.py
@@ -0,0 +1,33 @@
+from __future__ import annotations
+
+import requests
+
+from .needs_auth.Openai import Openai
+from ..typing import AsyncResult, Messages
+
+class Ollama(Openai):
+ label = "Ollama"
+ url = "https://ollama.com"
+ needs_auth = False
+ working = True
+
+ @classmethod
+ def get_models(cls):
+ if not cls.models:
+ url = 'http://127.0.0.1:11434/api/tags'
+ models = requests.get(url).json()["models"]
+ cls.models = [model['name'] for model in models]
+ cls.default_model = cls.models[0]
+ return cls.models
+
+ @classmethod
+ def create_async_generator(
+ cls,
+ model: str,
+ messages: Messages,
+ api_base: str = "http://localhost:11434/v1",
+ **kwargs
+ ) -> AsyncResult:
+ return super().create_async_generator(
+ model, messages, api_base=api_base, **kwargs
+    )
\ No newline at end of file
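A minimal usage sketch for the new provider, assuming a local Ollama server is already running on its default port with at least one model pulled (the model name below is illustrative) and using the top-level g4f.ChatCompletion interface:

    import g4f
    from g4f.Provider import Ollama

    # get_models() queries the local Ollama API (http://127.0.0.1:11434/api/tags)
    # and caches the model list on the class.
    print(Ollama.get_models())

    response = g4f.ChatCompletion.create(
        model="llama3",                          # must be a model pulled into Ollama
        provider=Ollama,
        messages=[{"role": "user", "content": "Hello"}],
    )
    print(response)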
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index 4c0e3f32..d0c0d8b6 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -43,6 +43,7 @@ from .Llama import Llama
from .Local import Local
from .MetaAI import MetaAI
from .MetaAIAccount import MetaAIAccount
+from .Ollama import Ollama
from .PerplexityLabs import PerplexityLabs
from .Pi import Pi
from .Replicate import Replicate
diff --git a/g4f/Provider/needs_auth/Openai.py b/g4f/Provider/needs_auth/Openai.py
index f73c1011..9da6bad8 100644
--- a/g4f/Provider/needs_auth/Openai.py
+++ b/g4f/Provider/needs_auth/Openai.py
@@ -4,9 +4,10 @@ import json
from ..helper import filter_none
from ..base_provider import AsyncGeneratorProvider, ProviderModelMixin, FinishReason
-from ...typing import Union, Optional, AsyncResult, Messages
+from ...typing import Union, Optional, AsyncResult, Messages, ImageType
from ...requests import StreamSession, raise_for_status
from ...errors import MissingAuthError, ResponseError
+from ...image import to_data_uri
class Openai(AsyncGeneratorProvider, ProviderModelMixin):
label = "OpenAI API"
@@ -23,6 +24,7 @@ class Openai(AsyncGeneratorProvider, ProviderModelMixin):
messages: Messages,
proxy: str = None,
timeout: int = 120,
+ image: ImageType = None,
api_key: str = None,
api_base: str = "https://api.openai.com/v1",
temperature: float = None,
@@ -36,6 +38,19 @@ class Openai(AsyncGeneratorProvider, ProviderModelMixin):
) -> AsyncResult:
if cls.needs_auth and api_key is None:
raise MissingAuthError('Add a "api_key"')
+ if image is not None:
+ if not model and hasattr(cls, "default_vision_model"):
+ model = cls.default_vision_model
+ messages[-1]["content"] = [
+ {
+ "type": "image_url",
+ "image_url": {"url": to_data_uri(image)}
+ },
+ {
+ "type": "text",
+ "text": messages[-1]["content"]
+ }
+ ]
async with StreamSession(
proxies={"all": proxy},
headers=cls.get_headers(stream, api_key, headers),
@@ -51,7 +66,6 @@ class Openai(AsyncGeneratorProvider, ProviderModelMixin):
stream=stream,
**extra_data
)
-
async with session.post(f"{api_base.rstrip('/')}/chat/completions", json=data) as response:
await raise_for_status(response)
if not stream:
@@ -103,8 +117,7 @@ class Openai(AsyncGeneratorProvider, ProviderModelMixin):
"Content-Type": "application/json",
**(
{"Authorization": f"Bearer {api_key}"}
- if cls.needs_auth and api_key is not None
- else {}
+ if api_key is not None else {}
),
**({} if headers is None else headers)
        }
\ No newline at end of file
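For reference, the rewritten last message follows the OpenAI "content parts" vision format; a sketch of what the image branch above produces (the data URI is illustrative, to_data_uri() builds the real one from the passed image):

    # Before: {"role": "user", "content": "What is in this picture?"}
    vision_message = {
        "role": "user",
        "content": [
            {"type": "image_url",
             "image_url": {"url": "data:image/jpeg;base64,/9j/4AAQ..."}},
            {"type": "text", "text": "What is in this picture?"},
        ],
    }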
diff --git a/g4f/api/__init__.py b/g4f/api/__init__.py
index f252ab71..20cc42db 100644
--- a/g4f/api/__init__.py
+++ b/g4f/api/__init__.py
@@ -201,7 +201,7 @@ def run_api(
if bind is not None:
host, port = bind.split(":")
uvicorn.run(
- f"g4f.api:{'create_app_debug' if debug else 'create_app'}",
+ f"g4f.api:create_app{'_debug' if debug else ''}",
host=host, port=int(port),
workers=workers,
use_colors=use_colors,
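The refactored f-string resolves to the same uvicorn application strings as the old form, e.g.:

    >>> debug = True
    >>> f"g4f.api:create_app{'_debug' if debug else ''}"
    'g4f.api:create_app_debug'
    >>> f"g4f.api:{'create_app_debug' if debug else 'create_app'}"
    'g4f.api:create_app_debug'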
diff --git a/g4f/cli.py b/g4f/cli.py
index fe219b38..408dc914 100644
--- a/g4f/cli.py
+++ b/g4f/cli.py
@@ -11,6 +11,10 @@ def main():
api_parser = subparsers.add_parser("api")
api_parser.add_argument("--bind", default="0.0.0.0:1337", help="The bind string.")
api_parser.add_argument("--debug", action="store_true", help="Enable verbose logging.")
+ api_parser.add_argument("--model", default=None, help="Default model for chat completion. (incompatible with --debug and --workers)")
+ api_parser.add_argument("--provider", choices=[provider.__name__ for provider in Provider.__providers__ if provider.working],
+ default=None, help="Default provider for chat completion. (incompatible with --debug and --workers)")
+ api_parser.add_argument("--proxy", default=None, help="Default used proxy.")
api_parser.add_argument("--workers", type=int, default=None, help="Number of workers.")
api_parser.add_argument("--disable-colors", action="store_true", help="Don't use colors.")
api_parser.add_argument("--ignore-cookie-files", action="store_true", help="Don't read .har and cookie files.")
@@ -31,14 +35,15 @@ def main():
def run_api_args(args):
from g4f.api import AppConfig, run_api
- AppConfig.set_ignore_cookie_files(
- args.ignore_cookie_files
- )
- AppConfig.set_list_ignored_providers(
- args.ignored_providers
- )
- AppConfig.set_g4f_api_key(
- args.g4f_api_key
+ AppConfig.set_config(
+ ignore_cookie_files=args.ignore_cookie_files,
+ ignored_providers=args.ignored_providers,
+ g4f_api_key=args.g4f_api_key,
+ defaults={
+ "model": args.model,
+ "provider": args.provider,
+ "proxy": args.proxy
+ }
)
run_api(
bind=args.bind,
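For illustration, an invocation along the lines of "g4f api --provider Ollama --model llama3" now funnels into the single consolidated AppConfig.set_config() call shown above. A sketch of the equivalent programmatic setup (the model name is illustrative, and calling run_api() with only bind assumes the remaining parameters have defaults):

    from g4f.api import AppConfig, run_api

    AppConfig.set_config(
        ignore_cookie_files=False,
        ignored_providers=[],
        g4f_api_key=None,
        defaults={
            "model": "llama3",       # illustrative; must exist for the provider
            "provider": "Ollama",
            "proxy": None,
        },
    )
    run_api(bind="0.0.0.0:1337")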
diff --git a/g4f/requests/__init__.py b/g4f/requests/__init__.py
index e65de99a..80fc44b3 100644
--- a/g4f/requests/__init__.py
+++ b/g4f/requests/__init__.py
@@ -40,7 +40,7 @@ async def get_args_from_webview(url: str) -> dict:
"Referer": window.real_url
}
cookies = [list(*cookie.items()) for cookie in window.get_cookies()]
- cookies = dict([(name, cookie.value) for name, cookie in cookies])
+ cookies = {name: cookie.value for name, cookie in cookies}
window.destroy()
return {"headers": headers, "cookies": cookies}