author     Heiner Lohaus <hlohaus@users.noreply.github.com>  2024-04-07 10:36:13 +0200
committer  Heiner Lohaus <hlohaus@users.noreply.github.com>  2024-04-07 10:36:13 +0200
commit     b35dfcd1b01c575b65e0299ef71d285dc8f41459 (patch)
tree       cfe5f4a390af62fafefd1d27ca2c82a23cdcab49 /g4f/client
parent     Update Gemini.py (diff)
Diffstat (limited to '')
-rw-r--r--  g4f/client/__init__.py                                           3
-rw-r--r--  g4f/client/async_client.py (renamed from g4f/client/async.py)  104
-rw-r--r--  g4f/client/client.py (renamed from g4f/client.py)              114
-rw-r--r--  g4f/client/helper.py                                            20
-rw-r--r--  g4f/client/image_models.py                                      10
-rw-r--r--  g4f/client/service.py                                          114
-rw-r--r--  g4f/client/types.py                                             12
7 files changed, 222 insertions, 155 deletions
diff --git a/g4f/client/__init__.py b/g4f/client/__init__.py
index e69de29b..5bb4ba35 100644
--- a/g4f/client/__init__.py
+++ b/g4f/client/__init__.py
@@ -0,0 +1,3 @@
+from .stubs import ChatCompletion, ChatCompletionChunk, ImagesResponse
+from .client import Client
+from .async_client import AsyncClient
\ No newline at end of file
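The new package `__init__.py` re-exports the response stubs and both client classes, so everything can be imported from `g4f.client`. A minimal sketch of the resulting import surface, assuming the package is installed as `g4f`:

    from g4f.client import Client, AsyncClient, ChatCompletion

    client = Client()             # synchronous client, defined in client.py below
    async_client = AsyncClient()  # asynchronous client, defined in async_client.py below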
diff --git a/g4f/client/async.py b/g4f/client/async_client.py
index 76e410fc..25de1c76 100644
--- a/g4f/client/async.py
+++ b/g4f/client/async_client.py
@@ -1,20 +1,21 @@
 from __future__ import annotations
 
-import re
-import os
 import time
 import random
 import string
 
 from .types import Client as BaseClient
-from .types import BaseProvider, ProviderType, FinishReason
-from .stubs import ChatCompletion, ChatCompletionChunk, Image, ImagesResponse
-from ..typing import Union, Iterator, Messages, ImageType, AsyncIerator
+from .types import ProviderType, FinishReason
+from .stubs import ChatCompletion, ChatCompletionChunk, ImagesResponse, Image
+from .types import AsyncIterResponse, ImageProvider
+from .image_models import ImageModels
+from .helper import filter_json, find_stop, filter_none, cast_iter_async
+from .service import get_last_provider, get_model_and_provider
+from ..typing import Union, Iterator, Messages, AsyncIterator, ImageType
+from ..errors import NoImageResponseError
 from ..image import ImageResponse as ImageProviderResponse
-from ..errors import NoImageResponseError, RateLimitError, MissingAuthError
-from .. import get_model_and_provider, get_last_provider
-from .helper import read_json, find_stop, filter_none
-ä
+from ..providers.base_provider import AsyncGeneratorProvider
+
 async def iter_response(
     response: AsyncIterator[str],
     stream: bool,
@@ -47,10 +48,10 @@ async def iter_response(
     else:
         if response_format is not None and "type" in response_format:
             if response_format["type"] == "json_object":
-                content = read_json(content)
+                content = filter_json(content)
         yield ChatCompletion(content, finish_reason, completion_id, int(time.time()))
 
-async def iter_append_model_and_provider(response: AsyncIterResponse) -> IterResponse:
+async def iter_append_model_and_provider(response: AsyncIterResponse) -> AsyncIterResponse:
     last_provider = None
     async for chunk in response:
         last_provider = get_last_provider(True) if last_provider is None else last_provider
@@ -58,51 +59,50 @@ async def iter_append_model_and_provider(response: AsyncIterResponse) -> IterRes
         chunk.provider = last_provider.get("name")
         yield chunk
 
-class Client(BaseClient):
+class AsyncClient(BaseClient):
     def __init__(
         self,
+        provider: ProviderType = None,
+        image_provider: ImageProvider = None,
         **kwargs
     ):
         super().__init__(**kwargs)
         self.chat: Chat = Chat(self, provider)
         self.images: Images = Images(self, image_provider)
 
-async def cast_iter_async(iter):
-    for chunk in iter:
-        yield chunk
-
 def create_response(
     messages: Messages,
     model: str,
     provider: ProviderType = None,
     stream: bool = False,
-    response_format: dict = None,
+    proxy: str = None,
     max_tokens: int = None,
-    stop: Union[list[str], str] = None,
+    stop: list[str] = None,
     api_key: str = None,
     **kwargs
 ):
-    if hasattr(provider, "create_async_generator"):
+    has_async = isinstance(provider, type) and issubclass(provider, AsyncGeneratorProvider)
+    if has_async:
         create = provider.create_async_generator
     else:
         create = provider.create_completion
     response = create(
         model, messages, stream,
         **filter_none(
-            proxy=self.client.get_proxy(),
+            proxy=proxy,
             max_tokens=max_tokens,
             stop=stop,
-            api_key=self.client.api_key if api_key is None else api_key
+            api_key=api_key
         ),
         **kwargs
     )
-    if not hasattr(provider, "create_async_generator"):
+    if not has_async:
         response = cast_iter_async(response)
     return response
 
 class Completions():
-    def __init__(self, client: Client, provider: ProviderType = None):
-        self.client: Client = client
+    def __init__(self, client: AsyncClient, provider: ProviderType = None):
+        self.client: AsyncClient = client
         self.provider: ProviderType = provider
 
     def create(
@@ -111,6 +111,10 @@ class Completions():
         model: str,
         provider: ProviderType = None,
         stream: bool = False,
+        proxy: str = None,
+        max_tokens: int = None,
+        stop: Union[list[str], str] = None,
+        api_key: str = None,
         response_format: dict = None,
         ignored : list[str] = None,
         ignore_working: bool = False,
@@ -123,11 +127,18 @@ class Completions():
             stream,
             ignored,
             ignore_working,
-            ignore_stream,
-            **kwargs
+            ignore_stream
         )
         stop = [stop] if isinstance(stop, str) else stop
-        response = create_response(messages, model, provider, stream, **kwargs)
+        response = create_response(
+            messages, model,
+            provider, stream,
+            proxy=self.client.get_proxy() if proxy is None else proxy,
+            max_tokens=max_tokens,
+            stop=stop,
+            api_key=self.client.api_key if api_key is None else api_key,
+            **kwargs
+        )
         response = iter_response(response, stream, response_format, max_tokens, stop)
         response = iter_append_model_and_provider(response)
         return response if stream else anext(response)
@@ -135,44 +146,40 @@ class Completions():
 class Chat():
     completions: Completions
 
-    def __init__(self, client: Client, provider: ProviderType = None):
+    def __init__(self, client: AsyncClient, provider: ProviderType = None):
        self.completions = Completions(client, provider)
 
 async def iter_image_response(response: Iterator) -> Union[ImagesResponse, None]:
-    async for chunk in list(response):
+    async for chunk in response:
         if isinstance(chunk, ImageProviderResponse):
             return ImagesResponse([Image(image) for image in chunk.get_list()])
 
-def create_image(client: Client, provider: ProviderType, prompt: str, model: str = "", **kwargs) -> AsyncIterator:
+def create_image(client: AsyncClient, provider: ProviderType, prompt: str, model: str = "", **kwargs) -> AsyncIterator:
     prompt = f"create a image with: {prompt}"
+    if provider.__name__ == "You":
+        kwargs["chat_mode"] = "create"
     return provider.create_async_generator(
         model,
         [{"role": "user", "content": prompt}],
-        True,
+        stream=True,
         proxy=client.get_proxy(),
         **kwargs
     )
 
 class Images():
-    def __init__(self, client: Client, provider: ImageProvider = None):
-        self.client: Client = client
+    def __init__(self, client: AsyncClient, provider: ImageProvider = None):
+        self.client: AsyncClient = client
         self.provider: ImageProvider = provider
         self.models: ImageModels = ImageModels(client)
 
-    async def generate(self, prompt, model: str = None, **kwargs) -> ImagesResponse:
+    async def generate(self, prompt, model: str = "", **kwargs) -> ImagesResponse:
         provider = self.models.get(model, self.provider)
-        if isinstance(provider, type) and issubclass(provider, BaseProvider):
+        if isinstance(provider, type) and issubclass(provider, AsyncGeneratorProvider):
             response = create_image(self.client, provider, prompt, **kwargs)
         else:
-            try:
-                response = list(provider.create(prompt))
-            except (RateLimitError, MissingAuthError) as e:
-                # Fallback for default provider
-                if self.provider is None:
-                    response = create_image(self.client, self.models.you, prompt, model or "dall-e", **kwargs)
-                else:
-                    raise e
-        image = iter_image_response(response)
+            response = await provider.create_async(prompt)
+            return ImagesResponse([Image(image) for image in response.get_list()])
+        image = await iter_image_response(response)
         if image is None:
             raise NoImageResponseError()
         return image
@@ -180,7 +187,7 @@ class Images():
     async def create_variation(self, image: ImageType, model: str = None, **kwargs):
         provider = self.models.get(model, self.provider)
         result = None
-        if isinstance(provider, type) and issubclass(provider, BaseProvider):
+        if isinstance(provider, type) and issubclass(provider, AsyncGeneratorProvider):
             response = provider.create_async_generator(
                 "",
                 [{"role": "user", "content": "create a image like this"}],
@@ -189,10 +196,7 @@
                 proxy=self.client.get_proxy(),
                 **kwargs
             )
-            async for chunk in response:
-                if isinstance(chunk, ImageProviderResponse):
-                    result = ([chunk.images] if isinstance(chunk.images, str) else chunk.images)
-                    result = ImagesResponse([Image(image) for image in result])
+            result = await iter_image_response(response)
         if result is None:
             raise NoImageResponseError()
-        return result
+        return result
\ No newline at end of file
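With this rename the asynchronous client is exposed as `AsyncClient`, and `chat.completions.create` returns an awaitable when `stream=False`. A usage sketch, assuming the stubs follow the OpenAI response shape (`choices[0].message.content`) and that the model name resolves to a working provider:

    import asyncio
    from g4f.client import AsyncClient

    async def main():
        client = AsyncClient()
        response = await client.chat.completions.create(
            model="gpt-3.5-turbo",   # illustrative model name, not fixed by the diff
            messages=[{"role": "user", "content": "Hello"}],
        )
        print(response.choices[0].message.content)

    asyncio.run(main())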
diff --git a/g4f/client.py b/g4f/client/client.py
index 2c4fe788..8ce5d932 100644
--- a/g4f/client.py
+++ b/g4f/client/client.py
@@ -1,40 +1,19 @@
 from __future__ import annotations
 
-import re
-import os
 import time
 import random
 import string
 
+from ..typing import Union, Iterator, Messages, ImageType
+from ..providers.types import BaseProvider, ProviderType, FinishReason
+from ..image import ImageResponse as ImageProviderResponse
+from ..errors import NoImageResponseError
 from .stubs import ChatCompletion, ChatCompletionChunk, Image, ImagesResponse
-from .typing import Union, Iterator, Messages, ImageType
-from .providers.types import BaseProvider, ProviderType, FinishReason
-from .image import ImageResponse as ImageProviderResponse
-from .errors import NoImageResponseError, RateLimitError, MissingAuthError
-from . import get_model_and_provider, get_last_provider
-
-from .Provider.BingCreateImages import BingCreateImages
-from .Provider.needs_auth import Gemini, OpenaiChat
-from .Provider.You import You
-
-ImageProvider = Union[BaseProvider, object]
-Proxies = Union[dict, str]
-IterResponse = Iterator[Union[ChatCompletion, ChatCompletionChunk]]
-
-def read_json(text: str) -> dict:
-    """
-    Parses JSON code block from a string.
-
-    Args:
-        text (str): A string containing a JSON code block.
-
-    Returns:
-        dict: A dictionary parsed from the JSON code block.
-    """
-    match = re.search(r"```(json|)\n(?P<code>[\S\s]+?)\n```", text)
-    if match:
-        return match.group("code")
-    return text
+from .image_models import ImageModels
+from .types import IterResponse, ImageProvider
+from .types import Client as BaseClient
+from .service import get_model_and_provider, get_last_provider
+from .helper import find_stop, filter_json, filter_none
 
 def iter_response(
     response: iter[str],
@@ -53,20 +32,7 @@
             content += str(chunk)
             if max_tokens is not None and idx + 1 >= max_tokens:
                 finish_reason = "length"
-            first = -1
-            word = None
-            if stop is not None:
-                for word in list(stop):
-                    first = content.find(word)
-                    if first != -1:
-                        content = content[:first]
-                        break
-                if stream and first != -1:
-                    first = chunk.find(word)
-                    if first != -1:
-                        chunk = chunk[:first]
-                    else:
-                        first = 0
+            first, content, chunk = find_stop(stop, content, chunk if stream else None)
             if first != -1:
                 finish_reason = "stop"
             if stream:
@@ -79,7 +45,7 @@
         else:
             if response_format is not None and "type" in response_format:
                 if response_format["type"] == "json_object":
-                    content = read_json(content)
+                    content = filter_json(content)
         yield ChatCompletion(content, finish_reason, completion_id, int(time.time()))
 
 def iter_append_model_and_provider(response: IterResponse) -> IterResponse:
@@ -90,37 +56,17 @@
         chunk.provider = last_provider.get("name")
         yield chunk
 
-class Client():
-
+class Client(BaseClient):
     def __init__(
         self,
-        api_key: str = None,
-        proxies: Proxies = None,
         provider: ProviderType = None,
         image_provider: ImageProvider = None,
         **kwargs
     ) -> None:
-        self.api_key: str = api_key
-        self.proxies: Proxies = proxies
+        super().__init__(**kwargs)
         self.chat: Chat = Chat(self, provider)
         self.images: Images = Images(self, image_provider)
 
-    def get_proxy(self) -> Union[str, None]:
-        if isinstance(self.proxies, str):
-            return self.proxies
-        elif self.proxies is None:
-            return os.environ.get("G4F_PROXY")
-        elif "all" in self.proxies:
-            return self.proxies["all"]
-        elif "https" in self.proxies:
-            return self.proxies["https"]
-
-def filter_none(**kwargs):
-    for key in list(kwargs.keys()):
-        if kwargs[key] is None:
-            del kwargs[key]
-    return kwargs
-
 class Completions():
     def __init__(self, client: Client, provider: ProviderType = None):
         self.client: Client = client
@@ -132,6 +78,7 @@ class Completions():
         model: str,
         provider: ProviderType = None,
         stream: bool = False,
+        proxy: str = None,
         response_format: dict = None,
         max_tokens: int = None,
         stop: Union[list[str], str] = None,
@@ -148,13 +95,12 @@ class Completions():
             ignored,
             ignore_working,
             ignore_stream,
-            **kwargs
         )
         stop = [stop] if isinstance(stop, str) else stop
         response = provider.create_completion(
             model, messages, stream,
             **filter_none(
-                proxy=self.client.get_proxy(),
+                proxy=self.client.get_proxy() if proxy is None else proxy,
                 max_tokens=max_tokens,
                 stop=stop,
                 api_key=self.client.api_key if api_key is None else api_key
@@ -171,18 +117,6 @@
     def __init__(self, client: Client, provider: ProviderType = None):
         self.completions = Completions(client, provider)
 
-class ImageModels():
-    gemini = Gemini
-    openai = OpenaiChat
-    you = You
-
-    def __init__(self, client: Client) -> None:
-        self.client = client
-        self.default = BingCreateImages(proxy=self.client.get_proxy())
-
-    def get(self, name: str, default: ImageProvider = None) -> ImageProvider:
-        return getattr(self, name) if hasattr(self, name) else default or self.default
-
 def iter_image_response(response: Iterator) -> Union[ImagesResponse, None]:
     for chunk in list(response):
         if isinstance(chunk, ImageProviderResponse):
@@ -190,10 +124,12 @@
 
 def create_image(client: Client, provider: ProviderType, prompt: str, model: str = "", **kwargs) -> Iterator:
     prompt = f"create a image with: {prompt}"
+    if provider.__name__ == "You":
+        kwargs["chat_mode"] = "create"
     return provider.create_completion(
         model,
         [{"role": "user", "content": prompt}],
-        True,
+        stream=True,
         proxy=client.get_proxy(),
         **kwargs
     )
@@ -209,14 +145,7 @@
         if isinstance(provider, type) and issubclass(provider, BaseProvider):
             response = create_image(self.client, provider, prompt, **kwargs)
         else:
-            try:
-                response = list(provider.create(prompt))
-            except (RateLimitError, MissingAuthError) as e:
-                # Fallback for default provider
-                if self.provider is None:
-                    response = create_image(self.client, self.models.you, prompt, model or "dall-e", **kwargs)
-                else:
-                    raise e
+            response = list(provider.create(prompt))
         image = iter_image_response(response)
         if image is None:
             raise NoImageResponseError()
@@ -234,10 +163,7 @@
                 proxy=self.client.get_proxy(),
                 **kwargs
             )
-            for chunk in response:
-                if isinstance(chunk, ImageProviderResponse):
-                    result = ([chunk.images] if isinstance(chunk.images, str) else chunk.images)
-                    result = ImagesResponse([Image(image) for image in result])
+            result = iter_image_response(response)
         if result is None:
             raise NoImageResponseError()
         return result
\ No newline at end of file
diff --git a/g4f/client/helper.py b/g4f/client/helper.py
index 32aa9183..c502d478 100644
--- a/g4f/client/helper.py
+++ b/g4f/client/helper.py
@@ -1,6 +1,9 @@
+from __future__ import annotations
+
 import re
+from typing import Iterable, AsyncIterator
 
-def read_json(text: str) -> dict:
+def filter_json(text: str) -> str:
     """
     Parses JSON code block from a string.
 
@@ -15,7 +18,7 @@
         return match.group("code")
     return text
 
-def find_stop(stop, content: str, chunk: str):
+def find_stop(stop, content: str, chunk: str = None):
     first = -1
     word = None
     if stop is not None:
@@ -24,10 +27,21 @@
             if first != -1:
                 content = content[:first]
                 break
-        if stream and first != -1:
+        if chunk is not None and first != -1:
             first = chunk.find(word)
             if first != -1:
                 chunk = chunk[:first]
             else:
                 first = 0
     return first, content, chunk
+
+def filter_none(**kwargs) -> dict:
+    return {
+        key: value
+        for key, value in kwargs.items()
+        if value is not None
+    }
+
+async def cast_iter_async(iter: Iterable) -> AsyncIterator:
+    for chunk in iter:
+        yield chunk
\ No newline at end of file
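The two new helpers are small and self-contained, so they can be exercised standalone. A runnable demo, with the function bodies copied from the diff above:

    import asyncio
    from typing import Iterable, AsyncIterator

    def filter_none(**kwargs) -> dict:
        # Drop keyword arguments whose value is None.
        return {key: value for key, value in kwargs.items() if value is not None}

    async def cast_iter_async(iter: Iterable) -> AsyncIterator:
        # Wrap a synchronous iterable so it can be consumed with `async for`.
        for chunk in iter:
            yield chunk

    async def demo():
        print(filter_none(proxy=None, max_tokens=100))  # {'max_tokens': 100}
        async for chunk in cast_iter_async(["a", "b"]):
            print(chunk)

    asyncio.run(demo())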
diff --git a/g4f/client/image_models.py b/g4f/client/image_models.py
index 1fd2d0b5..db2ce09a 100644
--- a/g4f/client/image_models.py
+++ b/g4f/client/image_models.py
@@ -1,8 +1,10 @@
-from .Provider.BingCreateImages import BingCreateImages
-from .Provider.needs_auth import Gemini, OpenaiChat
-from ..Provider.You import You
+from __future__ import annotations
+
+from .types import Client, ImageProvider
 
-from .types import Client
+from ..Provider.BingCreateImages import BingCreateImages
+from ..Provider.needs_auth import Gemini, OpenaiChat
+from ..Provider.You import You
 
 class ImageModels():
     gemini = Gemini
diff --git a/g4f/client/service.py b/g4f/client/service.py
new file mode 100644
index 00000000..f3565f6d
--- /dev/null
+++ b/g4f/client/service.py
@@ -0,0 +1,114 @@
+from __future__ import annotations
+
+from typing import Union
+
+from .. import debug, version
+from ..errors import ProviderNotFoundError, ModelNotFoundError, ProviderNotWorkingError, StreamNotSupportedError
+from ..models import Model, ModelUtils
+from ..Provider import ProviderUtils
+from ..providers.types import BaseRetryProvider, ProviderType
+from ..providers.retry_provider import IterProvider
+
+def convert_to_provider(provider: str) -> ProviderType:
+    if " " in provider:
+        provider_list = [ProviderUtils.convert[p] for p in provider.split() if p in ProviderUtils.convert]
+        if not provider_list:
+            raise ProviderNotFoundError(f'Providers not found: {provider}')
+        provider = IterProvider(provider_list)
+    elif provider in ProviderUtils.convert:
+        provider = ProviderUtils.convert[provider]
+    elif provider:
+        raise ProviderNotFoundError(f'Provider not found: {provider}')
+    return provider
+
+def get_model_and_provider(model    : Union[Model, str],
+                           provider : Union[ProviderType, str, None],
+                           stream   : bool,
+                           ignored  : list[str] = None,
+                           ignore_working: bool = False,
+                           ignore_stream: bool = False) -> tuple[str, ProviderType]:
+    """
+    Retrieves the model and provider based on input parameters.
+
+    Args:
+        model (Union[Model, str]): The model to use, either as an object or a string identifier.
+        provider (Union[ProviderType, str, None]): The provider to use, either as an object, a string identifier, or None.
+        stream (bool): Indicates if the operation should be performed as a stream.
+        ignored (list[str], optional): List of provider names to be ignored.
+        ignore_working (bool, optional): If True, ignores the working status of the provider.
+        ignore_stream (bool, optional): If True, ignores the streaming capability of the provider.
+
+    Returns:
+        tuple[str, ProviderType]: A tuple containing the model name and the provider type.
+
+    Raises:
+        ProviderNotFoundError: If the provider is not found.
+        ModelNotFoundError: If the model is not found.
+        ProviderNotWorkingError: If the provider is not working.
+        StreamNotSupportedError: If streaming is not supported by the provider.
+    """
+    if debug.version_check:
+        debug.version_check = False
+        version.utils.check_version()
+
+    if isinstance(provider, str):
+        provider = convert_to_provider(provider)
+
+    if isinstance(model, str):
+        if model in ModelUtils.convert:
+            model = ModelUtils.convert[model]
+
+    if not provider:
+        if isinstance(model, str):
+            raise ModelNotFoundError(f'Model not found: {model}')
+        provider = model.best_provider
+
+    if not provider:
+        raise ProviderNotFoundError(f'No provider found for model: {model}')
+
+    if isinstance(model, Model):
+        model = model.name
+
+    if not ignore_working and not provider.working:
+        raise ProviderNotWorkingError(f'{provider.__name__} is not working')
+
+    if not ignore_working and isinstance(provider, BaseRetryProvider):
+        provider.providers = [p for p in provider.providers if p.working]
+
+    if ignored and isinstance(provider, BaseRetryProvider):
+        provider.providers = [p for p in provider.providers if p.__name__ not in ignored]
+
+    if not ignore_stream and not provider.supports_stream and stream:
+        raise StreamNotSupportedError(f'{provider.__name__} does not support "stream" argument')
+
+    if debug.logging:
+        if model:
+            print(f'Using {provider.__name__} provider and {model} model')
+        else:
+            print(f'Using {provider.__name__} provider')
+
+    debug.last_provider = provider
+    debug.last_model = model
+
+    return model, provider
+
+def get_last_provider(as_dict: bool = False) -> Union[ProviderType, dict[str, str]]:
+    """
+    Retrieves the last used provider.
+
+    Args:
+        as_dict (bool, optional): If True, returns the provider information as a dictionary.
+
+    Returns:
+        Union[ProviderType, dict[str, str]]: The last used provider, either as an object or a dictionary.
+    """
+    last = debug.last_provider
+    if isinstance(last, BaseRetryProvider):
+        last = last.last_provider
+    if last and as_dict:
+        return {
+            "name": last.__name__,
+            "url": last.url,
+            "model": debug.last_model,
+        }
+    return last
\ No newline at end of file
diff --git a/g4f/client/types.py b/g4f/client/types.py
index b21ff03a..100be432 100644
--- a/g4f/client/types.py
+++ b/g4f/client/types.py
@@ -1,9 +1,15 @@
+from __future__ import annotations
+
+import os
+
+from .stubs import ChatCompletion, ChatCompletionChunk
 from ..providers.types import BaseProvider, ProviderType, FinishReason
-from typing import Union, Iterator
+from typing import Union, Iterator, AsyncIterator
 
 ImageProvider = Union[BaseProvider, object]
 Proxies = Union[dict, str]
 IterResponse = Iterator[Union[ChatCompletion, ChatCompletionChunk]]
+AsyncIterResponse = AsyncIterator[Union[ChatCompletion, ChatCompletionChunk]]
 
 class ClientProxyMixin():
     def get_proxy(self) -> Union[str, None]:
@@ -21,9 +27,7 @@ class ClientProxyMixin():
         self,
         api_key: str = None,
         proxies: Proxies = None,
-        provider: ProviderType = None,
-        image_provider: ImageProvider = None,
         **kwargs
     ) -> None:
         self.api_key: str = api_key
-        self.proxies: Proxies = proxies
+        self.proxies: Proxies = proxies
\ No newline at end of file
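Proxy handling is now inherited from `ClientProxyMixin` by both clients. Judging from the `get_proxy` body removed from client.py above, a string proxy wins, then the `G4F_PROXY` environment variable, then the "all" and "https" keys of a proxies dict:

    from g4f.client import Client

    client = Client(proxies={"all": "http://127.0.0.1:8080"})
    print(client.get_proxy())   # -> "http://127.0.0.1:8080"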