diff options
author | H Lohaus <hlohaus@users.noreply.github.com> | 2024-04-07 11:27:26 +0200 |
---|---|---|
committer | GitHub <noreply@github.com> | 2024-04-07 11:27:26 +0200 |
commit | d327afc60620913f5d2b0a9985b03a7934468ad4 (patch) | |
tree | 395de9142af3e6b9c0e5e3968ee7f8234b8b25e2 /g4f/Provider/Local.py | |
parent | Update Gemini.py (diff) | |
parent | Update provider.py (diff) | |
download | gpt4free-d327afc60620913f5d2b0a9985b03a7934468ad4.tar gpt4free-d327afc60620913f5d2b0a9985b03a7934468ad4.tar.gz gpt4free-d327afc60620913f5d2b0a9985b03a7934468ad4.tar.bz2 gpt4free-d327afc60620913f5d2b0a9985b03a7934468ad4.tar.lz gpt4free-d327afc60620913f5d2b0a9985b03a7934468ad4.tar.xz gpt4free-d327afc60620913f5d2b0a9985b03a7934468ad4.tar.zst gpt4free-d327afc60620913f5d2b0a9985b03a7934468ad4.zip |
Diffstat (limited to '')
-rw-r--r-- | g4f/Provider/Local.py | 42 |
1 file changed, 42 insertions, 0 deletions
from __future__ import annotations

from ..locals.models import get_models
try:
    from ..locals.provider import LocalProvider
    has_requirements = True
except ModuleNotFoundError:
    # The optional local backend ("gpt4all") is not installed; remember that
    # so create_completion can raise a helpful error instead of a NameError.
    has_requirements = False

from ..typing import Messages, CreateResult
from ..providers.base_provider import AbstractProvider, ProviderModelMixin
from ..errors import MissingRequirementsError

class Local(AbstractProvider, ProviderModelMixin):
    """Provider that serves completions from locally installed models.

    Delegates the actual generation to ``LocalProvider`` (the "gpt4all"
    backend); raises ``MissingRequirementsError`` when that optional
    dependency is absent.
    """
    working = True
    supports_message_history = True
    supports_system_message = True
    supports_stream = True

    @classmethod
    def get_models(cls):
        """Return the cached list of local model names, discovering them once.

        On first call, populates ``cls.models`` from the local model registry
        and records the first entry as ``cls.default_model``.
        """
        if cls.models:
            return cls.models
        cls.models = list(get_models())
        # The first discovered model doubles as the default.
        cls.default_model = cls.models[0]
        return cls.models

    @classmethod
    def create_completion(
        cls,
        model: str,
        messages: Messages,
        stream: bool,
        **kwargs
    ) -> CreateResult:
        """Run a completion on a local model.

        Args:
            model: Requested model name; resolved via ``cls.get_model``.
            messages: Conversation history to send to the backend.
            stream: Whether to stream the response.
            **kwargs: Extra options forwarded to ``LocalProvider``.

        Raises:
            MissingRequirementsError: If the "gpt4all" package is missing.
        """
        if not has_requirements:
            raise MissingRequirementsError('Install "gpt4all" package | pip install -U g4f[local]')
        resolved_model = cls.get_model(model)
        return LocalProvider.create_completion(
            resolved_model,
            messages,
            stream,
            **kwargs
        )
\ No newline at end of file |