path: root/g4f/local/__init__.py
author     Heiner Lohaus <hlohaus@users.noreply.github.com>    2024-04-07 10:36:13 +0200
committer  Heiner Lohaus <hlohaus@users.noreply.github.com>    2024-04-07 10:36:13 +0200
commit     b35dfcd1b01c575b65e0299ef71d285dc8f41459 (patch)
tree       cfe5f4a390af62fafefd1d27ca2c82a23cdcab49 /g4f/local/__init__.py
parent     Update Gemini.py (diff)
Diffstat (limited to 'g4f/local/__init__.py')
-rw-r--r--  g4f/local/__init__.py  19
1 file changed, 9 insertions, 10 deletions
diff --git a/g4f/local/__init__.py b/g4f/local/__init__.py
index c9d3d74a..d13c8c1d 100644
--- a/g4f/local/__init__.py
+++ b/g4f/local/__init__.py
@@ -1,17 +1,17 @@
-from ..typing import Union, Iterator, Messages
-from ..stubs import ChatCompletion, ChatCompletionChunk
-from ._engine import LocalProvider
-from ._models import models
-from ..client import iter_response, filter_none, IterResponse
+from ..typing import Union, Messages
+from ..locals.provider import LocalProvider
+from ..locals.models import get_models
+from ..client.client import iter_response, filter_none
+from ..client.types import IterResponse
class LocalClient():
def __init__(self, **kwargs) -> None:
self.chat: Chat = Chat(self)
-
+
@staticmethod
def list_models():
- return list(models.keys())
-
+ return list(get_models())
+
class Completions():
def __init__(self, client: LocalClient):
self.client: LocalClient = client
@@ -25,8 +25,7 @@ class Completions():
max_tokens: int = None,
stop: Union[list[str], str] = None,
**kwargs
- ) -> Union[ChatCompletion, Iterator[ChatCompletionChunk]]:
-
+ ) -> IterResponse:
stop = [stop] if isinstance(stop, str) else stop
response = LocalProvider.create_completion(
model, messages, stream,
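
For context, a minimal usage sketch of the refactored module after this patch. It assumes the Chat wrapper exposes a completions attribute wired to the Completions class shown above (mirroring g4f's regular client layout); the model name and response handling below are illustrative assumptions, not taken from the diff.

from g4f.local import LocalClient

# list_models() is now backed by get_models() from g4f.locals.models
print(LocalClient.list_models())

client = LocalClient()
# Assumed call path: client.chat.completions.create(...) delegating to
# Completions.create(), which after this patch is annotated as IterResponse.
response = client.chat.completions.create(
    model="mistral-7b",  # illustrative local model name, not from the diff
    messages=[{"role": "user", "content": "Hello"}],
    stream=False,
)
print(response.choices[0].message.content)  # assumed ChatCompletion-like shape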