author     H Lohaus <hlohaus@users.noreply.github.com>   2024-01-02 01:10:31 +0100
committer  GitHub <noreply@github.com>                   2024-01-02 01:10:31 +0100
commit     b1b8ed40a4e8c7c3490b1c6b7cf6b55d0776f366 (patch)
tree       6cd09fb2eb4c144e28a82759a2a9a2fa7f30d311 /g4f/models.py
parent     Merge pull request #1414 from hlohaus/lia (diff)
parent     Fix markdown replace (diff)
Diffstat (limited to 'g4f/models.py')
-rw-r--r--   g4f/models.py   27
1 file changed, 19 insertions(+), 8 deletions(-)
diff --git a/g4f/models.py b/g4f/models.py
index 9a4539c5..264cd40e 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -1,7 +1,6 @@
from __future__ import annotations
from dataclasses import dataclass
-from .typing import Union
-from .Provider import BaseProvider, RetryProvider
+from .Provider import RetryProvider, ProviderType
from .Provider import (
    Chatgpt4Online,
    ChatgptDemoAi,
@@ -36,7 +35,7 @@ from .Provider import (
class Model:
    name: str
    base_provider: str
-    best_provider: Union[type[BaseProvider], RetryProvider] = None
+    best_provider: ProviderType = None

    @staticmethod
    def __all__() -> list[str]:
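
Note: the new ProviderType annotation stands in for the union that the removed line spelled out inline. A minimal sketch of the alias it presumably expands to; the module that actually defines it is not shown in this diff, so the import path below is an assumption:

# Sketch only: the shape is taken from the removed annotation above;
# the defining module and import path are assumptions.
from typing import Union

from g4f.Provider import BaseProvider, RetryProvider  # assumed import path

ProviderType = Union[type[BaseProvider], RetryProvider]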
@@ -101,28 +100,39 @@ gpt_4_turbo = Model(
llama2_7b = Model(
    name = "meta-llama/Llama-2-7b-chat-hf",
    base_provider = 'huggingface',
-    best_provider = RetryProvider([Llama2, DeepInfra]))
+    best_provider = RetryProvider([Llama2, DeepInfra])
+)

llama2_13b = Model(
    name = "meta-llama/Llama-2-13b-chat-hf",
    base_provider = 'huggingface',
-    best_provider = RetryProvider([Llama2, DeepInfra]))
+    best_provider = RetryProvider([Llama2, DeepInfra])
+)

llama2_70b = Model(
    name = "meta-llama/Llama-2-70b-chat-hf",
    base_provider = "huggingface",
-    best_provider = RetryProvider([Llama2, DeepInfra, HuggingChat]))
+    best_provider = RetryProvider([Llama2, DeepInfra, HuggingChat])
+)

# Mistral
mixtral_8x7b = Model(
    name = "mistralai/Mixtral-8x7B-Instruct-v0.1",
    base_provider = "huggingface",
-    best_provider = HuggingChat)
+    best_provider = RetryProvider([DeepInfra, HuggingChat])
+)

mistral_7b = Model(
    name = "mistralai/Mistral-7B-Instruct-v0.1",
    base_provider = "huggingface",
-    best_provider = HuggingChat)
+    best_provider = RetryProvider([DeepInfra, HuggingChat])
+)
+
+openchat_35 = Model(
+    name = "openchat/openchat_3.5",
+    base_provider = "huggingface",
+    best_provider = RetryProvider([DeepInfra, HuggingChat])
+)

# Bard
palm = Model(
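
The hunk above wraps the Mixtral and Mistral providers in RetryProvider, so a request can fall back to another provider in the list when one fails. A minimal, self-contained sketch of that fallback idea; this is not g4f's actual RetryProvider (which may pick providers in a different order), and the backend functions are hypothetical stand-ins:

# Conceptual sketch of provider fallback; not g4f's real RetryProvider.
from typing import Callable, List


class FallbackProvider:
    """Try each provider in order and return the first successful answer."""

    def __init__(self, providers: List[Callable[[str], str]]) -> None:
        self.providers = providers

    def create_completion(self, prompt: str) -> str:
        errors: List[Exception] = []
        for provider in self.providers:
            try:
                return provider(prompt)
            except Exception as exc:  # a real client would narrow this
                errors.append(exc)
        raise RuntimeError(f"all providers failed: {errors}")


# Hypothetical stand-ins for the DeepInfra / HuggingChat backends:
def deep_infra(prompt: str) -> str:
    raise ConnectionError("DeepInfra unreachable")

def hugging_chat(prompt: str) -> str:
    return f"HuggingChat answer to: {prompt!r}"


print(FallbackProvider([deep_infra, hugging_chat]).create_completion("hi"))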
@@ -313,6 +323,7 @@ class ModelUtils:
        # Mistral
        'mixtral-8x7b': mixtral_8x7b,
        'mistral-7b': mistral_7b,
+        'openchat_3.5': openchat_35,

        # Bard
        'palm2' : palm,
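
With the new ModelUtils entry, the string 'openchat_3.5' should resolve to the openchat_35 model wherever g4f converts model names. A short usage sketch, assuming the top-level ChatCompletion.create interface described in the project README:

# Usage sketch: assumes g4f.ChatCompletion.create accepts the aliases
# registered in ModelUtils.convert, as the project README describes.
import g4f

response = g4f.ChatCompletion.create(
    model="openchat_3.5",  # alias added in this commit
    messages=[{"role": "user", "content": "Say hello in one sentence."}],
)
print(response)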