Diffstat
-rw-r--r--  g4f/Provider/Chatgpt4Online.py |  4
-rw-r--r--  g4f/models.py                  | 30
2 files changed, 27 insertions, 7 deletions
diff --git a/g4f/Provider/Chatgpt4Online.py b/g4f/Provider/Chatgpt4Online.py
index c2c66fd8..4135ec9d 100644
--- a/g4f/Provider/Chatgpt4Online.py
+++ b/g4f/Provider/Chatgpt4Online.py
@@ -12,7 +12,7 @@ class Chatgpt4Online(AsyncGeneratorProvider):
     url = "https://chatgpt4online.org"
     supports_message_history = True
     supports_gpt_35_turbo = True
-    working = True
+    working = False # cloudfare block !
     _wpnonce = None
 
     @classmethod
@@ -73,4 +73,4 @@ class Chatgpt4Online(AsyncGeneratorProvider):
                 elif line["type"] == "live":
                     yield line["data"]
                 elif line["type"] == "end":
-                    break
\ No newline at end of file
+                    break
diff --git a/g4f/models.py b/g4f/models.py
index e0d6121d..e58ccef2 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -120,10 +120,10 @@ llama2_70b = Model(
 codellama_34b_instruct = Model(
     name = "codellama/CodeLlama-34b-Instruct-hf",
     base_provider = "huggingface",
-    best_provider = RetryProvider([HuggingChat, PerplexityLabs])
+    best_provider = RetryProvider([HuggingChat, PerplexityLabs, DeepInfra])
 )
 
-# Mistal
+# Mistral
 mixtral_8x7b = Model(
     name = "mistralai/Mixtral-8x7B-Instruct-v0.1",
     base_provider = "huggingface",
@@ -136,14 +136,31 @@ mistral_7b = Model(
     best_provider = RetryProvider([DeepInfra, HuggingChat, PerplexityLabs])
 )
 
-# Dolphin
+# Misc models
 dolphin_mixtral_8x7b = Model(
     name = "cognitivecomputations/dolphin-2.6-mixtral-8x7b",
     base_provider = "huggingface",
     best_provider = DeepInfra
 )
 
-# OpenChat
+lzlv_70b = Model(
+    name = "lizpreciatior/lzlv_70b_fp16_hf",
+    base_provider = "huggingface",
+    best_provider = DeepInfra
+)
+
+airoboros_70b = Model(
+    name = "deepinfra/airoboros-70b",
+    base_provider = "huggingface",
+    best_provider = DeepInfra
+)
+
+airoboros_l2_70b = Model(
+    name = "jondurbin/airoboros-l2-70b-gpt4-1.4.1",
+    base_provider = "huggingface",
+    best_provider = DeepInfra
+)
+
 openchat_35 = Model(
     name = "openchat/openchat_3.5",
     base_provider = "huggingface",
@@ -243,6 +260,9 @@ class ModelUtils:
         'mixtral-8x7b': mixtral_8x7b,
         'mistral-7b': mistral_7b,
         'dolphin-mixtral-8x7b': dolphin_mixtral_8x7b,
+        'lzlv-70b': lzlv_70b,
+        'airoboros-70b': airoboros_70b,
+        'airoboros-l2-70b': airoboros_l2_70b,
         'openchat_3.5': openchat_35,
         'gemini-pro': gemini_pro,
         'bard': bard,
@@ -250,4 +270,4 @@ class ModelUtils:
         'pi': pi
     }
 
-_all_models = list(ModelUtils.convert.keys())
+_all_models = list(ModelUtils.convert.keys())
\ No newline at end of file
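
For context, the three aliases added to ModelUtils.convert resolve to DeepInfra-hosted model definitions, and the working flag is what g4f consults before routing requests to a provider such as Chatgpt4Online. A minimal sketch of how a caller would observe these changes, assuming the g4f.ChatCompletion interface of this release (prompt text and printed values are illustrative):

    import g4f
    from g4f.models import ModelUtils
    from g4f.Provider import Chatgpt4Online

    # The aliases registered in this commit map to DeepInfra-backed models.
    print(ModelUtils.convert["lzlv-70b"].name)       # lizpreciatior/lzlv_70b_fp16_hf
    print(ModelUtils.convert["airoboros-70b"].name)  # deepinfra/airoboros-70b

    # Chatgpt4Online is now flagged as not working (Cloudflare block).
    print(Chatgpt4Online.working)                    # False

    # Hypothetical request using one of the newly registered aliases.
    response = g4f.ChatCompletion.create(
        model="airoboros-l2-70b",
        messages=[{"role": "user", "content": "Say hello."}],
    )
    print(response)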