author     Tekky <98614666+xtekky@users.noreply.github.com>  2023-08-28 22:08:23 +0200
committer  GitHub <noreply@github.com>  2023-08-28 22:08:23 +0200
commit     7e687b3d178c00a27d7e5ae2613fe88ee7844639 (patch)
tree       4034e8ae9fc7ca9af295f04358bb00516b464e0b /g4f/Provider/Hugchat.py
parent     Merge pull request #851 from Luneye/patch-1 (diff)
parent     Merge branch 'main' into hugging (diff)
Diffstat (limited to 'g4f/Provider/Hugchat.py')
 -rw-r--r--  g4f/Provider/Hugchat.py | 65 ----------------------------------
 1 file changed, 0 insertions(+), 65 deletions(-)
diff --git a/g4f/Provider/Hugchat.py b/g4f/Provider/Hugchat.py
deleted file mode 100644
index 80062af1..00000000
--- a/g4f/Provider/Hugchat.py
+++ /dev/null
@@ -1,65 +0,0 @@
-has_module = True
-try:
-    from hugchat.hugchat import ChatBot
-except ImportError:
-    has_module = False
-
-from .base_provider import BaseProvider, get_cookies
-from g4f.typing import CreateResult
-
-class Hugchat(BaseProvider):
-    url = "https://huggingface.co/chat/"
-    needs_auth = True
-    working = has_module
-    llms = ['OpenAssistant/oasst-sft-6-llama-30b-xor', 'meta-llama/Llama-2-70b-chat-hf']
-
-    @classmethod
-    def create_completion(
-        cls,
-        model: str,
-        messages: list[dict[str, str]],
-        stream: bool = False,
-        proxy: str = None,
-        cookies: dict = None, **kwargs) -> CreateResult:
-
-        bot = ChatBot(
-            cookies=cookies or get_cookies(".huggingface.co"))
-
-        if proxy:
-            proxy = proxy if "://" in proxy else f"http://{proxy}"
-            bot.session.proxies = {"http": proxy, "https": proxy}
-
-        if model:
-            try:
-                if not isinstance(model, int):
-                    model = cls.llms.index(model)
-                bot.switch_llm(model)
-            except Exception as e:
-                raise RuntimeError(f"Model is not supported: {model}") from e
-
-        if len(messages) > 1:
-            formatted = "\n".join(
-                f"{message['role']}: {message['content']}" for message in messages
-            )
-            prompt = f"{formatted}\nAssistant:"
-        else:
-            prompt = messages[-1]["content"]
-
-        try:
-            yield bot.chat(prompt, **kwargs)
-        finally:
-            # Always drop the server-side conversation, even if chat() raises.
-            bot.delete_conversation(bot.current_conversation)
-            bot.current_conversation = ""
-
-    @classmethod
-    @property
-    def params(cls):
-        params = [
-            ("model", "str"),
-            ("messages", "list[dict[str, str]]"),
-            ("stream", "bool"),
-            ("proxy", "str"),
-        ]
-        param = ", ".join([": ".join(p) for p in params])
-        return f"g4f.provider.{cls.__name__} supports: ({param})"