summaryrefslogtreecommitdiffstats
path: root/g4f/Provider/Hugchat.py
diff options
context:
space:
mode:
authorHeiner Lohaus <heiner.lohaus@netformic.com>2023-08-25 06:41:32 +0200
committerHeiner Lohaus <heiner.lohaus@netformic.com>2023-08-25 06:41:32 +0200
commit126496d3cacd06a4fa8cbb4e5bde417ce6bb5b4a (patch)
tree00f989c070b0c001860c39507450aaf30e4302b1 /g4f/Provider/Hugchat.py
parentAdd create_async method (diff)
downloadgpt4free-126496d3cacd06a4fa8cbb4e5bde417ce6bb5b4a.tar
gpt4free-126496d3cacd06a4fa8cbb4e5bde417ce6bb5b4a.tar.gz
gpt4free-126496d3cacd06a4fa8cbb4e5bde417ce6bb5b4a.tar.bz2
gpt4free-126496d3cacd06a4fa8cbb4e5bde417ce6bb5b4a.tar.lz
gpt4free-126496d3cacd06a4fa8cbb4e5bde417ce6bb5b4a.tar.xz
gpt4free-126496d3cacd06a4fa8cbb4e5bde417ce6bb5b4a.tar.zst
gpt4free-126496d3cacd06a4fa8cbb4e5bde417ce6bb5b4a.zip
Diffstat (limited to '')
-rw-r--r--g4f/Provider/Hugchat.py67
1 files changed, 67 insertions, 0 deletions
diff --git a/g4f/Provider/Hugchat.py b/g4f/Provider/Hugchat.py
new file mode 100644
index 00000000..cedf8402
--- /dev/null
+++ b/g4f/Provider/Hugchat.py
@@ -0,0 +1,67 @@
# Optional-dependency guard: the Hugchat provider only works when the
# third-party `hugchat` package is importable; `Hugchat.working` mirrors this.
# BUG FIX: the original initialized has_module to False and only ever
# re-assigned False in the except branch, so it could never become True
# and the provider was permanently marked as not working.
has_module = True
try:
    from hugchat.hugchat import ChatBot
except ImportError:
    has_module = False
+
+from .base_provider import BaseProvider, get_cookies
+from g4f.typing import CreateResult
+
class Hugchat(BaseProvider):
    """Provider backed by HuggingChat (https://huggingface.co/chat/).

    Requires the optional ``hugchat`` package (see the module-level import
    guard) and authenticated ``huggingface.co`` cookies.
    """
    url = "https://huggingface.co/chat/"
    needs_auth = True
    working = has_module
    # Index in this list is the integer llm id passed to bot.switch_llm().
    llms = ['OpenAssistant/oasst-sft-6-llama-30b-xor', 'meta-llama/Llama-2-70b-chat-hf']

    @classmethod
    def create_completion(
        cls,
        model: str,
        messages: list[dict[str, str]],
        stream: bool = False,
        proxy: str = None,
        cookies: str = None,
        **kwargs
    ) -> CreateResult:
        """Yield a single completion for *messages* from HuggingChat.

        :param model: llm name (looked up in ``cls.llms``) or an integer llm id;
                      falsy to keep the bot's default model.
        :param messages: chat history as ``[{"role": ..., "content": ...}, ...]``.
        :param stream: accepted for interface compatibility; the reply is
                       yielded as one chunk regardless.
        :param proxy: optional proxy; a bare ``host:port`` is promoted to http.
        :param cookies: huggingface.co cookies; fetched from the local browser
                        when omitted.
        :raises RuntimeError: if *model* is not a supported llm.
        """
        # BUG FIX: cookies were previously fetched via a default argument
        # (get_cookies(...) evaluated once at class-definition/import time).
        # Fetch lazily at call time instead.
        if cookies is None:
            cookies = get_cookies(".huggingface.co")
        bot = ChatBot(cookies=cookies)

        if proxy:
            # Promote a bare "host:port" to an http:// URL.
            if "://" not in proxy:
                proxy = f"http://{proxy}"
            bot.session.proxies = {"http": proxy, "https": proxy}

        if model:
            try:
                # Names are translated to the integer id hugchat expects.
                if not isinstance(model, int):
                    model = cls.llms.index(model)
                bot.switch_llm(model)
            # BUG FIX: was a bare `except:` (swallowed everything, including
            # KeyboardInterrupt); narrow to the lookup/switch failures and
            # chain the cause.
            except (ValueError, IndexError) as err:
                raise RuntimeError(f"Model is not supported: {model}") from err

        if len(messages) > 1:
            # Flatten the full history into a single prompt.
            formatted = "\n".join(
                f"{message['role']}: {message['content']}" for message in messages
            )
            prompt = f"{formatted}\nAssistant:"
        else:
            # BUG FIX: messages.pop() mutated the caller's list; read instead.
            prompt = messages[-1]["content"]

        try:
            yield bot.chat(prompt, **kwargs)
        finally:
            # Best-effort cleanup of the server-side conversation.
            bot.delete_conversation(bot.current_conversation)
            bot.current_conversation = ""

    @classmethod
    @property
    def params(cls):
        """Human-readable summary of the supported keyword parameters."""
        params = [
            ("model", "str"),
            ("messages", "list[dict[str, str]]"),
            ("stream", "bool"),
            ("proxy", "str"),
        ]
        param = ", ".join(": ".join(p) for p in params)
        return f"g4f.provider.{cls.__name__} supports: ({param})"