path: root/g4f/Provider/needs_auth/HuggingChat.py
author     Heiner Lohaus <hlohaus@users.noreply.github.com>  2024-01-22 03:38:11 +0100
committer  Heiner Lohaus <hlohaus@users.noreply.github.com>  2024-01-22 03:38:11 +0100
commit     38dbe4b8e5ca7f9bc0508e1ba1bf878fd6d8c19c
tree       6cdb82f14fcb04e9f6b339f5cab13e73a6f75d41  /g4f/Provider/needs_auth/HuggingChat.py
parent     Fix error in copilot
Diffstat (limited to 'g4f/Provider/needs_auth/HuggingChat.py')
-rw-r--r--  g4f/Provider/needs_auth/HuggingChat.py | 71
1 file changed, 0 insertions, 71 deletions
diff --git a/g4f/Provider/needs_auth/HuggingChat.py b/g4f/Provider/needs_auth/HuggingChat.py
deleted file mode 100644
index e4fa237d..00000000
--- a/g4f/Provider/needs_auth/HuggingChat.py
+++ /dev/null
@@ -1,71 +0,0 @@
-from __future__ import annotations
-
-import json, uuid
-
-from aiohttp import ClientSession
-
-from ...typing import AsyncResult, Messages
-from ..base_provider import AsyncGeneratorProvider
-from ..helper import format_prompt, get_cookies
-
-map = {
- "openchat/openchat_3.5": "openchat/openchat-3.5-1210",
-}
-
-class HuggingChat(AsyncGeneratorProvider):
- url = "https://huggingface.co/chat"
- working = True
- model = "meta-llama/Llama-2-70b-chat-hf"
-
- @classmethod
- async def create_async_generator(
- cls,
- model: str,
- messages: Messages,
- stream: bool = True,
- proxy: str = None,
- web_search: bool = False,
- cookies: dict = None,
- **kwargs
- ) -> AsyncResult:
- if not model:
- model = cls.model
- elif model in map:
- model = map[model]
- if not cookies:
- cookies = get_cookies(".huggingface.co")
-
- headers = {
- 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/111.0.0.0 Safari/537.36',
- }
- async with ClientSession(
- cookies=cookies,
- headers=headers
- ) as session:
- async with session.post(f"{cls.url}/conversation", json={"model": model}, proxy=proxy) as response:
- conversation_id = (await response.json())["conversationId"]
-
- send = {
- "id": str(uuid.uuid4()),
- "inputs": format_prompt(messages),
- "is_retry": False,
- "response_id": str(uuid.uuid4()),
- "web_search": web_search
- }
- async with session.post(f"{cls.url}/conversation/{conversation_id}", json=send, proxy=proxy) as response:
- first_token = True
- async for line in response.content:
- line = json.loads(line[:-1])
- if "type" not in line:
- raise RuntimeError(f"Response: {line}")
- elif line["type"] == "stream":
- token = line["token"]
- if first_token:
- token = token.lstrip()
- first_token = False
- yield token
- elif line["type"] == "finalAnswer":
- break
-
- async with session.delete(f"{cls.url}/conversation/{conversation_id}", proxy=proxy) as response:
- response.raise_for_status()
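
For reference, the file removed above wrapped HuggingChat's three-step conversation flow: create a conversation for a model, post the prompt and read the streamed tokens, then delete the conversation. Below is a minimal standalone sketch of that flow, assuming the same endpoints and payload fields as the deleted code; the ask() helper and BASE_URL name are introduced here for illustration, and valid .huggingface.co session cookies are still required.

import asyncio
import json
import uuid

from aiohttp import ClientSession

BASE_URL = "https://huggingface.co/chat"  # same base URL as the deleted provider

async def ask(prompt: str, model: str, cookies: dict) -> str:
    """Hypothetical helper mirroring the removed provider's request sequence."""
    headers = {"User-Agent": "Mozilla/5.0"}
    async with ClientSession(cookies=cookies, headers=headers) as session:
        # 1. Create a conversation bound to the requested model.
        async with session.post(f"{BASE_URL}/conversation", json={"model": model}) as response:
            conversation_id = (await response.json())["conversationId"]

        # 2. Send the prompt and collect the streamed tokens.
        payload = {
            "id": str(uuid.uuid4()),
            "inputs": prompt,
            "is_retry": False,
            "response_id": str(uuid.uuid4()),
            "web_search": False,
        }
        tokens = []
        async with session.post(
            f"{BASE_URL}/conversation/{conversation_id}", json=payload
        ) as response:
            async for line in response.content:
                if not line.strip():
                    continue
                event = json.loads(line[:-1])  # drop the trailing newline before decoding
                if event.get("type") == "stream":
                    tokens.append(event["token"])
                elif event.get("type") == "finalAnswer":
                    break

        # 3. Delete the conversation so it does not linger in the account.
        async with session.delete(f"{BASE_URL}/conversation/{conversation_id}") as response:
            response.raise_for_status()

    return "".join(tokens).lstrip()

A call such as asyncio.run(ask("Hello", "meta-llama/Llama-2-70b-chat-hf", cookies)) would exercise the same sequence the provider used; the deleted code obtained the cookies dict via get_cookies(".huggingface.co").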