path: root/g4f/Provider/HuggingChat.py
author     Tekky <98614666+xtekky@users.noreply.github.com>    2023-09-07 19:45:04 +0200
committer  GitHub <noreply@github.com>                         2023-09-07 19:45:04 +0200
commit     7ca1a59d95b52f94f674e8f981eab910b2f03518 (patch)
tree       ca506c3c152f3906a5b727a4cc6ebba1fd59d335 /g4f/Provider/HuggingChat.py
parent     Merge pull request #869 from ahobsonsayers/add-console-script (diff)
parent     Fix imports in Bing (diff)
Diffstat (limited to '')
-rw-r--r--  g4f/Provider/HuggingChat.py  52
1 file changed, 24 insertions, 28 deletions
diff --git a/g4f/Provider/HuggingChat.py b/g4f/Provider/HuggingChat.py
index 7b62b342..11310a69 100644
--- a/g4f/Provider/HuggingChat.py
+++ b/g4f/Provider/HuggingChat.py
@@ -24,9 +24,9 @@ class HuggingChat(AsyncGeneratorProvider):
         cookies: dict = None,
         **kwargs
     ) -> AsyncGenerator:
+        model = model if model else cls.model
         if not cookies:
             cookies = get_cookies(".huggingface.co")
-        model = model if model else cls.model
         if proxy and "://" not in proxy:
             proxy = f"http://{proxy}"
@@ -62,36 +62,32 @@ class HuggingChat(AsyncGeneratorProvider):
                     "web_search_id": ""
                 }
             }
-            start = "data:"
-            first = True
             async with session.post(f"https://huggingface.co/chat/conversation/{conversation_id}", proxy=proxy, json=send) as response:
-                async for line in response.content:
-                    line = line.decode("utf-8")
-                    if not line:
-                        continue
-                    if not stream:
-                        try:
-                            data = json.loads(line)
-                        except json.decoder.JSONDecodeError:
-                            raise RuntimeError(f"No json: {line}")
-                        if "error" in data:
-                            raise RuntimeError(data["error"])
-                        elif isinstance(data, list):
-                            yield data[0]["generated_text"]
-                        else:
-                            raise RuntimeError(f"Response: {line}")
-                    elif line.startswith(start):
-                        line = json.loads(line[len(start):-1])
+                if not stream:
+                    data = await response.json()
+                    if "error" in data:
+                        raise RuntimeError(data["error"])
+                    elif isinstance(data, list):
+                        yield data[0]["generated_text"]
+                    else:
+                        raise RuntimeError(f"Response: {data}")
+                else:
+                    start = "data:"
+                    first = True
+                    async for line in response.content:
+                        line = line.decode("utf-8")
                         if not line:
                             continue
-                        if "token" not in line:
-                            raise RuntimeError(f"Response: {line}")
-                        if not line["token"]["special"]:
-                            if first:
-                                yield line["token"]["text"].lstrip()
-                                first = False
-                            else:
-                                yield line["token"]["text"]
+                        if line.startswith(start):
+                            line = json.loads(line[len(start):-1])
+                            if "token" not in line:
+                                raise RuntimeError(f"Response: {line}")
+                            if not line["token"]["special"]:
+                                if first:
+                                    yield line["token"]["text"].lstrip()
+                                    first = False
+                                else:
+                                    yield line["token"]["text"]
         async with session.delete(f"https://huggingface.co/chat/conversation/{conversation_id}", proxy=proxy) as response:
             response.raise_for_status()
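
For reference, below is a minimal standalone sketch of the response handling this commit introduces, assuming an aiohttp ClientSession and the same HuggingChat conversation endpoint; the helper name read_chat_response and its parameters are illustrative, not part of the provider.

# Sketch only: mirrors the post-commit branching between non-streaming JSON
# and the "data:" server-sent-event stream; conversation_id, send payload and
# session are assumed to be prepared by the caller as in the provider.
import json
from typing import AsyncGenerator
from aiohttp import ClientSession

async def read_chat_response(
    session: ClientSession,
    conversation_id: str,
    send: dict,
    stream: bool = True,
    proxy: str = None,
) -> AsyncGenerator[str, None]:
    url = f"https://huggingface.co/chat/conversation/{conversation_id}"
    async with session.post(url, proxy=proxy, json=send) as response:
        if not stream:
            # Non-streaming: the whole reply arrives as a single JSON document.
            data = await response.json()
            if "error" in data:
                raise RuntimeError(data["error"])
            elif isinstance(data, list):
                yield data[0]["generated_text"]
            else:
                raise RuntimeError(f"Response: {data}")
        else:
            # Streaming: each line is a server-sent event of the form "data:{...}".
            start = "data:"
            first = True
            async for line in response.content:
                line = line.decode("utf-8")
                if not line:
                    continue
                if line.startswith(start):
                    event = json.loads(line[len(start):-1])
                    if "token" not in event:
                        raise RuntimeError(f"Response: {line}")
                    if not event["token"]["special"]:
                        if first:
                            # Drop the leading whitespace emitted before the first token.
                            yield event["token"]["text"].lstrip()
                            first = False
                        else:
                            yield event["token"]["text"]

A caller would consume it with "async for chunk in read_chat_response(...)", which is how the provider's create_async_generator yields text to g4f.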