author     H Lohaus <hlohaus@users.noreply.github.com>    2024-04-07 00:01:04 +0200
committer  GitHub <noreply@github.com>                    2024-04-07 00:01:04 +0200
commit     e5e811fd7f0a725a793bfd28350df3b99c1203e8 (patch)
tree       15d35057b50a607de8297e8434474c00e32a10d8 /g4f/Provider/HuggingChat.py
parent     Update GptTalkRu.py (diff)
Diffstat (limited to 'g4f/Provider/HuggingChat.py')
-rw-r--r--  g4f/Provider/HuggingChat.py  14
1 file changed, 7 insertions(+), 7 deletions(-)
diff --git a/g4f/Provider/HuggingChat.py b/g4f/Provider/HuggingChat.py
index 5c95b679..9003c749 100644
--- a/g4f/Provider/HuggingChat.py
+++ b/g4f/Provider/HuggingChat.py
@@ -5,10 +5,10 @@ import requests
 from aiohttp import ClientSession, BaseConnector
 
 from ..typing import AsyncResult, Messages
+from ..requests.raise_for_status import raise_for_status
 from .base_provider import AsyncGeneratorProvider, ProviderModelMixin
 from .helper import format_prompt, get_connector
 
-
 class HuggingChat(AsyncGeneratorProvider, ProviderModelMixin):
     url = "https://huggingface.co/chat"
     working = True
@@ -60,11 +60,11 @@ class HuggingChat(AsyncGeneratorProvider, ProviderModelMixin):
             headers=headers,
             connector=get_connector(connector, proxy)
         ) as session:
-            async with session.post(f"{cls.url}/conversation", json=options, proxy=proxy) as response:
-                response.raise_for_status()
+            async with session.post(f"{cls.url}/conversation", json=options) as response:
+                await raise_for_status(response)
                 conversation_id = (await response.json())["conversationId"]
             async with session.get(f"{cls.url}/conversation/{conversation_id}/__data.json") as response:
-                response.raise_for_status()
+                await raise_for_status(response)
                 data: list = (await response.json())["nodes"][1]["data"]
                 keys: list[int] = data[data[0]["messages"]]
                 message_keys: dict = data[keys[0]]
@@ -79,7 +79,7 @@ class HuggingChat(AsyncGeneratorProvider, ProviderModelMixin):
             async with session.post(f"{cls.url}/conversation/{conversation_id}", json=options) as response:
                 first_token = True
                 async for line in response.content:
-                    response.raise_for_status()
+                    await raise_for_status(response)
                     line = json.loads(line)
                     if "type" not in line:
                         raise RuntimeError(f"Response: {line}")
@@ -91,5 +91,5 @@ class HuggingChat(AsyncGeneratorProvider, ProviderModelMixin):
                         yield token
                     elif line["type"] == "finalAnswer":
                         break
-            async with session.delete(f"{cls.url}/conversation/{conversation_id}", proxy=proxy) as response:
-                response.raise_for_status()
+            async with session.delete(f"{cls.url}/conversation/{conversation_id}") as response:
+                await raise_for_status(response)
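The helper imported from ..requests.raise_for_status is referenced but not defined in this diff. The sketch below is a minimal guess at what an async raise_for_status for aiohttp responses could look like, assuming its purpose is to await and surface the response body in the error (something aiohttp's built-in, synchronous response.raise_for_status() cannot do). The exception type and message format here are assumptions, not the actual g4f implementation.

# Hypothetical sketch only; the real helper lives in g4f/requests/raise_for_status.py
# and may differ in exception type and message handling.
from aiohttp import ClientResponse

async def raise_for_status(response: ClientResponse, message: str = None) -> None:
    if response.ok:  # status < 400, nothing to do
        return
    if message is None:
        # Read the body before raising so callers see what the server returned.
        message = (await response.text()).strip()
    raise RuntimeError(f"Response {response.status}: {message}")

Dropping the per-request proxy=proxy arguments on the post/delete calls is consistent with the session already being created with connector=get_connector(connector, proxy): proxy handling presumably moves to the connector and therefore applies to every request made through that session.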