path: root/g4f/Provider/AiAsk.py
author	Commenter123321 <36051603+Commenter123321@users.noreply.github.com>	2023-10-10 14:15:12 +0200
committer	Commenter123321 <36051603+Commenter123321@users.noreply.github.com>	2023-10-10 14:15:12 +0200
commit	0e4297494dd46533738f786e4ac675541586177a (patch)
tree	6c2999e14ed831f7f37cc60991571cd040772918	/g4f/Provider/AiAsk.py
parent	add cool testing for gpt-3.5 and and gpt-4 (diff)
parent	Update Aivvm.py (diff)
Diffstat (limited to 'g4f/Provider/AiAsk.py')
-rw-r--r--	g4f/Provider/AiAsk.py	9
1 file changed, 5 insertions(+), 4 deletions(-)
diff --git a/g4f/Provider/AiAsk.py b/g4f/Provider/AiAsk.py
index 27d3bf15..f10be389 100644
--- a/g4f/Provider/AiAsk.py
+++ b/g4f/Provider/AiAsk.py
@@ -1,7 +1,7 @@
from __future__ import annotations
from aiohttp import ClientSession
-from ..typing import AsyncGenerator
+from ..typing import AsyncResult, Messages
from .base_provider import AsyncGeneratorProvider
class AiAsk(AsyncGeneratorProvider):
@@ -13,9 +13,10 @@ class AiAsk(AsyncGeneratorProvider):
async def create_async_generator(
cls,
model: str,
- messages: list[dict[str, str]],
+ messages: Messages,
+ proxy: str = None,
**kwargs
- ) -> AsyncGenerator:
+ ) -> AsyncResult:
headers = {
"accept": "application/json, text/plain, */*",
"origin": cls.url,
@@ -33,7 +34,7 @@ class AiAsk(AsyncGeneratorProvider):
}
buffer = ""
rate_limit = "您的免费额度不够使用这个模型啦,请点击右上角登录继续使用!"
- async with session.post(f"{cls.url}/v1/chat/gpt/", json=data) as response:
+ async with session.post(f"{cls.url}/v1/chat/gpt/", json=data, proxy=proxy) as response:
response.raise_for_status()
async for chunk in response.content.iter_any():
buffer += chunk.decode()
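
Usage sketch (not part of this commit): the snippet below shows how the updated signature might be exercised after the change. The model id, message payload, and proxy URL are illustrative assumptions, and it presumes the package layout at this commit, where AiAsk is importable from g4f.Provider.AiAsk and create_async_generator is an async generator classmethod that yields text chunks. The rate_limit string in the hunk above is the upstream service's Chinese message, roughly "Your free quota is not enough to use this model; please click Login in the top-right corner to continue."

# Illustrative usage only; names and values below are assumptions, not part of the diff.
import asyncio

from g4f.Provider.AiAsk import AiAsk


async def main() -> None:
    # Messages is list[dict[str, str]] per the old annotation this change replaces.
    messages = [{"role": "user", "content": "Hello!"}]

    # create_async_generator yields decoded chunks; the new proxy argument is
    # forwarded to session.post(..., proxy=proxy) as shown in the last hunk.
    async for chunk in AiAsk.create_async_generator(
        model="gpt-3.5-turbo",  # assumed model id for demonstration
        messages=messages,
        proxy=None,             # e.g. "http://127.0.0.1:8080" to route through a proxy
    ):
        print(chunk, end="", flush=True)


asyncio.run(main())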