From 43b3c2313d6fbed335a548de22944ec9a017d544 Mon Sep 17 00:00:00 2001
From: kqlio67
Date: Sat, 7 Sep 2024 01:34:47 +0300
Subject: g4f/models.py g4f/Provider/MagickPen.py

---
 g4f/Provider/MagickPen.py | 41 ++++++++++++++++++++++++++++++-----------
 g4f/models.py             |  1 +
 2 files changed, 31 insertions(+), 11 deletions(-)

(limited to 'g4f')

diff --git a/g4f/Provider/MagickPen.py b/g4f/Provider/MagickPen.py
index 0f476eca..eab70536 100644
--- a/g4f/Provider/MagickPen.py
+++ b/g4f/Provider/MagickPen.py
@@ -12,15 +12,19 @@ from .helper import format_prompt
 
 class MagickPen(AsyncGeneratorProvider, ProviderModelMixin):
     url = "https://magickpen.com"
-    api_endpoint = "https://api.magickpen.com/chat/free"
+    api_endpoint_free = "https://api.magickpen.com/chat/free"
+    api_endpoint_ask = "https://api.magickpen.com/ask"
     working = True
     supports_gpt_4 = True
     supports_stream = False
 
-    default_model = 'gpt-4o-mini'
-    models = ['gpt-4o-mini']
+    default_model = 'free'
+    models = ['free', 'ask']
 
-    model_aliases = {}
+    model_aliases = {
+        "gpt-4o-mini": "free",
+        "gpt-4o-mini": "ask",
+    }
 
     @classmethod
     def get_model(cls, model: str) -> str:
@@ -102,10 +106,25 @@ class MagickPen(AsyncGeneratorProvider, ProviderModelMixin):
         }
 
         async with ClientSession(headers=headers) as session:
-            data = {
-                "history": [{"role": "user", "content": format_prompt(messages)}]
-            }
-            async with session.post(cls.api_endpoint, json=data, proxy=proxy) as response:
-                response.raise_for_status()
-                result = await response.text()
-                yield result
+            if model == 'free':
+                data = {
+                    "history": [{"role": "user", "content": format_prompt(messages)}]
+                }
+                async with session.post(cls.api_endpoint_free, json=data, proxy=proxy) as response:
+                    response.raise_for_status()
+                    result = await response.text()
+                    yield result
+
+            elif model == 'ask':
+                data = {
+                    "query": format_prompt(messages),
+                    "plan": "Pay as you go"
+                }
+                async with session.post(cls.api_endpoint_ask, json=data, proxy=proxy) as response:
+                    response.raise_for_status()
+                    async for chunk in response.content:
+                        if chunk:
+                            yield chunk.decode()
+
+            else:
+                raise ValueError(f"Unknown model: {model}")
diff --git a/g4f/models.py b/g4f/models.py
index 9607658c..ddbeeddf 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -82,6 +82,7 @@ default = Model(
         Bixin123,
         Binjie,
         Free2GPT,
+        MagickPen,
     ])
 )