author | Tekky <98614666+xtekky@users.noreply.github.com> | 2023-09-07 19:45:04 +0200 |
---|---|---|
committer | GitHub <noreply@github.com> | 2023-09-07 19:45:04 +0200 |
commit | 7ca1a59d95b52f94f674e8f981eab910b2f03518 (patch) | |
tree | ca506c3c152f3906a5b727a4cc6ebba1fd59d335 /testing | |
parent | Merge pull request #869 from ahobsonsayers/add-console-script (diff) |
parent | Fix imports in Bing (diff) | |
download | gpt4free-7ca1a59d95b52f94f674e8f981eab910b2f03518.tar gpt4free-7ca1a59d95b52f94f674e8f981eab910b2f03518.tar.gz gpt4free-7ca1a59d95b52f94f674e8f981eab910b2f03518.tar.bz2 gpt4free-7ca1a59d95b52f94f674e8f981eab910b2f03518.tar.lz gpt4free-7ca1a59d95b52f94f674e8f981eab910b2f03518.tar.xz gpt4free-7ca1a59d95b52f94f674e8f981eab910b2f03518.tar.zst gpt4free-7ca1a59d95b52f94f674e8f981eab910b2f03518.zip |
Diffstat (limited to '')
-rw-r--r-- | testing/test_async.py | 37 |
-rw-r--r-- | testing/test_providers.py | 24 |
2 files changed, 52 insertions, 9 deletions
diff --git a/testing/test_async.py b/testing/test_async.py
new file mode 100644
index 00000000..692946ea
--- /dev/null
+++ b/testing/test_async.py
@@ -0,0 +1,37 @@
+import sys
+from pathlib import Path
+import asyncio
+
+sys.path.append(str(Path(__file__).parent.parent))
+
+import g4f
+from g4f.Provider import AsyncProvider
+from testing.test_providers import get_providers
+from testing.log_time import log_time_async
+
+async def create_async(provider: AsyncProvider):
+    model = g4f.models.gpt_35_turbo.name if provider.supports_gpt_35_turbo else g4f.models.default.name
+    try:
+        response = await log_time_async(
+            provider.create_async,
+            model=model,
+            messages=[{"role": "user", "content": "Hello Assistant!"}]
+        )
+        assert type(response) is str
+        assert len(response) > 0
+        return response
+    except Exception as e:
+        return e
+
+async def run_async():
+    _providers: list[AsyncProvider] = [
+        _provider
+        for _provider in get_providers()
+        if _provider.working and hasattr(_provider, "create_async")
+    ]
+    responses = [create_async(_provider) for _provider in _providers]
+    responses = await asyncio.gather(*responses)
+    for idx, provider in enumerate(_providers):
+        print(f"{provider.__name__}:", responses[idx])
+
+print("Total:", asyncio.run(log_time_async(run_async)))
\ No newline at end of file
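Note: log_time_async is imported above from testing.log_time, which is not included in this diff. A minimal sketch of what such an async timing helper could look like, consistent with how the new test calls it (the names, rounding, and return format here are assumptions, not the repository's actual code):

import time

async def log_time_async(method, **kwargs):
    # Assumed helper: await the coroutine and measure the elapsed time.
    start = time.time()
    result = await method(**kwargs)
    secs = f"{round(time.time() - start, 2)} secs"
    # Return the provider's text plus the timing, or just the timing
    # when the awaited coroutine returns nothing (as run_async does).
    return f"{result} {secs}" if result else secs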
diff --git a/testing/test_providers.py b/testing/test_providers.py
index c4fcbc0c..be04e7a3 100644
--- a/testing/test_providers.py
+++ b/testing/test_providers.py
@@ -8,6 +8,11 @@ from g4f import BaseProvider, models, Provider
 
 logging = False
 
+class Styles:
+    ENDC = "\033[0m"
+    BOLD = "\033[1m"
+    UNDERLINE = "\033[4m"
+
 def main():
     providers = get_providers()
     failed_providers = []
@@ -24,39 +29,40 @@ def main():
         print()
 
     if failed_providers:
-        print(f"{Fore.RED}Failed providers:\n")
+        print(f"{Fore.RED + Styles.BOLD}Failed providers:{Styles.ENDC}")
         for _provider in failed_providers:
             print(f"{Fore.RED}{_provider.__name__}")
     else:
-        print(f"{Fore.GREEN}All providers are working")
+        print(f"{Fore.GREEN + Styles.BOLD}All providers are working")
 
 
 def get_providers() -> list[type[BaseProvider]]:
     provider_names = dir(Provider)
     ignore_names = [
+        "annotations",
         "base_provider",
-        "BaseProvider"
+        "BaseProvider",
+        "AsyncProvider",
+        "AsyncGeneratorProvider"
     ]
     provider_names = [
         provider_name
         for provider_name in provider_names
         if not provider_name.startswith("__") and provider_name not in ignore_names
     ]
-    return [getattr(Provider, provider_name) for provider_name in sorted(provider_names)]
+    return [getattr(Provider, provider_name) for provider_name in provider_names]
 
 
 def create_response(_provider: type[BaseProvider]) -> str:
     if _provider.supports_gpt_35_turbo:
         model = models.gpt_35_turbo.name
     elif _provider.supports_gpt_4:
-        model = models.gpt_4
-    elif hasattr(_provider, "model"):
-        model = _provider.model
+        model = models.gpt_4.name
     else:
-        model = None
+        model = models.default.name
     response = _provider.create_completion(
         model=model,
-        messages=[{"role": "user", "content": "Hello"}],
+        messages=[{"role": "user", "content": "Hello, who are you? Answer in detail much as possible."}],
         stream=False,
     )
     return "".join(response)
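For context on the Styles addition in test_providers.py: colorama's Fore constants are plain ANSI escape strings, so they can be concatenated with the raw bold and reset codes the new Styles class defines. A small standalone illustration of that pattern (not part of this commit; calling colorama.init() is only needed so Windows consoles interpret the escapes):

from colorama import Fore, init

init()  # let Windows terminals interpret ANSI escape sequences

BOLD = "\033[1m"   # same escape codes as the new Styles class
ENDC = "\033[0m"

# Fore.RED is just an escape string, so plain concatenation works,
# mirroring the updated print calls in main().
print(f"{Fore.RED + BOLD}Failed providers:{ENDC}")
print(f"{Fore.GREEN + BOLD}All providers are working{ENDC}")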