author    Heiner Lohaus <hlohaus@users.noreply.github.com>  2024-04-20 15:41:49 +0200
committer Heiner Lohaus <hlohaus@users.noreply.github.com>  2024-04-20 15:41:49 +0200
commit    83484c0a5658b023bcef930aee5099a4fc059cb4 (patch)
tree      36b4467e36c9363e997c0706f0bdcdcae26ed0aa /etc/examples
parent    Add MetaAI Provider and some small improvments (diff)
Diffstat (limited to 'etc/examples')
-rw-r--r--  etc/examples/api.py         19
-rw-r--r--  etc/examples/ecosia.py      18
-rw-r--r--  etc/examples/openaichat.py  23
3 files changed, 60 insertions, 0 deletions
diff --git a/etc/examples/api.py b/etc/examples/api.py
new file mode 100644
index 00000000..d4d03a77
--- /dev/null
+++ b/etc/examples/api.py
@@ -0,0 +1,19 @@
+import requests
+import json
+url = "http://localhost:1337/v1/chat/completions"
+body = {
+    "model": "",
+    "provider": "MetaAI",
+    "stream": True,
+    "messages": [
+        {"role": "user", "content": "What can you do? Who are you?"}
+    ]
+}
+lines = requests.post(url, json=body, stream=True).iter_lines()
+for line in lines:
+    if line.startswith(b"data: "):
+        try:
+            print(json.loads(line[6:]).get("choices", [{"delta": {}}])[0]["delta"].get("content", ""), end="")
+        except json.JSONDecodeError:
+            pass
+print()
\ No newline at end of file
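
For comparison, here is a minimal non-streaming variant of the same request. This is a sketch, assuming the local interference API on port 1337 mirrors the OpenAI response shape, so the full reply arrives in choices[0].message.content:

import requests

# Same endpoint and body as api.py above, but with "stream": False the
# whole completion is returned in a single JSON response.
response = requests.post("http://localhost:1337/v1/chat/completions", json={
    "model": "",
    "provider": "MetaAI",
    "stream": False,
    "messages": [
        {"role": "user", "content": "What can you do? Who are you?"}
    ]
})
print(response.json()["choices"][0]["message"]["content"])
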
diff --git a/etc/examples/ecosia.py b/etc/examples/ecosia.py
new file mode 100644
index 00000000..5a2ae520
--- /dev/null
+++ b/etc/examples/ecosia.py
@@ -0,0 +1,18 @@
+import asyncio
+import g4f
+from g4f.client import AsyncClient
+
+async def main():
+    client = AsyncClient(
+        provider=g4f.Provider.Ecosia,
+    )
+    async for chunk in client.chat.completions.create(
+        [{"role": "user", "content": "happy dogs on work. write some lines"}],
+        g4f.models.default,
+        stream=True,
+        green=True,
+    ):
+        print(chunk.choices[0].delta.content or "", end="")
+    print(f"\nwith {chunk.model}")
+
+asyncio.run(main())
\ No newline at end of file
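
The same Ecosia request can also be issued without asyncio. A minimal synchronous sketch, assuming the blocking Client accepts the same provider, messages and model arguments as AsyncClient (as openaichat.py below suggests):

import g4f
from g4f.client import Client

# Synchronous, non-streaming version of the ecosia.py request.
client = Client(provider=g4f.Provider.Ecosia)
response = client.chat.completions.create(
    messages=[{"role": "user", "content": "happy dogs on work. write some lines"}],
    model=g4f.models.default,
)
print(response.choices[0].message.content)
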
diff --git a/etc/examples/openaichat.py b/etc/examples/openaichat.py
new file mode 100644
index 00000000..291daa2c
--- /dev/null
+++ b/etc/examples/openaichat.py
@@ -0,0 +1,23 @@
+from g4f.client import Client
+from g4f.Provider import OpenaiChat, RetryProvider
+
+# compatible countries: https://pastebin.com/UK0gT9cn
+client = Client(
+    proxies = {
+        'http': 'http://username:password@host:port',  # MUST BE WORKING OPENAI COUNTRY PROXY ex: USA
+        'https': 'http://username:password@host:port'  # MUST BE WORKING OPENAI COUNTRY PROXY ex: USA
+    },
+    provider = RetryProvider([OpenaiChat],
+                             single_provider_retry=True, max_retries=5)
+)
+
+messages = [
+    {'role': 'user', 'content': 'Hello'}
+]
+
+response = client.chat.completions.create(model='gpt-3.5-turbo',
+                                           messages=messages,
+                                           stream=True)
+
+for message in response:
+    print(message.choices[0].delta.content or "")
\ No newline at end of file
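
As a small follow-up, the streamed deltas from the client configured above can be joined into one string instead of being printed chunk by chunk; this sketch only reuses the chunk attributes openaichat.py already reads:

# Accumulate the streamed reply into a single string.
stream = client.chat.completions.create(model='gpt-3.5-turbo',
                                        messages=messages,
                                        stream=True)
full_reply = "".join(chunk.choices[0].delta.content or "" for chunk in stream)
print(full_reply)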