From e341c75e5e9c34c5aa583e67b8711799d74040eb Mon Sep 17 00:00:00 2001
From: "t.me/xtekky" <98614666+xtekky@users.noreply.github.com>
Date: Tue, 25 Apr 2023 18:40:17 +0100
Subject: discontinue ora.sh api
---
README.md | 5 +-
ora/README.md | 49 ------------
ora/__init__.py | 62 ---------------
ora/_jwt.py | 75 ------------------
ora/model.py | 57 --------------
ora/typing.py | 39 ----------
ora_test.py | 15 ----
t3nsor/README.md | 44 -----------
t3nsor/__init__.py | 137 ---------------------------------
testing/ora_gpt4.py | 45 -----------
testing/ora_gpt4_proof.py | 24 ------
testing/you_test.py | 32 --------
unfinished/t3nsor/README.md | 44 +++++++++++
unfinished/t3nsor/__init__.py | 137 +++++++++++++++++++++++++++++++++
unfinished/writesonic/README.md | 53 +++++++++++++
unfinished/writesonic/__init__.py | 158 ++++++++++++++++++++++++++++++++++++++
v2.py | 0
writesonic/README.md | 53 -------------
writesonic/__init__.py | 158 --------------------------------------
you_test.py | 32 ++++++++
20 files changed, 425 insertions(+), 794 deletions(-)
delete mode 100644 ora/README.md
delete mode 100644 ora/__init__.py
delete mode 100644 ora/_jwt.py
delete mode 100644 ora/model.py
delete mode 100644 ora/typing.py
delete mode 100644 ora_test.py
delete mode 100644 t3nsor/README.md
delete mode 100644 t3nsor/__init__.py
delete mode 100644 testing/ora_gpt4.py
delete mode 100644 testing/ora_gpt4_proof.py
delete mode 100644 testing/you_test.py
create mode 100644 unfinished/t3nsor/README.md
create mode 100644 unfinished/t3nsor/__init__.py
create mode 100644 unfinished/writesonic/README.md
create mode 100644 unfinished/writesonic/__init__.py
create mode 100644 v2.py
delete mode 100644 writesonic/README.md
delete mode 100644 writesonic/__init__.py
create mode 100644 you_test.py
diff --git a/README.md b/README.md
index f86c47eb..90085277 100644
--- a/README.md
+++ b/README.md
@@ -23,7 +23,6 @@ By the way, thank you so much for `2k` stars and all the support!!
- [`quora (poe)`](./quora/README.md)
- [`phind`](./phind/README.md)
- [`t3nsor`](./t3nsor/README.md)
- - [`ora`](./ora/README.md)
- [`writesonic`](./writesonic/README.md)
- [`you`](./you/README.md)
- [`sqlchat`](./sqlchat/README.md)
@@ -44,7 +43,6 @@ By the way, thank you so much for `2k` stars and all the support!!
| Website | Model(s) |
| ---------------------------------------------------- | ------------------------------- |
-| [ora.sh](https://ora.sh) | GPT-3.5 / 4 |
| [poe.com](https://poe.com) | GPT-4/3.5 |
| [writesonic.com](https://writesonic.com) | GPT-3.5 / Internet |
| [t3nsor.com](https://t3nsor.com) | GPT-3.5 |
@@ -64,8 +62,7 @@ By the way, thank you so much for `2k` stars and all the support!!
- why not `ora` anymore ? gpt-4 requires login + limited
#### gpt-3.5
-- [`/ora`](./ora/README.md)
-- only stable api at the moment ( for gpt-3.5, gpt-4 is dead)
+- looking for a stable api at the moment
## Install
download or clone this GitHub repo
diff --git a/ora/README.md b/ora/README.md
deleted file mode 100644
index 36bc2806..00000000
--- a/ora/README.md
+++ /dev/null
@@ -1,49 +0,0 @@
-### Example: `ora` (use like openai pypi package)
-
-### load model (new)
-
-more gpt4 models in `/testing/ora_gpt4.py`
-
-find the userid by visiting https://ora.sh/api/auth/session ( must be logged in on the site )
-and session_token in the cookies, it should be: __Secure-next-auth.session-token
-
-```python
-# if using CompletionModel.load set these
-ora.user_id = '...'
-ora.session_token = '...'
-
-# normal gpt-4: b8b12eaa-5d47-44d3-92a6-4d706f2bcacf
-model = ora.CompletionModel.load(chatbot_id, 'gpt-4') # or gpt-3.5
-```
-
-#### create model / chatbot:
-```python
-# import ora
-import ora
-
-
-# create model
-model = ora.CompletionModel.create(
- system_prompt = 'You are ChatGPT, a large language model trained by OpenAI. Answer as concisely as possible',
- description = 'ChatGPT Openai Language Model',
- name = 'gpt-3.5')
-
-# init conversation (will give you a conversationId)
-init = ora.Completion.create(
- model = model,
- prompt = 'hello world')
-
-print(init.completion.choices[0].text)
-
-while True:
- # pass in conversationId to continue conversation
-
- prompt = input('>>> ')
- response = ora.Completion.create(
- model = model,
- prompt = prompt,
- includeHistory = True, # remember history
- conversationId = init.id)
-
- print(response.completion.choices[0].text)
-```
diff --git a/ora/__init__.py b/ora/__init__.py
deleted file mode 100644
index db8252cd..00000000
--- a/ora/__init__.py
+++ /dev/null
@@ -1,62 +0,0 @@
-from ora.model import CompletionModel
-from ora.typing import OraResponse
-from requests import post
-from time import time
-from random import randint
-from ora._jwt import do_jwt
-
-user_id = None
-session_token = None
-
-class Completion:
- def create(
- model : CompletionModel,
- prompt: str,
- includeHistory: bool = True,
- conversationId: str or None = None) -> OraResponse:
- extra = {
- 'conversationId': conversationId} if conversationId else {}
-
- cookies = {
- "cookie" : f"__Secure-next-auth.session-token={session_token}"} if session_token else {}
-
- json_data = extra | {
- 'chatbotId': model.id,
- 'input' : prompt,
- 'userId' : user_id if user_id else model.createdBy,
- 'model' : model.modelName,
- 'provider' : 'OPEN_AI',
- 'includeHistory': includeHistory}
-
-
- response = post('https://ora.sh/api/conversation',
- headers = cookies | {
- "host" : "ora.sh",
- "authorization" : f"Bearer AY0{randint(1111, 9999)}",
- "user-agent" : "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36",
- "origin" : "https://ora.sh",
- "referer" : "https://ora.sh/chat/",
- "x-signed-token": do_jwt(json_data)
- },
- json = json_data).json()
-
- if response.get('error'):
- raise Exception('''set ora.user_id and ora.session_token\napi response: %s''' % response['error'])
-
- return OraResponse({
- 'id' : response['conversationId'],
- 'object' : 'text_completion',
- 'created': int(time()),
- 'model' : model.slug,
- 'choices': [{
- 'text' : response['response'],
- 'index' : 0,
- 'logprobs' : None,
- 'finish_reason' : 'stop'
- }],
- 'usage': {
- 'prompt_tokens' : len(prompt),
- 'completion_tokens' : len(response['response']),
- 'total_tokens' : len(prompt) + len(response['response'])
- }
- })
\ No newline at end of file
diff --git a/ora/_jwt.py b/ora/_jwt.py
deleted file mode 100644
index 61f71332..00000000
--- a/ora/_jwt.py
+++ /dev/null
@@ -1,75 +0,0 @@
-import jwt
-from datetime import datetime, timedelta
-# from cryptography.hazmat.primitives import serialization
-# from cryptography.hazmat.primitives.serialization import load_pem_private_key
-# from cryptography.hazmat.backends import default_backend
-
-
-def do_jwt(json_data: dict):
-
- private_key = b'''-----BEGIN RSA PRIVATE KEY-----
-MIIJKAIBAAKCAgEAxv9TLZP2TnsR512LqzT52N6Z9ixKmUA11jy0IXH0dEbdbfBw
-eeWrXoTuIYcY8Dkg/+q33ppfujYfb0z22bs/CZ63+jBL2UmxG/0XIzmsQlHSgJd/
-rnbERwIt7/ZjOHAcNrAzI0N11AI8AT0+M3XFOGRoIKzoc3Juxl7eyyPPEkNZMkEv
-lYfDN5AMD/+4pZ+7SCEzUCyGtBejW2P+NwTvjBxhLjIoG+m7yh81RoIBnO+Z1o5X
-ZtospuWZe1L6GNh+zezeHIyBGYgGgYbPboQ8QeHhoh+n0PuZB0GQqorqfxHjB38t
-yB4qsRGi10UNcohvFhglZk8kdMYBTd0M5ik5t4sx/ujjF57gX7dCKipHimDy7McY
-ElVLTDoSkwD/Lg3tV0utky42dL/iIMePlHfMrw/m2oAm33/dCaiAW8grNkJPjcwo
-Y8pnqpFGgAZX+6WalQCfoSStV4kYYlaq11DB6dZjDYoKLRIyH7MCAmMxms9569qe
-5gFuyQWTZgXlKoj2Zd7XIaIs5s/A6PFt7sxk8mOY/DspSbygZZCnMH3+or/8trH2
-p0fGEkqpzMKAY6TYtdYhOyTbup3VOKQwhk8b5CPuEWZutE6pT0O2O81MkuEEl/Zw
-/M1MJERTIjGAThsL0yvEn1Gi5HXl7s/5E61Yvc0ItORqio70PZcToRII27ECAwEA
-AQKCAgEAle0H3e78Q2S1uHriH7tqAdq0ZKQ6D/wwk5honkocwv4hFhNwqmY/FpdQ
-UjJWt6ZTFnzgyvXD6aedR13VHXXVqInMUtLQUoUSyuOD6yYogk7jKb76k5cnidg6
-g/A+EOdmWk2mOYs52uFUFBrwIhU44aPET9n1yAUPMKWJdcMk372eFh7GmwIOMm50
-qBkiJKaTk2RwJJdnZYfpq5FKlmlBkW5QSV3AmkcfFMkuelC4pmReoyfa8cKuoY+a
-cy+w/ccewkcTkK7LFVFGlY/b+IfoXjqwpFT1Op5UTQM420SOJ+5x/dPzyjHwODfx
-V/7OgtwH1b2bb9lwvgnwMZm5fi7RLAOC5BaSrZUb8WtVaaKURzXgdE+5LO/xXYCy
-JECbRQ5o4H4CwOc3mvJZL0O/dwPKoTccjELc8HOcogdy+hrJPXFl+oXy3yKUmf5L
-Lx13hh/kO4960TcGVQsUPV9oYB8XU5iYC1cMdlMVZAOwoLE1h/Tro0blisq6eafx
-+4ZS+COJEM+A7UgFacxdQ9C4bL5ZgjgLxMEsCIjwBN1i/bMEKpj46ulH23I57F1S
-jr6/UtMPO73c2bGcxdzRRQSI/LW5Qnb4USQsOIjYDVReLM9hDvI4OyQ2pfcgXlTL
-ODky2qivbP6WA4GKCBhaDEaeKFNDiyCqx9ObftCbRk1fWu7IP4ECggEBAOnPs88o
-DQLEaColCbh3ziogoANYMKiqaJUacnH5S5e1/aW3jgVK85NsMJT9hsODXyHup/CF
-RT3jeJA5cRj+04KI33cH2F5X9MhPB0a2Zo0Io813l95d2Wuk9rnadNCr8+h3b/nM
-HR4X+n7l0x6Y8sn60pxesYXKu8NFccUCVcGUvrrL2gsPLPB//3eqgfZuf8BCDzOB
-liO8Pzt0ELjxwxUWB9kPKLNZwVa0hq4snJThZQBrlMQcuH8BmitS5vZDVwiRLGVR
-L5z+tPJMz5wJ/dGbjyMMONCZgiXypqb1qHIGt8FEPLryQ6u+04ZszxW9QTsWqMqi
-ZvoFo0VPGkXGIWcCggEBANnh1tTCfGJSrwK1fWVhBajtn03iE5DuIkPUmL8juBq6
-LSYG8tuk+zt0RVNYLYrM2nSzU78IsuR/15XtpheDh3Fy1ZdsAe/boccdZUrLtH9h
-hRcAYUfY+E0E4U6j7FVTQCy9eNGnWJ/su2N0GDJll2BQWi8bcnL8dZqsq8pZzAjo
-7jBlOEe2xOVbCsBLfCW7tmeKCv4cc8digITGemig4NgCs6W03gJPnvnvvHMnuF3u
-8YjD9kWWEpQr37pT6QSdhwzKMAOeHbhh/CQO/sl+fBLbcYikQa0HIuvj+29w0/jv
-whVfsJxCvs6fCTMYjQE7GdTcGmCbvs+x7TrXuqeT8ycCggEAWr4Un/KAUjGd57Vm
-N2Sv6+OrloC0qdExM6UHA7roHqIwJg++G8nCDNYxaLGYiurCki3Ime1vORy+XuMc
-RMIpnoC2kcDGtZ7XTqJ1RXlnBZdz0zt2AoRT7JYid3EUYyRJTlCEceNI7bQKsRNL
-Q5XCrKce9DdAGJfdFWUvSXGljLLI70BMiHxESbazlGLle5nZFOnOcoP5nDbkJ5Pd
-JZoWx2k8dH6QokLUaW041AJWZuWvSGF4ZEBtTkV16xiKsMrjzVxiaZP/saOc4Gj1
-Li8mhiIkhEqrBjJ9s3KgQS4YSODYkjaEh12c69vsxkAWgu5nkaIysiojYyeq/Sw9
-GxVRQwKCAQAeYvTHL2iRfd6SjiUy4lkbuighgIoiCFQXCatT3PNsJtLtHsL4BwZS
-wGB6wy120iMVa30eg2QPohS7AC3N0bYuCEnpmFKc1RC26E6cI9TEfyFEl/T5RDU8
-6JVTlmD7dWTZ2ILlGmWtyCJKOIK3ZJu7/vjU4QsRJkxwiexbiDKAe5vcfAFhXwgO
-xKe3Mc/ao1dJEWN/FRDAmeg6nEOuG+G/voC3d4YO5HPTf6/Uj5GS6CQfYtUR12A3
-8fZ90f4Jer6+9ePEXWTftiqoDL9T8qPzLU+kMuRF8VzZcS472Ix3h1iWCoZjBJv/
-zQZHbgEcTtXHbfrvxkjSRopDTprljCi5AoIBAGc6M8/FH1pLgxOgS6oEGJAtErxv
-EnmELzKvfwBryphx8f0S5sHoiqli+5dqFtw5h5yy/pXrNzLi0LfpmFzxbChfO8ai
-omC/oqxU0FKqY2msFYdnfwM3PZeZ3c7LALLhWG56/fIYMtV78+cfqkRPM8nRJXaF
-Aza2YTTZGfh3x10KnSLWUmhIWUEj8VzCNW7SR0Ecqa+ordAYio4wBsq7sO3sCw8G
-Oi0/98ondhGJWL3M6FDGai8dXewt+8o0dlq95mHkNNopCWbPI71pM7u4ABPL50Yd
-spd4eADxTm2m0GR7bhVEIbYfc0aAzIoWDpVs4V3vmx+bdRbppFxV1aS/r0g=
------END RSA PRIVATE KEY-----'''
-
- header = {
- 'alg': 'RS256',
- 'typ': 'JWT',
- 'kid': '1c8a5da7-527e-4bee-aa8d-aabda16c59ce'
- }
-
- payload = {
- **json_data,
- 'iat': int(datetime.now().timestamp()),
- 'exp': int((datetime.now() + timedelta(minutes=10)).timestamp()),
- 'iss': 'https://rick.roll'
- }
-
- return jwt.encode(payload, private_key, algorithm='RS256', headers=header)
\ No newline at end of file
diff --git a/ora/model.py b/ora/model.py
deleted file mode 100644
index 34720c2d..00000000
--- a/ora/model.py
+++ /dev/null
@@ -1,57 +0,0 @@
-from uuid import uuid4
-from requests import post
-
-class CompletionModel:
- system_prompt = None
- description = None
- createdBy = None
- createdAt = None
- slug = None
- id = None
- modelName = None
- model = 'gpt-3.5-turbo'
-
- def create(
- system_prompt: str = 'You are ChatGPT, a large language model trained by OpenAI. Answer as concisely as possible',
- description : str = 'ChatGPT Openai Language Model',
- name : str = 'gpt-3.5'):
-
- CompletionModel.system_prompt = system_prompt
- CompletionModel.description = description
- CompletionModel.slug = name
-
- json_data = {
- 'prompt' : system_prompt,
- 'userId' : f'auto:{uuid4()}',
- 'name' : name,
- 'description': description}
-
- headers = {
- 'Origin' : 'https://ora.sh',
- 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/16.4 Safari/605.1.15',
- 'Referer' : 'https://ora.sh/',
- 'Host' : 'ora.sh',
- }
-
- response = post('https://ora.sh/api/assistant', headers = headers, json = json_data)
-
- print(response.json())
-
- CompletionModel.id = response.json()['id']
- CompletionModel.createdBy = response.json()['createdBy']
- CompletionModel.createdAt = response.json()['createdAt']
-
- return CompletionModel
-
- def load(chatbotId: str, modelName: str = 'gpt-3.5-turbo', userId: str = None):
- if userId is None: userId = f'{uuid4()}'
-
- CompletionModel.system_prompt = None
- CompletionModel.description = None
- CompletionModel.slug = None
- CompletionModel.id = chatbotId
- CompletionModel.createdBy = userId
- CompletionModel.createdAt = None
- CompletionModel.modelName = modelName
-
- return CompletionModel
\ No newline at end of file
diff --git a/ora/typing.py b/ora/typing.py
deleted file mode 100644
index f3f0aebf..00000000
--- a/ora/typing.py
+++ /dev/null
@@ -1,39 +0,0 @@
-class OraResponse:
-
- class Completion:
-
- class Choices:
- def __init__(self, choice: dict) -> None:
- self.text = choice['text']
- self.content = self.text.encode()
- self.index = choice['index']
- self.logprobs = choice['logprobs']
- self.finish_reason = choice['finish_reason']
-
- def __repr__(self) -> str:
- return f'''<__main__.APIResponse.Completion.Choices(\n text = {self.text.encode()},\n index = {self.index},\n logprobs = {self.logprobs},\n finish_reason = {self.finish_reason})object at 0x1337>'''
-
- def __init__(self, choices: dict) -> None:
- self.choices = [self.Choices(choice) for choice in choices]
-
- class Usage:
- def __init__(self, usage_dict: dict) -> None:
- self.prompt_tokens = usage_dict['prompt_tokens']
- self.completion_tokens = usage_dict['completion_tokens']
- self.total_tokens = usage_dict['total_tokens']
-
- def __repr__(self):
- return f'''<__main__.APIResponse.Usage(\n prompt_tokens = {self.prompt_tokens},\n completion_tokens = {self.completion_tokens},\n total_tokens = {self.total_tokens})object at 0x1337>'''
-
- def __init__(self, response_dict: dict) -> None:
-
- self.response_dict = response_dict
- self.id = response_dict['id']
- self.object = response_dict['object']
- self.created = response_dict['created']
- self.model = response_dict['model']
- self.completion = self.Completion(response_dict['choices'])
- self.usage = self.Usage(response_dict['usage'])
-
- def json(self) -> dict:
- return self.response_dict
\ No newline at end of file
diff --git a/ora_test.py b/ora_test.py
deleted file mode 100644
index 5144be14..00000000
--- a/ora_test.py
+++ /dev/null
@@ -1,15 +0,0 @@
-import ora
-
-
-# create model
-model = ora.CompletionModel.create(
- system_prompt = 'You are ChatGPT, a large language model trained by OpenAI. Answer as concisely as possible',
- description = 'ChatGPT Openai Language Model',
- name = 'gpt-3.5')
-
-# init conversation (will give you a conversationId)
-init = ora.Completion.create(
- model = model,
- prompt = 'hello world')
-
-print(init.completion.choices[0].text)
\ No newline at end of file
diff --git a/t3nsor/README.md b/t3nsor/README.md
deleted file mode 100644
index 2790bf6e..00000000
--- a/t3nsor/README.md
+++ /dev/null
@@ -1,44 +0,0 @@
-### note: currently patched
-
-### Example: `t3nsor` (use like openai pypi package)
-
-```python
-# Import t3nsor
-import t3nsor
-
-# t3nsor.Completion.create
-# t3nsor.StreamCompletion.create
-
-[...]
-
-```
-
-#### Example Chatbot
-```python
-messages = []
-
-while True:
- user = input('you: ')
-
- t3nsor_cmpl = t3nsor.Completion.create(
- prompt = user,
- messages = messages
- )
-
- print('gpt:', t3nsor_cmpl.completion.choices[0].text)
-
- messages.extend([
- {'role': 'user', 'content': user },
- {'role': 'assistant', 'content': t3nsor_cmpl.completion.choices[0].text}
- ])
-```
-
-#### Streaming Response:
-
-```python
-for response in t3nsor.StreamCompletion.create(
- prompt = 'write python code to reverse a string',
- messages = []):
-
- print(response.completion.choices[0].text)
-```
diff --git a/t3nsor/__init__.py b/t3nsor/__init__.py
deleted file mode 100644
index aec45dcf..00000000
--- a/t3nsor/__init__.py
+++ /dev/null
@@ -1,137 +0,0 @@
-from requests import post
-from time import time
-
-headers = {
- 'authority': 'www.t3nsor.tech',
- 'accept': '*/*',
- 'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
- 'cache-control': 'no-cache',
- 'content-type': 'application/json',
- 'origin': 'https://www.t3nsor.tech',
- 'pragma': 'no-cache',
- 'referer': 'https://www.t3nsor.tech/',
- 'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
- 'sec-ch-ua-mobile': '?0',
- 'sec-ch-ua-platform': '"macOS"',
- 'sec-fetch-dest': 'empty',
- 'sec-fetch-mode': 'cors',
- 'sec-fetch-site': 'same-origin',
- 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
-}
-
-class T3nsorResponse:
-
- class Completion:
-
- class Choices:
- def __init__(self, choice: dict) -> None:
- self.text = choice['text']
- self.content = self.text.encode()
- self.index = choice['index']
- self.logprobs = choice['logprobs']
- self.finish_reason = choice['finish_reason']
-
- def __repr__(self) -> str:
- return f'''<__main__.APIResponse.Completion.Choices(\n text = {self.text.encode()},\n index = {self.index},\n logprobs = {self.logprobs},\n finish_reason = {self.finish_reason})object at 0x1337>'''
-
- def __init__(self, choices: dict) -> None:
- self.choices = [self.Choices(choice) for choice in choices]
-
- class Usage:
- def __init__(self, usage_dict: dict) -> None:
- self.prompt_tokens = usage_dict['prompt_chars']
- self.completion_tokens = usage_dict['completion_chars']
- self.total_tokens = usage_dict['total_chars']
-
- def __repr__(self):
- return f'''<__main__.APIResponse.Usage(\n prompt_tokens = {self.prompt_tokens},\n completion_tokens = {self.completion_tokens},\n total_tokens = {self.total_tokens})object at 0x1337>'''
-
- def __init__(self, response_dict: dict) -> None:
-
- self.response_dict = response_dict
- self.id = response_dict['id']
- self.object = response_dict['object']
- self.created = response_dict['created']
- self.model = response_dict['model']
- self.completion = self.Completion(response_dict['choices'])
- self.usage = self.Usage(response_dict['usage'])
-
- def json(self) -> dict:
- return self.response_dict
-
-class Completion:
- model = {
- 'model': {
- 'id' : 'gpt-3.5-turbo',
- 'name' : 'Default (GPT-3.5)'
- }
- }
-
- def create(
- prompt: str = 'hello world',
- messages: list = []) -> T3nsorResponse:
-
- response = post('https://www.t3nsor.tech/api/chat', headers = headers, json = Completion.model | {
- 'messages' : messages,
- 'key' : '',
- 'prompt' : prompt
- })
-
- return T3nsorResponse({
- 'id' : f'cmpl-1337-{int(time())}',
- 'object' : 'text_completion',
- 'created': int(time()),
- 'model' : Completion.model,
- 'choices': [{
- 'text' : response.text,
- 'index' : 0,
- 'logprobs' : None,
- 'finish_reason' : 'stop'
- }],
- 'usage': {
- 'prompt_chars' : len(prompt),
- 'completion_chars' : len(response.text),
- 'total_chars' : len(prompt) + len(response.text)
- }
- })
-
-class StreamCompletion:
- model = {
- 'model': {
- 'id' : 'gpt-3.5-turbo',
- 'name' : 'Default (GPT-3.5)'
- }
- }
-
- def create(
- prompt: str = 'hello world',
- messages: list = []) -> T3nsorResponse:
-
- print('t3nsor api is down, this may not work, refer to another module')
-
- response = post('https://www.t3nsor.tech/api/chat', headers = headers, stream = True, json = Completion.model | {
- 'messages' : messages,
- 'key' : '',
- 'prompt' : prompt
- })
-
- for chunk in response.iter_content(chunk_size = 2046):
- yield T3nsorResponse({
- 'id' : f'cmpl-1337-{int(time())}',
- 'object' : 'text_completion',
- 'created': int(time()),
- 'model' : Completion.model,
-
- 'choices': [{
- 'text' : chunk.decode(),
- 'index' : 0,
- 'logprobs' : None,
- 'finish_reason' : 'stop'
- }],
-
- 'usage': {
- 'prompt_chars' : len(prompt),
- 'completion_chars' : len(chunk.decode()),
- 'total_chars' : len(prompt) + len(chunk.decode())
- }
- })
diff --git a/testing/ora_gpt4.py b/testing/ora_gpt4.py
deleted file mode 100644
index 41f20876..00000000
--- a/testing/ora_gpt4.py
+++ /dev/null
@@ -1,45 +0,0 @@
-import ora
-
-ora.user_id = '...'
-ora.session_token = '...'
-
-gpt4_chatbot_ids = ['b8b12eaa-5d47-44d3-92a6-4d706f2bcacf', 'fbe53266-673c-4b70-9d2d-d247785ccd91', 'bd5781cf-727a-45e9-80fd-a3cfce1350c6', '993a0102-d397-47f6-98c3-2587f2c9ec3a', 'ae5c524e-d025-478b-ad46-8843a5745261', 'cc510743-e4ab-485e-9191-76960ecb6040', 'a5cd2481-8e24-4938-aa25-8e26d6233390', '6bca5930-2aa1-4bf4-96a7-bea4d32dcdac', '884a5f2b-47a2-47a5-9e0f-851bbe76b57c', 'd5f3c491-0e74-4ef7-bdca-b7d27c59e6b3', 'd72e83f6-ef4e-4702-844f-cf4bd432eef7', '6e80b170-11ed-4f1a-b992-fd04d7a9e78c', '8ef52d68-1b01-466f-bfbf-f25c13ff4a72', 'd0674e11-f22e-406b-98bc-c1ba8564f749', 'a051381d-6530-463f-be68-020afddf6a8f', '99c0afa1-9e32-4566-8909-f4ef9ac06226', '1be65282-9c59-4a96-99f8-d225059d9001', 'dba16bd8-5785-4248-a8e9-b5d1ecbfdd60', '1731450d-3226-42d0-b41c-4129fe009524', '8e74635d-000e-4819-ab2c-4e986b7a0f48', 'afe7ed01-c1ac-4129-9c71-2ca7f3800b30', 'e374c37a-8c44-4f0e-9e9f-1ad4609f24f5']
-chatbot_id = gpt4_chatbot_ids[0]
-
-model = ora.CompletionModel.load(chatbot_id, 'gpt-4')
-response = ora.Completion.create(model, 'hello')
-
-print(response.completion.choices[0].text)
-conversation_id = response.id
-
-while True:
- # pass in conversationId to continue conversation
-
- prompt = input('>>> ')
- response = ora.Completion.create(
- model = model,
- prompt = prompt,
- includeHistory = True, # remember history
- conversationId = conversation_id)
-
- print(response.completion.choices[0].text)
-
-
-# bots :
-# 1 normal
-# 2 solidity contract helper
-# 3 swift project helper
-# 4 developer gpt
-# 5 lawsuit bot for spam call
-# 6 p5.js code help bot
-# 8 AI professor, for controversial topics
-# 9 HustleGPT, your entrepreneurial AI
-# 10 midjourney prompts bot
-# 11 AI philosophy professor
-# 12 TypeScript and JavaScript code review bot
-# 13 credit card transaction details to merchant and location bot
-# 15 Chemical Compound Similarity and Purchase Tool bot
-# 16 expert full-stack developer AI
-# 17 Solana development bot
-# 18 price guessing game bot
-# 19 AI Ethicist and Philosopher
\ No newline at end of file
diff --git a/testing/ora_gpt4_proof.py b/testing/ora_gpt4_proof.py
deleted file mode 100644
index 17fbe8b7..00000000
--- a/testing/ora_gpt4_proof.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import ora
-
-complex_question = '''
-James is talking to two people, his father, and his friend.
-
-Douglas asks him, "What did you do today James?"
-James replies, "I went on a fishing trip."
-Josh then asks, "Did you catch anything?"
-James replies, "Yes, I caught a couple of nice rainbow trout. It was a lot of fun."
-Josh replies, "Good job son, tell your mother we should eat them tonight, she'll be very happy."
-Douglas then says, "I wish my family would eat fish tonight, my father is making pancakes."
-
-Question: Who is James' father?
-'''
-
-# right answer is josh
-
-model = ora.CompletionModel.load('b8b12eaa-5d47-44d3-92a6-4d706f2bcacf', 'gpt-4')
-# init conversation (will give you a conversationId)
-init = ora.Completion.create(
- model = model,
- prompt = complex_question)
-
-print(init.completion.choices[0].text) # James' father is Josh.
\ No newline at end of file
diff --git a/testing/you_test.py b/testing/you_test.py
deleted file mode 100644
index 62598e05..00000000
--- a/testing/you_test.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import you
-
-# simple request with links and details
-response = you.Completion.create(
- prompt = "hello world",
- detailed = True,
- includelinks = True,)
-
-print(response)
-
-# {
-# "response": "...",
-# "links": [...],
-# "extra": {...},
-# "slots": {...}
-# }
-# }
-
-#chatbot
-
-chat = []
-
-while True:
- prompt = input("You: ")
-
- response = you.Completion.create(
- prompt = prompt,
- chat = chat)
-
- print("Bot:", response["response"])
-
- chat.append({"question": prompt, "answer": response["response"]})
\ No newline at end of file
diff --git a/unfinished/t3nsor/README.md b/unfinished/t3nsor/README.md
new file mode 100644
index 00000000..2790bf6e
--- /dev/null
+++ b/unfinished/t3nsor/README.md
@@ -0,0 +1,44 @@
+### note: currently patched
+
+### Example: `t3nsor` (use like openai pypi package)
+
+```python
+# Import t3nsor
+import t3nsor
+
+# t3nsor.Completion.create
+# t3nsor.StreamCompletion.create
+
+[...]
+
+```
+
+#### Example Chatbot
+```python
+messages = []
+
+while True:
+ user = input('you: ')
+
+ t3nsor_cmpl = t3nsor.Completion.create(
+ prompt = user,
+ messages = messages
+ )
+
+ print('gpt:', t3nsor_cmpl.completion.choices[0].text)
+
+ messages.extend([
+ {'role': 'user', 'content': user },
+ {'role': 'assistant', 'content': t3nsor_cmpl.completion.choices[0].text}
+ ])
+```
+
+#### Streaming Response:
+
+```python
+for response in t3nsor.StreamCompletion.create(
+ prompt = 'write python code to reverse a string',
+ messages = []):
+
+ print(response.completion.choices[0].text)
+```
diff --git a/unfinished/t3nsor/__init__.py b/unfinished/t3nsor/__init__.py
new file mode 100644
index 00000000..aec45dcf
--- /dev/null
+++ b/unfinished/t3nsor/__init__.py
@@ -0,0 +1,137 @@
+from requests import post
+from time import time
+
+headers = {
+ 'authority': 'www.t3nsor.tech',
+ 'accept': '*/*',
+ 'accept-language': 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
+ 'cache-control': 'no-cache',
+ 'content-type': 'application/json',
+ 'origin': 'https://www.t3nsor.tech',
+ 'pragma': 'no-cache',
+ 'referer': 'https://www.t3nsor.tech/',
+ 'sec-ch-ua': '"Chromium";v="112", "Google Chrome";v="112", "Not:A-Brand";v="99"',
+ 'sec-ch-ua-mobile': '?0',
+ 'sec-ch-ua-platform': '"macOS"',
+ 'sec-fetch-dest': 'empty',
+ 'sec-fetch-mode': 'cors',
+ 'sec-fetch-site': 'same-origin',
+ 'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/112.0.0.0 Safari/537.36',
+}
+
+class T3nsorResponse:
+
+ class Completion:
+
+ class Choices:
+ def __init__(self, choice: dict) -> None:
+ self.text = choice['text']
+ self.content = self.text.encode()
+ self.index = choice['index']
+ self.logprobs = choice['logprobs']
+ self.finish_reason = choice['finish_reason']
+
+ def __repr__(self) -> str:
+ return f'''<__main__.APIResponse.Completion.Choices(\n text = {self.text.encode()},\n index = {self.index},\n logprobs = {self.logprobs},\n finish_reason = {self.finish_reason})object at 0x1337>'''
+
+ def __init__(self, choices: dict) -> None:
+ self.choices = [self.Choices(choice) for choice in choices]
+
+ class Usage:
+ def __init__(self, usage_dict: dict) -> None:
+ self.prompt_tokens = usage_dict['prompt_chars']
+ self.completion_tokens = usage_dict['completion_chars']
+ self.total_tokens = usage_dict['total_chars']
+
+ def __repr__(self):
+ return f'''<__main__.APIResponse.Usage(\n prompt_tokens = {self.prompt_tokens},\n completion_tokens = {self.completion_tokens},\n total_tokens = {self.total_tokens})object at 0x1337>'''
+
+ def __init__(self, response_dict: dict) -> None:
+
+ self.response_dict = response_dict
+ self.id = response_dict['id']
+ self.object = response_dict['object']
+ self.created = response_dict['created']
+ self.model = response_dict['model']
+ self.completion = self.Completion(response_dict['choices'])
+ self.usage = self.Usage(response_dict['usage'])
+
+ def json(self) -> dict:
+ return self.response_dict
+
+class Completion:
+ model = {
+ 'model': {
+ 'id' : 'gpt-3.5-turbo',
+ 'name' : 'Default (GPT-3.5)'
+ }
+ }
+
+ def create(
+ prompt: str = 'hello world',
+ messages: list = []) -> T3nsorResponse:
+
+ response = post('https://www.t3nsor.tech/api/chat', headers = headers, json = Completion.model | {
+ 'messages' : messages,
+ 'key' : '',
+ 'prompt' : prompt
+ })
+
+ return T3nsorResponse({
+ 'id' : f'cmpl-1337-{int(time())}',
+ 'object' : 'text_completion',
+ 'created': int(time()),
+ 'model' : Completion.model,
+ 'choices': [{
+ 'text' : response.text,
+ 'index' : 0,
+ 'logprobs' : None,
+ 'finish_reason' : 'stop'
+ }],
+ 'usage': {
+ 'prompt_chars' : len(prompt),
+ 'completion_chars' : len(response.text),
+ 'total_chars' : len(prompt) + len(response.text)
+ }
+ })
+
+class StreamCompletion:
+ model = {
+ 'model': {
+ 'id' : 'gpt-3.5-turbo',
+ 'name' : 'Default (GPT-3.5)'
+ }
+ }
+
+ def create(
+ prompt: str = 'hello world',
+ messages: list = []) -> T3nsorResponse:
+
+ print('t3nsor api is down, this may not work, refer to another module')
+
+ response = post('https://www.t3nsor.tech/api/chat', headers = headers, stream = True, json = Completion.model | {
+ 'messages' : messages,
+ 'key' : '',
+ 'prompt' : prompt
+ })
+
+ for chunk in response.iter_content(chunk_size = 2046):
+ yield T3nsorResponse({
+ 'id' : f'cmpl-1337-{int(time())}',
+ 'object' : 'text_completion',
+ 'created': int(time()),
+ 'model' : Completion.model,
+
+ 'choices': [{
+ 'text' : chunk.decode(),
+ 'index' : 0,
+ 'logprobs' : None,
+ 'finish_reason' : 'stop'
+ }],
+
+ 'usage': {
+ 'prompt_chars' : len(prompt),
+ 'completion_chars' : len(chunk.decode()),
+ 'total_chars' : len(prompt) + len(chunk.decode())
+ }
+ })
diff --git a/unfinished/writesonic/README.md b/unfinished/writesonic/README.md
new file mode 100644
index 00000000..a658a87c
--- /dev/null
+++ b/unfinished/writesonic/README.md
@@ -0,0 +1,53 @@
+### Example: `writesonic` (use like openai pypi package)
+
+```python
+# import writesonic
+import writesonic
+
+# create account (3-4s)
+account = writesonic.Account.create(logging = True)
+
+# with loging:
+ # 2023-04-06 21:50:25 INFO __main__ -> register success : '{"id":"51aa0809-3053-44f7-922a...' (2s)
+ # 2023-04-06 21:50:25 INFO __main__ -> id : '51aa0809-3053-44f7-922a-2b85d8d07edf'
+ # 2023-04-06 21:50:25 INFO __main__ -> token : 'eyJhbGciOiJIUzI1NiIsInR5cCI6Ik...'
+ # 2023-04-06 21:50:28 INFO __main__ -> got key : '194158c4-d249-4be0-82c6-5049e869533c' (2s)
+
+# simple completion
+response = writesonic.Completion.create(
+ api_key = account.key,
+ prompt = 'hello world'
+)
+
+print(response.completion.choices[0].text) # Hello! How may I assist you today?
+
+# conversation
+
+response = writesonic.Completion.create(
+ api_key = account.key,
+ prompt = 'what is my name ?',
+ enable_memory = True,
+ history_data = [
+ {
+ 'is_sent': True,
+ 'message': 'my name is Tekky'
+ },
+ {
+ 'is_sent': False,
+ 'message': 'hello Tekky'
+ }
+ ]
+)
+
+print(response.completion.choices[0].text) # Your name is Tekky.
+
+# enable internet
+
+response = writesonic.Completion.create(
+ api_key = account.key,
+ prompt = 'who won the quatar world cup ?',
+ enable_google_results = True
+)
+
+print(response.completion.choices[0].text) # Argentina won the 2022 FIFA World Cup tournament held in Qatar ...
+```
\ No newline at end of file
diff --git a/unfinished/writesonic/__init__.py b/unfinished/writesonic/__init__.py
new file mode 100644
index 00000000..7df6f393
--- /dev/null
+++ b/unfinished/writesonic/__init__.py
@@ -0,0 +1,158 @@
+from requests import Session
+from names import get_first_name, get_last_name
+from random import choice
+from requests import post
+from time import time
+from colorama import Fore, init; init()
+
+class logger:
+ @staticmethod
+ def info(string) -> print:
+ import datetime
+ now = datetime.datetime.now()
+ return print(f"{Fore.CYAN}{now.strftime('%Y-%m-%d %H:%M:%S')} {Fore.BLUE}INFO {Fore.MAGENTA}__main__ -> {Fore.RESET}{string}")
+
+class SonicResponse:
+
+ class Completion:
+
+ class Choices:
+ def __init__(self, choice: dict) -> None:
+ self.text = choice['text']
+ self.content = self.text.encode()
+ self.index = choice['index']
+ self.logprobs = choice['logprobs']
+ self.finish_reason = choice['finish_reason']
+
+ def __repr__(self) -> str:
+ return f'''<__main__.APIResponse.Completion.Choices(\n text = {self.text.encode()},\n index = {self.index},\n logprobs = {self.logprobs},\n finish_reason = {self.finish_reason})object at 0x1337>'''
+
+ def __init__(self, choices: dict) -> None:
+ self.choices = [self.Choices(choice) for choice in choices]
+
+ class Usage:
+ def __init__(self, usage_dict: dict) -> None:
+ self.prompt_tokens = usage_dict['prompt_chars']
+ self.completion_tokens = usage_dict['completion_chars']
+ self.total_tokens = usage_dict['total_chars']
+
+ def __repr__(self):
+ return f'''<__main__.APIResponse.Usage(\n prompt_tokens = {self.prompt_tokens},\n completion_tokens = {self.completion_tokens},\n total_tokens = {self.total_tokens})object at 0x1337>'''
+
+ def __init__(self, response_dict: dict) -> None:
+
+ self.response_dict = response_dict
+ self.id = response_dict['id']
+ self.object = response_dict['object']
+ self.created = response_dict['created']
+ self.model = response_dict['model']
+ self.completion = self.Completion(response_dict['choices'])
+ self.usage = self.Usage(response_dict['usage'])
+
+ def json(self) -> dict:
+ return self.response_dict
+
+class Account:
+ session = Session()
+ session.headers = {
+ "connection" : "keep-alive",
+ "sec-ch-ua" : "\"Not_A Brand\";v=\"99\", \"Google Chrome\";v=\"109\", \"Chromium\";v=\"109\"",
+ "accept" : "application/json, text/plain, */*",
+ "content-type" : "application/json",
+ "sec-ch-ua-mobile" : "?0",
+ "user-agent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36",
+ "sec-ch-ua-platform": "\"Windows\"",
+ "sec-fetch-site" : "same-origin",
+ "sec-fetch-mode" : "cors",
+ "sec-fetch-dest" : "empty",
+ # "accept-encoding" : "gzip, deflate, br",
+ "accept-language" : "en-GB,en-US;q=0.9,en;q=0.8",
+ "cookie" : ""
+ }
+
+ @staticmethod
+ def get_user():
+ password = f'0opsYouGoTme@1234'
+ f_name = get_first_name()
+ l_name = get_last_name()
+ hosts = ['gmail.com', 'protonmail.com', 'proton.me', 'outlook.com']
+
+ return {
+ "email" : f"{f_name.lower()}.{l_name.lower()}@{choice(hosts)}",
+ "password" : password,
+ "confirm_password" : password,
+ "full_name" : f'{f_name} {l_name}'
+ }
+
+ @staticmethod
+ def create(logging: bool = False):
+ while True:
+ try:
+ user = Account.get_user()
+ start = time()
+ response = Account.session.post("https://app.writesonic.com/api/session-login", json = user | {
+ "utmParams" : "{}",
+ "visitorId" : "0",
+ "locale" : "en",
+ "userAgent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36",
+ "signInWith" : "password",
+ "request_type" : "signup",
+ })
+
+ if logging:
+ logger.info(f"\x1b[31mregister success\x1b[0m : '{response.text[:30]}...' ({int(time() - start)}s)")
+ logger.info(f"\x1b[31mid\x1b[0m : '{response.json()['id']}'")
+ logger.info(f"\x1b[31mtoken\x1b[0m : '{response.json()['token'][:30]}...'")
+
+ start = time()
+ response = Account.session.post("https://api.writesonic.com/v1/business/set-business-active", headers={"authorization": "Bearer " + response.json()['token']})
+ key = response.json()["business"]["api_key"]
+ if logging: logger.info(f"\x1b[31mgot key\x1b[0m : '{key}' ({int(time() - start)}s)")
+
+ return Account.AccountResponse(user['email'], user['password'], key)
+
+ except Exception as e:
+ if logging: logger.info(f"\x1b[31merror\x1b[0m : '{e}'")
+ continue
+
+ class AccountResponse:
+ def __init__(self, email, password, key):
+ self.email = email
+ self.password = password
+ self.key = key
+
+
+class Completion:
+ def create(
+ api_key: str,
+ prompt: str,
+ enable_memory: bool = False,
+ enable_google_results: bool = False,
+ history_data: list = []) -> SonicResponse:
+
+ response = post('https://api.writesonic.com/v2/business/content/chatsonic?engine=premium', headers = {"X-API-KEY": api_key},
+ json = {
+ "enable_memory" : enable_memory,
+ "enable_google_results" : enable_google_results,
+ "input_text" : prompt,
+ "history_data" : history_data}).json()
+
+ return SonicResponse({
+ 'id' : f'cmpl-premium-{int(time())}',
+ 'object' : 'text_completion',
+ 'created': int(time()),
+ 'model' : 'premium',
+
+ 'choices': [{
+ 'text' : response['message'],
+ 'index' : 0,
+ 'logprobs' : None,
+ 'finish_reason' : 'stop'
+ }],
+
+ 'usage': {
+ 'prompt_chars' : len(prompt),
+ 'completion_chars' : len(response['message']),
+ 'total_chars' : len(prompt) + len(response['message'])
+ }
+ })
\ No newline at end of file
diff --git a/v2.py b/v2.py
new file mode 100644
index 00000000..e69de29b
diff --git a/writesonic/README.md b/writesonic/README.md
deleted file mode 100644
index a658a87c..00000000
--- a/writesonic/README.md
+++ /dev/null
@@ -1,53 +0,0 @@
-### Example: `writesonic` (use like openai pypi package)
-
-```python
-# import writesonic
-import writesonic
-
-# create account (3-4s)
-account = writesonic.Account.create(logging = True)
-
-# with loging:
- # 2023-04-06 21:50:25 INFO __main__ -> register success : '{"id":"51aa0809-3053-44f7-922a...' (2s)
- # 2023-04-06 21:50:25 INFO __main__ -> id : '51aa0809-3053-44f7-922a-2b85d8d07edf'
- # 2023-04-06 21:50:25 INFO __main__ -> token : 'eyJhbGciOiJIUzI1NiIsInR5cCI6Ik...'
- # 2023-04-06 21:50:28 INFO __main__ -> got key : '194158c4-d249-4be0-82c6-5049e869533c' (2s)
-
-# simple completion
-response = writesonic.Completion.create(
- api_key = account.key,
- prompt = 'hello world'
-)
-
-print(response.completion.choices[0].text) # Hello! How may I assist you today?
-
-# conversation
-
-response = writesonic.Completion.create(
- api_key = account.key,
- prompt = 'what is my name ?',
- enable_memory = True,
- history_data = [
- {
- 'is_sent': True,
- 'message': 'my name is Tekky'
- },
- {
- 'is_sent': False,
- 'message': 'hello Tekky'
- }
- ]
-)
-
-print(response.completion.choices[0].text) # Your name is Tekky.
-
-# enable internet
-
-response = writesonic.Completion.create(
- api_key = account.key,
- prompt = 'who won the quatar world cup ?',
- enable_google_results = True
-)
-
-print(response.completion.choices[0].text) # Argentina won the 2022 FIFA World Cup tournament held in Qatar ...
-```
\ No newline at end of file
diff --git a/writesonic/__init__.py b/writesonic/__init__.py
deleted file mode 100644
index 7df6f393..00000000
--- a/writesonic/__init__.py
+++ /dev/null
@@ -1,158 +0,0 @@
-from requests import Session
-from names import get_first_name, get_last_name
-from random import choice
-from requests import post
-from time import time
-from colorama import Fore, init; init()
-
-class logger:
- @staticmethod
- def info(string) -> print:
- import datetime
- now = datetime.datetime.now()
- return print(f"{Fore.CYAN}{now.strftime('%Y-%m-%d %H:%M:%S')} {Fore.BLUE}INFO {Fore.MAGENTA}__main__ -> {Fore.RESET}{string}")
-
-class SonicResponse:
-
- class Completion:
-
- class Choices:
- def __init__(self, choice: dict) -> None:
- self.text = choice['text']
- self.content = self.text.encode()
- self.index = choice['index']
- self.logprobs = choice['logprobs']
- self.finish_reason = choice['finish_reason']
-
- def __repr__(self) -> str:
- return f'''<__main__.APIResponse.Completion.Choices(\n text = {self.text.encode()},\n index = {self.index},\n logprobs = {self.logprobs},\n finish_reason = {self.finish_reason})object at 0x1337>'''
-
- def __init__(self, choices: dict) -> None:
- self.choices = [self.Choices(choice) for choice in choices]
-
- class Usage:
- def __init__(self, usage_dict: dict) -> None:
- self.prompt_tokens = usage_dict['prompt_chars']
- self.completion_tokens = usage_dict['completion_chars']
- self.total_tokens = usage_dict['total_chars']
-
- def __repr__(self):
- return f'''<__main__.APIResponse.Usage(\n prompt_tokens = {self.prompt_tokens},\n completion_tokens = {self.completion_tokens},\n total_tokens = {self.total_tokens})object at 0x1337>'''
-
- def __init__(self, response_dict: dict) -> None:
-
- self.response_dict = response_dict
- self.id = response_dict['id']
- self.object = response_dict['object']
- self.created = response_dict['created']
- self.model = response_dict['model']
- self.completion = self.Completion(response_dict['choices'])
- self.usage = self.Usage(response_dict['usage'])
-
- def json(self) -> dict:
- return self.response_dict
-
-class Account:
- session = Session()
- session.headers = {
- "connection" : "keep-alive",
- "sec-ch-ua" : "\"Not_A Brand\";v=\"99\", \"Google Chrome\";v=\"109\", \"Chromium\";v=\"109\"",
- "accept" : "application/json, text/plain, */*",
- "content-type" : "application/json",
- "sec-ch-ua-mobile" : "?0",
- "user-agent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36",
- "sec-ch-ua-platform": "\"Windows\"",
- "sec-fetch-site" : "same-origin",
- "sec-fetch-mode" : "cors",
- "sec-fetch-dest" : "empty",
- # "accept-encoding" : "gzip, deflate, br",
- "accept-language" : "en-GB,en-US;q=0.9,en;q=0.8",
- "cookie" : ""
- }
-
- @staticmethod
- def get_user():
- password = f'0opsYouGoTme@1234'
- f_name = get_first_name()
- l_name = get_last_name()
- hosts = ['gmail.com', 'protonmail.com', 'proton.me', 'outlook.com']
-
- return {
- "email" : f"{f_name.lower()}.{l_name.lower()}@{choice(hosts)}",
- "password" : password,
- "confirm_password" : password,
- "full_name" : f'{f_name} {l_name}'
- }
-
- @staticmethod
- def create(logging: bool = False):
- while True:
- try:
- user = Account.get_user()
- start = time()
- response = Account.session.post("https://app.writesonic.com/api/session-login", json = user | {
- "utmParams" : "{}",
- "visitorId" : "0",
- "locale" : "en",
- "userAgent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36",
- "signInWith" : "password",
- "request_type" : "signup",
- })
-
- if logging:
- logger.info(f"\x1b[31mregister success\x1b[0m : '{response.text[:30]}...' ({int(time() - start)}s)")
- logger.info(f"\x1b[31mid\x1b[0m : '{response.json()['id']}'")
- logger.info(f"\x1b[31mtoken\x1b[0m : '{response.json()['token'][:30]}...'")
-
- start = time()
- response = Account.session.post("https://api.writesonic.com/v1/business/set-business-active", headers={"authorization": "Bearer " + response.json()['token']})
- key = response.json()["business"]["api_key"]
- if logging: logger.info(f"\x1b[31mgot key\x1b[0m : '{key}' ({int(time() - start)}s)")
-
- return Account.AccountResponse(user['email'], user['password'], key)
-
- except Exception as e:
- if logging: logger.info(f"\x1b[31merror\x1b[0m : '{e}'")
- continue
-
- class AccountResponse:
- def __init__(self, email, password, key):
- self.email = email
- self.password = password
- self.key = key
-
-
-class Completion:
- def create(
- api_key: str,
- prompt: str,
- enable_memory: bool = False,
- enable_google_results: bool = False,
- history_data: list = []) -> SonicResponse:
-
- response = post('https://api.writesonic.com/v2/business/content/chatsonic?engine=premium', headers = {"X-API-KEY": api_key},
- json = {
- "enable_memory" : enable_memory,
- "enable_google_results" : enable_google_results,
- "input_text" : prompt,
- "history_data" : history_data}).json()
-
- return SonicResponse({
- 'id' : f'cmpl-premium-{int(time())}',
- 'object' : 'text_completion',
- 'created': int(time()),
- 'model' : 'premium',
-
- 'choices': [{
- 'text' : response['message'],
- 'index' : 0,
- 'logprobs' : None,
- 'finish_reason' : 'stop'
- }],
-
- 'usage': {
- 'prompt_chars' : len(prompt),
- 'completion_chars' : len(response['message']),
- 'total_chars' : len(prompt) + len(response['message'])
- }
- })
\ No newline at end of file
diff --git a/you_test.py b/you_test.py
new file mode 100644
index 00000000..9c69cf34
--- /dev/null
+++ b/you_test.py
@@ -0,0 +1,32 @@
+import you
+
+# simple request with links and details
+response = you.Completion.create(
+ prompt = "hello world",
+ detailed = True,
+ includelinks = True)
+
+print(response)
+
+# {
+# "response": "...",
+# "links": [...],
+# "extra": {...},
+# "slots": {...}
+# }
+#
+
+#chatbot
+
+chat = []
+
+while True:
+ prompt = input("You: ")
+
+ response = you.Completion.create(
+ prompt = prompt,
+ chat = chat)
+
+ print("Bot:", response["response"])
+
+ chat.append({"question": prompt, "answer": response["response"]})
\ No newline at end of file
--
cgit v1.2.3