-rw-r--r-- | README.md                                                       | 15
-rw-r--r-- | unfinished/cocalc/__init__.py (renamed from cocalc/__init__.py) | 15
-rw-r--r-- | unfinished/cocalc/cocalc_test.py                                |  8
3 files changed, 15 insertions, 23 deletions
diff --git a/README.md b/README.md
--- a/README.md
+++ b/README.md
@@ -21,7 +21,6 @@ This repository provides reverse-engineered language models from various sources
 - [`ora`](#example-ora)
 - [`writesonic`](#example-writesonic)
 - [`you`](#example-you)
-- [`cocalc`](#example-cocalc)
 
 ## Current Sites <a name="current-sites"></a>
 
@@ -33,7 +32,6 @@ This repository provides reverse-engineered language models from various sources
 | [t3nsor.com](https://t3nsor.com)|GPT-3.5|
 | [you.com](https://you.com)|GPT-3.5 / Internet / good search|
 | [phind.com](https://phind.com)|GPT-4 / Internet / good search|
-| [cocalc.com](https://cocalc.com)|GPT-3.5 / Unknown Internet|
 
 ## Sites with Authentication <a name="sites-with-authentication"></a>
 
@@ -305,19 +303,6 @@ while True:
     chat.append({"question": prompt, "answer": response["response"]})
 ```
 
-### Example: `cocalc` (use like openai pypi package) <a name="example-cocalc"></a>
-
-```python
-import cocalc
-
-response = you.Completion.create(
-    prompt = "Hello World", # Required
-    system_prompt = "You are ChatGPT" # Optional
-)
-
-print(response) # Just text response
-```
-
 ## Dependencies
 
 The repository is written in Python and requires the following packages:
diff --git a/cocalc/__init__.py b/unfinished/cocalc/__init__.py
index a77934e4..fa565a45 100644
--- a/cocalc/__init__.py
+++ b/unfinished/cocalc/__init__.py
@@ -4,25 +4,24 @@ import json
 
 class Completion:
     def create(
         prompt: str = "What is the square root of pi",
-        system_prompt: str = "ASSUME I HAVE FULL ACCESS TO COCALC. ENCLOSE MATH IN $. INCLUDE THE LANGUAGE DIRECTLY AFTER THE TRIPLE BACKTICKS IN ALL MARKDOWN CODE BLOCKS. How can I do the following using CoCalc? "
-    ) -> str:
+        system_prompt: str = "ASSUME I HAVE FULL ACCESS TO COCALC. ENCLOSE MATH IN $. INCLUDE THE LANGUAGE DIRECTLY AFTER THE TRIPLE BACKTICKS IN ALL MARKDOWN CODE BLOCKS. How can I do the following using CoCalc? ") -> str:
 
-        client = Session(client_identifier="chrome_108")
+        client = Session()
         client.headers = {
-            'Accept': */*
-            'Accept-Language': en-US,en;q=0.5
+            'Accept': '*/*',
+            'Accept-Language': 'en-US,en;q=0.5',
             "origin" : "https://cocalc.com",
             "referer" : "https://cocalc.com/api/v2/openai/chatgpt",
-            'cookie' : "Cookie: CC_ANA=c68b16f3-f74c-403f-8e18-1d89168b2d13; "
             "user-agent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36",
         }
+
         payload = {
             "input": prompt,
             "system": system_prompt,
             "tag": "next:index"
         }
 
-        response = client.post(f"https://cocalc.com/api/v2/openai/chatgpt", json=payload)
+        response = client.post(f"https://cocalc.com/api/v2/openai/chatgpt", json=payload).json()
 
-        return json.loads(response)["output"]
+        return response
diff --git a/unfinished/cocalc/cocalc_test.py b/unfinished/cocalc/cocalc_test.py
new file mode 100644
index 00000000..0e1a7b3b
--- /dev/null
+++ b/unfinished/cocalc/cocalc_test.py
@@ -0,0 +1,8 @@
+import cocalc
+
+
+response = cocalc.Completion.create(
+    prompt = 'hello world'
+)
+
+print(response)
\ No newline at end of file
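Usage note (not part of the diff): after this change, `Completion.create` posts the prompt to `https://cocalc.com/api/v2/openai/chatgpt` and returns the parsed JSON response instead of only its `output` field. The sketch below shows how a caller might adapt; it is only an illustration, assuming the endpoint still returns a JSON object with an `output` key (as the pre-change code implied) and that the `unfinished/` directory is on `sys.path` so `import cocalc` resolves to this package, as in `cocalc_test.py`.

```python
# Illustrative caller sketch, not part of the commit.
# Assumptions: run with unfinished/ on sys.path, and the endpoint still
# returns a JSON object containing an "output" field.
import cocalc

response = cocalc.Completion.create(
    prompt="hello world"  # same prompt used in cocalc_test.py
)

# create() now returns the whole parsed JSON dict rather than just the text,
# so the caller extracts the answer itself (falling back to the raw dict).
print(response.get("output", response))
```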