-rw-r--r--  README.md                     | 18
-rw-r--r--  docker-compose.yaml           | 12
-rw-r--r--  docker-compose.yml            | 12
-rw-r--r--  gpt4free/forefront/README.md  |  9
-rw-r--r--  gpt4free/theb/README.md       |  9
-rw-r--r--  gpt4free/theb/__init__.py     | 14
6 files changed, 39 insertions, 35 deletions
diff --git a/README.md b/README.md
--- a/README.md
+++ b/README.md
@@ -32,11 +32,11 @@ Just API's from some language model sites.
         <td><a href="https://github.com/xtekky/chatgpt-clone/pulls"><img alt="Pull Requests" src="https://img.shields.io/github/issues-pr/xtekky/chatgpt-clone?style=flat-square&labelColor=343b41"/></a></td>
     </tr>
     <tr>
-        <td><a href="https://github.com/mishalhossin/Coding-Chatbot-Gpt4Free"><b>ChatGpt Discord Bot</b></a></td>
-        <td><a href="https://github.com/mishalhossin/Coding-Chatbot-Gpt4Free/stargazers"><img alt="Stars" src="https://img.shields.io/github/stars/mishalhossin/Coding-Chatbot-Gpt4Free?style=flat-square&labelColor=343b41"/></a></td>
-        <td><a href="https://github.com/mishalhossin/Coding-Chatbot-Gpt4Free/network/members"><img alt="Forks" src="https://img.shields.io/github/forks/mishalhossin/Coding-Chatbot-Gpt4Free?style=flat-square&labelColor=343b41"/></a></td>
-        <td><a href="https://github.com/mishalhossin/Coding-Chatbot-Gpt4Free/issues"><img alt="Issues" src="https://img.shields.io/github/issues/mishalhossin/Coding-Chatbot-Gpt4Free?style=flat-square&labelColor=343b41"/></a></td>
-        <td><a href="https://github.com/mishalhossin/Coding-Chatbot-Gpt4Free/pulls"><img alt="Pull Requests" src="https://img.shields.io/github/issues-pr/mishalhossin/Coding-Chatbot-Gpt4Free?style=flat-square&labelColor=343b41"/></a></td>
+        <td><a href="https://github.com/mishalhossin/Discord-Chatbot-Gpt4Free"><b>ChatGpt Discord Bot</b></a></td>
+        <td><a href="https://github.com/mishalhossin/Discord-Chatbot-Gpt4Free/stargazers"><img alt="Stars" src="https://img.shields.io/github/stars/mishalhossin/Discord-Chatbot-Gpt4Free?style=flat-square&labelColor=343b41"/></a></td>
+        <td><a href="https://github.com/mishalhossin/Discord-Chatbot-Gpt4Free/network/members"><img alt="Forks" src="https://img.shields.io/github/forks/mishalhossin/Discord-Chatbot-Gpt4Free?style=flat-square&labelColor=343b41"/></a></td>
+        <td><a href="https://github.com/mishalhossin/Discord-Chatbot-Gpt4Free/issues"><img alt="Issues" src="https://img.shields.io/github/issues/mishalhossin/Discord-Chatbot-Gpt4Free?style=flat-square&labelColor=343b41"/></a></td>
+        <td><a href="https://github.com/mishalhossin/Coding-Chatbot-Gpt4Free/pulls"><img alt="Pull Requests" src="https://img.shields.io/github/issues-pr/mishalhossin/Discord-Chatbot-Gpt4Free?style=flat-square&labelColor=343b41"/></a></td>
     </tr>
   </tbody>
 </table>
@@ -118,7 +118,7 @@ then run:
 Build
 
 ```
-docker build -t gpt4free:latest -f Docker/Dockerfile .
+docker build -t gpt4free:latest .
 ```
 
 Run
@@ -126,17 +126,13 @@ Run
 ```
 docker run -p 8501:8501 gpt4free:latest
 ```
-Another way - docker-compose (no docker build/run needed)
-```
-docker-compose up -d
-```
 
 ## Deploy using docker-compose
 
 Run the following:
 
 ```
-docker-compose up -d
+docker-compose up --build -d
 ```
 
 ## ChatGPT clone
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 3afd6cdf..8098f359 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -2,8 +2,14 @@ version: "3.9"
 
 services:
   gpt4free:
-    build:
-      context: .
+    build:
+      context: ./
       dockerfile: Dockerfile
+    container_name: dc_gpt4free
+    # environment:
+    #   - http_proxy=http://127.0.0.1:1080 # modify this for your proxy
+    #   - https_proxy=http://127.0.0.1:1080 # modify this for your proxy
+    image: img_gpt4free
     ports:
-      - "8501:8501"
+      - 8501:8501
+    restart: always
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
deleted file mode 100644
index e8e7119b..00000000
--- a/docker-compose.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-version: '3.8'
-
-services:
-  gpt4:
-    build:
-      context: .
-      dockerfile: Dockerfile
-    image: gpt4free:latest
-    container_name: gpt4
-    ports:
-      - 8501:8501
-    restart: unless-stopped
diff --git a/gpt4free/forefront/README.md b/gpt4free/forefront/README.md
index 35ba9897..887097ec 100644
--- a/gpt4free/forefront/README.md
+++ b/gpt4free/forefront/README.md
@@ -6,8 +6,11 @@ from gpt4free import forefront
 token = forefront.Account.create(logging=False)
 print(token)
 # get a response
-for response in forefront.StreamingCompletion.create(token=token,
-                                                     prompt='hello world', model='gpt-4'):
-    print(response.completion.choices[0].text, end='')
+for response in forefront.StreamingCompletion.create(
+    token=token,
+    prompt='hello world',
+    model='gpt-4'
+):
+    print(response.choices[0].text, end='')
 print("")
 ```
\ No newline at end of file
diff --git a/gpt4free/theb/README.md b/gpt4free/theb/README.md
index a4abdf62..a7af9dd8 100644
--- a/gpt4free/theb/README.md
+++ b/gpt4free/theb/README.md
@@ -5,7 +5,10 @@
 from gpt4free import theb
 
 # simple streaming completion
-for token in theb.Completion.create('hello world'):
-    print(token, end='', flush=True)
-print("")
+
+while True:
+    x = input()
+    for token in theb.Completion.create(x):
+        print(token, end='', flush=True)
+    print("")
 ```
diff --git a/gpt4free/theb/__init__.py b/gpt4free/theb/__init__.py
index 75a15068..741de34d 100644
--- a/gpt4free/theb/__init__.py
+++ b/gpt4free/theb/__init__.py
@@ -17,6 +17,7 @@ class Completion:
     timer = None
     message_queue = Queue()
     stream_completed = False
+    last_msg_id = None
 
     @staticmethod
     def request(prompt: str, proxy: Optional[str]=None):
@@ -28,26 +29,33 @@ class Completion:
         }
 
         proxies = {'http': 'http://' + proxy, 'https': 'http://' + proxy} if proxy else None
-
+
+        options = {}
+        if Completion.last_msg_id:
+            options['parentMessageId'] = Completion.last_msg_id
+
         requests.post(
             'https://chatbot.theb.ai/api/chat-process',
             headers=headers,
             proxies=proxies,
             content_callback=Completion.handle_stream_response,
-            json={'prompt': prompt, 'options': {}},
+            json={'prompt': prompt, 'options': options},
         )
 
         Completion.stream_completed = True
 
     @staticmethod
     def create(prompt: str, proxy: Optional[str]=None) -> Generator[str, None, None]:
+        Completion.stream_completed = False
         Thread(target=Completion.request, args=[prompt, proxy]).start()
 
         while not Completion.stream_completed or not Completion.message_queue.empty():
             try:
                 message = Completion.message_queue.get(timeout=0.01)
                 for message in findall(Completion.regex, message):
-                    yield loads(Completion.part1 + message + Completion.part2)['delta']
+                    message_json = loads(Completion.part1 + message + Completion.part2)
+                    Completion.last_msg_id = message_json['id']
+                    yield message_json['delta']
 
             except Empty:
                 pass
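For convenience, here is a sketch of the docker-compose.yaml that this patch leaves behind, reassembled from the context and `+` lines of the hunk above (two-space YAML indentation is assumed for the unchanged lines):

```yaml
version: "3.9"

services:
  gpt4free:
    build:
      context: ./
      dockerfile: Dockerfile
    container_name: dc_gpt4free
    # environment:
    #   - http_proxy=http://127.0.0.1:1080 # modify this for your proxy
    #   - https_proxy=http://127.0.0.1:1080 # modify this for your proxy
    image: img_gpt4free
    ports:
      - 8501:8501
    restart: always
```

With this file at the repository root, `docker-compose up --build -d` (the command the README now documents) builds the image as `img_gpt4free`, names the container `dc_gpt4free`, publishes port 8501, and restarts the container automatically.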
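The `theb` changes add conversation memory: `create()` records the `id` of each streamed message in `Completion.last_msg_id`, and the next `request()` sends it back as `options['parentMessageId']`. A minimal multi-turn sketch built on that behaviour follows; the `ask` helper and the example prompts are illustrative, not part of the patch.

```python
# Sketch only: assumes the patched gpt4free.theb module shown in the diff above.
from gpt4free import theb


def ask(prompt: str) -> None:
    """Stream one reply to stdout, token by token."""
    for token in theb.Completion.create(prompt):  # yields streamed deltas
        print(token, end='', flush=True)
    print()


if __name__ == '__main__':
    # Completion.last_msg_id persists between calls, so the second request
    # is sent with options['parentMessageId'] set and the model keeps context.
    ask('My name is Alice, remember that.')
    ask('What is my name?')
```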