-rw-r--r--  .github/workflows/publish-workflow.yaml   33
-rw-r--r--  Dockerfile                                  6
-rw-r--r--  docker-compose.yml                         15
-rw-r--r--  g4f/Provider/ChatBase.py                   15
-rw-r--r--  g4f/Provider/__init__.py                    9
-rw-r--r--  g4f/models.py                               2
6 files changed, 57 insertions, 23 deletions
diff --git a/.github/workflows/publish-workflow.yaml b/.github/workflows/publish-workflow.yaml
new file mode 100644
index 00000000..07567a2f
--- /dev/null
+++ b/.github/workflows/publish-workflow.yaml
@@ -0,0 +1,33 @@
+name: Publish Docker image
+
+on:
+  push:
+    tags:
+      - '**'
+
+jobs:
+  publish:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Setup Buildx
+        uses: docker/setup-buildx-action@v3
+      - name: Checkout repository
+        uses: actions/checkout@v4
+      - name: Get metadata for Docker
+        id: metadata
+        uses: docker/metadata-action@v5
+        with:
+          images: ghcr.io/${{ github.repository }}
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v3
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+      - name: Build and push image
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          push: true
+          tags: ${{ steps.metadata.outputs.tags }}
+          labels: ${{ steps.metadata.outputs.labels }}
diff --git a/Dockerfile b/Dockerfile
index a4ab3733..3cb86bec 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -26,8 +26,8 @@ RUN pip install --upgrade pip && pip install -r requirements.txt
 # This may include all code, assets, and configuration files required to run the application.
 COPY . /app/
 
-# Expose port 1337
-EXPOSE 1337
+# Expose port 80 and 1337
+EXPOSE 80 1337
 
 # Define the default command to run the app using Python's module mode.
-CMD ["python", "-m", "g4f.api.run"]
\ No newline at end of file
+ENTRYPOINT ["python", "-m", "g4f.cli"]
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
index b044ed81..0ecceefd 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,13 +1,18 @@
 version: '3'
 
 services:
-  gpt4free:
+  gpt4free-api: &gpt4free
+    image: gpt4free:latest
     build:
       context: .
       dockerfile: Dockerfile
-    volumes:
-      - .:/app
+      cache_from:
+        - gpt4free:latest
     ports:
       - '1337:1337'
-    environment:
-      - PYTHONUNBUFFERED=1
\ No newline at end of file
+    command: api
+  gpt4free-gui:
+    <<: *gpt4free
+    ports:
+      - '8080:80'
+    command: gui
\ No newline at end of file
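
Note: with docker compose up, the gpt4free-api service is reachable on host port 1337 and the GUI on host port 8080 (container port 80). A quick smoke test from Python could look like the sketch below; the OpenAI-style /chat/completions route and payload are assumptions about the bundled API, not something this diff shows.

# Rough smoke test against the gpt4free-api container started by docker-compose.
# ASSUMPTION: the API exposes an OpenAI-style POST /chat/completions endpoint;
# check g4f.api for the real routes, which are not part of this diff.
import requests

resp = requests.post(
    "http://localhost:1337/chat/completions",
    json={
        "model": "gpt-3.5-turbo",
        "messages": [{"role": "user", "content": "Say hello"}],
    },
    timeout=60,
)
resp.raise_for_status()
print(resp.json())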
diff --git a/g4f/Provider/ChatBase.py b/g4f/Provider/ChatBase.py
index ce5160d8..3d45b40b 100644
--- a/g4f/Provider/ChatBase.py
+++ b/g4f/Provider/ChatBase.py
@@ -9,7 +9,6 @@ from .base_provider import AsyncGeneratorProvider
 class ChatBase(AsyncGeneratorProvider):
     url = "https://www.chatbase.co"
     supports_gpt_35_turbo = True
-    supports_gpt_4 = True
     working = True
 
     @classmethod
@@ -20,12 +19,8 @@ class ChatBase(AsyncGeneratorProvider):
         proxy: str = None,
         **kwargs
     ) -> AsyncResult:
-        if model == "gpt-4":
-            chat_id = "quran---tafseer-saadi-pdf-wbgknt7zn"
-        elif model == "gpt-3.5-turbo" or not model:
-            chat_id = "chatbase--1--pdf-p680fxvnm"
-        else:
-            raise ValueError(f"Model are not supported: {model}")
+        chat_id = 'z2c2HSfKnCTh5J4650V0I'
+
         headers = {
             "User-Agent" : "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36",
             "Accept" : "*/*",
@@ -36,21 +31,19 @@ class ChatBase(AsyncGeneratorProvider):
"Sec-Fetch-Mode" : "cors",
"Sec-Fetch-Site" : "same-origin",
}
- async with ClientSession(
- headers=headers
- ) as session:
+ async with ClientSession(headers=headers) as session:
data = {
"messages": messages,
"captchaCode": "hadsa",
"chatId": chat_id,
"conversationId": f"kcXpqEnqUie3dnJlsRi_O-{chat_id}"
}
+
async with session.post("https://www.chatbase.co/api/fe/chat", json=data, proxy=proxy) as response:
response.raise_for_status()
async for stream in response.content.iter_any():
yield stream.decode()
-
@classmethod
@property
def params(cls):
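
Note: with GPT-4 support dropped and a single hard-coded chat_id, ChatBase now only advertises gpt-3.5-turbo. A minimal usage sketch through g4f's public ChatCompletion interface (streaming chosen to match the generator provider):

# Minimal sketch: calling the reworked ChatBase provider through g4f.
import g4f
from g4f.Provider import ChatBase

response = g4f.ChatCompletion.create(
    model="gpt-3.5-turbo",   # gpt-4 is no longer offered by this provider
    provider=ChatBase,
    messages=[{"role": "user", "content": "Hello"}],
    stream=True,
)
for chunk in response:
    print(chunk, end="", flush=True)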
diff --git a/g4f/Provider/__init__.py b/g4f/Provider/__init__.py
index 857b814d..26f523c7 100644
--- a/g4f/Provider/__init__.py
+++ b/g4f/Provider/__init__.py
@@ -1,4 +1,4 @@
-from __future__ import annotations
+from __future__ import annotations
 from .Acytoo import Acytoo
 from .AiAsk import AiAsk
 from .Aibn import Aibn
@@ -99,7 +99,12 @@ class ProviderUtils:
         'Wuguokai': Wuguokai,
         'Ylokh': Ylokh,
         'You': You,
-        'Yqcloud': Yqcloud
+        'Yqcloud': Yqcloud,
+
+        'BaseProvider': BaseProvider,
+        'AsyncProvider': AsyncProvider,
+        'AsyncGeneratorProvider': AsyncGeneratorProvider,
+        'RetryProvider': RetryProvider,
     }
 
 __all__ = [
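
Note: registering the base classes alongside the concrete providers lets callers resolve any of them from a plain string. A hedged sketch, assuming the dict shown above is exposed as ProviderUtils.convert (the attribute name itself is outside this hunk):

# Name-based lookup through the mapping extended above.
# ASSUMPTION: the dict is exposed as ProviderUtils.convert; only its entries,
# not the attribute name, appear in this hunk.
from g4f.Provider import ProviderUtils

provider_cls = ProviderUtils.convert["ChatBase"]
retry_cls = ProviderUtils.convert["RetryProvider"]  # one of the newly registered base classes
print(provider_cls.working, retry_cls.__name__)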
diff --git a/g4f/models.py b/g4f/models.py
index 3561891b..c2d9b89b 100644
--- a/g4f/models.py
+++ b/g4f/models.py
@@ -12,7 +12,6 @@ from .Provider import (
     ChatgptAi,
     GptChatly,
     AItianhu,
-    ChatBase,
     Liaobots,
     Yqcloud,
     Myshell,
@@ -42,7 +41,6 @@ default = Model(
     best_provider = RetryProvider([
         Bing, # Not fully GPT 3 or 4
         Yqcloud, # Answers short questions in chinese
-        ChatBase, # Don't want to answer creatively
         ChatgptDuo, # Include search results
         Aibn, Aichat, ChatgptAi, ChatgptLogin, FreeGpt, GptGo, Myshell, Ylokh,
     ])