Diffstat (limited to 'g4f/gui')
-rw-r--r--   g4f/gui/client/index.html             |    5
-rw-r--r--   g4f/gui/client/static/css/style.css   |   37
-rw-r--r--   g4f/gui/client/static/js/chat.v1.js   |  244
-rw-r--r--   g4f/gui/server/api.py                 |   17
-rw-r--r--   g4f/gui/server/backend.py             |   69
5 files changed, 276 insertions, 96 deletions
diff --git a/g4f/gui/client/index.html b/g4f/gui/client/index.html
index 6c2ad8b6..8cbcd578 100644
--- a/g4f/gui/client/index.html
+++ b/g4f/gui/client/index.html
@@ -191,6 +191,9 @@
                 <button class="slide-systemPrompt">
                     <i class="fa-solid fa-angles-up"></i>
                 </button>
+                <div class="media_player">
+                    <i class="fa-regular fa-x"></i>
+                </div>
                 <div class="toolbar">
                     <div id="input-count" class="">
                         <button class="hide-input">
@@ -224,7 +227,7 @@
                             <i class="fa-solid fa-camera"></i>
                         </label>
                         <label class="file-label" for="file">
-                            <input type="file" id="file" name="file" accept="text/plain, text/html, text/xml, application/json, text/javascript, .sh, .py, .php, .css, .yaml, .sql, .log, .csv, .twig, .md" required/>
+                            <input type="file" id="file" name="file" accept="text/plain, text/html, text/xml, application/json, text/javascript, .har, .sh, .py, .php, .css, .yaml, .sql, .log, .csv, .twig, .md" required/>
                             <i class="fa-solid fa-paperclip"></i>
                         </label>
                         <label class="micro-label" for="micro">
diff --git a/g4f/gui/client/static/css/style.css b/g4f/gui/client/static/css/style.css
index c4b61d87..57b75bae 100644
--- a/g4f/gui/client/static/css/style.css
+++ b/g4f/gui/client/static/css/style.css
@@ -259,7 +259,6 @@ body {
     flex-direction: column;
     gap: var(--section-gap);
     padding: var(--inner-gap) var(--section-gap);
-    padding-bottom: 0;
 }
 
 .message.print {
@@ -271,7 +270,11 @@ body {
 }
 
 .message.regenerate {
-    opacity: 0.75;
+    background-color: var(--colour-6);
+}
+
+.white .message.regenerate {
+    background-color: var(--colour-4);
 }
 
 .message:last-child {
@@ -407,6 +410,7 @@ body {
 
 .message .count .fa-clipboard.clicked,
 .message .count .fa-print.clicked,
+.message .count .fa-rotate.clicked,
 .message .count .fa-volume-high.active {
     color: var(--accent);
 }
@@ -430,6 +434,28 @@ body {
     font-size: 12px;
 }
 
+.media_player {
+    display: none;
+}
+
+.media_player audio {
+    right: 28px;
+    position: absolute;
+    top: -4px;
+    z-index: 900;
+}
+
+.media_player.show {
+    display: block;
+}
+
+.media_player .fa-x {
+    position: absolute;
+    right: 8px;
+    top: 8px;
+    z-index: 1000;
+}
+
 .count_total {
     font-size: 12px;
     padding-left: 25px;
@@ -1159,7 +1185,10 @@ a:-webkit-any-link {
     .message .user {
         display: none;
     }
-    .message.regenerate {
-        opacity: 1;
+    body {
+        height: auto;
+    }
+    .box {
+        backdrop-filter: none;
     }
 }
diff --git a/g4f/gui/client/static/js/chat.v1.js b/g4f/gui/client/static/js/chat.v1.js
index 0136f9c4..fd9cc50e 100644
--- a/g4f/gui/client/static/js/chat.v1.js
+++ b/g4f/gui/client/static/js/chat.v1.js
@@ -28,6 +28,7 @@ let message_storage = {};
 let controller_storage = {};
 let content_storage = {};
 let error_storage = {};
+let synthesize_storage = {};
 
 messageInput.addEventListener("blur", () => {
     window.scrollTo(0, 0);
@@ -43,17 +44,18 @@ appStorage = window.localStorage || {
     removeItem: (key) => delete self[key],
     length: 0
 }
-
-const markdown = window.markdownit();
-const markdown_render = (content) => {
-    return markdown.render(content
-        .replaceAll(/<!-- generated images start -->|<!-- generated images end -->/gm, "")
-        .replaceAll(/<img data-prompt="[^>]+">/gm, "")
-    )
-        .replaceAll("<a href=", '<a target="_blank" href=')
-        .replaceAll('<code>', '<code class="language-plaintext">')
+let markdown_render = () => null;
+if (window.markdownit) {
+    const markdown = window.markdownit();
+    markdown_render = (content) => {
+        return markdown.render(content
+            .replaceAll(/<!-- generated images start -->|<!-- generated images end -->/gm, "")
+            .replaceAll(/<img data-prompt="[^>]+">/gm, "")
+        )
+            .replaceAll("<a href=", '<a target="_blank" href=')
+            .replaceAll('<code>', '<code class="language-plaintext">')
+    }
 }
-
 function filter_message(text) {
     return text.replaceAll(
         /<!-- generated images start -->[\s\S]+<!-- generated images end -->/gm, ""
@@ -134,6 +136,24 @@
         if (!("click" in el.dataset)) {
             el.dataset.click = "true";
             el.addEventListener("click", async () => {
+                const message_el = el.parentElement.parentElement.parentElement;
+                let audio;
+                if (message_el.dataset.synthesize_url) {
+                    el.classList.add("active");
+                    setTimeout(()=>el.classList.remove("active"), 2000);
+                    const media_player = document.querySelector(".media_player");
+                    if (!media_player.classList.contains("show")) {
+                        media_player.classList.add("show");
+                        audio = new Audio(message_el.dataset.synthesize_url);
+                        audio.controls = true;
+                        media_player.appendChild(audio);
+                    } else {
+                        audio = media_player.querySelector("audio");
+                        audio.src = message_el.dataset.synthesize_url;
+                    }
+                    audio.play();
+                    return;
+                }
                 let playlist = [];
                 function play_next() {
                     const next = playlist.shift();
@@ -155,8 +175,7 @@
                     el.dataset.running = true;
                     el.classList.add("blink")
                     el.classList.add("active")
-                    const content_el = el.parentElement.parentElement;
-                    const message_el = content_el.parentElement;
+
                     let speechText = await get_message(window.conversation_id, message_el.dataset.index);
 
                     speechText = speechText.replaceAll(/([^0-9])\./gm, "$1.;");
@@ -215,8 +234,8 @@
                 const message_el = el.parentElement.parentElement.parentElement;
                 el.classList.add("clicked");
                 setTimeout(() => el.classList.remove("clicked"), 1000);
-                await ask_gpt(message_el.dataset.index, get_message_id());
-            })
+                await ask_gpt(get_message_id(), message_el.dataset.index);
+            });
         }
     });
     document.querySelectorAll(".message .fa-whatsapp").forEach(async (el) => {
@@ -301,25 +320,29 @@
                     <i class="fa-regular fa-clipboard"></i>
                     <a><i class="fa-brands fa-whatsapp"></i></a>
                     <i class="fa-solid fa-print"></i>
+                    <i class="fa-solid fa-rotate"></i>
                 </div>
             </div>
         </div>
     `;
     highlight(message_box);
-    stop_generating.classList.remove("stop_generating-hidden");
-    await ask_gpt(-1, message_id);
+    await ask_gpt(message_id);
 };
 
-async function remove_cancel_button() {
+async function safe_remove_cancel_button() {
+    for (let key in controller_storage) {
+        if (!controller_storage[key].signal.aborted) {
+            return;
+        }
+    }
     stop_generating.classList.add("stop_generating-hidden");
 }
 
 regenerate.addEventListener("click", async () => {
     regenerate.classList.add("regenerate-hidden");
     setTimeout(()=>regenerate.classList.remove("regenerate-hidden"), 3000);
-    stop_generating.classList.remove("stop_generating-hidden");
     await hide_message(window.conversation_id);
-    await ask_gpt(-1, get_message_id());
+    await ask_gpt(get_message_id());
 });
 
 stop_generating.addEventListener("click", async () => {
@@ -337,21 +360,28 @@
             }
         }
     }
-    await load_conversation(window.conversation_id);
+    await load_conversation(window.conversation_id, false);
+});
+
+document.querySelector(".media_player .fa-x").addEventListener("click", ()=>{
+    const media_player = document.querySelector(".media_player");
+    media_player.classList.remove("show");
+    const audio = document.querySelector(".media_player audio");
+    media_player.removeChild(audio);
 });
 
 const prepare_messages = (messages, message_index = -1) => {
+    if (message_index >= 0) {
+        messages = messages.filter((_, index) => message_index >= index);
+    }
+
     // Removes none user messages at end
-    if (message_index == -1) {
-        let last_message;
-        while (last_message = messages.pop()) {
-            if (last_message["role"] == "user") {
-                messages.push(last_message);
-                break;
-            }
+    let last_message;
+    while (last_message = messages.pop()) {
+        if (last_message["role"] == "user") {
+            messages.push(last_message);
+            break;
         }
-    } else if (message_index >= 0) {
-        messages = messages.filter((_, index) => message_index >= index);
     }
 
     let new_messages = [];
@@ -377,9 +407,11 @@
             // Remove generated images from history
             new_message.content = filter_message(new_message.content);
             delete new_message.provider;
+            delete new_message.synthesize;
             new_messages.push(new_message)
         }
     });
+
     return new_messages;
 }
 
@@ -427,6 +459,8 @@ async function add_message_chunk(message, message_id) {
         let p = document.createElement("p");
         p.innerText = message.log;
         log_storage.appendChild(p);
+    } else if (message.type == "synthesize") {
+        synthesize_storage[message_id] = message.synthesize;
     }
     let scroll_down = ()=>{
         if (message_box.scrollTop >= message_box.scrollHeight - message_box.clientHeight - 100) {
@@ -434,8 +468,10 @@
             message_box.scrollTo({ top: message_box.scrollHeight, behavior: "auto" });
         }
     }
-    setTimeout(scroll_down, 200);
-    setTimeout(scroll_down, 1000);
+    if (!content_map.container.classList.contains("regenerate")) {
+        scroll_down();
+        setTimeout(scroll_down, 200);
+    }
 }
 
 cameraInput?.addEventListener("click", (e) => {
@@ -452,45 +488,58 @@ imageInput?.addEventListener("click", (e) => {
     }
 });
 
-const ask_gpt = async (message_index = -1, message_id) => {
+const ask_gpt = async (message_id, message_index = -1) => {
     let messages = await get_messages(window.conversation_id);
-    let total_messages = messages.length;
     messages = prepare_messages(messages, message_index);
-    message_index = total_messages
     message_storage[message_id] = "";
-    stop_generating.classList.remove(".stop_generating-hidden");
+    stop_generating.classList.remove("stop_generating-hidden");
 
-    message_box.scrollTop = message_box.scrollHeight;
-    window.scrollTo(0, 0);
+    if (message_index == -1) {
+        await scroll_to_bottom();
+    }
 
     let count_total = message_box.querySelector('.count_total');
     count_total ? count_total.parentElement.removeChild(count_total) : null;
 
-    message_box.innerHTML += `
-        <div class="message" data-index="${message_index}">
-            <div class="assistant">
-                ${gpt_image}
-                <i class="fa-solid fa-xmark"></i>
-                <i class="fa-regular fa-phone-arrow-down-left"></i>
-            </div>
-            <div class="content" id="gpt_${message_id}">
-                <div class="provider"></div>
-                <div class="content_inner"><span class="cursor"></span></div>
-                <div class="count"></div>
-            </div>
+    const message_el = document.createElement("div");
+    message_el.classList.add("message");
+    if (message_index != -1) {
+        message_el.classList.add("regenerate");
+    }
+    message_el.innerHTML += `
+        <div class="assistant">
+            ${gpt_image}
+            <i class="fa-solid fa-xmark"></i>
+            <i class="fa-regular fa-phone-arrow-down-left"></i>
+        </div>
+        <div class="content" id="gpt_${message_id}">
+            <div class="provider"></div>
+            <div class="content_inner"><span class="cursor"></span></div>
+            <div class="count"></div>
         </div>
     `;
+    if (message_index == -1) {
+        message_box.appendChild(message_el);
+    } else {
+        parent_message = message_box.querySelector(`.message[data-index="${message_index}"]`);
+        if (!parent_message) {
+            return;
+        }
+        parent_message.after(message_el);
+    }
     controller_storage[message_id] = new AbortController();
     let content_el = document.getElementById(`gpt_${message_id}`)
     let content_map = content_storage[message_id] = {
+        container: message_el,
         content: content_el,
         inner: content_el.querySelector('.content_inner'),
        count: content_el.querySelector('.count'),
     }
-
-    await scroll_to_bottom();
+    if (message_index == -1) {
+        await scroll_to_bottom();
+    }
     try {
         const input = imageInput && imageInput.files.length > 0 ? imageInput : cameraInput;
         const file = input && input.files.length > 0 ? input.files[0] : null;
@@ -527,14 +576,23 @@
     delete controller_storage[message_id];
     if (!error_storage[message_id] && message_storage[message_id]) {
         const message_provider = message_id in provider_storage ? provider_storage[message_id] : null;
-        await add_message(window.conversation_id, "assistant", message_storage[message_id], message_provider);
-        await safe_load_conversation(window.conversation_id);
+        await add_message(
+            window.conversation_id,
+            "assistant",
+            message_storage[message_id],
+            message_provider,
+            message_index,
+            synthesize_storage[message_id]
+        );
+        await safe_load_conversation(window.conversation_id, message_index == -1);
     } else {
-        let cursorDiv = message_box.querySelector(".cursor");
+        let cursorDiv = message_el.querySelector(".cursor");
         if (cursorDiv) cursorDiv.parentNode.removeChild(cursorDiv);
     }
-    await scroll_to_bottom();
-    await remove_cancel_button();
+    if (message_index == -1) {
+        await scroll_to_bottom();
+    }
+    await safe_remove_cancel_button();
    await register_message_buttons();
     await load_conversations();
     regenerate.classList.remove("regenerate-hidden");
@@ -687,8 +745,17 @@ const load_conversation = async (conversation_id, scroll=true) => {
                 ${item.provider.model ? ' with ' + item.provider.model : ''}
             </div>
         ` : "";
+        let synthesize_params = {text: item.content}
+        let synthesize_provider = "Gemini";
+        if (item.synthesize) {
+            synthesize_params = item.synthesize.data
+            synthesize_provider = item.synthesize.provider;
+        }
+        synthesize_params = (new URLSearchParams(synthesize_params)).toString();
+        let synthesize_url = `/backend-api/v2/synthesize/${synthesize_provider}?${synthesize_params}`;
+
         elements += `
-            <div class="message${item.regenerate ? " regenerate": ""}" data-index="${i}">
+            <div class="message${item.regenerate ? " regenerate": ""}" data-index="${i}" data-synthesize_url="${synthesize_url}">
                 <div class="${item.role}">
                     ${item.role == "assistant" ? gpt_image : user_image}
                     <i class="fa-solid fa-xmark"></i>
@@ -706,6 +773,7 @@
                         <i class="fa-regular fa-clipboard"></i>
                         <a><i class="fa-brands fa-whatsapp"></i></a>
                         <i class="fa-solid fa-print"></i>
+                        <i class="fa-solid fa-rotate"></i>
                     </div>
                 </div>
             </div>
@@ -830,14 +898,35 @@ const get_message = async (conversation_id, index) => {
     return messages[index]["content"];
 };
 
-const add_message = async (conversation_id, role, content, provider) => {
+const add_message = async (
+    conversation_id, role, content,
+    provider = null,
+    message_index = -1,
+    synthesize_data = null
+) => {
     const conversation = await get_conversation(conversation_id);
     if (!conversation) return;
-    conversation.items.push({
+    const new_message = {
         role: role,
         content: content,
-        provider: provider
-    });
+        provider: provider,
+    };
+    if (synthesize_data) {
+        new_message.synthesize = synthesize_data;
+    }
+    if (message_index == -1) {
+        conversation.items.push(new_message);
+    } else {
+        const new_messages = [];
+        conversation.items.forEach((item, index)=>{
+            new_messages.push(item);
+            if (index == message_index) {
+                new_message.regenerate = true;
+                new_messages.push(new_message);
+            }
+        });
+        conversation.items = new_messages;
+    }
     await save_conversation(conversation_id, conversation);
     return conversation.items.length - 1;
 };
@@ -1267,17 +1356,25 @@ fileInput.addEventListener('click', async (event) => {
     delete fileInput.dataset.text;
 });
 
+async function upload_cookies() {
+    const file = fileInput.files[0];
+    const formData = new FormData();
+    formData.append('file', file);
+    response = await fetch("/backend-api/v2/upload_cookies", {
+        method: 'POST',
+        body: formData,
+    });
+    if (response.status == 200) {
+        inputCount.innerText = `${file.name} was uploaded successfully`;
+    }
+    fileInput.value = "";
+}
+
 fileInput.addEventListener('change', async (event) => {
     if (fileInput.files.length) {
-        type = fileInput.files[0].type;
-        if (type && type.indexOf('/')) {
-            type = type.split('/').pop().replace('x-', '')
-            type = type.replace('plain', 'plaintext')
-                .replace('shellscript', 'sh')
-                .replace('svg+xml', 'svg')
-                .replace('vnd.trolltech.linguist', 'ts')
-        } else {
-            type = fileInput.files[0].name.split('.').pop()
+        type = fileInput.files[0].name.split('.').pop()
+        if (type == "har") {
+            return await upload_cookies();
         }
         fileInput.dataset.type = type
         const reader = new FileReader();
@@ -1286,14 +1383,19 @@
             if (type == "json") {
                 const data = JSON.parse(fileInput.dataset.text);
                 if ("g4f" in data.options) {
+                    let count = 0;
                     Object.keys(data).forEach(key => {
                         if (key != "options" && !localStorage.getItem(key)) {
                             appStorage.setItem(key, JSON.stringify(data[key]));
-                        }
+                            count += 1;
+                        }
                     });
                     delete fileInput.dataset.text;
                     await load_conversations();
                     fileInput.value = "";
+                    inputCount.innerText = `${count} Conversations were imported successfully`;
+                } else {
+                    await upload_cookies();
                 }
             }
         });
diff --git a/g4f/gui/server/api.py b/g4f/gui/server/api.py
index 00eb7182..ecf7bc54 100644
--- a/g4f/gui/server/api.py
+++ b/g4f/gui/server/api.py
@@ -13,7 +13,7 @@
 from g4f.errors import VersionNotFoundError
 from g4f.image import ImagePreview, ImageResponse, copy_images, ensure_images_dir, images_dir
 from g4f.Provider import ProviderType, __providers__, __map__
 from g4f.providers.base_provider import ProviderModelMixin
-from g4f.providers.response import BaseConversation, FinishReason
+from g4f.providers.response import BaseConversation, FinishReason, SynthesizeData
 from g4f.client.service import convert_to_provider
 from g4f import debug
@@ -140,13 +140,12 @@ class Api:
         }
 
     def _create_response_stream(self, kwargs: dict, conversation_id: str, provider: str, download_images: bool = True) -> Iterator:
-        if debug.logging:
-            debug.logs = []
-            print_callback = debug.log_handler
-            def log_handler(text: str):
-                debug.logs.append(text)
-                print_callback(text)
-            debug.log_handler = log_handler
+        debug.logs = []
+        print_callback = debug.log_handler
+        def log_handler(text: str):
+            debug.logs.append(text)
+            print_callback(text)
+        debug.log_handler = log_handler
         try:
             result = ChatCompletion.create(**kwargs)
             first = True
@@ -177,6 +176,8 @@ class Api:
                         images = asyncio.run(copy_images(chunk.get_list(), chunk.options.get("cookies")))
                         images = ImageResponse(images, chunk.alt)
                     yield self._format_json("content", str(images))
+                elif isinstance(chunk, SynthesizeData):
+                    yield self._format_json("synthesize", chunk.to_json())
                 elif not isinstance(chunk, FinishReason):
                     yield self._format_json("content", str(chunk))
             if debug.logs:
diff --git a/g4f/gui/server/backend.py b/g4f/gui/server/backend.py
index 917d779e..3dcae546 100644
--- a/g4f/gui/server/backend.py
+++ b/g4f/gui/server/backend.py
@@ -1,8 +1,28 @@
 import json
+import flask
+import os
+import logging
+import asyncio
 from flask import request, Flask
+from typing import Generator
+from werkzeug.utils import secure_filename
+
 from g4f.image import is_allowed_extension, to_image
+from g4f.client.service import convert_to_provider
+from g4f.providers.asyncio import to_sync_generator
+from g4f.errors import ProviderNotFoundError
+from g4f.cookies import get_cookies_dir
 from .api import Api
 
+logger = logging.getLogger(__name__)
+
+def safe_iter_generator(generator: Generator) -> Generator:
+    start = next(generator)
+    def iter_generator():
+        yield start
+        yield from generator
+    return iter_generator()
+
 class Backend_Api(Api):
     """
     Handles various endpoints in a Flask application for backend operations.
@@ -47,8 +67,12 @@
                 'function': self.handle_conversation,
                 'methods': ['POST']
             },
-            '/backend-api/v2/error': {
-                'function': self.handle_error,
+            '/backend-api/v2/synthesize/<provider>': {
+                'function': self.handle_synthesize,
+                'methods': ['GET']
+            },
+            '/backend-api/v2/upload_cookies': {
+                'function': self.upload_cookies,
                 'methods': ['POST']
             },
             '/images/<path:name>': {
@@ -57,15 +81,17 @@
             }
         }
 
-    def handle_error(self):
-        """
-        Initialize the backend API with the given Flask application.
-
-        Args:
-            app (Flask): Flask application instance to attach routes to.
-        """
-        print(request.json)
-        return 'ok', 200
+    def upload_cookies(self):
+        file = None
+        if "file" in request.files:
+            file = request.files['file']
+            if file.filename == '':
+                return 'No selected file', 400
+        if file and file.filename.endswith(".json") or file.filename.endswith(".har"):
+            filename = secure_filename(file.filename)
+            file.save(os.path.join(get_cookies_dir(), filename))
+            return "File saved", 200
+        return 'Not supported file', 400
 
     def handle_conversation(self):
         """
@@ -98,11 +124,30 @@
             mimetype='text/event-stream'
         )
 
+    def handle_synthesize(self, provider: str):
+        try:
+            provider_handler = convert_to_provider(provider)
+        except ProviderNotFoundError:
+            return "Provider not found", 404
+        if not hasattr(provider_handler, "synthesize"):
+            return "Provider doesn't support synthesize", 500
+        response_data = provider_handler.synthesize({**request.args})
+        if asyncio.iscoroutinefunction(provider_handler.synthesize):
+            response_data = asyncio.run(response_data)
+        else:
+            if hasattr(response_data, "__aiter__"):
+                response_data = to_sync_generator(response_data)
+            response_data = safe_iter_generator(response_data)
+        content_type = getattr(provider_handler, "synthesize_content_type", "application/octet-stream")
+        response = flask.Response(response_data, content_type=content_type)
+        response.headers['Cache-Control'] = "max-age=604800"
+        return response
+
     def get_provider_models(self, provider: str):
         api_key = None if request.authorization is None else request.authorization.token
         models = super().get_provider_models(provider, api_key)
         if models is None:
-            return 404, "Provider not found"
+            return "Provider not found", 404
         return models
 
     def _format_json(self, response_type: str, content) -> str: