From 75d6ff2c4033781b1e758b41c8bc9f748846abd3 Mon Sep 17 00:00:00 2001 From: Zylan Date: Tue, 4 Feb 2025 05:17:14 +0800 Subject: [PATCH] ocr --- app.py | 82 ++++++++++++++++- icon.py | 14 --- models/base.py | 14 +++ models/claude.py | 97 ++++++++++++++++++++ models/deepseek.py | 70 ++++++++++++++ models/gpt4o.py | 65 +++++++++++++ requirements.txt | 2 + static/js/main.js | 206 +++++++++++++++++++++++++++++++++++------- static/js/settings.js | 16 ++++ static/style.css | 98 +++++++++++++++++++- templates/index.html | 38 ++++++-- 11 files changed, 641 insertions(+), 61 deletions(-) delete mode 100644 icon.py diff --git a/app.py b/app.py index e4d88f5..9340c11 100644 --- a/app.py +++ b/app.py @@ -9,10 +9,12 @@ import pystray from PIL import Image, ImageDraw import pyperclip from models import ModelFactory - app = Flask(__name__) socketio = SocketIO(app, cors_allowed_origins="*") +# Commented out due to model file issues +# from pix2text import Pix2Text + def get_local_ip(): try: # Get local IP address @@ -64,6 +66,9 @@ def handle_connect(): def handle_disconnect(): print('Client disconnected') +# Commented out due to model file issues +# p2t = Pix2Text() + def stream_model_response(response_generator, sid): """Stream model responses to the client""" try: @@ -110,6 +115,81 @@ def handle_screenshot_request(): 'error': str(e) }) +@socketio.on('extract_text') +def handle_text_extraction(data): + try: + print("Starting text extraction...") + image_data = data['image'] # Base64 encoded image + + # Convert base64 to PIL Image + image_bytes = base64.b64decode(image_data) + image = Image.open(BytesIO(image_bytes)) + + # Temporarily disabled text extraction + extracted_text = "Text extraction is currently disabled" + + # Send the extracted text back to the client + socketio.emit('text_extraction_response', { + 'success': True, + 'text': extracted_text + }, room=request.sid) + + except Exception as e: + print(f"Text extraction error: {str(e)}") + socketio.emit('text_extraction_response', { + 'success': False, + 'error': f'Text extraction error: {str(e)}' + }, room=request.sid) + +@socketio.on('analyze_text') +def handle_text_analysis(data): + try: + print("Starting text analysis...") + text = data['text'] + settings = data['settings'] + + # Validate required settings + if not settings.get('apiKey'): + raise ValueError("API key is required for the selected model") + + # Configure proxy settings if enabled + proxies = None + if settings.get('proxyEnabled', False): + proxy_host = settings.get('proxyHost', '127.0.0.1') + proxy_port = settings.get('proxyPort', '4780') + proxies = { + 'http': f'http://{proxy_host}:{proxy_port}', + 'https': f'http://{proxy_host}:{proxy_port}' + } + + try: + # Create model instance using factory + model = ModelFactory.create_model( + model_name=settings.get('model', 'claude-3-5-sonnet-20241022'), + api_key=settings['apiKey'], + temperature=float(settings.get('temperature', 0.7)), + system_prompt=settings.get('systemPrompt') + ) + + # Start streaming in a separate thread + Thread( + target=stream_model_response, + args=(model.analyze_text(text, proxies), request.sid) + ).start() + + except Exception as e: + socketio.emit('claude_response', { + 'status': 'error', + 'error': f'API error: {str(e)}' + }, room=request.sid) + + except Exception as e: + print(f"Analysis error: {str(e)}") + socketio.emit('claude_response', { + 'status': 'error', + 'error': f'Analysis error: {str(e)}' + }, room=request.sid) + @socketio.on('analyze_image') def handle_image_analysis(data): try: diff --git 
a/icon.py b/icon.py deleted file mode 100644 index 74ef89e..0000000 --- a/icon.py +++ /dev/null @@ -1,14 +0,0 @@ -from PIL import Image, ImageDraw - -def create_icon(): - # Create a simple icon (a colored circle) - icon_size = 64 - icon_image = Image.new('RGB', (icon_size, icon_size), color='white') - draw = ImageDraw.Draw(icon_image) - draw.ellipse([4, 4, icon_size-4, icon_size-4], fill='#2196F3') - - # Save as ICO file - icon_image.save('app.ico', format='ICO') - -if __name__ == '__main__': - create_icon() diff --git a/models/base.py b/models/base.py index c225373..5a45d9c 100644 --- a/models/base.py +++ b/models/base.py @@ -21,6 +21,20 @@ class BaseModel(ABC): """ pass + @abstractmethod + def analyze_text(self, text: str, proxies: dict = None) -> Generator[dict, None, None]: + """ + Analyze the given text and yield response chunks. + + Args: + text: Text to analyze + proxies: Optional proxy configuration + + Yields: + dict: Response chunks with status and content + """ + pass + @abstractmethod def get_default_system_prompt(self) -> str: """Return the default system prompt for this model""" diff --git a/models/claude.py b/models/claude.py index dbcc18e..7de5ff2 100644 --- a/models/claude.py +++ b/models/claude.py @@ -15,6 +15,103 @@ class ClaudeModel(BaseModel): def get_model_identifier(self) -> str: return "claude-3-5-sonnet-20241022" + def analyze_text(self, text: str, proxies: dict = None) -> Generator[dict, None, None]: + """Stream Claude's response for text analysis""" + try: + # Initial status + yield {"status": "started", "content": ""} + + api_key = self.api_key.strip() + if api_key.startswith('Bearer '): + api_key = api_key[7:] + + headers = { + 'x-api-key': api_key, + 'anthropic-version': '2023-06-01', + 'content-type': 'application/json', + 'accept': 'application/json', + } + + payload = { + 'model': self.get_model_identifier(), + 'stream': True, + 'max_tokens': 4096, + 'temperature': self.temperature, + 'system': self.system_prompt, + 'messages': [{ + 'role': 'user', + 'content': [ + { + 'type': 'text', + 'text': text + } + ] + }] + } + + response = requests.post( + 'https://api.anthropic.com/v1/messages', + headers=headers, + json=payload, + stream=True, + proxies=proxies, + timeout=60 + ) + + if response.status_code != 200: + error_msg = f'API error: {response.status_code}' + try: + error_data = response.json() + if 'error' in error_data: + error_msg += f" - {error_data['error']['message']}" + except: + error_msg += f" - {response.text}" + yield {"status": "error", "error": error_msg} + return + + for chunk in response.iter_lines(): + if not chunk: + continue + + try: + chunk_str = chunk.decode('utf-8') + if not chunk_str.startswith('data: '): + continue + + chunk_str = chunk_str[6:] + data = json.loads(chunk_str) + + if data.get('type') == 'content_block_delta': + if 'delta' in data and 'text' in data['delta']: + yield { + "status": "streaming", + "content": data['delta']['text'] + } + + elif data.get('type') == 'message_stop': + yield { + "status": "completed", + "content": "" + } + + elif data.get('type') == 'error': + error_msg = data.get('error', {}).get('message', 'Unknown error') + yield { + "status": "error", + "error": error_msg + } + break + + except json.JSONDecodeError as e: + print(f"JSON decode error: {str(e)}") + continue + + except Exception as e: + yield { + "status": "error", + "error": f"Streaming error: {str(e)}" + } + def analyze_image(self, image_data: str, proxies: dict = None) -> Generator[dict, None, None]: """Stream Claude's response for image 
analysis""" try: diff --git a/models/deepseek.py b/models/deepseek.py index d45f6e1..fa02fb4 100644 --- a/models/deepseek.py +++ b/models/deepseek.py @@ -16,6 +16,76 @@ class DeepSeekModel(BaseModel): def get_model_identifier(self) -> str: return "deepseek-reasoner" + def analyze_text(self, text: str, proxies: dict = None) -> Generator[dict, None, None]: + """Stream DeepSeek's response for text analysis""" + try: + # Initial status + yield {"status": "started", "content": ""} + + # Configure client with proxy if needed + client_args = { + "api_key": self.api_key, + "base_url": "https://api.deepseek.com" + } + + if proxies: + session = requests.Session() + session.proxies = proxies + client_args["http_client"] = session + + client = OpenAI(**client_args) + + response = client.chat.completions.create( + model=self.get_model_identifier(), + messages=[{ + 'role': 'system', + 'content': self.system_prompt + }, { + 'role': 'user', + 'content': text + }], + stream=True + ) + + for chunk in response: + try: + if hasattr(chunk.choices[0].delta, 'reasoning_content'): + content = chunk.choices[0].delta.reasoning_content + if content: + yield { + "status": "streaming", + "content": content + } + elif hasattr(chunk.choices[0].delta, 'content'): + content = chunk.choices[0].delta.content + if content: + yield { + "status": "streaming", + "content": content + } + + except Exception as e: + print(f"Chunk processing error: {str(e)}") + continue + + # Send completion status + yield { + "status": "completed", + "content": "" + } + + except Exception as e: + error_msg = str(e) + if "invalid_api_key" in error_msg.lower(): + error_msg = "Invalid API key provided" + elif "rate_limit" in error_msg.lower(): + error_msg = "Rate limit exceeded. Please try again later." + + yield { + "status": "error", + "error": f"DeepSeek API error: {error_msg}" + } + def analyze_image(self, image_data: str, proxies: dict = None) -> Generator[dict, None, None]: """Stream DeepSeek's response for image analysis""" try: diff --git a/models/gpt4o.py b/models/gpt4o.py index 0d2bc00..4a1a04e 100644 --- a/models/gpt4o.py +++ b/models/gpt4o.py @@ -16,6 +16,71 @@ class GPT4oModel(BaseModel): def get_model_identifier(self) -> str: return "gpt-4o-2024-11-20" + def analyze_text(self, text: str, proxies: dict = None) -> Generator[dict, None, None]: + """Stream GPT-4o's response for text analysis""" + try: + # Initial status + yield {"status": "started", "content": ""} + + # Configure client with proxy if needed + client_args = { + "api_key": self.api_key, + "base_url": "https://api.openai.com/v1" # Replace with actual GPT-4o API endpoint + } + + if proxies: + session = requests.Session() + session.proxies = proxies + client_args["http_client"] = session + + client = OpenAI(**client_args) + + messages = [ + { + "role": "system", + "content": self.system_prompt + }, + { + "role": "user", + "content": text + } + ] + + response = client.chat.completions.create( + model=self.get_model_identifier(), + messages=messages, + temperature=self.temperature, + stream=True, + max_tokens=4000 + ) + + for chunk in response: + if hasattr(chunk.choices[0].delta, 'content'): + content = chunk.choices[0].delta.content + if content: + yield { + "status": "streaming", + "content": content + } + + # Send completion status + yield { + "status": "completed", + "content": "" + } + + except Exception as e: + error_msg = str(e) + if "invalid_api_key" in error_msg.lower(): + error_msg = "Invalid API key provided" + elif "rate_limit" in error_msg.lower(): + error_msg = 
"Rate limit exceeded. Please try again later." + + yield { + "status": "error", + "error": f"GPT-4o API error: {error_msg}" + } + def analyze_image(self, image_data: str, proxies: dict = None) -> Generator[dict, None, None]: """Stream GPT-4o's response for image analysis""" try: diff --git a/requirements.txt b/requirements.txt index 31de3d8..42bc948 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,4 @@ +pix2text flask==3.1.0 pyautogui==0.9.54 Pillow==11.1.0 @@ -6,3 +7,4 @@ python-engineio==4.11.2 python-socketio==5.12.1 requests==2.32.3 openai==1.61.0 +pystray \ No newline at end of file diff --git a/static/js/main.js b/static/js/main.js index ada78fd..184e7fb 100644 --- a/static/js/main.js +++ b/static/js/main.js @@ -4,6 +4,7 @@ class SnapSolver { this.initializeState(); this.setupEventListeners(); this.initializeConnection(); + this.setupAutoScroll(); // Initialize managers window.uiManager = new UIManager(); @@ -19,8 +20,13 @@ class SnapSolver { this.cropContainer = document.getElementById('cropContainer'); this.imagePreview = document.getElementById('imagePreview'); this.sendToClaudeBtn = document.getElementById('sendToClaude'); + this.extractTextBtn = document.getElementById('extractText'); + this.textEditor = document.getElementById('textEditor'); + this.extractedText = document.getElementById('extractedText'); + this.sendExtractedTextBtn = document.getElementById('sendExtractedText'); this.responseContent = document.getElementById('responseContent'); this.claudePanel = document.getElementById('claudePanel'); + this.statusLight = document.querySelector('.status-light'); } initializeState() { @@ -30,6 +36,27 @@ class SnapSolver { this.history = JSON.parse(localStorage.getItem('snapHistory') || '[]'); } + setupAutoScroll() { + // Create MutationObserver to watch for content changes + const observer = new MutationObserver((mutations) => { + mutations.forEach((mutation) => { + if (mutation.type === 'characterData' || mutation.type === 'childList') { + this.responseContent.scrollTo({ + top: this.responseContent.scrollHeight, + behavior: 'smooth' + }); + } + }); + }); + + // Start observing the response content + observer.observe(this.responseContent, { + childList: true, + characterData: true, + subtree: true + }); + } + updateConnectionStatus(connected) { this.connectionStatus.textContent = connected ? 'Connected' : 'Disconnected'; this.connectionStatus.className = `status ${connected ? 
'connected' : 'disconnected'}`; @@ -39,6 +66,27 @@ class SnapSolver { this.imagePreview.classList.add('hidden'); this.cropBtn.classList.add('hidden'); this.sendToClaudeBtn.classList.add('hidden'); + this.extractTextBtn.classList.add('hidden'); + this.textEditor.classList.add('hidden'); + } + } + + updateStatusLight(status) { + this.statusLight.className = 'status-light'; + switch (status) { + case 'started': + case 'streaming': + this.statusLight.classList.add('processing'); + break; + case 'completed': + this.statusLight.classList.add('completed'); + break; + case 'error': + this.statusLight.classList.add('error'); + break; + default: + // Reset to default state + break; } } @@ -68,6 +116,7 @@ class SnapSolver { } setupSocketEventHandlers() { + // Screenshot response handler this.socket.on('screenshot_response', (data) => { if (data.success) { this.screenshotImg.src = `data:image/png;base64,${data.image}`; @@ -76,6 +125,8 @@ class SnapSolver { this.captureBtn.disabled = false; this.captureBtn.innerHTML = 'Capture'; this.sendToClaudeBtn.classList.add('hidden'); + this.extractTextBtn.classList.add('hidden'); + this.textEditor.classList.add('hidden'); window.showToast('Screenshot captured successfully'); } else { window.showToast('Failed to capture screenshot: ' + data.error, 'error'); @@ -84,58 +135,61 @@ class SnapSolver { } }); + // Text extraction response handler + this.socket.on('text_extraction_response', (data) => { + if (data.success) { + this.extractedText.value = data.text; + this.textEditor.classList.remove('hidden'); + window.showToast('Text extracted successfully'); + } else { + window.showToast('Failed to extract text: ' + data.error, 'error'); + } + this.extractTextBtn.disabled = false; + this.extractTextBtn.innerHTML = 'Extract Text'; + }); + this.socket.on('claude_response', (data) => { console.log('Received claude_response:', data); + this.updateStatusLight(data.status); switch (data.status) { case 'started': console.log('Analysis started'); - this.responseContent.textContent = 'Starting analysis...\n'; + this.responseContent.textContent = ''; this.sendToClaudeBtn.disabled = true; + this.sendExtractedTextBtn.disabled = true; break; case 'streaming': if (data.content) { console.log('Received content:', data.content); - if (this.responseContent.textContent === 'Starting analysis...\n') { - this.responseContent.textContent = data.content; - } else { - this.responseContent.textContent += data.content; - } - this.responseContent.scrollTo({ - top: this.responseContent.scrollHeight, - behavior: 'smooth' - }); + this.responseContent.textContent += data.content; } break; case 'completed': console.log('Analysis completed'); - this.responseContent.textContent += '\n\nAnalysis complete.'; this.sendToClaudeBtn.disabled = false; + this.sendExtractedTextBtn.disabled = false; this.addToHistory(this.croppedImage, this.responseContent.textContent); window.showToast('Analysis completed successfully'); - this.responseContent.scrollTo({ - top: this.responseContent.scrollHeight, - behavior: 'smooth' - }); break; case 'error': console.error('Claude analysis error:', data.error); const errorMessage = data.error || 'Unknown error occurred'; - this.responseContent.textContent += '\n\nError: ' + errorMessage; + this.responseContent.textContent += '\nError: ' + errorMessage; this.sendToClaudeBtn.disabled = false; - this.responseContent.scrollTop = this.responseContent.scrollHeight; + this.sendExtractedTextBtn.disabled = false; window.showToast('Analysis failed: ' + errorMessage, 'error'); break; 
default: console.warn('Unknown response status:', data.status); if (data.error) { - this.responseContent.textContent += '\n\nError: ' + data.error; + this.responseContent.textContent += '\nError: ' + data.error; this.sendToClaudeBtn.disabled = false; - this.responseContent.scrollTop = this.responseContent.scrollHeight; + this.sendExtractedTextBtn.disabled = false; window.showToast('Unknown error occurred', 'error'); } } @@ -170,8 +224,8 @@ class SnapSolver { this.cropper = new Cropper(clonedImage, { viewMode: 1, - dragMode: 'move', - autoCropArea: 0.8, + dragMode: 'crop', + autoCropArea: 0, restore: false, modal: true, guides: true, @@ -179,10 +233,8 @@ class SnapSolver { cropBoxMovable: true, cropBoxResizable: true, toggleDragModeOnDblclick: false, - minContainerWidth: 800, - minContainerHeight: 600, - minCropBoxWidth: 100, - minCropBoxHeight: 100, + minCropBoxWidth: 50, + minCropBoxHeight: 50, background: true, responsive: true, checkOrientation: true, @@ -213,6 +265,13 @@ class SnapSolver { } setupEventListeners() { + this.setupCaptureEvents(); + this.setupCropEvents(); + this.setupAnalysisEvents(); + this.setupKeyboardShortcuts(); + } + + setupCaptureEvents() { // Capture button this.captureBtn.addEventListener('click', async () => { if (!this.socket || !this.socket.connected) { @@ -230,7 +289,9 @@ class SnapSolver { this.captureBtn.innerHTML = 'Capture'; } }); + } + setupCropEvents() { // Crop button this.cropBtn.addEventListener('click', () => { if (this.screenshotImg.src) { @@ -264,11 +325,11 @@ class SnapSolver { // Get cropped canvas with more conservative size limits console.log('Getting cropped canvas...'); const canvas = this.cropper.getCroppedCanvas({ - maxWidth: 1280, - maxHeight: 720, + maxWidth: 2560, + maxHeight: 1440, fillColor: '#fff', imageSmoothingEnabled: true, - imageSmoothingQuality: 'low', + imageSmoothingQuality: 'high', }); if (!canvas) { @@ -280,23 +341,28 @@ class SnapSolver { // Convert to data URL with error handling and compression console.log('Converting to data URL...'); try { - // Use lower quality for JPEG to reduce size - this.croppedImage = canvas.toDataURL('image/jpeg', 0.6); + // Use PNG for better quality + this.croppedImage = canvas.toDataURL('image/png'); console.log('Data URL conversion successful'); } catch (dataUrlError) { console.error('Data URL conversion error:', dataUrlError); throw new Error('Failed to process cropped image. The image might be too large or memory insufficient.'); } + // Properly destroy the cropper instance + this.cropper.destroy(); + this.cropper = null; + // Clean up cropper and update UI this.cropContainer.classList.add('hidden'); document.querySelector('.crop-area').innerHTML = ''; - this.settingsPanel.classList.add('hidden'); + // Update the screenshot image with the cropped version this.screenshotImg.src = this.croppedImage; this.imagePreview.classList.remove('hidden'); this.cropBtn.classList.remove('hidden'); this.sendToClaudeBtn.classList.remove('hidden'); + this.extractTextBtn.classList.remove('hidden'); window.showToast('Image cropped successfully'); } catch (error) { console.error('Cropping error details:', { @@ -305,7 +371,12 @@ class SnapSolver { cropperState: this.cropper ? 
'initialized' : 'not initialized' }); window.showToast(error.message || 'Error while cropping image', 'error'); - return; // Exit the function to prevent cleanup if error occurs + } finally { + // Always clean up the cropper instance + if (this.cropper) { + this.cropper.destroy(); + this.cropper = null; + } } } }); @@ -318,8 +389,72 @@ class SnapSolver { } this.cropContainer.classList.add('hidden'); this.sendToClaudeBtn.classList.add('hidden'); + this.extractTextBtn.classList.add('hidden'); document.querySelector('.crop-area').innerHTML = ''; }); + } + + setupAnalysisEvents() { + // Extract Text button + this.extractTextBtn.addEventListener('click', () => { + if (!this.croppedImage) { + window.showToast('Please crop the image first', 'error'); + return; + } + + this.extractTextBtn.disabled = true; + this.extractTextBtn.innerHTML = 'Extracting...'; + + try { + this.socket.emit('extract_text', { + image: this.croppedImage.split(',')[1] + }); + } catch (error) { + window.showToast('Failed to extract text: ' + error.message, 'error'); + this.extractTextBtn.disabled = false; + this.extractTextBtn.innerHTML = 'Extract Text'; + } + }); + + // Send Extracted Text button + this.sendExtractedTextBtn.addEventListener('click', () => { + const text = this.extractedText.value.trim(); + if (!text) { + window.showToast('Please enter some text', 'error'); + return; + } + + const settings = window.settingsManager.getSettings(); + const apiKey = window.settingsManager.getApiKey(); + + if (!apiKey) { + this.settingsPanel.classList.remove('hidden'); + return; + } + + this.claudePanel.classList.remove('hidden'); + this.responseContent.textContent = ''; + this.sendExtractedTextBtn.disabled = true; + + try { + this.socket.emit('analyze_text', { + text: text, + settings: { + apiKey: apiKey, + model: settings.model || 'claude-3-5-sonnet-20241022', + temperature: parseFloat(settings.temperature) || 0.7, + systemPrompt: settings.systemPrompt || 'You are an expert at analyzing questions and providing detailed solutions.', + proxyEnabled: settings.proxyEnabled || false, + proxyHost: settings.proxyHost || '127.0.0.1', + proxyPort: settings.proxyPort || '4780' + } + }); + } catch (error) { + this.responseContent.textContent = 'Error: Failed to send text for analysis - ' + error.message; + this.sendExtractedTextBtn.disabled = false; + window.showToast('Failed to send text for analysis', 'error'); + } + }); // Send to Claude button this.sendToClaudeBtn.addEventListener('click', () => { @@ -337,7 +472,7 @@ class SnapSolver { } this.claudePanel.classList.remove('hidden'); - this.responseContent.textContent = 'Preparing to analyze image...\n'; + this.responseContent.textContent = ''; this.sendToClaudeBtn.disabled = true; try { @@ -354,12 +489,14 @@ class SnapSolver { } }); } catch (error) { - this.responseContent.textContent += '\nError: Failed to send image for analysis - ' + error.message; + this.responseContent.textContent = 'Error: Failed to send image for analysis - ' + error.message; this.sendToClaudeBtn.disabled = false; window.showToast('Failed to send image for analysis', 'error'); } }); + } + setupKeyboardShortcuts() { // Keyboard shortcuts for capture and crop document.addEventListener('keydown', (e) => { if (e.ctrlKey || e.metaKey) { @@ -430,6 +567,7 @@ window.renderHistory = function() { window.app.cropBtn.classList.add('hidden'); window.app.captureBtn.classList.add('hidden'); window.app.sendToClaudeBtn.classList.add('hidden'); + window.app.extractTextBtn.classList.add('hidden'); if (historyItem.response) { 
window.app.claudePanel.classList.remove('hidden'); window.app.responseContent.textContent = historyItem.response; diff --git a/static/js/settings.js b/static/js/settings.js index f8c00e0..adb5baa 100644 --- a/static/js/settings.js +++ b/static/js/settings.js @@ -12,6 +12,7 @@ class SettingsManager { this.temperatureInput = document.getElementById('temperature'); this.temperatureValue = document.getElementById('temperatureValue'); this.systemPromptInput = document.getElementById('systemPrompt'); + this.languageInput = document.getElementById('language'); this.proxyEnabledInput = document.getElementById('proxyEnabled'); this.proxyHostInput = document.getElementById('proxyHost'); this.proxyPortInput = document.getElementById('proxyPort'); @@ -67,6 +68,7 @@ class SettingsManager { this.temperatureInput.value = settings.temperature; this.temperatureValue.textContent = settings.temperature; } + if (settings.language) this.languageInput.value = settings.language; if (settings.systemPrompt) this.systemPromptInput.value = settings.systemPrompt; if (settings.proxyEnabled !== undefined) { this.proxyEnabledInput.checked = settings.proxyEnabled; @@ -89,6 +91,7 @@ class SettingsManager { apiKeys: {}, model: this.modelSelect.value, temperature: this.temperatureInput.value, + language: this.languageInput.value, systemPrompt: this.systemPromptInput.value, proxyEnabled: this.proxyEnabledInput.checked, proxyHost: this.proxyHostInput.value, @@ -118,6 +121,18 @@ class SettingsManager { return apiKey; } + getSettings() { + return { + model: this.modelSelect.value, + temperature: this.temperatureInput.value, + language: this.languageInput.value, + systemPrompt: this.systemPromptInput.value + ` Please respond in ${this.languageInput.value}.`, + proxyEnabled: this.proxyEnabledInput.checked, + proxyHost: this.proxyHostInput.value, + proxyPort: this.proxyPortInput.value + }; + } + setupEventListeners() { // Save settings on change Object.values(this.apiKeyInputs).forEach(input => { @@ -135,6 +150,7 @@ class SettingsManager { }); this.systemPromptInput.addEventListener('change', () => this.saveSettings()); + this.languageInput.addEventListener('change', () => this.saveSettings()); this.proxyEnabledInput.addEventListener('change', (e) => { this.proxySettings.style.display = e.target.checked ? 
'block' : 'none'; this.saveSettings(); diff --git a/static/style.css b/static/style.css index 289429b..f699719 100644 --- a/static/style.css +++ b/static/style.css @@ -145,20 +145,64 @@ body { } .toolbar-buttons { - display: grid; - grid-template-columns: repeat(auto-fit, minmax(min-content, max-content)); - gap: 1rem; - justify-content: start; + display: flex; + justify-content: flex-start; + align-items: center; +} + +.button-group { + display: flex; + gap: 0.5rem; align-items: center; } .analysis-button { display: flex; - justify-content: center; + flex-direction: column; + align-items: center; + gap: 1rem; margin-top: 1rem; padding: 1rem; } +.analysis-button .button-group { + display: flex; + gap: 0.5rem; + width: 100%; + max-width: 400px; + justify-content: center; +} + +.text-editor { + width: 100%; + max-width: 600px; + display: flex; + flex-direction: column; + gap: 1rem; +} + +.text-editor textarea { + width: 100%; + min-height: 120px; + padding: 0.75rem; + border: 1px solid var(--border-color); + border-radius: 0.375rem; + background-color: var(--background); + color: var(--text-primary); + font-size: 0.9375rem; + resize: vertical; +} + +.text-editor textarea:focus { + outline: none; + border-color: var(--primary-color); + box-shadow: 0 0 0 2px rgba(33, 150, 243, 0.1); +} + +.text-editor button { + align-self: flex-end; +} + .image-preview { position: relative; border-radius: 0.5rem; @@ -207,11 +251,55 @@ body { margin-bottom: 1rem; } +.header-title { + display: flex; + align-items: center; + gap: 0.75rem; +} + .panel-header h2 { font-size: 1.25rem; color: var(--text-primary); } +.analysis-status { + display: flex; + align-items: center; +} + +.status-light { + width: 12px; + height: 12px; + border-radius: 50%; + background-color: var(--text-secondary); + transition: background-color 0.3s ease; +} + +.status-light.processing { + background-color: #ffd700; + animation: pulse 1.5s infinite; +} + +.status-light.completed { + background-color: var(--success-color); +} + +.status-light.error { + background-color: var(--error-color); +} + +@keyframes pulse { + 0% { + opacity: 1; + } + 50% { + opacity: 0.5; + } + 100% { + opacity: 1; + } +} + .response-content { flex: 1; overflow-y: auto; diff --git a/templates/index.html b/templates/index.html index fa6c1ca..a8571cf 100644 --- a/templates/index.html +++ b/templates/index.html @@ -35,7 +35,8 @@
[templates/index.html hunk (38 lines changed, first hunk at line 35): the HTML markup was lost in extraction. Judging from the accompanying main.js, settings.js, and style.css changes, it adds the Extract Text button alongside Send to Claude, the editable text panel (#textEditor with the #extractedText textarea and the #sendExtractedText button), a Language field in the settings panel, and the status-light indicator in the analysis panel header.]
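A quick way to smoke-test the new analyze_text event end to end is a small python-socketio client (python-socketio is already pinned in requirements.txt). The sketch below is illustrative only and not part of the patch: the http://localhost:5000 address and the placeholder API key are assumptions, and the settings payload simply mirrors the defaults that main.js sends.

# Minimal test client for the analyze_text socket event added in this patch.
# Assumptions: the Flask-SocketIO server is reachable at http://localhost:5000
# and 'sk-...' is a placeholder for a real API key.
import socketio

sio = socketio.Client()

@sio.on('claude_response')
def on_claude_response(data):
    # The server streams chunks with status: started / streaming / completed / error.
    status = data.get('status')
    if status == 'streaming':
        print(data.get('content', ''), end='', flush=True)
    elif status == 'completed':
        print('\n[analysis complete]')
        sio.disconnect()
    elif status == 'error':
        print('\n[error]', data.get('error'))
        sio.disconnect()

sio.connect('http://localhost:5000')  # assumed host/port
sio.emit('analyze_text', {
    'text': 'What is 17 * 24?',
    'settings': {
        'apiKey': 'sk-...',  # placeholder
        'model': 'claude-3-5-sonnet-20241022',
        'temperature': 0.7,
        'systemPrompt': 'You are an expert at analyzing questions and providing detailed solutions.',
        'proxyEnabled': False,
    },
})
sio.wait()

The same contract applies to the image path: emitting analyze_image with an 'image' field (base64 without the data-URL prefix) produces the identical claude_response stream, which is why main.js handles both text and image analysis with a single claude_response listener.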