diff --git a/src/bridge/featureBridge.js b/src/bridge/featureBridge.js index 9c56a32..4dde899 100644 --- a/src/bridge/featureBridge.js +++ b/src/bridge/featureBridge.js @@ -6,15 +6,15 @@ const whisperService = require('../features/common/services/whisperService'); const ollamaService = require('../features/common/services/ollamaService'); const modelStateService = require('../features/common/services/modelStateService'); const shortcutsService = require('../features/shortcuts/shortcutsService'); +const presetRepository = require('../features/common/repositories/preset'); const askService = require('../features/ask/askService'); const listenService = require('../features/listen/listenService'); const permissionService = require('../features/common/services/permissionService'); module.exports = { - // Renderer로부터의 요청을 수신 + // Renderer로부터의 요청을 수신하고 서비스로 전달 initialize() { - // Settings Service ipcMain.handle('settings:getPresets', async () => await settingsService.getPresets()); ipcMain.handle('settings:get-auto-update', async () => await settingsService.getAutoUpdateSetting()); @@ -33,14 +33,12 @@ module.exports = { ipcMain.handle('get-default-shortcuts', async () => await shortcutsService.handleRestoreDefaults()); ipcMain.handle('save-shortcuts', async (event, newKeybinds) => await shortcutsService.handleSaveShortcuts(newKeybinds)); - // Permissions ipcMain.handle('check-system-permissions', async () => await permissionService.checkSystemPermissions()); ipcMain.handle('request-microphone-permission', async () => await permissionService.requestMicrophonePermission()); ipcMain.handle('open-system-preferences', async (event, section) => await permissionService.openSystemPreferences(section)); ipcMain.handle('mark-permissions-completed', async () => await permissionService.markPermissionsAsCompleted()); ipcMain.handle('check-permissions-completed', async () => await permissionService.checkPermissionsCompleted()); - // User/Auth ipcMain.handle('get-current-user', () => authService.getCurrentUser()); @@ -51,7 +49,7 @@ module.exports = { ipcMain.handle('quit-application', () => app.quit()); // Whisper - ipcMain.handle('whisper:download-model', async (event, modelId) => await whisperService.handleDownloadModel(event, modelId)); + ipcMain.handle('whisper:download-model', async (event, modelId) => await whisperService.handleDownloadModel(modelId)); ipcMain.handle('whisper:get-installed-models', async () => await whisperService.handleGetInstalledModels()); // General @@ -60,17 +58,17 @@ module.exports = { // Ollama ipcMain.handle('ollama:get-status', async () => await ollamaService.handleGetStatus()); - ipcMain.handle('ollama:install', async (event) => await ollamaService.handleInstall(event)); - ipcMain.handle('ollama:start-service', async (event) => await ollamaService.handleStartService(event)); + ipcMain.handle('ollama:install', async () => await ollamaService.handleInstall()); + ipcMain.handle('ollama:start-service', async () => await ollamaService.handleStartService()); ipcMain.handle('ollama:ensure-ready', async () => await ollamaService.handleEnsureReady()); ipcMain.handle('ollama:get-models', async () => await ollamaService.handleGetModels()); ipcMain.handle('ollama:get-model-suggestions', async () => await ollamaService.handleGetModelSuggestions()); - ipcMain.handle('ollama:pull-model', async (event, modelName) => await ollamaService.handlePullModel(event, modelName)); + ipcMain.handle('ollama:pull-model', async (event, modelName) => await ollamaService.handlePullModel(modelName)); 
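// --- Illustrative sketch, not part of this diff ---
// The bridge above maps IPC channels straight onto singleton service methods. Handlers no
// longer pass the `event` object through, because the services now broadcast their own
// progress/results to every window. A minimal, hypothetical version of that pattern (the
// names `registerExampleBridge` and `example:pull-model` are made up for illustration):
const { ipcMain } = require('electron');

function registerExampleBridge(exampleService) {
    // unpack the renderer arguments, ignore the event, and await the service
    ipcMain.handle('example:pull-model', async (_event, modelName) =>
        exampleService.handlePullModel(modelName)
    );
}

// Renderer side (through a preload that exposes ipcRenderer.invoke):
//   const result = await ipcRenderer.invoke('example:pull-model', 'llama3.2');
// --- end sketch ---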
ipcMain.handle('ollama:is-model-installed', async (event, modelName) => await ollamaService.handleIsModelInstalled(modelName)); ipcMain.handle('ollama:warm-up-model', async (event, modelName) => await ollamaService.handleWarmUpModel(modelName)); ipcMain.handle('ollama:auto-warm-up', async () => await ollamaService.handleAutoWarmUp()); ipcMain.handle('ollama:get-warm-up-status', async () => await ollamaService.handleGetWarmUpStatus()); - ipcMain.handle('ollama:shutdown', async (event, force = false) => await ollamaService.handleShutdown(event, force)); + ipcMain.handle('ollama:shutdown', async (event, force = false) => await ollamaService.handleShutdown(force)); // Ask ipcMain.handle('ask:sendQuestionFromAsk', async (event, userPrompt) => await askService.sendMessage(userPrompt)); @@ -101,9 +99,7 @@ module.exports = { } }); - - - // ModelStateService + // ModelStateService ipcMain.handle('model:validate-key', async (e, { provider, key }) => await modelStateService.handleValidateKey(provider, key)); ipcMain.handle('model:get-all-keys', () => modelStateService.getAllApiKeys()); ipcMain.handle('model:set-api-key', async (e, { provider, key }) => await modelStateService.setApiKey(provider, key)); @@ -114,8 +110,6 @@ module.exports = { ipcMain.handle('model:are-providers-configured', () => modelStateService.areProvidersConfigured()); ipcMain.handle('model:get-provider-config', () => modelStateService.getProviderConfig()); - - console.log('[FeatureBridge] Initialized with all feature handlers.'); }, diff --git a/src/bridge/windowBridge.js b/src/bridge/windowBridge.js index 416cc98..fe9e6de 100644 --- a/src/bridge/windowBridge.js +++ b/src/bridge/windowBridge.js @@ -12,6 +12,7 @@ module.exports = { ipcMain.on('hide-settings-window', () => windowManager.hideSettingsWindow()); ipcMain.on('cancel-hide-settings-window', () => windowManager.cancelHideSettingsWindow()); ipcMain.handle('open-login-page', () => windowManager.openLoginPage()); + ipcMain.handle('open-personalize-page', () => windowManager.openLoginPage()); ipcMain.handle('move-window-step', (event, direction) => windowManager.moveWindowStep(direction)); ipcMain.on('close-shortcut-editor', () => windowManager.closeWindow('shortcut-settings')); diff --git a/src/features/ask/askService.js b/src/features/ask/askService.js index c61f465..a87a095 100644 --- a/src/features/ask/askService.js +++ b/src/features/ask/askService.js @@ -281,35 +281,78 @@ class AskService { portkeyVirtualKey: modelInfo.provider === 'openai-glass' ? modelInfo.apiKey : undefined, }); - const response = await streamingLLM.streamChat(messages); - const askWin = getWindowPool()?.get('ask'); + try { + const response = await streamingLLM.streamChat(messages); + const askWin = getWindowPool()?.get('ask'); - if (!askWin || askWin.isDestroyed()) { - console.error("[AskService] Ask window is not available to send stream to."); - response.body.getReader().cancel(); - return { success: false, error: 'Ask window is not available.' }; + if (!askWin || askWin.isDestroyed()) { + console.error("[AskService] Ask window is not available to send stream to."); + response.body.getReader().cancel(); + return { success: false, error: 'Ask window is not available.' }; + } + + const reader = response.body.getReader(); + signal.addEventListener('abort', () => { + console.log(`[AskService] Aborting stream reader. 
Reason: ${signal.reason}`); + reader.cancel(signal.reason).catch(() => { /* 이미 취소된 경우의 오류는 무시 */ }); + }); + + await this._processStream(reader, askWin, sessionId, signal); + return { success: true }; + + } catch (multimodalError) { + // 멀티모달 요청이 실패했고 스크린샷이 포함되어 있다면 텍스트만으로 재시도 + if (screenshotBase64 && this._isMultimodalError(multimodalError)) { + console.log(`[AskService] Multimodal request failed, retrying with text-only: ${multimodalError.message}`); + + // 텍스트만으로 메시지 재구성 + const textOnlyMessages = [ + { role: 'system', content: systemPrompt }, + { + role: 'user', + content: `User Request: ${userPrompt.trim()}` + } + ]; + + const fallbackResponse = await streamingLLM.streamChat(textOnlyMessages); + const askWin = getWindowPool()?.get('ask'); + + if (!askWin || askWin.isDestroyed()) { + console.error("[AskService] Ask window is not available for fallback response."); + fallbackResponse.body.getReader().cancel(); + return { success: false, error: 'Ask window is not available.' }; + } + + const fallbackReader = fallbackResponse.body.getReader(); + signal.addEventListener('abort', () => { + console.log(`[AskService] Aborting fallback stream reader. Reason: ${signal.reason}`); + fallbackReader.cancel(signal.reason).catch(() => {}); + }); + + await this._processStream(fallbackReader, askWin, sessionId, signal); + return { success: true }; + } else { + // 다른 종류의 에러이거나 스크린샷이 없었다면 그대로 throw + throw multimodalError; + } } - const reader = response.body.getReader(); - signal.addEventListener('abort', () => { - console.log(`[AskService] Aborting stream reader. Reason: ${signal.reason}`); - reader.cancel(signal.reason).catch(() => { /* 이미 취소된 경우의 오류는 무시 */ }); - }); - - await this._processStream(reader, askWin, sessionId, signal); - - return { success: true }; - } catch (error) { - if (error.name === 'AbortError') { - console.log('[AskService] SendMessage operation was successfully aborted.'); - return { success: true, response: 'Cancelled' }; + console.error('[AskService] Error during message processing:', error); + this.state = { + ...this.state, + isLoading: false, + isStreaming: false, + showTextInput: true, + }; + this._broadcastState(); + + const askWin = getWindowPool()?.get('ask'); + if (askWin && !askWin.isDestroyed()) { + const streamError = error.message || 'Unknown error occurred'; + askWin.webContents.send('ask-response-stream-error', { error: streamError }); } - console.error('[AskService] Error processing message:', error); - this.state.isLoading = false; - this.state.error = error.message; - this._broadcastState(); return { success: false, error: error.message }; } } @@ -381,6 +424,24 @@ class AskService { } } + /** + * 멀티모달 관련 에러인지 판단 + * @private + */ + _isMultimodalError(error) { + const errorMessage = error.message?.toLowerCase() || ''; + return ( + errorMessage.includes('vision') || + errorMessage.includes('image') || + errorMessage.includes('multimodal') || + errorMessage.includes('unsupported') || + errorMessage.includes('image_url') || + errorMessage.includes('400') || // Bad Request often for unsupported features + errorMessage.includes('invalid') || + errorMessage.includes('not supported') + ); + } + } const askService = new AskService(); diff --git a/src/features/common/ai/factory.js b/src/features/common/ai/factory.js index 6afe2a8..8ccc5ec 100644 --- a/src/features/common/ai/factory.js +++ b/src/features/common/ai/factory.js @@ -68,7 +68,8 @@ const PROVIDERS = { handler: () => { // This needs to remain a function due to its conditional logic for renderer/main process if 
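// --- Illustrative sketch, not part of this diff ---
// The AskService change above retries a failed multimodal request with text-only messages when
// the error looks image/vision related. A reduced, standalone form of that fallback; the
// function names and the OpenAI-style image_url message shape are assumptions for illustration:
function looksLikeMultimodalError(error) {
    const msg = (error.message || '').toLowerCase();
    return ['vision', 'image', 'multimodal', 'image_url', 'not supported', 'invalid', '400']
        .some(token => msg.includes(token));
}

async function sendWithTextFallback(streamChat, systemPrompt, userPrompt, screenshotBase64) {
    const textOnly = [
        { role: 'system', content: systemPrompt },
        { role: 'user', content: `User Request: ${userPrompt.trim()}` },
    ];
    const withImage = [
        { role: 'system', content: systemPrompt },
        {
            role: 'user',
            content: [
                { type: 'text', text: `User Request: ${userPrompt.trim()}` },
                { type: 'image_url', image_url: { url: `data:image/jpeg;base64,${screenshotBase64}` } },
            ],
        },
    ];

    try {
        return await streamChat(screenshotBase64 ? withImage : textOnly);
    } catch (err) {
        // only retry when a screenshot was attached and the failure is plausibly caused by it
        if (screenshotBase64 && looksLikeMultimodalError(err)) {
            return streamChat(textOnly);
        }
        throw err;
    }
}
// --- end sketch ---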
(typeof window === 'undefined') { - return require("./providers/whisper"); + const { WhisperProvider } = require("./providers/whisper"); + return new WhisperProvider(); } // Return a dummy object for the renderer process return { diff --git a/src/features/common/ai/providers/ollama.js b/src/features/common/ai/providers/ollama.js index a521ec1..6a3d08c 100644 --- a/src/features/common/ai/providers/ollama.js +++ b/src/features/common/ai/providers/ollama.js @@ -1,6 +1,79 @@ const http = require('http'); const fetch = require('node-fetch'); +// Request Queue System for Ollama API (only for non-streaming requests) +class RequestQueue { + constructor() { + this.queue = []; + this.processing = false; + this.streamingActive = false; + } + + async addStreamingRequest(requestFn) { + // Streaming requests have priority - wait for current processing to finish + while (this.processing) { + await new Promise(resolve => setTimeout(resolve, 50)); + } + + this.streamingActive = true; + console.log('[Ollama Queue] Starting streaming request (priority)'); + + try { + const result = await requestFn(); + return result; + } finally { + this.streamingActive = false; + console.log('[Ollama Queue] Streaming request completed'); + } + } + + async add(requestFn) { + return new Promise((resolve, reject) => { + this.queue.push({ requestFn, resolve, reject }); + this.process(); + }); + } + + async process() { + if (this.processing || this.queue.length === 0) { + return; + } + + // Wait if streaming is active + if (this.streamingActive) { + setTimeout(() => this.process(), 100); + return; + } + + this.processing = true; + + while (this.queue.length > 0) { + // Check if streaming started while processing queue + if (this.streamingActive) { + this.processing = false; + setTimeout(() => this.process(), 100); + return; + } + + const { requestFn, resolve, reject } = this.queue.shift(); + + try { + console.log(`[Ollama Queue] Processing queued request (${this.queue.length} remaining)`); + const result = await requestFn(); + resolve(result); + } catch (error) { + console.error('[Ollama Queue] Request failed:', error); + reject(error); + } + } + + this.processing = false; + } +} + +// Global request queue instance +const requestQueue = new RequestQueue(); + class OllamaProvider { static async validateApiKey() { try { @@ -79,71 +152,77 @@ function createLLM({ } messages.push({ role: 'user', content: userContent.join('\n') }); - try { - const response = await fetch(`${baseUrl}/api/chat`, { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ - model, - messages, - stream: false, - options: { - temperature, - num_predict: maxTokens, - } - }) - }); + // Use request queue to prevent concurrent API calls + return await requestQueue.add(async () => { + try { + const response = await fetch(`${baseUrl}/api/chat`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + model, + messages, + stream: false, + options: { + temperature, + num_predict: maxTokens, + } + }) + }); - if (!response.ok) { - throw new Error(`Ollama API error: ${response.status} ${response.statusText}`); + if (!response.ok) { + throw new Error(`Ollama API error: ${response.status} ${response.statusText}`); + } + + const result = await response.json(); + + return { + response: { + text: () => result.message.content + }, + raw: result + }; + } catch (error) { + console.error('Ollama LLM error:', error); + throw error; } - - const result = await response.json(); - - return { - response: { 
- text: () => result.message.content - }, - raw: result - }; - } catch (error) { - console.error('Ollama LLM error:', error); - throw error; - } + }); }, chat: async (messages) => { const ollamaMessages = convertMessagesToOllamaFormat(messages); - try { - const response = await fetch(`${baseUrl}/api/chat`, { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ - model, - messages: ollamaMessages, - stream: false, - options: { - temperature, - num_predict: maxTokens, - } - }) - }); + // Use request queue to prevent concurrent API calls + return await requestQueue.add(async () => { + try { + const response = await fetch(`${baseUrl}/api/chat`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + model, + messages: ollamaMessages, + stream: false, + options: { + temperature, + num_predict: maxTokens, + } + }) + }); - if (!response.ok) { - throw new Error(`Ollama API error: ${response.status} ${response.statusText}`); + if (!response.ok) { + throw new Error(`Ollama API error: ${response.status} ${response.statusText}`); + } + + const result = await response.json(); + + return { + content: result.message.content, + raw: result + }; + } catch (error) { + console.error('Ollama chat error:', error); + throw error; } - - const result = await response.json(); - - return { - content: result.message.content, - raw: result - }; - } catch (error) { - console.error('Ollama chat error:', error); - throw error; - } + }); } }; } @@ -165,89 +244,92 @@ function createStreamingLLM({ const ollamaMessages = convertMessagesToOllamaFormat(messages); console.log('[Ollama Provider] Converted messages for Ollama:', ollamaMessages); - try { - const response = await fetch(`${baseUrl}/api/chat`, { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ - model, - messages: ollamaMessages, - stream: true, - options: { - temperature, - num_predict: maxTokens, - } - }) - }); + // Streaming requests have priority over queued requests + return await requestQueue.addStreamingRequest(async () => { + try { + const response = await fetch(`${baseUrl}/api/chat`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + model, + messages: ollamaMessages, + stream: true, + options: { + temperature, + num_predict: maxTokens, + } + }) + }); - if (!response.ok) { - throw new Error(`Ollama API error: ${response.status} ${response.statusText}`); - } - - console.log('[Ollama Provider] Got streaming response'); - - const stream = new ReadableStream({ - async start(controller) { - let buffer = ''; - - try { - response.body.on('data', (chunk) => { - buffer += chunk.toString(); - const lines = buffer.split('\n'); - buffer = lines.pop() || ''; - - for (const line of lines) { - if (line.trim() === '') continue; - - try { - const data = JSON.parse(line); - - if (data.message?.content) { - const sseData = JSON.stringify({ - choices: [{ - delta: { - content: data.message.content - } - }] - }); - controller.enqueue(new TextEncoder().encode(`data: ${sseData}\n\n`)); - } - - if (data.done) { - controller.enqueue(new TextEncoder().encode('data: [DONE]\n\n')); - } - } catch (e) { - console.error('[Ollama Provider] Failed to parse chunk:', e); - } - } - }); - - response.body.on('end', () => { - controller.close(); - console.log('[Ollama Provider] Streaming completed'); - }); - - response.body.on('error', (error) => { - console.error('[Ollama Provider] Streaming error:', error); - 
controller.error(error); - }); - - } catch (error) { - console.error('[Ollama Provider] Streaming setup error:', error); - controller.error(error); - } + if (!response.ok) { + throw new Error(`Ollama API error: ${response.status} ${response.statusText}`); } - }); - return { - ok: true, - body: stream - }; - - } catch (error) { - console.error('[Ollama Provider] Request error:', error); - throw error; - } + console.log('[Ollama Provider] Got streaming response'); + + const stream = new ReadableStream({ + async start(controller) { + let buffer = ''; + + try { + response.body.on('data', (chunk) => { + buffer += chunk.toString(); + const lines = buffer.split('\n'); + buffer = lines.pop() || ''; + + for (const line of lines) { + if (line.trim() === '') continue; + + try { + const data = JSON.parse(line); + + if (data.message?.content) { + const sseData = JSON.stringify({ + choices: [{ + delta: { + content: data.message.content + } + }] + }); + controller.enqueue(new TextEncoder().encode(`data: ${sseData}\n\n`)); + } + + if (data.done) { + controller.enqueue(new TextEncoder().encode('data: [DONE]\n\n')); + } + } catch (e) { + console.error('[Ollama Provider] Failed to parse chunk:', e); + } + } + }); + + response.body.on('end', () => { + controller.close(); + console.log('[Ollama Provider] Streaming completed'); + }); + + response.body.on('error', (error) => { + console.error('[Ollama Provider] Streaming error:', error); + controller.error(error); + }); + + } catch (error) { + console.error('[Ollama Provider] Streaming setup error:', error); + controller.error(error); + } + } + }); + + return { + ok: true, + body: stream + }; + + } catch (error) { + console.error('[Ollama Provider] Request error:', error); + throw error; + } + }); } }; } diff --git a/src/features/common/ai/providers/whisper.js b/src/features/common/ai/providers/whisper.js index 1190977..58cd666 100644 --- a/src/features/common/ai/providers/whisper.js +++ b/src/features/common/ai/providers/whisper.js @@ -184,9 +184,10 @@ class WhisperProvider { async initialize() { if (!this.whisperService) { - const { WhisperService } = require('../../services/whisperService'); - this.whisperService = new WhisperService(); - await this.whisperService.initialize(); + this.whisperService = require('../../services/whisperService'); + if (!this.whisperService.isInitialized) { + await this.whisperService.initialize(); + } } } diff --git a/src/features/common/repositories/providerSettings/sqlite.repository.js b/src/features/common/repositories/providerSettings/sqlite.repository.js index bddd5b0..1967890 100644 --- a/src/features/common/repositories/providerSettings/sqlite.repository.js +++ b/src/features/common/repositories/providerSettings/sqlite.repository.js @@ -1,20 +1,42 @@ const sqliteClient = require('../../services/sqliteClient'); +const encryptionService = require('../../services/encryptionService'); function getByProvider(uid, provider) { const db = sqliteClient.getDb(); const stmt = db.prepare('SELECT * FROM provider_settings WHERE uid = ? AND provider = ?'); - return stmt.get(uid, provider) || null; + const result = stmt.get(uid, provider) || null; + + if (result && result.api_key) { + // Decrypt API key if it exists + result.api_key = encryptionService.decrypt(result.api_key); + } + + return result; } function getAllByUid(uid) { const db = sqliteClient.getDb(); const stmt = db.prepare('SELECT * FROM provider_settings WHERE uid = ? 
ORDER BY provider'); - return stmt.all(uid); + const results = stmt.all(uid); + + // Decrypt API keys for all results + return results.map(result => { + if (result.api_key) { + result.api_key = encryptionService.decrypt(result.api_key); + } + return result; + }); } function upsert(uid, provider, settings) { const db = sqliteClient.getDb(); + // Encrypt API key if it exists + const encryptedSettings = { ...settings }; + if (encryptedSettings.api_key) { + encryptedSettings.api_key = encryptionService.encrypt(encryptedSettings.api_key); + } + // Use SQLite's UPSERT syntax (INSERT ... ON CONFLICT ... DO UPDATE) const stmt = db.prepare(` INSERT INTO provider_settings (uid, provider, api_key, selected_llm_model, selected_stt_model, created_at, updated_at) @@ -29,11 +51,11 @@ function upsert(uid, provider, settings) { const result = stmt.run( uid, provider, - settings.api_key || null, - settings.selected_llm_model || null, - settings.selected_stt_model || null, - settings.created_at || Date.now(), - settings.updated_at + encryptedSettings.api_key || null, + encryptedSettings.selected_llm_model || null, + encryptedSettings.selected_stt_model || null, + encryptedSettings.created_at || Date.now(), + encryptedSettings.updated_at ); return { changes: result.changes }; diff --git a/src/features/common/services/localAIServiceBase.js b/src/features/common/services/localAIServiceBase.js index 45db41b..cbbca3b 100644 --- a/src/features/common/services/localAIServiceBase.js +++ b/src/features/common/services/localAIServiceBase.js @@ -1,6 +1,7 @@ const { exec } = require('child_process'); const { promisify } = require('util'); const { EventEmitter } = require('events'); +const { BrowserWindow } = require('electron'); const path = require('path'); const os = require('os'); const https = require('https'); @@ -17,6 +18,19 @@ class LocalAIServiceBase extends EventEmitter { this.installationProgress = new Map(); } + // 모든 윈도우에 이벤트 브로드캐스트 + _broadcastToAllWindows(eventName, data = null) { + BrowserWindow.getAllWindows().forEach(win => { + if (win && !win.isDestroyed()) { + if (data !== null) { + win.webContents.send(eventName, data); + } else { + win.webContents.send(eventName); + } + } + }); + } + getPlatform() { return process.platform; } @@ -65,7 +79,7 @@ class LocalAIServiceBase extends EventEmitter { setInstallProgress(modelName, progress) { this.installationProgress.set(modelName, progress); - this.emit('install-progress', { model: modelName, progress }); + // 각 서비스에서 직접 브로드캐스트하도록 변경 } clearInstallProgress(modelName) { @@ -152,7 +166,8 @@ class LocalAIServiceBase extends EventEmitter { const { onProgress = null, headers = { 'User-Agent': 'Glass-App' }, - timeout = 300000 // 5 minutes default + timeout = 300000, // 5 minutes default + modelId = null // 모델 ID를 위한 추가 옵션 } = options; return new Promise((resolve, reject) => { @@ -190,9 +205,15 @@ class LocalAIServiceBase extends EventEmitter { response.on('data', (chunk) => { downloadedSize += chunk.length; - if (onProgress && totalSize > 0) { + if (totalSize > 0) { const progress = Math.round((downloadedSize / totalSize) * 100); - onProgress(progress, downloadedSize, totalSize); + + // 이벤트 기반 진행률 보고는 각 서비스에서 직접 처리 + + // 기존 콜백 지원 (호환성 유지) + if (onProgress) { + onProgress(progress, downloadedSize, totalSize); + } } }); @@ -200,7 +221,7 @@ class LocalAIServiceBase extends EventEmitter { file.on('finish', () => { file.close(() => { - this.emit('download-complete', { url, destination, size: downloadedSize }); + // download-complete 이벤트는 각 서비스에서 직접 처리 resolve({ 
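// --- Illustrative usage sketch, not part of this diff ---
// The RequestQueue above serializes non-streaming Ollama calls, while addStreamingRequest()
// waits for any in-flight queued request and then takes priority over everything still queued.
// Hypothetical usage against Ollama's default local endpoint:
const exampleFetch = require('node-fetch');

async function demoRequestQueue(requestQueue) {
    // two non-streaming calls: processed strictly one after the other, in order
    const tagsA = requestQueue.add(() => exampleFetch('http://localhost:11434/api/tags').then(r => r.json()));
    const tagsB = requestQueue.add(() => exampleFetch('http://localhost:11434/api/tags').then(r => r.json()));

    // a streaming chat: pauses queue processing until the stream is finished
    const streamed = requestQueue.addStreamingRequest(() =>
        exampleFetch('http://localhost:11434/api/chat', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({ model: 'llama3.2', messages: [{ role: 'user', content: 'Hi' }], stream: true }),
        })
    );

    return Promise.all([tagsA, tagsB, streamed]);
}
// --- end sketch ---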
success: true, size: downloadedSize }); }); }); @@ -216,7 +237,7 @@ class LocalAIServiceBase extends EventEmitter { request.on('error', (err) => { file.close(); fs.unlink(destination, () => {}); - this.emit('download-error', { url, error: err }); + this.emit('download-error', { url, error: err, modelId }); reject(err); }); @@ -230,11 +251,20 @@ class LocalAIServiceBase extends EventEmitter { } async downloadWithRetry(url, destination, options = {}) { - const { maxRetries = 3, retryDelay = 1000, expectedChecksum = null, ...downloadOptions } = options; + const { + maxRetries = 3, + retryDelay = 1000, + expectedChecksum = null, + modelId = null, // 모델 ID를 위한 추가 옵션 + ...downloadOptions + } = options; for (let attempt = 1; attempt <= maxRetries; attempt++) { try { - const result = await this.downloadFile(url, destination, downloadOptions); + const result = await this.downloadFile(url, destination, { + ...downloadOptions, + modelId + }); if (expectedChecksum) { const isValid = await this.verifyChecksum(destination, expectedChecksum); @@ -248,6 +278,7 @@ class LocalAIServiceBase extends EventEmitter { return result; } catch (error) { if (attempt === maxRetries) { + // download-error 이벤트는 각 서비스에서 직접 처리 throw error; } diff --git a/src/features/common/services/modelStateService.js b/src/features/common/services/modelStateService.js index e60a2e0..a6162b5 100644 --- a/src/features/common/services/modelStateService.js +++ b/src/features/common/services/modelStateService.js @@ -1,6 +1,7 @@ const Store = require('electron-store'); const fetch = require('node-fetch'); -const { ipcMain, webContents } = require('electron'); +const { EventEmitter } = require('events'); +const { BrowserWindow } = require('electron'); const { PROVIDERS, getProviderClass } = require('../ai/factory'); const encryptionService = require('./encryptionService'); const providerSettingsRepository = require('../repositories/providerSettings'); @@ -9,8 +10,9 @@ const userModelSelectionsRepository = require('../repositories/userModelSelectio // Import authService directly (singleton) const authService = require('./authService'); -class ModelStateService { +class ModelStateService extends EventEmitter { constructor() { + super(); this.authService = authService; this.store = new Store({ name: 'pickle-glass-model-state' }); this.state = {}; @@ -21,6 +23,19 @@ class ModelStateService { userModelSelectionsRepository.setAuthService(authService); } + // 모든 윈도우에 이벤트 브로드캐스트 + _broadcastToAllWindows(eventName, data = null) { + BrowserWindow.getAllWindows().forEach(win => { + if (win && !win.isDestroyed()) { + if (data !== null) { + win.webContents.send(eventName, data); + } else { + win.webContents.send(eventName); + } + } + }); + } + async initialize() { console.log('[ModelStateService] Initializing...'); await this._loadStateForCurrentUser(); @@ -143,17 +158,8 @@ class ModelStateService { for (const setting of providerSettings) { if (setting.api_key) { - // API keys are stored encrypted in database, decrypt them - if (setting.provider !== 'ollama' && setting.provider !== 'whisper') { - try { - apiKeys[setting.provider] = encryptionService.decrypt(setting.api_key); - } catch (error) { - console.error(`[ModelStateService] Failed to decrypt API key for ${setting.provider}, resetting`); - apiKeys[setting.provider] = null; - } - } else { - apiKeys[setting.provider] = setting.api_key; - } + // API keys are already decrypted by the repository layer + apiKeys[setting.provider] = setting.api_key; } } @@ -171,6 +177,9 @@ class ModelStateService { 
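// --- Illustrative sketch, not part of this diff ---
// A recurring pattern in this change: services stop using `event.sender.send(...)` / EventEmitter
// wiring and instead broadcast to every open window, as in the _broadcastToAllWindows helpers
// added above. A standalone version of that helper:
const { BrowserWindow } = require('electron');

function broadcastToAllWindows(eventName, data = null) {
    for (const win of BrowserWindow.getAllWindows()) {
        // skip windows that have already been destroyed
        if (!win || win.isDestroyed()) continue;
        if (data !== null) {
            win.webContents.send(eventName, data);
        } else {
            win.webContents.send(eventName);
        }
    }
}

// e.g. broadcastToAllWindows('ollama:pull-progress', { model: 'llama3.2', progress: 42, status: 'downloading' });
// --- end sketch ---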
console.log(`[ModelStateService] State loaded from database for user: ${userId}`); + // Auto-select available models after loading state + this._autoSelectAvailableModels(); + } catch (error) { console.error('[ModelStateService] Failed to load state from database:', error); // Fall back to default state @@ -217,12 +226,9 @@ class ModelStateService { // Save provider settings (API keys) for (const [provider, apiKey] of Object.entries(this.state.apiKeys)) { if (apiKey) { - const encryptedKey = (provider !== 'ollama' && provider !== 'whisper') - ? encryptionService.encrypt(apiKey) - : apiKey; - + // API keys will be encrypted by the repository layer await providerSettingsRepository.upsert(provider, { - api_key: encryptedKey + api_key: apiKey }); } else { // Remove empty API keys @@ -262,7 +268,7 @@ class ModelStateService { }; for (const [provider, key] of Object.entries(stateToSave.apiKeys)) { - if (key && provider !== 'ollama' && provider !== 'whisper') { + if (key) { try { stateToSave.apiKeys[provider] = encryptionService.encrypt(key); } catch (error) { @@ -331,22 +337,19 @@ class ModelStateService { } async setApiKey(provider, key) { - if (provider in this.state.apiKeys) { - this.state.apiKeys[provider] = key; - - const supportedTypes = []; - if (PROVIDERS[provider]?.llmModels.length > 0 || provider === 'ollama') { - supportedTypes.push('llm'); - } - if (PROVIDERS[provider]?.sttModels.length > 0 || provider === 'whisper') { - supportedTypes.push('stt'); - } - - this._autoSelectAvailableModels(supportedTypes); - await this._saveState(); - return true; + console.log(`[ModelStateService] setApiKey: ${provider}`); + if (!provider) { + throw new Error('Provider is required'); } - return false; + + // API keys will be encrypted by the repository layer + this.state.apiKeys[provider] = key; + await this._saveState(); + + this._autoSelectAvailableModels([]); + + this._broadcastToAllWindows('model-state:updated', this.state); + this._broadcastToAllWindows('settings-updated'); } getApiKey(provider) { @@ -358,19 +361,14 @@ class ModelStateService { return displayKeys; } - removeApiKey(provider) { - console.log(`[ModelStateService] Removing API key for provider: ${provider}`); - if (provider in this.state.apiKeys) { + async removeApiKey(provider) { + if (this.state.apiKeys[provider]) { this.state.apiKeys[provider] = null; - const llmProvider = this.getProviderForModel('llm', this.state.selectedModels.llm); - if (llmProvider === provider) this.state.selectedModels.llm = null; - - const sttProvider = this.getProviderForModel('stt', this.state.selectedModels.stt); - if (sttProvider === provider) this.state.selectedModels.stt = null; - - this._autoSelectAvailableModels(); - this._saveState(); - this._logCurrentSelection(); + await providerSettingsRepository.remove(provider); + await this._saveState(); + this._autoSelectAvailableModels([]); + this._broadcastToAllWindows('model-state:updated', this.state); + this._broadcastToAllWindows('settings-updated'); return true; } return false; @@ -456,11 +454,36 @@ class ModelStateService { const available = []; const modelList = type === 'llm' ? 
'llmModels' : 'sttModels'; - Object.entries(this.state.apiKeys).forEach(([providerId, key]) => { - if (key && PROVIDERS[providerId]?.[modelList]) { + for (const [providerId, key] of Object.entries(this.state.apiKeys)) { + if (!key) continue; + + // Ollama의 경우 데이터베이스에서 설치된 모델을 가져오기 + if (providerId === 'ollama' && type === 'llm') { + try { + const ollamaModelRepository = require('../repositories/ollamaModel'); + const installedModels = ollamaModelRepository.getInstalledModels(); + const ollamaModels = installedModels.map(model => ({ + id: model.name, + name: model.name + })); + available.push(...ollamaModels); + } catch (error) { + console.warn('[ModelStateService] Failed to get Ollama models from DB:', error.message); + } + } + // Whisper의 경우 정적 모델 목록 사용 (설치 상태는 별도 확인) + else if (providerId === 'whisper' && type === 'stt') { + // Whisper 모델은 factory.js의 정적 목록 사용 + if (PROVIDERS[providerId]?.[modelList]) { + available.push(...PROVIDERS[providerId][modelList]); + } + } + // 다른 provider들은 기존 로직 사용 + else if (PROVIDERS[providerId]?.[modelList]) { available.push(...PROVIDERS[providerId][modelList]); } - }); + } + return [...new Map(available.map(item => [item.id, item])).values()]; } @@ -469,20 +492,31 @@ class ModelStateService { } setSelectedModel(type, modelId) { - const provider = this.getProviderForModel(type, modelId); - if (provider && this.state.apiKeys[provider]) { - const previousModel = this.state.selectedModels[type]; - this.state.selectedModels[type] = modelId; - this._saveState(); - - // Auto warm-up for Ollama LLM models when changed - if (type === 'llm' && provider === 'ollama' && modelId !== previousModel) { - this._autoWarmUpOllamaModel(modelId, previousModel); - } - - return true; + const availableModels = this.getAvailableModels(type); + const isAvailable = availableModels.some(model => model.id === modelId); + + if (!isAvailable) { + console.warn(`[ModelStateService] Model ${modelId} is not available for type ${type}`); + return false; } - return false; + + const previousModelId = this.state.selectedModels[type]; + this.state.selectedModels[type] = modelId; + this._saveState(); + + console.log(`[ModelStateService] Selected ${type} model: ${modelId} (was: ${previousModelId})`); + + // Auto warm-up for Ollama models + if (type === 'llm' && modelId && modelId !== previousModelId) { + const provider = this.getProviderForModel('llm', modelId); + if (provider === 'ollama') { + this._autoWarmUpOllamaModel(modelId, previousModelId); + } + } + + this._broadcastToAllWindows('model-state:updated', this.state); + this._broadcastToAllWindows('settings-updated'); + return true; } /** @@ -493,7 +527,7 @@ class ModelStateService { */ async _autoWarmUpOllamaModel(newModelId, previousModelId) { try { - console.log(`[ModelStateService] 🔥 LLM model changed: ${previousModelId || 'None'} → ${newModelId}, triggering warm-up`); + console.log(`[ModelStateService] LLM model changed: ${previousModelId || 'None'} → ${newModelId}, triggering warm-up`); // Get Ollama service if available const ollamaService = require('./ollamaService'); @@ -509,12 +543,12 @@ class ModelStateService { const success = await ollamaService.warmUpModel(newModelId); if (success) { - console.log(`[ModelStateService] ✅ Successfully warmed up model: ${newModelId}`); + console.log(`[ModelStateService] Successfully warmed up model: ${newModelId}`); } else { - console.log(`[ModelStateService] ⚠️ Failed to warm up model: ${newModelId}`); + console.log(`[ModelStateService] Failed to warm up model: ${newModelId}`); } } catch (error) { 
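// --- Illustrative sketch, not part of this diff ---
// getAvailableModels() above merges models from several providers and de-duplicates them by id
// with a Map, and setSelectedModel() only accepts ids present in that merged list. A reduced
// form of the same pattern, with made-up model data:
function dedupeById(models) {
    // later entries with the same id overwrite earlier ones; values() yields one entry per id
    return [...new Map(models.map(m => [m.id, m])).values()];
}

function isSelectable(availableModels, modelId) {
    return availableModels.some(m => m.id === modelId);
}

const merged = dedupeById([
    { id: 'llama3.2', name: 'llama3.2' },   // e.g. from the Ollama install database
    { id: 'llama3.2', name: 'llama3.2' },   // duplicate from another source, dropped
    { id: 'gpt-4o', name: 'GPT-4o' },       // e.g. from a static provider list
]);
console.log(merged.length, isSelectable(merged, 'gpt-4o')); // 2 true
// --- end sketch ---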
- console.log(`[ModelStateService] 🚫 Error during auto warm-up for ${newModelId}:`, error.message); + console.log(`[ModelStateService] Error during auto warm-up for ${newModelId}:`, error.message); } }, 500); // 500ms delay @@ -544,13 +578,11 @@ class ModelStateService { async handleRemoveApiKey(provider) { console.log(`[ModelStateService] handleRemoveApiKey: ${provider}`); - const success = this.removeApiKey(provider); + const success = await this.removeApiKey(provider); if (success) { const selectedModels = this.getSelectedModels(); if (!selectedModels.llm || !selectedModels.stt) { - webContents.getAllWebContents().forEach(wc => { - wc.send('force-show-apikey-header'); - }); + this._broadcastToAllWindows('force-show-apikey-header'); } } return success; diff --git a/src/features/common/services/ollamaService.js b/src/features/common/services/ollamaService.js index 8506651..f631b09 100644 --- a/src/features/common/services/ollamaService.js +++ b/src/features/common/services/ollamaService.js @@ -3,7 +3,7 @@ const { promisify } = require('util'); const fetch = require('node-fetch'); const path = require('path'); const fs = require('fs').promises; -const { app } = require('electron'); +const { app, BrowserWindow } = require('electron'); const LocalAIServiceBase = require('./localAIServiceBase'); const { spawnAsync } = require('../utils/spawnHelper'); const { DOWNLOAD_CHECKSUMS } = require('../config/checksums'); @@ -27,8 +27,8 @@ class OllamaService extends LocalAIServiceBase { }; // Configuration - this.requestTimeout = 8000; // 8s for health checks - this.warmupTimeout = 15000; // 15s for model warmup + this.requestTimeout = 0; // Delete timeout + this.warmupTimeout = 120000; // 120s for model warmup this.healthCheckInterval = 60000; // 1min between health checks this.circuitBreakerThreshold = 3; this.circuitBreakerCooldown = 30000; // 30s @@ -40,6 +40,19 @@ class OllamaService extends LocalAIServiceBase { this._startHealthMonitoring(); } + // 모든 윈도우에 이벤트 브로드캐스트 + _broadcastToAllWindows(eventName, data = null) { + BrowserWindow.getAllWindows().forEach(win => { + if (win && !win.isDestroyed()) { + if (data !== null) { + win.webContents.send(eventName, data); + } else { + win.webContents.send(eventName); + } + } + }); + } + async getStatus() { try { const installed = await this.isInstalled(); @@ -87,14 +100,17 @@ class OllamaService extends LocalAIServiceBase { const controller = new AbortController(); const timeout = options.timeout || this.requestTimeout; - // Set up timeout mechanism - const timeoutId = setTimeout(() => { - controller.abort(); - this.activeRequests.delete(requestId); - this._recordFailure(); - }, timeout); - - this.requestTimeouts.set(requestId, timeoutId); + // Set up timeout mechanism only if timeout > 0 + let timeoutId = null; + if (timeout > 0) { + timeoutId = setTimeout(() => { + controller.abort(); + this.activeRequests.delete(requestId); + this._recordFailure(); + }, timeout); + + this.requestTimeouts.set(requestId, timeoutId); + } const requestPromise = this._executeRequest(url, { ...options, @@ -115,8 +131,10 @@ class OllamaService extends LocalAIServiceBase { } throw error; } finally { - clearTimeout(timeoutId); - this.requestTimeouts.delete(requestId); + if (timeoutId !== null) { + clearTimeout(timeoutId); + this.requestTimeouts.delete(requestId); + } this.activeRequests.delete(operationType === 'health' ? 
'health' : requestId); } } @@ -377,7 +395,7 @@ class OllamaService extends LocalAIServiceBase { if (progress !== null) { this.setInstallProgress(modelName, progress); - this.emit('pull-progress', { + this._broadcastToAllWindows('ollama:pull-progress', { model: modelName, progress, status: data.status || 'downloading' @@ -388,7 +406,7 @@ class OllamaService extends LocalAIServiceBase { // Handle completion if (data.status === 'success') { console.log(`[OllamaService] Successfully pulled model: ${modelName}`); - this.emit('pull-complete', { model: modelName }); + this._broadcastToAllWindows('ollama:pull-complete', { model: modelName }); this.clearInstallProgress(modelName); resolve(); return; @@ -406,7 +424,7 @@ class OllamaService extends LocalAIServiceBase { const data = JSON.parse(buffer); if (data.status === 'success') { console.log(`[OllamaService] Successfully pulled model: ${modelName}`); - this.emit('pull-complete', { model: modelName }); + this._broadcastToAllWindows('ollama:pull-complete', { model: modelName }); } } catch (parseError) { console.warn('[OllamaService] Failed to parse final buffer:', buffer); @@ -639,8 +657,48 @@ class OllamaService extends LocalAIServiceBase { return true; } catch (error) { - console.error(`[OllamaService] Failed to warm up model ${modelName}:`, error.message); - return false; + // Check if it's a 404 error (model not found/installed) + if (error.message.includes('HTTP 404') || error.message.includes('Not Found')) { + console.log(`[OllamaService] Model ${modelName} not found (404), attempting to install...`); + + try { + // Try to install the model + await this.pullModel(modelName); + console.log(`[OllamaService] Successfully installed model ${modelName}, retrying warm-up...`); + + // Update database to reflect installation + await ollamaModelRepository.updateInstallStatus(modelName, true, false); + + // Retry warm-up after installation + const retryResponse = await this._makeRequest(`${this.baseUrl}/api/chat`, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + model: modelName, + messages: [ + { role: 'user', content: 'Hi' } + ], + stream: false, + options: { + num_predict: 1, + temperature: 0 + } + }), + timeout: this.warmupTimeout + }, `warmup_retry_${modelName}`); + + console.log(`[OllamaService] Successfully warmed up model ${modelName} after installation`); + return true; + + } catch (installError) { + console.error(`[OllamaService] Failed to auto-install model ${modelName}:`, installError.message); + await ollamaModelRepository.updateInstallStatus(modelName, false, false); + return false; + } + } else { + console.error(`[OllamaService] Failed to warm up model ${modelName}:`, error.message); + return false; + } } } @@ -671,14 +729,8 @@ class OllamaService extends LocalAIServiceBase { return false; } - // Check if model is installed - const isInstalled = await this.isModelInstalled(llmModelId); - if (!isInstalled) { - console.log(`[OllamaService] Model ${llmModelId} not installed, skipping warm-up`); - return false; - } - - console.log(`[OllamaService] Auto-warming up selected model: ${llmModelId}`); + // 설치 여부 체크 제거 - _performWarmUp에서 자동으로 설치 처리 + console.log(`[OllamaService] Auto-warming up selected model: ${llmModelId} (will auto-install if needed)`); return await this.warmUpModel(llmModelId); } catch (error) { @@ -844,10 +896,10 @@ class OllamaService extends LocalAIServiceBase { } } - async handleInstall(event) { + async handleInstall() { try { const onProgress = (data) => { - 
event.sender.send('ollama:install-progress', data); + this._broadcastToAllWindows('ollama:install-progress', data); }; await this.autoInstall(onProgress); @@ -857,26 +909,26 @@ class OllamaService extends LocalAIServiceBase { await this.startService(); onProgress({ stage: 'starting', message: 'Ollama service started.', progress: 100 }); } - event.sender.send('ollama:install-complete', { success: true }); + this._broadcastToAllWindows('ollama:install-complete', { success: true }); return { success: true }; } catch (error) { console.error('[OllamaService] Failed to install:', error); - event.sender.send('ollama:install-complete', { success: false, error: error.message }); + this._broadcastToAllWindows('ollama:install-complete', { success: false, error: error.message }); return { success: false, error: error.message }; } } - async handleStartService(event) { + async handleStartService() { try { if (!await this.isServiceRunning()) { console.log('[OllamaService] Starting Ollama service...'); await this.startService(); } - event.sender.send('ollama:install-complete', { success: true }); + this.emit('install-complete', { success: true }); return { success: true }; } catch (error) { console.error('[OllamaService] Failed to start service:', error); - event.sender.send('ollama:install-complete', { success: false, error: error.message }); + this.emit('install-complete', { success: false, error: error.message }); return { success: false, error: error.message }; } } @@ -914,29 +966,12 @@ class OllamaService extends LocalAIServiceBase { } } - async handlePullModel(event, modelName) { + async handlePullModel(modelName) { try { console.log(`[OllamaService] Starting model pull: ${modelName}`); await ollamaModelRepository.updateInstallStatus(modelName, false, true); - const progressHandler = (data) => { - if (data.model === modelName) { - event.sender.send('ollama:pull-progress', data); - } - }; - - const completeHandler = (data) => { - if (data.model === modelName) { - console.log(`[OllamaService] Model ${modelName} pull completed`); - this.removeListener('pull-progress', progressHandler); - this.removeListener('pull-complete', completeHandler); - } - }; - - this.on('pull-progress', progressHandler); - this.on('pull-complete', completeHandler); - await this.pullModel(modelName); await ollamaModelRepository.updateInstallStatus(modelName, true, false); @@ -946,6 +981,7 @@ class OllamaService extends LocalAIServiceBase { } catch (error) { console.error('[OllamaService] Failed to pull model:', error); await ollamaModelRepository.updateInstallStatus(modelName, false, false); + this._broadcastToAllWindows('ollama:pull-error', { model: modelName, error: error.message }); return { success: false, error: error.message }; } } @@ -990,7 +1026,7 @@ class OllamaService extends LocalAIServiceBase { } } - async handleShutdown(event, force = false) { + async handleShutdown(force = false) { try { console.log(`[OllamaService] Manual shutdown requested (force: ${force})`); const success = await this.shutdown(force); diff --git a/src/features/common/services/whisperService.js b/src/features/common/services/whisperService.js index 5d14f44..4f44cd1 100644 --- a/src/features/common/services/whisperService.js +++ b/src/features/common/services/whisperService.js @@ -2,6 +2,7 @@ const { spawn } = require('child_process'); const path = require('path'); const fs = require('fs'); const os = require('os'); +const { BrowserWindow } = require('electron'); const LocalAIServiceBase = require('./localAIServiceBase'); const { spawnAsync } = 
require('../utils/spawnHelper'); const { DOWNLOAD_CHECKSUMS } = require('../config/checksums'); @@ -39,6 +40,19 @@ class WhisperService extends LocalAIServiceBase { }; } + // 모든 윈도우에 이벤트 브로드캐스트 + _broadcastToAllWindows(eventName, data = null) { + BrowserWindow.getAllWindows().forEach(win => { + if (win && !win.isDestroyed()) { + if (data !== null) { + win.webContents.send(eventName, data); + } else { + win.webContents.send(eventName); + } + } + }); + } + async initialize() { if (this.isInitialized) return; @@ -157,19 +171,21 @@ class WhisperService extends LocalAIServiceBase { const modelPath = await this.getModelPath(modelId); const checksumInfo = DOWNLOAD_CHECKSUMS.whisper.models[modelId]; - this.emit('downloadProgress', { modelId, progress: 0 }); + this._broadcastToAllWindows('whisper:download-progress', { modelId, progress: 0 }); await this.downloadWithRetry(modelInfo.url, modelPath, { expectedChecksum: checksumInfo?.sha256, + modelId, // modelId를 전달하여 LocalAIServiceBase에서 이벤트 발생 시 사용 onProgress: (progress) => { - this.emit('downloadProgress', { modelId, progress }); + this._broadcastToAllWindows('whisper:download-progress', { modelId, progress }); } }); console.log(`[WhisperService] Model ${modelId} downloaded successfully`); + this._broadcastToAllWindows('whisper:download-complete', { modelId }); } - async handleDownloadModel(event, modelId) { + async handleDownloadModel(modelId) { try { console.log(`[WhisperService] Handling download for model: ${modelId}`); @@ -177,19 +193,7 @@ class WhisperService extends LocalAIServiceBase { await this.initialize(); } - const progressHandler = (data) => { - if (data.modelId === modelId && event && event.sender) { - event.sender.send('whisper:download-progress', data); - } - }; - - this.on('downloadProgress', progressHandler); - - try { - await this.ensureModelAvailable(modelId); - } finally { - this.removeListener('downloadProgress', progressHandler); - } + await this.ensureModelAvailable(modelId); return { success: true }; } catch (error) { diff --git a/src/features/listen/listenService.js b/src/features/listen/listenService.js index 3f62fb7..924e9b7 100644 --- a/src/features/listen/listenService.js +++ b/src/features/listen/listenService.js @@ -41,11 +41,58 @@ class ListenService { } sendToRenderer(channel, data) { - BrowserWindow.getAllWindows().forEach(win => { - if (!win.isDestroyed()) { - win.webContents.send(channel, data); + const { windowPool } = require('../../window/windowManager'); + const listenWindow = windowPool?.get('listen'); + + if (listenWindow && !listenWindow.isDestroyed()) { + listenWindow.webContents.send(channel, data); + } + } + + initialize() { + this.setupIpcHandlers(); + console.log('[ListenService] Initialized and ready.'); + } + + async handleListenRequest(listenButtonText) { + const { windowPool, updateLayout } = require('../../window/windowManager'); + const listenWindow = windowPool.get('listen'); + const header = windowPool.get('header'); + + try { + switch (listenButtonText) { + case 'Listen': + console.log('[ListenService] changeSession to "Listen"'); + listenWindow.show(); + updateLayout(); + listenWindow.webContents.send('window-show-animation'); + await this.initializeSession(); + listenWindow.webContents.send('session-state-changed', { isActive: true }); + break; + + case 'Stop': + console.log('[ListenService] changeSession to "Stop"'); + await this.closeSession(); + listenWindow.webContents.send('session-state-changed', { isActive: false }); + break; + + case 'Done': + console.log('[ListenService] 
changeSession to "Done"'); + listenWindow.webContents.send('window-hide-animation'); + listenWindow.webContents.send('session-state-changed', { isActive: false }); + break; + + default: + throw new Error(`[ListenService] unknown listenButtonText: ${listenButtonText}`); } - }); + + header.webContents.send('listen:changeSessionResult', { success: true }); + + } catch (error) { + console.error('[ListenService] error in handleListenRequest:', error); + header.webContents.send('listen:changeSessionResult', { success: false }); + throw error; + } } initialize() { diff --git a/src/features/listen/stt/sttService.js b/src/features/listen/stt/sttService.js index 4109dd7..9fd9e87 100644 --- a/src/features/listen/stt/sttService.js +++ b/src/features/listen/stt/sttService.js @@ -35,11 +35,24 @@ class SttService { } sendToRenderer(channel, data) { - BrowserWindow.getAllWindows().forEach(win => { - if (!win.isDestroyed()) { - win.webContents.send(channel, data); - } - }); + // Listen 관련 이벤트는 Listen 윈도우에만 전송 (Ask 윈도우 충돌 방지) + const { windowPool } = require('../../../window/windowManager'); + const listenWindow = windowPool?.get('listen'); + + if (listenWindow && !listenWindow.isDestroyed()) { + listenWindow.webContents.send(channel, data); + } + } + + async handleSendSystemAudioContent(data, mimeType) { + try { + await this.sendSystemAudioContent(data, mimeType); + this.sendToRenderer('system-audio-data', { data }); + return { success: true }; + } catch (error) { + console.error('Error sending system audio:', error); + return { success: false, error: error.message }; + } } async handleSendSystemAudioContent(data, mimeType) { diff --git a/src/features/listen/summary/summaryService.js b/src/features/listen/summary/summaryService.js index 29f92a3..0296d96 100644 --- a/src/features/listen/summary/summaryService.js +++ b/src/features/listen/summary/summaryService.js @@ -28,11 +28,12 @@ class SummaryService { } sendToRenderer(channel, data) { - BrowserWindow.getAllWindows().forEach(win => { - if (!win.isDestroyed()) { - win.webContents.send(channel, data); - } - }); + const { windowPool } = require('../../../window/windowManager'); + const listenWindow = windowPool?.get('listen'); + + if (listenWindow && !listenWindow.isDestroyed()) { + listenWindow.webContents.send(channel, data); + } } addConversationTurn(speaker, text) { @@ -304,25 +305,20 @@ Keep all points concise and build upon previous analysis if provided.`, */ async triggerAnalysisIfNeeded() { if (this.conversationHistory.length >= 5 && this.conversationHistory.length % 5 === 0) { - console.log(`🚀 Triggering analysis (non-blocking) - ${this.conversationHistory.length} conversation texts accumulated`); + console.log(`Triggering analysis - ${this.conversationHistory.length} conversation texts accumulated`); - this.makeOutlineAndRequests(this.conversationHistory) - .then(data => { - if (data) { - console.log('📤 Sending structured data to renderer'); - this.sendToRenderer('summary-update', data); - - // Notify callback - if (this.onAnalysisComplete) { - this.onAnalysisComplete(data); - } - } else { - console.log('❌ No analysis data returned from non-blocking call'); - } - }) - .catch(error => { - console.error('❌ Error in non-blocking analysis:', error); - }); + const data = await this.makeOutlineAndRequests(this.conversationHistory); + if (data) { + console.log('Sending structured data to renderer'); + this.sendToRenderer('summary-update', data); + + // Notify callback + if (this.onAnalysisComplete) { + this.onAnalysisComplete(data); + } + } else { + 
console.log('No analysis data returned'); + } } } diff --git a/src/features/settings/settingsService.js b/src/features/settings/settingsService.js index 58e0a98..da68a3a 100644 --- a/src/features/settings/settingsService.js +++ b/src/features/settings/settingsService.js @@ -27,13 +27,16 @@ const NOTIFICATION_CONFIG = { // New facade functions for model state management async function getModelSettings() { try { - const [config, storedKeys, availableLlm, availableStt, selectedModels] = await Promise.all([ + const [config, storedKeys, selectedModels] = await Promise.all([ modelStateService.getProviderConfig(), modelStateService.getAllApiKeys(), - modelStateService.getAvailableModels('llm'), - modelStateService.getAvailableModels('stt'), modelStateService.getSelectedModels(), ]); + + // 동기 함수들은 별도로 호출 + const availableLlm = modelStateService.getAvailableModels('llm'); + const availableStt = modelStateService.getAvailableModels('stt'); + return { success: true, data: { config, storedKeys, availableLlm, availableStt, selectedModels } }; } catch (error) { console.error('[SettingsService] Error getting model settings:', error); diff --git a/src/index.js b/src/index.js index 627c560..e64f06f 100644 --- a/src/index.js +++ b/src/index.js @@ -532,6 +532,7 @@ async function handleFirebaseAuthCallback(params) { }; // 1. Sync user data to local DB + userRepository.setAuthService(authService); userRepository.findOrCreate(firebaseUser); console.log('[Auth] User data synced with local DB.'); diff --git a/src/preload.js b/src/preload.js index a463a5b..5f49b87 100644 --- a/src/preload.js +++ b/src/preload.js @@ -136,6 +136,9 @@ contextBridge.exposeInMainWorld('api', { // Listeners onAskStateUpdate: (callback) => ipcRenderer.on('ask:stateUpdate', callback), removeOnAskStateUpdate: (callback) => ipcRenderer.removeListener('ask:stateUpdate', callback), + + onAskStreamError: (callback) => ipcRenderer.on('ask-response-stream-error', callback), + removeOnAskStreamError: (callback) => ipcRenderer.removeListener('ask-response-stream-error', callback), // Listeners onShowTextInput: (callback) => ipcRenderer.on('ask:showTextInput', callback), diff --git a/src/ui/app/ApiKeyHeader.js b/src/ui/app/ApiKeyHeader.js index 0bdbe03..820ea6a 100644 --- a/src/ui/app/ApiKeyHeader.js +++ b/src/ui/app/ApiKeyHeader.js @@ -1,92 +1,76 @@ -import { html, css, LitElement } from "../assets/lit-core-2.7.4.min.js" -import { getOllamaProgressTracker } from "../../features/common/services/localProgressTracker.js" +import { html, css, LitElement } from '../assets/lit-core-2.7.4.min.js'; export class ApiKeyHeader extends LitElement { - //////// after_modelStateService //////// - static properties = { - llmApiKey: { type: String }, - sttApiKey: { type: String }, - llmProvider: { type: String }, - sttProvider: { type: String }, - isLoading: { type: Boolean }, - errorMessage: { type: String }, - successMessage: { type: String }, - providers: { type: Object, state: true }, - modelSuggestions: { type: Array, state: true }, - userModelHistory: { type: Array, state: true }, - selectedLlmModel: { type: String, state: true }, - selectedSttModel: { type: String, state: true }, - ollamaStatus: { type: Object, state: true }, - installingModel: { type: String, state: true }, - installProgress: { type: Number, state: true }, - whisperInstallingModels: { type: Object, state: true }, - } - //////// after_modelStateService //////// + //////// after_modelStateService //////// + static properties = { + llmApiKey: { type: String }, + sttApiKey: { type: String }, 
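// --- Illustrative sketch, not part of this diff ---
// preload.js above exposes onAskStreamError / removeOnAskStreamError for the new
// 'ask-response-stream-error' channel. A renderer consumer should keep the same function
// reference so it can unsubscribe later; the class name here is hypothetical:
class AskStreamErrorListener {
    constructor() {
        // ipcRenderer.on callbacks receive (event, payload); the main process sends { error }
        this.handleStreamError = (_event, { error }) => {
            console.error('Ask stream failed:', error);
        };
    }
    attach() {
        window.api.onAskStreamError(this.handleStreamError);
    }
    detach() {
        window.api.removeOnAskStreamError(this.handleStreamError);
    }
}
// --- end sketch ---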
+ llmProvider: { type: String }, + sttProvider: { type: String }, + isLoading: { type: Boolean }, + errorMessage: { type: String }, + successMessage: { type: String }, + providers: { type: Object, state: true }, + modelSuggestions: { type: Array, state: true }, + userModelHistory: { type: Array, state: true }, + selectedLlmModel: { type: String, state: true }, + selectedSttModel: { type: String, state: true }, + ollamaStatus: { type: Object, state: true }, + installingModel: { type: String, state: true }, + installProgress: { type: Number, state: true }, + whisperInstallingModels: { type: Object, state: true }, + backCallback: { type: Function }, + llmError: { type: String }, + sttError: { type: String }, + }; + //////// after_modelStateService //////// - static styles = css` + static styles = css` :host { - display: block; - transition: opacity 0.3s ease-in, transform 0.3s ease-in; - will-change: opacity, transform; + display: block; + font-family: + 'Inter', + -apple-system, + BlinkMacSystemFont, + 'Segoe UI', + Roboto, + sans-serif; } - - :host(.sliding-out) { - opacity: 0; - transform: translateY(-20px); - } - - :host(.hidden) { - opacity: 0; - pointer-events: none; - } - * { - font-family: 'Helvetica Neue', -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; - cursor: default; - user-select: none; box-sizing: border-box; } - .container { - -webkit-app-region: drag; - width: 350px; - min-height: 260px; - padding: 18px 20px; - background: rgba(0, 0, 0, 0.3); + width: 100%; + height: 100%; + padding: 24px 16px; + background: rgba(0, 0, 0, 0.64); + box-shadow: 0px 0px 0px 1.5px rgba(255, 255, 255, 0.64) inset; border-radius: 16px; - overflow: visible; + flex-direction: column; + justify-content: flex-start; + align-items: flex-start; + gap: 24px; + display: flex; + -webkit-app-region: drag; + } + .header { + width: 100%; position: relative; display: flex; - flex-direction: column; + justify-content: center; align-items: center; + margin-bottom: 8px; } - - .container::after { - content: ''; - position: absolute; - top: 0; - left: 0; - right: 0; - bottom: 0; - border-radius: 16px; - padding: 1px; - background: linear-gradient(169deg, rgba(255, 255, 255, 0.5) 0%, rgba(255, 255, 255, 0) 50%, rgba(255, 255, 255, 0.5) 100%); - -webkit-mask: linear-gradient(#fff 0 0) content-box, linear-gradient(#fff 0 0); - -webkit-mask-composite: destination-out; - mask-composite: exclude; - pointer-events: none; - } - .close-button { -webkit-app-region: no-drag; position: absolute; - top: 10px; - right: 10px; - width: 14px; - height: 14px; + top: 16px; + right: 16px; + width: 20px; + height: 20px; background: rgba(255, 255, 255, 0.1); border: none; - border-radius: 3px; + border-radius: 5px; color: rgba(255, 255, 255, 0.7); cursor: pointer; display: flex; @@ -94,1828 +78,2004 @@ export class ApiKeyHeader extends LitElement { justify-content: center; transition: all 0.15s ease; z-index: 10; - font-size: 14px; + font-size: 16px; line-height: 1; padding: 0; } - .close-button:hover { background: rgba(255, 255, 255, 0.2); color: rgba(255, 255, 255, 0.9); } - - .close-button:active { - transform: scale(0.95); + .back-button { + -webkit-app-region: no-drag; + padding: 8px; + left: 0px; + top: -7px; + position: absolute; + background: rgba(132.6, 132.6, 132.6, 0.8); + border-radius: 16px; + border: 0.5px solid rgba(255, 255, 255, 0.5); + justify-content: center; + align-items: center; + gap: 4px; + display: flex; + cursor: pointer; + transition: background-color 0.2s ease; + } + .back-button:hover { + 
background: rgba(150, 150, 150, 0.9); + } + .arrow-icon-left { + border: solid #dcdcdc; + border-width: 0 1.2px 1.2px 0; + display: inline-block; + padding: 3px; + transform: rotate(135deg); + } + .back-button-text { + color: white; + font-size: 12px; + font-weight: 500; + padding-right: 4px; } - .title { color: white; - font-size: 16px; - font-weight: 500; /* Medium */ - margin: 0; - text-align: center; - flex-shrink: 0; + font-size: 14px; + font-weight: 700; } - - .form-content { + .section { + width: 100%; display: flex; flex-direction: column; - align-items: center; - width: 100%; - margin-top: auto; + gap: 10px; + } + .row { + width: 100%; + display: flex; + justify-content: space-between; + align-items: center; + } + .label { + color: white; + font-size: 12px; + font-weight: 600; + } + .provider-selector { + display: flex; + width: 240px; + overflow: hidden; + border-radius: 12px; + border: 0.5px solid rgba(255, 255, 255, 0.5); + } + .provider-button { + -webkit-app-region: no-drag; + padding: 4px 8px; + background: rgba(20.4, 20.4, 20.4, 0.32); + color: #dcdcdc; + font-size: 11px; + font-weight: 450; + letter-spacing: 0.11px; + border: none; + cursor: pointer; + transition: background-color 0.2s ease; + flex: 1; + } + .provider-button:hover { + background: rgba(80, 80, 80, 0.48); + } + .provider-button[data-status='active'] { + background: rgba(142.8, 142.8, 142.8, 0.48); + color: white; + } + .api-input { + -webkit-app-region: no-drag; + width: 240px; + padding: 10px 8px; + background: rgba(61.2, 61.2, 61.2, 0.8); + border-radius: 6px; + border: 1px solid rgba(255, 255, 255, 0.24); + color: white; + font-size: 11px; + text-overflow: ellipsis; + font-family: inherit; + line-height: inherit; + } + .ollama-action-button { + -webkit-app-region: no-drag; + width: 240px; + padding: 10px 8px; + border-radius: 16px; + border: none; + color: white; + font-size: 12px; + font-weight: 500; + font-family: inherit; + cursor: pointer; + text-align: center; + transition: background-color 0.2s ease; + } + .ollama-action-button.install { + background: rgba(0, 122, 255, 0.2); + } + .ollama-action-button.start { + background: rgba(255, 200, 0, 0.2); + } + select.api-input { + -webkit-appearance: none; + appearance: none; + background-image: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' fill='none' viewBox='0 0 20 20'%3e%3cpath stroke='white' stroke-linecap='round' stroke-linejoin='round' stroke-width='1.5' d='M6 8l4 4 4-4'/%3e%3c/svg%3e"); + background-position: right 0.5rem center; + background-repeat: no-repeat; + background-size: 1.5em 1.5em; + padding-right: 2.5rem; + } + select.api-input option { + background: #333; + color: white; + } + .api-input::placeholder { + color: #a0a0a0; + } + .confirm-button-container { + width: 100%; + display: flex; + justify-content: flex-end; + } + .confirm-button { + -webkit-app-region: no-drag; + width: 240px; + padding: 8px; + background: rgba(132.6, 132.6, 132.6, 0.8); + box-shadow: 0px 2px 2px rgba(0, 0, 0, 0.16); + border-radius: 16px; + border: 1px solid rgba(255, 255, 255, 0.5); + color: white; + font-size: 12px; + font-weight: 500; + cursor: pointer; + transition: background-color 0.2s ease; + } + .confirm-button:hover { + background: rgba(150, 150, 150, 0.9); + } + .confirm-button:disabled { + background: rgba(255, 255, 255, 0.12); + color: #bebebe; + border: 0.5px solid rgba(255, 255, 255, 0.24); + box-shadow: none; + cursor: not-allowed; + } + .footer { + width: 100%; + text-align: center; + color: #dcdcdc; + font-size: 12px; + 
font-weight: 500; + line-height: 18px; + } + .footer-link { + text-decoration: underline; + cursor: pointer; + } + .error-message, + .success-message { + position: absolute; + bottom: 70px; + left: 16px; + right: 16px; + text-align: center; + font-size: 11px; + font-weight: 500; + padding: 4px; + border-radius: 4px; } - .error-message { color: rgba(239, 68, 68, 0.9); - font-weight: 500; - font-size: 11px; - height: 14px; - text-align: center; - margin-bottom: 4px; - opacity: 1; - transition: opacity 0.3s ease; } - .success-message { color: rgba(74, 222, 128, 0.9); - font-weight: 500; - font-size: 11px; - height: 14px; - text-align: center; - margin-bottom: 4px; - opacity: 1; - transition: opacity 0.3s ease; } - .message-fade-out { animation: fadeOut 3s ease-in-out forwards; } - @keyframes fadeOut { - 0% { opacity: 1; } - 66% { opacity: 1; } - 100% { opacity: 0; } - } - - .api-input { - -webkit-app-region: no-drag; - width: 100%; - height: 34px; - background: rgba(255, 255, 255, 0.1); - border-radius: 10px; - border: none; - padding: 0 10px; - color: white; - font-size: 12px; - font-weight: 400; /* Regular */ - margin-bottom: 6px; - text-align: center; - user-select: text; - cursor: text; - } - - .api-input::placeholder { - color: rgba(255, 255, 255, 0.6); - } - - .api-input:focus { - outline: none; - } - - .providers-container { display: flex; gap: 12px; width: 100%; } - .provider-column { flex: 1; display: flex; flex-direction: column; align-items: center; } - .provider-label { color: rgba(255, 255, 255, 0.7); font-size: 11px; font-weight: 500; margin-bottom: 6px; } - .api-input, .provider-select { - -webkit-app-region: no-drag; - width: 100%; - height: 34px; - text-align: center; - background: rgba(255, 255, 255, 0.1); - border-radius: 10px; - border: 1px solid rgba(255, 255, 255, 0.2); - padding: 0 10px; - color: white; - font-size: 12px; - margin-bottom: 6px; - } - .provider-select option { background: #1a1a1a; color: white; } - - .provider-select:hover { - background-color: rgba(255, 255, 255, 0.15); - border-color: rgba(255, 255, 255, 0.3); - } - - .provider-select:focus { - outline: none; - background-color: rgba(255, 255, 255, 0.15); - border-color: rgba(255, 255, 255, 0.4); - } - - - .action-button { - -webkit-app-region: no-drag; - width: 100%; - height: 34px; - background: rgba(255, 255, 255, 0.2); - border: none; - border-radius: 10px; - color: white; - font-size: 12px; - font-weight: 500; /* Medium */ - cursor: pointer; - transition: background 0.15s ease; - position: relative; - overflow: visible; - } - - .action-button::after { - content: ''; - position: absolute; - top: 0; - left: 0; - right: 0; - bottom: 0; - border-radius: 10px; - padding: 1px; - background: linear-gradient(169deg, rgba(255, 255, 255, 0.5) 0%, rgba(255, 255, 255, 0) 50%, rgba(255, 255, 255, 0.5) 100%); - -webkit-mask: linear-gradient(#fff 0 0) content-box, linear-gradient(#fff 0 0); - -webkit-mask-composite: destination-out; - mask-composite: exclude; - pointer-events: none; - } - - .action-button:hover { - background: rgba(255, 255, 255, 0.3); - } - - .action-button:disabled { - opacity: 0.5; - cursor: not-allowed; - } - - .or-text { - color: rgba(255, 255, 255, 0.5); - font-size: 12px; - font-weight: 500; /* Medium */ - margin: 10px 0; - } - /* ────────────────[ GLASS BYPASS ]─────────────── */ - :host-context(body.has-glass) .container, - :host-context(body.has-glass) .api-input, - :host-context(body.has-glass) .provider-select, - :host-context(body.has-glass) .action-button, - 
:host-context(body.has-glass) .close-button { - background: transparent !important; - border: none !important; - box-shadow: none !important; - filter: none !important; - backdrop-filter: none !important; - } - - :host-context(body.has-glass) .container::after, - :host-context(body.has-glass) .action-button::after { - display: none !important; - } - - :host-context(body.has-glass) .action-button:hover, - :host-context(body.has-glass) .provider-select:hover, - :host-context(body.has-glass) .close-button:hover { - background: transparent !important; - } - ` - - constructor() { - super() - this.isLoading = false - this.errorMessage = "" - this.successMessage = "" - this.messageTimestamp = 0 - //////// after_modelStateService //////// - this.llmApiKey = ""; - this.sttApiKey = ""; - this.llmProvider = "openai"; - this.sttProvider = "openai"; - this.providers = { llm: [], stt: [] }; // 초기화 - // Ollama related - this.modelSuggestions = []; - this.userModelHistory = []; - this.selectedLlmModel = ""; - this.selectedSttModel = ""; - this.ollamaStatus = { installed: false, running: false }; - this.installingModel = null; - this.installProgress = 0; - this.progressTracker = getOllamaProgressTracker(); - this.whisperInstallingModels = {}; - - // Professional operation management system - this.activeOperations = new Map(); - this.operationTimeouts = new Map(); - this.connectionState = 'idle'; // idle, connecting, connected, failed, disconnected - this.lastStateChange = Date.now(); - this.retryCount = 0; - this.maxRetries = 3; - this.baseRetryDelay = 1000; - - // Backpressure and resource management - this.operationQueue = []; - this.maxConcurrentOperations = 2; - this.maxQueueSize = 5; - this.operationMetrics = { - totalOperations: 0, - successfulOperations: 0, - failedOperations: 0, - timeouts: 0, - averageResponseTime: 0 - }; - - // Configuration - this.ipcTimeout = 10000; // 10s for IPC calls - this.operationTimeout = 15000; // 15s for complex operations - - // Health monitoring system - this.healthCheck = { - enabled: false, - intervalId: null, - intervalMs: 30000, // 30s - lastCheck: 0, - consecutiveFailures: 0, - maxFailures: 3 - }; - - // Load user model history from localStorage - this.loadUserModelHistory(); - this.loadProviderConfig(); - //////// after_modelStateService //////// - - this.handleKeyPress = this.handleKeyPress.bind(this) - this.handleSubmit = this.handleSubmit.bind(this) - this.handleInput = this.handleInput.bind(this) - this.handleAnimationEnd = this.handleAnimationEnd.bind(this) - this.handleUsePicklesKey = this.handleUsePicklesKey.bind(this) - this.handleProviderChange = this.handleProviderChange.bind(this) - this.handleLlmProviderChange = this.handleLlmProviderChange.bind(this) - this.handleSttProviderChange = this.handleSttProviderChange.bind(this) - this.handleMessageFadeEnd = this.handleMessageFadeEnd.bind(this) - this.handleModelKeyPress = this.handleModelKeyPress.bind(this) - this.handleSttModelChange = this.handleSttModelChange.bind(this) - } - - reset() { - this.apiKey = "" - this.isLoading = false - this.errorMessage = "" - this.validatedApiKey = null - this.selectedProvider = "openai" - this.requestUpdate() - } - - async loadProviderConfig() { - if (!window.api) return; - - try { - const [config, ollamaStatus] = await Promise.all([ - window.api.apiKeyHeader.getProviderConfig(), - window.api.apiKeyHeader.getOllamaStatus() - ]); - - const llmProviders = []; - const sttProviders = []; - - for (const id in config) { - // 'openai-glass' 같은 가상 Provider는 UI에 표시하지 않음 - if 
(id.includes('-glass')) continue; - const hasLlmModels = config[id].llmModels.length > 0 || id === 'ollama'; - const hasSttModels = config[id].sttModels.length > 0 || id === 'whisper'; - - if (hasLlmModels) { - llmProviders.push({ id, name: config[id].name }); + 0% { + opacity: 1; } - if (hasSttModels) { - sttProviders.push({ id, name: config[id].name }); + 66% { + opacity: 1; + } + 100% { + opacity: 0; } } - - this.providers = { llm: llmProviders, stt: sttProviders }; - - // 기본 선택 값 설정 - if (llmProviders.length > 0) this.llmProvider = llmProviders[0].id; - if (sttProviders.length > 0) this.sttProvider = sttProviders[0].id; - - // Ollama 상태 및 모델 제안 로드 - if (ollamaStatus?.success) { - this.ollamaStatus = { - installed: ollamaStatus.installed, - running: ollamaStatus.running - }; - - // Load model suggestions if Ollama is running - if (ollamaStatus.running) { - await this.loadModelSuggestions(); + .sliding-out { + animation: slideOut 0.3s ease-out forwards; + } + @keyframes slideOut { + from { + transform: translateY(0); + opacity: 1; + } + to { + transform: translateY(-100%); + opacity: 0; } } - - this.requestUpdate(); - } catch (error) { - console.error('[ApiKeyHeader] Failed to load provider config:', error); - } -} - - async handleMouseDown(e) { - if (e.target.tagName === "INPUT" || e.target.tagName === "BUTTON" || e.target.tagName === "SELECT") { - return - } - - e.preventDefault() - - const initialPosition = await window.api.apiKeyHeader.getHeaderPosition() - - this.dragState = { - initialMouseX: e.screenX, - initialMouseY: e.screenY, - initialWindowX: initialPosition.x, - initialWindowY: initialPosition.y, - moved: false, - } - - window.addEventListener("mousemove", this.handleMouseMove) - window.addEventListener("mouseup", this.handleMouseUp, { once: true }) - } - - handleMouseMove(e) { - if (!this.dragState) return - - const deltaX = Math.abs(e.screenX - this.dragState.initialMouseX) - const deltaY = Math.abs(e.screenY - this.dragState.initialMouseY) - - if (deltaX > 3 || deltaY > 3) { - this.dragState.moved = true - } - - const newWindowX = this.dragState.initialWindowX + (e.screenX - this.dragState.initialMouseX) - const newWindowY = this.dragState.initialWindowY + (e.screenY - this.dragState.initialMouseY) - - window.api.apiKeyHeader.moveHeaderTo(newWindowX, newWindowY) - } - - handleMouseUp(e) { - if (!this.dragState) return - - const wasDragged = this.dragState.moved - - window.removeEventListener("mousemove", this.handleMouseMove) - this.dragState = null - - if (wasDragged) { - this.wasJustDragged = true - setTimeout(() => { - this.wasJustDragged = false - }, 200) - } - } - - handleInput(e) { - this.apiKey = e.target.value - this.clearMessages() - console.log("Input changed:", this.apiKey?.length || 0, "chars") - - this.requestUpdate() - this.updateComplete.then(() => { - const inputField = this.shadowRoot?.querySelector(".apikey-input") - if (inputField && this.isInputFocused) { - inputField.focus() - } - }) - } - - clearMessages() { - this.errorMessage = "" - this.successMessage = "" - this.messageTimestamp = 0 - } - - handleProviderChange(e) { - this.selectedProvider = e.target.value - this.clearMessages() - console.log("Provider changed to:", this.selectedProvider) - this.requestUpdate() - } - - async handleLlmProviderChange(e) { - // Cancel any active operations first - this._cancelAllActiveOperations(); - - this.llmProvider = e.target.value; - this.errorMessage = ""; - this.successMessage = ""; - - // Reset retry state - this.retryCount = 0; - - if (this.llmProvider === 
'ollama') { - console.log('[ApiKeyHeader] Ollama selected, initiating connection...'); - await this._initializeOllamaConnection(); - // Start health monitoring for Ollama - this._startHealthMonitoring(); - } else { - this._updateConnectionState('idle', 'Non-Ollama provider selected'); - // Stop health monitoring for non-Ollama providers - this._stopHealthMonitoring(); - } - - this.requestUpdate(); - } - - async _initializeOllamaConnection() { - try { - // Progressive connection attempt with exponential backoff - await this._attemptOllamaConnection(); - } catch (error) { - console.error('[ApiKeyHeader] Initial Ollama connection failed:', error.message); - - if (this.retryCount < this.maxRetries) { - const delay = this.baseRetryDelay * Math.pow(2, this.retryCount); - console.log(`[ApiKeyHeader] Retrying Ollama connection in ${delay}ms (attempt ${this.retryCount + 1}/${this.maxRetries})`); - - this.retryCount++; - - // Use proper Promise-based delay instead of setTimeout - await new Promise(resolve => { - const retryTimeoutId = setTimeout(() => { - this._initializeOllamaConnection(); - resolve(); - }, delay); - - // Store timeout for cleanup - this.operationTimeouts.set(`retry_${this.retryCount}`, retryTimeoutId); - }); - } else { - this._updateConnectionState('failed', `Connection failed after ${this.maxRetries} attempts`); - } - } - } - - async _attemptOllamaConnection() { - await this.refreshOllamaStatus(); - } - - _cancelAllActiveOperations() { - console.log(`[ApiKeyHeader] Cancelling ${this.activeOperations.size} active operations and ${this.operationQueue.length} queued operations`); - - // Cancel active operations - for (const [operationType, operation] of this.activeOperations) { - this._cancelOperation(operationType); - } - - // Cancel queued operations - for (const queuedOp of this.operationQueue) { - queuedOp.reject(new Error(`Operation ${queuedOp.type} cancelled during cleanup`)); - } - this.operationQueue.length = 0; - - // Clean up all timeouts - for (const [timeoutId, timeout] of this.operationTimeouts) { - clearTimeout(timeout); - } - this.operationTimeouts.clear(); - } - - /** - * Get operation metrics for monitoring - */ - getOperationMetrics() { - return { - ...this.operationMetrics, - activeOperations: this.activeOperations.size, - queuedOperations: this.operationQueue.length, - successRate: this.operationMetrics.totalOperations > 0 ? 
- (this.operationMetrics.successfulOperations / this.operationMetrics.totalOperations) * 100 : 0 - }; - } - - /** - * Adaptive backpressure based on system performance - */ - _adjustBackpressureThresholds() { - const metrics = this.getOperationMetrics(); - - // Reduce concurrent operations if success rate is low - if (metrics.successRate < 70 && this.maxConcurrentOperations > 1) { - this.maxConcurrentOperations = Math.max(1, this.maxConcurrentOperations - 1); - console.log(`[ApiKeyHeader] Reduced max concurrent operations to ${this.maxConcurrentOperations} (success rate: ${metrics.successRate.toFixed(1)}%)`); - } - - // Increase if performance is good - if (metrics.successRate > 90 && metrics.averageResponseTime < 3000 && this.maxConcurrentOperations < 3) { - this.maxConcurrentOperations++; - console.log(`[ApiKeyHeader] Increased max concurrent operations to ${this.maxConcurrentOperations}`); - } - } - - /** - * Professional health monitoring system - */ - _startHealthMonitoring() { - if (this.healthCheck.enabled) return; - - this.healthCheck.enabled = true; - this.healthCheck.intervalId = setInterval(() => { - this._performHealthCheck(); - }, this.healthCheck.intervalMs); - - console.log(`[ApiKeyHeader] Health monitoring started (interval: ${this.healthCheck.intervalMs}ms)`); - } - - _stopHealthMonitoring() { - if (!this.healthCheck.enabled) return; - - this.healthCheck.enabled = false; - if (this.healthCheck.intervalId) { - clearInterval(this.healthCheck.intervalId); - this.healthCheck.intervalId = null; - } - - console.log('[ApiKeyHeader] Health monitoring stopped'); - } - - async _performHealthCheck() { - // Only perform health check if Ollama is selected and we're in a stable state - if (this.llmProvider !== 'ollama' || this.connectionState === 'connecting') { - return; - } - - const now = Date.now(); - this.healthCheck.lastCheck = now; - - try { - // Lightweight health check - just ping the service - const isHealthy = await this._executeOperation('health_check', async () => { - if (!window.api) return false; - const result = await window.api.apiKeyHeader.getOllamaStatus(); - return result?.success && result?.running; - }, { timeout: 5000, priority: 'low' }); - - if (isHealthy) { - this.healthCheck.consecutiveFailures = 0; - - // Update state if we were previously failed - if (this.connectionState === 'failed') { - this._updateConnectionState('connected', 'Health check recovered'); + .api-input.invalid { + outline: 1px solid #ff7070; + outline-offset: -1px; } - } else { - this._handleHealthCheckFailure(); - } - - // Adjust thresholds based on performance - this._adjustBackpressureThresholds(); - - } catch (error) { - console.warn('[ApiKeyHeader] Health check failed:', error.message); - this._handleHealthCheckFailure(); - } - } - - _handleHealthCheckFailure() { - this.healthCheck.consecutiveFailures++; - - if (this.healthCheck.consecutiveFailures >= this.healthCheck.maxFailures) { - console.warn(`[ApiKeyHeader] Health check failed ${this.healthCheck.consecutiveFailures} times, marking as disconnected`); - this._updateConnectionState('failed', 'Service health check failed'); - - // Increase health check frequency when having issues - this.healthCheck.intervalMs = Math.max(10000, this.healthCheck.intervalMs / 2); - this._restartHealthMonitoring(); - } - } - - _restartHealthMonitoring() { - this._stopHealthMonitoring(); - this._startHealthMonitoring(); - } - - /** - * Get comprehensive health status - */ - getHealthStatus() { - return { - connection: { - state: this.connectionState, - 
lastStateChange: this.lastStateChange, - timeSinceLastChange: Date.now() - this.lastStateChange - }, - operations: this.getOperationMetrics(), - health: { - enabled: this.healthCheck.enabled, - lastCheck: this.healthCheck.lastCheck, - timeSinceLastCheck: this.healthCheck.lastCheck > 0 ? Date.now() - this.healthCheck.lastCheck : null, - consecutiveFailures: this.healthCheck.consecutiveFailures, - intervalMs: this.healthCheck.intervalMs - }, - ollama: { - provider: this.llmProvider, - status: this.ollamaStatus, - selectedModel: this.selectedLlmModel - } - }; - } + .input-wrapper { + display: flex; + flex-direction: column; + gap: 4px; + align-items: flex-start; + } + .inline-error-message { + color: #ff7070; + font-size: 11px; + font-weight: 400; + letter-spacing: 0.11px; + word-wrap: break-word; + width: 240px; + } + `; - async handleSttProviderChange(e) { - this.sttProvider = e.target.value; - this.errorMessage = ""; - this.successMessage = ""; - - if (this.sttProvider === 'ollama') { - console.warn('[ApiKeyHeader] Ollama does not support STT yet. Please select Whisper or another provider.'); - this.errorMessage = 'Ollama does not support STT yet. Please select Whisper or another STT provider.'; - this.messageTimestamp = Date.now(); - - // Auto-select Whisper if available - const whisperProvider = this.providers.stt.find(p => p.id === 'whisper'); - if (whisperProvider) { - this.sttProvider = 'whisper'; - console.log('[ApiKeyHeader] Auto-selected Whisper for STT'); - } - } - - this.requestUpdate(); - } - /** - * Professional operation management with backpressure control - */ - async _executeOperation(operationType, operation, options = {}) { - const operationId = `${operationType}_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`; - const timeout = options.timeout || this.ipcTimeout; - const priority = options.priority || 'normal'; // high, normal, low - - // Backpressure control - if (this.activeOperations.size >= this.maxConcurrentOperations) { - if (this.operationQueue.length >= this.maxQueueSize) { - throw new Error(`Operation queue full (${this.maxQueueSize}), rejecting ${operationType}`); - } - - console.log(`[ApiKeyHeader] Queuing operation ${operationType} (${this.activeOperations.size} active)`); - return this._queueOperation(operationId, operationType, operation, options); - } - - return this._executeImmediately(operationId, operationType, operation, timeout); - } - - async _queueOperation(operationId, operationType, operation, options) { - return new Promise((resolve, reject) => { - const queuedOperation = { - id: operationId, - type: operationType, - operation, - options, - resolve, - reject, - queuedAt: Date.now(), - priority: options.priority || 'normal' - }; - - // Insert based on priority (high priority first) - if (options.priority === 'high') { - this.operationQueue.unshift(queuedOperation); - } else { - this.operationQueue.push(queuedOperation); - } - - console.log(`[ApiKeyHeader] Queued ${operationType} (queue size: ${this.operationQueue.length})`); - }); - } - - async _executeImmediately(operationId, operationType, operation, timeout) { - const startTime = Date.now(); - this.operationMetrics.totalOperations++; - - // Check if similar operation is already running - if (this.activeOperations.has(operationType)) { - console.log(`[ApiKeyHeader] Operation ${operationType} already in progress, cancelling previous`); - this._cancelOperation(operationType); - } - - // Create cancellation mechanism - const cancellationPromise = new Promise((_, reject) => { - const 
timeoutId = setTimeout(() => { - this.operationMetrics.timeouts++; - reject(new Error(`Operation ${operationType} timeout after ${timeout}ms`)); - }, timeout); - - this.operationTimeouts.set(operationId, timeoutId); - }); - - const operationPromise = Promise.race([ - operation(), - cancellationPromise - ]); - - this.activeOperations.set(operationType, { - id: operationId, - promise: operationPromise, - startTime - }); - - try { - const result = await operationPromise; - this._recordOperationSuccess(startTime); - return result; - } catch (error) { - this._recordOperationFailure(error, operationType); - throw error; - } finally { - this._cleanupOperation(operationId, operationType); - this._processQueue(); - } - } - - _recordOperationSuccess(startTime) { - this.operationMetrics.successfulOperations++; - const responseTime = Date.now() - startTime; - this._updateAverageResponseTime(responseTime); - } - - _recordOperationFailure(error, operationType) { - this.operationMetrics.failedOperations++; - - if (error.message.includes('timeout')) { - console.error(`[ApiKeyHeader] Operation ${operationType} timed out`); - this._updateConnectionState('failed', `Timeout: ${error.message}`); - } - } - - _updateAverageResponseTime(responseTime) { - const totalOps = this.operationMetrics.successfulOperations; - this.operationMetrics.averageResponseTime = - ((this.operationMetrics.averageResponseTime * (totalOps - 1)) + responseTime) / totalOps; - } - - async _processQueue() { - if (this.operationQueue.length === 0 || this.activeOperations.size >= this.maxConcurrentOperations) { - return; - } - - const queuedOp = this.operationQueue.shift(); - if (!queuedOp) return; - - const queueTime = Date.now() - queuedOp.queuedAt; - console.log(`[ApiKeyHeader] Processing queued operation ${queuedOp.type} (waited ${queueTime}ms)`); - - try { - const result = await this._executeImmediately( - queuedOp.id, - queuedOp.type, - queuedOp.operation, - queuedOp.options.timeout || this.ipcTimeout - ); - queuedOp.resolve(result); - } catch (error) { - queuedOp.reject(error); - } - } - - _cancelOperation(operationType) { - const operation = this.activeOperations.get(operationType); - if (operation) { - this._cleanupOperation(operation.id, operationType); - console.log(`[ApiKeyHeader] Cancelled operation: ${operationType}`); - } - } - - _cleanupOperation(operationId, operationType) { - if (this.operationTimeouts.has(operationId)) { - clearTimeout(this.operationTimeouts.get(operationId)); - this.operationTimeouts.delete(operationId); - } - this.activeOperations.delete(operationType); - } - - _updateConnectionState(newState, reason = '') { - if (this.connectionState !== newState) { - console.log(`[ApiKeyHeader] Connection state: ${this.connectionState} -> ${newState} (${reason})`); - this.connectionState = newState; - this.lastStateChange = Date.now(); - - // Update UI based on state - this._handleStateChange(newState, reason); - } - } - - _handleStateChange(state, reason) { - switch (state) { - case 'connecting': - this.installingModel = 'Connecting to Ollama...'; - this.installProgress = 10; - break; - case 'failed': - this.errorMessage = reason || 'Connection failed'; - this.installingModel = null; - this.installProgress = 0; - this.messageTimestamp = Date.now(); - break; - case 'connected': - this.installingModel = null; - this.installProgress = 0; - break; - case 'disconnected': + constructor() { + super(); + this.isLoading = false; + this.errorMessage = ''; + this.successMessage = ''; + this.messageTimestamp = 0; + //////// 
after_modelStateService //////// + this.llmApiKey = ''; + this.sttApiKey = ''; + this.llmProvider = 'openai'; + this.sttProvider = 'openai'; + this.providers = { llm: [], stt: [] }; // 초기화 + // Ollama related + this.modelSuggestions = []; + this.userModelHistory = []; + this.selectedLlmModel = ''; + this.selectedSttModel = ''; this.ollamaStatus = { installed: false, running: false }; - break; - } - this.requestUpdate(); - } - - async refreshOllamaStatus() { - if (!window.api) return; - - try { - this._updateConnectionState('connecting', 'Checking Ollama status'); - - const result = await this._executeOperation('ollama_status', async () => { - return await window.api.apiKeyHeader.getOllamaStatus(); - }); - - if (result?.success) { - this.ollamaStatus = { - installed: result.installed, - running: result.running + this.installingModel = null; + this.installProgress = 0; + this.whisperInstallingModels = {}; + this.backCallback = () => {}; + this.llmError = ''; + this.sttError = ''; + + // Professional operation management system + this.activeOperations = new Map(); + this.operationTimeouts = new Map(); + this.connectionState = 'idle'; // idle, connecting, connected, failed, disconnected + this.lastStateChange = Date.now(); + this.retryCount = 0; + this.maxRetries = 3; + this.baseRetryDelay = 1000; + + // Backpressure and resource management + this.operationQueue = []; + this.maxConcurrentOperations = 2; + this.maxQueueSize = 5; + this.operationMetrics = { + totalOperations: 0, + successfulOperations: 0, + failedOperations: 0, + timeouts: 0, + averageResponseTime: 0, }; - - this._updateConnectionState('connected', 'Status updated successfully'); - - // Load model suggestions if Ollama is running - if (result.running) { - await this.loadModelSuggestions(); - } - } else { - this._updateConnectionState('failed', result?.error || 'Status check failed'); - } - } catch (error) { - console.error('[ApiKeyHeader] Failed to refresh Ollama status:', error.message); - this._updateConnectionState('failed', error.message); + + // Configuration + this.ipcTimeout = 10000; // 10s for IPC calls + this.operationTimeout = 15000; // 15s for complex operations + + // Health monitoring system + this.healthCheck = { + enabled: false, + intervalId: null, + intervalMs: 30000, // 30s + lastCheck: 0, + consecutiveFailures: 0, + maxFailures: 3, + }; + + // Load user model history from localStorage + this.loadUserModelHistory(); + this.loadProviderConfig(); + //////// after_modelStateService //////// + + this.handleKeyPress = this.handleKeyPress.bind(this); + this.handleSubmit = this.handleSubmit.bind(this); + this.handleInput = this.handleInput.bind(this); + this.handleAnimationEnd = this.handleAnimationEnd.bind(this); + this.handleProviderChange = this.handleProviderChange.bind(this); + this.handleLlmProviderChange = this.handleLlmProviderChange.bind(this); + this.handleSttProviderChange = this.handleSttProviderChange.bind(this); + this.handleMessageFadeEnd = this.handleMessageFadeEnd.bind(this); + this.handleModelKeyPress = this.handleModelKeyPress.bind(this); + this.handleSttModelChange = this.handleSttModelChange.bind(this); + this.handleBack = this.handleBack.bind(this); + this.handleClose = this.handleClose.bind(this); } - } - - async loadModelSuggestions() { - if (!window.api) return; - - try { - const result = await this._executeOperation('model_suggestions', async () => { - return await window.api.apiKeyHeader.getModelSuggestions(); - }); - - if (result?.success) { - this.modelSuggestions = result.suggestions || 
[]; - - // 기본 모델 선택 (설치된 모델 중 첫 번째) - if (!this.selectedLlmModel && this.modelSuggestions.length > 0) { - const installedModel = this.modelSuggestions.find(m => m.status === 'installed'); - if (installedModel) { - this.selectedLlmModel = installedModel.name; - } + + updated(changedProperties) { + super.updated(changedProperties); + this.dispatchEvent(new CustomEvent('content-changed', { bubbles: true, composed: true })); + } + + reset() { + this.apiKey = ''; + this.isLoading = false; + this.errorMessage = ''; + this.validatedApiKey = null; + this.selectedProvider = 'openai'; + this.requestUpdate(); + } + + handleBack() { + if (this.backCallback) { + this.backCallback(); + } + } + + async loadProviderConfig() { + if (!window.api?.apiKeyHeader) return; + + try { + const [config, ollamaStatus] = await Promise.all([ + window.api.apiKeyHeader.getProviderConfig(), + window.api.apiKeyHeader.getOllamaStatus(), + ]); + + const llmProviders = []; + const sttProviders = []; + + for (const id in config) { + // 'openai-glass' 같은 가상 Provider는 UI에 표시하지 않음 + if (id.includes('-glass')) continue; + const hasLlmModels = config[id].llmModels.length > 0 || id === 'ollama'; + const hasSttModels = config[id].sttModels.length > 0 || id === 'whisper'; + + if (hasLlmModels) { + llmProviders.push({ id, name: config[id].name }); + } + if (hasSttModels) { + sttProviders.push({ id, name: config[id].name }); + } + } + + this.providers = { llm: llmProviders, stt: sttProviders }; + + // 기본 선택 값 설정 + if (llmProviders.length > 0) this.llmProvider = llmProviders[0].id; + if (sttProviders.length > 0) this.sttProvider = sttProviders[0].id; + + // Ollama 상태 및 모델 제안 로드 + if (ollamaStatus?.success) { + this.ollamaStatus = { + installed: ollamaStatus.installed, + running: ollamaStatus.running, + }; + + // Load model suggestions if Ollama is running + if (ollamaStatus.running) { + await this.loadModelSuggestions(); + } + } + + this.requestUpdate(); + } catch (error) { + console.error('[ApiKeyHeader] Failed to load provider config:', error); + } + } + + async handleMouseDown(e) { + if (e.target.tagName === 'INPUT' || e.target.tagName === 'BUTTON' || e.target.tagName === 'SELECT') { + return; + } + + e.preventDefault(); + + if (!window.api?.apiKeyHeader) return; + const initialPosition = await window.api.apiKeyHeader.getHeaderPosition(); + + this.dragState = { + initialMouseX: e.screenX, + initialMouseY: e.screenY, + initialWindowX: initialPosition.x, + initialWindowY: initialPosition.y, + moved: false, + }; + + window.addEventListener('mousemove', this.handleMouseMove); + window.addEventListener('mouseup', this.handleMouseUp, { once: true }); + } + + handleMouseMove(e) { + if (!this.dragState) return; + + const deltaX = Math.abs(e.screenX - this.dragState.initialMouseX); + const deltaY = Math.abs(e.screenY - this.dragState.initialMouseY); + + if (deltaX > 3 || deltaY > 3) { + this.dragState.moved = true; + } + + const newWindowX = this.dragState.initialWindowX + (e.screenX - this.dragState.initialMouseX); + const newWindowY = this.dragState.initialWindowY + (e.screenY - this.dragState.initialMouseY); + + if (window.api?.apiKeyHeader) { + window.api.apiKeyHeader.moveHeaderTo(newWindowX, newWindowY); + } + } + + handleMouseUp(e) { + if (!this.dragState) return; + + const wasDragged = this.dragState.moved; + + window.removeEventListener('mousemove', this.handleMouseMove); + this.dragState = null; + + if (wasDragged) { + this.wasJustDragged = true; + setTimeout(() => { + this.wasJustDragged = false; + }, 200); + } + } + + handleInput(e) 
{ + this.apiKey = e.target.value; + this.clearMessages(); + console.log('Input changed:', this.apiKey?.length || 0, 'chars'); + + this.requestUpdate(); + this.updateComplete.then(() => { + const inputField = this.shadowRoot?.querySelector('.apikey-input'); + if (inputField && this.isInputFocused) { + inputField.focus(); + } + }); + } + + clearMessages() { + this.errorMessage = ''; + this.successMessage = ''; + this.messageTimestamp = 0; + this.llmError = ''; + this.sttError = ''; + } + + handleProviderChange(e) { + this.selectedProvider = e.target.value; + this.clearMessages(); + console.log('Provider changed to:', this.selectedProvider); + this.requestUpdate(); + } + + async handleLlmProviderChange(e, providerId) { + const newProvider = providerId || e.target.value; + if (newProvider === this.llmProvider) return; + + // Cancel any active operations first + this._cancelAllActiveOperations(); + + this.llmProvider = newProvider; + this.errorMessage = ''; + this.successMessage = ''; + + if (['openai', 'gemini'].includes(this.llmProvider)) { + this.sttProvider = this.llmProvider; + } + + // Reset retry state + this.retryCount = 0; + + if (this.llmProvider === 'ollama') { + console.log('[ApiKeyHeader] Ollama selected, initiating connection...'); + await this._initializeOllamaConnection(); + // Start health monitoring for Ollama + this._startHealthMonitoring(); + } else { + this._updateConnectionState('idle', 'Non-Ollama provider selected'); + // Stop health monitoring for non-Ollama providers + this._stopHealthMonitoring(); + } + + this.requestUpdate(); + } + + async _initializeOllamaConnection() { + try { + // Progressive connection attempt with exponential backoff + await this._attemptOllamaConnection(); + } catch (error) { + console.error('[ApiKeyHeader] Initial Ollama connection failed:', error.message); + + if (this.retryCount < this.maxRetries) { + const delay = this.baseRetryDelay * Math.pow(2, this.retryCount); + console.log(`[ApiKeyHeader] Retrying Ollama connection in ${delay}ms (attempt ${this.retryCount + 1}/${this.maxRetries})`); + + this.retryCount++; + + // Use proper Promise-based delay instead of setTimeout + await new Promise(resolve => { + const retryTimeoutId = setTimeout(() => { + this._initializeOllamaConnection(); + resolve(); + }, delay); + + // Store timeout for cleanup + this.operationTimeouts.set(`retry_${this.retryCount}`, retryTimeoutId); + }); + } else { + this._updateConnectionState('failed', `Connection failed after ${this.maxRetries} attempts`); + } + } + } + + async _attemptOllamaConnection() { + await this.refreshOllamaStatus(); + } + + _cancelAllActiveOperations() { + console.log(`[ApiKeyHeader] Cancelling ${this.activeOperations.size} active operations and ${this.operationQueue.length} queued operations`); + + // Cancel active operations + for (const [operationType, operation] of this.activeOperations) { + this._cancelOperation(operationType); + } + + // Cancel queued operations + for (const queuedOp of this.operationQueue) { + queuedOp.reject(new Error(`Operation ${queuedOp.type} cancelled during cleanup`)); + } + this.operationQueue.length = 0; + + // Clean up all timeouts + for (const [timeoutId, timeout] of this.operationTimeouts) { + clearTimeout(timeout); + } + this.operationTimeouts.clear(); + } + + /** + * Get operation metrics for monitoring + */ + getOperationMetrics() { + return { + ...this.operationMetrics, + activeOperations: this.activeOperations.size, + queuedOperations: this.operationQueue.length, + successRate: + 
this.operationMetrics.totalOperations > 0 + ? (this.operationMetrics.successfulOperations / this.operationMetrics.totalOperations) * 100 + : 0, + }; + } + + /** + * Adaptive backpressure based on system performance + */ + _adjustBackpressureThresholds() { + const metrics = this.getOperationMetrics(); + + // Reduce concurrent operations if success rate is low + if (metrics.successRate < 70 && this.maxConcurrentOperations > 1) { + this.maxConcurrentOperations = Math.max(1, this.maxConcurrentOperations - 1); + console.log( + `[ApiKeyHeader] Reduced max concurrent operations to ${this.maxConcurrentOperations} (success rate: ${metrics.successRate.toFixed(1)}%)` + ); + } + + // Increase if performance is good + if (metrics.successRate > 90 && metrics.averageResponseTime < 3000 && this.maxConcurrentOperations < 3) { + this.maxConcurrentOperations++; + console.log(`[ApiKeyHeader] Increased max concurrent operations to ${this.maxConcurrentOperations}`); + } + } + + /** + * Professional health monitoring system + */ + _startHealthMonitoring() { + if (this.healthCheck.enabled) return; + + this.healthCheck.enabled = true; + this.healthCheck.intervalId = setInterval(() => { + this._performHealthCheck(); + }, this.healthCheck.intervalMs); + + console.log(`[ApiKeyHeader] Health monitoring started (interval: ${this.healthCheck.intervalMs}ms)`); + } + + _stopHealthMonitoring() { + if (!this.healthCheck.enabled) return; + + this.healthCheck.enabled = false; + if (this.healthCheck.intervalId) { + clearInterval(this.healthCheck.intervalId); + this.healthCheck.intervalId = null; + } + + console.log('[ApiKeyHeader] Health monitoring stopped'); + } + + async _performHealthCheck() { + // Only perform health check if Ollama is selected and we're in a stable state + if (this.llmProvider !== 'ollama' || this.connectionState === 'connecting') { + return; + } + + const now = Date.now(); + this.healthCheck.lastCheck = now; + + try { + // Lightweight health check - just ping the service + const isHealthy = await this._executeOperation( + 'health_check', + async () => { + if (!window.api?.apiKeyHeader) return false; + const result = await window.api.apiKeyHeader.getOllamaStatus(); + return result?.success && result?.running; + }, + { timeout: 5000, priority: 'low' } + ); + + if (isHealthy) { + this.healthCheck.consecutiveFailures = 0; + + // Update state if we were previously failed + if (this.connectionState === 'failed') { + this._updateConnectionState('connected', 'Health check recovered'); + } + } else { + this._handleHealthCheckFailure(); + } + + // Adjust thresholds based on performance + this._adjustBackpressureThresholds(); + } catch (error) { + console.warn('[ApiKeyHeader] Health check failed:', error.message); + this._handleHealthCheckFailure(); + } + } + + _handleHealthCheckFailure() { + this.healthCheck.consecutiveFailures++; + + if (this.healthCheck.consecutiveFailures >= this.healthCheck.maxFailures) { + console.warn(`[ApiKeyHeader] Health check failed ${this.healthCheck.consecutiveFailures} times, marking as disconnected`); + this._updateConnectionState('failed', 'Service health check failed'); + + // Increase health check frequency when having issues + this.healthCheck.intervalMs = Math.max(10000, this.healthCheck.intervalMs / 2); + this._restartHealthMonitoring(); + } + } + + _restartHealthMonitoring() { + this._stopHealthMonitoring(); + this._startHealthMonitoring(); + } + + /** + * Get comprehensive health status + */ + getHealthStatus() { + return { + connection: { + state: this.connectionState, + 
lastStateChange: this.lastStateChange, + timeSinceLastChange: Date.now() - this.lastStateChange, + }, + operations: this.getOperationMetrics(), + health: { + enabled: this.healthCheck.enabled, + lastCheck: this.healthCheck.lastCheck, + timeSinceLastCheck: this.healthCheck.lastCheck > 0 ? Date.now() - this.healthCheck.lastCheck : null, + consecutiveFailures: this.healthCheck.consecutiveFailures, + intervalMs: this.healthCheck.intervalMs, + }, + ollama: { + provider: this.llmProvider, + status: this.ollamaStatus, + selectedModel: this.selectedLlmModel, + }, + }; + } + + async handleSttProviderChange(e, providerId) { + const newProvider = providerId || e.target.value; + if (newProvider === this.sttProvider) return; + + this.sttProvider = newProvider; + this.errorMessage = ''; + this.successMessage = ''; + + if (this.sttProvider === 'ollama') { + console.warn('[ApiKeyHeader] Ollama does not support STT yet. Please select Whisper or another provider.'); + this.sttError = '*Ollama does not support STT yet. Please select Whisper or another STT provider.'; + this.messageTimestamp = Date.now(); + + // Auto-select Whisper if available + const whisperProvider = this.providers.stt.find(p => p.id === 'whisper'); + if (whisperProvider) { + this.sttProvider = 'whisper'; + console.log('[ApiKeyHeader] Auto-selected Whisper for STT'); + } + } + + this.requestUpdate(); + } + + /** + * Professional operation management with backpressure control + */ + async _executeOperation(operationType, operation, options = {}) { + const operationId = `${operationType}_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`; + const timeout = options.timeout || this.ipcTimeout; + const priority = options.priority || 'normal'; // high, normal, low + + // Backpressure control + if (this.activeOperations.size >= this.maxConcurrentOperations) { + if (this.operationQueue.length >= this.maxQueueSize) { + throw new Error(`Operation queue full (${this.maxQueueSize}), rejecting ${operationType}`); + } + + console.log(`[ApiKeyHeader] Queuing operation ${operationType} (${this.activeOperations.size} active)`); + return this._queueOperation(operationId, operationType, operation, options); + } + + return this._executeImmediately(operationId, operationType, operation, timeout); + } + + async _queueOperation(operationId, operationType, operation, options) { + return new Promise((resolve, reject) => { + const queuedOperation = { + id: operationId, + type: operationType, + operation, + options, + resolve, + reject, + queuedAt: Date.now(), + priority: options.priority || 'normal', + }; + + // Insert based on priority (high priority first) + if (options.priority === 'high') { + this.operationQueue.unshift(queuedOperation); + } else { + this.operationQueue.push(queuedOperation); + } + + console.log(`[ApiKeyHeader] Queued ${operationType} (queue size: ${this.operationQueue.length})`); + }); + } + + async _executeImmediately(operationId, operationType, operation, timeout) { + const startTime = Date.now(); + this.operationMetrics.totalOperations++; + + // Check if similar operation is already running + if (this.activeOperations.has(operationType)) { + console.log(`[ApiKeyHeader] Operation ${operationType} already in progress, cancelling previous`); + this._cancelOperation(operationType); + } + + // Create cancellation mechanism + const cancellationPromise = new Promise((_, reject) => { + const timeoutId = setTimeout(() => { + this.operationMetrics.timeouts++; + reject(new Error(`Operation ${operationType} timeout after ${timeout}ms`)); + }, 
timeout); + + this.operationTimeouts.set(operationId, timeoutId); + }); + + const operationPromise = Promise.race([operation(), cancellationPromise]); + + this.activeOperations.set(operationType, { + id: operationId, + promise: operationPromise, + startTime, + }); + + try { + const result = await operationPromise; + this._recordOperationSuccess(startTime); + return result; + } catch (error) { + this._recordOperationFailure(error, operationType); + throw error; + } finally { + this._cleanupOperation(operationId, operationType); + this._processQueue(); + } + } + + _recordOperationSuccess(startTime) { + this.operationMetrics.successfulOperations++; + const responseTime = Date.now() - startTime; + this._updateAverageResponseTime(responseTime); + } + + _recordOperationFailure(error, operationType) { + this.operationMetrics.failedOperations++; + + if (error.message.includes('timeout')) { + console.error(`[ApiKeyHeader] Operation ${operationType} timed out`); + this._updateConnectionState('failed', `Timeout: ${error.message}`); + } + } + + _updateAverageResponseTime(responseTime) { + const totalOps = this.operationMetrics.successfulOperations; + this.operationMetrics.averageResponseTime = (this.operationMetrics.averageResponseTime * (totalOps - 1) + responseTime) / totalOps; + } + + async _processQueue() { + if (this.operationQueue.length === 0 || this.activeOperations.size >= this.maxConcurrentOperations) { + return; + } + + const queuedOp = this.operationQueue.shift(); + if (!queuedOp) return; + + const queueTime = Date.now() - queuedOp.queuedAt; + console.log(`[ApiKeyHeader] Processing queued operation ${queuedOp.type} (waited ${queueTime}ms)`); + + try { + const result = await this._executeImmediately( + queuedOp.id, + queuedOp.type, + queuedOp.operation, + queuedOp.options.timeout || this.ipcTimeout + ); + queuedOp.resolve(result); + } catch (error) { + queuedOp.reject(error); + } + } + + _cancelOperation(operationType) { + const operation = this.activeOperations.get(operationType); + if (operation) { + this._cleanupOperation(operation.id, operationType); + console.log(`[ApiKeyHeader] Cancelled operation: ${operationType}`); + } + } + + _cleanupOperation(operationId, operationType) { + if (this.operationTimeouts.has(operationId)) { + clearTimeout(this.operationTimeouts.get(operationId)); + this.operationTimeouts.delete(operationId); + } + this.activeOperations.delete(operationType); + } + + _updateConnectionState(newState, reason = '') { + if (this.connectionState !== newState) { + console.log(`[ApiKeyHeader] Connection state: ${this.connectionState} -> ${newState} (${reason})`); + this.connectionState = newState; + this.lastStateChange = Date.now(); + + // Update UI based on state + this._handleStateChange(newState, reason); + } + } + + _handleStateChange(state, reason) { + switch (state) { + case 'connecting': + this.installingModel = 'Connecting to Ollama...'; + this.installProgress = 10; + break; + case 'failed': + this.errorMessage = reason || 'Connection failed'; + this.installingModel = null; + this.installProgress = 0; + this.messageTimestamp = Date.now(); + break; + case 'connected': + this.installingModel = null; + this.installProgress = 0; + break; + case 'disconnected': + this.ollamaStatus = { installed: false, running: false }; + break; } this.requestUpdate(); - } else { - console.warn('[ApiKeyHeader] Model suggestions request unsuccessful:', result?.error); - } - } catch (error) { - console.error('[ApiKeyHeader] Failed to load model suggestions:', error.message); } - } - async 
ensureOllamaReady() { - if (!window.api) return false; - - try { - this._updateConnectionState('connecting', 'Ensuring Ollama is ready'); - - const result = await this._executeOperation('ollama_ensure_ready', async () => { - return await window.api.apiKeyHeader.ensureOllamaReady(); - }, { timeout: this.operationTimeout }); - - if (result?.success) { - await this.refreshOllamaStatus(); - this._updateConnectionState('connected', 'Ollama ready'); - return true; - } else { - const errorMsg = `Failed to setup Ollama: ${result?.error || 'Unknown error'}`; - this._updateConnectionState('failed', errorMsg); - return false; - } - } catch (error) { - console.error('[ApiKeyHeader] Failed to ensure Ollama ready:', error.message); - this._updateConnectionState('failed', `Error setting up Ollama: ${error.message}`); - return false; + async refreshOllamaStatus() { + if (!window.api?.apiKeyHeader) return; + + try { + this._updateConnectionState('connecting', 'Checking Ollama status'); + + const result = await this._executeOperation('ollama_status', async () => { + return await window.api.apiKeyHeader.getOllamaStatus(); + }); + + if (result?.success) { + this.ollamaStatus = { + installed: result.installed, + running: result.running, + }; + + this._updateConnectionState('connected', 'Status updated successfully'); + + // Load model suggestions if Ollama is running + if (result.running) { + await this.loadModelSuggestions(); + } + } else { + this._updateConnectionState('failed', result?.error || 'Status check failed'); + } + } catch (error) { + console.error('[ApiKeyHeader] Failed to refresh Ollama status:', error.message); + this._updateConnectionState('failed', error.message); + } } - } - async ensureOllamaReadyWithUI() { - if (!window.api) return false; + async loadModelSuggestions() { + if (!window.api?.apiKeyHeader) return; - this.installingModel = "Setting up Ollama"; - this.installProgress = 0; - this.clearMessages(); - this.requestUpdate(); + try { + const result = await this._executeOperation('model_suggestions', async () => { + return await window.api.apiKeyHeader.getModelSuggestions(); + }); - const progressHandler = (event, data) => { - let baseProgress = 0; - let stageTotal = 0; + if (result?.success) { + this.modelSuggestions = result.suggestions || []; - switch (data.stage) { - case "downloading": - baseProgress = 0; - stageTotal = 70; - break; - case "mounting": - baseProgress = 70; - stageTotal = 10; - break; - case "installing": - baseProgress = 80; - stageTotal = 10; - break; - case "linking": - baseProgress = 90; - stageTotal = 5; - break; - case "cleanup": - baseProgress = 95; - stageTotal = 3; - break; - case "starting": - baseProgress = 98; - stageTotal = 2; - break; - } + // 기본 모델 선택 (설치된 모델 중 첫 번째) + if (!this.selectedLlmModel && this.modelSuggestions.length > 0) { + const installedModel = this.modelSuggestions.find(m => m.status === 'installed'); + if (installedModel) { + this.selectedLlmModel = installedModel.name; + } + } + this.requestUpdate(); + } else { + console.warn('[ApiKeyHeader] Model suggestions request unsuccessful:', result?.error); + } + } catch (error) { + console.error('[ApiKeyHeader] Failed to load model suggestions:', error.message); + } + } - const overallProgress = baseProgress + (data.progress / 100) * stageTotal; + async ensureOllamaReady() { + if (!window.api?.apiKeyHeader) return false; - this.installingModel = data.message; - this.installProgress = Math.round(overallProgress); - this.requestUpdate(); - }; + try { + this._updateConnectionState('connecting', 
'Ensuring Ollama is ready'); - let operationCompleted = false; - const completionTimeout = setTimeout(async () => { - if (!operationCompleted) { - console.log("[ApiKeyHeader] Operation timeout, checking status manually..."); - await this._handleOllamaSetupCompletion(true); - } - }, 15000); // 15 second timeout + const result = await this._executeOperation( + 'ollama_ensure_ready', + async () => { + return await window.api.apiKeyHeader.ensureOllamaReady(); + }, + { timeout: this.operationTimeout } + ); - const completionHandler = async (event, result) => { - if (operationCompleted) return; - operationCompleted = true; - clearTimeout(completionTimeout); - - window.api.apiKeyHeader.removeOnOllamaInstallProgress(progressHandler); - await this._handleOllamaSetupCompletion(result.success, result.error); - }; + if (result?.success) { + await this.refreshOllamaStatus(); + this._updateConnectionState('connected', 'Ollama ready'); + return true; + } else { + const errorMsg = `Failed to setup Ollama: ${result?.error || 'Unknown error'}`; + this._updateConnectionState('failed', errorMsg); + return false; + } + } catch (error) { + console.error('[ApiKeyHeader] Failed to ensure Ollama ready:', error.message); + this._updateConnectionState('failed', `Error setting up Ollama: ${error.message}`); + return false; + } + } - window.api.apiKeyHeader.onceOllamaInstallComplete(completionHandler); - window.api.apiKeyHeader.onOllamaInstallProgress(progressHandler); + async ensureOllamaReadyWithUI() { + if (!window.api?.apiKeyHeader) return false; - try { - let result; - if (!this.ollamaStatus.installed) { - console.log("[ApiKeyHeader] Ollama not installed. Starting installation."); - result = await window.api.apiKeyHeader.installOllama(); - } else { - console.log("[ApiKeyHeader] Ollama installed. 
Starting service."); - result = await window.api.apiKeyHeader.startOllamaService(); - } - - // If IPC call succeeds but no event received, handle completion manually - if (result?.success && !operationCompleted) { - setTimeout(async () => { - if (!operationCompleted) { + this.installingModel = 'Setting up Ollama'; + this.installProgress = 0; + this.clearMessages(); + this.requestUpdate(); + + const progressHandler = (event, data) => { + let baseProgress = 0; + let stageTotal = 0; + + switch (data.stage) { + case 'downloading': + baseProgress = 0; + stageTotal = 70; + break; + case 'mounting': + baseProgress = 70; + stageTotal = 10; + break; + case 'installing': + baseProgress = 80; + stageTotal = 10; + break; + case 'linking': + baseProgress = 90; + stageTotal = 5; + break; + case 'cleanup': + baseProgress = 95; + stageTotal = 3; + break; + case 'starting': + baseProgress = 98; + stageTotal = 2; + break; + } + + const overallProgress = baseProgress + (data.progress / 100) * stageTotal; + + this.installingModel = data.message; + this.installProgress = Math.round(overallProgress); + this.requestUpdate(); + }; + + let operationCompleted = false; + const completionTimeout = setTimeout(async () => { + if (!operationCompleted) { + console.log('[ApiKeyHeader] Operation timeout, checking status manually...'); + await this._handleOllamaSetupCompletion(true); + } + }, 15000); // 15 second timeout + + const completionHandler = async (event, result) => { + if (operationCompleted) return; operationCompleted = true; clearTimeout(completionTimeout); - await this._handleOllamaSetupCompletion(true); - } - }, 2000); - } - - } catch (error) { - operationCompleted = true; - clearTimeout(completionTimeout); - console.error("[ApiKeyHeader] Ollama setup failed:", error); - window.api.apiKeyHeader.removeOnOllamaInstallProgress(progressHandler); - window.api.apiKeyHeader.removeOnceOllamaInstallComplete(completionHandler); - await this._handleOllamaSetupCompletion(false, error.message); - } - } - async _handleOllamaSetupCompletion(success, errorMessage = null) { - this.installingModel = null; - this.installProgress = 0; + window.api.apiKeyHeader.removeOnOllamaInstallProgress(progressHandler); + await this._handleOllamaSetupCompletion(result.success, result.error); + }; - if (success) { - await this.refreshOllamaStatus(); - this.successMessage = "✓ Ollama is ready!"; - } else { - this.errorMessage = `Setup failed: ${errorMessage || "Unknown error"}`; - } - this.messageTimestamp = Date.now(); - this.requestUpdate(); - } + window.api.apiKeyHeader.onceOllamaInstallComplete(completionHandler); + window.api.apiKeyHeader.onOllamaInstallProgress(progressHandler); - async handleModelInput(e) { - const modelName = e.target.value.trim(); - this.selectedLlmModel = modelName; - this.clearMessages(); - - // Save to user history if it's a valid model name - if (modelName && modelName.length > 2) { - this.saveToUserHistory(modelName); - } - - this.requestUpdate(); - } - - async handleModelKeyPress(e) { - if (e.key === 'Enter' && this.selectedLlmModel?.trim()) { - e.preventDefault(); - console.log(`[ApiKeyHeader] Enter pressed, installing model: ${this.selectedLlmModel}`); - - // Check if Ollama is ready first - const ollamaReady = await this.ensureOllamaReady(); - if (!ollamaReady) { - this.errorMessage = 'Failed to setup Ollama'; - this.messageTimestamp = Date.now(); - this.requestUpdate(); - return; - } - - // Install the model - await this.installModel(this.selectedLlmModel); - } - } - - loadUserModelHistory() { - try { - const 
saved = localStorage.getItem('ollama-model-history'); - if (saved) { - this.userModelHistory = JSON.parse(saved); - } - } catch (error) { - console.error('[ApiKeyHeader] Failed to load model history:', error); - this.userModelHistory = []; - } - } - - saveToUserHistory(modelName) { - if (!modelName || !modelName.trim()) return; - - // Remove if already exists (to move to front) - this.userModelHistory = this.userModelHistory.filter(m => m !== modelName); - - // Add to front - this.userModelHistory.unshift(modelName); - - // Keep only last 20 entries - this.userModelHistory = this.userModelHistory.slice(0, 20); - - // Save to localStorage - try { - localStorage.setItem('ollama-model-history', JSON.stringify(this.userModelHistory)); - } catch (error) { - console.error('[ApiKeyHeader] Failed to save model history:', error); - } - } - - getCombinedModelSuggestions() { - const combined = []; - - // Add installed models first (from Ollama CLI) - for (const model of this.modelSuggestions) { - combined.push({ - name: model.name, - status: 'installed', - size: model.size || 'Unknown', - source: 'installed' - }); - } - - // Add user history models that aren't already installed - const installedNames = this.modelSuggestions.map(m => m.name); - for (const modelName of this.userModelHistory) { - if (!installedNames.includes(modelName)) { - combined.push({ - name: modelName, - status: 'history', - size: 'Unknown', - source: 'history' - }); - } - } - - return combined; - } - - async installModel(modelName) { - if (!modelName?.trim()) { - throw new Error('Invalid model name'); - } - - this.installingModel = modelName; - this.installProgress = 0; - this.clearMessages(); - this.requestUpdate(); - - let progressHandler = null; - - try { - console.log(`[ApiKeyHeader] Installing model via Ollama REST API: ${modelName}`); - - // Create robust progress handler with timeout protection - progressHandler = (event, data) => { - if (data.model === modelName && !this._isOperationCancelled(modelName)) { - const progress = Math.round(Math.max(0, Math.min(100, data.progress || 0))); - - if (progress !== this.installProgress) { - this.installProgress = progress; - console.log(`[ApiKeyHeader] API Progress: ${progress}% for ${modelName} (${data.status || 'downloading'})`); - this.requestUpdate(); - } - } - }; - - // Set up progress tracking - window.api.apiKeyHeader.onOllamaPullProgress(progressHandler); - - // Execute the model pull with timeout - const installPromise = window.api.apiKeyHeader.pullOllamaModel(modelName); - const timeoutPromise = new Promise((_, reject) => - setTimeout(() => reject(new Error('Installation timeout after 10 minutes')), 600000) - ); - - const result = await Promise.race([installPromise, timeoutPromise]); - - if (result.success) { - console.log(`[ApiKeyHeader] Model ${modelName} installed successfully via API`); - this.installProgress = 100; - this.requestUpdate(); - - // Brief pause to show completion - await new Promise(resolve => setTimeout(resolve, 300)); - - // Refresh status and show success - await this.refreshOllamaStatus(); - this.successMessage = `✓ ${modelName} ready`; - this.messageTimestamp = Date.now(); - } else { - throw new Error(result.error || 'Installation failed'); - } - } catch (error) { - console.error(`[ApiKeyHeader] Model installation failed:`, error); - this.errorMessage = `Failed: ${error.message}`; - this.messageTimestamp = Date.now(); - } finally { - // Comprehensive cleanup - if (progressHandler) { - 
window.api.apiKeyHeader.removeOnOllamaPullProgress(progressHandler); - } - - this.installingModel = null; - this.installProgress = 0; - this.requestUpdate(); - } - } - - _isOperationCancelled(modelName) { - return !this.installingModel || this.installingModel !== modelName; - } - - async downloadWhisperModel(modelId) { - if (!modelId?.trim()) { - console.warn('[ApiKeyHeader] Invalid Whisper model ID'); - return; - } - - console.log(`[ApiKeyHeader] Starting Whisper model download: ${modelId}`); - - // Mark as installing - this.whisperInstallingModels = { ...this.whisperInstallingModels, [modelId]: 0 }; - this.clearMessages(); - this.requestUpdate(); - - let progressHandler = null; - - try { - // Set up robust progress listener - progressHandler = (event, { modelId: id, progress }) => { - if (id === modelId) { - const cleanProgress = Math.round(Math.max(0, Math.min(100, progress || 0))); - this.whisperInstallingModels = { ...this.whisperInstallingModels, [modelId]: cleanProgress }; - console.log(`[ApiKeyHeader] Whisper download progress: ${cleanProgress}% for ${modelId}`); - this.requestUpdate(); - } - }; - - window.api.apiKeyHeader.onWhisperDownloadProgress(progressHandler); - - // Start download with timeout protection - const downloadPromise = window.api.apiKeyHeader.downloadWhisperModel(modelId); - const timeoutPromise = new Promise((_, reject) => - setTimeout(() => reject(new Error('Download timeout after 10 minutes')), 600000) - ); - - const result = await Promise.race([downloadPromise, timeoutPromise]); - - if (result?.success) { - this.successMessage = `✓ ${modelId} downloaded successfully`; - this.messageTimestamp = Date.now(); - console.log(`[ApiKeyHeader] Whisper model ${modelId} downloaded successfully`); - - // Auto-select the downloaded model - this.selectedSttModel = modelId; - } else { - this.errorMessage = `Failed to download ${modelId}: ${result?.error || 'Unknown error'}`; - this.messageTimestamp = Date.now(); - console.error(`[ApiKeyHeader] Whisper download failed:`, result?.error); - } - - } catch (error) { - console.error(`[ApiKeyHeader] Error downloading Whisper model ${modelId}:`, error); - this.errorMessage = `Error downloading ${modelId}: ${error.message}`; - this.messageTimestamp = Date.now(); - } finally { - // Cleanup - if (progressHandler) { - window.api.apiKeyHeader.removeOnWhisperDownloadProgress(progressHandler); - } - delete this.whisperInstallingModels[modelId]; - this.requestUpdate(); - } - } - - handlePaste(e) { - e.preventDefault() - this.clearMessages() - const clipboardText = (e.clipboardData || window.clipboardData).getData("text") - console.log("Paste event detected:", clipboardText?.substring(0, 10) + "...") - - if (clipboardText) { - this.apiKey = clipboardText.trim() - - const inputElement = e.target - inputElement.value = this.apiKey - } - - this.requestUpdate() - this.updateComplete.then(() => { - const inputField = this.shadowRoot?.querySelector(".apikey-input") - if (inputField) { - inputField.focus() - inputField.setSelectionRange(inputField.value.length, inputField.value.length) - } - }) - } - - handleKeyPress(e) { - if (e.key === "Enter") { - e.preventDefault() - this.handleSubmit() - } - } - - //////// after_modelStateService //////// - async handleSttModelChange(e) { - const modelId = e.target.value; - this.selectedSttModel = modelId; - - if (modelId && this.sttProvider === 'whisper') { - // Check if model needs to be downloaded - const isInstalling = this.whisperInstallingModels[modelId] !== undefined; - if (!isInstalling) { - 
console.log(`[ApiKeyHeader] Auto-installing Whisper model: ${modelId}`); - await this.downloadWhisperModel(modelId); - } - } - - this.requestUpdate(); - } - - async handleSubmit() { - console.log('[ApiKeyHeader] handleSubmit: Submitting...'); - - this.isLoading = true; - this.clearMessages(); - this.requestUpdate(); - - try { - // Handle LLM provider - let llmResult; - if (this.llmProvider === 'ollama') { - // For Ollama ensure it's ready and validate model selection - if (!this.selectedLlmModel?.trim()) { - throw new Error('Please enter an Ollama model name'); + try { + let result; + if (!this.ollamaStatus.installed) { + console.log('[ApiKeyHeader] Ollama not installed. Starting installation.'); + result = await window.api.apiKeyHeader.installOllama(); + } else { + console.log('[ApiKeyHeader] Ollama installed. Starting service.'); + result = await window.api.apiKeyHeader.startOllamaService(); } - + + // If IPC call succeeds but no event received, handle completion manually + if (result?.success && !operationCompleted) { + setTimeout(async () => { + if (!operationCompleted) { + operationCompleted = true; + clearTimeout(completionTimeout); + await this._handleOllamaSetupCompletion(true); + } + }, 2000); + } + } catch (error) { + operationCompleted = true; + clearTimeout(completionTimeout); + console.error('[ApiKeyHeader] Ollama setup failed:', error); + window.api.apiKeyHeader.removeOnOllamaInstallProgress(progressHandler); + window.api.apiKeyHeader.removeOnceOllamaInstallComplete(completionHandler); + await this._handleOllamaSetupCompletion(false, error.message); + } + } + + async _handleOllamaSetupCompletion(success, errorMessage = null) { + this.installingModel = null; + this.installProgress = 0; + + if (success) { + await this.refreshOllamaStatus(); + this.successMessage = '✓ Ollama is ready!'; + } else { + this.llmError = `*Setup failed: ${errorMessage || 'Unknown error'}`; + } + this.messageTimestamp = Date.now(); + this.requestUpdate(); + } + + async handleModelInput(e) { + const modelName = e.target.value.trim(); + this.selectedLlmModel = modelName; + this.clearMessages(); + + // Save to user history if it's a valid model name + if (modelName && modelName.length > 2) { + this.saveToUserHistory(modelName); + } + + this.requestUpdate(); + } + + async handleModelKeyPress(e) { + if (e.key === 'Enter' && this.selectedLlmModel?.trim()) { + e.preventDefault(); + console.log(`[ApiKeyHeader] Enter pressed, installing model: ${this.selectedLlmModel}`); + + // Check if Ollama is ready first const ollamaReady = await this.ensureOllamaReady(); if (!ollamaReady) { - throw new Error('Failed to setup Ollama'); + this.llmError = '*Failed to setup Ollama'; + this.messageTimestamp = Date.now(); + this.requestUpdate(); + return; } - - // Check if model is installed, if not install it - const selectedModel = this.getCombinedModelSuggestions().find(m => m.name === this.selectedLlmModel); - if (!selectedModel || selectedModel.status !== 'installed') { - console.log(`[ApiKeyHeader] Installing model ${this.selectedLlmModel}...`); - await this.installModel(this.selectedLlmModel); + + // Install the model + await this.installModel(this.selectedLlmModel); + } + } + + loadUserModelHistory() { + try { + const saved = localStorage.getItem('ollama-model-history'); + if (saved) { + this.userModelHistory = JSON.parse(saved); } - - // Validate Ollama is working - llmResult = await window.api.apiKeyHeader.validateKey({ - provider: 'ollama', - key: 'local' - }); - - if (llmResult.success) { - // Set the selected model 
- await window.api.apiKeyHeader.setSelectedModel({ - type: 'llm', - modelId: this.selectedLlmModel - }); - } - } else { - // For other providers, validate API key - if (!this.llmApiKey.trim()) { - throw new Error('Please enter LLM API key'); - } - - llmResult = await window.api.apiKeyHeader.validateKey({ - provider: this.llmProvider, - key: this.llmApiKey.trim() + } catch (error) { + console.error('[ApiKeyHeader] Failed to load model history:', error); + this.userModelHistory = []; + } + } + + saveToUserHistory(modelName) { + if (!modelName || !modelName.trim()) return; + + // Remove if already exists (to move to front) + this.userModelHistory = this.userModelHistory.filter(m => m !== modelName); + + // Add to front + this.userModelHistory.unshift(modelName); + + // Keep only last 20 entries + this.userModelHistory = this.userModelHistory.slice(0, 20); + + // Save to localStorage + try { + localStorage.setItem('ollama-model-history', JSON.stringify(this.userModelHistory)); + } catch (error) { + console.error('[ApiKeyHeader] Failed to save model history:', error); + } + } + + getCombinedModelSuggestions() { + const combined = []; + + // Add installed models first (from Ollama CLI) + for (const model of this.modelSuggestions) { + combined.push({ + name: model.name, + status: 'installed', + size: model.size || 'Unknown', + source: 'installed', }); } - // Handle STT provider - let sttResult; - if (this.sttProvider === 'ollama') { - // Ollama doesn't support STT yet, so skip or use same as LLM validation - sttResult = { success: true }; - } else if (this.sttProvider === 'whisper') { - // For Whisper, just validate it's enabled (model download already handled in handleSttModelChange) - sttResult = await window.api.apiKeyHeader.validateKey({ - provider: 'whisper', - key: 'local' - }); - - if (sttResult.success && this.selectedSttModel) { - // Set the selected model - await window.api.apiKeyHeader.setSelectedModel({ - type: 'stt', - modelId: this.selectedSttModel + // Add user history models that aren't already installed + const installedNames = this.modelSuggestions.map(m => m.name); + for (const modelName of this.userModelHistory) { + if (!installedNames.includes(modelName)) { + combined.push({ + name: modelName, + status: 'history', + size: 'Unknown', + source: 'history', }); } - } else { - // For other providers, validate API key - if (!this.sttApiKey.trim()) { - throw new Error('Please enter STT API key'); - } - - sttResult = await window.api.apiKeyHeader.validateKey({ - provider: this.sttProvider, - key: this.sttApiKey.trim() - }); } - if (llmResult.success && sttResult.success) { - console.log('[ApiKeyHeader] handleSubmit: Validation successful.'); - this.startSlideOutAnimation(); - } else { - let errorParts = []; - if (!llmResult.success) errorParts.push(`LLM: ${llmResult.error || 'Invalid'}`); - if (!sttResult.success) errorParts.push(`STT: ${sttResult.error || 'Invalid'}`); - this.errorMessage = errorParts.join(' | '); + return combined; + } + + async installModel(modelName) { + if (!modelName?.trim()) { + throw new Error('Invalid model name'); + } + + this.installingModel = modelName; + this.installProgress = 0; + this.clearMessages(); + this.requestUpdate(); + + if (!window.api?.apiKeyHeader) return; + let progressHandler = null; + + try { + console.log(`[ApiKeyHeader] Installing model via Ollama REST API: ${modelName}`); + + // Create robust progress handler with timeout protection + progressHandler = (event, data) => { + if (data.model === modelName && 
!this._isOperationCancelled(modelName)) { + const progress = Math.round(Math.max(0, Math.min(100, data.progress || 0))); + + if (progress !== this.installProgress) { + this.installProgress = progress; + console.log(`[ApiKeyHeader] API Progress: ${progress}% for ${modelName} (${data.status || 'downloading'})`); + this.requestUpdate(); + } + } + }; + + // Set up progress tracking + window.api.apiKeyHeader.onOllamaPullProgress(progressHandler); + + // Execute the model pull with timeout + const installPromise = window.api.apiKeyHeader.pullOllamaModel(modelName); + const timeoutPromise = new Promise((_, reject) => setTimeout(() => reject(new Error('Installation timeout after 10 minutes')), 600000)); + + const result = await Promise.race([installPromise, timeoutPromise]); + + if (result.success) { + console.log(`[ApiKeyHeader] Model ${modelName} installed successfully via API`); + this.installProgress = 100; + this.requestUpdate(); + + // Brief pause to show completion + await new Promise(resolve => setTimeout(resolve, 300)); + + // Refresh status and show success + await this.refreshOllamaStatus(); + this.successMessage = `✓ ${modelName} ready`; + this.messageTimestamp = Date.now(); + } else { + throw new Error(result.error || 'Installation failed'); + } + } catch (error) { + console.error(`[ApiKeyHeader] Model installation failed:`, error); + this.llmError = `*Failed: ${error.message}`; + this.messageTimestamp = Date.now(); + } finally { + // Comprehensive cleanup + if (progressHandler) { + window.api.apiKeyHeader.removeOnOllamaPullProgress(progressHandler); + } + + this.installingModel = null; + this.installProgress = 0; + this.requestUpdate(); + } + } + + _isOperationCancelled(modelName) { + return !this.installingModel || this.installingModel !== modelName; + } + + async downloadWhisperModel(modelId) { + if (!modelId?.trim()) { + console.warn('[ApiKeyHeader] Invalid Whisper model ID'); + return; + } + + console.log(`[ApiKeyHeader] Starting Whisper model download: ${modelId}`); + + // Mark as installing + this.whisperInstallingModels = { ...this.whisperInstallingModels, [modelId]: 0 }; + this.clearMessages(); + this.requestUpdate(); + + if (!window.api?.apiKeyHeader) return; + let progressHandler = null; + + try { + // Set up robust progress listener + progressHandler = (event, { modelId: id, progress }) => { + if (id === modelId) { + const cleanProgress = Math.round(Math.max(0, Math.min(100, progress || 0))); + this.whisperInstallingModels = { ...this.whisperInstallingModels, [modelId]: cleanProgress }; + console.log(`[ApiKeyHeader] Whisper download progress: ${cleanProgress}% for ${modelId}`); + this.requestUpdate(); + } + }; + + window.api.apiKeyHeader.onWhisperDownloadProgress(progressHandler); + + // Start download with timeout protection + const downloadPromise = window.api.apiKeyHeader.downloadWhisperModel(modelId); + const timeoutPromise = new Promise((_, reject) => setTimeout(() => reject(new Error('Download timeout after 10 minutes')), 600000)); + + const result = await Promise.race([downloadPromise, timeoutPromise]); + + if (result?.success) { + this.successMessage = `✓ ${modelId} downloaded successfully`; + this.messageTimestamp = Date.now(); + console.log(`[ApiKeyHeader] Whisper model ${modelId} downloaded successfully`); + + // Auto-select the downloaded model + this.selectedSttModel = modelId; + } else { + this.sttError = `*Failed to download ${modelId}: ${result?.error || 'Unknown error'}`; + this.messageTimestamp = Date.now(); + console.error(`[ApiKeyHeader] Whisper 
download failed:`, result?.error); + } + } catch (error) { + console.error(`[ApiKeyHeader] Error downloading Whisper model ${modelId}:`, error); + this.sttError = `*Error downloading ${modelId}: ${error.message}`; + this.messageTimestamp = Date.now(); + } finally { + // Cleanup + if (progressHandler) { + window.api.apiKeyHeader.removeOnWhisperDownloadProgress(progressHandler); + } + delete this.whisperInstallingModels[modelId]; + this.requestUpdate(); + } + } + + handlePaste(e) { + e.preventDefault(); + this.clearMessages(); + const clipboardText = (e.clipboardData || window.clipboardData).getData('text'); + console.log('Paste event detected:', clipboardText?.substring(0, 10) + '...'); + + if (clipboardText) { + this.apiKey = clipboardText.trim(); + + const inputElement = e.target; + inputElement.value = this.apiKey; + } + + this.requestUpdate(); + this.updateComplete.then(() => { + const inputField = this.shadowRoot?.querySelector('.apikey-input'); + if (inputField) { + inputField.focus(); + inputField.setSelectionRange(inputField.value.length, inputField.value.length); + } + }); + } + + handleKeyPress(e) { + if (e.key === 'Enter') { + e.preventDefault(); + this.handleSubmit(); + } + } + + //////// after_modelStateService //////// + async handleSttModelChange(e) { + const modelId = e.target.value; + this.selectedSttModel = modelId; + + if (modelId && this.sttProvider === 'whisper') { + // Check if model needs to be downloaded + const isInstalling = this.whisperInstallingModels[modelId] !== undefined; + if (!isInstalling) { + console.log(`[ApiKeyHeader] Auto-installing Whisper model: ${modelId}`); + await this.downloadWhisperModel(modelId); + } + } + + this.requestUpdate(); + } + + async handleSubmit() { + console.log('[ApiKeyHeader] handleSubmit: Submitting...'); + + this.isLoading = true; + this.clearMessages(); + this.requestUpdate(); + + if (!window.api?.apiKeyHeader) { + this.isLoading = false; + this.llmError = '*API bridge not available'; + this.requestUpdate(); + return; + } + + try { + // Handle LLM provider + let llmResult; + if (this.llmProvider === 'ollama') { + // For Ollama ensure it's ready and validate model selection + if (!this.selectedLlmModel?.trim()) { + throw new Error('Please enter an Ollama model name'); + } + + const ollamaReady = await this.ensureOllamaReady(); + if (!ollamaReady) { + throw new Error('Failed to setup Ollama'); + } + + // Check if model is installed, if not install it + const selectedModel = this.getCombinedModelSuggestions().find(m => m.name === this.selectedLlmModel); + if (!selectedModel || selectedModel.status !== 'installed') { + console.log(`[ApiKeyHeader] Installing model ${this.selectedLlmModel}...`); + await this.installModel(this.selectedLlmModel); + } + + // Validate Ollama is working + llmResult = await window.api.apiKeyHeader.validateKey({ + provider: 'ollama', + key: 'local', + }); + + if (llmResult.success) { + // Set the selected model + await window.api.apiKeyHeader.setSelectedModel({ + type: 'llm', + modelId: this.selectedLlmModel, + }); + } + } else { + // For other providers, validate API key + if (!this.llmApiKey.trim()) { + throw new Error('Please enter LLM API key'); + } + + llmResult = await window.api.apiKeyHeader.validateKey({ + provider: this.llmProvider, + key: this.llmApiKey.trim(), + }); + + if (llmResult.success) { + const config = await window.api.apiKeyHeader.getProviderConfig(); + const providerConfig = config[this.llmProvider]; + if (providerConfig && providerConfig.llmModels.length > 0) { + await 
window.api.apiKeyHeader.setSelectedModel({ + type: 'llm', + modelId: providerConfig.llmModels[0].id, + }); + } + } + } + + // Handle STT provider + let sttResult; + if (this.sttProvider === 'ollama') { + // Ollama doesn't support STT yet, so skip or use same as LLM validation + sttResult = { success: true }; + } else if (this.sttProvider === 'whisper') { + // For Whisper, just validate it's enabled (model download already handled in handleSttModelChange) + sttResult = await window.api.apiKeyHeader.validateKey({ + provider: 'whisper', + key: 'local', + }); + + if (sttResult.success && this.selectedSttModel) { + // Set the selected model + await window.api.apiKeyHeader.setSelectedModel({ + type: 'stt', + modelId: this.selectedSttModel, + }); + } + } else { + // For other providers, validate API key + if (!this.sttApiKey.trim()) { + throw new Error('Please enter STT API key'); + } + + sttResult = await window.api.apiKeyHeader.validateKey({ + provider: this.sttProvider, + key: this.sttApiKey.trim(), + }); + + if (sttResult.success) { + const config = await window.api.apiKeyHeader.getProviderConfig(); + const providerConfig = config[this.sttProvider]; + if (providerConfig && providerConfig.sttModels.length > 0) { + await window.api.apiKeyHeader.setSelectedModel({ + type: 'stt', + modelId: providerConfig.sttModels[0].id, + }); + } + } + } + + if (llmResult.success && sttResult.success) { + console.log('[ApiKeyHeader] handleSubmit: Validation successful.'); + + // Force refresh the model state to ensure areProvidersConfigured returns true + setTimeout(async () => { + const isConfigured = await window.api.apiKeyHeader.areProvidersConfigured(); + console.log('[ApiKeyHeader] Post-validation providers configured check:', isConfigured); + + if (isConfigured) { + this.startSlideOutAnimation(); + } else { + console.error('[ApiKeyHeader] Providers still not configured after successful validation'); + this.llmError = '*Configuration error. Please try again.'; + this.isLoading = false; + this.requestUpdate(); + } + }, 100); + } else { + this.llmError = !llmResult.success ? `*${llmResult.error || 'Invalid API Key'}` : ''; + this.sttError = !sttResult.success ? 
+                this.errorMessage = ''; // Do not use the general error message for this
+                this.messageTimestamp = Date.now();
+            }
+        } catch (error) {
+            console.error('[ApiKeyHeader] handleSubmit: Error:', error);
+            this.llmError = `*${error.message}`;
            this.messageTimestamp = Date.now();
        }
-        } catch (error) {
-            console.error('[ApiKeyHeader] handleSubmit: Error:', error);
-            this.errorMessage = error.message;
-            this.messageTimestamp = Date.now();
+
+        this.isLoading = false;
+        this.requestUpdate();
    }
-
-        this.isLoading = false;
-        this.requestUpdate();
-    }
-//////// after_modelStateService ////////
+    //////// after_modelStateService ////////
-    startSlideOutAnimation() {
-        console.log('[ApiKeyHeader] startSlideOutAnimation: Starting slide out animation.');
-        this.classList.add("sliding-out")
-    }
-
-    handleUsePicklesKey(e) {
-        e.preventDefault()
-
-        console.log("Requesting Firebase authentication from main process...")
-        if (window.api) {
-            window.api.common.startFirebaseAuth()
-        }
-    }
-
-    handleClose() {
-        console.log("Close button clicked")
-        if (window.api) {
-            window.api.common.quitApplication()
-        }
-    }
-
-
-    //////// after_modelStateService ////////
-    handleAnimationEnd(e) {
-        if (e.target !== this || !this.classList.contains('sliding-out')) return;
-        this.classList.remove("sliding-out");
-        this.classList.add("hidden");
-
-        console.log('[ApiKeyHeader] handleAnimationEnd: Transition completed, transitioning to next state...');
-
-        if (!window.api) {
-            console.error('[ApiKeyHeader] handleAnimationEnd: window.api not available');
-            return;
-        }
-
-        if (!this.stateUpdateCallback) {
-            console.error('[ApiKeyHeader] handleAnimationEnd: stateUpdateCallback not set! This will prevent transition to main window.');
-            return;
-        }
-
-        window.api.common.getCurrentUser()
-            .then(userState => {
-                console.log('[ApiKeyHeader] handleAnimationEnd: User state retrieved:', userState);
+    ////TODO: something is wrong with the slide-out transition animation logic
+    startSlideOutAnimation() {
+        console.log('[ApiKeyHeader] startSlideOutAnimation: Starting slide out animation.');
+        this.classList.add('sliding-out');
-                // Additional validation for local providers
-                return window.api.apiKeyHeader.areProvidersConfigured().then(isConfigured => {
-                    console.log('[ApiKeyHeader] handleAnimationEnd: Providers configured check:', isConfigured);
-
-                    if (!isConfigured) {
-                        console.warn('[ApiKeyHeader] handleAnimationEnd: Providers still not configured, may return to ApiKey screen');
-                    }
-
-                    // Call the state update callback
-                    this.stateUpdateCallback(userState);
-                });
-            })
-            .catch(error => {
-                console.error('[ApiKeyHeader] handleAnimationEnd: Error during state transition:', error);
-
-                // Fallback: try to call callback with minimal state
-                if (this.stateUpdateCallback) {
-                    console.log('[ApiKeyHeader] handleAnimationEnd: Attempting fallback state transition...');
-                    this.stateUpdateCallback({ isLoggedIn: false });
+        // Fallback: if animation doesn't trigger animationend event, force transition
+        setTimeout(() => {
+            if (this.classList.contains('sliding-out')) {
+                console.log('[ApiKeyHeader] Animation fallback triggered - forcing transition');
+                this.handleAnimationEnd({ target: this, animationName: 'slideOut' });
+            }
+        }, 1); // Wait a bit longer than animation duration
+    }
+
+    handleClose() {
+        if (window.api?.common) {
+            window.api.common.quitApplication();
        }
-            });
-    }
-//////// after_modelStateService ////////
+    }
-    connectedCallback() {
-        super.connectedCallback()
-        // this.addEventListener("animationend", this.handleAnimationEnd)
-        this.addEventListener("transitionend", this.handleAnimationEnd)
-    }
-
-    handleMessageFadeEnd(e) {
-        if (e.animationName === 'fadeOut') {
-            // Clear the message that finished fading
-            if (e.target.classList.contains('error-message')) {
-                this.errorMessage = '';
-            } else if (e.target.classList.contains('success-message')) {
-                this.successMessage = '';
-            }
-            this.messageTimestamp = 0;
-            this.requestUpdate();
-        }
-    }
+    //////// after_modelStateService ////////
+    handleAnimationEnd(e) {
+        if (e.target !== this || !this.classList.contains('sliding-out')) return;
+        this.classList.remove('sliding-out');
+        this.classList.add('hidden');
-    disconnectedCallback() {
-        super.disconnectedCallback()
-        // this.removeEventListener("animationend", this.handleAnimationEnd)
-        this.removeEventListener("transitionend", this.handleAnimationEnd)
-        // Professional cleanup of all resources
-        this._performCompleteCleanup();
-    }
-
-    _performCompleteCleanup() {
-        console.log('[ApiKeyHeader] Performing complete cleanup');
-
-        // Stop health monitoring
-        this._stopHealthMonitoring();
-
-        // Cancel all active operations
-        this._cancelAllActiveOperations();
-
-        // Cancel any ongoing installations when component is destroyed
-        if (this.installingModel) {
-            this.progressTracker.cancelInstallation(this.installingModel);
-        }
-
-        // Cleanup event listeners
-        if (window.api) {
-            window.api.apiKeyHeader.removeAllListeners();
-        }
-
-        // Cancel any ongoing downloads
-        const downloadingModels = Object.keys(this.whisperInstallingModels);
-        if (downloadingModels.length > 0) {
-            console.log(`[ApiKeyHeader] Cancelling ${downloadingModels.length} ongoing Whisper downloads`);
-            downloadingModels.forEach(modelId => {
-                delete this.whisperInstallingModels[modelId];
-            });
-        }
-
-        // Reset state
-        this.connectionState = 'disconnected';
-        this.retryCount = 0;
-
-        console.log('[ApiKeyHeader] Cleanup completed');
-    }
-
-    /**
-     * State machine-based Ollama UI rendering
-     */
-    _renderOllamaStateUI() {
-        const state = this._getOllamaUIState();
-
-        switch (state.type) {
-            case 'connecting':
-                return this._renderConnectingState(state);
-            case 'install_required':
-                return this._renderInstallRequiredState();
-            case 'start_required':
-                return this._renderStartRequiredState();
-            case 'ready':
-                return this._renderReadyState();
-            case 'failed':
-                return this._renderFailedState(state);
-            case 'installing':
-                return this._renderInstallingState(state);
-            default:
-                return this._renderUnknownState();
-        }
-    }
-
-    _getOllamaUIState() {
-        // State determination logic
-        if (this.connectionState === 'connecting') {
-            return { type: 'connecting', message: this.installingModel || 'Connecting to Ollama...' };
-        }
-
-        if (this.connectionState === 'failed') {
-            return { type: 'failed', message: this.errorMessage };
-        }
-
-        if (this.installingModel && this.installingModel.includes('Ollama')) {
-            return { type: 'installing', progress: this.installProgress };
-        }
-
-        if (!this.ollamaStatus.installed) {
-            return { type: 'install_required' };
-        }
-
-        if (!this.ollamaStatus.running) {
-            return { type: 'start_required' };
-        }
-
-        return { type: 'ready' };
-    }
-
-    _renderConnectingState(state) {
-        return html`
-