diff --git a/src/bridge/featureBridge.js b/src/bridge/featureBridge.js index be6d348..b5c13eb 100644 --- a/src/bridge/featureBridge.js +++ b/src/bridge/featureBridge.js @@ -1,5 +1,5 @@ // src/bridge/featureBridge.js -const { ipcMain, app } = require('electron'); +const { ipcMain, app, BrowserWindow } = require('electron'); const settingsService = require('../features/settings/settingsService'); const authService = require('../features/common/services/authService'); const whisperService = require('../features/common/services/whisperService'); @@ -7,6 +7,8 @@ const ollamaService = require('../features/common/services/ollamaService'); const modelStateService = require('../features/common/services/modelStateService'); const shortcutsService = require('../features/shortcuts/shortcutsService'); const presetRepository = require('../features/common/repositories/preset'); +const windowBridge = require('./windowBridge'); +const localAIManager = require('../features/common/services/localAIManager'); const askService = require('../features/ask/askService'); const listenService = require('../features/listen/listenService'); @@ -40,6 +42,8 @@ module.exports = { ipcMain.handle('check-system-permissions', async () => await permissionService.checkSystemPermissions()); ipcMain.handle('request-microphone-permission', async () => await permissionService.requestMicrophonePermission()); ipcMain.handle('open-system-preferences', async (event, section) => await permissionService.openSystemPreferences(section)); + + //TODO: Need to Remove this ipcMain.handle('mark-permissions-completed', async () => await permissionService.markPermissionsAsCompleted()); ipcMain.handle('check-permissions-completed', async () => await permissionService.checkPermissionsCompleted()); @@ -113,6 +117,115 @@ module.exports = { ipcMain.handle('model:are-providers-configured', () => modelStateService.areProvidersConfigured()); ipcMain.handle('model:get-provider-config', () => modelStateService.getProviderConfig()); 
// ---- LocalAI / ModelState event fan-out to renderer windows ----------------
// Fix: the original repeated the same `BrowserWindow.getAllWindows().forEach`
// guard-and-send loop eight times; it is collapsed into one helper. Behavior
// is unchanged, including the payload-less 'settings-updated' send.

// Broadcast `payload` on `channel` to every live renderer window.
const broadcastToAllWindows = (channel, payload) => {
    BrowserWindow.getAllWindows().forEach(win => {
        if (win && !win.isDestroyed()) {
            if (payload === undefined) {
                // Some channels (e.g. 'settings-updated') carry no payload.
                win.webContents.send(channel);
            } else {
                win.webContents.send(channel, payload);
            }
        }
    });
};

// LocalAIManager lifecycle events → renderer.
localAIManager.on('install-progress', (service, data) => {
    broadcastToAllWindows('localai:install-progress', { service, ...data });
});
localAIManager.on('installation-complete', (service) => {
    broadcastToAllWindows('localai:installation-complete', { service });
});
// Both 'error' and 'error-occurred' map onto the same renderer channel.
localAIManager.on('error', (error) => {
    broadcastToAllWindows('localai:error-occurred', error);
});
localAIManager.on('error-occurred', (error) => {
    broadcastToAllWindows('localai:error-occurred', error);
});
localAIManager.on('model-ready', (data) => {
    broadcastToAllWindows('localai:model-ready', data);
});
localAIManager.on('state-changed', (service, state) => {
    broadcastToAllWindows('localai:service-status-changed', { service, ...state });
});

// Begin periodic service-state synchronization (presumably polls service
// status and re-emits 'state-changed' — confirm in localAIManager).
localAIManager.startPeriodicSync();

// ModelStateService events → renderer.
modelStateService.on('state-updated', (state) => {
    broadcastToAllWindows('model-state:updated', state);
});
modelStateService.on('settings-updated', () => {
    broadcastToAllWindows('settings-updated');
});
modelStateService.on('force-show-apikey-header', () => { + BrowserWindow.getAllWindows().forEach(win => { + if (win && !win.isDestroyed()) { + win.webContents.send('force-show-apikey-header'); + } + }); + }); + + // LocalAI 통합 핸들러 추가 + ipcMain.handle('localai:install', async (event, { service, options }) => { + return await localAIManager.installService(service, options); + }); + ipcMain.handle('localai:get-status', async (event, service) => { + return await localAIManager.getServiceStatus(service); + }); + ipcMain.handle('localai:start-service', async (event, service) => { + return await localAIManager.startService(service); + }); + ipcMain.handle('localai:stop-service', async (event, service) => { + return await localAIManager.stopService(service); + }); + ipcMain.handle('localai:install-model', async (event, { service, modelId, options }) => { + return await localAIManager.installModel(service, modelId, options); + }); + ipcMain.handle('localai:get-installed-models', async (event, service) => { + return await localAIManager.getInstalledModels(service); + }); + ipcMain.handle('localai:run-diagnostics', async (event, service) => { + return await localAIManager.runDiagnostics(service); + }); + ipcMain.handle('localai:repair-service', async (event, service) => { + return await localAIManager.repairService(service); + }); + + // 에러 처리 핸들러 + ipcMain.handle('localai:handle-error', async (event, { service, errorType, details }) => { + return await localAIManager.handleError(service, errorType, details); + }); + + // 전체 상태 조회 + ipcMain.handle('localai:get-all-states', async (event) => { + return await localAIManager.getAllServiceStates(); + }); + console.log('[FeatureBridge] Initialized with all feature handlers.'); }, diff --git a/src/bridge/windowBridge.js b/src/bridge/windowBridge.js index 313273c..6555049 100644 --- a/src/bridge/windowBridge.js +++ b/src/bridge/windowBridge.js @@ -1,9 +1,13 @@ // src/bridge/windowBridge.js const { ipcMain, shell } = 
require('electron'); -const windowManager = require('../window/windowManager'); +// Bridge는 단순히 IPC 핸들러를 등록하는 역할만 함 (비즈니스 로직 없음) module.exports = { initialize() { + // initialize 시점에 windowManager를 require하여 circular dependency 문제 해결 + const windowManager = require('../window/windowManager'); + + // 기존 IPC 핸들러들 ipcMain.handle('toggle-content-protection', () => windowManager.toggleContentProtection()); ipcMain.handle('resize-header-window', (event, args) => windowManager.resizeHeaderWindow(args)); ipcMain.handle('get-content-protection-status', () => windowManager.getContentProtectionStatus()); diff --git a/src/features/common/ai/providers/whisper.js b/src/features/common/ai/providers/whisper.js index 58cd666..6dca6be 100644 --- a/src/features/common/ai/providers/whisper.js +++ b/src/features/common/ai/providers/whisper.js @@ -41,7 +41,7 @@ class WhisperSTTSession extends EventEmitter { startProcessingLoop() { this.processingInterval = setInterval(async () => { - const minBufferSize = 24000 * 2 * 0.15; + const minBufferSize = 16000 * 2 * 0.15; if (this.audioBuffer.length >= minBufferSize && !this.process) { console.log(`[WhisperSTT-${this.sessionId}] Processing audio chunk, buffer size: ${this.audioBuffer.length}`); await this.processAudioChunk(); diff --git a/src/features/common/config/checksums.js b/src/features/common/config/checksums.js index ff903cc..216c31f 100644 --- a/src/features/common/config/checksums.js +++ b/src/features/common/config/checksums.js @@ -2,41 +2,49 @@ const DOWNLOAD_CHECKSUMS = { ollama: { dmg: { url: 'https://ollama.com/download/Ollama.dmg', - sha256: null // To be updated with actual checksum + sha256: null // TODO: 실제 체크섬 추가 필요 - null일 경우 체크섬 검증 스킵됨 }, exe: { url: 'https://ollama.com/download/OllamaSetup.exe', - sha256: null // To be updated with actual checksum + sha256: null // TODO: 실제 체크섬 추가 필요 - null일 경우 체크섬 검증 스킵됨 + }, + linux: { + url: 'curl -fsSL https://ollama.com/install.sh | sh', + sha256: null // TODO: 실제 체크섬 추가 필요 - null일 
경우 체크섬 검증 스킵됨 } }, whisper: { models: { 'whisper-tiny': { - url: 'https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-tiny.bin', + url: 'https://huggingface.co/ggml-org/whisper.cpp/resolve/main/ggml-tiny.bin', sha256: 'be07e048e1e599ad46341c8d2a135645097a538221678b7acdd1b1919c6e1b21' }, 'whisper-base': { - url: 'https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.bin', + url: 'https://huggingface.co/ggml-org/whisper.cpp/resolve/main/ggml-base.bin', sha256: '60ed5bc3dd14eea856493d334349b405782ddcaf0028d4b5df4088345fba2efe' }, 'whisper-small': { - url: 'https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-small.bin', + url: 'https://huggingface.co/ggml-org/whisper.cpp/resolve/main/ggml-small.bin', sha256: '1be3a9b2063867b937e64e2ec7483364a79917e157fa98c5d94b5c1fffea987b' }, 'whisper-medium': { - url: 'https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-medium.bin', + url: 'https://huggingface.co/ggml-org/whisper.cpp/resolve/main/ggml-medium.bin', sha256: '6c14d5adee5f86394037b4e4e8b59f1673b6cee10e3cf0b11bbdbee79c156208' } }, binaries: { 'v1.7.6': { + mac: { + url: 'https://github.com/ggml-org/whisper.cpp/releases/download/v1.7.6/whisper-cpp-v1.7.6-mac-x64.zip', + sha256: null // TODO: 실제 체크섬 추가 필요 - null일 경우 체크섬 검증 스킵됨 + }, windows: { - url: 'https://github.com/ggerganov/whisper.cpp/releases/download/v1.7.6/whisper-cpp-v1.7.6-win-x64.zip', - sha256: null // To be updated with actual checksum + url: 'https://github.com/ggml-org/whisper.cpp/releases/download/v1.7.6/whisper-cpp-v1.7.6-win-x64.zip', + sha256: null // TODO: 실제 체크섬 추가 필요 - null일 경우 체크섬 검증 스킵됨 }, linux: { - url: 'https://github.com/ggerganov/whisper.cpp/releases/download/v1.7.6/whisper-cpp-v1.7.6-linux-x64.tar.gz', - sha256: null // To be updated with actual checksum + url: 'https://github.com/ggml-org/whisper.cpp/releases/download/v1.7.6/whisper-cpp-v1.7.6-linux-x64.tar.gz', + sha256: null // TODO: 실제 체크섬 추가 필요 - null일 경우 체크섬 검증 스킵됨 } } } diff --git 
// Get the single active provider row for a capability type ('llm' | 'stt').
// Returns the Firestore document data (plus its id), or null when none is
// active or the query fails.
async function getActiveProvider(uid, type) {
    try {
        const column = type === 'llm' ? 'is_active_llm' : 'is_active_stt';
        const q = query(
            providerSettingsCol(),
            where('uid', '==', uid),
            where(column, '==', true)
        );
        const querySnapshot = await getDocs(q);

        if (querySnapshot.empty) {
            return null;
        }

        // NOTE(review): if more than one document is flagged active (possible
        // after a partially-failed setActiveProvider), the first match wins.
        const docSnap = querySnapshot.docs[0];
        return { id: docSnap.id, ...docSnap.data() };
    } catch (error) {
        console.error('[ProviderSettings Firebase] Error getting active provider:', error);
        return null;
    }
}

// Make `provider` the only active provider for `type`; pass a falsy provider
// to deactivate all. Fix vs. original: only documents that are currently
// flagged active are rewritten, instead of unconditionally writing
// `{ [column]: false }` to every provider document the user has.
async function setActiveProvider(uid, provider, type) {
    try {
        const column = type === 'llm' ? 'is_active_llm' : 'is_active_stt';

        // Deactivate only the rows that are actually active, to minimize writes.
        const allSettings = await getAllByUid(uid);
        const updatePromises = allSettings
            .filter(setting => setting[column])
            .map(setting => {
                const docRef = doc(providerSettingsCol(), setting.id);
                return setDoc(docRef, { [column]: false }, { merge: true });
            });
        await Promise.all(updatePromises);

        // Activate the requested provider (doc id convention: `${uid}_${provider}`).
        if (provider) {
            const docRef = doc(providerSettingsCol(), `${uid}_${provider}`);
            await setDoc(docRef, { [column]: true }, { merge: true });
        }

        return { success: true };
    } catch (error) {
        console.error('[ProviderSettings Firebase] Error setting active provider:', error);
        throw error;
    }
}

// Return { llm, stt } with the active setting row for each capability (null
// when none). Firestore cannot OR two equality filters in one query here, so
// all rows are fetched and filtered client-side.
async function getActiveSettings(uid) {
    try {
        const activeSettings = { llm: null, stt: null };

        for (const setting of await getAllByUid(uid)) {
            if (setting.is_active_llm) activeSettings.llm = setting;
            if (setting.is_active_stt) activeSettings.stt = setting;
        }

        return activeSettings;
    } catch (error) {
        console.error('[ProviderSettings Firebase] Error getting active settings:', error);
        return { llm: null, stt: null };
    }
}
const sqliteClient = require('../../services/sqliteClient');

// NOTE(review): this revision removed encryptionService, so api_key values
// are now read and written as plaintext — a security regression for rows
// written encrypted by older builds. Confirm a migration decrypts legacy
// rows, or re-introduce the service. The dead `result.api_key = result.api_key`
// statements and commented-out encryption blocks from the draft are removed.
// (getAllByUid / remove / removeAllByUid are not fully visible in this hunk
// and are not reproduced here.)

// Fetch the settings row for one (uid, provider) pair, or null when absent.
function getByProvider(uid, provider) {
    const db = sqliteClient.getDb();
    const stmt = db.prepare('SELECT * FROM provider_settings WHERE uid = ? AND provider = ?');
    return stmt.get(uid, provider) || null;
}

// Insert or update a provider's settings row. The active flags are
// deliberately NOT writable through this path: they are forced to 0 on
// insert and left untouched on conflict — use setActiveProvider() instead.
function upsert(uid, provider, settings) {
    // Surface misuse early: callers must not toggle active flags here.
    if (settings.is_active_llm || settings.is_active_stt) {
        console.warn('[ProviderSettings] Warning: is_active_llm/is_active_stt should not be set directly. Use setActiveProvider() instead.');
    }

    const db = sqliteClient.getDb();
    const stmt = db.prepare(`
        INSERT INTO provider_settings (uid, provider, api_key, selected_llm_model, selected_stt_model, is_active_llm, is_active_stt, created_at, updated_at)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
        ON CONFLICT(uid, provider) DO UPDATE SET
            api_key = excluded.api_key,
            selected_llm_model = excluded.selected_llm_model,
            selected_stt_model = excluded.selected_stt_model,
            updated_at = excluded.updated_at
    `);

    const now = Date.now();
    const result = stmt.run(
        uid,
        provider,
        settings.api_key || null,
        settings.selected_llm_model || null,
        settings.selected_stt_model || null,
        0, // is_active_llm — activation goes through setActiveProvider()
        0, // is_active_stt — activation goes through setActiveProvider()
        settings.created_at ?? now,
        // Fix: binding `undefined` makes better-sqlite3 throw; default to now.
        settings.updated_at ?? now
    );

    return { changes: result.changes };
}

// Return the single active provider row for `type` ('llm' | 'stt'), or null.
function getActiveProvider(uid, type) {
    const db = sqliteClient.getDb();
    // `column` comes from a fixed ternary, never user input, so interpolating
    // it into the SQL string is safe.
    const column = type === 'llm' ? 'is_active_llm' : 'is_active_stt';
    const stmt = db.prepare(`SELECT * FROM provider_settings WHERE uid = ? AND ${column} = 1`);
    return stmt.get(uid) || null;
}

// Atomically make `provider` the only active provider for `type`; a falsy
// provider just deactivates everything for that type.
function setActiveProvider(uid, provider, type) {
    const db = sqliteClient.getDb();
    const column = type === 'llm' ? 'is_active_llm' : 'is_active_stt';

    // Wrap both UPDATEs in one transaction so at most one row ends up active.
    const txn = db.transaction(() => {
        db.prepare(`UPDATE provider_settings SET ${column} = 0 WHERE uid = ?`).run(uid);
        if (provider) {
            db.prepare(`UPDATE provider_settings SET ${column} = 1 WHERE uid = ? AND provider = ?`)
                .run(uid, provider);
        }
    });
    txn();

    return { success: true };
}

// Return { llm, stt } rows currently flagged active (null when none).
function getActiveSettings(uid) {
    const db = sqliteClient.getDb();
    const stmt = db.prepare(`
        SELECT * FROM provider_settings
        WHERE uid = ? AND (is_active_llm = 1 OR is_active_stt = 1)
        ORDER BY provider
    `);

    const activeSettings = { llm: null, stt: null };
    for (const row of stmt.all(uid)) {
        if (row.is_active_llm) activeSettings.llm = row;
        if (row.is_active_stt) activeSettings.stt = row;
    }
    return activeSettings;
}
encrypted converter for user model selections -const userModelSelectionsConverter = createEncryptedConverter([ - 'selected_llm_provider', - 'selected_llm_model', - 'selected_stt_provider', - 'selected_stt_model' -]); - -function userModelSelectionsCol() { - const db = getFirestore(); - return collection(db, 'user_model_selections').withConverter(userModelSelectionsConverter); -} - -async function get(uid) { - try { - const docRef = doc(userModelSelectionsCol(), uid); - const docSnap = await getDoc(docRef); - return docSnap.exists() ? { id: docSnap.id, ...docSnap.data() } : null; - } catch (error) { - console.error('[UserModelSelections Firebase] Error getting user model selections:', error); - return null; - } -} - -async function upsert(uid, selections) { - try { - const docRef = doc(userModelSelectionsCol(), uid); - await setDoc(docRef, selections, { merge: true }); - return { changes: 1 }; - } catch (error) { - console.error('[UserModelSelections Firebase] Error upserting user model selections:', error); - throw error; - } -} - -async function remove(uid) { - try { - const docRef = doc(userModelSelectionsCol(), uid); - await deleteDoc(docRef); - return { changes: 1 }; - } catch (error) { - console.error('[UserModelSelections Firebase] Error removing user model selections:', error); - throw error; - } -} - -module.exports = { - get, - upsert, - remove -}; \ No newline at end of file diff --git a/src/features/common/repositories/userModelSelections/index.js b/src/features/common/repositories/userModelSelections/index.js deleted file mode 100644 index e886af0..0000000 --- a/src/features/common/repositories/userModelSelections/index.js +++ /dev/null @@ -1,50 +0,0 @@ -const firebaseRepository = require('./firebase.repository'); -const sqliteRepository = require('./sqlite.repository'); - -let authService = null; - -function setAuthService(service) { - authService = service; -} - -function getBaseRepository() { - if (!authService) { - throw new Error('AuthService not 
set for userModelSelections repository'); - } - - const user = authService.getCurrentUser(); - return user.isLoggedIn ? firebaseRepository : sqliteRepository; -} - -const userModelSelectionsRepositoryAdapter = { - async get() { - const repo = getBaseRepository(); - const uid = authService.getCurrentUserId(); - return await repo.get(uid); - }, - - async upsert(selections) { - const repo = getBaseRepository(); - const uid = authService.getCurrentUserId(); - const now = Date.now(); - - const selectionsWithMeta = { - ...selections, - uid, - updated_at: now - }; - - return await repo.upsert(uid, selectionsWithMeta); - }, - - async remove() { - const repo = getBaseRepository(); - const uid = authService.getCurrentUserId(); - return await repo.remove(uid); - } -}; - -module.exports = { - ...userModelSelectionsRepositoryAdapter, - setAuthService -}; \ No newline at end of file diff --git a/src/features/common/repositories/userModelSelections/sqlite.repository.js b/src/features/common/repositories/userModelSelections/sqlite.repository.js deleted file mode 100644 index abd38df..0000000 --- a/src/features/common/repositories/userModelSelections/sqlite.repository.js +++ /dev/null @@ -1,48 +0,0 @@ -const sqliteClient = require('../../services/sqliteClient'); - -function get(uid) { - const db = sqliteClient.getDb(); - const stmt = db.prepare('SELECT * FROM user_model_selections WHERE uid = ?'); - return stmt.get(uid) || null; -} - -function upsert(uid, selections) { - const db = sqliteClient.getDb(); - - // Use SQLite's UPSERT syntax (INSERT ... ON CONFLICT ... DO UPDATE) - const stmt = db.prepare(` - INSERT INTO user_model_selections (uid, selected_llm_provider, selected_llm_model, - selected_stt_provider, selected_stt_model, updated_at) - VALUES (?, ?, ?, ?, ?, ?) 
- ON CONFLICT(uid) DO UPDATE SET - selected_llm_provider = excluded.selected_llm_provider, - selected_llm_model = excluded.selected_llm_model, - selected_stt_provider = excluded.selected_stt_provider, - selected_stt_model = excluded.selected_stt_model, - updated_at = excluded.updated_at - `); - - const result = stmt.run( - uid, - selections.selected_llm_provider || null, - selections.selected_llm_model || null, - selections.selected_stt_provider || null, - selections.selected_stt_model || null, - selections.updated_at - ); - - return { changes: result.changes }; -} - -function remove(uid) { - const db = sqliteClient.getDb(); - const stmt = db.prepare('DELETE FROM user_model_selections WHERE uid = ?'); - const result = stmt.run(uid); - return { changes: result.changes }; -} - -module.exports = { - get, - upsert, - remove -}; \ No newline at end of file diff --git a/src/features/common/services/authService.js b/src/features/common/services/authService.js index face151..2969418 100644 --- a/src/features/common/services/authService.js +++ b/src/features/common/services/authService.js @@ -6,7 +6,6 @@ const encryptionService = require('./encryptionService'); const migrationService = require('./migrationService'); const sessionRepository = require('../repositories/session'); const providerSettingsRepository = require('../repositories/providerSettings'); -const userModelSelectionsRepository = require('../repositories/userModelSelections'); async function getVirtualKeyByEmail(email, idToken) { if (!idToken) { @@ -48,7 +47,6 @@ class AuthService { sessionRepository.setAuthService(this); providerSettingsRepository.setAuthService(this); - userModelSelectionsRepository.setAuthService(this); } initialize() { diff --git a/src/features/common/services/localAIServiceBase.js b/src/features/common/services/localAIServiceBase.js deleted file mode 100644 index cbbca3b..0000000 --- a/src/features/common/services/localAIServiceBase.js +++ /dev/null @@ -1,308 +0,0 @@ -const { exec } = 
require('child_process'); -const { promisify } = require('util'); -const { EventEmitter } = require('events'); -const { BrowserWindow } = require('electron'); -const path = require('path'); -const os = require('os'); -const https = require('https'); -const fs = require('fs'); -const crypto = require('crypto'); - -const execAsync = promisify(exec); - -class LocalAIServiceBase extends EventEmitter { - constructor(serviceName) { - super(); - this.serviceName = serviceName; - this.baseUrl = null; - this.installationProgress = new Map(); - } - - // 모든 윈도우에 이벤트 브로드캐스트 - _broadcastToAllWindows(eventName, data = null) { - BrowserWindow.getAllWindows().forEach(win => { - if (win && !win.isDestroyed()) { - if (data !== null) { - win.webContents.send(eventName, data); - } else { - win.webContents.send(eventName); - } - } - }); - } - - getPlatform() { - return process.platform; - } - - async checkCommand(command) { - try { - const platform = this.getPlatform(); - const checkCmd = platform === 'win32' ? 
'where' : 'which'; - const { stdout } = await execAsync(`${checkCmd} ${command}`); - return stdout.trim(); - } catch (error) { - return null; - } - } - - async isInstalled() { - throw new Error('isInstalled() must be implemented by subclass'); - } - - async isServiceRunning() { - throw new Error('isServiceRunning() must be implemented by subclass'); - } - - async startService() { - throw new Error('startService() must be implemented by subclass'); - } - - async stopService() { - throw new Error('stopService() must be implemented by subclass'); - } - - async waitForService(checkFn, maxAttempts = 30, delayMs = 1000) { - for (let i = 0; i < maxAttempts; i++) { - if (await checkFn()) { - console.log(`[${this.serviceName}] Service is ready`); - return true; - } - await new Promise(resolve => setTimeout(resolve, delayMs)); - } - throw new Error(`${this.serviceName} service failed to start within timeout`); - } - - getInstallProgress(modelName) { - return this.installationProgress.get(modelName) || 0; - } - - setInstallProgress(modelName, progress) { - this.installationProgress.set(modelName, progress); - // 각 서비스에서 직접 브로드캐스트하도록 변경 - } - - clearInstallProgress(modelName) { - this.installationProgress.delete(modelName); - } - - async autoInstall(onProgress) { - const platform = this.getPlatform(); - console.log(`[${this.serviceName}] Starting auto-installation for ${platform}`); - - try { - switch(platform) { - case 'darwin': - return await this.installMacOS(onProgress); - case 'win32': - return await this.installWindows(onProgress); - case 'linux': - return await this.installLinux(); - default: - throw new Error(`Unsupported platform: ${platform}`); - } - } catch (error) { - console.error(`[${this.serviceName}] Auto-installation failed:`, error); - throw error; - } - } - - async installMacOS() { - throw new Error('installMacOS() must be implemented by subclass'); - } - - async installWindows() { - throw new Error('installWindows() must be implemented by subclass'); - } - 
- async installLinux() { - throw new Error('installLinux() must be implemented by subclass'); - } - - // parseProgress method removed - using proper REST API now - - async shutdown(force = false) { - console.log(`[${this.serviceName}] Starting ${force ? 'forced' : 'graceful'} shutdown...`); - - const isRunning = await this.isServiceRunning(); - if (!isRunning) { - console.log(`[${this.serviceName}] Service not running, nothing to shutdown`); - return true; - } - - const platform = this.getPlatform(); - - try { - switch(platform) { - case 'darwin': - return await this.shutdownMacOS(force); - case 'win32': - return await this.shutdownWindows(force); - case 'linux': - return await this.shutdownLinux(force); - default: - console.warn(`[${this.serviceName}] Unsupported platform for shutdown: ${platform}`); - return false; - } - } catch (error) { - console.error(`[${this.serviceName}] Error during shutdown:`, error); - return false; - } - } - - async shutdownMacOS(force) { - throw new Error('shutdownMacOS() must be implemented by subclass'); - } - - async shutdownWindows(force) { - throw new Error('shutdownWindows() must be implemented by subclass'); - } - - async shutdownLinux(force) { - throw new Error('shutdownLinux() must be implemented by subclass'); - } - - async downloadFile(url, destination, options = {}) { - const { - onProgress = null, - headers = { 'User-Agent': 'Glass-App' }, - timeout = 300000, // 5 minutes default - modelId = null // 모델 ID를 위한 추가 옵션 - } = options; - - return new Promise((resolve, reject) => { - const file = fs.createWriteStream(destination); - let downloadedSize = 0; - let totalSize = 0; - - const request = https.get(url, { headers }, (response) => { - // Handle redirects (301, 302, 307, 308) - if ([301, 302, 307, 308].includes(response.statusCode)) { - file.close(); - fs.unlink(destination, () => {}); - - if (!response.headers.location) { - reject(new Error('Redirect without location header')); - return; - } - - 
console.log(`[${this.serviceName}] Following redirect from ${url} to ${response.headers.location}`); - this.downloadFile(response.headers.location, destination, options) - .then(resolve) - .catch(reject); - return; - } - - if (response.statusCode !== 200) { - file.close(); - fs.unlink(destination, () => {}); - reject(new Error(`Download failed: ${response.statusCode} ${response.statusMessage}`)); - return; - } - - totalSize = parseInt(response.headers['content-length'], 10) || 0; - - response.on('data', (chunk) => { - downloadedSize += chunk.length; - - if (totalSize > 0) { - const progress = Math.round((downloadedSize / totalSize) * 100); - - // 이벤트 기반 진행률 보고는 각 서비스에서 직접 처리 - - // 기존 콜백 지원 (호환성 유지) - if (onProgress) { - onProgress(progress, downloadedSize, totalSize); - } - } - }); - - response.pipe(file); - - file.on('finish', () => { - file.close(() => { - // download-complete 이벤트는 각 서비스에서 직접 처리 - resolve({ success: true, size: downloadedSize }); - }); - }); - }); - - request.on('timeout', () => { - request.destroy(); - file.close(); - fs.unlink(destination, () => {}); - reject(new Error('Download timeout')); - }); - - request.on('error', (err) => { - file.close(); - fs.unlink(destination, () => {}); - this.emit('download-error', { url, error: err, modelId }); - reject(err); - }); - - request.setTimeout(timeout); - - file.on('error', (err) => { - fs.unlink(destination, () => {}); - reject(err); - }); - }); - } - - async downloadWithRetry(url, destination, options = {}) { - const { - maxRetries = 3, - retryDelay = 1000, - expectedChecksum = null, - modelId = null, // 모델 ID를 위한 추가 옵션 - ...downloadOptions - } = options; - - for (let attempt = 1; attempt <= maxRetries; attempt++) { - try { - const result = await this.downloadFile(url, destination, { - ...downloadOptions, - modelId - }); - - if (expectedChecksum) { - const isValid = await this.verifyChecksum(destination, expectedChecksum); - if (!isValid) { - fs.unlinkSync(destination); - throw new Error('Checksum 
verification failed'); - } - console.log(`[${this.serviceName}] Checksum verified successfully`); - } - - return result; - } catch (error) { - if (attempt === maxRetries) { - // download-error 이벤트는 각 서비스에서 직접 처리 - throw error; - } - - console.log(`Download attempt ${attempt} failed, retrying in ${retryDelay}ms...`); - await new Promise(resolve => setTimeout(resolve, retryDelay * attempt)); - } - } - } - - async verifyChecksum(filePath, expectedChecksum) { - return new Promise((resolve, reject) => { - const hash = crypto.createHash('sha256'); - const stream = fs.createReadStream(filePath); - - stream.on('data', (data) => hash.update(data)); - stream.on('end', () => { - const fileChecksum = hash.digest('hex'); - console.log(`[${this.serviceName}] File checksum: ${fileChecksum}`); - console.log(`[${this.serviceName}] Expected checksum: ${expectedChecksum}`); - resolve(fileChecksum === expectedChecksum); - }); - stream.on('error', reject); - }); - } -} - -module.exports = LocalAIServiceBase; \ No newline at end of file diff --git a/src/features/common/services/localProgressTracker.js b/src/features/common/services/localProgressTracker.js deleted file mode 100644 index 454b431..0000000 --- a/src/features/common/services/localProgressTracker.js +++ /dev/null @@ -1,138 +0,0 @@ -export class LocalProgressTracker { - constructor(serviceName) { - this.serviceName = serviceName; - this.activeOperations = new Map(); // operationId -> { controller, onProgress } - - // Check if we're in renderer process with window.api available - if (!window.api) { - throw new Error(`${serviceName} requires Electron environment with contextBridge`); - } - - this.globalProgressHandler = (event, data) => { - const operation = this.activeOperations.get(data.model || data.modelId); - if (operation && !operation.controller.signal.aborted) { - operation.onProgress(data.progress); - } - }; - - // Set up progress listeners based on service name - if (serviceName.toLowerCase() === 'ollama') { - 
window.api.settingsView.onOllamaPullProgress(this.globalProgressHandler); - } else if (serviceName.toLowerCase() === 'whisper') { - window.api.settingsView.onWhisperDownloadProgress(this.globalProgressHandler); - } - - this.progressEvent = serviceName.toLowerCase(); - } - - async trackOperation(operationId, operationType, onProgress) { - if (this.activeOperations.has(operationId)) { - throw new Error(`${operationType} ${operationId} is already in progress`); - } - - const controller = new AbortController(); - const operation = { controller, onProgress }; - this.activeOperations.set(operationId, operation); - - try { - let result; - - // Use appropriate API call based on service and operation - if (this.serviceName.toLowerCase() === 'ollama' && operationType === 'install') { - result = await window.api.settingsView.pullOllamaModel(operationId); - } else if (this.serviceName.toLowerCase() === 'whisper' && operationType === 'download') { - result = await window.api.settingsView.downloadWhisperModel(operationId); - } else { - throw new Error(`Unsupported operation: ${this.serviceName}:${operationType}`); - } - - if (!result.success) { - throw new Error(result.error || `${operationType} failed`); - } - - return true; - } catch (error) { - if (!controller.signal.aborted) { - throw error; - } - return false; - } finally { - this.activeOperations.delete(operationId); - } - } - - async installModel(modelName, onProgress) { - return this.trackOperation(modelName, 'install', onProgress); - } - - async downloadModel(modelId, onProgress) { - return this.trackOperation(modelId, 'download', onProgress); - } - - cancelOperation(operationId) { - const operation = this.activeOperations.get(operationId); - if (operation) { - operation.controller.abort(); - this.activeOperations.delete(operationId); - } - } - - cancelAllOperations() { - for (const [operationId, operation] of this.activeOperations) { - operation.controller.abort(); - } - this.activeOperations.clear(); - } - - 
isOperationActive(operationId) { - return this.activeOperations.has(operationId); - } - - getActiveOperations() { - return Array.from(this.activeOperations.keys()); - } - - destroy() { - this.cancelAllOperations(); - - // Remove progress listeners based on service name - if (this.progressEvent === 'ollama') { - window.api.settingsView.removeOnOllamaPullProgress(this.globalProgressHandler); - } else if (this.progressEvent === 'whisper') { - window.api.settingsView.removeOnWhisperDownloadProgress(this.globalProgressHandler); - } - } -} - -let trackers = new Map(); - -export function getLocalProgressTracker(serviceName) { - if (!trackers.has(serviceName)) { - trackers.set(serviceName, new LocalProgressTracker(serviceName)); - } - return trackers.get(serviceName); -} - -export function destroyLocalProgressTracker(serviceName) { - const tracker = trackers.get(serviceName); - if (tracker) { - tracker.destroy(); - trackers.delete(serviceName); - } -} - -export function destroyAllProgressTrackers() { - for (const [name, tracker] of trackers) { - tracker.destroy(); - } - trackers.clear(); -} - -// Legacy compatibility exports -export function getOllamaProgressTracker() { - return getLocalProgressTracker('ollama'); -} - -export function destroyOllamaProgressTracker() { - destroyLocalProgressTracker('ollama'); -} \ No newline at end of file diff --git a/src/features/common/services/modelStateService.js b/src/features/common/services/modelStateService.js index c331958..46242ae 100644 --- a/src/features/common/services/modelStateService.js +++ b/src/features/common/services/modelStateService.js @@ -1,11 +1,9 @@ const Store = require('electron-store'); const fetch = require('node-fetch'); const { EventEmitter } = require('events'); -const { BrowserWindow } = require('electron'); const { PROVIDERS, getProviderClass } = require('../ai/factory'); const encryptionService = require('./encryptionService'); const providerSettingsRepository = require('../repositories/providerSettings'); 
-const userModelSelectionsRepository = require('../repositories/userModelSelections'); // Import authService directly (singleton) const authService = require('./authService'); @@ -19,25 +17,54 @@ class ModelStateService extends EventEmitter { this.hasMigrated = false; } - // 모든 윈도우에 이벤트 브로드캐스트 - _broadcastToAllWindows(eventName, data = null) { - BrowserWindow.getAllWindows().forEach(win => { - if (win && !win.isDestroyed()) { - if (data !== null) { - win.webContents.send(eventName, data); - } else { - win.webContents.send(eventName); - } - } - }); - } async initialize() { console.log('[ModelStateService] Initializing...'); await this._loadStateForCurrentUser(); + + // LocalAI 상태 변경 이벤트 구독 + this.setupLocalAIStateSync(); + console.log('[ModelStateService] Initialization complete'); } + setupLocalAIStateSync() { + // LocalAI 서비스 상태 변경 감지 + // LocalAIManager에서 직접 이벤트를 받아 처리 + const localAIManager = require('./localAIManager'); + localAIManager.on('state-changed', (service, status) => { + this.handleLocalAIStateChange(service, status); + }); + } + + handleLocalAIStateChange(service, state) { + console.log(`[ModelStateService] LocalAI state changed: ${service}`, state); + + // Ollama의 경우 로드된 모델 정보도 처리 + if (service === 'ollama' && state.loadedModels) { + console.log(`[ModelStateService] Ollama loaded models: ${state.loadedModels.join(', ')}`); + + // 선택된 모델이 메모리에서 언로드되었는지 확인 + const selectedLLM = this.state.selectedModels.llm; + if (selectedLLM && this.getProviderForModel('llm', selectedLLM) === 'ollama') { + if (!state.loadedModels.includes(selectedLLM)) { + console.log(`[ModelStateService] Selected model ${selectedLLM} is not loaded in memory`); + // 필요시 자동 워밍업 트리거 + this._triggerAutoWarmUp(); + } + } + } + + // 자동 선택 재실행 (필요시) + if (!state.installed || !state.running) { + const types = service === 'ollama' ? ['llm'] : service === 'whisper' ? 
['stt'] : []; + this._autoSelectAvailableModels(types); + } + + // UI 업데이트 알림 + this.emit('state-updated', this.state); + } + _logCurrentSelection() { const llmModel = this.state.selectedModels.llm; const sttModel = this.state.selectedModels.stt; @@ -86,6 +113,66 @@ class ModelStateService extends EventEmitter { }); } + async _migrateUserModelSelections() { + console.log('[ModelStateService] Checking for user_model_selections migration...'); + const userId = this.authService.getCurrentUserId(); + + try { + // Check if user_model_selections table exists + const sqliteClient = require('./sqliteClient'); + const db = sqliteClient.getDb(); + + const tableExists = db.prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='user_model_selections'").get(); + + if (!tableExists) { + console.log('[ModelStateService] user_model_selections table does not exist, skipping migration'); + return; + } + + // Get existing user_model_selections data + const selections = db.prepare('SELECT * FROM user_model_selections WHERE uid = ?').get(userId); + + if (!selections) { + console.log('[ModelStateService] No user_model_selections data to migrate'); + return; + } + + console.log('[ModelStateService] Found user_model_selections data, migrating to provider_settings...'); + + // Migrate LLM selection + if (selections.llm_model) { + const llmProvider = this.getProviderForModel('llm', selections.llm_model); + if (llmProvider) { + await providerSettingsRepository.upsert(llmProvider, { + selected_llm_model: selections.llm_model + }); + await providerSettingsRepository.setActiveProvider(llmProvider, 'llm'); + console.log(`[ModelStateService] Migrated LLM: ${selections.llm_model} (provider: ${llmProvider})`); + } + } + + // Migrate STT selection + if (selections.stt_model) { + const sttProvider = this.getProviderForModel('stt', selections.stt_model); + if (sttProvider) { + await providerSettingsRepository.upsert(sttProvider, { + selected_stt_model: selections.stt_model + }); + await 
providerSettingsRepository.setActiveProvider(sttProvider, 'stt'); + console.log(`[ModelStateService] Migrated STT: ${selections.stt_model} (provider: ${sttProvider})`); + } + } + + // Delete the migrated data from user_model_selections + db.prepare('DELETE FROM user_model_selections WHERE uid = ?').run(userId); + console.log('[ModelStateService] user_model_selections migration completed'); + + } catch (error) { + console.error('[ModelStateService] user_model_selections migration failed:', error); + // Don't throw - continue with normal operation + } + } + async _migrateFromElectronStore() { console.log('[ModelStateService] Starting migration from electron-store to database...'); const userId = this.authService.getCurrentUserId(); @@ -115,17 +202,26 @@ class ModelStateService extends EventEmitter { } // Migrate global model selections - if (selectedModels.llm || selectedModels.stt) { - const llmProvider = selectedModels.llm ? this.getProviderForModel('llm', selectedModels.llm) : null; - const sttProvider = selectedModels.stt ? 
this.getProviderForModel('stt', selectedModels.stt) : null; - - await userModelSelectionsRepository.upsert({ - selected_llm_provider: llmProvider, - selected_llm_model: selectedModels.llm, - selected_stt_provider: sttProvider, - selected_stt_model: selectedModels.stt - }); - console.log('[ModelStateService] Migrated global model selections'); + if (selectedModels.llm) { + const llmProvider = this.getProviderForModel('llm', selectedModels.llm); + if (llmProvider) { + await providerSettingsRepository.upsert(llmProvider, { + selected_llm_model: selectedModels.llm + }); + await providerSettingsRepository.setActiveProvider(llmProvider, 'llm'); + console.log(`[ModelStateService] Migrated LLM model selection: ${selectedModels.llm}`); + } + } + + if (selectedModels.stt) { + const sttProvider = this.getProviderForModel('stt', selectedModels.stt); + if (sttProvider) { + await providerSettingsRepository.upsert(sttProvider, { + selected_stt_model: selectedModels.stt + }); + await providerSettingsRepository.setActiveProvider(sttProvider, 'stt'); + console.log(`[ModelStateService] Migrated STT model selection: ${selectedModels.stt}`); + } } // Mark migration as complete by removing legacy data @@ -159,11 +255,11 @@ class ModelStateService extends EventEmitter { } } - // Load global model selections - const modelSelections = await userModelSelectionsRepository.get(); + // Load active model selections from provider settings + const activeSettings = await providerSettingsRepository.getActiveSettings(); const selectedModels = { - llm: modelSelections?.selected_llm_model || null, - stt: modelSelections?.selected_stt_model || null + llm: activeSettings.llm?.selected_llm_model || null, + stt: activeSettings.stt?.selected_stt_model || null }; this.state = { @@ -197,6 +293,9 @@ class ModelStateService extends EventEmitter { // Initialize encryption service for current user await encryptionService.initializeKey(userId); + // Check for user_model_selections migration first + await 
this._migrateUserModelSelections(); + // Try to load from database first await this._loadStateFromDatabase(); @@ -232,17 +331,38 @@ class ModelStateService extends EventEmitter { } } - // Save global model selections - const llmProvider = this.state.selectedModels.llm ? this.getProviderForModel('llm', this.state.selectedModels.llm) : null; - const sttProvider = this.state.selectedModels.stt ? this.getProviderForModel('stt', this.state.selectedModels.stt) : null; + // Save model selections and update active providers + const llmModel = this.state.selectedModels.llm; + const sttModel = this.state.selectedModels.stt; - if (llmProvider || sttProvider || this.state.selectedModels.llm || this.state.selectedModels.stt) { - await userModelSelectionsRepository.upsert({ - selected_llm_provider: llmProvider, - selected_llm_model: this.state.selectedModels.llm, - selected_stt_provider: sttProvider, - selected_stt_model: this.state.selectedModels.stt - }); + if (llmModel) { + const llmProvider = this.getProviderForModel('llm', llmModel); + if (llmProvider) { + // Update the provider's selected model + await providerSettingsRepository.upsert(llmProvider, { + selected_llm_model: llmModel + }); + // Set as active LLM provider + await providerSettingsRepository.setActiveProvider(llmProvider, 'llm'); + } + } else { + // Deactivate all LLM providers if no model selected + await providerSettingsRepository.setActiveProvider(null, 'llm'); + } + + if (sttModel) { + const sttProvider = this.getProviderForModel('stt', sttModel); + if (sttProvider) { + // Update the provider's selected model + await providerSettingsRepository.upsert(sttProvider, { + selected_stt_model: sttModel + }); + // Set as active STT provider + await providerSettingsRepository.setActiveProvider(sttProvider, 'stt'); + } + } else { + // Deactivate all STT providers if no model selected + await providerSettingsRepository.setActiveProvider(null, 'stt'); } console.log(`[ModelStateService] State saved to database for user: 
${userId}`); @@ -344,8 +464,8 @@ class ModelStateService extends EventEmitter { this._autoSelectAvailableModels([]); - this._broadcastToAllWindows('model-state:updated', this.state); - this._broadcastToAllWindows('settings-updated'); + this.emit('state-updated', this.state); + this.emit('settings-updated'); } getApiKey(provider) { @@ -363,8 +483,8 @@ class ModelStateService extends EventEmitter { await providerSettingsRepository.remove(provider); await this._saveState(); this._autoSelectAvailableModels([]); - this._broadcastToAllWindows('model-state:updated', this.state); - this._broadcastToAllWindows('settings-updated'); + this.emit('state-updated', this.state); + this.emit('settings-updated'); return true; } return false; @@ -506,12 +626,21 @@ class ModelStateService extends EventEmitter { if (type === 'llm' && modelId && modelId !== previousModelId) { const provider = this.getProviderForModel('llm', modelId); if (provider === 'ollama') { - this._autoWarmUpOllamaModel(modelId, previousModelId); + const localAIManager = require('./localAIManager'); + if (localAIManager) { + console.log('[ModelStateService] Triggering Ollama model warm-up via LocalAIManager'); + localAIManager.warmUpModel(modelId).catch(error => { + console.warn('[ModelStateService] Model warm-up failed:', error); + }); + } else { + // fallback to old method + this._autoWarmUpOllamaModel(modelId, previousModelId); + } } } - this._broadcastToAllWindows('model-state:updated', this.state); - this._broadcastToAllWindows('settings-updated'); + this.emit('state-updated', this.state); + this.emit('settings-updated'); return true; } @@ -578,7 +707,7 @@ class ModelStateService extends EventEmitter { if (success) { const selectedModels = this.getSelectedModels(); if (!selectedModels.llm || !selectedModels.stt) { - this._broadcastToAllWindows('force-show-apikey-header'); + this.emit('force-show-apikey-header'); } } return success; diff --git a/src/features/common/services/ollamaService.js 
b/src/features/common/services/ollamaService.js index f631b09..b4830a0 100644 --- a/src/features/common/services/ollamaService.js +++ b/src/features/common/services/ollamaService.js @@ -1,56 +1,100 @@ -const { spawn } = require('child_process'); +const { EventEmitter } = require('events'); +const { spawn, exec } = require('child_process'); const { promisify } = require('util'); const fetch = require('node-fetch'); const path = require('path'); const fs = require('fs').promises; -const { app, BrowserWindow } = require('electron'); -const LocalAIServiceBase = require('./localAIServiceBase'); +const os = require('os'); +const https = require('https'); +const crypto = require('crypto'); +const { app } = require('electron'); const { spawnAsync } = require('../utils/spawnHelper'); const { DOWNLOAD_CHECKSUMS } = require('../config/checksums'); const ollamaModelRepository = require('../repositories/ollamaModel'); -class OllamaService extends LocalAIServiceBase { +const execAsync = promisify(exec); + +class OllamaService extends EventEmitter { constructor() { - super('OllamaService'); + super(); + this.serviceName = 'OllamaService'; this.baseUrl = 'http://localhost:11434'; + + // 단순화된 상태 관리 + this.installState = { + isInstalled: false, + isInstalling: false, + progress: 0 + }; + + // 단순화된 요청 관리 (복잡한 큐 제거) + this.activeRequest = null; + this.requestTimeout = 30000; // 30초 타임아웃 + + // 모델 상태 + this.installedModels = new Map(); + this.modelWarmupStatus = new Map(); + + // 체크포인트 시스템 (롤백용) + this.installCheckpoints = []; + + // 설치 진행률 관리 + this.installationProgress = new Map(); + + // 워밍 관련 (기존 유지) this.warmingModels = new Map(); this.warmedModels = new Set(); this.lastWarmUpAttempt = new Map(); - - // Request management system - this.activeRequests = new Map(); - this.requestTimeouts = new Map(); - this.healthStatus = { - lastHealthCheck: 0, - consecutive_failures: 0, - is_circuit_open: false - }; - - // Configuration - this.requestTimeout = 0; // Delete timeout 
this.warmupTimeout = 120000; // 120s for model warmup - this.healthCheckInterval = 60000; // 1min between health checks - this.circuitBreakerThreshold = 3; - this.circuitBreakerCooldown = 30000; // 30s - // Supported models are determined dynamically from installed models - this.supportedModels = {}; + // 상태 동기화 + this._lastState = null; + this._syncInterval = null; + this._lastLoadedModels = []; + this.modelLoadStatus = new Map(); - // Start health monitoring - this._startHealthMonitoring(); + // 서비스 종료 상태 추적 + this.isShuttingDown = false; } - // 모든 윈도우에 이벤트 브로드캐스트 - _broadcastToAllWindows(eventName, data = null) { - BrowserWindow.getAllWindows().forEach(win => { - if (win && !win.isDestroyed()) { - if (data !== null) { - win.webContents.send(eventName, data); - } else { - win.webContents.send(eventName); - } + + // Base class methods integration + getPlatform() { + return process.platform; + } + + async checkCommand(command) { + try { + const platform = this.getPlatform(); + const checkCmd = platform === 'win32' ? 
'where' : 'which'; + const { stdout } = await execAsync(`${checkCmd} ${command}`); + return stdout.trim(); + } catch (error) { + return null; + } + } + + async waitForService(checkFn, maxAttempts = 30, delayMs = 1000) { + for (let i = 0; i < maxAttempts; i++) { + if (await checkFn()) { + console.log(`[${this.serviceName}] Service is ready`); + return true; } - }); + await new Promise(resolve => setTimeout(resolve, delayMs)); + } + throw new Error(`${this.serviceName} service failed to start within timeout`); + } + + getInstallProgress(modelName) { + return this.installationProgress.get(modelName) || 0; + } + + setInstallProgress(modelName, progress) { + this.installationProgress.set(modelName, progress); + } + + clearInstallProgress(modelName) { + this.installationProgress.delete(modelName); } async getStatus() { @@ -80,133 +124,30 @@ class OllamaService extends LocalAIServiceBase { return 'ollama'; } - /** - * Professional request management with AbortController-based cancellation - */ - async _makeRequest(url, options = {}, operationType = 'default') { - const requestId = `${operationType}_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`; - - // Circuit breaker check - if (this._isCircuitOpen()) { - throw new Error('Service temporarily unavailable (circuit breaker open)'); + // === 런타임 관리 (단순화) === + async makeRequest(endpoint, options = {}) { + // 서비스 종료 중이면 요청하지 않음 + if (this.isShuttingDown) { + throw new Error('Service is shutting down'); } - // Request deduplication for health checks - if (operationType === 'health' && this.activeRequests.has('health')) { - console.log('[OllamaService] Health check already in progress, returning existing promise'); - return this.activeRequests.get('health'); + // 동시 요청 방지 (단순한 잠금) + if (this.activeRequest) { + await this.activeRequest; } - + const controller = new AbortController(); - const timeout = options.timeout || this.requestTimeout; - - // Set up timeout mechanism only if timeout > 0 - let timeoutId = null; - 
if (timeout > 0) { - timeoutId = setTimeout(() => { - controller.abort(); - this.activeRequests.delete(requestId); - this._recordFailure(); - }, timeout); - - this.requestTimeouts.set(requestId, timeoutId); - } - - const requestPromise = this._executeRequest(url, { + const timeoutId = setTimeout(() => controller.abort(), this.requestTimeout); + + this.activeRequest = fetch(`${this.baseUrl}${endpoint}`, { ...options, signal: controller.signal - }, requestId); - - // Store active request for deduplication and cleanup - this.activeRequests.set(operationType === 'health' ? 'health' : requestId, requestPromise); - - try { - const result = await requestPromise; - this._recordSuccess(); - return result; - } catch (error) { - this._recordFailure(); - if (error.name === 'AbortError') { - throw new Error(`Request timeout after ${timeout}ms`); - } - throw error; - } finally { - if (timeoutId !== null) { - clearTimeout(timeoutId); - this.requestTimeouts.delete(requestId); - } - this.activeRequests.delete(operationType === 'health' ? 
'health' : requestId); - } - } - - async _executeRequest(url, options, requestId) { - try { - console.log(`[OllamaService] Executing request ${requestId} to ${url}`); - const response = await fetch(url, options); - - if (!response.ok) { - throw new Error(`HTTP ${response.status}: ${response.statusText}`); - } - - return response; - } catch (error) { - console.error(`[OllamaService] Request ${requestId} failed:`, error.message); - throw error; - } - } - - _isCircuitOpen() { - if (!this.healthStatus.is_circuit_open) return false; - - // Check if cooldown period has passed - const now = Date.now(); - if (now - this.healthStatus.lastHealthCheck > this.circuitBreakerCooldown) { - console.log('[OllamaService] Circuit breaker cooldown expired, attempting recovery'); - this.healthStatus.is_circuit_open = false; - this.healthStatus.consecutive_failures = 0; - return false; - } - - return true; - } - - _recordSuccess() { - this.healthStatus.consecutive_failures = 0; - this.healthStatus.is_circuit_open = false; - this.healthStatus.lastHealthCheck = Date.now(); - } - - _recordFailure() { - this.healthStatus.consecutive_failures++; - this.healthStatus.lastHealthCheck = Date.now(); - - if (this.healthStatus.consecutive_failures >= this.circuitBreakerThreshold) { - console.warn(`[OllamaService] Circuit breaker opened after ${this.healthStatus.consecutive_failures} failures`); - this.healthStatus.is_circuit_open = true; - } - } - - _startHealthMonitoring() { - // Passive health monitoring - only when requests are made - console.log('[OllamaService] Health monitoring system initialized'); - } - - /** - * Cleanup all active requests and resources - */ - _cleanup() { - console.log(`[OllamaService] Cleaning up ${this.activeRequests.size} active requests`); - - // Cancel all active requests - for (const [requestId, promise] of this.activeRequests) { - if (this.requestTimeouts.has(requestId)) { - clearTimeout(this.requestTimeouts.get(requestId)); - 
this.requestTimeouts.delete(requestId); - } - } - - this.activeRequests.clear(); - this.requestTimeouts.clear(); + }).finally(() => { + clearTimeout(timeoutId); + this.activeRequest = null; + }); + + return this.activeRequest; } async isInstalled() { @@ -233,10 +174,11 @@ class OllamaService extends LocalAIServiceBase { async isServiceRunning() { try { - const response = await this._makeRequest(`${this.baseUrl}/api/tags`, { - method: 'GET', - timeout: this.requestTimeout - }, 'health'); + // Use /api/ps to check if service is running + // This is more reliable than /api/tags which may not show models not in memory + const response = await this.makeRequest('/api/ps', { + method: 'GET' + }); return response.ok; } catch (error) { @@ -246,6 +188,9 @@ class OllamaService extends LocalAIServiceBase { } async startService() { + // 서비스 시작 시 종료 플래그 리셋 + this.isShuttingDown = false; + const platform = this.getPlatform(); try { @@ -281,12 +226,69 @@ class OllamaService extends LocalAIServiceBase { return await this.shutdown(); } - async getInstalledModels() { + // Comprehensive health check using multiple endpoints + async healthCheck() { try { - const response = await this._makeRequest(`${this.baseUrl}/api/tags`, { - method: 'GET', - timeout: this.requestTimeout - }, 'models'); + const checks = { + serviceRunning: false, + apiResponsive: false, + modelsAccessible: false, + memoryStatus: false + }; + + // 1. Basic service check with /api/ps + try { + const psResponse = await this.makeRequest('/api/ps', { method: 'GET' }); + checks.serviceRunning = psResponse.ok; + checks.memoryStatus = psResponse.ok; + } catch (error) { + console.log('[OllamaService] /api/ps check failed:', error.message); + } + + // 2. Check if API is responsive with root endpoint + try { + const rootResponse = await this.makeRequest('/', { method: 'GET' }); + checks.apiResponsive = rootResponse.ok; + } catch (error) { + console.log('[OllamaService] Root endpoint check failed:', error.message); + } + + // 3. 
Check if models endpoint is accessible + try { + const tagsResponse = await this.makeRequest('/api/tags', { method: 'GET' }); + checks.modelsAccessible = tagsResponse.ok; + } catch (error) { + console.log('[OllamaService] /api/tags check failed:', error.message); + } + + const allHealthy = Object.values(checks).every(v => v === true); + + return { + healthy: allHealthy, + checks, + timestamp: new Date().toISOString() + }; + } catch (error) { + console.error('[OllamaService] Health check failed:', error); + return { + healthy: false, + error: error.message, + timestamp: new Date().toISOString() + }; + } + } + + async getInstalledModels() { + // 서비스 종료 중이면 빈 배열 반환 + if (this.isShuttingDown) { + console.log('[OllamaService] Service is shutting down, returning empty models list'); + return []; + } + + try { + const response = await this.makeRequest('/api/tags', { + method: 'GET' + }); const data = await response.json(); return data.models || []; @@ -296,6 +298,59 @@ class OllamaService extends LocalAIServiceBase { } } + // Get models currently loaded in memory using /api/ps + async getLoadedModels() { + // 서비스 종료 중이면 빈 배열 반환 + if (this.isShuttingDown) { + console.log('[OllamaService] Service is shutting down, returning empty loaded models list'); + return []; + } + + try { + const response = await this.makeRequest('/api/ps', { + method: 'GET' + }); + + if (!response.ok) { + console.log('[OllamaService] Failed to get loaded models via /api/ps'); + return []; + } + + const data = await response.json(); + // Extract model names from running processes + return (data.models || []).map(m => m.name); + } catch (error) { + console.error('[OllamaService] Error getting loaded models:', error); + return []; + } + } + + // Get detailed memory info for loaded models + async getLoadedModelsWithMemoryInfo() { + try { + const response = await this.makeRequest('/api/ps', { + method: 'GET' + }); + + if (!response.ok) { + return []; + } + + const data = await response.json(); + // Return 
full model info including memory usage + return data.models || []; + } catch (error) { + console.error('[OllamaService] Error getting loaded models info:', error); + return []; + } + } + + // Check if a specific model is loaded in memory + async isModelLoaded(modelName) { + const loadedModels = await this.getLoadedModels(); + return loadedModels.includes(modelName); + } + async getInstalledModelsList() { try { const { stdout } = await spawnAsync(this.getOllamaCliPath(), ['list']); @@ -360,6 +415,13 @@ class OllamaService extends LocalAIServiceBase { console.log(`[OllamaService] Starting to pull model: ${modelName} via API`); + // Emit progress event - LocalAIManager가 처리 + this.emit('install-progress', { + model: modelName, + progress: 0, + status: 'starting' + }); + try { const response = await fetch(`${this.baseUrl}/api/pull`, { method: 'POST', @@ -395,7 +457,8 @@ class OllamaService extends LocalAIServiceBase { if (progress !== null) { this.setInstallProgress(modelName, progress); - this._broadcastToAllWindows('ollama:pull-progress', { + // Emit progress event - LocalAIManager가 처리 + this.emit('install-progress', { model: modelName, progress, status: data.status || 'downloading' @@ -406,7 +469,7 @@ class OllamaService extends LocalAIServiceBase { // Handle completion if (data.status === 'success') { console.log(`[OllamaService] Successfully pulled model: ${modelName}`); - this._broadcastToAllWindows('ollama:pull-complete', { model: modelName }); + this.emit('model-pull-complete', { model: modelName }); this.clearInstallProgress(modelName); resolve(); return; @@ -424,7 +487,7 @@ class OllamaService extends LocalAIServiceBase { const data = JSON.parse(buffer); if (data.status === 'success') { console.log(`[OllamaService] Successfully pulled model: ${modelName}`); - this._broadcastToAllWindows('ollama:pull-complete', { model: modelName }); + this.emit('model-pull-complete', { model: modelName }); } } catch (parseError) { console.warn('[OllamaService] Failed to parse 
final buffer:', buffer); @@ -477,6 +540,163 @@ class OllamaService extends LocalAIServiceBase { + async downloadFile(url, destination, options = {}) { + const { + onProgress = null, + headers = { 'User-Agent': 'Glass-App' }, + timeout = 300000, + modelId = null + } = options; + + return new Promise((resolve, reject) => { + const file = require('fs').createWriteStream(destination); + let downloadedSize = 0; + let totalSize = 0; + + const request = https.get(url, { headers }, (response) => { + if ([301, 302, 307, 308].includes(response.statusCode)) { + file.close(); + require('fs').unlink(destination, () => {}); + + if (!response.headers.location) { + reject(new Error('Redirect without location header')); + return; + } + + console.log(`[${this.serviceName}] Following redirect from ${url} to ${response.headers.location}`); + this.downloadFile(response.headers.location, destination, options) + .then(resolve) + .catch(reject); + return; + } + + if (response.statusCode !== 200) { + file.close(); + require('fs').unlink(destination, () => {}); + reject(new Error(`Download failed: ${response.statusCode} ${response.statusMessage}`)); + return; + } + + totalSize = parseInt(response.headers['content-length'], 10) || 0; + + response.on('data', (chunk) => { + downloadedSize += chunk.length; + + if (totalSize > 0) { + const progress = Math.round((downloadedSize / totalSize) * 100); + + if (onProgress) { + onProgress(progress, downloadedSize, totalSize); + } + } + }); + + response.pipe(file); + + file.on('finish', () => { + file.close(() => { + resolve({ success: true, size: downloadedSize }); + }); + }); + }); + + request.on('timeout', () => { + request.destroy(); + file.close(); + require('fs').unlink(destination, () => {}); + reject(new Error('Download timeout')); + }); + + request.on('error', (err) => { + file.close(); + require('fs').unlink(destination, () => {}); + this.emit('download-error', { url, error: err, modelId }); + reject(err); + }); + + 
request.setTimeout(timeout); + + file.on('error', (err) => { + require('fs').unlink(destination, () => {}); + reject(err); + }); + }); + } + + async downloadWithRetry(url, destination, options = {}) { + const { + maxRetries = 3, + retryDelay = 1000, + expectedChecksum = null, + modelId = null, + ...downloadOptions + } = options; + + for (let attempt = 1; attempt <= maxRetries; attempt++) { + try { + const result = await this.downloadFile(url, destination, { + ...downloadOptions, + modelId + }); + + if (expectedChecksum) { + const isValid = await this.verifyChecksum(destination, expectedChecksum); + if (!isValid) { + require('fs').unlinkSync(destination); + throw new Error('Checksum verification failed'); + } + console.log(`[${this.serviceName}] Checksum verified successfully`); + } + + return result; + } catch (error) { + if (attempt === maxRetries) { + throw error; + } + + console.log(`Download attempt ${attempt} failed, retrying in ${retryDelay}ms...`); + await new Promise(resolve => setTimeout(resolve, retryDelay * attempt)); + } + } + } + + async verifyChecksum(filePath, expectedChecksum) { + return new Promise((resolve, reject) => { + const hash = crypto.createHash('sha256'); + const stream = require('fs').createReadStream(filePath); + + stream.on('data', (data) => hash.update(data)); + stream.on('end', () => { + const fileChecksum = hash.digest('hex'); + console.log(`[${this.serviceName}] File checksum: ${fileChecksum}`); + console.log(`[${this.serviceName}] Expected checksum: ${expectedChecksum}`); + resolve(fileChecksum === expectedChecksum); + }); + stream.on('error', reject); + }); + } + + async autoInstall(onProgress) { + const platform = this.getPlatform(); + console.log(`[${this.serviceName}] Starting auto-installation for ${platform}`); + + try { + switch(platform) { + case 'darwin': + return await this.installMacOS(onProgress); + case 'win32': + return await this.installWindows(onProgress); + case 'linux': + return await this.installLinux(); + 
default: + throw new Error(`Unsupported platform: ${platform}`); + } + } catch (error) { + console.error(`[${this.serviceName}] Auto-installation failed:`, error); + throw error; + } + } + async installMacOS(onProgress) { console.log('[OllamaService] Installing Ollama on macOS using DMG...'); @@ -486,6 +706,9 @@ class OllamaService extends LocalAIServiceBase { const dmgPath = path.join(tempDir, 'Ollama.dmg'); const mountPoint = path.join(tempDir, 'OllamaMount'); + // 체크포인트 저장 + await this.saveCheckpoint('pre-install'); + console.log('[OllamaService] Step 1: Downloading Ollama DMG...'); onProgress?.({ stage: 'downloading', message: 'Downloading Ollama installer...', progress: 0 }); const checksumInfo = DOWNLOAD_CHECKSUMS.ollama.dmg; @@ -496,6 +719,8 @@ class OllamaService extends LocalAIServiceBase { } }); + await this.saveCheckpoint('post-download'); + console.log('[OllamaService] Step 2: Mounting DMG...'); onProgress?.({ stage: 'mounting', message: 'Mounting disk image...', progress: 0 }); await fs.mkdir(mountPoint, { recursive: true }); @@ -507,6 +732,8 @@ class OllamaService extends LocalAIServiceBase { await spawnAsync('cp', ['-R', `${mountPoint}/Ollama.app`, '/Applications/']); onProgress?.({ stage: 'installing', message: 'Application installed.', progress: 100 }); + await this.saveCheckpoint('post-install'); + console.log('[OllamaService] Step 4: Setting up CLI path...'); onProgress?.({ stage: 'linking', message: 'Creating command-line shortcut...', progress: 0 }); try { @@ -533,6 +760,8 @@ class OllamaService extends LocalAIServiceBase { return true; } catch (error) { console.error('[OllamaService] macOS installation failed:', error); + // 설치 실패 시 정리 + await fs.unlink(dmgPath).catch(() => {}); throw new Error(`Failed to install Ollama on macOS: ${error.message}`); } } @@ -586,7 +815,135 @@ class OllamaService extends LocalAIServiceBase { throw new Error('Manual installation required on Linux. 
Please visit https://ollama.com/download/linux'); } + // === 체크포인트 & 롤백 시스템 === + async saveCheckpoint(name) { + this.installCheckpoints.push({ + name, + timestamp: Date.now(), + state: { ...this.installState } + }); + } + async rollbackToLastCheckpoint() { + const checkpoint = this.installCheckpoints.pop(); + if (checkpoint) { + console.log(`[OllamaService] Rolling back to checkpoint: ${checkpoint.name}`); + // 플랫폼별 롤백 로직 실행 + await this._executeRollback(checkpoint); + } + } + + async _executeRollback(checkpoint) { + const platform = this.getPlatform(); + + if (platform === 'darwin' && checkpoint.name === 'post-install') { + // macOS 롤백 + await fs.rm('/Applications/Ollama.app', { recursive: true, force: true }).catch(() => {}); + } else if (platform === 'win32') { + // Windows 롤백 (레지스트리 등) + // TODO: Windows 롤백 구현 + } + + this.installState = checkpoint.state; + } + + // === 상태 동기화 (내부 처리) === + async syncState() { + // 서비스 종료 중이면 스킵 + if (this.isShuttingDown) { + console.log('[OllamaService] Service is shutting down, skipping state sync'); + return this.installState; + } + + try { + const isInstalled = await this.isInstalled(); + const isRunning = await this.isServiceRunning(); + const models = isRunning && !this.isShuttingDown ? await this.getInstalledModels() : []; + const loadedModels = isRunning && !this.isShuttingDown ? 
await this.getLoadedModels() : []; + + // 상태 업데이트 + this.installState.isInstalled = isInstalled; + this.installState.isRunning = isRunning; + this.installState.lastSync = Date.now(); + + // 메모리 로드 상태 추적 + const previousLoadedModels = this._lastLoadedModels || []; + const loadedChanged = loadedModels.length !== previousLoadedModels.length || + !loadedModels.every(m => previousLoadedModels.includes(m)); + + if (loadedChanged) { + console.log(`[OllamaService] Loaded models changed: ${loadedModels.join(', ')}`); + this._lastLoadedModels = loadedModels; + + // 메모리에서 언로드된 모델의 warmed 상태 제거 + for (const modelName of this.warmedModels) { + if (!loadedModels.includes(modelName)) { + this.warmedModels.delete(modelName); + console.log(`[OllamaService] Model ${modelName} unloaded from memory, removing warmed state`); + } + } + } + + // 모델 상태 DB 업데이트 + if (isRunning && models.length > 0) { + for (const model of models) { + try { + const isLoaded = loadedModels.includes(model.name); + // DB에는 installed 상태만 저장, loaded 상태는 메모리에서 관리 + await ollamaModelRepository.updateInstallStatus(model.name, true, false); + + // 로드 상태를 인스턴스 변수에 저장 + if (!this.modelLoadStatus) { + this.modelLoadStatus = new Map(); + } + this.modelLoadStatus.set(model.name, isLoaded); + } catch (dbError) { + console.warn(`[OllamaService] Failed to update DB for model ${model.name}:`, dbError); + } + } + } + + // UI 알림 (상태 변경 시만) + if (this._lastState?.isRunning !== isRunning || + this._lastState?.isInstalled !== isInstalled || + loadedChanged) { + // Emit state change event - LocalAIManager가 처리 + this.emit('state-changed', { + installed: isInstalled, + running: isRunning, + models: models.length, + loadedModels: loadedModels + }); + } + + this._lastState = { isInstalled, isRunning, modelsCount: models.length }; + return { isInstalled, isRunning, models }; + + } catch (error) { + console.error('[OllamaService] State sync failed:', error); + return { + isInstalled: this.installState.isInstalled || false, + isRunning: 
false, + models: [] + }; + } + } + + // 주기적 동기화 시작 + startPeriodicSync() { + if (this._syncInterval) return; + + this._syncInterval = setInterval(() => { + this.syncState(); + }, 30000); // 30초마다 + } + + stopPeriodicSync() { + if (this._syncInterval) { + clearInterval(this._syncInterval); + this._syncInterval = null; + } + } async warmUpModel(modelName, forceRefresh = false) { if (!modelName?.trim()) { @@ -638,7 +995,7 @@ class OllamaService extends LocalAIServiceBase { console.log(`[OllamaService] Starting warm-up for model: ${modelName}`); try { - const response = await this._makeRequest(`${this.baseUrl}/api/chat`, { + const response = await this.makeRequest('/api/chat', { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ @@ -651,9 +1008,8 @@ class OllamaService extends LocalAIServiceBase { num_predict: 1, // Minimal response temperature: 0 } - }), - timeout: this.warmupTimeout - }, `warmup_${modelName}`); + }) + }); return true; } catch (error) { @@ -670,7 +1026,7 @@ class OllamaService extends LocalAIServiceBase { await ollamaModelRepository.updateInstallStatus(modelName, true, false); // Retry warm-up after installation - const retryResponse = await this._makeRequest(`${this.baseUrl}/api/chat`, { + const retryResponse = await this.makeRequest('/api/chat', { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ @@ -683,9 +1039,8 @@ class OllamaService extends LocalAIServiceBase { num_predict: 1, temperature: 0 } - }), - timeout: this.warmupTimeout - }, `warmup_retry_${modelName}`); + }) + }); console.log(`[OllamaService] Successfully warmed up model ${modelName} after installation`); return true; @@ -731,7 +1086,14 @@ class OllamaService extends LocalAIServiceBase { // 설치 여부 체크 제거 - _performWarmUp에서 자동으로 설치 처리 console.log(`[OllamaService] Auto-warming up selected model: ${llmModelId} (will auto-install if needed)`); - return await this.warmUpModel(llmModelId); + const result = await 
this.warmUpModel(llmModelId); + + // 성공 시 LocalAIManager에 알림 + if (result) { + this.emit('model-warmed-up', { model: llmModelId }); + } + + return result; } catch (error) { console.error('[OllamaService] Auto warm-up failed:', error); @@ -746,10 +1108,13 @@ class OllamaService extends LocalAIServiceBase { console.log('[OllamaService] Warm-up cache cleared'); } - getWarmUpStatus() { + async getWarmUpStatus() { + const loadedModels = await this.getLoadedModels(); + return { warmedModels: Array.from(this.warmedModels), warmingModels: Array.from(this.warmingModels.keys()), + loadedModels: loadedModels, // Models actually loaded in memory lastAttempts: Object.fromEntries(this.lastWarmUpAttempt) }; } @@ -757,6 +1122,9 @@ class OllamaService extends LocalAIServiceBase { async shutdown(force = false) { console.log(`[OllamaService] Shutdown initiated (force: ${force})`); + // 종료 중 플래그 설정 + this.isShuttingDown = true; + if (!force && this.warmingModels.size > 0) { const warmingList = Array.from(this.warmingModels.keys()); console.log(`[OllamaService] Waiting for ${warmingList.length} models to finish warming: ${warmingList.join(', ')}`); @@ -773,39 +1141,81 @@ class OllamaService extends LocalAIServiceBase { } // Clean up all resources - this._cleanup(); this._clearWarmUpCache(); + this.stopPeriodicSync(); - return super.shutdown(force); + // 프로세스 종료 + const isRunning = await this.isServiceRunning(); + if (!isRunning) { + console.log('[OllamaService] Service not running, nothing to shutdown'); + return true; + } + + const platform = this.getPlatform(); + + try { + switch(platform) { + case 'darwin': + return await this.shutdownMacOS(force); + case 'win32': + return await this.shutdownWindows(force); + case 'linux': + return await this.shutdownLinux(force); + default: + console.warn(`[OllamaService] Unsupported platform for shutdown: ${platform}`); + return false; + } + } catch (error) { + console.error(`[OllamaService] Error during shutdown:`, error); + return false; + } } 
async shutdownMacOS(force) { try { - // Try to quit Ollama.app gracefully - await spawnAsync('osascript', ['-e', 'tell application "Ollama" to quit']); - console.log('[OllamaService] Ollama.app quit successfully'); + // 1. First, try to kill ollama server process + console.log('[OllamaService] Killing ollama server process...'); + try { + await spawnAsync('pkill', ['-f', 'ollama serve']); + } catch (e) { + // Process might not be running + } - // Wait a moment for graceful shutdown + // 2. Then quit the Ollama.app + console.log('[OllamaService] Quitting Ollama.app...'); + try { + await spawnAsync('osascript', ['-e', 'tell application "Ollama" to quit']); + } catch (e) { + console.log('[OllamaService] Ollama.app might not be running'); + } + + // 3. Wait a moment for shutdown await new Promise(resolve => setTimeout(resolve, 2000)); - // Check if still running - const stillRunning = await this.isServiceRunning(); - if (stillRunning) { - console.log('[OllamaService] Ollama still running, forcing shutdown'); - // Force kill if necessary - await spawnAsync('pkill', ['-f', this.getOllamaCliPath()]); + // 4. Force kill any remaining ollama processes + if (force || await this.isServiceRunning()) { + console.log('[OllamaService] Force killing any remaining ollama processes...'); + try { + // Kill all ollama processes + await spawnAsync('pkill', ['-9', '-f', 'ollama']); + } catch (e) { + // Ignore errors - process might not exist + } } - return true; - } catch (error) { - console.log('[OllamaService] Graceful quit failed, trying force kill'); - try { - await spawnAsync('pkill', ['-f', this.getOllamaCliPath()]); - return true; - } catch (killError) { - console.error('[OllamaService] Failed to force kill Ollama:', killError); + // 5. 
Final check + await new Promise(resolve => setTimeout(resolve, 1000)); + const stillRunning = await this.isServiceRunning(); + if (stillRunning) { + console.warn('[OllamaService] Warning: Ollama may still be running'); return false; } + + console.log('[OllamaService] Ollama shutdown complete'); + return true; + } catch (error) { + console.error('[OllamaService] Shutdown error:', error); + return false; } } @@ -845,8 +1255,15 @@ class OllamaService extends LocalAIServiceBase { // Get all installed models directly from Ollama const installedModels = await this.getInstalledModels(); + // Get loaded models from memory + const loadedModels = await this.getLoadedModels(); + const models = []; for (const model of installedModels) { + const isWarmingUp = this.warmingModels.has(model.name); + const isWarmedUp = this.warmedModels.has(model.name); + const isLoaded = loadedModels.includes(model.name); + models.push({ name: model.name, displayName: model.name, // Use model name as display name @@ -854,7 +1271,11 @@ class OllamaService extends LocalAIServiceBase { description: `Ollama model: ${model.name}`, installed: true, installing: this.installationProgress.has(model.name), - progress: this.getInstallProgress(model.name) + progress: this.getInstallProgress(model.name), + warmedUp: isWarmedUp, + isWarmingUp, + isLoaded, // Actually loaded in memory + status: isWarmingUp ? 'warming' : (isLoaded ? 'loaded' : (isWarmedUp ? 
'ready' : 'cold')) }); } @@ -899,21 +1320,38 @@ class OllamaService extends LocalAIServiceBase { async handleInstall() { try { const onProgress = (data) => { - this._broadcastToAllWindows('ollama:install-progress', data); + // Emit progress event - LocalAIManager가 처리 + this.emit('install-progress', data); }; await this.autoInstall(onProgress); + + // 설치 검증 + onProgress({ stage: 'verifying', message: 'Verifying installation...', progress: 0 }); + const verifyResult = await this.verifyInstallation(); + if (!verifyResult.success) { + throw new Error(`Installation verification failed: ${verifyResult.error}`); + } + onProgress({ stage: 'verifying', message: 'Installation verified.', progress: 100 }); if (!await this.isServiceRunning()) { onProgress({ stage: 'starting', message: 'Starting Ollama service...', progress: 0 }); await this.startService(); onProgress({ stage: 'starting', message: 'Ollama service started.', progress: 100 }); } - this._broadcastToAllWindows('ollama:install-complete', { success: true }); + + this.installState.isInstalled = true; + // Emit completion event - LocalAIManager가 처리 + this.emit('installation-complete'); return { success: true }; } catch (error) { console.error('[OllamaService] Failed to install:', error); - this._broadcastToAllWindows('ollama:install-complete', { success: false, error: error.message }); + await this.rollbackToLastCheckpoint(); + // Emit error event - LocalAIManager가 처리 + this.emit('error', { + errorType: 'installation-failed', + error: error.message + }); return { success: false, error: error.message }; } } @@ -981,7 +1419,12 @@ class OllamaService extends LocalAIServiceBase { } catch (error) { console.error('[OllamaService] Failed to pull model:', error); await ollamaModelRepository.updateInstallStatus(modelName, false, false); - this._broadcastToAllWindows('ollama:pull-error', { model: modelName, error: error.message }); + // Emit error event - LocalAIManager가 처리 + this.emit('error', { + errorType: 
'model-pull-failed', + model: modelName, + error: error.message + }); return { success: false, error: error.message }; } } @@ -1018,7 +1461,7 @@ class OllamaService extends LocalAIServiceBase { async handleGetWarmUpStatus() { try { - const status = this.getWarmUpStatus(); + const status = await this.getWarmUpStatus(); return { success: true, status }; } catch (error) { console.error('[OllamaService] Failed to get warm-up status:', error); @@ -1030,12 +1473,59 @@ class OllamaService extends LocalAIServiceBase { try { console.log(`[OllamaService] Manual shutdown requested (force: ${force})`); const success = await this.shutdown(force); + + // 종료 후 상태 업데이트 및 플래그 리셋 + if (success) { + // 종료 완료 후 플래그 리셋 + this.isShuttingDown = false; + await this.syncState(); + } + return { success }; } catch (error) { console.error('[OllamaService] Failed to shutdown Ollama:', error); return { success: false, error: error.message }; } } + + // 설치 검증 + async verifyInstallation() { + try { + console.log('[OllamaService] Verifying installation...'); + + // 1. 바이너리 확인 + const isInstalled = await this.isInstalled(); + if (!isInstalled) { + return { success: false, error: 'Ollama binary not found' }; + } + + // 2. CLI 명령 테스트 + try { + const { stdout } = await spawnAsync(this.getOllamaCliPath(), ['--version']); + console.log('[OllamaService] Ollama version:', stdout.trim()); + } catch (error) { + return { success: false, error: 'Ollama CLI not responding' }; + } + + // 3. 
서비스 시작 가능 여부 확인 + const platform = this.getPlatform(); + if (platform === 'darwin') { + // macOS: 앱 번들 확인 + try { + await fs.access('/Applications/Ollama.app/Contents/MacOS/Ollama'); + } catch (error) { + return { success: false, error: 'Ollama.app executable not found' }; + } + } + + console.log('[OllamaService] Installation verified successfully'); + return { success: true }; + + } catch (error) { + console.error('[OllamaService] Verification failed:', error); + return { success: false, error: error.message }; + } + } } // Export singleton instance diff --git a/src/features/common/services/whisperService.js b/src/features/common/services/whisperService.js index 4f44cd1..e676913 100644 --- a/src/features/common/services/whisperService.js +++ b/src/features/common/services/whisperService.js @@ -1,21 +1,40 @@ -const { spawn } = require('child_process'); +const { EventEmitter } = require('events'); +const { spawn, exec } = require('child_process'); +const { promisify } = require('util'); const path = require('path'); const fs = require('fs'); const os = require('os'); -const { BrowserWindow } = require('electron'); -const LocalAIServiceBase = require('./localAIServiceBase'); +const https = require('https'); +const crypto = require('crypto'); const { spawnAsync } = require('../utils/spawnHelper'); const { DOWNLOAD_CHECKSUMS } = require('../config/checksums'); +const execAsync = promisify(exec); + const fsPromises = fs.promises; -class WhisperService extends LocalAIServiceBase { +class WhisperService extends EventEmitter { constructor() { - super('WhisperService'); - this.isInitialized = false; + super(); + this.serviceName = 'WhisperService'; + + // 경로 및 디렉토리 this.whisperPath = null; this.modelsDir = null; this.tempDir = null; + + // 세션 관리 (세션 풀 내장) + this.sessionPool = []; + this.activeSessions = new Map(); + this.maxSessions = 3; + + // 설치 상태 + this.installState = { + isInstalled: false, + isInitialized: false + }; + + // 사용 가능한 모델 this.availableModels = { 
'whisper-tiny': { name: 'Tiny', @@ -40,21 +59,222 @@ class WhisperService extends LocalAIServiceBase { }; } - // 모든 윈도우에 이벤트 브로드캐스트 - _broadcastToAllWindows(eventName, data = null) { - BrowserWindow.getAllWindows().forEach(win => { - if (win && !win.isDestroyed()) { - if (data !== null) { - win.webContents.send(eventName, data); - } else { - win.webContents.send(eventName); - } + + // Base class methods integration + getPlatform() { + return process.platform; + } + + async checkCommand(command) { + try { + const platform = this.getPlatform(); + const checkCmd = platform === 'win32' ? 'where' : 'which'; + const { stdout } = await execAsync(`${checkCmd} ${command}`); + return stdout.trim(); + } catch (error) { + return null; + } + } + + async waitForService(checkFn, maxAttempts = 30, delayMs = 1000) { + for (let i = 0; i < maxAttempts; i++) { + if (await checkFn()) { + console.log(`[${this.serviceName}] Service is ready`); + return true; } + await new Promise(resolve => setTimeout(resolve, delayMs)); + } + throw new Error(`${this.serviceName} service failed to start within timeout`); + } + + async downloadFile(url, destination, options = {}) { + const { + onProgress = null, + headers = { 'User-Agent': 'Glass-App' }, + timeout = 300000, + modelId = null + } = options; + + return new Promise((resolve, reject) => { + const file = fs.createWriteStream(destination); + let downloadedSize = 0; + let totalSize = 0; + + const request = https.get(url, { headers }, (response) => { + if ([301, 302, 307, 308].includes(response.statusCode)) { + file.close(); + fs.unlink(destination, () => {}); + + if (!response.headers.location) { + reject(new Error('Redirect without location header')); + return; + } + + console.log(`[${this.serviceName}] Following redirect from ${url} to ${response.headers.location}`); + this.downloadFile(response.headers.location, destination, options) + .then(resolve) + .catch(reject); + return; + } + + if (response.statusCode !== 200) { + file.close(); + 
fs.unlink(destination, () => {}); + reject(new Error(`Download failed: ${response.statusCode} ${response.statusMessage}`)); + return; + } + + totalSize = parseInt(response.headers['content-length'], 10) || 0; + + response.on('data', (chunk) => { + downloadedSize += chunk.length; + + if (totalSize > 0) { + const progress = Math.round((downloadedSize / totalSize) * 100); + + if (onProgress) { + onProgress(progress, downloadedSize, totalSize); + } + } + }); + + response.pipe(file); + + file.on('finish', () => { + file.close(() => { + resolve({ success: true, size: downloadedSize }); + }); + }); + }); + + request.on('timeout', () => { + request.destroy(); + file.close(); + fs.unlink(destination, () => {}); + reject(new Error('Download timeout')); + }); + + request.on('error', (err) => { + file.close(); + fs.unlink(destination, () => {}); + this.emit('download-error', { url, error: err, modelId }); + reject(err); + }); + + request.setTimeout(timeout); + + file.on('error', (err) => { + fs.unlink(destination, () => {}); + reject(err); + }); }); } + async downloadWithRetry(url, destination, options = {}) { + const { + maxRetries = 3, + retryDelay = 1000, + expectedChecksum = null, + modelId = null, + ...downloadOptions + } = options; + + for (let attempt = 1; attempt <= maxRetries; attempt++) { + try { + const result = await this.downloadFile(url, destination, { + ...downloadOptions, + modelId + }); + + if (expectedChecksum) { + const isValid = await this.verifyChecksum(destination, expectedChecksum); + if (!isValid) { + fs.unlinkSync(destination); + throw new Error('Checksum verification failed'); + } + console.log(`[${this.serviceName}] Checksum verified successfully`); + } + + return result; + } catch (error) { + if (attempt === maxRetries) { + throw error; + } + + console.log(`Download attempt ${attempt} failed, retrying in ${retryDelay}ms...`); + await new Promise(resolve => setTimeout(resolve, retryDelay * attempt)); + } + } + } + + async verifyChecksum(filePath, 
expectedChecksum) { + return new Promise((resolve, reject) => { + const hash = crypto.createHash('sha256'); + const stream = fs.createReadStream(filePath); + + stream.on('data', (data) => hash.update(data)); + stream.on('end', () => { + const fileChecksum = hash.digest('hex'); + console.log(`[${this.serviceName}] File checksum: ${fileChecksum}`); + console.log(`[${this.serviceName}] Expected checksum: ${expectedChecksum}`); + resolve(fileChecksum === expectedChecksum); + }); + stream.on('error', reject); + }); + } + + async autoInstall(onProgress) { + const platform = this.getPlatform(); + console.log(`[${this.serviceName}] Starting auto-installation for ${platform}`); + + try { + switch(platform) { + case 'darwin': + return await this.installMacOS(onProgress); + case 'win32': + return await this.installWindows(onProgress); + case 'linux': + return await this.installLinux(); + default: + throw new Error(`Unsupported platform: ${platform}`); + } + } catch (error) { + console.error(`[${this.serviceName}] Auto-installation failed:`, error); + throw error; + } + } + + async shutdown(force = false) { + console.log(`[${this.serviceName}] Starting ${force ? 
'forced' : 'graceful'} shutdown...`); + + const isRunning = await this.isServiceRunning(); + if (!isRunning) { + console.log(`[${this.serviceName}] Service not running, nothing to shutdown`); + return true; + } + + const platform = this.getPlatform(); + + try { + switch(platform) { + case 'darwin': + return await this.shutdownMacOS(force); + case 'win32': + return await this.shutdownWindows(force); + case 'linux': + return await this.shutdownLinux(force); + default: + console.warn(`[${this.serviceName}] Unsupported platform for shutdown: ${platform}`); + return false; + } + } catch (error) { + console.error(`[${this.serviceName}] Error during shutdown:`, error); + return false; + } + } + async initialize() { - if (this.isInitialized) return; + if (this.installState.isInitialized) return; try { const homeDir = os.homedir(); @@ -71,10 +291,15 @@ class WhisperService extends LocalAIServiceBase { await this.ensureDirectories(); await this.ensureWhisperBinary(); - this.isInitialized = true; + this.installState.isInitialized = true; console.log('[WhisperService] Initialized successfully'); } catch (error) { console.error('[WhisperService] Initialization failed:', error); + // Emit error event - LocalAIManager가 처리 + this.emit('error', { + errorType: 'initialization-failed', + error: error.message + }); throw error; } } @@ -85,6 +310,56 @@ class WhisperService extends LocalAIServiceBase { await fsPromises.mkdir(path.dirname(this.whisperPath), { recursive: true }); } + // local stt session + async getSession(config) { + // check available session + const availableSession = this.sessionPool.find(s => !s.inUse); + if (availableSession) { + availableSession.inUse = true; + await availableSession.reconfigure(config); + return availableSession; + } + + // create new session + if (this.activeSessions.size >= this.maxSessions) { + throw new Error('Maximum session limit reached'); + } + + const session = new WhisperSession(config, this); + await session.initialize(); + 
this.activeSessions.set(session.id, session); + + return session; + } + + async releaseSession(sessionId) { + const session = this.activeSessions.get(sessionId); + if (session) { + await session.cleanup(); + session.inUse = false; + + // add to session pool + if (this.sessionPool.length < 2) { + this.sessionPool.push(session); + } else { + // remove session + await session.destroy(); + this.activeSessions.delete(sessionId); + } + } + } + + //cleanup + async cleanup() { + // cleanup all sessions + for (const session of this.activeSessions.values()) { + await session.destroy(); + } + + this.activeSessions.clear(); + this.sessionPool = []; + } + async ensureWhisperBinary() { const whisperCliPath = await this.checkCommand('whisper-cli'); if (whisperCliPath) { @@ -113,6 +388,11 @@ class WhisperService extends LocalAIServiceBase { console.log('[WhisperService] Whisper not found, trying Homebrew installation...'); try { await this.installViaHomebrew(); + // verify installation + const verified = await this.verifyInstallation(); + if (!verified.success) { + throw new Error(verified.error); + } return; } catch (error) { console.log('[WhisperService] Homebrew installation failed:', error.message); @@ -120,6 +400,12 @@ class WhisperService extends LocalAIServiceBase { } await this.autoInstall(); + + // verify installation + const verified = await this.verifyInstallation(); + if (!verified.success) { + throw new Error(`Whisper installation verification failed: ${verified.error}`); + } } async installViaHomebrew() { @@ -146,7 +432,7 @@ class WhisperService extends LocalAIServiceBase { async ensureModelAvailable(modelId) { - if (!this.isInitialized) { + if (!this.installState.isInitialized) { console.log('[WhisperService] Service not initialized, initializing now...'); await this.initialize(); } @@ -171,25 +457,33 @@ class WhisperService extends LocalAIServiceBase { const modelPath = await this.getModelPath(modelId); const checksumInfo = 
DOWNLOAD_CHECKSUMS.whisper.models[modelId]; - this._broadcastToAllWindows('whisper:download-progress', { modelId, progress: 0 }); + // Emit progress event - LocalAIManager가 처리 + this.emit('install-progress', { + model: modelId, + progress: 0 + }); await this.downloadWithRetry(modelInfo.url, modelPath, { expectedChecksum: checksumInfo?.sha256, - modelId, // modelId를 전달하여 LocalAIServiceBase에서 이벤트 발생 시 사용 + modelId, // pass modelId to LocalAIServiceBase for event handling onProgress: (progress) => { - this._broadcastToAllWindows('whisper:download-progress', { modelId, progress }); + // Emit progress event - LocalAIManager가 처리 + this.emit('install-progress', { + model: modelId, + progress + }); } }); console.log(`[WhisperService] Model ${modelId} downloaded successfully`); - this._broadcastToAllWindows('whisper:download-complete', { modelId }); + this.emit('model-download-complete', { modelId }); } async handleDownloadModel(modelId) { try { console.log(`[WhisperService] Handling download for model: ${modelId}`); - if (!this.isInitialized) { + if (!this.installState.isInitialized) { await this.initialize(); } @@ -204,7 +498,7 @@ class WhisperService extends LocalAIServiceBase { async handleGetInstalledModels() { try { - if (!this.isInitialized) { + if (!this.installState.isInitialized) { await this.initialize(); } const models = await this.getInstalledModels(); @@ -216,7 +510,7 @@ class WhisperService extends LocalAIServiceBase { } async getModelPath(modelId) { - if (!this.isInitialized || !this.modelsDir) { + if (!this.installState.isInitialized || !this.modelsDir) { throw new Error('WhisperService is not initialized. 
Call initialize() first.'); } return path.join(this.modelsDir, `${modelId}.bin`); @@ -241,7 +535,7 @@ class WhisperService extends LocalAIServiceBase { createWavHeader(dataSize) { const header = Buffer.alloc(44); - const sampleRate = 24000; + const sampleRate = 16000; const numChannels = 1; const bitsPerSample = 16; @@ -290,7 +584,7 @@ class WhisperService extends LocalAIServiceBase { } async getInstalledModels() { - if (!this.isInitialized) { + if (!this.installState.isInitialized) { console.log('[WhisperService] Service not initialized for getInstalledModels, initializing now...'); await this.initialize(); } @@ -319,11 +613,11 @@ class WhisperService extends LocalAIServiceBase { } async isServiceRunning() { - return this.isInitialized; + return this.installState.isInitialized; } async startService() { - if (!this.isInitialized) { + if (!this.installState.isInitialized) { await this.initialize(); } return true; @@ -493,6 +787,92 @@ class WhisperService extends LocalAIServiceBase { } } +// WhisperSession class +class WhisperSession { + constructor(config, service) { + this.id = `session_${Date.now()}_${Math.random()}`; + this.config = config; + this.service = service; + this.process = null; + this.inUse = true; + this.audioBuffer = Buffer.alloc(0); + } + + async initialize() { + await this.service.ensureModelAvailable(this.config.model); + this.startProcessingLoop(); + } + + async reconfigure(config) { + this.config = config; + await this.service.ensureModelAvailable(this.config.model); + } + + startProcessingLoop() { + // TODO: 실제 처리 루프 구현 + } + + async cleanup() { + // 임시 파일 정리 + await this.cleanupTempFiles(); + } + + async cleanupTempFiles() { + // TODO: 임시 파일 정리 구현 + } + + async destroy() { + if (this.process) { + this.process.kill(); + } + // 임시 파일 정리 + await this.cleanupTempFiles(); + } +} + +// verify installation +WhisperService.prototype.verifyInstallation = async function() { + try { + console.log('[WhisperService] Verifying installation...'); + + // 1. 
check binary + if (!this.whisperPath) { + return { success: false, error: 'Whisper binary path not set' }; + } + + try { + await fsPromises.access(this.whisperPath, fs.constants.X_OK); + } catch (error) { + return { success: false, error: 'Whisper binary not executable' }; + } + + // 2. check version + try { + const { stdout } = await spawnAsync(this.whisperPath, ['--help']); + if (!stdout.includes('whisper')) { + return { success: false, error: 'Invalid whisper binary' }; + } + } catch (error) { + return { success: false, error: 'Whisper binary not responding' }; + } + + // 3. check directories + try { + await fsPromises.access(this.modelsDir, fs.constants.W_OK); + await fsPromises.access(this.tempDir, fs.constants.W_OK); + } catch (error) { + return { success: false, error: 'Required directories not accessible' }; + } + + console.log('[WhisperService] Installation verified successfully'); + return { success: true }; + + } catch (error) { + console.error('[WhisperService] Verification failed:', error); + return { success: false, error: error.message }; + } +}; + // Export singleton instance const whisperService = new WhisperService(); module.exports = whisperService; \ No newline at end of file diff --git a/src/features/listen/listenService.js b/src/features/listen/listenService.js index d879d60..33b7cfc 100644 --- a/src/features/listen/listenService.js +++ b/src/features/listen/listenService.js @@ -110,13 +110,17 @@ class ListenService { console.log('[ListenService] changeSession to "Listen"'); internalBridge.emit('window:requestVisibility', { name: 'listen', visible: true }); await this.initializeSession(); - listenWindow.webContents.send('session-state-changed', { isActive: true }); + if (listenWindow && !listenWindow.isDestroyed()) { + listenWindow.webContents.send('session-state-changed', { isActive: true }); + } break; case 'Stop': console.log('[ListenService] changeSession to "Stop"'); await this.closeSession(); - 
listenWindow.webContents.send('session-state-changed', { isActive: false }); + if (listenWindow && !listenWindow.isDestroyed()) { + listenWindow.webContents.send('session-state-changed', { isActive: false }); + } break; case 'Done': diff --git a/src/features/settings/settingsService.js b/src/features/settings/settingsService.js index da68a3a..27fb65d 100644 --- a/src/features/settings/settingsService.js +++ b/src/features/settings/settingsService.js @@ -6,8 +6,7 @@ const { getStoredApiKey, getStoredProvider, windowPool } = require('../../window // New imports for common services const modelStateService = require('../common/services/modelStateService'); -const ollamaService = require('../common/services/ollamaService'); -const whisperService = require('../common/services/whisperService'); +const localAIManager = require('../common/services/localAIManager'); const store = new Store({ name: 'pickle-glass-settings', @@ -58,17 +57,21 @@ async function setSelectedModel(type, modelId) { return { success }; } -// Ollama facade functions +// LocalAI facade functions async function getOllamaStatus() { - return ollamaService.getStatus(); + return localAIManager.getServiceStatus('ollama'); } async function ensureOllamaReady() { - return ollamaService.ensureReady(); + const status = await localAIManager.getServiceStatus('ollama'); + if (!status.installed || !status.running) { + await localAIManager.startService('ollama'); + } + return { success: true }; } async function shutdownOllama() { - return ollamaService.shutdown(false); // false for graceful shutdown + return localAIManager.stopService('ollama'); } diff --git a/src/preload.js b/src/preload.js index 04b1046..6013134 100644 --- a/src/preload.js +++ b/src/preload.js @@ -31,11 +31,20 @@ contextBridge.exposeInMainWorld('api', { apiKeyHeader: { // Model & Provider Management getProviderConfig: () => ipcRenderer.invoke('model:get-provider-config'), - getOllamaStatus: () => ipcRenderer.invoke('ollama:get-status'), + // LocalAI 
통합 API + getLocalAIStatus: (service) => ipcRenderer.invoke('localai:get-status', service), + installLocalAI: (service, options) => ipcRenderer.invoke('localai:install', { service, options }), + startLocalAIService: (service) => ipcRenderer.invoke('localai:start-service', service), + stopLocalAIService: (service) => ipcRenderer.invoke('localai:stop-service', service), + installLocalAIModel: (service, modelId, options) => ipcRenderer.invoke('localai:install-model', { service, modelId, options }), + getInstalledModels: (service) => ipcRenderer.invoke('localai:get-installed-models', service), + + // Legacy support (호환성 위해 유지) + getOllamaStatus: () => ipcRenderer.invoke('localai:get-status', 'ollama'), getModelSuggestions: () => ipcRenderer.invoke('ollama:get-model-suggestions'), ensureOllamaReady: () => ipcRenderer.invoke('ollama:ensure-ready'), - installOllama: () => ipcRenderer.invoke('ollama:install'), - startOllamaService: () => ipcRenderer.invoke('ollama:start-service'), + installOllama: () => ipcRenderer.invoke('localai:install', { service: 'ollama' }), + startOllamaService: () => ipcRenderer.invoke('localai:start-service', 'ollama'), pullOllamaModel: (modelName) => ipcRenderer.invoke('ollama:pull-model', modelName), downloadWhisperModel: (modelId) => ipcRenderer.invoke('whisper:download-model', modelId), validateKey: (data) => ipcRenderer.invoke('model:validate-key', data), @@ -47,21 +56,25 @@ contextBridge.exposeInMainWorld('api', { moveHeaderTo: (x, y) => ipcRenderer.invoke('move-header-to', x, y), // Listeners - onOllamaInstallProgress: (callback) => ipcRenderer.on('ollama:install-progress', callback), - removeOnOllamaInstallProgress: (callback) => ipcRenderer.removeListener('ollama:install-progress', callback), - onceOllamaInstallComplete: (callback) => ipcRenderer.once('ollama:install-complete', callback), - removeOnceOllamaInstallComplete: (callback) => ipcRenderer.removeListener('ollama:install-complete', callback), - onOllamaPullProgress: (callback) => 
ipcRenderer.on('ollama:pull-progress', callback), - removeOnOllamaPullProgress: (callback) => ipcRenderer.removeListener('ollama:pull-progress', callback), - onWhisperDownloadProgress: (callback) => ipcRenderer.on('whisper:download-progress', callback), - removeOnWhisperDownloadProgress: (callback) => ipcRenderer.removeListener('whisper:download-progress', callback), + // LocalAI 통합 이벤트 리스너 + onLocalAIProgress: (callback) => ipcRenderer.on('localai:install-progress', callback), + removeOnLocalAIProgress: (callback) => ipcRenderer.removeListener('localai:install-progress', callback), + onLocalAIComplete: (callback) => ipcRenderer.on('localai:installation-complete', callback), + removeOnLocalAIComplete: (callback) => ipcRenderer.removeListener('localai:installation-complete', callback), + onLocalAIError: (callback) => ipcRenderer.on('localai:error-notification', callback), + removeOnLocalAIError: (callback) => ipcRenderer.removeListener('localai:error-notification', callback), + onLocalAIModelReady: (callback) => ipcRenderer.on('localai:model-ready', callback), + removeOnLocalAIModelReady: (callback) => ipcRenderer.removeListener('localai:model-ready', callback), + // Remove all listeners (for cleanup) removeAllListeners: () => { - ipcRenderer.removeAllListeners('whisper:download-progress'); - ipcRenderer.removeAllListeners('ollama:install-progress'); - ipcRenderer.removeAllListeners('ollama:pull-progress'); - ipcRenderer.removeAllListeners('ollama:install-complete'); + // LocalAI 통합 이벤트 + ipcRenderer.removeAllListeners('localai:install-progress'); + ipcRenderer.removeAllListeners('localai:installation-complete'); + ipcRenderer.removeAllListeners('localai:error-notification'); + ipcRenderer.removeAllListeners('localai:model-ready'); + ipcRenderer.removeAllListeners('localai:service-status-changed'); } }, @@ -239,10 +252,11 @@ contextBridge.exposeInMainWorld('api', { removeOnPresetsUpdated: (callback) => ipcRenderer.removeListener('presets-updated', callback), 
onShortcutsUpdated: (callback) => ipcRenderer.on('shortcuts-updated', callback), removeOnShortcutsUpdated: (callback) => ipcRenderer.removeListener('shortcuts-updated', callback), - onWhisperDownloadProgress: (callback) => ipcRenderer.on('whisper:download-progress', callback), - removeOnWhisperDownloadProgress: (callback) => ipcRenderer.removeListener('whisper:download-progress', callback), - onOllamaPullProgress: (callback) => ipcRenderer.on('ollama:pull-progress', callback), - removeOnOllamaPullProgress: (callback) => ipcRenderer.removeListener('ollama:pull-progress', callback) + // 통합 LocalAI 이벤트 사용 + onLocalAIInstallProgress: (callback) => ipcRenderer.on('localai:install-progress', callback), + removeOnLocalAIInstallProgress: (callback) => ipcRenderer.removeListener('localai:install-progress', callback), + onLocalAIInstallationComplete: (callback) => ipcRenderer.on('localai:installation-complete', callback), + removeOnLocalAIInstallationComplete: (callback) => ipcRenderer.removeListener('localai:installation-complete', callback) }, // src/ui/settings/ShortCutSettingsView.js diff --git a/src/ui/app/ApiKeyHeader.js b/src/ui/app/ApiKeyHeader.js index 915ba77..26099b9 100644 --- a/src/ui/app/ApiKeyHeader.js +++ b/src/ui/app/ApiKeyHeader.js @@ -1092,6 +1092,9 @@ export class ApiKeyHeader extends LitElement { this.requestUpdate(); const progressHandler = (event, data) => { + // 통합 LocalAI 이벤트에서 Ollama 진행률만 처리 + if (data.service !== 'ollama') return; + let baseProgress = 0; let stageTotal = 0; @@ -1137,17 +1140,21 @@ export class ApiKeyHeader extends LitElement { } }, 15000); // 15 second timeout - const completionHandler = async (event, result) => { + const completionHandler = async (event, data) => { + // 통합 LocalAI 이벤트에서 Ollama 완료만 처리 + if (data.service !== 'ollama') return; if (operationCompleted) return; operationCompleted = true; clearTimeout(completionTimeout); - window.api.apiKeyHeader.removeOnOllamaInstallProgress(progressHandler); - await 
this._handleOllamaSetupCompletion(result.success, result.error); + window.api.apiKeyHeader.removeOnLocalAIProgress(progressHandler); + // installation-complete 이벤트는 성공을 의미 + await this._handleOllamaSetupCompletion(true); }; - window.api.apiKeyHeader.onceOllamaInstallComplete(completionHandler); - window.api.apiKeyHeader.onOllamaInstallProgress(progressHandler); + // 통합 LocalAI 이벤트 사용 + window.api.apiKeyHeader.onLocalAIComplete(completionHandler); + window.api.apiKeyHeader.onLocalAIProgress(progressHandler); try { let result; @@ -1173,8 +1180,8 @@ export class ApiKeyHeader extends LitElement { operationCompleted = true; clearTimeout(completionTimeout); console.error('[ApiKeyHeader] Ollama setup failed:', error); - window.api.apiKeyHeader.removeOnOllamaInstallProgress(progressHandler); - window.api.apiKeyHeader.removeOnceOllamaInstallComplete(completionHandler); + window.api.apiKeyHeader.removeOnLocalAIProgress(progressHandler); + window.api.apiKeyHeader.removeOnLocalAIComplete(completionHandler); await this._handleOllamaSetupCompletion(false, error.message); } } @@ -1304,7 +1311,7 @@ export class ApiKeyHeader extends LitElement { // Create robust progress handler with timeout protection progressHandler = (event, data) => { - if (data.model === modelName && !this._isOperationCancelled(modelName)) { + if (data.service === 'ollama' && data.model === modelName && !this._isOperationCancelled(modelName)) { const progress = Math.round(Math.max(0, Math.min(100, data.progress || 0))); if (progress !== this.installProgress) { @@ -1315,8 +1322,8 @@ export class ApiKeyHeader extends LitElement { } }; - // Set up progress tracking - window.api.apiKeyHeader.onOllamaPullProgress(progressHandler); + // Set up progress tracking - 통합 LocalAI 이벤트 사용 + window.api.apiKeyHeader.onLocalAIProgress(progressHandler); // Execute the model pull with timeout const installPromise = window.api.apiKeyHeader.pullOllamaModel(modelName); @@ -1346,7 +1353,7 @@ export class ApiKeyHeader extends 
LitElement { } finally { // Comprehensive cleanup if (progressHandler) { - window.api.apiKeyHeader.removeOnOllamaPullProgress(progressHandler); + window.api.apiKeyHeader.removeOnLocalAIProgress(progressHandler); } this.installingModel = null; @@ -1376,17 +1383,17 @@ export class ApiKeyHeader extends LitElement { let progressHandler = null; try { - // Set up robust progress listener - progressHandler = (event, { modelId: id, progress }) => { - if (id === modelId) { - const cleanProgress = Math.round(Math.max(0, Math.min(100, progress || 0))); + // Set up robust progress listener - 통합 LocalAI 이벤트 사용 + progressHandler = (event, data) => { + if (data.service === 'whisper' && data.model === modelId) { + const cleanProgress = Math.round(Math.max(0, Math.min(100, data.progress || 0))); this.whisperInstallingModels = { ...this.whisperInstallingModels, [modelId]: cleanProgress }; console.log(`[ApiKeyHeader] Whisper download progress: ${cleanProgress}% for ${modelId}`); this.requestUpdate(); } }; - window.api.apiKeyHeader.onWhisperDownloadProgress(progressHandler); + window.api.apiKeyHeader.onLocalAIProgress(progressHandler); // Start download with timeout protection const downloadPromise = window.api.apiKeyHeader.downloadWhisperModel(modelId); @@ -1413,7 +1420,7 @@ export class ApiKeyHeader extends LitElement { } finally { // Cleanup if (progressHandler) { - window.api.apiKeyHeader.removeOnWhisperDownloadProgress(progressHandler); + window.api.apiKeyHeader.removeOnLocalAIProgress(progressHandler); } delete this.whisperInstallingModels[modelId]; this.requestUpdate(); diff --git a/src/ui/settings/SettingsView.js b/src/ui/settings/SettingsView.js index 5d3bacb..1df713e 100644 --- a/src/ui/settings/SettingsView.js +++ b/src/ui/settings/SettingsView.js @@ -575,19 +575,50 @@ export class SettingsView extends LitElement { this.requestUpdate(); } + async loadLocalAIStatus() { + try { + // Load Ollama status + const ollamaStatus = await window.api.settingsView.getOllamaStatus(); + 
if (ollamaStatus?.success) { + this.ollamaStatus = { installed: ollamaStatus.installed, running: ollamaStatus.running }; + this.ollamaModels = ollamaStatus.models || []; + } + + // Load Whisper models status only if Whisper is enabled + if (this.apiKeys?.whisper === 'local') { + const whisperModelsResult = await window.api.settingsView.getWhisperInstalledModels(); + if (whisperModelsResult?.success) { + const installedWhisperModels = whisperModelsResult.models; + if (this.providerConfig?.whisper) { + this.providerConfig.whisper.sttModels.forEach(m => { + const installedInfo = installedWhisperModels.find(i => i.id === m.id); + if (installedInfo) { + m.installed = installedInfo.installed; + } + }); + } + } + } + + // Trigger UI update + this.requestUpdate(); + } catch (error) { + console.error('Error loading LocalAI status:', error); + } + } + //////// after_modelStateService //////// async loadInitialData() { if (!window.api) return; this.isLoading = true; try { - const [userState, modelSettings, presets, contentProtection, shortcuts, ollamaStatus, whisperModelsResult] = await Promise.all([ + // Load essential data first + const [userState, modelSettings, presets, contentProtection, shortcuts] = await Promise.all([ window.api.settingsView.getCurrentUser(), window.api.settingsView.getModelSettings(), // Facade call window.api.settingsView.getPresets(), window.api.settingsView.getContentProtectionStatus(), - window.api.settingsView.getCurrentShortcuts(), - window.api.settingsView.getOllamaStatus(), - window.api.settingsView.getWhisperInstalledModels() + window.api.settingsView.getCurrentShortcuts() ]); if (userState && userState.isLoggedIn) this.firebaseUser = userState; @@ -609,23 +640,9 @@ export class SettingsView extends LitElement { const firstUserPreset = this.presets.find(p => p.is_default === 0); if (firstUserPreset) this.selectedPreset = firstUserPreset; } - // Ollama status - if (ollamaStatus?.success) { - this.ollamaStatus = { installed: 
ollamaStatus.installed, running: ollamaStatus.running }; - this.ollamaModels = ollamaStatus.models || []; - } - // Whisper status - if (whisperModelsResult?.success) { - const installedWhisperModels = whisperModelsResult.models; - if (this.providerConfig.whisper) { - this.providerConfig.whisper.sttModels.forEach(m => { - const installedInfo = installedWhisperModels.find(i => i.id === m.id); - if (installedInfo) { - m.installed = installedInfo.installed; - } - }); - } - } + + // Load LocalAI status asynchronously to improve initial load time + this.loadLocalAIStatus(); } catch (error) { console.error('Error loading initial settings data:', error); } finally { @@ -779,16 +796,16 @@ export class SettingsView extends LitElement { this.installingModels = { ...this.installingModels, [modelName]: 0 }; this.requestUpdate(); - // 진행률 이벤트 리스너 설정 + // 진행률 이벤트 리스너 설정 - 통합 LocalAI 이벤트 사용 const progressHandler = (event, data) => { - if (data.modelId === modelName) { - this.installingModels = { ...this.installingModels, [modelName]: data.progress }; + if (data.service === 'ollama' && data.model === modelName) { + this.installingModels = { ...this.installingModels, [modelName]: data.progress || 0 }; this.requestUpdate(); } }; - // 진행률 이벤트 리스너 등록 - window.api.settingsView.onOllamaPullProgress(progressHandler); + // 통합 LocalAI 이벤트 리스너 등록 + window.api.settingsView.onLocalAIInstallProgress(progressHandler); try { const result = await window.api.settingsView.pullOllamaModel(modelName); @@ -805,8 +822,8 @@ export class SettingsView extends LitElement { throw new Error(result.error || 'Installation failed'); } } finally { - // 진행률 이벤트 리스너 제거 - window.api.settingsView.removeOnOllamaPullProgress(progressHandler); + // 통합 LocalAI 이벤트 리스너 제거 + window.api.settingsView.removeOnLocalAIInstallProgress(progressHandler); } } catch (error) { console.error(`[SettingsView] Error installing model ${modelName}:`, error); @@ -821,34 +838,52 @@ export class SettingsView extends LitElement { 
this.requestUpdate(); try { - // Set up progress listener - const progressHandler = (event, { modelId: id, progress }) => { - if (id === modelId) { - this.installingModels = { ...this.installingModels, [modelId]: progress }; + // Set up progress listener - 통합 LocalAI 이벤트 사용 + const progressHandler = (event, data) => { + if (data.service === 'whisper' && data.model === modelId) { + this.installingModels = { ...this.installingModels, [modelId]: data.progress || 0 }; this.requestUpdate(); } }; - window.api.settingsView.onWhisperDownloadProgress(progressHandler); + window.api.settingsView.onLocalAIInstallProgress(progressHandler); // Start download const result = await window.api.settingsView.downloadWhisperModel(modelId); if (result.success) { + // Update the model's installed status + if (this.providerConfig?.whisper?.sttModels) { + const modelInfo = this.providerConfig.whisper.sttModels.find(m => m.id === modelId); + if (modelInfo) { + modelInfo.installed = true; + } + } + + // Remove from installing models + delete this.installingModels[modelId]; + this.requestUpdate(); + + // Reload LocalAI status to get fresh data + await this.loadLocalAIStatus(); + // Auto-select the model after download await this.selectModel('stt', modelId); } else { + // Remove from installing models on failure too + delete this.installingModels[modelId]; + this.requestUpdate(); alert(`Failed to download Whisper model: ${result.error}`); } // Cleanup - window.api.settingsView.removeOnWhisperDownloadProgress(progressHandler); + window.api.settingsView.removeOnLocalAIInstallProgress(progressHandler); } catch (error) { console.error(`[SettingsView] Error downloading Whisper model ${modelId}:`, error); - alert(`Error downloading ${modelId}: ${error.message}`); - } finally { + // Remove from installing models on error delete this.installingModels[modelId]; this.requestUpdate(); + alert(`Error downloading ${modelId}: ${error.message}`); } } @@ -862,12 +897,6 @@ export class SettingsView extends 
LitElement { return null; } - async handleWhisperModelSelect(modelId) { - if (!modelId) return; - - // Select the model (will trigger download if needed) - await this.selectModel('stt', modelId); - } handleUsePicklesKey(e) { e.preventDefault() @@ -1192,12 +1221,7 @@ export class SettingsView extends LitElement { } if (id === 'whisper') { - // Special UI for Whisper with model selection - const whisperModels = config.sttModels || []; - const selectedWhisperModel = this.selectedStt && this.getProviderForModel('stt', this.selectedStt) === 'whisper' - ? this.selectedStt - : null; - + // Simplified UI for Whisper without model selection return html`
@@ -1205,51 +1229,6 @@ export class SettingsView extends LitElement {
✓ Whisper is enabled
- - - - - - ${Object.entries(this.installingModels).map(([modelId, progress]) => { - if (modelId.startsWith('whisper-') && progress !== undefined) { - return html` -
-
- Downloading ${modelId}... -
-
-
-
-
- `; - } - return null; - })} - @@ -1331,6 +1310,9 @@ export class SettingsView extends LitElement {
${this.availableSttModels.map(model => { const isWhisper = this.getProviderForModel('stt', model.id) === 'whisper'; + const whisperModel = isWhisper && this.providerConfig?.whisper?.sttModels + ? this.providerConfig.whisper.sttModels.find(m => m.id === model.id) + : null; const isInstalling = this.installingModels[model.id] !== undefined; const installProgress = this.installingModels[model.id] || 0; @@ -1338,10 +1320,16 @@ export class SettingsView extends LitElement {
this.selectModel('stt', model.id)}> ${model.name} - ${isWhisper && isInstalling ? html` -
-
-
+ ${isWhisper ? html` + ${isInstalling ? html` +
+
+
+ ` : whisperModel?.installed ? html` + ✓ Installed + ` : html` + Not Installed + `} ` : ''}
`;