remove business logic

This commit is contained in:
  parent e043b85bcd
  commit d62dad6992
@@ -1,5 +1,5 @@
 // src/bridge/featureBridge.js
-const { ipcMain, app, BrowserWindow } = require('electron');
+const { ipcMain, app } = require('electron');
 const settingsService = require('../features/settings/settingsService');
 const authService = require('../features/common/services/authService');
 const whisperService = require('../features/common/services/whisperService');
@@ -13,12 +13,8 @@ const listenService = require('../features/listen/listenService');
 const permissionService = require('../features/common/services/permissionService');

 module.exports = {
-  // Receives requests from the renderer
+  // Receives requests from the renderer and forwards them to services
   initialize() {
-
-    // Set up service event listeners
-    this._setupServiceEventListeners();
-
     // Settings Service
     ipcMain.handle('settings:getPresets', async () => await settingsService.getPresets());
     ipcMain.handle('settings:get-auto-update', async () => await settingsService.getAutoUpdateSetting());
@@ -37,14 +33,12 @@ module.exports = {
     ipcMain.handle('get-default-shortcuts', async () => await shortcutsService.handleRestoreDefaults());
     ipcMain.handle('save-shortcuts', async (event, newKeybinds) => await shortcutsService.handleSaveShortcuts(newKeybinds));

-
     // Permissions
     ipcMain.handle('check-system-permissions', async () => await permissionService.checkSystemPermissions());
     ipcMain.handle('request-microphone-permission', async () => await permissionService.requestMicrophonePermission());
     ipcMain.handle('open-system-preferences', async (event, section) => await permissionService.openSystemPreferences(section));
     ipcMain.handle('mark-permissions-completed', async () => await permissionService.markPermissionsAsCompleted());
     ipcMain.handle('check-permissions-completed', async () => await permissionService.checkPermissionsCompleted());
-
     // User/Auth
     ipcMain.handle('get-current-user', () => authService.getCurrentUser());
@@ -55,33 +49,7 @@ module.exports = {
     ipcMain.handle('quit-application', () => app.quit());

     // Whisper
-    ipcMain.handle('whisper:download-model', async (event, modelId) => {
-        // Handle per-request progress events
-        const progressHandler = (data) => {
-            if (data.modelId === modelId) {
-                event.sender.send('whisper:download-progress', data);
-            }
-        };
-
-        const completeHandler = (data) => {
-            if (data.modelId === modelId) {
-                event.sender.send('whisper:download-complete', data);
-                whisperService.removeListener('download-progress', progressHandler);
-                whisperService.removeListener('download-complete', completeHandler);
-            }
-        };
-
-        whisperService.on('download-progress', progressHandler);
-        whisperService.on('download-complete', completeHandler);
-
-        try {
-            return await whisperService.handleDownloadModel(modelId);
-        } catch (error) {
-            whisperService.removeListener('download-progress', progressHandler);
-            whisperService.removeListener('download-complete', completeHandler);
-            throw error;
-        }
-    });
+    ipcMain.handle('whisper:download-model', async (event, modelId) => await whisperService.handleDownloadModel(modelId));
     ipcMain.handle('whisper:get-installed-models', async () => await whisperService.handleGetInstalledModels());

     // General
@@ -90,86 +58,12 @@ module.exports = {

     // Ollama
     ipcMain.handle('ollama:get-status', async () => await ollamaService.handleGetStatus());
-    ipcMain.handle('ollama:install', async (event) => {
-        // Handle per-request progress events
-        const progressHandler = (data) => {
-            event.sender.send('ollama:install-progress', data);
-        };
-
-        const completeHandler = (data) => {
-            event.sender.send('ollama:install-complete', data);
-            ollamaService.removeListener('install-progress', progressHandler);
-            ollamaService.removeListener('install-complete', completeHandler);
-        };
-
-        ollamaService.on('install-progress', progressHandler);
-        ollamaService.on('install-complete', completeHandler);
-
-        try {
-            return await ollamaService.handleInstall();
-        } catch (error) {
-            ollamaService.removeListener('install-progress', progressHandler);
-            ollamaService.removeListener('install-complete', completeHandler);
-            throw error;
-        }
-    });
-    ipcMain.handle('ollama:start-service', async (event) => {
-        // Handle per-request progress events
-        const completeHandler = (data) => {
-            event.sender.send('ollama:install-complete', data);
-            ollamaService.removeListener('install-complete', completeHandler);
-        };
-
-        ollamaService.on('install-complete', completeHandler);
-
-        try {
-            return await ollamaService.handleStartService();
-        } catch (error) {
-            ollamaService.removeListener('install-complete', completeHandler);
-            throw error;
-        }
-    });
+    ipcMain.handle('ollama:install', async () => await ollamaService.handleInstall());
+    ipcMain.handle('ollama:start-service', async () => await ollamaService.handleStartService());
     ipcMain.handle('ollama:ensure-ready', async () => await ollamaService.handleEnsureReady());
     ipcMain.handle('ollama:get-models', async () => await ollamaService.handleGetModels());
     ipcMain.handle('ollama:get-model-suggestions', async () => await ollamaService.handleGetModelSuggestions());
-    ipcMain.handle('ollama:pull-model', async (event, modelName) => {
-        // Handle per-request progress events
-        const progressHandler = (data) => {
-            if (data.model === modelName) {
-                event.sender.send('ollama:pull-progress', data);
-            }
-        };
-
-        const completeHandler = (data) => {
-            if (data.model === modelName) {
-                event.sender.send('ollama:pull-complete', data);
-                ollamaService.removeListener('pull-progress', progressHandler);
-                ollamaService.removeListener('pull-complete', completeHandler);
-            }
-        };
-
-        const errorHandler = (data) => {
-            if (data.model === modelName) {
-                event.sender.send('ollama:pull-error', data);
-                ollamaService.removeListener('pull-progress', progressHandler);
-                ollamaService.removeListener('pull-complete', completeHandler);
-                ollamaService.removeListener('pull-error', errorHandler);
-            }
-        };
-
-        ollamaService.on('pull-progress', progressHandler);
-        ollamaService.on('pull-complete', completeHandler);
-        ollamaService.on('pull-error', errorHandler);
-
-        try {
-            return await ollamaService.handlePullModel(modelName);
-        } catch (error) {
-            ollamaService.removeListener('pull-progress', progressHandler);
-            ollamaService.removeListener('pull-complete', completeHandler);
-            ollamaService.removeListener('pull-error', errorHandler);
-            throw error;
-        }
-    });
+    ipcMain.handle('ollama:pull-model', async (event, modelName) => await ollamaService.handlePullModel(modelName));
     ipcMain.handle('ollama:is-model-installed', async (event, modelName) => await ollamaService.handleIsModelInstalled(modelName));
     ipcMain.handle('ollama:warm-up-model', async (event, modelName) => await ollamaService.handleWarmUpModel(modelName));
     ipcMain.handle('ollama:auto-warm-up', async () => await ollamaService.handleAutoWarmUp());
@@ -204,9 +98,7 @@ module.exports = {
       }
     });

-
-
-     // ModelStateService
+    // ModelStateService
     ipcMain.handle('model:validate-key', async (e, { provider, key }) => await modelStateService.handleValidateKey(provider, key));
     ipcMain.handle('model:get-all-keys', () => modelStateService.getAllApiKeys());
     ipcMain.handle('model:set-api-key', async (e, { provider, key }) => await modelStateService.setApiKey(provider, key));
@@ -217,80 +109,9 @@ module.exports = {
     ipcMain.handle('model:are-providers-configured', () => modelStateService.areProvidersConfigured());
     ipcMain.handle('model:get-provider-config', () => modelStateService.getProviderConfig());

-
-
     console.log('[FeatureBridge] Initialized with all feature handlers.');
   },

-  // Set up service event listeners
-  _setupServiceEventListeners() {
-    // Ollama service event listeners
-    ollamaService.on('pull-progress', (data) => {
-      this._broadcastToAllWindows('ollama:pull-progress', data);
-    });
-
-    ollamaService.on('pull-complete', (data) => {
-      this._broadcastToAllWindows('ollama:pull-complete', data);
-    });
-
-    ollamaService.on('pull-error', (data) => {
-      this._broadcastToAllWindows('ollama:pull-error', data);
-    });
-
-    ollamaService.on('download-progress', (data) => {
-      this._broadcastToAllWindows('ollama:download-progress', data);
-    });
-
-    ollamaService.on('download-complete', (data) => {
-      this._broadcastToAllWindows('ollama:download-complete', data);
-    });
-
-    ollamaService.on('download-error', (data) => {
-      this._broadcastToAllWindows('ollama:download-error', data);
-    });
-
-    // Whisper service event listeners
-    whisperService.on('download-progress', (data) => {
-      this._broadcastToAllWindows('whisper:download-progress', data);
-    });
-
-    whisperService.on('download-complete', (data) => {
-      this._broadcastToAllWindows('whisper:download-complete', data);
-    });
-
-    whisperService.on('download-error', (data) => {
-      this._broadcastToAllWindows('whisper:download-error', data);
-    });
-
-    // Model state service event listeners
-    modelStateService.on('state-changed', (data) => {
-      this._broadcastToAllWindows('model-state:updated', data);
-    });
-
-    modelStateService.on('settings-updated', () => {
-      this._broadcastToAllWindows('settings-updated');
-    });
-
-    modelStateService.on('force-show-apikey-header', () => {
-      this._broadcastToAllWindows('force-show-apikey-header');
-    });
-
-    console.log('[FeatureBridge] Service event listeners configured');
-  },
-
-  // Broadcast an event to all windows
-  _broadcastToAllWindows(eventName, data = null) {
-    BrowserWindow.getAllWindows().forEach(win => {
-      if (win && !win.isDestroyed()) {
-        if (data !== null) {
-          win.webContents.send(eventName, data);
-        } else {
-          win.webContents.send(eventName);
-        }
-      }
-    });
-  },
-
   // Sends state to the renderer
   sendAskProgress(win, progress) {
     win.webContents.send('feature:ask:progress', progress);
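Note: with the feature bridge reduced to one-line forwards, per-model progress events no longer flow through `event.sender.send`; each service now broadcasts on its own namespaced channel to every window. A minimal renderer-side sketch of subscribing to those channels, assuming a preload script that exposes `ipcRenderer.on` as `window.api.on` (that name is an assumption, not part of this commit):

// Renderer sketch (hypothetical window.api exposed from a preload script)
window.api.on('ollama:pull-progress', ({ model, progress, status }) => {
    // e.g. drive a progress bar for the model being pulled
    console.log(`pulling ${model}: ${progress}% (${status})`);
});
window.api.on('whisper:download-complete', ({ modelId }) => {
    console.log(`whisper model ${modelId} is ready`);
});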
@@ -138,6 +138,8 @@ class AskService {
         console.log('[AskService] Service instance created.');
     }

+
+
     _broadcastState() {
         const askWindow = getWindowPool()?.get('ask');
         if (askWindow && !askWindow.isDestroyed()) {
@@ -381,6 +383,7 @@ class AskService {
+                                this._broadcastState();
                             }
                         } catch (error) {
                             console.error('[AskService] Failed to parse stream data:', { line: data, error: error.message });
                         }
                     }
                 }
@@ -1,6 +1,79 @@
 const http = require('http');
 const fetch = require('node-fetch');

+// Request Queue System for Ollama API (only for non-streaming requests)
+class RequestQueue {
+    constructor() {
+        this.queue = [];
+        this.processing = false;
+        this.streamingActive = false;
+    }
+
+    async addStreamingRequest(requestFn) {
+        // Streaming requests have priority - wait for current processing to finish
+        while (this.processing) {
+            await new Promise(resolve => setTimeout(resolve, 50));
+        }
+
+        this.streamingActive = true;
+        console.log('[Ollama Queue] Starting streaming request (priority)');
+
+        try {
+            const result = await requestFn();
+            return result;
+        } finally {
+            this.streamingActive = false;
+            console.log('[Ollama Queue] Streaming request completed');
+        }
+    }
+
+    async add(requestFn) {
+        return new Promise((resolve, reject) => {
+            this.queue.push({ requestFn, resolve, reject });
+            this.process();
+        });
+    }
+
+    async process() {
+        if (this.processing || this.queue.length === 0) {
+            return;
+        }
+
+        // Wait if streaming is active
+        if (this.streamingActive) {
+            setTimeout(() => this.process(), 100);
+            return;
+        }
+
+        this.processing = true;
+
+        while (this.queue.length > 0) {
+            // Check if streaming started while processing queue
+            if (this.streamingActive) {
+                this.processing = false;
+                setTimeout(() => this.process(), 100);
+                return;
+            }
+
+            const { requestFn, resolve, reject } = this.queue.shift();
+
+            try {
+                console.log(`[Ollama Queue] Processing queued request (${this.queue.length} remaining)`);
+                const result = await requestFn();
+                resolve(result);
+            } catch (error) {
+                console.error('[Ollama Queue] Request failed:', error);
+                reject(error);
+            }
+        }
+
+        this.processing = false;
+    }
+}
+
+// Global request queue instance
+const requestQueue = new RequestQueue();
+
 class OllamaProvider {
     static async validateApiKey() {
         try {
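A quick standalone sketch of the queue's behavior, using only the class above: queued (non-streaming) calls run strictly one at a time, and a streaming request waits only for the request already in flight before taking priority. The delays are illustrative.

// Usage sketch for RequestQueue (illustrative delays)
const q = new RequestQueue();

q.add(() => new Promise(res => setTimeout(() => res('chat #1 done'), 200))).then(console.log);
q.add(() => new Promise(res => setTimeout(() => res('chat #2 done'), 200))).then(console.log);

// Jumps ahead of anything still waiting in q.queue
q.addStreamingRequest(async () => 'stream done').then(console.log);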
@@ -79,71 +152,77 @@ function createLLM({
             }
             messages.push({ role: 'user', content: userContent.join('\n') });

-            try {
-                const response = await fetch(`${baseUrl}/api/chat`, {
-                    method: 'POST',
-                    headers: { 'Content-Type': 'application/json' },
-                    body: JSON.stringify({
-                        model,
-                        messages,
-                        stream: false,
-                        options: {
-                            temperature,
-                            num_predict: maxTokens,
-                        }
-                    })
-                });
-
-                if (!response.ok) {
-                    throw new Error(`Ollama API error: ${response.status} ${response.statusText}`);
-                }
-
-                const result = await response.json();
-
-                return {
-                    response: {
-                        text: () => result.message.content
-                    },
-                    raw: result
-                };
-            } catch (error) {
-                console.error('Ollama LLM error:', error);
-                throw error;
-            }
+            // Use request queue to prevent concurrent API calls
+            return await requestQueue.add(async () => {
+                try {
+                    const response = await fetch(`${baseUrl}/api/chat`, {
+                        method: 'POST',
+                        headers: { 'Content-Type': 'application/json' },
+                        body: JSON.stringify({
+                            model,
+                            messages,
+                            stream: false,
+                            options: {
+                                temperature,
+                                num_predict: maxTokens,
+                            }
+                        })
+                    });
+
+                    if (!response.ok) {
+                        throw new Error(`Ollama API error: ${response.status} ${response.statusText}`);
+                    }
+
+                    const result = await response.json();
+
+                    return {
+                        response: {
+                            text: () => result.message.content
+                        },
+                        raw: result
+                    };
+                } catch (error) {
+                    console.error('Ollama LLM error:', error);
+                    throw error;
+                }
+            });
         },

         chat: async (messages) => {
             const ollamaMessages = convertMessagesToOllamaFormat(messages);

-            try {
-                const response = await fetch(`${baseUrl}/api/chat`, {
-                    method: 'POST',
-                    headers: { 'Content-Type': 'application/json' },
-                    body: JSON.stringify({
-                        model,
-                        messages: ollamaMessages,
-                        stream: false,
-                        options: {
-                            temperature,
-                            num_predict: maxTokens,
-                        }
-                    })
-                });
-
-                if (!response.ok) {
-                    throw new Error(`Ollama API error: ${response.status} ${response.statusText}`);
-                }
-
-                const result = await response.json();
-
-                return {
-                    content: result.message.content,
-                    raw: result
-                };
-            } catch (error) {
-                console.error('Ollama chat error:', error);
-                throw error;
-            }
+            // Use request queue to prevent concurrent API calls
+            return await requestQueue.add(async () => {
+                try {
+                    const response = await fetch(`${baseUrl}/api/chat`, {
+                        method: 'POST',
+                        headers: { 'Content-Type': 'application/json' },
+                        body: JSON.stringify({
+                            model,
+                            messages: ollamaMessages,
+                            stream: false,
+                            options: {
+                                temperature,
+                                num_predict: maxTokens,
+                            }
+                        })
+                    });
+
+                    if (!response.ok) {
+                        throw new Error(`Ollama API error: ${response.status} ${response.statusText}`);
+                    }
+
+                    const result = await response.json();
+
+                    return {
+                        content: result.message.content,
+                        raw: result
+                    };
+                } catch (error) {
+                    console.error('Ollama chat error:', error);
+                    throw error;
+                }
+            });
         }
     };
 }
@@ -165,89 +244,92 @@ function createStreamingLLM({
             const ollamaMessages = convertMessagesToOllamaFormat(messages);
             console.log('[Ollama Provider] Converted messages for Ollama:', ollamaMessages);

-            try {
-                const response = await fetch(`${baseUrl}/api/chat`, {
-                    method: 'POST',
-                    headers: { 'Content-Type': 'application/json' },
-                    body: JSON.stringify({
-                        model,
-                        messages: ollamaMessages,
-                        stream: true,
-                        options: {
-                            temperature,
-                            num_predict: maxTokens,
-                        }
-                    })
-                });
-
-                if (!response.ok) {
-                    throw new Error(`Ollama API error: ${response.status} ${response.statusText}`);
-                }
-
-                console.log('[Ollama Provider] Got streaming response');
-
-                const stream = new ReadableStream({
-                    async start(controller) {
-                        let buffer = '';
-
-                        try {
-                            response.body.on('data', (chunk) => {
-                                buffer += chunk.toString();
-                                const lines = buffer.split('\n');
-                                buffer = lines.pop() || '';
-
-                                for (const line of lines) {
-                                    if (line.trim() === '') continue;
-
-                                    try {
-                                        const data = JSON.parse(line);
-
-                                        if (data.message?.content) {
-                                            const sseData = JSON.stringify({
-                                                choices: [{
-                                                    delta: {
-                                                        content: data.message.content
-                                                    }
-                                                }]
-                                            });
-                                            controller.enqueue(new TextEncoder().encode(`data: ${sseData}\n\n`));
-                                        }
-
-                                        if (data.done) {
-                                            controller.enqueue(new TextEncoder().encode('data: [DONE]\n\n'));
-                                        }
-                                    } catch (e) {
-                                        console.error('[Ollama Provider] Failed to parse chunk:', e);
-                                    }
-                                }
-                            });
-
-                            response.body.on('end', () => {
-                                controller.close();
-                                console.log('[Ollama Provider] Streaming completed');
-                            });
-
-                            response.body.on('error', (error) => {
-                                console.error('[Ollama Provider] Streaming error:', error);
-                                controller.error(error);
-                            });
-
-                        } catch (error) {
-                            console.error('[Ollama Provider] Streaming setup error:', error);
-                            controller.error(error);
-                        }
-                    }
-                });
-
-                return {
-                    ok: true,
-                    body: stream
-                };
-
-            } catch (error) {
-                console.error('[Ollama Provider] Request error:', error);
-                throw error;
-            }
+            // Streaming requests have priority over queued requests
+            return await requestQueue.addStreamingRequest(async () => {
+                try {
+                    const response = await fetch(`${baseUrl}/api/chat`, {
+                        method: 'POST',
+                        headers: { 'Content-Type': 'application/json' },
+                        body: JSON.stringify({
+                            model,
+                            messages: ollamaMessages,
+                            stream: true,
+                            options: {
+                                temperature,
+                                num_predict: maxTokens,
+                            }
+                        })
+                    });
+
+                    if (!response.ok) {
+                        throw new Error(`Ollama API error: ${response.status} ${response.statusText}`);
+                    }
+
+                    console.log('[Ollama Provider] Got streaming response');
+
+                    const stream = new ReadableStream({
+                        async start(controller) {
+                            let buffer = '';
+
+                            try {
+                                response.body.on('data', (chunk) => {
+                                    buffer += chunk.toString();
+                                    const lines = buffer.split('\n');
+                                    buffer = lines.pop() || '';
+
+                                    for (const line of lines) {
+                                        if (line.trim() === '') continue;
+
+                                        try {
+                                            const data = JSON.parse(line);
+
+                                            if (data.message?.content) {
+                                                const sseData = JSON.stringify({
+                                                    choices: [{
+                                                        delta: {
+                                                            content: data.message.content
+                                                        }
+                                                    }]
+                                                });
+                                                controller.enqueue(new TextEncoder().encode(`data: ${sseData}\n\n`));
+                                            }
+
+                                            if (data.done) {
+                                                controller.enqueue(new TextEncoder().encode('data: [DONE]\n\n'));
+                                            }
+                                        } catch (e) {
+                                            console.error('[Ollama Provider] Failed to parse chunk:', e);
+                                        }
+                                    }
+                                });
+
+                                response.body.on('end', () => {
+                                    controller.close();
+                                    console.log('[Ollama Provider] Streaming completed');
+                                });
+
+                                response.body.on('error', (error) => {
+                                    console.error('[Ollama Provider] Streaming error:', error);
+                                    controller.error(error);
+                                });
+
+                            } catch (error) {
+                                console.error('[Ollama Provider] Streaming setup error:', error);
+                                controller.error(error);
+                            }
+                        }
+                    });
+
+                    return {
+                        ok: true,
+                        body: stream
+                    };
+
+                } catch (error) {
+                    console.error('[Ollama Provider] Request error:', error);
+                    throw error;
+                }
+            });
         }
     };
 }
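The streaming variant now returns `{ ok: true, body }` where `body` is a web `ReadableStream` of OpenAI-style SSE lines. A sketch of how a caller could drain it; this assumes each enqueued chunk is a complete `data: ...` line, as the producer above guarantees:

// Consumer sketch for the { ok, body } result of createStreamingLLM
async function collectStream(result) {
    const reader = result.body.getReader();
    const decoder = new TextDecoder();
    let text = '';
    while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        for (const line of decoder.decode(value).split('\n')) {
            if (!line.startsWith('data: ') || line.includes('[DONE]')) continue;
            const delta = JSON.parse(line.slice(6)).choices[0]?.delta?.content;
            if (delta) text += delta;
        }
    }
    return text;
}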
@@ -1,6 +1,7 @@
 const { exec } = require('child_process');
 const { promisify } = require('util');
 const { EventEmitter } = require('events');
+const { BrowserWindow } = require('electron');
 const path = require('path');
 const os = require('os');
 const https = require('https');
@@ -17,6 +18,19 @@ class LocalAIServiceBase extends EventEmitter {
         this.installationProgress = new Map();
     }

+    // Broadcast an event to all windows
+    _broadcastToAllWindows(eventName, data = null) {
+        BrowserWindow.getAllWindows().forEach(win => {
+            if (win && !win.isDestroyed()) {
+                if (data !== null) {
+                    win.webContents.send(eventName, data);
+                } else {
+                    win.webContents.send(eventName);
+                }
+            }
+        });
+    }
+
     getPlatform() {
         return process.platform;
     }
@@ -65,7 +79,7 @@ class LocalAIServiceBase extends EventEmitter {

     setInstallProgress(modelName, progress) {
         this.installationProgress.set(modelName, progress);
-        this.emit('install-progress', { model: modelName, progress });
+        // Changed: each service now broadcasts this directly
     }

     clearInstallProgress(modelName) {
@@ -194,15 +208,7 @@ class LocalAIServiceBase extends EventEmitter {
                     if (totalSize > 0) {
                         const progress = Math.round((downloadedSize / totalSize) * 100);

-                        // Event-based progress reporting
-                        if (modelId) {
-                            this.emit('download-progress', { 
-                                modelId, 
-                                progress, 
-                                downloadedSize, 
-                                totalSize 
-                            });
-                        }
+                        // Event-based progress reporting is handled directly by each service

                         // Keep supporting the legacy callback (for compatibility)
                         if (onProgress) {
@@ -215,7 +221,7 @@ class LocalAIServiceBase extends EventEmitter {

                 file.on('finish', () => {
                     file.close(() => {
-                        this.emit('download-complete', { url, destination, size: downloadedSize, modelId });
+                        // The download-complete event is handled directly by each service
                         resolve({ success: true, size: downloadedSize });
                     });
                 });
@@ -272,12 +278,7 @@ class LocalAIServiceBase extends EventEmitter {
                 return result;
             } catch (error) {
                 if (attempt === maxRetries) {
-                    this.emit('download-error', { 
-                        url, 
-                        error: error.message, 
-                        modelId,
-                        attempt: attempt
-                    });
+                    // The download-error event is handled directly by each service
                     throw error;
                 }

@@ -287,23 +288,6 @@ class LocalAIServiceBase extends EventEmitter {
         }
     }

-    // Event emitter methods for model pulls
-    emitPullProgress(modelId, progress, status = 'pulling') {
-        this.emit('pull-progress', { 
-            modelId, 
-            progress, 
-            status 
-        });
-    }
-
-    emitPullComplete(modelId) {
-        this.emit('pull-complete', { modelId });
-    }
-
-    emitPullError(modelId, error) {
-        this.emit('pull-error', { modelId, error });
-    }
-
     async verifyChecksum(filePath, expectedChecksum) {
         return new Promise((resolve, reject) => {
             const hash = crypto.createHash('sha256');
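With the base class no longer emitting, the generic hook that remains on `downloadWithRetry` is the legacy `onProgress` callback, and each subclass broadcasts under its own namespaced channel. A sketch of the calling pattern from inside a subclass; the channel name here is a placeholder, not one used by this commit:

// Sketch: a subclass reporting download progress via its own broadcast channel
await this.downloadWithRetry(modelInfo.url, modelPath, {
    modelId,
    onProgress: (progress) => {
        this._broadcastToAllWindows('myservice:download-progress', { modelId, progress });
    }
});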
@@ -1,6 +1,7 @@
 const Store = require('electron-store');
 const fetch = require('node-fetch');
 const { EventEmitter } = require('events');
+const { BrowserWindow } = require('electron');
 const { PROVIDERS, getProviderClass } = require('../ai/factory');
 const encryptionService = require('./encryptionService');
 const providerSettingsRepository = require('../repositories/providerSettings');
@@ -22,6 +23,19 @@ class ModelStateService extends EventEmitter {
         userModelSelectionsRepository.setAuthService(authService);
     }

+    // Broadcast an event to all windows
+    _broadcastToAllWindows(eventName, data = null) {
+        BrowserWindow.getAllWindows().forEach(win => {
+            if (win && !win.isDestroyed()) {
+                if (data !== null) {
+                    win.webContents.send(eventName, data);
+                } else {
+                    win.webContents.send(eventName);
+                }
+            }
+        });
+    }
+
     async initialize() {
         console.log('[ModelStateService] Initializing...');
         await this._loadStateForCurrentUser();
@@ -352,8 +366,8 @@ class ModelStateService extends EventEmitter {

         this._autoSelectAvailableModels([]);

-        this.emit('state-changed', this.state);
-        this.emit('settings-updated');
+        this._broadcastToAllWindows('model-state:updated', this.state);
+        this._broadcastToAllWindows('settings-updated');
     }

     getApiKey(provider) {
@@ -372,8 +386,8 @@ class ModelStateService extends EventEmitter {

             this._autoSelectAvailableModels([]);

-            this.emit('state-changed', this.state);
-            this.emit('settings-updated');
+            this._broadcastToAllWindows('model-state:updated', this.state);
+            this._broadcastToAllWindows('settings-updated');
             return true;
         }
         return false;
@@ -516,8 +530,8 @@ class ModelStateService extends EventEmitter {
             this._autoWarmUpOllamaModel(modelId, previousModelId);
         }

-        this.emit('state-changed', this.state);
-        this.emit('settings-updated');
+        this._broadcastToAllWindows('model-state:updated', this.state);
+        this._broadcastToAllWindows('settings-updated');
         return true;
     }

@@ -529,7 +543,7 @@ class ModelStateService extends EventEmitter {
      */
     async _autoWarmUpOllamaModel(newModelId, previousModelId) {
         try {
-            console.log(`[ModelStateService] 🔥 LLM model changed: ${previousModelId || 'None'} → ${newModelId}, triggering warm-up`);
+            console.log(`[ModelStateService] LLM model changed: ${previousModelId || 'None'} → ${newModelId}, triggering warm-up`);

             // Get Ollama service if available
             const ollamaService = require('./ollamaService');
@@ -545,12 +559,12 @@ class ModelStateService extends EventEmitter {
                     const success = await ollamaService.warmUpModel(newModelId);

                     if (success) {
-                        console.log(`[ModelStateService] ✅ Successfully warmed up model: ${newModelId}`);
+                        console.log(`[ModelStateService] Successfully warmed up model: ${newModelId}`);
                     } else {
-                        console.log(`[ModelStateService] ⚠️ Failed to warm up model: ${newModelId}`);
+                        console.log(`[ModelStateService] Failed to warm up model: ${newModelId}`);
                     }
                 } catch (error) {
-                    console.log(`[ModelStateService] 🚫 Error during auto warm-up for ${newModelId}:`, error.message);
+                    console.log(`[ModelStateService] Error during auto warm-up for ${newModelId}:`, error.message);
                 }
             }, 500); // 500ms delay

@@ -584,7 +598,7 @@ class ModelStateService extends EventEmitter {
         if (success) {
             const selectedModels = this.getSelectedModels();
             if (!selectedModels.llm || !selectedModels.stt) {
-                this.emit('force-show-apikey-header');
+                this._broadcastToAllWindows('force-show-apikey-header');
             }
         }
         return success;
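Note: since `state-changed` / `settings-updated` are now window broadcasts instead of EventEmitter events, in-process listeners would need to subscribe on the renderer side instead. A minimal sketch, again assuming a hypothetical preload-exposed `window.api`:

// Renderer sketch: refresh the settings view when the main process broadcasts a change
window.api.on('model-state:updated', (state) => renderSettings(state)); // renderSettings is hypothetical
window.api.on('force-show-apikey-header', () => showApiKeyHeader());    // showApiKeyHeader is hypothetical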
@@ -3,7 +3,7 @@ const { promisify } = require('util');
 const fetch = require('node-fetch');
 const path = require('path');
 const fs = require('fs').promises;
-const { app } = require('electron');
+const { app, BrowserWindow } = require('electron');
 const LocalAIServiceBase = require('./localAIServiceBase');
 const { spawnAsync } = require('../utils/spawnHelper');
 const { DOWNLOAD_CHECKSUMS } = require('../config/checksums');
@@ -27,8 +27,8 @@ class OllamaService extends LocalAIServiceBase {
         };

         // Configuration
-        this.requestTimeout = 8000; // 8s for health checks
-        this.warmupTimeout = 60000; // 60s for model warmup (increased)
+        this.requestTimeout = 0; // Timeout disabled
+        this.warmupTimeout = 120000; // 120s for model warmup
         this.healthCheckInterval = 60000; // 1min between health checks
         this.circuitBreakerThreshold = 3;
         this.circuitBreakerCooldown = 30000; // 30s
@@ -40,6 +40,19 @@ class OllamaService extends LocalAIServiceBase {
         this._startHealthMonitoring();
     }

+    // Broadcast an event to all windows
+    _broadcastToAllWindows(eventName, data = null) {
+        BrowserWindow.getAllWindows().forEach(win => {
+            if (win && !win.isDestroyed()) {
+                if (data !== null) {
+                    win.webContents.send(eventName, data);
+                } else {
+                    win.webContents.send(eventName);
+                }
+            }
+        });
+    }
+
     async getStatus() {
         try {
             const installed = await this.isInstalled();
@@ -87,14 +100,17 @@ class OllamaService extends LocalAIServiceBase {
         const controller = new AbortController();
         const timeout = options.timeout || this.requestTimeout;

-        // Set up timeout mechanism
-        const timeoutId = setTimeout(() => {
-            controller.abort();
-            this.activeRequests.delete(requestId);
-            this._recordFailure();
-        }, timeout);
-
-        this.requestTimeouts.set(requestId, timeoutId);
+        // Set up timeout mechanism only if timeout > 0
+        let timeoutId = null;
+        if (timeout > 0) {
+            timeoutId = setTimeout(() => {
+                controller.abort();
+                this.activeRequests.delete(requestId);
+                this._recordFailure();
+            }, timeout);
+
+            this.requestTimeouts.set(requestId, timeoutId);
+        }

         const requestPromise = this._executeRequest(url, {
             ...options,
@@ -115,8 +131,10 @@ class OllamaService extends LocalAIServiceBase {
             }
             throw error;
         } finally {
-            clearTimeout(timeoutId);
-            this.requestTimeouts.delete(requestId);
+            if (timeoutId !== null) {
+                clearTimeout(timeoutId);
+                this.requestTimeouts.delete(requestId);
+            }
             this.activeRequests.delete(operationType === 'health' ? 'health' : requestId);
         }
     }
@@ -377,7 +395,7 @@ class OllamaService extends LocalAIServiceBase {

                             if (progress !== null) {
                                 this.setInstallProgress(modelName, progress);
-                                this.emit('pull-progress', { 
+                                this._broadcastToAllWindows('ollama:pull-progress', { 
                                     model: modelName, 
                                     progress,
                                     status: data.status || 'downloading'
@@ -388,7 +406,7 @@ class OllamaService extends LocalAIServiceBase {
                             // Handle completion
                             if (data.status === 'success') {
                                 console.log(`[OllamaService] Successfully pulled model: ${modelName}`);
-                                this.emit('pull-complete', { model: modelName });
+                                this._broadcastToAllWindows('ollama:pull-complete', { model: modelName });
                                 this.clearInstallProgress(modelName);
                                 resolve();
                                 return;
@@ -406,7 +424,7 @@ class OllamaService extends LocalAIServiceBase {
                             const data = JSON.parse(buffer);
                             if (data.status === 'success') {
                                 console.log(`[OllamaService] Successfully pulled model: ${modelName}`);
-                                this.emit('pull-complete', { model: modelName });
+                                this._broadcastToAllWindows('ollama:pull-complete', { model: modelName });
                             }
                         } catch (parseError) {
                             console.warn('[OllamaService] Failed to parse final buffer:', buffer);
@@ -881,7 +899,7 @@ class OllamaService extends LocalAIServiceBase {
     async handleInstall() {
         try {
             const onProgress = (data) => {
-                this.emit('install-progress', data);
+                this._broadcastToAllWindows('ollama:install-progress', data);
             };

             await this.autoInstall(onProgress);
@@ -891,11 +909,11 @@ class OllamaService extends LocalAIServiceBase {
                 await this.startService();
                 onProgress({ stage: 'starting', message: 'Ollama service started.', progress: 100 });
             }
-            this.emit('install-complete', { success: true });
+            this._broadcastToAllWindows('ollama:install-complete', { success: true });
             return { success: true };
         } catch (error) {
             console.error('[OllamaService] Failed to install:', error);
-            this.emit('install-complete', { success: false, error: error.message });
+            this._broadcastToAllWindows('ollama:install-complete', { success: false, error: error.message });
             return { success: false, error: error.message };
         }
     }
@@ -963,7 +981,7 @@ class OllamaService extends LocalAIServiceBase {
         } catch (error) {
             console.error('[OllamaService] Failed to pull model:', error);
             await ollamaModelRepository.updateInstallStatus(modelName, false, false);
-            this.emit('pull-error', { model: modelName, error: error.message });
+            this._broadcastToAllWindows('ollama:pull-error', { model: modelName, error: error.message });
             return { success: false, error: error.message };
         }
     }
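The `timeout > 0` guard above makes `requestTimeout = 0` mean "no timeout" rather than "abort immediately". The same pattern in isolation, as a self-contained helper (names are illustrative, not from this codebase):

// Sketch: fetch with an optional abort timeout; 0 disables the timer
async function fetchWithOptionalTimeout(url, options = {}, timeout = 0) {
    const controller = new AbortController();
    const timer = timeout > 0 ? setTimeout(() => controller.abort(), timeout) : null;
    try {
        return await fetch(url, { ...options, signal: controller.signal });
    } finally {
        if (timer !== null) clearTimeout(timer);
    }
}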
@ -2,6 +2,7 @@ const { spawn } = require('child_process');
 | 
			
		||||
const path = require('path');
 | 
			
		||||
const fs = require('fs');
 | 
			
		||||
const os = require('os');
 | 
			
		||||
const { BrowserWindow } = require('electron');
 | 
			
		||||
const LocalAIServiceBase = require('./localAIServiceBase');
 | 
			
		||||
const { spawnAsync } = require('../utils/spawnHelper');
 | 
			
		||||
const { DOWNLOAD_CHECKSUMS } = require('../config/checksums');
 | 
			
		||||
@@ -39,6 +40,19 @@ class WhisperService extends LocalAIServiceBase {
        };
    }

    // Broadcast an event to all windows
    _broadcastToAllWindows(eventName, data = null) {
        BrowserWindow.getAllWindows().forEach(win => {
            if (win && !win.isDestroyed()) {
                if (data !== null) {
                    win.webContents.send(eventName, data);
                } else {
                    win.webContents.send(eventName);
                }
            }
        });
    }

    async initialize() {
        if (this.isInitialized) return;

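Both OllamaService and WhisperService now carry an identical _broadcastToAllWindows. A possible follow-up, offered only as a sketch and not part of this commit, would be hoisting it into LocalAIServiceBase, which both classes already extend (we assume the base class is an EventEmitter, since the services call this.emit):

    const EventEmitter = require('events');
    const { BrowserWindow } = require('electron');

    // Suggested shared location for the helper (not in this commit).
    class LocalAIServiceBase extends EventEmitter {
        _broadcastToAllWindows(eventName, data = null) {
            BrowserWindow.getAllWindows().forEach(win => {
                if (win && !win.isDestroyed()) {
                    if (data !== null) {
                        win.webContents.send(eventName, data);
                    } else {
                        win.webContents.send(eventName);
                    }
                }
            });
        }
    }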
@@ -157,18 +171,18 @@ class WhisperService extends LocalAIServiceBase {
        const modelPath = await this.getModelPath(modelId);
        const checksumInfo = DOWNLOAD_CHECKSUMS.whisper.models[modelId];

        this.emit('download-progress', { modelId, progress: 0 });
        this._broadcastToAllWindows('whisper:download-progress', { modelId, progress: 0 });

        await this.downloadWithRetry(modelInfo.url, modelPath, {
            expectedChecksum: checksumInfo?.sha256,
            modelId, // pass modelId so LocalAIServiceBase can tag the events it emits
            onProgress: (progress) => {
                this.emit('download-progress', { modelId, progress });
                this._broadcastToAllWindows('whisper:download-progress', { modelId, progress });
            }
        });

        console.log(`[WhisperService] Model ${modelId} downloaded successfully`);
        this.emit('download-complete', { modelId });
        this._broadcastToAllWindows('whisper:download-complete', { modelId });
    }

    async handleDownloadModel(modelId) {

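downloadWithRetry receives expectedChecksum from DOWNLOAD_CHECKSUMS, but its implementation is outside this diff; a hedged sketch of the kind of SHA-256 verification it would plausibly run on the downloaded file:

    const crypto = require('crypto');
    const fs = require('fs');

    // Sketch only: stream the file through a SHA-256 hash and compare.
    function verifySha256(filePath, expectedChecksum) {
        return new Promise((resolve, reject) => {
            const hash = crypto.createHash('sha256');
            fs.createReadStream(filePath)
                .on('data', chunk => hash.update(chunk))
                .on('end', () => resolve(hash.digest('hex') === expectedChecksum))
                .on('error', reject);
        });
    }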
@@ -39,11 +39,12 @@ class ListenService {
    }

    sendToRenderer(channel, data) {
        BrowserWindow.getAllWindows().forEach(win => {
            if (!win.isDestroyed()) {
                win.webContents.send(channel, data);
            }
        });
        const { windowPool } = require('../../window/windowManager');
        const listenWindow = windowPool?.get('listen');

        if (listenWindow && !listenWindow.isDestroyed()) {
            listenWindow.webContents.send(channel, data);
        }
    }

    initialize() {

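The broadcast loop is replaced with a lookup in windowManager's windowPool. The pool itself is not shown in this diff; given the .get/.set/.delete calls that do appear, a minimal sketch of its assumed shape:

    // Assumed shape of windowManager's registry: a Map keyed by window name.
    const windowPool = new Map();

    function registerWindow(name, win) {
        windowPool.set(name, win);
        win.on('closed', () => windowPool.delete(name)); // avoid stale entries
    }

    module.exports = { windowPool, registerWindow };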
@@ -35,11 +35,13 @@ class SttService {
    }

    sendToRenderer(channel, data) {
        BrowserWindow.getAllWindows().forEach(win => {
            if (!win.isDestroyed()) {
                win.webContents.send(channel, data);
            }
        });
        // Send Listen-related events only to the Listen window (prevents conflicts with the Ask window)
        const { windowPool } = require('../../../window/windowManager');
        const listenWindow = windowPool?.get('listen');

        if (listenWindow && !listenWindow.isDestroyed()) {
            listenWindow.webContents.send(channel, data);
        }
    }

    async handleSendSystemAudioContent(data, mimeType) {

@@ -28,11 +28,12 @@ class SummaryService {
    }

    sendToRenderer(channel, data) {
        BrowserWindow.getAllWindows().forEach(win => {
            if (!win.isDestroyed()) {
                win.webContents.send(channel, data);
            }
        });
        const { windowPool } = require('../../../window/windowManager');
        const listenWindow = windowPool?.get('listen');

        if (listenWindow && !listenWindow.isDestroyed()) {
            listenWindow.webContents.send(channel, data);
        }
    }

    addConversationTurn(speaker, text) {
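With this change the same sendToRenderer body appears verbatim in ListenService, SttService, and SummaryService. A follow-up suggestion (not part of this commit) would be one shared helper; the module path here is illustrative:

    // Suggested shared helper (sketch, not in this commit).
    const { windowPool } = require('../window/windowManager'); // illustrative path

    function sendToListenWindow(channel, data) {
        const listenWindow = windowPool?.get('listen');
        if (listenWindow && !listenWindow.isDestroyed()) {
            listenWindow.webContents.send(channel, data);
        }
    }

    module.exports = { sendToListenWindow };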
@@ -304,25 +305,20 @@ Keep all points concise and build upon previous analysis if provided.`,
     */
    async triggerAnalysisIfNeeded() {
        if (this.conversationHistory.length >= 5 && this.conversationHistory.length % 5 === 0) {
            console.log(`🚀 Triggering analysis (non-blocking) - ${this.conversationHistory.length} conversation texts accumulated`);
            console.log(`Triggering analysis - ${this.conversationHistory.length} conversation texts accumulated`);

            this.makeOutlineAndRequests(this.conversationHistory)
                .then(data => {
                    if (data) {
                        console.log('📤 Sending structured data to renderer');
                        this.sendToRenderer('summary-update', data);

                        // Notify callback
                        if (this.onAnalysisComplete) {
                            this.onAnalysisComplete(data);
                        }
                    } else {
                        console.log('❌ No analysis data returned from non-blocking call');
                    }
                })
                .catch(error => {
                    console.error('❌ Error in non-blocking analysis:', error);
                });
            const data = await this.makeOutlineAndRequests(this.conversationHistory);
            if (data) {
                console.log('Sending structured data to renderer');
                this.sendToRenderer('summary-update', data);

                // Notify callback
                if (this.onAnalysisComplete) {
                    this.onAnalysisComplete(data);
                }
            } else {
                console.log('No analysis data returned');
            }
        }
    }

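One behavioral note on this hunk: the old .then/.catch chain contained rejections inside triggerAnalysisIfNeeded, while the new awaited call lets a rejected makeOutlineAndRequests propagate to the caller. If the old containment is still wanted, a hedged sketch of it in the new blocking style:

    // Sketch (assumption): keep errors contained now that the call is awaited.
    try {
        const data = await this.makeOutlineAndRequests(this.conversationHistory);
        if (data) {
            console.log('Sending structured data to renderer');
            this.sendToRenderer('summary-update', data);
            if (this.onAnalysisComplete) {
                this.onAnalysisComplete(data);
            }
        } else {
            console.log('No analysis data returned');
        }
    } catch (error) {
        console.error('[SummaryService] Analysis failed:', error);
    }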
@@ -136,6 +136,9 @@ contextBridge.exposeInMainWorld('api', {
    // Listeners
    onAskStateUpdate: (callback) => ipcRenderer.on('ask:stateUpdate', callback),
    removeOnAskStateUpdate: (callback) => ipcRenderer.removeListener('ask:stateUpdate', callback),

    onAskStreamError: (callback) => ipcRenderer.on('ask-response-stream-error', callback),
    removeOnAskStreamError: (callback) => ipcRenderer.removeListener('ask-response-stream-error', callback),

    // Listeners
    onShowTextInput: (callback) => ipcRenderer.on('ask:showTextInput', callback),

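A hedged usage sketch from the renderer side; ipcRenderer.removeListener only removes a listener when given the same function reference that was registered, so the callback has to be kept (updateAskUI is a hypothetical handler, not from the source):

    // Keep the reference so removal works later.
    const updateAskUI = (state) => console.log('ask state:', state); // hypothetical
    const onState = (_event, state) => updateAskUI(state);
    window.api.onAskStateUpdate(onState);

    // e.g. on component teardown:
    window.api.removeOnAskStateUpdate(onState);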
@@ -331,14 +331,15 @@ export class ApiKeyHeader extends LitElement {
    this.ipcTimeout = 10000; // 10s for IPC calls
    this.operationTimeout = 15000; // 15s for complex operations

    // Health monitoring system
    this.healthCheck = {
      enabled: false,
      intervalId: null,
      intervalMs: 30000, // 30s
      intervalMs: 120000,
      lastCheck: 0,
      consecutiveFailures: 0,
      maxFailures: 3
      maxFailures: 5,
      skipDuringOperation: true // skip during operation
    };

    // Load user model history from localStorage
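The enabled/intervalId/intervalMs fields imply a start/stop pair around setInterval; that code is outside this diff, so the following is a sketch under assumed method names:

    // Sketch (assumed names): drive the periodic check from the config above.
    startHealthCheck() {
      if (this.healthCheck.intervalId) return;   // already running
      this.healthCheck.enabled = true;
      this.healthCheck.intervalId = setInterval(
        () => this.performHealthCheck(),         // assumed method
        this.healthCheck.intervalMs
      );
    }

    stopHealthCheck() {
      clearInterval(this.healthCheck.intervalId);
      this.healthCheck.intervalId = null;
      this.healthCheck.enabled = false;
    }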
@@ -641,6 +642,17 @@ export class ApiKeyHeader extends LitElement {
    if (this.llmProvider !== 'ollama' || this.connectionState === 'connecting') {
      return;
    }

    // skip during operation
    if (this.healthCheck.skipDuringOperation && (
      this.operationQueue.length > 0 ||
      this.connectionState === 'installing' ||
      this.connectionState === 'starting' ||
      Object.keys(this.operationMetrics.activeOperations || {}).length > 0
    )) {
      console.log('[ApiKeyHeader] Skipping health check - other operations in progress');
      return;
    }

    const now = Date.now();
    this.healthCheck.lastCheck = now;

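consecutiveFailures together with maxFailures (now 5) suggests a tolerance window before the connection is declared unhealthy; the counting logic is not in this diff, so this is only a hedged sketch, and the 'error' state name is an assumption:

    // Sketch (assumption): count failures, degrade only after maxFailures.
    _recordHealthResult(ok) {
      if (ok) {
        this.healthCheck.consecutiveFailures = 0;
        return;
      }
      this.healthCheck.consecutiveFailures += 1;
      if (this.healthCheck.consecutiveFailures >= this.healthCheck.maxFailures) {
        this.connectionState = 'error'; // state name is an assumption
      }
    }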
@@ -322,6 +322,12 @@ function createFeatureWindows(header, namesToCreate) {
                if (!app.isPackaged) {
                    ask.webContents.openDevTools({ mode: 'detach' });
                }

                ask.on('closed', () => {
                    console.log('[WindowManager] Ask window closed, removing from pool.');
                    windowPool.delete('ask');
                });

                windowPool.set('ask', ask);
                break;
            }
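Deleting the pool entry in the 'closed' handler keeps windowPool free of destroyed BrowserWindow references, which is what the listenWindow.isDestroyed() guards elsewhere in this commit defend against. A small defensive getter in the same spirit (sketch, not from the source):

    // Sketch: lookup that never hands back a destroyed window.
    function getLiveWindow(name) {
        const win = windowPool.get(name);
        return win && !win.isDestroyed() ? win : null;
    }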