Local LLM bridge communication

jhyang0 2025-07-13 18:14:24 +09:00
parent d936af46a3
commit e043b85bcd
13 changed files with 563 additions and 318 deletions
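This commit replaces renderer-targeted `event.sender.send` calls inside the local-AI services with an event-driven flow: the services extend EventEmitter, emit progress/complete/error events, and featureBridge relays those events to renderer windows over IPC. A minimal sketch of that relay pattern, with a hypothetical DownloadService standing in for ollamaService/whisperService (illustrative only, not an excerpt from the diff):

// Illustrative sketch of the pattern introduced below.
const { ipcMain, BrowserWindow } = require('electron');
const { EventEmitter } = require('events');

class DownloadService extends EventEmitter {
    async download(modelId) {
        // Real work would stream bytes here; progress is surfaced as events, not callbacks.
        this.emit('download-progress', { modelId, progress: 50 });
        this.emit('download-complete', { modelId });
        return { success: true };
    }
}
const service = new DownloadService();

// The bridge forwards service events to every open renderer window.
function broadcastToAllWindows(channel, data) {
    BrowserWindow.getAllWindows().forEach(win => {
        if (!win.isDestroyed()) win.webContents.send(channel, data);
    });
}
service.on('download-progress', data => broadcastToAllWindows('whisper:download-progress', data));
service.on('download-complete', data => broadcastToAllWindows('whisper:download-complete', data));

// IPC handlers no longer need the event sender for progress; they just run the operation.
ipcMain.handle('whisper:download-model', (_event, modelId) => service.download(modelId));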

View File

@ -1,11 +1,12 @@
// src/bridge/featureBridge.js // src/bridge/featureBridge.js
const { ipcMain, app } = require('electron'); const { ipcMain, app, BrowserWindow } = require('electron');
const settingsService = require('../features/settings/settingsService'); const settingsService = require('../features/settings/settingsService');
const authService = require('../features/common/services/authService'); const authService = require('../features/common/services/authService');
const whisperService = require('../features/common/services/whisperService'); const whisperService = require('../features/common/services/whisperService');
const ollamaService = require('../features/common/services/ollamaService'); const ollamaService = require('../features/common/services/ollamaService');
const modelStateService = require('../features/common/services/modelStateService'); const modelStateService = require('../features/common/services/modelStateService');
const shortcutsService = require('../features/shortcuts/shortcutsService'); const shortcutsService = require('../features/shortcuts/shortcutsService');
const presetRepository = require('../features/common/repositories/preset');
const askService = require('../features/ask/askService'); const askService = require('../features/ask/askService');
const listenService = require('../features/listen/listenService'); const listenService = require('../features/listen/listenService');
@ -15,6 +16,9 @@ module.exports = {
// Receive requests from the renderer
initialize() { initialize() {
// Set up service event listeners
this._setupServiceEventListeners();
// Settings Service // Settings Service
ipcMain.handle('settings:getPresets', async () => await settingsService.getPresets()); ipcMain.handle('settings:getPresets', async () => await settingsService.getPresets());
ipcMain.handle('settings:get-auto-update', async () => await settingsService.getAutoUpdateSetting()); ipcMain.handle('settings:get-auto-update', async () => await settingsService.getAutoUpdateSetting());
@ -51,7 +55,33 @@ module.exports = {
ipcMain.handle('quit-application', () => app.quit()); ipcMain.handle('quit-application', () => app.quit());
// Whisper // Whisper
ipcMain.handle('whisper:download-model', async (event, modelId) => await whisperService.handleDownloadModel(event, modelId)); ipcMain.handle('whisper:download-model', async (event, modelId) => {
// Handle progress events for this request
const progressHandler = (data) => {
if (data.modelId === modelId) {
event.sender.send('whisper:download-progress', data);
}
};
const completeHandler = (data) => {
if (data.modelId === modelId) {
event.sender.send('whisper:download-complete', data);
whisperService.removeListener('download-progress', progressHandler);
whisperService.removeListener('download-complete', completeHandler);
}
};
whisperService.on('download-progress', progressHandler);
whisperService.on('download-complete', completeHandler);
try {
return await whisperService.handleDownloadModel(modelId);
} catch (error) {
whisperService.removeListener('download-progress', progressHandler);
whisperService.removeListener('download-complete', completeHandler);
throw error;
}
});
ipcMain.handle('whisper:get-installed-models', async () => await whisperService.handleGetInstalledModels()); ipcMain.handle('whisper:get-installed-models', async () => await whisperService.handleGetInstalledModels());
// General // General
@ -60,17 +90,91 @@ module.exports = {
// Ollama // Ollama
ipcMain.handle('ollama:get-status', async () => await ollamaService.handleGetStatus()); ipcMain.handle('ollama:get-status', async () => await ollamaService.handleGetStatus());
ipcMain.handle('ollama:install', async (event) => await ollamaService.handleInstall(event)); ipcMain.handle('ollama:install', async (event) => {
ipcMain.handle('ollama:start-service', async (event) => await ollamaService.handleStartService(event)); // 개별 진행률 이벤트 처리 // Handle progress events for this request
const progressHandler = (data) => {
event.sender.send('ollama:install-progress', data);
};
const completeHandler = (data) => {
event.sender.send('ollama:install-complete', data);
ollamaService.removeListener('install-progress', progressHandler);
ollamaService.removeListener('install-complete', completeHandler);
};
ollamaService.on('install-progress', progressHandler);
ollamaService.on('install-complete', completeHandler);
try {
return await ollamaService.handleInstall();
} catch (error) {
ollamaService.removeListener('install-progress', progressHandler);
ollamaService.removeListener('install-complete', completeHandler);
throw error;
}
});
ipcMain.handle('ollama:start-service', async (event) => {
// Handle progress events for this request
const completeHandler = (data) => {
event.sender.send('ollama:install-complete', data);
ollamaService.removeListener('install-complete', completeHandler);
};
ollamaService.on('install-complete', completeHandler);
try {
return await ollamaService.handleStartService();
} catch (error) {
ollamaService.removeListener('install-complete', completeHandler);
throw error;
}
});
ipcMain.handle('ollama:ensure-ready', async () => await ollamaService.handleEnsureReady()); ipcMain.handle('ollama:ensure-ready', async () => await ollamaService.handleEnsureReady());
ipcMain.handle('ollama:get-models', async () => await ollamaService.handleGetModels()); ipcMain.handle('ollama:get-models', async () => await ollamaService.handleGetModels());
ipcMain.handle('ollama:get-model-suggestions', async () => await ollamaService.handleGetModelSuggestions()); ipcMain.handle('ollama:get-model-suggestions', async () => await ollamaService.handleGetModelSuggestions());
ipcMain.handle('ollama:pull-model', async (event, modelName) => await ollamaService.handlePullModel(event, modelName)); ipcMain.handle('ollama:pull-model', async (event, modelName) => {
// Handle progress events for this request
const progressHandler = (data) => {
if (data.model === modelName) {
event.sender.send('ollama:pull-progress', data);
}
};
const completeHandler = (data) => {
if (data.model === modelName) {
event.sender.send('ollama:pull-complete', data);
ollamaService.removeListener('pull-progress', progressHandler);
ollamaService.removeListener('pull-complete', completeHandler);
}
};
const errorHandler = (data) => {
if (data.model === modelName) {
event.sender.send('ollama:pull-error', data);
ollamaService.removeListener('pull-progress', progressHandler);
ollamaService.removeListener('pull-complete', completeHandler);
ollamaService.removeListener('pull-error', errorHandler);
}
};
ollamaService.on('pull-progress', progressHandler);
ollamaService.on('pull-complete', completeHandler);
ollamaService.on('pull-error', errorHandler);
try {
return await ollamaService.handlePullModel(modelName);
} catch (error) {
ollamaService.removeListener('pull-progress', progressHandler);
ollamaService.removeListener('pull-complete', completeHandler);
ollamaService.removeListener('pull-error', errorHandler);
throw error;
}
});
ipcMain.handle('ollama:is-model-installed', async (event, modelName) => await ollamaService.handleIsModelInstalled(modelName)); ipcMain.handle('ollama:is-model-installed', async (event, modelName) => await ollamaService.handleIsModelInstalled(modelName));
ipcMain.handle('ollama:warm-up-model', async (event, modelName) => await ollamaService.handleWarmUpModel(modelName)); ipcMain.handle('ollama:warm-up-model', async (event, modelName) => await ollamaService.handleWarmUpModel(modelName));
ipcMain.handle('ollama:auto-warm-up', async () => await ollamaService.handleAutoWarmUp()); ipcMain.handle('ollama:auto-warm-up', async () => await ollamaService.handleAutoWarmUp());
ipcMain.handle('ollama:get-warm-up-status', async () => await ollamaService.handleGetWarmUpStatus()); ipcMain.handle('ollama:get-warm-up-status', async () => await ollamaService.handleGetWarmUpStatus());
ipcMain.handle('ollama:shutdown', async (event, force = false) => await ollamaService.handleShutdown(event, force)); ipcMain.handle('ollama:shutdown', async (event, force = false) => await ollamaService.handleShutdown(force));
// Ask // Ask
ipcMain.handle('ask:sendQuestionFromAsk', async (event, userPrompt) => await askService.sendMessage(userPrompt)); ipcMain.handle('ask:sendQuestionFromAsk', async (event, userPrompt) => await askService.sendMessage(userPrompt));
@ -118,6 +222,75 @@ module.exports = {
console.log('[FeatureBridge] Initialized with all feature handlers.'); console.log('[FeatureBridge] Initialized with all feature handlers.');
}, },
// Set up service event listeners
_setupServiceEventListeners() {
// Ollama service event listeners
ollamaService.on('pull-progress', (data) => {
this._broadcastToAllWindows('ollama:pull-progress', data);
});
ollamaService.on('pull-complete', (data) => {
this._broadcastToAllWindows('ollama:pull-complete', data);
});
ollamaService.on('pull-error', (data) => {
this._broadcastToAllWindows('ollama:pull-error', data);
});
ollamaService.on('download-progress', (data) => {
this._broadcastToAllWindows('ollama:download-progress', data);
});
ollamaService.on('download-complete', (data) => {
this._broadcastToAllWindows('ollama:download-complete', data);
});
ollamaService.on('download-error', (data) => {
this._broadcastToAllWindows('ollama:download-error', data);
});
// Whisper service event listeners
whisperService.on('download-progress', (data) => {
this._broadcastToAllWindows('whisper:download-progress', data);
});
whisperService.on('download-complete', (data) => {
this._broadcastToAllWindows('whisper:download-complete', data);
});
whisperService.on('download-error', (data) => {
this._broadcastToAllWindows('whisper:download-error', data);
});
// Model State Service event listeners
modelStateService.on('state-changed', (data) => {
this._broadcastToAllWindows('model-state:updated', data);
});
modelStateService.on('settings-updated', () => {
this._broadcastToAllWindows('settings-updated');
});
modelStateService.on('force-show-apikey-header', () => {
this._broadcastToAllWindows('force-show-apikey-header');
});
console.log('[FeatureBridge] Service event listeners configured');
},
// Broadcast an event to all windows
_broadcastToAllWindows(eventName, data = null) {
BrowserWindow.getAllWindows().forEach(win => {
if (win && !win.isDestroyed()) {
if (data !== null) {
win.webContents.send(eventName, data);
} else {
win.webContents.send(eventName);
}
}
});
},
// Send state to the renderer
sendAskProgress(win, progress) { sendAskProgress(win, progress) {
win.webContents.send('feature:ask:progress', progress); win.webContents.send('feature:ask:progress', progress);

View File

@ -211,7 +211,7 @@ class AskService {
let sessionId; let sessionId;
try { try {
console.log(`[AskService] 🤖 Processing message: ${userPrompt.substring(0, 50)}...`); console.log(`[AskService] Processing message: ${userPrompt.substring(0, 50)}...`);
this.state = { this.state = {
...this.state, ...this.state,
@ -237,9 +237,9 @@ class AskService {
const screenshotBase64 = screenshotResult.success ? screenshotResult.base64 : null; const screenshotBase64 = screenshotResult.success ? screenshotResult.base64 : null;
const conversationHistory = this._formatConversationForPrompt(conversationHistoryRaw); const conversationHistory = this._formatConversationForPrompt(conversationHistoryRaw);
const systemPrompt = getSystemPrompt('pickle_glass_analysis', conversationHistory, false); const systemPrompt = getSystemPrompt('pickle_glass_analysis', conversationHistory, false);
// First attempt: include the screenshot (when available)
const messages = [ const messages = [
{ role: 'system', content: systemPrompt }, { role: 'system', content: systemPrompt },
{ {
@ -266,35 +266,78 @@ class AskService {
portkeyVirtualKey: modelInfo.provider === 'openai-glass' ? modelInfo.apiKey : undefined, portkeyVirtualKey: modelInfo.provider === 'openai-glass' ? modelInfo.apiKey : undefined,
}); });
const response = await streamingLLM.streamChat(messages); try {
const askWin = getWindowPool()?.get('ask'); const response = await streamingLLM.streamChat(messages);
const askWin = getWindowPool()?.get('ask');
if (!askWin || askWin.isDestroyed()) { if (!askWin || askWin.isDestroyed()) {
console.error("[AskService] Ask window is not available to send stream to."); console.error("[AskService] Ask window is not available to send stream to.");
response.body.getReader().cancel(); response.body.getReader().cancel();
return { success: false, error: 'Ask window is not available.' }; return { success: false, error: 'Ask window is not available.' };
}
const reader = response.body.getReader();
signal.addEventListener('abort', () => {
console.log(`[AskService] Aborting stream reader. Reason: ${signal.reason}`);
reader.cancel(signal.reason).catch(() => { /* ignore errors if already cancelled */ });
});
await this._processStream(reader, askWin, sessionId, signal);
return { success: true };
} catch (multimodalError) {
// If the multimodal request failed and a screenshot was included, retry with text only
if (screenshotBase64 && this._isMultimodalError(multimodalError)) {
console.log(`[AskService] Multimodal request failed, retrying with text-only: ${multimodalError.message}`);
// Rebuild the messages with text only
const textOnlyMessages = [
{ role: 'system', content: systemPrompt },
{
role: 'user',
content: `User Request: ${userPrompt.trim()}`
}
];
const fallbackResponse = await streamingLLM.streamChat(textOnlyMessages);
const askWin = getWindowPool()?.get('ask');
if (!askWin || askWin.isDestroyed()) {
console.error("[AskService] Ask window is not available for fallback response.");
fallbackResponse.body.getReader().cancel();
return { success: false, error: 'Ask window is not available.' };
}
const fallbackReader = fallbackResponse.body.getReader();
signal.addEventListener('abort', () => {
console.log(`[AskService] Aborting fallback stream reader. Reason: ${signal.reason}`);
fallbackReader.cancel(signal.reason).catch(() => {});
});
await this._processStream(fallbackReader, askWin, sessionId, signal);
return { success: true };
} else {
// For any other error, or when there was no screenshot, rethrow
throw multimodalError;
}
} }
const reader = response.body.getReader();
signal.addEventListener('abort', () => {
console.log(`[AskService] Aborting stream reader. Reason: ${signal.reason}`);
reader.cancel(signal.reason).catch(() => { /* ignore errors if already cancelled */ });
});
await this._processStream(reader, askWin, sessionId, signal);
return { success: true };
} catch (error) { } catch (error) {
if (error.name === 'AbortError') { console.error('[AskService] Error during message processing:', error);
console.log('[AskService] SendMessage operation was successfully aborted.'); this.state = {
return { success: true, response: 'Cancelled' }; ...this.state,
isLoading: false,
isStreaming: false,
showTextInput: true,
};
this._broadcastState();
const askWin = getWindowPool()?.get('ask');
if (askWin && !askWin.isDestroyed()) {
const streamError = error.message || 'Unknown error occurred';
askWin.webContents.send('ask-response-stream-error', { error: streamError });
} }
console.error('[AskService] Error processing message:', error);
this.state.isLoading = false;
this.state.error = error.message;
this._broadcastState();
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
} }
@ -366,6 +409,23 @@ class AskService {
} }
} }
/**
* Determines whether an error is related to multimodal input
* @private
*/
_isMultimodalError(error) {
const errorMessage = error.message?.toLowerCase() || '';
return (
errorMessage.includes('vision') ||
errorMessage.includes('image') ||
errorMessage.includes('multimodal') ||
errorMessage.includes('unsupported') ||
errorMessage.includes('image_url') ||
errorMessage.includes('400') || // Bad Request often for unsupported features
errorMessage.includes('invalid') ||
errorMessage.includes('not supported')
);
}
} }
const askService = new AskService(); const askService = new AskService();
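The fallback above hinges on _isMultimodalError, which is a keyword heuristic rather than a provider-specific check. A quick illustration with hypothetical error messages (run inside askService.js once the instance exists):

// Illustrative only - these error strings are hypothetical, not taken from any provider.
const looksMultimodal = (msg) => askService._isMultimodalError(new Error(msg));
looksMultimodal('this model does not support images');              // true  ("image", "not supported")
looksMultimodal('400 Bad Request: unknown content part image_url'); // true  ("400", "image_url")
looksMultimodal('connect ECONNREFUSED 127.0.0.1:11434');            // false -> the error is rethrown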

View File

@ -68,7 +68,8 @@ const PROVIDERS = {
handler: () => { handler: () => {
// This needs to remain a function due to its conditional logic for renderer/main process // This needs to remain a function due to its conditional logic for renderer/main process
if (typeof window === 'undefined') { if (typeof window === 'undefined') {
return require("./providers/whisper"); const { WhisperProvider } = require("./providers/whisper");
return new WhisperProvider();
} }
// Return a dummy object for the renderer process // Return a dummy object for the renderer process
return { return {

View File

@ -184,9 +184,10 @@ class WhisperProvider {
async initialize() { async initialize() {
if (!this.whisperService) { if (!this.whisperService) {
const { WhisperService } = require('../../services/whisperService'); this.whisperService = require('../../services/whisperService');
this.whisperService = new WhisperService(); if (!this.whisperService.isInitialized) {
await this.whisperService.initialize(); await this.whisperService.initialize();
}
} }
} }

View File

@ -152,7 +152,8 @@ class LocalAIServiceBase extends EventEmitter {
const { const {
onProgress = null, onProgress = null,
headers = { 'User-Agent': 'Glass-App' }, headers = { 'User-Agent': 'Glass-App' },
timeout = 300000 // 5 minutes default timeout = 300000, // 5 minutes default
modelId = null // extra option carrying the model ID
} = options; } = options;
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
@ -190,9 +191,23 @@ class LocalAIServiceBase extends EventEmitter {
response.on('data', (chunk) => { response.on('data', (chunk) => {
downloadedSize += chunk.length; downloadedSize += chunk.length;
if (onProgress && totalSize > 0) { if (totalSize > 0) {
const progress = Math.round((downloadedSize / totalSize) * 100); const progress = Math.round((downloadedSize / totalSize) * 100);
onProgress(progress, downloadedSize, totalSize);
// Event-based progress reporting
if (modelId) {
this.emit('download-progress', {
modelId,
progress,
downloadedSize,
totalSize
});
}
// Keep supporting the legacy callback (for compatibility)
if (onProgress) {
onProgress(progress, downloadedSize, totalSize);
}
} }
}); });
@ -200,7 +215,7 @@ class LocalAIServiceBase extends EventEmitter {
file.on('finish', () => { file.on('finish', () => {
file.close(() => { file.close(() => {
this.emit('download-complete', { url, destination, size: downloadedSize }); this.emit('download-complete', { url, destination, size: downloadedSize, modelId });
resolve({ success: true, size: downloadedSize }); resolve({ success: true, size: downloadedSize });
}); });
}); });
@ -216,7 +231,7 @@ class LocalAIServiceBase extends EventEmitter {
request.on('error', (err) => { request.on('error', (err) => {
file.close(); file.close();
fs.unlink(destination, () => {}); fs.unlink(destination, () => {});
this.emit('download-error', { url, error: err }); this.emit('download-error', { url, error: err, modelId });
reject(err); reject(err);
}); });
@ -230,11 +245,20 @@ class LocalAIServiceBase extends EventEmitter {
} }
async downloadWithRetry(url, destination, options = {}) { async downloadWithRetry(url, destination, options = {}) {
const { maxRetries = 3, retryDelay = 1000, expectedChecksum = null, ...downloadOptions } = options; const {
maxRetries = 3,
retryDelay = 1000,
expectedChecksum = null,
modelId = null, // extra option carrying the model ID
...downloadOptions
} = options;
for (let attempt = 1; attempt <= maxRetries; attempt++) { for (let attempt = 1; attempt <= maxRetries; attempt++) {
try { try {
const result = await this.downloadFile(url, destination, downloadOptions); const result = await this.downloadFile(url, destination, {
...downloadOptions,
modelId
});
if (expectedChecksum) { if (expectedChecksum) {
const isValid = await this.verifyChecksum(destination, expectedChecksum); const isValid = await this.verifyChecksum(destination, expectedChecksum);
@ -248,6 +272,12 @@ class LocalAIServiceBase extends EventEmitter {
return result; return result;
} catch (error) { } catch (error) {
if (attempt === maxRetries) { if (attempt === maxRetries) {
this.emit('download-error', {
url,
error: error.message,
modelId,
attempt: attempt
});
throw error; throw error;
} }
@ -257,6 +287,23 @@ class LocalAIServiceBase extends EventEmitter {
} }
} }
// Event-emitting helpers for model pulls
emitPullProgress(modelId, progress, status = 'pulling') {
this.emit('pull-progress', {
modelId,
progress,
status
});
}
emitPullComplete(modelId) {
this.emit('pull-complete', { modelId });
}
emitPullError(modelId, error) {
this.emit('pull-error', { modelId, error });
}
async verifyChecksum(filePath, expectedChecksum) { async verifyChecksum(filePath, expectedChecksum) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
const hash = crypto.createHash('sha256'); const hash = crypto.createHash('sha256');
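With these changes, download progress is reported both through the legacy onProgress callback and through modelId-keyed events. A minimal consumer-side sketch, assuming a LocalAIServiceBase subclass such as whisperService; the URL, destination, and model id are placeholders:

// Minimal sketch; values below are placeholders, not real model URLs.
async function downloadWithProgress(service) {
    service.on('download-progress', ({ modelId, progress, downloadedSize, totalSize }) => {
        console.log(`[${modelId}] ${progress}% (${downloadedSize}/${totalSize} bytes)`);
    });
    service.on('download-error', ({ modelId, error }) => {
        console.error(`[${modelId}] download failed:`, error);
    });
    return service.downloadWithRetry(
        'https://example.com/models/ggml-base.bin',
        '/tmp/ggml-base.bin',
        { modelId: 'ggml-base', maxRetries: 3 }
    );
}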

View File

@ -1,138 +0,0 @@
export class LocalProgressTracker {
constructor(serviceName) {
this.serviceName = serviceName;
this.activeOperations = new Map(); // operationId -> { controller, onProgress }
// Check if we're in renderer process with window.api available
if (!window.api) {
throw new Error(`${serviceName} requires Electron environment with contextBridge`);
}
this.globalProgressHandler = (event, data) => {
const operation = this.activeOperations.get(data.model || data.modelId);
if (operation && !operation.controller.signal.aborted) {
operation.onProgress(data.progress);
}
};
// Set up progress listeners based on service name
if (serviceName.toLowerCase() === 'ollama') {
window.api.settingsView.onOllamaPullProgress(this.globalProgressHandler);
} else if (serviceName.toLowerCase() === 'whisper') {
window.api.settingsView.onWhisperDownloadProgress(this.globalProgressHandler);
}
this.progressEvent = serviceName.toLowerCase();
}
async trackOperation(operationId, operationType, onProgress) {
if (this.activeOperations.has(operationId)) {
throw new Error(`${operationType} ${operationId} is already in progress`);
}
const controller = new AbortController();
const operation = { controller, onProgress };
this.activeOperations.set(operationId, operation);
try {
let result;
// Use appropriate API call based on service and operation
if (this.serviceName.toLowerCase() === 'ollama' && operationType === 'install') {
result = await window.api.settingsView.pullOllamaModel(operationId);
} else if (this.serviceName.toLowerCase() === 'whisper' && operationType === 'download') {
result = await window.api.settingsView.downloadWhisperModel(operationId);
} else {
throw new Error(`Unsupported operation: ${this.serviceName}:${operationType}`);
}
if (!result.success) {
throw new Error(result.error || `${operationType} failed`);
}
return true;
} catch (error) {
if (!controller.signal.aborted) {
throw error;
}
return false;
} finally {
this.activeOperations.delete(operationId);
}
}
async installModel(modelName, onProgress) {
return this.trackOperation(modelName, 'install', onProgress);
}
async downloadModel(modelId, onProgress) {
return this.trackOperation(modelId, 'download', onProgress);
}
cancelOperation(operationId) {
const operation = this.activeOperations.get(operationId);
if (operation) {
operation.controller.abort();
this.activeOperations.delete(operationId);
}
}
cancelAllOperations() {
for (const [operationId, operation] of this.activeOperations) {
operation.controller.abort();
}
this.activeOperations.clear();
}
isOperationActive(operationId) {
return this.activeOperations.has(operationId);
}
getActiveOperations() {
return Array.from(this.activeOperations.keys());
}
destroy() {
this.cancelAllOperations();
// Remove progress listeners based on service name
if (this.progressEvent === 'ollama') {
window.api.settingsView.removeOnOllamaPullProgress(this.globalProgressHandler);
} else if (this.progressEvent === 'whisper') {
window.api.settingsView.removeOnWhisperDownloadProgress(this.globalProgressHandler);
}
}
}
let trackers = new Map();
export function getLocalProgressTracker(serviceName) {
if (!trackers.has(serviceName)) {
trackers.set(serviceName, new LocalProgressTracker(serviceName));
}
return trackers.get(serviceName);
}
export function destroyLocalProgressTracker(serviceName) {
const tracker = trackers.get(serviceName);
if (tracker) {
tracker.destroy();
trackers.delete(serviceName);
}
}
export function destroyAllProgressTrackers() {
for (const [name, tracker] of trackers) {
tracker.destroy();
}
trackers.clear();
}
// Legacy compatibility exports
export function getOllamaProgressTracker() {
return getLocalProgressTracker('ollama');
}
export function destroyOllamaProgressTracker() {
destroyLocalProgressTracker('ollama');
}

View File

@ -1,6 +1,6 @@
const Store = require('electron-store'); const Store = require('electron-store');
const fetch = require('node-fetch'); const fetch = require('node-fetch');
const { ipcMain, webContents } = require('electron'); const { EventEmitter } = require('events');
const { PROVIDERS, getProviderClass } = require('../ai/factory'); const { PROVIDERS, getProviderClass } = require('../ai/factory');
const encryptionService = require('./encryptionService'); const encryptionService = require('./encryptionService');
const providerSettingsRepository = require('../repositories/providerSettings'); const providerSettingsRepository = require('../repositories/providerSettings');
@ -9,8 +9,9 @@ const userModelSelectionsRepository = require('../repositories/userModelSelectio
// Import authService directly (singleton) // Import authService directly (singleton)
const authService = require('./authService'); const authService = require('./authService');
class ModelStateService { class ModelStateService extends EventEmitter {
constructor() { constructor() {
super();
this.authService = authService; this.authService = authService;
this.store = new Store({ name: 'pickle-glass-model-state' }); this.store = new Store({ name: 'pickle-glass-model-state' });
this.state = {}; this.state = {};
@ -171,6 +172,9 @@ class ModelStateService {
console.log(`[ModelStateService] State loaded from database for user: ${userId}`); console.log(`[ModelStateService] State loaded from database for user: ${userId}`);
// Auto-select available models after loading state
this._autoSelectAvailableModels();
} catch (error) { } catch (error) {
console.error('[ModelStateService] Failed to load state from database:', error); console.error('[ModelStateService] Failed to load state from database:', error);
// Fall back to default state // Fall back to default state
@ -331,22 +335,25 @@ class ModelStateService {
} }
async setApiKey(provider, key) { async setApiKey(provider, key) {
if (provider in this.state.apiKeys) { console.log(`[ModelStateService] setApiKey: ${provider}`);
this.state.apiKeys[provider] = key; if (!provider) {
throw new Error('Provider is required');
const supportedTypes = [];
if (PROVIDERS[provider]?.llmModels.length > 0 || provider === 'ollama') {
supportedTypes.push('llm');
}
if (PROVIDERS[provider]?.sttModels.length > 0 || provider === 'whisper') {
supportedTypes.push('stt');
}
this._autoSelectAvailableModels(supportedTypes);
await this._saveState();
return true;
} }
return false;
let finalKey = key;
// Handle encryption for non-firebase providers
if (provider !== 'firebase' && key && key !== 'local') {
finalKey = await encryptionService.encrypt(key);
}
this.state.apiKeys[provider] = finalKey;
await this._saveState();
this._autoSelectAvailableModels([]);
this.emit('state-changed', this.state);
this.emit('settings-updated');
} }
getApiKey(provider) { getApiKey(provider) {
@ -358,19 +365,15 @@ class ModelStateService {
return displayKeys; return displayKeys;
} }
removeApiKey(provider) { async removeApiKey(provider) {
console.log(`[ModelStateService] Removing API key for provider: ${provider}`); if (this.state.apiKeys[provider]) {
if (provider in this.state.apiKeys) { delete this.state.apiKeys[provider];
this.state.apiKeys[provider] = null;
const llmProvider = this.getProviderForModel('llm', this.state.selectedModels.llm);
if (llmProvider === provider) this.state.selectedModels.llm = null;
const sttProvider = this.getProviderForModel('stt', this.state.selectedModels.stt);
if (sttProvider === provider) this.state.selectedModels.stt = null;
this._autoSelectAvailableModels();
this._saveState(); this._saveState();
this._logCurrentSelection();
this._autoSelectAvailableModels([]);
this.emit('state-changed', this.state);
this.emit('settings-updated');
return true; return true;
} }
return false; return false;
@ -456,11 +459,36 @@ class ModelStateService {
const available = []; const available = [];
const modelList = type === 'llm' ? 'llmModels' : 'sttModels'; const modelList = type === 'llm' ? 'llmModels' : 'sttModels';
Object.entries(this.state.apiKeys).forEach(([providerId, key]) => { for (const [providerId, key] of Object.entries(this.state.apiKeys)) {
if (key && PROVIDERS[providerId]?.[modelList]) { if (!key) continue;
// For Ollama, read the installed models from the database
if (providerId === 'ollama' && type === 'llm') {
try {
const ollamaModelRepository = require('../repositories/ollamaModel');
const installedModels = ollamaModelRepository.getInstalledModels();
const ollamaModels = installedModels.map(model => ({
id: model.name,
name: model.name
}));
available.push(...ollamaModels);
} catch (error) {
console.warn('[ModelStateService] Failed to get Ollama models from DB:', error.message);
}
}
// For Whisper, use the static model list (install status is checked separately)
else if (providerId === 'whisper' && type === 'stt') {
// Whisper models come from the static list in factory.js
if (PROVIDERS[providerId]?.[modelList]) {
available.push(...PROVIDERS[providerId][modelList]);
}
}
// Other providers keep the existing logic
else if (PROVIDERS[providerId]?.[modelList]) {
available.push(...PROVIDERS[providerId][modelList]); available.push(...PROVIDERS[providerId][modelList]);
} }
}); }
return [...new Map(available.map(item => [item.id, item])).values()]; return [...new Map(available.map(item => [item.id, item])).values()];
} }
@ -469,20 +497,28 @@ class ModelStateService {
} }
setSelectedModel(type, modelId) { setSelectedModel(type, modelId) {
const provider = this.getProviderForModel(type, modelId); const availableModels = this.getAvailableModels(type);
if (provider && this.state.apiKeys[provider]) { const isAvailable = availableModels.some(model => model.id === modelId);
const previousModel = this.state.selectedModels[type];
this.state.selectedModels[type] = modelId; if (!isAvailable) {
this._saveState(); console.warn(`[ModelStateService] Model ${modelId} is not available for type ${type}`);
return false;
// Auto warm-up for Ollama LLM models when changed
if (type === 'llm' && provider === 'ollama' && modelId !== previousModel) {
this._autoWarmUpOllamaModel(modelId, previousModel);
}
return true;
} }
return false;
const previousModelId = this.state.selectedModels[type];
this.state.selectedModels[type] = modelId;
this._saveState();
console.log(`[ModelStateService] Selected ${type} model: ${modelId} (was: ${previousModelId})`);
// Auto warm-up for Ollama models
if (type === 'llm' && modelId && modelId !== previousModelId) {
this._autoWarmUpOllamaModel(modelId, previousModelId);
}
this.emit('state-changed', this.state);
this.emit('settings-updated');
return true;
} }
/** /**
@ -544,13 +580,11 @@ class ModelStateService {
async handleRemoveApiKey(provider) { async handleRemoveApiKey(provider) {
console.log(`[ModelStateService] handleRemoveApiKey: ${provider}`); console.log(`[ModelStateService] handleRemoveApiKey: ${provider}`);
const success = this.removeApiKey(provider); const success = await this.removeApiKey(provider);
if (success) { if (success) {
const selectedModels = this.getSelectedModels(); const selectedModels = this.getSelectedModels();
if (!selectedModels.llm || !selectedModels.stt) { if (!selectedModels.llm || !selectedModels.stt) {
webContents.getAllWebContents().forEach(wc => { this.emit('force-show-apikey-header');
wc.send('force-show-apikey-header');
});
} }
} }
return success; return success;
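Since ModelStateService is now an EventEmitter, consumers subscribe to state changes instead of being pushed to through webContents, and setApiKey encrypts keys for providers other than firebase (and the literal 'local' value) before storing them. A hedged usage sketch with placeholder values:

// Sketch of the consumer-side flow; the provider name and key are placeholders.
const modelStateService = require('./modelStateService');

modelStateService.on('state-changed', (state) => {
    console.log('selected models:', state.selectedModels);
});
modelStateService.on('settings-updated', () => {
    console.log('model settings changed; refresh any cached UI state');
});

(async () => {
    // The key is encrypted via encryptionService before it reaches the store.
    await modelStateService.setApiKey('openai', 'sk-placeholder');
})();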

View File

@ -28,7 +28,7 @@ class OllamaService extends LocalAIServiceBase {
// Configuration // Configuration
this.requestTimeout = 8000; // 8s for health checks this.requestTimeout = 8000; // 8s for health checks
this.warmupTimeout = 15000; // 15s for model warmup this.warmupTimeout = 60000; // 60s for model warmup (increased)
this.healthCheckInterval = 60000; // 1min between health checks this.healthCheckInterval = 60000; // 1min between health checks
this.circuitBreakerThreshold = 3; this.circuitBreakerThreshold = 3;
this.circuitBreakerCooldown = 30000; // 30s this.circuitBreakerCooldown = 30000; // 30s
@ -639,8 +639,48 @@ class OllamaService extends LocalAIServiceBase {
return true; return true;
} catch (error) { } catch (error) {
console.error(`[OllamaService] Failed to warm up model ${modelName}:`, error.message); // Check if it's a 404 error (model not found/installed)
return false; if (error.message.includes('HTTP 404') || error.message.includes('Not Found')) {
console.log(`[OllamaService] Model ${modelName} not found (404), attempting to install...`);
try {
// Try to install the model
await this.pullModel(modelName);
console.log(`[OllamaService] Successfully installed model ${modelName}, retrying warm-up...`);
// Update database to reflect installation
await ollamaModelRepository.updateInstallStatus(modelName, true, false);
// Retry warm-up after installation
const retryResponse = await this._makeRequest(`${this.baseUrl}/api/chat`, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
model: modelName,
messages: [
{ role: 'user', content: 'Hi' }
],
stream: false,
options: {
num_predict: 1,
temperature: 0
}
}),
timeout: this.warmupTimeout
}, `warmup_retry_${modelName}`);
console.log(`[OllamaService] Successfully warmed up model ${modelName} after installation`);
return true;
} catch (installError) {
console.error(`[OllamaService] Failed to auto-install model ${modelName}:`, installError.message);
await ollamaModelRepository.updateInstallStatus(modelName, false, false);
return false;
}
} else {
console.error(`[OllamaService] Failed to warm up model ${modelName}:`, error.message);
return false;
}
} }
} }
@ -671,14 +711,8 @@ class OllamaService extends LocalAIServiceBase {
return false; return false;
} }
// Check if model is installed // Installation check removed - _performWarmUp now handles auto-install
const isInstalled = await this.isModelInstalled(llmModelId); console.log(`[OllamaService] Auto-warming up selected model: ${llmModelId} (will auto-install if needed)`);
if (!isInstalled) {
console.log(`[OllamaService] Model ${llmModelId} not installed, skipping warm-up`);
return false;
}
console.log(`[OllamaService] Auto-warming up selected model: ${llmModelId}`);
return await this.warmUpModel(llmModelId); return await this.warmUpModel(llmModelId);
} catch (error) { } catch (error) {
@ -844,10 +878,10 @@ class OllamaService extends LocalAIServiceBase {
} }
} }
async handleInstall(event) { async handleInstall() {
try { try {
const onProgress = (data) => { const onProgress = (data) => {
event.sender.send('ollama:install-progress', data); this.emit('install-progress', data);
}; };
await this.autoInstall(onProgress); await this.autoInstall(onProgress);
@ -857,26 +891,26 @@ class OllamaService extends LocalAIServiceBase {
await this.startService(); await this.startService();
onProgress({ stage: 'starting', message: 'Ollama service started.', progress: 100 }); onProgress({ stage: 'starting', message: 'Ollama service started.', progress: 100 });
} }
event.sender.send('ollama:install-complete', { success: true }); this.emit('install-complete', { success: true });
return { success: true }; return { success: true };
} catch (error) { } catch (error) {
console.error('[OllamaService] Failed to install:', error); console.error('[OllamaService] Failed to install:', error);
event.sender.send('ollama:install-complete', { success: false, error: error.message }); this.emit('install-complete', { success: false, error: error.message });
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
} }
async handleStartService(event) { async handleStartService() {
try { try {
if (!await this.isServiceRunning()) { if (!await this.isServiceRunning()) {
console.log('[OllamaService] Starting Ollama service...'); console.log('[OllamaService] Starting Ollama service...');
await this.startService(); await this.startService();
} }
event.sender.send('ollama:install-complete', { success: true }); this.emit('install-complete', { success: true });
return { success: true }; return { success: true };
} catch (error) { } catch (error) {
console.error('[OllamaService] Failed to start service:', error); console.error('[OllamaService] Failed to start service:', error);
event.sender.send('ollama:install-complete', { success: false, error: error.message }); this.emit('install-complete', { success: false, error: error.message });
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
} }
@ -914,29 +948,12 @@ class OllamaService extends LocalAIServiceBase {
} }
} }
async handlePullModel(event, modelName) { async handlePullModel(modelName) {
try { try {
console.log(`[OllamaService] Starting model pull: ${modelName}`); console.log(`[OllamaService] Starting model pull: ${modelName}`);
await ollamaModelRepository.updateInstallStatus(modelName, false, true); await ollamaModelRepository.updateInstallStatus(modelName, false, true);
const progressHandler = (data) => {
if (data.model === modelName) {
event.sender.send('ollama:pull-progress', data);
}
};
const completeHandler = (data) => {
if (data.model === modelName) {
console.log(`[OllamaService] Model ${modelName} pull completed`);
this.removeListener('pull-progress', progressHandler);
this.removeListener('pull-complete', completeHandler);
}
};
this.on('pull-progress', progressHandler);
this.on('pull-complete', completeHandler);
await this.pullModel(modelName); await this.pullModel(modelName);
await ollamaModelRepository.updateInstallStatus(modelName, true, false); await ollamaModelRepository.updateInstallStatus(modelName, true, false);
@ -946,6 +963,7 @@ class OllamaService extends LocalAIServiceBase {
} catch (error) { } catch (error) {
console.error('[OllamaService] Failed to pull model:', error); console.error('[OllamaService] Failed to pull model:', error);
await ollamaModelRepository.updateInstallStatus(modelName, false, false); await ollamaModelRepository.updateInstallStatus(modelName, false, false);
this.emit('pull-error', { model: modelName, error: error.message });
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
} }
@ -990,7 +1008,7 @@ class OllamaService extends LocalAIServiceBase {
} }
} }
async handleShutdown(event, force = false) { async handleShutdown(force = false) {
try { try {
console.log(`[OllamaService] Manual shutdown requested (force: ${force})`); console.log(`[OllamaService] Manual shutdown requested (force: ${force})`);
const success = await this.shutdown(force); const success = await this.shutdown(force);

View File

@ -157,19 +157,21 @@ class WhisperService extends LocalAIServiceBase {
const modelPath = await this.getModelPath(modelId); const modelPath = await this.getModelPath(modelId);
const checksumInfo = DOWNLOAD_CHECKSUMS.whisper.models[modelId]; const checksumInfo = DOWNLOAD_CHECKSUMS.whisper.models[modelId];
this.emit('downloadProgress', { modelId, progress: 0 }); this.emit('download-progress', { modelId, progress: 0 });
await this.downloadWithRetry(modelInfo.url, modelPath, { await this.downloadWithRetry(modelInfo.url, modelPath, {
expectedChecksum: checksumInfo?.sha256, expectedChecksum: checksumInfo?.sha256,
modelId, // pass modelId so LocalAIServiceBase can include it when emitting events
onProgress: (progress) => { onProgress: (progress) => {
this.emit('downloadProgress', { modelId, progress }); this.emit('download-progress', { modelId, progress });
} }
}); });
console.log(`[WhisperService] Model ${modelId} downloaded successfully`); console.log(`[WhisperService] Model ${modelId} downloaded successfully`);
this.emit('download-complete', { modelId });
} }
async handleDownloadModel(event, modelId) { async handleDownloadModel(modelId) {
try { try {
console.log(`[WhisperService] Handling download for model: ${modelId}`); console.log(`[WhisperService] Handling download for model: ${modelId}`);
@ -177,19 +179,7 @@ class WhisperService extends LocalAIServiceBase {
await this.initialize(); await this.initialize();
} }
const progressHandler = (data) => { await this.ensureModelAvailable(modelId);
if (data.modelId === modelId && event && event.sender) {
event.sender.send('whisper:download-progress', data);
}
};
this.on('downloadProgress', progressHandler);
try {
await this.ensureModelAvailable(modelId);
} finally {
this.removeListener('downloadProgress', progressHandler);
}
return { success: true }; return { success: true };
} catch (error) { } catch (error) {

View File

@ -27,13 +27,16 @@ const NOTIFICATION_CONFIG = {
// New facade functions for model state management // New facade functions for model state management
async function getModelSettings() { async function getModelSettings() {
try { try {
const [config, storedKeys, availableLlm, availableStt, selectedModels] = await Promise.all([ const [config, storedKeys, selectedModels] = await Promise.all([
modelStateService.getProviderConfig(), modelStateService.getProviderConfig(),
modelStateService.getAllApiKeys(), modelStateService.getAllApiKeys(),
modelStateService.getAvailableModels('llm'),
modelStateService.getAvailableModels('stt'),
modelStateService.getSelectedModels(), modelStateService.getSelectedModels(),
]); ]);
// The synchronous getters are called separately
const availableLlm = modelStateService.getAvailableModels('llm');
const availableStt = modelStateService.getAvailableModels('stt');
return { success: true, data: { config, storedKeys, availableLlm, availableStt, selectedModels } }; return { success: true, data: { config, storedKeys, availableLlm, availableStt, selectedModels } };
} catch (error) { } catch (error) {
console.error('[SettingsService] Error getting model settings:', error); console.error('[SettingsService] Error getting model settings:', error);

View File

@ -1,5 +1,5 @@
import { html, css, LitElement } from "../assets/lit-core-2.7.4.min.js" import { html, css, LitElement } from "../assets/lit-core-2.7.4.min.js"
import { getOllamaProgressTracker } from "../../features/common/services/localProgressTracker.js" // import { getOllamaProgressTracker } from "../../features/common/services/localProgressTracker.js" // removed
export class ApiKeyHeader extends LitElement { export class ApiKeyHeader extends LitElement {
//////// after_modelStateService //////// //////// after_modelStateService ////////
@ -304,7 +304,6 @@ export class ApiKeyHeader extends LitElement {
this.ollamaStatus = { installed: false, running: false }; this.ollamaStatus = { installed: false, running: false };
this.installingModel = null; this.installingModel = null;
this.installProgress = 0; this.installProgress = 0;
this.progressTracker = getOllamaProgressTracker();
this.whisperInstallingModels = {}; this.whisperInstallingModels = {};
// Professional operation management system // Professional operation management system
@ -1607,7 +1606,7 @@ export class ApiKeyHeader extends LitElement {
// Cancel any ongoing installations when component is destroyed // Cancel any ongoing installations when component is destroyed
if (this.installingModel) { if (this.installingModel) {
this.progressTracker.cancelInstallation(this.installingModel); // this.progressTracker.cancelInstallation(this.installingModel); // removed
} }
// Cleanup event listeners // Cleanup event listeners

View File

@ -1,5 +1,5 @@
import { html, css, LitElement } from '../assets/lit-core-2.7.4.min.js'; import { html, css, LitElement } from '../assets/lit-core-2.7.4.min.js';
import { getOllamaProgressTracker } from '../../features/common/services/localProgressTracker.js'; // import { getOllamaProgressTracker } from '../../features/common/services/localProgressTracker.js'; // removed
export class SettingsView extends LitElement { export class SettingsView extends LitElement {
static styles = css` static styles = css`
@ -531,7 +531,6 @@ export class SettingsView extends LitElement {
this.ollamaStatus = { installed: false, running: false }; this.ollamaStatus = { installed: false, running: false };
this.ollamaModels = []; this.ollamaModels = [];
this.installingModels = {}; // { modelName: progress } this.installingModels = {}; // { modelName: progress }
this.progressTracker = getOllamaProgressTracker();
// Whisper related // Whisper related
this.whisperModels = []; this.whisperModels = [];
this.whisperProgressTracker = null; // Will be initialized when needed this.whisperProgressTracker = null; // Will be initialized when needed
@ -595,12 +594,12 @@ export class SettingsView extends LitElement {
if (modelSettings.success) { if (modelSettings.success) {
const { config, storedKeys, availableLlm, availableStt, selectedModels } = modelSettings.data; const { config, storedKeys, availableLlm, availableStt, selectedModels } = modelSettings.data;
this.providerConfig = config; this.providerConfig = config;
this.apiKeys = storedKeys; this.apiKeys = storedKeys;
this.availableLlmModels = availableLlm; this.availableLlmModels = availableLlm;
this.availableSttModels = availableStt; this.availableSttModels = availableStt;
this.selectedLlm = selectedModels.llm; this.selectedLlm = selectedModels.llm;
this.selectedStt = selectedModels.stt; this.selectedStt = selectedModels.stt;
} }
this.presets = presets || []; this.presets = presets || [];
@ -775,31 +774,42 @@ export class SettingsView extends LitElement {
} }
async installOllamaModel(modelName) { async installOllamaModel(modelName) {
// Mark as installing
this.installingModels = { ...this.installingModels, [modelName]: 0 };
this.requestUpdate();
try { try {
// Use the clean progress tracker - no manual event management needed // Start the Ollama model download
const success = await this.progressTracker.installModel(modelName, (progress) => { this.installingModels = { ...this.installingModels, [modelName]: 0 };
this.installingModels = { ...this.installingModels, [modelName]: progress }; this.requestUpdate();
this.requestUpdate();
}); // Set up the progress event listener
const progressHandler = (event, data) => {
if (success) { if (data.modelId === modelName) {
// Refresh status after installation this.installingModels = { ...this.installingModels, [modelName]: data.progress };
await this.refreshOllamaStatus(); this.requestUpdate();
await this.refreshModelData(); }
// Auto-select the model after installation };
await this.selectModel('llm', modelName);
} else { // Register the progress event listener
alert(`Installation of ${modelName} was cancelled`); window.api.settingsView.onOllamaPullProgress(progressHandler);
try {
const result = await window.api.settingsView.pullOllamaModel(modelName);
if (result.success) {
console.log(`[SettingsView] Model ${modelName} installed successfully`);
delete this.installingModels[modelName];
this.requestUpdate();
// Refresh status
await this.refreshOllamaStatus();
await this.refreshModelData();
} else {
throw new Error(result.error || 'Installation failed');
}
} finally {
// Remove the progress event listener
window.api.settingsView.removeOnOllamaPullProgress(progressHandler);
} }
} catch (error) { } catch (error) {
console.error(`[SettingsView] Error installing model ${modelName}:`, error); console.error(`[SettingsView] Error installing model ${modelName}:`, error);
alert(`Error installing ${modelName}: ${error.message}`);
} finally {
// Automatic cleanup - no manual event listener management
delete this.installingModels[modelName]; delete this.installingModels[modelName];
this.requestUpdate(); this.requestUpdate();
} }
@ -891,7 +901,7 @@ export class SettingsView extends LitElement {
const installingModels = Object.keys(this.installingModels); const installingModels = Object.keys(this.installingModels);
if (installingModels.length > 0) { if (installingModels.length > 0) {
installingModels.forEach(modelName => { installingModels.forEach(modelName => {
this.progressTracker.cancelInstallation(modelName); window.api.settingsView.cancelOllamaInstallation(modelName);
}); });
} }
} }

View File

@ -7,6 +7,53 @@ const shortcutsService = require('../features/shortcuts/shortcutsService');
const internalBridge = require('../bridge/internalBridge'); const internalBridge = require('../bridge/internalBridge');
const permissionRepository = require('../features/common/repositories/permission'); const permissionRepository = require('../features/common/repositories/permission');
// Set up internalBridge event listeners
function setupInternalBridgeListeners() {
// Window show/hide requests
internalBridge.on('show-window', (windowName, options = {}) => {
console.log(`[WindowManager] Received show-window request for: ${windowName}`);
switch (windowName) {
case 'settings':
showSettingsWindow(options.bounds);
break;
case 'ask':
ensureAskWindowVisible();
break;
default:
console.warn(`[WindowManager] Unknown window name: ${windowName}`);
}
});
internalBridge.on('hide-window', (windowName) => {
console.log(`[WindowManager] Received hide-window request for: ${windowName}`);
switch (windowName) {
case 'settings':
hideSettingsWindow();
break;
case 'ask':
closeAskWindow();
break;
default:
console.warn(`[WindowManager] Unknown window name: ${windowName}`);
}
});
internalBridge.on('toggle-visibility', () => {
console.log(`[WindowManager] Received toggle-visibility request`);
toggleAllWindowsVisibility();
});
internalBridge.on('set-content-protection', (enabled) => {
console.log(`[WindowManager] Received set-content-protection request: ${enabled}`);
setContentProtection(enabled);
});
console.log('[WindowManager] Internal bridge listeners configured');
}
// Configure internal bridge listeners at initialization
setupInternalBridgeListeners();
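windowManager now reacts to internalBridge events instead of being called directly, so other main-process modules can request window changes without importing it. Only the listener side appears in this commit; a hypothetical caller, assuming internalBridge is a shared EventEmitter instance, would look like this:

// Hypothetical emitter side (e.g. from a feature service in the main process).
const internalBridge = require('../bridge/internalBridge');

// Ask the window manager to show the settings window near given bounds.
internalBridge.emit('show-window', 'settings', { bounds: { x: 100, y: 100, width: 400, height: 600 } });

// Hide the ask window, or toggle visibility of all windows.
internalBridge.emit('hide-window', 'ask');
internalBridge.emit('toggle-visibility');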
/* ────────────────[ GLASS BYPASS ]─────────────── */ /* ────────────────[ GLASS BYPASS ]─────────────── */
let liquidGlass; let liquidGlass;
const isLiquidGlassSupported = () => { const isLiquidGlassSupported = () => {