Refactor: Implement local AI service management system
- Add LocalAIServiceManager for centralized local AI service lifecycle management
- Refactor provider settings to support local AI service configuration
- Remove userModelSelections in favor of provider settings integration
- Update whisper service to use new local AI management system
- Implement lazy loading and auto-cleanup for local AI services
- Update UI components to reflect new local AI service architecture
parent c0cf74273a
commit 6ece74737b
@@ -1,5 +1,5 @@
|
||||
// src/bridge/featureBridge.js
|
||||
const { ipcMain, app } = require('electron');
|
||||
const { ipcMain, app, BrowserWindow } = require('electron');
|
||||
const settingsService = require('../features/settings/settingsService');
|
||||
const authService = require('../features/common/services/authService');
|
||||
const whisperService = require('../features/common/services/whisperService');
|
||||
@@ -7,6 +7,8 @@ const ollamaService = require('../features/common/services/ollamaService');
|
||||
const modelStateService = require('../features/common/services/modelStateService');
|
||||
const shortcutsService = require('../features/shortcuts/shortcutsService');
|
||||
const presetRepository = require('../features/common/repositories/preset');
|
||||
const windowBridge = require('./windowBridge');
|
||||
const localAIManager = require('../features/common/services/localAIManager');
|
||||
|
||||
const askService = require('../features/ask/askService');
|
||||
const listenService = require('../features/listen/listenService');
|
||||
@@ -40,6 +42,8 @@ module.exports = {
|
||||
ipcMain.handle('check-system-permissions', async () => await permissionService.checkSystemPermissions());
|
||||
ipcMain.handle('request-microphone-permission', async () => await permissionService.requestMicrophonePermission());
|
||||
ipcMain.handle('open-system-preferences', async (event, section) => await permissionService.openSystemPreferences(section));
|
||||
|
||||
//TODO: Need to Remove this
|
||||
ipcMain.handle('mark-permissions-completed', async () => await permissionService.markPermissionsAsCompleted());
|
||||
ipcMain.handle('check-permissions-completed', async () => await permissionService.checkPermissionsCompleted());
|
||||
|
||||
@@ -113,6 +117,115 @@ module.exports = {
|
||||
ipcMain.handle('model:are-providers-configured', () => modelStateService.areProvidersConfigured());
|
||||
ipcMain.handle('model:get-provider-config', () => modelStateService.getProviderConfig());
|
||||
|
||||
// Broadcast LocalAIManager events to all windows
|
||||
localAIManager.on('install-progress', (service, data) => {
|
||||
const event = { service, ...data };
|
||||
BrowserWindow.getAllWindows().forEach(win => {
|
||||
if (win && !win.isDestroyed()) {
|
||||
win.webContents.send('localai:install-progress', event);
|
||||
}
|
||||
});
|
||||
});
|
||||
localAIManager.on('installation-complete', (service) => {
|
||||
BrowserWindow.getAllWindows().forEach(win => {
|
||||
if (win && !win.isDestroyed()) {
|
||||
win.webContents.send('localai:installation-complete', { service });
|
||||
}
|
||||
});
|
||||
});
|
||||
localAIManager.on('error', (error) => {
|
||||
BrowserWindow.getAllWindows().forEach(win => {
|
||||
if (win && !win.isDestroyed()) {
|
||||
win.webContents.send('localai:error-occurred', error);
|
||||
}
|
||||
});
|
||||
});
|
||||
// Handle error-occurred events from LocalAIManager's error handling
|
||||
localAIManager.on('error-occurred', (error) => {
|
||||
BrowserWindow.getAllWindows().forEach(win => {
|
||||
if (win && !win.isDestroyed()) {
|
||||
win.webContents.send('localai:error-occurred', error);
|
||||
}
|
||||
});
|
||||
});
|
||||
localAIManager.on('model-ready', (data) => {
|
||||
BrowserWindow.getAllWindows().forEach(win => {
|
||||
if (win && !win.isDestroyed()) {
|
||||
win.webContents.send('localai:model-ready', data);
|
||||
}
|
||||
});
|
||||
});
|
||||
localAIManager.on('state-changed', (service, state) => {
|
||||
const event = { service, ...state };
|
||||
BrowserWindow.getAllWindows().forEach(win => {
|
||||
if (win && !win.isDestroyed()) {
|
||||
win.webContents.send('localai:service-status-changed', event);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// Start periodic state synchronization
|
||||
localAIManager.startPeriodicSync();
|
||||
|
||||
// Broadcast ModelStateService events to all windows
|
||||
modelStateService.on('state-updated', (state) => {
|
||||
BrowserWindow.getAllWindows().forEach(win => {
|
||||
if (win && !win.isDestroyed()) {
|
||||
win.webContents.send('model-state:updated', state);
|
||||
}
|
||||
});
|
||||
});
|
||||
modelStateService.on('settings-updated', () => {
|
||||
BrowserWindow.getAllWindows().forEach(win => {
|
||||
if (win && !win.isDestroyed()) {
|
||||
win.webContents.send('settings-updated');
|
||||
}
|
||||
});
|
||||
});
|
||||
modelStateService.on('force-show-apikey-header', () => {
|
||||
BrowserWindow.getAllWindows().forEach(win => {
|
||||
if (win && !win.isDestroyed()) {
|
||||
win.webContents.send('force-show-apikey-header');
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// Register the unified LocalAI handlers
|
||||
ipcMain.handle('localai:install', async (event, { service, options }) => {
|
||||
return await localAIManager.installService(service, options);
|
||||
});
|
||||
ipcMain.handle('localai:get-status', async (event, service) => {
|
||||
return await localAIManager.getServiceStatus(service);
|
||||
});
|
||||
ipcMain.handle('localai:start-service', async (event, service) => {
|
||||
return await localAIManager.startService(service);
|
||||
});
|
||||
ipcMain.handle('localai:stop-service', async (event, service) => {
|
||||
return await localAIManager.stopService(service);
|
||||
});
|
||||
ipcMain.handle('localai:install-model', async (event, { service, modelId, options }) => {
|
||||
return await localAIManager.installModel(service, modelId, options);
|
||||
});
|
||||
ipcMain.handle('localai:get-installed-models', async (event, service) => {
|
||||
return await localAIManager.getInstalledModels(service);
|
||||
});
|
||||
ipcMain.handle('localai:run-diagnostics', async (event, service) => {
|
||||
return await localAIManager.runDiagnostics(service);
|
||||
});
|
||||
ipcMain.handle('localai:repair-service', async (event, service) => {
|
||||
return await localAIManager.repairService(service);
|
||||
});
|
||||
|
||||
// Error-handling handler
|
||||
ipcMain.handle('localai:handle-error', async (event, { service, errorType, details }) => {
|
||||
return await localAIManager.handleError(service, errorType, details);
|
||||
});
|
||||
|
||||
// Query the state of all services
|
||||
ipcMain.handle('localai:get-all-states', async (event) => {
|
||||
return await localAIManager.getAllServiceStates();
|
||||
});
|
||||
|
||||
console.log('[FeatureBridge] Initialized with all feature handlers.');
|
||||
},
|
||||
|
||||
|
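As a quick orientation for the `localai:*` handlers and window broadcasts registered above, here is a minimal sketch of how a renderer could consume them. The preload/contextBridge wiring shown is an assumption for illustration; it is not part of this diff.

// Hypothetical preload exposure (assumed names; only the channel names come from this commit)
const { contextBridge, ipcRenderer } = require('electron');

contextBridge.exposeInMainWorld('localAI', {
    install: (service, options) => ipcRenderer.invoke('localai:install', { service, options }),
    getAllStates: () => ipcRenderer.invoke('localai:get-all-states'),
    onInstallProgress: (callback) =>
        ipcRenderer.on('localai:install-progress', (_event, data) => callback(data)),
    onStatusChanged: (callback) =>
        ipcRenderer.on('localai:service-status-changed', (_event, data) => callback(data)),
});

A settings view could then call `window.localAI.install('whisper', {})` and render progress from the broadcast payload, which the bridge shapes as `{ service, ...data }`.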
@@ -1,9 +1,13 @@
|
||||
// src/bridge/windowBridge.js
|
||||
const { ipcMain, shell } = require('electron');
|
||||
const windowManager = require('../window/windowManager');
|
||||
|
||||
// The bridge only registers IPC handlers (no business logic)
|
||||
module.exports = {
|
||||
initialize() {
|
||||
// Require windowManager at initialize time to avoid the circular dependency problem
|
||||
const windowManager = require('../window/windowManager');
|
||||
|
||||
// Existing IPC handlers
|
||||
ipcMain.handle('toggle-content-protection', () => windowManager.toggleContentProtection());
|
||||
ipcMain.handle('resize-header-window', (event, args) => windowManager.resizeHeaderWindow(args));
|
||||
ipcMain.handle('get-content-protection-status', () => windowManager.getContentProtectionStatus());
|
||||
|
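The comment about requiring `windowManager` inside `initialize()` is the usual lazy-require workaround for circular dependencies in CommonJS. A generic sketch of the pattern, with illustrative module names rather than the repo's real ones:

// moduleA.js (illustrative) - moduleA and moduleB require each other
module.exports = {
    initialize() {
        // Requiring here, instead of at the top of the file, means moduleB has already
        // finished loading, so we never receive a partially-initialized exports object.
        const moduleB = require('./moduleB');
        moduleB.doSomething();
    },
};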
@@ -41,7 +41,7 @@ class WhisperSTTSession extends EventEmitter {
|
||||
|
||||
startProcessingLoop() {
|
||||
this.processingInterval = setInterval(async () => {
|
||||
const minBufferSize = 24000 * 2 * 0.15;
|
||||
const minBufferSize = 16000 * 2 * 0.15;
|
||||
if (this.audioBuffer.length >= minBufferSize && !this.process) {
|
||||
console.log(`[WhisperSTT-${this.sessionId}] Processing audio chunk, buffer size: ${this.audioBuffer.length}`);
|
||||
await this.processAudioChunk();
|
||||
|
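The changed constant is easier to read with its units spelled out. Assuming 16 kHz, 16-bit mono PCM (whisper.cpp's native input format), 0.15 s of audio works out to:

// 16000 samples/s * 2 bytes/sample * 0.15 s = 4800 bytes
// (the previous 24000 * 2 * 0.15 = 7200 bytes presumably assumed a 24 kHz stream)
const minBufferSize = 16000 * 2 * 0.15; // 4800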
@@ -2,41 +2,49 @@ const DOWNLOAD_CHECKSUMS = {
|
||||
ollama: {
|
||||
dmg: {
|
||||
url: 'https://ollama.com/download/Ollama.dmg',
|
||||
sha256: null // To be updated with actual checksum
|
||||
sha256: null // TODO: add the real checksum - verification is skipped while this is null
|
||||
},
|
||||
exe: {
|
||||
url: 'https://ollama.com/download/OllamaSetup.exe',
|
||||
sha256: null // To be updated with actual checksum
|
||||
sha256: null // TODO: add the real checksum - verification is skipped while this is null
|
||||
},
|
||||
linux: {
|
||||
url: 'curl -fsSL https://ollama.com/install.sh | sh',
|
||||
sha256: null // TODO: add the real checksum - verification is skipped while this is null
|
||||
}
|
||||
},
|
||||
whisper: {
|
||||
models: {
|
||||
'whisper-tiny': {
|
||||
url: 'https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-tiny.bin',
|
||||
url: 'https://huggingface.co/ggml-org/whisper.cpp/resolve/main/ggml-tiny.bin',
|
||||
sha256: 'be07e048e1e599ad46341c8d2a135645097a538221678b7acdd1b1919c6e1b21'
|
||||
},
|
||||
'whisper-base': {
|
||||
url: 'https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.bin',
|
||||
url: 'https://huggingface.co/ggml-org/whisper.cpp/resolve/main/ggml-base.bin',
|
||||
sha256: '60ed5bc3dd14eea856493d334349b405782ddcaf0028d4b5df4088345fba2efe'
|
||||
},
|
||||
'whisper-small': {
|
||||
url: 'https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-small.bin',
|
||||
url: 'https://huggingface.co/ggml-org/whisper.cpp/resolve/main/ggml-small.bin',
|
||||
sha256: '1be3a9b2063867b937e64e2ec7483364a79917e157fa98c5d94b5c1fffea987b'
|
||||
},
|
||||
'whisper-medium': {
|
||||
url: 'https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-medium.bin',
|
||||
url: 'https://huggingface.co/ggml-org/whisper.cpp/resolve/main/ggml-medium.bin',
|
||||
sha256: '6c14d5adee5f86394037b4e4e8b59f1673b6cee10e3cf0b11bbdbee79c156208'
|
||||
}
|
||||
},
|
||||
binaries: {
|
||||
'v1.7.6': {
|
||||
mac: {
|
||||
url: 'https://github.com/ggml-org/whisper.cpp/releases/download/v1.7.6/whisper-cpp-v1.7.6-mac-x64.zip',
|
||||
sha256: null // TODO: add the real checksum - verification is skipped while this is null
|
||||
},
|
||||
windows: {
|
||||
url: 'https://github.com/ggerganov/whisper.cpp/releases/download/v1.7.6/whisper-cpp-v1.7.6-win-x64.zip',
|
||||
sha256: null // To be updated with actual checksum
|
||||
url: 'https://github.com/ggml-org/whisper.cpp/releases/download/v1.7.6/whisper-cpp-v1.7.6-win-x64.zip',
|
||||
sha256: null // TODO: add the real checksum - verification is skipped while this is null
|
||||
},
|
||||
linux: {
|
||||
url: 'https://github.com/ggerganov/whisper.cpp/releases/download/v1.7.6/whisper-cpp-v1.7.6-linux-x64.tar.gz',
|
||||
sha256: null // To be updated with actual checksum
|
||||
url: 'https://github.com/ggml-org/whisper.cpp/releases/download/v1.7.6/whisper-cpp-v1.7.6-linux-x64.tar.gz',
|
||||
sha256: null // TODO: add the real checksum - verification is skipped while this is null
|
||||
}
|
||||
}
|
||||
}
|
||||
|
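Several entries above deliberately leave `sha256: null`, which the download path treats as "skip verification". A minimal sketch of how a caller might consult this table, assuming the `../config/checksums` export used later in this diff and an object exposing the `downloadWithRetry` helper that also appears below:

const { DOWNLOAD_CHECKSUMS } = require('../config/checksums'); // path as used by whisperService

async function fetchWhisperModel(service, modelId, destination) {
    const entry = DOWNLOAD_CHECKSUMS.whisper.models[modelId];
    if (!entry) throw new Error(`Unknown model: ${modelId}`);
    // expectedChecksum stays null for unpinned entries, so verification is skipped for them
    return service.downloadWithRetry(entry.url, destination, {
        expectedChecksum: entry.sha256,
        modelId,
    });
}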
@@ -96,21 +96,13 @@ const LATEST_SCHEMA = {
|
||||
{ name: 'api_key', type: 'TEXT' },
|
||||
{ name: 'selected_llm_model', type: 'TEXT' },
|
||||
{ name: 'selected_stt_model', type: 'TEXT' },
|
||||
{ name: 'is_active_llm', type: 'INTEGER DEFAULT 0' },
|
||||
{ name: 'is_active_stt', type: 'INTEGER DEFAULT 0' },
|
||||
{ name: 'created_at', type: 'INTEGER' },
|
||||
{ name: 'updated_at', type: 'INTEGER' }
|
||||
],
|
||||
constraints: ['PRIMARY KEY (uid, provider)']
|
||||
},
|
||||
user_model_selections: {
|
||||
columns: [
|
||||
{ name: 'uid', type: 'TEXT PRIMARY KEY' },
|
||||
{ name: 'selected_llm_provider', type: 'TEXT' },
|
||||
{ name: 'selected_llm_model', type: 'TEXT' },
|
||||
{ name: 'selected_stt_provider', type: 'TEXT' },
|
||||
{ name: 'selected_stt_model', type: 'TEXT' },
|
||||
{ name: 'updated_at', type: 'INTEGER' }
|
||||
]
|
||||
},
|
||||
shortcuts: {
|
||||
columns: [
|
||||
{ name: 'action', type: 'TEXT PRIMARY KEY' },
|
||||
|
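For reference, the updated `provider_settings` definition corresponds to roughly the following DDL. This is a sketch derived from the column list and the `PRIMARY KEY (uid, provider)` constraint above; the actual CREATE statement is generated elsewhere and the `uid`/`provider` columns are assumed from the constraint, not shown in this hunk.

// Assuming a better-sqlite3-style handle, as the repository code's prepare/run/transaction usage suggests
const db = require('../../services/sqliteClient').getDb(); // path as used by the SQLite repository
db.exec(`
    CREATE TABLE IF NOT EXISTS provider_settings (
        uid                TEXT,
        provider           TEXT,
        api_key            TEXT,
        selected_llm_model TEXT,
        selected_stt_model TEXT,
        is_active_llm      INTEGER DEFAULT 0,
        is_active_stt      INTEGER DEFAULT 0,
        created_at         INTEGER,
        updated_at         INTEGER,
        PRIMARY KEY (uid, provider)
    )
`);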
@@ -74,10 +74,88 @@ async function removeAllByUid(uid) {
|
||||
}
|
||||
}
|
||||
|
||||
// Get active provider for a specific type (llm or stt)
|
||||
async function getActiveProvider(uid, type) {
|
||||
try {
|
||||
const column = type === 'llm' ? 'is_active_llm' : 'is_active_stt';
|
||||
const q = query(providerSettingsCol(),
|
||||
where('uid', '==', uid),
|
||||
where(column, '==', true)
|
||||
);
|
||||
const querySnapshot = await getDocs(q);
|
||||
|
||||
if (querySnapshot.empty) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const doc = querySnapshot.docs[0];
|
||||
return { id: doc.id, ...doc.data() };
|
||||
} catch (error) {
|
||||
console.error('[ProviderSettings Firebase] Error getting active provider:', error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// Set active provider for a specific type
|
||||
async function setActiveProvider(uid, provider, type) {
|
||||
try {
|
||||
const column = type === 'llm' ? 'is_active_llm' : 'is_active_stt';
|
||||
|
||||
// First, deactivate all providers for this type
|
||||
const allSettings = await getAllByUid(uid);
|
||||
const updatePromises = allSettings.map(setting => {
|
||||
const docRef = doc(providerSettingsCol(), setting.id);
|
||||
return setDoc(docRef, { [column]: false }, { merge: true });
|
||||
});
|
||||
await Promise.all(updatePromises);
|
||||
|
||||
// Then activate the specified provider
|
||||
if (provider) {
|
||||
const docRef = doc(providerSettingsCol(), `${uid}_${provider}`);
|
||||
await setDoc(docRef, { [column]: true }, { merge: true });
|
||||
}
|
||||
|
||||
return { success: true };
|
||||
} catch (error) {
|
||||
console.error('[ProviderSettings Firebase] Error setting active provider:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// Get all active settings (both llm and stt)
|
||||
async function getActiveSettings(uid) {
|
||||
try {
|
||||
// Firebase doesn't support OR queries in this way, so we'll get all settings and filter
|
||||
const allSettings = await getAllByUid(uid);
|
||||
|
||||
const activeSettings = {
|
||||
llm: null,
|
||||
stt: null
|
||||
};
|
||||
|
||||
allSettings.forEach(setting => {
|
||||
if (setting.is_active_llm) {
|
||||
activeSettings.llm = setting;
|
||||
}
|
||||
if (setting.is_active_stt) {
|
||||
activeSettings.stt = setting;
|
||||
}
|
||||
});
|
||||
|
||||
return activeSettings;
|
||||
} catch (error) {
|
||||
console.error('[ProviderSettings Firebase] Error getting active settings:', error);
|
||||
return { llm: null, stt: null };
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getByProvider,
|
||||
getAllByUid,
|
||||
upsert,
|
||||
remove,
|
||||
removeAllByUid
|
||||
removeAllByUid,
|
||||
getActiveProvider,
|
||||
setActiveProvider,
|
||||
getActiveSettings
|
||||
};
|
@@ -56,6 +56,24 @@ const providerSettingsRepositoryAdapter = {
|
||||
const repo = getBaseRepository();
|
||||
const uid = authService.getCurrentUserId();
|
||||
return await repo.removeAllByUid(uid);
|
||||
},
|
||||
|
||||
async getActiveProvider(type) {
|
||||
const repo = getBaseRepository();
|
||||
const uid = authService.getCurrentUserId();
|
||||
return await repo.getActiveProvider(uid, type);
|
||||
},
|
||||
|
||||
async setActiveProvider(provider, type) {
|
||||
const repo = getBaseRepository();
|
||||
const uid = authService.getCurrentUserId();
|
||||
return await repo.setActiveProvider(uid, provider, type);
|
||||
},
|
||||
|
||||
async getActiveSettings() {
|
||||
const repo = getBaseRepository();
|
||||
const uid = authService.getCurrentUserId();
|
||||
return await repo.getActiveSettings(uid);
|
||||
}
|
||||
};
|
||||
|
||||
|
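A minimal sketch of how a service-layer caller might use the new adapter methods; the adapter resolves the uid and picks the Firebase or SQLite backend internally. The provider id 'openai' and the call site are illustrative, not taken from this diff.

const providerSettingsRepository = require('../repositories/providerSettings'); // path as used by modelStateService

async function activateLlmProvider() {
    // Make 'openai' the active LLM provider, then read back both active slots
    await providerSettingsRepository.setActiveProvider('openai', 'llm');
    const { llm, stt } = await providerSettingsRepository.getActiveSettings();
    console.log('Active LLM row:', llm); // contains selected_llm_model, api_key, ...
    console.log('Active STT row:', stt); // null until an STT provider is activated
}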
@@ -1,15 +1,15 @@
|
||||
const sqliteClient = require('../../services/sqliteClient');
|
||||
const encryptionService = require('../../services/encryptionService');
|
||||
|
||||
function getByProvider(uid, provider) {
|
||||
const db = sqliteClient.getDb();
|
||||
const stmt = db.prepare('SELECT * FROM provider_settings WHERE uid = ? AND provider = ?');
|
||||
const result = stmt.get(uid, provider) || null;
|
||||
|
||||
if (result && result.api_key) {
|
||||
// Decrypt API key if it exists
|
||||
result.api_key = encryptionService.decrypt(result.api_key);
|
||||
}
|
||||
// if (result && result.api_key) {
|
||||
// // Decrypt API key if it exists
|
||||
// result.api_key = result.api_key;
|
||||
// }
|
||||
|
||||
|
||||
return result;
|
||||
}
|
||||
@@ -22,40 +22,49 @@ function getAllByUid(uid) {
|
||||
// Decrypt API keys for all results
|
||||
return results.map(result => {
|
||||
if (result.api_key) {
|
||||
result.api_key = encryptionService.decrypt(result.api_key);
|
||||
result.api_key = result.api_key;
|
||||
}
|
||||
return result;
|
||||
});
|
||||
}
|
||||
|
||||
function upsert(uid, provider, settings) {
|
||||
// Validate: prevent direct setting of active status
|
||||
if (settings.is_active_llm || settings.is_active_stt) {
|
||||
console.warn('[ProviderSettings] Warning: is_active_llm/is_active_stt should not be set directly. Use setActiveProvider() instead.');
|
||||
}
|
||||
|
||||
const db = sqliteClient.getDb();
|
||||
|
||||
// Encrypt API key if it exists
|
||||
const encryptedSettings = { ...settings };
|
||||
if (encryptedSettings.api_key) {
|
||||
encryptedSettings.api_key = encryptionService.encrypt(encryptedSettings.api_key);
|
||||
}
|
||||
// const encryptedSettings = { ...settings };
|
||||
// if (encryptedSettings.api_key) {
|
||||
// encryptedSettings.api_key = encryptedSettings.api_key;
|
||||
// }
|
||||
|
||||
// Use SQLite's UPSERT syntax (INSERT ... ON CONFLICT ... DO UPDATE)
|
||||
const stmt = db.prepare(`
|
||||
INSERT INTO provider_settings (uid, provider, api_key, selected_llm_model, selected_stt_model, created_at, updated_at)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||
INSERT INTO provider_settings (uid, provider, api_key, selected_llm_model, selected_stt_model, is_active_llm, is_active_stt, created_at, updated_at)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
ON CONFLICT(uid, provider) DO UPDATE SET
|
||||
api_key = excluded.api_key,
|
||||
selected_llm_model = excluded.selected_llm_model,
|
||||
selected_stt_model = excluded.selected_stt_model,
|
||||
-- is_active_llm and is_active_stt are NOT updated here
|
||||
-- Use setActiveProvider() to change active status
|
||||
updated_at = excluded.updated_at
|
||||
`);
|
||||
|
||||
const result = stmt.run(
|
||||
uid,
|
||||
provider,
|
||||
encryptedSettings.api_key || null,
|
||||
encryptedSettings.selected_llm_model || null,
|
||||
encryptedSettings.selected_stt_model || null,
|
||||
encryptedSettings.created_at || Date.now(),
|
||||
encryptedSettings.updated_at
|
||||
settings.api_key || null,
|
||||
settings.selected_llm_model || null,
|
||||
settings.selected_stt_model || null,
|
||||
0, // is_active_llm - always 0, use setActiveProvider to activate
|
||||
0, // is_active_stt - always 0, use setActiveProvider to activate
|
||||
settings.created_at || Date.now(),
|
||||
settings.updated_at
|
||||
);
|
||||
|
||||
return { changes: result.changes };
|
||||
@@ -75,10 +84,79 @@ function removeAllByUid(uid) {
|
||||
return { changes: result.changes };
|
||||
}
|
||||
|
||||
// Get active provider for a specific type (llm or stt)
|
||||
function getActiveProvider(uid, type) {
|
||||
const db = sqliteClient.getDb();
|
||||
const column = type === 'llm' ? 'is_active_llm' : 'is_active_stt';
|
||||
const stmt = db.prepare(`SELECT * FROM provider_settings WHERE uid = ? AND ${column} = 1`);
|
||||
const result = stmt.get(uid) || null;
|
||||
|
||||
if (result && result.api_key) {
|
||||
result.api_key = result.api_key;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// Set active provider for a specific type
|
||||
function setActiveProvider(uid, provider, type) {
|
||||
const db = sqliteClient.getDb();
|
||||
const column = type === 'llm' ? 'is_active_llm' : 'is_active_stt';
|
||||
|
||||
// Start transaction to ensure only one provider is active
|
||||
db.transaction(() => {
|
||||
// First, deactivate all providers for this type
|
||||
const deactivateStmt = db.prepare(`UPDATE provider_settings SET ${column} = 0 WHERE uid = ?`);
|
||||
deactivateStmt.run(uid);
|
||||
|
||||
// Then activate the specified provider
|
||||
if (provider) {
|
||||
const activateStmt = db.prepare(`UPDATE provider_settings SET ${column} = 1 WHERE uid = ? AND provider = ?`);
|
||||
activateStmt.run(uid, provider);
|
||||
}
|
||||
})();
|
||||
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
// Get all active settings (both llm and stt)
|
||||
function getActiveSettings(uid) {
|
||||
const db = sqliteClient.getDb();
|
||||
const stmt = db.prepare(`
|
||||
SELECT * FROM provider_settings
|
||||
WHERE uid = ? AND (is_active_llm = 1 OR is_active_stt = 1)
|
||||
ORDER BY provider
|
||||
`);
|
||||
const results = stmt.all(uid);
|
||||
|
||||
// Decrypt API keys and organize by type
|
||||
const activeSettings = {
|
||||
llm: null,
|
||||
stt: null
|
||||
};
|
||||
|
||||
results.forEach(result => {
|
||||
if (result.api_key) {
|
||||
result.api_key = result.api_key;
|
||||
}
|
||||
if (result.is_active_llm) {
|
||||
activeSettings.llm = result;
|
||||
}
|
||||
if (result.is_active_stt) {
|
||||
activeSettings.stt = result;
|
||||
}
|
||||
});
|
||||
|
||||
return activeSettings;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getByProvider,
|
||||
getAllByUid,
|
||||
upsert,
|
||||
remove,
|
||||
removeAllByUid
|
||||
removeAllByUid,
|
||||
getActiveProvider,
|
||||
setActiveProvider,
|
||||
getActiveSettings
|
||||
};
|
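The transaction in `setActiveProvider` is what enforces the "at most one active provider per type" invariant that `upsert` deliberately refuses to touch. A small sketch of the intended call order against the SQLite repository, using hypothetical uid and model values:

const providerSettings = require('./sqlite.repository'); // assumed path inside the repository folder

// upsert stores credentials and model choices but always writes is_active_* = 0
providerSettings.upsert('user-123', 'ollama', {
    api_key: null,
    selected_llm_model: 'llama3:8b', // hypothetical model id
    updated_at: Date.now(),
});

// Activation is a separate, transactional step: every row for the uid is reset to 0 and
// exactly one row is flipped to 1, so two LLM providers can never be active at once.
providerSettings.setActiveProvider('user-123', 'ollama', 'llm');

// Passing null deactivates the type entirely (used when no model is selected).
providerSettings.setActiveProvider('user-123', null, 'llm');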
@@ -1,55 +0,0 @@
|
||||
const { collection, doc, getDoc, setDoc, deleteDoc } = require('firebase/firestore');
|
||||
const { getFirestoreInstance: getFirestore } = require('../../services/firebaseClient');
|
||||
const { createEncryptedConverter } = require('../firestoreConverter');
|
||||
|
||||
// Create encrypted converter for user model selections
|
||||
const userModelSelectionsConverter = createEncryptedConverter([
|
||||
'selected_llm_provider',
|
||||
'selected_llm_model',
|
||||
'selected_stt_provider',
|
||||
'selected_stt_model'
|
||||
]);
|
||||
|
||||
function userModelSelectionsCol() {
|
||||
const db = getFirestore();
|
||||
return collection(db, 'user_model_selections').withConverter(userModelSelectionsConverter);
|
||||
}
|
||||
|
||||
async function get(uid) {
|
||||
try {
|
||||
const docRef = doc(userModelSelectionsCol(), uid);
|
||||
const docSnap = await getDoc(docRef);
|
||||
return docSnap.exists() ? { id: docSnap.id, ...docSnap.data() } : null;
|
||||
} catch (error) {
|
||||
console.error('[UserModelSelections Firebase] Error getting user model selections:', error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
async function upsert(uid, selections) {
|
||||
try {
|
||||
const docRef = doc(userModelSelectionsCol(), uid);
|
||||
await setDoc(docRef, selections, { merge: true });
|
||||
return { changes: 1 };
|
||||
} catch (error) {
|
||||
console.error('[UserModelSelections Firebase] Error upserting user model selections:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async function remove(uid) {
|
||||
try {
|
||||
const docRef = doc(userModelSelectionsCol(), uid);
|
||||
await deleteDoc(docRef);
|
||||
return { changes: 1 };
|
||||
} catch (error) {
|
||||
console.error('[UserModelSelections Firebase] Error removing user model selections:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
get,
|
||||
upsert,
|
||||
remove
|
||||
};
|
@@ -1,50 +0,0 @@
|
||||
const firebaseRepository = require('./firebase.repository');
|
||||
const sqliteRepository = require('./sqlite.repository');
|
||||
|
||||
let authService = null;
|
||||
|
||||
function setAuthService(service) {
|
||||
authService = service;
|
||||
}
|
||||
|
||||
function getBaseRepository() {
|
||||
if (!authService) {
|
||||
throw new Error('AuthService not set for userModelSelections repository');
|
||||
}
|
||||
|
||||
const user = authService.getCurrentUser();
|
||||
return user.isLoggedIn ? firebaseRepository : sqliteRepository;
|
||||
}
|
||||
|
||||
const userModelSelectionsRepositoryAdapter = {
|
||||
async get() {
|
||||
const repo = getBaseRepository();
|
||||
const uid = authService.getCurrentUserId();
|
||||
return await repo.get(uid);
|
||||
},
|
||||
|
||||
async upsert(selections) {
|
||||
const repo = getBaseRepository();
|
||||
const uid = authService.getCurrentUserId();
|
||||
const now = Date.now();
|
||||
|
||||
const selectionsWithMeta = {
|
||||
...selections,
|
||||
uid,
|
||||
updated_at: now
|
||||
};
|
||||
|
||||
return await repo.upsert(uid, selectionsWithMeta);
|
||||
},
|
||||
|
||||
async remove() {
|
||||
const repo = getBaseRepository();
|
||||
const uid = authService.getCurrentUserId();
|
||||
return await repo.remove(uid);
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
...userModelSelectionsRepositoryAdapter,
|
||||
setAuthService
|
||||
};
|
@@ -1,48 +0,0 @@
|
||||
const sqliteClient = require('../../services/sqliteClient');
|
||||
|
||||
function get(uid) {
|
||||
const db = sqliteClient.getDb();
|
||||
const stmt = db.prepare('SELECT * FROM user_model_selections WHERE uid = ?');
|
||||
return stmt.get(uid) || null;
|
||||
}
|
||||
|
||||
function upsert(uid, selections) {
|
||||
const db = sqliteClient.getDb();
|
||||
|
||||
// Use SQLite's UPSERT syntax (INSERT ... ON CONFLICT ... DO UPDATE)
|
||||
const stmt = db.prepare(`
|
||||
INSERT INTO user_model_selections (uid, selected_llm_provider, selected_llm_model,
|
||||
selected_stt_provider, selected_stt_model, updated_at)
|
||||
VALUES (?, ?, ?, ?, ?, ?)
|
||||
ON CONFLICT(uid) DO UPDATE SET
|
||||
selected_llm_provider = excluded.selected_llm_provider,
|
||||
selected_llm_model = excluded.selected_llm_model,
|
||||
selected_stt_provider = excluded.selected_stt_provider,
|
||||
selected_stt_model = excluded.selected_stt_model,
|
||||
updated_at = excluded.updated_at
|
||||
`);
|
||||
|
||||
const result = stmt.run(
|
||||
uid,
|
||||
selections.selected_llm_provider || null,
|
||||
selections.selected_llm_model || null,
|
||||
selections.selected_stt_provider || null,
|
||||
selections.selected_stt_model || null,
|
||||
selections.updated_at
|
||||
);
|
||||
|
||||
return { changes: result.changes };
|
||||
}
|
||||
|
||||
function remove(uid) {
|
||||
const db = sqliteClient.getDb();
|
||||
const stmt = db.prepare('DELETE FROM user_model_selections WHERE uid = ?');
|
||||
const result = stmt.run(uid);
|
||||
return { changes: result.changes };
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
get,
|
||||
upsert,
|
||||
remove
|
||||
};
|
@@ -6,7 +6,6 @@ const encryptionService = require('./encryptionService');
|
||||
const migrationService = require('./migrationService');
|
||||
const sessionRepository = require('../repositories/session');
|
||||
const providerSettingsRepository = require('../repositories/providerSettings');
|
||||
const userModelSelectionsRepository = require('../repositories/userModelSelections');
|
||||
|
||||
async function getVirtualKeyByEmail(email, idToken) {
|
||||
if (!idToken) {
|
||||
@@ -48,7 +47,6 @@ class AuthService {
|
||||
|
||||
sessionRepository.setAuthService(this);
|
||||
providerSettingsRepository.setAuthService(this);
|
||||
userModelSelectionsRepository.setAuthService(this);
|
||||
}
|
||||
|
||||
initialize() {
|
||||
|
@@ -1,308 +0,0 @@
|
||||
const { exec } = require('child_process');
|
||||
const { promisify } = require('util');
|
||||
const { EventEmitter } = require('events');
|
||||
const { BrowserWindow } = require('electron');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
const https = require('https');
|
||||
const fs = require('fs');
|
||||
const crypto = require('crypto');
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
class LocalAIServiceBase extends EventEmitter {
|
||||
constructor(serviceName) {
|
||||
super();
|
||||
this.serviceName = serviceName;
|
||||
this.baseUrl = null;
|
||||
this.installationProgress = new Map();
|
||||
}
|
||||
|
||||
// Broadcast an event to all windows
|
||||
_broadcastToAllWindows(eventName, data = null) {
|
||||
BrowserWindow.getAllWindows().forEach(win => {
|
||||
if (win && !win.isDestroyed()) {
|
||||
if (data !== null) {
|
||||
win.webContents.send(eventName, data);
|
||||
} else {
|
||||
win.webContents.send(eventName);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
getPlatform() {
|
||||
return process.platform;
|
||||
}
|
||||
|
||||
async checkCommand(command) {
|
||||
try {
|
||||
const platform = this.getPlatform();
|
||||
const checkCmd = platform === 'win32' ? 'where' : 'which';
|
||||
const { stdout } = await execAsync(`${checkCmd} ${command}`);
|
||||
return stdout.trim();
|
||||
} catch (error) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
async isInstalled() {
|
||||
throw new Error('isInstalled() must be implemented by subclass');
|
||||
}
|
||||
|
||||
async isServiceRunning() {
|
||||
throw new Error('isServiceRunning() must be implemented by subclass');
|
||||
}
|
||||
|
||||
async startService() {
|
||||
throw new Error('startService() must be implemented by subclass');
|
||||
}
|
||||
|
||||
async stopService() {
|
||||
throw new Error('stopService() must be implemented by subclass');
|
||||
}
|
||||
|
||||
async waitForService(checkFn, maxAttempts = 30, delayMs = 1000) {
|
||||
for (let i = 0; i < maxAttempts; i++) {
|
||||
if (await checkFn()) {
|
||||
console.log(`[${this.serviceName}] Service is ready`);
|
||||
return true;
|
||||
}
|
||||
await new Promise(resolve => setTimeout(resolve, delayMs));
|
||||
}
|
||||
throw new Error(`${this.serviceName} service failed to start within timeout`);
|
||||
}
|
||||
|
||||
getInstallProgress(modelName) {
|
||||
return this.installationProgress.get(modelName) || 0;
|
||||
}
|
||||
|
||||
setInstallProgress(modelName, progress) {
|
||||
this.installationProgress.set(modelName, progress);
|
||||
// Changed so that each service broadcasts progress directly
|
||||
}
|
||||
|
||||
clearInstallProgress(modelName) {
|
||||
this.installationProgress.delete(modelName);
|
||||
}
|
||||
|
||||
async autoInstall(onProgress) {
|
||||
const platform = this.getPlatform();
|
||||
console.log(`[${this.serviceName}] Starting auto-installation for ${platform}`);
|
||||
|
||||
try {
|
||||
switch(platform) {
|
||||
case 'darwin':
|
||||
return await this.installMacOS(onProgress);
|
||||
case 'win32':
|
||||
return await this.installWindows(onProgress);
|
||||
case 'linux':
|
||||
return await this.installLinux();
|
||||
default:
|
||||
throw new Error(`Unsupported platform: ${platform}`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`[${this.serviceName}] Auto-installation failed:`, error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async installMacOS() {
|
||||
throw new Error('installMacOS() must be implemented by subclass');
|
||||
}
|
||||
|
||||
async installWindows() {
|
||||
throw new Error('installWindows() must be implemented by subclass');
|
||||
}
|
||||
|
||||
async installLinux() {
|
||||
throw new Error('installLinux() must be implemented by subclass');
|
||||
}
|
||||
|
||||
// parseProgress method removed - using proper REST API now
|
||||
|
||||
async shutdown(force = false) {
|
||||
console.log(`[${this.serviceName}] Starting ${force ? 'forced' : 'graceful'} shutdown...`);
|
||||
|
||||
const isRunning = await this.isServiceRunning();
|
||||
if (!isRunning) {
|
||||
console.log(`[${this.serviceName}] Service not running, nothing to shutdown`);
|
||||
return true;
|
||||
}
|
||||
|
||||
const platform = this.getPlatform();
|
||||
|
||||
try {
|
||||
switch(platform) {
|
||||
case 'darwin':
|
||||
return await this.shutdownMacOS(force);
|
||||
case 'win32':
|
||||
return await this.shutdownWindows(force);
|
||||
case 'linux':
|
||||
return await this.shutdownLinux(force);
|
||||
default:
|
||||
console.warn(`[${this.serviceName}] Unsupported platform for shutdown: ${platform}`);
|
||||
return false;
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`[${this.serviceName}] Error during shutdown:`, error);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async shutdownMacOS(force) {
|
||||
throw new Error('shutdownMacOS() must be implemented by subclass');
|
||||
}
|
||||
|
||||
async shutdownWindows(force) {
|
||||
throw new Error('shutdownWindows() must be implemented by subclass');
|
||||
}
|
||||
|
||||
async shutdownLinux(force) {
|
||||
throw new Error('shutdownLinux() must be implemented by subclass');
|
||||
}
|
||||
|
||||
async downloadFile(url, destination, options = {}) {
|
||||
const {
|
||||
onProgress = null,
|
||||
headers = { 'User-Agent': 'Glass-App' },
|
||||
timeout = 300000, // 5 minutes default
|
||||
modelId = null // extra option carrying the model ID
|
||||
} = options;
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const file = fs.createWriteStream(destination);
|
||||
let downloadedSize = 0;
|
||||
let totalSize = 0;
|
||||
|
||||
const request = https.get(url, { headers }, (response) => {
|
||||
// Handle redirects (301, 302, 307, 308)
|
||||
if ([301, 302, 307, 308].includes(response.statusCode)) {
|
||||
file.close();
|
||||
fs.unlink(destination, () => {});
|
||||
|
||||
if (!response.headers.location) {
|
||||
reject(new Error('Redirect without location header'));
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`[${this.serviceName}] Following redirect from ${url} to ${response.headers.location}`);
|
||||
this.downloadFile(response.headers.location, destination, options)
|
||||
.then(resolve)
|
||||
.catch(reject);
|
||||
return;
|
||||
}
|
||||
|
||||
if (response.statusCode !== 200) {
|
||||
file.close();
|
||||
fs.unlink(destination, () => {});
|
||||
reject(new Error(`Download failed: ${response.statusCode} ${response.statusMessage}`));
|
||||
return;
|
||||
}
|
||||
|
||||
totalSize = parseInt(response.headers['content-length'], 10) || 0;
|
||||
|
||||
response.on('data', (chunk) => {
|
||||
downloadedSize += chunk.length;
|
||||
|
||||
if (totalSize > 0) {
|
||||
const progress = Math.round((downloadedSize / totalSize) * 100);
|
||||
|
||||
// Event-based progress reporting is handled directly by each service
|
||||
|
||||
// Keep supporting the legacy callback (for compatibility)
|
||||
if (onProgress) {
|
||||
onProgress(progress, downloadedSize, totalSize);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
response.pipe(file);
|
||||
|
||||
file.on('finish', () => {
|
||||
file.close(() => {
|
||||
// The download-complete event is emitted directly by each service
|
||||
resolve({ success: true, size: downloadedSize });
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
request.on('timeout', () => {
|
||||
request.destroy();
|
||||
file.close();
|
||||
fs.unlink(destination, () => {});
|
||||
reject(new Error('Download timeout'));
|
||||
});
|
||||
|
||||
request.on('error', (err) => {
|
||||
file.close();
|
||||
fs.unlink(destination, () => {});
|
||||
this.emit('download-error', { url, error: err, modelId });
|
||||
reject(err);
|
||||
});
|
||||
|
||||
request.setTimeout(timeout);
|
||||
|
||||
file.on('error', (err) => {
|
||||
fs.unlink(destination, () => {});
|
||||
reject(err);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async downloadWithRetry(url, destination, options = {}) {
|
||||
const {
|
||||
maxRetries = 3,
|
||||
retryDelay = 1000,
|
||||
expectedChecksum = null,
|
||||
modelId = null, // extra option carrying the model ID
|
||||
...downloadOptions
|
||||
} = options;
|
||||
|
||||
for (let attempt = 1; attempt <= maxRetries; attempt++) {
|
||||
try {
|
||||
const result = await this.downloadFile(url, destination, {
|
||||
...downloadOptions,
|
||||
modelId
|
||||
});
|
||||
|
||||
if (expectedChecksum) {
|
||||
const isValid = await this.verifyChecksum(destination, expectedChecksum);
|
||||
if (!isValid) {
|
||||
fs.unlinkSync(destination);
|
||||
throw new Error('Checksum verification failed');
|
||||
}
|
||||
console.log(`[${this.serviceName}] Checksum verified successfully`);
|
||||
}
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
if (attempt === maxRetries) {
|
||||
// The download-error event is emitted directly by each service
|
||||
throw error;
|
||||
}
|
||||
|
||||
console.log(`Download attempt ${attempt} failed, retrying in ${retryDelay}ms...`);
|
||||
await new Promise(resolve => setTimeout(resolve, retryDelay * attempt));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async verifyChecksum(filePath, expectedChecksum) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const hash = crypto.createHash('sha256');
|
||||
const stream = fs.createReadStream(filePath);
|
||||
|
||||
stream.on('data', (data) => hash.update(data));
|
||||
stream.on('end', () => {
|
||||
const fileChecksum = hash.digest('hex');
|
||||
console.log(`[${this.serviceName}] File checksum: ${fileChecksum}`);
|
||||
console.log(`[${this.serviceName}] Expected checksum: ${expectedChecksum}`);
|
||||
resolve(fileChecksum === expectedChecksum);
|
||||
});
|
||||
stream.on('error', reject);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = LocalAIServiceBase;
|
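The `verifyChecksum` helper in the removed base class (carried over into the whisper service later in this diff) also suggests how to produce the sha256 values still marked TODO in the checksum config. A small standalone sketch using the same Node hashing approach; the file path is illustrative:

const crypto = require('crypto');
const fs = require('fs');

// Print the sha256 of a downloaded artifact so it can be pinned in the checksum config
function printSha256(filePath) {
    const hash = crypto.createHash('sha256');
    fs.createReadStream(filePath)
        .on('data', (chunk) => hash.update(chunk))
        .on('end', () => console.log(`${filePath}: ${hash.digest('hex')}`))
        .on('error', (err) => console.error('Failed to hash file:', err));
}

printSha256('./Ollama.dmg'); // illustrative path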
@@ -1,138 +0,0 @@
|
||||
export class LocalProgressTracker {
|
||||
constructor(serviceName) {
|
||||
this.serviceName = serviceName;
|
||||
this.activeOperations = new Map(); // operationId -> { controller, onProgress }
|
||||
|
||||
// Check if we're in renderer process with window.api available
|
||||
if (!window.api) {
|
||||
throw new Error(`${serviceName} requires Electron environment with contextBridge`);
|
||||
}
|
||||
|
||||
this.globalProgressHandler = (event, data) => {
|
||||
const operation = this.activeOperations.get(data.model || data.modelId);
|
||||
if (operation && !operation.controller.signal.aborted) {
|
||||
operation.onProgress(data.progress);
|
||||
}
|
||||
};
|
||||
|
||||
// Set up progress listeners based on service name
|
||||
if (serviceName.toLowerCase() === 'ollama') {
|
||||
window.api.settingsView.onOllamaPullProgress(this.globalProgressHandler);
|
||||
} else if (serviceName.toLowerCase() === 'whisper') {
|
||||
window.api.settingsView.onWhisperDownloadProgress(this.globalProgressHandler);
|
||||
}
|
||||
|
||||
this.progressEvent = serviceName.toLowerCase();
|
||||
}
|
||||
|
||||
async trackOperation(operationId, operationType, onProgress) {
|
||||
if (this.activeOperations.has(operationId)) {
|
||||
throw new Error(`${operationType} ${operationId} is already in progress`);
|
||||
}
|
||||
|
||||
const controller = new AbortController();
|
||||
const operation = { controller, onProgress };
|
||||
this.activeOperations.set(operationId, operation);
|
||||
|
||||
try {
|
||||
let result;
|
||||
|
||||
// Use appropriate API call based on service and operation
|
||||
if (this.serviceName.toLowerCase() === 'ollama' && operationType === 'install') {
|
||||
result = await window.api.settingsView.pullOllamaModel(operationId);
|
||||
} else if (this.serviceName.toLowerCase() === 'whisper' && operationType === 'download') {
|
||||
result = await window.api.settingsView.downloadWhisperModel(operationId);
|
||||
} else {
|
||||
throw new Error(`Unsupported operation: ${this.serviceName}:${operationType}`);
|
||||
}
|
||||
|
||||
if (!result.success) {
|
||||
throw new Error(result.error || `${operationType} failed`);
|
||||
}
|
||||
|
||||
return true;
|
||||
} catch (error) {
|
||||
if (!controller.signal.aborted) {
|
||||
throw error;
|
||||
}
|
||||
return false;
|
||||
} finally {
|
||||
this.activeOperations.delete(operationId);
|
||||
}
|
||||
}
|
||||
|
||||
async installModel(modelName, onProgress) {
|
||||
return this.trackOperation(modelName, 'install', onProgress);
|
||||
}
|
||||
|
||||
async downloadModel(modelId, onProgress) {
|
||||
return this.trackOperation(modelId, 'download', onProgress);
|
||||
}
|
||||
|
||||
cancelOperation(operationId) {
|
||||
const operation = this.activeOperations.get(operationId);
|
||||
if (operation) {
|
||||
operation.controller.abort();
|
||||
this.activeOperations.delete(operationId);
|
||||
}
|
||||
}
|
||||
|
||||
cancelAllOperations() {
|
||||
for (const [operationId, operation] of this.activeOperations) {
|
||||
operation.controller.abort();
|
||||
}
|
||||
this.activeOperations.clear();
|
||||
}
|
||||
|
||||
isOperationActive(operationId) {
|
||||
return this.activeOperations.has(operationId);
|
||||
}
|
||||
|
||||
getActiveOperations() {
|
||||
return Array.from(this.activeOperations.keys());
|
||||
}
|
||||
|
||||
destroy() {
|
||||
this.cancelAllOperations();
|
||||
|
||||
// Remove progress listeners based on service name
|
||||
if (this.progressEvent === 'ollama') {
|
||||
window.api.settingsView.removeOnOllamaPullProgress(this.globalProgressHandler);
|
||||
} else if (this.progressEvent === 'whisper') {
|
||||
window.api.settingsView.removeOnWhisperDownloadProgress(this.globalProgressHandler);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let trackers = new Map();
|
||||
|
||||
export function getLocalProgressTracker(serviceName) {
|
||||
if (!trackers.has(serviceName)) {
|
||||
trackers.set(serviceName, new LocalProgressTracker(serviceName));
|
||||
}
|
||||
return trackers.get(serviceName);
|
||||
}
|
||||
|
||||
export function destroyLocalProgressTracker(serviceName) {
|
||||
const tracker = trackers.get(serviceName);
|
||||
if (tracker) {
|
||||
tracker.destroy();
|
||||
trackers.delete(serviceName);
|
||||
}
|
||||
}
|
||||
|
||||
export function destroyAllProgressTrackers() {
|
||||
for (const [name, tracker] of trackers) {
|
||||
tracker.destroy();
|
||||
}
|
||||
trackers.clear();
|
||||
}
|
||||
|
||||
// Legacy compatibility exports
|
||||
export function getOllamaProgressTracker() {
|
||||
return getLocalProgressTracker('ollama');
|
||||
}
|
||||
|
||||
export function destroyOllamaProgressTracker() {
|
||||
destroyLocalProgressTracker('ollama');
|
||||
}
|
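Before its removal, the renderer-side tracker above was consumed roughly as follows. This is a sketch reconstructed from the class itself, not taken from the repo's actual call sites, and the import path is illustrative:

import { getLocalProgressTracker } from './LocalProgressTracker.js'; // former module path is illustrative

async function downloadTinyModel() {
    const tracker = getLocalProgressTracker('whisper');
    // onProgress receives the percentage relayed through window.api's whisper progress events
    await tracker.downloadModel('whisper-tiny', (progress) => {
        console.log(`whisper-tiny download: ${progress}%`);
    });
    tracker.destroy(); // detaches the window.api progress listeners
}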
@@ -1,11 +1,9 @@
|
||||
const Store = require('electron-store');
|
||||
const fetch = require('node-fetch');
|
||||
const { EventEmitter } = require('events');
|
||||
const { BrowserWindow } = require('electron');
|
||||
const { PROVIDERS, getProviderClass } = require('../ai/factory');
|
||||
const encryptionService = require('./encryptionService');
|
||||
const providerSettingsRepository = require('../repositories/providerSettings');
|
||||
const userModelSelectionsRepository = require('../repositories/userModelSelections');
|
||||
|
||||
// Import authService directly (singleton)
|
||||
const authService = require('./authService');
|
||||
@@ -19,25 +17,54 @@ class ModelStateService extends EventEmitter {
|
||||
this.hasMigrated = false;
|
||||
}
|
||||
|
||||
// Broadcast an event to all windows
|
||||
_broadcastToAllWindows(eventName, data = null) {
|
||||
BrowserWindow.getAllWindows().forEach(win => {
|
||||
if (win && !win.isDestroyed()) {
|
||||
if (data !== null) {
|
||||
win.webContents.send(eventName, data);
|
||||
} else {
|
||||
win.webContents.send(eventName);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async initialize() {
|
||||
console.log('[ModelStateService] Initializing...');
|
||||
await this._loadStateForCurrentUser();
|
||||
|
||||
// Subscribe to LocalAI state-change events
|
||||
this.setupLocalAIStateSync();
|
||||
|
||||
console.log('[ModelStateService] Initialization complete');
|
||||
}
|
||||
|
||||
setupLocalAIStateSync() {
|
||||
// Detect LocalAI service state changes
|
||||
// Handle events received directly from LocalAIManager
|
||||
const localAIManager = require('./localAIManager');
|
||||
localAIManager.on('state-changed', (service, status) => {
|
||||
this.handleLocalAIStateChange(service, status);
|
||||
});
|
||||
}
|
||||
|
||||
handleLocalAIStateChange(service, state) {
|
||||
console.log(`[ModelStateService] LocalAI state changed: ${service}`, state);
|
||||
|
||||
// For Ollama, also handle the loaded-model information
|
||||
if (service === 'ollama' && state.loadedModels) {
|
||||
console.log(`[ModelStateService] Ollama loaded models: ${state.loadedModels.join(', ')}`);
|
||||
|
||||
// Check whether the selected model has been unloaded from memory
|
||||
const selectedLLM = this.state.selectedModels.llm;
|
||||
if (selectedLLM && this.getProviderForModel('llm', selectedLLM) === 'ollama') {
|
||||
if (!state.loadedModels.includes(selectedLLM)) {
|
||||
console.log(`[ModelStateService] Selected model ${selectedLLM} is not loaded in memory`);
|
||||
// Trigger an automatic warm-up if needed
|
||||
this._triggerAutoWarmUp();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Re-run auto-selection if needed
|
||||
if (!state.installed || !state.running) {
|
||||
const types = service === 'ollama' ? ['llm'] : service === 'whisper' ? ['stt'] : [];
|
||||
this._autoSelectAvailableModels(types);
|
||||
}
|
||||
|
||||
// Notify the UI to update
|
||||
this.emit('state-updated', this.state);
|
||||
}
|
||||
|
||||
_logCurrentSelection() {
|
||||
const llmModel = this.state.selectedModels.llm;
|
||||
const sttModel = this.state.selectedModels.stt;
|
||||
@@ -86,6 +113,66 @@ class ModelStateService extends EventEmitter {
|
||||
});
|
||||
}
|
||||
|
||||
async _migrateUserModelSelections() {
|
||||
console.log('[ModelStateService] Checking for user_model_selections migration...');
|
||||
const userId = this.authService.getCurrentUserId();
|
||||
|
||||
try {
|
||||
// Check if user_model_selections table exists
|
||||
const sqliteClient = require('./sqliteClient');
|
||||
const db = sqliteClient.getDb();
|
||||
|
||||
const tableExists = db.prepare("SELECT name FROM sqlite_master WHERE type='table' AND name='user_model_selections'").get();
|
||||
|
||||
if (!tableExists) {
|
||||
console.log('[ModelStateService] user_model_selections table does not exist, skipping migration');
|
||||
return;
|
||||
}
|
||||
|
||||
// Get existing user_model_selections data
|
||||
const selections = db.prepare('SELECT * FROM user_model_selections WHERE uid = ?').get(userId);
|
||||
|
||||
if (!selections) {
|
||||
console.log('[ModelStateService] No user_model_selections data to migrate');
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('[ModelStateService] Found user_model_selections data, migrating to provider_settings...');
|
||||
|
||||
// Migrate LLM selection
|
||||
if (selections.llm_model) {
|
||||
const llmProvider = this.getProviderForModel('llm', selections.llm_model);
|
||||
if (llmProvider) {
|
||||
await providerSettingsRepository.upsert(llmProvider, {
|
||||
selected_llm_model: selections.llm_model
|
||||
});
|
||||
await providerSettingsRepository.setActiveProvider(llmProvider, 'llm');
|
||||
console.log(`[ModelStateService] Migrated LLM: ${selections.llm_model} (provider: ${llmProvider})`);
|
||||
}
|
||||
}
|
||||
|
||||
// Migrate STT selection
|
||||
if (selections.stt_model) {
|
||||
const sttProvider = this.getProviderForModel('stt', selections.stt_model);
|
||||
if (sttProvider) {
|
||||
await providerSettingsRepository.upsert(sttProvider, {
|
||||
selected_stt_model: selections.stt_model
|
||||
});
|
||||
await providerSettingsRepository.setActiveProvider(sttProvider, 'stt');
|
||||
console.log(`[ModelStateService] Migrated STT: ${selections.stt_model} (provider: ${sttProvider})`);
|
||||
}
|
||||
}
|
||||
|
||||
// Delete the migrated data from user_model_selections
|
||||
db.prepare('DELETE FROM user_model_selections WHERE uid = ?').run(userId);
|
||||
console.log('[ModelStateService] user_model_selections migration completed');
|
||||
|
||||
} catch (error) {
|
||||
console.error('[ModelStateService] user_model_selections migration failed:', error);
|
||||
// Don't throw - continue with normal operation
|
||||
}
|
||||
}
|
||||
|
||||
async _migrateFromElectronStore() {
|
||||
console.log('[ModelStateService] Starting migration from electron-store to database...');
|
||||
const userId = this.authService.getCurrentUserId();
|
||||
@@ -115,17 +202,26 @@ class ModelStateService extends EventEmitter {
|
||||
}
|
||||
|
||||
// Migrate global model selections
|
||||
if (selectedModels.llm || selectedModels.stt) {
|
||||
const llmProvider = selectedModels.llm ? this.getProviderForModel('llm', selectedModels.llm) : null;
|
||||
const sttProvider = selectedModels.stt ? this.getProviderForModel('stt', selectedModels.stt) : null;
|
||||
|
||||
await userModelSelectionsRepository.upsert({
|
||||
selected_llm_provider: llmProvider,
|
||||
selected_llm_model: selectedModels.llm,
|
||||
selected_stt_provider: sttProvider,
|
||||
selected_stt_model: selectedModels.stt
|
||||
});
|
||||
console.log('[ModelStateService] Migrated global model selections');
|
||||
if (selectedModels.llm) {
|
||||
const llmProvider = this.getProviderForModel('llm', selectedModels.llm);
|
||||
if (llmProvider) {
|
||||
await providerSettingsRepository.upsert(llmProvider, {
|
||||
selected_llm_model: selectedModels.llm
|
||||
});
|
||||
await providerSettingsRepository.setActiveProvider(llmProvider, 'llm');
|
||||
console.log(`[ModelStateService] Migrated LLM model selection: ${selectedModels.llm}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (selectedModels.stt) {
|
||||
const sttProvider = this.getProviderForModel('stt', selectedModels.stt);
|
||||
if (sttProvider) {
|
||||
await providerSettingsRepository.upsert(sttProvider, {
|
||||
selected_stt_model: selectedModels.stt
|
||||
});
|
||||
await providerSettingsRepository.setActiveProvider(sttProvider, 'stt');
|
||||
console.log(`[ModelStateService] Migrated STT model selection: ${selectedModels.stt}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Mark migration as complete by removing legacy data
|
||||
@@ -159,11 +255,11 @@ class ModelStateService extends EventEmitter {
|
||||
}
|
||||
}
|
||||
|
||||
// Load global model selections
|
||||
const modelSelections = await userModelSelectionsRepository.get();
|
||||
// Load active model selections from provider settings
|
||||
const activeSettings = await providerSettingsRepository.getActiveSettings();
|
||||
const selectedModels = {
|
||||
llm: modelSelections?.selected_llm_model || null,
|
||||
stt: modelSelections?.selected_stt_model || null
|
||||
llm: activeSettings.llm?.selected_llm_model || null,
|
||||
stt: activeSettings.stt?.selected_stt_model || null
|
||||
};
|
||||
|
||||
this.state = {
|
||||
@@ -197,6 +293,9 @@ class ModelStateService extends EventEmitter {
|
||||
// Initialize encryption service for current user
|
||||
await encryptionService.initializeKey(userId);
|
||||
|
||||
// Check for user_model_selections migration first
|
||||
await this._migrateUserModelSelections();
|
||||
|
||||
// Try to load from database first
|
||||
await this._loadStateFromDatabase();
|
||||
|
||||
@@ -232,17 +331,38 @@ class ModelStateService extends EventEmitter {
|
||||
}
|
||||
}
|
||||
|
||||
// Save global model selections
|
||||
const llmProvider = this.state.selectedModels.llm ? this.getProviderForModel('llm', this.state.selectedModels.llm) : null;
|
||||
const sttProvider = this.state.selectedModels.stt ? this.getProviderForModel('stt', this.state.selectedModels.stt) : null;
|
||||
// Save model selections and update active providers
|
||||
const llmModel = this.state.selectedModels.llm;
|
||||
const sttModel = this.state.selectedModels.stt;
|
||||
|
||||
if (llmProvider || sttProvider || this.state.selectedModels.llm || this.state.selectedModels.stt) {
|
||||
await userModelSelectionsRepository.upsert({
|
||||
selected_llm_provider: llmProvider,
|
||||
selected_llm_model: this.state.selectedModels.llm,
|
||||
selected_stt_provider: sttProvider,
|
||||
selected_stt_model: this.state.selectedModels.stt
|
||||
});
|
||||
if (llmModel) {
|
||||
const llmProvider = this.getProviderForModel('llm', llmModel);
|
||||
if (llmProvider) {
|
||||
// Update the provider's selected model
|
||||
await providerSettingsRepository.upsert(llmProvider, {
|
||||
selected_llm_model: llmModel
|
||||
});
|
||||
// Set as active LLM provider
|
||||
await providerSettingsRepository.setActiveProvider(llmProvider, 'llm');
|
||||
}
|
||||
} else {
|
||||
// Deactivate all LLM providers if no model selected
|
||||
await providerSettingsRepository.setActiveProvider(null, 'llm');
|
||||
}
|
||||
|
||||
if (sttModel) {
|
||||
const sttProvider = this.getProviderForModel('stt', sttModel);
|
||||
if (sttProvider) {
|
||||
// Update the provider's selected model
|
||||
await providerSettingsRepository.upsert(sttProvider, {
|
||||
selected_stt_model: sttModel
|
||||
});
|
||||
// Set as active STT provider
|
||||
await providerSettingsRepository.setActiveProvider(sttProvider, 'stt');
|
||||
}
|
||||
} else {
|
||||
// Deactivate all STT providers if no model selected
|
||||
await providerSettingsRepository.setActiveProvider(null, 'stt');
|
||||
}
|
||||
|
||||
console.log(`[ModelStateService] State saved to database for user: ${userId}`);
|
||||
@@ -344,8 +464,8 @@ class ModelStateService extends EventEmitter {
|
||||
|
||||
this._autoSelectAvailableModels([]);
|
||||
|
||||
this._broadcastToAllWindows('model-state:updated', this.state);
|
||||
this._broadcastToAllWindows('settings-updated');
|
||||
this.emit('state-updated', this.state);
|
||||
this.emit('settings-updated');
|
||||
}
|
||||
|
||||
getApiKey(provider) {
|
||||
@@ -363,8 +483,8 @@ class ModelStateService extends EventEmitter {
|
||||
await providerSettingsRepository.remove(provider);
|
||||
await this._saveState();
|
||||
this._autoSelectAvailableModels([]);
|
||||
this._broadcastToAllWindows('model-state:updated', this.state);
|
||||
this._broadcastToAllWindows('settings-updated');
|
||||
this.emit('state-updated', this.state);
|
||||
this.emit('settings-updated');
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
@@ -506,12 +626,21 @@ class ModelStateService extends EventEmitter {
|
||||
if (type === 'llm' && modelId && modelId !== previousModelId) {
|
||||
const provider = this.getProviderForModel('llm', modelId);
|
||||
if (provider === 'ollama') {
|
||||
this._autoWarmUpOllamaModel(modelId, previousModelId);
|
||||
const localAIManager = require('./localAIManager');
|
||||
if (localAIManager) {
|
||||
console.log('[ModelStateService] Triggering Ollama model warm-up via LocalAIManager');
|
||||
localAIManager.warmUpModel(modelId).catch(error => {
|
||||
console.warn('[ModelStateService] Model warm-up failed:', error);
|
||||
});
|
||||
} else {
|
||||
// fallback to old method
|
||||
this._autoWarmUpOllamaModel(modelId, previousModelId);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this._broadcastToAllWindows('model-state:updated', this.state);
|
||||
this._broadcastToAllWindows('settings-updated');
|
||||
this.emit('state-updated', this.state);
|
||||
this.emit('settings-updated');
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -578,7 +707,7 @@ class ModelStateService extends EventEmitter {
|
||||
if (success) {
|
||||
const selectedModels = this.getSelectedModels();
|
||||
if (!selectedModels.llm || !selectedModels.stt) {
|
||||
this._broadcastToAllWindows('force-show-apikey-header');
|
||||
this.emit('force-show-apikey-header');
|
||||
}
|
||||
}
|
||||
return success;
|
||||
|
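The save and load paths changed above are symmetric: `_saveState` now writes each selection onto the active provider's own `provider_settings` row, and `_loadStateFromDatabase` rebuilds `selectedModels` from `getActiveSettings()`. A condensed, self-contained sketch of that round-trip for the LLM slot; the repository and lookup function are passed in here purely for illustration:

// Condensed illustration of the new persistence round-trip (dependencies injected for the sketch)
async function persistAndReloadLlmSelection(repo, getProviderForModel, selectedLlmModel) {
    // Save: the model choice lives on the provider's row, and is_active_llm marks the chosen provider
    const llmProvider = getProviderForModel('llm', selectedLlmModel);
    await repo.upsert(llmProvider, { selected_llm_model: selectedLlmModel });
    await repo.setActiveProvider(llmProvider, 'llm');

    // Load: the active rows alone are enough to rebuild the in-memory selection state
    const activeSettings = await repo.getActiveSettings();
    return {
        llm: activeSettings.llm?.selected_llm_model || null,
        stt: activeSettings.stt?.selected_stt_model || null,
    };
}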
File diff suppressed because it is too large
@@ -1,21 +1,40 @@
|
||||
const { spawn } = require('child_process');
|
||||
const { EventEmitter } = require('events');
|
||||
const { spawn, exec } = require('child_process');
|
||||
const { promisify } = require('util');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const os = require('os');
|
||||
const { BrowserWindow } = require('electron');
|
||||
const LocalAIServiceBase = require('./localAIServiceBase');
|
||||
const https = require('https');
|
||||
const crypto = require('crypto');
|
||||
const { spawnAsync } = require('../utils/spawnHelper');
|
||||
const { DOWNLOAD_CHECKSUMS } = require('../config/checksums');
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
const fsPromises = fs.promises;
|
||||
|
||||
class WhisperService extends LocalAIServiceBase {
|
||||
class WhisperService extends EventEmitter {
|
||||
constructor() {
|
||||
super('WhisperService');
|
||||
this.isInitialized = false;
|
||||
super();
|
||||
this.serviceName = 'WhisperService';
|
||||
|
||||
// Paths and directories
|
||||
this.whisperPath = null;
|
||||
this.modelsDir = null;
|
||||
this.tempDir = null;
|
||||
|
||||
// Session management (built-in session pool)
|
||||
this.sessionPool = [];
|
||||
this.activeSessions = new Map();
|
||||
this.maxSessions = 3;
|
||||
|
||||
// Installation state
|
||||
this.installState = {
|
||||
isInstalled: false,
|
||||
isInitialized: false
|
||||
};
|
||||
|
||||
// Available models
|
||||
this.availableModels = {
|
||||
'whisper-tiny': {
|
||||
name: 'Tiny',
|
||||
@@ -40,21 +59,222 @@ class WhisperService extends LocalAIServiceBase {
|
||||
};
|
||||
}
|
||||
|
||||
// Broadcast an event to all windows
|
||||
_broadcastToAllWindows(eventName, data = null) {
|
||||
BrowserWindow.getAllWindows().forEach(win => {
|
||||
if (win && !win.isDestroyed()) {
|
||||
if (data !== null) {
|
||||
win.webContents.send(eventName, data);
|
||||
} else {
|
||||
win.webContents.send(eventName);
|
||||
}
|
||||
|
||||
// Base class methods integration
|
||||
getPlatform() {
|
||||
return process.platform;
|
||||
}
|
||||
|
||||
async checkCommand(command) {
|
||||
try {
|
||||
const platform = this.getPlatform();
|
||||
const checkCmd = platform === 'win32' ? 'where' : 'which';
|
||||
const { stdout } = await execAsync(`${checkCmd} ${command}`);
|
||||
return stdout.trim();
|
||||
} catch (error) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
async waitForService(checkFn, maxAttempts = 30, delayMs = 1000) {
|
||||
for (let i = 0; i < maxAttempts; i++) {
|
||||
if (await checkFn()) {
|
||||
console.log(`[${this.serviceName}] Service is ready`);
|
||||
return true;
|
||||
}
|
||||
await new Promise(resolve => setTimeout(resolve, delayMs));
|
||||
}
|
||||
throw new Error(`${this.serviceName} service failed to start within timeout`);
|
||||
}
|
||||
|
||||
async downloadFile(url, destination, options = {}) {
|
||||
const {
|
||||
onProgress = null,
|
||||
headers = { 'User-Agent': 'Glass-App' },
|
||||
timeout = 300000,
|
||||
modelId = null
|
||||
} = options;
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const file = fs.createWriteStream(destination);
|
||||
let downloadedSize = 0;
|
||||
let totalSize = 0;
|
||||
|
||||
const request = https.get(url, { headers }, (response) => {
|
||||
if ([301, 302, 307, 308].includes(response.statusCode)) {
|
||||
file.close();
|
||||
fs.unlink(destination, () => {});
|
||||
|
||||
if (!response.headers.location) {
|
||||
reject(new Error('Redirect without location header'));
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`[${this.serviceName}] Following redirect from ${url} to ${response.headers.location}`);
|
||||
this.downloadFile(response.headers.location, destination, options)
|
||||
.then(resolve)
|
||||
.catch(reject);
|
||||
return;
|
||||
}
|
||||
|
||||
if (response.statusCode !== 200) {
|
||||
file.close();
|
||||
fs.unlink(destination, () => {});
|
||||
reject(new Error(`Download failed: ${response.statusCode} ${response.statusMessage}`));
|
||||
return;
|
||||
}
|
||||
|
||||
totalSize = parseInt(response.headers['content-length'], 10) || 0;
|
||||
|
||||
response.on('data', (chunk) => {
|
||||
downloadedSize += chunk.length;
|
||||
|
||||
if (totalSize > 0) {
|
||||
const progress = Math.round((downloadedSize / totalSize) * 100);
|
||||
|
||||
if (onProgress) {
|
||||
onProgress(progress, downloadedSize, totalSize);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
response.pipe(file);
|
||||
|
||||
file.on('finish', () => {
|
||||
file.close(() => {
|
||||
resolve({ success: true, size: downloadedSize });
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
request.on('timeout', () => {
|
||||
request.destroy();
|
||||
file.close();
|
||||
fs.unlink(destination, () => {});
|
||||
reject(new Error('Download timeout'));
|
||||
});
|
||||
|
||||
request.on('error', (err) => {
|
||||
file.close();
|
||||
fs.unlink(destination, () => {});
|
||||
this.emit('download-error', { url, error: err, modelId });
|
||||
reject(err);
|
||||
});
|
||||
|
||||
request.setTimeout(timeout);
|
||||
|
||||
file.on('error', (err) => {
|
||||
fs.unlink(destination, () => {});
|
||||
reject(err);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async downloadWithRetry(url, destination, options = {}) {
|
||||
const {
|
||||
maxRetries = 3,
|
||||
retryDelay = 1000,
|
||||
expectedChecksum = null,
|
||||
modelId = null,
|
||||
...downloadOptions
|
||||
} = options;
|
||||
|
||||
for (let attempt = 1; attempt <= maxRetries; attempt++) {
|
||||
try {
|
||||
const result = await this.downloadFile(url, destination, {
|
||||
...downloadOptions,
|
||||
modelId
|
||||
});
|
||||
|
||||
if (expectedChecksum) {
|
||||
const isValid = await this.verifyChecksum(destination, expectedChecksum);
|
||||
if (!isValid) {
|
||||
fs.unlinkSync(destination);
|
||||
throw new Error('Checksum verification failed');
|
||||
}
|
||||
console.log(`[${this.serviceName}] Checksum verified successfully`);
|
||||
}
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
if (attempt === maxRetries) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
console.log(`Download attempt ${attempt} failed, retrying in ${retryDelay}ms...`);
|
||||
await new Promise(resolve => setTimeout(resolve, retryDelay * attempt));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async verifyChecksum(filePath, expectedChecksum) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const hash = crypto.createHash('sha256');
|
||||
const stream = fs.createReadStream(filePath);
|
||||
|
||||
stream.on('data', (data) => hash.update(data));
|
||||
stream.on('end', () => {
|
||||
const fileChecksum = hash.digest('hex');
|
||||
console.log(`[${this.serviceName}] File checksum: ${fileChecksum}`);
|
||||
console.log(`[${this.serviceName}] Expected checksum: ${expectedChecksum}`);
|
||||
resolve(fileChecksum === expectedChecksum);
|
||||
});
|
||||
stream.on('error', reject);
|
||||
});
|
||||
}
|
||||
|
||||
async autoInstall(onProgress) {
|
||||
const platform = this.getPlatform();
|
||||
console.log(`[${this.serviceName}] Starting auto-installation for ${platform}`);
|
||||
|
||||
try {
|
||||
switch(platform) {
|
||||
case 'darwin':
|
||||
return await this.installMacOS(onProgress);
|
||||
case 'win32':
|
||||
return await this.installWindows(onProgress);
|
||||
case 'linux':
|
||||
return await this.installLinux();
|
||||
default:
|
||||
throw new Error(`Unsupported platform: ${platform}`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`[${this.serviceName}] Auto-installation failed:`, error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async shutdown(force = false) {
|
||||
console.log(`[${this.serviceName}] Starting ${force ? 'forced' : 'graceful'} shutdown...`);
|
||||
|
||||
const isRunning = await this.isServiceRunning();
|
||||
if (!isRunning) {
|
||||
console.log(`[${this.serviceName}] Service not running, nothing to shutdown`);
|
||||
return true;
|
||||
}
|
||||
|
||||
const platform = this.getPlatform();
|
||||
|
||||
try {
|
||||
switch(platform) {
|
||||
case 'darwin':
|
||||
return await this.shutdownMacOS(force);
|
||||
case 'win32':
|
||||
return await this.shutdownWindows(force);
|
||||
case 'linux':
|
||||
return await this.shutdownLinux(force);
|
||||
default:
|
||||
console.warn(`[${this.serviceName}] Unsupported platform for shutdown: ${platform}`);
|
||||
return false;
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`[${this.serviceName}] Error during shutdown:`, error);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
async initialize() {
|
||||
if (this.isInitialized) return;
|
||||
if (this.installState.isInitialized) return;
|
||||
|
||||
try {
|
||||
const homeDir = os.homedir();
|
||||
@ -71,10 +291,15 @@ class WhisperService extends LocalAIServiceBase {
|
||||
await this.ensureDirectories();
|
||||
await this.ensureWhisperBinary();
|
||||
|
||||
this.isInitialized = true;
|
||||
this.installState.isInitialized = true;
|
||||
console.log('[WhisperService] Initialized successfully');
|
||||
} catch (error) {
|
||||
console.error('[WhisperService] Initialization failed:', error);
|
||||
// Emit error event - LocalAIManager가 처리
|
||||
this.emit('error', {
|
||||
errorType: 'initialization-failed',
|
||||
error: error.message
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
@ -85,6 +310,56 @@ class WhisperService extends LocalAIServiceBase {
|
||||
await fsPromises.mkdir(path.dirname(this.whisperPath), { recursive: true });
|
||||
}
|
||||
|
||||
// local stt session
|
||||
async getSession(config) {
|
||||
// check available session
|
||||
const availableSession = this.sessionPool.find(s => !s.inUse);
|
||||
if (availableSession) {
|
||||
availableSession.inUse = true;
|
||||
await availableSession.reconfigure(config);
|
||||
return availableSession;
|
||||
}
|
||||
|
||||
// create new session
|
||||
if (this.activeSessions.size >= this.maxSessions) {
|
||||
throw new Error('Maximum session limit reached');
|
||||
}
|
||||
|
||||
const session = new WhisperSession(config, this);
|
||||
await session.initialize();
|
||||
this.activeSessions.set(session.id, session);
|
||||
|
||||
return session;
|
||||
}
|
||||
|
||||
async releaseSession(sessionId) {
|
||||
const session = this.activeSessions.get(sessionId);
|
||||
if (session) {
|
||||
await session.cleanup();
|
||||
session.inUse = false;
|
||||
|
||||
// add to session pool
|
||||
if (this.sessionPool.length < 2) {
|
||||
this.sessionPool.push(session);
|
||||
} else {
|
||||
// remove session
|
||||
await session.destroy();
|
||||
this.activeSessions.delete(sessionId);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//cleanup
|
||||
async cleanup() {
|
||||
// cleanup all sessions
|
||||
for (const session of this.activeSessions.values()) {
|
||||
await session.destroy();
|
||||
}
|
||||
|
||||
this.activeSessions.clear();
|
||||
this.sessionPool = [];
|
||||
}
|
||||
|
||||
async ensureWhisperBinary() {
|
||||
const whisperCliPath = await this.checkCommand('whisper-cli');
|
||||
if (whisperCliPath) {
|
||||
@ -113,6 +388,11 @@ class WhisperService extends LocalAIServiceBase {
|
||||
console.log('[WhisperService] Whisper not found, trying Homebrew installation...');
|
||||
try {
|
||||
await this.installViaHomebrew();
|
||||
// verify installation
|
||||
const verified = await this.verifyInstallation();
|
||||
if (!verified.success) {
|
||||
throw new Error(verified.error);
|
||||
}
|
||||
return;
|
||||
} catch (error) {
|
||||
console.log('[WhisperService] Homebrew installation failed:', error.message);
|
||||
@ -120,6 +400,12 @@ class WhisperService extends LocalAIServiceBase {
|
||||
}
|
||||
|
||||
await this.autoInstall();
|
||||
|
||||
// verify installation
|
||||
const verified = await this.verifyInstallation();
|
||||
if (!verified.success) {
|
||||
throw new Error(`Whisper installation verification failed: ${verified.error}`);
|
||||
}
|
||||
}
|
||||
|
||||
async installViaHomebrew() {
|
||||
@ -146,7 +432,7 @@ class WhisperService extends LocalAIServiceBase {
|
||||
|
||||
|
||||
async ensureModelAvailable(modelId) {
|
||||
if (!this.isInitialized) {
|
||||
if (!this.installState.isInitialized) {
|
||||
console.log('[WhisperService] Service not initialized, initializing now...');
|
||||
await this.initialize();
|
||||
}
|
||||
@ -171,25 +457,33 @@ class WhisperService extends LocalAIServiceBase {
|
||||
const modelPath = await this.getModelPath(modelId);
|
||||
const checksumInfo = DOWNLOAD_CHECKSUMS.whisper.models[modelId];
|
||||
|
||||
this._broadcastToAllWindows('whisper:download-progress', { modelId, progress: 0 });
|
||||
// Emit progress event - LocalAIManager가 처리
|
||||
this.emit('install-progress', {
|
||||
model: modelId,
|
||||
progress: 0
|
||||
});
|
||||
|
||||
await this.downloadWithRetry(modelInfo.url, modelPath, {
|
||||
expectedChecksum: checksumInfo?.sha256,
|
||||
modelId, // modelId를 전달하여 LocalAIServiceBase에서 이벤트 발생 시 사용
|
||||
modelId, // pass modelId to LocalAIServiceBase for event handling
|
||||
onProgress: (progress) => {
|
||||
this._broadcastToAllWindows('whisper:download-progress', { modelId, progress });
|
||||
// Emit progress event - LocalAIManager가 처리
|
||||
this.emit('install-progress', {
|
||||
model: modelId,
|
||||
progress
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
console.log(`[WhisperService] Model ${modelId} downloaded successfully`);
|
||||
this._broadcastToAllWindows('whisper:download-complete', { modelId });
|
||||
this.emit('model-download-complete', { modelId });
|
||||
}
|
||||
|
||||
async handleDownloadModel(modelId) {
|
||||
try {
|
||||
console.log(`[WhisperService] Handling download for model: ${modelId}`);
|
||||
|
||||
if (!this.isInitialized) {
|
||||
if (!this.installState.isInitialized) {
|
||||
await this.initialize();
|
||||
}
|
||||
|
||||
@ -204,7 +498,7 @@ class WhisperService extends LocalAIServiceBase {
|
||||
|
||||
async handleGetInstalledModels() {
|
||||
try {
|
||||
if (!this.isInitialized) {
|
||||
if (!this.installState.isInitialized) {
|
||||
await this.initialize();
|
||||
}
|
||||
const models = await this.getInstalledModels();
|
||||
@ -216,7 +510,7 @@ class WhisperService extends LocalAIServiceBase {
|
||||
}
|
||||
|
||||
async getModelPath(modelId) {
|
||||
if (!this.isInitialized || !this.modelsDir) {
|
||||
if (!this.installState.isInitialized || !this.modelsDir) {
|
||||
throw new Error('WhisperService is not initialized. Call initialize() first.');
|
||||
}
|
||||
return path.join(this.modelsDir, `${modelId}.bin`);
|
||||
@ -241,7 +535,7 @@ class WhisperService extends LocalAIServiceBase {
|
||||
|
||||
createWavHeader(dataSize) {
|
||||
const header = Buffer.alloc(44);
|
||||
const sampleRate = 24000;
|
||||
const sampleRate = 16000;
|
||||
const numChannels = 1;
|
||||
const bitsPerSample = 16;
|
||||
|
||||
@ -290,7 +584,7 @@ class WhisperService extends LocalAIServiceBase {
|
||||
}
|
||||
|
||||
async getInstalledModels() {
|
||||
if (!this.isInitialized) {
|
||||
if (!this.installState.isInitialized) {
|
||||
console.log('[WhisperService] Service not initialized for getInstalledModels, initializing now...');
|
||||
await this.initialize();
|
||||
}
|
||||
@ -319,11 +613,11 @@ class WhisperService extends LocalAIServiceBase {
|
||||
}
|
||||
|
||||
async isServiceRunning() {
|
||||
return this.isInitialized;
|
||||
return this.installState.isInitialized;
|
||||
}
|
||||
|
||||
async startService() {
|
||||
if (!this.isInitialized) {
|
||||
if (!this.installState.isInitialized) {
|
||||
await this.initialize();
|
||||
}
|
||||
return true;
|
||||
@ -493,6 +787,92 @@ class WhisperService extends LocalAIServiceBase {
|
||||
}
|
||||
}
|
||||
|
||||
// WhisperSession class
|
||||
class WhisperSession {
|
||||
constructor(config, service) {
|
||||
this.id = `session_${Date.now()}_${Math.random()}`;
|
||||
this.config = config;
|
||||
this.service = service;
|
||||
this.process = null;
|
||||
this.inUse = true;
|
||||
this.audioBuffer = Buffer.alloc(0);
|
||||
}
|
||||
|
||||
async initialize() {
|
||||
await this.service.ensureModelAvailable(this.config.model);
|
||||
this.startProcessingLoop();
|
||||
}
|
||||
|
||||
async reconfigure(config) {
|
||||
this.config = config;
|
||||
await this.service.ensureModelAvailable(this.config.model);
|
||||
}
|
||||
|
||||
startProcessingLoop() {
|
||||
// TODO: 실제 처리 루프 구현
|
||||
}
|
||||
|
||||
async cleanup() {
|
||||
// 임시 파일 정리
|
||||
await this.cleanupTempFiles();
|
||||
}
|
||||
|
||||
async cleanupTempFiles() {
|
||||
// TODO: 임시 파일 정리 구현
|
||||
}
|
||||
|
||||
async destroy() {
|
||||
if (this.process) {
|
||||
this.process.kill();
|
||||
}
|
||||
// 임시 파일 정리
|
||||
await this.cleanupTempFiles();
|
||||
}
|
||||
}
|
||||
|
||||
// verify installation
|
||||
WhisperService.prototype.verifyInstallation = async function() {
|
||||
try {
|
||||
console.log('[WhisperService] Verifying installation...');
|
||||
|
||||
// 1. check binary
|
||||
if (!this.whisperPath) {
|
||||
return { success: false, error: 'Whisper binary path not set' };
|
||||
}
|
||||
|
||||
try {
|
||||
await fsPromises.access(this.whisperPath, fs.constants.X_OK);
|
||||
} catch (error) {
|
||||
return { success: false, error: 'Whisper binary not executable' };
|
||||
}
|
||||
|
||||
// 2. check version
|
||||
try {
|
||||
const { stdout } = await spawnAsync(this.whisperPath, ['--help']);
|
||||
if (!stdout.includes('whisper')) {
|
||||
return { success: false, error: 'Invalid whisper binary' };
|
||||
}
|
||||
} catch (error) {
|
||||
return { success: false, error: 'Whisper binary not responding' };
|
||||
}
|
||||
|
||||
// 3. check directories
|
||||
try {
|
||||
await fsPromises.access(this.modelsDir, fs.constants.W_OK);
|
||||
await fsPromises.access(this.tempDir, fs.constants.W_OK);
|
||||
} catch (error) {
|
||||
return { success: false, error: 'Required directories not accessible' };
|
||||
}
|
||||
|
||||
console.log('[WhisperService] Installation verified successfully');
|
||||
return { success: true };
|
||||
|
||||
} catch (error) {
|
||||
console.error('[WhisperService] Verification failed:', error);
|
||||
return { success: false, error: error.message };
|
||||
}
|
||||
};
|
||||
|
||||
// Export singleton instance
|
||||
const whisperService = new WhisperService();
|
||||
module.exports = whisperService;
|
@ -110,13 +110,17 @@ class ListenService {
|
||||
console.log('[ListenService] changeSession to "Listen"');
|
||||
internalBridge.emit('window:requestVisibility', { name: 'listen', visible: true });
|
||||
await this.initializeSession();
|
||||
listenWindow.webContents.send('session-state-changed', { isActive: true });
|
||||
if (listenWindow && !listenWindow.isDestroyed()) {
|
||||
listenWindow.webContents.send('session-state-changed', { isActive: true });
|
||||
}
|
||||
break;
|
||||
|
||||
case 'Stop':
|
||||
console.log('[ListenService] changeSession to "Stop"');
|
||||
await this.closeSession();
|
||||
listenWindow.webContents.send('session-state-changed', { isActive: false });
|
||||
if (listenWindow && !listenWindow.isDestroyed()) {
|
||||
listenWindow.webContents.send('session-state-changed', { isActive: false });
|
||||
}
|
||||
break;
|
||||
|
||||
case 'Done':
|
||||
|
@ -6,8 +6,7 @@ const { getStoredApiKey, getStoredProvider, windowPool } = require('../../window
|
||||
|
||||
// New imports for common services
|
||||
const modelStateService = require('../common/services/modelStateService');
|
||||
const ollamaService = require('../common/services/ollamaService');
|
||||
const whisperService = require('../common/services/whisperService');
|
||||
const localAIManager = require('../common/services/localAIManager');
|
||||
|
||||
const store = new Store({
|
||||
name: 'pickle-glass-settings',
|
||||
@ -58,17 +57,21 @@ async function setSelectedModel(type, modelId) {
|
||||
return { success };
|
||||
}
|
||||
|
||||
// Ollama facade functions
|
||||
// LocalAI facade functions
|
||||
async function getOllamaStatus() {
|
||||
return ollamaService.getStatus();
|
||||
return localAIManager.getServiceStatus('ollama');
|
||||
}
|
||||
|
||||
async function ensureOllamaReady() {
|
||||
return ollamaService.ensureReady();
|
||||
const status = await localAIManager.getServiceStatus('ollama');
|
||||
if (!status.installed || !status.running) {
|
||||
await localAIManager.startService('ollama');
|
||||
}
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
async function shutdownOllama() {
|
||||
return ollamaService.shutdown(false); // false for graceful shutdown
|
||||
return localAIManager.stopService('ollama');
|
||||
}
|
||||
|
||||
|
||||
|
@ -31,11 +31,20 @@ contextBridge.exposeInMainWorld('api', {
|
||||
apiKeyHeader: {
|
||||
// Model & Provider Management
|
||||
getProviderConfig: () => ipcRenderer.invoke('model:get-provider-config'),
|
||||
getOllamaStatus: () => ipcRenderer.invoke('ollama:get-status'),
|
||||
// LocalAI 통합 API
|
||||
getLocalAIStatus: (service) => ipcRenderer.invoke('localai:get-status', service),
|
||||
installLocalAI: (service, options) => ipcRenderer.invoke('localai:install', { service, options }),
|
||||
startLocalAIService: (service) => ipcRenderer.invoke('localai:start-service', service),
|
||||
stopLocalAIService: (service) => ipcRenderer.invoke('localai:stop-service', service),
|
||||
installLocalAIModel: (service, modelId, options) => ipcRenderer.invoke('localai:install-model', { service, modelId, options }),
|
||||
getInstalledModels: (service) => ipcRenderer.invoke('localai:get-installed-models', service),
|
||||
|
||||
// Legacy support (호환성 위해 유지)
|
||||
getOllamaStatus: () => ipcRenderer.invoke('localai:get-status', 'ollama'),
|
||||
getModelSuggestions: () => ipcRenderer.invoke('ollama:get-model-suggestions'),
|
||||
ensureOllamaReady: () => ipcRenderer.invoke('ollama:ensure-ready'),
|
||||
installOllama: () => ipcRenderer.invoke('ollama:install'),
|
||||
startOllamaService: () => ipcRenderer.invoke('ollama:start-service'),
|
||||
installOllama: () => ipcRenderer.invoke('localai:install', { service: 'ollama' }),
|
||||
startOllamaService: () => ipcRenderer.invoke('localai:start-service', 'ollama'),
|
||||
pullOllamaModel: (modelName) => ipcRenderer.invoke('ollama:pull-model', modelName),
|
||||
downloadWhisperModel: (modelId) => ipcRenderer.invoke('whisper:download-model', modelId),
|
||||
validateKey: (data) => ipcRenderer.invoke('model:validate-key', data),
|
||||
@ -47,21 +56,25 @@ contextBridge.exposeInMainWorld('api', {
|
||||
moveHeaderTo: (x, y) => ipcRenderer.invoke('move-header-to', x, y),
|
||||
|
||||
// Listeners
|
||||
onOllamaInstallProgress: (callback) => ipcRenderer.on('ollama:install-progress', callback),
|
||||
removeOnOllamaInstallProgress: (callback) => ipcRenderer.removeListener('ollama:install-progress', callback),
|
||||
onceOllamaInstallComplete: (callback) => ipcRenderer.once('ollama:install-complete', callback),
|
||||
removeOnceOllamaInstallComplete: (callback) => ipcRenderer.removeListener('ollama:install-complete', callback),
|
||||
onOllamaPullProgress: (callback) => ipcRenderer.on('ollama:pull-progress', callback),
|
||||
removeOnOllamaPullProgress: (callback) => ipcRenderer.removeListener('ollama:pull-progress', callback),
|
||||
onWhisperDownloadProgress: (callback) => ipcRenderer.on('whisper:download-progress', callback),
|
||||
removeOnWhisperDownloadProgress: (callback) => ipcRenderer.removeListener('whisper:download-progress', callback),
|
||||
// LocalAI 통합 이벤트 리스너
|
||||
onLocalAIProgress: (callback) => ipcRenderer.on('localai:install-progress', callback),
|
||||
removeOnLocalAIProgress: (callback) => ipcRenderer.removeListener('localai:install-progress', callback),
|
||||
onLocalAIComplete: (callback) => ipcRenderer.on('localai:installation-complete', callback),
|
||||
removeOnLocalAIComplete: (callback) => ipcRenderer.removeListener('localai:installation-complete', callback),
|
||||
onLocalAIError: (callback) => ipcRenderer.on('localai:error-notification', callback),
|
||||
removeOnLocalAIError: (callback) => ipcRenderer.removeListener('localai:error-notification', callback),
|
||||
onLocalAIModelReady: (callback) => ipcRenderer.on('localai:model-ready', callback),
|
||||
removeOnLocalAIModelReady: (callback) => ipcRenderer.removeListener('localai:model-ready', callback),
|
||||
|
||||
|
||||
// Remove all listeners (for cleanup)
|
||||
removeAllListeners: () => {
|
||||
ipcRenderer.removeAllListeners('whisper:download-progress');
|
||||
ipcRenderer.removeAllListeners('ollama:install-progress');
|
||||
ipcRenderer.removeAllListeners('ollama:pull-progress');
|
||||
ipcRenderer.removeAllListeners('ollama:install-complete');
|
||||
// LocalAI 통합 이벤트
|
||||
ipcRenderer.removeAllListeners('localai:install-progress');
|
||||
ipcRenderer.removeAllListeners('localai:installation-complete');
|
||||
ipcRenderer.removeAllListeners('localai:error-notification');
|
||||
ipcRenderer.removeAllListeners('localai:model-ready');
|
||||
ipcRenderer.removeAllListeners('localai:service-status-changed');
|
||||
}
|
||||
},
|
||||
|
||||
@ -239,10 +252,11 @@ contextBridge.exposeInMainWorld('api', {
|
||||
removeOnPresetsUpdated: (callback) => ipcRenderer.removeListener('presets-updated', callback),
|
||||
onShortcutsUpdated: (callback) => ipcRenderer.on('shortcuts-updated', callback),
|
||||
removeOnShortcutsUpdated: (callback) => ipcRenderer.removeListener('shortcuts-updated', callback),
|
||||
onWhisperDownloadProgress: (callback) => ipcRenderer.on('whisper:download-progress', callback),
|
||||
removeOnWhisperDownloadProgress: (callback) => ipcRenderer.removeListener('whisper:download-progress', callback),
|
||||
onOllamaPullProgress: (callback) => ipcRenderer.on('ollama:pull-progress', callback),
|
||||
removeOnOllamaPullProgress: (callback) => ipcRenderer.removeListener('ollama:pull-progress', callback)
|
||||
// 통합 LocalAI 이벤트 사용
|
||||
onLocalAIInstallProgress: (callback) => ipcRenderer.on('localai:install-progress', callback),
|
||||
removeOnLocalAIInstallProgress: (callback) => ipcRenderer.removeListener('localai:install-progress', callback),
|
||||
onLocalAIInstallationComplete: (callback) => ipcRenderer.on('localai:installation-complete', callback),
|
||||
removeOnLocalAIInstallationComplete: (callback) => ipcRenderer.removeListener('localai:installation-complete', callback)
|
||||
},
|
||||
|
||||
// src/ui/settings/ShortCutSettingsView.js
|
||||
|
@ -1092,6 +1092,9 @@ export class ApiKeyHeader extends LitElement {
|
||||
this.requestUpdate();
|
||||
|
||||
const progressHandler = (event, data) => {
|
||||
// 통합 LocalAI 이벤트에서 Ollama 진행률만 처리
|
||||
if (data.service !== 'ollama') return;
|
||||
|
||||
let baseProgress = 0;
|
||||
let stageTotal = 0;
|
||||
|
||||
@ -1137,17 +1140,21 @@ export class ApiKeyHeader extends LitElement {
|
||||
}
|
||||
}, 15000); // 15 second timeout
|
||||
|
||||
const completionHandler = async (event, result) => {
|
||||
const completionHandler = async (event, data) => {
|
||||
// 통합 LocalAI 이벤트에서 Ollama 완료만 처리
|
||||
if (data.service !== 'ollama') return;
|
||||
if (operationCompleted) return;
|
||||
operationCompleted = true;
|
||||
clearTimeout(completionTimeout);
|
||||
|
||||
window.api.apiKeyHeader.removeOnOllamaInstallProgress(progressHandler);
|
||||
await this._handleOllamaSetupCompletion(result.success, result.error);
|
||||
window.api.apiKeyHeader.removeOnLocalAIProgress(progressHandler);
|
||||
// installation-complete 이벤트는 성공을 의미
|
||||
await this._handleOllamaSetupCompletion(true);
|
||||
};
|
||||
|
||||
window.api.apiKeyHeader.onceOllamaInstallComplete(completionHandler);
|
||||
window.api.apiKeyHeader.onOllamaInstallProgress(progressHandler);
|
||||
// 통합 LocalAI 이벤트 사용
|
||||
window.api.apiKeyHeader.onLocalAIComplete(completionHandler);
|
||||
window.api.apiKeyHeader.onLocalAIProgress(progressHandler);
|
||||
|
||||
try {
|
||||
let result;
|
||||
@ -1173,8 +1180,8 @@ export class ApiKeyHeader extends LitElement {
|
||||
operationCompleted = true;
|
||||
clearTimeout(completionTimeout);
|
||||
console.error('[ApiKeyHeader] Ollama setup failed:', error);
|
||||
window.api.apiKeyHeader.removeOnOllamaInstallProgress(progressHandler);
|
||||
window.api.apiKeyHeader.removeOnceOllamaInstallComplete(completionHandler);
|
||||
window.api.apiKeyHeader.removeOnLocalAIProgress(progressHandler);
|
||||
window.api.apiKeyHeader.removeOnLocalAIComplete(completionHandler);
|
||||
await this._handleOllamaSetupCompletion(false, error.message);
|
||||
}
|
||||
}
|
||||
@ -1304,7 +1311,7 @@ export class ApiKeyHeader extends LitElement {
|
||||
|
||||
// Create robust progress handler with timeout protection
|
||||
progressHandler = (event, data) => {
|
||||
if (data.model === modelName && !this._isOperationCancelled(modelName)) {
|
||||
if (data.service === 'ollama' && data.model === modelName && !this._isOperationCancelled(modelName)) {
|
||||
const progress = Math.round(Math.max(0, Math.min(100, data.progress || 0)));
|
||||
|
||||
if (progress !== this.installProgress) {
|
||||
@ -1315,8 +1322,8 @@ export class ApiKeyHeader extends LitElement {
|
||||
}
|
||||
};
|
||||
|
||||
// Set up progress tracking
|
||||
window.api.apiKeyHeader.onOllamaPullProgress(progressHandler);
|
||||
// Set up progress tracking - 통합 LocalAI 이벤트 사용
|
||||
window.api.apiKeyHeader.onLocalAIProgress(progressHandler);
|
||||
|
||||
// Execute the model pull with timeout
|
||||
const installPromise = window.api.apiKeyHeader.pullOllamaModel(modelName);
|
||||
@ -1346,7 +1353,7 @@ export class ApiKeyHeader extends LitElement {
|
||||
} finally {
|
||||
// Comprehensive cleanup
|
||||
if (progressHandler) {
|
||||
window.api.apiKeyHeader.removeOnOllamaPullProgress(progressHandler);
|
||||
window.api.apiKeyHeader.removeOnLocalAIProgress(progressHandler);
|
||||
}
|
||||
|
||||
this.installingModel = null;
|
||||
@ -1376,17 +1383,17 @@ export class ApiKeyHeader extends LitElement {
|
||||
let progressHandler = null;
|
||||
|
||||
try {
|
||||
// Set up robust progress listener
|
||||
progressHandler = (event, { modelId: id, progress }) => {
|
||||
if (id === modelId) {
|
||||
const cleanProgress = Math.round(Math.max(0, Math.min(100, progress || 0)));
|
||||
// Set up robust progress listener - 통합 LocalAI 이벤트 사용
|
||||
progressHandler = (event, data) => {
|
||||
if (data.service === 'whisper' && data.model === modelId) {
|
||||
const cleanProgress = Math.round(Math.max(0, Math.min(100, data.progress || 0)));
|
||||
this.whisperInstallingModels = { ...this.whisperInstallingModels, [modelId]: cleanProgress };
|
||||
console.log(`[ApiKeyHeader] Whisper download progress: ${cleanProgress}% for ${modelId}`);
|
||||
this.requestUpdate();
|
||||
}
|
||||
};
|
||||
|
||||
window.api.apiKeyHeader.onWhisperDownloadProgress(progressHandler);
|
||||
window.api.apiKeyHeader.onLocalAIProgress(progressHandler);
|
||||
|
||||
// Start download with timeout protection
|
||||
const downloadPromise = window.api.apiKeyHeader.downloadWhisperModel(modelId);
|
||||
@ -1413,7 +1420,7 @@ export class ApiKeyHeader extends LitElement {
|
||||
} finally {
|
||||
// Cleanup
|
||||
if (progressHandler) {
|
||||
window.api.apiKeyHeader.removeOnWhisperDownloadProgress(progressHandler);
|
||||
window.api.apiKeyHeader.removeOnLocalAIProgress(progressHandler);
|
||||
}
|
||||
delete this.whisperInstallingModels[modelId];
|
||||
this.requestUpdate();
|
||||
|
@ -575,19 +575,50 @@ export class SettingsView extends LitElement {
|
||||
this.requestUpdate();
|
||||
}
|
||||
|
||||
async loadLocalAIStatus() {
|
||||
try {
|
||||
// Load Ollama status
|
||||
const ollamaStatus = await window.api.settingsView.getOllamaStatus();
|
||||
if (ollamaStatus?.success) {
|
||||
this.ollamaStatus = { installed: ollamaStatus.installed, running: ollamaStatus.running };
|
||||
this.ollamaModels = ollamaStatus.models || [];
|
||||
}
|
||||
|
||||
// Load Whisper models status only if Whisper is enabled
|
||||
if (this.apiKeys?.whisper === 'local') {
|
||||
const whisperModelsResult = await window.api.settingsView.getWhisperInstalledModels();
|
||||
if (whisperModelsResult?.success) {
|
||||
const installedWhisperModels = whisperModelsResult.models;
|
||||
if (this.providerConfig?.whisper) {
|
||||
this.providerConfig.whisper.sttModels.forEach(m => {
|
||||
const installedInfo = installedWhisperModels.find(i => i.id === m.id);
|
||||
if (installedInfo) {
|
||||
m.installed = installedInfo.installed;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Trigger UI update
|
||||
this.requestUpdate();
|
||||
} catch (error) {
|
||||
console.error('Error loading LocalAI status:', error);
|
||||
}
|
||||
}
|
||||
|
||||
//////// after_modelStateService ////////
|
||||
async loadInitialData() {
|
||||
if (!window.api) return;
|
||||
this.isLoading = true;
|
||||
try {
|
||||
const [userState, modelSettings, presets, contentProtection, shortcuts, ollamaStatus, whisperModelsResult] = await Promise.all([
|
||||
// Load essential data first
|
||||
const [userState, modelSettings, presets, contentProtection, shortcuts] = await Promise.all([
|
||||
window.api.settingsView.getCurrentUser(),
|
||||
window.api.settingsView.getModelSettings(), // Facade call
|
||||
window.api.settingsView.getPresets(),
|
||||
window.api.settingsView.getContentProtectionStatus(),
|
||||
window.api.settingsView.getCurrentShortcuts(),
|
||||
window.api.settingsView.getOllamaStatus(),
|
||||
window.api.settingsView.getWhisperInstalledModels()
|
||||
window.api.settingsView.getCurrentShortcuts()
|
||||
]);
|
||||
|
||||
if (userState && userState.isLoggedIn) this.firebaseUser = userState;
|
||||
@ -609,23 +640,9 @@ export class SettingsView extends LitElement {
|
||||
const firstUserPreset = this.presets.find(p => p.is_default === 0);
|
||||
if (firstUserPreset) this.selectedPreset = firstUserPreset;
|
||||
}
|
||||
// Ollama status
|
||||
if (ollamaStatus?.success) {
|
||||
this.ollamaStatus = { installed: ollamaStatus.installed, running: ollamaStatus.running };
|
||||
this.ollamaModels = ollamaStatus.models || [];
|
||||
}
|
||||
// Whisper status
|
||||
if (whisperModelsResult?.success) {
|
||||
const installedWhisperModels = whisperModelsResult.models;
|
||||
if (this.providerConfig.whisper) {
|
||||
this.providerConfig.whisper.sttModels.forEach(m => {
|
||||
const installedInfo = installedWhisperModels.find(i => i.id === m.id);
|
||||
if (installedInfo) {
|
||||
m.installed = installedInfo.installed;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Load LocalAI status asynchronously to improve initial load time
|
||||
this.loadLocalAIStatus();
|
||||
} catch (error) {
|
||||
console.error('Error loading initial settings data:', error);
|
||||
} finally {
|
||||
@ -779,16 +796,16 @@ export class SettingsView extends LitElement {
|
||||
this.installingModels = { ...this.installingModels, [modelName]: 0 };
|
||||
this.requestUpdate();
|
||||
|
||||
// 진행률 이벤트 리스너 설정
|
||||
// 진행률 이벤트 리스너 설정 - 통합 LocalAI 이벤트 사용
|
||||
const progressHandler = (event, data) => {
|
||||
if (data.modelId === modelName) {
|
||||
this.installingModels = { ...this.installingModels, [modelName]: data.progress };
|
||||
if (data.service === 'ollama' && data.model === modelName) {
|
||||
this.installingModels = { ...this.installingModels, [modelName]: data.progress || 0 };
|
||||
this.requestUpdate();
|
||||
}
|
||||
};
|
||||
|
||||
// 진행률 이벤트 리스너 등록
|
||||
window.api.settingsView.onOllamaPullProgress(progressHandler);
|
||||
// 통합 LocalAI 이벤트 리스너 등록
|
||||
window.api.settingsView.onLocalAIInstallProgress(progressHandler);
|
||||
|
||||
try {
|
||||
const result = await window.api.settingsView.pullOllamaModel(modelName);
|
||||
@ -805,8 +822,8 @@ export class SettingsView extends LitElement {
|
||||
throw new Error(result.error || 'Installation failed');
|
||||
}
|
||||
} finally {
|
||||
// 진행률 이벤트 리스너 제거
|
||||
window.api.settingsView.removeOnOllamaPullProgress(progressHandler);
|
||||
// 통합 LocalAI 이벤트 리스너 제거
|
||||
window.api.settingsView.removeOnLocalAIInstallProgress(progressHandler);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(`[SettingsView] Error installing model ${modelName}:`, error);
|
||||
@ -821,34 +838,52 @@ export class SettingsView extends LitElement {
|
||||
this.requestUpdate();
|
||||
|
||||
try {
|
||||
// Set up progress listener
|
||||
const progressHandler = (event, { modelId: id, progress }) => {
|
||||
if (id === modelId) {
|
||||
this.installingModels = { ...this.installingModels, [modelId]: progress };
|
||||
// Set up progress listener - 통합 LocalAI 이벤트 사용
|
||||
const progressHandler = (event, data) => {
|
||||
if (data.service === 'whisper' && data.model === modelId) {
|
||||
this.installingModels = { ...this.installingModels, [modelId]: data.progress || 0 };
|
||||
this.requestUpdate();
|
||||
}
|
||||
};
|
||||
|
||||
window.api.settingsView.onWhisperDownloadProgress(progressHandler);
|
||||
window.api.settingsView.onLocalAIInstallProgress(progressHandler);
|
||||
|
||||
// Start download
|
||||
const result = await window.api.settingsView.downloadWhisperModel(modelId);
|
||||
|
||||
if (result.success) {
|
||||
// Update the model's installed status
|
||||
if (this.providerConfig?.whisper?.sttModels) {
|
||||
const modelInfo = this.providerConfig.whisper.sttModels.find(m => m.id === modelId);
|
||||
if (modelInfo) {
|
||||
modelInfo.installed = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Remove from installing models
|
||||
delete this.installingModels[modelId];
|
||||
this.requestUpdate();
|
||||
|
||||
// Reload LocalAI status to get fresh data
|
||||
await this.loadLocalAIStatus();
|
||||
|
||||
// Auto-select the model after download
|
||||
await this.selectModel('stt', modelId);
|
||||
} else {
|
||||
// Remove from installing models on failure too
|
||||
delete this.installingModels[modelId];
|
||||
this.requestUpdate();
|
||||
alert(`Failed to download Whisper model: ${result.error}`);
|
||||
}
|
||||
|
||||
// Cleanup
|
||||
window.api.settingsView.removeOnWhisperDownloadProgress(progressHandler);
|
||||
window.api.settingsView.removeOnLocalAIInstallProgress(progressHandler);
|
||||
} catch (error) {
|
||||
console.error(`[SettingsView] Error downloading Whisper model ${modelId}:`, error);
|
||||
alert(`Error downloading ${modelId}: ${error.message}`);
|
||||
} finally {
|
||||
// Remove from installing models on error
|
||||
delete this.installingModels[modelId];
|
||||
this.requestUpdate();
|
||||
alert(`Error downloading ${modelId}: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
@ -862,12 +897,6 @@ export class SettingsView extends LitElement {
|
||||
return null;
|
||||
}
|
||||
|
||||
async handleWhisperModelSelect(modelId) {
|
||||
if (!modelId) return;
|
||||
|
||||
// Select the model (will trigger download if needed)
|
||||
await this.selectModel('stt', modelId);
|
||||
}
|
||||
|
||||
handleUsePicklesKey(e) {
|
||||
e.preventDefault()
|
||||
@ -1192,12 +1221,7 @@ export class SettingsView extends LitElement {
|
||||
}
|
||||
|
||||
if (id === 'whisper') {
|
||||
// Special UI for Whisper with model selection
|
||||
const whisperModels = config.sttModels || [];
|
||||
const selectedWhisperModel = this.selectedStt && this.getProviderForModel('stt', this.selectedStt) === 'whisper'
|
||||
? this.selectedStt
|
||||
: null;
|
||||
|
||||
// Simplified UI for Whisper without model selection
|
||||
return html`
|
||||
<div class="provider-key-group">
|
||||
<label>${config.name} (Local STT)</label>
|
||||
@ -1205,51 +1229,6 @@ export class SettingsView extends LitElement {
|
||||
<div style="padding: 8px; background: rgba(0,255,0,0.1); border-radius: 4px; font-size: 11px; color: rgba(0,255,0,0.8); margin-bottom: 8px;">
|
||||
✓ Whisper is enabled
|
||||
</div>
|
||||
|
||||
<!-- Whisper Model Selection Dropdown -->
|
||||
<label style="font-size: 10px; margin-top: 8px;">Select Model:</label>
|
||||
<select
|
||||
class="model-dropdown"
|
||||
style="width: 100%; padding: 6px; background: rgba(0,0,0,0.2); border: 1px solid rgba(255,255,255,0.2); color: white; border-radius: 4px; font-size: 11px; margin-bottom: 8px;"
|
||||
@change=${(e) => this.handleWhisperModelSelect(e.target.value)}
|
||||
.value=${selectedWhisperModel || ''}
|
||||
>
|
||||
<option value="">Choose a model...</option>
|
||||
${whisperModels.map(model => {
|
||||
const isInstalling = this.installingModels[model.id] !== undefined;
|
||||
const progress = this.installingModels[model.id] || 0;
|
||||
|
||||
let statusText = '';
|
||||
if (isInstalling) {
|
||||
statusText = ` (Downloading ${progress}%)`;
|
||||
} else if (model.installed) {
|
||||
statusText = ' (Installed)';
|
||||
}
|
||||
|
||||
return html`
|
||||
<option value="${model.id}" ?disabled=${isInstalling}>
|
||||
${model.name}${statusText}
|
||||
</option>
|
||||
`;
|
||||
})}
|
||||
</select>
|
||||
|
||||
${Object.entries(this.installingModels).map(([modelId, progress]) => {
|
||||
if (modelId.startsWith('whisper-') && progress !== undefined) {
|
||||
return html`
|
||||
<div style="margin: 8px 0;">
|
||||
<div style="font-size: 10px; color: rgba(255,255,255,0.7); margin-bottom: 4px;">
|
||||
Downloading ${modelId}...
|
||||
</div>
|
||||
<div class="install-progress" style="height: 4px; background: rgba(255,255,255,0.1); border-radius: 2px; overflow: hidden;">
|
||||
<div class="install-progress-bar" style="height: 100%; background: rgba(0, 122, 255, 0.8); width: ${progress}%; transition: width 0.3s ease;"></div>
|
||||
</div>
|
||||
</div>
|
||||
`;
|
||||
}
|
||||
return null;
|
||||
})}
|
||||
|
||||
<button class="settings-button full-width danger" @click=${() => this.handleClearKey(id)}>
|
||||
Disable Whisper
|
||||
</button>
|
||||
@ -1331,6 +1310,9 @@ export class SettingsView extends LitElement {
|
||||
<div class="model-list">
|
||||
${this.availableSttModels.map(model => {
|
||||
const isWhisper = this.getProviderForModel('stt', model.id) === 'whisper';
|
||||
const whisperModel = isWhisper && this.providerConfig?.whisper?.sttModels
|
||||
? this.providerConfig.whisper.sttModels.find(m => m.id === model.id)
|
||||
: null;
|
||||
const isInstalling = this.installingModels[model.id] !== undefined;
|
||||
const installProgress = this.installingModels[model.id] || 0;
|
||||
|
||||
@ -1338,10 +1320,16 @@ export class SettingsView extends LitElement {
|
||||
<div class="model-item ${this.selectedStt === model.id ? 'selected' : ''}"
|
||||
@click=${() => this.selectModel('stt', model.id)}>
|
||||
<span>${model.name}</span>
|
||||
${isWhisper && isInstalling ? html`
|
||||
<div class="install-progress">
|
||||
<div class="install-progress-bar" style="width: ${installProgress}%"></div>
|
||||
</div>
|
||||
${isWhisper ? html`
|
||||
${isInstalling ? html`
|
||||
<div class="install-progress">
|
||||
<div class="install-progress-bar" style="width: ${installProgress}%"></div>
|
||||
</div>
|
||||
` : whisperModel?.installed ? html`
|
||||
<span class="model-status installed">✓ Installed</span>
|
||||
` : html`
|
||||
<span class="model-status not-installed">Not Installed</span>
|
||||
`}
|
||||
` : ''}
|
||||
</div>
|
||||
`;
|
||||
|
Loading…
x
Reference in New Issue
Block a user