Add local LLM, STT

parent 594f9e8d19
commit 0ff9f4b74e
@@ -69,7 +69,7 @@ npm run setup

**Currently Supporting:**

- OpenAI API: Get OpenAI API Key [here](https://platform.openai.com/api-keys)
- Gemini API: Get Gemini API Key [here](https://aistudio.google.com/apikey)
-- Local LLM (WIP)
+- Local LLM & STT: Ollama & Whisper

### Liquid Glass Design (coming soon)

@@ -115,8 +115,6 @@ We have a list of [help wanted](https://github.com/pickle-com/glass/issues?q=is%

| Status | Issue                       | Description                                                   |
|--------|-----------------------------|---------------------------------------------------------------|
-| 🚧 WIP | Local LLM Support           | Supporting Local LLM to power AI answers                      |
-| 🚧 WIP | Firebase Data Storage Issue | Session & ask should be saved in Firebase for signed-up users |
| 🚧 WIP | Liquid Glass                | Liquid Glass UI for macOS 26                                  |

### Changelog

@@ -125,7 +123,7 @@ We have a list of [help wanted](https://github.com/pickle-com/glass/issues?q=is%

- Jul 6: Full code refactoring is done.
- Jul 7: Now supports Claude and LLM/STT model selection
- Jul 8: Now supports Windows (beta), improved AEC in Rust (to separate mic/system audio), shortcut editing (beta)
+- Jul 8: Now supports Local LLM & STT, Firebase data storage

## About Pickle
@@ -34,6 +34,8 @@ extraResources:

 asarUnpack:
   - "src/assets/SystemAudioDump"
+  - "**/node_modules/sharp/**/*"
+  - "**/node_modules/@img/**/*"

 # Windows configuration
 win:
package-lock.json (generated, 2417 lines changed)
File diff suppressed because it is too large.
@@ -2,7 +2,7 @@
   "name": "pickle-glass",
   "productName": "Glass",
-  "version": "0.2.3",
+  "version": "0.2.4",
   "description": "Cl*ely for Free",
   "main": "src/index.js",
File diff suppressed because it is too large.
@@ -57,6 +57,34 @@ const PROVIDERS = {
        ],
        sttModels: [],
    },
    'ollama': {
        name: 'Ollama (Local)',
        handler: () => require("./providers/ollama"),
        llmModels: [], // Dynamic models populated from installed Ollama models
        sttModels: [], // Ollama doesn't support STT yet
    },
    'whisper': {
        name: 'Whisper (Local)',
        handler: () => {
            // Only load in main process
            if (typeof window === 'undefined') {
                return require("./providers/whisper");
            }
            // Return dummy for renderer
            return {
                createSTT: () => { throw new Error('Whisper STT is only available in main process'); },
                createLLM: () => { throw new Error('Whisper does not support LLM'); },
                createStreamingLLM: () => { throw new Error('Whisper does not support LLM'); }
            };
        },
        llmModels: [],
        sttModels: [
            { id: 'whisper-tiny', name: 'Whisper Tiny (39M)' },
            { id: 'whisper-base', name: 'Whisper Base (74M)' },
            { id: 'whisper-small', name: 'Whisper Small (244M)' },
            { id: 'whisper-medium', name: 'Whisper Medium (769M)' },
        ],
    },
};

function sanitizeModelId(model) {
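For context, a minimal usage sketch (not part of the commit) of how a caller can resolve one of these providers. The lazy `handler()` indirection keeps the renderer from loading main-process-only modules. The calling code and model name below are hypothetical; `PROVIDERS` and the provider API come from this diff (modelStateService below imports it via `require('../ai/factory')`):

```js
// Hypothetical caller, main process.
const { PROVIDERS } = require('../ai/factory');

const ollama = PROVIDERS['ollama'].handler(); // lazily requires ./providers/ollama
const llm = ollama.createLLM({ model: 'llama3.2:latest' }); // placeholder model name

llm.generateContent(['You are a helpful assistant.', 'Say hello in one word.'])
    .then(result => console.log(result.response.text()));
```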
src/common/ai/providers/ollama.js (new file, 242 lines)
@@ -0,0 +1,242 @@
const http = require('http');
const fetch = require('node-fetch');

function convertMessagesToOllamaFormat(messages) {
    return messages.map(msg => {
        if (Array.isArray(msg.content)) {
            let textContent = '';
            const images = [];

            for (const part of msg.content) {
                if (part.type === 'text') {
                    textContent += part.text;
                } else if (part.type === 'image_url') {
                    const base64 = part.image_url.url.replace(/^data:image\/[^;]+;base64,/, '');
                    images.push(base64);
                }
            }

            return {
                role: msg.role,
                content: textContent,
                ...(images.length > 0 && { images })
            };
        } else {
            return msg;
        }
    });
}

function createLLM({
    model,
    temperature = 0.7,
    maxTokens = 2048,
    baseUrl = 'http://localhost:11434',
    ...config
}) {
    if (!model) {
        throw new Error('Model parameter is required for Ollama LLM. Please specify a model name (e.g., "llama3.2:latest", "gemma3:4b")');
    }
    return {
        generateContent: async (parts) => {
            let systemPrompt = '';
            const userContent = [];

            for (const part of parts) {
                if (typeof part === 'string') {
                    if (systemPrompt === '' && part.includes('You are')) {
                        systemPrompt = part;
                    } else {
                        userContent.push(part);
                    }
                } else if (part.inlineData) {
                    userContent.push({
                        type: 'image',
                        image: `data:${part.inlineData.mimeType};base64,${part.inlineData.data}`
                    });
                }
            }

            const messages = [];
            if (systemPrompt) {
                messages.push({ role: 'system', content: systemPrompt });
            }
            messages.push({ role: 'user', content: userContent.join('\n') });

            try {
                const response = await fetch(`${baseUrl}/api/chat`, {
                    method: 'POST',
                    headers: { 'Content-Type': 'application/json' },
                    body: JSON.stringify({
                        model,
                        messages,
                        stream: false,
                        options: {
                            temperature,
                            num_predict: maxTokens,
                        }
                    })
                });

                if (!response.ok) {
                    throw new Error(`Ollama API error: ${response.status} ${response.statusText}`);
                }

                const result = await response.json();

                return {
                    response: {
                        text: () => result.message.content
                    },
                    raw: result
                };
            } catch (error) {
                console.error('Ollama LLM error:', error);
                throw error;
            }
        },

        chat: async (messages) => {
            const ollamaMessages = convertMessagesToOllamaFormat(messages);

            try {
                const response = await fetch(`${baseUrl}/api/chat`, {
                    method: 'POST',
                    headers: { 'Content-Type': 'application/json' },
                    body: JSON.stringify({
                        model,
                        messages: ollamaMessages,
                        stream: false,
                        options: {
                            temperature,
                            num_predict: maxTokens,
                        }
                    })
                });

                if (!response.ok) {
                    throw new Error(`Ollama API error: ${response.status} ${response.statusText}`);
                }

                const result = await response.json();

                return {
                    content: result.message.content,
                    raw: result
                };
            } catch (error) {
                console.error('Ollama chat error:', error);
                throw error;
            }
        }
    };
}

function createStreamingLLM({
    model,
    temperature = 0.7,
    maxTokens = 2048,
    baseUrl = 'http://localhost:11434',
    ...config
}) {
    if (!model) {
        throw new Error('Model parameter is required for Ollama streaming LLM. Please specify a model name (e.g., "llama3.2:latest", "gemma3:4b")');
    }
    return {
        streamChat: async (messages) => {
            console.log('[Ollama Provider] Starting streaming request');

            const ollamaMessages = convertMessagesToOllamaFormat(messages);
            console.log('[Ollama Provider] Converted messages for Ollama:', ollamaMessages);

            try {
                const response = await fetch(`${baseUrl}/api/chat`, {
                    method: 'POST',
                    headers: { 'Content-Type': 'application/json' },
                    body: JSON.stringify({
                        model,
                        messages: ollamaMessages,
                        stream: true,
                        options: {
                            temperature,
                            num_predict: maxTokens,
                        }
                    })
                });

                if (!response.ok) {
                    throw new Error(`Ollama API error: ${response.status} ${response.statusText}`);
                }

                console.log('[Ollama Provider] Got streaming response');

                const stream = new ReadableStream({
                    async start(controller) {
                        let buffer = '';

                        try {
                            response.body.on('data', (chunk) => {
                                buffer += chunk.toString();
                                const lines = buffer.split('\n');
                                buffer = lines.pop() || '';

                                for (const line of lines) {
                                    if (line.trim() === '') continue;

                                    try {
                                        const data = JSON.parse(line);

                                        if (data.message?.content) {
                                            const sseData = JSON.stringify({
                                                choices: [{
                                                    delta: {
                                                        content: data.message.content
                                                    }
                                                }]
                                            });
                                            controller.enqueue(new TextEncoder().encode(`data: ${sseData}\n\n`));
                                        }

                                        if (data.done) {
                                            controller.enqueue(new TextEncoder().encode('data: [DONE]\n\n'));
                                        }
                                    } catch (e) {
                                        console.error('[Ollama Provider] Failed to parse chunk:', e);
                                    }
                                }
                            });

                            response.body.on('end', () => {
                                controller.close();
                                console.log('[Ollama Provider] Streaming completed');
                            });

                            response.body.on('error', (error) => {
                                console.error('[Ollama Provider] Streaming error:', error);
                                controller.error(error);
                            });

                        } catch (error) {
                            console.error('[Ollama Provider] Streaming setup error:', error);
                            controller.error(error);
                        }
                    }
                });

                return {
                    ok: true,
                    body: stream
                };

            } catch (error) {
                console.error('[Ollama Provider] Request error:', error);
                throw error;
            }
        }
    };
}

module.exports = {
    createLLM,
    createStreamingLLM
};
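A sketch of consuming the stream `createStreamingLLM` returns. The provider deliberately mimics a fetch-style `{ ok, body }` response and emits OpenAI-style SSE lines, so existing SSE parsing can be reused downstream. Assumes Node 18+, where `ReadableStream` and `TextDecoder` are globals; the model name and prompt are placeholders:

```js
const { createStreamingLLM } = require('./providers/ollama');

async function demo() {
    const llm = createStreamingLLM({ model: 'llama3.2:latest' });
    const res = await llm.streamChat([{ role: 'user', content: 'Hi!' }]);

    const reader = res.body.getReader();
    const decoder = new TextDecoder();
    while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        // Each chunk holds zero or more `data: {...}` SSE lines, ending with `data: [DONE]`
        process.stdout.write(decoder.decode(value));
    }
}

demo().catch(console.error);
```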
src/common/ai/providers/whisper.js (new file, 231 lines)
@@ -0,0 +1,231 @@
let spawn, path, EventEmitter;

if (typeof window === 'undefined') {
    spawn = require('child_process').spawn;
    path = require('path');
    EventEmitter = require('events').EventEmitter;
} else {
    class DummyEventEmitter {
        on() {}
        emit() {}
        removeAllListeners() {}
    }
    EventEmitter = DummyEventEmitter;
}

class WhisperSTTSession extends EventEmitter {
    constructor(model, whisperService, sessionId) {
        super();
        this.model = model;
        this.whisperService = whisperService;
        this.sessionId = sessionId || `session_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
        this.process = null;
        this.isRunning = false;
        this.audioBuffer = Buffer.alloc(0);
        this.processingInterval = null;
        this.lastTranscription = '';
    }

    async initialize() {
        try {
            await this.whisperService.ensureModelAvailable(this.model);
            this.isRunning = true;
            this.startProcessingLoop();
            return true;
        } catch (error) {
            console.error('[WhisperSTT] Initialization error:', error);
            this.emit('error', error);
            return false;
        }
    }

    startProcessingLoop() {
        this.processingInterval = setInterval(async () => {
            // ~150ms of 24kHz 16-bit mono PCM (24000 samples/s * 2 bytes * 0.15s)
            const minBufferSize = 24000 * 2 * 0.15;
            if (this.audioBuffer.length >= minBufferSize && !this.process) {
                console.log(`[WhisperSTT-${this.sessionId}] Processing audio chunk, buffer size: ${this.audioBuffer.length}`);
                await this.processAudioChunk();
            }
        }, 1500);
    }

    async processAudioChunk() {
        if (!this.isRunning || this.audioBuffer.length === 0) return;

        const audioData = this.audioBuffer;
        this.audioBuffer = Buffer.alloc(0);

        try {
            const tempFile = await this.whisperService.saveAudioToTemp(audioData, this.sessionId);

            if (!tempFile || typeof tempFile !== 'string') {
                console.error('[WhisperSTT] Invalid temp file path:', tempFile);
                return;
            }

            const whisperPath = await this.whisperService.getWhisperPath();
            const modelPath = await this.whisperService.getModelPath(this.model);

            if (!whisperPath || !modelPath) {
                console.error('[WhisperSTT] Invalid whisper or model path:', { whisperPath, modelPath });
                return;
            }

            this.process = spawn(whisperPath, [
                '-m', modelPath,
                '-f', tempFile,
                '--no-timestamps',
                '--output-txt',
                '--output-json',
                '--language', 'auto',
                '--threads', '4',
                '--print-progress', 'false'
            ]);

            let output = '';
            let errorOutput = '';

            this.process.stdout.on('data', (data) => {
                output += data.toString();
            });

            this.process.stderr.on('data', (data) => {
                errorOutput += data.toString();
            });

            this.process.on('close', async (code) => {
                this.process = null;

                if (code === 0 && output.trim()) {
                    const transcription = output.trim();
                    if (transcription && transcription !== this.lastTranscription) {
                        this.lastTranscription = transcription;
                        console.log(`[WhisperSTT-${this.sessionId}] Transcription: "${transcription}"`);
                        this.emit('transcription', {
                            text: transcription,
                            timestamp: Date.now(),
                            confidence: 1.0,
                            sessionId: this.sessionId
                        });
                    }
                } else if (errorOutput) {
                    console.error(`[WhisperSTT-${this.sessionId}] Process error:`, errorOutput);
                }

                await this.whisperService.cleanupTempFile(tempFile);
            });

        } catch (error) {
            console.error('[WhisperSTT] Processing error:', error);
            this.emit('error', error);
        }
    }

    sendRealtimeInput(audioData) {
        if (!this.isRunning) {
            console.warn(`[WhisperSTT-${this.sessionId}] Session not running, cannot accept audio`);
            return;
        }

        if (typeof audioData === 'string') {
            try {
                audioData = Buffer.from(audioData, 'base64');
            } catch (error) {
                console.error('[WhisperSTT] Failed to decode base64 audio data:', error);
                return;
            }
        } else if (audioData instanceof ArrayBuffer) {
            audioData = Buffer.from(audioData);
        } else if (!Buffer.isBuffer(audioData) && !(audioData instanceof Uint8Array)) {
            console.error('[WhisperSTT] Invalid audio data type:', typeof audioData);
            return;
        }

        if (!Buffer.isBuffer(audioData)) {
            audioData = Buffer.from(audioData);
        }

        if (audioData.length > 0) {
            this.audioBuffer = Buffer.concat([this.audioBuffer, audioData]);
            // Log roughly 10% of audio chunks (randomly sampled) to avoid spam
            if (Math.random() < 0.1) {
                console.log(`[WhisperSTT-${this.sessionId}] Received audio chunk: ${audioData.length} bytes, total buffer: ${this.audioBuffer.length} bytes`);
            }
        }
    }

    async close() {
        console.log(`[WhisperSTT-${this.sessionId}] Closing session`);
        this.isRunning = false;

        if (this.processingInterval) {
            clearInterval(this.processingInterval);
            this.processingInterval = null;
        }

        if (this.process) {
            this.process.kill('SIGTERM');
            this.process = null;
        }

        this.removeAllListeners();
    }
}

class WhisperProvider {
    constructor() {
        this.whisperService = null;
    }

    async initialize() {
        if (!this.whisperService) {
            const { WhisperService } = require('../../services/whisperService');
            this.whisperService = new WhisperService();
            await this.whisperService.initialize();
        }
    }

    async createSTT(config) {
        await this.initialize();

        const model = config.model || 'whisper-tiny';
        const sessionType = config.sessionType || 'unknown';
        console.log(`[WhisperProvider] Creating ${sessionType} STT session with model: ${model}`);

        // Create a unique session ID based on the session type
        const sessionId = `${sessionType}_${Date.now()}_${Math.random().toString(36).substr(2, 6)}`;
        const session = new WhisperSTTSession(model, this.whisperService, sessionId);

        console.log(`[WhisperProvider] Created session: ${sessionId}`);

        const initialized = await session.initialize();
        if (!initialized) {
            throw new Error('Failed to initialize Whisper STT session');
        }

        if (config.callbacks) {
            if (config.callbacks.onmessage) {
                session.on('transcription', config.callbacks.onmessage);
            }
            if (config.callbacks.onerror) {
                session.on('error', config.callbacks.onerror);
            }
            if (config.callbacks.onclose) {
                session.on('close', config.callbacks.onclose);
            }
        }

        return session;
    }

    async createLLM() {
        throw new Error('Whisper provider does not support LLM functionality');
    }

    async createStreamingLLM() {
        throw new Error('Whisper provider does not support streaming LLM functionality');
    }
}

module.exports = new WhisperProvider();
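A usage sketch for the callback wiring above. The audio source is hypothetical; the buffer threshold in `WhisperSTTSession` implies 24kHz 16-bit mono PCM input:

```js
const whisperProvider = require('./providers/whisper');

async function transcribe(audioChunks) {
    const session = await whisperProvider.createSTT({
        model: 'whisper-tiny',
        sessionType: 'microphone', // used only to label the session ID
        callbacks: {
            onmessage: ({ text, sessionId }) => console.log(`[${sessionId}] ${text}`),
            onerror: (err) => console.error('STT error:', err),
        },
    });

    for (const chunk of audioChunks) {
        session.sendRealtimeInput(chunk); // Buffer, ArrayBuffer, or base64 string
    }
    await session.close();
}
```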
src/common/config/checksums.js (new file, 46 lines)
@@ -0,0 +1,46 @@
const DOWNLOAD_CHECKSUMS = {
    ollama: {
        dmg: {
            url: 'https://ollama.com/download/Ollama.dmg',
            sha256: null // To be updated with actual checksum
        },
        exe: {
            url: 'https://ollama.com/download/OllamaSetup.exe',
            sha256: null // To be updated with actual checksum
        }
    },
    whisper: {
        models: {
            'whisper-tiny': {
                url: 'https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-tiny.bin',
                sha256: 'be07e048e1e599ad46341c8d2a135645097a538221678b7acdd1b1919c6e1b21'
            },
            'whisper-base': {
                url: 'https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.bin',
                sha256: '60ed5bc3dd14eea856493d334349b405782ddcaf0028d4b5df4088345fba2efe'
            },
            'whisper-small': {
                url: 'https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-small.bin',
                sha256: '1be3a9b2063867b937e64e2ec7483364a79917e157fa98c5d94b5c1fffea987b'
            },
            'whisper-medium': {
                url: 'https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-medium.bin',
                sha256: '6c14d5adee5f86394037b4e4e8b59f1673b6cee10e3cf0b11bbdbee79c156208'
            }
        },
        binaries: {
            'v1.7.6': {
                windows: {
                    url: 'https://github.com/ggerganov/whisper.cpp/releases/download/v1.7.6/whisper-cpp-v1.7.6-win-x64.zip',
                    sha256: null // To be updated with actual checksum
                },
                linux: {
                    url: 'https://github.com/ggerganov/whisper.cpp/releases/download/v1.7.6/whisper-cpp-v1.7.6-linux-x64.tar.gz',
                    sha256: null // To be updated with actual checksum
                }
            }
        }
    }
};

module.exports = { DOWNLOAD_CHECKSUMS };
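The `null` checksums are effectively "not yet pinned": the retry/download path later in this commit only verifies when an expected checksum is provided. A sketch of a verifier consuming this table, mirroring `LocalAIServiceBase.verifyChecksum()` below; the file path is a placeholder:

```js
const crypto = require('crypto');
const fs = require('fs');
const { DOWNLOAD_CHECKSUMS } = require('./checksums');

function sha256File(filePath) {
    return new Promise((resolve, reject) => {
        const hash = crypto.createHash('sha256');
        fs.createReadStream(filePath)
            .on('data', (d) => hash.update(d))
            .on('end', () => resolve(hash.digest('hex')))
            .on('error', reject);
    });
}

async function verifyWhisperModel(modelId, filePath) {
    const expected = DOWNLOAD_CHECKSUMS.whisper.models[modelId]?.sha256;
    if (!expected) return true; // no pinned checksum: skip verification
    return (await sha256File(filePath)) === expected;
}
```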
@@ -73,6 +73,23 @@ const LATEST_SCHEMA = {
            { name: 'created_at', type: 'INTEGER' },
            { name: 'sync_state', type: 'TEXT DEFAULT \'clean\'' }
        ]
    },
    ollama_models: {
        columns: [
            { name: 'name', type: 'TEXT PRIMARY KEY' },
            { name: 'size', type: 'TEXT NOT NULL' },
            { name: 'installed', type: 'INTEGER DEFAULT 0' },
            { name: 'installing', type: 'INTEGER DEFAULT 0' }
        ]
    },
    whisper_models: {
        columns: [
            { name: 'id', type: 'TEXT PRIMARY KEY' },
            { name: 'name', type: 'TEXT NOT NULL' },
            { name: 'size', type: 'TEXT NOT NULL' },
            { name: 'installed', type: 'INTEGER DEFAULT 0' },
            { name: 'installing', type: 'INTEGER DEFAULT 0' }
        ]
    }
};
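Illustrative only: the CREATE TABLE statements these two schema entries imply (the migration runner that turns `LATEST_SCHEMA` into SQL is not shown in this hunk):

```js
const impliedDdl = `
CREATE TABLE IF NOT EXISTS ollama_models (
    name       TEXT PRIMARY KEY,
    size       TEXT NOT NULL,
    installed  INTEGER DEFAULT 0,
    installing INTEGER DEFAULT 0
);
CREATE TABLE IF NOT EXISTS whisper_models (
    id         TEXT PRIMARY KEY,
    name       TEXT NOT NULL,
    size       TEXT NOT NULL,
    installed  INTEGER DEFAULT 0,
    installing INTEGER DEFAULT 0
);`;
```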
src/common/repositories/ollamaModel/index.js (new file, 20 lines)
@@ -0,0 +1,20 @@
const sqliteRepository = require('./sqlite.repository');

// For now, we only use the SQLite repository.
// In the future, we could add cloud sync support.

function getRepository() {
    return sqliteRepository;
}

// Export all repository methods
module.exports = {
    getAllModels: (...args) => getRepository().getAllModels(...args),
    getModel: (...args) => getRepository().getModel(...args),
    upsertModel: (...args) => getRepository().upsertModel(...args),
    updateInstallStatus: (...args) => getRepository().updateInstallStatus(...args),
    initializeDefaultModels: (...args) => getRepository().initializeDefaultModels(...args),
    deleteModel: (...args) => getRepository().deleteModel(...args),
    getInstalledModels: (...args) => getRepository().getInstalledModels(...args),
    getInstallingModels: (...args) => getRepository().getInstallingModels(...args)
};
src/common/repositories/ollamaModel/sqlite.repository.js (new file, 137 lines)
@@ -0,0 +1,137 @@
const sqliteClient = require('../../services/sqliteClient');

/**
 * Get all Ollama models
 */
function getAllModels() {
    const db = sqliteClient.getDb();
    const query = 'SELECT * FROM ollama_models ORDER BY name';

    try {
        return db.prepare(query).all() || [];
    } catch (err) {
        console.error('[OllamaModel Repository] Failed to get models:', err);
        throw err;
    }
}

/**
 * Get a specific model by name
 */
function getModel(name) {
    const db = sqliteClient.getDb();
    const query = 'SELECT * FROM ollama_models WHERE name = ?';

    try {
        return db.prepare(query).get(name);
    } catch (err) {
        console.error('[OllamaModel Repository] Failed to get model:', err);
        throw err;
    }
}

/**
 * Create or update a model entry
 */
function upsertModel({ name, size, installed = false, installing = false }) {
    const db = sqliteClient.getDb();
    const query = `
        INSERT INTO ollama_models (name, size, installed, installing)
        VALUES (?, ?, ?, ?)
        ON CONFLICT(name) DO UPDATE SET
            size = excluded.size,
            installed = excluded.installed,
            installing = excluded.installing
    `;

    try {
        db.prepare(query).run(name, size, installed ? 1 : 0, installing ? 1 : 0);
        return { success: true };
    } catch (err) {
        console.error('[OllamaModel Repository] Failed to upsert model:', err);
        throw err;
    }
}

/**
 * Update installation status for a model
 */
function updateInstallStatus(name, installed, installing = false) {
    const db = sqliteClient.getDb();
    const query = 'UPDATE ollama_models SET installed = ?, installing = ? WHERE name = ?';

    try {
        const result = db.prepare(query).run(installed ? 1 : 0, installing ? 1 : 0, name);
        return { success: true, changes: result.changes };
    } catch (err) {
        console.error('[OllamaModel Repository] Failed to update install status:', err);
        throw err;
    }
}

/**
 * Initialize default models - now done dynamically based on installed models
 */
function initializeDefaultModels() {
    // Default models are now detected dynamically from the Ollama installation.
    // This function is kept for compatibility but doesn't hardcode any models.
    console.log('[OllamaModel Repository] Default models initialization skipped - using dynamic detection');
    return { success: true };
}

/**
 * Delete a model entry
 */
function deleteModel(name) {
    const db = sqliteClient.getDb();
    const query = 'DELETE FROM ollama_models WHERE name = ?';

    try {
        const result = db.prepare(query).run(name);
        return { success: true, changes: result.changes };
    } catch (err) {
        console.error('[OllamaModel Repository] Failed to delete model:', err);
        throw err;
    }
}

/**
 * Get installed models
 */
function getInstalledModels() {
    const db = sqliteClient.getDb();
    const query = 'SELECT * FROM ollama_models WHERE installed = 1 ORDER BY name';

    try {
        return db.prepare(query).all() || [];
    } catch (err) {
        console.error('[OllamaModel Repository] Failed to get installed models:', err);
        throw err;
    }
}

/**
 * Get models currently being installed
 */
function getInstallingModels() {
    const db = sqliteClient.getDb();
    const query = 'SELECT * FROM ollama_models WHERE installing = 1 ORDER BY name';

    try {
        return db.prepare(query).all() || [];
    } catch (err) {
        console.error('[OllamaModel Repository] Failed to get installing models:', err);
        throw err;
    }
}

module.exports = {
    getAllModels,
    getModel,
    upsertModel,
    updateInstallStatus,
    initializeDefaultModels,
    deleteModel,
    getInstalledModels,
    getInstallingModels
};
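A sketch of the install lifecycle against this repository; the model name and size are placeholders, and real callers go through the `index.js` facade above:

```js
const ollamaModels = require('../repositories/ollamaModel');

// Mark a model as downloading, then flip it to installed when the pull finishes.
ollamaModels.upsertModel({ name: 'llama3.2:latest', size: '2.0 GB', installing: true });
// ... after `ollama pull` completes:
ollamaModels.updateInstallStatus('llama3.2:latest', true, false);

console.log(ollamaModels.getInstalledModels().map(m => m.name));
```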
src/common/repositories/whisperModel/index.js (new file, 53 lines)
@@ -0,0 +1,53 @@
const BaseModelRepository = require('../baseModel');

class WhisperModelRepository extends BaseModelRepository {
    constructor(db, tableName = 'whisper_models') {
        super(db, tableName);
    }

    async initializeModels(availableModels) {
        const existingModels = await this.getAll();
        const existingIds = new Set(existingModels.map(m => m.id));

        for (const [modelId, modelInfo] of Object.entries(availableModels)) {
            if (!existingIds.has(modelId)) {
                await this.create({
                    id: modelId,
                    name: modelInfo.name,
                    size: modelInfo.size,
                    installed: 0,
                    installing: 0
                });
            }
        }
    }

    async getInstalledModels() {
        return this.findAll({ installed: 1 });
    }

    async setInstalled(modelId, installed = true) {
        return this.update({ id: modelId }, {
            installed: installed ? 1 : 0,
            installing: 0
        });
    }

    async setInstalling(modelId, installing = true) {
        return this.update({ id: modelId }, {
            installing: installing ? 1 : 0
        });
    }

    async isInstalled(modelId) {
        const model = await this.findOne({ id: modelId });
        return model && model.installed === 1;
    }

    async isInstalling(modelId) {
        const model = await this.findOne({ id: modelId });
        return model && model.installing === 1;
    }
}

module.exports = WhisperModelRepository;
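A seeding sketch for the class above. The `db` handle and the catalog shape are assumptions based on the constructor and the `initializeModels()` signature; `BaseModelRepository` itself is not part of this diff:

```js
const WhisperModelRepository = require('../repositories/whisperModel');

async function seedWhisperModels(db) {
    const repo = new WhisperModelRepository(db);
    await repo.initializeModels({
        'whisper-tiny': { name: 'Whisper Tiny (39M)', size: '39M' },
        'whisper-base': { name: 'Whisper Base (74M)', size: '74M' },
    });

    await repo.setInstalling('whisper-tiny');
    // ... download completes:
    await repo.setInstalled('whisper-tiny');
    console.log(await repo.isInstalled('whisper-tiny')); // true
}
```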
src/common/services/cryptoService.js (new file, 89 lines)
@@ -0,0 +1,89 @@
const crypto = require('crypto');
const { app } = require('electron');
const os = require('os');

class CryptoService {
    constructor() {
        this.algorithm = 'aes-256-gcm';
        this.saltLength = 32;
        this.tagLength = 16;
        this.ivLength = 16;
        this.iterations = 100000;
        this.keyLength = 32;
        this._derivedKey = null;
    }

    _getMachineId() {
        const machineInfo = `${os.hostname()}-${os.platform()}-${os.arch()}`;
        const appPath = app.getPath('userData');
        return crypto.createHash('sha256').update(machineInfo + appPath).digest('hex');
    }

    _deriveKey() {
        if (this._derivedKey) return this._derivedKey;

        const machineId = this._getMachineId();
        const salt = crypto.createHash('sha256').update('pickle-glass-salt').digest();
        this._derivedKey = crypto.pbkdf2Sync(machineId, salt, this.iterations, this.keyLength, 'sha256');
        return this._derivedKey;
    }

    encrypt(text) {
        if (!text) return null;

        try {
            const iv = crypto.randomBytes(this.ivLength);
            // Note: this per-message salt is stored in the output format, but the
            // key itself is derived with a fixed salt in _deriveKey().
            const salt = crypto.randomBytes(this.saltLength);
            const key = this._deriveKey();

            const cipher = crypto.createCipheriv(this.algorithm, key, iv);

            const encrypted = Buffer.concat([
                cipher.update(text, 'utf8'),
                cipher.final()
            ]);

            const tag = cipher.getAuthTag();

            // Output layout: salt | iv | auth tag | ciphertext, base64-encoded
            const combined = Buffer.concat([salt, iv, tag, encrypted]);
            return combined.toString('base64');
        } catch (error) {
            console.error('[CryptoService] Encryption failed:', error.message);
            throw new Error('Encryption failed');
        }
    }

    decrypt(encryptedData) {
        if (!encryptedData) return null;

        try {
            const combined = Buffer.from(encryptedData, 'base64');

            const salt = combined.slice(0, this.saltLength);
            const iv = combined.slice(this.saltLength, this.saltLength + this.ivLength);
            const tag = combined.slice(this.saltLength + this.ivLength, this.saltLength + this.ivLength + this.tagLength);
            const encrypted = combined.slice(this.saltLength + this.ivLength + this.tagLength);

            const key = this._deriveKey();

            const decipher = crypto.createDecipheriv(this.algorithm, key, iv);
            decipher.setAuthTag(tag);

            const decrypted = Buffer.concat([
                decipher.update(encrypted),
                decipher.final()
            ]);

            return decrypted.toString('utf8');
        } catch (error) {
            console.error('[CryptoService] Decryption failed:', error.message);
            throw new Error('Decryption failed');
        }
    }

    clearCache() {
        this._derivedKey = null;
    }
}

module.exports = new CryptoService();
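A round-trip sketch. Because the key is derived from the machine ID plus the userData path, ciphertext produced here does not decrypt on another machine (or after the userData path changes); main process only, since key derivation touches `app.getPath()`:

```js
const cryptoService = require('./cryptoService');

const stored = cryptoService.encrypt('sk-example-api-key'); // base64(salt | iv | tag | ciphertext)
const recovered = cryptoService.decrypt(stored);
console.log(recovered === 'sk-example-api-key'); // true on the same machine
```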
src/common/services/localAIServiceBase.js (new file, 277 lines)
@@ -0,0 +1,277 @@
const { exec } = require('child_process');
const { promisify } = require('util');
const { EventEmitter } = require('events');
const path = require('path');
const os = require('os');
const https = require('https');
const fs = require('fs');
const crypto = require('crypto');

const execAsync = promisify(exec);

class LocalAIServiceBase extends EventEmitter {
    constructor(serviceName) {
        super();
        this.serviceName = serviceName;
        this.baseUrl = null;
        this.installationProgress = new Map();
    }

    getPlatform() {
        return process.platform;
    }

    async checkCommand(command) {
        try {
            const platform = this.getPlatform();
            const checkCmd = platform === 'win32' ? 'where' : 'which';
            const { stdout } = await execAsync(`${checkCmd} ${command}`);
            return stdout.trim();
        } catch (error) {
            return null;
        }
    }

    async isInstalled() {
        throw new Error('isInstalled() must be implemented by subclass');
    }

    async isServiceRunning() {
        throw new Error('isServiceRunning() must be implemented by subclass');
    }

    async startService() {
        throw new Error('startService() must be implemented by subclass');
    }

    async stopService() {
        throw new Error('stopService() must be implemented by subclass');
    }

    async waitForService(checkFn, maxAttempts = 30, delayMs = 1000) {
        for (let i = 0; i < maxAttempts; i++) {
            if (await checkFn()) {
                console.log(`[${this.serviceName}] Service is ready`);
                return true;
            }
            await new Promise(resolve => setTimeout(resolve, delayMs));
        }
        throw new Error(`${this.serviceName} service failed to start within timeout`);
    }

    getInstallProgress(modelName) {
        return this.installationProgress.get(modelName) || 0;
    }

    setInstallProgress(modelName, progress) {
        this.installationProgress.set(modelName, progress);
        this.emit('install-progress', { model: modelName, progress });
    }

    clearInstallProgress(modelName) {
        this.installationProgress.delete(modelName);
    }

    async autoInstall(onProgress) {
        const platform = this.getPlatform();
        console.log(`[${this.serviceName}] Starting auto-installation for ${platform}`);

        try {
            switch (platform) {
                case 'darwin':
                    return await this.installMacOS(onProgress);
                case 'win32':
                    return await this.installWindows(onProgress);
                case 'linux':
                    return await this.installLinux();
                default:
                    throw new Error(`Unsupported platform: ${platform}`);
            }
        } catch (error) {
            console.error(`[${this.serviceName}] Auto-installation failed:`, error);
            throw error;
        }
    }

    async installMacOS() {
        throw new Error('installMacOS() must be implemented by subclass');
    }

    async installWindows() {
        throw new Error('installWindows() must be implemented by subclass');
    }

    async installLinux() {
        throw new Error('installLinux() must be implemented by subclass');
    }

    // parseProgress method removed - using the proper REST API now

    async shutdown(force = false) {
        console.log(`[${this.serviceName}] Starting ${force ? 'forced' : 'graceful'} shutdown...`);

        const isRunning = await this.isServiceRunning();
        if (!isRunning) {
            console.log(`[${this.serviceName}] Service not running, nothing to shutdown`);
            return true;
        }

        const platform = this.getPlatform();

        try {
            switch (platform) {
                case 'darwin':
                    return await this.shutdownMacOS(force);
                case 'win32':
                    return await this.shutdownWindows(force);
                case 'linux':
                    return await this.shutdownLinux(force);
                default:
                    console.warn(`[${this.serviceName}] Unsupported platform for shutdown: ${platform}`);
                    return false;
            }
        } catch (error) {
            console.error(`[${this.serviceName}] Error during shutdown:`, error);
            return false;
        }
    }

    async shutdownMacOS(force) {
        throw new Error('shutdownMacOS() must be implemented by subclass');
    }

    async shutdownWindows(force) {
        throw new Error('shutdownWindows() must be implemented by subclass');
    }

    async shutdownLinux(force) {
        throw new Error('shutdownLinux() must be implemented by subclass');
    }

    async downloadFile(url, destination, options = {}) {
        const {
            onProgress = null,
            headers = { 'User-Agent': 'Glass-App' },
            timeout = 300000 // 5 minutes default
        } = options;

        return new Promise((resolve, reject) => {
            const file = fs.createWriteStream(destination);
            let downloadedSize = 0;
            let totalSize = 0;

            const request = https.get(url, { headers }, (response) => {
                // Handle redirects (301, 302, 307, 308)
                if ([301, 302, 307, 308].includes(response.statusCode)) {
                    file.close();
                    fs.unlink(destination, () => {});

                    if (!response.headers.location) {
                        reject(new Error('Redirect without location header'));
                        return;
                    }

                    console.log(`[${this.serviceName}] Following redirect from ${url} to ${response.headers.location}`);
                    this.downloadFile(response.headers.location, destination, options)
                        .then(resolve)
                        .catch(reject);
                    return;
                }

                if (response.statusCode !== 200) {
                    file.close();
                    fs.unlink(destination, () => {});
                    reject(new Error(`Download failed: ${response.statusCode} ${response.statusMessage}`));
                    return;
                }

                totalSize = parseInt(response.headers['content-length'], 10) || 0;

                response.on('data', (chunk) => {
                    downloadedSize += chunk.length;

                    if (onProgress && totalSize > 0) {
                        const progress = Math.round((downloadedSize / totalSize) * 100);
                        onProgress(progress, downloadedSize, totalSize);
                    }
                });

                response.pipe(file);

                file.on('finish', () => {
                    file.close(() => {
                        this.emit('download-complete', { url, destination, size: downloadedSize });
                        resolve({ success: true, size: downloadedSize });
                    });
                });
            });

            request.on('timeout', () => {
                request.destroy();
                file.close();
                fs.unlink(destination, () => {});
                reject(new Error('Download timeout'));
            });

            request.on('error', (err) => {
                file.close();
                fs.unlink(destination, () => {});
                this.emit('download-error', { url, error: err });
                reject(err);
            });

            request.setTimeout(timeout);

            file.on('error', (err) => {
                fs.unlink(destination, () => {});
                reject(err);
            });
        });
    }

    async downloadWithRetry(url, destination, options = {}) {
        const { maxRetries = 3, retryDelay = 1000, expectedChecksum = null, ...downloadOptions } = options;

        for (let attempt = 1; attempt <= maxRetries; attempt++) {
            try {
                const result = await this.downloadFile(url, destination, downloadOptions);

                if (expectedChecksum) {
                    const isValid = await this.verifyChecksum(destination, expectedChecksum);
                    if (!isValid) {
                        fs.unlinkSync(destination);
                        throw new Error('Checksum verification failed');
                    }
                    console.log(`[${this.serviceName}] Checksum verified successfully`);
                }

                return result;
            } catch (error) {
                if (attempt === maxRetries) {
                    throw error;
                }

                console.log(`Download attempt ${attempt} failed, retrying in ${retryDelay}ms...`);
                await new Promise(resolve => setTimeout(resolve, retryDelay * attempt));
            }
        }
    }

    async verifyChecksum(filePath, expectedChecksum) {
        return new Promise((resolve, reject) => {
            const hash = crypto.createHash('sha256');
            const stream = fs.createReadStream(filePath);

            stream.on('data', (data) => hash.update(data));
            stream.on('end', () => {
                const fileChecksum = hash.digest('hex');
                console.log(`[${this.serviceName}] File checksum: ${fileChecksum}`);
                console.log(`[${this.serviceName}] Expected checksum: ${expectedChecksum}`);
                resolve(fileChecksum === expectedChecksum);
            });
            stream.on('error', reject);
        });
    }
}

module.exports = LocalAIServiceBase;
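The contract a subclass must fill in, as a sketch; the method bodies are illustrative placeholders, not the commit's `OllamaService` (which follows below):

```js
const LocalAIServiceBase = require('./localAIServiceBase');

class ExampleService extends LocalAIServiceBase {
    constructor() {
        super('ExampleService');
    }

    async isInstalled() {
        // e.g. look the CLI up on PATH via the base class helper
        return !!(await this.checkCommand('example-cli'));
    }

    async isServiceRunning() {
        // e.g. probe a local HTTP endpoint; placeholder always-false here
        return false;
    }

    async startService() {
        // spawn the daemon, then poll until it answers
        await this.waitForService(() => this.isServiceRunning());
    }

    async stopService() {
        // platform-specific shutdown; see the shutdownMacOS/Windows/Linux hooks
    }
}
```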
src/common/services/localProgressTracker.js (new file, 133 lines)
@@ -0,0 +1,133 @@
export class LocalProgressTracker {
    constructor(serviceName) {
        this.serviceName = serviceName;
        this.activeOperations = new Map(); // operationId -> { controller, onProgress }
        this.ipcRenderer = window.require?.('electron')?.ipcRenderer;

        if (!this.ipcRenderer) {
            throw new Error(`${serviceName} requires Electron environment`);
        }

        this.globalProgressHandler = (event, data) => {
            const operation = this.activeOperations.get(data.model || data.modelId);
            if (operation && !operation.controller.signal.aborted) {
                operation.onProgress(data.progress);
            }
        };

        const progressEvents = {
            'ollama': 'ollama:pull-progress',
            'whisper': 'whisper:download-progress'
        };

        const eventName = progressEvents[serviceName.toLowerCase()] || `${serviceName}:progress`;
        this.progressEvent = eventName;
        this.ipcRenderer.on(eventName, this.globalProgressHandler);
    }

    async trackOperation(operationId, operationType, onProgress) {
        if (this.activeOperations.has(operationId)) {
            throw new Error(`${operationType} ${operationId} is already in progress`);
        }

        const controller = new AbortController();
        const operation = { controller, onProgress };
        this.activeOperations.set(operationId, operation);

        try {
            const ipcChannels = {
                'ollama': { install: 'ollama:pull-model' },
                'whisper': { download: 'whisper:download-model' }
            };

            const channel = ipcChannels[this.serviceName.toLowerCase()]?.[operationType] ||
                            `${this.serviceName}:${operationType}`;

            const result = await this.ipcRenderer.invoke(channel, operationId);

            if (!result.success) {
                throw new Error(result.error || `${operationType} failed`);
            }

            return true;
        } catch (error) {
            if (!controller.signal.aborted) {
                throw error;
            }
            return false;
        } finally {
            this.activeOperations.delete(operationId);
        }
    }

    async installModel(modelName, onProgress) {
        return this.trackOperation(modelName, 'install', onProgress);
    }

    async downloadModel(modelId, onProgress) {
        return this.trackOperation(modelId, 'download', onProgress);
    }

    cancelOperation(operationId) {
        const operation = this.activeOperations.get(operationId);
        if (operation) {
            operation.controller.abort();
            this.activeOperations.delete(operationId);
        }
    }

    cancelAllOperations() {
        for (const [operationId, operation] of this.activeOperations) {
            operation.controller.abort();
        }
        this.activeOperations.clear();
    }

    isOperationActive(operationId) {
        return this.activeOperations.has(operationId);
    }

    getActiveOperations() {
        return Array.from(this.activeOperations.keys());
    }

    destroy() {
        this.cancelAllOperations();
        if (this.ipcRenderer) {
            this.ipcRenderer.removeListener(this.progressEvent, this.globalProgressHandler);
        }
    }
}

let trackers = new Map();

export function getLocalProgressTracker(serviceName) {
    if (!trackers.has(serviceName)) {
        trackers.set(serviceName, new LocalProgressTracker(serviceName));
    }
    return trackers.get(serviceName);
}

export function destroyLocalProgressTracker(serviceName) {
    const tracker = trackers.get(serviceName);
    if (tracker) {
        tracker.destroy();
        trackers.delete(serviceName);
    }
}

export function destroyAllProgressTrackers() {
    for (const [name, tracker] of trackers) {
        tracker.destroy();
    }
    trackers.clear();
}

// Legacy compatibility exports
export function getOllamaProgressTracker() {
    return getLocalProgressTracker('ollama');
}

export function destroyOllamaProgressTracker() {
    destroyLocalProgressTracker('ollama');
}
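A renderer-side usage sketch. It assumes the main process registers the `ollama:pull-model` invoke handler and emits `ollama:pull-progress` events, as the channel maps above imply; the model name is a placeholder:

```js
import { getLocalProgressTracker } from './localProgressTracker';

async function pullWithProgress(modelName) {
    const tracker = getLocalProgressTracker('ollama');
    const ok = await tracker.installModel(modelName, (progress) => {
        console.log(`pull progress: ${progress}%`);
    });
    console.log(ok ? 'model installed' : 'operation cancelled');
}

pullWithProgress('llama3.2:latest').catch(console.error);
```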
@@ -2,6 +2,7 @@ const Store = require('electron-store');
 const fetch = require('node-fetch');
 const { ipcMain, webContents } = require('electron');
 const { PROVIDERS } = require('../ai/factory');
+const cryptoService = require('./cryptoService');

 class ModelStateService {
     constructor(authService) {
@@ -23,7 +24,7 @@ class ModelStateService {
         const llmProvider = this.getProviderForModel('llm', llmModel) || 'None';
         const sttProvider = this.getProviderForModel('stt', sttModel) || 'None';

-        console.log(`[ModelStateService] 🌟 Current Selection -> LLM: ${llmModel || 'None'} (Provider: ${llmProvider}), STT: ${sttModel || 'None'} (Provider: ${sttProvider})`);
+        console.log(`[ModelStateService] Current Selection -> LLM: ${llmModel || 'None'} (Provider: ${llmProvider}), STT: ${sttModel || 'None'} (Provider: ${sttProvider})`);
     }

     _autoSelectAvailableModels() {
@@ -36,7 +37,9 @@

         if (currentModelId) {
             const provider = this.getProviderForModel(type, currentModelId);
-            if (provider && this.getApiKey(provider)) {
+            const apiKey = this.getApiKey(provider);
+            // For Ollama, 'local' is a valid API key
+            if (provider && (apiKey || (provider === 'ollama' && apiKey === 'local'))) {
                 isCurrentModelValid = true;
             }
         }
@@ -45,8 +48,15 @@
             console.log(`[ModelStateService] No valid ${type.toUpperCase()} model selected. Finding an alternative...`);
             const availableModels = this.getAvailableModels(type);
             if (availableModels.length > 0) {
-                this.state.selectedModels[type] = availableModels[0].id;
-                console.log(`[ModelStateService] Auto-selected ${type.toUpperCase()} model: ${availableModels[0].id}`);
+                // Prefer API providers over local providers for auto-selection
+                const apiModel = availableModels.find(model => {
+                    const provider = this.getProviderForModel(type, model.id);
+                    return provider && provider !== 'ollama' && provider !== 'whisper';
+                });
+
+                const selectedModel = apiModel || availableModels[0];
+                this.state.selectedModels[type] = selectedModel.id;
+                console.log(`[ModelStateService] Auto-selected ${type.toUpperCase()} model: ${selectedModel.id} (preferred: ${apiModel ? 'API' : 'local'})`);
             } else {
                 this.state.selectedModels[type] = null;
             }
@@ -67,11 +77,20 @@
         };
         this.state = this.store.get(`users.${userId}`, defaultState);
         console.log(`[ModelStateService] State loaded for user: ${userId}`);

         for (const p of Object.keys(PROVIDERS)) {
             if (!(p in this.state.apiKeys)) {
                 this.state.apiKeys[p] = null;
+            } else if (this.state.apiKeys[p] && p !== 'ollama' && p !== 'whisper') {
+                try {
+                    this.state.apiKeys[p] = cryptoService.decrypt(this.state.apiKeys[p]);
+                } catch (error) {
+                    console.error(`[ModelStateService] Failed to decrypt API key for ${p}, resetting`);
+                    this.state.apiKeys[p] = null;
+                }
             }
         }

         this._autoSelectAvailableModels();
         this._saveState();
         this._logCurrentSelection();
@@ -80,7 +99,23 @@

     _saveState() {
         const userId = this.authService.getCurrentUserId();
-        this.store.set(`users.${userId}`, this.state);
+        const stateToSave = {
+            ...this.state,
+            apiKeys: { ...this.state.apiKeys }
+        };
+
+        for (const [provider, key] of Object.entries(stateToSave.apiKeys)) {
+            if (key && provider !== 'ollama' && provider !== 'whisper') {
+                try {
+                    stateToSave.apiKeys[provider] = cryptoService.encrypt(key);
+                } catch (error) {
+                    console.error(`[ModelStateService] Failed to encrypt API key for ${provider}`);
+                    stateToSave.apiKeys[provider] = null;
+                }
+            }
+        }
+
+        this.store.set(`users.${userId}`, stateToSave);
         console.log(`[ModelStateService] State saved for user: ${userId}`);
         this._logCurrentSelection();
     }
@@ -94,6 +129,26 @@
         const body = undefined;

         switch (provider) {
+            case 'ollama':
+                // Ollama doesn't need API key validation;
+                // just check if the service is running.
+                try {
+                    const response = await fetch('http://localhost:11434/api/tags');
+                    if (response.ok) {
+                        console.log(`[ModelStateService] Ollama service is accessible.`);
+                        this.setApiKey(provider, 'local'); // Use 'local' as a placeholder
+                        return { success: true };
+                    } else {
+                        return { success: false, error: 'Ollama service is not running. Please start Ollama first.' };
+                    }
+                } catch (error) {
+                    return { success: false, error: 'Cannot connect to Ollama. Please ensure Ollama is installed and running.' };
+                }
+            case 'whisper':
+                // Whisper is a local service, no API key validation needed
+                console.log(`[ModelStateService] Whisper is a local service.`);
+                this.setApiKey(provider, 'local'); // Use 'local' as a placeholder
+                return { success: true };
             case 'openai':
                 validationUrl = 'https://api.openai.com/v1/models';
                 headers = { 'Authorization': `Bearer ${key}` };
@@ -176,11 +231,19 @@
         const llmModels = PROVIDERS[provider]?.llmModels;
         const sttModels = PROVIDERS[provider]?.sttModels;

-        if (!this.state.selectedModels.llm && llmModels?.length > 0) {
+        // Prioritize the newly configured provider over existing selections,
+        // but only for non-local providers or when no model is selected yet.
+        if (llmModels?.length > 0) {
+            if (!this.state.selectedModels.llm || provider !== 'ollama') {
                 this.state.selectedModels.llm = llmModels[0].id;
                 console.log(`[ModelStateService] Selected LLM model from newly configured provider ${provider}: ${llmModels[0].id}`);
+            }
         }
-        if (!this.state.selectedModels.stt && sttModels?.length > 0) {
+        if (sttModels?.length > 0) {
+            if (!this.state.selectedModels.stt || provider !== 'whisper') {
                 this.state.selectedModels.stt = sttModels[0].id;
                 console.log(`[ModelStateService] Selected STT model from newly configured provider ${provider}: ${sttModels[0].id}`);
+            }
         }
         this._saveState();
         this._logCurrentSelection();
@@ -223,6 +286,14 @@
                 return providerId;
             }
         }

+        // If no provider was found, assume it could be a custom Ollama model
+        // if the Ollama provider is configured (has a key).
+        if (type === 'llm' && this.state.apiKeys['ollama']) {
+            console.log(`[ModelStateService] Model '${modelId}' not found in PROVIDERS list, assuming it's a custom Ollama model.`);
+            return 'ollama';
+        }
+
         return null;
     }
@@ -239,10 +310,33 @@
         if (this.isLoggedInWithFirebase()) return true;

-        // Check whether at least one provider offering LLM and STT models has an API key configured
-        const hasLlmKey = Object.entries(this.state.apiKeys).some(([provider, key]) => key && PROVIDERS[provider]?.llmModels.length > 0);
-        const hasSttKey = Object.entries(this.state.apiKeys).some(([provider, key]) => key && PROVIDERS[provider]?.sttModels.length > 0);
+        const hasLlmKey = Object.entries(this.state.apiKeys).some(([provider, key]) => {
+            if (provider === 'ollama') {
+                // Ollama uses dynamic models, so just check if configured (has 'local' key)
+                return key === 'local';
+            }
+            if (provider === 'whisper') {
+                // Whisper doesn't support LLM
+                return false;
+            }
+            return key && PROVIDERS[provider]?.llmModels.length > 0;
+        });

-        return hasLlmKey && hasSttKey;
+        const hasSttKey = Object.entries(this.state.apiKeys).some(([provider, key]) => {
+            if (provider === 'whisper') {
+                // Whisper has a static model list and supports STT
+                return key === 'local' && PROVIDERS[provider]?.sttModels.length > 0;
+            }
+            if (provider === 'ollama') {
+                // Ollama doesn't support STT yet
+                return false;
+            }
+            return key && PROVIDERS[provider]?.sttModels.length > 0;
+        });
+
+        const result = hasLlmKey && hasSttKey;
+        console.log(`[ModelStateService] areProvidersConfigured: LLM=${hasLlmKey}, STT=${hasSttKey}, result=${result}`);
+        return result;
     }

@@ -265,13 +359,58 @@
     setSelectedModel(type, modelId) {
         const provider = this.getProviderForModel(type, modelId);
         if (provider && this.state.apiKeys[provider]) {
+            const previousModel = this.state.selectedModels[type];
             this.state.selectedModels[type] = modelId;
             this._saveState();
+
+            // Auto warm-up for Ollama LLM models when changed
+            if (type === 'llm' && provider === 'ollama' && modelId !== previousModel) {
+                this._autoWarmUpOllamaModel(modelId, previousModel);
+            }
+
             return true;
         }
         return false;
     }

+    /**
+     * Auto warm-up Ollama model when the LLM selection changes
+     * @private
+     * @param {string} newModelId - The newly selected model
+     * @param {string} previousModelId - The previously selected model
+     */
+    async _autoWarmUpOllamaModel(newModelId, previousModelId) {
+        try {
+            console.log(`[ModelStateService] 🔥 LLM model changed: ${previousModelId || 'None'} → ${newModelId}, triggering warm-up`);
+
+            // Get the Ollama service if available
+            const ollamaService = require('./ollamaService');
+            if (!ollamaService) {
+                console.log('[ModelStateService] OllamaService not available for auto warm-up');
+                return;
+            }
+
+            // Delay warm-up slightly to allow the UI to update first
+            setTimeout(async () => {
+                try {
+                    console.log(`[ModelStateService] Starting background warm-up for: ${newModelId}`);
+                    const success = await ollamaService.warmUpModel(newModelId);
+
+                    if (success) {
+                        console.log(`[ModelStateService] ✅ Successfully warmed up model: ${newModelId}`);
+                    } else {
+                        console.log(`[ModelStateService] ⚠️ Failed to warm up model: ${newModelId}`);
+                    }
+                } catch (error) {
+                    console.log(`[ModelStateService] 🚫 Error during auto warm-up for ${newModelId}:`, error.message);
+                }
+            }, 500); // 500ms delay
+
+        } catch (error) {
+            console.error('[ModelStateService] Error in auto warm-up setup:', error);
+        }
+    }

     /**
      *
      * @param {('llm' | 'stt')} type
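In effect, the warm-up path above makes a model switch cheap to call from the UI. A sketch of the observable behavior, assuming a constructed `modelStateService` instance and the 'ollama' provider already configured (its key set to 'local'):

```js
// Model name is a placeholder.
const changed = modelStateService.setSelectedModel('llm', 'llama3.2:latest');
console.log(changed); // true; the new selection is saved immediately
// ~500ms later, in the background:
//   [ModelStateService] Starting background warm-up for: llama3.2:latest
//   -> ollamaService.warmUpModel('llama3.2:latest')
```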
809
src/common/services/ollamaService.js
Normal file
809
src/common/services/ollamaService.js
Normal file
@ -0,0 +1,809 @@
|
||||
const { spawn } = require('child_process');
const { promisify } = require('util');
const fetch = require('node-fetch');
const path = require('path');
const fs = require('fs').promises;
const { app } = require('electron');
const LocalAIServiceBase = require('./localAIServiceBase');
const { spawnAsync } = require('../utils/spawnHelper');
const { DOWNLOAD_CHECKSUMS } = require('../config/checksums');

class OllamaService extends LocalAIServiceBase {
    constructor() {
        super('OllamaService');
        this.baseUrl = 'http://localhost:11434';
        this.warmingModels = new Map();
        this.warmedModels = new Set();
        this.lastWarmUpAttempt = new Map();

        // Request management system
        this.activeRequests = new Map();
        this.requestTimeouts = new Map();
        this.healthStatus = {
            lastHealthCheck: 0,
            consecutive_failures: 0,
            is_circuit_open: false
        };

        // Configuration
        this.requestTimeout = 8000; // 8s for health checks
        this.warmupTimeout = 15000; // 15s for model warmup
        this.healthCheckInterval = 60000; // 1min between health checks
        this.circuitBreakerThreshold = 3;
        this.circuitBreakerCooldown = 30000; // 30s

        // Supported models are determined dynamically from installed models
        this.supportedModels = {};

        // Start health monitoring
        this._startHealthMonitoring();
    }

    getOllamaCliPath() {
        if (this.getPlatform() === 'darwin') {
            return '/Applications/Ollama.app/Contents/Resources/ollama';
        }
        return 'ollama';
    }

    /**
     * Professional request management with AbortController-based cancellation
     */
    async _makeRequest(url, options = {}, operationType = 'default') {
        const requestId = `${operationType}_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;

        // Circuit breaker check
        if (this._isCircuitOpen()) {
            throw new Error('Service temporarily unavailable (circuit breaker open)');
        }

        // Request deduplication for health checks
        if (operationType === 'health' && this.activeRequests.has('health')) {
            console.log('[OllamaService] Health check already in progress, returning existing promise');
            return this.activeRequests.get('health');
        }

        const controller = new AbortController();
        const timeout = options.timeout || this.requestTimeout;

        // Set up timeout mechanism
        const timeoutId = setTimeout(() => {
            controller.abort();
            this.activeRequests.delete(requestId);
            this._recordFailure();
        }, timeout);

        this.requestTimeouts.set(requestId, timeoutId);

        const requestPromise = this._executeRequest(url, {
            ...options,
            signal: controller.signal
        }, requestId);

        // Store active request for deduplication and cleanup
        this.activeRequests.set(operationType === 'health' ? 'health' : requestId, requestPromise);

        try {
            const result = await requestPromise;
            this._recordSuccess();
            return result;
        } catch (error) {
            this._recordFailure();
            if (error.name === 'AbortError') {
                throw new Error(`Request timeout after ${timeout}ms`);
            }
            throw error;
        } finally {
            clearTimeout(timeoutId);
            this.requestTimeouts.delete(requestId);
            this.activeRequests.delete(operationType === 'health' ? 'health' : requestId);
        }
    }

    async _executeRequest(url, options, requestId) {
        try {
            console.log(`[OllamaService] Executing request ${requestId} to ${url}`);
            const response = await fetch(url, options);

            if (!response.ok) {
                throw new Error(`HTTP ${response.status}: ${response.statusText}`);
            }

            return response;
        } catch (error) {
            console.error(`[OllamaService] Request ${requestId} failed:`, error.message);
            throw error;
        }
    }

    _isCircuitOpen() {
        if (!this.healthStatus.is_circuit_open) return false;

        // Check if cooldown period has passed
        const now = Date.now();
        if (now - this.healthStatus.lastHealthCheck > this.circuitBreakerCooldown) {
            console.log('[OllamaService] Circuit breaker cooldown expired, attempting recovery');
            this.healthStatus.is_circuit_open = false;
            this.healthStatus.consecutive_failures = 0;
            return false;
        }

        return true;
    }

    _recordSuccess() {
        this.healthStatus.consecutive_failures = 0;
        this.healthStatus.is_circuit_open = false;
        this.healthStatus.lastHealthCheck = Date.now();
    }

    _recordFailure() {
        this.healthStatus.consecutive_failures++;
        this.healthStatus.lastHealthCheck = Date.now();

        if (this.healthStatus.consecutive_failures >= this.circuitBreakerThreshold) {
            console.warn(`[OllamaService] Circuit breaker opened after ${this.healthStatus.consecutive_failures} failures`);
            this.healthStatus.is_circuit_open = true;
        }
    }

    _startHealthMonitoring() {
        // Passive health monitoring - only when requests are made
        console.log('[OllamaService] Health monitoring system initialized');
    }

    /**
     * Cleanup all active requests and resources
     */
    _cleanup() {
        console.log(`[OllamaService] Cleaning up ${this.activeRequests.size} active requests`);

        // Cancel all active requests
        for (const [requestId, promise] of this.activeRequests) {
            if (this.requestTimeouts.has(requestId)) {
                clearTimeout(this.requestTimeouts.get(requestId));
                this.requestTimeouts.delete(requestId);
            }
        }

        this.activeRequests.clear();
        this.requestTimeouts.clear();
    }

    async isInstalled() {
        try {
            const platform = this.getPlatform();

            if (platform === 'darwin') {
                try {
                    await fs.access('/Applications/Ollama.app');
                    return true;
                } catch {
                    const ollamaPath = await this.checkCommand(this.getOllamaCliPath());
                    return !!ollamaPath;
                }
            } else {
                const ollamaPath = await this.checkCommand(this.getOllamaCliPath());
                return !!ollamaPath;
            }
        } catch (error) {
            console.log('[OllamaService] Ollama not found:', error.message);
            return false;
        }
    }

    async isServiceRunning() {
        try {
            const response = await this._makeRequest(`${this.baseUrl}/api/tags`, {
                method: 'GET',
                timeout: this.requestTimeout
            }, 'health');

            return response.ok;
        } catch (error) {
            console.log(`[OllamaService] Service health check failed: ${error.message}`);
            return false;
        }
    }

    async startService() {
        const platform = this.getPlatform();

        try {
            if (platform === 'darwin') {
                try {
                    await spawnAsync('open', ['-a', 'Ollama']);
                    await this.waitForService(() => this.isServiceRunning());
                    return true;
                } catch {
                    spawn(this.getOllamaCliPath(), ['serve'], {
                        detached: true,
                        stdio: 'ignore'
                    }).unref();
                    await this.waitForService(() => this.isServiceRunning());
                    return true;
                }
            } else {
                spawn(this.getOllamaCliPath(), ['serve'], {
                    detached: true,
                    stdio: 'ignore',
                    shell: platform === 'win32'
                }).unref();
                await this.waitForService(() => this.isServiceRunning());
                return true;
            }
        } catch (error) {
            console.error('[OllamaService] Failed to start service:', error);
            throw error;
        }
    }

    async stopService() {
        return await this.shutdown();
    }

    async getInstalledModels() {
        try {
            const response = await this._makeRequest(`${this.baseUrl}/api/tags`, {
                method: 'GET',
                timeout: this.requestTimeout
            }, 'models');

            const data = await response.json();
            return data.models || [];
        } catch (error) {
            console.error('[OllamaService] Failed to get installed models:', error.message);
            return [];
        }
    }

    async getInstalledModelsList() {
        try {
            const { stdout } = await spawnAsync(this.getOllamaCliPath(), ['list']);
            const lines = stdout.split('\n').filter(line => line.trim());

            // Skip header line (NAME, ID, SIZE, MODIFIED)
            const modelLines = lines.slice(1);

            const models = [];
            for (const line of modelLines) {
                if (!line.trim()) continue;

                // Parse line: "model:tag model_id size modified_time"
                const parts = line.split(/\s+/);
                if (parts.length >= 3) {
                    models.push({
                        name: parts[0],
                        id: parts[1],
                        size: parts[2] + (parts[3] === 'GB' || parts[3] === 'MB' ? ' ' + parts[3] : ''),
                        status: 'installed'
                    });
                }
            }

            return models;
        } catch (error) {
            console.log('[OllamaService] Failed to get installed models via CLI, falling back to API');
            // Fallback to API if CLI fails
            const apiModels = await this.getInstalledModels();
            return apiModels.map(model => ({
                name: model.name,
                id: model.digest || 'unknown',
                size: model.size || 'Unknown',
                status: 'installed'
            }));
        }
    }

    async getModelSuggestions() {
        try {
            // Get actually installed models
            const installedModels = await this.getInstalledModelsList();

            // Get user input history from storage (we'll implement this in the frontend)
            // For now, just return installed models
            return installedModels;
        } catch (error) {
            console.error('[OllamaService] Failed to get model suggestions:', error);
            return [];
        }
    }

    async isModelInstalled(modelName) {
        const models = await this.getInstalledModels();
        return models.some(model => model.name === modelName);
    }

    async pullModel(modelName) {
        if (!modelName?.trim()) {
            throw new Error(`Invalid model name: ${modelName}`);
        }

        console.log(`[OllamaService] Starting to pull model: ${modelName} via API`);

        try {
            const response = await fetch(`${this.baseUrl}/api/pull`, {
                method: 'POST',
                headers: { 'Content-Type': 'application/json' },
                body: JSON.stringify({
                    model: modelName,
                    stream: true
                })
            });

            if (!response.ok) {
                throw new Error(`Pull API failed: ${response.status} ${response.statusText}`);
            }

            // Handle Node.js streaming response
            return new Promise((resolve, reject) => {
                let buffer = '';

                response.body.on('data', (chunk) => {
                    buffer += chunk.toString();
                    const lines = buffer.split('\n');

                    // Keep incomplete line in buffer
                    buffer = lines.pop() || '';

                    // Process complete lines
                    for (const line of lines) {
                        if (!line.trim()) continue;

                        try {
                            const data = JSON.parse(line);
                            const progress = this._parseOllamaPullProgress(data, modelName);

                            if (progress !== null) {
                                this.setInstallProgress(modelName, progress);
                                this.emit('pull-progress', {
                                    model: modelName,
                                    progress,
                                    status: data.status || 'downloading'
                                });
                                console.log(`[OllamaService] API Progress: ${progress}% for ${modelName} (${data.status || 'downloading'})`);
                            }

                            // Handle completion
                            if (data.status === 'success') {
                                console.log(`[OllamaService] Successfully pulled model: ${modelName}`);
                                this.emit('pull-complete', { model: modelName });
                                this.clearInstallProgress(modelName);
                                resolve();
                                return;
                            }
                        } catch (parseError) {
                            console.warn('[OllamaService] Failed to parse response line:', line);
                        }
                    }
                });

                response.body.on('end', () => {
                    // Process any remaining data in buffer
                    if (buffer.trim()) {
                        try {
                            const data = JSON.parse(buffer);
                            if (data.status === 'success') {
                                console.log(`[OllamaService] Successfully pulled model: ${modelName}`);
                                this.emit('pull-complete', { model: modelName });
                            }
                        } catch (parseError) {
                            console.warn('[OllamaService] Failed to parse final buffer:', buffer);
                        }
                    }
                    this.clearInstallProgress(modelName);
                    resolve();
                });

                response.body.on('error', (error) => {
                    console.error(`[OllamaService] Stream error for ${modelName}:`, error);
                    this.clearInstallProgress(modelName);
                    reject(error);
                });
            });
        } catch (error) {
            this.clearInstallProgress(modelName);
            console.error(`[OllamaService] Pull model failed:`, error);
            throw error;
        }
    }

    _parseOllamaPullProgress(data, modelName) {
        // Handle Ollama API response format
        if (data.status === 'success') {
            return 100;
        }

        // Handle downloading progress
        if (data.total && data.completed !== undefined) {
            const progress = Math.round((data.completed / data.total) * 100);
            return Math.min(progress, 99); // Don't show 100% until success
        }

        // Handle status-based progress
        const statusProgress = {
            'pulling manifest': 5,
            'downloading': 10,
            'verifying sha256 digest': 90,
            'writing manifest': 95,
            'removing any unused layers': 98
        };

        if (data.status && statusProgress[data.status] !== undefined) {
            return statusProgress[data.status];
        }

        return null;
    }
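
    // For illustration only, assuming the standard Ollama /api/pull streaming
    // response shape (the byte counts below are hypothetical), the JSON lines
    // map to progress values roughly like this:
    //   {"status":"pulling manifest"}                                       -> 5
    //   {"status":"downloading","total":4109865159,"completed":2054932580}  -> 50
    //   {"status":"verifying sha256 digest"}                                -> 90
    //   {"status":"success"}                                                -> 100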

    async installMacOS(onProgress) {
        console.log('[OllamaService] Installing Ollama on macOS using DMG...');

        try {
            const dmgUrl = 'https://ollama.com/download/Ollama.dmg';
            const tempDir = app.getPath('temp');
            const dmgPath = path.join(tempDir, 'Ollama.dmg');
            const mountPoint = path.join(tempDir, 'OllamaMount');

            console.log('[OllamaService] Step 1: Downloading Ollama DMG...');
            onProgress?.({ stage: 'downloading', message: 'Downloading Ollama installer...', progress: 0 });
            const checksumInfo = DOWNLOAD_CHECKSUMS.ollama.dmg;
            await this.downloadWithRetry(dmgUrl, dmgPath, {
                expectedChecksum: checksumInfo?.sha256,
                onProgress: (progress) => {
                    onProgress?.({ stage: 'downloading', message: `Downloading... ${progress}%`, progress });
                }
            });

            console.log('[OllamaService] Step 2: Mounting DMG...');
            onProgress?.({ stage: 'mounting', message: 'Mounting disk image...', progress: 0 });
            await fs.mkdir(mountPoint, { recursive: true });
            await spawnAsync('hdiutil', ['attach', dmgPath, '-mountpoint', mountPoint]);
            onProgress?.({ stage: 'mounting', message: 'Disk image mounted.', progress: 100 });

            console.log('[OllamaService] Step 3: Installing Ollama.app...');
            onProgress?.({ stage: 'installing', message: 'Installing Ollama application...', progress: 0 });
            await spawnAsync('cp', ['-R', `${mountPoint}/Ollama.app`, '/Applications/']);
            onProgress?.({ stage: 'installing', message: 'Application installed.', progress: 100 });

            console.log('[OllamaService] Step 4: Setting up CLI path...');
            onProgress?.({ stage: 'linking', message: 'Creating command-line shortcut...', progress: 0 });
            try {
                const script = `do shell script "mkdir -p /usr/local/bin && ln -sf '${this.getOllamaCliPath()}' '/usr/local/bin/ollama'" with administrator privileges`;
                await spawnAsync('osascript', ['-e', script]);
                onProgress?.({ stage: 'linking', message: 'Shortcut created.', progress: 100 });
            } catch (linkError) {
                console.error('[OllamaService] CLI symlink creation failed:', linkError.message);
                onProgress?.({ stage: 'linking', message: 'Shortcut creation failed (permissions?).', progress: 100 });
                // Not throwing an error, as the app might still work
            }

            console.log('[OllamaService] Step 5: Cleanup...');
            onProgress?.({ stage: 'cleanup', message: 'Cleaning up installation files...', progress: 0 });
            await spawnAsync('hdiutil', ['detach', mountPoint]);
            await fs.unlink(dmgPath).catch(() => {});
            await fs.rmdir(mountPoint).catch(() => {});
            onProgress?.({ stage: 'cleanup', message: 'Cleanup complete.', progress: 100 });

            console.log('[OllamaService] Ollama installed successfully on macOS');

            await new Promise(resolve => setTimeout(resolve, 2000));

            return true;
        } catch (error) {
            console.error('[OllamaService] macOS installation failed:', error);
            throw new Error(`Failed to install Ollama on macOS: ${error.message}`);
        }
    }

    async installWindows(onProgress) {
        console.log('[OllamaService] Installing Ollama on Windows...');

        try {
            const exeUrl = 'https://ollama.com/download/OllamaSetup.exe';
            const tempDir = app.getPath('temp');
            const exePath = path.join(tempDir, 'OllamaSetup.exe');

            console.log('[OllamaService] Step 1: Downloading Ollama installer...');
            onProgress?.({ stage: 'downloading', message: 'Downloading Ollama installer...', progress: 0 });
            const checksumInfo = DOWNLOAD_CHECKSUMS.ollama.exe;
            await this.downloadWithRetry(exeUrl, exePath, {
                expectedChecksum: checksumInfo?.sha256,
                onProgress: (progress) => {
                    onProgress?.({ stage: 'downloading', message: `Downloading... ${progress}%`, progress });
                }
            });

            console.log('[OllamaService] Step 2: Running silent installation...');
            onProgress?.({ stage: 'installing', message: 'Installing Ollama...', progress: 0 });
            await spawnAsync(exePath, ['/VERYSILENT', '/NORESTART']);
            onProgress?.({ stage: 'installing', message: 'Installation complete.', progress: 100 });

            console.log('[OllamaService] Step 3: Cleanup...');
            onProgress?.({ stage: 'cleanup', message: 'Cleaning up installation files...', progress: 0 });
            await fs.unlink(exePath).catch(() => {});
            onProgress?.({ stage: 'cleanup', message: 'Cleanup complete.', progress: 100 });

            console.log('[OllamaService] Ollama installed successfully on Windows');

            await new Promise(resolve => setTimeout(resolve, 3000));

            return true;
        } catch (error) {
            console.error('[OllamaService] Windows installation failed:', error);
            throw new Error(`Failed to install Ollama on Windows: ${error.message}`);
        }
    }

    async installLinux() {
        console.log('[OllamaService] Installing Ollama on Linux...');
        console.log('[OllamaService] Automatic installation on Linux is not supported for security reasons.');
        console.log('[OllamaService] Please install Ollama manually:');
        console.log('[OllamaService] 1. Visit https://ollama.com/download/linux');
        console.log('[OllamaService] 2. Follow the official installation instructions');
        console.log('[OllamaService] 3. Or use your package manager if available');
        throw new Error('Manual installation required on Linux. Please visit https://ollama.com/download/linux');
    }

    async warmUpModel(modelName, forceRefresh = false) {
        if (!modelName?.trim()) {
            console.warn(`[OllamaService] Invalid model name for warm-up`);
            return false;
        }

        // Check if already warmed (and not forcing refresh)
        if (!forceRefresh && this.warmedModels.has(modelName)) {
            console.log(`[OllamaService] Model ${modelName} already warmed up, skipping`);
            return true;
        }

        // Check if currently warming - return existing Promise
        if (this.warmingModels.has(modelName)) {
            console.log(`[OllamaService] Model ${modelName} is already warming up, joining existing operation`);
            return await this.warmingModels.get(modelName);
        }

        // Check rate limiting (prevent too frequent attempts)
        const lastAttempt = this.lastWarmUpAttempt.get(modelName);
        const now = Date.now();
        if (lastAttempt && (now - lastAttempt) < 5000) { // 5 second cooldown
            console.log(`[OllamaService] Rate limiting warm-up for ${modelName}, try again in ${5 - Math.floor((now - lastAttempt) / 1000)}s`);
            return false;
        }

        // Create and store the warming Promise
        const warmingPromise = this._performWarmUp(modelName);
        this.warmingModels.set(modelName, warmingPromise);
        this.lastWarmUpAttempt.set(modelName, now);

        try {
            const result = await warmingPromise;

            if (result) {
                this.warmedModels.add(modelName);
                console.log(`[OllamaService] Model ${modelName} successfully warmed up`);
            }

            return result;
        } finally {
            // Always clean up the warming Promise
            this.warmingModels.delete(modelName);
        }
    }

    async _performWarmUp(modelName) {
        console.log(`[OllamaService] Starting warm-up for model: ${modelName}`);

        try {
            const response = await this._makeRequest(`${this.baseUrl}/api/chat`, {
                method: 'POST',
                headers: { 'Content-Type': 'application/json' },
                body: JSON.stringify({
                    model: modelName,
                    messages: [
                        { role: 'user', content: 'Hi' }
                    ],
                    stream: false,
                    options: {
                        num_predict: 1, // Minimal response
                        temperature: 0
                    }
                }),
                timeout: this.warmupTimeout
            }, `warmup_${modelName}`);

            return true;
        } catch (error) {
            console.error(`[OllamaService] Failed to warm up model ${modelName}:`, error.message);
            return false;
        }
    }

    async autoWarmUpSelectedModel() {
        try {
            // Get selected model from ModelStateService
            const modelStateService = global.modelStateService;
            if (!modelStateService) {
                console.log('[OllamaService] ModelStateService not available for auto warm-up');
                return false;
            }

            const selectedModels = modelStateService.getSelectedModels();
            const llmModelId = selectedModels.llm;

            // Check if it's an Ollama model
            const provider = modelStateService.getProviderForModel('llm', llmModelId);
            if (provider !== 'ollama') {
                console.log('[OllamaService] Selected LLM is not Ollama, skipping warm-up');
                return false;
            }

            // Check if Ollama service is running
            const isRunning = await this.isServiceRunning();
            if (!isRunning) {
                console.log('[OllamaService] Ollama service not running, clearing warm-up cache');
                this._clearWarmUpCache();
                return false;
            }

            // Check if model is installed
            const isInstalled = await this.isModelInstalled(llmModelId);
            if (!isInstalled) {
                console.log(`[OllamaService] Model ${llmModelId} not installed, skipping warm-up`);
                return false;
            }

            console.log(`[OllamaService] Auto-warming up selected model: ${llmModelId}`);
            return await this.warmUpModel(llmModelId);

        } catch (error) {
            console.error('[OllamaService] Auto warm-up failed:', error);
            return false;
        }
    }

    _clearWarmUpCache() {
        this.warmedModels.clear();
        this.warmingModels.clear();
        this.lastWarmUpAttempt.clear();
        console.log('[OllamaService] Warm-up cache cleared');
    }

    getWarmUpStatus() {
        return {
            warmedModels: Array.from(this.warmedModels),
            warmingModels: Array.from(this.warmingModels.keys()),
            lastAttempts: Object.fromEntries(this.lastWarmUpAttempt)
        };
    }

    async shutdown(force = false) {
        console.log(`[OllamaService] Shutdown initiated (force: ${force})`);

        if (!force && this.warmingModels.size > 0) {
            const warmingList = Array.from(this.warmingModels.keys());
            console.log(`[OllamaService] Waiting for ${warmingList.length} models to finish warming: ${warmingList.join(', ')}`);

            const warmingPromises = Array.from(this.warmingModels.values());
            try {
                // Use Promise.allSettled instead of race with setTimeout
                const results = await Promise.allSettled(warmingPromises);
                const completed = results.filter(r => r.status === 'fulfilled').length;
                console.log(`[OllamaService] ${completed}/${results.length} warming operations completed`);
            } catch (error) {
                console.log('[OllamaService] Error waiting for warm-up completion, proceeding with shutdown');
            }
        }

        // Clean up all resources
        this._cleanup();
        this._clearWarmUpCache();

        return super.shutdown(force);
    }

    async shutdownMacOS(force) {
        try {
            // Try to quit Ollama.app gracefully
            await spawnAsync('osascript', ['-e', 'tell application "Ollama" to quit']);
            console.log('[OllamaService] Ollama.app quit successfully');

            // Wait a moment for graceful shutdown
            await new Promise(resolve => setTimeout(resolve, 2000));

            // Check if still running
            const stillRunning = await this.isServiceRunning();
            if (stillRunning) {
                console.log('[OllamaService] Ollama still running, forcing shutdown');
                // Force kill if necessary
                await spawnAsync('pkill', ['-f', this.getOllamaCliPath()]);
            }

            return true;
        } catch (error) {
            console.log('[OllamaService] Graceful quit failed, trying force kill');
            try {
                await spawnAsync('pkill', ['-f', this.getOllamaCliPath()]);
                return true;
            } catch (killError) {
                console.error('[OllamaService] Failed to force kill Ollama:', killError);
                return false;
            }
        }
    }

    async shutdownWindows(force) {
        try {
            // Try to stop the service gracefully
            await spawnAsync('taskkill', ['/IM', 'ollama.exe', '/T']);
            console.log('[OllamaService] Ollama process terminated on Windows');
            return true;
        } catch (error) {
            console.log('[OllamaService] Standard termination failed, trying force kill');
            try {
                await spawnAsync('taskkill', ['/IM', 'ollama.exe', '/F', '/T']);
                return true;
            } catch (killError) {
                console.error('[OllamaService] Failed to force kill Ollama on Windows:', killError);
                return false;
            }
        }
    }

    async shutdownLinux(force) {
        try {
            await spawnAsync('pkill', ['-f', this.getOllamaCliPath()]);
            console.log('[OllamaService] Ollama process terminated on Linux');
            return true;
        } catch (error) {
            if (force) {
                await spawnAsync('pkill', ['-9', '-f', this.getOllamaCliPath()]).catch(() => {});
            }
            console.error('[OllamaService] Failed to shutdown Ollama on Linux:', error);
            return false;
        }
    }

    async getAllModelsWithStatus() {
        // Get all installed models directly from Ollama
        const installedModels = await this.getInstalledModels();

        const models = [];
        for (const model of installedModels) {
            models.push({
                name: model.name,
                displayName: model.name, // Use model name as display name
                size: model.size || 'Unknown',
                description: `Ollama model: ${model.name}`,
                installed: true,
                installing: this.installationProgress.has(model.name),
                progress: this.getInstallProgress(model.name)
            });
        }

        // Also add any models currently being installed
        for (const [modelName, progress] of this.installationProgress) {
            if (!models.find(m => m.name === modelName)) {
                models.push({
                    name: modelName,
                    displayName: modelName,
                    size: 'Unknown',
                    description: `Ollama model: ${modelName}`,
                    installed: false,
                    installing: true,
                    progress: progress
                });
            }
        }

        return models;
    }
}

// Export singleton instance
const ollamaService = new OllamaService();
module.exports = ollamaService;
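
A minimal usage sketch of the singleton above. It assumes LocalAIServiceBase provides EventEmitter semantics behind the 'pull-progress' events emitted in pullModel, and that the require path and 'llama3' model name are adjusted to your setup:

const ollamaService = require('./src/common/services/ollamaService');

async function ensureOllamaModelReady(modelName) {
    // Install check, service start, pull with progress, then warm-up
    if (!(await ollamaService.isInstalled())) {
        throw new Error('Ollama is not installed');
    }
    if (!(await ollamaService.isServiceRunning())) {
        await ollamaService.startService();
    }
    ollamaService.on('pull-progress', ({ model, progress, status }) => {
        console.log(`[Example] ${model}: ${progress}% (${status})`);
    });
    if (!(await ollamaService.isModelInstalled(modelName))) {
        await ollamaService.pullModel(modelName);
    }
    return ollamaService.warmUpModel(modelName);
}

ensureOllamaModelReady('llama3').catch(console.error);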
352
src/common/services/whisperService.js
Normal file
@ -0,0 +1,352 @@
const { spawn } = require('child_process');
const path = require('path');
const fs = require('fs');
const os = require('os');
const LocalAIServiceBase = require('./localAIServiceBase');
const { spawnAsync } = require('../utils/spawnHelper');
const { DOWNLOAD_CHECKSUMS } = require('../config/checksums');

const fsPromises = fs.promises;

class WhisperService extends LocalAIServiceBase {
    constructor() {
        super('WhisperService');
        this.isInitialized = false;
        this.whisperPath = null;
        this.modelsDir = null;
        this.tempDir = null;
        this.availableModels = {
            'whisper-tiny': {
                name: 'Tiny',
                size: '39M',
                url: 'https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-tiny.bin'
            },
            'whisper-base': {
                name: 'Base',
                size: '74M',
                url: 'https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-base.bin'
            },
            'whisper-small': {
                name: 'Small',
                size: '244M',
                url: 'https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-small.bin'
            },
            'whisper-medium': {
                name: 'Medium',
                size: '769M',
                url: 'https://huggingface.co/ggerganov/whisper.cpp/resolve/main/ggml-medium.bin'
            }
        };
    }

    async initialize() {
        if (this.isInitialized) return;

        try {
            const homeDir = os.homedir();
            const whisperDir = path.join(homeDir, '.glass', 'whisper');

            this.modelsDir = path.join(whisperDir, 'models');
            this.tempDir = path.join(whisperDir, 'temp');
            this.whisperPath = path.join(whisperDir, 'bin', 'whisper');

            await this.ensureDirectories();
            await this.ensureWhisperBinary();

            this.isInitialized = true;
            console.log('[WhisperService] Initialized successfully');
        } catch (error) {
            console.error('[WhisperService] Initialization failed:', error);
            throw error;
        }
    }

    async ensureDirectories() {
        await fsPromises.mkdir(this.modelsDir, { recursive: true });
        await fsPromises.mkdir(this.tempDir, { recursive: true });
        await fsPromises.mkdir(path.dirname(this.whisperPath), { recursive: true });
    }

    async ensureWhisperBinary() {
        const whisperCliPath = await this.checkCommand('whisper-cli');
        if (whisperCliPath) {
            this.whisperPath = whisperCliPath;
            console.log(`[WhisperService] Found whisper-cli at: ${this.whisperPath}`);
            return;
        }

        const whisperPath = await this.checkCommand('whisper');
        if (whisperPath) {
            this.whisperPath = whisperPath;
            console.log(`[WhisperService] Found whisper at: ${this.whisperPath}`);
            return;
        }

        try {
            await fsPromises.access(this.whisperPath, fs.constants.X_OK);
            console.log('[WhisperService] Custom whisper binary found');
            return;
        } catch (error) {
            // Continue to installation
        }

        const platform = this.getPlatform();
        if (platform === 'darwin') {
            console.log('[WhisperService] Whisper not found, trying Homebrew installation...');
            try {
                await this.installViaHomebrew();
                return;
            } catch (error) {
                console.log('[WhisperService] Homebrew installation failed:', error.message);
            }
        }

        await this.autoInstall();
    }

    async installViaHomebrew() {
        const brewPath = await this.checkCommand('brew');
        if (!brewPath) {
            throw new Error('Homebrew not found. Please install Homebrew first.');
        }

        console.log('[WhisperService] Installing whisper-cpp via Homebrew...');
        await spawnAsync('brew', ['install', 'whisper-cpp']);

        const whisperCliPath = await this.checkCommand('whisper-cli');
        if (whisperCliPath) {
            this.whisperPath = whisperCliPath;
            console.log(`[WhisperService] Whisper-cli installed via Homebrew at: ${this.whisperPath}`);
        } else {
            const whisperPath = await this.checkCommand('whisper');
            if (whisperPath) {
                this.whisperPath = whisperPath;
                console.log(`[WhisperService] Whisper installed via Homebrew at: ${this.whisperPath}`);
            }
        }
    }

    async ensureModelAvailable(modelId) {
        if (!this.isInitialized) {
            console.log('[WhisperService] Service not initialized, initializing now...');
            await this.initialize();
        }

        const modelInfo = this.availableModels[modelId];
        if (!modelInfo) {
            throw new Error(`Unknown model: ${modelId}. Available models: ${Object.keys(this.availableModels).join(', ')}`);
        }

        const modelPath = await this.getModelPath(modelId);
        try {
            await fsPromises.access(modelPath, fs.constants.R_OK);
            console.log(`[WhisperService] Model ${modelId} already available at: ${modelPath}`);
        } catch (error) {
            console.log(`[WhisperService] Model ${modelId} not found, downloading...`);
            await this.downloadModel(modelId);
        }
    }

    async downloadModel(modelId) {
        const modelInfo = this.availableModels[modelId];
        const modelPath = await this.getModelPath(modelId);
        const checksumInfo = DOWNLOAD_CHECKSUMS.whisper.models[modelId];

        this.emit('downloadProgress', { modelId, progress: 0 });

        await this.downloadWithRetry(modelInfo.url, modelPath, {
            expectedChecksum: checksumInfo?.sha256,
            onProgress: (progress) => {
                this.emit('downloadProgress', { modelId, progress });
            }
        });

        console.log(`[WhisperService] Model ${modelId} downloaded successfully`);
    }

    async getModelPath(modelId) {
        if (!this.isInitialized || !this.modelsDir) {
            throw new Error('WhisperService is not initialized. Call initialize() first.');
        }
        return path.join(this.modelsDir, `${modelId}.bin`);
    }

    async getWhisperPath() {
        return this.whisperPath;
    }

    async saveAudioToTemp(audioBuffer, sessionId = '') {
        const timestamp = Date.now();
        const random = Math.random().toString(36).substr(2, 6);
        const sessionPrefix = sessionId ? `${sessionId}_` : '';
        const tempFile = path.join(this.tempDir, `audio_${sessionPrefix}${timestamp}_${random}.wav`);

        const wavHeader = this.createWavHeader(audioBuffer.length);
        const wavBuffer = Buffer.concat([wavHeader, audioBuffer]);

        await fsPromises.writeFile(tempFile, wavBuffer);
        return tempFile;
    }

    createWavHeader(dataSize) {
        const header = Buffer.alloc(44);
        const sampleRate = 24000;
        const numChannels = 1;
        const bitsPerSample = 16;

        header.write('RIFF', 0);
        header.writeUInt32LE(36 + dataSize, 4);
        header.write('WAVE', 8);
        header.write('fmt ', 12);
        header.writeUInt32LE(16, 16);
        header.writeUInt16LE(1, 20);
        header.writeUInt16LE(numChannels, 22);
        header.writeUInt32LE(sampleRate, 24);
        header.writeUInt32LE(sampleRate * numChannels * bitsPerSample / 8, 28);
        header.writeUInt16LE(numChannels * bitsPerSample / 8, 32);
        header.writeUInt16LE(bitsPerSample, 34);
        header.write('data', 36);
        header.writeUInt32LE(dataSize, 40);

        return header;
    }

    async cleanupTempFile(filePath) {
        if (!filePath || typeof filePath !== 'string') {
            console.warn('[WhisperService] Invalid file path for cleanup:', filePath);
            return;
        }

        const filesToCleanup = [
            filePath,
            filePath.replace('.wav', '.txt'),
            filePath.replace('.wav', '.json')
        ];

        for (const file of filesToCleanup) {
            try {
                // Check if file exists before attempting to delete
                await fsPromises.access(file, fs.constants.F_OK);
                await fsPromises.unlink(file);
                console.log(`[WhisperService] Cleaned up: ${file}`);
            } catch (error) {
                // File doesn't exist or already deleted - this is normal
                if (error.code !== 'ENOENT') {
                    console.warn(`[WhisperService] Failed to cleanup ${file}:`, error.message);
                }
            }
        }
    }

    async getInstalledModels() {
        if (!this.isInitialized) {
            console.log('[WhisperService] Service not initialized for getInstalledModels, initializing now...');
            await this.initialize();
        }

        const models = [];
        for (const [modelId, modelInfo] of Object.entries(this.availableModels)) {
            try {
                const modelPath = await this.getModelPath(modelId);
                await fsPromises.access(modelPath, fs.constants.R_OK);
                models.push({
                    id: modelId,
                    name: modelInfo.name,
                    size: modelInfo.size,
                    installed: true
                });
            } catch (error) {
                models.push({
                    id: modelId,
                    name: modelInfo.name,
                    size: modelInfo.size,
                    installed: false
                });
            }
        }
        return models;
    }

    async isServiceRunning() {
        return this.isInitialized;
    }

    async startService() {
        if (!this.isInitialized) {
            await this.initialize();
        }
        return true;
    }

    async stopService() {
        return true;
    }

    async isInstalled() {
        try {
            const whisperPath = await this.checkCommand('whisper-cli') || await this.checkCommand('whisper');
            return !!whisperPath;
        } catch (error) {
            return false;
        }
    }

    async installMacOS() {
        throw new Error('Binary installation not available for macOS. Please install Homebrew and run: brew install whisper-cpp');
    }

    async installWindows() {
        console.log('[WhisperService] Installing Whisper on Windows...');
        const version = 'v1.7.6';
        const binaryUrl = `https://github.com/ggerganov/whisper.cpp/releases/download/${version}/whisper-cpp-${version}-win-x64.zip`;
        const tempFile = path.join(this.tempDir, 'whisper-binary.zip');

        try {
            await this.downloadWithRetry(binaryUrl, tempFile);
            const extractDir = path.dirname(this.whisperPath);
            await spawnAsync('powershell', ['-command', `Expand-Archive -Path '${tempFile}' -DestinationPath '${extractDir}' -Force`]);
            await fsPromises.unlink(tempFile);
            console.log('[WhisperService] Whisper installed successfully on Windows');
            return true;
        } catch (error) {
            console.error('[WhisperService] Windows installation failed:', error);
            throw new Error(`Failed to install Whisper on Windows: ${error.message}`);
        }
    }

    async installLinux() {
        console.log('[WhisperService] Installing Whisper on Linux...');
        const version = 'v1.7.6';
        const binaryUrl = `https://github.com/ggerganov/whisper.cpp/releases/download/${version}/whisper-cpp-${version}-linux-x64.tar.gz`;
        const tempFile = path.join(this.tempDir, 'whisper-binary.tar.gz');

        try {
            await this.downloadWithRetry(binaryUrl, tempFile);
            const extractDir = path.dirname(this.whisperPath);
            await spawnAsync('tar', ['-xzf', tempFile, '-C', extractDir, '--strip-components=1']);
            await spawnAsync('chmod', ['+x', this.whisperPath]);
            await fsPromises.unlink(tempFile);
            console.log('[WhisperService] Whisper installed successfully on Linux');
            return true;
        } catch (error) {
            console.error('[WhisperService] Linux installation failed:', error);
            throw new Error(`Failed to install Whisper on Linux: ${error.message}`);
        }
    }

    async shutdownMacOS(force) {
        return true;
    }

    async shutdownWindows(force) {
        return true;
    }

    async shutdownLinux(force) {
        return true;
    }
}

module.exports = { WhisperService };
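
Note that, unlike ollamaService above, this module exports the class rather than a singleton. A minimal preparation sketch (assuming, as with Ollama, that LocalAIServiceBase supplies the EventEmitter behavior behind 'downloadProgress', and with the require path adjusted to your layout):

const { WhisperService } = require('./src/common/services/whisperService');

async function prepareWhisper(modelId = 'whisper-base') {
    const whisper = new WhisperService();
    whisper.on('downloadProgress', ({ modelId: id, progress }) => {
        console.log(`[Example] ${id}: ${progress}%`);
    });
    await whisper.initialize();                  // locate or install the whisper binary
    await whisper.ensureModelAvailable(modelId); // download the ggml weights if missing
    return whisper.getModelPath(modelId);        // e.g. ~/.glass/whisper/models/whisper-base.bin
}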
39
src/common/utils/spawnHelper.js
Normal file
@ -0,0 +1,39 @@
const { spawn } = require('child_process');

function spawnAsync(command, args = [], options = {}) {
    return new Promise((resolve, reject) => {
        const child = spawn(command, args, options);
        let stdout = '';
        let stderr = '';

        if (child.stdout) {
            child.stdout.on('data', (data) => {
                stdout += data.toString();
            });
        }

        if (child.stderr) {
            child.stderr.on('data', (data) => {
                stderr += data.toString();
            });
        }

        child.on('error', (error) => {
            reject(error);
        });

        child.on('close', (code) => {
            if (code === 0) {
                resolve({ stdout, stderr });
            } else {
                const error = new Error(`Command failed with code ${code}: ${stderr || stdout}`);
                error.code = code;
                error.stdout = stdout;
                error.stderr = stderr;
                reject(error);
            }
        });
    });
}

module.exports = { spawnAsync };
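
A brief usage sketch of the helper; the 'ollama list' invocation is just an example command, and error objects carry the code, stdout, and stderr attached above:

const { spawnAsync } = require('./src/common/utils/spawnHelper');

async function listOllamaModels() {
    try {
        const { stdout } = await spawnAsync('ollama', ['list']);
        return stdout.trim().split('\n').slice(1); // drop the header row
    } catch (error) {
        // Non-zero exits reject with code, stdout, and stderr attached
        console.error(`Command failed (exit ${error.code}):`, error.stderr);
        return [];
    }
}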
@ -6,7 +6,17 @@ const fs = require('node:fs');
const os = require('os');
const util = require('util');
const execFile = util.promisify(require('child_process').execFile);
const sharp = require('sharp');

// Try to load sharp, but don't fail if it's not available
let sharp;
try {
    sharp = require('sharp');
    console.log('[WindowManager] Sharp module loaded successfully');
} catch (error) {
    console.warn('[WindowManager] Sharp module not available:', error.message);
    console.warn('[WindowManager] Screenshot functionality will work with reduced image processing capabilities');
    sharp = null;
}
const authService = require('../common/services/authService');
const systemSettingsRepository = require('../common/repositories/systemSettings');
const userRepository = require('../common/repositories/user');
@ -1522,6 +1532,9 @@ async function captureScreenshot(options = {}) {
        const imageBuffer = await fs.promises.readFile(tempPath);
        await fs.promises.unlink(tempPath);

        if (sharp) {
            try {
                // Try using sharp for optimal image processing
                const resizedBuffer = await sharp(imageBuffer)
                    // .resize({ height: 1080 })
                    .resize({ height: 384 })
@ -1539,8 +1552,25 @@ async function captureScreenshot(options = {}) {
                };

                return { success: true, base64, width: metadata.width, height: metadata.height };
            } catch (sharpError) {
                console.warn('Sharp module failed, falling back to basic image processing:', sharpError.message);
            }
        }

        // Fallback: Return the original image without resizing
        console.log('[WindowManager] Using fallback image processing (no resize/compression)');
        const base64 = imageBuffer.toString('base64');

        lastScreenshot = {
            base64,
            width: null, // We don't have metadata without sharp
            height: null,
            timestamp: Date.now(),
        };

        return { success: true, base64, width: null, height: null };
    } catch (error) {
        console.error('Failed to capture and resize screenshot:', error);
        console.error('Failed to capture screenshot:', error);
        return { success: false, error: error.message };
    }
}
@ -133,7 +133,50 @@ class SttService {
            return;
        }

        if (this.modelInfo.provider === 'gemini') {
        if (this.modelInfo.provider === 'whisper') {
            // Whisper STT emits 'transcription' events with different structure
            if (message.text && message.text.trim()) {
                const finalText = message.text.trim();

                // Filter out Whisper noise transcriptions
                const noisePatterns = [
                    '[BLANK_AUDIO]',
                    '[INAUDIBLE]',
                    '[MUSIC]',
                    '[SOUND]',
                    '[NOISE]',
                    '(BLANK_AUDIO)',
                    '(INAUDIBLE)',
                    '(MUSIC)',
                    '(SOUND)',
                    '(NOISE)'
                ];

                const isNoise = noisePatterns.some(pattern =>
                    finalText.includes(pattern) || finalText === pattern
                );

                // Only process transcriptions that are not noise and have meaningful content
                if (!isNoise && finalText.length > 2) {
                    this.debounceMyCompletion(finalText);

                    this.sendToRenderer('stt-update', {
                        speaker: 'Me',
                        text: finalText,
                        isPartial: false,
                        isFinal: true,
                        timestamp: Date.now(),
                    });
                } else {
                    console.log(`[Whisper-Me] Filtered noise: "${finalText}"`);
                }
            }
            return;
        } else if (this.modelInfo.provider === 'gemini') {
            if (!message.serverContent?.modelTurn) {
                console.log('[Gemini STT - Me]', JSON.stringify(message, null, 2));
            }
@ -203,7 +246,50 @@ class SttService {
            return;
        }

        if (this.modelInfo.provider === 'gemini') {
        if (this.modelInfo.provider === 'whisper') {
            // Whisper STT emits 'transcription' events with different structure
            if (message.text && message.text.trim()) {
                const finalText = message.text.trim();

                // Filter out Whisper noise transcriptions
                const noisePatterns = [
                    '[BLANK_AUDIO]',
                    '[INAUDIBLE]',
                    '[MUSIC]',
                    '[SOUND]',
                    '[NOISE]',
                    '(BLANK_AUDIO)',
                    '(INAUDIBLE)',
                    '(MUSIC)',
                    '(SOUND)',
                    '(NOISE)'
                ];

                const isNoise = noisePatterns.some(pattern =>
                    finalText.includes(pattern) || finalText === pattern
                );

                // Only process transcriptions that are not noise and have meaningful content
                if (!isNoise && finalText.length > 2) {
                    this.debounceTheirCompletion(finalText);

                    this.sendToRenderer('stt-update', {
                        speaker: 'Them',
                        text: finalText,
                        isPartial: false,
                        isFinal: true,
                        timestamp: Date.now(),
                    });
                } else {
                    console.log(`[Whisper-Them] Filtered noise: "${finalText}"`);
                }
            }
            return;
        } else if (this.modelInfo.provider === 'gemini') {
            if (!message.serverContent?.modelTurn) {
                console.log('[Gemini STT - Them]', JSON.stringify(message, null, 2));
            }
@ -294,9 +380,13 @@ class SttService {
            portkeyVirtualKey: this.modelInfo.provider === 'openai-glass' ? this.modelInfo.apiKey : undefined,
        };

        // Add sessionType for Whisper to distinguish between My and Their sessions
        const myOptions = { ...sttOptions, callbacks: mySttConfig.callbacks, sessionType: 'my' };
        const theirOptions = { ...sttOptions, callbacks: theirSttConfig.callbacks, sessionType: 'their' };

        [this.mySttSession, this.theirSttSession] = await Promise.all([
            createSTT(this.modelInfo.provider, { ...sttOptions, callbacks: mySttConfig.callbacks }),
            createSTT(this.modelInfo.provider, { ...sttOptions, callbacks: theirSttConfig.callbacks }),
            createSTT(this.modelInfo.provider, myOptions),
            createSTT(this.modelInfo.provider, theirOptions),
        ]);

        console.log('✅ Both STT sessions initialized successfully.');
@ -1,4 +1,5 @@
|
||||
import { html, css, LitElement } from '../../assets/lit-core-2.7.4.min.js';
|
||||
import { getOllamaProgressTracker } from '../../common/services/localProgressTracker.js';
|
||||
|
||||
export class SettingsView extends LitElement {
|
||||
static styles = css`
|
||||
@ -408,9 +409,54 @@ export class SettingsView extends LitElement {
|
||||
overflow-y: auto; background: rgba(0,0,0,0.3); border-radius: 4px;
|
||||
padding: 4px; margin-top: 4px;
|
||||
}
|
||||
.model-item { padding: 5px 8px; font-size: 11px; border-radius: 3px; cursor: pointer; transition: background-color 0.15s; }
|
||||
.model-item {
|
||||
padding: 5px 8px;
|
||||
font-size: 11px;
|
||||
border-radius: 3px;
|
||||
cursor: pointer;
|
||||
transition: background-color 0.15s;
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
}
|
||||
.model-item:hover { background-color: rgba(255,255,255,0.1); }
|
||||
.model-item.selected { background-color: rgba(0, 122, 255, 0.4); font-weight: 500; }
|
||||
.model-status {
|
||||
font-size: 9px;
|
||||
color: rgba(255,255,255,0.6);
|
||||
margin-left: 8px;
|
||||
}
|
||||
.model-status.installed { color: rgba(0, 255, 0, 0.8); }
|
||||
.model-status.not-installed { color: rgba(255, 200, 0, 0.8); }
|
||||
.install-progress {
|
||||
flex: 1;
|
||||
height: 4px;
|
||||
background: rgba(255,255,255,0.1);
|
||||
border-radius: 2px;
|
||||
margin-left: 8px;
|
||||
overflow: hidden;
|
||||
}
|
||||
.install-progress-bar {
|
||||
height: 100%;
|
||||
background: rgba(0, 122, 255, 0.8);
|
||||
transition: width 0.3s ease;
|
||||
}
|
||||
|
||||
/* Dropdown styles */
|
||||
select.model-dropdown {
|
||||
background: rgba(0,0,0,0.2);
|
||||
color: white;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
select.model-dropdown option {
|
||||
background: #1a1a1a;
|
||||
color: white;
|
||||
}
|
||||
|
||||
select.model-dropdown option:disabled {
|
||||
color: rgba(255,255,255,0.4);
|
||||
}
|
||||
|
||||
/* ────────────────[ GLASS BYPASS ]─────────────── */
|
||||
:host-context(body.has-glass) {
|
||||
@ -458,6 +504,12 @@ export class SettingsView extends LitElement {
|
||||
showPresets: { type: Boolean, state: true },
|
||||
autoUpdateEnabled: { type: Boolean, state: true },
|
||||
autoUpdateLoading: { type: Boolean, state: true },
|
||||
// Ollama related properties
|
||||
ollamaStatus: { type: Object, state: true },
|
||||
ollamaModels: { type: Array, state: true },
|
||||
installingModels: { type: Object, state: true },
|
||||
// Whisper related properties
|
||||
whisperModels: { type: Array, state: true },
|
||||
};
|
||||
//////// after_modelStateService ////////
|
||||
|
||||
@ -466,7 +518,7 @@ export class SettingsView extends LitElement {
|
||||
//////// after_modelStateService ////////
|
||||
this.shortcuts = {};
|
||||
this.firebaseUser = null;
|
||||
this.apiKeys = { openai: '', gemini: '', anthropic: '' };
|
||||
this.apiKeys = { openai: '', gemini: '', anthropic: '', whisper: '' };
|
||||
this.providerConfig = {};
|
||||
this.isLoading = true;
|
||||
this.isContentProtectionOn = true;
|
||||
@ -480,6 +532,14 @@ export class SettingsView extends LitElement {
|
||||
this.presets = [];
|
||||
this.selectedPreset = null;
|
||||
this.showPresets = false;
|
||||
// Ollama related
|
||||
this.ollamaStatus = { installed: false, running: false };
|
||||
this.ollamaModels = [];
|
||||
this.installingModels = {}; // { modelName: progress }
|
||||
this.progressTracker = getOllamaProgressTracker();
|
||||
// Whisper related
|
||||
this.whisperModels = [];
|
||||
this.whisperProgressTracker = null; // Will be initialized when needed
|
||||
this.handleUsePicklesKey = this.handleUsePicklesKey.bind(this)
|
||||
this.autoUpdateEnabled = true;
|
||||
this.autoUpdateLoading = true;
|
||||
@ -529,7 +589,7 @@ export class SettingsView extends LitElement {
|
||||
this.isLoading = true;
|
||||
const { ipcRenderer } = window.require('electron');
|
||||
try {
|
||||
const [userState, config, storedKeys, availableLlm, availableStt, selectedModels, presets, contentProtection, shortcuts] = await Promise.all([
|
||||
const [userState, config, storedKeys, availableLlm, availableStt, selectedModels, presets, contentProtection, shortcuts, ollamaStatus, whisperModelsResult] = await Promise.all([
|
||||
ipcRenderer.invoke('get-current-user'),
|
||||
ipcRenderer.invoke('model:get-provider-config'), // Provider 설정 로드
|
||||
ipcRenderer.invoke('model:get-all-keys'),
|
||||
@ -538,7 +598,9 @@ export class SettingsView extends LitElement {
|
||||
ipcRenderer.invoke('model:get-selected-models'),
|
||||
ipcRenderer.invoke('settings:getPresets'),
|
||||
ipcRenderer.invoke('get-content-protection-status'),
|
||||
ipcRenderer.invoke('get-current-shortcuts')
|
||||
ipcRenderer.invoke('get-current-shortcuts'),
|
||||
ipcRenderer.invoke('ollama:get-status'),
|
||||
ipcRenderer.invoke('whisper:get-installed-models')
|
||||
]);
|
||||
|
||||
if (userState && userState.isLoggedIn) this.firebaseUser = userState;
|
||||
@ -555,6 +617,23 @@ export class SettingsView extends LitElement {
|
||||
const firstUserPreset = this.presets.find(p => p.is_default === 0);
|
||||
if (firstUserPreset) this.selectedPreset = firstUserPreset;
|
||||
}
|
||||
// Ollama status
|
||||
if (ollamaStatus?.success) {
|
||||
this.ollamaStatus = { installed: ollamaStatus.installed, running: ollamaStatus.running };
|
||||
this.ollamaModels = ollamaStatus.models || [];
|
||||
}
|
||||
// Whisper status
|
||||
if (whisperModelsResult?.success) {
|
||||
const installedWhisperModels = whisperModelsResult.models;
|
||||
if (this.providerConfig.whisper) {
|
||||
this.providerConfig.whisper.sttModels.forEach(m => {
|
||||
const installedInfo = installedWhisperModels.find(i => i.id === m.id);
|
||||
if (installedInfo) {
|
||||
m.installed = installedInfo.installed;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error loading initial settings data:', error);
|
||||
} finally {
|
||||
@ -566,8 +645,52 @@ export class SettingsView extends LitElement {
|
||||
const input = this.shadowRoot.querySelector(`#key-input-${provider}`);
|
||||
if (!input) return;
|
||||
const key = input.value;
|
||||
this.saving = true;
|
||||
|
||||
// For Ollama, we need to ensure it's ready first
|
||||
if (provider === 'ollama') {
|
||||
this.saving = true;
|
||||
const { ipcRenderer } = window.require('electron');
|
||||
|
||||
// First ensure Ollama is installed and running
|
||||
const ensureResult = await ipcRenderer.invoke('ollama:ensure-ready');
|
||||
if (!ensureResult.success) {
|
||||
alert(`Failed to setup Ollama: ${ensureResult.error}`);
|
||||
this.saving = false;
|
||||
return;
|
||||
}
|
||||
|
||||
// Now validate (which will check if service is running)
|
||||
const result = await ipcRenderer.invoke('model:validate-key', { provider, key: 'local' });
|
||||
|
||||
if (result.success) {
|
||||
this.apiKeys = { ...this.apiKeys, [provider]: 'local' };
|
||||
await this.refreshModelData();
|
||||
await this.refreshOllamaStatus();
|
||||
} else {
|
||||
alert(`Failed to connect to Ollama: ${result.error}`);
|
||||
}
|
||||
this.saving = false;
|
||||
return;
|
||||
}
|
||||
|
||||
// For Whisper, just enable it
|
||||
if (provider === 'whisper') {
|
||||
this.saving = true;
|
||||
const { ipcRenderer } = window.require('electron');
|
||||
const result = await ipcRenderer.invoke('model:validate-key', { provider, key: 'local' });
|
||||
|
||||
if (result.success) {
|
||||
this.apiKeys = { ...this.apiKeys, [provider]: 'local' };
|
||||
await this.refreshModelData();
|
||||
} else {
|
||||
alert(`Failed to enable Whisper: ${result.error}`);
|
||||
}
|
||||
this.saving = false;
|
||||
return;
|
||||
}
|
||||
|
||||
// For other providers, use the normal flow
|
||||
this.saving = true;
|
||||
const { ipcRenderer } = window.require('electron');
|
||||
const result = await ipcRenderer.invoke('model:validate-key', { provider, key });
|
||||
|
||||
@ -592,15 +715,17 @@ export class SettingsView extends LitElement {
|
||||
|
||||
async refreshModelData() {
|
||||
const { ipcRenderer } = window.require('electron');
|
||||
const [availableLlm, availableStt, selected] = await Promise.all([
|
||||
const [availableLlm, availableStt, selected, storedKeys] = await Promise.all([
|
||||
ipcRenderer.invoke('model:get-available-models', { type: 'llm' }),
|
||||
ipcRenderer.invoke('model:get-available-models', { type: 'stt' }),
|
||||
ipcRenderer.invoke('model:get-selected-models')
|
||||
ipcRenderer.invoke('model:get-selected-models'),
|
||||
ipcRenderer.invoke('model:get-all-keys')
|
||||
]);
|
||||
this.availableLlmModels = availableLlm;
|
||||
this.availableSttModels = availableStt;
|
||||
this.selectedLlm = selected.llm;
|
||||
this.selectedStt = selected.stt;
|
||||
this.apiKeys = storedKeys;
|
||||
this.requestUpdate();
|
||||
}
|
||||
|
||||
@ -622,6 +747,28 @@ export class SettingsView extends LitElement {
}

async selectModel(type, modelId) {
    // Check if this is an Ollama model that needs to be installed
    const provider = this.getProviderForModel(type, modelId);
    if (provider === 'ollama') {
        const ollamaModel = this.ollamaModels.find(m => m.name === modelId);
        if (ollamaModel && !ollamaModel.installed && !ollamaModel.installing) {
            // Need to install the model first
            await this.installOllamaModel(modelId);
            return;
        }
    }

    // Check if this is a Whisper model that needs to be downloaded
    if (provider === 'whisper' && type === 'stt') {
        const isInstalling = this.installingModels[modelId] !== undefined;
        const whisperModelInfo = this.providerConfig.whisper.sttModels.find(m => m.id === modelId);

        if (whisperModelInfo && !whisperModelInfo.installed && !isInstalling) {
            await this.downloadWhisperModel(modelId);
            return;
        }
    }

    this.saving = true;
    const { ipcRenderer } = window.require('electron');
    await ipcRenderer.invoke('model:set-selected-model', { type, modelId });
@ -633,6 +780,102 @@ export class SettingsView extends LitElement {
    this.requestUpdate();
}

async refreshOllamaStatus() {
    const { ipcRenderer } = window.require('electron');
    const ollamaStatus = await ipcRenderer.invoke('ollama:get-status');
    if (ollamaStatus?.success) {
        this.ollamaStatus = { installed: ollamaStatus.installed, running: ollamaStatus.running };
        this.ollamaModels = ollamaStatus.models || [];
    }
}

async installOllamaModel(modelName) {
    // Mark as installing
    this.installingModels = { ...this.installingModels, [modelName]: 0 };
    this.requestUpdate();

    try {
        // Use the clean progress tracker - no manual event management needed
        const success = await this.progressTracker.installModel(modelName, (progress) => {
            this.installingModels = { ...this.installingModels, [modelName]: progress };
            this.requestUpdate();
        });

        if (success) {
            // Refresh status after installation
            await this.refreshOllamaStatus();
            await this.refreshModelData();
            // Auto-select the model after installation
            await this.selectModel('llm', modelName);
        } else {
            alert(`Installation of ${modelName} was cancelled`);
        }
    } catch (error) {
        console.error(`[SettingsView] Error installing model ${modelName}:`, error);
        alert(`Error installing ${modelName}: ${error.message}`);
    } finally {
        // Automatic cleanup - no manual event listener management
        delete this.installingModels[modelName];
        this.requestUpdate();
    }
}
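
The `progressTracker` consumed above is not shown anywhere in this diff. A minimal sketch of what such a tracker could look like, assuming it bridges the `ollama:pull-model` invoke and the `ollama:pull-progress` events that the main process registers in `src/index.js` below (the class name, the `progress` field, and the cancel stub are assumptions, not part of this commit):

```js
// Hypothetical sketch -- not part of this commit. Assumes the
// 'ollama:pull-model' / 'ollama:pull-progress' channels registered below.
class OllamaProgressTracker {
    constructor() {
        this.ipcRenderer = window.require('electron').ipcRenderer;
    }

    // Resolves true on success, false if the pull failed or was cancelled;
    // onProgress is called with a 0-100 value as pull events arrive.
    async installModel(modelName, onProgress) {
        const handler = (event, data) => {
            // Assumed event shape: { model, progress }
            if (data.model === modelName && typeof data.progress === 'number') {
                onProgress(Math.round(data.progress));
            }
        };
        this.ipcRenderer.on('ollama:pull-progress', handler);
        try {
            const result = await this.ipcRenderer.invoke('ollama:pull-model', modelName);
            return !!result?.success;
        } finally {
            // Always detach the listener -- this is what lets the caller
            // above skip manual event management.
            this.ipcRenderer.removeListener('ollama:pull-progress', handler);
        }
    }

    cancelInstallation(modelName) {
        // Stub: a real cancel would need a dedicated main-process channel,
        // which does not appear in this diff.
        console.warn(`[OllamaProgressTracker] Cancel requested for ${modelName}`);
    }
}
```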

async downloadWhisperModel(modelId) {
    // Mark as installing
    this.installingModels = { ...this.installingModels, [modelId]: 0 };
    this.requestUpdate();

    const { ipcRenderer } = window.require('electron');

    // Set up progress listener
    const progressHandler = (event, { modelId: id, progress }) => {
        if (id === modelId) {
            this.installingModels = { ...this.installingModels, [modelId]: progress };
            this.requestUpdate();
        }
    };

    ipcRenderer.on('whisper:download-progress', progressHandler);

    try {
        // Start download
        const result = await ipcRenderer.invoke('whisper:download-model', modelId);

        if (result.success) {
            // Auto-select the model after download
            await this.selectModel('stt', modelId);
        } else {
            alert(`Failed to download Whisper model: ${result.error}`);
        }
    } catch (error) {
        console.error(`[SettingsView] Error downloading Whisper model ${modelId}:`, error);
        alert(`Error downloading ${modelId}: ${error.message}`);
    } finally {
        // Cleanup: always remove the listener, even when the download throws
        ipcRenderer.removeListener('whisper:download-progress', progressHandler);
        delete this.installingModels[modelId];
        this.requestUpdate();
    }
}

getProviderForModel(type, modelId) {
    for (const [providerId, config] of Object.entries(this.providerConfig)) {
        const models = type === 'llm' ? config.llmModels : config.sttModels;
        if (models?.some(m => m.id === modelId)) {
            return providerId;
        }
    }
    return null;
}

async handleWhisperModelSelect(modelId) {
    if (!modelId) return;

    // Select the model (will trigger download if needed)
    await this.selectModel('stt', modelId);
}

handleUsePicklesKey(e) {
    e.preventDefault()
    if (this.wasJustDragged) return
@ -665,6 +908,14 @@ export class SettingsView extends LitElement {
    this.cleanupEventListeners();
    this.cleanupIpcListeners();
    this.cleanupWindowResize();

    // Cancel any ongoing Ollama installations when component is destroyed
    const installingModels = Object.keys(this.installingModels);
    if (installingModels.length > 0) {
        installingModels.forEach(modelName => {
            this.progressTracker.cancelInstallation(modelName);
        });
    }
}

setupEventListeners() {
@ -920,6 +1171,36 @@ export class SettingsView extends LitElement {
    }
}

async handleOllamaShutdown() {
    console.log('[SettingsView] Shutting down Ollama service...');

    if (!window.require) return;

    const { ipcRenderer } = window.require('electron');

    try {
        // Show loading state
        this.ollamaStatus = { ...this.ollamaStatus, running: false };
        this.requestUpdate();

        const result = await ipcRenderer.invoke('ollama:shutdown', false); // Graceful shutdown

        if (result.success) {
            console.log('[SettingsView] Ollama shut down successfully');
            // Refresh status to reflect the change
            await this.refreshOllamaStatus();
        } else {
            console.error('[SettingsView] Failed to shutdown Ollama:', result.error);
            // Restore previous state on error
            await this.refreshOllamaStatus();
        }
    } catch (error) {
        console.error('[SettingsView] Error during Ollama shutdown:', error);
        // Restore previous state on error
        await this.refreshOllamaStatus();
    }
}

//////// before_modelStateService ////////
// render() {
@ -1072,20 +1353,124 @@ export class SettingsView extends LitElement {
<div class="api-key-section">
    ${Object.entries(this.providerConfig)
        .filter(([id, config]) => !id.includes('-glass'))
        .map(([id, config]) => html`
        .map(([id, config]) => {
            if (id === 'ollama') {
                // Special UI for Ollama
                return html`
                    <div class="provider-key-group">
                        <label>${config.name} (Local)</label>
                        ${this.ollamaStatus.installed && this.ollamaStatus.running ? html`
                            <div style="padding: 8px; background: rgba(0,255,0,0.1); border-radius: 4px; font-size: 11px; color: rgba(0,255,0,0.8);">
                                ✓ Ollama is running
                            </div>
                            <button class="settings-button full-width danger" @click=${this.handleOllamaShutdown}>
                                Stop Ollama Service
                            </button>
                        ` : this.ollamaStatus.installed ? html`
                            <div style="padding: 8px; background: rgba(255,200,0,0.1); border-radius: 4px; font-size: 11px; color: rgba(255,200,0,0.8);">
                                ⚠ Ollama installed but not running
                            </div>
                            <button class="settings-button full-width" @click=${() => this.handleSaveKey(id)}>
                                Start Ollama
                            </button>
                        ` : html`
                            <div style="padding: 8px; background: rgba(255,100,100,0.1); border-radius: 4px; font-size: 11px; color: rgba(255,100,100,0.8);">
                                ✗ Ollama not installed
                            </div>
                            <button class="settings-button full-width" @click=${() => this.handleSaveKey(id)}>
                                Install & Setup Ollama
                            </button>
                        `}
                    </div>
                `;
            }

            if (id === 'whisper') {
                // Special UI for Whisper with model selection
                const whisperModels = config.sttModels || [];
                const selectedWhisperModel = this.selectedStt && this.getProviderForModel('stt', this.selectedStt) === 'whisper'
                    ? this.selectedStt
                    : null;

                return html`
                    <div class="provider-key-group">
                        <label>${config.name} (Local STT)</label>
                        ${this.apiKeys[id] === 'local' ? html`
                            <div style="padding: 8px; background: rgba(0,255,0,0.1); border-radius: 4px; font-size: 11px; color: rgba(0,255,0,0.8); margin-bottom: 8px;">
                                ✓ Whisper is enabled
                            </div>

                            <!-- Whisper Model Selection Dropdown -->
                            <label style="font-size: 10px; margin-top: 8px;">Select Model:</label>
                            <select
                                class="model-dropdown"
                                style="width: 100%; padding: 6px; background: rgba(0,0,0,0.2); border: 1px solid rgba(255,255,255,0.2); color: white; border-radius: 4px; font-size: 11px; margin-bottom: 8px;"
                                @change=${(e) => this.handleWhisperModelSelect(e.target.value)}
                                .value=${selectedWhisperModel || ''}
                            >
                                <option value="">Choose a model...</option>
                                ${whisperModels.map(model => {
                                    const isInstalling = this.installingModels[model.id] !== undefined;
                                    const progress = this.installingModels[model.id] || 0;

                                    let statusText = '';
                                    if (isInstalling) {
                                        statusText = ` (Downloading ${progress}%)`;
                                    } else if (model.installed) {
                                        statusText = ' (Installed)';
                                    }

                                    return html`
                                        <option value="${model.id}" ?disabled=${isInstalling}>
                                            ${model.name}${statusText}
                                        </option>
                                    `;
                                })}
                            </select>

                            ${Object.entries(this.installingModels).map(([modelId, progress]) => {
                                if (modelId.startsWith('whisper-') && progress !== undefined) {
                                    return html`
                                        <div style="margin: 8px 0;">
                                            <div style="font-size: 10px; color: rgba(255,255,255,0.7); margin-bottom: 4px;">
                                                Downloading ${modelId}...
                                            </div>
                                            <div class="install-progress" style="height: 4px; background: rgba(255,255,255,0.1); border-radius: 2px; overflow: hidden;">
                                                <div class="install-progress-bar" style="height: 100%; background: rgba(0, 122, 255, 0.8); width: ${progress}%; transition: width 0.3s ease;"></div>
                                            </div>
                                        </div>
                                    `;
                                }
                                return null;
                            })}

                            <button class="settings-button full-width danger" @click=${() => this.handleClearKey(id)}>
                                Disable Whisper
                            </button>
                        ` : html`
                            <button class="settings-button full-width" @click=${() => this.handleSaveKey(id)}>
                                Enable Whisper STT
                            </button>
                        `}
                    </div>
                `;
            }

            // Regular providers
            return html`
                <div class="provider-key-group">
                    <label for="key-input-${id}">${config.name} API Key</label>
                    <input type="password" id="key-input-${id}"
                        placeholder=${loggedIn ? "Using Pickle's Key" : `Enter ${config.name} API Key`}
                        .value=${this.apiKeys[id] || ''}
                    >
                    <div class="key-buttons">
                        <button class="settings-button" @click=${() => this.handleSaveKey(id)}>Save</button>
                        <button class="settings-button danger" @click=${() => this.handleClearKey(id)}>Clear</button>
                    </div>
                </div>
            `)}
            `;
        })}
</div>
`;

@ -1104,11 +1489,30 @@ export class SettingsView extends LitElement {
</button>
${this.isLlmListVisible ? html`
    <div class="model-list">
        ${this.availableLlmModels.map(model => html`
            <div class="model-item ${this.selectedLlm === model.id ? 'selected' : ''}" @click=${() => this.selectModel('llm', model.id)}>
                ${model.name}
        ${this.availableLlmModels.map(model => {
            const isOllama = this.getProviderForModel('llm', model.id) === 'ollama';
            const ollamaModel = isOllama ? this.ollamaModels.find(m => m.name === model.id) : null;
            const isInstalling = this.installingModels[model.id] !== undefined;
            const installProgress = this.installingModels[model.id] || 0;

            return html`
                <div class="model-item ${this.selectedLlm === model.id ? 'selected' : ''}"
                    @click=${() => this.selectModel('llm', model.id)}>
                    <span>${model.name}</span>
                    ${isOllama ? html`
                        ${isInstalling ? html`
                            <div class="install-progress">
                                <div class="install-progress-bar" style="width: ${installProgress}%"></div>
                            </div>
            `)}
                        ` : ollamaModel?.installed ? html`
                            <span class="model-status installed">✓ Installed</span>
                        ` : html`
                            <span class="model-status not-installed">Click to install</span>
                        `}
                    ` : ''}
                </div>
            `;
        })}
    </div>
` : ''}
</div>
@ -1119,11 +1523,23 @@ export class SettingsView extends LitElement {
</button>
${this.isSttListVisible ? html`
    <div class="model-list">
        ${this.availableSttModels.map(model => html`
            <div class="model-item ${this.selectedStt === model.id ? 'selected' : ''}" @click=${() => this.selectModel('stt', model.id)}>
                ${model.name}
        ${this.availableSttModels.map(model => {
            const isWhisper = this.getProviderForModel('stt', model.id) === 'whisper';
            const isInstalling = this.installingModels[model.id] !== undefined;
            const installProgress = this.installingModels[model.id] || 0;

            return html`
                <div class="model-item ${this.selectedStt === model.id ? 'selected' : ''}"
                    @click=${() => this.selectModel('stt', model.id)}>
                    <span>${model.name}</span>
                    ${isWhisper && isInstalling ? html`
                        <div class="install-progress">
                            <div class="install-progress-bar" style="width: ${installProgress}%"></div>
                        </div>
            `)}
                    ` : ''}
                </div>
            `;
        })}
    </div>
` : ''}
</div>
346
src/index.js
346
src/index.js
@ -28,8 +28,10 @@ const sessionRepository = require('./common/repositories/session');
const ModelStateService = require('./common/services/modelStateService');
const sqliteClient = require('./common/services/sqliteClient');

// Global variables
const eventBridge = new EventEmitter();
let WEB_PORT = 3000;
let isShuttingDown = false; // Flag to prevent infinite shutdown loop

const listenService = new ListenService();
// Make listenService globally accessible so other modules (e.g., windowManager, askService) can reuse the same instance
@ -40,6 +42,10 @@ const modelStateService = new ModelStateService(authService);
global.modelStateService = modelStateService;
//////// after_modelStateService ////////

// Import and initialize OllamaService
const ollamaService = require('./common/services/ollamaService');
const ollamaModelRepository = require('./common/repositories/ollamaModel');

// Native deep link handling - cross-platform compatible
let pendingDeepLinkUrl = null;

@ -200,6 +206,21 @@ app.whenReady().then(async () => {
    askService.initialize();
    settingsService.initialize();
    setupGeneralIpcHandlers();
    setupOllamaIpcHandlers();
    setupWhisperIpcHandlers();

    // Initialize Ollama models in database
    await ollamaModelRepository.initializeDefaultModels();

    // Auto warm-up selected Ollama model in background (non-blocking)
    setTimeout(async () => {
        try {
            console.log('[index.js] Starting background Ollama model warm-up...');
            await ollamaService.autoWarmUpSelectedModel();
        } catch (error) {
            console.log('[index.js] Background warm-up failed (non-critical):', error.message);
        }
    }, 2000); // Wait 2 seconds after app start

    // Start web server and create windows ONLY after all initializations are successful
    WEB_PORT = await startWebStack();
@ -234,11 +255,71 @@ app.on('window-all-closed', () => {
    }
});

app.on('before-quit', async () => {
    console.log('[Shutdown] App is about to quit.');
app.on('before-quit', async (event) => {
    // Prevent infinite loop by checking if shutdown is already in progress
    if (isShuttingDown) {
        console.log('[Shutdown] 🔄 Shutdown already in progress, allowing quit...');
        return;
    }

    console.log('[Shutdown] App is about to quit. Starting graceful shutdown...');

    // Set shutdown flag to prevent infinite loop
    isShuttingDown = true;

    // Prevent immediate quit to allow graceful shutdown
    event.preventDefault();

    try {
        // 1. Stop audio capture first (immediate)
        listenService.stopMacOSAudioCapture();
        // await sessionRepository.endAllActiveSessions(); // MOVED TO authService
        console.log('[Shutdown] Audio capture stopped');

        // 2. End all active sessions (database operations) - with error handling
        try {
            await sessionRepository.endAllActiveSessions();
            console.log('[Shutdown] Active sessions ended');
        } catch (dbError) {
            console.warn('[Shutdown] Could not end active sessions (database may be closed):', dbError.message);
        }

        // 3. Shutdown Ollama service (potentially time-consuming)
        console.log('[Shutdown] Shutting down Ollama service...');
        const ollamaShutdownSuccess = await Promise.race([
            ollamaService.shutdown(false), // Graceful shutdown
            new Promise(resolve => setTimeout(() => resolve(false), 8000)) // 8s timeout
        ]);

        if (ollamaShutdownSuccess) {
            console.log('[Shutdown] Ollama service shut down gracefully');
        } else {
            console.log('[Shutdown] Ollama shutdown timeout, forcing...');
            // Force shutdown if graceful failed
            try {
                await ollamaService.shutdown(true);
            } catch (forceShutdownError) {
                console.warn('[Shutdown] Force shutdown also failed:', forceShutdownError.message);
            }
        }

        // 4. Close database connections (final cleanup)
        try {
            databaseInitializer.close();
            console.log('[Shutdown] Database connections closed');
        } catch (closeError) {
            console.warn('[Shutdown] Error closing database:', closeError.message);
        }

        console.log('[Shutdown] Graceful shutdown completed successfully');

    } catch (error) {
        console.error('[Shutdown] Error during graceful shutdown:', error);
        // Continue with shutdown even if there were errors
    } finally {
        // Actually quit the app now
        console.log('[Shutdown] Exiting application...');
        app.exit(0); // Use app.exit() instead of app.quit() to force quit
    }
});
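
The `Promise.race` in step 3 bounds how long the quit sequence waits, but note that losing the race does not cancel the underlying shutdown call; it only stops waiting for it (the force-shutdown branch then escalates). The same pattern as a reusable helper (a sketch; the commit inlines it instead):

```js
// Hypothetical helper equivalent to the inlined race above.
function withTimeout(promise, ms, fallback = false) {
    return Promise.race([
        promise,
        new Promise(resolve => setTimeout(() => resolve(fallback), ms)),
    ]);
}

// const ok = await withTimeout(ollamaService.shutdown(false), 8000);
// if (!ok) await ollamaService.shutdown(true); // escalate to force shutdown
```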

app.on('activate', () => {
@ -247,6 +328,70 @@ app.on('activate', () => {
    }
});

function setupWhisperIpcHandlers() {
    const { WhisperService } = require('./common/services/whisperService');
    const whisperService = new WhisperService();

    // Forward download progress events to renderer
    whisperService.on('downloadProgress', (data) => {
        const windows = BrowserWindow.getAllWindows();
        windows.forEach(window => {
            window.webContents.send('whisper:download-progress', data);
        });
    });

    // IPC handlers for Whisper operations
    ipcMain.handle('whisper:download-model', async (event, modelId) => {
        try {
            console.log(`[Whisper IPC] Starting download for model: ${modelId}`);

            // Ensure WhisperService is initialized first
            if (!whisperService.isInitialized) {
                console.log('[Whisper IPC] Initializing WhisperService...');
                await whisperService.initialize();
            }

            // Set up progress listener
            const progressHandler = (data) => {
                if (data.modelId === modelId) {
                    event.sender.send('whisper:download-progress', data);
                }
            };

            whisperService.on('downloadProgress', progressHandler);

            try {
                await whisperService.ensureModelAvailable(modelId);
                console.log(`[Whisper IPC] Model ${modelId} download completed successfully`);
            } finally {
                // Cleanup listener
                whisperService.removeListener('downloadProgress', progressHandler);
            }

            return { success: true };
        } catch (error) {
            console.error(`[Whisper IPC] Failed to download model ${modelId}:`, error);
            return { success: false, error: error.message };
        }
    });

    ipcMain.handle('whisper:get-installed-models', async () => {
        try {
            // Ensure WhisperService is initialized first
            if (!whisperService.isInitialized) {
                console.log('[Whisper IPC] Initializing WhisperService for model list...');
                await whisperService.initialize();
            }

            const models = await whisperService.getInstalledModels();
            return { success: true, models };
        } catch (error) {
            console.error('[Whisper IPC] Failed to get installed models:', error);
            return { success: false, error: error.message };
        }
    });
}
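
One subtlety in the handlers above: during a download the requesting window receives each `whisper:download-progress` event twice, once from the constructor-level forwarder that broadcasts to every window, and once from the per-request handler that targets `event.sender`. The renderer code in `SettingsView` tolerates this because writing the same progress value twice is idempotent; if duplicates ever mattered, a renderer-side guard along these lines would filter them (a sketch, not part of this commit):

```js
// Hypothetical renderer-side guard against duplicated progress events.
function onWhisperProgressDeduped(ipcRenderer, onProgress) {
    const last = new Map(); // modelId -> last progress value seen
    ipcRenderer.on('whisper:download-progress', (event, { modelId, progress }) => {
        if (last.get(modelId) !== progress) {
            last.set(modelId, progress);
            onProgress(modelId, progress);
        }
    });
}
```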

function setupGeneralIpcHandlers() {
    const userRepository = require('./common/repositories/user');
    const presetRepository = require('./common/repositories/preset');
@ -299,6 +444,201 @@ function setupGeneralIpcHandlers() {
    setupWebDataHandlers();
}

function setupOllamaIpcHandlers() {
    // Ollama status and installation
    ipcMain.handle('ollama:get-status', async () => {
        try {
            const installed = await ollamaService.isInstalled();
            const running = installed ? await ollamaService.isServiceRunning() : false;
            const models = await ollamaService.getAllModelsWithStatus();

            return {
                installed,
                running,
                models,
                success: true
            };
        } catch (error) {
            console.error('[Ollama IPC] Failed to get status:', error);
            return { success: false, error: error.message };
        }
    });

    ipcMain.handle('ollama:install', async (event) => {
        try {
            const onProgress = (data) => {
                event.sender.send('ollama:install-progress', data);
            };

            await ollamaService.autoInstall(onProgress);

            if (!await ollamaService.isServiceRunning()) {
                onProgress({ stage: 'starting', message: 'Starting Ollama service...', progress: 0 });
                await ollamaService.startService();
                onProgress({ stage: 'starting', message: 'Ollama service started.', progress: 100 });
            }
            event.sender.send('ollama:install-complete', { success: true });
            return { success: true };
        } catch (error) {
            console.error('[Ollama IPC] Failed to install:', error);
            event.sender.send('ollama:install-complete', { success: false, error: error.message });
            return { success: false, error: error.message };
        }
    });

    ipcMain.handle('ollama:start-service', async (event) => {
        try {
            if (!await ollamaService.isServiceRunning()) {
                console.log('[Ollama IPC] Starting Ollama service...');
                await ollamaService.startService();
            }
            event.sender.send('ollama:install-complete', { success: true });
            return { success: true };
        } catch (error) {
            console.error('[Ollama IPC] Failed to start service:', error);
            event.sender.send('ollama:install-complete', { success: false, error: error.message });
            return { success: false, error: error.message };
        }
    });

    // Ensure Ollama is ready (starts service if installed but not running)
    ipcMain.handle('ollama:ensure-ready', async () => {
        try {
            if (await ollamaService.isInstalled() && !await ollamaService.isServiceRunning()) {
                console.log('[Ollama IPC] Ollama installed but not running, starting service...');
                await ollamaService.startService();
            }
            return { success: true };
        } catch (error) {
            console.error('[Ollama IPC] Failed to ensure ready:', error);
            return { success: false, error: error.message };
        }
    });

    // Get all models with their status
    ipcMain.handle('ollama:get-models', async () => {
        try {
            const models = await ollamaService.getAllModelsWithStatus();
            return { success: true, models };
        } catch (error) {
            console.error('[Ollama IPC] Failed to get models:', error);
            return { success: false, error: error.message };
        }
    });

    // Get model suggestions for autocomplete
    ipcMain.handle('ollama:get-model-suggestions', async () => {
        try {
            const suggestions = await ollamaService.getModelSuggestions();
            return { success: true, suggestions };
        } catch (error) {
            console.error('[Ollama IPC] Failed to get model suggestions:', error);
            return { success: false, error: error.message };
        }
    });

    // Pull/install a specific model
    ipcMain.handle('ollama:pull-model', async (event, modelName) => {
        try {
            console.log(`[Ollama IPC] Starting model pull: ${modelName}`);

            // Update DB status to installing
            await ollamaModelRepository.updateInstallStatus(modelName, false, true);

            // Set up progress listener for real-time updates
            const progressHandler = (data) => {
                if (data.model === modelName) {
                    event.sender.send('ollama:pull-progress', data);
                }
            };

            const completeHandler = (data) => {
                if (data.model === modelName) {
                    console.log(`[Ollama IPC] Model ${modelName} pull completed`);
                    // Clean up listeners
                    ollamaService.removeListener('pull-progress', progressHandler);
                    ollamaService.removeListener('pull-complete', completeHandler);
                }
            };

            ollamaService.on('pull-progress', progressHandler);
            ollamaService.on('pull-complete', completeHandler);

            // Pull the model using REST API
            await ollamaService.pullModel(modelName);

            // Update DB status to installed
            await ollamaModelRepository.updateInstallStatus(modelName, true, false);

            console.log(`[Ollama IPC] Model ${modelName} pull successful`);
            return { success: true };
        } catch (error) {
            console.error('[Ollama IPC] Failed to pull model:', error);
            // Reset status on error
            await ollamaModelRepository.updateInstallStatus(modelName, false, false);
            return { success: false, error: error.message };
        }
    });

    // Check if a specific model is installed
    ipcMain.handle('ollama:is-model-installed', async (event, modelName) => {
        try {
            const installed = await ollamaService.isModelInstalled(modelName);
            return { success: true, installed };
        } catch (error) {
            console.error('[Ollama IPC] Failed to check model installation:', error);
            return { success: false, error: error.message };
        }
    });

    // Warm up a specific model
    ipcMain.handle('ollama:warm-up-model', async (event, modelName) => {
        try {
            const success = await ollamaService.warmUpModel(modelName);
            return { success };
        } catch (error) {
            console.error('[Ollama IPC] Failed to warm up model:', error);
            return { success: false, error: error.message };
        }
    });

    // Auto warm-up currently selected model
    ipcMain.handle('ollama:auto-warm-up', async () => {
        try {
            const success = await ollamaService.autoWarmUpSelectedModel();
            return { success };
        } catch (error) {
            console.error('[Ollama IPC] Failed to auto warm-up:', error);
            return { success: false, error: error.message };
        }
    });

    // Get warm-up status for debugging
    ipcMain.handle('ollama:get-warm-up-status', async () => {
        try {
            const status = ollamaService.getWarmUpStatus();
            return { success: true, status };
        } catch (error) {
            console.error('[Ollama IPC] Failed to get warm-up status:', error);
            return { success: false, error: error.message };
        }
    });

    // Shutdown Ollama service manually
    ipcMain.handle('ollama:shutdown', async (event, force = false) => {
        try {
            console.log(`[Ollama IPC] Manual shutdown requested (force: ${force})`);
            const success = await ollamaService.shutdown(force);
            return { success };
        } catch (error) {
            console.error('[Ollama IPC] Failed to shutdown Ollama:', error);
            return { success: false, error: error.message };
        }
    });

    console.log('[Ollama IPC] Handlers registered');
}
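
Every handler in `setupOllamaIpcHandlers` converts failures into a `{ success: false, error }` result instead of letting the rejection propagate across the IPC boundary. On the renderer side that convention can be folded back into ordinary exceptions with a small wrapper (a sketch under that assumption; `invokeOrThrow` is not part of this commit):

```js
// Hypothetical helper: unwrap the { success, error, ...rest } convention
// used by the Ollama and Whisper IPC handlers back into a throwing call.
async function invokeOrThrow(channel, ...args) {
    const { ipcRenderer } = window.require('electron');
    const result = await ipcRenderer.invoke(channel, ...args);
    if (!result?.success) {
        throw new Error(result?.error || `${channel} failed`);
    }
    return result;
}

// e.g. const { models } = await invokeOrThrow('ollama:get-models');
```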

function setupWebDataHandlers() {
    const sessionRepository = require('./common/repositories/session');
    const sttRepository = require('./features/listen/stt/repositories');