diff --git a/src/app/ApiKeyHeader.js b/src/app/ApiKeyHeader.js index 0df4836..39598bd 100644 --- a/src/app/ApiKeyHeader.js +++ b/src/app/ApiKeyHeader.js @@ -248,6 +248,8 @@ export class ApiKeyHeader extends LitElement { width: 100%; text-align: left; } + + /* ────────────────[ GLASS BYPASS ]─────────────── */ :host-context(body.has-glass) .container, :host-context(body.has-glass) .api-input, :host-context(body.has-glass) .provider-select, @@ -260,13 +262,11 @@ export class ApiKeyHeader extends LitElement { backdrop-filter: none !important; } - /* 가상 레이어·그라데이션 테두리 제거 */ :host-context(body.has-glass) .container::after, :host-context(body.has-glass) .action-button::after { display: none !important; } - /* hover/active 때 다시 생기는 배경도 차단 */ :host-context(body.has-glass) .action-button:hover, :host-context(body.has-glass) .provider-select:hover, :host-context(body.has-glass) .close-button:hover { diff --git a/src/app/MainHeader.js b/src/app/MainHeader.js index 3111066..5212fae 100644 --- a/src/app/MainHeader.js +++ b/src/app/MainHeader.js @@ -292,11 +292,12 @@ export class MainHeader extends LitElement { width: 16px; height: 16px; } + + /* ────────────────[ GLASS BYPASS ]─────────────── */ :host-context(body.has-glass) .header, :host-context(body.has-glass) .listen-button, :host-context(body.has-glass) .header-actions, :host-context(body.has-glass) .settings-button { - /* 배경·블러·그림자 전부 제거 */ background: transparent !important; filter: none !important; box-shadow: none !important; @@ -307,7 +308,6 @@ export class MainHeader extends LitElement { border: none !important; } - /* 장식용 before/after 레이어와 버튼 오버레이 비활성화 */ :host-context(body.has-glass) .header::before, :host-context(body.has-glass) .header::after, :host-context(body.has-glass) .listen-button::before, @@ -315,7 +315,6 @@ export class MainHeader extends LitElement { display: none !important; } - /* hover 때 의도치 않게 생기는 배경도 차단 */ :host-context(body.has-glass) .header-actions:hover, :host-context(body.has-glass) .settings-button:hover, :host-context(body.has-glass) .listen-button:hover::before { @@ -330,7 +329,6 @@ export class MainHeader extends LitElement { box-shadow: none !important; } - /* 2) pill 형태·아이콘 박스 둥근 모서리 평면화 (선택) */ :host-context(body.has-glass) .header, :host-context(body.has-glass) .listen-button, :host-context(body.has-glass) .header-actions, diff --git a/src/app/PermissionHeader.js b/src/app/PermissionHeader.js index 85b55bf..9dc9d5e 100644 --- a/src/app/PermissionHeader.js +++ b/src/app/PermissionHeader.js @@ -237,6 +237,8 @@ export class PermissionHeader extends LitElement { background: rgba(255, 255, 255, 0.2); cursor: not-allowed; } + + /* ────────────────[ GLASS BYPASS ]─────────────── */ :host-context(body.has-glass) .container, :host-context(body.has-glass) .action-button, :host-context(body.has-glass) .continue-button, @@ -248,14 +250,12 @@ export class PermissionHeader extends LitElement { backdrop-filter: none !important; } - /* Remove gradient borders / pseudo layers */ :host-context(body.has-glass) .container::after, :host-context(body.has-glass) .action-button::after, :host-context(body.has-glass) .continue-button::after { display: none !important; } - /* Prevent background reappearing on hover/active */ :host-context(body.has-glass) .action-button:hover, :host-context(body.has-glass) .continue-button:hover, :host-context(body.has-glass) .close-button:hover { diff --git a/src/app/PickleGlassApp.js b/src/app/PickleGlassApp.js index d10c55a..6cf1666 100644 --- a/src/app/PickleGlassApp.js +++ b/src/app/PickleGlassApp.js 
@@ -1,9 +1,9 @@ import { html, css, LitElement } from '../assets/lit-core-2.7.4.min.js'; -import { CustomizeView } from '../features/customize/CustomizeView.js'; +import { SettingsView } from '../features/settings/SettingsView.js'; import { AssistantView } from '../features/listen/AssistantView.js'; import { AskView } from '../features/ask/AskView.js'; -import '../features/listen/renderer.js'; +import '../features/listen/renderer/renderer.js'; export class PickleGlassApp extends LitElement { static styles = css` @@ -22,7 +22,7 @@ export class PickleGlassApp extends LitElement { height: 100%; } - ask-view, customize-view, history-view, help-view, onboarding-view, setup-view { + ask-view, settings-view, history-view, help-view, setup-view { display: block; width: 100%; height: 100%; @@ -182,8 +182,8 @@ export class PickleGlassApp extends LitElement { this.isMainViewVisible = !this.isMainViewVisible; } - handleCustomizeClick() { - this.currentView = 'customize'; + handleSettingsClick() { + this.currentView = 'settings'; this.isMainViewVisible = true; } @@ -249,10 +249,6 @@ export class PickleGlassApp extends LitElement { this.currentResponseIndex = e.detail.index; } - handleOnboardingComplete() { - this.currentView = 'main'; - } - render() { switch (this.currentView) { case 'listen': @@ -265,19 +261,17 @@ export class PickleGlassApp extends LitElement { >`; case 'ask': return html``; - case 'customize': - return html` (this.selectedProfile = profile)} .onLanguageChange=${lang => (this.selectedLanguage = lang)} - >`; + >`; case 'history': return html``; case 'help': return html``; - case 'onboarding': - return html``; case 'setup': return html``; default: diff --git a/src/common/ai/factory.js b/src/common/ai/factory.js new file mode 100644 index 0000000..05a6780 --- /dev/null +++ b/src/common/ai/factory.js @@ -0,0 +1,67 @@ +const providers = { + openai: require('./providers/openai'), + gemini: require('./providers/gemini'), + // Register additional providers here +}; + +/** + * Creates an STT session based on provider + * @param {string} provider - Provider name ('openai', 'gemini', etc.) + * @param {object} opts - Configuration options (apiKey, language, callbacks, etc.) + * @returns {Promise} STT session object with sendRealtimeInput and close methods + */ +function createSTT(provider, opts) { + if (!providers[provider]?.createSTT) { + throw new Error(`STT not supported for provider: ${provider}`); + } + return providers[provider].createSTT(opts); +} + +/** + * Creates an LLM instance based on provider + * @param {string} provider - Provider name ('openai', 'gemini', etc.) + * @param {object} opts - Configuration options (apiKey, model, temperature, etc.) + * @returns {object} LLM instance with generateContent method + */ +function createLLM(provider, opts) { + if (!providers[provider]?.createLLM) { + throw new Error(`LLM not supported for provider: ${provider}`); + } + return providers[provider].createLLM(opts); + } + +/** + * Creates a streaming LLM instance based on provider + * @param {string} provider - Provider name ('openai', 'gemini', etc.) + * @param {object} opts - Configuration options (apiKey, model, temperature, etc.)
+ * @returns {object} Streaming LLM instance + */ +function createStreamingLLM(provider, opts) { + if (!providers[provider]?.createStreamingLLM) { + throw new Error(`Streaming LLM not supported for provider: ${provider}`); + } + return providers[provider].createStreamingLLM(opts); +} + +/** + * Gets list of available providers + * @returns {object} Object with stt and llm arrays + */ +function getAvailableProviders() { + const sttProviders = []; + const llmProviders = []; + + for (const [name, provider] of Object.entries(providers)) { + if (provider.createSTT) sttProviders.push(name); + if (provider.createLLM) llmProviders.push(name); + } + + return { stt: sttProviders, llm: llmProviders }; +} + +module.exports = { + createSTT, + createLLM, + createStreamingLLM, + getAvailableProviders +}; \ No newline at end of file diff --git a/src/common/ai/providers/gemini.js b/src/common/ai/providers/gemini.js new file mode 100644 index 0000000..a2284a1 --- /dev/null +++ b/src/common/ai/providers/gemini.js @@ -0,0 +1,310 @@ +const { GoogleGenerativeAI } = require('@google/generative-ai'); +const { GoogleGenAI } = require('@google/genai'); + +/** + * Creates a Gemini STT session + * @param {object} opts - Configuration options + * @param {string} opts.apiKey - Gemini API key + * @param {string} [opts.language='en-US'] - Language code + * @param {object} [opts.callbacks] - Event callbacks + * @returns {Promise} STT session + */ +async function createSTT({ apiKey, language = 'en-US', callbacks = {}, ...config }) { + const liveClient = new GoogleGenAI({ vertexai: false, apiKey }); + + // Language code BCP-47 conversion + const lang = language.includes('-') ? language : `${language}-US`; + + const session = await liveClient.live.connect({ + model: 'gemini-live-2.5-flash-preview', + callbacks, + config: { + inputAudioTranscription: {}, + speechConfig: { languageCode: lang }, + }, + }); + + return { + sendRealtimeInput: async payload => session.sendRealtimeInput(payload), + close: async () => session.close(), + }; +} + +/** + * Creates a Gemini LLM instance + * @param {object} opts - Configuration options + * @param {string} opts.apiKey - Gemini API key + * @param {string} [opts.model='gemini-2.5-flash'] - Model name + * @param {number} [opts.temperature=0.7] - Temperature + * @param {number} [opts.maxTokens=8192] - Max tokens + * @returns {object} LLM instance + */ +function createLLM({ apiKey, model = 'gemini-2.5-flash', temperature = 0.7, maxTokens = 8192, ...config }) { + const client = new GoogleGenerativeAI(apiKey); + + return { + generateContent: async (parts) => { + const geminiModel = client.getGenerativeModel({ model: model }); + + let systemPrompt = ''; + let userContent = []; + + for (const part of parts) { + if (typeof part === 'string') { + if (systemPrompt === '' && part.includes('You are')) { + systemPrompt = part; + } else { + userContent.push(part); + } + } else if (part.inlineData) { + // Convert base64 image data to Gemini format + userContent.push({ + inlineData: { + mimeType: part.inlineData.mimeType, + data: part.inlineData.data + } + }); + } + } + + // Prepare content array + const content = []; + + // Add system instruction if present + if (systemPrompt) { + // For Gemini, we'll prepend system prompt to user content + content.push(systemPrompt + '\n\n' + userContent[0]); + content.push(...userContent.slice(1)); + } else { + content.push(...userContent); + } + + try { + const result = await geminiModel.generateContent(content); + const response = await result.response; + + return { + 
response: { + text: () => response.text() + } + }; + } catch (error) { + console.error('Gemini API error:', error); + throw error; + } + }, + + // For compatibility with chat-style interfaces + chat: async (messages) => { + // Extract system instruction if present + let systemInstruction = ''; + const history = []; + let lastMessage; + + messages.forEach((msg, index) => { + if (msg.role === 'system') { + systemInstruction = msg.content; + return; + } + + // Gemini's history format + const role = msg.role === 'user' ? 'user' : 'model'; + + if (index === messages.length - 1) { + lastMessage = msg; + } else { + history.push({ role, parts: [{ text: msg.content }] }); + } + }); + + const geminiModel = client.getGenerativeModel({ + model: model, + systemInstruction: systemInstruction + }); + + const chat = geminiModel.startChat({ + history: history, + generationConfig: { + temperature: temperature, + maxOutputTokens: maxTokens, + } + }); + + // Get the last user message content + let content = lastMessage.content; + + // Handle multimodal content for the last message + if (Array.isArray(content)) { + const geminiContent = []; + for (const part of content) { + if (typeof part === 'string') { + geminiContent.push(part); + } else if (part.type === 'text') { + geminiContent.push(part.text); + } else if (part.type === 'image_url' && part.image_url) { + // Convert base64 image to Gemini format + const base64Data = part.image_url.url.split(',')[1]; + geminiContent.push({ + inlineData: { + mimeType: 'image/png', + data: base64Data + } + }); + } + } + content = geminiContent; + } + + const result = await chat.sendMessage(content); + const response = await result.response; + return { + content: response.text(), + raw: result + }; + } + }; +} + +/** + * Creates a Gemini streaming LLM instance + * @param {object} opts - Configuration options + * @param {string} opts.apiKey - Gemini API key + * @param {string} [opts.model='gemini-2.5-flash'] - Model name + * @param {number} [opts.temperature=0.7] - Temperature + * @param {number} [opts.maxTokens=8192] - Max tokens + * @returns {object} Streaming LLM instance + */ +function createStreamingLLM({ apiKey, model = 'gemini-2.5-flash', temperature = 0.7, maxTokens = 8192, ...config }) { + const client = new GoogleGenerativeAI(apiKey); + + return { + streamChat: async (messages) => { + console.log('[Gemini Provider] Starting streaming request'); + + // Extract system instruction if present + let systemInstruction = ''; + const nonSystemMessages = []; + + for (const msg of messages) { + if (msg.role === 'system') { + systemInstruction = msg.content; + } else { + nonSystemMessages.push(msg); + } + } + + const geminiModel = client.getGenerativeModel({ + model: model, + systemInstruction: systemInstruction || undefined + }); + + const chat = geminiModel.startChat({ + history: [], + generationConfig: { + temperature, + maxOutputTokens: maxTokens || 8192, + } + }); + + // Create a ReadableStream to handle Gemini's streaming + const stream = new ReadableStream({ + async start(controller) { + try { + console.log('[Gemini Provider] Processing messages:', nonSystemMessages.length, 'messages (excluding system)'); + + // Get the last user message + const lastMessage = nonSystemMessages[nonSystemMessages.length - 1]; + let lastUserMessage = lastMessage.content; + + // Handle case where content might be an array (multimodal) + if (Array.isArray(lastUserMessage)) { + // Extract text content from array + const textParts = lastUserMessage.filter(part => + typeof part === 'string' || 
(part && part.type === 'text') + ); + lastUserMessage = textParts.map(part => + typeof part === 'string' ? part : part.text + ).join(' '); + } + + console.log('[Gemini Provider] Sending message to Gemini:', + typeof lastUserMessage === 'string' ? lastUserMessage.substring(0, 100) + '...' : 'multimodal content'); + + // Prepare the message content for Gemini + let geminiContent = []; + + // Handle multimodal content properly + if (Array.isArray(lastMessage.content)) { + for (const part of lastMessage.content) { + if (typeof part === 'string') { + geminiContent.push(part); + } else if (part.type === 'text') { + geminiContent.push(part.text); + } else if (part.type === 'image_url' && part.image_url) { + // Convert base64 image to Gemini format + const base64Data = part.image_url.url.split(',')[1]; + geminiContent.push({ + inlineData: { + mimeType: 'image/png', + data: base64Data + } + }); + } + } + } else { + geminiContent = [lastUserMessage]; + } + + console.log('[Gemini Provider] Prepared Gemini content:', + geminiContent.length, 'parts'); + + // Stream the response + let chunkCount = 0; + let totalContent = ''; + + for await (const chunk of chat.sendMessageStream(geminiContent)) { + chunkCount++; + const chunkText = chunk.text() || ''; + totalContent += chunkText; + + // Format as SSE data + const data = JSON.stringify({ + choices: [{ + delta: { + content: chunkText + } + }] + }); + controller.enqueue(new TextEncoder().encode(`data: ${data}\n\n`)); + } + + console.log(`[Gemini Provider] Streamed ${chunkCount} chunks, total length: ${totalContent.length} chars`); + + // Send the final done message + controller.enqueue(new TextEncoder().encode('data: [DONE]\n\n')); + controller.close(); + console.log('[Gemini Provider] Streaming completed successfully'); + } catch (error) { + console.error('[Gemini Provider] Streaming error:', error); + controller.error(error); + } + } + }); + + // Create a Response object with the stream + return new Response(stream, { + headers: { + 'Content-Type': 'text/event-stream', + 'Cache-Control': 'no-cache', + 'Connection': 'keep-alive' + } + }); + } + }; +} + +module.exports = { + createSTT, + createLLM, + createStreamingLLM +}; \ No newline at end of file diff --git a/src/common/ai/providers/openai.js b/src/common/ai/providers/openai.js new file mode 100644 index 0000000..a27c547 --- /dev/null +++ b/src/common/ai/providers/openai.js @@ -0,0 +1,255 @@ +const OpenAI = require('openai'); +const WebSocket = require('ws'); + +/** + * Creates an OpenAI STT session + * @param {object} opts - Configuration options + * @param {string} opts.apiKey - OpenAI API key + * @param {string} [opts.language='en'] - Language code + * @param {object} [opts.callbacks] - Event callbacks + * @param {boolean} [opts.usePortkey=false] - Whether to use Portkey + * @param {string} [opts.portkeyVirtualKey] - Portkey virtual key + * @returns {Promise} STT session + */ +async function createSTT({ apiKey, language = 'en', callbacks = {}, usePortkey = false, portkeyVirtualKey, ...config }) { + const keyType = usePortkey ? 'vKey' : 'apiKey'; + const key = usePortkey ? (portkeyVirtualKey || apiKey) : apiKey; + + const wsUrl = keyType === 'apiKey' + ? 'wss://api.openai.com/v1/realtime?intent=transcription' + : 'wss://api.portkey.ai/v1/realtime?intent=transcription'; + + const headers = keyType === 'apiKey' + ? 
{ + 'Authorization': `Bearer ${key}`, + 'OpenAI-Beta': 'realtime=v1', + } + : { + 'x-portkey-api-key': 'gRv2UGRMq6GGLJ8aVEB4e7adIewu', + 'x-portkey-virtual-key': key, + 'OpenAI-Beta': 'realtime=v1', + }; + + const ws = new WebSocket(wsUrl, { headers }); + + return new Promise((resolve, reject) => { + ws.onopen = () => { + console.log("WebSocket session opened."); + + const sessionConfig = { + type: 'transcription_session.update', + session: { + input_audio_format: 'pcm16', + input_audio_transcription: { + model: 'gpt-4o-mini-transcribe', + prompt: config.prompt || '', + language: language || 'en' + }, + turn_detection: { + type: 'server_vad', + threshold: 0.5, + prefix_padding_ms: 50, + silence_duration_ms: 25, + }, + input_audio_noise_reduction: { + type: 'near_field' + } + } + }; + + ws.send(JSON.stringify(sessionConfig)); + + resolve({ + sendRealtimeInput: (audioData) => { + if (ws.readyState === WebSocket.OPEN) { + const message = { + type: 'input_audio_buffer.append', + audio: audioData + }; + ws.send(JSON.stringify(message)); + } + }, + close: () => { + if (ws.readyState === WebSocket.OPEN) { + ws.send(JSON.stringify({ type: 'session.close' })); + ws.close(1000, 'Client initiated close.'); + } + } + }); + }; + + ws.onmessage = (event) => { + const message = JSON.parse(event.data); + if (callbacks && callbacks.onmessage) { + callbacks.onmessage(message); + } + }; + + ws.onerror = (error) => { + console.error('WebSocket error:', error.message); + if (callbacks && callbacks.onerror) { + callbacks.onerror(error); + } + reject(error); + }; + + ws.onclose = (event) => { + console.log(`WebSocket closed: ${event.code} ${event.reason}`); + if (callbacks && callbacks.onclose) { + callbacks.onclose(event); + } + }; + }); +} + +/** + * Creates an OpenAI LLM instance + * @param {object} opts - Configuration options + * @param {string} opts.apiKey - OpenAI API key + * @param {string} [opts.model='gpt-4.1'] - Model name + * @param {number} [opts.temperature=0.7] - Temperature + * @param {number} [opts.maxTokens=2048] - Max tokens + * @param {boolean} [opts.usePortkey=false] - Whether to use Portkey + * @param {string} [opts.portkeyVirtualKey] - Portkey virtual key + * @returns {object} LLM instance + */ +function createLLM({ apiKey, model = 'gpt-4.1', temperature = 0.7, maxTokens = 2048, usePortkey = false, portkeyVirtualKey, ...config }) { + const client = new OpenAI({ apiKey }); + + const callApi = async (messages) => { + if (!usePortkey) { + const response = await client.chat.completions.create({ + model: model, + messages: messages, + temperature: temperature, + max_tokens: maxTokens + }); + return { + content: response.choices[0].message.content.trim(), + raw: response + }; + } else { + const fetchUrl = 'https://api.portkey.ai/v1/chat/completions'; + const response = await fetch(fetchUrl, { + method: 'POST', + headers: { + 'x-portkey-api-key': 'gRv2UGRMq6GGLJ8aVEB4e7adIewu', + 'x-portkey-virtual-key': portkeyVirtualKey || apiKey, + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + model: model, + messages, + temperature, + max_tokens: maxTokens, + }), + }); + + if (!response.ok) { + throw new Error(`Portkey API error: ${response.status} ${response.statusText}`); + } + + const result = await response.json(); + return { + content: result.choices[0].message.content.trim(), + raw: result + }; + } + }; + + return { + generateContent: async (parts) => { + const messages = []; + let systemPrompt = ''; + let userContent = []; + + for (const part of parts) { + if (typeof part === 
'string') { + if (systemPrompt === '' && part.includes('You are')) { + systemPrompt = part; + } else { + userContent.push({ type: 'text', text: part }); + } + } else if (part.inlineData) { + userContent.push({ + type: 'image_url', + image_url: { url: `data:${part.inlineData.mimeType};base64,${part.inlineData.data}` } + }); + } + } + + if (systemPrompt) messages.push({ role: 'system', content: systemPrompt }); + if (userContent.length > 0) messages.push({ role: 'user', content: userContent }); + + const result = await callApi(messages); + + return { + response: { + text: () => result.content + }, + raw: result.raw + }; + }, + + // For compatibility with chat-style interfaces + chat: async (messages) => { + return await callApi(messages); + } + }; +} + +/** + * Creates an OpenAI streaming LLM instance + * @param {object} opts - Configuration options + * @param {string} opts.apiKey - OpenAI API key + * @param {string} [opts.model='gpt-4.1'] - Model name + * @param {number} [opts.temperature=0.7] - Temperature + * @param {number} [opts.maxTokens=2048] - Max tokens + * @param {boolean} [opts.usePortkey=false] - Whether to use Portkey + * @param {string} [opts.portkeyVirtualKey] - Portkey virtual key + * @returns {object} Streaming LLM instance + */ +function createStreamingLLM({ apiKey, model = 'gpt-4.1', temperature = 0.7, maxTokens = 2048, usePortkey = false, portkeyVirtualKey, ...config }) { + return { + streamChat: async (messages) => { + const fetchUrl = usePortkey + ? 'https://api.portkey.ai/v1/chat/completions' + : 'https://api.openai.com/v1/chat/completions'; + + const headers = usePortkey + ? { + 'x-portkey-api-key': 'gRv2UGRMq6GGLJ8aVEB4e7adIewu', + 'x-portkey-virtual-key': portkeyVirtualKey || apiKey, + 'Content-Type': 'application/json', + } + : { + Authorization: `Bearer ${apiKey}`, + 'Content-Type': 'application/json', + }; + + const response = await fetch(fetchUrl, { + method: 'POST', + headers, + body: JSON.stringify({ + model: model, + messages, + temperature, + max_tokens: maxTokens, + stream: true, + }), + }); + + if (!response.ok) { + throw new Error(`OpenAI API error: ${response.status} ${response.statusText}`); + } + + return response; + } + }; +} + +module.exports = { + createSTT, + createLLM, + createStreamingLLM +}; \ No newline at end of file diff --git a/src/common/services/aiProviderService.js b/src/common/services/aiProviderService.js deleted file mode 100644 index fe24e47..0000000 --- a/src/common/services/aiProviderService.js +++ /dev/null @@ -1,377 +0,0 @@ -const { createOpenAiGenerativeClient, getOpenAiGenerativeModel } = require('./openAiClient.js'); -const { createGeminiClient, getGeminiGenerativeModel, createGeminiChat } = require('./googleGeminiClient.js'); - -/** - * Creates an AI client based on the provider - * @param {string} apiKey - The API key - * @param {string} provider - The provider ('openai' or 'gemini') - * @returns {object} The AI client - */ -function createAIClient(apiKey, provider = 'openai') { - switch (provider) { - case 'openai': - return createOpenAiGenerativeClient(apiKey); - case 'gemini': - return createGeminiClient(apiKey); - default: - throw new Error(`Unsupported AI provider: ${provider}`); - } -} - -/** - * Gets a generative model based on the provider - * @param {object} client - The AI client - * @param {string} provider - The provider ('openai' or 'gemini') - * @param {string} model - The model name (optional) - * @returns {object} The model object - */ -function getGenerativeModel(client, provider = 'openai', model) { - switch 
(provider) { - case 'openai': - return getOpenAiGenerativeModel(client, model || 'gpt-4.1'); - case 'gemini': - return getGeminiGenerativeModel(client, model || 'gemini-2.5-flash'); - default: - throw new Error(`Unsupported AI provider: ${provider}`); - } -} - -/** - * Makes a chat completion request based on the provider - * @param {object} params - Request parameters - * @returns {Promise} The completion response - */ -async function makeChatCompletion({ apiKey, provider = 'openai', messages, temperature = 0.7, maxTokens = 1024, model, stream = false }) { - if (provider === 'openai') { - const fetchUrl = 'https://api.openai.com/v1/chat/completions'; - const response = await fetch(fetchUrl, { - method: 'POST', - headers: { - Authorization: `Bearer ${apiKey}`, - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - model: model || 'gpt-4.1', - messages, - temperature, - max_tokens: maxTokens, - stream, - }), - }); - - if (!response.ok) { - throw new Error(`OpenAI API error: ${response.status} ${response.statusText}`); - } - - if (stream) { - return response; - } - - const result = await response.json(); - return { - content: result.choices[0].message.content.trim(), - raw: result - }; - } else if (provider === 'gemini') { - const client = createGeminiClient(apiKey); - const genModel = getGeminiGenerativeModel(client, model || 'gemini-2.5-flash'); - - // Convert OpenAI format messages to Gemini format - const parts = []; - for (const message of messages) { - if (message.role === 'system') { - parts.push(message.content); - } else if (message.role === 'user') { - if (typeof message.content === 'string') { - parts.push(message.content); - } else if (Array.isArray(message.content)) { - // Handle multimodal content - for (const part of message.content) { - if (part.type === 'text') { - parts.push(part.text); - } else if (part.type === 'image_url' && part.image_url?.url) { - // Extract base64 data from data URL - const base64Match = part.image_url.url.match(/^data:(.+);base64,(.+)$/); - if (base64Match) { - parts.push({ - inlineData: { - mimeType: base64Match[1], - data: base64Match[2] - } - }); - } - } - } - } - } - } - - const result = await genModel.generateContent(parts); - return { - content: result.response.text(), - raw: result - }; - } else { - throw new Error(`Unsupported AI provider: ${provider}`); - } -} - -/** - * Makes a chat completion request with Portkey support - * @param {object} params - Request parameters including Portkey options - * @returns {Promise} The completion response - */ -async function makeChatCompletionWithPortkey({ - apiKey, - provider = 'openai', - messages, - temperature = 0.7, - maxTokens = 1024, - model, - usePortkey = false, - portkeyVirtualKey = null -}) { - if (!usePortkey) { - return makeChatCompletion({ apiKey, provider, messages, temperature, maxTokens, model }); - } - - // Portkey is only supported for OpenAI currently - if (provider !== 'openai') { - console.warn('Portkey is only supported for OpenAI provider, falling back to direct API'); - return makeChatCompletion({ apiKey, provider, messages, temperature, maxTokens, model }); - } - - const fetchUrl = 'https://api.portkey.ai/v1/chat/completions'; - const response = await fetch(fetchUrl, { - method: 'POST', - headers: { - 'x-portkey-api-key': 'gRv2UGRMq6GGLJ8aVEB4e7adIewu', - 'x-portkey-virtual-key': portkeyVirtualKey || apiKey, - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - model: model || 'gpt-4.1', - messages, - temperature, - max_tokens: maxTokens, - }), - 
}); - - if (!response.ok) { - throw new Error(`Portkey API error: ${response.status} ${response.statusText}`); - } - - const result = await response.json(); - return { - content: result.choices[0].message.content.trim(), - raw: result - }; -} - -/** - * Makes a streaming chat completion request - * @param {object} params - Request parameters - * @returns {Promise} The streaming response - */ -async function makeStreamingChatCompletion({ apiKey, provider = 'openai', messages, temperature = 0.7, maxTokens = 1024, model }) { - if (provider === 'openai') { - const fetchUrl = 'https://api.openai.com/v1/chat/completions'; - const response = await fetch(fetchUrl, { - method: 'POST', - headers: { - Authorization: `Bearer ${apiKey}`, - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - model: model || 'gpt-4.1', - messages, - temperature, - max_tokens: maxTokens, - stream: true, - }), - }); - - if (!response.ok) { - throw new Error(`OpenAI API error: ${response.status} ${response.statusText}`); - } - - return response; - } else if (provider === 'gemini') { - console.log('[AIProviderService] Starting Gemini streaming request'); - // Gemini streaming requires a different approach - // We'll create a ReadableStream that mimics OpenAI's SSE format - const geminiClient = createGeminiClient(apiKey); - - // Extract system instruction if present - let systemInstruction = ''; - const nonSystemMessages = []; - - for (const msg of messages) { - if (msg.role === 'system') { - systemInstruction = msg.content; - } else { - nonSystemMessages.push(msg); - } - } - - const chat = createGeminiChat(geminiClient, model || 'gemini-2.0-flash-exp', { - temperature, - maxOutputTokens: maxTokens || 8192, - systemInstruction: systemInstruction || undefined - }); - - // Create a ReadableStream to handle Gemini's streaming - const stream = new ReadableStream({ - async start(controller) { - try { - console.log('[AIProviderService] Processing messages for Gemini:', nonSystemMessages.length, 'messages (excluding system)'); - - // Get the last user message - const lastMessage = nonSystemMessages[nonSystemMessages.length - 1]; - let lastUserMessage = lastMessage.content; - - // Handle case where content might be an array (multimodal) - if (Array.isArray(lastUserMessage)) { - // Extract text content from array - const textParts = lastUserMessage.filter(part => - typeof part === 'string' || (part && part.type === 'text') - ); - lastUserMessage = textParts.map(part => - typeof part === 'string' ? part : part.text - ).join(' '); - } - - console.log('[AIProviderService] Sending message to Gemini:', - typeof lastUserMessage === 'string' ? lastUserMessage.substring(0, 100) + '...' 
: 'multimodal content'); - - // Prepare the message content for Gemini - let geminiContent = []; - - // Handle multimodal content properly - if (Array.isArray(lastMessage.content)) { - for (const part of lastMessage.content) { - if (typeof part === 'string') { - geminiContent.push(part); - } else if (part.type === 'text') { - geminiContent.push(part.text); - } else if (part.type === 'image_url' && part.image_url) { - // Convert base64 image to Gemini format - const base64Data = part.image_url.url.split(',')[1]; - geminiContent.push({ - inlineData: { - mimeType: 'image/png', - data: base64Data - } - }); - } - } - } else { - geminiContent = [lastUserMessage]; - } - - console.log('[AIProviderService] Prepared Gemini content:', - geminiContent.length, 'parts'); - - // Stream the response - let chunkCount = 0; - let totalContent = ''; - - for await (const chunk of chat.sendMessageStream(geminiContent)) { - chunkCount++; - const chunkText = chunk.text || ''; - totalContent += chunkText; - - // Format as SSE data - const data = JSON.stringify({ - choices: [{ - delta: { - content: chunkText - } - }] - }); - controller.enqueue(new TextEncoder().encode(`data: ${data}\n\n`)); - } - - console.log(`[AIProviderService] Streamed ${chunkCount} chunks, total length: ${totalContent.length} chars`); - - // Send the final done message - controller.enqueue(new TextEncoder().encode('data: [DONE]\n\n')); - controller.close(); - console.log('[AIProviderService] Gemini streaming completed successfully'); - } catch (error) { - console.error('[AIProviderService] Gemini streaming error:', error); - controller.error(error); - } - } - }); - - // Create a Response object with the stream - return new Response(stream, { - headers: { - 'Content-Type': 'text/event-stream', - 'Cache-Control': 'no-cache', - 'Connection': 'keep-alive' - } - }); - } else { - throw new Error(`Unsupported AI provider: ${provider}`); - } -} - -/** - * Makes a streaming chat completion request with Portkey support - * @param {object} params - Request parameters - * @returns {Promise} The streaming response - */ -async function makeStreamingChatCompletionWithPortkey({ - apiKey, - provider = 'openai', - messages, - temperature = 0.7, - maxTokens = 1024, - model, - usePortkey = false, - portkeyVirtualKey = null -}) { - if (!usePortkey) { - return makeStreamingChatCompletion({ apiKey, provider, messages, temperature, maxTokens, model }); - } - - // Portkey is only supported for OpenAI currently - if (provider !== 'openai') { - console.warn('Portkey is only supported for OpenAI provider, falling back to direct API'); - return makeStreamingChatCompletion({ apiKey, provider, messages, temperature, maxTokens, model }); - } - - const fetchUrl = 'https://api.portkey.ai/v1/chat/completions'; - const response = await fetch(fetchUrl, { - method: 'POST', - headers: { - 'x-portkey-api-key': 'gRv2UGRMq6GGLJ8aVEB4e7adIewu', - 'x-portkey-virtual-key': portkeyVirtualKey || apiKey, - 'Content-Type': 'application/json', - }, - body: JSON.stringify({ - model: model || 'gpt-4.1', - messages, - temperature, - max_tokens: maxTokens, - stream: true, - }), - }); - - if (!response.ok) { - throw new Error(`Portkey API error: ${response.status} ${response.statusText}`); - } - - return response; -} - -module.exports = { - createAIClient, - getGenerativeModel, - makeChatCompletion, - makeChatCompletionWithPortkey, - makeStreamingChatCompletion, - makeStreamingChatCompletionWithPortkey -}; \ No newline at end of file diff --git a/src/common/services/googleGeminiClient.js 
b/src/common/services/googleGeminiClient.js deleted file mode 100644 index 877c82e..0000000 --- a/src/common/services/googleGeminiClient.js +++ /dev/null @@ -1,171 +0,0 @@ -const { GoogleGenerativeAI } = require('@google/generative-ai'); -const { GoogleGenAI } = require('@google/genai'); - -/** - * Creates and returns a Google Gemini client instance for generative AI. - * @param {string} apiKey - The API key for authentication. - * @returns {GoogleGenerativeAI} The initialized Gemini client. - */ -function createGeminiClient(apiKey) { - return new GoogleGenerativeAI(apiKey); -} - -/** - * Gets a Gemini model for text/image generation. - * @param {GoogleGenerativeAI} client - The Gemini client instance. - * @param {string} [model='gemini-2.5-flash'] - The name for the text/vision model. - * @returns {object} Model object with generateContent method - */ -function getGeminiGenerativeModel(client, model = 'gemini-2.5-flash') { - const genAI = client; - const geminiModel = genAI.getGenerativeModel({ model: model }); - - return { - generateContent: async (parts) => { - let systemPrompt = ''; - let userContent = []; - - for (const part of parts) { - if (typeof part === 'string') { - if (systemPrompt === '' && part.includes('You are')) { - systemPrompt = part; - } else { - userContent.push(part); - } - } else if (part.inlineData) { - // Convert base64 image data to Gemini format - userContent.push({ - inlineData: { - mimeType: part.inlineData.mimeType, - data: part.inlineData.data - } - }); - } - } - - // Prepare content array - const content = []; - - // Add system instruction if present - if (systemPrompt) { - // For Gemini, we'll prepend system prompt to user content - content.push(systemPrompt + '\n\n' + userContent[0]); - content.push(...userContent.slice(1)); - } else { - content.push(...userContent); - } - - try { - const result = await geminiModel.generateContent(content); - const response = await result.response; - - return { - response: { - text: () => response.text() - } - }; - } catch (error) { - console.error('Gemini API error:', error); - throw error; - } - } - }; -} - -/** - * Creates a Gemini chat session for multi-turn conversations. - * @param {GoogleGenerativeAI} client - The Gemini client instance. - * @param {string} [model='gemini-2.5-flash'] - The model to use. - * @param {object} [config={}] - Configuration options. 
- * @returns {object} Chat session object - */ -function createGeminiChat(client, model = 'gemini-2.5-flash', config = {}) { - const genAI = client; - const geminiModel = genAI.getGenerativeModel({ - model: model, - systemInstruction: config.systemInstruction - }); - - const chat = geminiModel.startChat({ - history: config.history || [], - generationConfig: { - temperature: config.temperature || 0.7, - maxOutputTokens: config.maxOutputTokens || 8192, - } - }); - - return { - sendMessage: async (message) => { - const result = await chat.sendMessage(message); - const response = await result.response; - return { - text: response.text() - }; - }, - sendMessageStream: async function* (message) { - const result = await chat.sendMessageStream(message); - for await (const chunk of result.stream) { - yield { - text: chunk.text() - }; - } - }, - getHistory: () => chat.getHistory() - }; -} - -// async function connectToGeminiSession(apiKey, { language = 'en-US', callbacks = {} } = {}) { -// const liveClient = new GoogleGenAI({ -// vertexai: false, // Vertex AI 사용 안함 -// apiKey, -// }); - -// // 라이브 STT 세션 열기 -// const session = await liveClient.live.connect({ -// model: 'gemini-live-2.5-flash-preview', -// callbacks, -// config: { -// inputAudioTranscription: {}, // 실시간 STT 필수 -// speechConfig: { languageCode: language }, -// }, -// }); - -// return { -// sendRealtimeInput: async data => session.send({ -// audio: { data, mimeType: 'audio/pcm;rate=24000' } -// }), -// close: async () => session.close(), -// }; -// } - -async function connectToGeminiSession(apiKey, { language = 'en-US', callbacks = {} } = {}) { - // ① 옛날 스타일 helper 재사용 - const liveClient = new GoogleGenAI({ vertexai: false, apiKey }); - - // ② 언어 코드 강제 BCP-47 변환 - const lang = language.includes('-') ? language : `${language}-US`; - - const session = await liveClient.live.connect({ - model: 'gemini-live-2.5-flash-preview', - callbacks, - config: { - inputAudioTranscription: {}, - speechConfig: { languageCode: lang }, - }, - }); - - // ③ SDK 0.5+ : sendRealtimeInput 가 정식 이름 - return { - sendRealtimeInput: async payload => session.sendRealtimeInput(payload), - close: async () => session.close(), - }; - } - - - -module.exports = { - createGeminiClient, - getGeminiGenerativeModel, - createGeminiChat, - connectToGeminiSession, -}; \ No newline at end of file diff --git a/src/common/services/openAiClient.js b/src/common/services/openAiClient.js deleted file mode 100644 index 56a9141..0000000 --- a/src/common/services/openAiClient.js +++ /dev/null @@ -1,177 +0,0 @@ -const OpenAI = require('openai'); -const WebSocket = require('ws'); - -/** - * Creates and returns an OpenAI client instance for STT (Speech-to-Text). - * @param {string} apiKey - The API key for authentication. - * @returns {OpenAI} The initialized OpenAI client. - */ -function createOpenAiClient(apiKey) { - return new OpenAI({ - apiKey: apiKey, - }); -} - -/** - * Creates and returns an OpenAI client instance for text/image generation. - * @param {string} apiKey - The API key for authentication. - * @returns {OpenAI} The initialized OpenAI client. - */ -function createOpenAiGenerativeClient(apiKey) { - return new OpenAI({ - apiKey: apiKey, - }); -} - -/** - * Connects to an OpenAI Realtime WebSocket session for STT. - * @param {string} key - Portkey vKey or OpenAI apiKey. - * @param {object} config - The configuration object for the realtime session. - * @param {'apiKey'|'vKey'} keyType - key type ('apiKey' | 'vKey'). 
- * @returns {Promise} A promise that resolves to the session object with send and close methods. - */ -async function connectToOpenAiSession(key, config, keyType) { - if (keyType !== 'apiKey' && keyType !== 'vKey') { - throw new Error('keyType must be either "apiKey" or "vKey".'); - } - - const wsUrl = keyType === 'apiKey' - ? 'wss://api.openai.com/v1/realtime?intent=transcription' - : 'wss://api.portkey.ai/v1/realtime?intent=transcription'; - - const headers = keyType === 'apiKey' - ? { - 'Authorization': `Bearer ${key}`, - 'OpenAI-Beta' : 'realtime=v1', - } - : { - 'x-portkey-api-key' : 'gRv2UGRMq6GGLJ8aVEB4e7adIewu', - 'x-portkey-virtual-key': key, - 'OpenAI-Beta' : 'realtime=v1', - }; - - const ws = new WebSocket(wsUrl, { headers }); - - return new Promise((resolve, reject) => { - ws.onopen = () => { - console.log("WebSocket session opened."); - - const sessionConfig = { - type: 'transcription_session.update', - session: { - input_audio_format: 'pcm16', - input_audio_transcription: { - model: 'gpt-4o-mini-transcribe', - prompt: config.prompt || '', - language: config.language || 'en' - }, - turn_detection: { - type: 'server_vad', - threshold: 0.5, - prefix_padding_ms: 50, - silence_duration_ms: 25, - }, - input_audio_noise_reduction: { - type: 'near_field' - } - } - }; - - ws.send(JSON.stringify(sessionConfig)); - - resolve({ - sendRealtimeInput: (audioData) => { - if (ws.readyState === WebSocket.OPEN) { - const message = { - type: 'input_audio_buffer.append', - audio: audioData - }; - ws.send(JSON.stringify(message)); - } - }, - close: () => { - if (ws.readyState === WebSocket.OPEN) { - ws.send(JSON.stringify({ type: 'session.close' })); - ws.close(1000, 'Client initiated close.'); - } - } - }); - }; - - ws.onmessage = (event) => { - const message = JSON.parse(event.data); - if (config.callbacks && config.callbacks.onmessage) { - config.callbacks.onmessage(message); - } - }; - - ws.onerror = (error) => { - console.error('WebSocket error:', error.message); - if (config.callbacks && config.callbacks.onerror) { - config.callbacks.onerror(error); - } - reject(error); - }; - - ws.onclose = (event) => { - console.log(`WebSocket closed: ${event.code} ${event.reason}`); - if (config.callbacks && config.callbacks.onclose) { - config.callbacks.onclose(event); - } - }; - }); -} - -/** - * Gets a GPT model for text/image generation. - * @param {OpenAI} client - The OpenAI client instance. - * @param {string} [model='gpt-4.1'] - The name for the text/vision model. 
- * @returns {object} Model object with generateContent method - */ -function getOpenAiGenerativeModel(client, model = 'gpt-4.1') { - return { - generateContent: async (parts) => { - const messages = []; - let systemPrompt = ''; - let userContent = []; - - for (const part of parts) { - if (typeof part === 'string') { - if (systemPrompt === '' && part.includes('You are')) { - systemPrompt = part; - } else { - userContent.push({ type: 'text', text: part }); - } - } else if (part.inlineData) { - userContent.push({ - type: 'image_url', - image_url: { url: `data:${part.inlineData.mimeType};base64,${part.inlineData.data}` } - }); - } - } - - if (systemPrompt) messages.push({ role: 'system', content: systemPrompt }); - if (userContent.length > 0) messages.push({ role: 'user', content: userContent }); - - const response = await client.chat.completions.create({ - model: model, - messages: messages, - temperature: 0.7, - max_tokens: 2048 - }); - - return { - response: { - text: () => response.choices[0].message.content - } - }; - } - }; -} - -module.exports = { - createOpenAiClient, - connectToOpenAiSession, - createOpenAiGenerativeClient, - getOpenAiGenerativeModel, -}; \ No newline at end of file diff --git a/src/electron/windowManager.js b/src/electron/windowManager.js index 86ab579..9d74334 100644 --- a/src/electron/windowManager.js +++ b/src/electron/windowManager.js @@ -13,14 +13,18 @@ const systemSettingsRepository = require('../common/repositories/systemSettings' const userRepository = require('../common/repositories/user'); const fetch = require('node-fetch'); + +/* ────────────────[ GLASS BYPASS ]─────────────── */ const isLiquidGlassSupported = () => { if (process.platform !== 'darwin') { return false; } const majorVersion = parseInt(os.release().split('.')[0], 10); - return majorVersion >= 26; // macOS 26+ (Darwin 25+) + // return majorVersion >= 25; // macOS 26+ (Darwin 25+) + return majorVersion >= 26; // See you soon! 
}; const shouldUseLiquidGlass = isLiquidGlassSupported(); +/* ────────────────[ GLASS BYPASS ]─────────────── */ let isContentProtectionOn = true; let currentDisplayId = null; @@ -139,11 +143,11 @@ function createFeatureWindows(header) { windowPool.set('ask', ask); // settings - const settings = new BrowserWindow({ ...commonChildOptions, width:240, maxHeight:350, parent:undefined }); + const settings = new BrowserWindow({ ...commonChildOptions, width:240, maxHeight:400, parent:undefined }); settings.setContentProtection(isContentProtectionOn); settings.setVisibleOnAllWorkspaces(true,{visibleOnFullScreen:true}); settings.setWindowButtonVisibility(false); - const settingsLoadOptions = { query: { view: 'customize' } }; + const settingsLoadOptions = { query: { view: 'settings' } }; if (!shouldUseLiquidGlass) { settings.loadFile(path.join(__dirname,'../app/content.html'), settingsLoadOptions) .catch(console.error); @@ -379,10 +383,10 @@ function createWindows() { if (windowToToggle) { if (featureName === 'listen') { - const liveSummaryService = require('../features/listen/liveSummaryService'); - if (liveSummaryService.isSessionActive()) { + const listenService = global.listenService; + if (listenService && listenService.isSessionActive()) { console.log('[WindowManager] Listen session is active, closing it via toggle.'); - await liveSummaryService.closeSession(); + await listenService.closeSession(); return; } } diff --git a/src/features/ask/AskView.js b/src/features/ask/AskView.js index 9f125a3..9565194 100644 --- a/src/features/ask/AskView.js +++ b/src/features/ask/AskView.js @@ -590,6 +590,8 @@ export class AskView extends LitElement { color: rgba(255, 255, 255, 0.5); font-size: 14px; } + + /* ────────────────[ GLASS BYPASS ]─────────────── */ :host-context(body.has-glass) .ask-container, :host-context(body.has-glass) .response-header, :host-context(body.has-glass) .response-icon, @@ -608,12 +610,10 @@ export class AskView extends LitElement { backdrop-filter: none !important; } - /* ask-container 의 블러·그림자 레이어 제거 */ :host-context(body.has-glass) .ask-container::before { display: none !important; } - /* hover/active 때 다시 생기는 배경도 차단 */ :host-context(body.has-glass) .copy-button:hover, :host-context(body.has-glass) .close-button:hover, :host-context(body.has-glass) .line-copy-button, @@ -622,7 +622,6 @@ export class AskView extends LitElement { background: transparent !important; } - /* 스크롤바 트랙·썸 마저 투명화 (원할 경우) */ :host-context(body.has-glass) .response-container::-webkit-scrollbar-track, :host-context(body.has-glass) .response-container::-webkit-scrollbar-thumb { background: transparent !important; diff --git a/src/features/ask/askService.js b/src/features/ask/askService.js index 04c64d9..03e4ed0 100644 --- a/src/features/ask/askService.js +++ b/src/features/ask/askService.js @@ -1,179 +1,22 @@ const { ipcMain, BrowserWindow } = require('electron'); -const { makeStreamingChatCompletionWithPortkey } = require('../../common/services/aiProviderService'); -const { getConversationHistory } = require('../listen/liveSummaryService'); +const { createStreamingLLM } = require('../../common/ai/factory'); const { getStoredApiKey, getStoredProvider, windowPool, captureScreenshot } = require('../../electron/windowManager'); const authService = require('../../common/services/authService'); const sessionRepository = require('../../common/repositories/session'); const askRepository = require('./repositories'); - -const PICKLE_GLASS_SYSTEM_PROMPT = ` -You are Pickle-Glass, developed and created by Pickle-Glass, 
and you are the user's live-meeting co-pilot. - - - -Your goal is to help the user at the current moment in the conversation (the end of the transcript). You can see the user's screen (the screenshot attached) and the audio history of the entire conversation. -Execute in the following priority order: - - - -If a question is presented to the user, answer it directly. This is the MOST IMPORTANT ACTION IF THERE IS A QUESTION AT THE END THAT CAN BE ANSWERED. - - - -Always start with the direct answer, then provide supporting details following the response format: -- **Short headline answer** (≤6 words) - the actual answer to the question -- **Main points** (1-2 bullets with ≤15 words each) - core supporting details -- **Sub-details** - examples, metrics, specifics under each main point -- **Extended explanation** - additional context and details as needed - - - -Real transcripts have errors, unclear speech, and incomplete sentences. Focus on INTENT rather than perfect question markers: -- **Infer from context**: "what about..." "how did you..." "can you..." "tell me..." even if garbled -- **Incomplete questions**: "so the performance..." "and scaling wise..." "what's your approach to..." -- **Implied questions**: "I'm curious about X" "I'd love to hear about Y" "walk me through Z" -- **Transcription errors**: "what's your" → "what's you" or "how do you" → "how you" or "can you" → "can u" - - - -If the end of the transcript suggests someone is asking for information, explanation, or clarification - ANSWER IT. Don't get distracted by earlier content. - - - -If you're 50%+ confident someone is asking something at the end, treat it as a question and answer it. - - - - - -Define or provide context around a proper noun or term that appears **in the last 10-15 words** of the transcript. -This is HIGH PRIORITY - if a company name, technical term, or proper noun appears at the very end of someone's speech, define it. - - - -Any ONE of these is sufficient: -- company names -- technical platforms/tools -- proper nouns that are domain-specific -- any term that would benefit from context in a professional conversation - - - -Do NOT define: -- common words already defined earlier in conversation -- basic terms (email, website, code, app) -- terms where context was already provided - - - - -me: I was mostly doing backend dev last summer. -them: Oh nice, what tech stack were you using? -me: A lot of internal tools, but also some Azure. -them: Yeah I've heard Azure is huge over there. -me: Yeah, I used to work at Microsoft last summer but now I... - - - -**Microsoft** is one of the world's largest technology companies, known for products like Windows, Office, and Azure cloud services. - -- **Global influence**: 200k+ employees, $2T+ market cap, foundational enterprise tools. - - Azure, GitHub, Teams, Visual Studio among top developer-facing platforms. -- **Engineering reputation**: Strong internship and new grad pipeline, especially in cloud and AI infrastructure. - - - - - - -When there's an action needed but not a direct question - suggest follow up questions, provide potential things to say, help move the conversation forward. - - -- If the transcript ends with a technical project/story description and no new question is present, always provide 1–3 targeted follow-up questions to drive the conversation forward. 
-- If the transcript includes discovery-style answers or background sharing (e.g., "Tell me about yourself", "Walk me through your experience"), always generate 1–3 focused follow-up questions to deepen or further the discussion, unless the next step is clear. -- Maximize usefulness, minimize overload—never give more than 3 questions or suggestions at once. - - - -me: Tell me about your technical experience. -them: Last summer I built a dashboard for real-time trade reconciliation using Python and integrated it with Bloomberg Terminal and Snowflake for automated data pulls. - - -Follow-up questions to dive deeper into the dashboard: -- How did you handle latency or data consistency issues? -- What made the Bloomberg integration challenging? -- Did you measure the impact on operational efficiency? - - - - - - -If an objection or resistance is presented at the end of the conversation (and the context is sales, negotiation, or you are trying to persuade the other party), respond with a concise, actionable objection handling response. -- Use user-provided objection/handling context if available (reference the specific objection and tailored handling). -- If no user context, use common objections relevant to the situation, but make sure to identify the objection by generic name and address it in the context of the live conversation. -- State the objection in the format: **Objection: [Generic Objection Name]** (e.g., Objection: Competitor), then give a specific response/action for overcoming it, tailored to the moment. -- Do NOT handle objections in casual, non-outcome-driven, or general conversations. -- Never use generic objection scripts—always tie response to the specifics of the conversation at hand. - - - - -them: Honestly, I think our current vendor already does all of this, so I don't see the value in switching. - - -- **Objection: Competitor** - - Current vendor already covers this. - - Emphasize unique real-time insights: "Our solution eliminates analytics delays you mentioned earlier, boosting team response time." - - - - - - -Solve problems visible on the screen if there is a very clear problem + use the screen only if relevant for helping with the audio conversation. - - - - -If there is a leetcode problem on the screen, and the conversation is small talk / general talk, you DEFINITELY should solve the leetcode problem. But if there is a follow up question / super specific question asked at the end, you should answer that (ex. What's the runtime complexity), using the screen as additional context. - - - - - - - - -Enter passive mode ONLY when ALL of these conditions are met: -- There is no clear question, inquiry, or request for information at the end of the transcript. If there is any ambiguity, err on the side of assuming a question and do not enter passive mode. -- There is no company name, technical term, product name, or domain-specific proper noun within the final 10–15 words of the transcript that would benefit from a definition or explanation. -- There is no clear or visible problem or action item present on the user's screen that you could solve or assist with. -- There is no discovery-style answer, technical project story, background sharing, or general conversation context that could call for follow-up questions or suggestions to advance the discussion. 
-- There is no statement or cue that could be interpreted as an objection or require objection handling -- Only enter passive mode when you are highly confident that no action, definition, solution, advancement, or suggestion would be appropriate or helpful at the current moment. - - -**Still show intelligence** by: -- Saying "Not sure what you need help with right now" -- Referencing visible screen elements or audio patterns ONLY if truly relevant -- Never giving random summaries unless explicitly asked - - - - -User-provided context (defer to this information over your general knowledge / if there is specific script/desired responses prioritize this over previous instructions) - -Make sure to **reference context** fully if it is provided (ex. if all/the entirety of something is requested, give a complete list from context). ----------- - -{{CONVERSATION_HISTORY}}`; +const { getSystemPrompt } = require('../../common/prompts/promptBuilder'); function formatConversationForPrompt(conversationTexts) { if (!conversationTexts || conversationTexts.length === 0) return 'No conversation history available.'; return conversationTexts.slice(-30).join('\n'); } +// Access conversation history via the global listenService instance created in index.js +function getConversationHistory() { + const listenService = global.listenService; + return listenService ? listenService.getConversationHistory() : []; +} + async function sendMessage(userPrompt) { if (!userPrompt || userPrompt.trim().length === 0) { console.warn('[AskService] Cannot process empty message'); @@ -194,7 +37,7 @@ async function sendMessage(userPrompt) { const conversationHistoryRaw = getConversationHistory(); const conversationHistory = formatConversationForPrompt(conversationHistoryRaw); - const systemPrompt = PICKLE_GLASS_SYSTEM_PROMPT.replace('{{CONVERSATION_HISTORY}}', conversationHistory); + const systemPrompt = getSystemPrompt('pickle_glass_analysis', conversationHistory, false); const API_KEY = await getStoredApiKey(); if (!API_KEY) { @@ -220,21 +63,20 @@ async function sendMessage(userPrompt) { const provider = await getStoredProvider(); const { isLoggedIn } = authService.getCurrentUser(); - const usePortkey = isLoggedIn && provider === 'openai'; console.log(`[AskService] 🚀 Sending request to ${provider} AI...`); - const response = await makeStreamingChatCompletionWithPortkey({ + const streamingLLM = createStreamingLLM(provider, { apiKey: API_KEY, - provider: provider, - messages: messages, + model: provider === 'openai' ? 'gpt-4.1' : 'gemini-2.5-flash', temperature: 0.7, maxTokens: 2048, - model: provider === 'openai' ? 'gpt-4.1' : 'gemini-2.5-flash', - usePortkey: usePortkey, - portkeyVirtualKey: usePortkey ? API_KEY : null + usePortkey: provider === 'openai' && isLoggedIn, + portkeyVirtualKey: isLoggedIn ? 
API_KEY : undefined }); + const response = await streamingLLM.streamChat(messages); + // --- Stream Processing --- const reader = response.body.getReader(); const decoder = new TextDecoder(); diff --git a/src/features/customize/CustomizeView.js b/src/features/customize/CustomizeView.js deleted file mode 100644 index ccba875..0000000 --- a/src/features/customize/CustomizeView.js +++ /dev/null @@ -1,1127 +0,0 @@ -import { html, css, LitElement } from '../../assets/lit-core-2.7.4.min.js'; - -export class CustomizeView extends LitElement { - static styles = css` - * { - font-family: 'Helvetica Neue', -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; - cursor: default; - user-select: none; - } - - :host { - display: block; - width: 180px; - height: 100%; - color: white; - } - - .settings-container { - display: flex; - flex-direction: column; - height: 100%; - width: 100%; - background: rgba(20, 20, 20, 0.8); - border-radius: 12px; - outline: 0.5px rgba(255, 255, 255, 0.2) solid; - outline-offset: -1px; - box-sizing: border-box; - position: relative; - overflow-y: auto; - padding: 12px 12px; - z-index: 1000; - } - - .settings-container::-webkit-scrollbar { - width: 6px; - } - - .settings-container::-webkit-scrollbar-track { - background: rgba(255, 255, 255, 0.05); - border-radius: 3px; - } - - .settings-container::-webkit-scrollbar-thumb { - background: rgba(255, 255, 255, 0.2); - border-radius: 3px; - } - - .settings-container::-webkit-scrollbar-thumb:hover { - background: rgba(255, 255, 255, 0.3); - } - - .settings-container::before { - content: ''; - position: absolute; - top: 0; - left: 0; - right: 0; - bottom: 0; - width: 100%; - height: 100%; - background: rgba(0, 0, 0, 0.15); - box-shadow: 0 8px 32px rgba(0, 0, 0, 0.3); - border-radius: 12px; - filter: blur(10px); - z-index: -1; - } - - .settings-button[disabled], - .api-key-section input[disabled] { - opacity: 0.4; - cursor: not-allowed; - pointer-events: none; - } - - .header-section { - display: flex; - justify-content: space-between; - align-items: flex-start; - padding-bottom: 6px; - border-bottom: 1px solid rgba(255, 255, 255, 0.1); - position: relative; - z-index: 1; - } - - .title-line { - display: flex; - justify-content: space-between; - align-items: center; - } - - .app-title { - font-size: 13px; - font-weight: 500; - color: white; - margin: 0 0 4px 0; - } - - .account-info { - font-size: 11px; - color: rgba(255, 255, 255, 0.7); - margin: 0; - } - - .invisibility-icon { - padding-top: 2px; - opacity: 0; - transition: opacity 0.3s ease; - } - - .invisibility-icon.visible { - opacity: 1; - } - - .invisibility-icon svg { - width: 16px; - height: 16px; - } - - .shortcuts-section { - display: flex; - flex-direction: column; - gap: 2px; - padding: 4px 0; - position: relative; - z-index: 1; - } - - .shortcut-item { - display: flex; - justify-content: space-between; - align-items: center; - padding: 4px 0; - color: white; - font-size: 11px; - } - - .shortcut-name { - font-weight: 300; - } - - .shortcut-keys { - display: flex; - align-items: center; - gap: 3px; - } - - .cmd-key, .shortcut-key { - background: rgba(255, 255, 255, 0.1); - // border: 1px solid rgba(255, 255, 255, 0.2); - border-radius: 3px; - width: 16px; - height: 16px; - display: flex; - align-items: center; - justify-content: center; - font-size: 11px; - font-weight: 500; - color: rgba(255, 255, 255, 0.9); - } - - /* Buttons Section */ - .buttons-section { - display: flex; - flex-direction: column; - gap: 4px; - padding-top: 6px; - border-top: 1px solid 
rgba(255, 255, 255, 0.1); - position: relative; - z-index: 1; - flex: 1; - } - - .settings-button { - background: rgba(255, 255, 255, 0.1); - border: 1px solid rgba(255, 255, 255, 0.2); - border-radius: 4px; - color: white; - padding: 5px 10px; - font-size: 11px; - font-weight: 400; - cursor: pointer; - transition: all 0.15s ease; - display: flex; - align-items: center; - justify-content: center; - white-space: nowrap; - } - - .settings-button:hover { - background: rgba(255, 255, 255, 0.15); - border-color: rgba(255, 255, 255, 0.3); - } - - .settings-button:active { - transform: translateY(1px); - } - - .settings-button.full-width { - width: 100%; - } - - .settings-button.half-width { - flex: 1; - } - - .settings-button.danger { - background: rgba(255, 59, 48, 0.1); - border-color: rgba(255, 59, 48, 0.3); - color: rgba(255, 59, 48, 0.9); - } - - .settings-button.danger:hover { - background: rgba(255, 59, 48, 0.15); - border-color: rgba(255, 59, 48, 0.4); - } - - .move-buttons, .bottom-buttons { - display: flex; - gap: 4px; - } - - .api-key-section { - padding: 6px 0; - border-top: 1px solid rgba(255, 255, 255, 0.1); - } - - .api-key-section input { - width: 100%; - background: rgba(0,0,0,0.2); - border: 1px solid rgba(255,255,255,0.2); - color: white; - border-radius: 4px; - padding: 4px; - font-size: 11px; - margin-bottom: 4px; - } - :host-context(body.has-glass) .settings-container, - :host-context(body.has-glass) .settings-button, - :host-context(body.has-glass) .cmd-key, - :host-context(body.has-glass) .shortcut-key, - :host-context(body.has-glass) .api-key-section input { - background: transparent !important; - border: none !important; - outline: none !important; - box-shadow: none !important; - filter: none !important; - backdrop-filter: none !important; - } - - /* 블러·그림자·gradient 레이어 제거 */ - :host-context(body.has-glass) .settings-container::before { - display: none !important; - } - - /* hover/active 시 다시 생기는 배경도 차단 */ - :host-context(body.has-glass) .settings-button:hover, - :host-context(body.has-glass) .shortcut-item:hover, - :host-context(body.has-glass) .settings-button.danger:hover { - background: transparent !important; - border-color: transparent !important; - transform: none !important; - } - - /* 스크롤바 트랙·썸 투명화(선택 사항) */ - :host-context(body.has-glass) .settings-container::-webkit-scrollbar-track, - :host-context(body.has-glass) .settings-container::-webkit-scrollbar-thumb { - background: transparent !important; - } - - `; - - static properties = { - selectedProfile: { type: String }, - selectedLanguage: { type: String }, - selectedScreenshotInterval: { type: String }, - selectedImageQuality: { type: String }, - layoutMode: { type: String }, - keybinds: { type: Object }, - throttleTokens: { type: Number }, - maxTokens: { type: Number }, - throttlePercent: { type: Number }, - googleSearchEnabled: { type: Boolean }, - backgroundTransparency: { type: Number }, - fontSize: { type: Number }, - onProfileChange: { type: Function }, - onLanguageChange: { type: Function }, - onScreenshotIntervalChange: { type: Function }, - onImageQualityChange: { type: Function }, - onLayoutModeChange: { type: Function }, - contentProtection: { type: Boolean }, - userPresets: { type: Array }, - presetTemplates: { type: Array }, - currentUser: { type: String }, - isContentProtectionOn: { type: Boolean }, - firebaseUser: { type: Object, state: true }, - apiKey: { type: String, state: true }, - isLoading: { type: Boolean }, - activeTab: { type: String }, - }; - - constructor() { - super(); - - 
this.selectedProfile = localStorage.getItem('selectedProfile') || 'school'; - - // Language format migration for legacy users - let lang = localStorage.getItem('selectedLanguage') || 'en'; - if (lang.includes('-')) { - const newLang = lang.split('-')[0]; - console.warn(`[Migration] Correcting language format from "${lang}" to "${newLang}".`); - localStorage.setItem('selectedLanguage', newLang); - lang = newLang; - } - this.selectedLanguage = lang; - - this.selectedScreenshotInterval = localStorage.getItem('selectedScreenshotInterval') || '5000'; - this.selectedImageQuality = localStorage.getItem('selectedImageQuality') || '0.8'; - this.layoutMode = localStorage.getItem('layoutMode') || 'stacked'; - this.keybinds = this.getDefaultKeybinds(); - this.throttleTokens = 500; - this.maxTokens = 2000; - this.throttlePercent = 80; - this.backgroundTransparency = 0.5; - this.fontSize = 14; - this.userPresets = []; - this.presetTemplates = []; - this.currentUser = 'default_user'; - this.firebaseUser = null; - this.apiKey = null; - this.isContentProtectionOn = true; - this.isLoading = false; - this.activeTab = 'prompts'; - - this.loadKeybinds(); - this.loadRateLimitSettings(); - this.loadGoogleSearchSettings(); - this.loadBackgroundTransparency(); - this.loadFontSize(); - this.loadContentProtectionSettings(); - this.checkContentProtectionStatus(); - this.getApiKeyFromStorage(); - } - - connectedCallback() { - super.connectedCallback(); - - this.loadLayoutMode(); - this.loadInitialData(); - - this.resizeHandler = () => { - this.requestUpdate(); - this.updateScrollHeight(); - }; - window.addEventListener('resize', this.resizeHandler); - - setTimeout(() => this.updateScrollHeight(), 100); - - this.addEventListener('mouseenter', () => { - if (window.require) { - window.require('electron').ipcRenderer.send('cancel-hide-window', 'settings'); - } - }); - - this.addEventListener('mouseleave', () => { - if (window.require) { - window.require('electron').ipcRenderer.send('hide-window', 'settings'); - } - }); - - if (window.require) { - const { ipcRenderer } = window.require('electron'); - - this._userStateListener = (event, userState) => { - console.log('[CustomizeView] Received user-state-changed:', userState); - if (userState && userState.isLoggedIn) { - this.firebaseUser = userState; - } else { - this.firebaseUser = null; - } - this.getApiKeyFromStorage(); // Also update API key display - this.requestUpdate(); - }; - ipcRenderer.on('user-state-changed', this._userStateListener); - - ipcRenderer.on('api-key-validated', (event, newApiKey) => { - console.log('[CustomizeView] Received api-key-validated, updating state.'); - this.apiKey = newApiKey; - this.requestUpdate(); - }); - - ipcRenderer.on('api-key-updated', () => { - console.log('[CustomizeView] Received api-key-updated, refreshing state.'); - this.getApiKeyFromStorage(); - }); - - ipcRenderer.on('api-key-removed', () => { - console.log('[CustomizeView] Received api-key-removed, clearing state.'); - this.apiKey = null; - this.requestUpdate(); - }); - - this.loadInitialUser(); - } - } - - disconnectedCallback() { - super.disconnectedCallback(); - if (this.resizeHandler) { - window.removeEventListener('resize', this.resizeHandler); - } - - if (window.require) { - const { ipcRenderer } = window.require('electron'); - if (this._userStateListener) { - ipcRenderer.removeListener('user-state-changed', this._userStateListener); - } - ipcRenderer.removeAllListeners('api-key-validated'); - ipcRenderer.removeAllListeners('api-key-updated'); - 
ipcRenderer.removeAllListeners('api-key-removed'); - } - } - - updateScrollHeight() { - const windowHeight = window.innerHeight; - const maxHeight = windowHeight; - - this.style.maxHeight = `${maxHeight}px`; - } - - async checkContentProtectionStatus() { - if (window.require) { - const { ipcRenderer } = window.require('electron'); - this.isContentProtectionOn = await ipcRenderer.invoke('get-content-protection-status'); - this.requestUpdate(); - } - } - - getProfiles() { - if (this.presetTemplates && this.presetTemplates.length > 0) { - return this.presetTemplates.map(t => ({ - value: t.id || t._id, - name: t.title, - description: t.prompt?.slice(0, 60) + '...', - })); - } - - return [ - { value: 'school', name: 'School', description: '' }, - { value: 'meetings', name: 'Meetings', description: '' }, - { value: 'sales', name: 'Sales', description: '' }, - { value: 'recruiting', name: 'Recruiting', description: '' }, - { value: 'customer-support', name: 'Customer Support', description: '' }, - ]; - } - - getLanguages() { - return [ - { value: 'en', name: 'English' }, - { value: 'de', name: 'German' }, - { value: 'es', name: 'Spanish' }, - { value: 'fr', name: 'French' }, - { value: 'hi', name: 'Hindi' }, - { value: 'pt', name: 'Portuguese' }, - { value: 'ar', name: 'Arabic' }, - { value: 'id', name: 'Indonesian' }, - { value: 'it', name: 'Italian' }, - { value: 'ja', name: 'Japanese' }, - { value: 'tr', name: 'Turkish' }, - { value: 'vi', name: 'Vietnamese' }, - { value: 'bn', name: 'Bengali' }, - { value: 'gu', name: 'Gujarati' }, - { value: 'kn', name: 'Kannada' }, - { value: 'ml', name: 'Malayalam' }, - { value: 'mr', name: 'Marathi' }, - { value: 'ta', name: 'Tamil' }, - { value: 'te', name: 'Telugu' }, - { value: 'nl', name: 'Dutch' }, - { value: 'ko', name: 'Korean' }, - { value: 'zh', name: 'Chinese' }, - { value: 'pl', name: 'Polish' }, - { value: 'ru', name: 'Russian' }, - { value: 'th', name: 'Thai' }, - ]; - } - - getProfileNames() { - return { - interview: 'Job Interview', - sales: 'Sales Call', - meeting: 'Business Meeting', - presentation: 'Presentation', - negotiation: 'Negotiation', - }; - } - - handleProfileSelect(e) { - this.selectedProfile = e.target.value; - localStorage.setItem('selectedProfile', this.selectedProfile); - this.onProfileChange(this.selectedProfile); - } - - handleLanguageSelect(e) { - this.selectedLanguage = e.target.value; - localStorage.setItem('selectedLanguage', this.selectedLanguage); - this.onLanguageChange(this.selectedLanguage); - } - - handleScreenshotIntervalSelect(e) { - this.selectedScreenshotInterval = e.target.value; - localStorage.setItem('selectedScreenshotInterval', this.selectedScreenshotInterval); - this.onScreenshotIntervalChange(this.selectedScreenshotInterval); - } - - handleImageQualitySelect(e) { - this.selectedImageQuality = e.target.value; - this.onImageQualityChange(e.target.value); - } - - handleLayoutModeSelect(e) { - this.layoutMode = e.target.value; - localStorage.setItem('layoutMode', this.layoutMode); - this.onLayoutModeChange(e.target.value); - } - - getUserCustomPrompt() { - console.log('[CustomizeView] getUserCustomPrompt called'); - console.log('[CustomizeView] userPresets:', this.userPresets); - console.log('[CustomizeView] selectedProfile:', this.selectedProfile); - - if (!this.userPresets || this.userPresets.length === 0) { - console.log('[CustomizeView] No presets - returning loading message'); - return 'Loading personalized prompt... 
Please set it in the web.'; - } - - let preset = this.userPresets.find(p => p.id === 'personalized' || p._id === 'personalized'); - console.log('[CustomizeView] personalized preset:', preset); - - if (!preset) { - preset = this.userPresets.find(p => p.id === this.selectedProfile || p._id === this.selectedProfile); - console.log('[CustomizeView] selectedProfile preset:', preset); - } - - if (!preset) { - preset = this.userPresets[0]; - console.log('[CustomizeView] Using first preset:', preset); - } - - const result = preset?.prompt || 'No personalized prompt set.'; - console.log('[CustomizeView] Final returned prompt:', result); - return result; - } - - async loadInitialData() { - if (window.require) { - const { ipcRenderer } = window.require('electron'); - try { - this.isLoading = true; - this.userPresets = await ipcRenderer.invoke('get-user-presets'); - this.presetTemplates = await ipcRenderer.invoke('get-preset-templates'); - console.log('[CustomizeView] Loaded presets and templates via IPC'); - } catch (error) { - console.error('[CustomizeView] Failed to load data via IPC:', error); - } finally { - this.isLoading = false; - } - } else { - console.log('[CustomizeView] IPC not available'); - } - } - - getDefaultKeybinds() { - const isMac = window.pickleGlass?.isMacOS || navigator.platform.includes('Mac'); - return { - moveUp: isMac ? 'Cmd+Up' : 'Ctrl+Up', - moveDown: isMac ? 'Cmd+Down' : 'Ctrl+Down', - moveLeft: isMac ? 'Cmd+Left' : 'Ctrl+Left', - moveRight: isMac ? 'Cmd+Right' : 'Ctrl+Right', - toggleVisibility: isMac ? 'Cmd+\\' : 'Ctrl+\\', - toggleClickThrough: isMac ? 'Cmd+M' : 'Ctrl+M', - nextStep: isMac ? 'Cmd+Enter' : 'Ctrl+Enter', - manualScreenshot: isMac ? 'Cmd+Shift+S' : 'Ctrl+Shift+S', - previousResponse: isMac ? 'Cmd+[' : 'Ctrl+[', - nextResponse: isMac ? 'Cmd+]' : 'Ctrl+]', - scrollUp: isMac ? 'Cmd+Shift+Up' : 'Ctrl+Shift+Up', - scrollDown: isMac ? 
'Cmd+Shift+Down' : 'Ctrl+Shift+Down', - }; - } - - loadKeybinds() { - const savedKeybinds = localStorage.getItem('customKeybinds'); - if (savedKeybinds) { - try { - this.keybinds = { ...this.getDefaultKeybinds(), ...JSON.parse(savedKeybinds) }; - } catch (e) { - console.error('Failed to parse saved keybinds:', e); - this.keybinds = this.getDefaultKeybinds(); - } - } - } - - saveKeybinds() { - localStorage.setItem('customKeybinds', JSON.stringify(this.keybinds)); - if (window.require) { - const { ipcRenderer } = window.require('electron'); - ipcRenderer.send('update-keybinds', this.keybinds); - } - } - - handleKeybindChange(action, value) { - this.keybinds = { ...this.keybinds, [action]: value }; - this.saveKeybinds(); - this.requestUpdate(); - } - - resetKeybinds() { - this.keybinds = this.getDefaultKeybinds(); - localStorage.removeItem('customKeybinds'); - this.requestUpdate(); - if (window.require) { - const { ipcRenderer } = window.require('electron'); - ipcRenderer.send('update-keybinds', this.keybinds); - } - } - - getKeybindActions() { - return [ - { - key: 'moveUp', - name: 'Move Window Up', - description: 'Move the application window up', - }, - { - key: 'moveDown', - name: 'Move Window Down', - description: 'Move the application window down', - }, - { - key: 'moveLeft', - name: 'Move Window Left', - description: 'Move the application window left', - }, - { - key: 'moveRight', - name: 'Move Window Right', - description: 'Move the application window right', - }, - { - key: 'toggleVisibility', - name: 'Toggle Window Visibility', - description: 'Show/hide the application window', - }, - { - key: 'toggleClickThrough', - name: 'Toggle Click-through Mode', - description: 'Enable/disable click-through functionality', - }, - { - key: 'nextStep', - name: 'Ask Next Step', - description: 'Ask AI for the next step suggestion', - }, - { - key: 'manualScreenshot', - name: 'Manual Screenshot', - description: 'Take a manual screenshot for AI analysis', - }, - { - key: 'previousResponse', - name: 'Previous Response', - description: 'Navigate to the previous AI response', - }, - { - key: 'nextResponse', - name: 'Next Response', - description: 'Navigate to the next AI response', - }, - { - key: 'scrollUp', - name: 'Scroll Response Up', - description: 'Scroll the AI response content up', - }, - { - key: 'scrollDown', - name: 'Scroll Response Down', - description: 'Scroll the AI response content down', - }, - ]; - } - - handleKeybindFocus(e) { - e.target.placeholder = 'Press key combination...'; - e.target.select(); - } - - handleKeybindInput(e) { - e.preventDefault(); - - const modifiers = []; - const keys = []; - - if (e.ctrlKey) modifiers.push('Ctrl'); - if (e.metaKey) modifiers.push('Cmd'); - if (e.altKey) modifiers.push('Alt'); - if (e.shiftKey) modifiers.push('Shift'); - - let mainKey = e.key; - - switch (e.code) { - case 'ArrowUp': - mainKey = 'Up'; - break; - case 'ArrowDown': - mainKey = 'Down'; - break; - case 'ArrowLeft': - mainKey = 'Left'; - break; - case 'ArrowRight': - mainKey = 'Right'; - break; - case 'Enter': - mainKey = 'Enter'; - break; - case 'Space': - mainKey = 'Space'; - break; - case 'Backslash': - mainKey = '\\'; - break; - case 'KeyS': - if (e.shiftKey) mainKey = 'S'; - break; - case 'KeyM': - mainKey = 'M'; - break; - default: - if (e.key.length === 1) { - mainKey = e.key.toUpperCase(); - } - break; - } - - if (['Control', 'Meta', 'Alt', 'Shift'].includes(e.key)) { - return; - } - - const keybind = [...modifiers, mainKey].join('+'); - - const action = 
e.target.dataset.action; - - this.handleKeybindChange(action, keybind); - - e.target.value = keybind; - e.target.blur(); - } - - loadRateLimitSettings() { - const throttleTokens = localStorage.getItem('throttleTokens'); - const maxTokens = localStorage.getItem('maxTokens'); - const throttlePercent = localStorage.getItem('throttlePercent'); - - if (throttleTokens !== null) { - this.throttleTokens = parseInt(throttleTokens, 10) || 500; - } - if (maxTokens !== null) { - this.maxTokens = parseInt(maxTokens, 10) || 2000; - } - if (throttlePercent !== null) { - this.throttlePercent = parseInt(throttlePercent, 10) || 80; - } - } - - handleThrottleTokensChange(e) { - this.throttleTokens = parseInt(e.target.value, 10); - localStorage.setItem('throttleTokens', this.throttleTokens.toString()); - this.requestUpdate(); - } - - handleMaxTokensChange(e) { - const value = parseInt(e.target.value, 10); - if (!isNaN(value) && value > 0) { - this.maxTokens = value; - localStorage.setItem('maxTokens', this.maxTokens.toString()); - } - } - - handleThrottlePercentChange(e) { - const value = parseInt(e.target.value, 10); - if (!isNaN(value) && value >= 0 && value <= 100) { - this.throttlePercent = value; - localStorage.setItem('throttlePercent', this.throttlePercent.toString()); - } - } - - resetRateLimitSettings() { - this.throttleTokens = 500; - this.maxTokens = 2000; - this.throttlePercent = 80; - - localStorage.removeItem('throttleTokens'); - localStorage.removeItem('maxTokens'); - localStorage.removeItem('throttlePercent'); - - this.requestUpdate(); - } - - loadGoogleSearchSettings() { - const googleSearchEnabled = localStorage.getItem('googleSearchEnabled'); - if (googleSearchEnabled !== null) { - this.googleSearchEnabled = googleSearchEnabled === 'true'; - } - } - - async handleGoogleSearchChange(e) { - this.googleSearchEnabled = e.target.checked; - localStorage.setItem('googleSearchEnabled', this.googleSearchEnabled.toString()); - - if (window.require) { - try { - const { ipcRenderer } = window.require('electron'); - await ipcRenderer.invoke('update-google-search-setting', this.googleSearchEnabled); - } catch (error) { - console.error('Failed to notify main process:', error); - } - } - - this.requestUpdate(); - } - - loadLayoutMode() { - const savedLayoutMode = localStorage.getItem('layoutMode'); - if (savedLayoutMode) { - this.layoutMode = savedLayoutMode; - } - } - - loadBackgroundTransparency() { - const backgroundTransparency = localStorage.getItem('backgroundTransparency'); - if (backgroundTransparency !== null) { - this.backgroundTransparency = parseFloat(backgroundTransparency) || 0.5; - } - this.updateBackgroundTransparency(); - } - - handleBackgroundTransparencyChange(e) { - this.backgroundTransparency = parseFloat(e.target.value); - localStorage.setItem('backgroundTransparency', this.backgroundTransparency.toString()); - this.updateBackgroundTransparency(); - this.requestUpdate(); - } - - updateBackgroundTransparency() { - const root = document.documentElement; - root.style.setProperty('--header-background', `rgba(0, 0, 0, ${this.backgroundTransparency})`); - root.style.setProperty('--main-content-background', `rgba(0, 0, 0, ${this.backgroundTransparency})`); - root.style.setProperty('--card-background', `rgba(255, 255, 255, ${this.backgroundTransparency * 0.05})`); - root.style.setProperty('--input-background', `rgba(0, 0, 0, ${this.backgroundTransparency * 0.375})`); - root.style.setProperty('--input-focus-background', `rgba(0, 0, 0, ${this.backgroundTransparency * 0.625})`); - 
root.style.setProperty('--button-background', `rgba(0, 0, 0, ${this.backgroundTransparency * 0.625})`); - root.style.setProperty('--preview-video-background', `rgba(0, 0, 0, ${this.backgroundTransparency * 1.125})`); - root.style.setProperty('--screen-option-background', `rgba(0, 0, 0, ${this.backgroundTransparency * 0.5})`); - root.style.setProperty('--screen-option-hover-background', `rgba(0, 0, 0, ${this.backgroundTransparency * 0.75})`); - root.style.setProperty('--scrollbar-background', `rgba(0, 0, 0, ${this.backgroundTransparency * 0.5})`); - } - - loadFontSize() { - const fontSize = localStorage.getItem('fontSize'); - if (fontSize !== null) { - this.fontSize = parseInt(fontSize, 10) || 14; - } - this.updateFontSize(); - } - - handleFontSizeChange(e) { - this.fontSize = parseInt(e.target.value, 10); - localStorage.setItem('fontSize', this.fontSize.toString()); - this.updateFontSize(); - this.requestUpdate(); - } - - updateFontSize() { - const root = document.documentElement; - root.style.setProperty('--response-font-size', `${this.fontSize}px`); - } - - loadContentProtectionSettings() { - const contentProtection = localStorage.getItem('contentProtection'); - if (contentProtection !== null) { - this.contentProtection = contentProtection === 'true'; - } - } - - async handleContentProtectionChange(e) { - this.contentProtection = e.target.checked; - localStorage.setItem('contentProtection', this.contentProtection.toString()); - - if (window.require) { - try { - const { ipcRenderer } = window.require('electron'); - await ipcRenderer.invoke('update-content-protection', this.contentProtection); - } catch (error) { - console.error('Failed to notify main process about content protection change:', error); - } - } - - this.requestUpdate(); - } - - render() { - const loggedIn = !!this.firebaseUser; - console.log('[CustomizeView] render: Rendering component template.'); - return html` -
[render() template markup lost in extraction; the surviving fragments of the deleted CustomizeView template show a settings container with a "Pickle Glass" title and account info, an invisibility icon, a shortcuts list built from ${this.getMainShortcuts()} (rendering shortcut.name and shortcut.key), an api-key input referenced elsewhere as #api-key-input, and action buttons whose content branches on ${this.firebaseUser}; a hedged reconstruction of the shortcuts fragment follows]
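The exact element structure is not recoverable, but based on the surviving fragments and the .shortcut-item, .shortcut-name, .shortcut-keys, .cmd-key, and .shortcut-key styles defined earlier in this file, the shortcuts portion of the deleted template was presumably along these lines (the nesting and the ⌘ glyph are assumptions, not confirmed by the source):

    ${this.getMainShortcuts().map(
        shortcut => html`
            <div class="shortcut-item">
                <span class="shortcut-name">${shortcut.name}</span>
                <div class="shortcut-keys">
                    <!-- assumed: a modifier glyph plus the key label from getMainShortcuts() -->
                    <span class="cmd-key">⌘</span>
                    <span class="shortcut-key">${shortcut.key}</span>
                </div>
            </div>
        `
    )}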
- `; - } - - getMainShortcuts() { - return [ - { name: 'Show / Hide', key: '\\' }, - { name: 'Ask Anything', key: '↵' }, - { name: 'Scroll AI Response', key: '↕' } - ]; - } - - handleMoveLeft() { - console.log('Move Left clicked'); - if (window.require) { - const { ipcRenderer } = window.require('electron'); - ipcRenderer.invoke('move-window-step', 'left'); - } - } - - handleMoveRight() { - console.log('Move Right clicked'); - if (window.require) { - const { ipcRenderer } = window.require('electron'); - ipcRenderer.invoke('move-window-step', 'right'); - } - } - - async handlePersonalize() { - console.log('Personalize clicked'); - if (window.require) { - const { ipcRenderer, shell } = window.require('electron'); - try { - const webUrl = await ipcRenderer.invoke('get-web-url'); - shell.openExternal(`${webUrl}/personalize`); - } catch (error) { - console.error('Failed to get web URL or open external link:', error); - shell.openExternal('http://localhost:3000/personalize'); - } - } - } - - async handleToggleInvisibility() { - console.log('Toggle Invisibility clicked'); - if (window.require) { - const { ipcRenderer } = window.require('electron'); - this.isContentProtectionOn = await ipcRenderer.invoke('toggle-content-protection'); - this.requestUpdate(); - } - } - - async handleSaveApiKey() { - const input = this.shadowRoot.getElementById('api-key-input'); - if (!input || !input.value) return; - - const newApiKey = input.value; - if (window.require) { - const { ipcRenderer } = window.require('electron'); - try { - const result = await ipcRenderer.invoke('save-api-key', newApiKey); - if (result.success) { - console.log('API Key saved successfully via IPC.'); - this.apiKey = newApiKey; - this.requestUpdate(); - } else { - console.error('Failed to save API Key via IPC:', result.error); - } - } catch(e) { - console.error('Error invoking save-api-key IPC:', e); - } - } - } - - async handleClearApiKey() { - console.log('Clear API Key clicked'); - if (window.require) { - const { ipcRenderer } = window.require('electron'); - await ipcRenderer.invoke('remove-api-key'); - this.requestUpdate(); - } - } - - handleQuit() { - console.log('Quit clicked'); - if (window.require) { - const { ipcRenderer } = window.require('electron'); - ipcRenderer.invoke('quit-application'); - } - } - - handleFirebaseLogout() { - console.log('Firebase Logout clicked'); - if (window.require) { - const { ipcRenderer } = window.require('electron'); - ipcRenderer.invoke('firebase-logout'); - } - } - - async loadInitialUser() { - if (!window.require) return; - const { ipcRenderer } = window.require('electron'); - try { - console.log('[CustomizeView] Loading initial user state...'); - const userState = await ipcRenderer.invoke('get-current-user'); - if (userState && userState.isLoggedIn) { - this.firebaseUser = userState; - } else { - this.firebaseUser = null; - } - this.requestUpdate(); - } catch (error) { - console.error('[CustomizeView] Failed to load initial user:', error); - this.firebaseUser = null; - this.requestUpdate(); - } - } - - getApiKeyFromStorage() { - if (window.require) { - const { ipcRenderer } = window.require('electron'); - ipcRenderer.invoke('get-stored-api-key').then(key => { - this.apiKey = key; - this.requestUpdate(); - }).catch(error => { - console.log('[CustomizeView] Failed to get API key:', error); - this.apiKey = null; - }); - } - return null; - } -} - -customElements.define('customize-view', CustomizeView); diff --git a/src/features/listen/AssistantView.js b/src/features/listen/AssistantView.js index 
9c76e34..311a11f 100644 --- a/src/features/listen/AssistantView.js +++ b/src/features/listen/AssistantView.js @@ -1,4 +1,6 @@ import { html, css, LitElement } from '../../assets/lit-core-2.7.4.min.js'; +import './stt/SttView.js'; +import './summary/SummaryView.js'; export class AssistantView extends LitElement { static styles = css` @@ -82,73 +84,6 @@ export class AssistantView extends LitElement { user-select: none; } - /* highlight.js 스타일 추가 */ - .insights-container pre { - background: rgba(0, 0, 0, 0.4) !important; - border-radius: 8px !important; - padding: 12px !important; - margin: 8px 0 !important; - overflow-x: auto !important; - border: 1px solid rgba(255, 255, 255, 0.1) !important; - white-space: pre !important; - word-wrap: normal !important; - word-break: normal !important; - } - - .insights-container code { - font-family: 'Monaco', 'Menlo', 'Consolas', monospace !important; - font-size: 11px !important; - background: transparent !important; - white-space: pre !important; - word-wrap: normal !important; - word-break: normal !important; - } - - .insights-container pre code { - white-space: pre !important; - word-wrap: normal !important; - word-break: normal !important; - display: block !important; - } - - .insights-container p code { - background: rgba(255, 255, 255, 0.1) !important; - padding: 2px 4px !important; - border-radius: 3px !important; - color: #ffd700 !important; - } - - .hljs-keyword { - color: #ff79c6 !important; - } - .hljs-string { - color: #f1fa8c !important; - } - .hljs-comment { - color: #6272a4 !important; - } - .hljs-number { - color: #bd93f9 !important; - } - .hljs-function { - color: #50fa7b !important; - } - .hljs-variable { - color: #8be9fd !important; - } - .hljs-built_in { - color: #ffb86c !important; - } - .hljs-title { - color: #50fa7b !important; - } - .hljs-attr { - color: #50fa7b !important; - } - .hljs-tag { - color: #ff79c6 !important; - } - .assistant-container { display: flex; flex-direction: column; @@ -158,8 +93,6 @@ export class AssistantView extends LitElement { background: rgba(0, 0, 0, 0.6); overflow: hidden; border-radius: 12px; - /* outline: 0.5px rgba(255, 255, 255, 0.5) solid; */ - /* outline-offset: -1px; */ width: 100%; height: 100%; } @@ -171,7 +104,7 @@ export class AssistantView extends LitElement { left: 0; right: 0; bottom: 0; - border-radius: 12px; /* Match parent */ + border-radius: 12px; padding: 1px; background: linear-gradient(169deg, rgba(255, 255, 255, 0.17) 0%, rgba(255, 255, 255, 0.08) 50%, rgba(255, 255, 255, 0.17) 100%); -webkit-mask: linear-gradient(#fff 0 0) content-box, linear-gradient(#fff 0 0); @@ -299,8 +232,8 @@ export class AssistantView extends LitElement { height: 24px; flex-shrink: 0; transition: background-color 0.15s ease; - position: relative; /* For icon positioning */ - overflow: hidden; /* Hide overflowing parts of icons during animation */ + position: relative; + overflow: hidden; } .copy-button:hover { @@ -330,218 +263,76 @@ export class AssistantView extends LitElement { transform: translate(-50%, -50%) scale(1); } - .transcription-container { - overflow-y: auto; - padding: 12px 12px 16px 12px; - display: flex; - flex-direction: column; - gap: 8px; - min-height: 150px; - max-height: 600px; - position: relative; - z-index: 1; - flex: 1; - } - - .transcription-container.hidden { - display: none; - } - - .transcription-container::-webkit-scrollbar { - width: 8px; - } - .transcription-container::-webkit-scrollbar-track { - background: rgba(0, 0, 0, 0.1); - border-radius: 4px; - } - 
.transcription-container::-webkit-scrollbar-thumb { - background: rgba(255, 255, 255, 0.3); - border-radius: 4px; - } - .transcription-container::-webkit-scrollbar-thumb:hover { - background: rgba(255, 255, 255, 0.5); - } - - .stt-message { - padding: 8px 12px; - border-radius: 12px; - max-width: 80%; - word-wrap: break-word; - word-break: break-word; - line-height: 1.5; - font-size: 13px; - margin-bottom: 4px; - box-sizing: border-box; - } - - .stt-message.them { - background: rgba(255, 255, 255, 0.1); - color: rgba(255, 255, 255, 0.9); - align-self: flex-start; - border-bottom-left-radius: 4px; - margin-right: auto; - } - - .stt-message.me { - background: rgba(0, 122, 255, 0.8); - color: white; - align-self: flex-end; - border-bottom-right-radius: 4px; - margin-left: auto; - } - - .insights-container { - overflow-y: auto; - padding: 12px 16px 16px 16px; - position: relative; - z-index: 1; - min-height: 150px; - max-height: 600px; - flex: 1; - } - - insights-title { - color: rgba(255, 255, 255, 0.8); - font-size: 15px; - font-weight: 500; - font-family: 'Helvetica Neue', sans-serif; - margin: 12px 0 8px 0; - } - - .insights-container.hidden { - display: none; - } - - .insights-container::-webkit-scrollbar { - width: 8px; - } - .insights-container::-webkit-scrollbar-track { - background: rgba(0, 0, 0, 0.1); - border-radius: 4px; - } - .insights-container::-webkit-scrollbar-thumb { - background: rgba(255, 255, 255, 0.3); - border-radius: 4px; - } - .insights-container::-webkit-scrollbar-thumb:hover { - background: rgba(255, 255, 255, 0.5); - } - - .insights-container h4 { - color: #ffffff; - font-size: 12px; - font-weight: 600; - margin: 12px 0 8px 0; - padding: 4px 8px; - border-radius: 4px; - background: transparent; - cursor: default; - } - - .insights-container h4:hover { - background: transparent; - } - - .insights-container h4:first-child { - margin-top: 0; - } - - .outline-item { - color: #ffffff; - font-size: 11px; - line-height: 1.4; - margin: 4px 0; - padding: 6px 8px; - border-radius: 4px; - background: transparent; - transition: background-color 0.15s ease; - cursor: pointer; - word-wrap: break-word; - } - - .outline-item:hover { - background: rgba(255, 255, 255, 0.1); - } - - .request-item { - color: #ffffff; - font-size: 12px; - line-height: 1.2; - margin: 4px 0; - padding: 6px 8px; - border-radius: 4px; - background: transparent; - cursor: default; - word-wrap: break-word; - transition: background-color 0.15s ease; - } - - .request-item.clickable { - cursor: pointer; - transition: all 0.15s ease; - } - .request-item.clickable:hover { - background: rgba(255, 255, 255, 0.1); - transform: translateX(2px); - } - - /* 마크다운 렌더링된 콘텐츠 스타일 */ - .markdown-content { - color: #ffffff; - font-size: 11px; - line-height: 1.4; - margin: 4px 0; - padding: 6px 8px; - border-radius: 4px; - background: transparent; - cursor: pointer; - word-wrap: break-word; - transition: all 0.15s ease; - } - - .markdown-content:hover { - background: rgba(255, 255, 255, 0.1); - transform: translateX(2px); - } - - .markdown-content p { - margin: 4px 0; - } - - .markdown-content ul, - .markdown-content ol { - margin: 4px 0; - padding-left: 16px; - } - - .markdown-content li { - margin: 2px 0; - } - - .markdown-content a { - color: #8be9fd; - text-decoration: none; - } - - .markdown-content a:hover { - text-decoration: underline; - } - - .markdown-content strong { - font-weight: 600; - color: #f8f8f2; - } - - .markdown-content em { - font-style: italic; - color: #f1fa8c; - } - .timer { font-family: 'Monaco', 
'Menlo', monospace; font-size: 10px; color: rgba(255, 255, 255, 0.7); } + + /* ────────────────[ GLASS BYPASS ]─────────────── */ + :host-context(body.has-glass) .assistant-container, + :host-context(body.has-glass) .top-bar, + :host-context(body.has-glass) .toggle-button, + :host-context(body.has-glass) .copy-button, + :host-context(body.has-glass) .transcription-container, + :host-context(body.has-glass) .insights-container, + :host-context(body.has-glass) .stt-message, + :host-context(body.has-glass) .outline-item, + :host-context(body.has-glass) .request-item, + :host-context(body.has-glass) .markdown-content, + :host-context(body.has-glass) .insights-container pre, + :host-context(body.has-glass) .insights-container p code, + :host-context(body.has-glass) .insights-container pre code { + background: transparent !important; + border: none !important; + outline: none !important; + box-shadow: none !important; + filter: none !important; + backdrop-filter: none !important; + } + + :host-context(body.has-glass) .assistant-container::before, + :host-context(body.has-glass) .assistant-container::after { + display: none !important; + } + + :host-context(body.has-glass) .toggle-button:hover, + :host-context(body.has-glass) .copy-button:hover, + :host-context(body.has-glass) .outline-item:hover, + :host-context(body.has-glass) .request-item.clickable:hover, + :host-context(body.has-glass) .markdown-content:hover { + background: transparent !important; + transform: none !important; + } + + :host-context(body.has-glass) .transcription-container::-webkit-scrollbar-track, + :host-context(body.has-glass) .transcription-container::-webkit-scrollbar-thumb, + :host-context(body.has-glass) .insights-container::-webkit-scrollbar-track, + :host-context(body.has-glass) .insights-container::-webkit-scrollbar-thumb { + background: transparent !important; + } + :host-context(body.has-glass) * { + animation: none !important; + transition: none !important; + transform: none !important; + filter: none !important; + backdrop-filter: none !important; + box-shadow: none !important; + } + + :host-context(body.has-glass) .assistant-container, + :host-context(body.has-glass) .stt-message, + :host-context(body.has-glass) .toggle-button, + :host-context(body.has-glass) .copy-button { + border-radius: 0 !important; + } + + :host-context(body.has-glass) ::-webkit-scrollbar, + :host-context(body.has-glass) ::-webkit-scrollbar-track, + :host-context(body.has-glass) ::-webkit-scrollbar-thumb { + background: transparent !important; + width: 0 !important; /* 스크롤바 자체 숨기기 */ + } :host-context(body.has-glass) .assistant-container, :host-context(body.has-glass) .top-bar, :host-context(body.has-glass) .toggle-button, @@ -563,13 +354,11 @@ export class AssistantView extends LitElement { backdrop-filter: none !important; } - /* 가상 레이어·gradient 테두리 제거 */ :host-context(body.has-glass) .assistant-container::before, :host-context(body.has-glass) .assistant-container::after { display: none !important; } - /* hover 상태에서 생기는 배경도 차단 */ :host-context(body.has-glass) .toggle-button:hover, :host-context(body.has-glass) .copy-button:hover, :host-context(body.has-glass) .outline-item:hover, @@ -579,7 +368,6 @@ export class AssistantView extends LitElement { transform: none !important; } - /* 스크롤바 트랙·썸도 투명화(선택) */ :host-context(body.has-glass) .transcription-container::-webkit-scrollbar-track, :host-context(body.has-glass) .transcription-container::-webkit-scrollbar-thumb, :host-context(body.has-glass) .insights-container::-webkit-scrollbar-track, 
@@ -595,7 +383,6 @@ export class AssistantView extends LitElement { box-shadow: none !important; } - /* 추가: 둥근 모서리와 스크롤바도 평면화하려면 */ :host-context(body.has-glass) .assistant-container, :host-context(body.has-glass) .stt-message, :host-context(body.has-glass) .toggle-button, @@ -607,15 +394,11 @@ export class AssistantView extends LitElement { :host-context(body.has-glass) ::-webkit-scrollbar-track, :host-context(body.has-glass) ::-webkit-scrollbar-thumb { background: transparent !important; - width: 0 !important; /* 스크롤바 자체 숨기기 */ + width: 0 !important; } `; static properties = { - structuredData: { type: Object }, - // outlines: { type: Array }, - // analysisRequests: { type: Array }, - sttMessages: { type: Array }, viewMode: { type: String }, isHovering: { type: Boolean }, isAnimating: { type: Boolean }, @@ -628,183 +411,66 @@ export class AssistantView extends LitElement { constructor() { super(); - // this.outlines = []; - // this.analysisRequests = []; - this.structuredData = { - summary: [], - topic: { header: '', bullets: [] }, - actions: [], - followUps: [], - }; this.isSessionActive = false; this.hasCompletedRecording = false; - this.sttMessages = []; this.viewMode = 'insights'; this.isHovering = false; this.isAnimating = false; this.elapsedTime = '00:00'; this.captureStartTime = null; this.timerInterval = null; - this.resizeObserver = null; this.adjustHeightThrottle = null; this.isThrottled = false; - this._shouldScrollAfterUpdate = false; - this.messageIdCounter = 0; this.copyState = 'idle'; this.copyTimeout = null; - // 마크다운 라이브러리 초기화 - this.marked = null; - this.hljs = null; - this.isLibrariesLoaded = false; - this.DOMPurify = null; - this.isDOMPurifyLoaded = false; - - // --- Debug Utilities --- - this._debug = { - enabled: false, // Set to false to disable debug messages - interval: null, - counter: 1, - }; - this.handleSttUpdate = this.handleSttUpdate.bind(this); this.adjustWindowHeight = this.adjustWindowHeight.bind(this); - - this.loadLibraries(); } - // --- Debug Utilities --- - _startDebugStream() { - if (!this._debug.enabled) return; - - this._debug.interval = setInterval(() => { - const speaker = this._debug.counter % 2 === 0 ? 'You' : 'Other Person'; - const text = `이것은 ${this._debug.counter}번째 자동 생성 메시지입니다. 
UI가 자동으로 조절되는지 확인합니다.`; - - this._debug.counter++; - - this.handleSttUpdate(null, { speaker, text, isFinal: true }); - }, 1000); - } - - _stopDebugStream() { - if (this._debug.interval) { - clearInterval(this._debug.interval); + connectedCallback() { + super.connectedCallback(); + // Only start timer if session is active + if (this.isSessionActive) { + this.startTimer(); } - } + if (window.require) { + const { ipcRenderer } = window.require('electron'); + ipcRenderer.on('session-state-changed', (event, { isActive }) => { + const wasActive = this.isSessionActive; + this.isSessionActive = isActive; - async loadLibraries() { - try { - if (!window.marked) { - await this.loadScript('../../assets/marked-4.3.0.min.js'); - } - - if (!window.hljs) { - await this.loadScript('../../assets/highlight-11.9.0.min.js'); - } - - if (!window.DOMPurify) { - await this.loadScript('../../assets/dompurify-3.0.7.min.js'); - } - - this.marked = window.marked; - this.hljs = window.hljs; - this.DOMPurify = window.DOMPurify; - - if (this.marked && this.hljs) { - this.marked.setOptions({ - highlight: (code, lang) => { - if (lang && this.hljs.getLanguage(lang)) { - try { - return this.hljs.highlight(code, { language: lang }).value; - } catch (err) { - console.warn('Highlight error:', err); - } - } - try { - return this.hljs.highlightAuto(code).value; - } catch (err) { - console.warn('Auto highlight error:', err); - } - return code; - }, - breaks: true, - gfm: true, - pedantic: false, - smartypants: false, - xhtml: false, - }); - - this.isLibrariesLoaded = true; - console.log('Markdown libraries loaded successfully'); - } - - if (this.DOMPurify) { - this.isDOMPurifyLoaded = true; - console.log('DOMPurify loaded successfully in AssistantView'); - } - } catch (error) { - console.error('Failed to load libraries:', error); - } - } - - loadScript(src) { - return new Promise((resolve, reject) => { - const script = document.createElement('script'); - script.src = src; - script.onload = resolve; - script.onerror = reject; - document.head.appendChild(script); - }); - } - - parseMarkdown(text) { - if (!text) return ''; - - if (!this.isLibrariesLoaded || !this.marked) { - return text; - } - - try { - return this.marked(text); - } catch (error) { - console.error('Markdown parsing error:', error); - return text; - } - } - - handleMarkdownClick(originalText) { - this.handleRequestClick(originalText); - } - - renderMarkdownContent() { - if (!this.isLibrariesLoaded || !this.marked) { - return; - } - - const markdownElements = this.shadowRoot.querySelectorAll('[data-markdown-id]'); - markdownElements.forEach(element => { - const originalText = element.getAttribute('data-original-text'); - if (originalText) { - try { - let parsedHTML = this.parseMarkdown(originalText); - - if (this.isDOMPurifyLoaded && this.DOMPurify) { - parsedHTML = this.DOMPurify.sanitize(parsedHTML); - - if (this.DOMPurify.removed && this.DOMPurify.removed.length > 0) { - console.warn('Unsafe content detected in insights, showing plain text'); - element.textContent = '⚠️ ' + originalText; - return; - } - } - - element.innerHTML = parsedHTML; - } catch (error) { - console.error('Error rendering markdown for element:', error); - element.textContent = originalText; + if (!wasActive && isActive) { + this.hasCompletedRecording = false; + this.startTimer(); + // Reset child components + this.updateComplete.then(() => { + const sttView = this.shadowRoot.querySelector('stt-view'); + const summaryView = this.shadowRoot.querySelector('summary-view'); + if (sttView) 
sttView.resetTranscript(); + if (summaryView) summaryView.resetAnalysis(); + }); + this.requestUpdate(); } - } - }); + if (wasActive && !isActive) { + this.hasCompletedRecording = true; + this.stopTimer(); + this.requestUpdate(); + } + }); + } + } + + disconnectedCallback() { + super.disconnectedCallback(); + this.stopTimer(); + + if (this.adjustHeightThrottle) { + clearTimeout(this.adjustHeightThrottle); + this.adjustHeightThrottle = null; + } + if (this.copyTimeout) { + clearTimeout(this.copyTimeout); + } } startTimer() { @@ -833,10 +499,9 @@ export class AssistantView extends LitElement { this.updateComplete .then(() => { const topBar = this.shadowRoot.querySelector('.top-bar'); - const activeContent = - this.viewMode === 'transcript' - ? this.shadowRoot.querySelector('.transcription-container') - : this.shadowRoot.querySelector('.insights-container'); + const activeContent = this.viewMode === 'transcript' + ? this.shadowRoot.querySelector('stt-view') + : this.shadowRoot.querySelector('summary-view'); if (!topBar || !activeContent) return; @@ -875,62 +540,17 @@ export class AssistantView extends LitElement { this.requestUpdate(); } - parseOutlineData() { - const result = { - currentSummary: [], - mainTopicHeading: '', - mainTopicBullets: [], - }; - - if (!this.outlines || this.outlines.length === 0) { - return result; - } - - const allBullets = this.outlines.filter(item => item.startsWith('BULLET::')); - if (allBullets.length > 0) { - result.currentSummary.push(allBullets[0].replace('BULLET::', '').trim()); - } - - const heading = this.outlines.find(item => item.startsWith('HEADING::')); - if (heading) { - result.mainTopicHeading = heading.replace('HEADING::', '').trim(); - } - - if (allBullets.length > 1) { - result.mainTopicBullets = allBullets.slice(1).map(item => item.replace('BULLET::', '').trim()); - } - - return result; - } - async handleCopy() { if (this.copyState === 'copied') return; let textToCopy = ''; if (this.viewMode === 'transcript') { - textToCopy = this.sttMessages.map(msg => `${msg.speaker}: ${msg.text}`).join('\n'); + const sttView = this.shadowRoot.querySelector('stt-view'); + textToCopy = sttView ? sttView.getTranscriptText() : ''; } else { - const data = this.structuredData || { summary: [], topic: { header: '', bullets: [] }, actions: [] }; - let sections = []; - - if (data.summary && data.summary.length > 0) { - sections.push(`Current Summary:\n${data.summary.map(s => `• ${s}`).join('\n')}`); - } - - if (data.topic && data.topic.header && data.topic.bullets.length > 0) { - sections.push(`\n${data.topic.header}:\n${data.topic.bullets.map(b => `• ${b}`).join('\n')}`); - } - - if (data.actions && data.actions.length > 0) { - sections.push(`\nActions:\n${data.actions.map(a => `▸ ${a}`).join('\n')}`); - } - - if (data.followUps && data.followUps.length > 0) { - sections.push(`\nFollow-Ups:\n${data.followUps.map(f => `▸ ${f}`).join('\n')}`); - } - - textToCopy = sections.join('\n\n').trim(); + const summaryView = this.shadowRoot.querySelector('summary-view'); + textToCopy = summaryView ? summaryView.getSummaryText() : ''; } try { @@ -967,177 +587,24 @@ export class AssistantView extends LitElement { }, 16); } - handleSttUpdate(event, { speaker, text, isFinal, isPartial }) { - if (text === undefined) return; + updated(changedProperties) { + super.updated(changedProperties); - const container = this.shadowRoot.querySelector('.transcription-container'); - this._shouldScrollAfterUpdate = container ? 
container.scrollTop + container.clientHeight >= container.scrollHeight - 10 : false; - - const findLastPartialIdx = spk => { - for (let i = this.sttMessages.length - 1; i >= 0; i--) { - const m = this.sttMessages[i]; - if (m.speaker === spk && m.isPartial) return i; - } - return -1; - }; - - const newMessages = [...this.sttMessages]; - const targetIdx = findLastPartialIdx(speaker); - - if (isPartial) { - if (targetIdx !== -1) { - newMessages[targetIdx] = { - ...newMessages[targetIdx], - text, - isPartial: true, - isFinal: false, - }; - } else { - newMessages.push({ - id: this.messageIdCounter++, - speaker, - text, - isPartial: true, - isFinal: false, - }); - } - } else if (isFinal) { - if (targetIdx !== -1) { - newMessages[targetIdx] = { - ...newMessages[targetIdx], - text, - isPartial: false, - isFinal: true, - }; - } else { - newMessages.push({ - id: this.messageIdCounter++, - speaker, - text, - isPartial: false, - isFinal: true, - }); - } - } - - this.sttMessages = newMessages; - } - - scrollToTranscriptionBottom() { - setTimeout(() => { - const container = this.shadowRoot.querySelector('.transcription-container'); - if (container) { - container.scrollTop = container.scrollHeight; - } - }, 0); - } - - async handleRequestClick(requestText) { - console.log('🔥 Analysis request clicked:', requestText); - - if (window.require) { - const { ipcRenderer } = window.require('electron'); - - try { - const isAskViewVisible = await ipcRenderer.invoke('is-window-visible', 'ask'); - - if (!isAskViewVisible) { - await ipcRenderer.invoke('toggle-feature', 'ask'); - await new Promise(resolve => setTimeout(resolve, 100)); - } - - const result = await ipcRenderer.invoke('send-question-to-ask', requestText); - - if (result.success) { - console.log('✅ Question sent to AskView successfully'); - } else { - console.error('❌ Failed to send question to AskView:', result.error); - } - } catch (error) { - console.error('❌ Error in handleRequestClick:', error); - } + if (changedProperties.has('viewMode')) { + this.adjustWindowHeight(); } } - connectedCallback() { - super.connectedCallback(); - this.startTimer(); - if (window.require) { - const { ipcRenderer } = window.require('electron'); - ipcRenderer.on('stt-update', this.handleSttUpdate); - ipcRenderer.on('session-state-changed', (event, { isActive }) => { - const wasActive = this.isSessionActive; - this.isSessionActive = isActive; - - if (!wasActive && isActive) { - this.hasCompletedRecording = false; - - // 🔄 Reset transcript & analysis when a fresh session starts - this.sttMessages = []; - this.structuredData = { - summary: [], - topic: { header: '', bullets: [] }, - actions: [], - followUps: [], - }; - this.requestUpdate(); - } - if (wasActive && !isActive) { - this.hasCompletedRecording = true; - - this.requestUpdate(); - } - }); - } - this._startDebugStream(); - } - - disconnectedCallback() { - super.disconnectedCallback(); - this.stopTimer(); - - if (this.adjustHeightThrottle) { - clearTimeout(this.adjustHeightThrottle); - this.adjustHeightThrottle = null; - } - if (this.copyTimeout) { - clearTimeout(this.copyTimeout); - } - - if (window.require) { - const { ipcRenderer } = window.require('electron'); - ipcRenderer.removeListener('stt-update', this.handleSttUpdate); - } - - this._stopDebugStream(); + handleSttMessagesUpdated(event) { + // Handle messages update from SttView if needed + this.adjustWindowHeightThrottled(); } firstUpdated() { super.firstUpdated(); - setTimeout(() => this.adjustWindowHeight(), 200); } - updated(changedProperties) { - 
super.updated(changedProperties); - - this.renderMarkdownContent(); - - if (changedProperties.has('sttMessages')) { - if (this._shouldScrollAfterUpdate) { - this.scrollToTranscriptionBottom(); - this._shouldScrollAfterUpdate = false; - } - this.adjustWindowHeightThrottled(); - } - - if (changedProperties.has('viewMode')) { - this.adjustWindowHeight(); - } else if (changedProperties.has('outlines') || changedProperties.has('analysisRequests') || changedProperties.has('structuredData')) { - this.adjustWindowHeightThrottled(); - } - } - render() { const displayText = this.isHovering ? this.viewMode === 'transcript' @@ -1147,16 +614,6 @@ export class AssistantView extends LitElement { ? `Live insights` : `Glass is Listening ${this.elapsedTime}`; - const data = this.structuredData || { - summary: [], - topic: { header: '', bullets: [] }, - actions: [], - }; - - const getSpeakerClass = speaker => { - return speaker.toLowerCase() === 'me' ? 'me' : 'them'; - }; - return html`
@@ -1198,84 +655,15 @@ export class AssistantView extends LitElement {
[hunk body lost in extraction; the removed lines rendered the inline transcription container (mapping this.sttMessages onto .stt-message bubbles with a me/them speaker class) and the inline insights container (Current Summary, ${data.topic.header} bullets, Actions, and Follow-Ups sections wired to this.handleMarkdownClick), while the added lines replace both with the <stt-view> and <summary-view> child components imported at the top of this file; a hedged sketch of that replacement markup follows]
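The added markup itself did not survive extraction; judging from the new SttView/SummaryView imports, the shadowRoot.querySelector('stt-view') and querySelector('summary-view') calls, and the handleSttMessagesUpdated handler added in this diff, the replacement render body is presumably along these lines (the specific properties bound to each component are illustrative assumptions):

    <!-- assumed bindings: each child toggles its own visibility from viewMode -->
    <stt-view
        .isVisible=${this.viewMode === 'transcript'}
        @stt-messages-updated=${this.handleSttMessagesUpdated}
    ></stt-view>
    <summary-view
        .isVisible=${this.viewMode === 'insights'}
        .hasCompletedRecording=${this.hasCompletedRecording}
    ></summary-view>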
+ `; } diff --git a/src/features/listen/audioUtils.js b/src/features/listen/audioUtils.js deleted file mode 100644 index 25dfb9d..0000000 --- a/src/features/listen/audioUtils.js +++ /dev/null @@ -1,123 +0,0 @@ -const fs = require('fs'); -const path = require('path'); - -function pcmToWav(pcmBuffer, outputPath, sampleRate = 24000, channels = 1, bitDepth = 16) { - const byteRate = sampleRate * channels * (bitDepth / 8); - const blockAlign = channels * (bitDepth / 8); - const dataSize = pcmBuffer.length; - - const header = Buffer.alloc(44); - - header.write('RIFF', 0); - header.writeUInt32LE(dataSize + 36, 4); - header.write('WAVE', 8); - - header.write('fmt ', 12); - header.writeUInt32LE(16, 16); - header.writeUInt16LE(1, 20); - header.writeUInt16LE(channels, 22); - header.writeUInt32LE(sampleRate, 24); - header.writeUInt32LE(byteRate, 28); - header.writeUInt16LE(blockAlign, 32); - header.writeUInt16LE(bitDepth, 34); - - header.write('data', 36); - header.writeUInt32LE(dataSize, 40); - - const wavBuffer = Buffer.concat([header, pcmBuffer]); - - fs.writeFileSync(outputPath, wavBuffer); - - return outputPath; -} - -function analyzeAudioBuffer(buffer, label = 'Audio') { - const int16Array = new Int16Array(buffer.buffer, buffer.byteOffset, buffer.length / 2); - - let minValue = 32767; - let maxValue = -32768; - let avgValue = 0; - let rmsValue = 0; - let silentSamples = 0; - - for (let i = 0; i < int16Array.length; i++) { - const sample = int16Array[i]; - minValue = Math.min(minValue, sample); - maxValue = Math.max(maxValue, sample); - avgValue += sample; - rmsValue += sample * sample; - - if (Math.abs(sample) < 100) { - silentSamples++; - } - } - - avgValue /= int16Array.length; - rmsValue = Math.sqrt(rmsValue / int16Array.length); - - const silencePercentage = (silentSamples / int16Array.length) * 100; - - console.log(`${label} Analysis:`); - console.log(` Samples: ${int16Array.length}`); - console.log(` Min: ${minValue}, Max: ${maxValue}`); - console.log(` Average: ${avgValue.toFixed(2)}`); - console.log(` RMS: ${rmsValue.toFixed(2)}`); - console.log(` Silence: ${silencePercentage.toFixed(1)}%`); - console.log(` Dynamic Range: ${20 * Math.log10(maxValue / (rmsValue || 1))} dB`); - - return { - minValue, - maxValue, - avgValue, - rmsValue, - silencePercentage, - sampleCount: int16Array.length, - }; -} - -function saveDebugAudio(buffer, type, timestamp = Date.now()) { - const homeDir = require('os').homedir(); - const debugDir = path.join(homeDir, '.pickle-glass', 'debug'); - - if (!fs.existsSync(debugDir)) { - fs.mkdirSync(debugDir, { recursive: true }); - } - - const pcmPath = path.join(debugDir, `${type}_${timestamp}.pcm`); - const wavPath = path.join(debugDir, `${type}_${timestamp}.wav`); - const metaPath = path.join(debugDir, `${type}_${timestamp}.json`); - - fs.writeFileSync(pcmPath, buffer); - - pcmToWav(buffer, wavPath); - - const analysis = analyzeAudioBuffer(buffer, type); - fs.writeFileSync( - metaPath, - JSON.stringify( - { - timestamp, - type, - bufferSize: buffer.length, - analysis, - format: { - sampleRate: 24000, - channels: 1, - bitDepth: 16, - }, - }, - null, - 2 - ) - ); - - console.log(`Debug audio saved: ${wavPath}`); - - return { pcmPath, wavPath, metaPath }; -} - -module.exports = { - pcmToWav, - analyzeAudioBuffer, - saveDebugAudio, -}; diff --git a/src/features/listen/listenService.js b/src/features/listen/listenService.js new file mode 100644 index 0000000..a0dcde8 --- /dev/null +++ b/src/features/listen/listenService.js @@ -0,0 +1,263 @@ +const { BrowserWindow } = 
require('electron'); +const SttService = require('./stt/sttService'); +const SummaryService = require('./summary/summaryService'); +const authService = require('../../common/services/authService'); +const sessionRepository = require('../../common/repositories/session'); +const sttRepository = require('./stt/repositories'); + +class ListenService { + constructor() { + this.sttService = new SttService(); + this.summaryService = new SummaryService(); + this.currentSessionId = null; + this.isInitializingSession = false; + + this.setupServiceCallbacks(); + } + + setupServiceCallbacks() { + // STT service callbacks + this.sttService.setCallbacks({ + onTranscriptionComplete: (speaker, text) => { + this.handleTranscriptionComplete(speaker, text); + }, + onStatusUpdate: (status) => { + this.sendToRenderer('update-status', status); + } + }); + + // Summary service callbacks + this.summaryService.setCallbacks({ + onAnalysisComplete: (data) => { + console.log('📊 Analysis completed:', data); + }, + onStatusUpdate: (status) => { + this.sendToRenderer('update-status', status); + } + }); + } + + sendToRenderer(channel, data) { + BrowserWindow.getAllWindows().forEach(win => { + if (!win.isDestroyed()) { + win.webContents.send(channel, data); + } + }); + } + + async handleTranscriptionComplete(speaker, text) { + console.log(`[ListenService] Transcription complete: ${speaker} - ${text}`); + + // Save to database + await this.saveConversationTurn(speaker, text); + + // Add to summary service for analysis + this.summaryService.addConversationTurn(speaker, text); + } + + async saveConversationTurn(speaker, transcription) { + if (!this.currentSessionId) { + console.error('[DB] Cannot save turn, no active session ID.'); + return; + } + if (transcription.trim() === '') return; + + try { + await sessionRepository.touch(this.currentSessionId); + await sttRepository.addTranscript({ + sessionId: this.currentSessionId, + speaker: speaker, + text: transcription.trim(), + }); + console.log(`[DB] Saved transcript for session ${this.currentSessionId}: (${speaker})`); + } catch (error) { + console.error('Failed to save transcript to DB:', error); + } + } + + async initializeNewSession() { + try { + const uid = authService.getCurrentUserId(); + if (!uid) { + throw new Error("Cannot initialize session: user not logged in."); + } + + this.currentSessionId = await sessionRepository.getOrCreateActive(uid, 'listen'); + console.log(`[DB] New listen session ensured: ${this.currentSessionId}`); + + // Set session ID for summary service + this.summaryService.setSessionId(this.currentSessionId); + + // Reset conversation history + this.summaryService.resetConversationHistory(); + + console.log('New conversation session started:', this.currentSessionId); + return true; + } catch (error) { + console.error('Failed to initialize new session in DB:', error); + this.currentSessionId = null; + return false; + } + } + + async initializeSession(language = 'en') { + if (this.isInitializingSession) { + console.log('Session initialization already in progress.'); + return false; + } + + this.isInitializingSession = true; + this.sendToRenderer('session-initializing', true); + this.sendToRenderer('update-status', 'Initializing sessions...'); + + try { + // Initialize database session + const sessionInitialized = await this.initializeNewSession(); + if (!sessionInitialized) { + throw new Error('Failed to initialize database session'); + } + + // Initialize STT sessions + await this.sttService.initializeSttSessions(language); + + console.log('✅ Listen 
service initialized successfully.'); + + this.sendToRenderer('session-state-changed', { isActive: true }); + this.sendToRenderer('update-status', 'Connected. Ready to listen.'); + + return true; + } catch (error) { + console.error('❌ Failed to initialize listen service:', error); + this.sendToRenderer('update-status', 'Initialization failed.'); + return false; + } finally { + this.isInitializingSession = false; + this.sendToRenderer('session-initializing', false); + } + } + + async sendAudioContent(data, mimeType) { + return await this.sttService.sendAudioContent(data, mimeType); + } + + async startMacOSAudioCapture() { + if (process.platform !== 'darwin') { + throw new Error('macOS audio capture only available on macOS'); + } + return await this.sttService.startMacOSAudioCapture(); + } + + async stopMacOSAudioCapture() { + this.sttService.stopMacOSAudioCapture(); + } + + isSessionActive() { + return this.sttService.isSessionActive(); + } + + async closeSession() { + try { + // Close STT sessions + await this.sttService.closeSessions(); + + // End database session + if (this.currentSessionId) { + await sessionRepository.end(this.currentSessionId); + console.log(`[DB] Session ${this.currentSessionId} ended.`); + } + + // Reset state + this.currentSessionId = null; + this.summaryService.resetConversationHistory(); + + this.sendToRenderer('session-state-changed', { isActive: false }); + this.sendToRenderer('session-did-close'); + + console.log('Listen service session closed.'); + return { success: true }; + } catch (error) { + console.error('Error closing listen service session:', error); + return { success: false, error: error.message }; + } + } + + getCurrentSessionData() { + return { + sessionId: this.currentSessionId, + conversationHistory: this.summaryService.getConversationHistory(), + totalTexts: this.summaryService.getConversationHistory().length, + analysisData: this.summaryService.getCurrentAnalysisData(), + }; + } + + getConversationHistory() { + return this.summaryService.getConversationHistory(); + } + + setupIpcHandlers() { + const { ipcMain } = require('electron'); + + ipcMain.handle('is-session-active', async () => { + const isActive = this.isSessionActive(); + console.log(`Checking session status. 
Active: ${isActive}`); + return isActive; + }); + + ipcMain.handle('initialize-openai', async (event, profile = 'interview', language = 'en') => { + console.log(`Received initialize-openai request with profile: ${profile}, language: ${language}`); + const success = await this.initializeSession(language); + return success; + }); + + ipcMain.handle('send-audio-content', async (event, { data, mimeType }) => { + try { + await this.sendAudioContent(data, mimeType); + return { success: true }; + } catch (error) { + console.error('Error sending user audio:', error); + return { success: false, error: error.message }; + } + }); + + ipcMain.handle('start-macos-audio', async () => { + if (process.platform !== 'darwin') { + return { success: false, error: 'macOS audio capture only available on macOS' }; + } + try { + const success = await this.startMacOSAudioCapture(); + return { success }; + } catch (error) { + console.error('Error starting macOS audio capture:', error); + return { success: false, error: error.message }; + } + }); + + ipcMain.handle('stop-macos-audio', async () => { + try { + this.stopMacOSAudioCapture(); + return { success: true }; + } catch (error) { + console.error('Error stopping macOS audio capture:', error); + return { success: false, error: error.message }; + } + }); + + ipcMain.handle('close-session', async () => { + return await this.closeSession(); + }); + + ipcMain.handle('update-google-search-setting', async (event, enabled) => { + try { + console.log('Google Search setting updated to:', enabled); + return { success: true }; + } catch (error) { + console.error('Error updating Google Search setting:', error); + return { success: false, error: error.message }; + } + }); + + console.log('✅ Listen service IPC handlers registered'); + } +} + +module.exports = ListenService; \ No newline at end of file diff --git a/src/features/listen/liveSummaryService.js b/src/features/listen/liveSummaryService.js deleted file mode 100644 index b892f66..0000000 --- a/src/features/listen/liveSummaryService.js +++ /dev/null @@ -1,973 +0,0 @@ -require('dotenv').config(); -const { BrowserWindow, ipcMain } = require('electron'); -const { spawn } = require('child_process'); -const { saveDebugAudio } = require('./audioUtils.js'); -const { getSystemPrompt } = require('../../common/prompts/promptBuilder.js'); -const { connectToGeminiSession } = require('../../common/services/googleGeminiClient.js'); -const { connectToOpenAiSession, createOpenAiGenerativeClient, getOpenAiGenerativeModel } = require('../../common/services/openAiClient.js'); -const { makeChatCompletionWithPortkey } = require('../../common/services/aiProviderService.js'); -const authService = require('../../common/services/authService'); -const sessionRepository = require('../../common/repositories/session'); -const listenRepository = require('./repositories'); - -const { getStoredApiKey, getStoredProvider } = require('../../electron/windowManager'); - -const MAX_BUFFER_LENGTH_CHARS = 2000; -const COMPLETION_DEBOUNCE_MS = 2000; - -async function getApiKey() { - const storedKey = await getStoredApiKey(); - - if (storedKey) { - console.log('[LiveSummaryService] Using stored API key'); - return storedKey; - } - - const envKey = process.env.OPENAI_API_KEY; - if (envKey) { - console.log('[LiveSummaryService] Using environment API key'); - return envKey; - } - - console.error('[LiveSummaryService] No API key found in storage or environment'); - return null; -} - -async function getAiProvider() { - try { - const { ipcRenderer } = 
require('electron'); - const provider = await ipcRenderer.invoke('get-ai-provider'); - return provider || 'openai'; - } catch (error) { - // If we're in the main process, get it directly - return getStoredProvider ? getStoredProvider() : 'openai'; - } -} - -let currentSessionId = null; -let conversationHistory = []; -let isInitializingSession = false; - -let mySttSession = null; -let theirSttSession = null; -let myCurrentUtterance = ''; -let theirCurrentUtterance = ''; - -let myLastPartialText = ''; -let theirLastPartialText = ''; -let myInactivityTimer = null; -let theirInactivityTimer = null; -const INACTIVITY_TIMEOUT = 3000; - -const SESSION_IDLE_TIMEOUT_SECONDS = 30 * 60; // 30 minutes - -let previousAnalysisResult = null; -let analysisHistory = []; - -// --------------------------------------------------------------------------- -// 🎛️ Turn-completion debouncing -// --------------------------------------------------------------------------- -// Very aggressive VAD (e.g. 50 ms) tends to split one spoken sentence into -// many "completed" events. To avoid creating a separate chat bubble for each -// of those micro-turns we debounce the *completed* events per speaker. Any -// completions that arrive within this window are concatenated and flushed as -// **one** final turn. - -let myCompletionBuffer = ''; -let theirCompletionBuffer = ''; -let myCompletionTimer = null; -let theirCompletionTimer = null; - -function flushMyCompletion() { - if (!myCompletionBuffer.trim()) return; - - const finalText = myCompletionBuffer.trim(); - // Save to DB & send to renderer as final - saveConversationTurn('Me', finalText); - sendToRenderer('stt-update', { - speaker: 'Me', - text: finalText, - isPartial: false, - isFinal: true, - timestamp: Date.now(), - }); - - myCompletionBuffer = ''; - myCompletionTimer = null; - myCurrentUtterance = ''; // Reset utterance accumulator on flush - sendToRenderer('update-status', 'Listening...'); -} - -function flushTheirCompletion() { - if (!theirCompletionBuffer.trim()) return; - - const finalText = theirCompletionBuffer.trim(); - saveConversationTurn('Them', finalText); - sendToRenderer('stt-update', { - speaker: 'Them', - text: finalText, - isPartial: false, - isFinal: true, - timestamp: Date.now(), - }); - - theirCompletionBuffer = ''; - theirCompletionTimer = null; - theirCurrentUtterance = ''; // Reset utterance accumulator on flush - sendToRenderer('update-status', 'Listening...'); -} - -function debounceMyCompletion(text) { - // 상대방이 말하고 있던 경우, 화자가 변경되었으므로 즉시 상대방의 말풍선을 완성합니다. - if (theirCompletionTimer) { - clearTimeout(theirCompletionTimer); - flushTheirCompletion(); - } - - myCompletionBuffer += (myCompletionBuffer ? ' ' : '') + text; - - if (myCompletionTimer) clearTimeout(myCompletionTimer); - myCompletionTimer = setTimeout(flushMyCompletion, COMPLETION_DEBOUNCE_MS); -} - -function debounceTheirCompletion(text) { - // 내가 말하고 있던 경우, 화자가 변경되었으므로 즉시 내 말풍선을 완성합니다. - if (myCompletionTimer) { - clearTimeout(myCompletionTimer); - flushMyCompletion(); - } - - theirCompletionBuffer += (theirCompletionBuffer ? ' ' : '') + text; - - if (theirCompletionTimer) clearTimeout(theirCompletionTimer); - theirCompletionTimer = setTimeout(flushTheirCompletion, COMPLETION_DEBOUNCE_MS); -} - -let systemAudioProc = null; - -let analysisIntervalId = null; - -/** - * Converts conversation history into text to include in the prompt. - * @param {Array} conversationTexts - Array of conversation texts ["me: ~~~", "them: ~~~", ...] 
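// [Editorial sketch — not part of this diff] The turn-completion debouncing
// removed here (and reimplemented inside the new SttService) is the core idea
// of this file: an aggressive VAD emits many "completed" events per spoken
// sentence, so completions are buffered per speaker and flushed as one turn,
// and a speaker change flushes the other speaker's buffer immediately.
// A minimal, self-contained version of that pattern; the TurnDebouncer class
// and the 2000 ms constant mirror COMPLETION_DEBOUNCE_MS above but are
// illustrative only, not code from this repository.
const SKETCH_DEBOUNCE_MS = 2000;

class TurnDebouncer {
    constructor(onTurn) {
        this.onTurn = onTurn; // called once per consolidated turn
        this.buffer = '';
        this.timer = null;
    }

    // Append one "completed" fragment and re-arm the flush timer.
    push(text) {
        this.buffer += (this.buffer ? ' ' : '') + text;
        if (this.timer) clearTimeout(this.timer);
        this.timer = setTimeout(() => this.flush(), SKETCH_DEBOUNCE_MS);
    }

    // Emit whatever accumulated as a single final turn.
    flush() {
        if (this.timer) clearTimeout(this.timer);
        this.timer = null;
        if (!this.buffer.trim()) return;
        this.onTurn(this.buffer.trim());
        this.buffer = '';
    }
}

const meTurns = new TurnDebouncer(text => console.log('[Me]', text));
const themTurns = new TurnDebouncer(text => console.log('[Them]', text));

// A speaker change closes out the other side first, exactly like
// debounceMyCompletion / debounceTheirCompletion above.
function onMyCompletion(text) {
    themTurns.flush();
    meTurns.push(text);
}
function onTheirCompletion(text) {
    meTurns.flush();
    themTurns.push(text);
}

onMyCompletion('Hello there,');
onMyCompletion('how are you?'); // prints "[Me] Hello there, how are you?" once, after 2 s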
- * @param {number} maxTurns - Maximum number of recent turns to include - * @returns {string} - Formatted conversation string for the prompt - */ -function formatConversationForPrompt(conversationTexts, maxTurns = 30) { - if (conversationTexts.length === 0) return ''; - return conversationTexts.slice(-maxTurns).join('\n'); -} - -async function makeOutlineAndRequests(conversationTexts, maxTurns = 30) { - console.log(`🔍 makeOutlineAndRequests called - conversationTexts: ${conversationTexts.length}`); - - if (conversationTexts.length === 0) { - console.log('⚠️ No conversation texts available for analysis'); - return null; - } - - const recentConversation = formatConversationForPrompt(conversationTexts, maxTurns); - - // 이전 분석 결과를 프롬프트에 포함 - let contextualPrompt = ''; - if (previousAnalysisResult) { - contextualPrompt = ` -Previous Analysis Context: -- Main Topic: ${previousAnalysisResult.topic.header} -- Key Points: ${previousAnalysisResult.summary.slice(0, 3).join(', ')} -- Last Actions: ${previousAnalysisResult.actions.slice(0, 2).join(', ')} - -Please build upon this context while analyzing the new conversation segments. -`; - } - - const basePrompt = getSystemPrompt('pickle_glass_analysis', '', false); - const systemPrompt = basePrompt.replace('{{CONVERSATION_HISTORY}}', recentConversation); - - try { - if (currentSessionId) { - await sessionRepository.touch(currentSessionId); - } - const messages = [ - { - role: 'system', - content: systemPrompt, - }, - { - role: 'user', - content: `${contextualPrompt} - -Analyze the conversation and provide a structured summary. Format your response as follows: - -**Summary Overview** -- Main discussion point with context - -**Key Topic: [Topic Name]** -- First key insight -- Second key insight -- Third key insight - -**Extended Explanation** -Provide 2-3 sentences explaining the context and implications. - -**Suggested Questions** -1. First follow-up question? -2. Second follow-up question? -3. Third follow-up question? - -Keep all points concise and build upon previous analysis if provided.`, - }, - ]; - - console.log('🤖 Sending analysis request to OpenAI...'); - - const API_KEY = await getApiKey(); - if (!API_KEY) { - throw new Error('No API key available'); - } - - const provider = getStoredProvider ? getStoredProvider() : 'openai'; - const loggedIn = authService.getCurrentUser().isLoggedIn; // true ➜ vKey, false ➜ apiKey - const usePortkey = loggedIn && provider === 'openai'; // Only use Portkey for OpenAI with Firebase - - console.log(`[LiveSummary] provider: ${provider}, usePortkey: ${usePortkey}`); - - const completion = await makeChatCompletionWithPortkey({ - apiKey: API_KEY, - provider: provider, - messages: messages, - temperature: 0.7, - maxTokens: 1024, - model: provider === 'openai' ? 'gpt-4.1' : 'gemini-2.5-flash', - usePortkey: usePortkey, - portkeyVirtualKey: usePortkey ? 
API_KEY : null - }); - - const responseText = completion.content; - console.log(`✅ Analysis response received: ${responseText}`); - const structuredData = parseResponseText(responseText, previousAnalysisResult); - - if (currentSessionId) { - listenRepository.saveSummary({ - sessionId: currentSessionId, - tldr: structuredData.summary.join('\n'), - bullet_json: JSON.stringify(structuredData.topic.bullets), - action_json: JSON.stringify(structuredData.actions), - model: 'gpt-4.1' - }).catch(err => console.error('[DB] Failed to save summary:', err)); - } - - // 분석 결과 저장 - previousAnalysisResult = structuredData; - analysisHistory.push({ - timestamp: Date.now(), - data: structuredData, - conversationLength: conversationTexts.length, - }); - - // 히스토리 크기 제한 (최근 10개만 유지) - if (analysisHistory.length > 10) { - analysisHistory.shift(); - } - - return structuredData; - } catch (error) { - console.error('❌ Error during analysis generation:', error.message); - return previousAnalysisResult; // 에러 시 이전 결과 반환 - } -} - -function parseResponseText(responseText, previousResult) { - const structuredData = { - summary: [], - topic: { header: '', bullets: [] }, - actions: [], - followUps: ['✉️ Draft a follow-up email', '✅ Generate action items', '📝 Show summary'], - }; - - // 이전 결과가 있으면 기본값으로 사용 - if (previousResult) { - structuredData.topic.header = previousResult.topic.header; - structuredData.summary = [...previousResult.summary]; - } - - try { - const lines = responseText.split('\n'); - let currentSection = ''; - let isCapturingTopic = false; - let topicName = ''; - - for (const line of lines) { - const trimmedLine = line.trim(); - - // 섹션 헤더 감지 - if (trimmedLine.startsWith('**Summary Overview**')) { - currentSection = 'summary-overview'; - continue; - } else if (trimmedLine.startsWith('**Key Topic:')) { - currentSection = 'topic'; - isCapturingTopic = true; - topicName = trimmedLine.match(/\*\*Key Topic: (.+?)\*\*/)?.[1] || ''; - if (topicName) { - structuredData.topic.header = topicName + ':'; - } - continue; - } else if (trimmedLine.startsWith('**Extended Explanation**')) { - currentSection = 'explanation'; - continue; - } else if (trimmedLine.startsWith('**Suggested Questions**')) { - currentSection = 'questions'; - continue; - } - - // 컨텐츠 파싱 - if (trimmedLine.startsWith('-') && currentSection === 'summary-overview') { - const summaryPoint = trimmedLine.substring(1).trim(); - if (summaryPoint && !structuredData.summary.includes(summaryPoint)) { - // 기존 summary 업데이트 (최대 5개 유지) - structuredData.summary.unshift(summaryPoint); - if (structuredData.summary.length > 5) { - structuredData.summary.pop(); - } - } - } else if (trimmedLine.startsWith('-') && currentSection === 'topic') { - const bullet = trimmedLine.substring(1).trim(); - if (bullet && structuredData.topic.bullets.length < 3) { - structuredData.topic.bullets.push(bullet); - } - } else if (currentSection === 'explanation' && trimmedLine) { - // explanation을 topic bullets에 추가 (문장 단위로) - const sentences = trimmedLine - .split(/\.\s+/) - .filter(s => s.trim().length > 0) - .map(s => s.trim() + (s.endsWith('.') ? 
'' : '.')); - - sentences.forEach(sentence => { - if (structuredData.topic.bullets.length < 3 && !structuredData.topic.bullets.includes(sentence)) { - structuredData.topic.bullets.push(sentence); - } - }); - } else if (trimmedLine.match(/^\d+\./) && currentSection === 'questions') { - const question = trimmedLine.replace(/^\d+\.\s*/, '').trim(); - if (question && question.includes('?')) { - structuredData.actions.push(`❓ ${question}`); - } - } - } - - // 기본 액션 추가 - const defaultActions = ['✨ What should I say next?', '💬 Suggest follow-up questions']; - defaultActions.forEach(action => { - if (!structuredData.actions.includes(action)) { - structuredData.actions.push(action); - } - }); - - // 액션 개수 제한 - structuredData.actions = structuredData.actions.slice(0, 5); - - // 유효성 검증 및 이전 데이터 병합 - if (structuredData.summary.length === 0 && previousResult) { - structuredData.summary = previousResult.summary; - } - if (structuredData.topic.bullets.length === 0 && previousResult) { - structuredData.topic.bullets = previousResult.topic.bullets; - } - } catch (error) { - console.error('❌ Error parsing response text:', error); - // 에러 시 이전 결과 반환 - return ( - previousResult || { - summary: [], - topic: { header: 'Analysis in progress', bullets: [] }, - actions: ['✨ What should I say next?', '💬 Suggest follow-up questions'], - followUps: ['✉️ Draft a follow-up email', '✅ Generate action items', '📝 Show summary'], - } - ); - } - - console.log('📊 Final structured data:', JSON.stringify(structuredData, null, 2)); - return structuredData; -} - -/** - * Triggers analysis when conversation history reaches 5 texts. - */ -async function triggerAnalysisIfNeeded() { - if (conversationHistory.length >= 5 && conversationHistory.length % 5 === 0) { - console.log(`🚀 Triggering analysis (non-blocking) - ${conversationHistory.length} conversation texts accumulated`); - - makeOutlineAndRequests(conversationHistory) - .then(data => { - if (data) { - console.log('📤 Sending structured data to renderer'); - sendToRenderer('update-structured-data', data); - } else { - console.log('❌ No analysis data returned from non-blocking call'); - } - }) - .catch(error => { - console.error('❌ Error in non-blocking analysis:', error); - }); - } -} - -/** - * Schedules periodic updates of outline and analysis every 10 seconds. - DEPRECATED - * Now analysis is triggered every 5 conversation texts. 
- */ -function startAnalysisInterval() { - console.log('⏰ Analysis will be triggered every 5 conversation texts (not on timer)'); - - if (analysisIntervalId) { - clearInterval(analysisIntervalId); - analysisIntervalId = null; - } -} - -function stopAnalysisInterval() { - if (analysisIntervalId) { - clearInterval(analysisIntervalId); - analysisIntervalId = null; - } - - if (myInactivityTimer) { - clearTimeout(myInactivityTimer); - myInactivityTimer = null; - } - if (theirInactivityTimer) { - clearTimeout(theirInactivityTimer); - theirInactivityTimer = null; - } -} - -function sendToRenderer(channel, data) { - BrowserWindow.getAllWindows().forEach(win => { - if (!win.isDestroyed()) { - win.webContents.send(channel, data); - } - }); -} - -function getCurrentSessionData() { - return { - sessionId: currentSessionId, - conversationHistory: conversationHistory, - totalTexts: conversationHistory.length, - }; -} - -// Conversation management functions -async function initializeNewSession() { - try { - const uid = authService.getCurrentUserId(); - if (!uid) { - throw new Error("Cannot initialize session: user not logged in."); - } - currentSessionId = await sessionRepository.getOrCreateActive(uid, 'listen'); - console.log(`[DB] New listen session ensured: ${currentSessionId}`); - - conversationHistory = []; - myCurrentUtterance = ''; - theirCurrentUtterance = ''; - - // 🔄 Reset analysis state so the new session starts fresh - previousAnalysisResult = null; - analysisHistory = []; - - // sendToRenderer('update-outline', []); - // sendToRenderer('update-analysis-requests', []); - - myLastPartialText = ''; - theirLastPartialText = ''; - if (myInactivityTimer) { - clearTimeout(myInactivityTimer); - myInactivityTimer = null; - } - if (theirInactivityTimer) { - clearTimeout(theirInactivityTimer); - theirInactivityTimer = null; - } - - console.log('New conversation session started:', currentSessionId); - return true; - } catch (error) { - console.error('Failed to initialize new session in DB:', error); - currentSessionId = null; - return false; - } -} - -async function saveConversationTurn(speaker, transcription) { - if (!currentSessionId) { - console.error('[DB] Cannot save turn, no active session ID.'); - return; - } - if (transcription.trim() === '') return; - - try { - await sessionRepository.touch(currentSessionId); - await listenRepository.addTranscript({ - sessionId: currentSessionId, - speaker: speaker, - text: transcription.trim(), - }); - console.log(`[DB] Saved transcript for session ${currentSessionId}: (${speaker})`); - - const conversationText = `${speaker.toLowerCase()}: ${transcription.trim()}`; - conversationHistory.push(conversationText); - console.log(`💬 Saved conversation text: ${conversationText}`); - console.log(`📈 Total conversation history: ${conversationHistory.length} texts`); - - triggerAnalysisIfNeeded(); - - const conversationTurn = { - speaker: speaker, - timestamp: Date.now(), - transcription: transcription.trim(), - }; - } catch (error) { - console.error('Failed to save transcript to DB:', error); - } -} - -async function initializeLiveSummarySession(language = 'en') { - // Use system environment variable if set, otherwise use the provided language - const effectiveLanguage = process.env.OPENAI_TRANSCRIBE_LANG || language || 'en'; - if (isInitializingSession) { - console.log('Session initialization already in progress.'); - return false; - } - - const userState = authService.getCurrentUser(); - const loggedIn = userState.isLoggedIn; - const keyType = loggedIn ? 
'vKey' : 'apiKey'; - - isInitializingSession = true; - sendToRenderer('session-initializing', true); - sendToRenderer('update-status', 'Initializing sessions...'); - - const API_KEY = await getApiKey(); - if (!API_KEY) { - console.error('FATAL ERROR: API Key is not defined.'); - sendToRenderer('update-status', 'API Key not configured.'); - isInitializingSession = false; - sendToRenderer('session-initializing', false); - return false; - } - - await initializeNewSession(); - - const provider = await getAiProvider(); - const isGemini = provider === 'gemini'; - console.log(`[LiveSummaryService] Initializing STT for provider: ${provider}`); - - try { - const handleMyMessage = message => { - if (isGemini) { - // console.log('[Gemini Raw Message - Me]:', JSON.stringify(message, null, 2)); - const text = message.serverContent?.inputTranscription?.text || ''; - if (text && text.trim()) { - const finalUtteranceText = text.trim().replace(//g, '').trim(); - if (finalUtteranceText && finalUtteranceText !== '.') { - debounceMyCompletion(finalUtteranceText); - } - } - } else { - const type = message.type; - const text = message.transcript || message.delta || (message.alternatives && message.alternatives[0]?.transcript) || ''; - - if (type === 'conversation.item.input_audio_transcription.delta') { - if (myCompletionTimer) clearTimeout(myCompletionTimer); - myCompletionTimer = null; - myCurrentUtterance += text; - const continuousText = myCompletionBuffer + (myCompletionBuffer ? ' ' : '') + myCurrentUtterance; - if (text && !text.includes('vq_lbr_audio_')) { - sendToRenderer('stt-update', { - speaker: 'Me', - text: continuousText, - isPartial: true, - isFinal: false, - timestamp: Date.now(), - }); - } - } else if (type === 'conversation.item.input_audio_transcription.completed') { - if (text && text.trim()) { - const finalUtteranceText = text.trim(); - myCurrentUtterance = ''; - debounceMyCompletion(finalUtteranceText); - } - } - } - - if (message.error) { - console.error('[Me] STT Session Error:', message.error); - } - }; - - const handleTheirMessage = message => { - if (isGemini) { - // console.log('[Gemini Raw Message - Them]:', JSON.stringify(message, null, 2)); - const text = message.serverContent?.inputTranscription?.text || ''; - if (text && text.trim()) { - const finalUtteranceText = text.trim().replace(//g, '').trim(); - if (finalUtteranceText && finalUtteranceText !== '.') { - debounceTheirCompletion(finalUtteranceText); - } - } - } else { - const type = message.type; - const text = message.transcript || message.delta || (message.alternatives && message.alternatives[0]?.transcript) || ''; - if (type === 'conversation.item.input_audio_transcription.delta') { - if (theirCompletionTimer) clearTimeout(theirCompletionTimer); - theirCompletionTimer = null; - theirCurrentUtterance += text; - const continuousText = theirCompletionBuffer + (theirCompletionBuffer ? 
' ' : '') + theirCurrentUtterance; - if (text && !text.includes('vq_lbr_audio_')) { - sendToRenderer('stt-update', { - speaker: 'Them', - text: continuousText, - isPartial: true, - isFinal: false, - timestamp: Date.now(), - }); - } - } else if (type === 'conversation.item.input_audio_transcription.completed') { - if (text && text.trim()) { - const finalUtteranceText = text.trim(); - theirCurrentUtterance = ''; - debounceTheirCompletion(finalUtteranceText); - } - } - } - - if (message.error) { - console.error('[Them] STT Session Error:', message.error); - } - }; - - const mySttConfig = { - language: effectiveLanguage, - callbacks: { - onmessage: handleMyMessage, - onerror: error => console.error('My STT session error:', error.message), - onclose: event => console.log('My STT session closed:', event.reason), - }, - }; - const theirSttConfig = { - language: effectiveLanguage, - callbacks: { - onmessage: handleTheirMessage, - onerror: error => console.error('Their STT session error:', error.message), - onclose: event => console.log('Their STT session closed:', event.reason), - }, - }; - - if (isGemini) { - [mySttSession, theirSttSession] = await Promise.all([ - connectToGeminiSession(API_KEY, mySttConfig), - connectToGeminiSession(API_KEY, theirSttConfig), - ]); - } else { - [mySttSession, theirSttSession] = await Promise.all([ - connectToOpenAiSession(API_KEY, mySttConfig, keyType), - connectToOpenAiSession(API_KEY, theirSttConfig, keyType), - ]); - } - - console.log('✅ Both STT sessions initialized successfully.'); - triggerAnalysisIfNeeded(); - - sendToRenderer('session-state-changed', { isActive: true }); - - isInitializingSession = false; - sendToRenderer('session-initializing', false); - sendToRenderer('update-status', 'Connected. Ready to listen.'); - return true; - } catch (error) { - console.error('❌ Failed to initialize STT sessions:', error); - isInitializingSession = false; - sendToRenderer('session-initializing', false); - sendToRenderer('update-status', 'Initialization failed.'); - mySttSession = null; - theirSttSession = null; - return false; - } -} - -function killExistingSystemAudioDump() { - return new Promise(resolve => { - console.log('Checking for existing SystemAudioDump processes...'); - - const killProc = spawn('pkill', ['-f', 'SystemAudioDump'], { - stdio: 'ignore', - }); - - killProc.on('close', code => { - if (code === 0) { - console.log('Killed existing SystemAudioDump processes'); - } else { - console.log('No existing SystemAudioDump processes found'); - } - resolve(); - }); - - killProc.on('error', err => { - console.log('Error checking for existing processes (this is normal):', err.message); - resolve(); - }); - - setTimeout(() => { - killProc.kill(); - resolve(); - }, 2000); - }); -} - -async function startMacOSAudioCapture() { - if (process.platform !== 'darwin' || !theirSttSession) return false; - - await killExistingSystemAudioDump(); - console.log('Starting macOS audio capture for "Them"...'); - - const { app } = require('electron'); - const path = require('path'); - const systemAudioPath = app.isPackaged - ? 
path.join(process.resourcesPath, 'app.asar.unpacked', 'src', 'assets', 'SystemAudioDump') - : path.join(app.getAppPath(), 'src', 'assets', 'SystemAudioDump'); - - console.log('SystemAudioDump path:', systemAudioPath); - - systemAudioProc = spawn(systemAudioPath, [], { - stdio: ['ignore', 'pipe', 'pipe'], - }); - - if (!systemAudioProc.pid) { - console.error('Failed to start SystemAudioDump'); - return false; - } - - console.log('SystemAudioDump started with PID:', systemAudioProc.pid); - - const CHUNK_DURATION = 0.1; - const SAMPLE_RATE = 24000; - const BYTES_PER_SAMPLE = 2; - const CHANNELS = 2; - const CHUNK_SIZE = SAMPLE_RATE * BYTES_PER_SAMPLE * CHANNELS * CHUNK_DURATION; - - let audioBuffer = Buffer.alloc(0); - - const provider = await getAiProvider(); - const isGemini = provider === 'gemini'; - - systemAudioProc.stdout.on('data', async data => { - audioBuffer = Buffer.concat([audioBuffer, data]); - - while (audioBuffer.length >= CHUNK_SIZE) { - const chunk = audioBuffer.slice(0, CHUNK_SIZE); - audioBuffer = audioBuffer.slice(CHUNK_SIZE); - - const monoChunk = CHANNELS === 2 ? convertStereoToMono(chunk) : chunk; - const base64Data = monoChunk.toString('base64'); - - sendToRenderer('system-audio-data', { data: base64Data }); - - if (theirSttSession) { - try { - // await theirSttSession.sendRealtimeInput(base64Data); - const payload = isGemini - ? { audio: { data: base64Data, mimeType: 'audio/pcm;rate=24000' } } - : base64Data; - await theirSttSession.sendRealtimeInput(payload); - } catch (err) { - console.error('Error sending system audio:', err.message); - } - } - - if (process.env.DEBUG_AUDIO) { - saveDebugAudio(monoChunk, 'system_audio'); - } - } - }); - - systemAudioProc.stderr.on('data', data => { - console.error('SystemAudioDump stderr:', data.toString()); - }); - - systemAudioProc.on('close', code => { - console.log('SystemAudioDump process closed with code:', code); - systemAudioProc = null; - }); - - systemAudioProc.on('error', err => { - console.error('SystemAudioDump process error:', err); - systemAudioProc = null; - }); - - return true; -} - -function convertStereoToMono(stereoBuffer) { - const samples = stereoBuffer.length / 4; - const monoBuffer = Buffer.alloc(samples * 2); - - for (let i = 0; i < samples; i++) { - const leftSample = stereoBuffer.readInt16LE(i * 4); - monoBuffer.writeInt16LE(leftSample, i * 2); - } - - return monoBuffer; -} - -function stopMacOSAudioCapture() { - if (systemAudioProc) { - console.log('Stopping SystemAudioDump...'); - systemAudioProc.kill('SIGTERM'); - systemAudioProc = null; - } -} - -async function sendAudioToOpenAI(base64Data, sttSessionRef) { - if (!sttSessionRef.current) return; - - try { - process.stdout.write('.'); - await sttSessionRef.current.sendRealtimeInput({ - audio: { - data: base64Data, - mimeType: 'audio/pcm;rate=24000', - }, - }); - } catch (error) { - console.error('Error sending audio to OpenAI:', error); - } -} - -function isSessionActive() { - return !!mySttSession && !!theirSttSession; -} - -async function closeSession() { - try { - stopMacOSAudioCapture(); - stopAnalysisInterval(); - - if (currentSessionId) { - await sessionRepository.end(currentSessionId); - console.log(`[DB] Session ${currentSessionId} ended.`); - } - - const closePromises = []; - if (mySttSession) { - closePromises.push(mySttSession.close()); - mySttSession = null; - } - if (theirSttSession) { - closePromises.push(theirSttSession.close()); - theirSttSession = null; - } - - await Promise.all(closePromises); - console.log('All sessions closed.'); - - 
currentSessionId = null; - conversationHistory = []; - - sendToRenderer('session-state-changed', { isActive: false }); - sendToRenderer('session-did-close'); - - return { success: true }; - } catch (error) { - console.error('Error closing sessions:', error); - return { success: false, error: error.message }; - } -} - -function setupLiveSummaryIpcHandlers() { - ipcMain.handle('is-session-active', async () => { - const isActive = isSessionActive(); - console.log(`Checking session status. Active: ${isActive}`); - return isActive; - }); - - ipcMain.handle('initialize-openai', async (event, profile = 'interview', language = 'en') => { - console.log(`Received initialize-openai request with profile: ${profile}, language: ${language}`); - const success = await initializeLiveSummarySession(language); - return success; - }); - - ipcMain.handle('send-audio-content', async (event, { data, mimeType }) => { - const provider = await getAiProvider(); - const isGemini = provider === 'gemini'; - if (!mySttSession) return { success: false, error: 'User STT session not active' }; - try { - // await mySttSession.sendRealtimeInput(data); - // provider에 맞는 형식으로 래핑 - const payload = isGemini - ? { audio: { data, mimeType: mimeType || 'audio/pcm;rate=24000' } } - : data; // OpenAI는 base64 string 그대로 - - await mySttSession.sendRealtimeInput(payload); - return { success: true }; - } catch (error) { - console.error('Error sending user audio:', error); - return { success: false, error: error.message }; - } - }); - - ipcMain.handle('start-macos-audio', async () => { - if (process.platform !== 'darwin') { - return { success: false, error: 'macOS audio capture only available on macOS' }; - } - try { - const success = await startMacOSAudioCapture(); - return { success }; - } catch (error) { - console.error('Error starting macOS audio capture:', error); - return { success: false, error: error.message }; - } - }); - - ipcMain.handle('stop-macos-audio', async () => { - try { - stopMacOSAudioCapture(); - return { success: true }; - } catch (error) { - console.error('Error stopping macOS audio capture:', error); - return { success: false, error: error.message }; - } - }); - - ipcMain.handle('close-session', async () => { - return await closeSession(); - }); - - ipcMain.handle('update-google-search-setting', async (event, enabled) => { - try { - console.log('Google Search setting updated to:', enabled); - return { success: true }; - } catch (error) { - console.error('Error updating Google Search setting:', error); - return { success: false, error: error.message }; - } - }); -} - -function getConversationHistory() { - return conversationHistory; -} - -module.exports = { - sendToRenderer, - initializeNewSession, - saveConversationTurn, - killExistingSystemAudioDump, - startMacOSAudioCapture, - convertStereoToMono, - stopMacOSAudioCapture, - sendAudioToOpenAI, - setupLiveSummaryIpcHandlers, - isSessionActive, - closeSession, - getConversationHistory, -}; diff --git a/src/features/listen/renderer.js b/src/features/listen/renderer/listenCapture.js similarity index 80% rename from src/features/listen/renderer.js rename to src/features/listen/renderer/listenCapture.js index 99d0bd3..a4eef43 100644 --- a/src/features/listen/renderer.js +++ b/src/features/listen/renderer/listenCapture.js @@ -1,20 +1,29 @@ -// renderer.js const { ipcRenderer } = require('electron'); -const { makeStreamingChatCompletionWithPortkey } = require('../../common/services/aiProviderService.js'); -let mediaStream = null; -let screenshotInterval = null; -let 
audioContext = null; -let audioProcessor = null; -let micMediaStream = null; -let audioBuffer = []; +// --------------------------- +// Constants & Globals +// --------------------------- const SAMPLE_RATE = 24000; const AUDIO_CHUNK_DURATION = 0.1; const BUFFER_SIZE = 4096; +const isLinux = process.platform === 'linux'; +const isMacOS = process.platform === 'darwin'; + +let mediaStream = null; +let micMediaStream = null; +let screenshotInterval = null; +let audioContext = null; +let audioProcessor = null; +let currentImageQuality = 'medium'; +let lastScreenshotBase64 = null; + let systemAudioBuffer = []; const MAX_SYSTEM_BUFFER_SIZE = 10; +// --------------------------- +// Utility helpers (exact from renderer.js) +// --------------------------- function isVoiceActive(audioFloat32Array, threshold = 0.005) { if (!audioFloat32Array || audioFloat32Array.length === 0) { return false; @@ -31,11 +40,6 @@ function isVoiceActive(audioFloat32Array, threshold = 0.005) { return rms > threshold; } -let currentImageQuality = 'medium'; // Store current image quality for manual screenshots -let lastScreenshotBase64 = null; // Store the latest screenshot - -let realtimeConversationHistory = []; - function base64ToFloat32Array(base64) { const binaryString = atob(base64); const bytes = new Uint8Array(binaryString.length); @@ -54,11 +58,29 @@ function base64ToFloat32Array(base64) { return float32Array; } -async function queryLoginState() { - const userState = await ipcRenderer.invoke('get-current-user'); - return userState; +function convertFloat32ToInt16(float32Array) { + const int16Array = new Int16Array(float32Array.length); + for (let i = 0; i < float32Array.length; i++) { + // Improved scaling to prevent clipping + const s = Math.max(-1, Math.min(1, float32Array[i])); + int16Array[i] = s < 0 ? 
s * 0x8000 : s * 0x7fff; + } + return int16Array; } +function arrayBufferToBase64(buffer) { + let binary = ''; + const bytes = new Uint8Array(buffer); + const len = bytes.byteLength; + for (let i = 0; i < len; i++) { + binary += String.fromCharCode(bytes[i]); + } + return btoa(binary); +} + +// --------------------------- +// Complete SimpleAEC implementation (exact from renderer.js) +// --------------------------- class SimpleAEC { constructor() { this.adaptiveFilter = new Float32Array(1024); @@ -179,11 +201,24 @@ class SimpleAEC { let aecProcessor = new SimpleAEC(); -const isLinux = process.platform === 'linux'; -const isMacOS = process.platform === 'darwin'; +// System audio data handler +ipcRenderer.on('system-audio-data', (event, { data }) => { + systemAudioBuffer.push({ + data: data, + timestamp: Date.now(), + }); -window.pickleGlass = window.pickleGlass || {}; + // 오래된 데이터 제거 + if (systemAudioBuffer.length > MAX_SYSTEM_BUFFER_SIZE) { + systemAudioBuffer = systemAudioBuffer.slice(-MAX_SYSTEM_BUFFER_SIZE); + } + console.log('📥 Received system audio for AEC reference'); +}); + +// --------------------------- +// Complete token tracker (exact from renderer.js) +// --------------------------- let tokenTracker = { tokens: [], audioStartTime: null, @@ -265,126 +300,201 @@ setInterval(() => { tokenTracker.trackAudioTokens(); }, 2000); -function pickleGlassElement() { - return document.getElementById('pickle-glass'); -} +// --------------------------- +// Audio processing functions (exact from renderer.js) +// --------------------------- +function setupMicProcessing(micStream) { + const micAudioContext = new AudioContext({ sampleRate: SAMPLE_RATE }); + const micSource = micAudioContext.createMediaStreamSource(micStream); + const micProcessor = micAudioContext.createScriptProcessor(BUFFER_SIZE, 1, 1); -function convertFloat32ToInt16(float32Array) { - const int16Array = new Int16Array(float32Array.length); - for (let i = 0; i < float32Array.length; i++) { - // Improved scaling to prevent clipping - const s = Math.max(-1, Math.min(1, float32Array[i])); - int16Array[i] = s < 0 ? s * 0x8000 : s * 0x7fff; - } - return int16Array; -} + let audioBuffer = []; + const samplesPerChunk = SAMPLE_RATE * AUDIO_CHUNK_DURATION; -function arrayBufferToBase64(buffer) { - let binary = ''; - const bytes = new Uint8Array(buffer); - const len = bytes.byteLength; - for (let i = 0; i < len; i++) { - binary += String.fromCharCode(bytes[i]); - } - return btoa(binary); -} + micProcessor.onaudioprocess = async e => { + const inputData = e.inputBuffer.getChannelData(0); + audioBuffer.push(...inputData); -async function initializeopenai(profile = 'interview', language = 'en') { - // The API key is now handled in the main process from .env file. - // We just need to trigger the initialization. - try { - console.log(`Requesting OpenAI initialization with profile: ${profile}, language: ${language}`); - const success = await ipcRenderer.invoke('initialize-openai', profile, language); - if (success) { - // The status will be updated via 'update-status' event from the main process. 
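// [Editorial sketch — not part of this diff] The mic pipeline moved into
// listenCapture.js above always ships audio the same way: 24 kHz float samples
// are cut into 0.1 s chunks (2400 samples), clamped and scaled to 16-bit PCM,
// then base64-encoded and sent over IPC as 'audio/pcm;rate=24000'. The
// SKETCH_* names below are illustrative; Node's Buffer stands in for the
// renderer's arrayBufferToBase64 helper, and the conversion mirrors
// convertFloat32ToInt16 above.
const SKETCH_SAMPLE_RATE = 24000;
const SKETCH_CHUNK_DURATION = 0.1;
const sketchSamplesPerChunk = SKETCH_SAMPLE_RATE * SKETCH_CHUNK_DURATION; // 2400 samples

function sketchFloatToInt16(float32Array) {
    const int16 = new Int16Array(float32Array.length);
    for (let i = 0; i < float32Array.length; i++) {
        const s = Math.max(-1, Math.min(1, float32Array[i])); // clamp to avoid clipping
        int16[i] = s < 0 ? s * 0x8000 : s * 0x7fff;
    }
    return int16;
}

function sketchEncodeChunk(float32Chunk) {
    const pcm16 = sketchFloatToInt16(float32Chunk);
    return Buffer.from(pcm16.buffer, pcm16.byteOffset, pcm16.byteLength).toString('base64');
}

// 100 ms of silence: 2400 samples -> 4800 bytes of PCM -> 6400 base64 characters.
const sketchSilentChunk = new Float32Array(sketchSamplesPerChunk);
console.log(sketchEncodeChunk(sketchSilentChunk).length); // 6400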
- console.log('OpenAI initialization successful.'); - } else { - console.error('OpenAI initialization failed.'); - const appElement = pickleGlassElement(); - if (appElement && typeof appElement.setStatus === 'function') { - appElement.setStatus('Initialization Failed'); + while (audioBuffer.length >= samplesPerChunk) { + let chunk = audioBuffer.splice(0, samplesPerChunk); + let processedChunk = new Float32Array(chunk); + + // Check for system audio and apply AEC only if voice is active + if (aecProcessor && systemAudioBuffer.length > 0) { + const latestSystemAudio = systemAudioBuffer[systemAudioBuffer.length - 1]; + const systemFloat32 = base64ToFloat32Array(latestSystemAudio.data); + + // Apply AEC only when system audio has active speech + if (isVoiceActive(systemFloat32)) { + processedChunk = aecProcessor.process(new Float32Array(chunk), systemFloat32); + console.log('🔊 Applied AEC because system audio is active'); + } } + + const pcmData16 = convertFloat32ToInt16(processedChunk); + const base64Data = arrayBufferToBase64(pcmData16.buffer); + + await ipcRenderer.invoke('send-audio-content', { + data: base64Data, + mimeType: 'audio/pcm;rate=24000', + }); + } + }; + + micSource.connect(micProcessor); + micProcessor.connect(micAudioContext.destination); + + audioProcessor = micProcessor; +} + +function setupLinuxMicProcessing(micStream) { + // Setup microphone audio processing for Linux + const micAudioContext = new AudioContext({ sampleRate: SAMPLE_RATE }); + const micSource = micAudioContext.createMediaStreamSource(micStream); + const micProcessor = micAudioContext.createScriptProcessor(BUFFER_SIZE, 1, 1); + + let audioBuffer = []; + const samplesPerChunk = SAMPLE_RATE * AUDIO_CHUNK_DURATION; + + micProcessor.onaudioprocess = async e => { + const inputData = e.inputBuffer.getChannelData(0); + audioBuffer.push(...inputData); + + // Process audio in chunks + while (audioBuffer.length >= samplesPerChunk) { + const chunk = audioBuffer.splice(0, samplesPerChunk); + const pcmData16 = convertFloat32ToInt16(chunk); + const base64Data = arrayBufferToBase64(pcmData16.buffer); + + await ipcRenderer.invoke('send-audio-content', { + data: base64Data, + mimeType: 'audio/pcm;rate=24000', + }); + } + }; + + micSource.connect(micProcessor); + micProcessor.connect(micAudioContext.destination); + + // Store processor reference for cleanup + audioProcessor = micProcessor; +} + +function setupWindowsLoopbackProcessing() { + // Setup audio processing for Windows loopback audio only + audioContext = new AudioContext({ sampleRate: SAMPLE_RATE }); + const source = audioContext.createMediaStreamSource(mediaStream); + audioProcessor = audioContext.createScriptProcessor(BUFFER_SIZE, 1, 1); + + let audioBuffer = []; + const samplesPerChunk = SAMPLE_RATE * AUDIO_CHUNK_DURATION; + + audioProcessor.onaudioprocess = async e => { + const inputData = e.inputBuffer.getChannelData(0); + audioBuffer.push(...inputData); + + // Process audio in chunks + while (audioBuffer.length >= samplesPerChunk) { + const chunk = audioBuffer.splice(0, samplesPerChunk); + const pcmData16 = convertFloat32ToInt16(chunk); + const base64Data = arrayBufferToBase64(pcmData16.buffer); + + await ipcRenderer.invoke('send-audio-content', { + data: base64Data, + mimeType: 'audio/pcm;rate=24000', + }); + } + }; + + source.connect(audioProcessor); + audioProcessor.connect(audioContext.destination); +} + +// --------------------------- +// Screenshot functions (exact from renderer.js) +// --------------------------- +async function 
captureScreenshot(imageQuality = 'medium', isManual = false) { + console.log(`Capturing ${isManual ? 'manual' : 'automated'} screenshot...`); + + // Check rate limiting for automated screenshots only + if (!isManual && tokenTracker.shouldThrottle()) { + console.log('⚠️ Automated screenshot skipped due to rate limiting'); + return; + } + + try { + // Request screenshot from main process + const result = await ipcRenderer.invoke('capture-screenshot', { + quality: imageQuality, + }); + + if (result.success && result.base64) { + // Store the latest screenshot + lastScreenshotBase64 = result.base64; + + // Note: sendResult is not defined in the original, this was likely an error + // Commenting out this section as it references undefined variable + /* + if (sendResult.success) { + // Track image tokens after successful send + const imageTokens = tokenTracker.calculateImageTokens(result.width || 1920, result.height || 1080); + tokenTracker.addTokens(imageTokens, 'image'); + console.log(`📊 Image sent successfully - ${imageTokens} tokens used (${result.width}x${result.height})`); + } else { + console.error('Failed to send image:', sendResult.error); + } + */ + } else { + console.error('Failed to capture screenshot:', result.error); } } catch (error) { - console.error('Error during OpenAI initialization IPC call:', error); - const appElement = pickleGlassElement(); - if (appElement && typeof appElement.setStatus === 'function') { - appElement.setStatus('Error'); - } + console.error('Error capturing screenshot:', error); } } +async function captureManualScreenshot(imageQuality = null) { + console.log('Manual screenshot triggered'); + const quality = imageQuality || currentImageQuality; + await captureScreenshot(quality, true); +} -ipcRenderer.on('system-audio-data', (event, { data }) => { - systemAudioBuffer.push({ - data: data, - timestamp: Date.now(), - }); +async function getCurrentScreenshot() { + try { + // First try to get a fresh screenshot from main process + const result = await ipcRenderer.invoke('get-current-screenshot'); - // 오래된 데이터 제거 - if (systemAudioBuffer.length > MAX_SYSTEM_BUFFER_SIZE) { - systemAudioBuffer = systemAudioBuffer.slice(-MAX_SYSTEM_BUFFER_SIZE); - } - - console.log('📥 Received system audio for AEC reference'); -}); - -// Listen for status updates -ipcRenderer.on('update-status', (event, status) => { - console.log('Status update:', status); - pickleGlass.e().setStatus(status); -}); - -// Listen for real-time STT updates -ipcRenderer.on('stt-update', (event, data) => { - console.log('Renderer.js stt-update', data); - const { speaker, text, isFinal, isPartial, timestamp } = data; - - if (isPartial) { - console.log(`🔄 [${speaker} - partial]: ${text}`); - } else if (isFinal) { - console.log(`✅ [${speaker} - final]: ${text}`); - - const speakerText = speaker.toLowerCase(); - const conversationText = `${speakerText}: ${text.trim()}`; - - realtimeConversationHistory.push(conversationText); - - if (realtimeConversationHistory.length > 30) { - realtimeConversationHistory = realtimeConversationHistory.slice(-30); + if (result.success && result.base64) { + console.log('📸 Got fresh screenshot from main process'); + return result.base64; } - console.log(`📝 Updated realtime conversation history: ${realtimeConversationHistory.length} texts`); - console.log(`📋 Latest text: ${conversationText}`); - } - - if (pickleGlass.e() && typeof pickleGlass.e().updateRealtimeTranscription === 'function') { - pickleGlass.e().updateRealtimeTranscription({ - speaker, - text, - isFinal, - isPartial, - 
timestamp, + // If no screenshot available, capture one now + console.log('📸 No screenshot available, capturing new one'); + const captureResult = await ipcRenderer.invoke('capture-screenshot', { + quality: currentImageQuality, }); + + if (captureResult.success && captureResult.base64) { + lastScreenshotBase64 = captureResult.base64; + return captureResult.base64; + } + + // Fallback to last stored screenshot + if (lastScreenshotBase64) { + console.log('📸 Using cached screenshot'); + return lastScreenshotBase64; + } + + throw new Error('Failed to get screenshot'); + } catch (error) { + console.error('Error getting current screenshot:', error); + return null; } -}); - - -ipcRenderer.on('update-structured-data', (_, structuredData) => { - console.log('📥 Received structured data update:', structuredData); - window.pickleGlass.structuredData = structuredData; - window.pickleGlass.setStructuredData(structuredData); -}); -window.pickleGlass.structuredData = { - summary: [], - topic: { header: '', bullets: [] }, - actions: [], -}; -window.pickleGlass.setStructuredData = data => { - window.pickleGlass.structuredData = data; - pickleGlass.e()?.updateStructuredData?.(data); -}; +} +// --------------------------- +// Main capture functions (exact from renderer.js) +// --------------------------- async function startCapture(screenshotIntervalSeconds = 5, imageQuality = 'medium') { // Store the image quality for manual screenshots currentImageQuality = imageQuality; @@ -490,12 +600,6 @@ async function startCapture(screenshotIntervalSeconds = 5, imageQuality = 'mediu setupWindowsLoopbackProcessing(); } - // console.log('MediaStream obtained:', { - // hasVideo: mediaStream.getVideoTracks().length > 0, - // hasAudio: mediaStream.getAudioTracks().length > 0, - // videoTrack: mediaStream.getVideoTracks()[0]?.getSettings(), - // }); - // Start capturing screenshots - check if manual mode if (screenshotIntervalSeconds === 'manual' || screenshotIntervalSeconds === 'Manual') { console.log('Manual mode enabled - screenshots will be captured on demand only'); @@ -511,162 +615,11 @@ async function startCapture(screenshotIntervalSeconds = 5, imageQuality = 'mediu } } catch (err) { console.error('Error starting capture:', err); - pickleGlass.e().setStatus('error'); + // Note: pickleGlass.e() is not available in this context, commenting out + // pickleGlass.e().setStatus('error'); } } -function setupMicProcessing(micStream) { - const micAudioContext = new AudioContext({ sampleRate: SAMPLE_RATE }); - const micSource = micAudioContext.createMediaStreamSource(micStream); - const micProcessor = micAudioContext.createScriptProcessor(BUFFER_SIZE, 1, 1); - - let audioBuffer = []; - const samplesPerChunk = SAMPLE_RATE * AUDIO_CHUNK_DURATION; - - micProcessor.onaudioprocess = async e => { - const inputData = e.inputBuffer.getChannelData(0); - audioBuffer.push(...inputData); - - while (audioBuffer.length >= samplesPerChunk) { - let chunk = audioBuffer.splice(0, samplesPerChunk); - let processedChunk = new Float32Array(chunk); - - // Check for system audio and apply AEC only if voice is active - if (aecProcessor && systemAudioBuffer.length > 0) { - const latestSystemAudio = systemAudioBuffer[systemAudioBuffer.length - 1]; - const systemFloat32 = base64ToFloat32Array(latestSystemAudio.data); - - // Apply AEC only when system audio has active speech - if (isVoiceActive(systemFloat32)) { - processedChunk = aecProcessor.process(new Float32Array(chunk), systemFloat32); - console.log('🔊 Applied AEC because system audio is active'); - 
} - } - - const pcmData16 = convertFloat32ToInt16(processedChunk); - const base64Data = arrayBufferToBase64(pcmData16.buffer); - - await ipcRenderer.invoke('send-audio-content', { - data: base64Data, - mimeType: 'audio/pcm;rate=24000', - }); - } - }; - - micSource.connect(micProcessor); - micProcessor.connect(micAudioContext.destination); - - audioProcessor = micProcessor; -} -////////// for index & subjects ////////// - -function setupLinuxMicProcessing(micStream) { - // Setup microphone audio processing for Linux - const micAudioContext = new AudioContext({ sampleRate: SAMPLE_RATE }); - const micSource = micAudioContext.createMediaStreamSource(micStream); - const micProcessor = micAudioContext.createScriptProcessor(BUFFER_SIZE, 1, 1); - - let audioBuffer = []; - const samplesPerChunk = SAMPLE_RATE * AUDIO_CHUNK_DURATION; - - micProcessor.onaudioprocess = async e => { - const inputData = e.inputBuffer.getChannelData(0); - audioBuffer.push(...inputData); - - // Process audio in chunks - while (audioBuffer.length >= samplesPerChunk) { - const chunk = audioBuffer.splice(0, samplesPerChunk); - const pcmData16 = convertFloat32ToInt16(chunk); - const base64Data = arrayBufferToBase64(pcmData16.buffer); - - await ipcRenderer.invoke('send-audio-content', { - data: base64Data, - mimeType: 'audio/pcm;rate=24000', - }); - } - }; - - micSource.connect(micProcessor); - micProcessor.connect(micAudioContext.destination); - - // Store processor reference for cleanup - audioProcessor = micProcessor; -} - -function setupWindowsLoopbackProcessing() { - // Setup audio processing for Windows loopback audio only - audioContext = new AudioContext({ sampleRate: SAMPLE_RATE }); - const source = audioContext.createMediaStreamSource(mediaStream); - audioProcessor = audioContext.createScriptProcessor(BUFFER_SIZE, 1, 1); - - let audioBuffer = []; - const samplesPerChunk = SAMPLE_RATE * AUDIO_CHUNK_DURATION; - - audioProcessor.onaudioprocess = async e => { - const inputData = e.inputBuffer.getChannelData(0); - audioBuffer.push(...inputData); - - // Process audio in chunks - while (audioBuffer.length >= samplesPerChunk) { - const chunk = audioBuffer.splice(0, samplesPerChunk); - const pcmData16 = convertFloat32ToInt16(chunk); - const base64Data = arrayBufferToBase64(pcmData16.buffer); - - await ipcRenderer.invoke('send-audio-content', { - data: base64Data, - mimeType: 'audio/pcm;rate=24000', - }); - } - }; - - source.connect(audioProcessor); - audioProcessor.connect(audioContext.destination); -} - -async function captureScreenshot(imageQuality = 'medium', isManual = false) { - console.log(`Capturing ${isManual ? 
'manual' : 'automated'} screenshot...`); - - // Check rate limiting for automated screenshots only - if (!isManual && tokenTracker.shouldThrottle()) { - console.log('⚠️ Automated screenshot skipped due to rate limiting'); - return; - } - - try { - // Request screenshot from main process - const result = await ipcRenderer.invoke('capture-screenshot', { - quality: imageQuality, - }); - - if (result.success && result.base64) { - // Store the latest screenshot - lastScreenshotBase64 = result.base64; - - if (sendResult.success) { - // Track image tokens after successful send - const imageTokens = tokenTracker.calculateImageTokens(result.width || 1920, result.height || 1080); - tokenTracker.addTokens(imageTokens, 'image'); - console.log(`📊 Image sent successfully - ${imageTokens} tokens used (${result.width}x${result.height})`); - } else { - console.error('Failed to send image:', sendResult.error); - } - } else { - console.error('Failed to capture screenshot:', result.error); - } - } catch (error) { - console.error('Error capturing screenshot:', error); - } -} - -async function captureManualScreenshot(imageQuality = null) { - console.log('Manual screenshot triggered'); - const quality = imageQuality || currentImageQuality; - await captureScreenshot(quality, true); -} - -// Expose functions to global scope for external access -window.captureManualScreenshot = captureManualScreenshot; - function stopCapture() { if (screenshotInterval) { clearInterval(screenshotInterval); @@ -706,76 +659,25 @@ function stopCapture() { } } -async function getCurrentScreenshot() { - try { - // First try to get a fresh screenshot from main process - const result = await ipcRenderer.invoke('get-current-screenshot'); - - if (result.success && result.base64) { - console.log('📸 Got fresh screenshot from main process'); - return result.base64; - } - - // If no screenshot available, capture one now - console.log('📸 No screenshot available, capturing new one'); - const captureResult = await ipcRenderer.invoke('capture-screenshot', { - quality: currentImageQuality, - }); - - if (captureResult.success && captureResult.base64) { - lastScreenshotBase64 = captureResult.base64; - return captureResult.base64; - } - - // Fallback to last stored screenshot - if (lastScreenshotBase64) { - console.log('📸 Using cached screenshot'); - return lastScreenshotBase64; - } - - throw new Error('Failed to get screenshot'); - } catch (error) { - console.error('Error getting current screenshot:', error); - return null; - } -} - -function formatRealtimeConversationHistory() { - if (realtimeConversationHistory.length === 0) return 'No conversation history available.'; - - return realtimeConversationHistory.slice(-30).join('\n'); -} - -window.pickleGlass = { - initializeopenai, +// --------------------------- +// Exports & global registration +// --------------------------- +module.exports = { startCapture, stopCapture, - isLinux: isLinux, - isMacOS: isMacOS, - e: pickleGlassElement, + captureManualScreenshot, + getCurrentScreenshot, + isLinux, + isMacOS, }; -// ------------------------------------------------------- -// 🔔 React to session state changes from the main process -// When the session ends (isActive === false), ensure we stop -// all local capture pipelines (mic, screen, etc.). 
-// ------------------------------------------------------- -ipcRenderer.on('session-state-changed', (_event, { isActive }) => { - if (!isActive) { - console.log('[Renderer] Session ended – stopping local capture'); - stopCapture(); - } else { - console.log('[Renderer] New session started – clearing in-memory history and summaries'); - - // Reset live conversation & analysis caches - realtimeConversationHistory = []; - - const blankData = { - summary: [], - topic: { header: '', bullets: [] }, - actions: [], - followUps: [], - }; - window.pickleGlass.setStructuredData(blankData); - } -}); +// Expose functions to global scope for external access (exact from renderer.js) +if (typeof window !== 'undefined') { + window.captureManualScreenshot = captureManualScreenshot; + window.listenCapture = module.exports; + window.pickleGlass = window.pickleGlass || {}; + window.pickleGlass.startCapture = startCapture; + window.pickleGlass.stopCapture = stopCapture; + window.pickleGlass.captureManualScreenshot = captureManualScreenshot; + window.pickleGlass.getCurrentScreenshot = getCurrentScreenshot; +} \ No newline at end of file diff --git a/src/features/listen/renderer/renderer.js b/src/features/listen/renderer/renderer.js new file mode 100644 index 0000000..6601b48 --- /dev/null +++ b/src/features/listen/renderer/renderer.js @@ -0,0 +1,138 @@ +// renderer.js +const { ipcRenderer } = require('electron'); +const listenCapture = require('./listenCapture.js'); + +let realtimeConversationHistory = []; + +async function queryLoginState() { + const userState = await ipcRenderer.invoke('get-current-user'); + return userState; +} + +function pickleGlassElement() { + return document.getElementById('pickle-glass'); +} + +async function initializeopenai(profile = 'interview', language = 'en') { + // The API key is now handled in the main process from .env file. + // We just need to trigger the initialization. + try { + console.log(`Requesting OpenAI initialization with profile: ${profile}, language: ${language}`); + const success = await ipcRenderer.invoke('initialize-openai', profile, language); + if (success) { + // The status will be updated via 'update-status' event from the main process. 
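+            // (That event is consumed by the ipcRenderer.on('update-status', ...) listener further
+            // down in this file, which forwards the status string to the #pickle-glass element via
+            // pickleGlass.e().setStatus(status).)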
+ console.log('OpenAI initialization successful.'); + } else { + console.error('OpenAI initialization failed.'); + const appElement = pickleGlassElement(); + if (appElement && typeof appElement.setStatus === 'function') { + appElement.setStatus('Initialization Failed'); + } + } + } catch (error) { + console.error('Error during OpenAI initialization IPC call:', error); + const appElement = pickleGlassElement(); + if (appElement && typeof appElement.setStatus === 'function') { + appElement.setStatus('Error'); + } + } +} + +// Listen for status updates +ipcRenderer.on('update-status', (event, status) => { + console.log('Status update:', status); + pickleGlass.e().setStatus(status); +}); + +// Listen for real-time STT updates +ipcRenderer.on('stt-update', (event, data) => { + console.log('Renderer.js stt-update', data); + const { speaker, text, isFinal, isPartial, timestamp } = data; + + if (isPartial) { + console.log(`🔄 [${speaker} - partial]: ${text}`); + } else if (isFinal) { + console.log(`✅ [${speaker} - final]: ${text}`); + + const speakerText = speaker.toLowerCase(); + const conversationText = `${speakerText}: ${text.trim()}`; + + realtimeConversationHistory.push(conversationText); + + if (realtimeConversationHistory.length > 30) { + realtimeConversationHistory = realtimeConversationHistory.slice(-30); + } + + console.log(`📝 Updated realtime conversation history: ${realtimeConversationHistory.length} texts`); + console.log(`📋 Latest text: ${conversationText}`); + } + + if (pickleGlass.e() && typeof pickleGlass.e().updateRealtimeTranscription === 'function') { + pickleGlass.e().updateRealtimeTranscription({ + speaker, + text, + isFinal, + isPartial, + timestamp, + }); + } +}); + +ipcRenderer.on('update-structured-data', (_, structuredData) => { + console.log('📥 Received structured data update:', structuredData); + window.pickleGlass.structuredData = structuredData; + window.pickleGlass.setStructuredData(structuredData); +}); + +window.pickleGlass.structuredData = { + summary: [], + topic: { header: '', bullets: [] }, + actions: [], +}; + +window.pickleGlass.setStructuredData = data => { + window.pickleGlass.structuredData = data; + pickleGlass.e()?.updateStructuredData?.(data); +}; + +function formatRealtimeConversationHistory() { + if (realtimeConversationHistory.length === 0) return 'No conversation history available.'; + + return realtimeConversationHistory.slice(-30).join('\n'); +} + +window.pickleGlass = { + initializeopenai, + startCapture: listenCapture.startCapture, + stopCapture: listenCapture.stopCapture, + isLinux: listenCapture.isLinux, + isMacOS: listenCapture.isMacOS, + captureManualScreenshot: listenCapture.captureManualScreenshot, + getCurrentScreenshot: listenCapture.getCurrentScreenshot, + e: pickleGlassElement, +}; + +// ------------------------------------------------------- +// 🔔 React to session state changes from the main process +// When the session ends (isActive === false), ensure we stop +// all local capture pipelines (mic, screen, etc.). 
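+// The payload destructured below is assumed to have the shape { isActive: boolean }.
+// On session end we stop capture; on session start the in-memory transcript history and the
+// structured-data cache (summary / topic / actions / followUps) are reset to blank values.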
+// ------------------------------------------------------- +ipcRenderer.on('session-state-changed', (_event, { isActive }) => { + if (!isActive) { + console.log('[Renderer] Session ended – stopping local capture'); + listenCapture.stopCapture(); + } else { + console.log('[Renderer] New session started – clearing in-memory history and summaries'); + + // Reset live conversation & analysis caches + realtimeConversationHistory = []; + + const blankData = { + summary: [], + topic: { header: '', bullets: [] }, + actions: [], + followUps: [], + }; + window.pickleGlass.setStructuredData(blankData); + } +}); diff --git a/src/features/listen/repositories/sqlite.repository.js b/src/features/listen/repositories/sqlite.repository.js deleted file mode 100644 index 7d293cd..0000000 --- a/src/features/listen/repositories/sqlite.repository.js +++ /dev/null @@ -1,66 +0,0 @@ -const sqliteClient = require('../../../common/services/sqliteClient'); - -function addTranscript({ sessionId, speaker, text }) { - const db = sqliteClient.getDb(); - return new Promise((resolve, reject) => { - const transcriptId = require('crypto').randomUUID(); - const now = Math.floor(Date.now() / 1000); - const query = `INSERT INTO transcripts (id, session_id, start_at, speaker, text, created_at) VALUES (?, ?, ?, ?, ?, ?)`; - db.run(query, [transcriptId, sessionId, now, speaker, text, now], function(err) { - if (err) reject(err); - else resolve({ id: transcriptId }); - }); - }); -} - -function saveSummary({ sessionId, tldr, text, bullet_json, action_json, model = 'gpt-4.1' }) { - const db = sqliteClient.getDb(); - return new Promise((resolve, reject) => { - const now = Math.floor(Date.now() / 1000); - const query = ` - INSERT INTO summaries (session_id, generated_at, model, text, tldr, bullet_json, action_json, updated_at) - VALUES (?, ?, ?, ?, ?, ?, ?, ?) - ON CONFLICT(session_id) DO UPDATE SET - generated_at=excluded.generated_at, - model=excluded.model, - text=excluded.text, - tldr=excluded.tldr, - bullet_json=excluded.bullet_json, - action_json=excluded.action_json, - updated_at=excluded.updated_at - `; - db.run(query, [sessionId, now, model, text, tldr, bullet_json, action_json, now], function(err) { - if (err) reject(err); - else resolve({ changes: this.changes }); - }); - }); -} - -function getAllTranscriptsBySessionId(sessionId) { - const db = sqliteClient.getDb(); - return new Promise((resolve, reject) => { - const query = "SELECT * FROM transcripts WHERE session_id = ? 
ORDER BY start_at ASC"; - db.all(query, [sessionId], (err, rows) => { - if (err) reject(err); - else resolve(rows); - }); - }); -} - -function getSummaryBySessionId(sessionId) { - const db = sqliteClient.getDb(); - return new Promise((resolve, reject) => { - const query = "SELECT * FROM summaries WHERE session_id = ?"; - db.get(query, [sessionId], (err, row) => { - if (err) reject(err); - else resolve(row || null); - }); - }); -} - -module.exports = { - addTranscript, - saveSummary, - getAllTranscriptsBySessionId, - getSummaryBySessionId -}; \ No newline at end of file diff --git a/src/features/listen/stt/SttView.js b/src/features/listen/stt/SttView.js new file mode 100644 index 0000000..31e2a5a --- /dev/null +++ b/src/features/listen/stt/SttView.js @@ -0,0 +1,228 @@ +import { html, css, LitElement } from '../../../assets/lit-core-2.7.4.min.js'; + +export class SttView extends LitElement { + static styles = css` + :host { + display: block; + width: 100%; + } + + /* Inherit font styles from parent */ + + .transcription-container { + overflow-y: auto; + padding: 12px 12px 16px 12px; + display: flex; + flex-direction: column; + gap: 8px; + min-height: 150px; + max-height: 600px; + position: relative; + z-index: 1; + flex: 1; + } + + /* Visibility handled by parent component */ + + .transcription-container::-webkit-scrollbar { + width: 8px; + } + .transcription-container::-webkit-scrollbar-track { + background: rgba(0, 0, 0, 0.1); + border-radius: 4px; + } + .transcription-container::-webkit-scrollbar-thumb { + background: rgba(255, 255, 255, 0.3); + border-radius: 4px; + } + .transcription-container::-webkit-scrollbar-thumb:hover { + background: rgba(255, 255, 255, 0.5); + } + + .stt-message { + padding: 8px 12px; + border-radius: 12px; + max-width: 80%; + word-wrap: break-word; + word-break: break-word; + line-height: 1.5; + font-size: 13px; + margin-bottom: 4px; + box-sizing: border-box; + } + + .stt-message.them { + background: rgba(255, 255, 255, 0.1); + color: rgba(255, 255, 255, 0.9); + align-self: flex-start; + border-bottom-left-radius: 4px; + margin-right: auto; + } + + .stt-message.me { + background: rgba(0, 122, 255, 0.8); + color: white; + align-self: flex-end; + border-bottom-right-radius: 4px; + margin-left: auto; + } + + .empty-state { + display: flex; + align-items: center; + justify-content: center; + height: 100px; + color: rgba(255, 255, 255, 0.6); + font-size: 12px; + font-style: italic; + } + `; + + static properties = { + sttMessages: { type: Array }, + isVisible: { type: Boolean }, + }; + + constructor() { + super(); + this.sttMessages = []; + this.isVisible = true; + this.messageIdCounter = 0; + this._shouldScrollAfterUpdate = false; + + this.handleSttUpdate = this.handleSttUpdate.bind(this); + } + + connectedCallback() { + super.connectedCallback(); + if (window.require) { + const { ipcRenderer } = window.require('electron'); + ipcRenderer.on('stt-update', this.handleSttUpdate); + } + } + + disconnectedCallback() { + super.disconnectedCallback(); + if (window.require) { + const { ipcRenderer } = window.require('electron'); + ipcRenderer.removeListener('stt-update', this.handleSttUpdate); + } + } + + // Handle session reset from parent + resetTranscript() { + this.sttMessages = []; + this.requestUpdate(); + } + + handleSttUpdate(event, { speaker, text, isFinal, isPartial }) { + if (text === undefined) return; + + const container = this.shadowRoot.querySelector('.transcription-container'); + this._shouldScrollAfterUpdate = container ? 
container.scrollTop + container.clientHeight >= container.scrollHeight - 10 : false; + + const findLastPartialIdx = spk => { + for (let i = this.sttMessages.length - 1; i >= 0; i--) { + const m = this.sttMessages[i]; + if (m.speaker === spk && m.isPartial) return i; + } + return -1; + }; + + const newMessages = [...this.sttMessages]; + const targetIdx = findLastPartialIdx(speaker); + + if (isPartial) { + if (targetIdx !== -1) { + newMessages[targetIdx] = { + ...newMessages[targetIdx], + text, + isPartial: true, + isFinal: false, + }; + } else { + newMessages.push({ + id: this.messageIdCounter++, + speaker, + text, + isPartial: true, + isFinal: false, + }); + } + } else if (isFinal) { + if (targetIdx !== -1) { + newMessages[targetIdx] = { + ...newMessages[targetIdx], + text, + isPartial: false, + isFinal: true, + }; + } else { + newMessages.push({ + id: this.messageIdCounter++, + speaker, + text, + isPartial: false, + isFinal: true, + }); + } + } + + this.sttMessages = newMessages; + + // Notify parent component about message updates + this.dispatchEvent(new CustomEvent('stt-messages-updated', { + detail: { messages: this.sttMessages }, + bubbles: true + })); + } + + scrollToBottom() { + setTimeout(() => { + const container = this.shadowRoot.querySelector('.transcription-container'); + if (container) { + container.scrollTop = container.scrollHeight; + } + }, 0); + } + + getSpeakerClass(speaker) { + return speaker.toLowerCase() === 'me' ? 'me' : 'them'; + } + + getTranscriptText() { + return this.sttMessages.map(msg => `${msg.speaker}: ${msg.text}`).join('\n'); + } + + updated(changedProperties) { + super.updated(changedProperties); + + if (changedProperties.has('sttMessages')) { + if (this._shouldScrollAfterUpdate) { + this.scrollToBottom(); + this._shouldScrollAfterUpdate = false; + } + } + } + + render() { + if (!this.isVisible) { + return html`
<div style="display: none;"></div>`;
+        }
+
+        return html`
+            <div class="transcription-container">
+                ${this.sttMessages.length === 0
+                    ? html`<div class="empty-state">Waiting for speech...</div>`
+                    : this.sttMessages.map(msg => html`
+                        <div class="stt-message ${this.getSpeakerClass(msg.speaker)}">
+                            ${msg.text}
+                        </div>
+                    `)
+                }
+            </div>
+ `; + } +} + +customElements.define('stt-view', SttView); \ No newline at end of file diff --git a/src/features/listen/stt/repositories/index.js b/src/features/listen/stt/repositories/index.js new file mode 100644 index 0000000..6de1a98 --- /dev/null +++ b/src/features/listen/stt/repositories/index.js @@ -0,0 +1,5 @@ +const sttRepository = require('./sqlite.repository'); + +module.exports = { + ...sttRepository, +}; \ No newline at end of file diff --git a/src/features/listen/stt/repositories/sqlite.repository.js b/src/features/listen/stt/repositories/sqlite.repository.js new file mode 100644 index 0000000..4de47bd --- /dev/null +++ b/src/features/listen/stt/repositories/sqlite.repository.js @@ -0,0 +1,37 @@ +const sqliteClient = require('../../../../common/services/sqliteClient'); + +function addTranscript({ sessionId, speaker, text }) { + const db = sqliteClient.getDb(); + return new Promise((resolve, reject) => { + const transcriptId = require('crypto').randomUUID(); + const now = Math.floor(Date.now() / 1000); + const query = `INSERT INTO transcripts (id, session_id, start_at, speaker, text, created_at) VALUES (?, ?, ?, ?, ?, ?)`; + db.run(query, [transcriptId, sessionId, now, speaker, text, now], function(err) { + if (err) { + console.error('Error adding transcript:', err); + reject(err); + } else { + resolve({ id: transcriptId }); + } + }); + }); +} + +function getAllTranscriptsBySessionId(sessionId) { + const db = sqliteClient.getDb(); + return new Promise((resolve, reject) => { + const query = "SELECT * FROM transcripts WHERE session_id = ? ORDER BY start_at ASC"; + db.all(query, [sessionId], (err, rows) => { + if (err) { + reject(err); + } else { + resolve(rows); + } + }); + }); +} + +module.exports = { + addTranscript, + getAllTranscriptsBySessionId, +}; \ No newline at end of file diff --git a/src/features/listen/stt/sttService.js b/src/features/listen/stt/sttService.js new file mode 100644 index 0000000..294bd74 --- /dev/null +++ b/src/features/listen/stt/sttService.js @@ -0,0 +1,478 @@ +const { BrowserWindow } = require('electron'); +const { spawn } = require('child_process'); +const { createSTT } = require('../../../common/ai/factory'); +const { getStoredApiKey, getStoredProvider } = require('../../../electron/windowManager'); + +const COMPLETION_DEBOUNCE_MS = 2000; + +class SttService { + constructor() { + this.mySttSession = null; + this.theirSttSession = null; + this.myCurrentUtterance = ''; + this.theirCurrentUtterance = ''; + + this.myLastPartialText = ''; + this.theirLastPartialText = ''; + this.myInactivityTimer = null; + this.theirInactivityTimer = null; + + // Turn-completion debouncing + this.myCompletionBuffer = ''; + this.theirCompletionBuffer = ''; + this.myCompletionTimer = null; + this.theirCompletionTimer = null; + + // System audio capture + this.systemAudioProc = null; + + // Callbacks + this.onTranscriptionComplete = null; + this.onStatusUpdate = null; + } + + setCallbacks({ onTranscriptionComplete, onStatusUpdate }) { + this.onTranscriptionComplete = onTranscriptionComplete; + this.onStatusUpdate = onStatusUpdate; + } + + async getApiKey() { + const storedKey = await getStoredApiKey(); + if (storedKey) { + console.log('[SttService] Using stored API key'); + return storedKey; + } + + const envKey = process.env.OPENAI_API_KEY; + if (envKey) { + console.log('[SttService] Using environment API key'); + return envKey; + } + + console.error('[SttService] No API key found in storage or environment'); + return null; + } + + async getAiProvider() { + try { + const 
{ ipcRenderer } = require('electron'); + const provider = await ipcRenderer.invoke('get-ai-provider'); + return provider || 'openai'; + } catch (error) { + return getStoredProvider ? getStoredProvider() : 'openai'; + } + } + + sendToRenderer(channel, data) { + BrowserWindow.getAllWindows().forEach(win => { + if (!win.isDestroyed()) { + win.webContents.send(channel, data); + } + }); + } + + flushMyCompletion() { + if (!this.myCompletionBuffer.trim()) return; + + const finalText = this.myCompletionBuffer.trim(); + + // Notify completion callback + if (this.onTranscriptionComplete) { + this.onTranscriptionComplete('Me', finalText); + } + + // Send to renderer as final + this.sendToRenderer('stt-update', { + speaker: 'Me', + text: finalText, + isPartial: false, + isFinal: true, + timestamp: Date.now(), + }); + + this.myCompletionBuffer = ''; + this.myCompletionTimer = null; + this.myCurrentUtterance = ''; + + if (this.onStatusUpdate) { + this.onStatusUpdate('Listening...'); + } + } + + flushTheirCompletion() { + if (!this.theirCompletionBuffer.trim()) return; + + const finalText = this.theirCompletionBuffer.trim(); + + // Notify completion callback + if (this.onTranscriptionComplete) { + this.onTranscriptionComplete('Them', finalText); + } + + // Send to renderer as final + this.sendToRenderer('stt-update', { + speaker: 'Them', + text: finalText, + isPartial: false, + isFinal: true, + timestamp: Date.now(), + }); + + this.theirCompletionBuffer = ''; + this.theirCompletionTimer = null; + this.theirCurrentUtterance = ''; + + if (this.onStatusUpdate) { + this.onStatusUpdate('Listening...'); + } + } + + debounceMyCompletion(text) { + // 상대방이 말하고 있던 경우, 화자가 변경되었으므로 즉시 상대방의 말풍선을 완성합니다. + if (this.theirCompletionTimer) { + clearTimeout(this.theirCompletionTimer); + this.flushTheirCompletion(); + } + + this.myCompletionBuffer += (this.myCompletionBuffer ? ' ' : '') + text; + + if (this.myCompletionTimer) clearTimeout(this.myCompletionTimer); + this.myCompletionTimer = setTimeout(() => this.flushMyCompletion(), COMPLETION_DEBOUNCE_MS); + } + + debounceTheirCompletion(text) { + // 내가 말하고 있던 경우, 화자가 변경되었으므로 즉시 내 말풍선을 완성합니다. + if (this.myCompletionTimer) { + clearTimeout(this.myCompletionTimer); + this.flushMyCompletion(); + } + + this.theirCompletionBuffer += (this.theirCompletionBuffer ? 
' ' : '') + text; + + if (this.theirCompletionTimer) clearTimeout(this.theirCompletionTimer); + this.theirCompletionTimer = setTimeout(() => this.flushTheirCompletion(), COMPLETION_DEBOUNCE_MS); + } + + async initializeSttSessions(language = 'en') { + const effectiveLanguage = process.env.OPENAI_TRANSCRIBE_LANG || language || 'en'; + + const API_KEY = await this.getApiKey(); + if (!API_KEY) { + throw new Error('No API key available'); + } + + const provider = await this.getAiProvider(); + const isGemini = provider === 'gemini'; + console.log(`[SttService] Initializing STT for provider: ${provider}`); + + const handleMyMessage = message => { + if (isGemini) { + const text = message.serverContent?.inputTranscription?.text || ''; + if (text && text.trim()) { + const finalUtteranceText = text.trim().replace(//g, '').trim(); + if (finalUtteranceText && finalUtteranceText !== '.') { + this.debounceMyCompletion(finalUtteranceText); + } + } + } else { + const type = message.type; + const text = message.transcript || message.delta || (message.alternatives && message.alternatives[0]?.transcript) || ''; + + if (type === 'conversation.item.input_audio_transcription.delta') { + if (this.myCompletionTimer) clearTimeout(this.myCompletionTimer); + this.myCompletionTimer = null; + this.myCurrentUtterance += text; + const continuousText = this.myCompletionBuffer + (this.myCompletionBuffer ? ' ' : '') + this.myCurrentUtterance; + if (text && !text.includes('vq_lbr_audio_')) { + this.sendToRenderer('stt-update', { + speaker: 'Me', + text: continuousText, + isPartial: true, + isFinal: false, + timestamp: Date.now(), + }); + } + } else if (type === 'conversation.item.input_audio_transcription.completed') { + if (text && text.trim()) { + const finalUtteranceText = text.trim(); + this.myCurrentUtterance = ''; + this.debounceMyCompletion(finalUtteranceText); + } + } + } + + if (message.error) { + console.error('[Me] STT Session Error:', message.error); + } + }; + + const handleTheirMessage = message => { + if (isGemini) { + const text = message.serverContent?.inputTranscription?.text || ''; + if (text && text.trim()) { + const finalUtteranceText = text.trim().replace(//g, '').trim(); + if (finalUtteranceText && finalUtteranceText !== '.') { + this.debounceTheirCompletion(finalUtteranceText); + } + } + } else { + const type = message.type; + const text = message.transcript || message.delta || (message.alternatives && message.alternatives[0]?.transcript) || ''; + if (type === 'conversation.item.input_audio_transcription.delta') { + if (this.theirCompletionTimer) clearTimeout(this.theirCompletionTimer); + this.theirCompletionTimer = null; + this.theirCurrentUtterance += text; + const continuousText = this.theirCompletionBuffer + (this.theirCompletionBuffer ? 
' ' : '') + this.theirCurrentUtterance; + if (text && !text.includes('vq_lbr_audio_')) { + this.sendToRenderer('stt-update', { + speaker: 'Them', + text: continuousText, + isPartial: true, + isFinal: false, + timestamp: Date.now(), + }); + } + } else if (type === 'conversation.item.input_audio_transcription.completed') { + if (text && text.trim()) { + const finalUtteranceText = text.trim(); + this.theirCurrentUtterance = ''; + this.debounceTheirCompletion(finalUtteranceText); + } + } + } + + if (message.error) { + console.error('[Them] STT Session Error:', message.error); + } + }; + + const mySttConfig = { + language: effectiveLanguage, + callbacks: { + onmessage: handleMyMessage, + onerror: error => console.error('My STT session error:', error.message), + onclose: event => console.log('My STT session closed:', event.reason), + }, + }; + + const theirSttConfig = { + language: effectiveLanguage, + callbacks: { + onmessage: handleTheirMessage, + onerror: error => console.error('Their STT session error:', error.message), + onclose: event => console.log('Their STT session closed:', event.reason), + }, + }; + + // Determine auth options for providers that support it + const authService = require('../../../common/services/authService'); + const userState = authService.getCurrentUser(); + const loggedIn = userState.isLoggedIn; + + const sttOptions = { + apiKey: API_KEY, + language: effectiveLanguage, + usePortkey: !isGemini && loggedIn, // Only OpenAI supports Portkey + portkeyVirtualKey: loggedIn ? API_KEY : undefined + }; + + [this.mySttSession, this.theirSttSession] = await Promise.all([ + createSTT(provider, { ...sttOptions, callbacks: mySttConfig.callbacks }), + createSTT(provider, { ...sttOptions, callbacks: theirSttConfig.callbacks }), + ]); + + console.log('✅ Both STT sessions initialized successfully.'); + return true; + } + + async sendAudioContent(data, mimeType) { + const provider = await this.getAiProvider(); + const isGemini = provider === 'gemini'; + + if (!this.mySttSession) { + throw new Error('User STT session not active'); + } + + const payload = isGemini + ? { audio: { data, mimeType: mimeType || 'audio/pcm;rate=24000' } } + : data; + + await this.mySttSession.sendRealtimeInput(payload); + } + + killExistingSystemAudioDump() { + return new Promise(resolve => { + console.log('Checking for existing SystemAudioDump processes...'); + + const killProc = spawn('pkill', ['-f', 'SystemAudioDump'], { + stdio: 'ignore', + }); + + killProc.on('close', code => { + if (code === 0) { + console.log('Killed existing SystemAudioDump processes'); + } else { + console.log('No existing SystemAudioDump processes found'); + } + resolve(); + }); + + killProc.on('error', err => { + console.log('Error checking for existing processes (this is normal):', err.message); + resolve(); + }); + + setTimeout(() => { + killProc.kill(); + resolve(); + }, 2000); + }); + } + + async startMacOSAudioCapture() { + if (process.platform !== 'darwin' || !this.theirSttSession) return false; + + await this.killExistingSystemAudioDump(); + console.log('Starting macOS audio capture for "Them"...'); + + const { app } = require('electron'); + const path = require('path'); + const systemAudioPath = app.isPackaged + ? 
path.join(process.resourcesPath, 'app.asar.unpacked', 'src', 'assets', 'SystemAudioDump') + : path.join(app.getAppPath(), 'src', 'assets', 'SystemAudioDump'); + + console.log('SystemAudioDump path:', systemAudioPath); + + this.systemAudioProc = spawn(systemAudioPath, [], { + stdio: ['ignore', 'pipe', 'pipe'], + }); + + if (!this.systemAudioProc.pid) { + console.error('Failed to start SystemAudioDump'); + return false; + } + + console.log('SystemAudioDump started with PID:', this.systemAudioProc.pid); + + const CHUNK_DURATION = 0.1; + const SAMPLE_RATE = 24000; + const BYTES_PER_SAMPLE = 2; + const CHANNELS = 2; + const CHUNK_SIZE = SAMPLE_RATE * BYTES_PER_SAMPLE * CHANNELS * CHUNK_DURATION; + + let audioBuffer = Buffer.alloc(0); + + const provider = await this.getAiProvider(); + const isGemini = provider === 'gemini'; + + this.systemAudioProc.stdout.on('data', async data => { + audioBuffer = Buffer.concat([audioBuffer, data]); + + while (audioBuffer.length >= CHUNK_SIZE) { + const chunk = audioBuffer.slice(0, CHUNK_SIZE); + audioBuffer = audioBuffer.slice(CHUNK_SIZE); + + const monoChunk = CHANNELS === 2 ? this.convertStereoToMono(chunk) : chunk; + const base64Data = monoChunk.toString('base64'); + + this.sendToRenderer('system-audio-data', { data: base64Data }); + + if (this.theirSttSession) { + try { + const payload = isGemini + ? { audio: { data: base64Data, mimeType: 'audio/pcm;rate=24000' } } + : base64Data; + await this.theirSttSession.sendRealtimeInput(payload); + } catch (err) { + console.error('Error sending system audio:', err.message); + } + } + } + }); + + this.systemAudioProc.stderr.on('data', data => { + console.error('SystemAudioDump stderr:', data.toString()); + }); + + this.systemAudioProc.on('close', code => { + console.log('SystemAudioDump process closed with code:', code); + this.systemAudioProc = null; + }); + + this.systemAudioProc.on('error', err => { + console.error('SystemAudioDump process error:', err); + this.systemAudioProc = null; + }); + + return true; + } + + convertStereoToMono(stereoBuffer) { + const samples = stereoBuffer.length / 4; + const monoBuffer = Buffer.alloc(samples * 2); + + for (let i = 0; i < samples; i++) { + const leftSample = stereoBuffer.readInt16LE(i * 4); + monoBuffer.writeInt16LE(leftSample, i * 2); + } + + return monoBuffer; + } + + stopMacOSAudioCapture() { + if (this.systemAudioProc) { + console.log('Stopping SystemAudioDump...'); + this.systemAudioProc.kill('SIGTERM'); + this.systemAudioProc = null; + } + } + + isSessionActive() { + return !!this.mySttSession && !!this.theirSttSession; + } + + async closeSessions() { + this.stopMacOSAudioCapture(); + + // Clear timers + if (this.myInactivityTimer) { + clearTimeout(this.myInactivityTimer); + this.myInactivityTimer = null; + } + if (this.theirInactivityTimer) { + clearTimeout(this.theirInactivityTimer); + this.theirInactivityTimer = null; + } + if (this.myCompletionTimer) { + clearTimeout(this.myCompletionTimer); + this.myCompletionTimer = null; + } + if (this.theirCompletionTimer) { + clearTimeout(this.theirCompletionTimer); + this.theirCompletionTimer = null; + } + + const closePromises = []; + if (this.mySttSession) { + closePromises.push(this.mySttSession.close()); + this.mySttSession = null; + } + if (this.theirSttSession) { + closePromises.push(this.theirSttSession.close()); + this.theirSttSession = null; + } + + await Promise.all(closePromises); + console.log('All STT sessions closed.'); + + // Reset state + this.myCurrentUtterance = ''; + this.theirCurrentUtterance = ''; + 
this.myLastPartialText = ''; + this.theirLastPartialText = ''; + this.myCompletionBuffer = ''; + this.theirCompletionBuffer = ''; + } +} + +module.exports = SttService; \ No newline at end of file diff --git a/src/features/listen/summary/SummaryView.js b/src/features/listen/summary/SummaryView.js new file mode 100644 index 0000000..15d79b9 --- /dev/null +++ b/src/features/listen/summary/SummaryView.js @@ -0,0 +1,559 @@ +import { html, css, LitElement } from '../../../assets/lit-core-2.7.4.min.js'; + +export class SummaryView extends LitElement { + static styles = css` + :host { + display: block; + width: 100%; + } + + /* Inherit font styles from parent */ + + /* highlight.js 스타일 추가 */ + .insights-container pre { + background: rgba(0, 0, 0, 0.4) !important; + border-radius: 8px !important; + padding: 12px !important; + margin: 8px 0 !important; + overflow-x: auto !important; + border: 1px solid rgba(255, 255, 255, 0.1) !important; + white-space: pre !important; + word-wrap: normal !important; + word-break: normal !important; + } + + .insights-container code { + font-family: 'Monaco', 'Menlo', 'Consolas', monospace !important; + font-size: 11px !important; + background: transparent !important; + white-space: pre !important; + word-wrap: normal !important; + word-break: normal !important; + } + + .insights-container pre code { + white-space: pre !important; + word-wrap: normal !important; + word-break: normal !important; + display: block !important; + } + + .insights-container p code { + background: rgba(255, 255, 255, 0.1) !important; + padding: 2px 4px !important; + border-radius: 3px !important; + color: #ffd700 !important; + } + + .hljs-keyword { + color: #ff79c6 !important; + } + .hljs-string { + color: #f1fa8c !important; + } + .hljs-comment { + color: #6272a4 !important; + } + .hljs-number { + color: #bd93f9 !important; + } + .hljs-function { + color: #50fa7b !important; + } + .hljs-variable { + color: #8be9fd !important; + } + .hljs-built_in { + color: #ffb86c !important; + } + .hljs-title { + color: #50fa7b !important; + } + .hljs-attr { + color: #50fa7b !important; + } + .hljs-tag { + color: #ff79c6 !important; + } + + .insights-container { + overflow-y: auto; + padding: 12px 16px 16px 16px; + position: relative; + z-index: 1; + min-height: 150px; + max-height: 600px; + flex: 1; + } + + /* Visibility handled by parent component */ + + .insights-container::-webkit-scrollbar { + width: 8px; + } + .insights-container::-webkit-scrollbar-track { + background: rgba(0, 0, 0, 0.1); + border-radius: 4px; + } + .insights-container::-webkit-scrollbar-thumb { + background: rgba(255, 255, 255, 0.3); + border-radius: 4px; + } + .insights-container::-webkit-scrollbar-thumb:hover { + background: rgba(255, 255, 255, 0.5); + } + + insights-title { + color: rgba(255, 255, 255, 0.8); + font-size: 15px; + font-weight: 500; + font-family: 'Helvetica Neue', sans-serif; + margin: 12px 0 8px 0; + display: block; + } + + .insights-container h4 { + color: #ffffff; + font-size: 12px; + font-weight: 600; + margin: 12px 0 8px 0; + padding: 4px 8px; + border-radius: 4px; + background: transparent; + cursor: default; + } + + .insights-container h4:hover { + background: transparent; + } + + .insights-container h4:first-child { + margin-top: 0; + } + + .outline-item { + color: #ffffff; + font-size: 11px; + line-height: 1.4; + margin: 4px 0; + padding: 6px 8px; + border-radius: 4px; + background: transparent; + transition: background-color 0.15s ease; + cursor: pointer; + word-wrap: break-word; + } + + 
.outline-item:hover { + background: rgba(255, 255, 255, 0.1); + } + + .request-item { + color: #ffffff; + font-size: 12px; + line-height: 1.2; + margin: 4px 0; + padding: 6px 8px; + border-radius: 4px; + background: transparent; + cursor: default; + word-wrap: break-word; + transition: background-color 0.15s ease; + } + + .request-item.clickable { + cursor: pointer; + transition: all 0.15s ease; + } + .request-item.clickable:hover { + background: rgba(255, 255, 255, 0.1); + transform: translateX(2px); + } + + /* 마크다운 렌더링된 콘텐츠 스타일 */ + .markdown-content { + color: #ffffff; + font-size: 11px; + line-height: 1.4; + margin: 4px 0; + padding: 6px 8px; + border-radius: 4px; + background: transparent; + cursor: pointer; + word-wrap: break-word; + transition: all 0.15s ease; + } + + .markdown-content:hover { + background: rgba(255, 255, 255, 0.1); + transform: translateX(2px); + } + + .markdown-content p { + margin: 4px 0; + } + + .markdown-content ul, + .markdown-content ol { + margin: 4px 0; + padding-left: 16px; + } + + .markdown-content li { + margin: 2px 0; + } + + .markdown-content a { + color: #8be9fd; + text-decoration: none; + } + + .markdown-content a:hover { + text-decoration: underline; + } + + .markdown-content strong { + font-weight: 600; + color: #f8f8f2; + } + + .markdown-content em { + font-style: italic; + color: #f1fa8c; + } + + .empty-state { + display: flex; + align-items: center; + justify-content: center; + height: 100px; + color: rgba(255, 255, 255, 0.6); + font-size: 12px; + font-style: italic; + } + `; + + static properties = { + structuredData: { type: Object }, + isVisible: { type: Boolean }, + hasCompletedRecording: { type: Boolean }, + }; + + constructor() { + super(); + this.structuredData = { + summary: [], + topic: { header: '', bullets: [] }, + actions: [], + followUps: [], + }; + this.isVisible = true; + this.hasCompletedRecording = false; + + // 마크다운 라이브러리 초기화 + this.marked = null; + this.hljs = null; + this.isLibrariesLoaded = false; + this.DOMPurify = null; + this.isDOMPurifyLoaded = false; + + this.loadLibraries(); + } + + connectedCallback() { + super.connectedCallback(); + if (window.require) { + const { ipcRenderer } = window.require('electron'); + ipcRenderer.on('update-structured-data', (event, data) => { + this.structuredData = data; + this.requestUpdate(); + }); + } + } + + disconnectedCallback() { + super.disconnectedCallback(); + if (window.require) { + const { ipcRenderer } = window.require('electron'); + ipcRenderer.removeAllListeners('update-structured-data'); + } + } + + // Handle session reset from parent + resetAnalysis() { + this.structuredData = { + summary: [], + topic: { header: '', bullets: [] }, + actions: [], + followUps: [], + }; + this.requestUpdate(); + } + + async loadLibraries() { + try { + if (!window.marked) { + await this.loadScript('../../../assets/marked-4.3.0.min.js'); + } + + if (!window.hljs) { + await this.loadScript('../../../assets/highlight-11.9.0.min.js'); + } + + if (!window.DOMPurify) { + await this.loadScript('../../../assets/dompurify-3.0.7.min.js'); + } + + this.marked = window.marked; + this.hljs = window.hljs; + this.DOMPurify = window.DOMPurify; + + if (this.marked && this.hljs) { + this.marked.setOptions({ + highlight: (code, lang) => { + if (lang && this.hljs.getLanguage(lang)) { + try { + return this.hljs.highlight(code, { language: lang }).value; + } catch (err) { + console.warn('Highlight error:', err); + } + } + try { + return this.hljs.highlightAuto(code).value; + } catch (err) { + console.warn('Auto 
highlight error:', err); + } + return code; + }, + breaks: true, + gfm: true, + pedantic: false, + smartypants: false, + xhtml: false, + }); + + this.isLibrariesLoaded = true; + console.log('Markdown libraries loaded successfully'); + } + + if (this.DOMPurify) { + this.isDOMPurifyLoaded = true; + console.log('DOMPurify loaded successfully in SummaryView'); + } + } catch (error) { + console.error('Failed to load libraries:', error); + } + } + + loadScript(src) { + return new Promise((resolve, reject) => { + const script = document.createElement('script'); + script.src = src; + script.onload = resolve; + script.onerror = reject; + document.head.appendChild(script); + }); + } + + parseMarkdown(text) { + if (!text) return ''; + + if (!this.isLibrariesLoaded || !this.marked) { + return text; + } + + try { + return this.marked(text); + } catch (error) { + console.error('Markdown parsing error:', error); + return text; + } + } + + handleMarkdownClick(originalText) { + this.handleRequestClick(originalText); + } + + renderMarkdownContent() { + if (!this.isLibrariesLoaded || !this.marked) { + return; + } + + const markdownElements = this.shadowRoot.querySelectorAll('[data-markdown-id]'); + markdownElements.forEach(element => { + const originalText = element.getAttribute('data-original-text'); + if (originalText) { + try { + let parsedHTML = this.parseMarkdown(originalText); + + if (this.isDOMPurifyLoaded && this.DOMPurify) { + parsedHTML = this.DOMPurify.sanitize(parsedHTML); + + if (this.DOMPurify.removed && this.DOMPurify.removed.length > 0) { + console.warn('Unsafe content detected in insights, showing plain text'); + element.textContent = '⚠️ ' + originalText; + return; + } + } + + element.innerHTML = parsedHTML; + } catch (error) { + console.error('Error rendering markdown for element:', error); + element.textContent = originalText; + } + } + }); + } + + async handleRequestClick(requestText) { + console.log('🔥 Analysis request clicked:', requestText); + + if (window.require) { + const { ipcRenderer } = window.require('electron'); + + try { + const isAskViewVisible = await ipcRenderer.invoke('is-window-visible', 'ask'); + + if (!isAskViewVisible) { + await ipcRenderer.invoke('toggle-feature', 'ask'); + await new Promise(resolve => setTimeout(resolve, 100)); + } + + const result = await ipcRenderer.invoke('send-question-to-ask', requestText); + + if (result.success) { + console.log('✅ Question sent to AskView successfully'); + } else { + console.error('❌ Failed to send question to AskView:', result.error); + } + } catch (error) { + console.error('❌ Error in handleRequestClick:', error); + } + } + } + + getSummaryText() { + const data = this.structuredData || { summary: [], topic: { header: '', bullets: [] }, actions: [] }; + let sections = []; + + if (data.summary && data.summary.length > 0) { + sections.push(`Current Summary:\n${data.summary.map(s => `• ${s}`).join('\n')}`); + } + + if (data.topic && data.topic.header && data.topic.bullets.length > 0) { + sections.push(`\n${data.topic.header}:\n${data.topic.bullets.map(b => `• ${b}`).join('\n')}`); + } + + if (data.actions && data.actions.length > 0) { + sections.push(`\nActions:\n${data.actions.map(a => `▸ ${a}`).join('\n')}`); + } + + if (data.followUps && data.followUps.length > 0) { + sections.push(`\nFollow-Ups:\n${data.followUps.map(f => `▸ ${f}`).join('\n')}`); + } + + return sections.join('\n\n').trim(); + } + + updated(changedProperties) { + super.updated(changedProperties); + this.renderMarkdownContent(); + } + + render() { + if 
(!this.isVisible) {
+            return html`<div style="display: none;"></div>`;
+        }
+
+        const data = this.structuredData || {
+            summary: [],
+            topic: { header: '', bullets: [] },
+            actions: [],
+        };
+
+        const hasAnyContent = data.summary.length > 0 || data.topic.bullets.length > 0 || data.actions.length > 0;
+
+        return html`
+            <div class="insights-container">
+                ${!hasAnyContent
+                    ? html`<div class="empty-state">No insights yet...</div>`
+                    : html`
+                          <insights-title>Current Summary</insights-title>
+                          ${data.summary.length > 0
+                              ? data.summary
+                                    .slice(0, 5)
+                                    .map(
+                                        (bullet, index) => html`
+                                            <div
+                                                class="markdown-content"
+                                                data-markdown-id="summary-${index}"
+                                                data-original-text="${bullet}"
+                                                @click=${() => this.handleMarkdownClick(bullet)}
+                                            >
+                                                ${bullet}
+                                            </div>
+                                        `
+                                    )
+                              : html`<div class="request-item">No content yet...</div>`}
+                          ${data.topic.header
+                              ? html`
+                                    <insights-title>${data.topic.header}</insights-title>
+                                    ${data.topic.bullets
+                                        .slice(0, 3)
+                                        .map(
+                                            (bullet, index) => html`
+                                                <div
+                                                    class="markdown-content"
+                                                    data-markdown-id="topic-${index}"
+                                                    data-original-text="${bullet}"
+                                                    @click=${() => this.handleMarkdownClick(bullet)}
+                                                >
+                                                    ${bullet}
+                                                </div>
+                                            `
+                                        )}
+                                `
+                              : ''}
+                          ${data.actions.length > 0
+                              ? html`
+                                    <insights-title>Actions</insights-title>
+                                    ${data.actions
+                                        .slice(0, 5)
+                                        .map(
+                                            (action, index) => html`
+                                                <div
+                                                    class="markdown-content"
+                                                    data-markdown-id="action-${index}"
+                                                    data-original-text="${action}"
+                                                    @click=${() => this.handleMarkdownClick(action)}
+                                                >
+                                                    ${action}
+                                                </div>
+                                            `
+                                        )}
+                                `
+                              : ''}
+                          ${this.hasCompletedRecording && data.followUps && data.followUps.length > 0
+                              ? html`
+                                    <insights-title>Follow-Ups</insights-title>
+                                    ${data.followUps.map(
+                                        (followUp, index) => html`
+                                            <div
+                                                class="markdown-content"
+                                                data-markdown-id="followup-${index}"
+                                                data-original-text="${followUp}"
+                                                @click=${() => this.handleMarkdownClick(followUp)}
+                                            >
+                                                ${followUp}
+                                            </div>
+                                        `
+                                    )}
+                                `
+                              : ''}
+                      `}
+            </div>
+ `; + } +} + +customElements.define('summary-view', SummaryView); \ No newline at end of file diff --git a/src/features/listen/summary/repositories/index.js b/src/features/listen/summary/repositories/index.js new file mode 100644 index 0000000..d5bd3b3 --- /dev/null +++ b/src/features/listen/summary/repositories/index.js @@ -0,0 +1,5 @@ +const summaryRepository = require('./sqlite.repository'); + +module.exports = { + ...summaryRepository, +}; \ No newline at end of file diff --git a/src/features/listen/summary/repositories/sqlite.repository.js b/src/features/listen/summary/repositories/sqlite.repository.js new file mode 100644 index 0000000..d7a2266 --- /dev/null +++ b/src/features/listen/summary/repositories/sqlite.repository.js @@ -0,0 +1,47 @@ +const sqliteClient = require('../../../../common/services/sqliteClient'); + +function saveSummary({ sessionId, tldr, text, bullet_json, action_json, model = 'gpt-4.1' }) { + const db = sqliteClient.getDb(); + return new Promise((resolve, reject) => { + const now = Math.floor(Date.now() / 1000); + const query = ` + INSERT INTO summaries (session_id, generated_at, model, text, tldr, bullet_json, action_json, updated_at) + VALUES (?, ?, ?, ?, ?, ?, ?, ?) + ON CONFLICT(session_id) DO UPDATE SET + generated_at=excluded.generated_at, + model=excluded.model, + text=excluded.text, + tldr=excluded.tldr, + bullet_json=excluded.bullet_json, + action_json=excluded.action_json, + updated_at=excluded.updated_at + `; + db.run(query, [sessionId, now, model, text, tldr, bullet_json, action_json, now], function(err) { + if (err) { + console.error('Error saving summary:', err); + reject(err); + } else { + resolve({ changes: this.changes }); + } + }); + }); +} + +function getSummaryBySessionId(sessionId) { + const db = sqliteClient.getDb(); + return new Promise((resolve, reject) => { + const query = "SELECT * FROM summaries WHERE session_id = ?"; + db.get(query, [sessionId], (err, row) => { + if (err) { + reject(err); + } else { + resolve(row || null); + } + }); + }); +} + +module.exports = { + saveSummary, + getSummaryBySessionId, +}; \ No newline at end of file diff --git a/src/features/listen/summary/summaryService.js b/src/features/listen/summary/summaryService.js new file mode 100644 index 0000000..860fa35 --- /dev/null +++ b/src/features/listen/summary/summaryService.js @@ -0,0 +1,356 @@ +const { BrowserWindow } = require('electron'); +const { getSystemPrompt } = require('../../../common/prompts/promptBuilder.js'); +const { createLLM } = require('../../../common/ai/factory'); +const authService = require('../../../common/services/authService'); +const sessionRepository = require('../../../common/repositories/session'); +const summaryRepository = require('./repositories'); +const { getStoredApiKey, getStoredProvider } = require('../../../electron/windowManager'); + +class SummaryService { + constructor() { + this.previousAnalysisResult = null; + this.analysisHistory = []; + this.conversationHistory = []; + this.currentSessionId = null; + + // Callbacks + this.onAnalysisComplete = null; + this.onStatusUpdate = null; + } + + setCallbacks({ onAnalysisComplete, onStatusUpdate }) { + this.onAnalysisComplete = onAnalysisComplete; + this.onStatusUpdate = onStatusUpdate; + } + + setSessionId(sessionId) { + this.currentSessionId = sessionId; + } + + async getApiKey() { + const storedKey = await getStoredApiKey(); + if (storedKey) { + console.log('[SummaryService] Using stored API key'); + return storedKey; + } + + const envKey = process.env.OPENAI_API_KEY; + if 
(envKey) { + console.log('[SummaryService] Using environment API key'); + return envKey; + } + + console.error('[SummaryService] No API key found in storage or environment'); + return null; + } + + sendToRenderer(channel, data) { + BrowserWindow.getAllWindows().forEach(win => { + if (!win.isDestroyed()) { + win.webContents.send(channel, data); + } + }); + } + + addConversationTurn(speaker, text) { + const conversationText = `${speaker.toLowerCase()}: ${text.trim()}`; + this.conversationHistory.push(conversationText); + console.log(`💬 Added conversation text: ${conversationText}`); + console.log(`📈 Total conversation history: ${this.conversationHistory.length} texts`); + + // Trigger analysis if needed + this.triggerAnalysisIfNeeded(); + } + + getConversationHistory() { + return this.conversationHistory; + } + + resetConversationHistory() { + this.conversationHistory = []; + this.previousAnalysisResult = null; + this.analysisHistory = []; + console.log('🔄 Conversation history and analysis state reset'); + } + + /** + * Converts conversation history into text to include in the prompt. + * @param {Array} conversationTexts - Array of conversation texts ["me: ~~~", "them: ~~~", ...] + * @param {number} maxTurns - Maximum number of recent turns to include + * @returns {string} - Formatted conversation string for the prompt + */ + formatConversationForPrompt(conversationTexts, maxTurns = 30) { + if (conversationTexts.length === 0) return ''; + return conversationTexts.slice(-maxTurns).join('\n'); + } + + async makeOutlineAndRequests(conversationTexts, maxTurns = 30) { + console.log(`🔍 makeOutlineAndRequests called - conversationTexts: ${conversationTexts.length}`); + + if (conversationTexts.length === 0) { + console.log('⚠️ No conversation texts available for analysis'); + return null; + } + + const recentConversation = this.formatConversationForPrompt(conversationTexts, maxTurns); + + // 이전 분석 결과를 프롬프트에 포함 + let contextualPrompt = ''; + if (this.previousAnalysisResult) { + contextualPrompt = ` +Previous Analysis Context: +- Main Topic: ${this.previousAnalysisResult.topic.header} +- Key Points: ${this.previousAnalysisResult.summary.slice(0, 3).join(', ')} +- Last Actions: ${this.previousAnalysisResult.actions.slice(0, 2).join(', ')} + +Please build upon this context while analyzing the new conversation segments. +`; + } + + const basePrompt = getSystemPrompt('pickle_glass_analysis', '', false); + const systemPrompt = basePrompt.replace('{{CONVERSATION_HISTORY}}', recentConversation); + + try { + if (this.currentSessionId) { + await sessionRepository.touch(this.currentSessionId); + } + + const messages = [ + { + role: 'system', + content: systemPrompt, + }, + { + role: 'user', + content: `${contextualPrompt} + +Analyze the conversation and provide a structured summary. Format your response as follows: + +**Summary Overview** +- Main discussion point with context + +**Key Topic: [Topic Name]** +- First key insight +- Second key insight +- Third key insight + +**Extended Explanation** +Provide 2-3 sentences explaining the context and implications. + +**Suggested Questions** +1. First follow-up question? +2. Second follow-up question? +3. Third follow-up question? + +Keep all points concise and build upon previous analysis if provided.`, + }, + ]; + + console.log('🤖 Sending analysis request to AI...'); + + const API_KEY = await this.getApiKey(); + if (!API_KEY) { + throw new Error('No API key available'); + } + + const provider = getStoredProvider ? 
await getStoredProvider() : 'openai'; + const loggedIn = authService.getCurrentUser().isLoggedIn; + + console.log(`[SummaryService] provider: ${provider}, loggedIn: ${loggedIn}`); + + const llm = createLLM(provider, { + apiKey: API_KEY, + model: provider === 'openai' ? 'gpt-4.1' : 'gemini-2.5-flash', + temperature: 0.7, + maxTokens: 1024, + usePortkey: provider === 'openai' && loggedIn, + portkeyVirtualKey: loggedIn ? API_KEY : undefined + }); + + const completion = await llm.chat(messages); + + const responseText = completion.content; + console.log(`✅ Analysis response received: ${responseText}`); + const structuredData = this.parseResponseText(responseText, this.previousAnalysisResult); + + if (this.currentSessionId) { + summaryRepository.saveSummary({ + sessionId: this.currentSessionId, + text: responseText, + tldr: structuredData.summary.join('\n'), + bullet_json: JSON.stringify(structuredData.topic.bullets), + action_json: JSON.stringify(structuredData.actions), + model: 'gpt-4.1' + }).catch(err => console.error('[DB] Failed to save summary:', err)); + } + + // 분석 결과 저장 + this.previousAnalysisResult = structuredData; + this.analysisHistory.push({ + timestamp: Date.now(), + data: structuredData, + conversationLength: conversationTexts.length, + }); + + // 히스토리 크기 제한 (최근 10개만 유지) + if (this.analysisHistory.length > 10) { + this.analysisHistory.shift(); + } + + return structuredData; + } catch (error) { + console.error('❌ Error during analysis generation:', error.message); + return this.previousAnalysisResult; // 에러 시 이전 결과 반환 + } + } + + parseResponseText(responseText, previousResult) { + const structuredData = { + summary: [], + topic: { header: '', bullets: [] }, + actions: [], + followUps: ['✉️ Draft a follow-up email', '✅ Generate action items', '📝 Show summary'], + }; + + // 이전 결과가 있으면 기본값으로 사용 + if (previousResult) { + structuredData.topic.header = previousResult.topic.header; + structuredData.summary = [...previousResult.summary]; + } + + try { + const lines = responseText.split('\n'); + let currentSection = ''; + let isCapturingTopic = false; + let topicName = ''; + + for (const line of lines) { + const trimmedLine = line.trim(); + + // 섹션 헤더 감지 + if (trimmedLine.startsWith('**Summary Overview**')) { + currentSection = 'summary-overview'; + continue; + } else if (trimmedLine.startsWith('**Key Topic:')) { + currentSection = 'topic'; + isCapturingTopic = true; + topicName = trimmedLine.match(/\*\*Key Topic: (.+?)\*\*/)?.[1] || ''; + if (topicName) { + structuredData.topic.header = topicName + ':'; + } + continue; + } else if (trimmedLine.startsWith('**Extended Explanation**')) { + currentSection = 'explanation'; + continue; + } else if (trimmedLine.startsWith('**Suggested Questions**')) { + currentSection = 'questions'; + continue; + } + + // 컨텐츠 파싱 + if (trimmedLine.startsWith('-') && currentSection === 'summary-overview') { + const summaryPoint = trimmedLine.substring(1).trim(); + if (summaryPoint && !structuredData.summary.includes(summaryPoint)) { + // 기존 summary 업데이트 (최대 5개 유지) + structuredData.summary.unshift(summaryPoint); + if (structuredData.summary.length > 5) { + structuredData.summary.pop(); + } + } + } else if (trimmedLine.startsWith('-') && currentSection === 'topic') { + const bullet = trimmedLine.substring(1).trim(); + if (bullet && structuredData.topic.bullets.length < 3) { + structuredData.topic.bullets.push(bullet); + } + } else if (currentSection === 'explanation' && trimmedLine) { + // explanation을 topic bullets에 추가 (문장 단위로) + const sentences = trimmedLine + 
.split(/\.\s+/) + .filter(s => s.trim().length > 0) + .map(s => s.trim() + (s.endsWith('.') ? '' : '.')); + + sentences.forEach(sentence => { + if (structuredData.topic.bullets.length < 3 && !structuredData.topic.bullets.includes(sentence)) { + structuredData.topic.bullets.push(sentence); + } + }); + } else if (trimmedLine.match(/^\d+\./) && currentSection === 'questions') { + const question = trimmedLine.replace(/^\d+\.\s*/, '').trim(); + if (question && question.includes('?')) { + structuredData.actions.push(`❓ ${question}`); + } + } + } + + // 기본 액션 추가 + const defaultActions = ['✨ What should I say next?', '💬 Suggest follow-up questions']; + defaultActions.forEach(action => { + if (!structuredData.actions.includes(action)) { + structuredData.actions.push(action); + } + }); + + // 액션 개수 제한 + structuredData.actions = structuredData.actions.slice(0, 5); + + // 유효성 검증 및 이전 데이터 병합 + if (structuredData.summary.length === 0 && previousResult) { + structuredData.summary = previousResult.summary; + } + if (structuredData.topic.bullets.length === 0 && previousResult) { + structuredData.topic.bullets = previousResult.topic.bullets; + } + } catch (error) { + console.error('❌ Error parsing response text:', error); + // 에러 시 이전 결과 반환 + return ( + previousResult || { + summary: [], + topic: { header: 'Analysis in progress', bullets: [] }, + actions: ['✨ What should I say next?', '💬 Suggest follow-up questions'], + followUps: ['✉️ Draft a follow-up email', '✅ Generate action items', '📝 Show summary'], + } + ); + } + + console.log('📊 Final structured data:', JSON.stringify(structuredData, null, 2)); + return structuredData; + } + + /** + * Triggers analysis when conversation history reaches 5 texts. + */ + async triggerAnalysisIfNeeded() { + if (this.conversationHistory.length >= 5 && this.conversationHistory.length % 5 === 0) { + console.log(`🚀 Triggering analysis (non-blocking) - ${this.conversationHistory.length} conversation texts accumulated`); + + this.makeOutlineAndRequests(this.conversationHistory) + .then(data => { + if (data) { + console.log('📤 Sending structured data to renderer'); + this.sendToRenderer('update-structured-data', data); + + // Notify callback + if (this.onAnalysisComplete) { + this.onAnalysisComplete(data); + } + } else { + console.log('❌ No analysis data returned from non-blocking call'); + } + }) + .catch(error => { + console.error('❌ Error in non-blocking analysis:', error); + }); + } + } + + getCurrentAnalysisData() { + return { + previousResult: this.previousAnalysisResult, + history: this.analysisHistory, + conversationLength: this.conversationHistory.length, + }; + } +} + +module.exports = SummaryService; \ No newline at end of file diff --git a/src/features/settings/SettingsView.js b/src/features/settings/SettingsView.js new file mode 100644 index 0000000..49a0854 --- /dev/null +++ b/src/features/settings/SettingsView.js @@ -0,0 +1,831 @@ +import { html, css, LitElement } from '../../assets/lit-core-2.7.4.min.js'; + +export class SettingsView extends LitElement { + static styles = css` + * { + font-family: 'Helvetica Neue', -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; + cursor: default; + user-select: none; + } + + :host { + display: block; + width: 240px; + height: 100%; + color: white; + } + + .settings-container { + display: flex; + flex-direction: column; + height: 100%; + width: 100%; + background: rgba(20, 20, 20, 0.8); + border-radius: 12px; + outline: 0.5px rgba(255, 255, 255, 0.2) solid; + outline-offset: -1px; + box-sizing: border-box; + 
position: relative; + overflow-y: auto; + padding: 12px 12px; + z-index: 1000; + } + + .settings-container::-webkit-scrollbar { + width: 6px; + } + + .settings-container::-webkit-scrollbar-track { + background: rgba(255, 255, 255, 0.05); + border-radius: 3px; + } + + .settings-container::-webkit-scrollbar-thumb { + background: rgba(255, 255, 255, 0.2); + border-radius: 3px; + } + + .settings-container::-webkit-scrollbar-thumb:hover { + background: rgba(255, 255, 255, 0.3); + } + + .settings-container::before { + content: ''; + position: absolute; + top: 0; + left: 0; + right: 0; + bottom: 0; + width: 100%; + height: 100%; + background: rgba(0, 0, 0, 0.15); + box-shadow: 0 8px 32px rgba(0, 0, 0, 0.3); + border-radius: 12px; + filter: blur(10px); + z-index: -1; + } + + .settings-button[disabled], + .api-key-section input[disabled] { + opacity: 0.4; + cursor: not-allowed; + pointer-events: none; + } + + .header-section { + display: flex; + justify-content: space-between; + align-items: flex-start; + padding-bottom: 6px; + border-bottom: 1px solid rgba(255, 255, 255, 0.1); + position: relative; + z-index: 1; + } + + .title-line { + display: flex; + justify-content: space-between; + align-items: center; + } + + .app-title { + font-size: 13px; + font-weight: 500; + color: white; + margin: 0 0 4px 0; + } + + .account-info { + font-size: 11px; + color: rgba(255, 255, 255, 0.7); + margin: 0; + } + + .invisibility-icon { + padding-top: 2px; + opacity: 0; + transition: opacity 0.3s ease; + } + + .invisibility-icon.visible { + opacity: 1; + } + + .invisibility-icon svg { + width: 16px; + height: 16px; + } + + .shortcuts-section { + display: flex; + flex-direction: column; + gap: 2px; + padding: 4px 0; + position: relative; + z-index: 1; + } + + .shortcut-item { + display: flex; + justify-content: space-between; + align-items: center; + padding: 4px 0; + color: white; + font-size: 11px; + } + + .shortcut-name { + font-weight: 300; + } + + .shortcut-keys { + display: flex; + align-items: center; + gap: 3px; + } + + .cmd-key, .shortcut-key { + background: rgba(255, 255, 255, 0.1); + border-radius: 3px; + width: 16px; + height: 16px; + display: flex; + align-items: center; + justify-content: center; + font-size: 11px; + font-weight: 500; + color: rgba(255, 255, 255, 0.9); + } + + /* Buttons Section */ + .buttons-section { + display: flex; + flex-direction: column; + gap: 4px; + padding-top: 6px; + border-top: 1px solid rgba(255, 255, 255, 0.1); + position: relative; + z-index: 1; + flex: 1; + } + + .settings-button { + background: rgba(255, 255, 255, 0.1); + border: 1px solid rgba(255, 255, 255, 0.2); + border-radius: 4px; + color: white; + padding: 5px 10px; + font-size: 11px; + font-weight: 400; + cursor: pointer; + transition: all 0.15s ease; + display: flex; + align-items: center; + justify-content: center; + white-space: nowrap; + } + + .settings-button:hover { + background: rgba(255, 255, 255, 0.15); + border-color: rgba(255, 255, 255, 0.3); + } + + .settings-button:active { + transform: translateY(1px); + } + + .settings-button.full-width { + width: 100%; + } + + .settings-button.half-width { + flex: 1; + } + + .settings-button.danger { + background: rgba(255, 59, 48, 0.1); + border-color: rgba(255, 59, 48, 0.3); + color: rgba(255, 59, 48, 0.9); + } + + .settings-button.danger:hover { + background: rgba(255, 59, 48, 0.15); + border-color: rgba(255, 59, 48, 0.4); + } + + .move-buttons, .bottom-buttons { + display: flex; + gap: 4px; + } + + .api-key-section { + padding: 6px 0; + border-top: 1px solid 
rgba(255, 255, 255, 0.1); + } + + .api-key-section input { + width: 100%; + background: rgba(0,0,0,0.2); + border: 1px solid rgba(255,255,255,0.2); + color: white; + border-radius: 4px; + padding: 4px; + font-size: 11px; + margin-bottom: 4px; + box-sizing: border-box; + } + + .api-key-section input::placeholder { + color: rgba(255, 255, 255, 0.4); + } + + /* Preset Management Section */ + .preset-section { + padding: 6px 0; + border-top: 1px solid rgba(255, 255, 255, 0.1); + } + + .preset-header { + display: flex; + justify-content: space-between; + align-items: center; + margin-bottom: 4px; + } + + .preset-title { + font-size: 11px; + font-weight: 500; + color: white; + } + + .preset-count { + font-size: 9px; + color: rgba(255, 255, 255, 0.5); + margin-left: 4px; + } + + .preset-toggle { + font-size: 10px; + color: rgba(255, 255, 255, 0.6); + cursor: pointer; + padding: 2px 4px; + border-radius: 2px; + transition: background-color 0.15s ease; + } + + .preset-toggle:hover { + background: rgba(255, 255, 255, 0.1); + } + + .preset-list { + display: flex; + flex-direction: column; + gap: 2px; + max-height: 120px; + overflow-y: auto; + } + + .preset-item { + display: flex; + justify-content: space-between; + align-items: center; + padding: 4px 6px; + background: rgba(255, 255, 255, 0.05); + border-radius: 3px; + cursor: pointer; + transition: all 0.15s ease; + font-size: 11px; + border: 1px solid transparent; + } + + .preset-item:hover { + background: rgba(255, 255, 255, 0.1); + border-color: rgba(255, 255, 255, 0.1); + } + + .preset-item.selected { + background: rgba(0, 122, 255, 0.25); + border-color: rgba(0, 122, 255, 0.6); + box-shadow: 0 0 0 1px rgba(0, 122, 255, 0.3); + } + + .preset-name { + color: white; + flex: 1; + text-overflow: ellipsis; + overflow: hidden; + white-space: nowrap; + font-weight: 300; + } + + .preset-item.selected .preset-name { + font-weight: 500; + } + + .preset-status { + font-size: 9px; + color: rgba(0, 122, 255, 0.8); + font-weight: 500; + margin-left: 6px; + } + + .no-presets-message { + padding: 12px 8px; + text-align: center; + color: rgba(255, 255, 255, 0.5); + font-size: 10px; + line-height: 1.4; + } + + .no-presets-message .web-link { + color: rgba(0, 122, 255, 0.8); + text-decoration: underline; + cursor: pointer; + } + + .no-presets-message .web-link:hover { + color: rgba(0, 122, 255, 1); + } + + .loading-state { + display: flex; + align-items: center; + justify-content: center; + padding: 20px; + color: rgba(255, 255, 255, 0.7); + font-size: 11px; + } + + .loading-spinner { + width: 12px; + height: 12px; + border: 1px solid rgba(255, 255, 255, 0.2); + border-top: 1px solid rgba(255, 255, 255, 0.8); + border-radius: 50%; + animation: spin 1s linear infinite; + margin-right: 6px; + } + + @keyframes spin { + 0% { transform: rotate(0deg); } + 100% { transform: rotate(360deg); } + } + + .hidden { + display: none; + } + + /* ────────────────[ GLASS BYPASS ]─────────────── */ + :host-context(body.has-glass) { + animation: none !important; + transition: none !important; + transform: none !important; + will-change: auto !important; + } + + :host-context(body.has-glass) * { + background: transparent !important; + filter: none !important; + backdrop-filter: none !important; + box-shadow: none !important; + outline: none !important; + border: none !important; + border-radius: 0 !important; + transition: none !important; + animation: none !important; + } + + :host-context(body.has-glass) .settings-container::before { + display: none !important; + } + `; + + static 
properties = {
+        firebaseUser: { type: Object, state: true },
+        apiKey: { type: String, state: true },
+        isLoading: { type: Boolean, state: true },
+        isContentProtectionOn: { type: Boolean, state: true },
+        settings: { type: Object, state: true },
+        presets: { type: Array, state: true },
+        selectedPreset: { type: Object, state: true },
+        showPresets: { type: Boolean, state: true },
+        saving: { type: Boolean, state: true },
+    };
+
+    constructor() {
+        super();
+        this.firebaseUser = null;
+        this.apiKey = null;
+        this.isLoading = false;
+        this.isContentProtectionOn = true;
+        this.settings = null;
+        this.presets = [];
+        this.selectedPreset = null;
+        this.showPresets = false;
+        this.saving = false;
+        this.loadInitialData();
+    }
+
+    async loadInitialData() {
+        if (!window.require) return;
+
+        try {
+            this.isLoading = true;
+            const { ipcRenderer } = window.require('electron');
+
+            // Load all data in parallel
+            const [settings, presets, apiKey, contentProtection, userState] = await Promise.all([
+                ipcRenderer.invoke('settings:getSettings'),
+                ipcRenderer.invoke('settings:getPresets'),
+                ipcRenderer.invoke('get-stored-api-key'),
+                ipcRenderer.invoke('get-content-protection-status'),
+                ipcRenderer.invoke('get-current-user')
+            ]);
+
+            this.settings = settings;
+            this.presets = presets || [];
+            this.apiKey = apiKey;
+            this.isContentProtectionOn = contentProtection;
+
+            // Set first user preset as selected
+            if (this.presets.length > 0) {
+                const firstUserPreset = this.presets.find(p => p.is_default === 0);
+                if (firstUserPreset) {
+                    this.selectedPreset = firstUserPreset;
+                }
+            }
+
+            if (userState && userState.isLoggedIn) {
+                this.firebaseUser = userState.user;
+            }
+        } catch (error) {
+            console.error('Error loading initial data:', error);
+        } finally {
+            this.isLoading = false;
+        }
+    }
+
+    connectedCallback() {
+        super.connectedCallback();
+
+        this.setupEventListeners();
+        this.setupIpcListeners();
+        this.setupWindowResize();
+    }
+
+    disconnectedCallback() {
+        super.disconnectedCallback();
+        this.cleanupEventListeners();
+        this.cleanupIpcListeners();
+        this.cleanupWindowResize();
+    }
+
+    setupEventListeners() {
+        this.addEventListener('mouseenter', this.handleMouseEnter);
+        this.addEventListener('mouseleave', this.handleMouseLeave);
+    }
+
+    cleanupEventListeners() {
+        this.removeEventListener('mouseenter', this.handleMouseEnter);
+        this.removeEventListener('mouseleave', this.handleMouseLeave);
+    }
+
+    setupIpcListeners() {
+        if (!window.require) return;
+
+        const { ipcRenderer } = window.require('electron');
+
+        this._userStateListener = (event, userState) => {
+            console.log('[SettingsView] Received user-state-changed:', userState);
+            if (userState && userState.isLoggedIn) {
+                this.firebaseUser = userState;
+            } else {
+                this.firebaseUser = null;
+            }
+            this.requestUpdate();
+        };
+
+        this._settingsUpdatedListener = (event, settings) => {
+            console.log('[SettingsView] Received settings-updated');
+            this.settings = settings;
+            this.requestUpdate();
+        };
+
+        // Add listener for preset updates
+        this._presetsUpdatedListener = async (event) => {
+            console.log('[SettingsView] Received presets-updated, refreshing presets');
+            try {
+                const presets = await ipcRenderer.invoke('settings:getPresets');
+                this.presets = presets || [];
+
+                // Check whether the currently selected preset was deleted (user presets only)
+                const userPresets = this.presets.filter(p => p.is_default === 0);
+                if (this.selectedPreset && !userPresets.find(p => p.id === this.selectedPreset.id)) {
+                    this.selectedPreset = userPresets.length > 0 ?
userPresets[0] : null; + } + + this.requestUpdate(); + } catch (error) { + console.error('[SettingsView] Failed to refresh presets:', error); + } + }; + + ipcRenderer.on('user-state-changed', this._userStateListener); + ipcRenderer.on('settings-updated', this._settingsUpdatedListener); + ipcRenderer.on('presets-updated', this._presetsUpdatedListener); + } + + cleanupIpcListeners() { + if (!window.require) return; + + const { ipcRenderer } = window.require('electron'); + + if (this._userStateListener) { + ipcRenderer.removeListener('user-state-changed', this._userStateListener); + } + if (this._settingsUpdatedListener) { + ipcRenderer.removeListener('settings-updated', this._settingsUpdatedListener); + } + if (this._presetsUpdatedListener) { + ipcRenderer.removeListener('presets-updated', this._presetsUpdatedListener); + } + } + + setupWindowResize() { + this.resizeHandler = () => { + this.requestUpdate(); + this.updateScrollHeight(); + }; + window.addEventListener('resize', this.resizeHandler); + + // Initial setup + setTimeout(() => this.updateScrollHeight(), 100); + } + + cleanupWindowResize() { + if (this.resizeHandler) { + window.removeEventListener('resize', this.resizeHandler); + } + } + + updateScrollHeight() { + const windowHeight = window.innerHeight; + const maxHeight = windowHeight; + + this.style.maxHeight = `${maxHeight}px`; + + const container = this.shadowRoot?.querySelector('.settings-container'); + if (container) { + container.style.maxHeight = `${maxHeight}px`; + } + } + + handleMouseEnter = () => { + if (window.require) { + const { ipcRenderer } = window.require('electron'); + ipcRenderer.send('cancel-hide-window', 'settings'); + } + } + + handleMouseLeave = () => { + if (window.require) { + const { ipcRenderer } = window.require('electron'); + ipcRenderer.send('hide-window', 'settings'); + } + } + + getMainShortcuts() { + return [ + { name: 'Show / Hide', key: '\\' }, + { name: 'Ask Anything', key: '↵' }, + { name: 'Scroll AI Response', key: '↕' } + ]; + } + + togglePresets() { + this.showPresets = !this.showPresets; + } + + async handlePresetSelect(preset) { + this.selectedPreset = preset; + // Here you could implement preset application logic + console.log('Selected preset:', preset); + } + + handleMoveLeft() { + console.log('Move Left clicked'); + if (window.require) { + const { ipcRenderer } = window.require('electron'); + ipcRenderer.invoke('move-window-step', 'left'); + } + } + + handleMoveRight() { + console.log('Move Right clicked'); + if (window.require) { + const { ipcRenderer } = window.require('electron'); + ipcRenderer.invoke('move-window-step', 'right'); + } + } + + async handlePersonalize() { + console.log('Personalize clicked'); + if (window.require) { + const { ipcRenderer } = window.require('electron'); + try { + await ipcRenderer.invoke('open-login-page'); + } catch (error) { + console.error('Failed to open personalize page:', error); + } + } + } + + async handleToggleInvisibility() { + console.log('Toggle Invisibility clicked'); + if (window.require) { + const { ipcRenderer } = window.require('electron'); + this.isContentProtectionOn = await ipcRenderer.invoke('toggle-content-protection'); + this.requestUpdate(); + } + } + + async handleSaveApiKey() { + const input = this.shadowRoot.getElementById('api-key-input'); + if (!input || !input.value) return; + + const newApiKey = input.value; + if (window.require) { + const { ipcRenderer } = window.require('electron'); + try { + const result = await ipcRenderer.invoke('settings:saveApiKey', newApiKey); + 
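                // The invoke above resolves to whatever the settings:saveApiKey handler
                // registered in settingsService.js (later in this diff) returns, i.e. roughly
                //   { success: true }                      // saved
                //   { success: false, error: '<message>' } // save failed
                // which is why the next block branches on result.success.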
if (result.success) { + console.log('API Key saved successfully via IPC.'); + this.apiKey = newApiKey; + this.requestUpdate(); + } else { + console.error('Failed to save API Key via IPC:', result.error); + } + } catch(e) { + console.error('Error invoking save-api-key IPC:', e); + } + } + } + + async handleClearApiKey() { + console.log('Clear API Key clicked'); + if (window.require) { + const { ipcRenderer } = window.require('electron'); + await ipcRenderer.invoke('settings:removeApiKey'); + this.apiKey = null; + this.requestUpdate(); + } + } + + handleQuit() { + console.log('Quit clicked'); + if (window.require) { + const { ipcRenderer } = window.require('electron'); + ipcRenderer.invoke('quit-application'); + } + } + + handleFirebaseLogout() { + console.log('Firebase Logout clicked'); + if (window.require) { + const { ipcRenderer } = window.require('electron'); + ipcRenderer.invoke('firebase-logout'); + } + } + + render() { + if (this.isLoading) { + return html` +
+
+
+ Loading... +
+
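A plausible shape for this loading branch — the .settings-container, .loading-state and .loading-spinner class names come from the component's own styles above and the "Loading..." text is in the original, but the exact nesting here is an assumption:

            <div class="settings-container">
                <div class="loading-state">
                    <div class="loading-spinner"></div>
                    Loading...
                </div>
            </div>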
+ `; + } + + const loggedIn = !!this.firebaseUser; + + return html` +
+
+
+

Pickle Glass

+ +
+
+ + + +
+
+ +
+ + +
+ +
+ ${this.getMainShortcuts().map(shortcut => html` +
+ ${shortcut.name} +
+ + ${shortcut.key} +
+
+ `)} +
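A sketch of how each shortcut row plausibly renders; the ${shortcut.name} and ${shortcut.key} bindings appear above and the class names come from the component's styles, while the element choices and the ⌘ glyph inside .cmd-key are assumptions:

            <div class="shortcut-item">
                <span class="shortcut-name">${shortcut.name}</span>
                <div class="shortcut-keys">
                    <span class="cmd-key">⌘</span>
                    <span class="shortcut-key">${shortcut.key}</span>
                </div>
            </div>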
+ + +
+
+ + My Presets + (${this.presets.filter(p => p.is_default === 0).length}) + + + ${this.showPresets ? '▼' : '▶'} + +
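A sketch of the preset header this section plausibly uses — the "My Presets" label, the count expression and the ▼/▶ toggle appear above; the element placement and the @click wiring to the togglePresets() method defined earlier are assumptions:

            <div class="preset-header">
                <span class="preset-title">
                    My Presets
                    <span class="preset-count">(${this.presets.filter(p => p.is_default === 0).length})</span>
                </span>
                <span class="preset-toggle" @click=${this.togglePresets}>
                    ${this.showPresets ? '▼' : '▶'}
                </span>
            </div>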
+ +
+ ${this.presets.filter(p => p.is_default === 0).length === 0 ? html` +
+ No custom presets yet.
+ + Create your first preset + +
+ ` : this.presets.filter(p => p.is_default === 0).map(preset => html` +
this.handlePresetSelect(preset)}> + ${preset.title} + ${this.selectedPreset?.id === preset.id ? html`Selected` : ''} +
+ `)} +
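One way each preset row plausibly looks; the @click handler, the ${preset.title} binding and the 'Selected' badge expression all appear above, and .preset-item/.selected/.preset-name/.preset-status are defined in the styles — only the element and attribute placement is assumed:

            <div class="preset-item ${this.selectedPreset?.id === preset.id ? 'selected' : ''}"
                 @click=${() => this.handlePresetSelect(preset)}>
                <span class="preset-name">${preset.title}</span>
                ${this.selectedPreset?.id === preset.id ? html`<span class="preset-status">Selected</span>` : ''}
            </div>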
+
+ +
+ + +
+ + +
+ + + +
+ ${this.firebaseUser + ? html` + + ` + : html` + + ` + } + +
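The ${this.firebaseUser ? … : …} ternary above plausibly swaps a logout button for a login button; handleFirebaseLogout and handlePersonalize are defined earlier in this file, while the button labels and the full-width class are assumptions:

            ${this.firebaseUser
                ? html`<button class="settings-button full-width" @click=${this.handleFirebaseLogout}>Logout</button>`
                : html`<button class="settings-button full-width" @click=${this.handlePersonalize}>Login</button>`
            }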
+
+
+ `; + } +} + +customElements.define('settings-view', SettingsView); \ No newline at end of file diff --git a/src/features/listen/repositories/index.js b/src/features/settings/repositories/index.js similarity index 61% rename from src/features/listen/repositories/index.js rename to src/features/settings/repositories/index.js index 9c0d12f..508ebe5 100644 --- a/src/features/listen/repositories/index.js +++ b/src/features/settings/repositories/index.js @@ -13,8 +13,9 @@ function getRepository() { // Directly export functions for ease of use, decided by the strategy module.exports = { - addTranscript: (...args) => getRepository().addTranscript(...args), - saveSummary: (...args) => getRepository().saveSummary(...args), - getAllTranscriptsBySessionId: (...args) => getRepository().getAllTranscriptsBySessionId(...args), - getSummaryBySessionId: (...args) => getRepository().getSummaryBySessionId(...args), -}; \ No newline at end of file + getPresets: (...args) => getRepository().getPresets(...args), + getPresetTemplates: (...args) => getRepository().getPresetTemplates(...args), + createPreset: (...args) => getRepository().createPreset(...args), + updatePreset: (...args) => getRepository().updatePreset(...args), + deletePreset: (...args) => getRepository().deletePreset(...args), +}; \ No newline at end of file diff --git a/src/features/settings/repositories/sqlite.repository.js b/src/features/settings/repositories/sqlite.repository.js new file mode 100644 index 0000000..82d0c01 --- /dev/null +++ b/src/features/settings/repositories/sqlite.repository.js @@ -0,0 +1,109 @@ +const sqliteClient = require('../../../common/services/sqliteClient'); + +function getPresets(uid) { + const db = sqliteClient.getDb(); + return new Promise((resolve, reject) => { + const query = ` + SELECT * FROM prompt_presets + WHERE uid = ? OR is_default = 1 + ORDER BY is_default DESC, title ASC + `; + db.all(query, [uid], (err, rows) => { + if (err) { + console.error('SQLite: Failed to get presets:', err); + reject(err); + } else { + resolve(rows || []); + } + }); + }); +} + +function getPresetTemplates() { + const db = sqliteClient.getDb(); + return new Promise((resolve, reject) => { + const query = ` + SELECT * FROM prompt_presets + WHERE is_default = 1 + ORDER BY title ASC + `; + db.all(query, [], (err, rows) => { + if (err) { + console.error('SQLite: Failed to get preset templates:', err); + reject(err); + } else { + resolve(rows || []); + } + }); + }); +} + +function createPreset({ uid, title, prompt }) { + const db = sqliteClient.getDb(); + return new Promise((resolve, reject) => { + const id = require('crypto').randomUUID(); + const now = Math.floor(Date.now() / 1000); + const query = ` + INSERT INTO prompt_presets (id, uid, title, prompt, is_default, created_at, sync_state) + VALUES (?, ?, ?, ?, 0, ?, 'dirty') + `; + db.run(query, [id, uid, title, prompt, now], function(err) { + if (err) { + console.error('SQLite: Failed to create preset:', err); + reject(err); + } else { + resolve({ id }); + } + }); + }); +} + +function updatePreset(id, { title, prompt }, uid) { + const db = sqliteClient.getDb(); + return new Promise((resolve, reject) => { + const now = Math.floor(Date.now() / 1000); + const query = ` + UPDATE prompt_presets + SET title = ?, prompt = ?, sync_state = 'dirty', updated_at = ? + WHERE id = ? AND uid = ? 
AND is_default = 0 + `; + db.run(query, [title, prompt, now, id, uid], function(err) { + if (err) { + console.error('SQLite: Failed to update preset:', err); + reject(err); + } else if (this.changes === 0) { + reject(new Error('Preset not found, is default, or permission denied')); + } else { + resolve({ changes: this.changes }); + } + }); + }); +} + +function deletePreset(id, uid) { + const db = sqliteClient.getDb(); + return new Promise((resolve, reject) => { + const query = ` + DELETE FROM prompt_presets + WHERE id = ? AND uid = ? AND is_default = 0 + `; + db.run(query, [id, uid], function(err) { + if (err) { + console.error('SQLite: Failed to delete preset:', err); + reject(err); + } else if (this.changes === 0) { + reject(new Error('Preset not found, is default, or permission denied')); + } else { + resolve({ changes: this.changes }); + } + }); + }); +} + +module.exports = { + getPresets, + getPresetTemplates, + createPreset, + updatePreset, + deletePreset +}; \ No newline at end of file diff --git a/src/features/settings/settingsService.js b/src/features/settings/settingsService.js new file mode 100644 index 0000000..6360f36 --- /dev/null +++ b/src/features/settings/settingsService.js @@ -0,0 +1,462 @@ +const { ipcMain, BrowserWindow } = require('electron'); +const Store = require('electron-store'); +const authService = require('../../common/services/authService'); +const userRepository = require('../../common/repositories/user'); +const settingsRepository = require('./repositories'); +const { getStoredApiKey, getStoredProvider, windowPool } = require('../../electron/windowManager'); + +const store = new Store({ + name: 'pickle-glass-settings', + defaults: { + users: {} + } +}); + +// Configuration constants +const NOTIFICATION_CONFIG = { + RELEVANT_WINDOW_TYPES: ['settings', 'main'], + DEBOUNCE_DELAY: 300, // prevent spam during bulk operations (ms) + MAX_RETRY_ATTEMPTS: 3, + RETRY_BASE_DELAY: 1000, // exponential backoff base (ms) +}; + +// window targeting system +class WindowNotificationManager { + constructor() { + this.pendingNotifications = new Map(); + } + + /** + * Send notifications only to relevant windows + * @param {string} event - Event name + * @param {*} data - Event data + * @param {object} options - Notification options + */ + notifyRelevantWindows(event, data = null, options = {}) { + const { + windowTypes = NOTIFICATION_CONFIG.RELEVANT_WINDOW_TYPES, + debounce = NOTIFICATION_CONFIG.DEBOUNCE_DELAY + } = options; + + if (debounce > 0) { + this.debounceNotification(event, () => { + this.sendToTargetWindows(event, data, windowTypes); + }, debounce); + } else { + this.sendToTargetWindows(event, data, windowTypes); + } + } + + sendToTargetWindows(event, data, windowTypes) { + const relevantWindows = this.getRelevantWindows(windowTypes); + + if (relevantWindows.length === 0) { + console.log(`[WindowNotificationManager] No relevant windows found for event: ${event}`); + return; + } + + console.log(`[WindowNotificationManager] Sending ${event} to ${relevantWindows.length} relevant windows`); + + relevantWindows.forEach(win => { + try { + if (data) { + win.webContents.send(event, data); + } else { + win.webContents.send(event); + } + } catch (error) { + console.warn(`[WindowNotificationManager] Failed to send ${event} to window:`, error.message); + } + }); + } + + getRelevantWindows(windowTypes) { + const allWindows = BrowserWindow.getAllWindows(); + const relevantWindows = []; + + allWindows.forEach(win => { + if (win.isDestroyed()) return; + + for (const [windowName, 
poolWindow] of windowPool || []) { + if (poolWindow === win && windowTypes.includes(windowName)) { + if (windowName === 'settings' || win.isVisible()) { + relevantWindows.push(win); + } + break; + } + } + }); + + return relevantWindows; + } + + debounceNotification(key, fn, delay) { + // Clear existing timeout + if (this.pendingNotifications.has(key)) { + clearTimeout(this.pendingNotifications.get(key)); + } + + // Set new timeout + const timeoutId = setTimeout(() => { + fn(); + this.pendingNotifications.delete(key); + }, delay); + + this.pendingNotifications.set(key, timeoutId); + } + + cleanup() { + // Clear all pending notifications + this.pendingNotifications.forEach(timeoutId => clearTimeout(timeoutId)); + this.pendingNotifications.clear(); + } +} + +// Global instance +const windowNotificationManager = new WindowNotificationManager(); + +// Default keybinds configuration +const DEFAULT_KEYBINDS = { + mac: { + moveUp: 'Cmd+Up', + moveDown: 'Cmd+Down', + moveLeft: 'Cmd+Left', + moveRight: 'Cmd+Right', + toggleVisibility: 'Cmd+\\', + toggleClickThrough: 'Cmd+M', + nextStep: 'Cmd+Enter', + manualScreenshot: 'Cmd+Shift+S', + previousResponse: 'Cmd+[', + nextResponse: 'Cmd+]', + scrollUp: 'Cmd+Shift+Up', + scrollDown: 'Cmd+Shift+Down', + }, + windows: { + moveUp: 'Ctrl+Up', + moveDown: 'Ctrl+Down', + moveLeft: 'Ctrl+Left', + moveRight: 'Ctrl+Right', + toggleVisibility: 'Ctrl+\\', + toggleClickThrough: 'Ctrl+M', + nextStep: 'Ctrl+Enter', + manualScreenshot: 'Ctrl+Shift+S', + previousResponse: 'Ctrl+[', + nextResponse: 'Ctrl+]', + scrollUp: 'Ctrl+Shift+Up', + scrollDown: 'Ctrl+Shift+Down', + } +}; + +// Service state +let currentSettings = null; + +function getDefaultSettings() { + const isMac = process.platform === 'darwin'; + return { + profile: 'school', + language: 'en', + screenshotInterval: '5000', + imageQuality: '0.8', + layoutMode: 'stacked', + keybinds: isMac ? DEFAULT_KEYBINDS.mac : DEFAULT_KEYBINDS.windows, + throttleTokens: 500, + maxTokens: 2000, + throttlePercent: 80, + googleSearchEnabled: false, + backgroundTransparency: 0.5, + fontSize: 14, + contentProtection: true + }; +} + +async function getSettings() { + try { + const uid = authService.getCurrentUserId(); + const userSettingsKey = uid ? `users.${uid}` : 'users.default'; + + const defaultSettings = getDefaultSettings(); + const savedSettings = store.get(userSettingsKey, {}); + + currentSettings = { ...defaultSettings, ...savedSettings }; + return currentSettings; + } catch (error) { + console.error('[SettingsService] Error getting settings from store:', error); + return getDefaultSettings(); + } +} + +async function saveSettings(settings) { + try { + const uid = authService.getCurrentUserId(); + const userSettingsKey = uid ? 
`users.${uid}` : 'users.default'; + + const currentSaved = store.get(userSettingsKey, {}); + const newSettings = { ...currentSaved, ...settings }; + + store.set(userSettingsKey, newSettings); + currentSettings = newSettings; + + // Use smart notification system + windowNotificationManager.notifyRelevantWindows('settings-updated', currentSettings); + + return { success: true }; + } catch (error) { + console.error('[SettingsService] Error saving settings to store:', error); + return { success: false, error: error.message }; + } +} + +async function getPresets() { + try { + const uid = authService.getCurrentUserId(); + if (!uid) { + // Logged out users only see default presets + return await settingsRepository.getPresetTemplates(); + } + + const presets = await settingsRepository.getPresets(uid); + return presets; + } catch (error) { + console.error('[SettingsService] Error getting presets:', error); + return []; + } +} + +async function getPresetTemplates() { + try { + const templates = await settingsRepository.getPresetTemplates(); + return templates; + } catch (error) { + console.error('[SettingsService] Error getting preset templates:', error); + return []; + } +} + +async function createPreset(title, prompt) { + try { + const uid = authService.getCurrentUserId(); + if (!uid) { + throw new Error("User not logged in, cannot create preset."); + } + + const result = await settingsRepository.createPreset({ uid, title, prompt }); + + windowNotificationManager.notifyRelevantWindows('presets-updated', { + action: 'created', + presetId: result.id, + title + }); + + return { success: true, id: result.id }; + } catch (error) { + console.error('[SettingsService] Error creating preset:', error); + return { success: false, error: error.message }; + } +} + +async function updatePreset(id, title, prompt) { + try { + const uid = authService.getCurrentUserId(); + if (!uid) { + throw new Error("User not logged in, cannot update preset."); + } + + await settingsRepository.updatePreset(id, { title, prompt }, uid); + + windowNotificationManager.notifyRelevantWindows('presets-updated', { + action: 'updated', + presetId: id, + title + }); + + return { success: true }; + } catch (error) { + console.error('[SettingsService] Error updating preset:', error); + return { success: false, error: error.message }; + } +} + +async function deletePreset(id) { + try { + const uid = authService.getCurrentUserId(); + if (!uid) { + throw new Error("User not logged in, cannot delete preset."); + } + + await settingsRepository.deletePreset(id, uid); + + windowNotificationManager.notifyRelevantWindows('presets-updated', { + action: 'deleted', + presetId: id + }); + + return { success: true }; + } catch (error) { + console.error('[SettingsService] Error deleting preset:', error); + return { success: false, error: error.message }; + } +} + +async function saveApiKey(apiKey, provider = 'openai') { + try { + const uid = authService.getCurrentUserId(); + if (!uid) { + // For non-logged-in users, save to local storage + const { app } = require('electron'); + const Store = require('electron-store'); + const store = new Store(); + store.set('apiKey', apiKey); + store.set('provider', provider); + + // Notify windows + BrowserWindow.getAllWindows().forEach(win => { + if (!win.isDestroyed()) { + win.webContents.send('api-key-validated', apiKey); + } + }); + + return { success: true }; + } + + // For logged-in users, save to database + await userRepository.saveApiKey(apiKey, uid, provider); + + // Notify windows + 
BrowserWindow.getAllWindows().forEach(win => { + if (!win.isDestroyed()) { + win.webContents.send('api-key-validated', apiKey); + } + }); + + return { success: true }; + } catch (error) { + console.error('[SettingsService] Error saving API key:', error); + return { success: false, error: error.message }; + } +} + +async function removeApiKey() { + try { + const uid = authService.getCurrentUserId(); + if (!uid) { + // For non-logged-in users, remove from local storage + const { app } = require('electron'); + const Store = require('electron-store'); + const store = new Store(); + store.delete('apiKey'); + store.delete('provider'); + } else { + // For logged-in users, remove from database + await userRepository.saveApiKey(null, uid, null); + } + + // Notify windows + BrowserWindow.getAllWindows().forEach(win => { + if (!win.isDestroyed()) { + win.webContents.send('api-key-removed'); + } + }); + + return { success: true }; + } catch (error) { + console.error('[SettingsService] Error removing API key:', error); + return { success: false, error: error.message }; + } +} + +async function updateContentProtection(enabled) { + try { + const settings = await getSettings(); + settings.contentProtection = enabled; + + // Update content protection in main window + const { app } = require('electron'); + const mainWindow = windowPool.get('main'); + if (mainWindow && !mainWindow.isDestroyed()) { + mainWindow.setContentProtection(enabled); + } + + return await saveSettings(settings); + } catch (error) { + console.error('[SettingsService] Error updating content protection:', error); + return { success: false, error: error.message }; + } +} + +function initialize() { + // cleanup + windowNotificationManager.cleanup(); + + // IPC handlers for settings + ipcMain.handle('settings:getSettings', async () => { + return await getSettings(); + }); + + ipcMain.handle('settings:saveSettings', async (event, settings) => { + return await saveSettings(settings); + }); + + // IPC handlers for presets + ipcMain.handle('settings:getPresets', async () => { + return await getPresets(); + }); + + ipcMain.handle('settings:getPresetTemplates', async () => { + return await getPresetTemplates(); + }); + + ipcMain.handle('settings:createPreset', async (event, title, prompt) => { + return await createPreset(title, prompt); + }); + + ipcMain.handle('settings:updatePreset', async (event, id, title, prompt) => { + return await updatePreset(id, title, prompt); + }); + + ipcMain.handle('settings:deletePreset', async (event, id) => { + return await deletePreset(id); + }); + + ipcMain.handle('settings:saveApiKey', async (event, apiKey, provider) => { + return await saveApiKey(apiKey, provider); + }); + + ipcMain.handle('settings:removeApiKey', async () => { + return await removeApiKey(); + }); + + ipcMain.handle('settings:updateContentProtection', async (event, enabled) => { + return await updateContentProtection(enabled); + }); + + console.log('[SettingsService] Initialized and ready.'); +} + +// Cleanup function +function cleanup() { + windowNotificationManager.cleanup(); + console.log('[SettingsService] Cleaned up resources.'); +} + +function notifyPresetUpdate(action, presetId, title = null) { + const data = { action, presetId }; + if (title) data.title = title; + + windowNotificationManager.notifyRelevantWindows('presets-updated', data); +} + +module.exports = { + initialize, + cleanup, + notifyPresetUpdate, + getSettings, + saveSettings, + getPresets, + getPresetTemplates, + createPreset, + updatePreset, + deletePreset, + saveApiKey, 
+ removeApiKey, + updateContentProtection, +}; \ No newline at end of file diff --git a/src/index.js b/src/index.js index ea8224b..eb6e89c 100644 --- a/src/index.js +++ b/src/index.js @@ -13,7 +13,7 @@ if (require('electron-squirrel-startup')) { const { app, BrowserWindow, shell, ipcMain, dialog } = require('electron'); const { createWindows } = require('./electron/windowManager.js'); -const { setupLiveSummaryIpcHandlers, stopMacOSAudioCapture } = require('./features/listen/liveSummaryService.js'); +const ListenService = require('./features/listen/listenService'); const { initializeFirebase } = require('./common/services/firebaseClient'); const databaseInitializer = require('./common/services/databaseInitializer'); const authService = require('./common/services/authService'); @@ -24,12 +24,15 @@ const fetch = require('node-fetch'); const { autoUpdater } = require('electron-updater'); const { EventEmitter } = require('events'); const askService = require('./features/ask/askService'); +const settingsService = require('./features/settings/settingsService'); const sessionRepository = require('./common/repositories/session'); const eventBridge = new EventEmitter(); let WEB_PORT = 3000; -const openaiSessionRef = { current: null }; +const listenService = new ListenService(); +// Make listenService globally accessible so other modules (e.g., windowManager, askService) can reuse the same instance +global.listenService = listenService; let deeplink = null; // Initialize as null let pendingDeepLinkUrl = null; // Store any deep link that arrives before initialization @@ -106,8 +109,9 @@ app.whenReady().then(async () => { sessionRepository.endAllActiveSessions(); authService.initialize(); - setupLiveSummaryIpcHandlers(openaiSessionRef); + listenService.setupIpcHandlers(); askService.initialize(); + settingsService.initialize(); setupGeneralIpcHandlers(); }) .catch(err => { @@ -123,7 +127,7 @@ app.whenReady().then(async () => { }); app.on('window-all-closed', () => { - stopMacOSAudioCapture(); + listenService.stopMacOSAudioCapture(); if (process.platform !== 'darwin') { app.quit(); } @@ -131,7 +135,7 @@ app.on('window-all-closed', () => { app.on('before-quit', async () => { console.log('[Shutdown] App is about to quit.'); - stopMacOSAudioCapture(); + listenService.stopMacOSAudioCapture(); await sessionRepository.endAllActiveSessions(); databaseInitializer.close(); }); @@ -210,7 +214,8 @@ function setupGeneralIpcHandlers() { function setupWebDataHandlers() { const sessionRepository = require('./common/repositories/session'); - const listenRepository = require('./features/listen/repositories'); + const sttRepository = require('./features/listen/stt/repositories'); + const summaryRepository = require('./features/listen/summary/repositories'); const askRepository = require('./features/ask/repositories'); const userRepository = require('./common/repositories/user'); const presetRepository = require('./common/repositories/preset'); @@ -230,9 +235,9 @@ function setupWebDataHandlers() { result = null; break; } - const transcripts = await listenRepository.getAllTranscriptsBySessionId(payload); + const transcripts = await sttRepository.getAllTranscriptsBySessionId(payload); const ai_messages = await askRepository.getAllAiMessagesBySessionId(payload); - const summary = await listenRepository.getSummaryBySessionId(payload); + const summary = await summaryRepository.getSummaryBySessionId(payload); result = { session, transcripts, ai_messages, summary }; break; case 'delete-session': @@ -273,12 +278,15 @@ function 
setupWebDataHandlers() { break; case 'create-preset': result = await presetRepository.create({ ...payload, uid: currentUserId }); + settingsService.notifyPresetUpdate('created', result.id, payload.title); break; case 'update-preset': result = await presetRepository.update(payload.id, payload.data, currentUserId); + settingsService.notifyPresetUpdate('updated', payload.id, payload.data.title); break; case 'delete-preset': result = await presetRepository.delete(payload, currentUserId); + settingsService.notifyPresetUpdate('deleted', payload); break; // BATCH