fix header name, modularize windowManager, fix UI size bug, set liquid glass env

sanio 2025-07-07 08:24:28 +09:00
commit a3f76662e1
34 changed files with 4600 additions and 4328 deletions

View File

@@ -248,6 +248,8 @@ export class ApiKeyHeader extends LitElement {
width: 100%;
text-align: left;
}
/* ────────────────[ GLASS BYPASS ]─────────────── */
:host-context(body.has-glass) .container,
:host-context(body.has-glass) .api-input,
:host-context(body.has-glass) .provider-select,
@@ -260,13 +262,11 @@ export class ApiKeyHeader extends LitElement {
backdrop-filter: none !important;
}
/* Remove pseudo-element layers and gradient borders */
:host-context(body.has-glass) .container::after,
:host-context(body.has-glass) .action-button::after {
display: none !important;
}
/* Also block backgrounds that reappear on hover/active */
:host-context(body.has-glass) .action-button:hover,
:host-context(body.has-glass) .provider-select:hover,
:host-context(body.has-glass) .close-button:hover {

View File

@@ -292,11 +292,12 @@ export class MainHeader extends LitElement {
width: 16px;
height: 16px;
}
/* ────────────────[ GLASS BYPASS ]─────────────── */
:host-context(body.has-glass) .header,
:host-context(body.has-glass) .listen-button,
:host-context(body.has-glass) .header-actions,
:host-context(body.has-glass) .settings-button {
/* Remove all backgrounds, blur, and shadows */
background: transparent !important;
filter: none !important;
box-shadow: none !important;
@@ -307,7 +308,6 @@ export class MainHeader extends LitElement {
border: none !important;
}
/* Disable decorative before/after layers and button overlays */
:host-context(body.has-glass) .header::before,
:host-context(body.has-glass) .header::after,
:host-context(body.has-glass) .listen-button::before,
@@ -315,7 +315,6 @@ export class MainHeader extends LitElement {
display: none !important;
}
/* Also block backgrounds that unintentionally appear on hover */
:host-context(body.has-glass) .header-actions:hover,
:host-context(body.has-glass) .settings-button:hover,
:host-context(body.has-glass) .listen-button:hover::before {
@@ -330,7 +329,6 @@ export class MainHeader extends LitElement {
box-shadow: none !important;
}
/* 2) Flatten rounded corners on pills and icon boxes (optional) */
:host-context(body.has-glass) .header,
:host-context(body.has-glass) .listen-button,
:host-context(body.has-glass) .header-actions,

View File

@@ -237,6 +237,8 @@ export class PermissionHeader extends LitElement {
background: rgba(255, 255, 255, 0.2);
cursor: not-allowed;
}
/* ────────────────[ GLASS BYPASS ]─────────────── */
:host-context(body.has-glass) .container,
:host-context(body.has-glass) .action-button,
:host-context(body.has-glass) .continue-button,
@@ -248,14 +250,12 @@ export class PermissionHeader extends LitElement {
backdrop-filter: none !important;
}
/* Remove gradient borders / pseudo layers */
:host-context(body.has-glass) .container::after,
:host-context(body.has-glass) .action-button::after,
:host-context(body.has-glass) .continue-button::after {
display: none !important;
}
/* Prevent background reappearing on hover/active */
:host-context(body.has-glass) .action-button:hover,
:host-context(body.has-glass) .continue-button:hover,
:host-context(body.has-glass) .close-button:hover {

View File

@@ -1,9 +1,9 @@
import { html, css, LitElement } from '../assets/lit-core-2.7.4.min.js';
import { CustomizeView } from '../features/customize/CustomizeView.js';
import { SettingsView } from '../features/settings/SettingsView.js';
import { AssistantView } from '../features/listen/AssistantView.js';
import { AskView } from '../features/ask/AskView.js';
import '../features/listen/renderer.js';
import '../features/listen/renderer/renderer.js';
export class PickleGlassApp extends LitElement {
static styles = css`
@@ -22,7 +22,7 @@ export class PickleGlassApp extends LitElement {
height: 100%;
}
ask-view, customize-view, history-view, help-view, onboarding-view, setup-view {
ask-view, settings-view, history-view, help-view, setup-view {
display: block;
width: 100%;
height: 100%;
@@ -182,8 +182,8 @@ export class PickleGlassApp extends LitElement {
this.isMainViewVisible = !this.isMainViewVisible;
}
handleCustomizeClick() {
this.currentView = 'customize';
handleSettingsClick() {
this.currentView = 'settings';
this.isMainViewVisible = true;
}
@@ -249,10 +249,6 @@ export class PickleGlassApp extends LitElement {
this.currentResponseIndex = e.detail.index;
}
handleOnboardingComplete() {
this.currentView = 'main';
}
render() {
switch (this.currentView) {
case 'listen':
@@ -265,19 +261,17 @@ export class PickleGlassApp extends LitElement {
></assistant-view>`;
case 'ask':
return html`<ask-view></ask-view>`;
case 'customize':
return html`<customize-view
case 'settings':
return html`<settings-view
.selectedProfile=${this.selectedProfile}
.selectedLanguage=${this.selectedLanguage}
.onProfileChange=${profile => (this.selectedProfile = profile)}
.onLanguageChange=${lang => (this.selectedLanguage = lang)}
></customize-view>`;
></settings-view>`;
case 'history':
return html`<history-view></history-view>`;
case 'help':
return html`<help-view></help-view>`;
case 'onboarding':
return html`<onboarding-view></onboarding-view>`;
case 'setup':
return html`<setup-view></setup-view>`;
default:

src/common/ai/factory.js Normal file (67 lines)
View File

@@ -0,0 +1,67 @@
const providers = {
openai: require('./providers/openai'),
gemini: require('./providers/gemini'),
// register additional providers here
};
/**
* Creates an STT session based on provider
* @param {string} provider - Provider name ('openai', 'gemini', etc.)
* @param {object} opts - Configuration options (apiKey, language, callbacks, etc.)
* @returns {Promise<object>} STT session object with sendRealtimeInput and close methods
*/
function createSTT(provider, opts) {
if (!providers[provider]?.createSTT) {
throw new Error(`STT not supported for provider: ${provider}`);
}
return providers[provider].createSTT(opts);
}
/**
* Creates an LLM instance based on provider
* @param {string} provider - Provider name ('openai', 'gemini', etc.)
* @param {object} opts - Configuration options (apiKey, model, temperature, etc.)
* @returns {object} LLM instance with generateContent method
*/
function createLLM(provider, opts) {
if (!providers[provider]?.createLLM) {
throw new Error(`LLM not supported for provider: ${provider}`);
}
return providers[provider].createLLM(opts);
}
/**
* Creates a streaming LLM instance based on provider
* @param {string} provider - Provider name ('openai', 'gemini', etc.)
* @param {object} opts - Configuration options (apiKey, model, temperature, etc.)
* @returns {object} Streaming LLM instance
*/
function createStreamingLLM(provider, opts) {
if (!providers[provider]?.createStreamingLLM) {
throw new Error(`Streaming LLM not supported for provider: ${provider}`);
}
return providers[provider].createStreamingLLM(opts);
}
/**
* Gets list of available providers
* @returns {object} Object with stt and llm arrays
*/
function getAvailableProviders() {
const sttProviders = [];
const llmProviders = [];
for (const [name, provider] of Object.entries(providers)) {
if (provider.createSTT) sttProviders.push(name);
if (provider.createLLM) llmProviders.push(name);
}
return { stt: sttProviders, llm: llmProviders };
}
module.exports = {
createSTT,
createLLM,
createStreamingLLM,
getAvailableProviders
};
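For reference, a minimal sketch of how the new factory is meant to be consumed (not part of the commit; the require path and the OPENAI_API_KEY environment variable are assumptions):

// Hypothetical usage sketch (not in the commit).
const { createLLM, getAvailableProviders } = require('./src/common/ai/factory');

(async () => {
    console.log(getAvailableProviders()); // { stt: ['openai', 'gemini'], llm: ['openai', 'gemini'] }

    const llm = createLLM('openai', { apiKey: process.env.OPENAI_API_KEY });
    const result = await llm.generateContent(['You are a helpful assistant.', 'Say hello.']);
    console.log(result.response.text());
})();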

View File

@@ -0,0 +1,310 @@
const { GoogleGenerativeAI } = require('@google/generative-ai');
const { GoogleGenAI } = require('@google/genai');
/**
* Creates a Gemini STT session
* @param {object} opts - Configuration options
* @param {string} opts.apiKey - Gemini API key
* @param {string} [opts.language='en-US'] - Language code
* @param {object} [opts.callbacks] - Event callbacks
* @returns {Promise<object>} STT session
*/
async function createSTT({ apiKey, language = 'en-US', callbacks = {}, ...config }) {
const liveClient = new GoogleGenAI({ vertexai: false, apiKey });
// Language code BCP-47 conversion
const lang = language.includes('-') ? language : `${language}-US`;
const session = await liveClient.live.connect({
model: 'gemini-live-2.5-flash-preview',
callbacks,
config: {
inputAudioTranscription: {},
speechConfig: { languageCode: lang },
},
});
return {
sendRealtimeInput: async payload => session.sendRealtimeInput(payload),
close: async () => session.close(),
};
}
/**
* Creates a Gemini LLM instance
* @param {object} opts - Configuration options
* @param {string} opts.apiKey - Gemini API key
* @param {string} [opts.model='gemini-2.5-flash'] - Model name
* @param {number} [opts.temperature=0.7] - Temperature
* @param {number} [opts.maxTokens=8192] - Max tokens
* @returns {object} LLM instance
*/
function createLLM({ apiKey, model = 'gemini-2.5-flash', temperature = 0.7, maxTokens = 8192, ...config }) {
const client = new GoogleGenerativeAI(apiKey);
return {
generateContent: async (parts) => {
const geminiModel = client.getGenerativeModel({ model: model });
let systemPrompt = '';
let userContent = [];
for (const part of parts) {
if (typeof part === 'string') {
if (systemPrompt === '' && part.includes('You are')) {
systemPrompt = part;
} else {
userContent.push(part);
}
} else if (part.inlineData) {
// Convert base64 image data to Gemini format
userContent.push({
inlineData: {
mimeType: part.inlineData.mimeType,
data: part.inlineData.data
}
});
}
}
// Prepare content array
const content = [];
// Add system instruction if present
if (systemPrompt) {
// For Gemini, we'll prepend system prompt to user content
content.push(systemPrompt + '\n\n' + userContent[0]);
content.push(...userContent.slice(1));
} else {
content.push(...userContent);
}
try {
const result = await geminiModel.generateContent(content);
const response = await result.response;
return {
response: {
text: () => response.text()
}
};
} catch (error) {
console.error('Gemini API error:', error);
throw error;
}
},
// For compatibility with chat-style interfaces
chat: async (messages) => {
// Extract system instruction if present
let systemInstruction = '';
const history = [];
let lastMessage;
messages.forEach((msg, index) => {
if (msg.role === 'system') {
systemInstruction = msg.content;
return;
}
// Gemini's history format
const role = msg.role === 'user' ? 'user' : 'model';
if (index === messages.length - 1) {
lastMessage = msg;
} else {
history.push({ role, parts: [{ text: msg.content }] });
}
});
const geminiModel = client.getGenerativeModel({
model: model,
systemInstruction: systemInstruction
});
const chat = geminiModel.startChat({
history: history,
generationConfig: {
temperature: temperature,
maxOutputTokens: maxTokens,
}
});
// Get the last user message content
let content = lastMessage.content;
// Handle multimodal content for the last message
if (Array.isArray(content)) {
const geminiContent = [];
for (const part of content) {
if (typeof part === 'string') {
geminiContent.push(part);
} else if (part.type === 'text') {
geminiContent.push(part.text);
} else if (part.type === 'image_url' && part.image_url) {
// Convert base64 image to Gemini format
const base64Data = part.image_url.url.split(',')[1];
geminiContent.push({
inlineData: {
mimeType: 'image/png',
data: base64Data
}
});
}
}
content = geminiContent;
}
const result = await chat.sendMessage(content);
const response = await result.response;
return {
content: response.text(),
raw: result
};
}
};
}
/**
* Creates a Gemini streaming LLM instance
* @param {object} opts - Configuration options
* @param {string} opts.apiKey - Gemini API key
* @param {string} [opts.model='gemini-2.5-flash'] - Model name
* @param {number} [opts.temperature=0.7] - Temperature
* @param {number} [opts.maxTokens=8192] - Max tokens
* @returns {object} Streaming LLM instance
*/
function createStreamingLLM({ apiKey, model = 'gemini-2.5-flash', temperature = 0.7, maxTokens = 8192, ...config }) {
const client = new GoogleGenerativeAI(apiKey);
return {
streamChat: async (messages) => {
console.log('[Gemini Provider] Starting streaming request');
// Extract system instruction if present
let systemInstruction = '';
const nonSystemMessages = [];
for (const msg of messages) {
if (msg.role === 'system') {
systemInstruction = msg.content;
} else {
nonSystemMessages.push(msg);
}
}
const geminiModel = client.getGenerativeModel({
model: model,
systemInstruction: systemInstruction || undefined
});
const chat = geminiModel.startChat({
history: [],
generationConfig: {
temperature,
maxOutputTokens: maxTokens || 8192,
}
});
// Create a ReadableStream to handle Gemini's streaming
const stream = new ReadableStream({
async start(controller) {
try {
console.log('[Gemini Provider] Processing messages:', nonSystemMessages.length, 'messages (excluding system)');
// Get the last user message
const lastMessage = nonSystemMessages[nonSystemMessages.length - 1];
let lastUserMessage = lastMessage.content;
// Handle case where content might be an array (multimodal)
if (Array.isArray(lastUserMessage)) {
// Extract text content from array
const textParts = lastUserMessage.filter(part =>
typeof part === 'string' || (part && part.type === 'text')
);
lastUserMessage = textParts.map(part =>
typeof part === 'string' ? part : part.text
).join(' ');
}
console.log('[Gemini Provider] Sending message to Gemini:',
typeof lastUserMessage === 'string' ? lastUserMessage.substring(0, 100) + '...' : 'multimodal content');
// Prepare the message content for Gemini
let geminiContent = [];
// Handle multimodal content properly
if (Array.isArray(lastMessage.content)) {
for (const part of lastMessage.content) {
if (typeof part === 'string') {
geminiContent.push(part);
} else if (part.type === 'text') {
geminiContent.push(part.text);
} else if (part.type === 'image_url' && part.image_url) {
// Convert base64 image to Gemini format
const base64Data = part.image_url.url.split(',')[1];
geminiContent.push({
inlineData: {
mimeType: 'image/png',
data: base64Data
}
});
}
}
} else {
geminiContent = [lastUserMessage];
}
console.log('[Gemini Provider] Prepared Gemini content:',
geminiContent.length, 'parts');
// Stream the response
let chunkCount = 0;
let totalContent = '';
for await (const chunk of chat.sendMessageStream(geminiContent)) {
chunkCount++;
const chunkText = chunk.text() || '';
totalContent += chunkText;
// Format as SSE data
const data = JSON.stringify({
choices: [{
delta: {
content: chunkText
}
}]
});
controller.enqueue(new TextEncoder().encode(`data: ${data}\n\n`));
}
console.log(`[Gemini Provider] Streamed ${chunkCount} chunks, total length: ${totalContent.length} chars`);
// Send the final done message
controller.enqueue(new TextEncoder().encode('data: [DONE]\n\n'));
controller.close();
console.log('[Gemini Provider] Streaming completed successfully');
} catch (error) {
console.error('[Gemini Provider] Streaming error:', error);
controller.error(error);
}
}
});
// Create a Response object with the stream
return new Response(stream, {
headers: {
'Content-Type': 'text/event-stream',
'Cache-Control': 'no-cache',
'Connection': 'keep-alive'
}
});
}
};
}
module.exports = {
createSTT,
createLLM,
createStreamingLLM
};
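A hedged sketch of the chat-style compatibility interface above (not part of the commit; the GEMINI_API_KEY environment variable and the require path are assumptions). Earlier turns land in Gemini chat history; only the final user turn is sent:

// Hypothetical usage sketch (not in the commit).
const { createLLM } = require('./src/common/ai/providers/gemini');

(async () => {
    const llm = createLLM({ apiKey: process.env.GEMINI_API_KEY });
    const { content } = await llm.chat([
        { role: 'system', content: 'You are a terse assistant.' },
        { role: 'user', content: 'What is BCP-47?' },
        { role: 'assistant', content: 'A standard format for language tags.' },
        { role: 'user', content: 'Give one example tag.' },
    ]);
    console.log(content); // e.g. "en-US"
})();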

View File

@@ -0,0 +1,255 @@
const OpenAI = require('openai');
const WebSocket = require('ws');
/**
* Creates an OpenAI STT session
* @param {object} opts - Configuration options
* @param {string} opts.apiKey - OpenAI API key
* @param {string} [opts.language='en'] - Language code
* @param {object} [opts.callbacks] - Event callbacks
* @param {boolean} [opts.usePortkey=false] - Whether to use Portkey
* @param {string} [opts.portkeyVirtualKey] - Portkey virtual key
* @returns {Promise<object>} STT session
*/
async function createSTT({ apiKey, language = 'en', callbacks = {}, usePortkey = false, portkeyVirtualKey, ...config }) {
const keyType = usePortkey ? 'vKey' : 'apiKey';
const key = usePortkey ? (portkeyVirtualKey || apiKey) : apiKey;
const wsUrl = keyType === 'apiKey'
? 'wss://api.openai.com/v1/realtime?intent=transcription'
: 'wss://api.portkey.ai/v1/realtime?intent=transcription';
const headers = keyType === 'apiKey'
? {
'Authorization': `Bearer ${key}`,
'OpenAI-Beta': 'realtime=v1',
}
: {
'x-portkey-api-key': 'gRv2UGRMq6GGLJ8aVEB4e7adIewu',
'x-portkey-virtual-key': key,
'OpenAI-Beta': 'realtime=v1',
};
const ws = new WebSocket(wsUrl, { headers });
return new Promise((resolve, reject) => {
ws.onopen = () => {
console.log("WebSocket session opened.");
const sessionConfig = {
type: 'transcription_session.update',
session: {
input_audio_format: 'pcm16',
input_audio_transcription: {
model: 'gpt-4o-mini-transcribe',
prompt: config.prompt || '',
language: language || 'en'
},
turn_detection: {
type: 'server_vad',
threshold: 0.5,
prefix_padding_ms: 50,
silence_duration_ms: 25,
},
input_audio_noise_reduction: {
type: 'near_field'
}
}
};
ws.send(JSON.stringify(sessionConfig));
resolve({
sendRealtimeInput: (audioData) => {
if (ws.readyState === WebSocket.OPEN) {
const message = {
type: 'input_audio_buffer.append',
audio: audioData
};
ws.send(JSON.stringify(message));
}
},
close: () => {
if (ws.readyState === WebSocket.OPEN) {
ws.send(JSON.stringify({ type: 'session.close' }));
ws.close(1000, 'Client initiated close.');
}
}
});
};
ws.onmessage = (event) => {
const message = JSON.parse(event.data);
if (callbacks && callbacks.onmessage) {
callbacks.onmessage(message);
}
};
ws.onerror = (error) => {
console.error('WebSocket error:', error.message);
if (callbacks && callbacks.onerror) {
callbacks.onerror(error);
}
reject(error);
};
ws.onclose = (event) => {
console.log(`WebSocket closed: ${event.code} ${event.reason}`);
if (callbacks && callbacks.onclose) {
callbacks.onclose(event);
}
};
});
}
/**
* Creates an OpenAI LLM instance
* @param {object} opts - Configuration options
* @param {string} opts.apiKey - OpenAI API key
* @param {string} [opts.model='gpt-4.1'] - Model name
* @param {number} [opts.temperature=0.7] - Temperature
* @param {number} [opts.maxTokens=2048] - Max tokens
* @param {boolean} [opts.usePortkey=false] - Whether to use Portkey
* @param {string} [opts.portkeyVirtualKey] - Portkey virtual key
* @returns {object} LLM instance
*/
function createLLM({ apiKey, model = 'gpt-4.1', temperature = 0.7, maxTokens = 2048, usePortkey = false, portkeyVirtualKey, ...config }) {
const client = new OpenAI({ apiKey });
const callApi = async (messages) => {
if (!usePortkey) {
const response = await client.chat.completions.create({
model: model,
messages: messages,
temperature: temperature,
max_tokens: maxTokens
});
return {
content: response.choices[0].message.content.trim(),
raw: response
};
} else {
const fetchUrl = 'https://api.portkey.ai/v1/chat/completions';
const response = await fetch(fetchUrl, {
method: 'POST',
headers: {
'x-portkey-api-key': 'gRv2UGRMq6GGLJ8aVEB4e7adIewu',
'x-portkey-virtual-key': portkeyVirtualKey || apiKey,
'Content-Type': 'application/json',
},
body: JSON.stringify({
model: model,
messages,
temperature,
max_tokens: maxTokens,
}),
});
if (!response.ok) {
throw new Error(`Portkey API error: ${response.status} ${response.statusText}`);
}
const result = await response.json();
return {
content: result.choices[0].message.content.trim(),
raw: result
};
}
};
return {
generateContent: async (parts) => {
const messages = [];
let systemPrompt = '';
let userContent = [];
for (const part of parts) {
if (typeof part === 'string') {
if (systemPrompt === '' && part.includes('You are')) {
systemPrompt = part;
} else {
userContent.push({ type: 'text', text: part });
}
} else if (part.inlineData) {
userContent.push({
type: 'image_url',
image_url: { url: `data:${part.inlineData.mimeType};base64,${part.inlineData.data}` }
});
}
}
if (systemPrompt) messages.push({ role: 'system', content: systemPrompt });
if (userContent.length > 0) messages.push({ role: 'user', content: userContent });
const result = await callApi(messages);
return {
response: {
text: () => result.content
},
raw: result.raw
};
},
// For compatibility with chat-style interfaces
chat: async (messages) => {
return await callApi(messages);
}
};
}
/**
* Creates an OpenAI streaming LLM instance
* @param {object} opts - Configuration options
* @param {string} opts.apiKey - OpenAI API key
* @param {string} [opts.model='gpt-4.1'] - Model name
* @param {number} [opts.temperature=0.7] - Temperature
* @param {number} [opts.maxTokens=2048] - Max tokens
* @param {boolean} [opts.usePortkey=false] - Whether to use Portkey
* @param {string} [opts.portkeyVirtualKey] - Portkey virtual key
* @returns {object} Streaming LLM instance
*/
function createStreamingLLM({ apiKey, model = 'gpt-4.1', temperature = 0.7, maxTokens = 2048, usePortkey = false, portkeyVirtualKey, ...config }) {
return {
streamChat: async (messages) => {
const fetchUrl = usePortkey
? 'https://api.portkey.ai/v1/chat/completions'
: 'https://api.openai.com/v1/chat/completions';
const headers = usePortkey
? {
'x-portkey-api-key': 'gRv2UGRMq6GGLJ8aVEB4e7adIewu',
'x-portkey-virtual-key': portkeyVirtualKey || apiKey,
'Content-Type': 'application/json',
}
: {
Authorization: `Bearer ${apiKey}`,
'Content-Type': 'application/json',
};
const response = await fetch(fetchUrl, {
method: 'POST',
headers,
body: JSON.stringify({
model: model,
messages,
temperature,
max_tokens: maxTokens,
stream: true,
}),
});
if (!response.ok) {
throw new Error(`OpenAI API error: ${response.status} ${response.statusText}`);
}
return response;
}
};
}
module.exports = {
createSTT,
createLLM,
createStreamingLLM
};
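Similarly, a sketch of opening a transcription session with the new OpenAI provider (not part of the commit; the event name checked in onmessage is an assumption based on OpenAI's realtime transcription events):

// Hypothetical usage sketch (not in the commit).
const { createSTT } = require('./src/common/ai/providers/openai');

(async () => {
    const stt = await createSTT({
        apiKey: process.env.OPENAI_API_KEY,
        language: 'en',
        callbacks: {
            onmessage: (msg) => {
                // Event name assumed from OpenAI's realtime transcription API.
                if (msg.type === 'conversation.item.input_audio_transcription.completed') {
                    console.log('Transcript:', msg.transcript);
                }
            },
            onerror: (err) => console.error('STT error:', err.message),
        },
    });
    // stt.sendRealtimeInput(base64Pcm16Chunk); // push base64-encoded PCM16 audio
    // stt.close(); // tear the session down when finished
})();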

View File

@@ -1,377 +0,0 @@
const { createOpenAiGenerativeClient, getOpenAiGenerativeModel } = require('./openAiClient.js');
const { createGeminiClient, getGeminiGenerativeModel, createGeminiChat } = require('./googleGeminiClient.js');
/**
* Creates an AI client based on the provider
* @param {string} apiKey - The API key
* @param {string} provider - The provider ('openai' or 'gemini')
* @returns {object} The AI client
*/
function createAIClient(apiKey, provider = 'openai') {
switch (provider) {
case 'openai':
return createOpenAiGenerativeClient(apiKey);
case 'gemini':
return createGeminiClient(apiKey);
default:
throw new Error(`Unsupported AI provider: ${provider}`);
}
}
/**
* Gets a generative model based on the provider
* @param {object} client - The AI client
* @param {string} provider - The provider ('openai' or 'gemini')
* @param {string} model - The model name (optional)
* @returns {object} The model object
*/
function getGenerativeModel(client, provider = 'openai', model) {
switch (provider) {
case 'openai':
return getOpenAiGenerativeModel(client, model || 'gpt-4.1');
case 'gemini':
return getGeminiGenerativeModel(client, model || 'gemini-2.5-flash');
default:
throw new Error(`Unsupported AI provider: ${provider}`);
}
}
/**
* Makes a chat completion request based on the provider
* @param {object} params - Request parameters
* @returns {Promise<object>} The completion response
*/
async function makeChatCompletion({ apiKey, provider = 'openai', messages, temperature = 0.7, maxTokens = 1024, model, stream = false }) {
if (provider === 'openai') {
const fetchUrl = 'https://api.openai.com/v1/chat/completions';
const response = await fetch(fetchUrl, {
method: 'POST',
headers: {
Authorization: `Bearer ${apiKey}`,
'Content-Type': 'application/json',
},
body: JSON.stringify({
model: model || 'gpt-4.1',
messages,
temperature,
max_tokens: maxTokens,
stream,
}),
});
if (!response.ok) {
throw new Error(`OpenAI API error: ${response.status} ${response.statusText}`);
}
if (stream) {
return response;
}
const result = await response.json();
return {
content: result.choices[0].message.content.trim(),
raw: result
};
} else if (provider === 'gemini') {
const client = createGeminiClient(apiKey);
const genModel = getGeminiGenerativeModel(client, model || 'gemini-2.5-flash');
// Convert OpenAI format messages to Gemini format
const parts = [];
for (const message of messages) {
if (message.role === 'system') {
parts.push(message.content);
} else if (message.role === 'user') {
if (typeof message.content === 'string') {
parts.push(message.content);
} else if (Array.isArray(message.content)) {
// Handle multimodal content
for (const part of message.content) {
if (part.type === 'text') {
parts.push(part.text);
} else if (part.type === 'image_url' && part.image_url?.url) {
// Extract base64 data from data URL
const base64Match = part.image_url.url.match(/^data:(.+);base64,(.+)$/);
if (base64Match) {
parts.push({
inlineData: {
mimeType: base64Match[1],
data: base64Match[2]
}
});
}
}
}
}
}
}
const result = await genModel.generateContent(parts);
return {
content: result.response.text(),
raw: result
};
} else {
throw new Error(`Unsupported AI provider: ${provider}`);
}
}
/**
* Makes a chat completion request with Portkey support
* @param {object} params - Request parameters including Portkey options
* @returns {Promise<object>} The completion response
*/
async function makeChatCompletionWithPortkey({
apiKey,
provider = 'openai',
messages,
temperature = 0.7,
maxTokens = 1024,
model,
usePortkey = false,
portkeyVirtualKey = null
}) {
if (!usePortkey) {
return makeChatCompletion({ apiKey, provider, messages, temperature, maxTokens, model });
}
// Portkey is only supported for OpenAI currently
if (provider !== 'openai') {
console.warn('Portkey is only supported for OpenAI provider, falling back to direct API');
return makeChatCompletion({ apiKey, provider, messages, temperature, maxTokens, model });
}
const fetchUrl = 'https://api.portkey.ai/v1/chat/completions';
const response = await fetch(fetchUrl, {
method: 'POST',
headers: {
'x-portkey-api-key': 'gRv2UGRMq6GGLJ8aVEB4e7adIewu',
'x-portkey-virtual-key': portkeyVirtualKey || apiKey,
'Content-Type': 'application/json',
},
body: JSON.stringify({
model: model || 'gpt-4.1',
messages,
temperature,
max_tokens: maxTokens,
}),
});
if (!response.ok) {
throw new Error(`Portkey API error: ${response.status} ${response.statusText}`);
}
const result = await response.json();
return {
content: result.choices[0].message.content.trim(),
raw: result
};
}
/**
* Makes a streaming chat completion request
* @param {object} params - Request parameters
* @returns {Promise<Response>} The streaming response
*/
async function makeStreamingChatCompletion({ apiKey, provider = 'openai', messages, temperature = 0.7, maxTokens = 1024, model }) {
if (provider === 'openai') {
const fetchUrl = 'https://api.openai.com/v1/chat/completions';
const response = await fetch(fetchUrl, {
method: 'POST',
headers: {
Authorization: `Bearer ${apiKey}`,
'Content-Type': 'application/json',
},
body: JSON.stringify({
model: model || 'gpt-4.1',
messages,
temperature,
max_tokens: maxTokens,
stream: true,
}),
});
if (!response.ok) {
throw new Error(`OpenAI API error: ${response.status} ${response.statusText}`);
}
return response;
} else if (provider === 'gemini') {
console.log('[AIProviderService] Starting Gemini streaming request');
// Gemini streaming requires a different approach
// We'll create a ReadableStream that mimics OpenAI's SSE format
const geminiClient = createGeminiClient(apiKey);
// Extract system instruction if present
let systemInstruction = '';
const nonSystemMessages = [];
for (const msg of messages) {
if (msg.role === 'system') {
systemInstruction = msg.content;
} else {
nonSystemMessages.push(msg);
}
}
const chat = createGeminiChat(geminiClient, model || 'gemini-2.0-flash-exp', {
temperature,
maxOutputTokens: maxTokens || 8192,
systemInstruction: systemInstruction || undefined
});
// Create a ReadableStream to handle Gemini's streaming
const stream = new ReadableStream({
async start(controller) {
try {
console.log('[AIProviderService] Processing messages for Gemini:', nonSystemMessages.length, 'messages (excluding system)');
// Get the last user message
const lastMessage = nonSystemMessages[nonSystemMessages.length - 1];
let lastUserMessage = lastMessage.content;
// Handle case where content might be an array (multimodal)
if (Array.isArray(lastUserMessage)) {
// Extract text content from array
const textParts = lastUserMessage.filter(part =>
typeof part === 'string' || (part && part.type === 'text')
);
lastUserMessage = textParts.map(part =>
typeof part === 'string' ? part : part.text
).join(' ');
}
console.log('[AIProviderService] Sending message to Gemini:',
typeof lastUserMessage === 'string' ? lastUserMessage.substring(0, 100) + '...' : 'multimodal content');
// Prepare the message content for Gemini
let geminiContent = [];
// Handle multimodal content properly
if (Array.isArray(lastMessage.content)) {
for (const part of lastMessage.content) {
if (typeof part === 'string') {
geminiContent.push(part);
} else if (part.type === 'text') {
geminiContent.push(part.text);
} else if (part.type === 'image_url' && part.image_url) {
// Convert base64 image to Gemini format
const base64Data = part.image_url.url.split(',')[1];
geminiContent.push({
inlineData: {
mimeType: 'image/png',
data: base64Data
}
});
}
}
} else {
geminiContent = [lastUserMessage];
}
console.log('[AIProviderService] Prepared Gemini content:',
geminiContent.length, 'parts');
// Stream the response
let chunkCount = 0;
let totalContent = '';
for await (const chunk of chat.sendMessageStream(geminiContent)) {
chunkCount++;
const chunkText = chunk.text || '';
totalContent += chunkText;
// Format as SSE data
const data = JSON.stringify({
choices: [{
delta: {
content: chunkText
}
}]
});
controller.enqueue(new TextEncoder().encode(`data: ${data}\n\n`));
}
console.log(`[AIProviderService] Streamed ${chunkCount} chunks, total length: ${totalContent.length} chars`);
// Send the final done message
controller.enqueue(new TextEncoder().encode('data: [DONE]\n\n'));
controller.close();
console.log('[AIProviderService] Gemini streaming completed successfully');
} catch (error) {
console.error('[AIProviderService] Gemini streaming error:', error);
controller.error(error);
}
}
});
// Create a Response object with the stream
return new Response(stream, {
headers: {
'Content-Type': 'text/event-stream',
'Cache-Control': 'no-cache',
'Connection': 'keep-alive'
}
});
} else {
throw new Error(`Unsupported AI provider: ${provider}`);
}
}
/**
* Makes a streaming chat completion request with Portkey support
* @param {object} params - Request parameters
* @returns {Promise<Response>} The streaming response
*/
async function makeStreamingChatCompletionWithPortkey({
apiKey,
provider = 'openai',
messages,
temperature = 0.7,
maxTokens = 1024,
model,
usePortkey = false,
portkeyVirtualKey = null
}) {
if (!usePortkey) {
return makeStreamingChatCompletion({ apiKey, provider, messages, temperature, maxTokens, model });
}
// Portkey is only supported for OpenAI currently
if (provider !== 'openai') {
console.warn('Portkey is only supported for OpenAI provider, falling back to direct API');
return makeStreamingChatCompletion({ apiKey, provider, messages, temperature, maxTokens, model });
}
const fetchUrl = 'https://api.portkey.ai/v1/chat/completions';
const response = await fetch(fetchUrl, {
method: 'POST',
headers: {
'x-portkey-api-key': 'gRv2UGRMq6GGLJ8aVEB4e7adIewu',
'x-portkey-virtual-key': portkeyVirtualKey || apiKey,
'Content-Type': 'application/json',
},
body: JSON.stringify({
model: model || 'gpt-4.1',
messages,
temperature,
max_tokens: maxTokens,
stream: true,
}),
});
if (!response.ok) {
throw new Error(`Portkey API error: ${response.status} ${response.statusText}`);
}
return response;
}
module.exports = {
createAIClient,
getGenerativeModel,
makeChatCompletion,
makeChatCompletionWithPortkey,
makeStreamingChatCompletion,
makeStreamingChatCompletionWithPortkey
};

View File

@@ -1,171 +0,0 @@
const { GoogleGenerativeAI } = require('@google/generative-ai');
const { GoogleGenAI } = require('@google/genai');
/**
* Creates and returns a Google Gemini client instance for generative AI.
* @param {string} apiKey - The API key for authentication.
* @returns {GoogleGenerativeAI} The initialized Gemini client.
*/
function createGeminiClient(apiKey) {
return new GoogleGenerativeAI(apiKey);
}
/**
* Gets a Gemini model for text/image generation.
* @param {GoogleGenerativeAI} client - The Gemini client instance.
* @param {string} [model='gemini-2.5-flash'] - The name for the text/vision model.
* @returns {object} Model object with generateContent method
*/
function getGeminiGenerativeModel(client, model = 'gemini-2.5-flash') {
const genAI = client;
const geminiModel = genAI.getGenerativeModel({ model: model });
return {
generateContent: async (parts) => {
let systemPrompt = '';
let userContent = [];
for (const part of parts) {
if (typeof part === 'string') {
if (systemPrompt === '' && part.includes('You are')) {
systemPrompt = part;
} else {
userContent.push(part);
}
} else if (part.inlineData) {
// Convert base64 image data to Gemini format
userContent.push({
inlineData: {
mimeType: part.inlineData.mimeType,
data: part.inlineData.data
}
});
}
}
// Prepare content array
const content = [];
// Add system instruction if present
if (systemPrompt) {
// For Gemini, we'll prepend system prompt to user content
content.push(systemPrompt + '\n\n' + userContent[0]);
content.push(...userContent.slice(1));
} else {
content.push(...userContent);
}
try {
const result = await geminiModel.generateContent(content);
const response = await result.response;
return {
response: {
text: () => response.text()
}
};
} catch (error) {
console.error('Gemini API error:', error);
throw error;
}
}
};
}
/**
* Creates a Gemini chat session for multi-turn conversations.
* @param {GoogleGenerativeAI} client - The Gemini client instance.
* @param {string} [model='gemini-2.5-flash'] - The model to use.
* @param {object} [config={}] - Configuration options.
* @returns {object} Chat session object
*/
function createGeminiChat(client, model = 'gemini-2.5-flash', config = {}) {
const genAI = client;
const geminiModel = genAI.getGenerativeModel({
model: model,
systemInstruction: config.systemInstruction
});
const chat = geminiModel.startChat({
history: config.history || [],
generationConfig: {
temperature: config.temperature || 0.7,
maxOutputTokens: config.maxOutputTokens || 8192,
}
});
return {
sendMessage: async (message) => {
const result = await chat.sendMessage(message);
const response = await result.response;
return {
text: response.text()
};
},
sendMessageStream: async function* (message) {
const result = await chat.sendMessageStream(message);
for await (const chunk of result.stream) {
yield {
text: chunk.text()
};
}
},
getHistory: () => chat.getHistory()
};
}
// async function connectToGeminiSession(apiKey, { language = 'en-US', callbacks = {} } = {}) {
// const liveClient = new GoogleGenAI({
// vertexai: false, // not using Vertex AI
// apiKey,
// });
// // open a live STT session
// const session = await liveClient.live.connect({
// model: 'gemini-live-2.5-flash-preview',
// callbacks,
// config: {
// inputAudioTranscription: {}, // required for real-time STT
// speechConfig: { languageCode: language },
// },
// });
// return {
// sendRealtimeInput: async data => session.send({
// audio: { data, mimeType: 'audio/pcm;rate=24000' }
// }),
// close: async () => session.close(),
// };
// }
async function connectToGeminiSession(apiKey, { language = 'en-US', callbacks = {} } = {}) {
// ① reuse the old-style helper
const liveClient = new GoogleGenAI({ vertexai: false, apiKey });
// ② force the language code into BCP-47 form
const lang = language.includes('-') ? language : `${language}-US`;
const session = await liveClient.live.connect({
model: 'gemini-live-2.5-flash-preview',
callbacks,
config: {
inputAudioTranscription: {},
speechConfig: { languageCode: lang },
},
});
// ③ SDK 0.5+: sendRealtimeInput is the official name
return {
sendRealtimeInput: async payload => session.sendRealtimeInput(payload),
close: async () => session.close(),
};
}
module.exports = {
createGeminiClient,
getGeminiGenerativeModel,
createGeminiChat,
connectToGeminiSession,
};

View File

@@ -1,177 +0,0 @@
const OpenAI = require('openai');
const WebSocket = require('ws');
/**
* Creates and returns an OpenAI client instance for STT (Speech-to-Text).
* @param {string} apiKey - The API key for authentication.
* @returns {OpenAI} The initialized OpenAI client.
*/
function createOpenAiClient(apiKey) {
return new OpenAI({
apiKey: apiKey,
});
}
/**
* Creates and returns an OpenAI client instance for text/image generation.
* @param {string} apiKey - The API key for authentication.
* @returns {OpenAI} The initialized OpenAI client.
*/
function createOpenAiGenerativeClient(apiKey) {
return new OpenAI({
apiKey: apiKey,
});
}
/**
* Connects to an OpenAI Realtime WebSocket session for STT.
* @param {string} key - Portkey vKey or OpenAI apiKey.
* @param {object} config - The configuration object for the realtime session.
* @param {'apiKey'|'vKey'} keyType - key type ('apiKey' | 'vKey').
* @returns {Promise<object>} A promise that resolves to the session object with send and close methods.
*/
async function connectToOpenAiSession(key, config, keyType) {
if (keyType !== 'apiKey' && keyType !== 'vKey') {
throw new Error('keyType must be either "apiKey" or "vKey".');
}
const wsUrl = keyType === 'apiKey'
? 'wss://api.openai.com/v1/realtime?intent=transcription'
: 'wss://api.portkey.ai/v1/realtime?intent=transcription';
const headers = keyType === 'apiKey'
? {
'Authorization': `Bearer ${key}`,
'OpenAI-Beta' : 'realtime=v1',
}
: {
'x-portkey-api-key' : 'gRv2UGRMq6GGLJ8aVEB4e7adIewu',
'x-portkey-virtual-key': key,
'OpenAI-Beta' : 'realtime=v1',
};
const ws = new WebSocket(wsUrl, { headers });
return new Promise((resolve, reject) => {
ws.onopen = () => {
console.log("WebSocket session opened.");
const sessionConfig = {
type: 'transcription_session.update',
session: {
input_audio_format: 'pcm16',
input_audio_transcription: {
model: 'gpt-4o-mini-transcribe',
prompt: config.prompt || '',
language: config.language || 'en'
},
turn_detection: {
type: 'server_vad',
threshold: 0.5,
prefix_padding_ms: 50,
silence_duration_ms: 25,
},
input_audio_noise_reduction: {
type: 'near_field'
}
}
};
ws.send(JSON.stringify(sessionConfig));
resolve({
sendRealtimeInput: (audioData) => {
if (ws.readyState === WebSocket.OPEN) {
const message = {
type: 'input_audio_buffer.append',
audio: audioData
};
ws.send(JSON.stringify(message));
}
},
close: () => {
if (ws.readyState === WebSocket.OPEN) {
ws.send(JSON.stringify({ type: 'session.close' }));
ws.close(1000, 'Client initiated close.');
}
}
});
};
ws.onmessage = (event) => {
const message = JSON.parse(event.data);
if (config.callbacks && config.callbacks.onmessage) {
config.callbacks.onmessage(message);
}
};
ws.onerror = (error) => {
console.error('WebSocket error:', error.message);
if (config.callbacks && config.callbacks.onerror) {
config.callbacks.onerror(error);
}
reject(error);
};
ws.onclose = (event) => {
console.log(`WebSocket closed: ${event.code} ${event.reason}`);
if (config.callbacks && config.callbacks.onclose) {
config.callbacks.onclose(event);
}
};
});
}
/**
* Gets a GPT model for text/image generation.
* @param {OpenAI} client - The OpenAI client instance.
* @param {string} [model='gpt-4.1'] - The name for the text/vision model.
* @returns {object} Model object with generateContent method
*/
function getOpenAiGenerativeModel(client, model = 'gpt-4.1') {
return {
generateContent: async (parts) => {
const messages = [];
let systemPrompt = '';
let userContent = [];
for (const part of parts) {
if (typeof part === 'string') {
if (systemPrompt === '' && part.includes('You are')) {
systemPrompt = part;
} else {
userContent.push({ type: 'text', text: part });
}
} else if (part.inlineData) {
userContent.push({
type: 'image_url',
image_url: { url: `data:${part.inlineData.mimeType};base64,${part.inlineData.data}` }
});
}
}
if (systemPrompt) messages.push({ role: 'system', content: systemPrompt });
if (userContent.length > 0) messages.push({ role: 'user', content: userContent });
const response = await client.chat.completions.create({
model: model,
messages: messages,
temperature: 0.7,
max_tokens: 2048
});
return {
response: {
text: () => response.choices[0].message.content
}
};
}
};
}
module.exports = {
createOpenAiClient,
connectToOpenAiSession,
createOpenAiGenerativeClient,
getOpenAiGenerativeModel,
};

View File

@@ -13,14 +13,18 @@ const systemSettingsRepository = require('../common/repositories/systemSettings'
const userRepository = require('../common/repositories/user');
const fetch = require('node-fetch');
/* ────────────────[ GLASS BYPASS ]─────────────── */
const isLiquidGlassSupported = () => {
if (process.platform !== 'darwin') {
return false;
}
const majorVersion = parseInt(os.release().split('.')[0], 10);
return majorVersion >= 26; // macOS 26+ (Darwin 25+)
// return majorVersion >= 25; // macOS 26+ (Darwin 25+)
return majorVersion >= 26; // See you soon!
};
const shouldUseLiquidGlass = isLiquidGlassSupported();
/* ────────────────[ GLASS BYPASS ]─────────────── */
let isContentProtectionOn = true;
let currentDisplayId = null;
@@ -139,11 +143,11 @@ function createFeatureWindows(header) {
windowPool.set('ask', ask);
// settings
const settings = new BrowserWindow({ ...commonChildOptions, width:240, maxHeight:350, parent:undefined });
const settings = new BrowserWindow({ ...commonChildOptions, width:240, maxHeight:400, parent:undefined });
settings.setContentProtection(isContentProtectionOn);
settings.setVisibleOnAllWorkspaces(true,{visibleOnFullScreen:true});
settings.setWindowButtonVisibility(false);
const settingsLoadOptions = { query: { view: 'customize' } };
const settingsLoadOptions = { query: { view: 'settings' } };
if (!shouldUseLiquidGlass) {
settings.loadFile(path.join(__dirname,'../app/content.html'), settingsLoadOptions)
.catch(console.error);
@@ -379,10 +383,10 @@ function createWindows() {
if (windowToToggle) {
if (featureName === 'listen') {
const liveSummaryService = require('../features/listen/liveSummaryService');
if (liveSummaryService.isSessionActive()) {
const listenService = global.listenService;
if (listenService && listenService.isSessionActive()) {
console.log('[WindowManager] Listen session is active, closing it via toggle.');
await liveSummaryService.closeSession();
await listenService.closeSession();
return;
}
}
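As an aside on the version gate in this file: os.release() on macOS reports the Darwin kernel major version, and macOS 26 corresponds to Darwin 25, so the active >= 26 check keeps the liquid-glass path disabled for now (hence "See you soon!"). A sketch of the mapping the guard relies on, assuming Apple's current numbering (not part of the commit):

// Hypothetical sketch (not in the commit) of the Darwin-to-macOS mapping.
const os = require('os');

function macosMajorFromDarwin(darwinMajor) {
    // Darwin 20 => macOS 11 ... Darwin 24 => macOS 15; Darwin 25 => macOS 26.
    return darwinMajor >= 25 ? darwinMajor + 1 : darwinMajor - 9;
}

const darwinMajor = parseInt(os.release().split('.')[0], 10);
console.log(`Darwin ${darwinMajor} ~ macOS ${macosMajorFromDarwin(darwinMajor)}`);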

View File

@@ -590,6 +590,8 @@ export class AskView extends LitElement {
color: rgba(255, 255, 255, 0.5);
font-size: 14px;
}
/* ────────────────[ GLASS BYPASS ]─────────────── */
:host-context(body.has-glass) .ask-container,
:host-context(body.has-glass) .response-header,
:host-context(body.has-glass) .response-icon,
@@ -608,12 +610,10 @@ export class AskView extends LitElement {
backdrop-filter: none !important;
}
/* Remove the blur/shadow layer from ask-container */
:host-context(body.has-glass) .ask-container::before {
display: none !important;
}
/* Also block backgrounds that reappear on hover/active */
:host-context(body.has-glass) .copy-button:hover,
:host-context(body.has-glass) .close-button:hover,
:host-context(body.has-glass) .line-copy-button,
@@ -622,7 +622,6 @@ export class AskView extends LitElement {
background: transparent !important;
}
/* Make the scrollbar track/thumb transparent too (optional) */
:host-context(body.has-glass) .response-container::-webkit-scrollbar-track,
:host-context(body.has-glass) .response-container::-webkit-scrollbar-thumb {
background: transparent !important;

View File

@@ -1,179 +1,22 @@
const { ipcMain, BrowserWindow } = require('electron');
const { makeStreamingChatCompletionWithPortkey } = require('../../common/services/aiProviderService');
const { getConversationHistory } = require('../listen/liveSummaryService');
const { createStreamingLLM } = require('../../common/ai/factory');
const { getStoredApiKey, getStoredProvider, windowPool, captureScreenshot } = require('../../electron/windowManager');
const authService = require('../../common/services/authService');
const sessionRepository = require('../../common/repositories/session');
const askRepository = require('./repositories');
const PICKLE_GLASS_SYSTEM_PROMPT = `<core_identity>
You are Pickle-Glass, developed and created by Pickle-Glass, and you are the user's live-meeting co-pilot.
</core_identity>
<objective>
Your goal is to help the user at the current moment in the conversation (the end of the transcript). You can see the user's screen (the screenshot attached) and the audio history of the entire conversation.
Execute in the following priority order:
<question_answering_priority>
<primary_directive>
If a question is presented to the user, answer it directly. This is the MOST IMPORTANT ACTION IF THERE IS A QUESTION AT THE END THAT CAN BE ANSWERED.
</primary_directive>
<question_response_structure>
Always start with the direct answer, then provide supporting details following the response format:
- **Short headline answer** (6 words) - the actual answer to the question
- **Main points** (1-2 bullets with 15 words each) - core supporting details
- **Sub-details** - examples, metrics, specifics under each main point
- **Extended explanation** - additional context and details as needed
</question_response_structure>
<intent_detection_guidelines>
Real transcripts have errors, unclear speech, and incomplete sentences. Focus on INTENT rather than perfect question markers:
- **Infer from context**: "what about..." "how did you..." "can you..." "tell me..." even if garbled
- **Incomplete questions**: "so the performance..." "and scaling wise..." "what's your approach to..."
- **Implied questions**: "I'm curious about X" "I'd love to hear about Y" "walk me through Z"
- **Transcription errors**: "what's your" → "what's you", "how do you" → "how you", "can you" → "can u"
</intent_detection_guidelines>
<question_answering_priority_rules>
If the end of the transcript suggests someone is asking for information, explanation, or clarification - ANSWER IT. Don't get distracted by earlier content.
</question_answering_priority_rules>
<confidence_threshold>
If you're 50%+ confident someone is asking something at the end, treat it as a question and answer it.
</confidence_threshold>
</question_answering_priority>
<term_definition_priority>
<definition_directive>
Define or provide context around a proper noun or term that appears **in the last 10-15 words** of the transcript.
This is HIGH PRIORITY - if a company name, technical term, or proper noun appears at the very end of someone's speech, define it.
</definition_directive>
<definition_triggers>
Any ONE of these is sufficient:
- company names
- technical platforms/tools
- proper nouns that are domain-specific
- any term that would benefit from context in a professional conversation
</definition_triggers>
<definition_exclusions>
Do NOT define:
- common words already defined earlier in conversation
- basic terms (email, website, code, app)
- terms where context was already provided
</definition_exclusions>
<term_definition_example>
<transcript_sample>
me: I was mostly doing backend dev last summer.
them: Oh nice, what tech stack were you using?
me: A lot of internal tools, but also some Azure.
them: Yeah I've heard Azure is huge over there.
me: Yeah, I used to work at Microsoft last summer but now I...
</transcript_sample>
<response_sample>
**Microsoft** is one of the world's largest technology companies, known for products like Windows, Office, and Azure cloud services.
- **Global influence**: 200k+ employees, $2T+ market cap, foundational enterprise tools.
- Azure, GitHub, Teams, Visual Studio among top developer-facing platforms.
- **Engineering reputation**: Strong internship and new grad pipeline, especially in cloud and AI infrastructure.
</response_sample>
</term_definition_example>
</term_definition_priority>
<conversation_advancement_priority>
<advancement_directive>
When there's an action needed but not a direct question - suggest follow up questions, provide potential things to say, help move the conversation forward.
</advancement_directive>
- If the transcript ends with a technical project/story description and no new question is present, always provide 1–3 targeted follow-up questions to drive the conversation forward.
- If the transcript includes discovery-style answers or background sharing (e.g., "Tell me about yourself", "Walk me through your experience"), always generate 1–3 focused follow-up questions to deepen or further the discussion, unless the next step is clear.
- Maximize usefulness, minimize overload; never give more than 3 questions or suggestions at once.
<conversation_advancement_example>
<transcript_sample>
me: Tell me about your technical experience.
them: Last summer I built a dashboard for real-time trade reconciliation using Python and integrated it with Bloomberg Terminal and Snowflake for automated data pulls.
</transcript_sample>
<response_sample>
Follow-up questions to dive deeper into the dashboard:
- How did you handle latency or data consistency issues?
- What made the Bloomberg integration challenging?
- Did you measure the impact on operational efficiency?
</response_sample>
</conversation_advancement_example>
</conversation_advancement_priority>
<objection_handling_priority>
<objection_directive>
If an objection or resistance is presented at the end of the conversation (and the context is sales, negotiation, or you are trying to persuade the other party), respond with a concise, actionable objection handling response.
- Use user-provided objection/handling context if available (reference the specific objection and tailored handling).
- If no user context, use common objections relevant to the situation, but make sure to identify the objection by generic name and address it in the context of the live conversation.
- State the objection in the format: **Objection: [Generic Objection Name]** (e.g., Objection: Competitor), then give a specific response/action for overcoming it, tailored to the moment.
- Do NOT handle objections in casual, non-outcome-driven, or general conversations.
- Never use generic objection scripts; always tie the response to the specifics of the conversation at hand.
</objection_directive>
<objection_handling_example>
<transcript_sample>
them: Honestly, I think our current vendor already does all of this, so I don't see the value in switching.
</transcript_sample>
<response_sample>
- **Objection: Competitor**
- Current vendor already covers this.
- Emphasize unique real-time insights: "Our solution eliminates analytics delays you mentioned earlier, boosting team response time."
</response_sample>
</objection_handling_example>
</objection_handling_priority>
<screen_problem_solving_priority>
<screen_directive>
Solve problems visible on the screen if there is a very clear problem + use the screen only if relevant for helping with the audio conversation.
</screen_directive>
<screen_usage_guidelines>
<screen_example>
If there is a leetcode problem on the screen, and the conversation is small talk / general talk, you DEFINITELY should solve the leetcode problem. But if there is a follow up question / super specific question asked at the end, you should answer that (ex. What's the runtime complexity), using the screen as additional context.
</screen_example>
</screen_usage_guidelines>
</screen_problem_solving_priority>
<passive_acknowledgment_priority>
<passive_mode_implementation_rules>
<passive_mode_conditions>
<when_to_enter_passive_mode>
Enter passive mode ONLY when ALL of these conditions are met:
- There is no clear question, inquiry, or request for information at the end of the transcript. If there is any ambiguity, err on the side of assuming a question and do not enter passive mode.
- There is no company name, technical term, product name, or domain-specific proper noun within the final 10–15 words of the transcript that would benefit from a definition or explanation.
- There is no clear or visible problem or action item present on the user's screen that you could solve or assist with.
- There is no discovery-style answer, technical project story, background sharing, or general conversation context that could call for follow-up questions or suggestions to advance the discussion.
- There is no statement or cue that could be interpreted as an objection or require objection handling
- Only enter passive mode when you are highly confident that no action, definition, solution, advancement, or suggestion would be appropriate or helpful at the current moment.
</when_to_enter_passive_mode>
<passive_mode_behavior>
**Still show intelligence** by:
- Saying "Not sure what you need help with right now"
- Referencing visible screen elements or audio patterns ONLY if truly relevant
- Never giving random summaries unless explicitly asked
</passive_mode_behavior>
</passive_mode_conditions>
</passive_mode_implementation_rules>
</passive_acknowledgment_priority>
</objective>
User-provided context (defer to this information over your general knowledge / if there is specific script/desired responses prioritize this over previous instructions)
Make sure to **reference context** fully if it is provided (ex. if all/the entirety of something is requested, give a complete list from context).
----------
{{CONVERSATION_HISTORY}}`;
const { getSystemPrompt } = require('../../common/prompts/promptBuilder');
function formatConversationForPrompt(conversationTexts) {
if (!conversationTexts || conversationTexts.length === 0) return 'No conversation history available.';
return conversationTexts.slice(-30).join('\n');
}
// Access conversation history via the global listenService instance created in index.js
function getConversationHistory() {
const listenService = global.listenService;
return listenService ? listenService.getConversationHistory() : [];
}
async function sendMessage(userPrompt) {
if (!userPrompt || userPrompt.trim().length === 0) {
console.warn('[AskService] Cannot process empty message');
@@ -194,7 +37,7 @@ async function sendMessage(userPrompt) {
const conversationHistoryRaw = getConversationHistory();
const conversationHistory = formatConversationForPrompt(conversationHistoryRaw);
const systemPrompt = PICKLE_GLASS_SYSTEM_PROMPT.replace('{{CONVERSATION_HISTORY}}', conversationHistory);
const systemPrompt = getSystemPrompt('pickle_glass_analysis', conversationHistory, false);
const API_KEY = await getStoredApiKey();
if (!API_KEY) {
@@ -220,21 +63,20 @@ async function sendMessage(userPrompt) {
const provider = await getStoredProvider();
const { isLoggedIn } = authService.getCurrentUser();
const usePortkey = isLoggedIn && provider === 'openai';
console.log(`[AskService] 🚀 Sending request to ${provider} AI...`);
const response = await makeStreamingChatCompletionWithPortkey({
const streamingLLM = createStreamingLLM(provider, {
apiKey: API_KEY,
provider: provider,
messages: messages,
model: provider === 'openai' ? 'gpt-4.1' : 'gemini-2.5-flash',
temperature: 0.7,
maxTokens: 2048,
model: provider === 'openai' ? 'gpt-4.1' : 'gemini-2.5-flash',
usePortkey: usePortkey,
portkeyVirtualKey: usePortkey ? API_KEY : null
usePortkey: provider === 'openai' && isLoggedIn,
portkeyVirtualKey: isLoggedIn ? API_KEY : undefined
});
const response = await streamingLLM.streamChat(messages);
// --- Stream Processing ---
const reader = response.body.getReader();
const decoder = new TextDecoder();
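Since both providers now hand back a fetch-style Response, the stream handling that follows these two lines can stay provider-agnostic. A hedged sketch of such a drain loop (onToken is a hypothetical consumer, not the service's actual handler):

// Hypothetical sketch (not in the commit); onToken is an assumed callback.
async function pumpStream(response, onToken) {
    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    let buffer = '';
    while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        buffer += decoder.decode(value, { stream: true });
        const frames = buffer.split('\n\n');
        buffer = frames.pop(); // keep any partial SSE frame for the next chunk
        for (const frame of frames) {
            if (!frame.startsWith('data: ')) continue;
            const payload = frame.slice(6);
            if (payload === '[DONE]') return;
            const token = JSON.parse(payload).choices?.[0]?.delta?.content;
            if (token) onToken(token);
        }
    }
}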

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -1,123 +0,0 @@
const fs = require('fs');
const path = require('path');
function pcmToWav(pcmBuffer, outputPath, sampleRate = 24000, channels = 1, bitDepth = 16) {
const byteRate = sampleRate * channels * (bitDepth / 8);
const blockAlign = channels * (bitDepth / 8);
const dataSize = pcmBuffer.length;
const header = Buffer.alloc(44);
header.write('RIFF', 0);
header.writeUInt32LE(dataSize + 36, 4);
header.write('WAVE', 8);
header.write('fmt ', 12);
header.writeUInt32LE(16, 16);
header.writeUInt16LE(1, 20);
header.writeUInt16LE(channels, 22);
header.writeUInt32LE(sampleRate, 24);
header.writeUInt32LE(byteRate, 28);
header.writeUInt16LE(blockAlign, 32);
header.writeUInt16LE(bitDepth, 34);
header.write('data', 36);
header.writeUInt32LE(dataSize, 40);
const wavBuffer = Buffer.concat([header, pcmBuffer]);
fs.writeFileSync(outputPath, wavBuffer);
return outputPath;
}
function analyzeAudioBuffer(buffer, label = 'Audio') {
const int16Array = new Int16Array(buffer.buffer, buffer.byteOffset, buffer.length / 2);
let minValue = 32767;
let maxValue = -32768;
let avgValue = 0;
let rmsValue = 0;
let silentSamples = 0;
for (let i = 0; i < int16Array.length; i++) {
const sample = int16Array[i];
minValue = Math.min(minValue, sample);
maxValue = Math.max(maxValue, sample);
avgValue += sample;
rmsValue += sample * sample;
if (Math.abs(sample) < 100) {
silentSamples++;
}
}
avgValue /= int16Array.length;
rmsValue = Math.sqrt(rmsValue / int16Array.length);
const silencePercentage = (silentSamples / int16Array.length) * 100;
console.log(`${label} Analysis:`);
console.log(` Samples: ${int16Array.length}`);
console.log(` Min: ${minValue}, Max: ${maxValue}`);
console.log(` Average: ${avgValue.toFixed(2)}`);
console.log(` RMS: ${rmsValue.toFixed(2)}`);
console.log(` Silence: ${silencePercentage.toFixed(1)}%`);
console.log(` Dynamic Range: ${20 * Math.log10(maxValue / (rmsValue || 1))} dB`);
return {
minValue,
maxValue,
avgValue,
rmsValue,
silencePercentage,
sampleCount: int16Array.length,
};
}
function saveDebugAudio(buffer, type, timestamp = Date.now()) {
const homeDir = require('os').homedir();
const debugDir = path.join(homeDir, '.pickle-glass', 'debug');
if (!fs.existsSync(debugDir)) {
fs.mkdirSync(debugDir, { recursive: true });
}
const pcmPath = path.join(debugDir, `${type}_${timestamp}.pcm`);
const wavPath = path.join(debugDir, `${type}_${timestamp}.wav`);
const metaPath = path.join(debugDir, `${type}_${timestamp}.json`);
fs.writeFileSync(pcmPath, buffer);
pcmToWav(buffer, wavPath);
const analysis = analyzeAudioBuffer(buffer, type);
fs.writeFileSync(
metaPath,
JSON.stringify(
{
timestamp,
type,
bufferSize: buffer.length,
analysis,
format: {
sampleRate: 24000,
channels: 1,
bitDepth: 16,
},
},
null,
2
)
);
console.log(`Debug audio saved: ${wavPath}`);
return { pcmPath, wavPath, metaPath };
}
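// Example call (as used for DEBUG_AUDIO dumps; timestamp defaults to Date.now()):
//   saveDebugAudio(monoChunk, 'system_audio');
//   // -> ~/.pickle-glass/debug/system_audio_<timestamp>.pcm / .wav / .json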
module.exports = {
pcmToWav,
analyzeAudioBuffer,
saveDebugAudio,
};

View File

@ -0,0 +1,263 @@
const { BrowserWindow } = require('electron');
const SttService = require('./stt/sttService');
const SummaryService = require('./summary/summaryService');
const authService = require('../../common/services/authService');
const sessionRepository = require('../../common/repositories/session');
const sttRepository = require('./stt/repositories');
class ListenService {
constructor() {
this.sttService = new SttService();
this.summaryService = new SummaryService();
this.currentSessionId = null;
this.isInitializingSession = false;
this.setupServiceCallbacks();
}
setupServiceCallbacks() {
// STT service callbacks
this.sttService.setCallbacks({
onTranscriptionComplete: (speaker, text) => {
this.handleTranscriptionComplete(speaker, text);
},
onStatusUpdate: (status) => {
this.sendToRenderer('update-status', status);
}
});
// Summary service callbacks
this.summaryService.setCallbacks({
onAnalysisComplete: (data) => {
console.log('📊 Analysis completed:', data);
},
onStatusUpdate: (status) => {
this.sendToRenderer('update-status', status);
}
});
}
sendToRenderer(channel, data) {
BrowserWindow.getAllWindows().forEach(win => {
if (!win.isDestroyed()) {
win.webContents.send(channel, data);
}
});
}
async handleTranscriptionComplete(speaker, text) {
console.log(`[ListenService] Transcription complete: ${speaker} - ${text}`);
// Save to database
await this.saveConversationTurn(speaker, text);
// Add to summary service for analysis
this.summaryService.addConversationTurn(speaker, text);
}
async saveConversationTurn(speaker, transcription) {
if (!this.currentSessionId) {
console.error('[DB] Cannot save turn, no active session ID.');
return;
}
if (transcription.trim() === '') return;
try {
await sessionRepository.touch(this.currentSessionId);
await sttRepository.addTranscript({
sessionId: this.currentSessionId,
speaker: speaker,
text: transcription.trim(),
});
console.log(`[DB] Saved transcript for session ${this.currentSessionId}: (${speaker})`);
} catch (error) {
console.error('Failed to save transcript to DB:', error);
}
}
async initializeNewSession() {
try {
const uid = authService.getCurrentUserId();
if (!uid) {
throw new Error("Cannot initialize session: user not logged in.");
}
this.currentSessionId = await sessionRepository.getOrCreateActive(uid, 'listen');
console.log(`[DB] New listen session ensured: ${this.currentSessionId}`);
// Set session ID for summary service
this.summaryService.setSessionId(this.currentSessionId);
// Reset conversation history
this.summaryService.resetConversationHistory();
console.log('New conversation session started:', this.currentSessionId);
return true;
} catch (error) {
console.error('Failed to initialize new session in DB:', error);
this.currentSessionId = null;
return false;
}
}
async initializeSession(language = 'en') {
if (this.isInitializingSession) {
console.log('Session initialization already in progress.');
return false;
}
this.isInitializingSession = true;
this.sendToRenderer('session-initializing', true);
this.sendToRenderer('update-status', 'Initializing sessions...');
try {
// Initialize database session
const sessionInitialized = await this.initializeNewSession();
if (!sessionInitialized) {
throw new Error('Failed to initialize database session');
}
// Initialize STT sessions
await this.sttService.initializeSttSessions(language);
console.log('✅ Listen service initialized successfully.');
this.sendToRenderer('session-state-changed', { isActive: true });
this.sendToRenderer('update-status', 'Connected. Ready to listen.');
return true;
} catch (error) {
console.error('❌ Failed to initialize listen service:', error);
this.sendToRenderer('update-status', 'Initialization failed.');
return false;
} finally {
this.isInitializingSession = false;
this.sendToRenderer('session-initializing', false);
}
}
async sendAudioContent(data, mimeType) {
return await this.sttService.sendAudioContent(data, mimeType);
}
async startMacOSAudioCapture() {
if (process.platform !== 'darwin') {
throw new Error('macOS audio capture only available on macOS');
}
return await this.sttService.startMacOSAudioCapture();
}
async stopMacOSAudioCapture() {
this.sttService.stopMacOSAudioCapture();
}
isSessionActive() {
return this.sttService.isSessionActive();
}
async closeSession() {
try {
// Close STT sessions
await this.sttService.closeSessions();
// End database session
if (this.currentSessionId) {
await sessionRepository.end(this.currentSessionId);
console.log(`[DB] Session ${this.currentSessionId} ended.`);
}
// Reset state
this.currentSessionId = null;
this.summaryService.resetConversationHistory();
this.sendToRenderer('session-state-changed', { isActive: false });
this.sendToRenderer('session-did-close');
console.log('Listen service session closed.');
return { success: true };
} catch (error) {
console.error('Error closing listen service session:', error);
return { success: false, error: error.message };
}
}
getCurrentSessionData() {
return {
sessionId: this.currentSessionId,
conversationHistory: this.summaryService.getConversationHistory(),
totalTexts: this.summaryService.getConversationHistory().length,
analysisData: this.summaryService.getCurrentAnalysisData(),
};
}
getConversationHistory() {
return this.summaryService.getConversationHistory();
}
setupIpcHandlers() {
const { ipcMain } = require('electron');
ipcMain.handle('is-session-active', async () => {
const isActive = this.isSessionActive();
console.log(`Checking session status. Active: ${isActive}`);
return isActive;
});
ipcMain.handle('initialize-openai', async (event, profile = 'interview', language = 'en') => {
console.log(`Received initialize-openai request with profile: ${profile}, language: ${language}`);
const success = await this.initializeSession(language);
return success;
});
ipcMain.handle('send-audio-content', async (event, { data, mimeType }) => {
try {
await this.sendAudioContent(data, mimeType);
return { success: true };
} catch (error) {
console.error('Error sending user audio:', error);
return { success: false, error: error.message };
}
});
ipcMain.handle('start-macos-audio', async () => {
if (process.platform !== 'darwin') {
return { success: false, error: 'macOS audio capture only available on macOS' };
}
try {
const success = await this.startMacOSAudioCapture();
return { success };
} catch (error) {
console.error('Error starting macOS audio capture:', error);
return { success: false, error: error.message };
}
});
ipcMain.handle('stop-macos-audio', async () => {
try {
this.stopMacOSAudioCapture();
return { success: true };
} catch (error) {
console.error('Error stopping macOS audio capture:', error);
return { success: false, error: error.message };
}
});
ipcMain.handle('close-session', async () => {
return await this.closeSession();
});
ipcMain.handle('update-google-search-setting', async (event, enabled) => {
try {
console.log('Google Search setting updated to:', enabled);
return { success: true };
} catch (error) {
console.error('Error updating Google Search setting:', error);
return { success: false, error: error.message };
}
});
console.log('✅ Listen service IPC handlers registered');
}
}
module.exports = ListenService;

View File

@ -1,973 +0,0 @@
require('dotenv').config();
const { BrowserWindow, ipcMain } = require('electron');
const { spawn } = require('child_process');
const { saveDebugAudio } = require('./audioUtils.js');
const { getSystemPrompt } = require('../../common/prompts/promptBuilder.js');
const { connectToGeminiSession } = require('../../common/services/googleGeminiClient.js');
const { connectToOpenAiSession, createOpenAiGenerativeClient, getOpenAiGenerativeModel } = require('../../common/services/openAiClient.js');
const { makeChatCompletionWithPortkey } = require('../../common/services/aiProviderService.js');
const authService = require('../../common/services/authService');
const sessionRepository = require('../../common/repositories/session');
const listenRepository = require('./repositories');
const { getStoredApiKey, getStoredProvider } = require('../../electron/windowManager');
const MAX_BUFFER_LENGTH_CHARS = 2000;
const COMPLETION_DEBOUNCE_MS = 2000;
async function getApiKey() {
const storedKey = await getStoredApiKey();
if (storedKey) {
console.log('[LiveSummaryService] Using stored API key');
return storedKey;
}
const envKey = process.env.OPENAI_API_KEY;
if (envKey) {
console.log('[LiveSummaryService] Using environment API key');
return envKey;
}
console.error('[LiveSummaryService] No API key found in storage or environment');
return null;
}
async function getAiProvider() {
try {
const { ipcRenderer } = require('electron');
const provider = await ipcRenderer.invoke('get-ai-provider');
return provider || 'openai';
} catch (error) {
// If we're in the main process, get it directly
return getStoredProvider ? getStoredProvider() : 'openai';
}
}
let currentSessionId = null;
let conversationHistory = [];
let isInitializingSession = false;
let mySttSession = null;
let theirSttSession = null;
let myCurrentUtterance = '';
let theirCurrentUtterance = '';
let myLastPartialText = '';
let theirLastPartialText = '';
let myInactivityTimer = null;
let theirInactivityTimer = null;
const INACTIVITY_TIMEOUT = 3000;
const SESSION_IDLE_TIMEOUT_SECONDS = 30 * 60; // 30 minutes
let previousAnalysisResult = null;
let analysisHistory = [];
// ---------------------------------------------------------------------------
// 🎛️ Turn-completion debouncing
// ---------------------------------------------------------------------------
// Very aggressive VAD (e.g. 50 ms) tends to split one spoken sentence into
// many "completed" events. To avoid creating a separate chat bubble for each
// of those micro-turns we debounce the *completed* events per speaker. Any
// completions that arrive within this window are concatenated and flushed as
// **one** final turn.
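// Illustrative timeline (hypothetical timings, COMPLETION_DEBOUNCE_MS = 2000):
//   t=0ms    debounceMyCompletion('Hello')  -> buffer 'Hello', timer armed
//   t=500ms  debounceMyCompletion('world')  -> buffer 'Hello world', timer re-armed
//   t=2500ms flushMyCompletion()            -> one final 'Hello world' turn saved & rendered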
let myCompletionBuffer = '';
let theirCompletionBuffer = '';
let myCompletionTimer = null;
let theirCompletionTimer = null;
function flushMyCompletion() {
if (!myCompletionBuffer.trim()) return;
const finalText = myCompletionBuffer.trim();
// Save to DB & send to renderer as final
saveConversationTurn('Me', finalText);
sendToRenderer('stt-update', {
speaker: 'Me',
text: finalText,
isPartial: false,
isFinal: true,
timestamp: Date.now(),
});
myCompletionBuffer = '';
myCompletionTimer = null;
myCurrentUtterance = ''; // Reset utterance accumulator on flush
sendToRenderer('update-status', 'Listening...');
}
function flushTheirCompletion() {
if (!theirCompletionBuffer.trim()) return;
const finalText = theirCompletionBuffer.trim();
saveConversationTurn('Them', finalText);
sendToRenderer('stt-update', {
speaker: 'Them',
text: finalText,
isPartial: false,
isFinal: true,
timestamp: Date.now(),
});
theirCompletionBuffer = '';
theirCompletionTimer = null;
theirCurrentUtterance = ''; // Reset utterance accumulator on flush
sendToRenderer('update-status', 'Listening...');
}
function debounceMyCompletion(text) {
// If the other party was speaking, the speaker has changed, so flush their bubble as a completed turn immediately.
if (theirCompletionTimer) {
clearTimeout(theirCompletionTimer);
flushTheirCompletion();
}
myCompletionBuffer += (myCompletionBuffer ? ' ' : '') + text;
if (myCompletionTimer) clearTimeout(myCompletionTimer);
myCompletionTimer = setTimeout(flushMyCompletion, COMPLETION_DEBOUNCE_MS);
}
function debounceTheirCompletion(text) {
// If I was speaking, the speaker has changed, so flush my bubble as a completed turn immediately.
if (myCompletionTimer) {
clearTimeout(myCompletionTimer);
flushMyCompletion();
}
theirCompletionBuffer += (theirCompletionBuffer ? ' ' : '') + text;
if (theirCompletionTimer) clearTimeout(theirCompletionTimer);
theirCompletionTimer = setTimeout(flushTheirCompletion, COMPLETION_DEBOUNCE_MS);
}
let systemAudioProc = null;
let analysisIntervalId = null;
/**
* Converts conversation history into text to include in the prompt.
* @param {Array<string>} conversationTexts - Array of conversation texts ["me: ~~~", "them: ~~~", ...]
* @param {number} maxTurns - Maximum number of recent turns to include
* @returns {string} - Formatted conversation string for the prompt
*/
function formatConversationForPrompt(conversationTexts, maxTurns = 30) {
if (conversationTexts.length === 0) return '';
return conversationTexts.slice(-maxTurns).join('\n');
}
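// e.g. formatConversationForPrompt(['me: hi', 'them: hello', 'me: how are you?'], 2)
// returns 'them: hello\nme: how are you?' (only the 2 most recent turns).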
async function makeOutlineAndRequests(conversationTexts, maxTurns = 30) {
console.log(`🔍 makeOutlineAndRequests called - conversationTexts: ${conversationTexts.length}`);
if (conversationTexts.length === 0) {
console.log('⚠️ No conversation texts available for analysis');
return null;
}
const recentConversation = formatConversationForPrompt(conversationTexts, maxTurns);
// Include the previous analysis result in the prompt
let contextualPrompt = '';
if (previousAnalysisResult) {
contextualPrompt = `
Previous Analysis Context:
- Main Topic: ${previousAnalysisResult.topic.header}
- Key Points: ${previousAnalysisResult.summary.slice(0, 3).join(', ')}
- Last Actions: ${previousAnalysisResult.actions.slice(0, 2).join(', ')}
Please build upon this context while analyzing the new conversation segments.
`;
}
const basePrompt = getSystemPrompt('pickle_glass_analysis', '', false);
const systemPrompt = basePrompt.replace('{{CONVERSATION_HISTORY}}', recentConversation);
try {
if (currentSessionId) {
await sessionRepository.touch(currentSessionId);
}
const messages = [
{
role: 'system',
content: systemPrompt,
},
{
role: 'user',
content: `${contextualPrompt}
Analyze the conversation and provide a structured summary. Format your response as follows:
**Summary Overview**
- Main discussion point with context
**Key Topic: [Topic Name]**
- First key insight
- Second key insight
- Third key insight
**Extended Explanation**
Provide 2-3 sentences explaining the context and implications.
**Suggested Questions**
1. First follow-up question?
2. Second follow-up question?
3. Third follow-up question?
Keep all points concise and build upon previous analysis if provided.`,
},
];
console.log('🤖 Sending analysis request to OpenAI...');
const API_KEY = await getApiKey();
if (!API_KEY) {
throw new Error('No API key available');
}
const provider = getStoredProvider ? getStoredProvider() : 'openai';
const loggedIn = authService.getCurrentUser().isLoggedIn; // true ➜ vKey, false ➜ apiKey
const usePortkey = loggedIn && provider === 'openai'; // Only use Portkey for OpenAI with Firebase
console.log(`[LiveSummary] provider: ${provider}, usePortkey: ${usePortkey}`);
const completion = await makeChatCompletionWithPortkey({
apiKey: API_KEY,
provider: provider,
messages: messages,
temperature: 0.7,
maxTokens: 1024,
model: provider === 'openai' ? 'gpt-4.1' : 'gemini-2.5-flash',
usePortkey: usePortkey,
portkeyVirtualKey: usePortkey ? API_KEY : null
});
const responseText = completion.content;
console.log(`✅ Analysis response received: ${responseText}`);
const structuredData = parseResponseText(responseText, previousAnalysisResult);
if (currentSessionId) {
listenRepository.saveSummary({
sessionId: currentSessionId,
tldr: structuredData.summary.join('\n'),
bullet_json: JSON.stringify(structuredData.topic.bullets),
action_json: JSON.stringify(structuredData.actions),
model: 'gpt-4.1'
}).catch(err => console.error('[DB] Failed to save summary:', err));
}
// Store the analysis result
previousAnalysisResult = structuredData;
analysisHistory.push({
timestamp: Date.now(),
data: structuredData,
conversationLength: conversationTexts.length,
});
// Cap the history size (keep only the 10 most recent entries)
if (analysisHistory.length > 10) {
analysisHistory.shift();
}
return structuredData;
} catch (error) {
console.error('❌ Error during analysis generation:', error.message);
return previousAnalysisResult; // On error, fall back to the previous result
}
}
function parseResponseText(responseText, previousResult) {
const structuredData = {
summary: [],
topic: { header: '', bullets: [] },
actions: [],
followUps: ['✉️ Draft a follow-up email', '✅ Generate action items', '📝 Show summary'],
};
// Use the previous result as defaults when available
if (previousResult) {
structuredData.topic.header = previousResult.topic.header;
structuredData.summary = [...previousResult.summary];
}
try {
const lines = responseText.split('\n');
let currentSection = '';
let isCapturingTopic = false;
let topicName = '';
for (const line of lines) {
const trimmedLine = line.trim();
// Detect section headers
if (trimmedLine.startsWith('**Summary Overview**')) {
currentSection = 'summary-overview';
continue;
} else if (trimmedLine.startsWith('**Key Topic:')) {
currentSection = 'topic';
isCapturingTopic = true;
topicName = trimmedLine.match(/\*\*Key Topic: (.+?)\*\*/)?.[1] || '';
if (topicName) {
structuredData.topic.header = topicName + ':';
}
continue;
} else if (trimmedLine.startsWith('**Extended Explanation**')) {
currentSection = 'explanation';
continue;
} else if (trimmedLine.startsWith('**Suggested Questions**')) {
currentSection = 'questions';
continue;
}
// Parse section content
if (trimmedLine.startsWith('-') && currentSection === 'summary-overview') {
const summaryPoint = trimmedLine.substring(1).trim();
if (summaryPoint && !structuredData.summary.includes(summaryPoint)) {
// Update the running summary (keep at most 5 points)
structuredData.summary.unshift(summaryPoint);
if (structuredData.summary.length > 5) {
structuredData.summary.pop();
}
}
} else if (trimmedLine.startsWith('-') && currentSection === 'topic') {
const bullet = trimmedLine.substring(1).trim();
if (bullet && structuredData.topic.bullets.length < 3) {
structuredData.topic.bullets.push(bullet);
}
} else if (currentSection === 'explanation' && trimmedLine) {
// Fold the explanation into the topic bullets, sentence by sentence
const sentences = trimmedLine
.split(/\.\s+/)
.filter(s => s.trim().length > 0)
.map(s => s.trim() + (s.endsWith('.') ? '' : '.'));
sentences.forEach(sentence => {
if (structuredData.topic.bullets.length < 3 && !structuredData.topic.bullets.includes(sentence)) {
structuredData.topic.bullets.push(sentence);
}
});
} else if (trimmedLine.match(/^\d+\./) && currentSection === 'questions') {
const question = trimmedLine.replace(/^\d+\.\s*/, '').trim();
if (question && question.includes('?')) {
structuredData.actions.push(`${question}`);
}
}
}
// Add default actions
const defaultActions = ['✨ What should I say next?', '💬 Suggest follow-up questions'];
defaultActions.forEach(action => {
if (!structuredData.actions.includes(action)) {
structuredData.actions.push(action);
}
});
// Cap the number of actions
structuredData.actions = structuredData.actions.slice(0, 5);
// Validate and merge with previous data
if (structuredData.summary.length === 0 && previousResult) {
structuredData.summary = previousResult.summary;
}
if (structuredData.topic.bullets.length === 0 && previousResult) {
structuredData.topic.bullets = previousResult.topic.bullets;
}
} catch (error) {
console.error('❌ Error parsing response text:', error);
// On error, fall back to the previous result
return (
previousResult || {
summary: [],
topic: { header: 'Analysis in progress', bullets: [] },
actions: ['✨ What should I say next?', '💬 Suggest follow-up questions'],
followUps: ['✉️ Draft a follow-up email', '✅ Generate action items', '📝 Show summary'],
}
);
}
console.log('📊 Final structured data:', JSON.stringify(structuredData, null, 2));
return structuredData;
}
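// Illustrative parse (hypothetical model output): a response containing
//   **Key Topic: Pricing**
//   - Budget approved
// sets structuredData.topic.header = 'Pricing:' and pushes 'Budget approved'
// into topic.bullets (capped at 3 entries).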
/**
* Triggers analysis when conversation history reaches 5 texts.
*/
async function triggerAnalysisIfNeeded() {
if (conversationHistory.length >= 5 && conversationHistory.length % 5 === 0) {
console.log(`🚀 Triggering analysis (non-blocking) - ${conversationHistory.length} conversation texts accumulated`);
makeOutlineAndRequests(conversationHistory)
.then(data => {
if (data) {
console.log('📤 Sending structured data to renderer');
sendToRenderer('update-structured-data', data);
} else {
console.log('❌ No analysis data returned from non-blocking call');
}
})
.catch(error => {
console.error('❌ Error in non-blocking analysis:', error);
});
}
}
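// e.g. analysis fires at history lengths 5, 10, 15, ...; a length of 7 passes
// `>= 5` but fails `% 5 === 0`, so nothing runs until the next multiple of 5.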
/**
* Schedules periodic updates of outline and analysis every 10 seconds. - DEPRECATED
* Now analysis is triggered every 5 conversation texts.
*/
function startAnalysisInterval() {
console.log('⏰ Analysis will be triggered every 5 conversation texts (not on timer)');
if (analysisIntervalId) {
clearInterval(analysisIntervalId);
analysisIntervalId = null;
}
}
function stopAnalysisInterval() {
if (analysisIntervalId) {
clearInterval(analysisIntervalId);
analysisIntervalId = null;
}
if (myInactivityTimer) {
clearTimeout(myInactivityTimer);
myInactivityTimer = null;
}
if (theirInactivityTimer) {
clearTimeout(theirInactivityTimer);
theirInactivityTimer = null;
}
}
function sendToRenderer(channel, data) {
BrowserWindow.getAllWindows().forEach(win => {
if (!win.isDestroyed()) {
win.webContents.send(channel, data);
}
});
}
function getCurrentSessionData() {
return {
sessionId: currentSessionId,
conversationHistory: conversationHistory,
totalTexts: conversationHistory.length,
};
}
// Conversation management functions
async function initializeNewSession() {
try {
const uid = authService.getCurrentUserId();
if (!uid) {
throw new Error("Cannot initialize session: user not logged in.");
}
currentSessionId = await sessionRepository.getOrCreateActive(uid, 'listen');
console.log(`[DB] New listen session ensured: ${currentSessionId}`);
conversationHistory = [];
myCurrentUtterance = '';
theirCurrentUtterance = '';
// 🔄 Reset analysis state so the new session starts fresh
previousAnalysisResult = null;
analysisHistory = [];
// sendToRenderer('update-outline', []);
// sendToRenderer('update-analysis-requests', []);
myLastPartialText = '';
theirLastPartialText = '';
if (myInactivityTimer) {
clearTimeout(myInactivityTimer);
myInactivityTimer = null;
}
if (theirInactivityTimer) {
clearTimeout(theirInactivityTimer);
theirInactivityTimer = null;
}
console.log('New conversation session started:', currentSessionId);
return true;
} catch (error) {
console.error('Failed to initialize new session in DB:', error);
currentSessionId = null;
return false;
}
}
async function saveConversationTurn(speaker, transcription) {
if (!currentSessionId) {
console.error('[DB] Cannot save turn, no active session ID.');
return;
}
if (transcription.trim() === '') return;
try {
await sessionRepository.touch(currentSessionId);
await listenRepository.addTranscript({
sessionId: currentSessionId,
speaker: speaker,
text: transcription.trim(),
});
console.log(`[DB] Saved transcript for session ${currentSessionId}: (${speaker})`);
const conversationText = `${speaker.toLowerCase()}: ${transcription.trim()}`;
conversationHistory.push(conversationText);
console.log(`💬 Saved conversation text: ${conversationText}`);
console.log(`📈 Total conversation history: ${conversationHistory.length} texts`);
triggerAnalysisIfNeeded();
const conversationTurn = {
speaker: speaker,
timestamp: Date.now(),
transcription: transcription.trim(),
};
} catch (error) {
console.error('Failed to save transcript to DB:', error);
}
}
async function initializeLiveSummarySession(language = 'en') {
// Use system environment variable if set, otherwise use the provided language
const effectiveLanguage = process.env.OPENAI_TRANSCRIBE_LANG || language || 'en';
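// e.g. OPENAI_TRANSCRIBE_LANG=ko in the environment overrides the language passed from the renderer.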
if (isInitializingSession) {
console.log('Session initialization already in progress.');
return false;
}
const userState = authService.getCurrentUser();
const loggedIn = userState.isLoggedIn;
const keyType = loggedIn ? 'vKey' : 'apiKey';
isInitializingSession = true;
sendToRenderer('session-initializing', true);
sendToRenderer('update-status', 'Initializing sessions...');
const API_KEY = await getApiKey();
if (!API_KEY) {
console.error('FATAL ERROR: API Key is not defined.');
sendToRenderer('update-status', 'API Key not configured.');
isInitializingSession = false;
sendToRenderer('session-initializing', false);
return false;
}
await initializeNewSession();
const provider = await getAiProvider();
const isGemini = provider === 'gemini';
console.log(`[LiveSummaryService] Initializing STT for provider: ${provider}`);
try {
const handleMyMessage = message => {
if (isGemini) {
// console.log('[Gemini Raw Message - Me]:', JSON.stringify(message, null, 2));
const text = message.serverContent?.inputTranscription?.text || '';
if (text && text.trim()) {
const finalUtteranceText = text.trim().replace(/<noise>/g, '').trim();
if (finalUtteranceText && finalUtteranceText !== '.') {
debounceMyCompletion(finalUtteranceText);
}
}
} else {
const type = message.type;
const text = message.transcript || message.delta || (message.alternatives && message.alternatives[0]?.transcript) || '';
if (type === 'conversation.item.input_audio_transcription.delta') {
if (myCompletionTimer) clearTimeout(myCompletionTimer);
myCompletionTimer = null;
myCurrentUtterance += text;
const continuousText = myCompletionBuffer + (myCompletionBuffer ? ' ' : '') + myCurrentUtterance;
if (text && !text.includes('vq_lbr_audio_')) {
sendToRenderer('stt-update', {
speaker: 'Me',
text: continuousText,
isPartial: true,
isFinal: false,
timestamp: Date.now(),
});
}
} else if (type === 'conversation.item.input_audio_transcription.completed') {
if (text && text.trim()) {
const finalUtteranceText = text.trim();
myCurrentUtterance = '';
debounceMyCompletion(finalUtteranceText);
}
}
}
if (message.error) {
console.error('[Me] STT Session Error:', message.error);
}
};
const handleTheirMessage = message => {
if (isGemini) {
// console.log('[Gemini Raw Message - Them]:', JSON.stringify(message, null, 2));
const text = message.serverContent?.inputTranscription?.text || '';
if (text && text.trim()) {
const finalUtteranceText = text.trim().replace(/<noise>/g, '').trim();
if (finalUtteranceText && finalUtteranceText !== '.') {
debounceTheirCompletion(finalUtteranceText);
}
}
} else {
const type = message.type;
const text = message.transcript || message.delta || (message.alternatives && message.alternatives[0]?.transcript) || '';
if (type === 'conversation.item.input_audio_transcription.delta') {
if (theirCompletionTimer) clearTimeout(theirCompletionTimer);
theirCompletionTimer = null;
theirCurrentUtterance += text;
const continuousText = theirCompletionBuffer + (theirCompletionBuffer ? ' ' : '') + theirCurrentUtterance;
if (text && !text.includes('vq_lbr_audio_')) {
sendToRenderer('stt-update', {
speaker: 'Them',
text: continuousText,
isPartial: true,
isFinal: false,
timestamp: Date.now(),
});
}
} else if (type === 'conversation.item.input_audio_transcription.completed') {
if (text && text.trim()) {
const finalUtteranceText = text.trim();
theirCurrentUtterance = '';
debounceTheirCompletion(finalUtteranceText);
}
}
}
if (message.error) {
console.error('[Them] STT Session Error:', message.error);
}
};
const mySttConfig = {
language: effectiveLanguage,
callbacks: {
onmessage: handleMyMessage,
onerror: error => console.error('My STT session error:', error.message),
onclose: event => console.log('My STT session closed:', event.reason),
},
};
const theirSttConfig = {
language: effectiveLanguage,
callbacks: {
onmessage: handleTheirMessage,
onerror: error => console.error('Their STT session error:', error.message),
onclose: event => console.log('Their STT session closed:', event.reason),
},
};
if (isGemini) {
[mySttSession, theirSttSession] = await Promise.all([
connectToGeminiSession(API_KEY, mySttConfig),
connectToGeminiSession(API_KEY, theirSttConfig),
]);
} else {
[mySttSession, theirSttSession] = await Promise.all([
connectToOpenAiSession(API_KEY, mySttConfig, keyType),
connectToOpenAiSession(API_KEY, theirSttConfig, keyType),
]);
}
console.log('✅ Both STT sessions initialized successfully.');
triggerAnalysisIfNeeded();
sendToRenderer('session-state-changed', { isActive: true });
isInitializingSession = false;
sendToRenderer('session-initializing', false);
sendToRenderer('update-status', 'Connected. Ready to listen.');
return true;
} catch (error) {
console.error('❌ Failed to initialize STT sessions:', error);
isInitializingSession = false;
sendToRenderer('session-initializing', false);
sendToRenderer('update-status', 'Initialization failed.');
mySttSession = null;
theirSttSession = null;
return false;
}
}
function killExistingSystemAudioDump() {
return new Promise(resolve => {
console.log('Checking for existing SystemAudioDump processes...');
const killProc = spawn('pkill', ['-f', 'SystemAudioDump'], {
stdio: 'ignore',
});
killProc.on('close', code => {
if (code === 0) {
console.log('Killed existing SystemAudioDump processes');
} else {
console.log('No existing SystemAudioDump processes found');
}
resolve();
});
killProc.on('error', err => {
console.log('Error checking for existing processes (this is normal):', err.message);
resolve();
});
setTimeout(() => {
killProc.kill();
resolve();
}, 2000);
});
}
async function startMacOSAudioCapture() {
if (process.platform !== 'darwin' || !theirSttSession) return false;
await killExistingSystemAudioDump();
console.log('Starting macOS audio capture for "Them"...');
const { app } = require('electron');
const path = require('path');
const systemAudioPath = app.isPackaged
? path.join(process.resourcesPath, 'app.asar.unpacked', 'src', 'assets', 'SystemAudioDump')
: path.join(app.getAppPath(), 'src', 'assets', 'SystemAudioDump');
console.log('SystemAudioDump path:', systemAudioPath);
systemAudioProc = spawn(systemAudioPath, [], {
stdio: ['ignore', 'pipe', 'pipe'],
});
if (!systemAudioProc.pid) {
console.error('Failed to start SystemAudioDump');
return false;
}
console.log('SystemAudioDump started with PID:', systemAudioProc.pid);
const CHUNK_DURATION = 0.1;
const SAMPLE_RATE = 24000;
const BYTES_PER_SAMPLE = 2;
const CHANNELS = 2;
const CHUNK_SIZE = SAMPLE_RATE * BYTES_PER_SAMPLE * CHANNELS * CHUNK_DURATION;
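// 24000 samples/s * 2 bytes/sample * 2 channels * 0.1 s = 9600 bytes per ~100 ms stereo chunk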
let audioBuffer = Buffer.alloc(0);
const provider = await getAiProvider();
const isGemini = provider === 'gemini';
systemAudioProc.stdout.on('data', async data => {
audioBuffer = Buffer.concat([audioBuffer, data]);
while (audioBuffer.length >= CHUNK_SIZE) {
const chunk = audioBuffer.slice(0, CHUNK_SIZE);
audioBuffer = audioBuffer.slice(CHUNK_SIZE);
const monoChunk = CHANNELS === 2 ? convertStereoToMono(chunk) : chunk;
const base64Data = monoChunk.toString('base64');
sendToRenderer('system-audio-data', { data: base64Data });
if (theirSttSession) {
try {
// await theirSttSession.sendRealtimeInput(base64Data);
const payload = isGemini
? { audio: { data: base64Data, mimeType: 'audio/pcm;rate=24000' } }
: base64Data;
await theirSttSession.sendRealtimeInput(payload);
} catch (err) {
console.error('Error sending system audio:', err.message);
}
}
if (process.env.DEBUG_AUDIO) {
saveDebugAudio(monoChunk, 'system_audio');
}
}
});
systemAudioProc.stderr.on('data', data => {
console.error('SystemAudioDump stderr:', data.toString());
});
systemAudioProc.on('close', code => {
console.log('SystemAudioDump process closed with code:', code);
systemAudioProc = null;
});
systemAudioProc.on('error', err => {
console.error('SystemAudioDump process error:', err);
systemAudioProc = null;
});
return true;
}
function convertStereoToMono(stereoBuffer) {
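// Interleaved s16le stereo is laid out [L0, R0, L1, R1, ...]; keeping only the
// left channel halves the byte count (4 bytes per stereo frame -> 2 bytes mono).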
const samples = stereoBuffer.length / 4;
const monoBuffer = Buffer.alloc(samples * 2);
for (let i = 0; i < samples; i++) {
const leftSample = stereoBuffer.readInt16LE(i * 4);
monoBuffer.writeInt16LE(leftSample, i * 2);
}
return monoBuffer;
}
function stopMacOSAudioCapture() {
if (systemAudioProc) {
console.log('Stopping SystemAudioDump...');
systemAudioProc.kill('SIGTERM');
systemAudioProc = null;
}
}
async function sendAudioToOpenAI(base64Data, sttSessionRef) {
if (!sttSessionRef.current) return;
try {
process.stdout.write('.');
await sttSessionRef.current.sendRealtimeInput({
audio: {
data: base64Data,
mimeType: 'audio/pcm;rate=24000',
},
});
} catch (error) {
console.error('Error sending audio to OpenAI:', error);
}
}
function isSessionActive() {
return !!mySttSession && !!theirSttSession;
}
async function closeSession() {
try {
stopMacOSAudioCapture();
stopAnalysisInterval();
if (currentSessionId) {
await sessionRepository.end(currentSessionId);
console.log(`[DB] Session ${currentSessionId} ended.`);
}
const closePromises = [];
if (mySttSession) {
closePromises.push(mySttSession.close());
mySttSession = null;
}
if (theirSttSession) {
closePromises.push(theirSttSession.close());
theirSttSession = null;
}
await Promise.all(closePromises);
console.log('All sessions closed.');
currentSessionId = null;
conversationHistory = [];
sendToRenderer('session-state-changed', { isActive: false });
sendToRenderer('session-did-close');
return { success: true };
} catch (error) {
console.error('Error closing sessions:', error);
return { success: false, error: error.message };
}
}
function setupLiveSummaryIpcHandlers() {
ipcMain.handle('is-session-active', async () => {
const isActive = isSessionActive();
console.log(`Checking session status. Active: ${isActive}`);
return isActive;
});
ipcMain.handle('initialize-openai', async (event, profile = 'interview', language = 'en') => {
console.log(`Received initialize-openai request with profile: ${profile}, language: ${language}`);
const success = await initializeLiveSummarySession(language);
return success;
});
ipcMain.handle('send-audio-content', async (event, { data, mimeType }) => {
const provider = await getAiProvider();
const isGemini = provider === 'gemini';
if (!mySttSession) return { success: false, error: 'User STT session not active' };
try {
// await mySttSession.sendRealtimeInput(data);
// Wrap the payload in the format the provider expects
const payload = isGemini
? { audio: { data, mimeType: mimeType || 'audio/pcm;rate=24000' } }
: data; // OpenAI takes the raw base64 string
await mySttSession.sendRealtimeInput(payload);
return { success: true };
} catch (error) {
console.error('Error sending user audio:', error);
return { success: false, error: error.message };
}
});
ipcMain.handle('start-macos-audio', async () => {
if (process.platform !== 'darwin') {
return { success: false, error: 'macOS audio capture only available on macOS' };
}
try {
const success = await startMacOSAudioCapture();
return { success };
} catch (error) {
console.error('Error starting macOS audio capture:', error);
return { success: false, error: error.message };
}
});
ipcMain.handle('stop-macos-audio', async () => {
try {
stopMacOSAudioCapture();
return { success: true };
} catch (error) {
console.error('Error stopping macOS audio capture:', error);
return { success: false, error: error.message };
}
});
ipcMain.handle('close-session', async () => {
return await closeSession();
});
ipcMain.handle('update-google-search-setting', async (event, enabled) => {
try {
console.log('Google Search setting updated to:', enabled);
return { success: true };
} catch (error) {
console.error('Error updating Google Search setting:', error);
return { success: false, error: error.message };
}
});
}
function getConversationHistory() {
return conversationHistory;
}
module.exports = {
sendToRenderer,
initializeNewSession,
saveConversationTurn,
killExistingSystemAudioDump,
startMacOSAudioCapture,
convertStereoToMono,
stopMacOSAudioCapture,
sendAudioToOpenAI,
setupLiveSummaryIpcHandlers,
isSessionActive,
closeSession,
getConversationHistory,
};

View File

@ -1,20 +1,29 @@
// renderer.js
const { ipcRenderer } = require('electron');
const { makeStreamingChatCompletionWithPortkey } = require('../../common/services/aiProviderService.js');
let mediaStream = null;
let screenshotInterval = null;
let audioContext = null;
let audioProcessor = null;
let micMediaStream = null;
let audioBuffer = [];
// ---------------------------
// Constants & Globals
// ---------------------------
const SAMPLE_RATE = 24000;
const AUDIO_CHUNK_DURATION = 0.1;
const BUFFER_SIZE = 4096;
const isLinux = process.platform === 'linux';
const isMacOS = process.platform === 'darwin';
let mediaStream = null;
let micMediaStream = null;
let screenshotInterval = null;
let audioContext = null;
let audioProcessor = null;
let currentImageQuality = 'medium';
let lastScreenshotBase64 = null;
let systemAudioBuffer = [];
const MAX_SYSTEM_BUFFER_SIZE = 10;
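// Assuming ~100 ms system-audio chunks, 10 buffered entries keep roughly the
// last second of far-end audio available as the AEC reference.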
// ---------------------------
// Utility helpers (exact from renderer.js)
// ---------------------------
function isVoiceActive(audioFloat32Array, threshold = 0.005) {
if (!audioFloat32Array || audioFloat32Array.length === 0) {
return false;
@ -31,11 +40,6 @@ function isVoiceActive(audioFloat32Array, threshold = 0.005) {
return rms > threshold;
}
let currentImageQuality = 'medium'; // Store current image quality for manual screenshots
let lastScreenshotBase64 = null; // Store the latest screenshot
let realtimeConversationHistory = [];
function base64ToFloat32Array(base64) {
const binaryString = atob(base64);
const bytes = new Uint8Array(binaryString.length);
@ -54,11 +58,29 @@ function base64ToFloat32Array(base64) {
return float32Array;
}
async function queryLoginState() {
const userState = await ipcRenderer.invoke('get-current-user');
return userState;
function convertFloat32ToInt16(float32Array) {
const int16Array = new Int16Array(float32Array.length);
for (let i = 0; i < float32Array.length; i++) {
// Improved scaling to prevent clipping
const s = Math.max(-1, Math.min(1, float32Array[i]));
int16Array[i] = s < 0 ? s * 0x8000 : s * 0x7fff;
}
return int16Array;
}
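// e.g. 1.0 -> 32767, -1.0 -> -32768, 0.5 -> 16383 (Int16Array truncates toward zero);
// inputs outside [-1, 1] are clamped first.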
function arrayBufferToBase64(buffer) {
let binary = '';
const bytes = new Uint8Array(buffer);
const len = bytes.byteLength;
for (let i = 0; i < len; i++) {
binary += String.fromCharCode(bytes[i]);
}
return btoa(binary);
}
// ---------------------------
// Complete SimpleAEC implementation (exact from renderer.js)
// ---------------------------
class SimpleAEC {
constructor() {
this.adaptiveFilter = new Float32Array(1024);
@ -179,11 +201,24 @@ class SimpleAEC {
let aecProcessor = new SimpleAEC();
const isLinux = process.platform === 'linux';
const isMacOS = process.platform === 'darwin';
// System audio data handler
ipcRenderer.on('system-audio-data', (event, { data }) => {
systemAudioBuffer.push({
data: data,
timestamp: Date.now(),
});
window.pickleGlass = window.pickleGlass || {};
// Drop stale entries
if (systemAudioBuffer.length > MAX_SYSTEM_BUFFER_SIZE) {
systemAudioBuffer = systemAudioBuffer.slice(-MAX_SYSTEM_BUFFER_SIZE);
}
console.log('📥 Received system audio for AEC reference');
});
// ---------------------------
// Complete token tracker (exact from renderer.js)
// ---------------------------
let tokenTracker = {
tokens: [],
audioStartTime: null,
@ -265,126 +300,201 @@ setInterval(() => {
tokenTracker.trackAudioTokens();
}, 2000);
function pickleGlassElement() {
return document.getElementById('pickle-glass');
}
// ---------------------------
// Audio processing functions (exact from renderer.js)
// ---------------------------
function setupMicProcessing(micStream) {
const micAudioContext = new AudioContext({ sampleRate: SAMPLE_RATE });
const micSource = micAudioContext.createMediaStreamSource(micStream);
const micProcessor = micAudioContext.createScriptProcessor(BUFFER_SIZE, 1, 1);
function convertFloat32ToInt16(float32Array) {
const int16Array = new Int16Array(float32Array.length);
for (let i = 0; i < float32Array.length; i++) {
// Improved scaling to prevent clipping
const s = Math.max(-1, Math.min(1, float32Array[i]));
int16Array[i] = s < 0 ? s * 0x8000 : s * 0x7fff;
}
return int16Array;
}
let audioBuffer = [];
const samplesPerChunk = SAMPLE_RATE * AUDIO_CHUNK_DURATION;
function arrayBufferToBase64(buffer) {
let binary = '';
const bytes = new Uint8Array(buffer);
const len = bytes.byteLength;
for (let i = 0; i < len; i++) {
binary += String.fromCharCode(bytes[i]);
}
return btoa(binary);
}
micProcessor.onaudioprocess = async e => {
const inputData = e.inputBuffer.getChannelData(0);
audioBuffer.push(...inputData);
async function initializeopenai(profile = 'interview', language = 'en') {
// The API key is now handled in the main process from the .env file.
// We just need to trigger the initialization.
try {
console.log(`Requesting OpenAI initialization with profile: ${profile}, language: ${language}`);
const success = await ipcRenderer.invoke('initialize-openai', profile, language);
if (success) {
// The status will be updated via 'update-status' event from the main process.
console.log('OpenAI initialization successful.');
} else {
console.error('OpenAI initialization failed.');
const appElement = pickleGlassElement();
if (appElement && typeof appElement.setStatus === 'function') {
appElement.setStatus('Initialization Failed');
while (audioBuffer.length >= samplesPerChunk) {
let chunk = audioBuffer.splice(0, samplesPerChunk);
let processedChunk = new Float32Array(chunk);
// Check for system audio and apply AEC only if voice is active
if (aecProcessor && systemAudioBuffer.length > 0) {
const latestSystemAudio = systemAudioBuffer[systemAudioBuffer.length - 1];
const systemFloat32 = base64ToFloat32Array(latestSystemAudio.data);
// Apply AEC only when system audio has active speech
if (isVoiceActive(systemFloat32)) {
processedChunk = aecProcessor.process(new Float32Array(chunk), systemFloat32);
console.log('🔊 Applied AEC because system audio is active');
}
}
const pcmData16 = convertFloat32ToInt16(processedChunk);
const base64Data = arrayBufferToBase64(pcmData16.buffer);
await ipcRenderer.invoke('send-audio-content', {
data: base64Data,
mimeType: 'audio/pcm;rate=24000',
});
}
};
micSource.connect(micProcessor);
micProcessor.connect(micAudioContext.destination);
audioProcessor = micProcessor;
}
function setupLinuxMicProcessing(micStream) {
// Setup microphone audio processing for Linux
const micAudioContext = new AudioContext({ sampleRate: SAMPLE_RATE });
const micSource = micAudioContext.createMediaStreamSource(micStream);
const micProcessor = micAudioContext.createScriptProcessor(BUFFER_SIZE, 1, 1);
let audioBuffer = [];
const samplesPerChunk = SAMPLE_RATE * AUDIO_CHUNK_DURATION;
micProcessor.onaudioprocess = async e => {
const inputData = e.inputBuffer.getChannelData(0);
audioBuffer.push(...inputData);
// Process audio in chunks
while (audioBuffer.length >= samplesPerChunk) {
const chunk = audioBuffer.splice(0, samplesPerChunk);
const pcmData16 = convertFloat32ToInt16(chunk);
const base64Data = arrayBufferToBase64(pcmData16.buffer);
await ipcRenderer.invoke('send-audio-content', {
data: base64Data,
mimeType: 'audio/pcm;rate=24000',
});
}
};
micSource.connect(micProcessor);
micProcessor.connect(micAudioContext.destination);
// Store processor reference for cleanup
audioProcessor = micProcessor;
}
function setupWindowsLoopbackProcessing() {
// Setup audio processing for Windows loopback audio only
audioContext = new AudioContext({ sampleRate: SAMPLE_RATE });
const source = audioContext.createMediaStreamSource(mediaStream);
audioProcessor = audioContext.createScriptProcessor(BUFFER_SIZE, 1, 1);
let audioBuffer = [];
const samplesPerChunk = SAMPLE_RATE * AUDIO_CHUNK_DURATION;
audioProcessor.onaudioprocess = async e => {
const inputData = e.inputBuffer.getChannelData(0);
audioBuffer.push(...inputData);
// Process audio in chunks
while (audioBuffer.length >= samplesPerChunk) {
const chunk = audioBuffer.splice(0, samplesPerChunk);
const pcmData16 = convertFloat32ToInt16(chunk);
const base64Data = arrayBufferToBase64(pcmData16.buffer);
await ipcRenderer.invoke('send-audio-content', {
data: base64Data,
mimeType: 'audio/pcm;rate=24000',
});
}
};
source.connect(audioProcessor);
audioProcessor.connect(audioContext.destination);
}
// ---------------------------
// Screenshot functions (exact from renderer.js)
// ---------------------------
async function captureScreenshot(imageQuality = 'medium', isManual = false) {
console.log(`Capturing ${isManual ? 'manual' : 'automated'} screenshot...`);
// Check rate limiting for automated screenshots only
if (!isManual && tokenTracker.shouldThrottle()) {
console.log('⚠️ Automated screenshot skipped due to rate limiting');
return;
}
try {
// Request screenshot from main process
const result = await ipcRenderer.invoke('capture-screenshot', {
quality: imageQuality,
});
if (result.success && result.base64) {
// Store the latest screenshot
lastScreenshotBase64 = result.base64;
// Note: sendResult is not defined in the original; this was likely an error.
// Commenting out this section as it references an undefined variable.
/*
if (sendResult.success) {
// Track image tokens after successful send
const imageTokens = tokenTracker.calculateImageTokens(result.width || 1920, result.height || 1080);
tokenTracker.addTokens(imageTokens, 'image');
console.log(`📊 Image sent successfully - ${imageTokens} tokens used (${result.width}x${result.height})`);
} else {
console.error('Failed to send image:', sendResult.error);
}
*/
} else {
console.error('Failed to capture screenshot:', result.error);
}
} catch (error) {
console.error('Error during OpenAI initialization IPC call:', error);
const appElement = pickleGlassElement();
if (appElement && typeof appElement.setStatus === 'function') {
appElement.setStatus('Error');
}
console.error('Error capturing screenshot:', error);
}
}
async function captureManualScreenshot(imageQuality = null) {
console.log('Manual screenshot triggered');
const quality = imageQuality || currentImageQuality;
await captureScreenshot(quality, true);
}
ipcRenderer.on('system-audio-data', (event, { data }) => {
systemAudioBuffer.push({
data: data,
timestamp: Date.now(),
});
async function getCurrentScreenshot() {
try {
// First try to get a fresh screenshot from main process
const result = await ipcRenderer.invoke('get-current-screenshot');
// Drop stale entries
if (systemAudioBuffer.length > MAX_SYSTEM_BUFFER_SIZE) {
systemAudioBuffer = systemAudioBuffer.slice(-MAX_SYSTEM_BUFFER_SIZE);
}
console.log('📥 Received system audio for AEC reference');
});
// Listen for status updates
ipcRenderer.on('update-status', (event, status) => {
console.log('Status update:', status);
pickleGlass.e().setStatus(status);
});
// Listen for real-time STT updates
ipcRenderer.on('stt-update', (event, data) => {
console.log('Renderer.js stt-update', data);
const { speaker, text, isFinal, isPartial, timestamp } = data;
if (isPartial) {
console.log(`🔄 [${speaker} - partial]: ${text}`);
} else if (isFinal) {
console.log(`✅ [${speaker} - final]: ${text}`);
const speakerText = speaker.toLowerCase();
const conversationText = `${speakerText}: ${text.trim()}`;
realtimeConversationHistory.push(conversationText);
if (realtimeConversationHistory.length > 30) {
realtimeConversationHistory = realtimeConversationHistory.slice(-30);
if (result.success && result.base64) {
console.log('📸 Got fresh screenshot from main process');
return result.base64;
}
console.log(`📝 Updated realtime conversation history: ${realtimeConversationHistory.length} texts`);
console.log(`📋 Latest text: ${conversationText}`);
}
if (pickleGlass.e() && typeof pickleGlass.e().updateRealtimeTranscription === 'function') {
pickleGlass.e().updateRealtimeTranscription({
speaker,
text,
isFinal,
isPartial,
timestamp,
// If no screenshot available, capture one now
console.log('📸 No screenshot available, capturing new one');
const captureResult = await ipcRenderer.invoke('capture-screenshot', {
quality: currentImageQuality,
});
if (captureResult.success && captureResult.base64) {
lastScreenshotBase64 = captureResult.base64;
return captureResult.base64;
}
// Fallback to last stored screenshot
if (lastScreenshotBase64) {
console.log('📸 Using cached screenshot');
return lastScreenshotBase64;
}
throw new Error('Failed to get screenshot');
} catch (error) {
console.error('Error getting current screenshot:', error);
return null;
}
});
ipcRenderer.on('update-structured-data', (_, structuredData) => {
console.log('📥 Received structured data update:', structuredData);
window.pickleGlass.structuredData = structuredData;
window.pickleGlass.setStructuredData(structuredData);
});
window.pickleGlass.structuredData = {
summary: [],
topic: { header: '', bullets: [] },
actions: [],
};
window.pickleGlass.setStructuredData = data => {
window.pickleGlass.structuredData = data;
pickleGlass.e()?.updateStructuredData?.(data);
};
}
// ---------------------------
// Main capture functions (exact from renderer.js)
// ---------------------------
async function startCapture(screenshotIntervalSeconds = 5, imageQuality = 'medium') {
// Store the image quality for manual screenshots
currentImageQuality = imageQuality;
@ -490,12 +600,6 @@ async function startCapture(screenshotIntervalSeconds = 5, imageQuality = 'mediu
setupWindowsLoopbackProcessing();
}
// console.log('MediaStream obtained:', {
// hasVideo: mediaStream.getVideoTracks().length > 0,
// hasAudio: mediaStream.getAudioTracks().length > 0,
// videoTrack: mediaStream.getVideoTracks()[0]?.getSettings(),
// });
// Start capturing screenshots - check if manual mode
if (screenshotIntervalSeconds === 'manual' || screenshotIntervalSeconds === 'Manual') {
console.log('Manual mode enabled - screenshots will be captured on demand only');
@ -511,162 +615,11 @@ async function startCapture(screenshotIntervalSeconds = 5, imageQuality = 'mediu
}
} catch (err) {
console.error('Error starting capture:', err);
pickleGlass.e().setStatus('error');
// Note: pickleGlass.e() is not available in this context; commenting it out.
// pickleGlass.e().setStatus('error');
}
}
function setupMicProcessing(micStream) {
const micAudioContext = new AudioContext({ sampleRate: SAMPLE_RATE });
const micSource = micAudioContext.createMediaStreamSource(micStream);
const micProcessor = micAudioContext.createScriptProcessor(BUFFER_SIZE, 1, 1);
let audioBuffer = [];
const samplesPerChunk = SAMPLE_RATE * AUDIO_CHUNK_DURATION;
micProcessor.onaudioprocess = async e => {
const inputData = e.inputBuffer.getChannelData(0);
audioBuffer.push(...inputData);
while (audioBuffer.length >= samplesPerChunk) {
let chunk = audioBuffer.splice(0, samplesPerChunk);
let processedChunk = new Float32Array(chunk);
// Check for system audio and apply AEC only if voice is active
if (aecProcessor && systemAudioBuffer.length > 0) {
const latestSystemAudio = systemAudioBuffer[systemAudioBuffer.length - 1];
const systemFloat32 = base64ToFloat32Array(latestSystemAudio.data);
// Apply AEC only when system audio has active speech
if (isVoiceActive(systemFloat32)) {
processedChunk = aecProcessor.process(new Float32Array(chunk), systemFloat32);
console.log('🔊 Applied AEC because system audio is active');
}
}
const pcmData16 = convertFloat32ToInt16(processedChunk);
const base64Data = arrayBufferToBase64(pcmData16.buffer);
await ipcRenderer.invoke('send-audio-content', {
data: base64Data,
mimeType: 'audio/pcm;rate=24000',
});
}
};
micSource.connect(micProcessor);
micProcessor.connect(micAudioContext.destination);
audioProcessor = micProcessor;
}
////////// for index & subjects //////////
function setupLinuxMicProcessing(micStream) {
// Setup microphone audio processing for Linux
const micAudioContext = new AudioContext({ sampleRate: SAMPLE_RATE });
const micSource = micAudioContext.createMediaStreamSource(micStream);
const micProcessor = micAudioContext.createScriptProcessor(BUFFER_SIZE, 1, 1);
let audioBuffer = [];
const samplesPerChunk = SAMPLE_RATE * AUDIO_CHUNK_DURATION;
micProcessor.onaudioprocess = async e => {
const inputData = e.inputBuffer.getChannelData(0);
audioBuffer.push(...inputData);
// Process audio in chunks
while (audioBuffer.length >= samplesPerChunk) {
const chunk = audioBuffer.splice(0, samplesPerChunk);
const pcmData16 = convertFloat32ToInt16(chunk);
const base64Data = arrayBufferToBase64(pcmData16.buffer);
await ipcRenderer.invoke('send-audio-content', {
data: base64Data,
mimeType: 'audio/pcm;rate=24000',
});
}
};
micSource.connect(micProcessor);
micProcessor.connect(micAudioContext.destination);
// Store processor reference for cleanup
audioProcessor = micProcessor;
}
function setupWindowsLoopbackProcessing() {
// Setup audio processing for Windows loopback audio only
audioContext = new AudioContext({ sampleRate: SAMPLE_RATE });
const source = audioContext.createMediaStreamSource(mediaStream);
audioProcessor = audioContext.createScriptProcessor(BUFFER_SIZE, 1, 1);
let audioBuffer = [];
const samplesPerChunk = SAMPLE_RATE * AUDIO_CHUNK_DURATION;
audioProcessor.onaudioprocess = async e => {
const inputData = e.inputBuffer.getChannelData(0);
audioBuffer.push(...inputData);
// Process audio in chunks
while (audioBuffer.length >= samplesPerChunk) {
const chunk = audioBuffer.splice(0, samplesPerChunk);
const pcmData16 = convertFloat32ToInt16(chunk);
const base64Data = arrayBufferToBase64(pcmData16.buffer);
await ipcRenderer.invoke('send-audio-content', {
data: base64Data,
mimeType: 'audio/pcm;rate=24000',
});
}
};
source.connect(audioProcessor);
audioProcessor.connect(audioContext.destination);
}
async function captureScreenshot(imageQuality = 'medium', isManual = false) {
console.log(`Capturing ${isManual ? 'manual' : 'automated'} screenshot...`);
// Check rate limiting for automated screenshots only
if (!isManual && tokenTracker.shouldThrottle()) {
console.log('⚠️ Automated screenshot skipped due to rate limiting');
return;
}
try {
// Request screenshot from main process
const result = await ipcRenderer.invoke('capture-screenshot', {
quality: imageQuality,
});
if (result.success && result.base64) {
// Store the latest screenshot
lastScreenshotBase64 = result.base64;
if (sendResult.success) {
// Track image tokens after successful send
const imageTokens = tokenTracker.calculateImageTokens(result.width || 1920, result.height || 1080);
tokenTracker.addTokens(imageTokens, 'image');
console.log(`📊 Image sent successfully - ${imageTokens} tokens used (${result.width}x${result.height})`);
} else {
console.error('Failed to send image:', sendResult.error);
}
} else {
console.error('Failed to capture screenshot:', result.error);
}
} catch (error) {
console.error('Error capturing screenshot:', error);
}
}
async function captureManualScreenshot(imageQuality = null) {
console.log('Manual screenshot triggered');
const quality = imageQuality || currentImageQuality;
await captureScreenshot(quality, true);
}
// Expose functions to global scope for external access
window.captureManualScreenshot = captureManualScreenshot;
function stopCapture() {
if (screenshotInterval) {
clearInterval(screenshotInterval);
@ -706,76 +659,25 @@ function stopCapture() {
}
}
async function getCurrentScreenshot() {
try {
// First try to get a fresh screenshot from main process
const result = await ipcRenderer.invoke('get-current-screenshot');
if (result.success && result.base64) {
console.log('📸 Got fresh screenshot from main process');
return result.base64;
}
// If no screenshot available, capture one now
console.log('📸 No screenshot available, capturing new one');
const captureResult = await ipcRenderer.invoke('capture-screenshot', {
quality: currentImageQuality,
});
if (captureResult.success && captureResult.base64) {
lastScreenshotBase64 = captureResult.base64;
return captureResult.base64;
}
// Fallback to last stored screenshot
if (lastScreenshotBase64) {
console.log('📸 Using cached screenshot');
return lastScreenshotBase64;
}
throw new Error('Failed to get screenshot');
} catch (error) {
console.error('Error getting current screenshot:', error);
return null;
}
}
function formatRealtimeConversationHistory() {
if (realtimeConversationHistory.length === 0) return 'No conversation history available.';
return realtimeConversationHistory.slice(-30).join('\n');
}
window.pickleGlass = {
initializeopenai,
// ---------------------------
// Exports & global registration
// ---------------------------
module.exports = {
startCapture,
stopCapture,
isLinux: isLinux,
isMacOS: isMacOS,
e: pickleGlassElement,
captureManualScreenshot,
getCurrentScreenshot,
isLinux,
isMacOS,
};
// -------------------------------------------------------
// 🔔 React to session state changes from the main process
// When the session ends (isActive === false), ensure we stop
// all local capture pipelines (mic, screen, etc.).
// -------------------------------------------------------
ipcRenderer.on('session-state-changed', (_event, { isActive }) => {
if (!isActive) {
console.log('[Renderer] Session ended - stopping local capture');
stopCapture();
} else {
console.log('[Renderer] New session started - clearing in-memory history and summaries');
// Reset live conversation & analysis caches
realtimeConversationHistory = [];
const blankData = {
summary: [],
topic: { header: '', bullets: [] },
actions: [],
followUps: [],
};
window.pickleGlass.setStructuredData(blankData);
}
});
// Expose functions to global scope for external access (exact from renderer.js)
if (typeof window !== 'undefined') {
window.captureManualScreenshot = captureManualScreenshot;
window.listenCapture = module.exports;
window.pickleGlass = window.pickleGlass || {};
window.pickleGlass.startCapture = startCapture;
window.pickleGlass.stopCapture = stopCapture;
window.pickleGlass.captureManualScreenshot = captureManualScreenshot;
window.pickleGlass.getCurrentScreenshot = getCurrentScreenshot;
}

View File

@ -0,0 +1,138 @@
// renderer.js
const { ipcRenderer } = require('electron');
const listenCapture = require('./listenCapture.js');
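// Capture (mic/screen) logic now lives in listenCapture.js; this file wires
// IPC events and UI state around it.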
let realtimeConversationHistory = [];
async function queryLoginState() {
const userState = await ipcRenderer.invoke('get-current-user');
return userState;
}
function pickleGlassElement() {
return document.getElementById('pickle-glass');
}
async function initializeopenai(profile = 'interview', language = 'en') {
// The API key is now handled in the main process from .env file.
// We just need to trigger the initialization.
try {
console.log(`Requesting OpenAI initialization with profile: ${profile}, language: ${language}`);
const success = await ipcRenderer.invoke('initialize-openai', profile, language);
if (success) {
// The status will be updated via 'update-status' event from the main process.
console.log('OpenAI initialization successful.');
} else {
console.error('OpenAI initialization failed.');
const appElement = pickleGlassElement();
if (appElement && typeof appElement.setStatus === 'function') {
appElement.setStatus('Initialization Failed');
}
}
} catch (error) {
console.error('Error during OpenAI initialization IPC call:', error);
const appElement = pickleGlassElement();
if (appElement && typeof appElement.setStatus === 'function') {
appElement.setStatus('Error');
}
}
}
// Listen for status updates
ipcRenderer.on('update-status', (event, status) => {
console.log('Status update:', status);
    pickleGlass.e()?.setStatus?.(status);
});
// Listen for real-time STT updates
ipcRenderer.on('stt-update', (event, data) => {
console.log('Renderer.js stt-update', data);
const { speaker, text, isFinal, isPartial, timestamp } = data;
if (isPartial) {
console.log(`🔄 [${speaker} - partial]: ${text}`);
} else if (isFinal) {
console.log(`✅ [${speaker} - final]: ${text}`);
const speakerText = speaker.toLowerCase();
const conversationText = `${speakerText}: ${text.trim()}`;
realtimeConversationHistory.push(conversationText);
if (realtimeConversationHistory.length > 30) {
realtimeConversationHistory = realtimeConversationHistory.slice(-30);
}
console.log(`📝 Updated realtime conversation history: ${realtimeConversationHistory.length} texts`);
console.log(`📋 Latest text: ${conversationText}`);
}
if (pickleGlass.e() && typeof pickleGlass.e().updateRealtimeTranscription === 'function') {
pickleGlass.e().updateRealtimeTranscription({
speaker,
text,
isFinal,
isPartial,
timestamp,
});
}
});
ipcRenderer.on('update-structured-data', (_, structuredData) => {
console.log('📥 Received structured data update:', structuredData);
window.pickleGlass.structuredData = structuredData;
window.pickleGlass.setStructuredData(structuredData);
});
window.pickleGlass = window.pickleGlass || {};
window.pickleGlass.structuredData = {
summary: [],
topic: { header: '', bullets: [] },
actions: [],
};
window.pickleGlass.setStructuredData = data => {
window.pickleGlass.structuredData = data;
pickleGlass.e()?.updateStructuredData?.(data);
};
function formatRealtimeConversationHistory() {
if (realtimeConversationHistory.length === 0) return 'No conversation history available.';
return realtimeConversationHistory.slice(-30).join('\n');
}
// Merge into the namespace so the structuredData helpers above survive.
Object.assign(window.pickleGlass, {
initializeopenai,
startCapture: listenCapture.startCapture,
stopCapture: listenCapture.stopCapture,
isLinux: listenCapture.isLinux,
isMacOS: listenCapture.isMacOS,
captureManualScreenshot: listenCapture.captureManualScreenshot,
getCurrentScreenshot: listenCapture.getCurrentScreenshot,
e: pickleGlassElement,
});
// -------------------------------------------------------
// 🔔 React to session state changes from the main process
// When the session ends (isActive === false), ensure we stop
// all local capture pipelines (mic, screen, etc.).
// -------------------------------------------------------
ipcRenderer.on('session-state-changed', (_event, { isActive }) => {
if (!isActive) {
        console.log('[Renderer] Session ended - stopping local capture');
listenCapture.stopCapture();
} else {
        console.log('[Renderer] New session started - clearing in-memory history and summaries');
// Reset live conversation & analysis caches
realtimeConversationHistory = [];
const blankData = {
summary: [],
topic: { header: '', bullets: [] },
actions: [],
followUps: [],
};
window.pickleGlass.setStructuredData(blankData);
}
});

View File

@ -1,66 +0,0 @@
const sqliteClient = require('../../../common/services/sqliteClient');
function addTranscript({ sessionId, speaker, text }) {
const db = sqliteClient.getDb();
return new Promise((resolve, reject) => {
const transcriptId = require('crypto').randomUUID();
const now = Math.floor(Date.now() / 1000);
const query = `INSERT INTO transcripts (id, session_id, start_at, speaker, text, created_at) VALUES (?, ?, ?, ?, ?, ?)`;
db.run(query, [transcriptId, sessionId, now, speaker, text, now], function(err) {
if (err) reject(err);
else resolve({ id: transcriptId });
});
});
}
function saveSummary({ sessionId, tldr, text, bullet_json, action_json, model = 'gpt-4.1' }) {
const db = sqliteClient.getDb();
return new Promise((resolve, reject) => {
const now = Math.floor(Date.now() / 1000);
const query = `
INSERT INTO summaries (session_id, generated_at, model, text, tldr, bullet_json, action_json, updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(session_id) DO UPDATE SET
generated_at=excluded.generated_at,
model=excluded.model,
text=excluded.text,
tldr=excluded.tldr,
bullet_json=excluded.bullet_json,
action_json=excluded.action_json,
updated_at=excluded.updated_at
`;
db.run(query, [sessionId, now, model, text, tldr, bullet_json, action_json, now], function(err) {
if (err) reject(err);
else resolve({ changes: this.changes });
});
});
}
function getAllTranscriptsBySessionId(sessionId) {
const db = sqliteClient.getDb();
return new Promise((resolve, reject) => {
const query = "SELECT * FROM transcripts WHERE session_id = ? ORDER BY start_at ASC";
db.all(query, [sessionId], (err, rows) => {
if (err) reject(err);
else resolve(rows);
});
});
}
function getSummaryBySessionId(sessionId) {
const db = sqliteClient.getDb();
return new Promise((resolve, reject) => {
const query = "SELECT * FROM summaries WHERE session_id = ?";
db.get(query, [sessionId], (err, row) => {
if (err) reject(err);
else resolve(row || null);
});
});
}
module.exports = {
addTranscript,
saveSummary,
getAllTranscriptsBySessionId,
getSummaryBySessionId
};

View File

@ -0,0 +1,228 @@
import { html, css, LitElement } from '../../../assets/lit-core-2.7.4.min.js';
export class SttView extends LitElement {
static styles = css`
:host {
display: block;
width: 100%;
}
/* Inherit font styles from parent */
.transcription-container {
overflow-y: auto;
padding: 12px 12px 16px 12px;
display: flex;
flex-direction: column;
gap: 8px;
min-height: 150px;
max-height: 600px;
position: relative;
z-index: 1;
flex: 1;
}
/* Visibility handled by parent component */
.transcription-container::-webkit-scrollbar {
width: 8px;
}
.transcription-container::-webkit-scrollbar-track {
background: rgba(0, 0, 0, 0.1);
border-radius: 4px;
}
.transcription-container::-webkit-scrollbar-thumb {
background: rgba(255, 255, 255, 0.3);
border-radius: 4px;
}
.transcription-container::-webkit-scrollbar-thumb:hover {
background: rgba(255, 255, 255, 0.5);
}
.stt-message {
padding: 8px 12px;
border-radius: 12px;
max-width: 80%;
word-wrap: break-word;
word-break: break-word;
line-height: 1.5;
font-size: 13px;
margin-bottom: 4px;
box-sizing: border-box;
}
.stt-message.them {
background: rgba(255, 255, 255, 0.1);
color: rgba(255, 255, 255, 0.9);
align-self: flex-start;
border-bottom-left-radius: 4px;
margin-right: auto;
}
.stt-message.me {
background: rgba(0, 122, 255, 0.8);
color: white;
align-self: flex-end;
border-bottom-right-radius: 4px;
margin-left: auto;
}
.empty-state {
display: flex;
align-items: center;
justify-content: center;
height: 100px;
color: rgba(255, 255, 255, 0.6);
font-size: 12px;
font-style: italic;
}
`;
static properties = {
sttMessages: { type: Array },
isVisible: { type: Boolean },
};
constructor() {
super();
this.sttMessages = [];
this.isVisible = true;
this.messageIdCounter = 0;
this._shouldScrollAfterUpdate = false;
this.handleSttUpdate = this.handleSttUpdate.bind(this);
}
connectedCallback() {
super.connectedCallback();
if (window.require) {
const { ipcRenderer } = window.require('electron');
ipcRenderer.on('stt-update', this.handleSttUpdate);
}
}
disconnectedCallback() {
super.disconnectedCallback();
if (window.require) {
const { ipcRenderer } = window.require('electron');
ipcRenderer.removeListener('stt-update', this.handleSttUpdate);
}
}
// Handle session reset from parent
resetTranscript() {
this.sttMessages = [];
this.requestUpdate();
}
handleSttUpdate(event, { speaker, text, isFinal, isPartial }) {
if (text === undefined) return;
const container = this.shadowRoot.querySelector('.transcription-container');
this._shouldScrollAfterUpdate = container ? container.scrollTop + container.clientHeight >= container.scrollHeight - 10 : false;
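        // Auto-scroll only when the user is already near the bottom. A speaker's
        // running partial overwrites their last partial bubble in place; a final
        // either freezes that bubble or appends a new one.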
const findLastPartialIdx = spk => {
for (let i = this.sttMessages.length - 1; i >= 0; i--) {
const m = this.sttMessages[i];
if (m.speaker === spk && m.isPartial) return i;
}
return -1;
};
const newMessages = [...this.sttMessages];
const targetIdx = findLastPartialIdx(speaker);
if (isPartial) {
if (targetIdx !== -1) {
newMessages[targetIdx] = {
...newMessages[targetIdx],
text,
isPartial: true,
isFinal: false,
};
} else {
newMessages.push({
id: this.messageIdCounter++,
speaker,
text,
isPartial: true,
isFinal: false,
});
}
} else if (isFinal) {
if (targetIdx !== -1) {
newMessages[targetIdx] = {
...newMessages[targetIdx],
text,
isPartial: false,
isFinal: true,
};
} else {
newMessages.push({
id: this.messageIdCounter++,
speaker,
text,
isPartial: false,
isFinal: true,
});
}
}
this.sttMessages = newMessages;
// Notify parent component about message updates
this.dispatchEvent(new CustomEvent('stt-messages-updated', {
detail: { messages: this.sttMessages },
bubbles: true
}));
}
scrollToBottom() {
setTimeout(() => {
const container = this.shadowRoot.querySelector('.transcription-container');
if (container) {
container.scrollTop = container.scrollHeight;
}
}, 0);
}
getSpeakerClass(speaker) {
return speaker.toLowerCase() === 'me' ? 'me' : 'them';
}
getTranscriptText() {
return this.sttMessages.map(msg => `${msg.speaker}: ${msg.text}`).join('\n');
}
updated(changedProperties) {
super.updated(changedProperties);
if (changedProperties.has('sttMessages')) {
if (this._shouldScrollAfterUpdate) {
this.scrollToBottom();
this._shouldScrollAfterUpdate = false;
}
}
}
render() {
if (!this.isVisible) {
return html`<div style="display: none;"></div>`;
}
return html`
<div class="transcription-container">
${this.sttMessages.length === 0
? html`<div class="empty-state">Waiting for speech...</div>`
: this.sttMessages.map(msg => html`
<div class="stt-message ${this.getSpeakerClass(msg.speaker)}">
${msg.text}
</div>
`)
}
</div>
`;
}
}
customElements.define('stt-view', SttView);

View File

@ -0,0 +1,5 @@
const sttRepository = require('./sqlite.repository');
module.exports = {
...sttRepository,
};

View File

@ -0,0 +1,37 @@
const sqliteClient = require('../../../../common/services/sqliteClient');
function addTranscript({ sessionId, speaker, text }) {
const db = sqliteClient.getDb();
return new Promise((resolve, reject) => {
const transcriptId = require('crypto').randomUUID();
const now = Math.floor(Date.now() / 1000);
const query = `INSERT INTO transcripts (id, session_id, start_at, speaker, text, created_at) VALUES (?, ?, ?, ?, ?, ?)`;
db.run(query, [transcriptId, sessionId, now, speaker, text, now], function(err) {
if (err) {
console.error('Error adding transcript:', err);
reject(err);
} else {
resolve({ id: transcriptId });
}
});
});
}
function getAllTranscriptsBySessionId(sessionId) {
const db = sqliteClient.getDb();
return new Promise((resolve, reject) => {
const query = "SELECT * FROM transcripts WHERE session_id = ? ORDER BY start_at ASC";
db.all(query, [sessionId], (err, rows) => {
if (err) {
reject(err);
} else {
resolve(rows);
}
});
});
}
module.exports = {
addTranscript,
getAllTranscriptsBySessionId,
};

View File

@ -0,0 +1,478 @@
const { BrowserWindow } = require('electron');
const { spawn } = require('child_process');
const { createSTT } = require('../../../common/ai/factory');
const { getStoredApiKey, getStoredProvider } = require('../../../electron/windowManager');
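// Partial STT results are accumulated into per-speaker completion buffers and
// flushed as a final transcript once no new text arrives for this long.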
const COMPLETION_DEBOUNCE_MS = 2000;
class SttService {
constructor() {
this.mySttSession = null;
this.theirSttSession = null;
this.myCurrentUtterance = '';
this.theirCurrentUtterance = '';
this.myLastPartialText = '';
this.theirLastPartialText = '';
this.myInactivityTimer = null;
this.theirInactivityTimer = null;
// Turn-completion debouncing
this.myCompletionBuffer = '';
this.theirCompletionBuffer = '';
this.myCompletionTimer = null;
this.theirCompletionTimer = null;
// System audio capture
this.systemAudioProc = null;
// Callbacks
this.onTranscriptionComplete = null;
this.onStatusUpdate = null;
}
setCallbacks({ onTranscriptionComplete, onStatusUpdate }) {
this.onTranscriptionComplete = onTranscriptionComplete;
this.onStatusUpdate = onStatusUpdate;
}
async getApiKey() {
const storedKey = await getStoredApiKey();
if (storedKey) {
console.log('[SttService] Using stored API key');
return storedKey;
}
const envKey = process.env.OPENAI_API_KEY;
if (envKey) {
console.log('[SttService] Using environment API key');
return envKey;
}
console.error('[SttService] No API key found in storage or environment');
return null;
}
async getAiProvider() {
try {
const { ipcRenderer } = require('electron');
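            // In the main process require('electron').ipcRenderer is undefined, so
            // this invoke throws and we fall through to getStoredProvider below.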
const provider = await ipcRenderer.invoke('get-ai-provider');
return provider || 'openai';
} catch (error) {
return getStoredProvider ? getStoredProvider() : 'openai';
}
}
sendToRenderer(channel, data) {
BrowserWindow.getAllWindows().forEach(win => {
if (!win.isDestroyed()) {
win.webContents.send(channel, data);
}
});
}
flushMyCompletion() {
if (!this.myCompletionBuffer.trim()) return;
const finalText = this.myCompletionBuffer.trim();
// Notify completion callback
if (this.onTranscriptionComplete) {
this.onTranscriptionComplete('Me', finalText);
}
// Send to renderer as final
this.sendToRenderer('stt-update', {
speaker: 'Me',
text: finalText,
isPartial: false,
isFinal: true,
timestamp: Date.now(),
});
this.myCompletionBuffer = '';
this.myCompletionTimer = null;
this.myCurrentUtterance = '';
if (this.onStatusUpdate) {
this.onStatusUpdate('Listening...');
}
}
flushTheirCompletion() {
if (!this.theirCompletionBuffer.trim()) return;
const finalText = this.theirCompletionBuffer.trim();
// Notify completion callback
if (this.onTranscriptionComplete) {
this.onTranscriptionComplete('Them', finalText);
}
// Send to renderer as final
this.sendToRenderer('stt-update', {
speaker: 'Them',
text: finalText,
isPartial: false,
isFinal: true,
timestamp: Date.now(),
});
this.theirCompletionBuffer = '';
this.theirCompletionTimer = null;
this.theirCurrentUtterance = '';
if (this.onStatusUpdate) {
this.onStatusUpdate('Listening...');
}
}
debounceMyCompletion(text) {
        // If the other speaker was mid-utterance, the speaker has changed, so flush their bubble immediately.
if (this.theirCompletionTimer) {
clearTimeout(this.theirCompletionTimer);
this.flushTheirCompletion();
}
this.myCompletionBuffer += (this.myCompletionBuffer ? ' ' : '') + text;
if (this.myCompletionTimer) clearTimeout(this.myCompletionTimer);
this.myCompletionTimer = setTimeout(() => this.flushMyCompletion(), COMPLETION_DEBOUNCE_MS);
}
debounceTheirCompletion(text) {
        // If I was mid-utterance, the speaker has changed, so flush my bubble immediately.
if (this.myCompletionTimer) {
clearTimeout(this.myCompletionTimer);
this.flushMyCompletion();
}
this.theirCompletionBuffer += (this.theirCompletionBuffer ? ' ' : '') + text;
if (this.theirCompletionTimer) clearTimeout(this.theirCompletionTimer);
this.theirCompletionTimer = setTimeout(() => this.flushTheirCompletion(), COMPLETION_DEBOUNCE_MS);
}
async initializeSttSessions(language = 'en') {
const effectiveLanguage = process.env.OPENAI_TRANSCRIBE_LANG || language || 'en';
const API_KEY = await this.getApiKey();
if (!API_KEY) {
throw new Error('No API key available');
}
const provider = await this.getAiProvider();
const isGemini = provider === 'gemini';
console.log(`[SttService] Initializing STT for provider: ${provider}`);
const handleMyMessage = message => {
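            // OpenAI emits incremental `...input_audio_transcription.delta` events
            // followed by a `...completed` event per utterance; Gemini streams the
            // text under serverContent.inputTranscription instead.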
if (isGemini) {
const text = message.serverContent?.inputTranscription?.text || '';
if (text && text.trim()) {
const finalUtteranceText = text.trim().replace(/<noise>/g, '').trim();
if (finalUtteranceText && finalUtteranceText !== '.') {
this.debounceMyCompletion(finalUtteranceText);
}
}
} else {
const type = message.type;
const text = message.transcript || message.delta || (message.alternatives && message.alternatives[0]?.transcript) || '';
if (type === 'conversation.item.input_audio_transcription.delta') {
if (this.myCompletionTimer) clearTimeout(this.myCompletionTimer);
this.myCompletionTimer = null;
this.myCurrentUtterance += text;
const continuousText = this.myCompletionBuffer + (this.myCompletionBuffer ? ' ' : '') + this.myCurrentUtterance;
if (text && !text.includes('vq_lbr_audio_')) {
this.sendToRenderer('stt-update', {
speaker: 'Me',
text: continuousText,
isPartial: true,
isFinal: false,
timestamp: Date.now(),
});
}
} else if (type === 'conversation.item.input_audio_transcription.completed') {
if (text && text.trim()) {
const finalUtteranceText = text.trim();
this.myCurrentUtterance = '';
this.debounceMyCompletion(finalUtteranceText);
}
}
}
if (message.error) {
console.error('[Me] STT Session Error:', message.error);
}
};
const handleTheirMessage = message => {
if (isGemini) {
const text = message.serverContent?.inputTranscription?.text || '';
if (text && text.trim()) {
const finalUtteranceText = text.trim().replace(/<noise>/g, '').trim();
if (finalUtteranceText && finalUtteranceText !== '.') {
this.debounceTheirCompletion(finalUtteranceText);
}
}
} else {
const type = message.type;
const text = message.transcript || message.delta || (message.alternatives && message.alternatives[0]?.transcript) || '';
if (type === 'conversation.item.input_audio_transcription.delta') {
if (this.theirCompletionTimer) clearTimeout(this.theirCompletionTimer);
this.theirCompletionTimer = null;
this.theirCurrentUtterance += text;
const continuousText = this.theirCompletionBuffer + (this.theirCompletionBuffer ? ' ' : '') + this.theirCurrentUtterance;
if (text && !text.includes('vq_lbr_audio_')) {
this.sendToRenderer('stt-update', {
speaker: 'Them',
text: continuousText,
isPartial: true,
isFinal: false,
timestamp: Date.now(),
});
}
} else if (type === 'conversation.item.input_audio_transcription.completed') {
if (text && text.trim()) {
const finalUtteranceText = text.trim();
this.theirCurrentUtterance = '';
this.debounceTheirCompletion(finalUtteranceText);
}
}
}
if (message.error) {
console.error('[Them] STT Session Error:', message.error);
}
};
const mySttConfig = {
language: effectiveLanguage,
callbacks: {
onmessage: handleMyMessage,
onerror: error => console.error('My STT session error:', error.message),
onclose: event => console.log('My STT session closed:', event.reason),
},
};
const theirSttConfig = {
language: effectiveLanguage,
callbacks: {
onmessage: handleTheirMessage,
onerror: error => console.error('Their STT session error:', error.message),
onclose: event => console.log('Their STT session closed:', event.reason),
},
};
// Determine auth options for providers that support it
const authService = require('../../../common/services/authService');
const userState = authService.getCurrentUser();
const loggedIn = userState.isLoggedIn;
const sttOptions = {
apiKey: API_KEY,
language: effectiveLanguage,
usePortkey: !isGemini && loggedIn, // Only OpenAI supports Portkey
portkeyVirtualKey: loggedIn ? API_KEY : undefined
};
[this.mySttSession, this.theirSttSession] = await Promise.all([
createSTT(provider, { ...sttOptions, callbacks: mySttConfig.callbacks }),
createSTT(provider, { ...sttOptions, callbacks: theirSttConfig.callbacks }),
]);
console.log('✅ Both STT sessions initialized successfully.');
return true;
}
async sendAudioContent(data, mimeType) {
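        // Gemini expects a structured { audio: { data, mimeType } } payload; the
        // OpenAI realtime session takes the base64 string directly.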
const provider = await this.getAiProvider();
const isGemini = provider === 'gemini';
if (!this.mySttSession) {
throw new Error('User STT session not active');
}
const payload = isGemini
? { audio: { data, mimeType: mimeType || 'audio/pcm;rate=24000' } }
: data;
await this.mySttSession.sendRealtimeInput(payload);
}
killExistingSystemAudioDump() {
return new Promise(resolve => {
console.log('Checking for existing SystemAudioDump processes...');
const killProc = spawn('pkill', ['-f', 'SystemAudioDump'], {
stdio: 'ignore',
});
killProc.on('close', code => {
if (code === 0) {
console.log('Killed existing SystemAudioDump processes');
} else {
console.log('No existing SystemAudioDump processes found');
}
resolve();
});
killProc.on('error', err => {
console.log('Error checking for existing processes (this is normal):', err.message);
resolve();
});
setTimeout(() => {
killProc.kill();
resolve();
}, 2000);
});
}
async startMacOSAudioCapture() {
if (process.platform !== 'darwin' || !this.theirSttSession) return false;
await this.killExistingSystemAudioDump();
console.log('Starting macOS audio capture for "Them"...');
const { app } = require('electron');
const path = require('path');
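        // Packaged builds ship the capture helper outside the asar archive
        // (app.asar.unpacked); dev builds read it from the repo's src/assets.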
const systemAudioPath = app.isPackaged
? path.join(process.resourcesPath, 'app.asar.unpacked', 'src', 'assets', 'SystemAudioDump')
: path.join(app.getAppPath(), 'src', 'assets', 'SystemAudioDump');
console.log('SystemAudioDump path:', systemAudioPath);
this.systemAudioProc = spawn(systemAudioPath, [], {
stdio: ['ignore', 'pipe', 'pipe'],
});
if (!this.systemAudioProc.pid) {
console.error('Failed to start SystemAudioDump');
return false;
}
console.log('SystemAudioDump started with PID:', this.systemAudioProc.pid);
const CHUNK_DURATION = 0.1;
const SAMPLE_RATE = 24000;
const BYTES_PER_SAMPLE = 2;
const CHANNELS = 2;
const CHUNK_SIZE = SAMPLE_RATE * BYTES_PER_SAMPLE * CHANNELS * CHUNK_DURATION;
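        // 24000 samples/s * 2 bytes * 2 channels * 0.1 s = 9600 bytes per chunk.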
let audioBuffer = Buffer.alloc(0);
const provider = await this.getAiProvider();
const isGemini = provider === 'gemini';
this.systemAudioProc.stdout.on('data', async data => {
audioBuffer = Buffer.concat([audioBuffer, data]);
while (audioBuffer.length >= CHUNK_SIZE) {
const chunk = audioBuffer.slice(0, CHUNK_SIZE);
audioBuffer = audioBuffer.slice(CHUNK_SIZE);
const monoChunk = CHANNELS === 2 ? this.convertStereoToMono(chunk) : chunk;
const base64Data = monoChunk.toString('base64');
this.sendToRenderer('system-audio-data', { data: base64Data });
if (this.theirSttSession) {
try {
const payload = isGemini
? { audio: { data: base64Data, mimeType: 'audio/pcm;rate=24000' } }
: base64Data;
await this.theirSttSession.sendRealtimeInput(payload);
} catch (err) {
console.error('Error sending system audio:', err.message);
}
}
}
});
this.systemAudioProc.stderr.on('data', data => {
console.error('SystemAudioDump stderr:', data.toString());
});
this.systemAudioProc.on('close', code => {
console.log('SystemAudioDump process closed with code:', code);
this.systemAudioProc = null;
});
this.systemAudioProc.on('error', err => {
console.error('SystemAudioDump process error:', err);
this.systemAudioProc = null;
});
return true;
}
convertStereoToMono(stereoBuffer) {
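        // Interleaved 16-bit stereo: [L0, R0, L1, R1, ...] at 4 bytes per frame.
        // Keep only the left channel, producing 2 bytes per frame.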
const samples = stereoBuffer.length / 4;
const monoBuffer = Buffer.alloc(samples * 2);
for (let i = 0; i < samples; i++) {
const leftSample = stereoBuffer.readInt16LE(i * 4);
monoBuffer.writeInt16LE(leftSample, i * 2);
}
return monoBuffer;
}
stopMacOSAudioCapture() {
if (this.systemAudioProc) {
console.log('Stopping SystemAudioDump...');
this.systemAudioProc.kill('SIGTERM');
this.systemAudioProc = null;
}
}
isSessionActive() {
return !!this.mySttSession && !!this.theirSttSession;
}
async closeSessions() {
this.stopMacOSAudioCapture();
// Clear timers
if (this.myInactivityTimer) {
clearTimeout(this.myInactivityTimer);
this.myInactivityTimer = null;
}
if (this.theirInactivityTimer) {
clearTimeout(this.theirInactivityTimer);
this.theirInactivityTimer = null;
}
if (this.myCompletionTimer) {
clearTimeout(this.myCompletionTimer);
this.myCompletionTimer = null;
}
if (this.theirCompletionTimer) {
clearTimeout(this.theirCompletionTimer);
this.theirCompletionTimer = null;
}
const closePromises = [];
if (this.mySttSession) {
closePromises.push(this.mySttSession.close());
this.mySttSession = null;
}
if (this.theirSttSession) {
closePromises.push(this.theirSttSession.close());
this.theirSttSession = null;
}
await Promise.all(closePromises);
console.log('All STT sessions closed.');
// Reset state
this.myCurrentUtterance = '';
this.theirCurrentUtterance = '';
this.myLastPartialText = '';
this.theirLastPartialText = '';
this.myCompletionBuffer = '';
this.theirCompletionBuffer = '';
}
}
module.exports = SttService;

View File

@ -0,0 +1,559 @@
import { html, css, LitElement } from '../../../assets/lit-core-2.7.4.min.js';
export class SummaryView extends LitElement {
static styles = css`
:host {
display: block;
width: 100%;
}
/* Inherit font styles from parent */
        /* highlight.js styles */
.insights-container pre {
background: rgba(0, 0, 0, 0.4) !important;
border-radius: 8px !important;
padding: 12px !important;
margin: 8px 0 !important;
overflow-x: auto !important;
border: 1px solid rgba(255, 255, 255, 0.1) !important;
white-space: pre !important;
word-wrap: normal !important;
word-break: normal !important;
}
.insights-container code {
font-family: 'Monaco', 'Menlo', 'Consolas', monospace !important;
font-size: 11px !important;
background: transparent !important;
white-space: pre !important;
word-wrap: normal !important;
word-break: normal !important;
}
.insights-container pre code {
white-space: pre !important;
word-wrap: normal !important;
word-break: normal !important;
display: block !important;
}
.insights-container p code {
background: rgba(255, 255, 255, 0.1) !important;
padding: 2px 4px !important;
border-radius: 3px !important;
color: #ffd700 !important;
}
.hljs-keyword {
color: #ff79c6 !important;
}
.hljs-string {
color: #f1fa8c !important;
}
.hljs-comment {
color: #6272a4 !important;
}
.hljs-number {
color: #bd93f9 !important;
}
.hljs-function {
color: #50fa7b !important;
}
.hljs-variable {
color: #8be9fd !important;
}
.hljs-built_in {
color: #ffb86c !important;
}
.hljs-title {
color: #50fa7b !important;
}
.hljs-attr {
color: #50fa7b !important;
}
.hljs-tag {
color: #ff79c6 !important;
}
.insights-container {
overflow-y: auto;
padding: 12px 16px 16px 16px;
position: relative;
z-index: 1;
min-height: 150px;
max-height: 600px;
flex: 1;
}
/* Visibility handled by parent component */
.insights-container::-webkit-scrollbar {
width: 8px;
}
.insights-container::-webkit-scrollbar-track {
background: rgba(0, 0, 0, 0.1);
border-radius: 4px;
}
.insights-container::-webkit-scrollbar-thumb {
background: rgba(255, 255, 255, 0.3);
border-radius: 4px;
}
.insights-container::-webkit-scrollbar-thumb:hover {
background: rgba(255, 255, 255, 0.5);
}
insights-title {
color: rgba(255, 255, 255, 0.8);
font-size: 15px;
font-weight: 500;
font-family: 'Helvetica Neue', sans-serif;
margin: 12px 0 8px 0;
display: block;
}
.insights-container h4 {
color: #ffffff;
font-size: 12px;
font-weight: 600;
margin: 12px 0 8px 0;
padding: 4px 8px;
border-radius: 4px;
background: transparent;
cursor: default;
}
.insights-container h4:hover {
background: transparent;
}
.insights-container h4:first-child {
margin-top: 0;
}
.outline-item {
color: #ffffff;
font-size: 11px;
line-height: 1.4;
margin: 4px 0;
padding: 6px 8px;
border-radius: 4px;
background: transparent;
transition: background-color 0.15s ease;
cursor: pointer;
word-wrap: break-word;
}
.outline-item:hover {
background: rgba(255, 255, 255, 0.1);
}
.request-item {
color: #ffffff;
font-size: 12px;
line-height: 1.2;
margin: 4px 0;
padding: 6px 8px;
border-radius: 4px;
background: transparent;
cursor: default;
word-wrap: break-word;
transition: background-color 0.15s ease;
}
.request-item.clickable {
cursor: pointer;
transition: all 0.15s ease;
}
.request-item.clickable:hover {
background: rgba(255, 255, 255, 0.1);
transform: translateX(2px);
}
        /* Styles for rendered markdown content */
.markdown-content {
color: #ffffff;
font-size: 11px;
line-height: 1.4;
margin: 4px 0;
padding: 6px 8px;
border-radius: 4px;
background: transparent;
cursor: pointer;
word-wrap: break-word;
transition: all 0.15s ease;
}
.markdown-content:hover {
background: rgba(255, 255, 255, 0.1);
transform: translateX(2px);
}
.markdown-content p {
margin: 4px 0;
}
.markdown-content ul,
.markdown-content ol {
margin: 4px 0;
padding-left: 16px;
}
.markdown-content li {
margin: 2px 0;
}
.markdown-content a {
color: #8be9fd;
text-decoration: none;
}
.markdown-content a:hover {
text-decoration: underline;
}
.markdown-content strong {
font-weight: 600;
color: #f8f8f2;
}
.markdown-content em {
font-style: italic;
color: #f1fa8c;
}
.empty-state {
display: flex;
align-items: center;
justify-content: center;
height: 100px;
color: rgba(255, 255, 255, 0.6);
font-size: 12px;
font-style: italic;
}
`;
static properties = {
structuredData: { type: Object },
isVisible: { type: Boolean },
hasCompletedRecording: { type: Boolean },
};
constructor() {
super();
this.structuredData = {
summary: [],
topic: { header: '', bullets: [] },
actions: [],
followUps: [],
};
this.isVisible = true;
this.hasCompletedRecording = false;
        // Initialize markdown libraries
this.marked = null;
this.hljs = null;
this.isLibrariesLoaded = false;
this.DOMPurify = null;
this.isDOMPurifyLoaded = false;
this.loadLibraries();
}
connectedCallback() {
super.connectedCallback();
if (window.require) {
const { ipcRenderer } = window.require('electron');
ipcRenderer.on('update-structured-data', (event, data) => {
this.structuredData = data;
this.requestUpdate();
});
}
}
disconnectedCallback() {
super.disconnectedCallback();
if (window.require) {
const { ipcRenderer } = window.require('electron');
ipcRenderer.removeAllListeners('update-structured-data');
}
}
// Handle session reset from parent
resetAnalysis() {
this.structuredData = {
summary: [],
topic: { header: '', bullets: [] },
actions: [],
followUps: [],
};
this.requestUpdate();
}
async loadLibraries() {
try {
if (!window.marked) {
await this.loadScript('../../../assets/marked-4.3.0.min.js');
}
if (!window.hljs) {
await this.loadScript('../../../assets/highlight-11.9.0.min.js');
}
if (!window.DOMPurify) {
await this.loadScript('../../../assets/dompurify-3.0.7.min.js');
}
this.marked = window.marked;
this.hljs = window.hljs;
this.DOMPurify = window.DOMPurify;
if (this.marked && this.hljs) {
this.marked.setOptions({
highlight: (code, lang) => {
if (lang && this.hljs.getLanguage(lang)) {
try {
return this.hljs.highlight(code, { language: lang }).value;
} catch (err) {
console.warn('Highlight error:', err);
}
}
try {
return this.hljs.highlightAuto(code).value;
} catch (err) {
console.warn('Auto highlight error:', err);
}
return code;
},
breaks: true,
gfm: true,
pedantic: false,
smartypants: false,
xhtml: false,
});
this.isLibrariesLoaded = true;
console.log('Markdown libraries loaded successfully');
}
if (this.DOMPurify) {
this.isDOMPurifyLoaded = true;
console.log('DOMPurify loaded successfully in SummaryView');
}
} catch (error) {
console.error('Failed to load libraries:', error);
}
}
loadScript(src) {
return new Promise((resolve, reject) => {
const script = document.createElement('script');
script.src = src;
script.onload = resolve;
script.onerror = reject;
document.head.appendChild(script);
});
}
parseMarkdown(text) {
if (!text) return '';
if (!this.isLibrariesLoaded || !this.marked) {
return text;
}
try {
return this.marked(text);
} catch (error) {
console.error('Markdown parsing error:', error);
return text;
}
}
handleMarkdownClick(originalText) {
this.handleRequestClick(originalText);
}
renderMarkdownContent() {
if (!this.isLibrariesLoaded || !this.marked) {
return;
}
const markdownElements = this.shadowRoot.querySelectorAll('[data-markdown-id]');
markdownElements.forEach(element => {
const originalText = element.getAttribute('data-original-text');
if (originalText) {
try {
let parsedHTML = this.parseMarkdown(originalText);
if (this.isDOMPurifyLoaded && this.DOMPurify) {
parsedHTML = this.DOMPurify.sanitize(parsedHTML);
if (this.DOMPurify.removed && this.DOMPurify.removed.length > 0) {
console.warn('Unsafe content detected in insights, showing plain text');
element.textContent = '⚠️ ' + originalText;
return;
}
}
element.innerHTML = parsedHTML;
} catch (error) {
console.error('Error rendering markdown for element:', error);
element.textContent = originalText;
}
}
});
}
async handleRequestClick(requestText) {
console.log('🔥 Analysis request clicked:', requestText);
if (window.require) {
const { ipcRenderer } = window.require('electron');
try {
const isAskViewVisible = await ipcRenderer.invoke('is-window-visible', 'ask');
if (!isAskViewVisible) {
await ipcRenderer.invoke('toggle-feature', 'ask');
await new Promise(resolve => setTimeout(resolve, 100));
}
const result = await ipcRenderer.invoke('send-question-to-ask', requestText);
if (result.success) {
console.log('✅ Question sent to AskView successfully');
} else {
console.error('❌ Failed to send question to AskView:', result.error);
}
} catch (error) {
console.error('❌ Error in handleRequestClick:', error);
}
}
}
getSummaryText() {
const data = this.structuredData || { summary: [], topic: { header: '', bullets: [] }, actions: [] };
let sections = [];
if (data.summary && data.summary.length > 0) {
sections.push(`Current Summary:\n${data.summary.map(s => `${s}`).join('\n')}`);
}
if (data.topic && data.topic.header && data.topic.bullets.length > 0) {
sections.push(`\n${data.topic.header}:\n${data.topic.bullets.map(b => `${b}`).join('\n')}`);
}
if (data.actions && data.actions.length > 0) {
sections.push(`\nActions:\n${data.actions.map(a => `${a}`).join('\n')}`);
}
if (data.followUps && data.followUps.length > 0) {
sections.push(`\nFollow-Ups:\n${data.followUps.map(f => `${f}`).join('\n')}`);
}
return sections.join('\n\n').trim();
}
updated(changedProperties) {
super.updated(changedProperties);
this.renderMarkdownContent();
}
render() {
if (!this.isVisible) {
return html`<div style="display: none;"></div>`;
}
const data = this.structuredData || {
summary: [],
topic: { header: '', bullets: [] },
actions: [],
};
const hasAnyContent = data.summary.length > 0 || data.topic.bullets.length > 0 || data.actions.length > 0;
return html`
<div class="insights-container">
${!hasAnyContent
? html`<div class="empty-state">No insights yet...</div>`
: html`
<insights-title>Current Summary</insights-title>
${data.summary.length > 0
? data.summary
.slice(0, 5)
.map(
(bullet, index) => html`
<div
class="markdown-content"
data-markdown-id="summary-${index}"
data-original-text="${bullet}"
@click=${() => this.handleMarkdownClick(bullet)}
>
${bullet}
</div>
`
)
: html` <div class="request-item">No content yet...</div> `}
${data.topic.header
? html`
<insights-title>${data.topic.header}</insights-title>
${data.topic.bullets
.slice(0, 3)
.map(
(bullet, index) => html`
<div
class="markdown-content"
data-markdown-id="topic-${index}"
data-original-text="${bullet}"
@click=${() => this.handleMarkdownClick(bullet)}
>
${bullet}
</div>
`
)}
`
: ''}
${data.actions.length > 0
? html`
<insights-title>Actions</insights-title>
${data.actions
.slice(0, 5)
.map(
(action, index) => html`
<div
class="markdown-content"
data-markdown-id="action-${index}"
data-original-text="${action}"
@click=${() => this.handleMarkdownClick(action)}
>
${action}
</div>
`
)}
`
: ''}
${this.hasCompletedRecording && data.followUps && data.followUps.length > 0
? html`
<insights-title>Follow-Ups</insights-title>
${data.followUps.map(
(followUp, index) => html`
<div
class="markdown-content"
data-markdown-id="followup-${index}"
data-original-text="${followUp}"
@click=${() => this.handleMarkdownClick(followUp)}
>
${followUp}
</div>
`
)}
`
: ''}
`}
</div>
`;
}
}
customElements.define('summary-view', SummaryView);

View File

@ -0,0 +1,5 @@
const summaryRepository = require('./sqlite.repository');
module.exports = {
...summaryRepository,
};

View File

@ -0,0 +1,47 @@
const sqliteClient = require('../../../../common/services/sqliteClient');
function saveSummary({ sessionId, tldr, text, bullet_json, action_json, model = 'gpt-4.1' }) {
const db = sqliteClient.getDb();
return new Promise((resolve, reject) => {
const now = Math.floor(Date.now() / 1000);
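        // One summary row per session: INSERT, or update in place via SQLite's
        // ON CONFLICT upsert keyed on session_id.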
const query = `
INSERT INTO summaries (session_id, generated_at, model, text, tldr, bullet_json, action_json, updated_at)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(session_id) DO UPDATE SET
generated_at=excluded.generated_at,
model=excluded.model,
text=excluded.text,
tldr=excluded.tldr,
bullet_json=excluded.bullet_json,
action_json=excluded.action_json,
updated_at=excluded.updated_at
`;
db.run(query, [sessionId, now, model, text, tldr, bullet_json, action_json, now], function(err) {
if (err) {
console.error('Error saving summary:', err);
reject(err);
} else {
resolve({ changes: this.changes });
}
});
});
}
function getSummaryBySessionId(sessionId) {
const db = sqliteClient.getDb();
return new Promise((resolve, reject) => {
const query = "SELECT * FROM summaries WHERE session_id = ?";
db.get(query, [sessionId], (err, row) => {
if (err) {
reject(err);
} else {
resolve(row || null);
}
});
});
}
module.exports = {
saveSummary,
getSummaryBySessionId,
};

View File

@ -0,0 +1,356 @@
const { BrowserWindow } = require('electron');
const { getSystemPrompt } = require('../../../common/prompts/promptBuilder.js');
const { createLLM } = require('../../../common/ai/factory');
const authService = require('../../../common/services/authService');
const sessionRepository = require('../../../common/repositories/session');
const summaryRepository = require('./repositories');
const { getStoredApiKey, getStoredProvider } = require('../../../electron/windowManager');
class SummaryService {
constructor() {
this.previousAnalysisResult = null;
this.analysisHistory = [];
this.conversationHistory = [];
this.currentSessionId = null;
// Callbacks
this.onAnalysisComplete = null;
this.onStatusUpdate = null;
}
setCallbacks({ onAnalysisComplete, onStatusUpdate }) {
this.onAnalysisComplete = onAnalysisComplete;
this.onStatusUpdate = onStatusUpdate;
}
setSessionId(sessionId) {
this.currentSessionId = sessionId;
}
async getApiKey() {
const storedKey = await getStoredApiKey();
if (storedKey) {
console.log('[SummaryService] Using stored API key');
return storedKey;
}
const envKey = process.env.OPENAI_API_KEY;
if (envKey) {
console.log('[SummaryService] Using environment API key');
return envKey;
}
console.error('[SummaryService] No API key found in storage or environment');
return null;
}
sendToRenderer(channel, data) {
BrowserWindow.getAllWindows().forEach(win => {
if (!win.isDestroyed()) {
win.webContents.send(channel, data);
}
});
}
addConversationTurn(speaker, text) {
const conversationText = `${speaker.toLowerCase()}: ${text.trim()}`;
this.conversationHistory.push(conversationText);
console.log(`💬 Added conversation text: ${conversationText}`);
console.log(`📈 Total conversation history: ${this.conversationHistory.length} texts`);
// Trigger analysis if needed
this.triggerAnalysisIfNeeded();
}
getConversationHistory() {
return this.conversationHistory;
}
resetConversationHistory() {
this.conversationHistory = [];
this.previousAnalysisResult = null;
this.analysisHistory = [];
console.log('🔄 Conversation history and analysis state reset');
}
/**
* Converts conversation history into text to include in the prompt.
* @param {Array<string>} conversationTexts - Array of conversation texts ["me: ~~~", "them: ~~~", ...]
* @param {number} maxTurns - Maximum number of recent turns to include
* @returns {string} - Formatted conversation string for the prompt
*/
formatConversationForPrompt(conversationTexts, maxTurns = 30) {
if (conversationTexts.length === 0) return '';
return conversationTexts.slice(-maxTurns).join('\n');
}
async makeOutlineAndRequests(conversationTexts, maxTurns = 30) {
console.log(`🔍 makeOutlineAndRequests called - conversationTexts: ${conversationTexts.length}`);
if (conversationTexts.length === 0) {
console.log('⚠️ No conversation texts available for analysis');
return null;
}
const recentConversation = this.formatConversationForPrompt(conversationTexts, maxTurns);
        // Include the previous analysis result in the prompt
let contextualPrompt = '';
if (this.previousAnalysisResult) {
contextualPrompt = `
Previous Analysis Context:
- Main Topic: ${this.previousAnalysisResult.topic.header}
- Key Points: ${this.previousAnalysisResult.summary.slice(0, 3).join(', ')}
- Last Actions: ${this.previousAnalysisResult.actions.slice(0, 2).join(', ')}
Please build upon this context while analyzing the new conversation segments.
`;
}
const basePrompt = getSystemPrompt('pickle_glass_analysis', '', false);
const systemPrompt = basePrompt.replace('{{CONVERSATION_HISTORY}}', recentConversation);
try {
if (this.currentSessionId) {
await sessionRepository.touch(this.currentSessionId);
}
const messages = [
{
role: 'system',
content: systemPrompt,
},
{
role: 'user',
content: `${contextualPrompt}
Analyze the conversation and provide a structured summary. Format your response as follows:
**Summary Overview**
- Main discussion point with context
**Key Topic: [Topic Name]**
- First key insight
- Second key insight
- Third key insight
**Extended Explanation**
Provide 2-3 sentences explaining the context and implications.
**Suggested Questions**
1. First follow-up question?
2. Second follow-up question?
3. Third follow-up question?
Keep all points concise and build upon previous analysis if provided.`,
},
];
console.log('🤖 Sending analysis request to AI...');
const API_KEY = await this.getApiKey();
if (!API_KEY) {
throw new Error('No API key available');
}
const provider = getStoredProvider ? await getStoredProvider() : 'openai';
const loggedIn = authService.getCurrentUser().isLoggedIn;
console.log(`[SummaryService] provider: ${provider}, loggedIn: ${loggedIn}`);
const llm = createLLM(provider, {
apiKey: API_KEY,
model: provider === 'openai' ? 'gpt-4.1' : 'gemini-2.5-flash',
temperature: 0.7,
maxTokens: 1024,
usePortkey: provider === 'openai' && loggedIn,
portkeyVirtualKey: loggedIn ? API_KEY : undefined
});
const completion = await llm.chat(messages);
const responseText = completion.content;
console.log(`✅ Analysis response received: ${responseText}`);
const structuredData = this.parseResponseText(responseText, this.previousAnalysisResult);
if (this.currentSessionId) {
summaryRepository.saveSummary({
sessionId: this.currentSessionId,
text: responseText,
tldr: structuredData.summary.join('\n'),
bullet_json: JSON.stringify(structuredData.topic.bullets),
action_json: JSON.stringify(structuredData.actions),
model: 'gpt-4.1'
}).catch(err => console.error('[DB] Failed to save summary:', err));
}
            // Store the analysis result
this.previousAnalysisResult = structuredData;
this.analysisHistory.push({
timestamp: Date.now(),
data: structuredData,
conversationLength: conversationTexts.length,
});
            // Cap history size (keep only the 10 most recent)
if (this.analysisHistory.length > 10) {
this.analysisHistory.shift();
}
return structuredData;
} catch (error) {
console.error('❌ Error during analysis generation:', error.message);
            return this.previousAnalysisResult; // Return the previous result on error
}
}
parseResponseText(responseText, previousResult) {
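        // Expected response shape (see the prompt in makeOutlineAndRequests):
        //   **Summary Overview**      -> summary bullets
        //   **Key Topic: <name>**     -> topic.header + topic.bullets
        //   **Extended Explanation**  -> folded into topic.bullets
        //   **Suggested Questions**   -> numbered questions appended to actions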
const structuredData = {
summary: [],
topic: { header: '', bullets: [] },
actions: [],
followUps: ['✉️ Draft a follow-up email', '✅ Generate action items', '📝 Show summary'],
};
        // Use the previous result as a baseline when available
if (previousResult) {
structuredData.topic.header = previousResult.topic.header;
structuredData.summary = [...previousResult.summary];
}
try {
const lines = responseText.split('\n');
let currentSection = '';
let isCapturingTopic = false;
let topicName = '';
for (const line of lines) {
const trimmedLine = line.trim();
                // Detect section headers
if (trimmedLine.startsWith('**Summary Overview**')) {
currentSection = 'summary-overview';
continue;
} else if (trimmedLine.startsWith('**Key Topic:')) {
currentSection = 'topic';
isCapturingTopic = true;
topicName = trimmedLine.match(/\*\*Key Topic: (.+?)\*\*/)?.[1] || '';
if (topicName) {
structuredData.topic.header = topicName + ':';
}
continue;
} else if (trimmedLine.startsWith('**Extended Explanation**')) {
currentSection = 'explanation';
continue;
} else if (trimmedLine.startsWith('**Suggested Questions**')) {
currentSection = 'questions';
continue;
}
                // Parse section content
if (trimmedLine.startsWith('-') && currentSection === 'summary-overview') {
const summaryPoint = trimmedLine.substring(1).trim();
if (summaryPoint && !structuredData.summary.includes(summaryPoint)) {
                        // Update the running summary (keep at most 5 items)
structuredData.summary.unshift(summaryPoint);
if (structuredData.summary.length > 5) {
structuredData.summary.pop();
}
}
} else if (trimmedLine.startsWith('-') && currentSection === 'topic') {
const bullet = trimmedLine.substring(1).trim();
if (bullet && structuredData.topic.bullets.length < 3) {
structuredData.topic.bullets.push(bullet);
}
} else if (currentSection === 'explanation' && trimmedLine) {
                    // Fold the explanation into topic bullets, sentence by sentence
const sentences = trimmedLine
.split(/\.\s+/)
.filter(s => s.trim().length > 0)
.map(s => s.trim() + (s.endsWith('.') ? '' : '.'));
sentences.forEach(sentence => {
if (structuredData.topic.bullets.length < 3 && !structuredData.topic.bullets.includes(sentence)) {
structuredData.topic.bullets.push(sentence);
}
});
} else if (trimmedLine.match(/^\d+\./) && currentSection === 'questions') {
const question = trimmedLine.replace(/^\d+\.\s*/, '').trim();
if (question && question.includes('?')) {
structuredData.actions.push(`${question}`);
}
}
}
            // Append default actions
const defaultActions = ['✨ What should I say next?', '💬 Suggest follow-up questions'];
defaultActions.forEach(action => {
if (!structuredData.actions.includes(action)) {
structuredData.actions.push(action);
}
});
            // Cap the number of actions
structuredData.actions = structuredData.actions.slice(0, 5);
            // Validate and merge with previous data
if (structuredData.summary.length === 0 && previousResult) {
structuredData.summary = previousResult.summary;
}
if (structuredData.topic.bullets.length === 0 && previousResult) {
structuredData.topic.bullets = previousResult.topic.bullets;
}
} catch (error) {
console.error('❌ Error parsing response text:', error);
            // Fall back to the previous result on error
return (
previousResult || {
summary: [],
topic: { header: 'Analysis in progress', bullets: [] },
actions: ['✨ What should I say next?', '💬 Suggest follow-up questions'],
followUps: ['✉️ Draft a follow-up email', '✅ Generate action items', '📝 Show summary'],
}
);
}
console.log('📊 Final structured data:', JSON.stringify(structuredData, null, 2));
return structuredData;
}
/**
* Triggers analysis whenever the conversation history reaches a multiple of 5 texts.
*/
async triggerAnalysisIfNeeded() {
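        // Fires at 5, 10, 15, ... accumulated turns; the LLM call runs in the
        // background so transcript ingestion is never blocked on analysis.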
if (this.conversationHistory.length >= 5 && this.conversationHistory.length % 5 === 0) {
console.log(`🚀 Triggering analysis (non-blocking) - ${this.conversationHistory.length} conversation texts accumulated`);
this.makeOutlineAndRequests(this.conversationHistory)
.then(data => {
if (data) {
console.log('📤 Sending structured data to renderer');
this.sendToRenderer('update-structured-data', data);
// Notify callback
if (this.onAnalysisComplete) {
this.onAnalysisComplete(data);
}
} else {
console.log('❌ No analysis data returned from non-blocking call');
}
})
.catch(error => {
console.error('❌ Error in non-blocking analysis:', error);
});
}
}
getCurrentAnalysisData() {
return {
previousResult: this.previousAnalysisResult,
history: this.analysisHistory,
conversationLength: this.conversationHistory.length,
};
}
}
module.exports = SummaryService;

View File

@ -0,0 +1,831 @@
import { html, css, LitElement } from '../../assets/lit-core-2.7.4.min.js';
export class SettingsView extends LitElement {
static styles = css`
* {
font-family: 'Helvetica Neue', -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
cursor: default;
user-select: none;
}
:host {
display: block;
width: 240px;
height: 100%;
color: white;
}
.settings-container {
display: flex;
flex-direction: column;
height: 100%;
width: 100%;
background: rgba(20, 20, 20, 0.8);
border-radius: 12px;
outline: 0.5px rgba(255, 255, 255, 0.2) solid;
outline-offset: -1px;
box-sizing: border-box;
position: relative;
overflow-y: auto;
padding: 12px 12px;
z-index: 1000;
}
.settings-container::-webkit-scrollbar {
width: 6px;
}
.settings-container::-webkit-scrollbar-track {
background: rgba(255, 255, 255, 0.05);
border-radius: 3px;
}
.settings-container::-webkit-scrollbar-thumb {
background: rgba(255, 255, 255, 0.2);
border-radius: 3px;
}
.settings-container::-webkit-scrollbar-thumb:hover {
background: rgba(255, 255, 255, 0.3);
}
.settings-container::before {
content: '';
position: absolute;
top: 0;
left: 0;
right: 0;
bottom: 0;
width: 100%;
height: 100%;
background: rgba(0, 0, 0, 0.15);
box-shadow: 0 8px 32px rgba(0, 0, 0, 0.3);
border-radius: 12px;
filter: blur(10px);
z-index: -1;
}
.settings-button[disabled],
.api-key-section input[disabled] {
opacity: 0.4;
cursor: not-allowed;
pointer-events: none;
}
.header-section {
display: flex;
justify-content: space-between;
align-items: flex-start;
padding-bottom: 6px;
border-bottom: 1px solid rgba(255, 255, 255, 0.1);
position: relative;
z-index: 1;
}
.title-line {
display: flex;
justify-content: space-between;
align-items: center;
}
.app-title {
font-size: 13px;
font-weight: 500;
color: white;
margin: 0 0 4px 0;
}
.account-info {
font-size: 11px;
color: rgba(255, 255, 255, 0.7);
margin: 0;
}
.invisibility-icon {
padding-top: 2px;
opacity: 0;
transition: opacity 0.3s ease;
}
.invisibility-icon.visible {
opacity: 1;
}
.invisibility-icon svg {
width: 16px;
height: 16px;
}
.shortcuts-section {
display: flex;
flex-direction: column;
gap: 2px;
padding: 4px 0;
position: relative;
z-index: 1;
}
.shortcut-item {
display: flex;
justify-content: space-between;
align-items: center;
padding: 4px 0;
color: white;
font-size: 11px;
}
.shortcut-name {
font-weight: 300;
}
.shortcut-keys {
display: flex;
align-items: center;
gap: 3px;
}
.cmd-key, .shortcut-key {
background: rgba(255, 255, 255, 0.1);
border-radius: 3px;
width: 16px;
height: 16px;
display: flex;
align-items: center;
justify-content: center;
font-size: 11px;
font-weight: 500;
color: rgba(255, 255, 255, 0.9);
}
/* Buttons Section */
.buttons-section {
display: flex;
flex-direction: column;
gap: 4px;
padding-top: 6px;
border-top: 1px solid rgba(255, 255, 255, 0.1);
position: relative;
z-index: 1;
flex: 1;
}
.settings-button {
background: rgba(255, 255, 255, 0.1);
border: 1px solid rgba(255, 255, 255, 0.2);
border-radius: 4px;
color: white;
padding: 5px 10px;
font-size: 11px;
font-weight: 400;
cursor: pointer;
transition: all 0.15s ease;
display: flex;
align-items: center;
justify-content: center;
white-space: nowrap;
}
.settings-button:hover {
background: rgba(255, 255, 255, 0.15);
border-color: rgba(255, 255, 255, 0.3);
}
.settings-button:active {
transform: translateY(1px);
}
.settings-button.full-width {
width: 100%;
}
.settings-button.half-width {
flex: 1;
}
.settings-button.danger {
background: rgba(255, 59, 48, 0.1);
border-color: rgba(255, 59, 48, 0.3);
color: rgba(255, 59, 48, 0.9);
}
.settings-button.danger:hover {
background: rgba(255, 59, 48, 0.15);
border-color: rgba(255, 59, 48, 0.4);
}
.move-buttons, .bottom-buttons {
display: flex;
gap: 4px;
}
.api-key-section {
padding: 6px 0;
border-top: 1px solid rgba(255, 255, 255, 0.1);
}
.api-key-section input {
width: 100%;
background: rgba(0,0,0,0.2);
border: 1px solid rgba(255,255,255,0.2);
color: white;
border-radius: 4px;
padding: 4px;
font-size: 11px;
margin-bottom: 4px;
box-sizing: border-box;
}
.api-key-section input::placeholder {
color: rgba(255, 255, 255, 0.4);
}
/* Preset Management Section */
.preset-section {
padding: 6px 0;
border-top: 1px solid rgba(255, 255, 255, 0.1);
}
.preset-header {
display: flex;
justify-content: space-between;
align-items: center;
margin-bottom: 4px;
}
.preset-title {
font-size: 11px;
font-weight: 500;
color: white;
}
.preset-count {
font-size: 9px;
color: rgba(255, 255, 255, 0.5);
margin-left: 4px;
}
.preset-toggle {
font-size: 10px;
color: rgba(255, 255, 255, 0.6);
cursor: pointer;
padding: 2px 4px;
border-radius: 2px;
transition: background-color 0.15s ease;
}
.preset-toggle:hover {
background: rgba(255, 255, 255, 0.1);
}
.preset-list {
display: flex;
flex-direction: column;
gap: 2px;
max-height: 120px;
overflow-y: auto;
}
.preset-item {
display: flex;
justify-content: space-between;
align-items: center;
padding: 4px 6px;
background: rgba(255, 255, 255, 0.05);
border-radius: 3px;
cursor: pointer;
transition: all 0.15s ease;
font-size: 11px;
border: 1px solid transparent;
}
.preset-item:hover {
background: rgba(255, 255, 255, 0.1);
border-color: rgba(255, 255, 255, 0.1);
}
.preset-item.selected {
background: rgba(0, 122, 255, 0.25);
border-color: rgba(0, 122, 255, 0.6);
box-shadow: 0 0 0 1px rgba(0, 122, 255, 0.3);
}
.preset-name {
color: white;
flex: 1;
text-overflow: ellipsis;
overflow: hidden;
white-space: nowrap;
font-weight: 300;
}
.preset-item.selected .preset-name {
font-weight: 500;
}
.preset-status {
font-size: 9px;
color: rgba(0, 122, 255, 0.8);
font-weight: 500;
margin-left: 6px;
}
.no-presets-message {
padding: 12px 8px;
text-align: center;
color: rgba(255, 255, 255, 0.5);
font-size: 10px;
line-height: 1.4;
}
.no-presets-message .web-link {
color: rgba(0, 122, 255, 0.8);
text-decoration: underline;
cursor: pointer;
}
.no-presets-message .web-link:hover {
color: rgba(0, 122, 255, 1);
}
.loading-state {
display: flex;
align-items: center;
justify-content: center;
padding: 20px;
color: rgba(255, 255, 255, 0.7);
font-size: 11px;
}
.loading-spinner {
width: 12px;
height: 12px;
border: 1px solid rgba(255, 255, 255, 0.2);
border-top: 1px solid rgba(255, 255, 255, 0.8);
border-radius: 50%;
animation: spin 1s linear infinite;
margin-right: 6px;
}
@keyframes spin {
0% { transform: rotate(0deg); }
100% { transform: rotate(360deg); }
}
.hidden {
display: none;
}
/* ────────────────[ GLASS BYPASS ]─────────────── */
:host-context(body.has-glass) {
animation: none !important;
transition: none !important;
transform: none !important;
will-change: auto !important;
}
:host-context(body.has-glass) * {
background: transparent !important;
filter: none !important;
backdrop-filter: none !important;
box-shadow: none !important;
outline: none !important;
border: none !important;
border-radius: 0 !important;
transition: none !important;
animation: none !important;
}
:host-context(body.has-glass) .settings-container::before {
display: none !important;
}
`;
static properties = {
firebaseUser: { type: Object, state: true },
apiKey: { type: String, state: true },
isLoading: { type: Boolean, state: true },
isContentProtectionOn: { type: Boolean, state: true },
settings: { type: Object, state: true },
presets: { type: Array, state: true },
selectedPreset: { type: Object, state: true },
showPresets: { type: Boolean, state: true },
saving: { type: Boolean, state: true },
};
constructor() {
super();
this.firebaseUser = null;
this.apiKey = null;
this.isLoading = false;
this.isContentProtectionOn = true;
this.settings = null;
this.presets = [];
this.selectedPreset = null;
this.showPresets = false;
this.saving = false;
this.loadInitialData();
}
async loadInitialData() {
if (!window.require) return;
try {
this.isLoading = true;
const { ipcRenderer } = window.require('electron');
// Load all data in parallel
const [settings, presets, apiKey, contentProtection, userState] = await Promise.all([
ipcRenderer.invoke('settings:getSettings'),
ipcRenderer.invoke('settings:getPresets'),
ipcRenderer.invoke('get-stored-api-key'),
ipcRenderer.invoke('get-content-protection-status'),
ipcRenderer.invoke('get-current-user')
]);
this.settings = settings;
this.presets = presets || [];
this.apiKey = apiKey;
this.isContentProtectionOn = contentProtection;
// Set first user preset as selected
if (this.presets.length > 0) {
const firstUserPreset = this.presets.find(p => p.is_default === 0);
if (firstUserPreset) {
this.selectedPreset = firstUserPreset;
}
}
if (userState && userState.isLoggedIn) {
this.firebaseUser = userState.user;
}
} catch (error) {
console.error('Error loading initial data:', error);
} finally {
this.isLoading = false;
}
}
connectedCallback() {
super.connectedCallback();
this.setupEventListeners();
this.setupIpcListeners();
this.setupWindowResize();
}
disconnectedCallback() {
super.disconnectedCallback();
this.cleanupEventListeners();
this.cleanupIpcListeners();
this.cleanupWindowResize();
}
setupEventListeners() {
this.addEventListener('mouseenter', this.handleMouseEnter);
this.addEventListener('mouseleave', this.handleMouseLeave);
}
cleanupEventListeners() {
this.removeEventListener('mouseenter', this.handleMouseEnter);
this.removeEventListener('mouseleave', this.handleMouseLeave);
}
setupIpcListeners() {
if (!window.require) return;
const { ipcRenderer } = window.require('electron');
this._userStateListener = (event, userState) => {
console.log('[SettingsView] Received user-state-changed:', userState);
if (userState && userState.isLoggedIn) {
// Normalize the payload: loadInitialData() reads userState.user, so accept either shape here
this.firebaseUser = userState.user || userState;
} else {
this.firebaseUser = null;
}
this.requestUpdate();
};
this._settingsUpdatedListener = (event, settings) => {
console.log('[SettingsView] Received settings-updated');
this.settings = settings;
this.requestUpdate();
};
// Add preset update listener
this._presetsUpdatedListener = async (event) => {
console.log('[SettingsView] Received presets-updated, refreshing presets');
try {
const presets = await ipcRenderer.invoke('settings:getPresets');
this.presets = presets || [];
// Check whether the currently selected preset was deleted (only user presets considered)
const userPresets = this.presets.filter(p => p.is_default === 0);
if (this.selectedPreset && !userPresets.find(p => p.id === this.selectedPreset.id)) {
this.selectedPreset = userPresets.length > 0 ? userPresets[0] : null;
}
this.requestUpdate();
} catch (error) {
console.error('[SettingsView] Failed to refresh presets:', error);
}
};
ipcRenderer.on('user-state-changed', this._userStateListener);
ipcRenderer.on('settings-updated', this._settingsUpdatedListener);
ipcRenderer.on('presets-updated', this._presetsUpdatedListener);
}
cleanupIpcListeners() {
if (!window.require) return;
const { ipcRenderer } = window.require('electron');
if (this._userStateListener) {
ipcRenderer.removeListener('user-state-changed', this._userStateListener);
}
if (this._settingsUpdatedListener) {
ipcRenderer.removeListener('settings-updated', this._settingsUpdatedListener);
}
if (this._presetsUpdatedListener) {
ipcRenderer.removeListener('presets-updated', this._presetsUpdatedListener);
}
}
setupWindowResize() {
this.resizeHandler = () => {
this.requestUpdate();
this.updateScrollHeight();
};
window.addEventListener('resize', this.resizeHandler);
// Initial setup
setTimeout(() => this.updateScrollHeight(), 100);
}
cleanupWindowResize() {
if (this.resizeHandler) {
window.removeEventListener('resize', this.resizeHandler);
}
}
updateScrollHeight() {
const maxHeight = window.innerHeight;
this.style.maxHeight = `${maxHeight}px`;
const container = this.shadowRoot?.querySelector('.settings-container');
if (container) {
container.style.maxHeight = `${maxHeight}px`;
}
}
handleMouseEnter = () => {
if (window.require) {
const { ipcRenderer } = window.require('electron');
ipcRenderer.send('cancel-hide-window', 'settings');
}
}
handleMouseLeave = () => {
if (window.require) {
const { ipcRenderer } = window.require('electron');
ipcRenderer.send('hide-window', 'settings');
}
}
getMainShortcuts() {
return [
{ name: 'Show / Hide', key: '\\' },
{ name: 'Ask Anything', key: '↵' },
{ name: 'Scroll AI Response', key: '↕' }
];
}
togglePresets() {
this.showPresets = !this.showPresets;
}
async handlePresetSelect(preset) {
this.selectedPreset = preset;
// Here you could implement preset application logic
console.log('Selected preset:', preset);
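// e.g. (hypothetical channel, not implemented in this commit):
//   await window.require('electron').ipcRenderer.invoke('settings:applyPreset', preset.id);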
}
handleMoveLeft() {
console.log('Move Left clicked');
if (window.require) {
const { ipcRenderer } = window.require('electron');
ipcRenderer.invoke('move-window-step', 'left');
}
}
handleMoveRight() {
console.log('Move Right clicked');
if (window.require) {
const { ipcRenderer } = window.require('electron');
ipcRenderer.invoke('move-window-step', 'right');
}
}
async handlePersonalize() {
console.log('Personalize clicked');
if (window.require) {
const { ipcRenderer } = window.require('electron');
try {
await ipcRenderer.invoke('open-login-page');
} catch (error) {
console.error('Failed to open personalize page:', error);
}
}
}
async handleToggleInvisibility() {
console.log('Toggle Invisibility clicked');
if (window.require) {
const { ipcRenderer } = window.require('electron');
this.isContentProtectionOn = await ipcRenderer.invoke('toggle-content-protection');
this.requestUpdate();
}
}
async handleSaveApiKey() {
const input = this.shadowRoot.getElementById('api-key-input');
const newApiKey = input?.value.trim();
if (!newApiKey) return;
if (window.require) {
const { ipcRenderer } = window.require('electron');
try {
const result = await ipcRenderer.invoke('settings:saveApiKey', newApiKey);
if (result.success) {
console.log('API Key saved successfully via IPC.');
this.apiKey = newApiKey;
this.requestUpdate();
} else {
console.error('Failed to save API Key via IPC:', result.error);
}
} catch(e) {
console.error('Error invoking settings:saveApiKey IPC:', e);
}
}
}
async handleClearApiKey() {
console.log('Clear API Key clicked');
if (window.require) {
const { ipcRenderer } = window.require('electron');
await ipcRenderer.invoke('settings:removeApiKey');
this.apiKey = null;
this.requestUpdate();
}
}
handleQuit() {
console.log('Quit clicked');
if (window.require) {
const { ipcRenderer } = window.require('electron');
ipcRenderer.invoke('quit-application');
}
}
handleFirebaseLogout() {
console.log('Firebase Logout clicked');
if (window.require) {
const { ipcRenderer } = window.require('electron');
ipcRenderer.invoke('firebase-logout');
}
}
render() {
if (this.isLoading) {
return html`
<div class="settings-container">
<div class="loading-state">
<div class="loading-spinner"></div>
<span>Loading...</span>
</div>
</div>
`;
}
const loggedIn = !!this.firebaseUser;
return html`
<div class="settings-container">
<div class="header-section">
<div>
<h1 class="app-title">Pickle Glass</h1>
<div class="account-info">
${this.firebaseUser
? html`Account: ${this.firebaseUser.email || 'Logged In'}`
: this.apiKey && this.apiKey.length > 10
? html`API Key: ${this.apiKey.substring(0, 6)}...${this.apiKey.substring(this.apiKey.length - 6)}`
: `Account: Not Logged In`
}
</div>
</div>
<div class="invisibility-icon ${this.isContentProtectionOn ? 'visible' : ''}" title="Invisibility is On">
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M9.785 7.41787C8.7 7.41787 7.79 8.19371 7.55667 9.22621C7.0025 8.98704 6.495 9.05121 6.11 9.22037C5.87083 8.18204 4.96083 7.41787 3.88167 7.41787C2.61583 7.41787 1.58333 8.46204 1.58333 9.75121C1.58333 11.0404 2.61583 12.0845 3.88167 12.0845C5.08333 12.0845 6.06333 11.1395 6.15667 9.93787C6.355 9.79787 6.87417 9.53537 7.51 9.94954C7.615 11.1454 8.58333 12.0845 9.785 12.0845C11.0508 12.0845 12.0833 11.0404 12.0833 9.75121C12.0833 8.46204 11.0508 7.41787 9.785 7.41787ZM3.88167 11.4195C2.97167 11.4195 2.2425 10.6729 2.2425 9.75121C2.2425 8.82954 2.9775 8.08287 3.88167 8.08287C4.79167 8.08287 5.52083 8.82954 5.52083 9.75121C5.52083 10.6729 4.79167 11.4195 3.88167 11.4195ZM9.785 11.4195C8.875 11.4195 8.14583 10.6729 8.14583 9.75121C8.14583 8.82954 8.875 8.08287 9.785 8.08287C10.695 8.08287 11.43 8.82954 11.43 9.75121C11.43 10.6729 10.6892 11.4195 9.785 11.4195ZM12.6667 5.95954H1V6.83454H12.6667V5.95954ZM8.8925 1.36871C8.76417 1.08287 8.4375 0.931207 8.12833 1.03037L6.83333 1.46204L5.5325 1.03037L5.50333 1.02454C5.19417 0.93704 4.8675 1.10037 4.75083 1.39787L3.33333 5.08454H10.3333L8.91 1.39787L8.8925 1.36871Z" fill="white"/>
</svg>
</div>
</div>
<div class="api-key-section">
<input
type="password"
id="api-key-input"
placeholder="Enter API Key"
.value=${this.apiKey || ''}
?disabled=${loggedIn}
>
<button class="settings-button full-width" @click=${this.handleSaveApiKey} ?disabled=${loggedIn}>
Save API Key
</button>
</div>
<div class="shortcuts-section">
${this.getMainShortcuts().map(shortcut => html`
<div class="shortcut-item">
<span class="shortcut-name">${shortcut.name}</span>
<div class="shortcut-keys">
<span class="cmd-key"></span>
<span class="shortcut-key">${shortcut.key}</span>
</div>
</div>
`)}
</div>
<!-- Preset Management Section -->
<div class="preset-section">
<div class="preset-header">
<span class="preset-title">
My Presets
<span class="preset-count">(${this.presets.filter(p => p.is_default === 0).length})</span>
</span>
<span class="preset-toggle" @click=${this.togglePresets}>
${this.showPresets ? '▼' : '▶'}
</span>
</div>
<div class="preset-list ${this.showPresets ? '' : 'hidden'}">
${this.presets.filter(p => p.is_default === 0).length === 0 ? html`
<div class="no-presets-message">
No custom presets yet.<br>
<span class="web-link" @click=${this.handlePersonalize}>
Create your first preset
</span>
</div>
` : this.presets.filter(p => p.is_default === 0).map(preset => html`
<div class="preset-item ${this.selectedPreset?.id === preset.id ? 'selected' : ''}"
@click=${() => this.handlePresetSelect(preset)}>
<span class="preset-name">${preset.title}</span>
${this.selectedPreset?.id === preset.id ? html`<span class="preset-status">Selected</span>` : ''}
</div>
`)}
</div>
</div>
<div class="buttons-section">
<button class="settings-button full-width" @click=${this.handlePersonalize}>
<span>Personalize / Meeting Notes</span>
</button>
<div class="move-buttons">
<button class="settings-button half-width" @click=${this.handleMoveLeft}>
<span>← Move</span>
</button>
<button class="settings-button half-width" @click=${this.handleMoveRight}>
<span>Move →</span>
</button>
</div>
<button class="settings-button full-width" @click=${this.handleToggleInvisibility}>
<span>${this.isContentProtectionOn ? 'Disable Invisibility' : 'Enable Invisibility'}</span>
</button>
<div class="bottom-buttons">
${this.firebaseUser
? html`
<button class="settings-button half-width danger" @click=${this.handleFirebaseLogout}>
<span>Logout</span>
</button>
`
: html`
<button class="settings-button half-width danger" @click=${this.handleClearApiKey}>
<span>Clear API Key</span>
</button>
`
}
<button class="settings-button half-width danger" @click=${this.handleQuit}>
<span>Quit</span>
</button>
</div>
</div>
</div>
`;
}
}
customElements.define('settings-view', SettingsView);
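For reference, a minimal sketch of mounting the element from a renderer page (the host script and relative import path are assumptions, not part of this commit):

// Hypothetical host script: importing the module runs customElements.define(),
// and connectedCallback() wires the IPC and resize listeners once attached.
import './features/settings/SettingsView.js'; // assumed path
const view = document.createElement('settings-view');
document.body.appendChild(view);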

View File

@ -13,8 +13,9 @@ function getRepository() {
// Directly export functions for ease of use, decided by the strategy
module.exports = {
addTranscript: (...args) => getRepository().addTranscript(...args),
saveSummary: (...args) => getRepository().saveSummary(...args),
getAllTranscriptsBySessionId: (...args) => getRepository().getAllTranscriptsBySessionId(...args),
getSummaryBySessionId: (...args) => getRepository().getSummaryBySessionId(...args),
getPresets: (...args) => getRepository().getPresets(...args),
getPresetTemplates: (...args) => getRepository().getPresetTemplates(...args),
createPreset: (...args) => getRepository().createPreset(...args),
updatePreset: (...args) => getRepository().updatePreset(...args),
deletePreset: (...args) => getRepository().deletePreset(...args),
};

View File

@ -0,0 +1,109 @@
const sqliteClient = require('../../../common/services/sqliteClient');
function getPresets(uid) {
const db = sqliteClient.getDb();
return new Promise((resolve, reject) => {
const query = `
SELECT * FROM prompt_presets
WHERE uid = ? OR is_default = 1
ORDER BY is_default DESC, title ASC
`;
db.all(query, [uid], (err, rows) => {
if (err) {
console.error('SQLite: Failed to get presets:', err);
reject(err);
} else {
resolve(rows || []);
}
});
});
}
function getPresetTemplates() {
const db = sqliteClient.getDb();
return new Promise((resolve, reject) => {
const query = `
SELECT * FROM prompt_presets
WHERE is_default = 1
ORDER BY title ASC
`;
db.all(query, [], (err, rows) => {
if (err) {
console.error('SQLite: Failed to get preset templates:', err);
reject(err);
} else {
resolve(rows || []);
}
});
});
}
function createPreset({ uid, title, prompt }) {
const db = sqliteClient.getDb();
return new Promise((resolve, reject) => {
const id = require('crypto').randomUUID();
const now = Math.floor(Date.now() / 1000);
const query = `
INSERT INTO prompt_presets (id, uid, title, prompt, is_default, created_at, sync_state)
VALUES (?, ?, ?, ?, 0, ?, 'dirty')
`;
db.run(query, [id, uid, title, prompt, now], function(err) {
if (err) {
console.error('SQLite: Failed to create preset:', err);
reject(err);
} else {
resolve({ id });
}
});
});
}
function updatePreset(id, { title, prompt }, uid) {
const db = sqliteClient.getDb();
return new Promise((resolve, reject) => {
const now = Math.floor(Date.now() / 1000);
const query = `
UPDATE prompt_presets
SET title = ?, prompt = ?, sync_state = 'dirty', updated_at = ?
WHERE id = ? AND uid = ? AND is_default = 0
`;
db.run(query, [title, prompt, now, id, uid], function(err) {
if (err) {
console.error('SQLite: Failed to update preset:', err);
reject(err);
} else if (this.changes === 0) {
reject(new Error('Preset not found, is default, or permission denied'));
} else {
resolve({ changes: this.changes });
}
});
});
}
function deletePreset(id, uid) {
const db = sqliteClient.getDb();
return new Promise((resolve, reject) => {
const query = `
DELETE FROM prompt_presets
WHERE id = ? AND uid = ? AND is_default = 0
`;
db.run(query, [id, uid], function(err) {
if (err) {
console.error('SQLite: Failed to delete preset:', err);
reject(err);
} else if (this.changes === 0) {
reject(new Error('Preset not found, is default, or permission denied'));
} else {
resolve({ changes: this.changes });
}
});
});
}
module.exports = {
getPresets,
getPresetTemplates,
createPreset,
updatePreset,
deletePreset
};
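A minimal smoke-test sketch for the promise-based repository above (the require path, uid, and titles are assumptions):

// Hypothetical usage, not part of this commit.
const presetRepo = require('./features/settings/repositories/preset'); // assumed path
async function demoPresets(uid) {
// createPreset resolves with the generated UUID
const { id } = await presetRepo.createPreset({ uid, title: 'Standup', prompt: 'Summarize blockers.' });
await presetRepo.updatePreset(id, { title: 'Daily Standup', prompt: 'Summarize blockers.' }, uid);
// getPresets returns defaults plus the user's own rows; keep only user presets
const userPresets = (await presetRepo.getPresets(uid)).filter(p => p.is_default === 0);
console.log(userPresets.map(p => p.title));
await presetRepo.deletePreset(id, uid);
}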

View File

@ -0,0 +1,462 @@
const { ipcMain, BrowserWindow } = require('electron');
const Store = require('electron-store');
const authService = require('../../common/services/authService');
const userRepository = require('../../common/repositories/user');
const settingsRepository = require('./repositories');
const { getStoredApiKey, getStoredProvider, windowPool } = require('../../electron/windowManager');
const store = new Store({
name: 'pickle-glass-settings',
defaults: {
users: {}
}
});
// Configuration constants
const NOTIFICATION_CONFIG = {
RELEVANT_WINDOW_TYPES: ['settings', 'main'],
DEBOUNCE_DELAY: 300, // prevent spam during bulk operations (ms)
MAX_RETRY_ATTEMPTS: 3, // reserved for a retry path (not used yet)
RETRY_BASE_DELAY: 1000, // exponential backoff base (ms); reserved (not used yet)
};
// Window-targeting system: routes events only to the windows that need them
class WindowNotificationManager {
constructor() {
this.pendingNotifications = new Map();
}
/**
* Send notifications only to relevant windows
* @param {string} event - Event name
* @param {*} data - Event data
* @param {object} options - Notification options
*/
notifyRelevantWindows(event, data = null, options = {}) {
const {
windowTypes = NOTIFICATION_CONFIG.RELEVANT_WINDOW_TYPES,
debounce = NOTIFICATION_CONFIG.DEBOUNCE_DELAY
} = options;
if (debounce > 0) {
this.debounceNotification(event, () => {
this.sendToTargetWindows(event, data, windowTypes);
}, debounce);
} else {
this.sendToTargetWindows(event, data, windowTypes);
}
}
sendToTargetWindows(event, data, windowTypes) {
const relevantWindows = this.getRelevantWindows(windowTypes);
if (relevantWindows.length === 0) {
console.log(`[WindowNotificationManager] No relevant windows found for event: ${event}`);
return;
}
console.log(`[WindowNotificationManager] Sending ${event} to ${relevantWindows.length} relevant windows`);
relevantWindows.forEach(win => {
try {
if (data) {
win.webContents.send(event, data);
} else {
win.webContents.send(event);
}
} catch (error) {
console.warn(`[WindowNotificationManager] Failed to send ${event} to window:`, error.message);
}
});
}
getRelevantWindows(windowTypes) {
const allWindows = BrowserWindow.getAllWindows();
const relevantWindows = [];
allWindows.forEach(win => {
if (win.isDestroyed()) return;
for (const [windowName, poolWindow] of windowPool || []) {
if (poolWindow === win && windowTypes.includes(windowName)) {
// The settings window receives updates even while hidden; other windows must be visible
if (windowName === 'settings' || win.isVisible()) {
relevantWindows.push(win);
}
break;
}
}
});
return relevantWindows;
}
debounceNotification(key, fn, delay) {
// Clear existing timeout
if (this.pendingNotifications.has(key)) {
clearTimeout(this.pendingNotifications.get(key));
}
// Set new timeout
const timeoutId = setTimeout(() => {
fn();
this.pendingNotifications.delete(key);
}, delay);
this.pendingNotifications.set(key, timeoutId);
}
cleanup() {
// Clear all pending notifications
this.pendingNotifications.forEach(timeoutId => clearTimeout(timeoutId));
this.pendingNotifications.clear();
}
}
// Global instance
const windowNotificationManager = new WindowNotificationManager();
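// Usage sketch (illustrative values): bursts of calls for the same event inside the
// 300 ms debounce window collapse into a single broadcast, carrying the last payload:
//
//   windowNotificationManager.notifyRelevantWindows('presets-updated', { action: 'created' });
//   windowNotificationManager.notifyRelevantWindows('presets-updated', { action: 'created' });
//   // -> one 'presets-updated' IPC message to the settings/main windows ~300 ms later
//
// Pass { debounce: 0 } in options to deliver immediately for one-off events.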
// Default keybinds configuration
const DEFAULT_KEYBINDS = {
mac: {
moveUp: 'Cmd+Up',
moveDown: 'Cmd+Down',
moveLeft: 'Cmd+Left',
moveRight: 'Cmd+Right',
toggleVisibility: 'Cmd+\\',
toggleClickThrough: 'Cmd+M',
nextStep: 'Cmd+Enter',
manualScreenshot: 'Cmd+Shift+S',
previousResponse: 'Cmd+[',
nextResponse: 'Cmd+]',
scrollUp: 'Cmd+Shift+Up',
scrollDown: 'Cmd+Shift+Down',
},
windows: {
moveUp: 'Ctrl+Up',
moveDown: 'Ctrl+Down',
moveLeft: 'Ctrl+Left',
moveRight: 'Ctrl+Right',
toggleVisibility: 'Ctrl+\\',
toggleClickThrough: 'Ctrl+M',
nextStep: 'Ctrl+Enter',
manualScreenshot: 'Ctrl+Shift+S',
previousResponse: 'Ctrl+[',
nextResponse: 'Ctrl+]',
scrollUp: 'Ctrl+Shift+Up',
scrollDown: 'Ctrl+Shift+Down',
}
};
// Service state
let currentSettings = null;
function getDefaultSettings() {
const isMac = process.platform === 'darwin';
return {
profile: 'school',
language: 'en',
screenshotInterval: '5000',
imageQuality: '0.8',
layoutMode: 'stacked',
keybinds: isMac ? DEFAULT_KEYBINDS.mac : DEFAULT_KEYBINDS.windows,
throttleTokens: 500,
maxTokens: 2000,
throttlePercent: 80,
googleSearchEnabled: false,
backgroundTransparency: 0.5,
fontSize: 14,
contentProtection: true
};
}
async function getSettings() {
try {
const uid = authService.getCurrentUserId();
const userSettingsKey = uid ? `users.${uid}` : 'users.default';
const defaultSettings = getDefaultSettings();
const savedSettings = store.get(userSettingsKey, {});
currentSettings = { ...defaultSettings, ...savedSettings };
return currentSettings;
} catch (error) {
console.error('[SettingsService] Error getting settings from store:', error);
return getDefaultSettings();
}
}
async function saveSettings(settings) {
try {
const uid = authService.getCurrentUserId();
const userSettingsKey = uid ? `users.${uid}` : 'users.default';
const currentSaved = store.get(userSettingsKey, {});
const newSettings = { ...currentSaved, ...settings };
store.set(userSettingsKey, newSettings);
currentSettings = newSettings;
// Use smart notification system
windowNotificationManager.notifyRelevantWindows('settings-updated', currentSettings);
return { success: true };
} catch (error) {
console.error('[SettingsService] Error saving settings to store:', error);
return { success: false, error: error.message };
}
}
async function getPresets() {
try {
const uid = authService.getCurrentUserId();
if (!uid) {
// Logged out users only see default presets
return await settingsRepository.getPresetTemplates();
}
const presets = await settingsRepository.getPresets(uid);
return presets;
} catch (error) {
console.error('[SettingsService] Error getting presets:', error);
return [];
}
}
async function getPresetTemplates() {
try {
const templates = await settingsRepository.getPresetTemplates();
return templates;
} catch (error) {
console.error('[SettingsService] Error getting preset templates:', error);
return [];
}
}
async function createPreset(title, prompt) {
try {
const uid = authService.getCurrentUserId();
if (!uid) {
throw new Error("User not logged in, cannot create preset.");
}
const result = await settingsRepository.createPreset({ uid, title, prompt });
windowNotificationManager.notifyRelevantWindows('presets-updated', {
action: 'created',
presetId: result.id,
title
});
return { success: true, id: result.id };
} catch (error) {
console.error('[SettingsService] Error creating preset:', error);
return { success: false, error: error.message };
}
}
async function updatePreset(id, title, prompt) {
try {
const uid = authService.getCurrentUserId();
if (!uid) {
throw new Error("User not logged in, cannot update preset.");
}
await settingsRepository.updatePreset(id, { title, prompt }, uid);
windowNotificationManager.notifyRelevantWindows('presets-updated', {
action: 'updated',
presetId: id,
title
});
return { success: true };
} catch (error) {
console.error('[SettingsService] Error updating preset:', error);
return { success: false, error: error.message };
}
}
async function deletePreset(id) {
try {
const uid = authService.getCurrentUserId();
if (!uid) {
throw new Error("User not logged in, cannot delete preset.");
}
await settingsRepository.deletePreset(id, uid);
windowNotificationManager.notifyRelevantWindows('presets-updated', {
action: 'deleted',
presetId: id
});
return { success: true };
} catch (error) {
console.error('[SettingsService] Error deleting preset:', error);
return { success: false, error: error.message };
}
}
async function saveApiKey(apiKey, provider = 'openai') {
try {
const uid = authService.getCurrentUserId();
if (!uid) {
// For non-logged-in users, save to the default local store (separate from the named settings store above)
const localStore = new Store();
localStore.set('apiKey', apiKey);
localStore.set('provider', provider);
// Notify windows
BrowserWindow.getAllWindows().forEach(win => {
if (!win.isDestroyed()) {
win.webContents.send('api-key-validated', apiKey);
}
});
return { success: true };
}
// For logged-in users, save to database
await userRepository.saveApiKey(apiKey, uid, provider);
// Notify windows
BrowserWindow.getAllWindows().forEach(win => {
if (!win.isDestroyed()) {
win.webContents.send('api-key-validated', apiKey);
}
});
return { success: true };
} catch (error) {
console.error('[SettingsService] Error saving API key:', error);
return { success: false, error: error.message };
}
}
async function removeApiKey() {
try {
const uid = authService.getCurrentUserId();
if (!uid) {
// For non-logged-in users, remove from the default local store
const localStore = new Store();
localStore.delete('apiKey');
localStore.delete('provider');
} else {
// For logged-in users, remove from database
await userRepository.saveApiKey(null, uid, null);
}
// Notify windows
BrowserWindow.getAllWindows().forEach(win => {
if (!win.isDestroyed()) {
win.webContents.send('api-key-removed');
}
});
return { success: true };
} catch (error) {
console.error('[SettingsService] Error removing API key:', error);
return { success: false, error: error.message };
}
}
async function updateContentProtection(enabled) {
try {
const settings = await getSettings();
settings.contentProtection = enabled;
// Update content protection on the main window
const mainWindow = windowPool.get('main');
if (mainWindow && !mainWindow.isDestroyed()) {
mainWindow.setContentProtection(enabled);
}
return await saveSettings(settings);
} catch (error) {
console.error('[SettingsService] Error updating content protection:', error);
return { success: false, error: error.message };
}
}
function initialize() {
// cleanup
windowNotificationManager.cleanup();
// IPC handlers for settings
ipcMain.handle('settings:getSettings', async () => {
return await getSettings();
});
ipcMain.handle('settings:saveSettings', async (event, settings) => {
return await saveSettings(settings);
});
// IPC handlers for presets
ipcMain.handle('settings:getPresets', async () => {
return await getPresets();
});
ipcMain.handle('settings:getPresetTemplates', async () => {
return await getPresetTemplates();
});
ipcMain.handle('settings:createPreset', async (event, title, prompt) => {
return await createPreset(title, prompt);
});
ipcMain.handle('settings:updatePreset', async (event, id, title, prompt) => {
return await updatePreset(id, title, prompt);
});
ipcMain.handle('settings:deletePreset', async (event, id) => {
return await deletePreset(id);
});
ipcMain.handle('settings:saveApiKey', async (event, apiKey, provider) => {
return await saveApiKey(apiKey, provider);
});
ipcMain.handle('settings:removeApiKey', async () => {
return await removeApiKey();
});
ipcMain.handle('settings:updateContentProtection', async (event, enabled) => {
return await updateContentProtection(enabled);
});
console.log('[SettingsService] Initialized and ready.');
}
// Cleanup function
function cleanup() {
windowNotificationManager.cleanup();
console.log('[SettingsService] Cleaned up resources.');
}
function notifyPresetUpdate(action, presetId, title = null) {
const data = { action, presetId };
if (title) data.title = title;
windowNotificationManager.notifyRelevantWindows('presets-updated', data);
}
module.exports = {
initialize,
cleanup,
notifyPresetUpdate,
getSettings,
saveSettings,
getPresets,
getPresetTemplates,
createPreset,
updatePreset,
deletePreset,
saveApiKey,
removeApiKey,
updateContentProtection,
};
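A renderer-side sketch of driving this service through the IPC channels registered in initialize() (assumes a window with nodeIntegration, as elsewhere in this commit):

// Hypothetical renderer code, not part of this commit.
async function demoSettingsIpc() {
const { ipcRenderer } = window.require('electron');
const settings = await ipcRenderer.invoke('settings:getSettings');
// saveSettings merges the patch into the stored settings for the current user
await ipcRenderer.invoke('settings:saveSettings', { ...settings, fontSize: 16 });
// Relevant windows then receive 'settings-updated' with the merged result.
const presets = await ipcRenderer.invoke('settings:getPresets');
console.log(presets.length);
}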

View File

@ -13,7 +13,7 @@ if (require('electron-squirrel-startup')) {
const { app, BrowserWindow, shell, ipcMain, dialog } = require('electron');
const { createWindows } = require('./electron/windowManager.js');
const { setupLiveSummaryIpcHandlers, stopMacOSAudioCapture } = require('./features/listen/liveSummaryService.js');
const ListenService = require('./features/listen/listenService');
const { initializeFirebase } = require('./common/services/firebaseClient');
const databaseInitializer = require('./common/services/databaseInitializer');
const authService = require('./common/services/authService');
@ -24,12 +24,15 @@ const fetch = require('node-fetch');
const { autoUpdater } = require('electron-updater');
const { EventEmitter } = require('events');
const askService = require('./features/ask/askService');
const settingsService = require('./features/settings/settingsService');
const sessionRepository = require('./common/repositories/session');
const eventBridge = new EventEmitter();
let WEB_PORT = 3000;
const openaiSessionRef = { current: null };
const listenService = new ListenService();
// Make listenService globally accessible so other modules (e.g., windowManager, askService) can reuse the same instance
global.listenService = listenService;
let deeplink = null; // Initialize as null
let pendingDeepLinkUrl = null; // Store any deep link that arrives before initialization
@ -106,8 +109,9 @@ app.whenReady().then(async () => {
sessionRepository.endAllActiveSessions();
authService.initialize();
setupLiveSummaryIpcHandlers(openaiSessionRef);
listenService.setupIpcHandlers();
askService.initialize();
settingsService.initialize();
setupGeneralIpcHandlers();
})
.catch(err => {
@ -123,7 +127,7 @@ app.whenReady().then(async () => {
});
app.on('window-all-closed', () => {
stopMacOSAudioCapture();
listenService.stopMacOSAudioCapture();
if (process.platform !== 'darwin') {
app.quit();
}
@ -131,7 +135,7 @@ app.on('window-all-closed', () => {
app.on('before-quit', async () => {
console.log('[Shutdown] App is about to quit.');
stopMacOSAudioCapture();
listenService.stopMacOSAudioCapture();
await sessionRepository.endAllActiveSessions();
databaseInitializer.close();
});
@ -210,7 +214,8 @@ function setupGeneralIpcHandlers() {
function setupWebDataHandlers() {
const sessionRepository = require('./common/repositories/session');
const listenRepository = require('./features/listen/repositories');
const sttRepository = require('./features/listen/stt/repositories');
const summaryRepository = require('./features/listen/summary/repositories');
const askRepository = require('./features/ask/repositories');
const userRepository = require('./common/repositories/user');
const presetRepository = require('./common/repositories/preset');
@ -230,9 +235,9 @@ function setupWebDataHandlers() {
result = null;
break;
}
const transcripts = await listenRepository.getAllTranscriptsBySessionId(payload);
const transcripts = await sttRepository.getAllTranscriptsBySessionId(payload);
const ai_messages = await askRepository.getAllAiMessagesBySessionId(payload);
const summary = await listenRepository.getSummaryBySessionId(payload);
const summary = await summaryRepository.getSummaryBySessionId(payload);
result = { session, transcripts, ai_messages, summary };
break;
case 'delete-session':
@ -273,12 +278,15 @@ function setupWebDataHandlers() {
break;
case 'create-preset':
result = await presetRepository.create({ ...payload, uid: currentUserId });
settingsService.notifyPresetUpdate('created', result.id, payload.title);
break;
case 'update-preset':
result = await presetRepository.update(payload.id, payload.data, currentUserId);
settingsService.notifyPresetUpdate('updated', payload.id, payload.data.title);
break;
case 'delete-preset':
result = await presetRepository.delete(payload, currentUserId);
settingsService.notifyPresetUpdate('deleted', payload);
break;
// BATCH