Merge remote-tracking branch 'origin/main' into pr-77
commit 3ea5dd75b6

.github/workflows/build.yml (new file, 34 lines)
@@ -0,0 +1,34 @@
+name: Build & Verify
+
+on:
+  push:
+    branches: [ "main" ] # Runs on every push to main branch
+
+jobs:
+  build:
+    # Currently runs on macOS only, can add windows-latest later
+    runs-on: macos-latest
+
+    steps:
+      - name: 🚚 Checkout code
+        uses: actions/checkout@v4
+
+      - name: ⚙️ Setup Node.js environment
+        uses: actions/setup-node@v4
+        with:
+          node-version: '20.x' # Node.js version compatible with project
+          cache: 'npm' # npm dependency caching for speed improvement
+
+      - name: 📦 Install root dependencies
+        run: npm install
+
+      - name: 🌐 Install and build web (Renderer) part
+        # Move to pickleglass_web directory and run commands
+        working-directory: ./pickleglass_web
+        run: |
+          npm install
+          npm run build
+
+      - name: 🖥️ Build Electron app
+        # Run Electron build script from root directory
+        run: npm run build

.gitignore (1 changed line)
@@ -102,7 +102,6 @@ pickleglass_web/venv/
 node_modules/
 npm-debug.log
 yarn-error.log
-package-lock.json

 # Database
 data/*.db

electron-builder.yml
@@ -44,6 +44,8 @@ win:
     - target: portable
       arch: x64
   requestedExecutionLevel: asInvoker
+  # Disable code signing to avoid symbolic link issues on Windows
+  signAndEditExecutable: false

 # NSIS installer configuration for Windows
 nsis:

package-lock.json (generated, 12077 lines): file diff suppressed because it is too large

package.json (10 changed lines)
@@ -1,7 +1,7 @@
 {
   "name": "pickle-glass",
   "productName": "Glass",
-  "version": "0.2.0",
+  "version": "0.2.1",
   "description": "Cl*ely for Free",
   "main": "src/index.js",
   "scripts": {
@@ -9,12 +9,14 @@
     "start": "npm run build:renderer && electron-forge start",
     "package": "npm run build:renderer && electron-forge package",
     "make": "npm run build:renderer && electron-forge make",
-    "build": "npm run build:renderer && electron-builder --config electron-builder.yml --publish never",
+    "build": "npm run build:all && electron-builder --config electron-builder.yml --publish never",
-    "build:win": "npm run build:renderer && electron-builder --win --x64 --publish never",
+    "build:win": "npm run build:all && electron-builder --win --x64 --publish never",
-    "publish": "npm run build:renderer && electron-builder --config electron-builder.yml --publish always",
+    "publish": "npm run build:all && electron-builder --config electron-builder.yml --publish always",
     "lint": "eslint --ext .ts,.tsx,.js .",
     "postinstall": "electron-builder install-app-deps",
     "build:renderer": "node build.js",
+    "build:web": "cd pickleglass_web && npm run build && cd ..",
+    "build:all": "npm run build:renderer && npm run build:web",
     "watch:renderer": "node build.js --watch"
   },
   "keywords": [

pickleglass_web/package-lock.json (generated, 6976 lines): file diff suppressed because it is too large

src/features/listen/listenService.js
@@ -219,6 +219,20 @@ class ListenService {
             }
         });

+        ipcMain.handle('send-system-audio-content', async (event, { data, mimeType }) => {
+            try {
+                await this.sttService.sendSystemAudioContent(data, mimeType);
+
+                // Send system audio data back to renderer for AEC reference (like macOS does)
+                this.sendToRenderer('system-audio-data', { data });
+
+                return { success: true };
+            } catch (error) {
+                console.error('Error sending system audio:', error);
+                return { success: false, error: error.message };
+            }
+        });
+
         ipcMain.handle('start-macos-audio', async () => {
             if (process.platform !== 'darwin') {
                 return { success: false, error: 'macOS audio capture only available on macOS' };
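
The handler above forwards each system-audio chunk to the STT service and then echoes the raw data back to the renderer as a 'system-audio-data' event so it can serve as an AEC reference, mirroring the macOS path. The renderer-side listener is not part of this commit; the sketch below only illustrates how such a consumer might keep a short rolling reference buffer (the listener, buffer name, and retention limit are assumptions, not code from the diff).

// Hypothetical renderer-side consumer of the 'system-audio-data' event above.
const { ipcRenderer } = require('electron');

const aecReferenceChunks = []; // rolling buffer of base64 PCM chunks (assumed format)

ipcRenderer.on('system-audio-data', (_event, { data }) => {
    aecReferenceChunks.push(data);
    if (aecReferenceChunks.length > 50) {
        aecReferenceChunks.shift(); // keep only the most recent chunks (illustrative cap)
    }
});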

@@ -15,6 +15,8 @@ let micMediaStream = null;
 let screenshotInterval = null;
 let audioContext = null;
 let audioProcessor = null;
+let systemAudioContext = null;
+let systemAudioProcessor = null;
 let currentImageQuality = 'medium';
 let lastScreenshotBase64 = null;

@@ -345,6 +347,7 @@ function setupMicProcessing(micStream) {
     micProcessor.connect(micAudioContext.destination);

     audioProcessor = micProcessor;
+    return { context: micAudioContext, processor: micProcessor };
 }

 function setupLinuxMicProcessing(micStream) {
|
|||||||
audioProcessor = micProcessor;
|
audioProcessor = micProcessor;
|
||||||
}
|
}
|
||||||
|
|
||||||
function setupWindowsLoopbackProcessing() {
|
function setupSystemAudioProcessing(systemStream) {
|
||||||
// Setup audio processing for Windows loopback audio only
|
const systemAudioContext = new AudioContext({ sampleRate: SAMPLE_RATE });
|
||||||
audioContext = new AudioContext({ sampleRate: SAMPLE_RATE });
|
const systemSource = systemAudioContext.createMediaStreamSource(systemStream);
|
||||||
const source = audioContext.createMediaStreamSource(mediaStream);
|
const systemProcessor = systemAudioContext.createScriptProcessor(BUFFER_SIZE, 1, 1);
|
||||||
audioProcessor = audioContext.createScriptProcessor(BUFFER_SIZE, 1, 1);
|
|
||||||
|
|
||||||
let audioBuffer = [];
|
let audioBuffer = [];
|
||||||
const samplesPerChunk = SAMPLE_RATE * AUDIO_CHUNK_DURATION;
|
const samplesPerChunk = SAMPLE_RATE * AUDIO_CHUNK_DURATION;
|
||||||
|
|
||||||
audioProcessor.onaudioprocess = async e => {
|
systemProcessor.onaudioprocess = async e => {
|
||||||
const inputData = e.inputBuffer.getChannelData(0);
|
const inputData = e.inputBuffer.getChannelData(0);
|
||||||
|
if (!inputData || inputData.length === 0) return;
|
||||||
|
|
||||||
audioBuffer.push(...inputData);
|
audioBuffer.push(...inputData);
|
||||||
|
|
||||||
// Process audio in chunks
|
|
||||||
while (audioBuffer.length >= samplesPerChunk) {
|
while (audioBuffer.length >= samplesPerChunk) {
|
||||||
const chunk = audioBuffer.splice(0, samplesPerChunk);
|
const chunk = audioBuffer.splice(0, samplesPerChunk);
|
||||||
const pcmData16 = convertFloat32ToInt16(chunk);
|
const pcmData16 = convertFloat32ToInt16(chunk);
|
||||||
const base64Data = arrayBufferToBase64(pcmData16.buffer);
|
const base64Data = arrayBufferToBase64(pcmData16.buffer);
|
||||||
|
|
||||||
await ipcRenderer.invoke('send-audio-content', {
|
try {
|
||||||
|
await ipcRenderer.invoke('send-system-audio-content', {
|
||||||
data: base64Data,
|
data: base64Data,
|
||||||
mimeType: 'audio/pcm;rate=24000',
|
mimeType: 'audio/pcm;rate=24000',
|
||||||
});
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Failed to send system audio:', error);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
source.connect(audioProcessor);
|
systemSource.connect(systemProcessor);
|
||||||
audioProcessor.connect(audioContext.destination);
|
systemProcessor.connect(systemAudioContext.destination);
|
||||||
|
|
||||||
|
return { context: systemAudioContext, processor: systemProcessor };
|
||||||
}
|
}
|
||||||
|
|
||||||
// ---------------------------
|
// ---------------------------
|
||||||
@ -534,7 +543,9 @@ async function startCapture(screenshotIntervalSeconds = 5, imageQuality = 'mediu
|
|||||||
});
|
});
|
||||||
|
|
||||||
console.log('macOS microphone capture started');
|
console.log('macOS microphone capture started');
|
||||||
setupMicProcessing(micMediaStream);
|
const { context, processor } = setupMicProcessing(micMediaStream);
|
||||||
|
audioContext = context;
|
||||||
|
audioProcessor = processor;
|
||||||
} catch (micErr) {
|
} catch (micErr) {
|
||||||
console.warn('Failed to get microphone on macOS:', micErr);
|
console.warn('Failed to get microphone on macOS:', micErr);
|
||||||
}
|
}
|
||||||
@@ -577,14 +588,24 @@ async function startCapture(screenshotIntervalSeconds = 5, imageQuality = 'mediu

            console.log('Linux screen capture started');
        } else {
-           // Windows - use display media for audio, main process for screenshots
+           // Windows - capture mic and system audio separately using native loopback
+           console.log('Starting Windows capture with native loopback audio...');
+
+           // Start screen capture in main process for screenshots
            const screenResult = await ipcRenderer.invoke('start-screen-capture');
            if (!screenResult.success) {
                throw new Error('Failed to start screen capture: ' + screenResult.error);
            }

-           mediaStream = await navigator.mediaDevices.getDisplayMedia({
-               video: false, // We don't need video in renderer
+           // Ensure STT sessions are initialized before starting audio capture
+           const sessionActive = await ipcRenderer.invoke('is-session-active');
+           if (!sessionActive) {
+               throw new Error('STT sessions not initialized - please wait for initialization to complete');
+           }
+
+           // 1. Get user's microphone
+           try {
+               micMediaStream = await navigator.mediaDevices.getUserMedia({
                audio: {
                    sampleRate: SAMPLE_RATE,
                    channelCount: 1,
@@ -592,12 +613,37 @@ async function startCapture(screenshotIntervalSeconds = 5, imageQuality = 'mediu
                    noiseSuppression: true,
                    autoGainControl: true,
                },
+               video: false,
            });
-
-           console.log('Windows capture started with loopback audio');
-
-           // Setup audio processing for Windows loopback audio only
-           setupWindowsLoopbackProcessing();
+               console.log('Windows microphone capture started');
+               const { context, processor } = setupMicProcessing(micMediaStream);
+               audioContext = context;
+               audioProcessor = processor;
+           } catch (micErr) {
+               console.warn('Could not get microphone access on Windows:', micErr);
+           }
+
+           // 2. Get system audio using native Electron loopback
+           try {
+               mediaStream = await navigator.mediaDevices.getDisplayMedia({
+                   video: true,
+                   audio: true // This will now use native loopback from our handler
+               });
+
+               // Verify we got audio tracks
+               const audioTracks = mediaStream.getAudioTracks();
+               if (audioTracks.length === 0) {
+                   throw new Error('No audio track in native loopback stream');
+               }
+
+               console.log('Windows native loopback audio capture started');
+               const { context, processor } = setupSystemAudioProcessing(mediaStream);
+               systemAudioContext = context;
+               systemAudioProcessor = processor;
+           } catch (sysAudioErr) {
+               console.error('Failed to start Windows native loopback audio:', sysAudioErr);
+               // Continue without system audio
+           }
        }

        // Start capturing screenshots - check if manual mode
|
|||||||
screenshotInterval = null;
|
screenshotInterval = null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Clean up microphone resources
|
||||||
if (audioProcessor) {
|
if (audioProcessor) {
|
||||||
audioProcessor.disconnect();
|
audioProcessor.disconnect();
|
||||||
audioProcessor = null;
|
audioProcessor = null;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (audioContext) {
|
if (audioContext) {
|
||||||
audioContext.close();
|
audioContext.close();
|
||||||
audioContext = null;
|
audioContext = null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Clean up system audio resources
|
||||||
|
if (systemAudioProcessor) {
|
||||||
|
systemAudioProcessor.disconnect();
|
||||||
|
systemAudioProcessor = null;
|
||||||
|
}
|
||||||
|
if (systemAudioContext) {
|
||||||
|
systemAudioContext.close();
|
||||||
|
systemAudioContext = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stop and release media stream tracks
|
||||||
if (mediaStream) {
|
if (mediaStream) {
|
||||||
mediaStream.getTracks().forEach(track => track.stop());
|
mediaStream.getTracks().forEach(track => track.stop());
|
||||||
mediaStream = null;
|
mediaStream = null;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (micMediaStream) {
|
if (micMediaStream) {
|
||||||
micMediaStream.getTracks().forEach(t => t.stop());
|
micMediaStream.getTracks().forEach(t => t.stop());
|
||||||
micMediaStream = null;
|
micMediaStream = null;
|
||||||
|

@@ -319,6 +319,21 @@ class SttService {
        await this.mySttSession.sendRealtimeInput(payload);
    }

+   async sendSystemAudioContent(data, mimeType) {
+       const provider = await this.getAiProvider();
+       const isGemini = provider === 'gemini';
+
+       if (!this.theirSttSession) {
+           throw new Error('Their STT session not active');
+       }
+
+       const payload = isGemini
+           ? { audio: { data, mimeType: mimeType || 'audio/pcm;rate=24000' } }
+           : data;
+
+       await this.theirSttSession.sendRealtimeInput(payload);
+   }
+
    killExistingSystemAudioDump() {
        return new Promise(resolve => {
            console.log('Checking for existing SystemAudioDump processes...');

@@ -1,6 +1,8 @@
 const sqliteClient = require('../../../../common/services/sqliteClient');

 function saveSummary({ sessionId, tldr, text, bullet_json, action_json, model = 'gpt-4.1' }) {
+    return new Promise((resolve, reject) => {
+        try {
     const db = sqliteClient.getDb();
     const now = Math.floor(Date.now() / 1000);
     const query = `
@@ -16,13 +18,13 @@ function saveSummary({ sessionId, tldr, text, bullet_json, action_json, model =
             updated_at=excluded.updated_at
     `;

-    try {
         const result = db.prepare(query).run(sessionId, now, model, text, tldr, bullet_json, action_json, now);
-        return { changes: result.changes };
+            resolve({ changes: result.changes });
     } catch (err) {
         console.error('Error saving summary:', err);
-        throw err;
+            reject(err);
     }
+    });
 }

 function getSummaryBySessionId(sessionId) {
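
With saveSummary now wrapped in a Promise that resolves with the affected row count and rejects on database errors, callers can await it or attach a .catch handler. A minimal usage sketch under that assumption; the require path and helper below are illustrative, not taken from the diff.

// Illustrative caller of the promisified saveSummary; the require path is an assumption.
const summaryRepository = require('./repositories/sqlite.repository');

async function persistSummary({ sessionId, tldr, text, bullets, actions }) {
    try {
        const { changes } = await summaryRepository.saveSummary({
            sessionId,
            tldr,
            text,
            bullet_json: JSON.stringify(bullets),
            action_json: JSON.stringify(actions),
        });
        console.log(`[DB] Summary saved (${changes} row(s) affected)`);
    } catch (err) {
        console.error('[DB] Failed to save summary:', err);
    }
}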

@@ -170,6 +170,7 @@ Keep all points concise and build upon previous analysis if provided.`,
        const structuredData = this.parseResponseText(responseText, this.previousAnalysisResult);

        if (this.currentSessionId) {
+           try {
            summaryRepository.saveSummary({
                sessionId: this.currentSessionId,
                text: responseText,
@@ -177,7 +178,10 @@ Keep all points concise and build upon previous analysis if provided.`,
                bullet_json: JSON.stringify(structuredData.topic.bullets),
                action_json: JSON.stringify(structuredData.actions),
                model: modelInfo.model
-           }).catch(err => console.error('[DB] Failed to save summary:', err));
+               });
+           } catch (err) {
+               console.error('[DB] Failed to save summary:', err);
+           }
        }

        // Save analysis result

src/index.js (27 changed lines)
@@ -11,7 +11,7 @@ if (require('electron-squirrel-startup')) {
     process.exit(0);
 }

-const { app, BrowserWindow, shell, ipcMain, dialog } = require('electron');
+const { app, BrowserWindow, shell, ipcMain, dialog, desktopCapturer, session } = require('electron');
 const { createWindows } = require('./electron/windowManager.js');
 const ListenService = require('./features/listen/listenService');
 const { initializeFirebase } = require('./common/services/firebaseClient');
@@ -168,11 +168,22 @@ setupProtocolHandling();

 app.whenReady().then(async () => {

+    // Setup native loopback audio capture for Windows
+    session.defaultSession.setDisplayMediaRequestHandler((request, callback) => {
+        desktopCapturer.getSources({ types: ['screen'] }).then((sources) => {
+            // Grant access to the first screen found with loopback audio
+            callback({ video: sources[0], audio: 'loopback' });
+        }).catch((error) => {
+            console.error('Failed to get desktop capturer sources:', error);
+            callback({});
+        });
+    });
+
     // Initialize core services
     initializeFirebase();

-    databaseInitializer.initialize()
-        .then(() => {
+    try {
+        await databaseInitializer.initialize();
         console.log('>>> [index.js] Database initialized successfully');

         // Clean up zombie sessions from previous runs first
@@ -191,11 +202,21 @@ app.whenReady().then(async () => {
             console.error('>>> [index.js] Database initialization failed - some features may not work', err);
         });

+        // Start web server and create windows ONLY after all initializations are successful
         WEB_PORT = await startWebStack();
         console.log('Web front-end listening on', WEB_PORT);

         createWindows();

+    } catch (err) {
+        console.error('>>> [index.js] Database initialization failed - some features may not work', err);
+        // Optionally, show an error dialog to the user
+        dialog.showErrorBox(
+            'Application Error',
+            'A critical error occurred during startup. Some features might be disabled. Please restart the application.'
+        );
+    }
+
     initAutoUpdater();

     // Process any pending deep link after everything is initialized