Add OpenAI-compatible API support with configuration management and response handling

This commit is contained in:
Илья Глазунов 2026-02-14 20:18:02 +03:00
parent bfd76dc0c1
commit bd62cf5524
4 changed files with 340 additions and 19 deletions

View File

@ -414,6 +414,10 @@ export class MainView extends LitElement {
_geminiKey: { state: true }, _geminiKey: { state: true },
_groqKey: { state: true }, _groqKey: { state: true },
_openaiKey: { state: true }, _openaiKey: { state: true },
_openaiCompatibleApiKey: { state: true },
_openaiCompatibleBaseUrl: { state: true },
_openaiCompatibleModel: { state: true },
_responseProvider: { state: true },
_tokenError: { state: true }, _tokenError: { state: true },
_keyError: { state: true }, _keyError: { state: true },
// Local AI state // Local AI state
@ -437,6 +441,10 @@ export class MainView extends LitElement {
this._geminiKey = ''; this._geminiKey = '';
this._groqKey = ''; this._groqKey = '';
this._openaiKey = ''; this._openaiKey = '';
this._openaiCompatibleApiKey = '';
this._openaiCompatibleBaseUrl = '';
this._openaiCompatibleModel = '';
this._responseProvider = 'gemini';
this._tokenError = false; this._tokenError = false;
this._keyError = false; this._keyError = false;
this._showLocalHelp = false; this._showLocalHelp = false;
@ -468,6 +476,15 @@ export class MainView extends LitElement {
this._groqKey = await cheatingDaddy.storage.getGroqApiKey().catch(() => '') || ''; this._groqKey = await cheatingDaddy.storage.getGroqApiKey().catch(() => '') || '';
this._openaiKey = creds.openaiKey || ''; this._openaiKey = creds.openaiKey || '';
// Load OpenAI-compatible config
const openaiConfig = await cheatingDaddy.storage.getOpenAICompatibleConfig().catch(() => ({}));
this._openaiCompatibleApiKey = openaiConfig.apiKey || '';
this._openaiCompatibleBaseUrl = openaiConfig.baseUrl || '';
this._openaiCompatibleModel = openaiConfig.model || '';
// Load response provider preference
this._responseProvider = prefs.responseProvider || 'gemini';
// Load local AI settings // Load local AI settings
this._ollamaHost = prefs.ollamaHost || 'http://127.0.0.1:11434'; this._ollamaHost = prefs.ollamaHost || 'http://127.0.0.1:11434';
this._ollamaModel = prefs.ollamaModel || 'llama3.1'; this._ollamaModel = prefs.ollamaModel || 'llama3.1';
@ -631,6 +648,42 @@ export class MainView extends LitElement {
this.requestUpdate(); this.requestUpdate();
} }
async _saveOpenAICompatibleApiKey(val) {
this._openaiCompatibleApiKey = val;
await cheatingDaddy.storage.setOpenAICompatibleConfig(
val,
this._openaiCompatibleBaseUrl,
this._openaiCompatibleModel
);
this.requestUpdate();
}
async _saveOpenAICompatibleBaseUrl(val) {
this._openaiCompatibleBaseUrl = val;
await cheatingDaddy.storage.setOpenAICompatibleConfig(
this._openaiCompatibleApiKey,
val,
this._openaiCompatibleModel
);
this.requestUpdate();
}
async _saveOpenAICompatibleModel(val) {
this._openaiCompatibleModel = val;
await cheatingDaddy.storage.setOpenAICompatibleConfig(
this._openaiCompatibleApiKey,
this._openaiCompatibleBaseUrl,
val
);
this.requestUpdate();
}
async _saveResponseProvider(val) {
this._responseProvider = val;
await cheatingDaddy.storage.updatePreference('responseProvider', val);
this.requestUpdate();
}
async _saveOllamaHost(val) { async _saveOllamaHost(val) {
this._ollamaHost = val; this._ollamaHost = val;
await cheatingDaddy.storage.updatePreference('ollamaHost', val); await cheatingDaddy.storage.updatePreference('ollamaHost', val);
@ -715,29 +768,75 @@ export class MainView extends LitElement {
<label class="form-label">Gemini API Key</label> <label class="form-label">Gemini API Key</label>
<input <input
type="password" type="password"
placeholder="Required" placeholder="Required for transcription"
.value=${this._geminiKey} .value=${this._geminiKey}
@input=${e => this._saveGeminiKey(e.target.value)} @input=${e => this._saveGeminiKey(e.target.value)}
class=${this._keyError ? 'error' : ''} class=${this._keyError ? 'error' : ''}
/> />
<div class="form-hint"> <div class="form-hint">
<span class="link" @click=${() => this.onExternalLink('https://aistudio.google.com/apikey')}>Get Gemini key</span> <span class="link" @click=${() => this.onExternalLink('https://aistudio.google.com/apikey')}>Get Gemini key</span> - Always used for audio transcription
</div> </div>
</div> </div>
<div class="form-group"> <div class="form-group">
<label class="form-label">Groq API Key</label> <label class="form-label">Response Provider</label>
<input <select
type="password" .value=${this._responseProvider}
placeholder="Optional" @change=${e => this._saveResponseProvider(e.target.value)}
.value=${this._groqKey} >
@input=${e => this._saveGroqKey(e.target.value)} <option value="gemini" ?selected=${this._responseProvider === 'gemini'}>Gemini (default)</option>
/> <option value="groq" ?selected=${this._responseProvider === 'groq'}>Groq (fast responses)</option>
<option value="openai-compatible" ?selected=${this._responseProvider === 'openai-compatible'}>OpenAI-Compatible API</option>
</select>
<div class="form-hint"> <div class="form-hint">
<span class="link" @click=${() => this.onExternalLink('https://console.groq.com/keys')}>Get Groq key</span> Choose which API to use for generating responses
</div> </div>
</div> </div>
${this._responseProvider === 'groq' ? html`
<div class="form-group">
<label class="form-label">Groq API Key</label>
<input
type="password"
placeholder="Required for Groq"
.value=${this._groqKey}
@input=${e => this._saveGroqKey(e.target.value)}
/>
<div class="form-hint">
<span class="link" @click=${() => this.onExternalLink('https://console.groq.com/keys')}>Get Groq key</span>
</div>
</div>
` : ''}
${this._responseProvider === 'openai-compatible' ? html`
<div class="form-group">
<label class="form-label">OpenAI-Compatible API</label>
<div style="display: flex; flex-direction: column; gap: 8px;">
<input
type="password"
placeholder="API Key"
.value=${this._openaiCompatibleApiKey}
@input=${e => this._saveOpenAICompatibleApiKey(e.target.value)}
/>
<input
type="text"
placeholder="Base URL (e.g., https://openrouter.ai/api)"
.value=${this._openaiCompatibleBaseUrl}
@input=${e => this._saveOpenAICompatibleBaseUrl(e.target.value)}
/>
<input
type="text"
placeholder="Model name (e.g., anthropic/claude-3.5-sonnet)"
.value=${this._openaiCompatibleModel}
@input=${e => this._saveOpenAICompatibleModel(e.target.value)}
/>
</div>
<div class="form-hint">
Use OpenRouter, DeepSeek, Together AI, or any OpenAI-compatible API
</div>
</div>
` : ''}
${this._renderStartButton()} ${this._renderStartButton()}
`; `;
} }

View File

@ -13,7 +13,10 @@ const DEFAULT_CONFIG = {
const DEFAULT_CREDENTIALS = { const DEFAULT_CREDENTIALS = {
apiKey: '', apiKey: '',
groqApiKey: '' groqApiKey: '',
openaiCompatibleApiKey: '',
openaiCompatibleBaseUrl: '',
openaiCompatibleModel: ''
}; };
const DEFAULT_PREFERENCES = { const DEFAULT_PREFERENCES = {
@ -27,6 +30,7 @@ const DEFAULT_PREFERENCES = {
fontSize: 'medium', fontSize: 'medium',
backgroundTransparency: 0.8, backgroundTransparency: 0.8,
googleSearchEnabled: false, googleSearchEnabled: false,
responseProvider: 'gemini',
ollamaHost: 'http://127.0.0.1:11434', ollamaHost: 'http://127.0.0.1:11434',
ollamaModel: 'llama3.1', ollamaModel: 'llama3.1',
whisperModel: 'Xenova/whisper-small', whisperModel: 'Xenova/whisper-small',
@ -204,6 +208,23 @@ function setGroqApiKey(groqApiKey) {
return setCredentials({ groqApiKey }); return setCredentials({ groqApiKey });
} }
// Read the OpenAI-compatible provider settings out of stored credentials,
// normalizing any missing field to an empty string.
function getOpenAICompatibleConfig() {
    const {
        openaiCompatibleApiKey,
        openaiCompatibleBaseUrl,
        openaiCompatibleModel
    } = getCredentials();
    return {
        apiKey: openaiCompatibleApiKey || '',
        baseUrl: openaiCompatibleBaseUrl || '',
        model: openaiCompatibleModel || ''
    };
}
// Persist the OpenAI-compatible provider settings into stored credentials.
// Delegates to setCredentials, which merges these keys into the record.
function setOpenAICompatibleConfig(apiKey, baseUrl, model) {
    const credentialUpdate = {
        openaiCompatibleApiKey: apiKey,
        openaiCompatibleBaseUrl: baseUrl,
        openaiCompatibleModel: model
    };
    return setCredentials(credentialUpdate);
}
// ============ PREFERENCES ============ // ============ PREFERENCES ============
function getPreferences() { function getPreferences() {
@ -500,6 +521,8 @@ module.exports = {
setApiKey, setApiKey,
getGroqApiKey, getGroqApiKey,
setGroqApiKey, setGroqApiKey,
getOpenAICompatibleConfig,
setOpenAICompatibleConfig,
// Preferences // Preferences
getPreferences, getPreferences,

View File

@ -3,7 +3,7 @@ const { BrowserWindow, ipcMain } = require('electron');
const { spawn } = require('child_process'); const { spawn } = require('child_process');
const { saveDebugAudio } = require('../audioUtils'); const { saveDebugAudio } = require('../audioUtils');
const { getSystemPrompt } = require('./prompts'); const { getSystemPrompt } = require('./prompts');
const { getAvailableModel, incrementLimitCount, getApiKey, getGroqApiKey, incrementCharUsage, getModelForToday } = require('../storage'); const { getAvailableModel, incrementLimitCount, getApiKey, getGroqApiKey, getOpenAICompatibleConfig, incrementCharUsage, getModelForToday } = require('../storage');
// Lazy-loaded to avoid circular dependency (localai.js imports from gemini.js) // Lazy-loaded to avoid circular dependency (localai.js imports from gemini.js)
let _localai = null; let _localai = null;
@ -15,6 +15,9 @@ function getLocalAi() {
// Provider mode: 'byok' or 'local' // Provider mode: 'byok' or 'local'
let currentProviderMode = 'byok'; let currentProviderMode = 'byok';
// Response provider: 'gemini', 'groq', or 'openai-compatible'
let currentResponseProvider = 'gemini';
// Groq conversation history for context // Groq conversation history for context
let groqConversationHistory = []; let groqConversationHistory = [];
@ -205,6 +208,14 @@ function hasGroqKey() {
return key && key.trim() != '' return key && key.trim() != ''
} }
// helper to check if the OpenAI-compatible API has been fully configured
// (API key, base URL, and model are all non-blank).
// Returns a real boolean: the original `a && b && ...` chain could yield
// '' or undefined when a field was blank, which is falsy but surprising
// for a has* predicate and leaks credential-shaped values to callers.
function hasOpenAICompatibleConfig() {
    const { apiKey, baseUrl, model } = getOpenAICompatibleConfig();
    return [apiKey, baseUrl, model].every(v => typeof v === 'string' && v.trim() !== '');
}
function trimConversationHistoryForGemma(history, maxChars=42000) { function trimConversationHistoryForGemma(history, maxChars=42000) {
if(!history || history.length === 0) return []; if(!history || history.length === 0) return [];
let totalChars = 0; let totalChars = 0;
@ -344,6 +355,128 @@ async function sendToGroq(transcription) {
} }
} }
// Generate a streamed response for `transcription` via any OpenAI-compatible
// chat-completions endpoint (OpenRouter, DeepSeek, Together AI, ...), using
// the user's stored {apiKey, baseUrl, model} config. Streams partial text to
// the renderer and appends the final answer to the conversation history.
async function sendToOpenAICompatible(transcription) {
    const config = getOpenAICompatibleConfig();
    if (!config.apiKey || !config.baseUrl || !config.model) {
        console.log('OpenAI-compatible API not fully configured');
        return;
    }

    if (!transcription || transcription.trim() === '') {
        console.log('Empty transcription, skipping OpenAI-compatible API');
        return;
    }

    console.log(`Sending to OpenAI-compatible API (${config.model}):`, transcription.substring(0, 100) + '...');

    // NOTE(review): history is shared with the Groq path (groqConversationHistory)
    // so switching providers keeps conversational context — confirm intentional.
    groqConversationHistory.push({
        role: 'user',
        content: transcription.trim()
    });

    if (groqConversationHistory.length > 20) {
        groqConversationHistory = groqConversationHistory.slice(-20);
    }

    try {
        // Ensure baseUrl points at a chat-completions endpoint; append the
        // conventional /v1/chat/completions path when the user supplied only
        // a provider base URL.
        let apiUrl = config.baseUrl.trim();
        if (!apiUrl.includes('/chat/completions')) {
            apiUrl = apiUrl.replace(/\/$/, '');
            apiUrl = `${apiUrl}/v1/chat/completions`;
        }

        console.log(`Using OpenAI-compatible endpoint: ${apiUrl}`);

        const response = await fetch(apiUrl, {
            method: 'POST',
            headers: {
                'Authorization': `Bearer ${config.apiKey}`,
                'Content-Type': 'application/json'
            },
            body: JSON.stringify({
                model: config.model,
                messages: [
                    { role: 'system', content: currentSystemPrompt || 'You are a helpful assistant.' },
                    ...groqConversationHistory
                ],
                stream: true,
                temperature: 0.7,
                max_tokens: 2048
            })
        });

        if (!response.ok) {
            const errorText = await response.text();
            console.error('OpenAI-compatible API error:', response.status, errorText);
            sendToRenderer('update-status', `OpenAI API error: ${response.status}`);
            return;
        }

        const reader = response.body.getReader();
        const decoder = new TextDecoder();
        let fullText = '';
        let isFirst = true;
        // Buffer partial SSE lines: a network chunk can end mid-line, so only
        // complete lines are parsed and the remainder is carried into the next
        // read. (Previously each chunk was parsed in isolation, silently
        // dropping any "data: {...}" record split across two chunks.)
        let sseBuffer = '';

        // Parse one complete SSE line and stream any delta text to the renderer.
        const processLine = line => {
            if (!line.startsWith('data: ')) return;
            const data = line.slice(6);
            if (data === '[DONE]') return;
            try {
                const parsed = JSON.parse(data);
                const content = parsed.choices?.[0]?.delta?.content;
                if (content) {
                    fullText += content;
                    sendToRenderer(isFirst ? 'new-response' : 'update-response', fullText);
                    isFirst = false;
                }
            } catch (e) {
                // Malformed SSE payload from the server; skip this record.
            }
        };

        while (true) {
            const { done, value } = await reader.read();
            if (done) break;

            sseBuffer += decoder.decode(value, { stream: true });
            const lines = sseBuffer.split('\n');
            sseBuffer = lines.pop(); // last piece may be an incomplete line
            for (const line of lines) {
                if (line.trim() !== '') processLine(line);
            }
        }
        // Flush the decoder and any final unterminated line.
        sseBuffer += decoder.decode();
        if (sseBuffer.trim() !== '') processLine(sseBuffer);

        // Clean up <think> tags if present (for DeepSeek-style reasoning models)
        const cleanText = stripThinkingTags(fullText);
        if (cleanText !== fullText) {
            sendToRenderer('update-response', cleanText);
        }

        if (cleanText.trim()) {
            // Store the cleaned text so hidden reasoning doesn't pollute the
            // model's context window or the saved conversation (previously the
            // raw fullText — including <think> blocks — was stored).
            groqConversationHistory.push({
                role: 'assistant',
                content: cleanText.trim()
            });

            if (groqConversationHistory.length > 40) {
                groqConversationHistory = groqConversationHistory.slice(-40);
            }

            saveConversationTurn(transcription, cleanText);
        }

        console.log(`OpenAI-compatible API response completed (${config.model})`);
        sendToRenderer('update-status', 'Listening...');
    } catch (error) {
        console.error('Error calling OpenAI-compatible API:', error);
        sendToRenderer('update-status', 'OpenAI API error: ' + error.message);
    }
}
async function sendToGemma(transcription) { async function sendToGemma(transcription) {
const apiKey = getApiKey(); const apiKey = getApiKey();
if (!apiKey) { if (!apiKey) {
@ -443,6 +576,14 @@ async function initializeGeminiSession(apiKey, customPrompt = '', profile = 'int
reconnectAttempts = 0; reconnectAttempts = 0;
} }
// Load response provider preference — only on a fresh session; a reconnect
// skips this so the provider active mid-conversation is not switched.
if (!isReconnect) {
    // Lazy require of storage, consistent with this file's pattern of
    // deferring requires (see the localai lazy-load above) — presumably to
    // avoid circular-dependency issues; confirm before hoisting to top level.
    const { getPreferences } = require('../storage');
    const prefs = getPreferences();
    // Fall back to 'gemini' when the preference has never been set.
    currentResponseProvider = prefs.responseProvider || 'gemini';
    console.log('🔧 Response provider set to:', currentResponseProvider);
}
const client = new GoogleGenAI({ const client = new GoogleGenAI({
vertexai: false, vertexai: false,
apiKey: apiKey, apiKey: apiKey,
@ -488,17 +629,32 @@ async function initializeGeminiSession(apiKey, customPrompt = '', profile = 'int
// if (message.serverContent?.outputTranscription?.text) { ... } // if (message.serverContent?.outputTranscription?.text) { ... }
if (message.serverContent?.generationComplete) { if (message.serverContent?.generationComplete) {
console.log('Generation complete. Current transcription:', `"${currentTranscription}"`); console.log('Generation complete. Current transcription:', `"${currentTranscription}"`);
if (currentTranscription.trim() !== '') { if (currentTranscription.trim() !== '') {
console.log('Sending to', hasGroqKey() ? 'Groq' : 'Gemma'); // Use explicit user choice for response provider
if (hasGroqKey()) { if (currentResponseProvider === 'openai-compatible') {
sendToGroq(currentTranscription); if (hasOpenAICompatibleConfig()) {
console.log('📤 Sending to OpenAI-compatible API (user selected)');
sendToOpenAICompatible(currentTranscription);
} else {
console.log('⚠️ OpenAI-compatible selected but not configured, falling back to Gemini');
sendToGemma(currentTranscription);
}
} else if (currentResponseProvider === 'groq') {
if (hasGroqKey()) {
console.log('📤 Sending to Groq (user selected)');
sendToGroq(currentTranscription);
} else {
console.log('⚠️ Groq selected but not configured, falling back to Gemini');
sendToGemma(currentTranscription);
}
} else { } else {
console.log('📤 Sending to Gemini (user selected)');
sendToGemma(currentTranscription); sendToGemma(currentTranscription);
} }
currentTranscription = ''; currentTranscription = '';
} else { } else {
console.log('Transcription is empty, not sending to LLM'); console.log('⚠️ Transcription is empty, not sending to LLM');
} }
messageBuffer = ''; messageBuffer = '';
} }
@ -954,8 +1110,19 @@ function setupGeminiIpcHandlers(geminiSessionRef) {
try { try {
console.log('Sending text message:', text); console.log('Sending text message:', text);
if (hasGroqKey()) { // Use explicit user choice for response provider
sendToGroq(text.trim()); if (currentResponseProvider === 'openai-compatible') {
if (hasOpenAICompatibleConfig()) {
sendToOpenAICompatible(text.trim());
} else {
sendToGemma(text.trim());
}
} else if (currentResponseProvider === 'groq') {
if (hasGroqKey()) {
sendToGroq(text.trim());
} else {
sendToGemma(text.trim());
}
} else { } else {
sendToGemma(text.trim()); sendToGemma(text.trim());
} }
@ -1053,6 +1220,29 @@ function setupGeminiIpcHandlers(geminiSessionRef) {
return { success: false, error: error.message }; return { success: false, error: error.message };
} }
}); });
// OpenAI-compatible API configuration handlers
// Persists {apiKey, baseUrl, model} from the renderer into credential storage.
ipcMain.handle('set-openai-compatible-config', async (event, apiKey, baseUrl, model) => {
    try {
        const { setOpenAICompatibleConfig } = require('../storage');
        setOpenAICompatibleConfig(apiKey, baseUrl, model);
        // Log baseUrl/model only — never the API key. Coerce model first:
        // a null/undefined model previously crashed here on .substring().
        console.log('OpenAI-compatible config saved:', { baseUrl, model: String(model ?? '').substring(0, 30) });
        return { success: true };
    } catch (error) {
        console.error('Error setting OpenAI-compatible config:', error);
        return { success: false, error: error.message };
    }
});
// Returns the stored OpenAI-compatible settings to the renderer, wrapped in
// the { success, config | error } envelope used by the other handlers.
ipcMain.handle('get-openai-compatible-config', async event => {
    try {
        return { success: true, config: getOpenAICompatibleConfig() };
    } catch (error) {
        console.error('Error getting OpenAI-compatible config:', error);
        return { success: false, error: error.message };
    }
});
} }
module.exports = { module.exports = {
@ -1071,4 +1261,6 @@ module.exports = {
sendImageToGeminiHttp, sendImageToGeminiHttp,
setupGeminiIpcHandlers, setupGeminiIpcHandlers,
formatSpeakerResults, formatSpeakerResults,
hasOpenAICompatibleConfig,
sendToOpenAICompatible,
}; };

View File

@ -56,6 +56,13 @@ const storage = {
async setGroqApiKey(groqApiKey) { async setGroqApiKey(groqApiKey) {
return ipcRenderer.invoke('storage:set-groq-api-key', groqApiKey); return ipcRenderer.invoke('storage:set-groq-api-key', groqApiKey);
}, },
async getOpenAICompatibleConfig() {
const result = await ipcRenderer.invoke('get-openai-compatible-config');
return result.success ? result.config : { apiKey: '', baseUrl: '', model: '' };
},
// Persist OpenAI-compatible settings via the main process; resolves to the
// handler's { success, error? } result object.
async setOpenAICompatibleConfig(apiKey, baseUrl, model) {
    return ipcRenderer.invoke('set-openai-compatible-config', apiKey, baseUrl, model);
},
// Preferences // Preferences
async getPreferences() { async getPreferences() {