Feature: Anthropic API Support
parent d610ecb6f5
commit 82796737ef
@@ -107,12 +107,17 @@ function createLLM({ apiKey, model = "claude-3-5-sonnet-20241022", temperature =
           content.push({ type: "text", text: part.text })
         } else if (part.type === "image_url" && part.image_url) {
           // Convert base64 image to Anthropic format
-          const base64Data = part.image_url.url.split(",")[1]
+          const imageUrl = part.image_url.url
+          const [mimeInfo, base64Data] = imageUrl.split(",")
+
+          // Extract the actual MIME type from the data URL
+          const mimeType = mimeInfo.match(/data:([^;]+)/)?.[1] || "image/jpeg"
+
           content.push({
             type: "image",
             source: {
               type: "base64",
-              media_type: "image/png",
+              media_type: mimeType,
               data: base64Data,
             },
           })
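
Note (not part of the diff): the hunk above replaces the hardcoded media_type with one parsed out of the incoming data URL. A minimal standalone sketch of that parsing, with a hypothetical helper name and sample input, behaves as follows:

// Illustrative sketch only; toAnthropicImageBlock and the sample data URL
// are not from the commit.
function toAnthropicImageBlock(dataUrl) {
  const [mimeInfo, base64Data] = dataUrl.split(",")
  // Falls back to image/jpeg when the data URL carries no MIME prefix,
  // mirroring the fallback in the diff above.
  const mimeType = mimeInfo.match(/data:([^;]+)/)?.[1] || "image/jpeg"
  return {
    type: "image",
    source: { type: "base64", media_type: mimeType, data: base64Data },
  }
}

// A PNG data URL now yields media_type "image/png" instead of the old
// hardcoded value.
console.log(toAnthropicImageBlock("data:image/png;base64,iVBORw0KGgo="))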
@@ -185,12 +190,19 @@ function createStreamingLLM({
           content.push({ type: "text", text: part.text })
         } else if (part.type === "image_url" && part.image_url) {
           // Convert base64 image to Anthropic format
-          const base64Data = part.image_url.url.split(",")[1]
+          const imageUrl = part.image_url.url
+          const [mimeInfo, base64Data] = imageUrl.split(",")
+
+          // Extract the actual MIME type from the data URL
+          const mimeType = mimeInfo.match(/data:([^;]+)/)?.[1] || "image/jpeg"
+
+          console.log(`[Anthropic] Processing image with MIME type: ${mimeType}`)
+
           content.push({
             type: "image",
             source: {
               type: "base64",
-              media_type: "image/png",
+              media_type: mimeType,
               data: base64Data,
             },
           })
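
Note (not part of the diff): the streaming hunk mirrors the same parsing and only adds a log line. Neither hunk validates the parsed MIME type before sending it; the sketch below is one hypothetical guard, assuming the image formats Anthropic documents for base64 sources (JPEG, PNG, GIF, WebP).

// Hypothetical guard, not in the commit: reject MIME types that Anthropic
// image blocks do not accept, instead of passing them through unchecked.
const SUPPORTED_IMAGE_TYPES = new Set([
  "image/jpeg",
  "image/png",
  "image/gif",
  "image/webp",
])

function assertSupportedImageType(mimeType) {
  if (!SUPPORTED_IMAGE_TYPES.has(mimeType)) {
    throw new Error(`[Anthropic] Unsupported image MIME type: ${mimeType}`)
  }
  return mimeType
}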
@@ -66,9 +66,27 @@ async function sendMessage(userPrompt) {
 
         console.log(`[AskService] 🚀 Sending request to ${provider} AI...`);
 
+        // FIX: Proper model selection for each provider
+        let model;
+        switch (provider) {
+            case 'openai':
+                model = 'gpt-4o'; // Use a valid OpenAI model
+                break;
+            case 'gemini':
+                model = 'gemini-2.0-flash-exp'; // Use a valid Gemini model
+                break;
+            case 'anthropic':
+                model = 'claude-3-5-sonnet-20241022'; // Use a valid Claude model
+                break;
+            default:
+                model = 'gpt-4o'; // Default fallback
+        }
+
+        console.log(`[AskService] Using model: ${model} for provider: ${provider}`);
+
         const streamingLLM = createStreamingLLM(provider, {
             apiKey: API_KEY,
-            model: provider === 'openai' ? 'gpt-4.1' : 'gemini-2.5-flash',
+            model: model,
             temperature: 0.7,
             maxTokens: 2048,
             usePortkey: provider === 'openai' && isLoggedIn,
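
Note (not part of the diff): the switch above can equivalently be written as a lookup table. The model IDs are copied from the hunk, and 'gpt-4o' doubles as the fallback to match the default branch; pickModel is a hypothetical name.

// Sketch only: provider-to-model selection as a lookup table.
const DEFAULT_MODELS = {
  openai: 'gpt-4o',
  gemini: 'gemini-2.0-flash-exp',
  anthropic: 'claude-3-5-sonnet-20241022',
};

function pickModel(provider) {
  // Unknown providers fall back to 'gpt-4o', like the switch's default case.
  return DEFAULT_MODELS[provider] || 'gpt-4o';
}

console.log(pickModel('anthropic')); // claude-3-5-sonnet-20241022
console.log(pickModel('unknown'));   // gpt-4o (fallback)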
@@ -144,4 +162,4 @@ function initialize() {
 
 module.exports = {
     initialize,
-};
+};