import OpenAI from 'openai';
import Anthropic from '@anthropic-ai/sdk';
import axios from 'axios';
// import { HttpsProxyAgent } from 'https-proxy-agent';
// export const httpAgent = new HttpsProxyAgent('http://127.0.0.1:7890');

export async function streamingOpenAIResponses(
  messages: any[],
  callback: (content: string, event?: string) => void,
  params: {
    openAiApiKey?: string;
    openAiBaseURL?: string;
    llm: string;
    anthropicApiKey?: string;
    anthropicBaseURL?: string;
  }
) {
  if (params.llm === "anthropic") {

    // Build the Anthropic client options; only set baseURL when one is provided.
    const options: {
      apiKey?: string;
      baseURL?: string;
    } = {
      apiKey: params.anthropicApiKey,
    };
    if (params.anthropicBaseURL) {
      options.baseURL = params.anthropicBaseURL;
    }
    const anthropic = new Anthropic(options);

    // Anthropic takes the system prompt as a separate field, so pull the first
    // message (assumed to be the system message) out of the list.
    const systemMessage = messages.splice(0, 1)[0];

    // Convert OpenAI-style `image_url` content parts (data URLs) into
    // Anthropic base64 image blocks; other parts pass through unchanged.
    const aMessages = messages.map((message: any) => {
      const { content } = message;
      if (Array.isArray(content)) {
        message.content = content.map((item) => {
          let temp = item;
          if (item.type === 'image_url') {
            const imageUrl = item.image_url.url;
            temp = {
              type: "image",
              source: {
                type: "base64",
                media_type: imageUrl.split(";")[0].split(":")[1],
                data: imageUrl.split(",")[1],
              },
            };
          }
          return temp;
        });
      }
      return message;
    });

    // Stream the response, forwarding each text delta to the callback.
    const stream = anthropic.messages
      .stream({
        model: 'claude-3-5-sonnet-20240620',
        max_tokens: 4096,
        system: systemMessage.content,
        messages: aMessages
      })
      .on('text', (text) => {
        callback(text);
      });

    const message = await stream.finalMessage();
    return message;
  }

  // OpenAI API
  if (!params.openAiApiKey) {
    callback('No OpenAI key set', 'error');
    return '';
  }

  const openai = new OpenAI({
    apiKey: params.openAiApiKey || process.env['OPENAI_API_KEY'], // defaults to process.env["OPENAI_API_KEY"]
    baseURL:
      params.openAiBaseURL ||
      process.env['OPENAI_BASE_URL'] ||
      'https://api.openai.com/v1',
    // httpAgent
  });

  const stream = await openai.chat.completions.create({
    // This branch calls the OpenAI Chat Completions API, so use an OpenAI model.
    model: 'gpt-4o-2024-05-13',
    // model: 'gpt-4-turbo',
    temperature: 0,
    max_tokens: 4096,
    messages,
    stream: true,
  });

  // Accumulate the streamed deltas and forward each chunk to the callback.
  let full_response = '';
  for await (const chunk of stream) {
    const content = chunk.choices[0]?.delta?.content || '';
    full_response += content;
    callback(content);
  }

  return full_response;
}
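
// Example usage (illustrative sketch only; the messages, callback handling, and
// key lookup below are placeholder assumptions, not part of the module's API).
// Streams a reply via the OpenAI branch and writes each chunk to stdout; pass
// `llm: "anthropic"` plus the Anthropic fields to exercise the Claude branch.
export async function exampleStreamingUsage() {
  const messages = [
    { role: 'system', content: 'You are a helpful assistant.' },
    { role: 'user', content: 'Say hello in one sentence.' },
  ];

  const reply = await streamingOpenAIResponses(
    messages,
    (content, event) => {
      if (event === 'error') {
        console.error(content);
      } else {
        process.stdout.write(content);
      }
    },
    {
      openAiApiKey: process.env['OPENAI_API_KEY'] || '',
      llm: 'openai',
    }
  );

  return reply;
}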