增加 openai 的模型兼容

This commit is contained in:
jwangkun
2026-01-08 17:09:34 +08:00
parent 6558006a4d
commit 579071ac95
18 changed files with 5185 additions and 212 deletions

View File

@@ -1,6 +1,11 @@
import { ModelOption, ExpertResult } from '../../types';
import { getSynthesisPrompt } from './prompts';
import { withRetry } from '../utils/retry';
import { generateContentStream as generateOpenAIStream } from './openaiClient';
// Heuristic client-type probe: a Google GenAI client exposes a
// `models.generateContentStream` member, while OpenAI-style clients do not.
// NOTE(review): this only checks for the member's presence — it does not
// verify that it is callable.
const isGoogleProvider = (ai: any): boolean => {
  const googleStreamApi = ai?.models?.generateContentStream;
  return googleStreamApi !== undefined;
};
export const streamSynthesisResponse = async (
ai: any,
@@ -13,38 +18,63 @@ export const streamSynthesisResponse = async (
onChunk: (text: string, thought: string) => void
): Promise<void> => {
const prompt = getSynthesisPrompt(historyContext, query, expertResults);
const isGoogle = isGoogleProvider(ai);
const synthesisStream = await withRetry(() => ai.models.generateContentStream({
model: model,
contents: prompt,
config: {
thinkingConfig: {
if (isGoogle) {
const synthesisStream = await withRetry(() => ai.models.generateContentStream({
model: model,
contents: prompt,
config: {
thinkingConfig: {
thinkingBudget: budget,
includeThoughts: true
}
}
}));
}
}
}));
try {
for await (const chunk of synthesisStream) {
if (signal.aborted) break;
try {
for await (const chunk of (synthesisStream as any)) {
if (signal.aborted) break;
let chunkText = "";
let chunkThought = "";
let chunkText = "";
let chunkThought = "";
if (chunk.candidates?.[0]?.content?.parts) {
if (chunk.candidates?.[0]?.content?.parts) {
for (const part of chunk.candidates[0].content.parts) {
if (part.thought) {
chunkThought += (part.text || "");
} else if (part.text) {
chunkText += part.text;
}
if (part.thought) {
chunkThought += (part.text || "");
} else if (part.text) {
chunkText += part.text;
}
}
onChunk(chunkText, chunkThought);
}
}
} catch (streamError) {
console.error("Synthesis stream interrupted:", streamError);
throw streamError;
}
} else {
const stream = generateOpenAIStream(ai, {
model,
systemInstruction: undefined,
content: prompt,
temperature: 0.7,
thinkingConfig: {
thinkingBudget: budget,
includeThoughts: true
}
});
try {
for await (const chunk of (stream as any)) {
if (signal.aborted) break;
onChunk(chunk.text, chunk.thought || '');
}
} catch (streamError) {
console.error("Synthesis stream interrupted:", streamError);
throw streamError;
}
} catch (streamError) {
console.error("Synthesis stream interrupted:", streamError);
throw streamError;
}
};