Author: 从何开始123
Date:   2026-01-12 18:03:31 +08:00
parent bd297716b0
commit 25dffcc02e
10 changed files with 290 additions and 27 deletions

View File

@@ -3,6 +3,7 @@ import { ModelOption, ExpertResult, MessageAttachment } from '../../types';
 import { getExpertSystemInstruction } from './prompts';
 import { withRetry } from '../utils/retry';
 import { generateContentStream as generateOpenAIStream } from './openaiClient';
+import { logger } from '../logger';
 
 const isGoogleProvider = (ai: any): boolean => {
   return ai?.models?.generateContentStream !== undefined;
@@ -44,7 +45,8 @@ export const streamExpertResponse = async (
         systemInstruction: getExpertSystemInstruction(expert.role, expert.description, context),
         temperature: expert.temperature,
         thinkingConfig: {
-          thinkingBudget: budget
+          thinkingBudget: budget,
+          includeThoughts: true
         }
       }
     }));
@@ -53,11 +55,22 @@ export const streamExpertResponse = async (
     try {
       for await (const chunk of (streamResult as any)) {
         if (signal.aborted) break;
-        const chunkText = chunk.text || "";
-        onChunk(chunkText, "");
+        let chunkText = "";
+        let chunkThought = "";
+        if (chunk.candidates?.[0]?.content?.parts) {
+          for (const part of chunk.candidates[0].content.parts) {
+            if (part.thought) {
+              chunkThought += (part.text || "");
+            } else if (part.text) {
+              chunkText += part.text;
+            }
+          }
+          onChunk(chunkText, chunkThought);
+        }
       }
     } catch (streamError) {
-      console.error(`Stream interrupted for expert ${expert.role}:`, streamError);
+      logger.error("Expert", `Stream interrupted for expert ${expert.role}`, streamError);
       throw streamError;
     }
   } else {
@@ -95,7 +108,7 @@ export const streamExpertResponse = async (
         onChunk(chunk.text, chunk.thought || '');
       }
     } catch (streamError) {
-      console.error(`Stream interrupted for expert ${expert.role}:`, streamError);
+      logger.error("Expert", `Stream interrupted for expert ${expert.role} (OpenAI)`, streamError);
       throw streamError;
     }
   }
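
Note: the new Google-branch parsing is duplicated verbatim in the synthesis service below. A minimal sketch of that shared logic as a standalone helper follows, assuming only the chunk shape this diff itself reads (candidates[0].content.parts, each part carrying optional text and a boolean thought flag); the helper name and interfaces are hypothetical, not part of the repo.

```ts
// Sketch only: mirrors the parsing introduced in this commit.
// `StreamPart`, `StreamChunk`, and `splitThoughtParts` are
// hypothetical names; the field shapes come from the diff.
interface StreamPart {
  text?: string;
  thought?: boolean;
}

interface StreamChunk {
  candidates?: { content?: { parts?: StreamPart[] } }[];
}

// Split one streamed chunk into visible answer text and reasoning text.
const splitThoughtParts = (chunk: StreamChunk): { text: string; thought: string } => {
  let text = "";
  let thought = "";
  for (const part of chunk.candidates?.[0]?.content?.parts ?? []) {
    if (part.thought) {
      thought += part.text || ""; // reasoning ("thought") part
    } else if (part.text) {
      text += part.text;          // normal answer text
    }
  }
  return { text, thought };
};
```

With such a helper, both streaming loops could reduce to `const { text, thought } = splitThoughtParts(chunk); onChunk(text, thought);`.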

View File

@@ -5,6 +5,7 @@ import { cleanJsonString } from '../../utils';
 import { MANAGER_SYSTEM_PROMPT, MANAGER_REVIEW_SYSTEM_PROMPT } from './prompts';
 import { withRetry } from '../utils/retry';
 import { generateContent as generateOpenAIContent } from './openaiClient';
+import { logger } from '../logger';
 
 const isGoogleProvider = (ai: any): boolean => {
   return ai?.models?.generateContent !== undefined;
@@ -68,6 +69,7 @@ export const executeManagerAnalysis = async (
         responseMimeType: "application/json",
         responseSchema: managerSchema,
         thinkingConfig: {
+          includeThoughts: true,
           thinkingBudget: budget
         }
       }
@@ -82,7 +84,7 @@ export const executeManagerAnalysis = async (
     }
     return analysisJson;
   } catch (e) {
-    console.error("Manager Analysis Error:", e);
+    logger.error("Manager", "Analysis generation failed", e);
     return {
       thought_process: "Direct processing fallback due to analysis error.",
       experts: []
@@ -135,7 +137,7 @@ export const executeManagerAnalysis = async (
     }
     return analysisJson;
   } catch (e) {
-    console.error("Manager Analysis Error:", e);
+    logger.error("Manager", "Analysis generation failed (OpenAI)", e);
     return {
       thought_process: "Direct processing fallback due to analysis error.",
       experts: []
@@ -192,6 +194,7 @@ export const executeManagerReview = async (
         responseMimeType: "application/json",
         responseSchema: reviewSchema,
         thinkingConfig: {
+          includeThoughts: true,
           thinkingBudget: budget
         }
       }
@@ -201,7 +204,7 @@ export const executeManagerReview = async (
     const cleanText = cleanJsonString(rawText);
     return JSON.parse(cleanText) as ReviewResult;
   } catch (e) {
-    console.error("Review Error:", e);
+    logger.error("Manager", "Review generation failed", e);
     return { satisfied: true, critique: "Processing Error, proceeding to synthesis." };
   }
 } else {
@@ -220,7 +223,7 @@ export const executeManagerReview = async (
     return JSON.parse(response.text) as ReviewResult;
   } catch (e) {
-    console.error("Review Error:", e);
+    logger.error("Manager", "Review generation failed (OpenAI)", e);
     return { satisfied: true, critique: "Processing Error, proceeding to synthesis." };
   }
 }
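
Note: every call site in this commit uses the shape logger.error(scope, message, error). The '../logger' module itself is not part of this diff, so the following is only an assumed minimal sketch inferred from that call shape; the timestamp format and everything beyond the `error` method are guesses.

```ts
// Hypothetical sketch of '../logger', inferred solely from the
// call shape used in this commit: logger.error(scope, msg, err).
// Not the actual module; the repo's implementation is not shown.
export const logger = {
  error(scope: string, message: string, error?: unknown): void {
    // Scope tag keeps log lines greppable per module ("Expert", "Manager", "Synthesis").
    console.error(`[${new Date().toISOString()}] [${scope}] ${message}`, error);
  },
};
```

This matches calls such as `logger.error("Manager", "Review generation failed", e)` above.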

View File

@@ -3,6 +3,7 @@ import { ModelOption, ExpertResult, MessageAttachment } from '../../types';
 import { getSynthesisPrompt } from './prompts';
 import { withRetry } from '../utils/retry';
 import { generateContentStream as generateOpenAIStream } from './openaiClient';
+import { logger } from '../logger';
 
 const isGoogleProvider = (ai: any): boolean => {
   return ai?.models?.generateContentStream !== undefined;
@@ -44,7 +45,8 @@ export const streamSynthesisResponse = async (
       contents: contents,
       config: {
         thinkingConfig: {
-          thinkingBudget: budget
+          thinkingBudget: budget,
+          includeThoughts: true
         }
       }
     }));
@@ -52,12 +54,23 @@ export const streamSynthesisResponse = async (
     try {
       for await (const chunk of (synthesisStream as any)) {
         if (signal.aborted) break;
-        const chunkText = chunk.text || "";
-        onChunk(chunkText, "");
+        let chunkText = "";
+        let chunkThought = "";
+        if (chunk.candidates?.[0]?.content?.parts) {
+          for (const part of chunk.candidates[0].content.parts) {
+            if (part.thought) {
+              chunkThought += (part.text || "");
+            } else if (part.text) {
+              chunkText += part.text;
+            }
+          }
+          onChunk(chunkText, chunkThought);
+        }
       }
     } catch (streamError) {
-      console.error("Synthesis stream interrupted:", streamError);
+      logger.error("Synthesis", "Stream interrupted", streamError);
       throw streamError;
     }
   } else {
@@ -95,7 +108,7 @@ export const streamSynthesisResponse = async (
         onChunk(chunk.text, chunk.thought || '');
       }
     } catch (streamError) {
-      console.error("Synthesis stream interrupted:", streamError);
+      logger.error("Synthesis", "Stream interrupted (OpenAI)", streamError);
       throw streamError;
     }
   }
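
Note: after this change, onChunk consistently receives a (text, thought) pair on both provider paths. A hedged consumer-side sketch of what a caller might do with that pair follows; the accumulator names are illustrative and not from the repo.

```ts
// Illustrative only: one way a caller of streamSynthesisResponse
// might consume the (text, thought) pairs onChunk now emits.
let answer = "";
let reasoning = "";

const onChunk = (text: string, thought: string): void => {
  answer += text;       // user-visible synthesis output
  reasoning += thought; // model "thinking" surfaced by includeThoughts: true
};
```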