@@ -33,7 +33,8 @@ const App = () => {
     handleNewChat,
     handleSelectSession,
     handleDeleteSession,
-    stopDeepThink
+    stopDeepThink,
+    focusTrigger
   } = useAppLogic();
 
   return (
@@ -85,6 +86,7 @@ const App = () => {
             onRun={handleRun}
             onStop={stopDeepThink}
             appState={appState}
+            focusTrigger={focusTrigger}
           />
         </div>
       </div>
@@ -24,19 +24,20 @@ const ChatArea = ({
   processStartTime,
   processEndTime
 }: ChatAreaProps) => {
+  const isIdle = messages.length === 0 && appState === 'idle';
 
   return (
     <div className="flex-1 overflow-y-auto custom-scrollbar scroll-smooth">
-      <div className="pb-40">
-        {messages.length === 0 && appState === 'idle' && (
-          <div className="h-full flex flex-col items-center justify-center pt-32 opacity-70 px-4 text-center">
+      {isIdle ? (
+        <div className="h-full flex flex-col items-center justify-center opacity-70 px-4 text-center">
           <Logo className="w-24 h-24 mb-6 drop-shadow-xl animate-pulse-slow" />
           <p className="text-xl font-bold text-slate-900">Prisma</p>
           <p className="text-sm text-slate-500 max-w-xs mt-2">
             Deep multi-agent reasoning.
           </p>
         </div>
-        )}
+      ) : (
+        <div className="pb-40">
           {/* History */}
           {messages.map((msg, idx) => (
             <ChatMessageItem
@@ -85,6 +86,7 @@ const ChatArea = ({
           </div>
         )}
         </div>
+      )}
     </div>
   );
 };
@@ -1,5 +1,6 @@
+
 import React, { useState } from 'react';
-import { User, Sparkles, ChevronDown, ChevronRight } from 'lucide-react';
+import { User, Sparkles, ChevronDown, ChevronRight, Copy, Check } from 'lucide-react';
 import MarkdownRenderer from './MarkdownRenderer';
 import ProcessFlow from './ProcessFlow';
 import { ChatMessage } from '../types';
@@ -12,10 +13,18 @@ interface ChatMessageProps {
 const ChatMessageItem = ({ message, isLast }: ChatMessageProps) => {
   const isUser = message.role === 'user';
   const [showThinking, setShowThinking] = useState(false);
+  const [copied, setCopied] = useState(false);
 
   // Check if there is any thinking data to show
   const hasThinkingData = message.analysis || (message.experts && message.experts.length > 0);
 
+  const handleCopy = () => {
+    if (!message.content) return;
+    navigator.clipboard.writeText(message.content);
+    setCopied(true);
+    setTimeout(() => setCopied(false), 2000);
+  };
+
   return (
     <div className={`group w-full text-slate-800 ${isUser ? 'bg-transparent' : 'bg-transparent'}`}>
       <div className="max-w-6xl mx-auto px-4 py-8 flex gap-4 md:gap-6">
@@ -36,9 +45,31 @@ const ChatMessageItem = ({ message, isLast }: ChatMessageProps) => {
 
         {/* Content */}
         <div className="relative flex-1 overflow-hidden">
-          <div className="font-semibold text-sm text-slate-900 mb-1">
+          <div className="flex items-center justify-between mb-1">
+            <div className="font-semibold text-sm text-slate-900">
               {isUser ? 'You' : 'Prisma'}
             </div>
+            {message.content && (
+              <button
+                onClick={handleCopy}
+                className={`p-1.5 rounded-md transition-all duration-200 flex items-center gap-1.5
+                  ${copied
+                    ? 'text-emerald-600 bg-emerald-50'
+                    : 'text-slate-400 hover:text-slate-600 hover:bg-slate-100 opacity-0 group-hover:opacity-100 focus:opacity-100'
+                  }`}
+                title="Copy message"
+              >
+                {copied ? (
+                  <>
+                    <Check size={14} />
+                    <span className="text-[10px] font-medium uppercase tracking-wider">Copied</span>
+                  </>
+                ) : (
+                  <Copy size={14} />
+                )}
+              </button>
+            )}
+          </div>
 
           {/* Thinking Process Accordion (Only for AI) */}
           {!isUser && hasThinkingData && (
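A note on the copy handler added above: navigator.clipboard.writeText returns a Promise that can reject (for example in an insecure context), and the commit fires it without awaiting, so the "Copied" state flips even if the write fails. A minimal awaited variant, offered only as a sketch and not as the commit's code:

// Sketch only: an awaited handleCopy with failure handling (not part of this commit).
const handleCopy = async () => {
  if (!message.content) return;
  try {
    // Resolves once the text has actually reached the clipboard.
    await navigator.clipboard.writeText(message.content);
    setCopied(true);
    setTimeout(() => setCopied(false), 2000);
  } catch (err) {
    console.error('Clipboard write failed:', err);
  }
};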
@@ -15,7 +15,7 @@ interface HeaderProps {
 
 const Header = ({ selectedModel, setSelectedModel, onOpenSettings, onToggleSidebar, onNewChat }: HeaderProps) => {
   return (
-    <header className="sticky top-0 z-50 bg-white/80 backdrop-blur-md border-b border-slate-100">
+    <header className="sticky top-0 z-50 bg-white/80 backdrop-blur-md">
       <div className="w-full px-4 h-16 flex items-center justify-between">
         <div className="flex items-center gap-4">
           <button
@@ -1,3 +1,4 @@
+
 import React, { useRef, useLayoutEffect, useState, useEffect } from 'react';
 import { ArrowUp, Square } from 'lucide-react';
 import { AppState } from '../types';
@@ -8,9 +9,10 @@ interface InputSectionProps {
   onRun: () => void;
   onStop: () => void;
   appState: AppState;
+  focusTrigger?: number;
 }
 
-const InputSection = ({ query, setQuery, onRun, onStop, appState }: InputSectionProps) => {
+const InputSection = ({ query, setQuery, onRun, onStop, appState, focusTrigger }: InputSectionProps) => {
   const textareaRef = useRef<HTMLTextAreaElement>(null);
   const [isComposing, setIsComposing] = useState(false);
 
@@ -35,11 +37,12 @@ const InputSection = ({ query, setQuery, onRun, onStop, appState }: InputSection
   };
 
   // Focus input on mount and when app becomes idle (e.g. after "New Chat" or completion)
+  // or when explicitly triggered by focusTrigger
   useEffect(() => {
     if (appState === 'idle' && textareaRef.current) {
       textareaRef.current.focus();
     }
-  }, [appState]);
+  }, [appState, focusTrigger]);
 
   // useLayoutEffect prevents visual flickering by adjusting height before paint
   useLayoutEffect(() => {
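The focusTrigger prop threaded through here is a plain counter: its value is never read, but each increment changes the number's identity, so the effect keyed on [appState, focusTrigger] re-runs even when appState stays 'idle'. Reduced to its essentials (a sketch with hypothetical names, not the app's code):

import React, { useEffect, useRef } from 'react';

// Sketch (hypothetical component): the trigger-counter pattern in isolation.
function FocusDemo({ focusTrigger }: { focusTrigger: number }) {
  const inputRef = useRef<HTMLInputElement>(null);
  // A new counter value re-runs the effect; the number itself is never read.
  useEffect(() => {
    inputRef.current?.focus();
  }, [focusTrigger]);
  return <input ref={inputRef} />;
}
// Producer side (in the owning hook): setFocusTrigger(prev => prev + 1);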
@@ -1,3 +1,4 @@
+
 import React, { useState, useEffect } from 'react';
 import { Users, Zap, Brain, Loader2, CheckCircle2, Clock } from 'lucide-react';
 import { AppState, AnalysisResult, ExpertResult } from '../types';
@@ -66,7 +67,7 @@ const ProcessFlow = ({ appState, managerAnalysis, experts, defaultExpanded = tru
   const expertsStatus = anyExpertWorking ? 'active' : (allExpertsDone ? 'completed' : 'idle');
 
   return (
-    <div className="relative space-y-4 pt-4">
+    <div className="relative space-y-4 pt-4 w-full">
 
       {/* Global Timer Overlay */}
       <GlobalTimer start={processStartTime} end={processEndTime} appState={appState} />
@@ -115,7 +116,7 @@ const ProcessFlow = ({ appState, managerAnalysis, experts, defaultExpanded = tru
         isExpanded={isExpanded}
         onToggle={() => setIsExpanded(!isExpanded)}
       >
-        <div className="grid grid-cols-1 gap-3 pt-2">
+        <div className="grid grid-cols-1 md:grid-cols-2 xl:grid-cols-3 gap-4 pt-2">
           {experts.map((expert) => (
             <ExpertCard key={expert.id} expert={expert} />
           ))}
@@ -21,6 +21,7 @@ export const useAppLogic = () => {
   // UI State
   const [isSidebarOpen, setIsSidebarOpen] = useState(true);
   const [isSettingsOpen, setIsSettingsOpen] = useState(false);
+  const [focusTrigger, setFocusTrigger] = useState(0); // Trigger for input focus
 
   // Active Chat State
   const [messages, setMessages] = useState<ChatMessage[]>([]);
@@ -149,6 +150,8 @@ export const useAppLogic = () => {
       }
 
       resetDeepThink();
+      // Refocus after completion
+      setFocusTrigger(prev => prev + 1);
     }
   }, [appState, finalOutput, managerAnalysis, experts, synthesisThoughts, resetDeepThink, processStartTime, processEndTime, currentSessionId, messages, selectedModel, createSession, updateSessionMessages]);
 
@@ -181,6 +184,7 @@ export const useAppLogic = () => {
     setMessages([]);
     setQuery('');
     resetDeepThink();
+    setFocusTrigger(prev => prev + 1); // Trigger focus
     if (window.innerWidth < 1024) setIsSidebarOpen(false);
   }, [stopDeepThink, setCurrentSessionId, resetDeepThink]);
 
@@ -188,6 +192,7 @@ export const useAppLogic = () => {
     stopDeepThink();
     resetDeepThink();
     setCurrentSessionId(id);
+    setFocusTrigger(prev => prev + 1); // Trigger focus
     if (window.innerWidth < 1024) setIsSidebarOpen(false);
   }, [stopDeepThink, resetDeepThink, setCurrentSessionId]);
 
@@ -223,6 +228,7 @@ export const useAppLogic = () => {
     handleNewChat,
     handleSelectSession,
     handleDeleteSession,
-    stopDeepThink
+    stopDeepThink,
+    focusTrigger
   };
 };
@@ -4,6 +4,8 @@
     <meta charset="UTF-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1.0" />
     <title>Prisma</title>
+    <!-- SVG Favicon -->
+    <link rel="icon" type="image/svg+xml" href="data:image/svg+xml,%3Csvg viewBox='0 0 600 600' xmlns='http://www.w3.org/2000/svg'%3E%3Cg stroke-width='16' stroke-linecap='round' stroke-linejoin='round' fill='none' stroke='%23334155'%3E%3Cpath d='M300 180 L200 420 L400 420 Z'/%3E%3Cpath d='M300 50 L300 180'/%3E%3Cpath d='M100 480 L200 420'/%3E%3Cpath d='M500 480 L400 420'/%3E%3Cpath d='M300 50 L100 480 L500 480 Z'/%3E%3C/g%3E%3Cg stroke-width='12' stroke-linejoin='round' fill='none'%3E%3Cline x1='0' y1='275' x2='195' y2='275' stroke='%23334155'/%3E%3Cpolyline points='194,270 380,225 600,245' stroke='%232563eb' opacity='0.95'/%3E%3Cpolyline points='194,275 400,275 600,305' stroke='%234ade80' opacity='0.95'/%3E%3Cpolyline points='194,280 420,325 600,370' stroke='%239333ea' opacity='0.95'/%3E%3C/g%3E%3C/svg%3E">
     <script src="https://cdn.tailwindcss.com"></script>
     <script src="https://cdn.tailwindcss.com?plugins=typography"></script>
     <link href="https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600;700&family=JetBrains+Mono:wght@400;700&display=swap" rel="stylesheet">
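The favicon is an SVG embedded as a data: URI, which is why the markup is percent-encoded by hand (%3C for <, %3E for >, %23 for #). When generating such a link from an SVG string, encodeURIComponent produces the same encoding and leaves single quotes intact, matching the style above. A sketch with placeholder markup, not the actual icon:

// Sketch: building a data-URI favicon href from an SVG string (placeholder markup).
const svg = "<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 600 600'>...</svg>";
const faviconHref = "data:image/svg+xml," + encodeURIComponent(svg);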
@@ -1,5 +1,6 @@
 import { ModelOption, ExpertResult } from '../../types';
 import { getExpertSystemInstruction } from './prompts';
+import { withRetry } from '../utils/retry';
 
 export const streamExpertResponse = async (
   ai: any,
@@ -10,7 +11,10 @@ export const streamExpertResponse = async (
   signal: AbortSignal,
   onChunk: (text: string, thought: string) => void
 ): Promise<void> => {
-  const streamResult = await ai.models.generateContentStream({
+  // We wrap the stream initiation in retry.
+  // If the stream is successfully established but fails during iteration,
+  // we catch that separately.
+  const streamResult = await withRetry(() => ai.models.generateContentStream({
     model: model,
     contents: expert.prompt,
     config: {
@@ -21,8 +25,9 @@ export const streamExpertResponse = async (
       includeThoughts: true
       }
     }
-  });
+  }));
 
+  try {
     for await (const chunk of streamResult) {
       if (signal.aborted) break;
 
@@ -40,4 +45,10 @@ export const streamExpertResponse = async (
       onChunk(chunkText, chunkThought);
     }
   }
+  } catch (streamError) {
+    console.error(`Stream interrupted for expert ${expert.role}:`, streamError);
+    // We don't retry mid-stream automatically here to avoid complex state management,
+    // but the initial connection is protected by withRetry.
+    throw streamError;
+  }
 };
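The comments in this hunk mark a deliberate boundary: withRetry guards only the await that opens the stream. Once generateContentStream resolves, iteration runs outside the retried thunk, so a connection dropped mid-stream surfaces in the catch block and is rethrown rather than replayed (a replay would re-emit chunks the UI has already rendered). The shape of that guarantee, as a sketch with a hypothetical openStream:

// Sketch: only stream initiation is retried; consumption is not.
async function consume(openStream: () => Promise<AsyncIterable<string>>) {
  const stream = await withRetry(() => openStream()); // transient failures retried here
  try {
    for await (const chunk of stream) {
      console.log(chunk); // a failure here propagates once, with no replay
    }
  } catch (err) {
    console.error('Stream interrupted:', err);
    throw err; // surfaced to the caller; no automatic mid-stream retry
  }
}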
@@ -2,6 +2,7 @@ import { Type } from "@google/genai";
 import { ModelOption, AnalysisResult, ExpertResult, ReviewResult } from '../../types';
 import { cleanJsonString } from '../../utils';
 import { MANAGER_SYSTEM_PROMPT, MANAGER_REVIEW_SYSTEM_PROMPT } from './prompts';
+import { withRetry } from '../utils/retry';
 
 export const executeManagerAnalysis = async (
   ai: any,
@@ -31,7 +32,8 @@ export const executeManagerAnalysis = async (
     required: ["thought_process", "experts"]
   };
 
-  const analysisResp = await ai.models.generateContent({
+  try {
+    const analysisResp = await withRetry(() => ai.models.generateContent({
     model: model,
     contents: `Context:\n${context}\n\nCurrent Query: "${query}"`,
     config: {
@@ -43,20 +45,23 @@ export const executeManagerAnalysis = async (
         thinkingBudget: budget
       }
     }
-  });
+    }));
 
     const rawText = analysisResp.text || '{}';
     const cleanText = cleanJsonString(rawText);
 
-  try {
     const analysisJson = JSON.parse(cleanText) as AnalysisResult;
     if (!analysisJson.experts || !Array.isArray(analysisJson.experts)) {
       throw new Error("Invalid schema structure");
     }
     return analysisJson;
   } catch (e) {
-    console.error("JSON Parse Error:", e, rawText);
-    return { thought_process: "Direct processing.", experts: [] };
+    console.error("Manager Analysis Error:", e);
+    // Return a fallback so the process doesn't completely die if planning fails
+    return {
+      thought_process: "Direct processing fallback due to analysis error.",
+      experts: []
+    };
   }
 };
@@ -97,7 +102,8 @@ export const executeManagerReview = async (
 
   const content = `User Query: "${query}"\n\nCurrent Expert Outputs:\n${expertOutputs}`;
 
-  const resp = await ai.models.generateContent({
+  try {
+    const resp = await withRetry(() => ai.models.generateContent({
     model: model,
     contents: content,
     config: {
@@ -109,16 +115,14 @@ export const executeManagerReview = async (
         thinkingBudget: budget
       }
     }
-  });
+    }));
 
     const rawText = resp.text || '{}';
     const cleanText = cleanJsonString(rawText);
-
-  try {
     return JSON.parse(cleanText) as ReviewResult;
   } catch (e) {
-    console.error("Review JSON Parse Error:", e);
-    // Fallback: Assume satisfied if JSON fails to avoid infinite loops due to format errors
-    return { satisfied: true, critique: "JSON Error, proceeding to synthesis." };
+    console.error("Review Error:", e);
+    // Fallback: Assume satisfied if JSON or API fails to avoid infinite loops
+    return { satisfied: true, critique: "Processing Error, proceeding to synthesis." };
   }
 };
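Both manager calls defend against models wrapping their JSON in markdown fences: cleanJsonString runs before JSON.parse, and the widened try block now also absorbs API errors surfaced by withRetry. cleanJsonString itself is imported from ../../utils and is not part of this diff; a typical fence-stripping implementation might look like the following, which is purely illustrative and may differ from the project's real utility:

// Illustrative sketch of a fence-stripping cleaner; not the project's actual code.
export function cleanJsonString(raw: string): string {
  return raw
    .replace(/^\s*```(?:json)?\s*/i, '') // drop a leading ``` or ```json fence
    .replace(/\s*```\s*$/, '')           // drop a trailing fence
    .trim();
}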
@@ -1,5 +1,6 @@
 import { ModelOption, ExpertResult } from '../../types';
 import { getSynthesisPrompt } from './prompts';
+import { withRetry } from '../utils/retry';
 
 export const streamSynthesisResponse = async (
   ai: any,
@@ -13,7 +14,7 @@ export const streamSynthesisResponse = async (
 ): Promise<void> => {
   const prompt = getSynthesisPrompt(historyContext, query, expertResults);
 
-  const synthesisStream = await ai.models.generateContentStream({
+  const synthesisStream = await withRetry(() => ai.models.generateContentStream({
     model: model,
     contents: prompt,
     config: {
@@ -22,8 +23,9 @@ export const streamSynthesisResponse = async (
       includeThoughts: true
       }
     }
-  });
+  }));
 
+  try {
     for await (const chunk of synthesisStream) {
       if (signal.aborted) break;
 
@@ -41,4 +43,8 @@ export const streamSynthesisResponse = async (
       onChunk(chunkText, chunkThought);
     }
   }
+  } catch (streamError) {
+    console.error("Synthesis stream interrupted:", streamError);
+    throw streamError;
+  }
 };
prisma/services/utils/retry.ts (new file, 50 lines)
@@ -0,0 +1,50 @@
+/**
+ * Retry Utility for API calls
+ * Implements exponential backoff and handles transient errors (429, 5xx).
+ */
+
+export async function withRetry<T>(
+  fn: () => Promise<T>,
+  maxRetries: number = 3,
+  initialDelay: number = 1500
+): Promise<T> {
+  let lastError: any;
+
+  for (let attempt = 1; attempt <= maxRetries; attempt++) {
+    try {
+      return await fn();
+    } catch (error: any) {
+      lastError = error;
+
+      // Determine if the error is transient
+      // 429: Too Many Requests
+      // 5xx: Server Errors
+      // Network failures (no status)
+      const status = error?.status || error?.response?.status;
+      const message = error?.message || "";
+
+      const isRateLimit = status === 429;
+      const isServerError = status >= 500 && status < 600;
+      const isNetworkError = !status;
+      const isTransient = isRateLimit || isServerError || isNetworkError;
+
+      // If we reached max retries or the error isn't transient, throw immediately
+      if (attempt === maxRetries || !isTransient) {
+        console.error(`[Prisma] Final attempt ${attempt} failed:`, error);
+        throw error;
+      }
+
+      // Calculate delay with exponential backoff: 1.5s, 3s, 6s...
+      const delay = initialDelay * Math.pow(2, attempt - 1);
+
+      console.warn(
+        `[Prisma] API call failed (Attempt ${attempt}/${maxRetries}). ` +
+        `Status: ${status || 'Network Error'}. Retrying in ${delay}ms...`
+      );
+
+      await new Promise(resolve => setTimeout(resolve, delay));
+    }
+  }
+
+  throw lastError || new Error("Maximum retries reached without success");
+}
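Because withRetry is generic over the wrapped promise's type, callers keep their original result types. The thunk matters: passing () => call() rather than call() lets each attempt re-issue the request, which is exactly how the service hunks above use it:

// From the services above: wrap the call in a thunk so each attempt re-issues it.
const resp = await withRetry(() => ai.models.generateContent({ model, contents }));

// Optional knobs (hypothetical values): up to 5 attempts, starting at a 500 ms delay.
const data = await withRetry(() => fetch(url).then(r => r.json()), 5, 500);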