This commit is contained in:
从何开始123
2026-01-08 11:56:00 +08:00
parent 54e9bf5906
commit 1561c054b7
24 changed files with 1105 additions and 449 deletions

BIN
.DS_Store vendored Normal file

Binary file not shown.

View File

@@ -1,8 +1,6 @@
import React, { useState, useEffect } from 'react';
import { ModelOption, AppConfig, ChatMessage } from './types';
import { getValidThinkingLevels } from './config';
import { useDeepThink } from './hooks/useDeepThink';
import { useChatSessions } from './hooks/useChatSessions';
import React from 'react';
import { useAppLogic } from './hooks/useAppLogic';
import SettingsModal from './SettingsModal';
import Header from './components/Header';
@@ -11,149 +9,32 @@ import Sidebar from './components/Sidebar';
import ChatArea from './components/ChatArea';
const App = () => {
// Session Management
const {
sessions,
currentSessionId,
setCurrentSessionId,
createSession,
updateSessionMessages,
deleteSession,
getSession
} = useChatSessions();
// UI State
const [isSidebarOpen, setIsSidebarOpen] = useState(true);
const [isSettingsOpen, setIsSettingsOpen] = useState(false);
// Active Chat State
const [messages, setMessages] = useState<ChatMessage[]>([]);
const [query, setQuery] = useState('');
// App Configuration
const [selectedModel, setSelectedModel] = useState<ModelOption>('gemini-3-flash-preview');
const [config, setConfig] = useState<AppConfig>({
planningLevel: 'high',
expertLevel: 'high',
synthesisLevel: 'high',
customApiKey: '',
customBaseUrl: '',
enableCustomApi: false
});
// Deep Think Engine
const {
messages,
query,
setQuery,
selectedModel,
setSelectedModel,
config,
setConfig,
isSidebarOpen,
setIsSidebarOpen,
isSettingsOpen,
setIsSettingsOpen,
appState,
managerAnalysis,
experts,
finalOutput,
synthesisThoughts,
runDynamicDeepThink,
stopDeepThink,
resetDeepThink,
processStartTime,
processEndTime
} = useDeepThink();
// Handle Model Constraints
useEffect(() => {
const validLevels = getValidThinkingLevels(selectedModel);
setConfig(prev => ({
...prev,
planningLevel: validLevels.includes(prev.planningLevel) ? prev.planningLevel : 'low',
expertLevel: validLevels.includes(prev.expertLevel) ? prev.expertLevel : 'low',
synthesisLevel: validLevels.includes(prev.synthesisLevel) ? prev.synthesisLevel : 'high',
}));
}, [selectedModel]);
// Sync Messages when switching sessions
useEffect(() => {
if (currentSessionId) {
const session = getSession(currentSessionId);
if (session) {
setMessages(session.messages);
setSelectedModel(session.model || 'gemini-3-flash-preview');
}
} else {
setMessages([]);
}
}, [currentSessionId, getSession]);
// Handle AI Completion
useEffect(() => {
if (appState === 'completed') {
const finalizedMessage: ChatMessage = {
id: `ai-${Date.now()}`,
role: 'model',
content: finalOutput,
analysis: managerAnalysis,
experts: experts,
synthesisThoughts: synthesisThoughts,
isThinking: false,
totalDuration: (processStartTime && processEndTime) ? (processEndTime - processStartTime) : undefined
};
const newMessages = [...messages, finalizedMessage];
setMessages(newMessages);
if (currentSessionId) {
updateSessionMessages(currentSessionId, newMessages);
} else {
createSession(newMessages, selectedModel);
}
resetDeepThink();
}
}, [appState, finalOutput, managerAnalysis, experts, synthesisThoughts, resetDeepThink, processStartTime, processEndTime, currentSessionId, messages, selectedModel, createSession, updateSessionMessages]);
const handleRun = () => {
if (!query.trim()) return;
const userMsg: ChatMessage = {
id: `user-${Date.now()}`,
role: 'user',
content: query
};
const newMessages = [...messages, userMsg];
setMessages(newMessages); // Optimistic update
// Manage Session Persistence
let activeSessionId = currentSessionId;
if (!activeSessionId) {
activeSessionId = createSession(newMessages, selectedModel);
} else {
updateSessionMessages(activeSessionId, newMessages);
}
// Run AI
runDynamicDeepThink(query, messages, selectedModel, config);
setQuery('');
};
const handleNewChat = () => {
stopDeepThink();
setCurrentSessionId(null);
setMessages([]);
setQuery('');
resetDeepThink();
if (window.innerWidth < 1024) setIsSidebarOpen(false);
};
const handleSelectSession = (id: string) => {
stopDeepThink();
resetDeepThink();
setCurrentSessionId(id);
if (window.innerWidth < 1024) setIsSidebarOpen(false);
};
const handleDeleteSession = (id: string, e: React.MouseEvent) => {
e.stopPropagation();
deleteSession(id);
if (currentSessionId === id) {
handleNewChat();
}
};
processEndTime,
handleRun,
handleNewChat,
handleSelectSession,
handleDeleteSession,
stopDeepThink
} = useAppLogic();
return (
<div className="flex flex-col h-screen bg-white text-slate-800 font-sans selection:bg-blue-100 selection:text-blue-900">
@@ -196,7 +77,6 @@ const App = () => {
processEndTime={processEndTime}
/>
{/* Floating Footer Input */}
<div className="absolute bottom-0 left-0 right-0 z-20 pointer-events-none p-4 pb-6 flex justify-center bg-gradient-to-t from-white via-white/80 to-transparent">
<div className="pointer-events-auto w-full max-w-4xl">
<ChatInput

View File

@@ -1,5 +1,5 @@
import React, { useState, useEffect } from 'react';
import { Bot, Loader2, CheckCircle2, X, BrainCircuit, MessageSquareText, Thermometer, Timer } from 'lucide-react';
import { Bot, Loader2, CheckCircle2, X, BrainCircuit, MessageSquareText, Thermometer, Timer, Repeat } from 'lucide-react';
import MarkdownRenderer from './components/MarkdownRenderer';
import { ExpertResult } from './types';
@@ -47,6 +47,7 @@ const ExpertCard = ({ expert }: { expert: ExpertResult }) => {
const isDone = expert.status === 'completed';
const isPending = expert.status === 'pending';
const isError = expert.status === 'error';
const round = expert.round || 1;
// Auto-switch to thoughts if that's all we have so far
React.useEffect(() => {
@@ -72,7 +73,15 @@ const ExpertCard = ({ expert }: { expert: ExpertResult }) => {
</div>
<div className="flex-1 min-w-0">
<div className="flex items-center justify-between mb-0.5">
<h3 className="text-sm font-bold text-slate-800 leading-tight truncate mr-2">{expert.role}</h3>
<div className="flex items-center gap-2">
<h3 className="text-sm font-bold text-slate-800 leading-tight truncate">{expert.role}</h3>
{round > 1 && (
<div className="flex items-center gap-0.5 px-1.5 py-0.5 rounded-md bg-indigo-100 text-indigo-700 text-[9px] font-bold uppercase tracking-wider border border-indigo-200">
<Repeat size={8} />
Round {round}
</div>
)}
</div>
{/* Timer for Expert */}
<TimeDisplay start={expert.startTime} end={expert.endTime} status={expert.status} />

View File

@@ -1,7 +1,18 @@
import React from 'react';
import { Settings, X, ChevronDown, Key, Globe } from 'lucide-react';
import { AppConfig, ModelOption, ThinkingLevel } from './types';
import { getValidThinkingLevels } from './config';
import { Settings, X } from 'lucide-react';
import { AppConfig, ModelOption } from './types';
import ApiSection from './components/settings/ApiSection';
import ThinkingSection from './components/settings/ThinkingSection';
import GithubSection from './components/settings/GithubSection';
interface SettingsModalProps {
isOpen: boolean;
onClose: () => void;
config: AppConfig;
setConfig: (c: AppConfig) => void;
model: ModelOption;
}
const SettingsModal = ({
isOpen,
@@ -9,141 +20,42 @@ const SettingsModal = ({
config,
setConfig,
model
}: {
isOpen: boolean;
onClose: () => void;
config: AppConfig;
setConfig: (c: AppConfig) => void;
model: ModelOption;
}) => {
}: SettingsModalProps) => {
if (!isOpen) return null;
const validLevels = getValidThinkingLevels(model);
const LevelSelect = ({
label,
value,
onChange,
desc
}: {
label: string,
value: ThinkingLevel,
onChange: (v: ThinkingLevel) => void,
desc: string
}) => (
<div className="space-y-2">
<div className="flex justify-between items-baseline">
<label className="text-sm font-medium text-slate-700">{label}</label>
<span className="text-xs text-slate-500 uppercase tracking-wider bg-slate-100 border border-slate-200 px-2 py-0.5 rounded">{value}</span>
</div>
<div className="relative">
<select
value={value}
onChange={(e) => onChange(e.target.value as ThinkingLevel)}
className="w-full bg-slate-50 border border-slate-200 text-slate-800 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block p-2.5 outline-none appearance-none cursor-pointer"
>
{validLevels.map(l => (
<option key={l} value={l}>{l.charAt(0).toUpperCase() + l.slice(1)}</option>
))}
</select>
<ChevronDown className="absolute right-3 top-3 text-slate-400 pointer-events-none" size={14} />
</div>
<p className="text-xs text-slate-500">{desc}</p>
</div>
);
return (
<div className="fixed inset-0 z-[100] flex items-center justify-center p-4 bg-slate-900/40 backdrop-blur-sm animate-in fade-in duration-200">
<div className="bg-white border border-slate-200 rounded-xl w-full max-w-md shadow-2xl overflow-hidden animate-in zoom-in-95 duration-200 flex flex-col max-h-[90vh]">
{/* Header */}
<div className="flex items-center justify-between p-4 border-b border-slate-100 bg-slate-50/50">
<div className="flex items-center gap-2">
<Settings size={18} className="text-blue-600" />
<h2 className="font-semibold text-slate-800">Configuration</h2>
</div>
<button onClick={onClose} className="text-slate-400 hover:text-slate-700 transition-colors">
<button onClick={onClose} className="text-slate-400 hover:text-slate-700 transition-colors rounded-full p-1 hover:bg-slate-200/50">
<X size={20} />
</button>
</div>
{/* Body */}
<div className="p-6 space-y-6 overflow-y-auto custom-scrollbar">
{/* Connection Settings */}
<div className="space-y-4 pt-1">
<div className="flex items-center justify-between mb-2">
<h3 className="text-xs font-bold text-slate-400 uppercase tracking-wider">API Connection</h3>
{/* Toggle Switch */}
<label className="relative inline-flex items-center cursor-pointer">
<input
type="checkbox"
checked={config.enableCustomApi ?? false}
onChange={(e) => setConfig({ ...config, enableCustomApi: e.target.checked })}
className="sr-only peer"
/>
<div className="w-11 h-6 bg-slate-200 peer-focus:outline-none rounded-full peer peer-checked:after:translate-x-full peer-checked:after:border-white after:content-[''] after:absolute after:top-[2px] after:start-[2px] after:bg-white after:border-gray-300 after:border after:rounded-full after:h-5 after:w-5 after:transition-all peer-checked:bg-blue-600"></div>
</label>
</div>
<ApiSection config={config} setConfig={setConfig} />
{config.enableCustomApi && (
<div className="space-y-4 p-4 bg-slate-50 rounded-lg border border-slate-100 animate-in fade-in slide-in-from-top-1 duration-200">
<div className="space-y-2">
<label className="text-sm font-medium text-slate-700 flex items-center gap-2">
<Key size={14} className="text-slate-400" />
Custom API Key
</label>
<input
type="password"
placeholder="sk-..."
value={config.customApiKey || ''}
onChange={(e) => setConfig({ ...config, customApiKey: e.target.value })}
className="w-full bg-white border border-slate-200 text-slate-800 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block p-2.5 outline-none placeholder:text-slate-400"
/>
</div>
<ThinkingSection
config={config}
setConfig={setConfig}
model={model}
/>
<div className="space-y-2">
<label className="text-sm font-medium text-slate-700 flex items-center gap-2">
<Globe size={14} className="text-slate-400" />
Custom Base URL
</label>
<input
type="text"
placeholder="https://generativelanguage.googleapis.com"
value={config.customBaseUrl || ''}
onChange={(e) => setConfig({ ...config, customBaseUrl: e.target.value })}
className="w-full bg-white border border-slate-200 text-slate-800 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block p-2.5 outline-none placeholder:text-slate-400"
/>
</div>
</div>
)}
</div>
<div className="border-t border-slate-100 pt-4 space-y-4">
<h3 className="text-xs font-bold text-slate-400 uppercase tracking-wider mb-3">Thinking Process</h3>
<LevelSelect
label="Manager: Planning Strategy"
value={config.planningLevel}
onChange={(v) => setConfig({ ...config, planningLevel: v })}
desc="Controls the depth of initial query analysis and expert delegation."
/>
<LevelSelect
label="Experts: Execution Depth"
value={config.expertLevel}
onChange={(v) => setConfig({ ...config, expertLevel: v })}
desc="Determines how deeply each expert persona thinks about their specific task."
/>
<LevelSelect
label="Manager: Final Synthesis"
value={config.synthesisLevel}
onChange={(v) => setConfig({ ...config, synthesisLevel: v })}
desc="Controls the reasoning effort for aggregating results into the final answer."
/>
</div>
<GithubSection isOpen={isOpen} />
</div>
{/* Footer */}
<div className="p-4 bg-slate-50 border-t border-slate-100 flex justify-end shrink-0">
<button
onClick={onClose}
className="px-4 py-2 bg-blue-600 hover:bg-blue-700 text-white text-sm font-medium rounded-lg transition-colors shadow-sm"
className="px-6 py-2 bg-blue-600 hover:bg-blue-700 text-white text-sm font-medium rounded-lg transition-all shadow-md active:scale-95"
>
Done
</button>

View File

@@ -1,7 +1,9 @@
import React from 'react';
import { ChatMessage, AppState, AnalysisResult, ExpertResult } from '../types';
import ChatMessageItem from './ChatMessage';
import ProcessFlow from './ProcessFlow';
import Logo from './Logo';
interface ChatAreaProps {
messages: ChatMessage[];
@@ -26,12 +28,12 @@ const ChatArea = ({
<div className="flex-1 overflow-y-auto custom-scrollbar scroll-smooth">
<div className="pb-40">
{messages.length === 0 && appState === 'idle' && (
<div className="h-full flex flex-col items-center justify-center pt-32 opacity-50 px-4 text-center">
<div className="w-16 h-16 bg-gradient-to-br from-blue-500 to-purple-600 rounded-xl mb-6 shadow-lg rotate-3 flex items-center justify-center text-white font-bold text-2xl">
Pr
</div>
<p className="text-lg font-medium">Prisma</p>
<p className="text-sm">Ask a complex question to start.</p>
<div className="h-full flex flex-col items-center justify-center pt-32 opacity-70 px-4 text-center">
<Logo className="w-24 h-24 mb-6 drop-shadow-xl animate-pulse-slow" />
<p className="text-xl font-bold text-slate-900">Prisma</p>
<p className="text-sm text-slate-500 max-w-xs mt-2">
Deep multi-agent reasoning.
</p>
</div>
)}

View File

@@ -1,7 +1,9 @@
import React from 'react';
import { Settings, ChevronDown, Menu, History } from 'lucide-react';
import { Settings, ChevronDown, Menu } from 'lucide-react';
import { MODELS } from '../config';
import { ModelOption } from '../types';
import Logo from './Logo';
interface HeaderProps {
selectedModel: ModelOption;
@@ -13,7 +15,7 @@ interface HeaderProps {
const Header = ({ selectedModel, setSelectedModel, onOpenSettings, onToggleSidebar, onNewChat }: HeaderProps) => {
return (
<header className="sticky top-0 z-50 bg-white/80 backdrop-blur-md">
<header className="sticky top-0 z-50 bg-white/80 backdrop-blur-md border-b border-slate-100">
<div className="w-full px-4 h-16 flex items-center justify-between">
<div className="flex items-center gap-4">
<button
@@ -25,15 +27,13 @@ const Header = ({ selectedModel, setSelectedModel, onOpenSettings, onToggleSideb
</button>
<div
className="flex items-center gap-2 cursor-pointer group"
className="flex items-center gap-3 cursor-pointer group"
onClick={onNewChat}
title="Start New Chat"
>
<h1 className="font-bold text-lg tracking-tight text-slate-900 hidden sm:block group-hover:opacity-70 transition-opacity">
Gemini <span className="text-blue-600 font-light">Prisma</span>
</h1>
<h1 className="font-bold text-lg tracking-tight text-slate-900 sm:hidden group-hover:opacity-70 transition-opacity">
Prisma
<Logo className="w-8 h-8 transition-transform group-hover:scale-110" />
<h1 className="font-bold text-lg tracking-tight text-blue-600 group-hover:opacity-70 transition-opacity">
<span className="font-light">Prisma</span>
</h1>
</div>
</div>

View File

@@ -1,4 +1,4 @@
import React, { useRef, useLayoutEffect, useState } from 'react';
import React, { useRef, useLayoutEffect, useState, useEffect } from 'react';
import { ArrowUp, Square } from 'lucide-react';
import { AppState } from '../types';
@@ -34,6 +34,13 @@ const InputSection = ({ query, setQuery, onRun, onStop, appState }: InputSection
}
};
// Focus input on mount and when app becomes idle (e.g. after "New Chat" or completion)
useEffect(() => {
if (appState === 'idle' && textareaRef.current) {
textareaRef.current.focus();
}
}, [appState]);
// useLayoutEffect prevents visual flickering by adjusting height before paint
useLayoutEffect(() => {
adjustHeight();
@@ -70,6 +77,7 @@ const InputSection = ({ query, setQuery, onRun, onStop, appState }: InputSection
onCompositionEnd={() => setIsComposing(false)}
placeholder="Ask a complex question..."
rows={1}
autoFocus
className="flex-1 max-h-[200px] py-3 pl-4 pr-2 bg-transparent border-none focus:ring-0 resize-none outline-none text-slate-800 placeholder:text-slate-400 leading-relaxed custom-scrollbar text-base"
style={{ minHeight: '48px' }}
/>

105
prisma/components/Logo.tsx Normal file
View File

@@ -0,0 +1,105 @@
import React from 'react';
interface LogoProps {
className?: string;
}
const Logo = ({ className = "w-8 h-8" }: LogoProps) => {
return (
<svg
viewBox="0 0 600 600"
xmlns="http://www.w3.org/2000/svg"
className={className}
>
<g id="prism">
{/* Inner Triangle */}
<path
fill="none"
stroke="currentColor"
strokeWidth="16"
strokeLinecap="round"
strokeLinejoin="round"
d="M300 180 L200 420 L400 420 Z"
className="text-slate-700"
/>
{/* Connecting Struts */}
<path
fill="none"
stroke="currentColor"
strokeWidth="16"
strokeLinecap="round"
strokeLinejoin="round"
d="M300 50 L300 180"
className="text-slate-700"
/>
<path
fill="none"
stroke="currentColor"
strokeWidth="16"
strokeLinecap="round"
strokeLinejoin="round"
d="M100 480 L200 420"
className="text-slate-700"
/>
<path
fill="none"
stroke="currentColor"
strokeWidth="16"
strokeLinecap="round"
strokeLinejoin="round"
d="M500 480 L400 420"
className="text-slate-700"
/>
{/* Outer Triangle */}
<path
fill="none"
stroke="currentColor"
strokeWidth="16"
strokeLinecap="round"
strokeLinejoin="round"
d="M300 50 L100 480 L500 480 Z"
className="text-slate-700"
/>
</g>
<g id="beams">
{/* Input Beam */}
<line x1="0" y1="275" x2="195" y2="275" stroke="currentColor" strokeWidth="12" className="text-slate-700" />
{/* Blue Beam */}
<polyline
points="194,270 380,225 600,245"
fill="none"
stroke="#2563eb"
strokeWidth="12"
strokeLinejoin="round"
opacity="0.95"
/>
{/* Green Beam */}
<polyline
points="194,275 400,275 600,305"
fill="none"
stroke="#4ade80"
strokeWidth="12"
strokeLinejoin="round"
opacity="0.95"
/>
{/* Purple Beam */}
<polyline
points="194,280 420,325 600,370"
fill="none"
stroke="#9333ea"
strokeWidth="12"
strokeLinejoin="round"
opacity="0.95"
/>
</g>
</svg>
);
};
export default Logo;

View File

@@ -1,5 +1,7 @@
import React, { useState } from 'react';
import ReactMarkdown from 'react-markdown';
import remarkMath from 'remark-math';
import rehypeKatex from 'rehype-katex';
import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter';
import { vscDarkPlus } from 'react-syntax-highlighter/dist/esm/styles/prism';
import { Copy, Check, Terminal } from 'lucide-react';
@@ -70,14 +72,35 @@ const CodeBlock = ({ node, inline, className, children, ...props }: any) => {
};
const MarkdownRenderer = ({ content, className }: { content: string, className?: string }) => {
/**
* Pre-process content to handle common LaTeX delimiters from Gemini
* and optimize Markdown compatibility.
*/
const preprocessMarkdown = (text: string) => {
if (!text) return "";
return text
// Replace \[ ... \] with $$ ... $$
.replace(/\\\[/g, '$$$$')
.replace(/\\\]/g, '$$$$')
// Replace \( ... \) with $ ... $
.replace(/\\\(/g, '$$')
.replace(/\\\)/g, '$$')
// Fix potential spacing issues between bold marks and math delimiters
.replace(/\*\*(\$)/g, '** $1')
.replace(/(\$)\*\*/g, '$1 **');
};
return (
<div className={className}>
<ReactMarkdown
remarkPlugins={[remarkMath]}
rehypePlugins={[[rehypeKatex, { throwOnError: false, strict: false }]]}
components={{
code: CodeBlock
}}
>
{content}
{preprocessMarkdown(content)}
</ReactMarkdown>
</div>
);

View File

@@ -0,0 +1,62 @@
import React from 'react';
import { Key, Globe } from 'lucide-react';
import { AppConfig } from '../../types';

interface ApiSectionProps {
  config: AppConfig;
  setConfig: (c: AppConfig) => void;
}

// Styling shared by the API-key and base-URL text inputs below.
const INPUT_CLASS = "w-full bg-white border border-slate-200 text-slate-800 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block p-2.5 outline-none placeholder:text-slate-400";

/**
 * "API Connection" settings block: a toggle that enables a custom endpoint,
 * revealing inputs for a custom API key and base URL when switched on.
 */
const ApiSection = ({ config, setConfig }: ApiSectionProps) => {
  const customEnabled = config.enableCustomApi ?? false;

  // Merge a single field change back into the full config object.
  const update = (changes: Partial<AppConfig>) => setConfig({ ...config, ...changes });

  return (
    <div className="space-y-4 pt-1">
      <div className="flex items-center justify-between mb-2">
        <h3 className="text-xs font-bold text-slate-400 uppercase tracking-wider">API Connection</h3>
        {/* Master on/off switch for the custom connection settings */}
        <label className="relative inline-flex items-center cursor-pointer">
          <input
            type="checkbox"
            checked={customEnabled}
            onChange={(e) => update({ enableCustomApi: e.target.checked })}
            className="sr-only peer"
          />
          <div className="w-11 h-6 bg-slate-200 peer-focus:outline-none rounded-full peer peer-checked:after:translate-x-full peer-checked:after:border-white after:content-[''] after:absolute after:top-[2px] after:start-[2px] after:bg-white after:border-gray-300 after:border after:rounded-full after:h-5 after:w-5 after:transition-all peer-checked:bg-blue-600"></div>
        </label>
      </div>
      {customEnabled && (
        <div className="space-y-4 p-4 bg-slate-50 rounded-lg border border-slate-100 animate-in fade-in slide-in-from-top-1 duration-200">
          <div className="space-y-2">
            <label className="text-sm font-medium text-slate-700 flex items-center gap-2">
              <Key size={14} className="text-slate-400" />
              Custom API Key
            </label>
            <input
              type="password"
              placeholder="sk-..."
              value={config.customApiKey || ''}
              onChange={(e) => update({ customApiKey: e.target.value })}
              className={INPUT_CLASS}
            />
          </div>
          <div className="space-y-2">
            <label className="text-sm font-medium text-slate-700 flex items-center gap-2">
              <Globe size={14} className="text-slate-400" />
              Custom Base URL
            </label>
            <input
              type="text"
              placeholder="https://generativelanguage.googleapis.com"
              value={config.customBaseUrl || ''}
              onChange={(e) => update({ customBaseUrl: e.target.value })}
              className={INPUT_CLASS}
            />
          </div>
        </div>
      )}
    </div>
  );
};

export default ApiSection;

View File

@@ -0,0 +1,50 @@
import React, { useState, useEffect } from 'react';
import { Github, Star } from 'lucide-react';

/**
 * Settings-modal section linking to the project's GitHub repository.
 * Fetches the live star count each time the modal is opened.
 */
const GithubSection = ({ isOpen }: { isOpen: boolean }) => {
  const [stars, setStars] = useState<number | null>(null);

  useEffect(() => {
    if (!isOpen) return;
    // Guards against calling setStars after the modal closes/unmounts
    // while the request is still in flight.
    let cancelled = false;
    fetch('https://api.github.com/repos/yeahhe365/Prisma')
      .then(res => {
        // Without this check a 403 (rate limit) or 404 body is parsed and
        // silently yields no star count; fail loudly so it shows in the console.
        if (!res.ok) throw new Error(`GitHub API responded with ${res.status}`);
        return res.json();
      })
      .then(data => {
        if (!cancelled && data && typeof data.stargazers_count === 'number') {
          setStars(data.stargazers_count);
        }
      })
      .catch(err => console.error("Error fetching stars:", err));
    return () => { cancelled = true; };
  }, [isOpen]);

  return (
    <div className="border-t border-slate-100 pt-6">
      <a
        href="https://github.com/yeahhe365/Prisma"
        target="_blank"
        rel="noopener noreferrer"
        className="flex items-center justify-between p-3 rounded-lg border border-slate-200 bg-slate-50 hover:bg-slate-100 transition-colors group"
      >
        <div className="flex items-center gap-3">
          <div className="p-2 bg-slate-900 text-white rounded-lg group-hover:scale-110 transition-transform">
            <Github size={18} />
          </div>
          <div>
            <p className="text-sm font-semibold text-slate-800">yeahhe365 / Prisma</p>
            <p className="text-xs text-slate-500">Open source on GitHub</p>
          </div>
        </div>
        {/* Star badge only renders once a count has been fetched */}
        {stars !== null && (
          <div className="flex items-center gap-1 px-2 py-1 bg-white border border-slate-200 rounded-md shadow-sm">
            <Star size={14} className="text-amber-500 fill-amber-500" />
            <span className="text-xs font-bold text-slate-700">{stars.toLocaleString()}</span>
          </div>
        )}
      </a>
    </div>
  );
};

export default GithubSection;

View File

@@ -0,0 +1,42 @@
import React from 'react';
import { ChevronDown } from 'lucide-react';
import { ThinkingLevel } from '../../types';

interface LevelSelectProps {
  label: string;
  value: ThinkingLevel;
  validLevels: ThinkingLevel[];
  onChange: (v: ThinkingLevel) => void;
  desc: string;
}

/**
 * Labelled dropdown for choosing a thinking level, showing the current
 * value as a badge and a short description underneath.
 */
const LevelSelect = (props: LevelSelectProps) => {
  const { label, value, validLevels, onChange, desc } = props;

  // Display form of a level name: 'high' -> 'High'.
  const capitalise = (s: string) => s.charAt(0).toUpperCase() + s.slice(1);

  return (
    <div className="space-y-2">
      <div className="flex justify-between items-baseline">
        <label className="text-sm font-medium text-slate-700">{label}</label>
        <span className="text-xs text-slate-500 uppercase tracking-wider bg-slate-100 border border-slate-200 px-2 py-0.5 rounded">{value}</span>
      </div>
      <div className="relative">
        <select
          value={value}
          onChange={(e) => onChange(e.target.value as ThinkingLevel)}
          className="w-full bg-slate-50 border border-slate-200 text-slate-800 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block p-2.5 outline-none appearance-none cursor-pointer transition-colors hover:border-slate-300"
        >
          {validLevels.map(level => (
            <option key={level} value={level}>{capitalise(level)}</option>
          ))}
        </select>
        {/* Decorative chevron; the native select handles the click */}
        <ChevronDown className="absolute right-3 top-3 text-slate-400 pointer-events-none" size={14} />
      </div>
      <p className="text-xs text-slate-500 leading-relaxed">{desc}</p>
    </div>
  );
};

export default LevelSelect;

View File

@@ -0,0 +1,69 @@
import React from 'react';
import { RefreshCw } from 'lucide-react';
import { AppConfig, ModelOption, ThinkingLevel } from '../../types';
import { getValidThinkingLevels } from '../../config';
import LevelSelect from './LevelSelect';

interface ThinkingSectionProps {
  config: AppConfig;
  setConfig: (c: AppConfig) => void;
  model: ModelOption;
}

/**
 * "Thinking Process" settings: a recursive-refinement toggle plus one
 * level selector per pipeline stage (planning, experts, synthesis).
 */
const ThinkingSection = ({ config, setConfig, model }: ThinkingSectionProps) => {
  // Levels supported by the currently selected model.
  const levels = getValidThinkingLevels(model);

  // One entry per pipeline stage, rendered as a LevelSelect below.
  const stageControls = [
    {
      label: 'Manager: Planning Strategy',
      value: config.planningLevel,
      onChange: (v: ThinkingLevel) => setConfig({ ...config, planningLevel: v }),
      desc: 'Controls the depth of initial query analysis and expert delegation.'
    },
    {
      label: 'Experts: Execution Depth',
      value: config.expertLevel,
      onChange: (v: ThinkingLevel) => setConfig({ ...config, expertLevel: v }),
      desc: 'Determines how deeply each expert persona thinks about their specific task.'
    },
    {
      label: 'Manager: Final Synthesis',
      value: config.synthesisLevel,
      onChange: (v: ThinkingLevel) => setConfig({ ...config, synthesisLevel: v }),
      desc: 'Controls the reasoning effort for aggregating results into the final answer.'
    }
  ];

  return (
    <div className="border-t border-slate-100 pt-4 space-y-4">
      <div className="flex items-center justify-between">
        <h3 className="text-xs font-bold text-slate-400 uppercase tracking-wider">Thinking Process</h3>
      </div>
      {/* Recursive refinement opt-in */}
      <div className="p-3 bg-indigo-50 border border-indigo-100 rounded-lg flex items-center justify-between">
        <div className="flex items-center gap-2">
          <RefreshCw size={16} className="text-indigo-600" />
          <div>
            <p className="text-sm font-medium text-indigo-900">Recursive Refinement</p>
            <p className="text-[10px] text-indigo-600/80">Loops expert generation until satisfied.</p>
          </div>
        </div>
        <label className="relative inline-flex items-center cursor-pointer">
          <input
            type="checkbox"
            checked={config.enableRecursiveLoop ?? false}
            onChange={(e) => setConfig({ ...config, enableRecursiveLoop: e.target.checked })}
            className="sr-only peer"
          />
          <div className="w-9 h-5 bg-slate-300 peer-focus:outline-none rounded-full peer peer-checked:after:translate-x-full peer-checked:after:border-white after:content-[''] after:absolute after:top-[2px] after:start-[2px] after:bg-white after:border-gray-300 after:border after:rounded-full after:h-4 after:w-4 after:transition-all peer-checked:bg-indigo-600"></div>
        </label>
      </div>
      {stageControls.map(stage => (
        <LevelSelect
          key={stage.label}
          label={stage.label}
          value={stage.value}
          validLevels={levels}
          onChange={stage.onChange}
          desc={stage.desc}
        />
      ))}
    </div>
  );
};

export default ThinkingSection;

View File

@@ -1,4 +1,5 @@
import { ModelOption, ThinkingLevel } from './types';
import { ModelOption, ThinkingLevel, AppConfig } from './types';
export const MODELS: { value: ModelOption; label: string; desc: string }[] = [
{
@@ -13,6 +14,22 @@ export const MODELS: { value: ModelOption; label: string; desc: string }[] = [
},
];
// localStorage keys used to persist app state across visits.
export const STORAGE_KEYS = {
  SETTINGS: 'prisma-settings',            // serialized AppConfig object
  MODEL: 'prisma-selected-model',         // last selected ModelOption
  SESSION_ID: 'prisma-active-session-id'  // id of the chat session to restore on load
};

// Baseline configuration used on first run, or when cached settings
// fail to parse; cached values are spread over these defaults.
export const DEFAULT_CONFIG: AppConfig = {
  planningLevel: 'high',
  expertLevel: 'high',
  synthesisLevel: 'high',
  customApiKey: '',
  customBaseUrl: '',
  enableCustomApi: false,
  enableRecursiveLoop: false
};
export const getValidThinkingLevels = (model: ModelOption): ThinkingLevel[] => {
if (model === 'gemini-3-pro-preview') {
return ['low', 'high'];

228
prisma/hooks/useAppLogic.ts Normal file
View File

@@ -0,0 +1,228 @@
import { useState, useEffect, useCallback } from 'react';
import { ModelOption, AppConfig, ChatMessage } from '../types';
import { STORAGE_KEYS, DEFAULT_CONFIG, getValidThinkingLevels } from '../config';
import { useDeepThink } from './useDeepThink';
import { useChatSessions } from './useChatSessions';
import { setInterceptorUrl } from '../interceptor';
export const useAppLogic = () => {
// Session Management
const {
sessions,
currentSessionId,
setCurrentSessionId,
createSession,
updateSessionMessages,
deleteSession,
getSession
} = useChatSessions();
// UI State
const [isSidebarOpen, setIsSidebarOpen] = useState(true);
const [isSettingsOpen, setIsSettingsOpen] = useState(false);
// Active Chat State
const [messages, setMessages] = useState<ChatMessage[]>([]);
const [query, setQuery] = useState('');
// App Configuration with Persistence
const [selectedModel, setSelectedModel] = useState<ModelOption>(() => {
const cached = localStorage.getItem(STORAGE_KEYS.MODEL);
return (cached as ModelOption) || 'gemini-3-flash-preview';
});
const [config, setConfig] = useState<AppConfig>(() => {
const cached = localStorage.getItem(STORAGE_KEYS.SETTINGS);
if (cached) {
try {
return { ...DEFAULT_CONFIG, ...JSON.parse(cached) };
} catch (e) {
return DEFAULT_CONFIG;
}
}
return DEFAULT_CONFIG;
});
// Deep Think Engine
const {
appState,
managerAnalysis,
experts,
finalOutput,
synthesisThoughts,
runDynamicDeepThink,
stopDeepThink,
resetDeepThink,
processStartTime,
processEndTime
} = useDeepThink();
// Network Interceptor Sync
useEffect(() => {
if (config.enableCustomApi && config.customBaseUrl) {
setInterceptorUrl(config.customBaseUrl);
} else {
setInterceptorUrl(null);
}
return () => setInterceptorUrl(null);
}, [config.enableCustomApi, config.customBaseUrl]);
// Persistence Effects
useEffect(() => {
localStorage.setItem(STORAGE_KEYS.SETTINGS, JSON.stringify(config));
}, [config]);
useEffect(() => {
localStorage.setItem(STORAGE_KEYS.MODEL, selectedModel);
}, [selectedModel]);
useEffect(() => {
const cachedSessionId = localStorage.getItem(STORAGE_KEYS.SESSION_ID);
if (cachedSessionId && sessions.some(s => s.id === cachedSessionId)) {
setCurrentSessionId(cachedSessionId);
}
}, [sessions, setCurrentSessionId]);
useEffect(() => {
if (currentSessionId) {
localStorage.setItem(STORAGE_KEYS.SESSION_ID, currentSessionId);
} else {
localStorage.removeItem(STORAGE_KEYS.SESSION_ID);
}
}, [currentSessionId]);
// Handle Model Constraints
useEffect(() => {
const validLevels = getValidThinkingLevels(selectedModel);
setConfig(prev => {
const newPlanning = validLevels.includes(prev.planningLevel) ? prev.planningLevel : 'low';
const newExpert = validLevels.includes(prev.expertLevel) ? prev.expertLevel : 'low';
const newSynthesis = validLevels.includes(prev.synthesisLevel) ? prev.synthesisLevel : 'high';
if (newPlanning !== prev.planningLevel || newExpert !== prev.expertLevel || newSynthesis !== prev.synthesisLevel) {
return {
...prev,
planningLevel: newPlanning as any,
expertLevel: newExpert as any,
synthesisLevel: newSynthesis as any,
};
}
return prev;
});
}, [selectedModel]);
// Sync Messages when switching sessions
// Load the selected session's transcript (and its recorded model) into the
// active chat; with no session selected, show an empty conversation.
useEffect(() => {
if (currentSessionId) {
const session = getSession(currentSessionId);
if (session) {
setMessages(session.messages);
// Older sessions may predate the model field; fall back to the default.
setSelectedModel(session.model || 'gemini-3-flash-preview');
}
} else {
setMessages([]);
}
}, [currentSessionId, getSession]);
// Handle AI Completion
// When the engine reports 'completed', snapshot the run's artifacts (final
// text, manager analysis, expert panel, synthesis thoughts, timing) into a
// chat message, persist it to the current or a brand-new session, then reset
// the engine — which flips appState back to 'idle' and prevents this effect
// from firing again for the same run.
useEffect(() => {
if (appState === 'completed') {
const finalizedMessage: ChatMessage = {
id: `ai-${Date.now()}`,
role: 'model',
content: finalOutput,
analysis: managerAnalysis,
experts: experts,
synthesisThoughts: synthesisThoughts,
isThinking: false,
// Duration only when both timestamps were recorded for this run.
totalDuration: (processStartTime && processEndTime) ? (processEndTime - processStartTime) : undefined
};
const newMessages = [...messages, finalizedMessage];
setMessages(newMessages);
if (currentSessionId) {
updateSessionMessages(currentSessionId, newMessages);
} else {
createSession(newMessages, selectedModel);
}
resetDeepThink();
}
}, [appState, finalOutput, managerAnalysis, experts, synthesisThoughts, resetDeepThink, processStartTime, processEndTime, currentSessionId, messages, selectedModel, createSession, updateSessionMessages]);
// Submit the current query: append it as a user message, persist it to the
// current (or a newly created) session, kick off the deep-think run, and
// clear the input box.
const handleRun = useCallback(() => {
if (!query.trim()) return;
const userMsg: ChatMessage = {
id: `user-${Date.now()}`,
role: 'user',
content: query
};
const newMessages = [...messages, userMsg];
setMessages(newMessages);
let activeSessionId = currentSessionId;
if (!activeSessionId) {
activeSessionId = createSession(newMessages, selectedModel);
} else {
updateSessionMessages(activeSessionId, newMessages);
}
// The engine receives the PRE-append `messages` as history — presumably so
// the query isn't duplicated (it is passed separately as the first argument);
// confirm this is intentional.
runDynamicDeepThink(query, messages, selectedModel, config);
setQuery('');
}, [query, messages, currentSessionId, selectedModel, config, createSession, updateSessionMessages, runDynamicDeepThink]);
/**
 * Starts a fresh conversation: aborts any in-flight run, deactivates the
 * current session, clears the transcript and input draft, resets engine
 * state, and collapses the sidebar on narrow (mobile) viewports.
 */
const handleNewChat = useCallback(() => {
stopDeepThink();
setCurrentSessionId(null);
setMessages([]);
setQuery('');
resetDeepThink();
const isMobileViewport = window.innerWidth < 1024;
if (isMobileViewport) {
setIsSidebarOpen(false);
}
}, [stopDeepThink, setCurrentSessionId, resetDeepThink]);
/**
 * Switches the active conversation: aborts and resets any in-flight run,
 * activates the chosen session, and closes the sidebar on mobile widths.
 */
const handleSelectSession = useCallback((id: string) => {
stopDeepThink();
resetDeepThink();
setCurrentSessionId(id);
const isMobileViewport = window.innerWidth < 1024;
if (isMobileViewport) {
setIsSidebarOpen(false);
}
}, [stopDeepThink, resetDeepThink, setCurrentSessionId]);
/**
 * Deletes a session from the sidebar. If the deleted session was the active
 * one, falls back to a fresh empty chat via handleNewChat.
 */
const handleDeleteSession = useCallback((id: string, event: React.MouseEvent) => {
// Stop the click from bubbling up and re-selecting the row being deleted.
event.stopPropagation();
deleteSession(id);
const removedActiveSession = currentSessionId === id;
if (removedActiveSession) {
handleNewChat();
}
}, [deleteSession, currentSessionId, handleNewChat]);
// Public surface of the hook: session list, active chat state, model and
// configuration, UI toggles, live engine state, and user-action handlers.
return {
sessions,
currentSessionId,
messages,
query,
setQuery,
selectedModel,
setSelectedModel,
config,
setConfig,
isSidebarOpen,
setIsSidebarOpen,
isSettingsOpen,
setIsSettingsOpen,
appState,
managerAnalysis,
experts,
finalOutput,
// Expose the streaming synthesis reasoning alongside finalOutput; the
// engine produces it (and it is embedded into finalized messages above),
// but it was previously not re-exported, so the UI could not render the
// model's thoughts during the 'synthesizing' phase.
synthesisThoughts,
processStartTime,
processEndTime,
handleRun,
handleNewChat,
handleSelectSession,
handleDeleteSession,
stopDeepThink
};
};

View File

@@ -1,52 +1,37 @@
import { useState, useRef, useCallback } from 'react';
import { useCallback } from 'react';
import { getAI } from '../api';
import { getThinkingBudget } from '../config';
import { AppConfig, ModelOption, AppState, AnalysisResult, ExpertResult, ChatMessage } from '../types';
import { AppConfig, ModelOption, ExpertResult, ChatMessage } from '../types';
import { executeManagerAnalysis } from '../services/deepThink/manager';
import { executeManagerAnalysis, executeManagerReview } from '../services/deepThink/manager';
import { streamExpertResponse } from '../services/deepThink/expert';
import { streamSynthesisResponse } from '../services/deepThink/synthesis';
import { useDeepThinkState } from './useDeepThinkState';
export const useDeepThink = () => {
const [appState, setAppState] = useState<AppState>('idle');
const [managerAnalysis, setManagerAnalysis] = useState<AnalysisResult | null>(null);
const [experts, setExperts] = useState<ExpertResult[]>([]);
const [finalOutput, setFinalOutput] = useState('');
const [synthesisThoughts, setSynthesisThoughts] = useState('');
const {
appState, setAppState,
managerAnalysis, setManagerAnalysis,
experts, expertsDataRef,
finalOutput, setFinalOutput,
synthesisThoughts, setSynthesisThoughts,
processStartTime, setProcessStartTime,
processEndTime, setProcessEndTime,
abortControllerRef,
resetDeepThink,
stopDeepThink,
updateExpertAt,
setInitialExperts,
appendExperts
} = useDeepThinkState();
// Timing state
const [processStartTime, setProcessStartTime] = useState<number | null>(null);
const [processEndTime, setProcessEndTime] = useState<number | null>(null);
// Refs for data consistency during high-frequency streaming updates
const expertsDataRef = useRef<ExpertResult[]>([]);
const abortControllerRef = useRef<AbortController | null>(null);
const stopDeepThink = useCallback(() => {
if (abortControllerRef.current) {
abortControllerRef.current.abort();
abortControllerRef.current = null;
}
setAppState('idle');
setProcessEndTime(Date.now());
}, []);
const resetDeepThink = useCallback(() => {
setAppState('idle');
setManagerAnalysis(null);
setExperts([]);
expertsDataRef.current = [];
setFinalOutput('');
setSynthesisThoughts('');
setProcessStartTime(null);
setProcessEndTime(null);
abortControllerRef.current = null;
}, []);
// Helper: Orchestrate a single expert's lifecycle (Start -> Stream -> End)
/**
* Orchestrates a single expert's lifecycle (Start -> Stream -> End)
*/
const runExpertLifecycle = async (
expert: ExpertResult,
index: number,
globalIndex: number,
ai: any,
model: ModelOption,
context: string,
@@ -55,17 +40,10 @@ export const useDeepThink = () => {
): Promise<ExpertResult> => {
if (signal.aborted) return expert;
// 1. Mark as thinking
const startTime = Date.now();
expertsDataRef.current[index] = {
...expert,
status: 'thinking',
startTime
};
setExperts([...expertsDataRef.current]);
updateExpertAt(globalIndex, { status: 'thinking', startTime });
try {
// 2. Stream execution via service
let fullContent = "";
let fullThoughts = "";
@@ -79,44 +57,27 @@ export const useDeepThink = () => {
(textChunk, thoughtChunk) => {
fullContent += textChunk;
fullThoughts += thoughtChunk;
// Update Ref & State live
expertsDataRef.current[index] = {
...expertsDataRef.current[index],
thoughts: fullThoughts,
content: fullContent
};
setExperts([...expertsDataRef.current]);
updateExpertAt(globalIndex, { thoughts: fullThoughts, content: fullContent });
}
);
if (signal.aborted) return expertsDataRef.current[index];
if (signal.aborted) return expertsDataRef.current[globalIndex];
// 3. Mark as completed
expertsDataRef.current[index] = {
...expertsDataRef.current[index],
status: 'completed',
endTime: Date.now()
};
setExperts([...expertsDataRef.current]);
return expertsDataRef.current[index];
updateExpertAt(globalIndex, { status: 'completed', endTime: Date.now() });
return expertsDataRef.current[globalIndex];
} catch (error) {
console.error(`Expert ${expert.role} error:`, error);
if (!signal.aborted) {
expertsDataRef.current[index] = {
...expertsDataRef.current[index],
status: 'error',
content: "Failed to generate response.",
endTime: Date.now()
};
setExperts([...expertsDataRef.current]);
updateExpertAt(globalIndex, { status: 'error', content: "Failed to generate response.", endTime: Date.now() });
}
return expertsDataRef.current[index];
return expertsDataRef.current[globalIndex];
}
};
/**
* Main Orchestration logic
*/
const runDynamicDeepThink = async (
query: string,
history: ChatMessage[],
@@ -125,20 +86,16 @@ export const useDeepThink = () => {
) => {
if (!query.trim()) return;
// Reset previous run
if (abortControllerRef.current) {
abortControllerRef.current.abort();
}
if (abortControllerRef.current) abortControllerRef.current.abort();
abortControllerRef.current = new AbortController();
const signal = abortControllerRef.current.signal;
// Reset UI state
setAppState('analyzing');
setManagerAnalysis(null);
setExperts([]);
expertsDataRef.current = [];
setInitialExperts([]);
setFinalOutput('');
setSynthesisThoughts('');
setProcessStartTime(Date.now());
setProcessEndTime(null);
@@ -152,33 +109,8 @@ export const useDeepThink = () => {
`${msg.role === 'user' ? 'User' : 'Model'}: ${msg.content}`
).join('\n');
// --- 1. Initialize Primary Expert IMMEDIATELY ---
const primaryExpert: ExpertResult = {
id: 'expert-0',
role: "Primary Responder",
description: "Directly addresses the user's original query.",
temperature: 1,
prompt: query,
status: 'pending'
};
// --- Phase 1: Planning & Initial Experts ---
expertsDataRef.current = [primaryExpert];
setExperts([primaryExpert]);
// --- 2. Start Parallel Execution ---
// Task A: Run Primary Expert (Index 0)
const primaryExpertTask = runExpertLifecycle(
primaryExpert,
0,
ai,
model,
recentHistory,
getThinkingBudget(config.expertLevel, model),
signal
);
// Task B: Run Manager Analysis via Service
const managerTask = executeManagerAnalysis(
ai,
model,
@@ -187,60 +119,97 @@ export const useDeepThink = () => {
getThinkingBudget(config.planningLevel, model)
);
// Wait for Manager Analysis
const analysisJson = await managerTask;
const primaryExpert: ExpertResult = {
id: 'expert-0',
role: "Primary Responder",
description: "Directly addresses the user's original query.",
temperature: 1,
prompt: query,
status: 'pending',
round: 1
};
setInitialExperts([primaryExpert]);
const primaryTask = runExpertLifecycle(
primaryExpert, 0, ai, model, recentHistory,
getThinkingBudget(config.expertLevel, model), signal
);
const analysisJson = await managerTask;
if (signal.aborted) return;
setManagerAnalysis(analysisJson);
// --- 3. Initialize & Run Supplementary Experts ---
const generatedExperts: ExpertResult[] = analysisJson.experts.map((exp, idx) => ({
const round1Experts: ExpertResult[] = analysisJson.experts.map((exp, idx) => ({
...exp,
id: `expert-${idx + 1}`,
status: 'pending'
id: `expert-r1-${idx + 1}`,
status: 'pending',
round: 1
}));
// Update state: Keep Primary (0) and append new ones
const currentPrimary = expertsDataRef.current[0];
const allExperts = [currentPrimary, ...generatedExperts];
expertsDataRef.current = allExperts;
setExperts([...allExperts]);
appendExperts(round1Experts);
setAppState('experts_working');
// Task C: Run Supplementary Experts (Offset indices by 1)
const supplementaryTasks = generatedExperts.map((exp, idx) =>
runExpertLifecycle(
exp,
idx + 1,
ai,
model,
recentHistory,
getThinkingBudget(config.expertLevel, model),
signal
)
const round1Tasks = round1Experts.map((exp, idx) =>
runExpertLifecycle(exp, idx + 1, ai, model, recentHistory,
getThinkingBudget(config.expertLevel, model), signal)
);
// --- 4. Wait for ALL Experts ---
const allResults = await Promise.all([primaryExpertTask, ...supplementaryTasks]);
await Promise.all([primaryTask, ...round1Tasks]);
if (signal.aborted) return;
// --- Phase 2: Recursive Loop (Optional) ---
let roundCounter = 1;
const MAX_ROUNDS = 3;
let loopActive = config.enableRecursiveLoop ?? false;
while (loopActive && roundCounter < MAX_ROUNDS) {
if (signal.aborted) return;
setAppState('reviewing');
const reviewResult = await executeManagerReview(
ai, model, query, expertsDataRef.current,
getThinkingBudget(config.planningLevel, model)
);
if (signal.aborted) return;
if (reviewResult.satisfied) {
loopActive = false;
} else {
roundCounter++;
const nextRoundExperts = (reviewResult.refined_experts || []).map((exp, idx) => ({
...exp, id: `expert-r${roundCounter}-${idx}`, status: 'pending' as const, round: roundCounter
}));
if (nextRoundExperts.length === 0) {
loopActive = false;
break;
}
const startIndex = expertsDataRef.current.length;
appendExperts(nextRoundExperts);
setAppState('experts_working');
const nextRoundTasks = nextRoundExperts.map((exp, idx) =>
runExpertLifecycle(exp, startIndex + idx, ai, model, recentHistory,
getThinkingBudget(config.expertLevel, model), signal)
);
await Promise.all(nextRoundTasks);
}
}
if (signal.aborted) return;
// --- 5. Synthesis ---
// --- Phase 3: Synthesis ---
setAppState('synthesizing');
let fullFinalText = '';
let fullFinalThoughts = '';
await streamSynthesisResponse(
ai,
model,
query,
recentHistory,
allResults,
getThinkingBudget(config.synthesisLevel, model),
signal,
ai, model, query, recentHistory, expertsDataRef.current,
getThinkingBudget(config.synthesisLevel, model), signal,
(textChunk, thoughtChunk) => {
fullFinalText += textChunk;
fullFinalThoughts += thoughtChunk;
@@ -255,9 +224,7 @@ export const useDeepThink = () => {
}
} catch (e: any) {
if (signal.aborted) {
console.log('Operation aborted by user');
} else {
if (!signal.aborted) {
console.error(e);
setAppState('idle');
setProcessEndTime(Date.now());

View File

@@ -0,0 +1,73 @@
import { useState, useRef, useCallback } from 'react';
import { AppState, AnalysisResult, ExpertResult } from '../types';
/**
 * Owns all React state for a Deep Think run: lifecycle phase, manager
 * analysis, the expert panel, synthesis output and timing — plus refs used
 * for data consistency under high-frequency streaming updates (state
 * snapshots lag behind; `expertsDataRef` is always the source of truth).
 */
export const useDeepThinkState = () => {
  const [appState, setAppState] = useState<AppState>('idle');
  const [managerAnalysis, setManagerAnalysis] = useState<AnalysisResult | null>(null);
  const [experts, setExperts] = useState<ExpertResult[]>([]);
  const [finalOutput, setFinalOutput] = useState('');
  const [synthesisThoughts, setSynthesisThoughts] = useState('');
  // Timing state (wall-clock ms; null while no run is active)
  const [processStartTime, setProcessStartTime] = useState<number | null>(null);
  const [processEndTime, setProcessEndTime] = useState<number | null>(null);
  // Mutable mirror of `experts` written synchronously during streaming, and
  // the abort handle for the in-flight run.
  const expertsDataRef = useRef<ExpertResult[]>([]);
  const abortControllerRef = useRef<AbortController | null>(null);

  /** Clears every piece of run state back to its initial value. */
  const resetDeepThink = useCallback(() => {
    setAppState('idle');
    setManagerAnalysis(null);
    setExperts([]);
    expertsDataRef.current = [];
    setFinalOutput('');
    setSynthesisThoughts('');
    setProcessStartTime(null);
    setProcessEndTime(null);
    abortControllerRef.current = null;
  }, []);

  /** Aborts the in-flight run (if any) and stamps the end time. */
  const stopDeepThink = useCallback(() => {
    if (abortControllerRef.current) {
      abortControllerRef.current.abort();
      abortControllerRef.current = null;
    }
    setAppState('idle');
    setProcessEndTime(Date.now());
  }, []);

  /**
   * Updates a single expert (by index into `expertsDataRef`) with either a
   * partial patch or an updater function, then publishes a fresh array copy
   * to React state.
   */
  const updateExpertAt = useCallback((index: number, update: Partial<ExpertResult> | ((prev: ExpertResult) => ExpertResult)) => {
    const current = expertsDataRef.current[index];
    const next = typeof update === 'function' ? update(current) : { ...current, ...update };
    expertsDataRef.current[index] = next;
    setExperts([...expertsDataRef.current]);
  }, []);

  /**
   * Replaces the whole expert list. Defensive copies are taken so the ref,
   * the React state and the caller never alias the same array — previously
   * all three shared one reference, so a later in-place `updateExpertAt`
   * write mutated the array React was holding (and the caller's list).
   */
  const setInitialExperts = useCallback((initialList: ExpertResult[]) => {
    expertsDataRef.current = [...initialList];
    setExperts([...initialList]);
  }, []);

  /** Appends experts (e.g. a new review round) and publishes to state. */
  const appendExperts = useCallback((newList: ExpertResult[]) => {
    expertsDataRef.current = [...expertsDataRef.current, ...newList];
    setExperts([...expertsDataRef.current]);
  }, []);

  return {
    appState, setAppState,
    managerAnalysis, setManagerAnalysis,
    experts, setExperts, expertsDataRef,
    finalOutput, setFinalOutput,
    synthesisThoughts, setSynthesisThoughts,
    processStartTime, setProcessStartTime,
    processEndTime, setProcessEndTime,
    abortControllerRef,
    resetDeepThink,
    stopDeepThink,
    updateExpertAt,
    setInitialExperts,
    appendExperts
  };
};

View File

@@ -7,6 +7,7 @@
<script src="https://cdn.tailwindcss.com"></script>
<script src="https://cdn.tailwindcss.com?plugins=typography"></script>
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600;700&family=JetBrains+Mono:wght@400;700&display=swap" rel="stylesheet">
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/katex@0.16.8/dist/katex.min.css">
<style>
body {
font-family: 'Inter', sans-serif;
@@ -71,6 +72,8 @@
{
"imports": {
"react-markdown": "https://esm.sh/react-markdown@^10.1.0",
"remark-math": "https://esm.sh/remark-math@6.0.0",
"rehype-katex": "https://esm.sh/rehype-katex@7.0.0",
"@google/genai": "https://esm.sh/@google/genai@^1.34.0",
"lucide-react": "https://esm.sh/lucide-react@^0.562.0",
"react-dom/": "https://esm.sh/react-dom@^19.2.3/",

View File

@@ -1,9 +1,118 @@
/**
* Network Interceptor
*
* Disabled: Direct SDK configuration is now used for custom base URLs.
* Intercepts global fetch calls to redirect Gemini API requests
* from the default endpoint to a user-defined custom base URL.
*
* Uses Object.defineProperty to bypass "getter-only" restrictions on window.fetch
* in certain sandboxed or strict environments.
*/
export const setInterceptorUrl = (url: string | null) => {
// No-op
// Capture the pristine fetch implementation before any interception.
const originalFetch = window.fetch;
/**
 * Robustly applies a function to the window.fetch property.
 *
 * Tries `Object.defineProperty` first (bypasses getter-only definitions in
 * strict/sandboxed environments); falls back to plain assignment.
 */
const applyFetch = (fn: typeof window.fetch) => {
try {
Object.defineProperty(window, 'fetch', {
value: fn,
configurable: true,
writable: true,
enumerable: true
});
return;
} catch {
// defineProperty can throw on exotic window objects — fall through.
}
try {
(window as any).fetch = fn;
} catch (err) {
console.error("[Prisma] Critical: Failed to intercept fetch.", err);
}
};
/**
 * Enables or disables the global fetch interceptor.
 *
 * When `baseUrl` is set, any request aimed at the default Gemini host is
 * rewritten to target the custom base URL (protocol, host, and an optional
 * path prefix are substituted). Passing null restores the untouched
 * `window.fetch`.
 */
export const setInterceptorUrl = (baseUrl: string | null) => {
if (!baseUrl) {
// Restore original fetch when disabled
applyFetch(originalFetch);
return;
}
// Normalize the base URL
let normalizedBase = baseUrl.trim();
try {
// Basic validation
new URL(normalizedBase);
} catch (e) {
console.warn("[Prisma] Invalid Base URL provided:", normalizedBase);
return;
}
// Drop a single trailing slash so path joining below stays predictable.
if (normalizedBase.endsWith('/')) {
normalizedBase = normalizedBase.slice(0, -1);
}
// Replacement fetch: rewrites Gemini-bound requests, forwards all others.
const interceptedFetch = async (input: RequestInfo | URL, init?: RequestInit): Promise<Response> => {
// Extract a URL string from the three possible `input` shapes.
let urlString: string;
if (typeof input === 'string') {
urlString = input;
} else if (input instanceof URL) {
urlString = input.toString();
} else {
urlString = input.url;
}
const defaultHost = 'generativelanguage.googleapis.com';
// Check if the request is targeting the Google Gemini API
if (urlString.includes(defaultHost)) {
try {
const url = new URL(urlString);
const proxy = new URL(normalizedBase);
// Replace protocol and host
url.protocol = proxy.protocol;
url.host = proxy.host;
// Prepend proxy path if it exists (e.g., /v1/proxy)
if (proxy.pathname !== '/') {
const cleanPath = proxy.pathname.endsWith('/') ? proxy.pathname.slice(0, -1) : proxy.pathname;
// Ensure we don't double up slashes
url.pathname = cleanPath + url.pathname;
}
const newUrl = url.toString();
// Handle the different types of fetch inputs
if (input instanceof Request) {
// Re-create the request with the new URL and original properties.
// NOTE(review): `input.body` is a ReadableStream for streamed requests;
// re-sending it may require `duplex: 'half'` and fails if the stream was
// already consumed — confirm against the SDK's actual request shapes.
const requestData: RequestInit = {
method: input.method,
headers: input.headers,
body: input.body,
mode: input.mode,
credentials: input.credentials,
cache: input.cache,
redirect: input.redirect,
referrer: input.referrer,
integrity: input.integrity,
};
// Merge with init if provided
const mergedInit = { ...requestData, ...init };
return originalFetch(new URL(newUrl), mergedInit);
}
return originalFetch(newUrl, init);
} catch (e) {
console.error("[Prisma Interceptor] Failed to redirect request:", e);
// Fall through: on rewrite failure the request goes to the original URL.
}
}
return originalFetch(input, init);
};
applyFetch(interceptedFetch);
};

View File

@@ -1,5 +1,6 @@
{
"description": "Generated by Gemini.",
"description": "Prisma - Deep multi-agent reasoning application.",
"requestFramePermissions": [],
"name": "Prisma"
}

View File

@@ -10,6 +10,8 @@
},
"dependencies": {
"react-markdown": "^10.1.0",
"remark-math": "6.0.0",
"rehype-katex": "7.0.0",
"@google/genai": "^1.34.0",
"lucide-react": "^0.562.0",
"react-dom": "^19.2.3",

View File

@@ -1,7 +1,7 @@
import { Type } from "@google/genai";
import { ModelOption, AnalysisResult } from '../../types';
import { ModelOption, AnalysisResult, ExpertResult, ReviewResult } from '../../types';
import { cleanJsonString } from '../../utils';
import { MANAGER_SYSTEM_PROMPT } from './prompts';
import { MANAGER_SYSTEM_PROMPT, MANAGER_REVIEW_SYSTEM_PROMPT } from './prompts';
export const executeManagerAnalysis = async (
ai: any,
@@ -59,3 +59,66 @@ export const executeManagerAnalysis = async (
return { thought_process: "Direct processing.", experts: [] };
}
};
/**
 * Quality-gate between expert rounds: asks the manager model to review all
 * expert outputs produced so far and decide whether another refinement round
 * is required, returning structured JSON (`ReviewResult`).
 *
 * @param ai             GenAI client exposing `models.generateContent`.
 * @param model          Model id used for the review call.
 * @param query          The original user query being answered.
 * @param currentExperts Expert results accumulated across all rounds.
 * @param budget         Thinking-token budget for the review call.
 * @returns Parsed review verdict; on malformed JSON it falls back to
 *          `{ satisfied: true }` so a formatting error can never cause an
 *          infinite refinement loop.
 */
export const executeManagerReview = async (
  ai: any,
  model: ModelOption,
  query: string,
  currentExperts: ExpertResult[],
  budget: number
): Promise<ReviewResult> => {
  // Structured-output schema the model's reply is constrained to.
  const reviewSchema = {
    type: Type.OBJECT,
    properties: {
      satisfied: { type: Type.BOOLEAN, description: "True if the experts have fully answered the query with high quality." },
      critique: { type: Type.STRING, description: "If not satisfied, explain why and what is missing." },
      next_round_strategy: { type: Type.STRING, description: "Plan for the next iteration." },
      refined_experts: {
        type: Type.ARRAY,
        description: "The list of experts for the next round. Can be the same roles or new ones.",
        items: {
          type: Type.OBJECT,
          properties: {
            role: { type: Type.STRING },
            description: { type: Type.STRING },
            temperature: { type: Type.NUMBER },
            prompt: { type: Type.STRING }
          },
          required: ["role", "description", "temperature", "prompt"]
        }
      }
    },
    required: ["satisfied", "critique"]
  };
  // Cap each expert's contribution to keep the review prompt bounded.
  // Default `round` to 1 for experts created without one, matching the
  // synthesis prompt's `e.round || 1` convention (previously this printed
  // "Round undefined").
  const expertOutputs = currentExperts.map(e =>
    `--- [Round ${e.round ?? 1}] Expert: ${e.role} ---\nOutput: ${e.content?.slice(0, 2000)}...`
  ).join('\n\n');
  const content = `User Query: "${query}"\n\nCurrent Expert Outputs:\n${expertOutputs}`;
  const resp = await ai.models.generateContent({
    model: model,
    contents: content,
    config: {
      systemInstruction: MANAGER_REVIEW_SYSTEM_PROMPT,
      responseMimeType: "application/json",
      responseSchema: reviewSchema,
      thinkingConfig: {
        includeThoughts: true,
        thinkingBudget: budget
      }
    }
  });
  const rawText = resp.text || '{}';
  const cleanText = cleanJsonString(rawText);
  try {
    return JSON.parse(cleanText) as ReviewResult;
  } catch (e) {
    console.error("Review JSON Parse Error:", e);
    // Fallback: Assume satisfied if JSON fails to avoid infinite loops due to format errors
    return { satisfied: true, critique: "JSON Error, proceeding to synthesis." };
  }
};

View File

@@ -1,13 +1,35 @@
import { ExpertResult } from '../../types';
export const MANAGER_SYSTEM_PROMPT = `
You are the "Dynamic Planning Engine". Your goal is to analyze a user query (considering the conversation context) and decompose it into a set of specialized expert personas (2 to 4) who can collaboratively solve specific aspects of the problem.
export const MANAGER_SYSTEM_PROMPT = `You are the "Dynamic Planning Engine". Your goal is to analyze a user query (considering the conversation context) and decompose it into a set of specialized expert personas (2 to 4) who can collaboratively solve specific aspects of the problem.
Your job is to create SUPPLEMENTARY experts to aid the Primary Responder.
DO NOT create an expert that just repeats the user query. The Primary Responder is already doing that.
Focus on specialized angles: specific coding patterns, historical context, devil's advocate, security analyst, etc.
Your job is to create SUPPLEMENTARY experts
For each expert, you must assign a specific 'temperature' (0.0 to 2.0).
For each expert, you must assign a specific 'temperature' (0.0 to 2.0) based on the nature of their task:
* High temperature (1.0 - 2.0)
* Low temperature (0.0 - 0.4)
* Medium temperature (0.4 - 1.0)`;
// System instruction for the post-round QA review call. The model's reply is
// constrained elsewhere to a JSON schema matching `ReviewResult`
// (satisfied / critique / next_round_strategy / refined_experts).
export const MANAGER_REVIEW_SYSTEM_PROMPT = `
You are the "Quality Assurance & Orchestration Engine".
You have just received outputs from a team of AI experts.
Your goal is to evaluate if these outputs are sufficient to fully answer the user's complex request with high quality.
Criteria for "Not Satisfied":
- Conflicting information between experts that isn't resolved.
- Missing code implementation details or edge cases.
- Shallow analysis that doesn't go deep enough.
- Logic errors or hallucinations.
If you are NOT satisfied:
1. Provide a "critique" explaining exactly what is missing or wrong.
2. Define a "next_round_strategy" (briefly) to fix it.
3. Define the *refined_experts* for the next round. You can keep the same roles or create new ones. Their prompts MUST include the feedback/critique.
If you ARE satisfied:
1. Set satisfied to true.
2. Leave refined_experts empty.
`;
export const getExpertSystemInstruction = (role: string, description: string, context: string) => {
@@ -23,11 +45,11 @@ ${recentHistory}
Original User Query: "${query}"
Here are the analyses from your expert panel:
${expertResults.map(e => `--- Expert: ${e.role} (Temp: ${e.temperature}) ---\n${e.content || "(No output)"}\n`).join('\n')}
Here are the analyses from your expert panel (potentially across multiple rounds of refinement):
${expertResults.map(e => `--- [Round ${e.round || 1}] Expert: ${e.role} (Temp: ${e.temperature}) ---\n${e.content || "(No output)"}\n`).join('\n')}
Your Task:
1. Reflect on the experts' inputs. Identify conflicts and consensus.
1. Reflect on the experts' inputs. Identify conflicts, consensus, and evolution of thought across rounds.
2. Synthesize a final, comprehensive, and high-quality answer to the user's original query.
3. Do not simply summarize; integrate the knowledge into a cohesive response.
`;

View File

@@ -16,6 +16,7 @@ export type ExpertResult = ExpertConfig & {
thoughtProcess?: string;
startTime?: number;
endTime?: number;
round?: number; // Track which iteration this expert belongs to
};
export type AnalysisResult = {
@@ -23,7 +24,14 @@ export type AnalysisResult = {
experts: Omit<ExpertConfig, 'id'>[];
};
export type AppState = 'idle' | 'analyzing' | 'experts_working' | 'synthesizing' | 'completed';
// Structured verdict returned by the manager's inter-round review call.
export type ReviewResult = {
// True when the expert outputs fully answer the query; no further round needed.
satisfied: boolean;
// Explanation of gaps or defects when not satisfied.
critique: string;
// Brief plan for the next iteration (only meaningful when not satisfied).
next_round_strategy?: string;
// Expert roster for the next round; empty/absent when satisfied.
refined_experts?: Omit<ExpertConfig, 'id'>[];
};
export type AppState = 'idle' | 'analyzing' | 'experts_working' | 'reviewing' | 'synthesizing' | 'completed';
export type AppConfig = {
planningLevel: ThinkingLevel;
@@ -32,6 +40,7 @@ export type AppConfig = {
customApiKey?: string;
customBaseUrl?: string;
enableCustomApi?: boolean;
enableRecursiveLoop?: boolean; // New toggle for loop mode
};
export type ChatMessage = {