从何开始123
2026-01-08 02:16:42 +08:00
parent 83b4df1167
commit 54e9bf5906
31 changed files with 2201 additions and 0 deletions

prisma/.gitignore vendored Normal file

@@ -0,0 +1,24 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*
node_modules
dist
dist-ssr
*.local
# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?

prisma/App.tsx Normal file

@@ -0,0 +1,217 @@
import React, { useState, useEffect } from 'react';
import { ModelOption, AppConfig, ChatMessage } from './types';
import { getValidThinkingLevels } from './config';
import { useDeepThink } from './hooks/useDeepThink';
import { useChatSessions } from './hooks/useChatSessions';
import SettingsModal from './SettingsModal';
import Header from './components/Header';
import ChatInput from './components/InputSection';
import Sidebar from './components/Sidebar';
import ChatArea from './components/ChatArea';
const App = () => {
// Session Management
const {
sessions,
currentSessionId,
setCurrentSessionId,
createSession,
updateSessionMessages,
deleteSession,
getSession
} = useChatSessions();
// UI State
const [isSidebarOpen, setIsSidebarOpen] = useState(true);
const [isSettingsOpen, setIsSettingsOpen] = useState(false);
// Active Chat State
const [messages, setMessages] = useState<ChatMessage[]>([]);
const [query, setQuery] = useState('');
// App Configuration
const [selectedModel, setSelectedModel] = useState<ModelOption>('gemini-3-flash-preview');
const [config, setConfig] = useState<AppConfig>({
planningLevel: 'high',
expertLevel: 'high',
synthesisLevel: 'high',
customApiKey: '',
customBaseUrl: '',
enableCustomApi: false
});
// Deep Think Engine
const {
appState,
managerAnalysis,
experts,
finalOutput,
synthesisThoughts,
runDynamicDeepThink,
stopDeepThink,
resetDeepThink,
processStartTime,
processEndTime
} = useDeepThink();
// Handle Model Constraints
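// If the selected model does not support the currently configured thinking levels, fall back to
// levels it does support (see getValidThinkingLevels in config.ts).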
useEffect(() => {
const validLevels = getValidThinkingLevels(selectedModel);
setConfig(prev => ({
...prev,
planningLevel: validLevels.includes(prev.planningLevel) ? prev.planningLevel : 'low',
expertLevel: validLevels.includes(prev.expertLevel) ? prev.expertLevel : 'low',
synthesisLevel: validLevels.includes(prev.synthesisLevel) ? prev.synthesisLevel : 'high',
}));
}, [selectedModel]);
// Sync Messages when switching sessions
useEffect(() => {
if (currentSessionId) {
const session = getSession(currentSessionId);
if (session) {
setMessages(session.messages);
setSelectedModel(session.model || 'gemini-3-flash-preview');
}
} else {
setMessages([]);
}
}, [currentSessionId, getSession]);
// Handle AI Completion
useEffect(() => {
if (appState === 'completed') {
const finalizedMessage: ChatMessage = {
id: `ai-${Date.now()}`,
role: 'model',
content: finalOutput,
analysis: managerAnalysis,
experts: experts,
synthesisThoughts: synthesisThoughts,
isThinking: false,
totalDuration: (processStartTime && processEndTime) ? (processEndTime - processStartTime) : undefined
};
const newMessages = [...messages, finalizedMessage];
setMessages(newMessages);
if (currentSessionId) {
updateSessionMessages(currentSessionId, newMessages);
} else {
createSession(newMessages, selectedModel);
}
resetDeepThink();
}
}, [appState, finalOutput, managerAnalysis, experts, synthesisThoughts, resetDeepThink, processStartTime, processEndTime, currentSessionId, messages, selectedModel, createSession, updateSessionMessages]);
const handleRun = () => {
if (!query.trim()) return;
const userMsg: ChatMessage = {
id: `user-${Date.now()}`,
role: 'user',
content: query
};
const newMessages = [...messages, userMsg];
setMessages(newMessages); // Optimistic update
// Manage Session Persistence
let activeSessionId = currentSessionId;
if (!activeSessionId) {
activeSessionId = createSession(newMessages, selectedModel);
} else {
updateSessionMessages(activeSessionId, newMessages);
}
// Run AI
runDynamicDeepThink(query, messages, selectedModel, config);
setQuery('');
};
const handleNewChat = () => {
stopDeepThink();
setCurrentSessionId(null);
setMessages([]);
setQuery('');
resetDeepThink();
if (window.innerWidth < 1024) setIsSidebarOpen(false);
};
const handleSelectSession = (id: string) => {
stopDeepThink();
resetDeepThink();
setCurrentSessionId(id);
if (window.innerWidth < 1024) setIsSidebarOpen(false);
};
const handleDeleteSession = (id: string, e: React.MouseEvent) => {
e.stopPropagation();
deleteSession(id);
if (currentSessionId === id) {
handleNewChat();
}
};
return (
<div className="flex flex-col h-screen bg-white text-slate-800 font-sans selection:bg-blue-100 selection:text-blue-900">
<SettingsModal
isOpen={isSettingsOpen}
onClose={() => setIsSettingsOpen(false)}
config={config}
setConfig={setConfig}
model={selectedModel}
/>
<Header
selectedModel={selectedModel}
setSelectedModel={setSelectedModel}
onOpenSettings={() => setIsSettingsOpen(true)}
onToggleSidebar={() => setIsSidebarOpen(!isSidebarOpen)}
onNewChat={handleNewChat}
/>
<div className="flex flex-1 overflow-hidden relative">
<Sidebar
isOpen={isSidebarOpen}
onClose={() => setIsSidebarOpen(false)}
sessions={sessions}
currentSessionId={currentSessionId}
onSelectSession={handleSelectSession}
onNewChat={handleNewChat}
onDeleteSession={handleDeleteSession}
/>
<main className="flex-1 flex flex-col min-w-0 bg-white relative">
<ChatArea
messages={messages}
appState={appState}
managerAnalysis={managerAnalysis}
experts={experts}
finalOutput={finalOutput}
processStartTime={processStartTime}
processEndTime={processEndTime}
/>
{/* Floating Footer Input */}
<div className="absolute bottom-0 left-0 right-0 z-20 pointer-events-none p-4 pb-6 flex justify-center bg-gradient-to-t from-white via-white/80 to-transparent">
<div className="pointer-events-auto w-full max-w-4xl">
<ChatInput
query={query}
setQuery={setQuery}
onRun={handleRun}
onStop={stopDeepThink}
appState={appState}
/>
</div>
</div>
</main>
</div>
</div>
);
};
export default App;

prisma/ExpertCard.tsx Normal file

@@ -0,0 +1,163 @@
import React, { useState, useEffect } from 'react';
import { Bot, Loader2, CheckCircle2, X, BrainCircuit, MessageSquareText, Thermometer, Timer } from 'lucide-react';
import MarkdownRenderer from './components/MarkdownRenderer';
import { ExpertResult } from './types';
// Simple component to format milliseconds to ss.ms or mm:ss
const TimeDisplay = ({ start, end, status }: { start?: number, end?: number, status: string }) => {
const [elapsed, setElapsed] = useState(0);
useEffect(() => {
let interval: any;
// Update live timer
if (status === 'thinking' && start) {
// Calculate initial diff immediately
setElapsed(Date.now() - start);
interval = setInterval(() => {
setElapsed(Date.now() - start);
}, 100);
}
// Show final duration
else if ((status === 'completed' || status === 'error') && start && end) {
setElapsed(end - start);
}
else {
setElapsed(0);
}
return () => clearInterval(interval);
}, [status, start, end]);
if (!start) return null;
const seconds = (elapsed / 1000).toFixed(1);
return (
<div className="flex items-center gap-1 text-[10px] font-mono font-medium text-slate-500 bg-slate-100 px-1.5 py-0.5 rounded border border-slate-200">
<Timer size={10} />
<span>{seconds}s</span>
</div>
);
};
const ExpertCard = ({ expert }: { expert: ExpertResult }) => {
const [view, setView] = useState<'thoughts' | 'output'>('output');
const isWorking = expert.status === 'thinking';
const isDone = expert.status === 'completed';
const isPending = expert.status === 'pending';
const isError = expert.status === 'error';
// Auto-switch to thoughts if that's all we have so far
React.useEffect(() => {
if (isWorking && expert.thoughts && !expert.content) {
setView('thoughts');
} else if (expert.content && view === 'thoughts' && !expert.thoughts) {
setView('output');
}
}, [expert.thoughts, expert.content, isWorking]);
return (
<div className={`
relative flex flex-col h-80 rounded-xl border transition-all duration-300 shadow-sm overflow-hidden
${isWorking ? 'border-blue-400 bg-white shadow-[0_0_15px_rgba(59,130,246,0.1)]' : ''}
${isDone ? 'border-emerald-400 bg-white' : ''}
${isPending ? 'border-slate-200 bg-slate-50/50' : ''}
${isError ? 'border-red-400 bg-red-50' : ''}
`}>
{/* Header */}
<div className={`p-3 border-b flex items-start gap-3 ${isDone ? 'bg-emerald-50/30 border-emerald-100' : 'bg-slate-50/50 border-slate-100'}`}>
<div className={`mt-0.5 p-1.5 rounded-lg ${isWorking ? 'bg-blue-100 text-blue-600' : (isError ? 'bg-red-100 text-red-600' : 'bg-slate-200 text-slate-600')}`}>
<Bot size={18} />
</div>
<div className="flex-1 min-w-0">
<div className="flex items-center justify-between mb-0.5">
<h3 className="text-sm font-bold text-slate-800 leading-tight truncate mr-2">{expert.role}</h3>
{/* Timer for Expert */}
<TimeDisplay start={expert.startTime} end={expert.endTime} status={expert.status} />
</div>
<div className="flex items-center gap-2">
<p className="text-[10px] text-slate-500 truncate flex-1">{expert.description}</p>
{expert.temperature !== undefined && (
<div className="flex items-center gap-0.5 px-1.5 py-0.5 rounded-full bg-slate-200/50 border border-slate-200 text-[9px] font-mono text-slate-500 shrink-0" title={`Temperature: ${expert.temperature}`}>
<Thermometer size={8} />
<span>{expert.temperature}</span>
</div>
)}
</div>
</div>
<div className="flex-shrink-0 pt-0.5">
{isWorking && <Loader2 size={16} className="animate-spin text-blue-600" />}
{isDone && <CheckCircle2 size={16} className="text-emerald-600" />}
{isError && <X size={16} className="text-red-600" />}
</div>
</div>
{/* Tabs */}
{!isPending && (
<div className="flex border-b border-slate-100 text-[10px] font-medium uppercase tracking-wider">
<button
onClick={() => setView('thoughts')}
className={`flex-1 py-2 flex items-center justify-center gap-1.5 transition-colors ${view === 'thoughts' ? 'bg-slate-100 text-slate-800 border-b-2 border-blue-500' : 'text-slate-400 hover:text-slate-600 hover:bg-slate-50'}`}
>
<BrainCircuit size={12} />
Reasoning
</button>
<button
onClick={() => setView('output')}
className={`flex-1 py-2 flex items-center justify-center gap-1.5 transition-colors ${view === 'output' ? 'bg-white text-slate-800 border-b-2 border-emerald-500' : 'text-slate-400 hover:text-slate-600 hover:bg-slate-50'}`}
>
<MessageSquareText size={12} />
Output
</button>
</div>
)}
{/* Content Area */}
<div className="flex-1 overflow-y-auto p-4 custom-scrollbar bg-white">
{isPending ? (
<div className="h-full flex flex-col items-center justify-center text-slate-300">
<Bot size={32} className="mb-2 opacity-50" />
<span className="text-xs italic">Waiting for assignment...</span>
</div>
) : (
<>
{view === 'thoughts' && (
<div className="prose prose-xs max-w-none">
{expert.thoughts ? (
<MarkdownRenderer
content={expert.thoughts}
className="text-slate-500 font-mono text-[11px] leading-relaxed"
/>
) : (
<span className="italic opacity-50 text-[11px]">Initializing thought process...</span>
)}
{isWorking && <span className="inline-block w-1.5 h-3 ml-1 bg-blue-400 animate-pulse"/>}
</div>
)}
{view === 'output' && (
<div className="prose prose-sm max-w-none">
{expert.content ? (
<MarkdownRenderer
content={expert.content}
className="text-slate-700 text-xs leading-relaxed"
/>
) : (
<span className="text-slate-400 italic text-[11px]">
{isWorking ? "Formulating output..." : "No output generated."}
</span>
)}
{isWorking && !expert.content && <span className="inline-block w-1.5 h-3 ml-1 bg-emerald-400 animate-pulse"/>}
</div>
)}
</>
)}
</div>
</div>
);
};
export default ExpertCard;

prisma/ProcessNode.tsx Normal file

@@ -0,0 +1,61 @@
import React from 'react';
import { Loader2, CheckCircle2, ChevronUp, ChevronDown } from 'lucide-react';
interface ProcessNodeProps {
icon: React.ElementType;
title: string;
status: 'idle' | 'active' | 'completed';
children?: React.ReactNode;
isExpanded: boolean;
onToggle: () => void;
}
const ProcessNode = ({
icon: Icon,
title,
status,
children,
isExpanded,
onToggle
}: ProcessNodeProps) => {
const isActive = status === 'active';
const isCompleted = status === 'completed';
return (
<div className={`relative z-10 rounded-xl border ${isActive ? 'border-blue-400 bg-blue-50/50' : 'border-slate-200 bg-white'} transition-all duration-500 overflow-hidden shadow-sm`}>
<div
className="flex items-center justify-between p-4 cursor-pointer hover:bg-slate-50"
onClick={onToggle}
>
<div className="flex items-center gap-3">
<div className={`
w-8 h-8 rounded-full flex items-center justify-center transition-colors duration-300
${isActive ? 'bg-blue-600 text-white animate-pulse' : ''}
${isCompleted ? 'bg-green-600 text-white' : 'bg-slate-100 text-slate-400'}
`}>
{isActive ? <Loader2 size={16} className="animate-spin" /> : (isCompleted ? <CheckCircle2 size={16} /> : <Icon size={16} />)}
</div>
<div>
<h3 className={`text-sm font-semibold ${isActive ? 'text-blue-900' : (isCompleted ? 'text-slate-800' : 'text-slate-500')}`}>
{title}
</h3>
{isActive && <p className="text-xs text-blue-600">Processing...</p>}
</div>
</div>
{children && (
<div className="text-slate-400 hover:text-slate-700">
{isExpanded ? <ChevronUp size={18} /> : <ChevronDown size={18} />}
</div>
)}
</div>
{isExpanded && children && (
<div className="border-t border-slate-100 bg-slate-50/50 p-4 animate-in slide-in-from-top-2 duration-300">
{children}
</div>
)}
</div>
);
};
export default ProcessNode;

prisma/README.md Normal file

@@ -0,0 +1,20 @@
<div align="center">
<img width="1200" height="475" alt="GHBanner" src="https://github.com/user-attachments/assets/0aa67016-6eaf-458a-adb2-6e31a0763ed6" />
</div>
# Run and deploy your AI Studio app
This project contains everything you need to run the app locally.
View your app in AI Studio: https://ai.studio/apps/drive/1JWPILJ3NT10NR4eOeGiqBi6OZuRaEszO
## Run Locally
**Prerequisites:** Node.js
1. Install dependencies:
`npm install`
2. Set `GEMINI_API_KEY` in [.env.local](.env.local) to your Gemini API key (see the example below)
3. Run the app:
`npm run dev`
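For step 2, `.env.local` only needs that one variable (placeholder value shown):
```
GEMINI_API_KEY=your-gemini-api-key
```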

prisma/SettingsModal.tsx Normal file

@@ -0,0 +1,156 @@
import React from 'react';
import { Settings, X, ChevronDown, Key, Globe } from 'lucide-react';
import { AppConfig, ModelOption, ThinkingLevel } from './types';
import { getValidThinkingLevels } from './config';
const SettingsModal = ({
isOpen,
onClose,
config,
setConfig,
model
}: {
isOpen: boolean;
onClose: () => void;
config: AppConfig;
setConfig: (c: AppConfig) => void;
model: ModelOption;
}) => {
if (!isOpen) return null;
const validLevels = getValidThinkingLevels(model);
const LevelSelect = ({
label,
value,
onChange,
desc
}: {
label: string,
value: ThinkingLevel,
onChange: (v: ThinkingLevel) => void,
desc: string
}) => (
<div className="space-y-2">
<div className="flex justify-between items-baseline">
<label className="text-sm font-medium text-slate-700">{label}</label>
<span className="text-xs text-slate-500 uppercase tracking-wider bg-slate-100 border border-slate-200 px-2 py-0.5 rounded">{value}</span>
</div>
<div className="relative">
<select
value={value}
onChange={(e) => onChange(e.target.value as ThinkingLevel)}
className="w-full bg-slate-50 border border-slate-200 text-slate-800 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block p-2.5 outline-none appearance-none cursor-pointer"
>
{validLevels.map(l => (
<option key={l} value={l}>{l.charAt(0).toUpperCase() + l.slice(1)}</option>
))}
</select>
<ChevronDown className="absolute right-3 top-3 text-slate-400 pointer-events-none" size={14} />
</div>
<p className="text-xs text-slate-500">{desc}</p>
</div>
);
return (
<div className="fixed inset-0 z-[100] flex items-center justify-center p-4 bg-slate-900/40 backdrop-blur-sm animate-in fade-in duration-200">
<div className="bg-white border border-slate-200 rounded-xl w-full max-w-md shadow-2xl overflow-hidden animate-in zoom-in-95 duration-200 flex flex-col max-h-[90vh]">
<div className="flex items-center justify-between p-4 border-b border-slate-100 bg-slate-50/50">
<div className="flex items-center gap-2">
<Settings size={18} className="text-blue-600" />
<h2 className="font-semibold text-slate-800">Configuration</h2>
</div>
<button onClick={onClose} className="text-slate-400 hover:text-slate-700 transition-colors">
<X size={20} />
</button>
</div>
<div className="p-6 space-y-6 overflow-y-auto custom-scrollbar">
{/* Connection Settings */}
<div className="space-y-4 pt-1">
<div className="flex items-center justify-between mb-2">
<h3 className="text-xs font-bold text-slate-400 uppercase tracking-wider">API Connection</h3>
{/* Toggle Switch */}
<label className="relative inline-flex items-center cursor-pointer">
<input
type="checkbox"
checked={config.enableCustomApi ?? false}
onChange={(e) => setConfig({ ...config, enableCustomApi: e.target.checked })}
className="sr-only peer"
/>
<div className="w-11 h-6 bg-slate-200 peer-focus:outline-none rounded-full peer peer-checked:after:translate-x-full peer-checked:after:border-white after:content-[''] after:absolute after:top-[2px] after:start-[2px] after:bg-white after:border-gray-300 after:border after:rounded-full after:h-5 after:w-5 after:transition-all peer-checked:bg-blue-600"></div>
</label>
</div>
{config.enableCustomApi && (
<div className="space-y-4 p-4 bg-slate-50 rounded-lg border border-slate-100 animate-in fade-in slide-in-from-top-1 duration-200">
<div className="space-y-2">
<label className="text-sm font-medium text-slate-700 flex items-center gap-2">
<Key size={14} className="text-slate-400" />
Custom API Key
</label>
<input
type="password"
placeholder="sk-..."
value={config.customApiKey || ''}
onChange={(e) => setConfig({ ...config, customApiKey: e.target.value })}
className="w-full bg-white border border-slate-200 text-slate-800 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block p-2.5 outline-none placeholder:text-slate-400"
/>
</div>
<div className="space-y-2">
<label className="text-sm font-medium text-slate-700 flex items-center gap-2">
<Globe size={14} className="text-slate-400" />
Custom Base URL
</label>
<input
type="text"
placeholder="https://generativelanguage.googleapis.com"
value={config.customBaseUrl || ''}
onChange={(e) => setConfig({ ...config, customBaseUrl: e.target.value })}
className="w-full bg-white border border-slate-200 text-slate-800 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block p-2.5 outline-none placeholder:text-slate-400"
/>
</div>
</div>
)}
</div>
<div className="border-t border-slate-100 pt-4 space-y-4">
<h3 className="text-xs font-bold text-slate-400 uppercase tracking-wider mb-3">Thinking Process</h3>
<LevelSelect
label="Manager: Planning Strategy"
value={config.planningLevel}
onChange={(v) => setConfig({ ...config, planningLevel: v })}
desc="Controls the depth of initial query analysis and expert delegation."
/>
<LevelSelect
label="Experts: Execution Depth"
value={config.expertLevel}
onChange={(v) => setConfig({ ...config, expertLevel: v })}
desc="Determines how deeply each expert persona thinks about their specific task."
/>
<LevelSelect
label="Manager: Final Synthesis"
value={config.synthesisLevel}
onChange={(v) => setConfig({ ...config, synthesisLevel: v })}
desc="Controls the reasoning effort for aggregating results into the final answer."
/>
</div>
</div>
<div className="p-4 bg-slate-50 border-t border-slate-100 flex justify-end shrink-0">
<button
onClick={onClose}
className="px-4 py-2 bg-blue-600 hover:bg-blue-700 text-white text-sm font-medium rounded-lg transition-colors shadow-sm"
>
Done
</button>
</div>
</div>
</div>
);
};
export default SettingsModal;

prisma/api.ts Normal file

@@ -0,0 +1,13 @@
import { GoogleGenAI } from "@google/genai";
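// Client factory: prefer a user-supplied key/endpoint from Settings, otherwise fall back to the
// build-time API_KEY injected by vite.config.ts.
// Note: passing the endpoint as a top-level `baseUrl` follows the original code; depending on the
// @google/genai version, a custom endpoint may need to be supplied via `httpOptions.baseUrl` instead.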
export const getAI = (config?: { apiKey?: string; baseUrl?: string }) => {
const options: any = {
apiKey: config?.apiKey || process.env.API_KEY,
};
if (config?.baseUrl) {
options.baseUrl = config.baseUrl;
}
return new GoogleGenAI(options);
};

prisma/components/ChatArea.tsx Normal file

@@ -0,0 +1,90 @@
import React from 'react';
import { ChatMessage, AppState, AnalysisResult, ExpertResult } from '../types';
import ChatMessageItem from './ChatMessage';
import ProcessFlow from './ProcessFlow';
interface ChatAreaProps {
messages: ChatMessage[];
appState: AppState;
managerAnalysis: AnalysisResult | null;
experts: ExpertResult[];
finalOutput: string;
processStartTime: number | null;
processEndTime: number | null;
}
const ChatArea = ({
messages,
appState,
managerAnalysis,
experts,
finalOutput,
processStartTime,
processEndTime
}: ChatAreaProps) => {
return (
<div className="flex-1 overflow-y-auto custom-scrollbar scroll-smooth">
<div className="pb-40">
{messages.length === 0 && appState === 'idle' && (
<div className="h-full flex flex-col items-center justify-center pt-32 opacity-50 px-4 text-center">
<div className="w-16 h-16 bg-gradient-to-br from-blue-500 to-purple-600 rounded-xl mb-6 shadow-lg rotate-3 flex items-center justify-center text-white font-bold text-2xl">
Pr
</div>
<p className="text-lg font-medium">Prisma</p>
<p className="text-sm">Ask a complex question to start.</p>
</div>
)}
{/* History */}
{messages.map((msg, idx) => (
<ChatMessageItem
key={msg.id}
message={msg}
isLast={idx === messages.length - 1}
/>
))}
{/* Active Generation (Ghost Message) */}
{appState !== 'idle' && appState !== 'completed' && (
<div className="group w-full bg-transparent text-slate-800">
<div className="max-w-6xl mx-auto px-4 py-8 flex gap-6">
<div className="flex-shrink-0 w-8 h-8 rounded-full bg-white border border-blue-200 shadow-sm flex items-center justify-center">
<div className="animate-spin w-4 h-4 border-2 border-blue-600 border-t-transparent rounded-full"></div>
</div>
<div className="flex-1 min-w-0">
<div className="font-semibold text-sm text-slate-900 mb-2">Prisma</div>
{/* Active Thinking Process */}
<div className="mb-4 bg-white border border-blue-100 rounded-xl p-4 shadow-sm">
<ProcessFlow
appState={appState}
managerAnalysis={managerAnalysis}
experts={experts}
processStartTime={processStartTime}
processEndTime={processEndTime}
/>
</div>
{/* Streaming Output */}
{finalOutput && (
<div className="prose prose-slate max-w-none">
<ChatMessageItem
message={{
id: 'streaming',
role: 'model',
content: finalOutput,
isThinking: false
}}
/>
</div>
)}
</div>
</div>
</div>
)}
</div>
</div>
);
};
export default ChatArea;

prisma/components/ChatMessage.tsx Normal file

@@ -0,0 +1,103 @@
import React, { useState } from 'react';
import { User, Sparkles, ChevronDown, ChevronRight } from 'lucide-react';
import MarkdownRenderer from './MarkdownRenderer';
import ProcessFlow from './ProcessFlow';
import { ChatMessage } from '../types';
interface ChatMessageProps {
message: ChatMessage;
isLast?: boolean;
}
const ChatMessageItem = ({ message, isLast }: ChatMessageProps) => {
const isUser = message.role === 'user';
const [showThinking, setShowThinking] = useState(false);
// Check if there is any thinking data to show
const hasThinkingData = message.analysis || (message.experts && message.experts.length > 0);
return (
<div className={`group w-full text-slate-800 ${isUser ? 'bg-transparent' : 'bg-transparent'}`}>
<div className="max-w-6xl mx-auto px-4 py-8 flex gap-4 md:gap-6">
{/* Avatar */}
<div className="flex-shrink-0 flex flex-col relative items-end">
<div className={`w-8 h-8 rounded-full flex items-center justify-center border ${
isUser
? 'bg-slate-100 border-slate-200'
: 'bg-white border-blue-100 shadow-sm'
}`}>
{isUser ? (
<User size={16} className="text-slate-500" />
) : (
<Sparkles size={16} className="text-blue-600" />
)}
</div>
</div>
{/* Content */}
<div className="relative flex-1 overflow-hidden">
<div className="font-semibold text-sm text-slate-900 mb-1">
{isUser ? 'You' : 'Prisma'}
</div>
{/* Thinking Process Accordion (Only for AI) */}
{!isUser && hasThinkingData && (
<div className="mb-4">
<button
onClick={() => setShowThinking(!showThinking)}
className="flex items-center gap-2 text-xs font-medium text-slate-500 hover:text-slate-800 bg-slate-50 hover:bg-slate-100 border border-slate-200 rounded-lg px-3 py-2 transition-colors w-full md:w-auto"
>
<span>
{message.isThinking
? "Thinking..."
: (message.totalDuration
? `Thought for ${(message.totalDuration / 1000).toFixed(1)} seconds`
: "Reasoning Process")
}
</span>
{showThinking ? <ChevronDown size={14} /> : <ChevronRight size={14} />}
</button>
{showThinking && (
<div className="mt-3 p-4 bg-white border border-slate-200 rounded-xl shadow-sm animate-in fade-in slide-in-from-top-2">
<ProcessFlow
appState={message.isThinking ? 'experts_working' : 'completed'} // Visual approximation for history
managerAnalysis={message.analysis || null}
experts={message.experts || []}
defaultExpanded={true}
/>
</div>
)}
</div>
)}
{/* Text Content */}
<div className="prose prose-slate max-w-none prose-p:leading-7 prose-pre:bg-slate-900 prose-pre:text-slate-50">
{message.content ? (
<MarkdownRenderer content={message.content} />
) : (
message.isThinking && <span className="inline-block w-2 h-4 bg-blue-400 animate-pulse" />
)}
</div>
{/* Internal Monologue (Synthesis Thoughts) - Optional Footer */}
{!isUser && message.synthesisThoughts && (
<div className="mt-4 pt-4 border-t border-slate-100">
<details className="group/thoughts">
<summary className="cursor-pointer list-none text-xs text-slate-400 hover:text-slate-600 flex items-center gap-1">
<ChevronRight size={12} className="group-open/thoughts:rotate-90 transition-transform" />
Show Internal Monologue
</summary>
<div className="mt-2 text-xs font-mono text-slate-500 bg-slate-50 p-3 rounded border border-slate-100 whitespace-pre-wrap max-h-40 overflow-y-auto">
{message.synthesisThoughts}
</div>
</details>
</div>
)}
</div>
</div>
</div>
);
};
export default ChatMessageItem;

prisma/components/Header.tsx Normal file

@@ -0,0 +1,68 @@
import React from 'react';
import { Settings, ChevronDown, Menu } from 'lucide-react';
import { MODELS } from '../config';
import { ModelOption } from '../types';
interface HeaderProps {
selectedModel: ModelOption;
setSelectedModel: (model: ModelOption) => void;
onOpenSettings: () => void;
onToggleSidebar: () => void;
onNewChat: () => void;
}
const Header = ({ selectedModel, setSelectedModel, onOpenSettings, onToggleSidebar, onNewChat }: HeaderProps) => {
return (
<header className="sticky top-0 z-50 bg-white/80 backdrop-blur-md">
<div className="w-full px-4 h-16 flex items-center justify-between">
<div className="flex items-center gap-4">
<button
onClick={onToggleSidebar}
className="p-2 -ml-2 text-slate-500 hover:bg-slate-100 rounded-lg transition-colors"
title="Toggle History"
>
<Menu size={20} />
</button>
<div
className="flex items-center gap-2 cursor-pointer group"
onClick={onNewChat}
title="Start New Chat"
>
<h1 className="font-bold text-lg tracking-tight text-slate-900 hidden sm:block group-hover:opacity-70 transition-opacity">
Gemini <span className="text-blue-600 font-light">Prisma</span>
</h1>
<h1 className="font-bold text-lg tracking-tight text-slate-900 sm:hidden group-hover:opacity-70 transition-opacity">
Prisma
</h1>
</div>
</div>
<div className="flex items-center gap-2 sm:gap-3">
<div className="relative group">
<select
value={selectedModel}
onChange={(e) => setSelectedModel(e.target.value as ModelOption)}
className="relative bg-white border border-slate-200 text-slate-800 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-auto p-2.5 outline-none appearance-none cursor-pointer pl-3 pr-8 shadow-sm font-medium hover:bg-slate-50 transition-colors"
>
{MODELS.map(m => (
<option key={m.value} value={m.value}>{m.label}</option>
))}
</select>
<ChevronDown className="absolute right-3 top-3 text-slate-400 pointer-events-none group-hover:text-slate-600 transition-colors" size={14} />
</div>
<button
onClick={onOpenSettings}
className="p-2.5 rounded-lg bg-white border border-slate-200 hover:bg-slate-50 hover:border-slate-300 transition-colors text-slate-500 hover:text-slate-900 shadow-sm"
title="Configuration"
>
<Settings size={18} />
</button>
</div>
</div>
</header>
);
};
export default Header;

prisma/components/InputSection.tsx Normal file

@@ -0,0 +1,102 @@
import React, { useRef, useLayoutEffect, useState } from 'react';
import { ArrowUp, Square } from 'lucide-react';
import { AppState } from '../types';
interface InputSectionProps {
query: string;
setQuery: (q: string) => void;
onRun: () => void;
onStop: () => void;
appState: AppState;
}
const InputSection = ({ query, setQuery, onRun, onStop, appState }: InputSectionProps) => {
const textareaRef = useRef<HTMLTextAreaElement>(null);
const [isComposing, setIsComposing] = useState(false);
const adjustHeight = () => {
if (textareaRef.current) {
// Reset height to auto to allow shrinking when text is deleted
textareaRef.current.style.height = 'auto';
const scrollHeight = textareaRef.current.scrollHeight;
const maxHeight = 200;
// Set new height based on scrollHeight, capped at 200px
textareaRef.current.style.height = `${Math.min(scrollHeight, maxHeight)}px`;
// Only show scrollbar if we hit the max height limit
if (scrollHeight > maxHeight) {
textareaRef.current.style.overflowY = 'auto';
} else {
textareaRef.current.style.overflowY = 'hidden';
}
}
};
// useLayoutEffect prevents visual flickering by adjusting height before paint
useLayoutEffect(() => {
adjustHeight();
}, [query]);
const handleKeyDown = (e: React.KeyboardEvent) => {
// If user presses Enter without Shift
if (e.key === 'Enter' && !e.shiftKey) {
// robust check for IME composition (e.g. Chinese/Japanese inputs)
if (isComposing || (e.nativeEvent as any).isComposing) {
return;
}
e.preventDefault();
if (query.trim() && appState === 'idle') {
onRun();
}
}
};
const isRunning = appState !== 'idle';
return (
<div className="w-full">
{/* Container: Flex items-end ensures button stays at bottom right as text grows */}
<div className="w-full flex items-end p-2 bg-white/70 backdrop-blur-xl border border-slate-200/50 rounded-[26px] shadow-2xl focus-within:ring-2 focus-within:ring-blue-500/20 focus-within:bg-white/90 transition-colors duration-200">
<textarea
ref={textareaRef}
value={query}
onChange={(e) => setQuery(e.target.value)}
onKeyDown={handleKeyDown}
onCompositionStart={() => setIsComposing(true)}
onCompositionEnd={() => setIsComposing(false)}
placeholder="Ask a complex question..."
rows={1}
className="flex-1 max-h-[200px] py-3 pl-4 pr-2 bg-transparent border-none focus:ring-0 resize-none outline-none text-slate-800 placeholder:text-slate-400 leading-relaxed custom-scrollbar text-base"
style={{ minHeight: '48px' }}
/>
<div className="flex-shrink-0 pb-0.5 pr-0.5">
{isRunning ? (
<button
onClick={onStop}
className="flex items-center justify-center w-10 h-10 rounded-full bg-slate-900 text-white hover:bg-slate-700 transition-colors shadow-md"
>
<Square size={14} className="fill-current" />
</button>
) : (
<button
onClick={() => {
if (query.trim()) onRun();
}}
disabled={!query.trim()}
className="flex items-center justify-center w-10 h-10 rounded-full bg-blue-600 text-white hover:bg-blue-700 disabled:bg-slate-200 disabled:text-slate-400 transition-all shadow-md hover:scale-105 active:scale-95"
>
<ArrowUp size={20} />
</button>
)}
</div>
</div>
</div>
);
};
export default InputSection;

prisma/components/MarkdownRenderer.tsx Normal file

@@ -0,0 +1,86 @@
import React, { useState } from 'react';
import ReactMarkdown from 'react-markdown';
import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter';
import { vscDarkPlus } from 'react-syntax-highlighter/dist/esm/styles/prism';
import { Copy, Check, Terminal } from 'lucide-react';
const CodeBlock = ({ node, inline, className, children, ...props }: any) => {
const [copied, setCopied] = useState(false);
const match = /language-(\w+)/.exec(className || '');
const language = match ? match[1] : '';
// Inline code (e.g. `const x = 1`)
if (inline) {
return (
<code className={`${className} bg-slate-100 text-slate-800 px-1 py-0.5 rounded text-sm font-mono border border-slate-200`} {...props}>
{children}
</code>
);
}
const codeString = String(children).replace(/\n$/, '');
const handleCopy = () => {
navigator.clipboard.writeText(codeString);
setCopied(true);
setTimeout(() => setCopied(false), 2000);
};
return (
<div className="relative group my-4 rounded-lg overflow-hidden border border-slate-200 bg-[#1e1e1e] shadow-sm">
{/* Code Header */}
<div className="flex items-center justify-between px-3 py-2 bg-[#252526] border-b border-[#333] text-xs text-slate-400">
<div className="flex items-center gap-2">
<Terminal size={14} />
<span className="font-mono text-slate-300">{language || 'text'}</span>
</div>
<button
onClick={handleCopy}
className="flex items-center gap-1.5 hover:text-white transition-colors"
>
{copied ? <Check size={14} className="text-emerald-400" /> : <Copy size={14} />}
<span>{copied ? 'Copied!' : 'Copy'}</span>
</button>
</div>
{/* Syntax Highlighter */}
<div className="overflow-x-auto">
<SyntaxHighlighter
language={language}
style={vscDarkPlus}
customStyle={{
margin: 0,
padding: '1rem',
background: 'transparent',
fontSize: '0.875rem', // text-sm
lineHeight: '1.5',
fontFamily: 'JetBrains Mono, monospace',
}}
codeTagProps={{
style: { fontFamily: 'JetBrains Mono, monospace' }
}}
wrapLines={true}
{...props}
>
{codeString}
</SyntaxHighlighter>
</div>
</div>
);
};
const MarkdownRenderer = ({ content, className }: { content: string, className?: string }) => {
return (
<div className={className}>
<ReactMarkdown
components={{
code: CodeBlock
}}
>
{content}
</ReactMarkdown>
</div>
);
};
export default MarkdownRenderer;

prisma/components/ProcessFlow.tsx Normal file

@@ -0,0 +1,155 @@
import React, { useState, useEffect } from 'react';
import { Users, Zap, Brain, Loader2, CheckCircle2, Clock } from 'lucide-react';
import { AppState, AnalysisResult, ExpertResult } from '../types';
import ProcessNode from '../ProcessNode';
import ExpertCard from '../ExpertCard';
interface ProcessFlowProps {
appState: AppState;
managerAnalysis: AnalysisResult | null;
experts: ExpertResult[];
defaultExpanded?: boolean;
processStartTime?: number | null;
processEndTime?: number | null;
}
const GlobalTimer = ({ start, end, appState }: { start: number | null | undefined, end: number | null | undefined, appState: AppState }) => {
const [elapsed, setElapsed] = useState(0);
useEffect(() => {
let interval: any;
const isRunning = appState !== 'idle' && appState !== 'completed' && start;
if (isRunning) {
interval = setInterval(() => {
setElapsed(Date.now() - (start || 0));
}, 100);
} else if (appState === 'completed' && start && end) {
setElapsed(end - start);
} else if (appState === 'idle') {
setElapsed(0);
}
return () => clearInterval(interval);
}, [appState, start, end]);
if (!start) return null;
const seconds = (elapsed / 1000).toFixed(1);
return (
<div className="absolute right-0 top-0 flex items-center gap-1.5 bg-slate-800 text-slate-100 text-xs font-mono py-1 px-2 rounded-lg shadow-sm">
<Clock size={12} className="text-blue-400" />
<span>{seconds}s</span>
</div>
);
};
const ProcessFlow = ({ appState, managerAnalysis, experts, defaultExpanded = true, processStartTime, processEndTime }: ProcessFlowProps) => {
const [isExpanded, setIsExpanded] = useState(defaultExpanded);
// Status computation helpers
const isAnalysisDone = !!managerAnalysis;
const isSynthesisActive = appState === 'synthesizing';
const isComplete = appState === 'completed';
// Experts are active if ANY expert is currently thinking or pending
// We use this logic instead of just `appState` because now experts run IN PARALLEL with analysis
const hasExperts = experts.length > 0;
const anyExpertWorking = experts.some(e => e.status === 'thinking' || e.status === 'pending');
const allExpertsDone = experts.length > 0 && experts.every(e => e.status === 'completed' || e.status === 'error');
// Logic for Node Active States
// 1. Manager: active while we are still analyzing and no analysis has arrived; completed once analysis exists; idle otherwise.
const managerStatus = (appState === 'analyzing' && !managerAnalysis) ? 'active' : (isAnalysisDone ? 'completed' : 'idle');
// 2. Experts: Active if any is working, Completed if all are done, Idle otherwise
const expertsStatus = anyExpertWorking ? 'active' : (allExpertsDone ? 'completed' : 'idle');
return (
<div className="relative space-y-4 pt-4">
{/* Global Timer Overlay */}
<GlobalTimer start={processStartTime} end={processEndTime} appState={appState} />
<div className="relative space-y-2">
{/* Connector Line */}
<div className={`absolute left-8 top-2 bottom-2 w-0.5 bg-slate-100 transition-opacity duration-300 ${isExpanded ? 'opacity-100' : 'opacity-0'}`} />
{/* Node 1: Manager Analysis */}
<ProcessNode
icon={Users}
title="Planning Strategy"
status={managerStatus}
isExpanded={isExpanded}
onToggle={() => setIsExpanded(!isExpanded)}
>
<div className="space-y-3 pl-2">
{managerAnalysis ? (
<>
<p className="text-sm text-slate-600 italic border-l-2 border-slate-300 pl-3">
"{managerAnalysis.thought_process}"
</p>
<div className="flex flex-wrap gap-2 mt-2">
{managerAnalysis.experts?.map((exp, i) => (
<span key={i} className="text-[10px] bg-slate-50 text-slate-600 px-2 py-1 rounded border border-slate-200 font-medium uppercase tracking-wide">
{exp.role}
</span>
))}
</div>
</>
) : (
<div className="flex items-center gap-3 text-slate-500 text-sm">
<Loader2 size={14} className="animate-spin text-blue-500" />
<span>Analyzing request...</span>
</div>
)}
</div>
</ProcessNode>
{/* Node 2: Expert Pool */}
{hasExperts && (
<ProcessNode
icon={Zap}
title="Expert Execution"
status={expertsStatus}
isExpanded={isExpanded}
onToggle={() => setIsExpanded(!isExpanded)}
>
<div className="grid grid-cols-1 gap-3 pt-2">
{experts.map((expert) => (
<ExpertCard key={expert.id} expert={expert} />
))}
</div>
</ProcessNode>
)}
{/* Node 3: Synthesis */}
{(isSynthesisActive || isComplete) && (
<ProcessNode
icon={Brain}
title="Final Synthesis"
status={isSynthesisActive ? 'active' : (isComplete ? 'completed' : 'idle')}
isExpanded={isExpanded}
onToggle={() => setIsExpanded(!isExpanded)}
>
<div className="text-sm text-slate-600 pl-2">
{isSynthesisActive ? (
<div className="flex items-center gap-2">
<Loader2 className="animate-spin text-purple-600" size={14} />
<span>Synthesizing final answer...</span>
</div>
) : (
<div className="flex items-center gap-2 text-emerald-600">
<CheckCircle2 size={14} />
<span>Reasoning complete.</span>
</div>
)}
</div>
</ProcessNode>
)}
</div>
</div>
);
};
export default ProcessFlow;

prisma/components/Sidebar.tsx Normal file

@@ -0,0 +1,112 @@
import React from 'react';
import { Plus, MessageSquare, Trash2, X, History } from 'lucide-react';
import { ChatSession } from '../types';
interface SidebarProps {
isOpen: boolean;
onClose: () => void;
sessions: ChatSession[];
currentSessionId: string | null;
onSelectSession: (id: string) => void;
onNewChat: () => void;
onDeleteSession: (id: string, e: React.MouseEvent) => void;
}
const Sidebar = ({
isOpen,
onClose,
sessions,
currentSessionId,
onSelectSession,
onNewChat,
onDeleteSession
}: SidebarProps) => {
return (
<>
{/* Mobile Overlay */}
{isOpen && (
<div
className="fixed inset-0 bg-slate-900/20 backdrop-blur-sm z-30 lg:hidden"
onClick={onClose}
/>
)}
{/* Sidebar Container */}
<div className={`
fixed lg:static inset-y-0 left-0 z-40
w-[280px] bg-slate-50 border-r border-slate-200 transform transition-transform duration-300 ease-in-out
${isOpen ? 'translate-x-0' : '-translate-x-full lg:translate-x-0 lg:w-0 lg:border-r-0 lg:overflow-hidden'}
flex flex-col h-full
`}>
{/* Header */}
<div className="p-4 border-b border-slate-100 flex items-center justify-between shrink-0">
<div className="flex items-center gap-2 text-slate-700 font-semibold">
<History size={18} />
<span>History</span>
</div>
<button onClick={onClose} className="lg:hidden text-slate-400 hover:text-slate-600">
<X size={20} />
</button>
</div>
{/* New Chat Button */}
<div className="p-4 shrink-0">
<button
onClick={() => {
onNewChat();
if (window.innerWidth < 1024) onClose();
}}
className="w-full flex items-center justify-center gap-2 bg-blue-600 hover:bg-blue-700 text-white py-2.5 px-4 rounded-lg transition-colors shadow-sm font-medium text-sm"
>
<Plus size={16} />
New Chat
</button>
</div>
{/* Session List */}
<div className="flex-1 overflow-y-auto custom-scrollbar px-3 pb-4 space-y-1">
{sessions.length === 0 ? (
<div className="text-center py-10 text-slate-400 text-sm">
<p>No chat history yet.</p>
</div>
) : (
sessions.map((session) => (
<div
key={session.id}
onClick={() => {
onSelectSession(session.id);
if (window.innerWidth < 1024) onClose();
}}
className={`
group relative flex items-center gap-3 p-3 rounded-lg cursor-pointer transition-all
${currentSessionId === session.id
? 'bg-white shadow-sm border border-slate-200 text-slate-900'
: 'text-slate-600 hover:bg-slate-100 border border-transparent'}
`}
>
<MessageSquare size={16} className={`shrink-0 ${currentSessionId === session.id ? 'text-blue-500' : 'text-slate-400'}`} />
<div className="flex-1 min-w-0">
<h4 className="text-sm font-medium truncate pr-6">{session.title}</h4>
<span className="text-[10px] text-slate-400">
{new Date(session.createdAt).toLocaleDateString()}
</span>
</div>
<button
onClick={(e) => onDeleteSession(session.id, e)}
className="absolute right-2 top-1/2 -translate-y-1/2 p-1.5 rounded-md opacity-0 group-hover:opacity-100 hover:bg-red-50 hover:text-red-600 text-slate-400 transition-all"
title="Delete Chat"
>
<Trash2 size={14} />
</button>
</div>
))
)}
</div>
</div>
</>
);
};
export default Sidebar;

prisma/config.ts Normal file

@@ -0,0 +1,33 @@
import { ModelOption, ThinkingLevel } from './types';
export const MODELS: { value: ModelOption; label: string; desc: string }[] = [
{
value: 'gemini-3-flash-preview',
label: 'Gemini 3 Flash',
desc: 'Low latency, high throughput, dynamic thinking.'
},
{
value: 'gemini-3-pro-preview',
label: 'Gemini 3 Pro',
desc: 'Deep reasoning, complex tasks, higher intelligence.'
},
];
export const getValidThinkingLevels = (model: ModelOption): ThinkingLevel[] => {
if (model === 'gemini-3-pro-preview') {
return ['low', 'high'];
}
return ['minimal', 'low', 'medium', 'high'];
};
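// Maps a UI thinking level to a token budget for Gemini's `thinkingConfig.thinkingBudget`;
// 0 disables thinking entirely.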
export const getThinkingBudget = (level: ThinkingLevel, model: ModelOption): number => {
const isPro = model === 'gemini-3-pro-preview';
switch (level) {
case 'minimal': return 0; // Disables thinking
case 'low': return 2048;
case 'medium': return 8192;
case 'high': return isPro ? 32768 : 16384;
default: return 0;
}
};

prisma/hooks/useChatSessions.ts Normal file

@@ -0,0 +1,68 @@
import { useState, useEffect, useCallback } from 'react';
import { ChatSession, ChatMessage, ModelOption } from '../types';
export const useChatSessions = () => {
const [sessions, setSessions] = useState<ChatSession[]>(() => {
try {
// Migrate from old key (deepthink-sessions) if present, otherwise use new key (prisma-sessions)
const saved = localStorage.getItem('prisma-sessions') || localStorage.getItem('deepthink-sessions');
return saved ? JSON.parse(saved) : [];
} catch (e) {
return [];
}
});
const [currentSessionId, setCurrentSessionId] = useState<string | null>(null);
useEffect(() => {
localStorage.setItem('prisma-sessions', JSON.stringify(sessions));
}, [sessions]);
const getSession = useCallback((id: string) => {
return sessions.find(s => s.id === id);
}, [sessions]);
const createSession = useCallback((initialMessages: ChatMessage[], model: ModelOption) => {
const newId = Date.now().toString();
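// Derive the session title from the first message, truncated to 40 characters.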
const title = initialMessages[0].content.slice(0, 40) + (initialMessages[0].content.length > 40 ? '...' : '');
const newSession: ChatSession = {
id: newId,
title,
messages: initialMessages,
createdAt: Date.now(),
model
};
setSessions(prev => [newSession, ...prev]);
setCurrentSessionId(newId);
return newId;
}, []);
const updateSessionMessages = useCallback((sessionId: string, messages: ChatMessage[]) => {
setSessions(prev => prev.map(s =>
s.id === sessionId ? { ...s, messages } : s
));
}, []);
const deleteSession = useCallback((id: string) => {
setSessions(prev => prev.filter(s => s.id !== id));
if (currentSessionId === id) {
setCurrentSessionId(null);
}
}, [currentSessionId]);
const clearCurrentSession = useCallback(() => {
setCurrentSessionId(null);
}, []);
return {
sessions,
currentSessionId,
setCurrentSessionId,
createSession,
updateSessionMessages,
deleteSession,
clearCurrentSession,
getSession
};
};

prisma/hooks/useDeepThink.ts Normal file

@@ -0,0 +1,282 @@
import { useState, useRef, useCallback } from 'react';
import { getAI } from '../api';
import { getThinkingBudget } from '../config';
import { AppConfig, ModelOption, AppState, AnalysisResult, ExpertResult, ChatMessage } from '../types';
import { executeManagerAnalysis } from '../services/deepThink/manager';
import { streamExpertResponse } from '../services/deepThink/expert';
import { streamSynthesisResponse } from '../services/deepThink/synthesis';
export const useDeepThink = () => {
const [appState, setAppState] = useState<AppState>('idle');
const [managerAnalysis, setManagerAnalysis] = useState<AnalysisResult | null>(null);
const [experts, setExperts] = useState<ExpertResult[]>([]);
const [finalOutput, setFinalOutput] = useState('');
const [synthesisThoughts, setSynthesisThoughts] = useState('');
// Timing state
const [processStartTime, setProcessStartTime] = useState<number | null>(null);
const [processEndTime, setProcessEndTime] = useState<number | null>(null);
// Refs for data consistency during high-frequency streaming updates
const expertsDataRef = useRef<ExpertResult[]>([]);
const abortControllerRef = useRef<AbortController | null>(null);
const stopDeepThink = useCallback(() => {
if (abortControllerRef.current) {
abortControllerRef.current.abort();
abortControllerRef.current = null;
}
setAppState('idle');
setProcessEndTime(Date.now());
}, []);
const resetDeepThink = useCallback(() => {
setAppState('idle');
setManagerAnalysis(null);
setExperts([]);
expertsDataRef.current = [];
setFinalOutput('');
setSynthesisThoughts('');
setProcessStartTime(null);
setProcessEndTime(null);
abortControllerRef.current = null;
}, []);
// Helper: Orchestrate a single expert's lifecycle (Start -> Stream -> End)
const runExpertLifecycle = async (
expert: ExpertResult,
index: number,
ai: any,
model: ModelOption,
context: string,
budget: number,
signal: AbortSignal
): Promise<ExpertResult> => {
if (signal.aborted) return expert;
// 1. Mark as thinking
const startTime = Date.now();
expertsDataRef.current[index] = {
...expert,
status: 'thinking',
startTime
};
setExperts([...expertsDataRef.current]);
try {
// 2. Stream execution via service
let fullContent = "";
let fullThoughts = "";
await streamExpertResponse(
ai,
model,
expert,
context,
budget,
signal,
(textChunk, thoughtChunk) => {
fullContent += textChunk;
fullThoughts += thoughtChunk;
// Update Ref & State live
expertsDataRef.current[index] = {
...expertsDataRef.current[index],
thoughts: fullThoughts,
content: fullContent
};
setExperts([...expertsDataRef.current]);
}
);
if (signal.aborted) return expertsDataRef.current[index];
// 3. Mark as completed
expertsDataRef.current[index] = {
...expertsDataRef.current[index],
status: 'completed',
endTime: Date.now()
};
setExperts([...expertsDataRef.current]);
return expertsDataRef.current[index];
} catch (error) {
console.error(`Expert ${expert.role} error:`, error);
if (!signal.aborted) {
expertsDataRef.current[index] = {
...expertsDataRef.current[index],
status: 'error',
content: "Failed to generate response.",
endTime: Date.now()
};
setExperts([...expertsDataRef.current]);
}
return expertsDataRef.current[index];
}
};
const runDynamicDeepThink = async (
query: string,
history: ChatMessage[],
model: ModelOption,
config: AppConfig
) => {
if (!query.trim()) return;
// Reset previous run
if (abortControllerRef.current) {
abortControllerRef.current.abort();
}
abortControllerRef.current = new AbortController();
const signal = abortControllerRef.current.signal;
setAppState('analyzing');
setManagerAnalysis(null);
setExperts([]);
expertsDataRef.current = [];
setFinalOutput('');
setSynthesisThoughts('');
setProcessStartTime(Date.now());
setProcessEndTime(null);
const ai = getAI({
apiKey: config.enableCustomApi ? config.customApiKey : undefined,
baseUrl: (config.enableCustomApi && config.customBaseUrl) ? config.customBaseUrl : undefined
});
try {
const recentHistory = history.slice(-5).map(msg =>
`${msg.role === 'user' ? 'User' : 'Model'}: ${msg.content}`
).join('\n');
// --- 1. Initialize Primary Expert IMMEDIATELY ---
const primaryExpert: ExpertResult = {
id: 'expert-0',
role: "Primary Responder",
description: "Directly addresses the user's original query.",
temperature: 1,
prompt: query,
status: 'pending'
};
expertsDataRef.current = [primaryExpert];
setExperts([primaryExpert]);
// --- 2. Start Parallel Execution ---
// Task A: Run Primary Expert (Index 0)
const primaryExpertTask = runExpertLifecycle(
primaryExpert,
0,
ai,
model,
recentHistory,
getThinkingBudget(config.expertLevel, model),
signal
);
// Task B: Run Manager Analysis via Service
const managerTask = executeManagerAnalysis(
ai,
model,
query,
recentHistory,
getThinkingBudget(config.planningLevel, model)
);
// Wait for Manager Analysis
const analysisJson = await managerTask;
if (signal.aborted) return;
setManagerAnalysis(analysisJson);
// --- 3. Initialize & Run Supplementary Experts ---
const generatedExperts: ExpertResult[] = analysisJson.experts.map((exp, idx) => ({
...exp,
id: `expert-${idx + 1}`,
status: 'pending'
}));
// Update state: Keep Primary (0) and append new ones
const currentPrimary = expertsDataRef.current[0];
const allExperts = [currentPrimary, ...generatedExperts];
expertsDataRef.current = allExperts;
setExperts([...allExperts]);
setAppState('experts_working');
// Task C: Run Supplementary Experts (Offset indices by 1)
const supplementaryTasks = generatedExperts.map((exp, idx) =>
runExpertLifecycle(
exp,
idx + 1,
ai,
model,
recentHistory,
getThinkingBudget(config.expertLevel, model),
signal
)
);
// --- 4. Wait for ALL Experts ---
const allResults = await Promise.all([primaryExpertTask, ...supplementaryTasks]);
if (signal.aborted) return;
// --- 5. Synthesis ---
setAppState('synthesizing');
let fullFinalText = '';
let fullFinalThoughts = '';
await streamSynthesisResponse(
ai,
model,
query,
recentHistory,
allResults,
getThinkingBudget(config.synthesisLevel, model),
signal,
(textChunk, thoughtChunk) => {
fullFinalText += textChunk;
fullFinalThoughts += thoughtChunk;
setFinalOutput(fullFinalText);
setSynthesisThoughts(fullFinalThoughts);
}
);
if (!signal.aborted) {
setAppState('completed');
setProcessEndTime(Date.now());
}
} catch (e: any) {
if (signal.aborted) {
console.log('Operation aborted by user');
} else {
console.error(e);
setAppState('idle');
setProcessEndTime(Date.now());
}
} finally {
abortControllerRef.current = null;
}
};
return {
appState,
managerAnalysis,
experts,
finalOutput,
synthesisThoughts,
runDynamicDeepThink,
stopDeepThink,
resetDeepThink,
processStartTime,
processEndTime
};
};

prisma/index.html Normal file

@@ -0,0 +1,91 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Prisma</title>
<script src="https://cdn.tailwindcss.com"></script>
<script src="https://cdn.tailwindcss.com?plugins=typography"></script>
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600;700&family=JetBrains+Mono:wght@400;700&display=swap" rel="stylesheet">
<style>
body {
font-family: 'Inter', sans-serif;
background-color: #f8fafc; /* Slate 50 */
color: #1e293b; /* Slate 800 */
}
.mono {
font-family: 'JetBrains Mono', monospace;
}
/* Custom scrollbar */
::-webkit-scrollbar {
width: 8px;
height: 8px;
}
::-webkit-scrollbar-track {
background: #f1f5f9;
}
::-webkit-scrollbar-thumb {
background: #cbd5e1;
border-radius: 4px;
}
::-webkit-scrollbar-thumb:hover {
background: #94a3b8;
}
.animate-pulse-slow {
animation: pulse 3s cubic-bezier(0.4, 0, 0.6, 1) infinite;
}
@keyframes flow {
0% { background-position: 0% 50%; }
50% { background-position: 100% 50%; }
100% { background-position: 0% 50%; }
}
.gradient-border {
position: relative;
border-radius: 0.5rem;
background: #ffffff;
}
.gradient-border::before {
content: "";
position: absolute;
inset: -1px;
z-index: -1;
border-radius: 0.6rem;
background: linear-gradient(45deg, #3b82f6, #8b5cf6, #ec4899);
background-size: 200% 200%;
animation: flow 4s ease infinite;
opacity: 0.4;
}
.node-connector {
position: absolute;
width: 2px;
background-color: #e2e8f0;
z-index: 0;
}
</style>
<script type="importmap">
{
"imports": {
"react-markdown": "https://esm.sh/react-markdown@^10.1.0",
"@google/genai": "https://esm.sh/@google/genai@^1.34.0",
"lucide-react": "https://esm.sh/lucide-react@^0.562.0",
"react-dom/": "https://esm.sh/react-dom@^19.2.3/",
"react/": "https://esm.sh/react@^19.2.3/",
"react": "https://esm.sh/react@^19.2.3",
"react-syntax-highlighter": "https://esm.sh/react-syntax-highlighter@15.6.1?external=react,react-dom",
"react-syntax-highlighter/dist/esm/styles/prism": "https://esm.sh/react-syntax-highlighter@15.6.1/dist/esm/styles/prism?external=react,react-dom",
"react-syntax-highlighter/": "https://esm.sh/react-syntax-highlighter@^16.1.0/"
}
}
</script>
<link rel="stylesheet" href="/index.css">
</head>
<body>
<div id="root"></div>
<script type="module" src="/index.tsx"></script>
</body>
</html>

prisma/index.tsx Normal file

@@ -0,0 +1,6 @@
import React from 'react';
import { createRoot } from 'react-dom/client';
import App from './App';
const root = createRoot(document.getElementById('root')!);
root.render(<App />);

prisma/interceptor.ts Normal file

@@ -0,0 +1,9 @@
/**
* Network Interceptor
*
* Disabled: Direct SDK configuration is now used for custom base URLs.
*/
export const setInterceptorUrl = (url: string | null) => {
// No-op
};

prisma/metadata.json Normal file

@@ -0,0 +1,5 @@
{
"description": "Generated by Gemini.",
"requestFramePermissions": [],
"name": "Prisma"
}

prisma/package.json Normal file

@@ -0,0 +1,25 @@
{
"name": "prisma",
"private": true,
"version": "0.0.0",
"type": "module",
"scripts": {
"dev": "vite",
"build": "vite build",
"preview": "vite preview"
},
"dependencies": {
"react-markdown": "^10.1.0",
"@google/genai": "^1.34.0",
"lucide-react": "^0.562.0",
"react-dom": "^19.2.3",
"react": "^19.2.3",
"react-syntax-highlighter": "^16.1.0"
},
"devDependencies": {
"@types/node": "^22.14.0",
"@vitejs/plugin-react": "^5.0.0",
"typescript": "~5.8.2",
"vite": "^6.2.0"
}
}

prisma/services/deepThink/expert.ts Normal file

@@ -0,0 +1,43 @@
import { ModelOption, ExpertResult } from '../../types';
import { getExpertSystemInstruction } from './prompts';
export const streamExpertResponse = async (
ai: any,
model: ModelOption,
expert: ExpertResult,
context: string,
budget: number,
signal: AbortSignal,
onChunk: (text: string, thought: string) => void
): Promise<void> => {
const streamResult = await ai.models.generateContentStream({
model: model,
contents: expert.prompt,
config: {
systemInstruction: getExpertSystemInstruction(expert.role, expert.description, context),
temperature: expert.temperature,
thinkingConfig: {
thinkingBudget: budget,
includeThoughts: true
}
}
});
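// Each streamed chunk may mix reasoning parts (flagged with `part.thought`) and answer text;
// split them so the UI can render the expert's thoughts and output separately.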
for await (const chunk of streamResult) {
if (signal.aborted) break;
let chunkText = "";
let chunkThought = "";
if (chunk.candidates?.[0]?.content?.parts) {
for (const part of chunk.candidates[0].content.parts) {
if (part.thought) {
chunkThought += (part.text || "");
} else if (part.text) {
chunkText += part.text;
}
}
onChunk(chunkText, chunkThought);
}
}
};

prisma/services/deepThink/manager.ts Normal file

@@ -0,0 +1,61 @@
import { Type } from "@google/genai";
import { ModelOption, AnalysisResult } from '../../types';
import { cleanJsonString } from '../../utils';
import { MANAGER_SYSTEM_PROMPT } from './prompts';
export const executeManagerAnalysis = async (
ai: any,
model: ModelOption,
query: string,
context: string,
budget: number
): Promise<AnalysisResult> => {
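// Structured-output schema: the manager must return its reasoning plus a list of supplementary
// expert personas (role, description, temperature, prompt).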
const managerSchema = {
type: Type.OBJECT,
properties: {
thought_process: { type: Type.STRING, description: "Brief explanation of why these supplementary experts were chosen." },
experts: {
type: Type.ARRAY,
items: {
type: Type.OBJECT,
properties: {
role: { type: Type.STRING },
description: { type: Type.STRING },
temperature: { type: Type.NUMBER },
prompt: { type: Type.STRING }
},
required: ["role", "description", "temperature", "prompt"]
}
}
},
required: ["thought_process", "experts"]
};
const analysisResp = await ai.models.generateContent({
model: model,
contents: `Context:\n${context}\n\nCurrent Query: "${query}"`,
config: {
systemInstruction: MANAGER_SYSTEM_PROMPT,
responseMimeType: "application/json",
responseSchema: managerSchema,
thinkingConfig: {
includeThoughts: true,
thinkingBudget: budget
}
}
});
const rawText = analysisResp.text || '{}';
const cleanText = cleanJsonString(rawText);
try {
const analysisJson = JSON.parse(cleanText) as AnalysisResult;
if (!analysisJson.experts || !Array.isArray(analysisJson.experts)) {
throw new Error("Invalid schema structure");
}
return analysisJson;
} catch (e) {
console.error("JSON Parse Error:", e, rawText);
return { thought_process: "Direct processing.", experts: [] };
}
};

prisma/services/deepThink/prompts.ts Normal file

@@ -0,0 +1,34 @@
import { ExpertResult } from '../../types';
export const MANAGER_SYSTEM_PROMPT = `
You are the "Dynamic Planning Engine". Your goal is to analyze a user query (considering the conversation context) and decompose it into a set of specialized expert personas (2 to 4) who can collaboratively solve specific aspects of the problem.
Your job is to create SUPPLEMENTARY experts to aid the Primary Responder.
DO NOT create an expert that just repeats the user query. The Primary Responder is already doing that.
Focus on specialized angles: specific coding patterns, historical context, devil's advocate, security analyst, etc.
For each expert, you must assign a specific 'temperature' (0.0 to 2.0).
`;
export const getExpertSystemInstruction = (role: string, description: string, context: string) => {
return `You are a ${role}. ${description}. Context: ${context}`;
};
export const getSynthesisPrompt = (recentHistory: string, query: string, expertResults: ExpertResult[]) => {
return `
You are the "Synthesis Engine".
Context:
${recentHistory}
Original User Query: "${query}"
Here are the analyses from your expert panel:
${expertResults.map(e => `--- Expert: ${e.role} (Temp: ${e.temperature}) ---\n${e.content || "(No output)"}\n`).join('\n')}
Your Task:
1. Reflect on the experts' inputs. Identify conflicts and consensus.
2. Synthesize a final, comprehensive, and high-quality answer to the user's original query.
3. Do not simply summarize; integrate the knowledge into a cohesive response.
`;
};

prisma/services/deepThink/synthesis.ts Normal file

@@ -0,0 +1,44 @@
import { ModelOption, ExpertResult } from '../../types';
import { getSynthesisPrompt } from './prompts';
export const streamSynthesisResponse = async (
ai: any,
model: ModelOption,
query: string,
historyContext: string,
expertResults: ExpertResult[],
budget: number,
signal: AbortSignal,
onChunk: (text: string, thought: string) => void
): Promise<void> => {
const prompt = getSynthesisPrompt(historyContext, query, expertResults);
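// The synthesis prompt already embeds the conversation history, the original query and every
// expert's output (see prompts.ts), so it is sent directly as `contents`.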
const synthesisStream = await ai.models.generateContentStream({
model: model,
contents: prompt,
config: {
thinkingConfig: {
thinkingBudget: budget,
includeThoughts: true
}
}
});
for await (const chunk of synthesisStream) {
if (signal.aborted) break;
let chunkText = "";
let chunkThought = "";
if (chunk.candidates?.[0]?.content?.parts) {
for (const part of chunk.candidates[0].content.parts) {
if (part.thought) {
chunkThought += (part.text || "");
} else if (part.text) {
chunkText += part.text;
}
}
onChunk(chunkText, chunkThought);
}
}
};

prisma/tsconfig.json Normal file

@@ -0,0 +1,29 @@
{
"compilerOptions": {
"target": "ES2022",
"experimentalDecorators": true,
"useDefineForClassFields": false,
"module": "ESNext",
"lib": [
"ES2022",
"DOM",
"DOM.Iterable"
],
"skipLibCheck": true,
"types": [
"node"
],
"moduleResolution": "bundler",
"isolatedModules": true,
"moduleDetection": "force",
"allowJs": true,
"jsx": "react-jsx",
"paths": {
"@/*": [
"./*"
]
},
"allowImportingTsExtensions": true,
"noEmit": true
}
}

prisma/types.ts Normal file

@@ -0,0 +1,55 @@
export type ModelOption = 'gemini-3-flash-preview' | 'gemini-3-pro-preview';
export type ThinkingLevel = 'minimal' | 'low' | 'medium' | 'high';
export type ExpertConfig = {
id: string;
role: string;
description: string;
temperature: number;
prompt: string;
};
export type ExpertResult = ExpertConfig & {
status: 'pending' | 'thinking' | 'completed' | 'error';
content?: string;
thoughts?: string;
thoughtProcess?: string;
startTime?: number;
endTime?: number;
};
export type AnalysisResult = {
thought_process: string;
experts: Omit<ExpertConfig, 'id'>[];
};
export type AppState = 'idle' | 'analyzing' | 'experts_working' | 'synthesizing' | 'completed';
export type AppConfig = {
planningLevel: ThinkingLevel;
expertLevel: ThinkingLevel;
synthesisLevel: ThinkingLevel;
customApiKey?: string;
customBaseUrl?: string;
enableCustomApi?: boolean;
};
export type ChatMessage = {
id: string;
role: 'user' | 'model';
content: string;
// DeepThink Artifacts (only for model messages)
analysis?: AnalysisResult | null;
experts?: ExpertResult[];
synthesisThoughts?: string;
isThinking?: boolean;
totalDuration?: number; // Total time in ms
};
export type ChatSession = {
id: string;
title: string;
messages: ChatMessage[];
createdAt: number;
model: ModelOption;
};

prisma/utils.ts Normal file

@@ -0,0 +1,23 @@
/**
* Cleans a JSON string that might be wrapped in Markdown code blocks or contain explanatory text.
*/
export const cleanJsonString = (str: string) => {
if (!str) return "{}";
// 1. Try to find markdown JSON block
const markdownMatch = str.match(/```(?:json)?\s*(\{[\s\S]*?\})\s*```/);
if (markdownMatch && markdownMatch[1]) {
return markdownMatch[1].trim();
}
// 2. Try to find the first '{' and the last '}'
const firstOpen = str.indexOf('{');
const lastClose = str.lastIndexOf('}');
if (firstOpen !== -1 && lastClose !== -1 && lastClose > firstOpen) {
return str.substring(firstOpen, lastClose + 1);
}
// 3. Fallback: return original if it looks like JSON, otherwise empty object
return str.trim().startsWith('{') ? str : "{}";
};

prisma/vite.config.ts Normal file

@@ -0,0 +1,23 @@
import path from 'path';
import { defineConfig, loadEnv } from 'vite';
import react from '@vitejs/plugin-react';
export default defineConfig(({ mode }) => {
const env = loadEnv(mode, '.', '');
return {
server: {
port: 3000,
host: '0.0.0.0',
},
plugins: [react()],
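// Inline the Gemini key from .env.local at build time so client code can read process.env.API_KEY (see api.ts).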
define: {
'process.env.API_KEY': JSON.stringify(env.GEMINI_API_KEY),
'process.env.GEMINI_API_KEY': JSON.stringify(env.GEMINI_API_KEY)
},
resolve: {
alias: {
'@': path.resolve(__dirname, '.'),
}
}
};
});