diff --git a/prisma/.gitignore b/prisma/.gitignore new file mode 100644 index 0000000..a547bf3 --- /dev/null +++ b/prisma/.gitignore @@ -0,0 +1,24 @@ +# Logs +logs +*.log +npm-debug.log* +yarn-debug.log* +yarn-error.log* +pnpm-debug.log* +lerna-debug.log* + +node_modules +dist +dist-ssr +*.local + +# Editor directories and files +.vscode/* +!.vscode/extensions.json +.idea +.DS_Store +*.suo +*.ntvs* +*.njsproj +*.sln +*.sw? diff --git a/prisma/App.tsx b/prisma/App.tsx new file mode 100644 index 0000000..21190a3 --- /dev/null +++ b/prisma/App.tsx @@ -0,0 +1,217 @@ +import React, { useState, useEffect } from 'react'; +import { ModelOption, AppConfig, ChatMessage } from './types'; +import { getValidThinkingLevels } from './config'; +import { useDeepThink } from './hooks/useDeepThink'; +import { useChatSessions } from './hooks/useChatSessions'; + +import SettingsModal from './SettingsModal'; +import Header from './components/Header'; +import ChatInput from './components/InputSection'; +import Sidebar from './components/Sidebar'; +import ChatArea from './components/ChatArea'; + +const App = () => { + // Session Management + const { + sessions, + currentSessionId, + setCurrentSessionId, + createSession, + updateSessionMessages, + deleteSession, + getSession + } = useChatSessions(); + + // UI State + const [isSidebarOpen, setIsSidebarOpen] = useState(true); + const [isSettingsOpen, setIsSettingsOpen] = useState(false); + + // Active Chat State + const [messages, setMessages] = useState([]); + const [query, setQuery] = useState(''); + + // App Configuration + const [selectedModel, setSelectedModel] = useState('gemini-3-flash-preview'); + const [config, setConfig] = useState({ + planningLevel: 'high', + expertLevel: 'high', + synthesisLevel: 'high', + customApiKey: '', + customBaseUrl: '', + enableCustomApi: false + }); + + // Deep Think Engine + const { + appState, + managerAnalysis, + experts, + finalOutput, + synthesisThoughts, + runDynamicDeepThink, + stopDeepThink, + resetDeepThink, + processStartTime, + processEndTime + } = useDeepThink(); + + // Handle Model Constraints + useEffect(() => { + const validLevels = getValidThinkingLevels(selectedModel); + setConfig(prev => ({ + ...prev, + planningLevel: validLevels.includes(prev.planningLevel) ? prev.planningLevel : 'low', + expertLevel: validLevels.includes(prev.expertLevel) ? prev.expertLevel : 'low', + synthesisLevel: validLevels.includes(prev.synthesisLevel) ? prev.synthesisLevel : 'high', + })); + }, [selectedModel]); + + // Sync Messages when switching sessions + useEffect(() => { + if (currentSessionId) { + const session = getSession(currentSessionId); + if (session) { + setMessages(session.messages); + setSelectedModel(session.model || 'gemini-3-flash-preview'); + } + } else { + setMessages([]); + } + }, [currentSessionId, getSession]); + + // Handle AI Completion + useEffect(() => { + if (appState === 'completed') { + const finalizedMessage: ChatMessage = { + id: `ai-${Date.now()}`, + role: 'model', + content: finalOutput, + analysis: managerAnalysis, + experts: experts, + synthesisThoughts: synthesisThoughts, + isThinking: false, + totalDuration: (processStartTime && processEndTime) ? 
(processEndTime - processStartTime) : undefined + }; + + const newMessages = [...messages, finalizedMessage]; + setMessages(newMessages); + + if (currentSessionId) { + updateSessionMessages(currentSessionId, newMessages); + } else { + createSession(newMessages, selectedModel); + } + + resetDeepThink(); + } + }, [appState, finalOutput, managerAnalysis, experts, synthesisThoughts, resetDeepThink, processStartTime, processEndTime, currentSessionId, messages, selectedModel, createSession, updateSessionMessages]); + + const handleRun = () => { + if (!query.trim()) return; + + const userMsg: ChatMessage = { + id: `user-${Date.now()}`, + role: 'user', + content: query + }; + + const newMessages = [...messages, userMsg]; + setMessages(newMessages); // Optimistic update + + // Manage Session Persistence + let activeSessionId = currentSessionId; + if (!activeSessionId) { + activeSessionId = createSession(newMessages, selectedModel); + } else { + updateSessionMessages(activeSessionId, newMessages); + } + + // Run AI + runDynamicDeepThink(query, messages, selectedModel, config); + setQuery(''); + }; + + const handleNewChat = () => { + stopDeepThink(); + setCurrentSessionId(null); + setMessages([]); + setQuery(''); + resetDeepThink(); + if (window.innerWidth < 1024) setIsSidebarOpen(false); + }; + + const handleSelectSession = (id: string) => { + stopDeepThink(); + resetDeepThink(); + setCurrentSessionId(id); + if (window.innerWidth < 1024) setIsSidebarOpen(false); + }; + + const handleDeleteSession = (id: string, e: React.MouseEvent) => { + e.stopPropagation(); + deleteSession(id); + if (currentSessionId === id) { + handleNewChat(); + } + }; + + return ( +
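+      {/* Layout: settings modal, top header, collapsible sidebar, chat area, and a floating footer input */}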
+
+      <SettingsModal
+        isOpen={isSettingsOpen}
+        onClose={() => setIsSettingsOpen(false)}
+        config={config}
+        setConfig={setConfig}
+        model={selectedModel}
+      />
+
+      <Header
+        selectedModel={selectedModel}
+        setSelectedModel={setSelectedModel}
+        onOpenSettings={() => setIsSettingsOpen(true)}
+        onToggleSidebar={() => setIsSidebarOpen(!isSidebarOpen)}
+        onNewChat={handleNewChat}
+      />
+
+        <Sidebar
+          isOpen={isSidebarOpen}
+          onClose={() => setIsSidebarOpen(false)}
+          sessions={sessions}
+          currentSessionId={currentSessionId}
+          onSelectSession={handleSelectSession}
+          onNewChat={handleNewChat}
+          onDeleteSession={handleDeleteSession}
+        />
+
+ + + {/* Floating Footer Input */} +
+
+ +
+
+
+
+
+ ); +}; + +export default App; \ No newline at end of file diff --git a/prisma/ExpertCard.tsx b/prisma/ExpertCard.tsx new file mode 100644 index 0000000..a4c68bb --- /dev/null +++ b/prisma/ExpertCard.tsx @@ -0,0 +1,163 @@ +import React, { useState, useEffect } from 'react'; +import { Bot, Loader2, CheckCircle2, X, BrainCircuit, MessageSquareText, Thermometer, Timer } from 'lucide-react'; +import MarkdownRenderer from './components/MarkdownRenderer'; +import { ExpertResult } from './types'; + +// Simple component to format milliseconds to ss.ms or mm:ss +const TimeDisplay = ({ start, end, status }: { start?: number, end?: number, status: string }) => { + const [elapsed, setElapsed] = useState(0); + + useEffect(() => { + let interval: any; + + // Update live timer + if (status === 'thinking' && start) { + // Calculate initial diff immediately + setElapsed(Date.now() - start); + interval = setInterval(() => { + setElapsed(Date.now() - start); + }, 100); + } + // Show final duration + else if ((status === 'completed' || status === 'error') && start && end) { + setElapsed(end - start); + } + else { + setElapsed(0); + } + + return () => clearInterval(interval); + }, [status, start, end]); + + if (!start) return null; + + const seconds = (elapsed / 1000).toFixed(1); + return ( +
+ + {seconds}s +
+ ); +}; + +const ExpertCard = ({ expert }: { expert: ExpertResult }) => { + const [view, setView] = useState<'thoughts' | 'output'>('output'); + + const isWorking = expert.status === 'thinking'; + const isDone = expert.status === 'completed'; + const isPending = expert.status === 'pending'; + const isError = expert.status === 'error'; + + // Auto-switch to thoughts if that's all we have so far + React.useEffect(() => { + if (isWorking && expert.thoughts && !expert.content) { + setView('thoughts'); + } else if (expert.content && view === 'thoughts' && !expert.thoughts) { + setView('output'); + } + }, [expert.thoughts, expert.content, isWorking]); + + return ( +
+ {/* Header */} +
+
+ +
+
+
+

{expert.role}

+ + {/* Timer for Expert */} + +
+ +
+

{expert.description}

+ {expert.temperature !== undefined && ( +
+ + {expert.temperature} +
+ )} +
+
+
+ {isWorking && } + {isDone && } + {isError && } +
+
+ + {/* Tabs */} + {!isPending && ( +
+ + +
+ )} + + {/* Content Area */} +
+ {isPending ? ( +
+ + Waiting for assignment... +
+ ) : ( + <> + {view === 'thoughts' && ( +
+ {expert.thoughts ? ( + + ) : ( + Initializing thought process... + )} + {isWorking && } +
+ )} + + {view === 'output' && ( +
+ {expert.content ? ( + + ) : ( + + {isWorking ? "Formulating output..." : "No output generated."} + + )} + {isWorking && !expert.content && } +
+ )} + + )} +
+
+ ); +}; + +export default ExpertCard; \ No newline at end of file diff --git a/prisma/ProcessNode.tsx b/prisma/ProcessNode.tsx new file mode 100644 index 0000000..2257b8f --- /dev/null +++ b/prisma/ProcessNode.tsx @@ -0,0 +1,61 @@ +import React from 'react'; +import { Loader2, CheckCircle2, ChevronUp, ChevronDown } from 'lucide-react'; + +interface ProcessNodeProps { + icon: React.ElementType; + title: string; + status: 'idle' | 'active' | 'completed'; + children?: React.ReactNode; + isExpanded: boolean; + onToggle: () => void; +} + +const ProcessNode = ({ + icon: Icon, + title, + status, + children, + isExpanded, + onToggle +}: ProcessNodeProps) => { + const isActive = status === 'active'; + const isCompleted = status === 'completed'; + + return ( +
+
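+        {/* Status icon (spinner while active, check when completed), node title, and a "Processing..." badge; onToggle expands or collapses the detail area */}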
+
+
+ {isActive ? : (isCompleted ? : )} +
+
+

+ {title} +

+ {isActive &&

Processing...

} +
+
+ {children && ( +
+ {isExpanded ? : } +
+ )} +
+ + {isExpanded && children && ( +
+ {children} +
+ )} +
+ ); +}; + +export default ProcessNode; \ No newline at end of file diff --git a/prisma/README.md b/prisma/README.md new file mode 100644 index 0000000..012335e --- /dev/null +++ b/prisma/README.md @@ -0,0 +1,20 @@ +
+GHBanner +
+ +# Run and deploy your AI Studio app + +This contains everything you need to run your app locally. + +View your app in AI Studio: https://ai.studio/apps/drive/1JWPILJ3NT10NR4eOeGiqBi6OZuRaEszO + +## Run Locally + +**Prerequisites:** Node.js + + +1. Install dependencies: + `npm install` +2. Set the `GEMINI_API_KEY` in [.env.local](.env.local) to your Gemini API key +3. Run the app: + `npm run dev` diff --git a/prisma/SettingsModal.tsx b/prisma/SettingsModal.tsx new file mode 100644 index 0000000..c50383c --- /dev/null +++ b/prisma/SettingsModal.tsx @@ -0,0 +1,156 @@ +import React from 'react'; +import { Settings, X, ChevronDown, Key, Globe } from 'lucide-react'; +import { AppConfig, ModelOption, ThinkingLevel } from './types'; +import { getValidThinkingLevels } from './config'; + +const SettingsModal = ({ + isOpen, + onClose, + config, + setConfig, + model +}: { + isOpen: boolean; + onClose: () => void; + config: AppConfig; + setConfig: (c: AppConfig) => void; + model: ModelOption; +}) => { + if (!isOpen) return null; + + const validLevels = getValidThinkingLevels(model); + + const LevelSelect = ({ + label, + value, + onChange, + desc + }: { + label: string, + value: ThinkingLevel, + onChange: (v: ThinkingLevel) => void, + desc: string + }) => ( +
+
+ + {value} +
+
+ + +
+

{desc}

+
+ ); + + return ( +
+
+
+
+ +

Configuration

+
+ +
+ +
+ {/* Connection Settings */} +
+
+

API Connection

+ {/* Toggle Switch */} + +
+ + {config.enableCustomApi && ( +
+
+ + setConfig({ ...config, customApiKey: e.target.value })} + className="w-full bg-white border border-slate-200 text-slate-800 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block p-2.5 outline-none placeholder:text-slate-400" + /> +
+ +
+ + setConfig({ ...config, customBaseUrl: e.target.value })} + className="w-full bg-white border border-slate-200 text-slate-800 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block p-2.5 outline-none placeholder:text-slate-400" + /> +
+
+ )} +
+ +
+

Thinking Process

+ setConfig({ ...config, planningLevel: v })} + desc="Controls the depth of initial query analysis and expert delegation." + /> + + setConfig({ ...config, expertLevel: v })} + desc="Determines how deeply each expert persona thinks about their specific task." + /> + + setConfig({ ...config, synthesisLevel: v })} + desc="Controls the reasoning effort for aggregating results into the final answer." + /> +
+
+ +
+ +
+
+
+ ); +}; + +export default SettingsModal; \ No newline at end of file diff --git a/prisma/api.ts b/prisma/api.ts new file mode 100644 index 0000000..5511de7 --- /dev/null +++ b/prisma/api.ts @@ -0,0 +1,13 @@ +import { GoogleGenAI } from "@google/genai"; + +export const getAI = (config?: { apiKey?: string; baseUrl?: string }) => { + const options: any = { + apiKey: config?.apiKey || process.env.API_KEY, + }; + + if (config?.baseUrl) { + options.baseUrl = config.baseUrl; + } + + return new GoogleGenAI(options); +}; \ No newline at end of file diff --git a/prisma/components/ChatArea.tsx b/prisma/components/ChatArea.tsx new file mode 100644 index 0000000..8dac15e --- /dev/null +++ b/prisma/components/ChatArea.tsx @@ -0,0 +1,90 @@ +import React from 'react'; +import { ChatMessage, AppState, AnalysisResult, ExpertResult } from '../types'; +import ChatMessageItem from './ChatMessage'; +import ProcessFlow from './ProcessFlow'; + +interface ChatAreaProps { + messages: ChatMessage[]; + appState: AppState; + managerAnalysis: AnalysisResult | null; + experts: ExpertResult[]; + finalOutput: string; + processStartTime: number | null; + processEndTime: number | null; +} + +const ChatArea = ({ + messages, + appState, + managerAnalysis, + experts, + finalOutput, + processStartTime, + processEndTime +}: ChatAreaProps) => { + return ( +
+
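+        {/* Empty-state hero, persisted message history, then a live "ghost" message while a run is in progress */}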
+ {messages.length === 0 && appState === 'idle' && ( +
+
+ Pr +
+

Prisma

+

Ask a complex question to start.

+
+        )}
+
+        {/* History */}
+        {messages.map((msg, idx) => (
+          <ChatMessageItem
+            key={msg.id}
+            message={msg}
+            isLast={idx === messages.length - 1}
+          />
+        ))}
+
+        {/* Active Generation (Ghost Message) */}
+        {appState !== 'idle' && appState !== 'completed' && (
+
+
+
+
+
+
+
Prisma
+ + {/* Active Thinking Process */} +
+ +
+ + {/* Streaming Output */} + {finalOutput && ( +
+ +
+ )} +
+
+
+ )} +
+
+ ); +}; + +export default ChatArea; \ No newline at end of file diff --git a/prisma/components/ChatMessage.tsx b/prisma/components/ChatMessage.tsx new file mode 100644 index 0000000..b2bc61b --- /dev/null +++ b/prisma/components/ChatMessage.tsx @@ -0,0 +1,103 @@ +import React, { useState } from 'react'; +import { User, Sparkles, ChevronDown, ChevronRight } from 'lucide-react'; +import MarkdownRenderer from './MarkdownRenderer'; +import ProcessFlow from './ProcessFlow'; +import { ChatMessage } from '../types'; + +interface ChatMessageProps { + message: ChatMessage; + isLast?: boolean; +} + +const ChatMessageItem = ({ message, isLast }: ChatMessageProps) => { + const isUser = message.role === 'user'; + const [showThinking, setShowThinking] = useState(false); + + // Check if there is any thinking data to show + const hasThinkingData = message.analysis || (message.experts && message.experts.length > 0); + + return ( +
+
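+        {/* Avatar, author label, optional "Thinking Process" accordion (AI turns only), markdown body, and an optional internal-monologue footer */}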
+ {/* Avatar */} +
+
+ {isUser ? ( + + ) : ( + + )} +
+
+ + {/* Content */} +
+
+ {isUser ? 'You' : 'Prisma'} +
+ + {/* Thinking Process Accordion (Only for AI) */} + {!isUser && hasThinkingData && ( +
+ + + {showThinking && ( +
+ +
+ )} +
+ )} + + {/* Text Content */} +
+ {message.content ? ( + + ) : ( + message.isThinking && + )} +
+ + {/* Internal Monologue (Synthesis Thoughts) - Optional Footer */} + {!isUser && message.synthesisThoughts && ( +
+
+ + + Show Internal Monologue + +
+ {message.synthesisThoughts} +
+
+
+ )} +
+
+
+ ); +}; + +export default ChatMessageItem; \ No newline at end of file diff --git a/prisma/components/Header.tsx b/prisma/components/Header.tsx new file mode 100644 index 0000000..9977b4c --- /dev/null +++ b/prisma/components/Header.tsx @@ -0,0 +1,68 @@ +import React from 'react'; +import { Settings, ChevronDown, Menu, History } from 'lucide-react'; +import { MODELS } from '../config'; +import { ModelOption } from '../types'; + +interface HeaderProps { + selectedModel: ModelOption; + setSelectedModel: (model: ModelOption) => void; + onOpenSettings: () => void; + onToggleSidebar: () => void; + onNewChat: () => void; +} + +const Header = ({ selectedModel, setSelectedModel, onOpenSettings, onToggleSidebar, onNewChat }: HeaderProps) => { + return ( +
+
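+        {/* Sidebar toggle, app title ("Gemini Prisma" / "Prisma"), model picker backed by MODELS, and settings / new-chat buttons */}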
+
+ + +
+

+ Gemini Prisma +

+

+ Prisma +

+
+
+ +
+
+ + +
+ + +
+
+
+ ); +}; + +export default Header; \ No newline at end of file diff --git a/prisma/components/InputSection.tsx b/prisma/components/InputSection.tsx new file mode 100644 index 0000000..fa5a32f --- /dev/null +++ b/prisma/components/InputSection.tsx @@ -0,0 +1,102 @@ +import React, { useRef, useLayoutEffect, useState } from 'react'; +import { ArrowUp, Square } from 'lucide-react'; +import { AppState } from '../types'; + +interface InputSectionProps { + query: string; + setQuery: (q: string) => void; + onRun: () => void; + onStop: () => void; + appState: AppState; +} + +const InputSection = ({ query, setQuery, onRun, onStop, appState }: InputSectionProps) => { + const textareaRef = useRef(null); + const [isComposing, setIsComposing] = useState(false); + + const adjustHeight = () => { + if (textareaRef.current) { + // Reset height to auto to allow shrinking when text is deleted + textareaRef.current.style.height = 'auto'; + + const scrollHeight = textareaRef.current.scrollHeight; + const maxHeight = 200; + + // Set new height based on scrollHeight, capped at 200px + textareaRef.current.style.height = `${Math.min(scrollHeight, maxHeight)}px`; + + // Only show scrollbar if we hit the max height limit + if (scrollHeight > maxHeight) { + textareaRef.current.style.overflowY = 'auto'; + } else { + textareaRef.current.style.overflowY = 'hidden'; + } + } + }; + + // useLayoutEffect prevents visual flickering by adjusting height before paint + useLayoutEffect(() => { + adjustHeight(); + }, [query]); + + const handleKeyDown = (e: React.KeyboardEvent) => { + // If user presses Enter without Shift + if (e.key === 'Enter' && !e.shiftKey) { + // robust check for IME composition (e.g. Chinese/Japanese inputs) + if (isComposing || (e.nativeEvent as any).isComposing) { + return; + } + + e.preventDefault(); + if (query.trim() && appState === 'idle') { + onRun(); + } + } + }; + + const isRunning = appState !== 'idle'; + + return ( +
+ {/* Container: Flex items-end ensures button stays at bottom right as text grows */} +
+ +