diff --git a/.DS_Store b/.DS_Store
index de2c147..2934f8a 100644
Binary files a/.DS_Store and b/.DS_Store differ
diff --git a/prisma/SettingsModal.tsx b/prisma/SettingsModal.tsx
index 386acb4..bf5d4c1 100644
--- a/prisma/SettingsModal.tsx
+++ b/prisma/SettingsModal.tsx
@@ -6,6 +6,7 @@ import ApiSection from './components/settings/ApiSection';
import ModelSection from './components/settings/ModelSection';
import ThinkingSection from './components/settings/ThinkingSection';
import GithubSection from './components/settings/GithubSection';
+import LogSection from './components/settings/LogSection';
interface SettingsModalProps {
isOpen: boolean;
@@ -49,6 +50,8 @@ const SettingsModal = ({
setConfig={setConfig}
model={model}
/>
+
+        <LogSection />
diff --git a/prisma/api.ts b/prisma/api.ts
index 7a14457..c3f71fd 100644
--- a/prisma/api.ts
+++ b/prisma/api.ts
@@ -1,6 +1,8 @@
+
import { GoogleGenAI } from "@google/genai";
import OpenAI from "openai";
import { ApiProvider, CustomModel } from './types';
+import { logger } from './services/logger';
type AIProviderConfig = {
provider?: ApiProvider;
@@ -46,7 +48,8 @@ if (typeof window !== 'undefined' && originalFetch) {
if (urlString.includes('/custom-api') && currentCustomApiUrl) {
const headers = new Headers(init?.headers);
headers.set('X-Target-URL', currentCustomApiUrl);
- console.log('[Fetch] Adding X-Target-URL header:', currentCustomApiUrl);
+
+ logger.debug('API', 'Using Custom Proxy', { target: currentCustomApiUrl, path: urlString });
return originalFetch(input, {
...init,
@@ -59,6 +62,7 @@ if (typeof window !== 'undefined' && originalFetch) {
try {
window.fetch = proxyFetch;
+ logger.info('System', 'Fetch proxy interceptor installed');
} catch (e) {
try {
Object.defineProperty(window, 'fetch', {
@@ -69,6 +73,7 @@ if (typeof window !== 'undefined' && originalFetch) {
});
} catch (e2) {
console.error('[API] Failed to intercept fetch:', e2);
+ logger.error('System', 'Failed to intercept fetch', e2);
}
}
}
@@ -90,10 +95,6 @@ export const getAI = (config?: AIProviderConfig) => {
currentCustomApiUrl = config.baseUrl;
// Use proxy path
options.baseURL = `${window.location.origin}/custom-api`;
- console.log('[API] Using custom API proxy:', {
- proxyPath: options.baseURL,
- targetUrl: currentCustomApiUrl,
- });
} else {
// In production, use the URL directly
options.baseURL = config.baseUrl;
@@ -110,12 +111,12 @@ export const getAI = (config?: AIProviderConfig) => {
}
}
- console.log('[API] OpenAI client config:', {
+ logger.info('API', 'Initializing OpenAI Client', {
provider,
- baseURL: options.baseURL,
- hasApiKey: !!options.apiKey,
- customTarget: currentCustomApiUrl,
+ baseURL: options.baseURL,
+ isCustom: provider === 'custom'
});
+
return new OpenAI(options);
} else {
const options: any = {
@@ -126,6 +127,7 @@ export const getAI = (config?: AIProviderConfig) => {
options.baseUrl = config.baseUrl;
}
+ logger.info('API', 'Initializing Google GenAI Client');
return new GoogleGenAI(options);
}
};
@@ -150,4 +152,4 @@ export const getAIProvider = (model: string): ApiProvider => {
return 'custom';
}
return 'google';
-};
\ No newline at end of file
+};
diff --git a/prisma/components/settings/LogSection.tsx b/prisma/components/settings/LogSection.tsx
new file mode 100644
index 0000000..ab868f2
--- /dev/null
+++ b/prisma/components/settings/LogSection.tsx
@@ -0,0 +1,78 @@
+
+import React, { useState, useEffect } from 'react';
+import { FileText, Download, Trash2, Activity } from 'lucide-react';
+import { logger } from '../../services/logger';
+
+const LogSection = () => {
+ const [logCount, setLogCount] = useState(0);
+
+ useEffect(() => {
+ // Initial count
+ setLogCount(logger.getLogs().length);
+
+ // Simple poller to update count while settings are open
+ const interval = setInterval(() => {
+ setLogCount(logger.getLogs().length);
+ }, 1000);
+
+ return () => clearInterval(interval);
+ }, []);
+
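+  // Delegates to the shared logger service, which serializes all entries to a
+  // text file and triggers a browser download (see services/logger.ts).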
+ const handleDownload = () => {
+ logger.download();
+ };
+
+ const handleClear = () => {
+ if (confirm('Are you sure you want to clear all execution logs?')) {
+ logger.clear();
+ setLogCount(0);
+ }
+ };
+
+  return (
+    <div>
+      <div>
+        <FileText size={16} />
+        <h3>System Logs</h3>
+      </div>
+
+      <div>
+        <h4>Debug & Execution Logs</h4>
+        <p>
+          Record of reasoning processes, API calls, and errors. Useful for debugging specific issues.
+        </p>
+        <p>
+          <Activity size={14} />
+          Current entries: {logCount}
+        </p>
+      </div>
+
+      <div>
+        <button onClick={handleDownload}>
+          <Download size={14} />
+          Download Logs
+        </button>
+        <button onClick={handleClear}>
+          <Trash2 size={14} />
+          Clear Logs
+        </button>
+      </div>
+    </div>
+  );
+};
+
+export default LogSection;
diff --git a/prisma/hooks/useAppLogic.ts b/prisma/hooks/useAppLogic.ts
index 3f7eb90..a6e4221 100644
--- a/prisma/hooks/useAppLogic.ts
+++ b/prisma/hooks/useAppLogic.ts
@@ -5,6 +5,7 @@ import { STORAGE_KEYS, DEFAULT_CONFIG, getValidThinkingLevels } from '../config'
import { useDeepThink } from './useDeepThink';
import { useChatSessions } from './useChatSessions';
import { setInterceptorUrl } from '../interceptor';
+import { logger } from '../services/logger';
export const useAppLogic = () => {
// Session Management
@@ -72,10 +73,12 @@ export const useAppLogic = () => {
// Persistence Effects
useEffect(() => {
localStorage.setItem(STORAGE_KEYS.SETTINGS, JSON.stringify(config));
+ logger.info('System', 'Settings updated', config);
}, [config]);
useEffect(() => {
localStorage.setItem(STORAGE_KEYS.MODEL, selectedModel);
+ logger.info('User', 'Model changed', { model: selectedModel });
}, [selectedModel]);
useEffect(() => {
@@ -120,6 +123,7 @@ export const useAppLogic = () => {
if (session) {
setMessages(session.messages);
setSelectedModel(session.model || 'gemini-3-flash-preview');
+ logger.debug('User', 'Session switched', { id: currentSessionId, title: session.title });
}
} else {
setMessages([]);
@@ -129,6 +133,9 @@ export const useAppLogic = () => {
// Handle AI Completion
useEffect(() => {
if (appState === 'completed') {
+ const duration = (processStartTime && processEndTime) ? (processEndTime - processStartTime) : undefined;
+ logger.info('System', 'Request processing completed', { duration });
+
const finalizedMessage: ChatMessage = {
id: `ai-${Date.now()}`,
role: 'model',
@@ -137,7 +144,7 @@ export const useAppLogic = () => {
experts: experts,
synthesisThoughts: synthesisThoughts,
isThinking: false,
- totalDuration: (processStartTime && processEndTime) ? (processEndTime - processStartTime) : undefined
+ totalDuration: duration
};
const newMessages = [...messages, finalizedMessage];
@@ -158,6 +165,8 @@ export const useAppLogic = () => {
const handleRun = useCallback((attachments: MessageAttachment[] = []) => {
if (!query.trim() && attachments.length === 0) return;
+ logger.info('User', 'New Request', { query, hasAttachments: attachments.length > 0 });
+
const userMsg: ChatMessage = {
id: `user-${Date.now()}`,
role: 'user',
@@ -180,6 +189,7 @@ export const useAppLogic = () => {
}, [query, messages, currentSessionId, selectedModel, config, createSession, updateSessionMessages, runDynamicDeepThink]);
const handleNewChat = useCallback(() => {
+ logger.info('User', 'New Chat initiated');
stopDeepThink();
setCurrentSessionId(null);
setMessages([]);
@@ -199,6 +209,7 @@ export const useAppLogic = () => {
const handleDeleteSession = useCallback((id: string, e: React.MouseEvent) => {
e.stopPropagation();
+ logger.info('User', 'Session deleted', { id });
deleteSession(id);
if (currentSessionId === id) {
handleNewChat();
diff --git a/prisma/hooks/useDeepThink.ts b/prisma/hooks/useDeepThink.ts
index a9700d0..a583bca 100644
--- a/prisma/hooks/useDeepThink.ts
+++ b/prisma/hooks/useDeepThink.ts
@@ -8,6 +8,7 @@ import { executeManagerAnalysis, executeManagerReview } from '../services/deepTh
import { streamExpertResponse } from '../services/deepThink/expert';
import { streamSynthesisResponse } from '../services/deepThink/synthesis';
import { useDeepThinkState } from './useDeepThinkState';
+import { logger } from '../services/logger';
export const useDeepThink = () => {
const {
@@ -41,6 +42,7 @@ export const useDeepThink = () => {
): Promise<ExpertResult> => {
if (signal.aborted) return expert;
+ logger.info('Expert', `Starting expert: ${expert.role}`, { id: expert.id, round: expert.round });
const startTime = Date.now();
updateExpertAt(globalIndex, { status: 'thinking', startTime });
@@ -63,13 +65,18 @@ export const useDeepThink = () => {
}
);
- if (signal.aborted) return expertsDataRef.current[globalIndex];
+ if (signal.aborted) {
+ logger.warn('Expert', `Expert aborted: ${expert.role}`);
+ return expertsDataRef.current[globalIndex];
+ }
+ logger.info('Expert', `Expert completed: ${expert.role}`);
updateExpertAt(globalIndex, { status: 'completed', endTime: Date.now() });
return expertsDataRef.current[globalIndex];
} catch (error) {
console.error(`Expert ${expert.role} error:`, error);
+ logger.error('Expert', `Expert failed: ${expert.role}`, error);
if (!signal.aborted) {
updateExpertAt(globalIndex, { status: 'error', content: "Failed to generate response.", endTime: Date.now() });
}
@@ -92,6 +99,8 @@ export const useDeepThink = () => {
abortControllerRef.current = new AbortController();
const signal = abortControllerRef.current.signal;
+ logger.info('System', 'Starting DeepThink Process', { model, provider: getAIProvider(model) });
+
// Reset UI state
setAppState('analyzing');
setManagerAnalysis(null);
@@ -120,6 +129,7 @@ export const useDeepThink = () => {
).join('\n');
// --- Phase 1: Planning & Initial Experts ---
+ logger.debug('Manager', 'Phase 1: Planning started');
const managerTask = executeManagerAnalysis(
ai,
@@ -151,6 +161,7 @@ export const useDeepThink = () => {
const analysisJson = await managerTask;
if (signal.aborted) return;
setManagerAnalysis(analysisJson);
+ logger.info('Manager', 'Plan generated', analysisJson);
const round1Experts: ExpertResult[] = analysisJson.experts.map((exp, idx) => ({
...exp,
@@ -181,6 +192,7 @@ export const useDeepThink = () => {
while (loopActive && roundCounter < MAX_ROUNDS) {
if (signal.aborted) return;
+ logger.info('Manager', `Phase 2: Reviewing Round ${roundCounter}`);
setAppState('reviewing');
const reviewResult = await executeManagerReview(
@@ -189,6 +201,9 @@ export const useDeepThink = () => {
);
if (signal.aborted) return;
+
+ logger.info('Manager', `Review Result: ${reviewResult.satisfied ? 'Satisfied' : 'Not Satisfied'}`, reviewResult);
+
if (reviewResult.satisfied) {
loopActive = false;
} else {
@@ -198,6 +213,7 @@ export const useDeepThink = () => {
}));
if (nextRoundExperts.length === 0) {
+ logger.warn('Manager', 'Not satisfied but no new experts proposed. Breaking loop.');
loopActive = false;
break;
}
@@ -219,6 +235,7 @@ export const useDeepThink = () => {
// --- Phase 3: Synthesis ---
setAppState('synthesizing');
+ logger.info('Synthesis', 'Phase 3: Synthesis started');
let fullFinalText = '';
let fullFinalThoughts = '';
@@ -236,6 +253,7 @@ export const useDeepThink = () => {
);
if (!signal.aborted) {
+ logger.info('Synthesis', 'Response generation completed');
setAppState('completed');
setProcessEndTime(Date.now());
}
@@ -243,8 +261,11 @@ export const useDeepThink = () => {
} catch (e: any) {
if (!signal.aborted) {
console.error(e);
+ logger.error('System', 'DeepThink Process Error', e);
setAppState('idle');
setProcessEndTime(Date.now());
+ } else {
+ logger.warn('System', 'Process aborted by user');
}
} finally {
abortControllerRef.current = null;
diff --git a/prisma/services/deepThink/expert.ts b/prisma/services/deepThink/expert.ts
index a9128b7..562a578 100644
--- a/prisma/services/deepThink/expert.ts
+++ b/prisma/services/deepThink/expert.ts
@@ -3,6 +3,7 @@ import { ModelOption, ExpertResult, MessageAttachment } from '../../types';
import { getExpertSystemInstruction } from './prompts';
import { withRetry } from '../utils/retry';
import { generateContentStream as generateOpenAIStream } from './openaiClient';
+import { logger } from '../logger';
const isGoogleProvider = (ai: any): boolean => {
return ai?.models?.generateContentStream !== undefined;
@@ -44,7 +45,8 @@ export const streamExpertResponse = async (
systemInstruction: getExpertSystemInstruction(expert.role, expert.description, context),
temperature: expert.temperature,
thinkingConfig: {
- thinkingBudget: budget
+ thinkingBudget: budget,
+ includeThoughts: true
}
}
}));
@@ -53,11 +55,22 @@ export const streamExpertResponse = async (
for await (const chunk of (streamResult as any)) {
if (signal.aborted) break;
- const chunkText = chunk.text || "";
- onChunk(chunkText, "");
+ let chunkText = "";
+ let chunkThought = "";
+
+ if (chunk.candidates?.[0]?.content?.parts) {
+ for (const part of chunk.candidates[0].content.parts) {
+ if (part.thought) {
+ chunkThought += (part.text || "");
+ } else if (part.text) {
+ chunkText += part.text;
+ }
+ }
+ onChunk(chunkText, chunkThought);
+ }
}
} catch (streamError) {
- console.error(`Stream interrupted for expert ${expert.role}:`, streamError);
+ logger.error("Expert", `Stream interrupted for expert ${expert.role}`, streamError);
throw streamError;
}
} else {
@@ -95,7 +108,7 @@ export const streamExpertResponse = async (
onChunk(chunk.text, chunk.thought || '');
}
} catch (streamError) {
- console.error(`Stream interrupted for expert ${expert.role}:`, streamError);
+ logger.error("Expert", `Stream interrupted for expert ${expert.role} (OpenAI)`, streamError);
throw streamError;
}
}
diff --git a/prisma/services/deepThink/manager.ts b/prisma/services/deepThink/manager.ts
index a1f612b..e23e65e 100644
--- a/prisma/services/deepThink/manager.ts
+++ b/prisma/services/deepThink/manager.ts
@@ -5,6 +5,7 @@ import { cleanJsonString } from '../../utils';
import { MANAGER_SYSTEM_PROMPT, MANAGER_REVIEW_SYSTEM_PROMPT } from './prompts';
import { withRetry } from '../utils/retry';
import { generateContent as generateOpenAIContent } from './openaiClient';
+import { logger } from '../logger';
const isGoogleProvider = (ai: any): boolean => {
return ai?.models?.generateContent !== undefined;
@@ -68,6 +69,7 @@ export const executeManagerAnalysis = async (
responseMimeType: "application/json",
responseSchema: managerSchema,
thinkingConfig: {
+ includeThoughts: true,
thinkingBudget: budget
}
}
@@ -82,7 +84,7 @@ export const executeManagerAnalysis = async (
}
return analysisJson;
} catch (e) {
- console.error("Manager Analysis Error:", e);
+ logger.error("Manager", "Analysis generation failed", e);
return {
thought_process: "Direct processing fallback due to analysis error.",
experts: []
@@ -135,7 +137,7 @@ export const executeManagerAnalysis = async (
}
return analysisJson;
} catch (e) {
- console.error("Manager Analysis Error:", e);
+ logger.error("Manager", "Analysis generation failed (OpenAI)", e);
return {
thought_process: "Direct processing fallback due to analysis error.",
experts: []
@@ -192,6 +194,7 @@ export const executeManagerReview = async (
responseMimeType: "application/json",
responseSchema: reviewSchema,
thinkingConfig: {
+ includeThoughts: true,
thinkingBudget: budget
}
}
@@ -201,7 +204,7 @@ export const executeManagerReview = async (
const cleanText = cleanJsonString(rawText);
return JSON.parse(cleanText) as ReviewResult;
} catch (e) {
- console.error("Review Error:", e);
+ logger.error("Manager", "Review generation failed", e);
return { satisfied: true, critique: "Processing Error, proceeding to synthesis." };
}
} else {
@@ -220,7 +223,7 @@ export const executeManagerReview = async (
return JSON.parse(response.text) as ReviewResult;
} catch (e) {
- console.error("Review Error:", e);
+ logger.error("Manager", "Review generation failed (OpenAI)", e);
return { satisfied: true, critique: "Processing Error, proceeding to synthesis." };
}
}
diff --git a/prisma/services/deepThink/synthesis.ts b/prisma/services/deepThink/synthesis.ts
index a1dd913..2499fd6 100644
--- a/prisma/services/deepThink/synthesis.ts
+++ b/prisma/services/deepThink/synthesis.ts
@@ -3,6 +3,7 @@ import { ModelOption, ExpertResult, MessageAttachment } from '../../types';
import { getSynthesisPrompt } from './prompts';
import { withRetry } from '../utils/retry';
import { generateContentStream as generateOpenAIStream } from './openaiClient';
+import { logger } from '../logger';
const isGoogleProvider = (ai: any): boolean => {
return ai?.models?.generateContentStream !== undefined;
@@ -44,7 +45,8 @@ export const streamSynthesisResponse = async (
contents: contents,
config: {
thinkingConfig: {
- thinkingBudget: budget
+ thinkingBudget: budget,
+ includeThoughts: true
}
}
}));
@@ -52,12 +54,23 @@ export const streamSynthesisResponse = async (
try {
for await (const chunk of (synthesisStream as any)) {
if (signal.aborted) break;
-
- const chunkText = chunk.text || "";
- onChunk(chunkText, "");
+
+ let chunkText = "";
+ let chunkThought = "";
+
+ if (chunk.candidates?.[0]?.content?.parts) {
+ for (const part of chunk.candidates[0].content.parts) {
+ if (part.thought) {
+ chunkThought += (part.text || "");
+ } else if (part.text) {
+ chunkText += part.text;
+ }
+ }
+ onChunk(chunkText, chunkThought);
+ }
}
} catch (streamError) {
- console.error("Synthesis stream interrupted:", streamError);
+ logger.error("Synthesis", "Stream interrupted", streamError);
throw streamError;
}
} else {
@@ -95,7 +108,7 @@ export const streamSynthesisResponse = async (
onChunk(chunk.text, chunk.thought || '');
}
} catch (streamError) {
- console.error("Synthesis stream interrupted:", streamError);
+ logger.error("Synthesis", "Stream interrupted (OpenAI)", streamError);
throw streamError;
}
}
diff --git a/prisma/services/logger.ts b/prisma/services/logger.ts
new file mode 100644
index 0000000..19480b3
--- /dev/null
+++ b/prisma/services/logger.ts
@@ -0,0 +1,119 @@
+
+export type LogLevel = 'info' | 'warn' | 'error' | 'debug';
+export type LogCategory = 'System' | 'User' | 'API' | 'Manager' | 'Expert' | 'Synthesis';
+
+export interface LogEntry {
+ timestamp: string;
+ level: LogLevel;
+ category: LogCategory;
+ message: string;
+ data?: any;
+}
+
+class LoggerService {
+ private logs: LogEntry[] = [];
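+  // In-memory cap; add() trims the oldest entries once this limit is exceeded.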
+ private maxLogs: number = 5000;
+
+ constructor() {
+ // Attempt to restore logs from sessionStorage on load (optional persistence)
+ try {
+ const saved = sessionStorage.getItem('prisma_logs');
+ if (saved) {
+ this.logs = JSON.parse(saved);
+ }
+ } catch (e) {
+ console.warn('Failed to restore logs');
+ }
+
+ this.info('System', 'Logger service initialized');
+ }
+
+ private persist() {
+ try {
+ sessionStorage.setItem('prisma_logs', JSON.stringify(this.logs.slice(-500))); // Persist last 500 only
+ } catch (e) {
+ // Ignore quota errors
+ }
+ }
+
+ add(level: LogLevel, category: LogCategory, message: string, data?: any) {
+ const entry: LogEntry = {
+ timestamp: new Date().toISOString(),
+ level,
+ category,
+ message,
+ data: data ? JSON.parse(JSON.stringify(data, this.replacer)) : undefined
+ };
+
+ this.logs.push(entry);
+
+ // Trim if too large
+ if (this.logs.length > this.maxLogs) {
+ this.logs = this.logs.slice(this.logs.length - this.maxLogs);
+ }
+
+ // Mirror to console for dev
+ if (import.meta.env.DEV) {
+ const style = level === 'error' ? 'color: red' : level === 'warn' ? 'color: orange' : 'color: cyan';
+ console.log(`%c[${category}] ${message}`, style, data || '');
+ }
+
+ this.persist();
+ }
+
+  // JSON replacer used when snapshotting log data: redacts sensitive keys and
+  // flattens Error objects so their message and stack survive serialization.
+  private replacer(key: string, value: any) {
+    if (key === 'apiKey' || key === 'auth') return '***REDACTED***';
+    if (value instanceof Error) {
+      return { name: value.name, message: value.message, stack: value.stack };
+    }
+    return value;
+  }
+
+ info(category: LogCategory, message: string, data?: any) {
+ this.add('info', category, message, data);
+ }
+
+ warn(category: LogCategory, message: string, data?: any) {
+ this.add('warn', category, message, data);
+ }
+
+ error(category: LogCategory, message: string, data?: any) {
+ this.add('error', category, message, data);
+ }
+
+ debug(category: LogCategory, message: string, data?: any) {
+ this.add('debug', category, message, data);
+ }
+
+ getLogs() {
+ return this.logs;
+ }
+
+ clear() {
+ this.logs = [];
+ this.persist();
+ this.info('System', 'Logs cleared by user');
+ }
+
+ download() {
+ const textContent = this.logs.map(entry => {
+ const date = new Date(entry.timestamp).toLocaleTimeString();
+ let line = `[${date}] [${entry.level.toUpperCase()}] [${entry.category}]: ${entry.message}`;
+ if (entry.data) {
+ line += `\n Data: ${JSON.stringify(entry.data, null, 2)}`;
+ }
+ return line;
+ }).join('\n----------------------------------------\n');
+
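+    // Build a plain-text blob and trigger a client-side download via a temporary anchor element.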
+ const blob = new Blob([textContent], { type: 'text/plain' });
+ const url = URL.createObjectURL(blob);
+ const a = document.createElement('a');
+ a.href = url;
+ a.download = `prisma-debug-log-${new Date().toISOString().slice(0, 19).replace(/:/g, '-')}.txt`;
+ document.body.appendChild(a);
+ a.click();
+ document.body.removeChild(a);
+ URL.revokeObjectURL(url);
+ }
+}
+
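+// Shared singleton; call sites log via e.g. logger.info('System', 'message', optionalData).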
+export const logger = new LoggerService();