Author: 从何开始123
Date:   2026-01-12 18:03:31 +08:00
parent  bd297716b0
commit  25dffcc02e

10 changed files with 290 additions and 27 deletions

BIN  .DS_Store (vendored)
Binary file not shown.

View File

@@ -6,6 +6,7 @@ import ApiSection from './components/settings/ApiSection';
 import ModelSection from './components/settings/ModelSection';
 import ThinkingSection from './components/settings/ThinkingSection';
 import GithubSection from './components/settings/GithubSection';
+import LogSection from './components/settings/LogSection';

 interface SettingsModalProps {
   isOpen: boolean;
@@ -49,6 +50,8 @@ const SettingsModal = ({
           setConfig={setConfig}
           model={model}
         />
+
+        <LogSection />
         <GithubSection isOpen={isOpen} />
       </div>

View File

@@ -1,6 +1,8 @@
 import { GoogleGenAI } from "@google/genai";
 import OpenAI from "openai";
 import { ApiProvider, CustomModel } from './types';
+import { logger } from './services/logger';

 type AIProviderConfig = {
   provider?: ApiProvider;
@@ -46,7 +48,8 @@ if (typeof window !== 'undefined' && originalFetch) {
     if (urlString.includes('/custom-api') && currentCustomApiUrl) {
       const headers = new Headers(init?.headers);
       headers.set('X-Target-URL', currentCustomApiUrl);
-      console.log('[Fetch] Adding X-Target-URL header:', currentCustomApiUrl);
+      logger.debug('API', 'Using Custom Proxy', { target: currentCustomApiUrl, path: urlString });

       return originalFetch(input, {
         ...init,
@@ -59,6 +62,7 @@ if (typeof window !== 'undefined' && originalFetch) {
   try {
     window.fetch = proxyFetch;
+    logger.info('System', 'Fetch proxy interceptor installed');
   } catch (e) {
     try {
       Object.defineProperty(window, 'fetch', {
@@ -69,6 +73,7 @@ if (typeof window !== 'undefined' && originalFetch) {
       });
     } catch (e2) {
       console.error('[API] Failed to intercept fetch:', e2);
+      logger.error('System', 'Failed to intercept fetch', e2);
     }
   }
 }
@@ -90,10 +95,6 @@ export const getAI = (config?: AIProviderConfig) => {
       currentCustomApiUrl = config.baseUrl;
       // Use proxy path
       options.baseURL = `${window.location.origin}/custom-api`;
-      console.log('[API] Using custom API proxy:', {
-        proxyPath: options.baseURL,
-        targetUrl: currentCustomApiUrl,
-      });
     } else {
       // In production, use the URL directly
       options.baseURL = config.baseUrl;
@@ -110,12 +111,12 @@ export const getAI = (config?: AIProviderConfig) => {
       }
     }

-    console.log('[API] OpenAI client config:', {
+    logger.info('API', 'Initializing OpenAI Client', {
       provider,
       baseURL: options.baseURL,
-      hasApiKey: !!options.apiKey,
-      customTarget: currentCustomApiUrl,
+      isCustom: provider === 'custom'
     });

     return new OpenAI(options);
   } else {
     const options: any = {
@@ -126,6 +127,7 @@ export const getAI = (config?: AIProviderConfig) => {
       options.baseUrl = config.baseUrl;
     }

+    logger.info('API', 'Initializing Google GenAI Client');
     return new GoogleGenAI(options);
   }
 };
@@ -150,4 +152,4 @@ export const getAIProvider = (model: string): ApiProvider => {
     return 'custom';
   }
   return 'google';
 };
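
For context, a minimal call-site sketch — assumed, not part of this diff; the field names follow the AIProviderConfig usage visible in the hunks above, and the import path is hypothetical. With provider 'custom', getAI points the OpenAI client at `${origin}/custom-api`, and proxyFetch attaches the real endpoint via the X-Target-URL header for the dev proxy to consume:

import { getAI } from './api'; // hypothetical path to this module

const ai = getAI({
  provider: 'custom',
  baseUrl: 'https://llm.example.com/v1', // hypothetical upstream endpoint
  apiKey: 'sk-xxxx',                     // redacted in logs by the logger's replacer
});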

View File

@@ -0,0 +1,78 @@
import React, { useState, useEffect } from 'react';
import { FileText, Download, Trash2, Activity } from 'lucide-react';
import { logger } from '../../services/logger';

const LogSection = () => {
  const [logCount, setLogCount] = useState(0);

  useEffect(() => {
    // Initial count
    setLogCount(logger.getLogs().length);
    // Simple poller to update count while settings are open
    const interval = setInterval(() => {
      setLogCount(logger.getLogs().length);
    }, 1000);
    return () => clearInterval(interval);
  }, []);

  const handleDownload = () => {
    logger.download();
  };

  const handleClear = () => {
    if (confirm('Are you sure you want to clear all execution logs?')) {
      logger.clear();
      setLogCount(0);
    }
  };

  return (
    <div className="border-t border-slate-100 pt-4 space-y-4">
      <div className="flex items-center justify-between">
        <h3 className="text-xs font-bold text-slate-400 uppercase tracking-wider">System Logs</h3>
      </div>

      <div className="p-4 bg-slate-50 border border-slate-200 rounded-lg">
        <div className="flex items-start gap-3 mb-4">
          <div className="p-2 bg-slate-200 rounded-lg text-slate-600">
            <Activity size={18} />
          </div>
          <div>
            <h4 className="text-sm font-medium text-slate-800">Debug & Execution Logs</h4>
            <p className="text-xs text-slate-500 mt-1">
              Record of reasoning processes, API calls, and errors. Useful for debugging specific issues.
            </p>
            <p className="text-xs font-mono text-slate-400 mt-2">
              Current entries: <span className="text-slate-700 font-bold">{logCount}</span>
            </p>
          </div>
        </div>

        <div className="flex gap-2">
          <button
            onClick={handleDownload}
            disabled={logCount === 0}
            className="flex-1 flex items-center justify-center gap-2 px-3 py-2 bg-white border border-slate-200 hover:border-slate-300 text-slate-700 text-xs font-medium rounded-md shadow-sm transition-colors disabled:opacity-50 disabled:cursor-not-allowed"
          >
            <Download size={14} />
            Download .txt
          </button>
          <button
            onClick={handleClear}
            disabled={logCount === 0}
            className="flex items-center justify-center gap-2 px-3 py-2 bg-white border border-slate-200 hover:border-red-200 hover:bg-red-50 text-slate-700 hover:text-red-600 text-xs font-medium rounded-md shadow-sm transition-colors disabled:opacity-50 disabled:cursor-not-allowed"
          >
            <Trash2 size={14} />
            Clear
          </button>
        </div>
      </div>
    </div>
  );
};

export default LogSection;
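
Design note: the one-second poll of logger.getLogs().length is cheap and self-limiting here, since LogSection only mounts while the settings modal is open and the effect's cleanup clears the interval on unmount. A push-based subscription on the logger would be the natural upgrade if the count ever needs to render outside this modal.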

View File

@@ -5,6 +5,7 @@ import { STORAGE_KEYS, DEFAULT_CONFIG, getValidThinkingLevels } from '../config'
 import { useDeepThink } from './useDeepThink';
 import { useChatSessions } from './useChatSessions';
 import { setInterceptorUrl } from '../interceptor';
+import { logger } from '../services/logger';

 export const useAppLogic = () => {
   // Session Management
@@ -72,10 +73,12 @@ export const useAppLogic = () => {
   // Persistence Effects
   useEffect(() => {
     localStorage.setItem(STORAGE_KEYS.SETTINGS, JSON.stringify(config));
+    logger.info('System', 'Settings updated', config);
   }, [config]);

   useEffect(() => {
     localStorage.setItem(STORAGE_KEYS.MODEL, selectedModel);
+    logger.info('User', 'Model changed', { model: selectedModel });
   }, [selectedModel]);

   useEffect(() => {
@@ -120,6 +123,7 @@ export const useAppLogic = () => {
       if (session) {
         setMessages(session.messages);
         setSelectedModel(session.model || 'gemini-3-flash-preview');
+        logger.debug('User', 'Session switched', { id: currentSessionId, title: session.title });
       }
     } else {
       setMessages([]);
@@ -129,6 +133,9 @@ export const useAppLogic = () => {
   // Handle AI Completion
   useEffect(() => {
     if (appState === 'completed') {
+      const duration = (processStartTime && processEndTime) ? (processEndTime - processStartTime) : undefined;
+      logger.info('System', 'Request processing completed', { duration });
+
       const finalizedMessage: ChatMessage = {
         id: `ai-${Date.now()}`,
         role: 'model',
@@ -137,7 +144,7 @@ export const useAppLogic = () => {
         experts: experts,
         synthesisThoughts: synthesisThoughts,
         isThinking: false,
-        totalDuration: (processStartTime && processEndTime) ? (processEndTime - processStartTime) : undefined
+        totalDuration: duration
       };

       const newMessages = [...messages, finalizedMessage];
@@ -158,6 +165,8 @@ export const useAppLogic = () => {
   const handleRun = useCallback((attachments: MessageAttachment[] = []) => {
     if (!query.trim() && attachments.length === 0) return;

+    logger.info('User', 'New Request', { query, hasAttachments: attachments.length > 0 });
+
     const userMsg: ChatMessage = {
       id: `user-${Date.now()}`,
       role: 'user',
@@ -180,6 +189,7 @@ export const useAppLogic = () => {
   }, [query, messages, currentSessionId, selectedModel, config, createSession, updateSessionMessages, runDynamicDeepThink]);

   const handleNewChat = useCallback(() => {
+    logger.info('User', 'New Chat initiated');
     stopDeepThink();
     setCurrentSessionId(null);
     setMessages([]);
@@ -199,6 +209,7 @@ export const useAppLogic = () => {
   const handleDeleteSession = useCallback((id: string, e: React.MouseEvent) => {
     e.stopPropagation();
+    logger.info('User', 'Session deleted', { id });
     deleteSession(id);
     if (currentSessionId === id) {
       handleNewChat();
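
Note that `logger.info('System', 'Settings updated', config)` records the full settings object on every change; sensitive fields survive only as placeholders, because the logger's JSON replacer (see prisma/services/logger.ts below) redacts keys named apiKey and auth before the entry is stored.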

View File

@@ -8,6 +8,7 @@ import { executeManagerAnalysis, executeManagerReview } from '../services/deepTh
 import { streamExpertResponse } from '../services/deepThink/expert';
 import { streamSynthesisResponse } from '../services/deepThink/synthesis';
 import { useDeepThinkState } from './useDeepThinkState';
+import { logger } from '../services/logger';

 export const useDeepThink = () => {
   const {
@@ -41,6 +42,7 @@ export const useDeepThink = () => {
   ): Promise<ExpertResult> => {
     if (signal.aborted) return expert;

+    logger.info('Expert', `Starting expert: ${expert.role}`, { id: expert.id, round: expert.round });
     const startTime = Date.now();
     updateExpertAt(globalIndex, { status: 'thinking', startTime });
@@ -63,13 +65,18 @@ export const useDeepThink = () => {
         }
       );

-      if (signal.aborted) return expertsDataRef.current[globalIndex];
+      if (signal.aborted) {
+        logger.warn('Expert', `Expert aborted: ${expert.role}`);
+        return expertsDataRef.current[globalIndex];
+      }

+      logger.info('Expert', `Expert completed: ${expert.role}`);
       updateExpertAt(globalIndex, { status: 'completed', endTime: Date.now() });
       return expertsDataRef.current[globalIndex];
     } catch (error) {
       console.error(`Expert ${expert.role} error:`, error);
+      logger.error('Expert', `Expert failed: ${expert.role}`, error);
       if (!signal.aborted) {
         updateExpertAt(globalIndex, { status: 'error', content: "Failed to generate response.", endTime: Date.now() });
       }
@@ -92,6 +99,8 @@ export const useDeepThink = () => {
     abortControllerRef.current = new AbortController();
     const signal = abortControllerRef.current.signal;

+    logger.info('System', 'Starting DeepThink Process', { model, provider: getAIProvider(model) });
+
     // Reset UI state
     setAppState('analyzing');
     setManagerAnalysis(null);
@@ -120,6 +129,7 @@ export const useDeepThink = () => {
     ).join('\n');

     // --- Phase 1: Planning & Initial Experts ---
+    logger.debug('Manager', 'Phase 1: Planning started');

     const managerTask = executeManagerAnalysis(
       ai,
@@ -151,6 +161,7 @@ export const useDeepThink = () => {
     const analysisJson = await managerTask;
     if (signal.aborted) return;
     setManagerAnalysis(analysisJson);
+    logger.info('Manager', 'Plan generated', analysisJson);

     const round1Experts: ExpertResult[] = analysisJson.experts.map((exp, idx) => ({
       ...exp,
@@ -181,6 +192,7 @@ export const useDeepThink = () => {
     while (loopActive && roundCounter < MAX_ROUNDS) {
       if (signal.aborted) return;

+      logger.info('Manager', `Phase 2: Reviewing Round ${roundCounter}`);
       setAppState('reviewing');

       const reviewResult = await executeManagerReview(
@@ -189,6 +201,9 @@ export const useDeepThink = () => {
       );
       if (signal.aborted) return;

+      logger.info('Manager', `Review Result: ${reviewResult.satisfied ? 'Satisfied' : 'Not Satisfied'}`, reviewResult);
+
       if (reviewResult.satisfied) {
         loopActive = false;
       } else {
@@ -198,6 +213,7 @@ export const useDeepThink = () => {
         }));

         if (nextRoundExperts.length === 0) {
+          logger.warn('Manager', 'Not satisfied but no new experts proposed. Breaking loop.');
           loopActive = false;
           break;
         }
@@ -219,6 +235,7 @@ export const useDeepThink = () => {
     // --- Phase 3: Synthesis ---
     setAppState('synthesizing');
+    logger.info('Synthesis', 'Phase 3: Synthesis started');

     let fullFinalText = '';
     let fullFinalThoughts = '';
@@ -236,6 +253,7 @@ export const useDeepThink = () => {
     );

     if (!signal.aborted) {
+      logger.info('Synthesis', 'Response generation completed');
       setAppState('completed');
       setProcessEndTime(Date.now());
     }
@@ -243,8 +261,11 @@ export const useDeepThink = () => {
   } catch (e: any) {
     if (!signal.aborted) {
       console.error(e);
+      logger.error('System', 'DeepThink Process Error', e);
       setAppState('idle');
       setProcessEndTime(Date.now());
+    } else {
+      logger.warn('System', 'Process aborted by user');
     }
   } finally {
     abortControllerRef.current = null;
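
Pieced together from the hunks above, the orchestration has a plan → review-loop → synthesis shape. The sketch below is a self-contained reduction with all AI calls stubbed; only the control flow is taken from this diff, and every name and value in the stubs is illustrative:

type Review = { satisfied: boolean; newExperts: string[] };

const plan = async (): Promise<string[]> => ['Analyst', 'Critic'];          // stub for executeManagerAnalysis
const runExpert = async (role: string): Promise<string> => `${role} done`;  // stub for streamExpertResponse
const review = async (round: number): Promise<Review> =>                    // stub for executeManagerReview
  ({ satisfied: round >= 2, newExperts: round >= 2 ? [] : ['Verifier'] });

const MAX_ROUNDS = 3;

async function deepThink(): Promise<string> {
  const experts = await plan();                       // Phase 1: manager plans the expert roster
  await Promise.all(experts.map(runExpert));          // Round 1: experts run in parallel
  for (let round = 1; round < MAX_ROUNDS; round++) {  // Phase 2: review loop
    const r = await review(round);
    if (r.satisfied || r.newExperts.length === 0) break; // same exit conditions as the diff
    await Promise.all(r.newExperts.map(runExpert));   // follow-up round with new experts
  }
  return 'synthesized answer';                        // Phase 3: synthesis (stubbed)
}

deepThink().then(console.log);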

View File

@@ -3,6 +3,7 @@ import { ModelOption, ExpertResult, MessageAttachment } from '../../types';
 import { getExpertSystemInstruction } from './prompts';
 import { withRetry } from '../utils/retry';
 import { generateContentStream as generateOpenAIStream } from './openaiClient';
+import { logger } from '../logger';

 const isGoogleProvider = (ai: any): boolean => {
   return ai?.models?.generateContentStream !== undefined;
@@ -44,7 +45,8 @@ export const streamExpertResponse = async (
         systemInstruction: getExpertSystemInstruction(expert.role, expert.description, context),
         temperature: expert.temperature,
         thinkingConfig: {
-          thinkingBudget: budget
+          thinkingBudget: budget,
+          includeThoughts: true
         }
       }
     }));
@@ -53,11 +55,22 @@ export const streamExpertResponse = async (
     try {
       for await (const chunk of (streamResult as any)) {
         if (signal.aborted) break;
-        const chunkText = chunk.text || "";
-        onChunk(chunkText, "");
+        let chunkText = "";
+        let chunkThought = "";
+        if (chunk.candidates?.[0]?.content?.parts) {
+          for (const part of chunk.candidates[0].content.parts) {
+            if (part.thought) {
+              chunkThought += (part.text || "");
+            } else if (part.text) {
+              chunkText += part.text;
+            }
+          }
+          onChunk(chunkText, chunkThought);
+        }
       }
     } catch (streamError) {
-      console.error(`Stream interrupted for expert ${expert.role}:`, streamError);
+      logger.error("Expert", `Stream interrupted for expert ${expert.role}`, streamError);
       throw streamError;
     }
   } else {
@@ -95,7 +108,7 @@ export const streamExpertResponse = async (
         onChunk(chunk.text, chunk.thought || '');
       }
     } catch (streamError) {
-      console.error(`Stream interrupted for expert ${expert.role}:`, streamError);
+      logger.error("Expert", `Stream interrupted for expert ${expert.role} (OpenAI)`, streamError);
       throw streamError;
     }
   }
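
The new parsing loop exists because, with `includeThoughts: true`, the Gemini stream interleaves thought-summary parts and answer parts inside each chunk; the old code read only `chunk.text` and always passed `""` for the thought channel. A representative chunk shape (illustrative values; the `thought` flag on parts is how @google/genai marks thought summaries):

const exampleChunk = {
  candidates: [{
    content: {
      parts: [
        { thought: true, text: 'Comparing the two designs…' }, // routed to chunkThought
        { text: 'Recommendation: use design B because…' },     // routed to chunkText
      ],
    },
  }],
};
// The loop above would emit chunkThought = parts[0].text and chunkText = parts[1].text.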

View File

@@ -5,6 +5,7 @@ import { cleanJsonString } from '../../utils';
 import { MANAGER_SYSTEM_PROMPT, MANAGER_REVIEW_SYSTEM_PROMPT } from './prompts';
 import { withRetry } from '../utils/retry';
 import { generateContent as generateOpenAIContent } from './openaiClient';
+import { logger } from '../logger';

 const isGoogleProvider = (ai: any): boolean => {
   return ai?.models?.generateContent !== undefined;
@@ -68,6 +69,7 @@ export const executeManagerAnalysis = async (
         responseMimeType: "application/json",
         responseSchema: managerSchema,
         thinkingConfig: {
+          includeThoughts: true,
           thinkingBudget: budget
         }
       }
@@ -82,7 +84,7 @@ export const executeManagerAnalysis = async (
     }
     return analysisJson;
   } catch (e) {
-    console.error("Manager Analysis Error:", e);
+    logger.error("Manager", "Analysis generation failed", e);
     return {
       thought_process: "Direct processing fallback due to analysis error.",
       experts: []
@@ -135,7 +137,7 @@ export const executeManagerAnalysis = async (
     }
     return analysisJson;
   } catch (e) {
-    console.error("Manager Analysis Error:", e);
+    logger.error("Manager", "Analysis generation failed (OpenAI)", e);
     return {
       thought_process: "Direct processing fallback due to analysis error.",
       experts: []
@@ -192,6 +194,7 @@ export const executeManagerReview = async (
         responseMimeType: "application/json",
         responseSchema: reviewSchema,
         thinkingConfig: {
+          includeThoughts: true,
          thinkingBudget: budget
         }
       }
@@ -201,7 +204,7 @@ export const executeManagerReview = async (
     const cleanText = cleanJsonString(rawText);
     return JSON.parse(cleanText) as ReviewResult;
   } catch (e) {
-    console.error("Review Error:", e);
+    logger.error("Manager", "Review generation failed", e);
     return { satisfied: true, critique: "Processing Error, proceeding to synthesis." };
   }
 } else {
@@ -220,7 +223,7 @@ export const executeManagerReview = async (
     return JSON.parse(response.text) as ReviewResult;
   } catch (e) {
-    console.error("Review Error:", e);
+    logger.error("Manager", "Review generation failed (OpenAI)", e);
     return { satisfied: true, critique: "Processing Error, proceeding to synthesis." };
   }
 }
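
Both manager calls are wrapped in withRetry, imported from ../utils/retry; its implementation is outside this diff. A plausible shape for such a helper, shown purely as an assumption for readers without the rest of the codebase:

async function withRetry<T>(fn: () => Promise<T>, attempts = 3, baseMs = 500): Promise<T> {
  let lastError: unknown;
  for (let i = 0; i < attempts; i++) {
    try {
      return await fn();
    } catch (e) {
      lastError = e;
      // Exponential backoff between attempts: 500 ms, 1 s, 2 s, …
      await new Promise(res => setTimeout(res, baseMs * 2 ** i));
    }
  }
  throw lastError;
}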

View File

@@ -3,6 +3,7 @@ import { ModelOption, ExpertResult, MessageAttachment } from '../../types';
 import { getSynthesisPrompt } from './prompts';
 import { withRetry } from '../utils/retry';
 import { generateContentStream as generateOpenAIStream } from './openaiClient';
+import { logger } from '../logger';

 const isGoogleProvider = (ai: any): boolean => {
   return ai?.models?.generateContentStream !== undefined;
@@ -44,7 +45,8 @@ export const streamSynthesisResponse = async (
       contents: contents,
       config: {
         thinkingConfig: {
-          thinkingBudget: budget
+          thinkingBudget: budget,
+          includeThoughts: true
         }
       }
     }));
@@ -52,12 +54,23 @@ export const streamSynthesisResponse = async (
     try {
       for await (const chunk of (synthesisStream as any)) {
         if (signal.aborted) break;
-        const chunkText = chunk.text || "";
-        onChunk(chunkText, "");
+        let chunkText = "";
+        let chunkThought = "";
+        if (chunk.candidates?.[0]?.content?.parts) {
+          for (const part of chunk.candidates[0].content.parts) {
+            if (part.thought) {
+              chunkThought += (part.text || "");
+            } else if (part.text) {
+              chunkText += part.text;
+            }
+          }
+          onChunk(chunkText, chunkThought);
+        }
       }
     } catch (streamError) {
-      console.error("Synthesis stream interrupted:", streamError);
+      logger.error("Synthesis", "Stream interrupted", streamError);
       throw streamError;
     }
   } else {
@@ -95,7 +108,7 @@ export const streamSynthesisResponse = async (
       onChunk(chunk.text, chunk.thought || '');
     }
   } catch (streamError) {
-    console.error("Synthesis stream interrupted:", streamError);
+    logger.error("Synthesis", "Stream interrupted (OpenAI)", streamError);
     throw streamError;
   }
 }
prisma/services/logger.ts (new file, 119 lines)
View File

@@ -0,0 +1,119 @@
export type LogLevel = 'info' | 'warn' | 'error' | 'debug';
export type LogCategory = 'System' | 'User' | 'API' | 'Manager' | 'Expert' | 'Synthesis';

export interface LogEntry {
  timestamp: string;
  level: LogLevel;
  category: LogCategory;
  message: string;
  data?: any;
}

class LoggerService {
  private logs: LogEntry[] = [];
  private maxLogs: number = 5000;

  constructor() {
    // Attempt to restore logs from sessionStorage on load (optional persistence)
    try {
      const saved = sessionStorage.getItem('prisma_logs');
      if (saved) {
        this.logs = JSON.parse(saved);
      }
    } catch (e) {
      console.warn('Failed to restore logs');
    }
    this.info('System', 'Logger service initialized');
  }

  private persist() {
    try {
      sessionStorage.setItem('prisma_logs', JSON.stringify(this.logs.slice(-500))); // Persist last 500 only
    } catch (e) {
      // Ignore quota errors
    }
  }

  add(level: LogLevel, category: LogCategory, message: string, data?: any) {
    // Deep-clone via JSON round-trip so stored entries are immutable snapshots;
    // the replacer redacts sensitive keys. Circular structures would throw here,
    // so fall back to a placeholder instead of losing the entry.
    let safeData: any;
    try {
      safeData = data ? JSON.parse(JSON.stringify(data, this.replacer)) : undefined;
    } catch {
      safeData = '[Unserializable data]';
    }

    const entry: LogEntry = {
      timestamp: new Date().toISOString(),
      level,
      category,
      message,
      data: safeData
    };

    this.logs.push(entry);

    // Trim if too large
    if (this.logs.length > this.maxLogs) {
      this.logs = this.logs.slice(this.logs.length - this.maxLogs);
    }

    // Mirror to console for dev
    if (import.meta.env.DEV) {
      const style = level === 'error' ? 'color: red' : level === 'warn' ? 'color: orange' : 'color: cyan';
      console.log(`%c[${category}] ${message}`, style, data || '');
    }

    this.persist();
  }

  // JSON replacer: redacts sensitive keys and flattens Error objects,
  // which would otherwise serialize to "{}".
  private replacer(key: string, value: any) {
    if (key === 'apiKey') return '***REDACTED***';
    if (key === 'auth') return '***REDACTED***';
    if (value instanceof Error) {
      return { name: value.name, message: value.message, stack: value.stack };
    }
    return value;
  }

  info(category: LogCategory, message: string, data?: any) {
    this.add('info', category, message, data);
  }

  warn(category: LogCategory, message: string, data?: any) {
    this.add('warn', category, message, data);
  }

  error(category: LogCategory, message: string, data?: any) {
    this.add('error', category, message, data);
  }

  debug(category: LogCategory, message: string, data?: any) {
    this.add('debug', category, message, data);
  }

  getLogs() {
    return this.logs;
  }

  clear() {
    this.logs = [];
    this.persist();
    this.info('System', 'Logs cleared by user');
  }

  download() {
    const textContent = this.logs.map(entry => {
      const date = new Date(entry.timestamp).toLocaleTimeString();
      let line = `[${date}] [${entry.level.toUpperCase()}] [${entry.category}]: ${entry.message}`;
      if (entry.data) {
        line += `\n  Data: ${JSON.stringify(entry.data, null, 2)}`;
      }
      return line;
    }).join('\n----------------------------------------\n');

    const blob = new Blob([textContent], { type: 'text/plain' });
    const url = URL.createObjectURL(blob);
    const a = document.createElement('a');
    a.href = url;
    a.download = `prisma-debug-log-${new Date().toISOString().slice(0, 19).replace(/:/g, '-')}.txt`;
    document.body.appendChild(a);
    a.click();
    document.body.removeChild(a);
    URL.revokeObjectURL(url);
  }
}

export const logger = new LoggerService();
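
Usage sketch for the singleton — method names and category strings match the class above; the import path is written relative to a hypothetical caller:

import { logger } from './services/logger';

logger.info('API', 'Request started', { model: 'gemini-3-flash-preview', apiKey: 'sk-1234' });
// → stored with apiKey: '***REDACTED***' thanks to the replacer

logger.error('System', 'Stream failed', new Error('socket hang up'));

console.log(logger.getLogs().length); // current entry count (capped at 5000 in memory)
logger.download();                    // saves prisma-debug-log-<timestamp>.txt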