log
@@ -3,6 +3,7 @@ import { ModelOption, ExpertResult, MessageAttachment } from '../../types';
 import { getExpertSystemInstruction } from './prompts';
 import { withRetry } from '../utils/retry';
 import { generateContentStream as generateOpenAIStream } from './openaiClient';
+import { logger } from '../logger';
 
 const isGoogleProvider = (ai: any): boolean => {
   return ai?.models?.generateContentStream !== undefined;
@@ -44,7 +45,8 @@ export const streamExpertResponse = async (
       systemInstruction: getExpertSystemInstruction(expert.role, expert.description, context),
       temperature: expert.temperature,
       thinkingConfig: {
-        thinkingBudget: budget
+        thinkingBudget: budget,
+        includeThoughts: true
       }
     }
   }));
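
For reference, a minimal sketch (not part of the commit) of the request this hunk configures, assuming the @google/genai-style client detected by isGoogleProvider; model and contents stand in for whatever the surrounding function already builds:

// Illustrative only: the hunk above just adds includeThoughts to thinkingConfig.
const streamResult = await withRetry(() => ai.models.generateContentStream({
  model,      // model id chosen by the caller (assumed name)
  contents,   // request contents assembled earlier (assumed name)
  config: {
    systemInstruction: getExpertSystemInstruction(expert.role, expert.description, context),
    temperature: expert.temperature,
    thinkingConfig: {
      thinkingBudget: budget,   // reasoning-token budget, already present before this commit
      includeThoughts: true     // new: ask the API to return thought parts in the stream
    }
  }
}));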
@@ -53,11 +55,22 @@ export const streamExpertResponse = async (
       for await (const chunk of (streamResult as any)) {
         if (signal.aborted) break;
 
-        const chunkText = chunk.text || "";
-        onChunk(chunkText, "");
+        let chunkText = "";
+        let chunkThought = "";
+
+        if (chunk.candidates?.[0]?.content?.parts) {
+          for (const part of chunk.candidates[0].content.parts) {
+            if (part.thought) {
+              chunkThought += (part.text || "");
+            } else if (part.text) {
+              chunkText += part.text;
+            }
+          }
+          onChunk(chunkText, chunkThought);
+        }
       }
     } catch (streamError) {
-      console.error(`Stream interrupted for expert ${expert.role}:`, streamError);
+      logger.error("Expert", `Stream interrupted for expert ${expert.role}`, streamError);
       throw streamError;
     }
   } else {
@@ -95,7 +108,7 @@ export const streamExpertResponse = async (
         onChunk(chunk.text, chunk.thought || '');
       }
     } catch (streamError) {
-      console.error(`Stream interrupted for expert ${expert.role}:`, streamError);
+      logger.error("Expert", `Stream interrupted for expert ${expert.role} (OpenAI)`, streamError);
       throw streamError;
     }
   }

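The new loop above splits each streamed chunk into answer text and thought text. A rough sketch of the chunk shape it assumes (field names follow the Google GenAI streaming response; the interface and helper below are illustrative, not part of the commit):

// Approximate shape of one streamed chunk as read by the loop above.
interface StreamPart {
  text?: string;
  thought?: boolean; // true when the part belongs to the model's reasoning
}

interface StreamChunk {
  text?: string; // aggregate accessor used by the old code path
  candidates?: Array<{ content?: { parts?: StreamPart[] } }>;
}

// Hypothetical helper mirroring the per-chunk split performed above.
function splitChunk(chunk: StreamChunk): { text: string; thought: string } {
  let text = "";
  let thought = "";
  for (const part of chunk.candidates?.[0]?.content?.parts ?? []) {
    if (part.thought) thought += part.text || "";
    else if (part.text) text += part.text;
  }
  return { text, thought };
}
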
@@ -5,6 +5,7 @@ import { cleanJsonString } from '../../utils';
 import { MANAGER_SYSTEM_PROMPT, MANAGER_REVIEW_SYSTEM_PROMPT } from './prompts';
 import { withRetry } from '../utils/retry';
 import { generateContent as generateOpenAIContent } from './openaiClient';
+import { logger } from '../logger';
 
 const isGoogleProvider = (ai: any): boolean => {
   return ai?.models?.generateContent !== undefined;
@@ -68,6 +69,7 @@ export const executeManagerAnalysis = async (
       responseMimeType: "application/json",
       responseSchema: managerSchema,
       thinkingConfig: {
+        includeThoughts: true,
         thinkingBudget: budget
       }
     }
@@ -82,7 +84,7 @@ export const executeManagerAnalysis = async (
       }
       return analysisJson;
     } catch (e) {
-      console.error("Manager Analysis Error:", e);
+      logger.error("Manager", "Analysis generation failed", e);
       return {
         thought_process: "Direct processing fallback due to analysis error.",
         experts: []
@@ -135,7 +137,7 @@ export const executeManagerAnalysis = async (
       }
       return analysisJson;
     } catch (e) {
-      console.error("Manager Analysis Error:", e);
+      logger.error("Manager", "Analysis generation failed (OpenAI)", e);
       return {
         thought_process: "Direct processing fallback due to analysis error.",
         experts: []
@@ -192,6 +194,7 @@ export const executeManagerReview = async (
       responseMimeType: "application/json",
       responseSchema: reviewSchema,
       thinkingConfig: {
+        includeThoughts: true,
         thinkingBudget: budget
       }
     }
@@ -201,7 +204,7 @@ export const executeManagerReview = async (
       const cleanText = cleanJsonString(rawText);
       return JSON.parse(cleanText) as ReviewResult;
     } catch (e) {
-      console.error("Review Error:", e);
+      logger.error("Manager", "Review generation failed", e);
       return { satisfied: true, critique: "Processing Error, proceeding to synthesis." };
     }
   } else {
@@ -220,7 +223,7 @@ export const executeManagerReview = async (
 
       return JSON.parse(response.text) as ReviewResult;
     } catch (e) {
-      console.error("Review Error:", e);
+      logger.error("Manager", "Review generation failed (OpenAI)", e);
       return { satisfied: true, critique: "Processing Error, proceeding to synthesis." };
     }
   }

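In both review branches above, a parse or API failure is downgraded to a satisfied review so the pipeline can continue to synthesis. A minimal sketch of the shape involved, inferred only from the fields visible in this diff (the real ReviewResult type lives elsewhere in the repo):

// Fields inferred from the hunks above; illustrative, not the repo's actual definition.
interface ReviewResult {
  satisfied: boolean;
  critique: string;
}

// Fallback returned from the catch blocks so an error never blocks synthesis.
const reviewFallback: ReviewResult = {
  satisfied: true,
  critique: "Processing Error, proceeding to synthesis."
};
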
@@ -3,6 +3,7 @@ import { ModelOption, ExpertResult, MessageAttachment } from '../../types';
 import { getSynthesisPrompt } from './prompts';
 import { withRetry } from '../utils/retry';
 import { generateContentStream as generateOpenAIStream } from './openaiClient';
+import { logger } from '../logger';
 
 const isGoogleProvider = (ai: any): boolean => {
   return ai?.models?.generateContentStream !== undefined;
@@ -44,7 +45,8 @@ export const streamSynthesisResponse = async (
     contents: contents,
     config: {
       thinkingConfig: {
-        thinkingBudget: budget
+        thinkingBudget: budget,
+        includeThoughts: true
       }
     }
   }));
@@ -52,12 +54,23 @@ export const streamSynthesisResponse = async (
     try {
       for await (const chunk of (synthesisStream as any)) {
         if (signal.aborted) break;
 
-        const chunkText = chunk.text || "";
-        onChunk(chunkText, "");
+        let chunkText = "";
+        let chunkThought = "";
+
+        if (chunk.candidates?.[0]?.content?.parts) {
+          for (const part of chunk.candidates[0].content.parts) {
+            if (part.thought) {
+              chunkThought += (part.text || "");
+            } else if (part.text) {
+              chunkText += part.text;
+            }
+          }
+          onChunk(chunkText, chunkThought);
+        }
       }
     } catch (streamError) {
-      console.error("Synthesis stream interrupted:", streamError);
+      logger.error("Synthesis", "Stream interrupted", streamError);
       throw streamError;
     }
   } else {
@@ -95,7 +108,7 @@ export const streamSynthesisResponse = async (
       onChunk(chunk.text, chunk.thought || '');
     }
   } catch (streamError) {
-    console.error("Synthesis stream interrupted:", streamError);
+    logger.error("Synthesis", "Stream interrupted (OpenAI)", streamError);
     throw streamError;
   }
 }

prisma/services/logger.ts (new file, 119 lines)
@@ -0,0 +1,119 @@
+
+export type LogLevel = 'info' | 'warn' | 'error' | 'debug';
+export type LogCategory = 'System' | 'User' | 'API' | 'Manager' | 'Expert' | 'Synthesis';
+
+export interface LogEntry {
+  timestamp: string;
+  level: LogLevel;
+  category: LogCategory;
+  message: string;
+  data?: any;
+}
+
+class LoggerService {
+  private logs: LogEntry[] = [];
+  private maxLogs: number = 5000;
+
+  constructor() {
+    // Attempt to restore logs from sessionStorage on load (optional persistence)
+    try {
+      const saved = sessionStorage.getItem('prisma_logs');
+      if (saved) {
+        this.logs = JSON.parse(saved);
+      }
+    } catch (e) {
+      console.warn('Failed to restore logs');
+    }
+
+    this.info('System', 'Logger service initialized');
+  }
+
+  private persist() {
+    try {
+      sessionStorage.setItem('prisma_logs', JSON.stringify(this.logs.slice(-500))); // Persist last 500 only
+    } catch (e) {
+      // Ignore quota errors
+    }
+  }
+
+  add(level: LogLevel, category: LogCategory, message: string, data?: any) {
+    const entry: LogEntry = {
+      timestamp: new Date().toISOString(),
+      level,
+      category,
+      message,
+      data: data ? JSON.parse(JSON.stringify(data, this.replacer)) : undefined
+    };
+
+    this.logs.push(entry);
+
+    // Trim if too large
+    if (this.logs.length > this.maxLogs) {
+      this.logs = this.logs.slice(this.logs.length - this.maxLogs);
+    }
+
+    // Mirror to console for dev
+    if (import.meta.env.DEV) {
+      const style = level === 'error' ? 'color: red' : level === 'warn' ? 'color: orange' : 'color: cyan';
+      console.log(`%c[${category}] ${message}`, style, data || '');
+    }
+
+    this.persist();
+  }
+
+  // Circular reference replacer for JSON
+  private replacer(key: string, value: any) {
+    if (key === 'apiKey') return '***REDACTED***';
+    if (key === 'auth') return '***REDACTED***';
+    return value;
+  }
+
+  info(category: LogCategory, message: string, data?: any) {
+    this.add('info', category, message, data);
+  }
+
+  warn(category: LogCategory, message: string, data?: any) {
+    this.add('warn', category, message, data);
+  }
+
+  error(category: LogCategory, message: string, data?: any) {
+    this.add('error', category, message, data);
+  }
+
+  debug(category: LogCategory, message: string, data?: any) {
+    this.add('debug', category, message, data);
+  }
+
+  getLogs() {
+    return this.logs;
+  }
+
+  clear() {
+    this.logs = [];
+    this.persist();
+    this.info('System', 'Logs cleared by user');
+  }
+
+  download() {
+    const textContent = this.logs.map(entry => {
+      const date = new Date(entry.timestamp).toLocaleTimeString();
+      let line = `[${date}] [${entry.level.toUpperCase()}] [${entry.category}]: ${entry.message}`;
+      if (entry.data) {
+        line += `\n Data: ${JSON.stringify(entry.data, null, 2)}`;
+      }
+      return line;
+    }).join('\n----------------------------------------\n');
+
+    const blob = new Blob([textContent], { type: 'text/plain' });
+    const url = URL.createObjectURL(blob);
+    const a = document.createElement('a');
+    a.href = url;
+    a.download = `prisma-debug-log-${new Date().toISOString().slice(0, 19).replace(/:/g, '-')}.txt`;
+    document.body.appendChild(a);
+    a.click();
+    document.body.removeChild(a);
+    URL.revokeObjectURL(url);
+  }
+}
+
+export const logger = new LoggerService();
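
The service clients above import this singleton instead of calling console directly. A short usage sketch (the import path matches the clients in this commit; the surrounding code is illustrative):

import { logger } from '../logger';

// Category-scoped logging, mirroring the calls added in the hunks above.
logger.info('API', 'Provider request dispatched', { attempt: 1 });

try {
  // ... provider call ...
} catch (e) {
  // Values under keys named 'apiKey' or 'auth' are redacted by the logger's replacer.
  logger.error('Expert', 'Stream interrupted', e);
  throw e;
}

// A debug UI (hypothetical) can read or export the in-memory buffer:
// logger.getLogs()  -> LogEntry[]
// logger.download() -> saves the buffer as a plain-text file via a temporary <a> element
// logger.clear()    -> empties the buffer and records a 'Logs cleared' entry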