diff --git a/.DS_Store b/.DS_Store
index b963966..cc36e27 100644
Binary files a/.DS_Store and b/.DS_Store differ
diff --git a/prisma/config.ts b/prisma/config.ts
index 5f0d9bd..78ce52b 100644
--- a/prisma/config.ts
+++ b/prisma/config.ts
@@ -14,6 +14,12 @@ export const MODELS: { value: ModelOption; label: string; desc: string; provider
     desc: 'Deep reasoning, complex tasks, higher intelligence.',
     provider: 'google'
   },
+  {
+    value: 'deepseek-reasoner',
+    label: 'DeepSeek R1',
+    desc: 'State-of-the-art open reasoning model.',
+    provider: 'deepseek'
+  },
   {
     value: 'custom',
     label: 'Custom Model',
diff --git a/prisma/services/deepThink/openaiClient.ts b/prisma/services/deepThink/openaiClient.ts
index 5f7c9a3..f45f9e6 100644
--- a/prisma/services/deepThink/openaiClient.ts
+++ b/prisma/services/deepThink/openaiClient.ts
@@ -65,7 +65,15 @@ export const generateContent = async (
   try {
     const response = await withRetry(() => ai.chat.completions.create(requestOptions));
-    const content = response.choices[0]?.message?.content || '';
+    const message = response.choices[0]?.message;
+    const content = message?.content || '';
+
+    // Check for DeepSeek native reasoning field
+    const reasoningContent = (message as any)?.reasoning_content;
+
+    if (reasoningContent && config.thinkingConfig?.includeThoughts) {
+      return { text: content, thought: reasoningContent };
+    }
 
     if (config.thinkingConfig?.includeThoughts) {
       const { thought, text } = parseThinkingTokens(content);
@@ -112,6 +120,13 @@ export async function* generateContentStream(
   for await (const chunk of (stream as any)) {
     const delta = chunk.choices[0]?.delta?.content || '';
 
+    // Support DeepSeek native reasoning field
+    const reasoningDelta = (chunk.choices[0]?.delta as any)?.reasoning_content || '';
+
+    // If we have native reasoning content, yield it immediately as thought
+    if (reasoningDelta) {
+      yield { text: '', thought: reasoningDelta };
+    }
 
     if (!delta) continue;