Skip to content

Commit a07ddf0

Browse files
committed
Turn on includeThoughts for Gemini
1 parent e5bb467 commit a07ddf0

File tree

1 file changed

+26
-4
lines changed

1 file changed

+26
-4
lines changed

backend/src/llm-apis/vercel-ai-sdk/ai-sdk.ts

Lines changed: 26 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
import { anthropic } from '@ai-sdk/anthropic'
2-
import { google } from '@ai-sdk/google'
2+
import { google, GoogleGenerativeAIProviderOptions } from '@ai-sdk/google'
33
import { openai } from '@ai-sdk/openai'
44
import {
55
CoreAssistantMessage,
@@ -71,13 +71,35 @@ export const promptAiSdkStream = async function* (
7171
const response = streamText({
7272
...options,
7373
model: aiSDKModel,
74+
providerOptions: {
75+
google: {
76+
thinkingConfig: {
77+
includeThoughts: true,
78+
},
79+
} satisfies GoogleGenerativeAIProviderOptions,
80+
},
7481
})
7582

7683
let content = ''
84+
let hasReasoning = false
85+
let finishedReasoning = false
7786

78-
for await (const chunk of response.textStream) {
79-
content += chunk
80-
yield chunk
87+
for await (const chunk of response.fullStream) {
88+
if (chunk.type === 'reasoning') {
89+
if (!hasReasoning) {
90+
hasReasoning = true
91+
yield '<think_deeply>\n<thought>'
92+
}
93+
yield chunk.textDelta
94+
}
95+
if (chunk.type === 'text-delta') {
96+
if (hasReasoning && !finishedReasoning) {
97+
finishedReasoning = true
98+
yield '</thought>\n</think_deeply>'
99+
}
100+
content += chunk.textDelta
101+
yield chunk.textDelta
102+
}
81103
}
82104

83105
const usage = await response.usage

0 commit comments

Comments (0)