From 684f41692d4b78faf2d621aaae4678f77dafedcb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E6=9D=A8=E8=89=BA=E9=9F=AC=28yangyitao=29?= Date: Sun, 15 Mar 2026 02:31:00 +0000 Subject: [PATCH] fix(ollama): send think param for thinking models Ollama thinking models (deepseek-r1, qwq, etc.) require an explicit `think` boolean in the /api/chat request body to control reasoning. Previously this parameter was never sent, so thinking models always used their default behavior regardless of the user's thinking-level config. Now the Ollama stream function forwards the reasoning level as `think: true` when reasoning is enabled and `think: false` when reasoning is disabled. Closes #46680 --- src/agents/ollama-stream.test.ts | 54 ++++++++++++++++++++++++++++++++ src/agents/ollama-stream.ts | 13 ++++++++ 2 files changed, 67 insertions(+) diff --git a/src/agents/ollama-stream.test.ts b/src/agents/ollama-stream.test.ts index ded8064ea19..5382aaddb45 100644 --- a/src/agents/ollama-stream.test.ts +++ b/src/agents/ollama-stream.test.ts @@ -544,6 +544,60 @@ describe("createOllamaStreamFn", () => { [{ type: "text", text: "final answer" }], ); }); + + it("sends think:true when reasoning level is set", async () => { + await withMockNdjsonFetch( + [ + '{"model":"m","created_at":"t","message":{"role":"assistant","content":"ok"},"done":false}', + '{"model":"m","created_at":"t","message":{"role":"assistant","content":""},"done":true,"prompt_eval_count":1,"eval_count":1}', + ], + async (fetchMock) => { + const streamFn = createOllamaStreamFn("http://ollama-host:11434"); + const stream = streamFn( + { + id: "deepseek-r1:32b", + api: "ollama", + provider: "ollama", + contextWindow: 131072, + } as never, + { messages: [{ role: "user", content: "hello" }] } as never, + { reasoning: "medium" } as never, + ); + await collectStreamEvents(stream); + + const [, reqInit] = fetchMock.mock.calls[0] as unknown as [string, RequestInit]; + const body = JSON.parse(reqInit.body as string) as { think?: boolean }; 
+ expect(body.think).toBe(true); + }, + ); + }); + + it("sends think:false when reasoning is not set but options are present", async () => { + await withMockNdjsonFetch( + [ + '{"model":"m","created_at":"t","message":{"role":"assistant","content":"ok"},"done":false}', + '{"model":"m","created_at":"t","message":{"role":"assistant","content":""},"done":true,"prompt_eval_count":1,"eval_count":1}', + ], + async (fetchMock) => { + const streamFn = createOllamaStreamFn("http://ollama-host:11434"); + const stream = streamFn( + { + id: "deepseek-r1:32b", + api: "ollama", + provider: "ollama", + contextWindow: 131072, + } as never, + { messages: [{ role: "user", content: "hello" }] } as never, + {} as never, + ); + await collectStreamEvents(stream); + + const [, reqInit] = fetchMock.mock.calls[0] as unknown as [string, RequestInit]; + const body = JSON.parse(reqInit.body as string) as { think?: boolean }; + expect(body.think).toBe(false); + }, + ); + }); }); describe("resolveOllamaBaseUrlForRun", () => { diff --git a/src/agents/ollama-stream.ts b/src/agents/ollama-stream.ts index f332ad1fd83..e92b0897144 100644 --- a/src/agents/ollama-stream.ts +++ b/src/agents/ollama-stream.ts @@ -42,6 +42,7 @@ interface OllamaChatRequest { model: string; messages: OllamaChatMessage[]; stream: boolean; + think?: boolean; tools?: OllamaTool[]; options?: Record<string, unknown>; } @@ -459,10 +460,22 @@ export function createOllamaStreamFn( ollamaOptions.num_predict = options.maxTokens; } + // Ollama thinking models (e.g. deepseek-r1, qwq) respect a top-level + // `think` boolean. Forward the reasoning level so `think: false` is + // sent explicitly when thinking is disabled (#46680). + const thinkParam: { think?: boolean } = {}; + if (options?.reasoning) { + thinkParam.think = true; + } else if (options && !options.reasoning) { + // Thinking explicitly disabled – tell Ollama not to think.
+ thinkParam.think = false; + } + const body: OllamaChatRequest = { model: model.id, messages: ollamaMessages, stream: true, + ...thinkParam, ...(ollamaTools.length > 0 ? { tools: ollamaTools } : {}), options: ollamaOptions, };