DeepInfra: wire thinking level through proxy stream wrapper

This commit is contained in:
Georgi Atsev 2026-03-20 09:46:15 +02:00
parent 774b5ee75e
commit 762c281ffd
4 changed files with 95 additions and 3 deletions

View File

@ -1,7 +1,11 @@
import { definePluginEntry } from "openclaw/plugin-sdk/core";
import { createProviderApiKeyAuthMethod } from "openclaw/plugin-sdk/provider-auth";
import { buildSingleProviderApiKeyCatalog } from "openclaw/plugin-sdk/provider-catalog";
import { createDeepInfraSystemCacheWrapper } from "openclaw/plugin-sdk/provider-stream";
import {
createDeepInfraSystemCacheWrapper,
createDeepInfraWrapper,
isProxyReasoningUnsupported,
} from "openclaw/plugin-sdk/provider-stream";
import { applyDeepInfraConfig, DEEPINFRA_DEFAULT_MODEL_REF } from "./onboard.js";
import { buildDeepInfraProviderWithDiscovery } from "./provider-catalog.js";
@ -66,7 +70,14 @@ export default definePluginEntry({
geminiThoughtSignatureModelHints: ["gemini"],
dropThinkingBlockModelHints: ["claude"],
},
wrapStreamFn: (ctx) => createDeepInfraSystemCacheWrapper(ctx.streamFn),
wrapStreamFn: (ctx) => {
  // Models flagged by isProxyReasoningUnsupported (x-ai/ prefix) must not
  // receive a reasoning level, so drop it before building the wrapper chain.
  const level = isProxyReasoningUnsupported(ctx.modelId) ? undefined : ctx.thinkingLevel;
  // Layering: reasoning injection innermost, system-cache handling outermost.
  return createDeepInfraSystemCacheWrapper(createDeepInfraWrapper(ctx.streamFn, level));
},
isCacheTtlEligible: (ctx) => isDeepInfraCacheTtlModel(ctx.modelId),
});
},

View File

@ -2,7 +2,7 @@ import type { StreamFn } from "@mariozechner/pi-agent-core";
import type { Context, Model } from "@mariozechner/pi-ai";
import { createAssistantMessageEventStream } from "@mariozechner/pi-ai";
import { describe, expect, it } from "vitest";
import { createOpenRouterWrapper } from "./proxy-stream-wrappers.js";
import { createDeepInfraWrapper, createOpenRouterWrapper } from "./proxy-stream-wrappers.js";
describe("proxy stream wrappers", () => {
it("adds OpenRouter attribution headers to stream options", () => {
@ -35,4 +35,67 @@ describe("proxy stream wrappers", () => {
},
]);
});
describe("createDeepInfraWrapper", () => {
  // Builds a stub StreamFn that snapshots each payload *after* the wrapper's
  // onPayload hook has run, so tests observe the normalized request body.
  function recordPayloads() {
    const captured: unknown[] = [];
    const stubStreamFn: StreamFn = (_model, _context, options) => {
      const payload = { model: "test" };
      options?.onPayload?.(payload, _model);
      captured.push(structuredClone(payload));
      return createAssistantMessageEventStream();
    };
    return { stubStreamFn, captured };
  }

  // Minimal DeepInfra model/context fixtures shared by every case below.
  const model = {
    api: "openai-completions",
    provider: "deepinfra",
    id: "moonshotai/Kimi-K2.5",
  } as Model<"openai-completions">;
  const context: Context = { messages: [] };

  it("injects reasoning effort when thinkingLevel is set", () => {
    const { stubStreamFn, captured } = recordPayloads();
    void createDeepInfraWrapper(stubStreamFn, "high")(model, context, {});
    expect(captured[0]).toEqual({
      model: "test",
      reasoning: { effort: "high" },
    });
  });

  it("maps 'off' to no reasoning field", () => {
    const { stubStreamFn, captured } = recordPayloads();
    void createDeepInfraWrapper(stubStreamFn, "off")(model, context, {});
    expect(captured[0]).toEqual({ model: "test" });
  });

  it("does not inject reasoning when thinkingLevel is undefined", () => {
    const { stubStreamFn, captured } = recordPayloads();
    void createDeepInfraWrapper(stubStreamFn, undefined)(model, context, {});
    expect(captured[0]).toEqual({ model: "test" });
  });

  it("preserves existing onPayload callback", () => {
    const { stubStreamFn } = recordPayloads();
    const seen: unknown[] = [];
    void createDeepInfraWrapper(stubStreamFn, "low")(model, context, {
      onPayload: (payload) => {
        seen.push(structuredClone(payload));
      },
    });
    expect(seen[0]).toEqual({
      model: "test",
      reasoning: { effort: "low" },
    });
  });
});
});

View File

@ -135,6 +135,23 @@ export function isProxyReasoningUnsupported(modelId: string): boolean {
return modelId.toLowerCase().startsWith("x-ai/");
}
/**
 * Wraps a StreamFn for DeepInfra so each outgoing payload is passed through
 * normalizeProxyReasoningPayload with the given thinking level before any
 * caller-supplied onPayload hook sees it.
 *
 * @param baseStreamFn - underlying stream function; falls back to streamSimple
 *   when undefined.
 * @param thinkingLevel - reasoning level to apply; undefined leaves the
 *   payload's reasoning untouched by the level (normalization still runs).
 */
export function createDeepInfraWrapper(
  baseStreamFn: StreamFn | undefined,
  thinkingLevel?: ThinkLevel,
): StreamFn {
  return (model, context, options) => {
    const chainedOnPayload = options?.onPayload;
    const streamFn = baseStreamFn ?? streamSimple;
    return streamFn(model, context, {
      ...options,
      onPayload: (payload) => {
        // Normalize first so the downstream hook observes the final payload.
        normalizeProxyReasoningPayload(payload, thinkingLevel);
        return chainedOnPayload?.(payload, model);
      },
    });
  };
}
export function createKilocodeWrapper(
baseStreamFn: StreamFn | undefined,
thinkingLevel?: ThinkLevel,

View File

@ -10,6 +10,7 @@ export {
} from "../agents/pi-embedded-runner/google-stream-wrappers.js";
export {
createDeepInfraSystemCacheWrapper,
createDeepInfraWrapper,
createKilocodeWrapper,
createOpenRouterSystemCacheWrapper,
createOpenRouterWrapper,