refactor(kilocode-tests): share extra-params harness

This commit is contained in:
Peter Steinberger 2026-03-17 07:18:10 +00:00
parent 1b9704df4d
commit 7bb36efd7b
5 changed files with 137 additions and 155 deletions

View File

@ -1,15 +1,8 @@
import type { StreamFn } from "@mariozechner/pi-agent-core";
import type { Context, Model } from "@mariozechner/pi-ai";
import { createAssistantMessageEventStream } from "@mariozechner/pi-ai";
import type { Model } from "@mariozechner/pi-ai";
import { afterEach, describe, expect, it } from "vitest";
import type { OpenClawConfig } from "../../config/config.js";
import { captureEnv } from "../../test-utils/env.js";
import { applyExtraParamsToAgent } from "./extra-params.js";
type CapturedCall = {
headers?: Record<string, string>;
payload?: Record<string, unknown>;
};
import { runExtraParamsCase } from "./extra-params.test-support.js";
const TEST_CFG = {
plugins: {
@ -26,30 +19,19 @@ function applyAndCapture(params: {
modelId: string;
callerHeaders?: Record<string, string>;
cfg?: OpenClawConfig;
}): CapturedCall {
const captured: CapturedCall = {};
const baseStreamFn: StreamFn = (model, _context, options) => {
captured.headers = options?.headers;
options?.onPayload?.({}, model);
return createAssistantMessageEventStream();
};
const agent = { streamFn: baseStreamFn };
applyExtraParamsToAgent(agent, params.cfg ?? TEST_CFG, params.provider, params.modelId);
const model = {
api: "openai-completions",
provider: params.provider,
id: params.modelId,
} as Model<"openai-completions">;
const context: Context = { messages: [] };
void agent.streamFn?.(model, context, {
headers: params.callerHeaders,
}) {
return runExtraParamsCase({
applyModelId: params.modelId,
applyProvider: params.provider,
callerHeaders: params.callerHeaders,
cfg: params.cfg ?? TEST_CFG,
model: {
api: "openai-completions",
provider: params.provider,
id: params.modelId,
} as Model<"openai-completions">,
payload: {},
});
return captured;
}
function applyAndCaptureReasoning(params: {
@ -58,35 +40,18 @@ function applyAndCaptureReasoning(params: {
initialPayload?: Record<string, unknown>;
thinkingLevel?: "minimal" | "low" | "medium" | "high";
}) {
let capturedPayload: Record<string, unknown> | undefined;
const baseStreamFn: StreamFn = (model, _context, options) => {
const payload: Record<string, unknown> = { ...params.initialPayload };
options?.onPayload?.(payload, model);
capturedPayload = payload;
return createAssistantMessageEventStream();
};
const agent = { streamFn: baseStreamFn };
applyExtraParamsToAgent(
agent,
params.cfg ?? TEST_CFG,
"kilocode",
params.modelId,
undefined,
params.thinkingLevel ?? "high",
);
const model = {
api: "openai-completions",
provider: "kilocode",
id: params.modelId,
} as Model<"openai-completions">;
const context: Context = { messages: [] };
void agent.streamFn?.(model, context, {});
return capturedPayload;
return runExtraParamsCase({
applyModelId: params.modelId,
applyProvider: "kilocode",
cfg: params.cfg ?? TEST_CFG,
model: {
api: "openai-completions",
provider: "kilocode",
id: params.modelId,
} as Model<"openai-completions">,
payload: { ...params.initialPayload },
thinkingLevel: params.thinkingLevel ?? "high",
}).payload;
}
describe("extra-params: Kilocode wrapper", () => {
@ -191,26 +156,18 @@ describe("extra-params: Kilocode kilo/auto reasoning", () => {
});
it("does not inject reasoning.effort for x-ai models", () => {
let capturedPayload: Record<string, unknown> | undefined;
const baseStreamFn: StreamFn = (model, _context, options) => {
const payload: Record<string, unknown> = { reasoning_effort: "high" };
options?.onPayload?.(payload, model);
capturedPayload = payload;
return createAssistantMessageEventStream();
};
const agent = { streamFn: baseStreamFn };
applyExtraParamsToAgent(agent, TEST_CFG, "kilocode", "x-ai/grok-3", undefined, "high");
const model = {
api: "openai-completions",
provider: "kilocode",
id: "x-ai/grok-3",
} as Model<"openai-completions">;
const context: Context = { messages: [] };
void agent.streamFn?.(model, context, {});
const capturedPayload = runExtraParamsCase({
applyModelId: "x-ai/grok-3",
applyProvider: "kilocode",
cfg: TEST_CFG,
model: {
api: "openai-completions",
provider: "kilocode",
id: "x-ai/grok-3",
} as Model<"openai-completions">,
payload: { reasoning_effort: "high" },
thinkingLevel: "high",
}).payload;
// x-ai models reject reasoning.effort — should be skipped
expect(capturedPayload?.reasoning).toBeUndefined();

View File

@ -1,41 +1,26 @@
import type { StreamFn } from "@mariozechner/pi-agent-core";
import type { Context, Model } from "@mariozechner/pi-ai";
import { createAssistantMessageEventStream } from "@mariozechner/pi-ai";
import type { Model } from "@mariozechner/pi-ai";
import { afterEach, describe, expect, it } from "vitest";
import { captureEnv } from "../../test-utils/env.js";
import { applyExtraParamsToAgent } from "./extra-params.js";
type CapturedCall = {
headers?: Record<string, string>;
};
import { runExtraParamsCase } from "./extra-params.test-support.js";
function applyAndCapture(params: {
provider: string;
modelId: string;
baseUrl?: string;
callerHeaders?: Record<string, string>;
}): CapturedCall {
const captured: CapturedCall = {};
const baseStreamFn: StreamFn = (model, _context, options) => {
captured.headers = options?.headers;
options?.onPayload?.({}, model);
return createAssistantMessageEventStream();
};
const agent = { streamFn: baseStreamFn };
applyExtraParamsToAgent(agent, undefined, params.provider, params.modelId);
const model = {
api: "openai-responses",
provider: params.provider,
id: params.modelId,
baseUrl: params.baseUrl,
} as Model<"openai-responses">;
const context: Context = { messages: [] };
void agent.streamFn?.(model, context, { headers: params.callerHeaders });
return captured;
}) {
return runExtraParamsCase({
applyModelId: params.modelId,
applyProvider: params.provider,
callerHeaders: params.callerHeaders,
model: {
api: "openai-responses",
provider: params.provider,
id: params.modelId,
baseUrl: params.baseUrl,
} as Model<"openai-responses">,
payload: {},
});
}
describe("extra-params: OpenAI attribution", () => {

View File

@ -1,9 +1,6 @@
import type { StreamFn } from "@mariozechner/pi-agent-core";
import type { Context, Model } from "@mariozechner/pi-ai";
import { createAssistantMessageEventStream } from "@mariozechner/pi-ai";
import type { Model } from "@mariozechner/pi-ai";
import { describe, expect, it } from "vitest";
import type { OpenClawConfig } from "../../config/config.js";
import { applyExtraParamsToAgent } from "./extra-params.js";
import { runExtraParamsCase } from "./extra-params.test-support.js";
type StreamPayload = {
messages: Array<{
@ -13,31 +10,23 @@ type StreamPayload = {
};
function runOpenRouterPayload(payload: StreamPayload, modelId: string) {
const baseStreamFn: StreamFn = (model, _context, options) => {
options?.onPayload?.(payload, model);
return createAssistantMessageEventStream();
};
const agent = { streamFn: baseStreamFn };
const cfg = {
plugins: {
entries: {
openrouter: {
enabled: true,
runExtraParamsCase({
cfg: {
plugins: {
entries: {
openrouter: {
enabled: true,
},
},
},
},
} satisfies OpenClawConfig;
applyExtraParamsToAgent(agent, cfg, "openrouter", modelId);
const model = {
api: "openai-completions",
provider: "openrouter",
id: modelId,
} as Model<"openai-completions">;
const context: Context = { messages: [] };
void agent.streamFn?.(model, context, {});
model: {
api: "openai-completions",
provider: "openrouter",
id: modelId,
} as Model<"openai-completions">,
payload,
});
}
describe("extra-params: OpenRouter Anthropic cache_control", () => {

View File

@ -0,0 +1,56 @@
import type { StreamFn } from "@mariozechner/pi-agent-core";
import type { Context, Model, SimpleStreamOptions } from "@mariozechner/pi-ai";
import type { OpenClawConfig } from "../../config/config.js";
import { applyExtraParamsToAgent } from "./extra-params.js";
/**
 * What a single extra-params test run observed: the headers the wrapped
 * stream fn received and the payload object (possibly mutated via onPayload).
 */
export type ExtraParamsCapture<TPayload extends Record<string, unknown>> = {
headers?: Record<string, string>;
payload: TPayload;
};
/** Inputs for runExtraParamsCase. */
type RunExtraParamsCaseParams<
TApi extends "openai-completions" | "openai-responses",
TPayload extends Record<string, unknown>,
> = {
// Model id passed to applyExtraParamsToAgent; defaults to model.id.
applyModelId?: string;
// Provider passed to applyExtraParamsToAgent; defaults to model.provider.
applyProvider?: string;
// Headers from the simulated caller; take precedence over options.headers.
callerHeaders?: Record<string, string>;
cfg?: OpenClawConfig;
model: Model<TApi>;
options?: SimpleStreamOptions;
// Payload handed to onPayload; returned (after any mutation) in the capture.
payload: TPayload;
thinkingLevel?: "minimal" | "low" | "medium" | "high";
};
/**
 * Shared harness for extra-params tests: installs a stub stream fn on a
 * minimal agent, applies the extra-params wrapper, fires one stream call,
 * and reports the headers and payload the wrapper produced.
 */
export function runExtraParamsCase<
TApi extends "openai-completions" | "openai-responses",
TPayload extends Record<string, unknown>,
>(params: RunExtraParamsCaseParams<TApi, TPayload>): ExtraParamsCapture<TPayload> {
const result: ExtraParamsCapture<TPayload> = { payload: params.payload };

// Stub stream fn: record the headers it is handed, let the wrapper mutate
// the payload through onPayload, and return an empty stream stand-in.
const stubStreamFn: StreamFn = (model, _context, options) => {
result.headers = options?.headers;
options?.onPayload?.(params.payload, model);
return {} as ReturnType<StreamFn>;
};
const agent = { streamFn: stubStreamFn };

// Provider/model id for the wrapper default to the model's own fields.
const provider = params.applyProvider ?? params.model.provider;
const modelId = params.applyModelId ?? params.model.id;
applyExtraParamsToAgent(agent, params.cfg, provider, modelId, undefined, params.thinkingLevel);

const emptyContext: Context = { messages: [] };
const streamOptions = {
...params.options,
// Caller-supplied headers win over any headers in options.
headers: params.callerHeaders ?? params.options?.headers,
};
void agent.streamFn?.(params.model, emptyContext, streamOptions);

return result;
}

View File

@ -1,7 +1,7 @@
import type { StreamFn } from "@mariozechner/pi-agent-core";
import type { Context, Model, SimpleStreamOptions } from "@mariozechner/pi-ai";
import type { Model, SimpleStreamOptions } from "@mariozechner/pi-ai";
import { describe, expect, it, vi } from "vitest";
import { applyExtraParamsToAgent } from "./extra-params.js";
import type { OpenClawConfig } from "../../config/config.js";
import { runExtraParamsCase } from "./extra-params.test-support.js";
// Mock streamSimple for testing
vi.mock("@mariozechner/pi-ai", () => ({
@ -15,24 +15,19 @@ type ToolStreamCase = {
applyProvider: string;
applyModelId: string;
model: Model<"openai-completions">;
cfg?: Parameters<typeof applyExtraParamsToAgent>[1];
cfg?: OpenClawConfig;
options?: SimpleStreamOptions;
};
function runToolStreamCase(params: ToolStreamCase) {
const payload: Record<string, unknown> = { model: params.model.id, messages: [] };
const baseStreamFn: StreamFn = (model, _context, options) => {
options?.onPayload?.(payload, model);
return {} as ReturnType<StreamFn>;
};
const agent = { streamFn: baseStreamFn };
applyExtraParamsToAgent(agent, params.cfg, params.applyProvider, params.applyModelId);
const context: Context = { messages: [] };
void agent.streamFn?.(params.model, context, params.options ?? {});
return payload;
return runExtraParamsCase({
applyModelId: params.applyModelId,
applyProvider: params.applyProvider,
cfg: params.cfg,
model: params.model,
options: params.options,
payload: { model: params.model.id, messages: [] },
}).payload;
}
describe("extra-params: Z.AI tool_stream support", () => {