From d0641c2e9e91e3474fcbf3e7899f3a16a26900dc Mon Sep 17 00:00:00 2001 From: Rudi Cilibrasi Date: Fri, 20 Mar 2026 22:51:27 -0700 Subject: [PATCH] Fix stale-metadata tests to exercise the preferResolvedModel/preserveDiscoveredTransportMetadata code path The stale-metadata tests called mockOpenAICodexTemplateModel() then immediately overwrote the discovery mock with one that only returned gpt-5.4, causing hasDynamicOverrideTemplate to return false and the override path to be skipped. Introduce mockStaleCodexDiscovery() helper that preserves the gpt-5.2-codex template in the registry alongside the stale gpt-5.4 model, so the intended code path is now exercised by all 8 stale-metadata tests. Co-Authored-By: Claude Opus 4.6 (1M context) --- .../pi-embedded-runner/model.test-harness.ts | 19 +++ src/agents/pi-embedded-runner/model.test.ts | 157 ++++++------------ 2 files changed, 66 insertions(+), 110 deletions(-) diff --git a/src/agents/pi-embedded-runner/model.test-harness.ts b/src/agents/pi-embedded-runner/model.test-harness.ts index b91ca8b8c5f..4ce53098e46 100644 --- a/src/agents/pi-embedded-runner/model.test-harness.ts +++ b/src/agents/pi-embedded-runner/model.test-harness.ts @@ -124,3 +124,22 @@ export function mockDiscoveredModel(params: { }), } as unknown as ReturnType<typeof discoverModels>); } + +/** + * Mock a stale discovered gpt-5.4 model while keeping the gpt-5.2-codex + * template visible so `hasDynamicOverrideTemplate` returns true and the + * `preferResolvedModel`/`preserveDiscoveredTransportMetadata` path is exercised. 
+ */ +export function mockStaleCodexDiscovery(staleModel: Record<string, unknown>): void { + vi.mocked(discoverModels).mockReturnValue({ + find: vi.fn((provider: string, modelId: string) => { + if (provider === "openai-codex" && modelId === "gpt-5.2-codex") { + return OPENAI_CODEX_TEMPLATE_MODEL; + } + if (provider === "openai-codex" && modelId === "gpt-5.4") { + return staleModel; + } + return null; + }), + } as unknown as ReturnType<typeof discoverModels>); +} diff --git a/src/agents/pi-embedded-runner/model.test.ts b/src/agents/pi-embedded-runner/model.test.ts index 0f0d410ec8d..459ce5c805e 100644 --- a/src/agents/pi-embedded-runner/model.test.ts +++ b/src/agents/pi-embedded-runner/model.test.ts @@ -27,6 +27,7 @@ import { makeModel, mockDiscoveredModel, mockOpenAICodexTemplateModel, + mockStaleCodexDiscovery, resetMockDiscoverModels, } from "./model.test-harness.js"; @@ -668,19 +669,11 @@ describe("resolveModel", () => { }); it("prefers the codex gpt-5.4 forward-compat model over stale discovered metadata", () => { - mockOpenAICodexTemplateModel(); - vi.mocked(discoverModels).mockReturnValue({ - find: vi.fn((provider: string, modelId: string) => { - if (provider !== "openai-codex" || modelId !== "gpt-5.4") { - return null; - } - return { - ...buildOpenAICodexForwardCompatExpectation("gpt-5.4"), - contextWindow: 272000, - maxTokens: 128000, - }; - }), - } as unknown as ReturnType<typeof discoverModels>); + mockStaleCodexDiscovery({ + ...buildOpenAICodexForwardCompatExpectation("gpt-5.4"), + contextWindow: 272000, + maxTokens: 128000, + }); const result = resolveModel("openai-codex", "gpt-5.4", "/tmp/agent"); @@ -693,19 +686,11 @@ describe("resolveModel", () => { }); it("preserves configured openai-codex overrides when stale discovery loses to the dynamic gpt-5.4 model", () => { - mockOpenAICodexTemplateModel(); - vi.mocked(discoverModels).mockReturnValue({ - find: vi.fn((provider: string, modelId: string) => { - if (provider !== "openai-codex" || modelId !== "gpt-5.4") { - return null; - } - return { - 
...buildOpenAICodexForwardCompatExpectation("gpt-5.4"), - contextWindow: 272000, - maxTokens: 64000, - }; - }), - } as unknown as ReturnType<typeof discoverModels>); + mockStaleCodexDiscovery({ + ...buildOpenAICodexForwardCompatExpectation("gpt-5.4"), + contextWindow: 272000, + maxTokens: 64000, + }); const cfg: OpenClawConfig = { models: { @@ -730,19 +715,11 @@ describe("resolveModel", () => { }); it("prefers the codex gpt-5.4 forward-compat model on async resolve when discovery is stale", async () => { - mockOpenAICodexTemplateModel(); - vi.mocked(discoverModels).mockReturnValue({ - find: vi.fn((provider: string, modelId: string) => { - if (provider !== "openai-codex" || modelId !== "gpt-5.4") { - return null; - } - return { - ...buildOpenAICodexForwardCompatExpectation("gpt-5.4"), - contextWindow: 272000, - maxTokens: 128000, - }; - }), - } as unknown as ReturnType<typeof discoverModels>); + mockStaleCodexDiscovery({ + ...buildOpenAICodexForwardCompatExpectation("gpt-5.4"), + contextWindow: 272000, + maxTokens: 128000, + }); const result = await resolveModelAsync("openai-codex", "gpt-5.4", "/tmp/agent"); @@ -755,21 +732,13 @@ describe("resolveModel", () => { }); it("preserves discovered baseUrl and headers when dynamic gpt-5.4 wins", () => { - mockOpenAICodexTemplateModel(); - vi.mocked(discoverModels).mockReturnValue({ - find: vi.fn((provider: string, modelId: string) => { - if (provider !== "openai-codex" || modelId !== "gpt-5.4") { - return null; - } - return { - ...buildOpenAICodexForwardCompatExpectation("gpt-5.4"), - contextWindow: 272000, - maxTokens: 128000, - baseUrl: "https://proxy.example.com/backend-api", - headers: { "X-Test-Route": "tenant-a" }, - }; - }), - } as unknown as ReturnType<typeof discoverModels>); + mockStaleCodexDiscovery({ + ...buildOpenAICodexForwardCompatExpectation("gpt-5.4"), + contextWindow: 272000, + maxTokens: 128000, + baseUrl: "https://proxy.example.com/backend-api", + headers: { "X-Test-Route": "tenant-a" }, + }); const result = resolveModel("openai-codex", "gpt-5.4", "/tmp/agent"); @@ 
-785,21 +754,13 @@ describe("resolveModel", () => { }); it("preserves discovered api and compat when dynamic gpt-5.4 wins", () => { - mockOpenAICodexTemplateModel(); - vi.mocked(discoverModels).mockReturnValue({ - find: vi.fn((provider: string, modelId: string) => { - if (provider !== "openai-codex" || modelId !== "gpt-5.4") { - return null; - } - return { - ...buildOpenAICodexForwardCompatExpectation("gpt-5.4"), - api: "openai-completions", - compat: { supportsStore: false }, - contextWindow: 272000, - maxTokens: 128000, - }; - }), - } as unknown as ReturnType<typeof discoverModels>); + mockStaleCodexDiscovery({ + ...buildOpenAICodexForwardCompatExpectation("gpt-5.4"), + api: "openai-completions", + compat: { supportsStore: false }, + contextWindow: 272000, + maxTokens: 128000, + }); const result = resolveModel("openai-codex", "gpt-5.4", "/tmp/agent"); @@ -814,20 +775,12 @@ describe("resolveModel", () => { }); it("keeps model-level api overrides when dynamic gpt-5.4 wins", () => { - mockOpenAICodexTemplateModel(); - vi.mocked(discoverModels).mockReturnValue({ - find: vi.fn((provider: string, modelId: string) => { - if (provider !== "openai-codex" || modelId !== "gpt-5.4") { - return null; - } - return { - ...buildOpenAICodexForwardCompatExpectation("gpt-5.4"), - api: "openai-completions", - contextWindow: 272000, - maxTokens: 128000, - }; - }), - } as unknown as ReturnType<typeof discoverModels>); + mockStaleCodexDiscovery({ + ...buildOpenAICodexForwardCompatExpectation("gpt-5.4"), + api: "openai-completions", + contextWindow: 272000, + maxTokens: 128000, + }); const cfg: OpenClawConfig = { models: { @@ -852,20 +805,12 @@ describe("resolveModel", () => { }); it("preserves discovered input when dynamic gpt-5.4 wins", () => { - mockOpenAICodexTemplateModel(); - vi.mocked(discoverModels).mockReturnValue({ - find: vi.fn((provider: string, modelId: string) => { - if (provider !== "openai-codex" || modelId !== "gpt-5.4") { - return null; - } - return { - ...buildOpenAICodexForwardCompatExpectation("gpt-5.4"), - 
input: ["text"], - contextWindow: 272000, - maxTokens: 128000, - }; - }), - } as unknown as ReturnType<typeof discoverModels>); + mockStaleCodexDiscovery({ + ...buildOpenAICodexForwardCompatExpectation("gpt-5.4"), + input: ["text"], + contextWindow: 272000, + maxTokens: 128000, + }); const result = resolveModel("openai-codex", "gpt-5.4", "/tmp/agent"); @@ -879,19 +824,11 @@ describe("resolveModel", () => { }); it("preserves discovered maxTokens when dynamic gpt-5.4 wins", () => { - mockOpenAICodexTemplateModel(); - vi.mocked(discoverModels).mockReturnValue({ - find: vi.fn((provider: string, modelId: string) => { - if (provider !== "openai-codex" || modelId !== "gpt-5.4") { - return null; - } - return { - ...buildOpenAICodexForwardCompatExpectation("gpt-5.4"), - maxTokens: 64_000, - contextWindow: 272000, - }; - }), - } as unknown as ReturnType<typeof discoverModels>); + mockStaleCodexDiscovery({ + ...buildOpenAICodexForwardCompatExpectation("gpt-5.4"), + maxTokens: 64_000, + contextWindow: 272000, + }); const result = resolveModel("openai-codex", "gpt-5.4", "/tmp/agent");