Merge 579cf19054a7589eaae80f944b737d12f32b0a13 into 8a05c05596ca9ba0735dafd8e359885de4c2c969

This commit is contained in:
Jackal Xin 2026-03-21 05:46:03 +00:00 committed by GitHub
commit 4f269df609
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
7 changed files with 367 additions and 4 deletions

View File

@ -0,0 +1,86 @@
import type { Api, Model } from "@mariozechner/pi-ai";
import { normalizeModelCompat } from "./model-compat.js";
import { normalizeProviderId } from "./model-selection.js";
// Base URL of the ChatGPT Codex backend; paired with the
// "openai-codex-responses" transport by normalizeOpenAICodexTransport below.
export const OPENAI_CODEX_BASE_URL = "https://chatgpt.com/backend-api";
/**
 * Returns true when `baseUrl` points at the official OpenAI API endpoint
 * (https://api.openai.com, optionally with a /v1 suffix and/or a trailing
 * slash, case-insensitive). Undefined or blank values never match.
 */
function isOpenAIApiBaseUrl(baseUrl?: string): boolean {
  if (baseUrl == null) {
    return false;
  }
  const candidate = baseUrl.trim();
  return candidate !== "" && /^https?:\/\/api\.openai\.com(?:\/v1)?\/?$/i.test(candidate);
}
/**
 * Returns true when `baseUrl` points at the ChatGPT Codex backend
 * (https://chatgpt.com/backend-api, optional trailing slash,
 * case-insensitive). Undefined or blank values never match.
 */
function isOpenAICodexBaseUrl(baseUrl?: string): boolean {
  const candidate = (baseUrl ?? "").trim();
  if (candidate === "") {
    return false;
  }
  return /^https?:\/\/chatgpt\.com\/backend-api\/?$/i.test(candidate);
}
/**
 * Rewrites stale "openai-codex" model entries onto the Codex transport.
 *
 * If the provider (after normalization) is "openai-codex" and the entry still
 * carries the plain "openai-responses" API with no base URL or a known
 * OpenAI/Codex endpoint, the API is flipped to "openai-codex-responses" and a
 * missing/api.openai.com base URL is pointed at OPENAI_CODEX_BASE_URL.
 * Custom (proxy) endpoints and other providers pass through untouched; the
 * original object is returned when nothing changes.
 */
function normalizeOpenAICodexTransport(params: {
  provider: string;
  model: Model<Api>;
}): Model<Api> {
  const { provider, model } = params;
  if (normalizeProviderId(provider) !== "openai-codex") {
    return model;
  }
  // Only a missing base URL or a recognized OpenAI/Codex endpoint qualifies
  // for the transport switch; anything else is a deliberate proxy setup.
  const knownEndpoint =
    !model.baseUrl || isOpenAIApiBaseUrl(model.baseUrl) || isOpenAICodexBaseUrl(model.baseUrl);
  let nextApi = model.api;
  if (knownEndpoint && nextApi === "openai-responses") {
    nextApi = "openai-codex-responses" as const;
  }
  let nextBaseUrl = model.baseUrl;
  if (nextApi === "openai-codex-responses" && (!nextBaseUrl || isOpenAIApiBaseUrl(nextBaseUrl))) {
    nextBaseUrl = OPENAI_CODEX_BASE_URL;
  }
  if (nextApi === model.api && nextBaseUrl === model.baseUrl) {
    return model;
  }
  return { ...model, api: nextApi, baseUrl: nextBaseUrl } as Model<Api>;
}
/**
 * Upgrades plain "openai" models from the completions transport to the
 * responses transport when they target the default OpenAI endpoint (no base
 * URL, or api.openai.com). Other providers, APIs, or custom endpoints are
 * returned unchanged.
 */
function normalizeOpenAITransport(params: { provider: string; model: Model<Api> }): Model<Api> {
  const { provider, model } = params;
  if (normalizeProviderId(provider) !== "openai") {
    return model;
  }
  const onDefaultEndpoint = !model.baseUrl || isOpenAIApiBaseUrl(model.baseUrl);
  if (model.api !== "openai-completions" || !onDefaultEndpoint) {
    return model;
  }
  return { ...model, api: "openai-responses" } as Model<Api>;
}
/**
 * Applies all provider-specific transport fixups to a resolved model, in
 * order: openai completions→responses upgrade, openai-codex transport/baseUrl
 * rewrite, then the shared model-compat normalization.
 */
export function normalizeResolvedProviderModel(params: {
  provider: string;
  model: Model<Api>;
}): Model<Api> {
  const afterOpenAI = normalizeOpenAITransport(params);
  const afterCodex = normalizeOpenAICodexTransport({
    provider: params.provider,
    model: afterOpenAI,
  });
  return normalizeModelCompat(afterCodex);
}

View File

@ -0,0 +1,120 @@
import { afterEach, describe, expect, it, vi } from "vitest";
// Fixture: an "openai-codex" registry entry persisted with the plain OpenAI
// transport/baseUrl pair. Discovery is expected to rewrite it onto the Codex
// transport ("openai-codex-responses" @ chatgpt.com/backend-api).
const STALE_CODEX_MODEL = {
id: "gpt-5.4",
name: "GPT-5.4",
provider: "openai-codex",
api: "openai-responses",
baseUrl: "https://api.openai.com/v1",
input: ["text", "image"],
reasoning: true,
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 1_050_000,
maxTokens: 128_000,
};
// Fixture: the same model under the plain "openai" provider, where the
// responses transport + api.openai.com base URL is already correct and must
// be left untouched by discovery.
const OPENAI_MODEL = {
id: "gpt-5.4",
name: "GPT-5.4",
provider: "openai",
api: "openai-responses",
baseUrl: "https://api.openai.com/v1",
input: ["text", "image"],
reasoning: true,
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 1_050_000,
maxTokens: 128_000,
};
// Reset the module cache and remove the module mock between tests so each
// test's vi.doMock registration is picked up freshly by its dynamic import.
afterEach(() => {
vi.resetModules();
vi.doUnmock("@mariozechner/pi-coding-agent");
});
describe("discoverModels", () => {
it("normalizes openai-codex models returned by registry discovery", async () => {
// Register the mock BEFORE the dynamic import below so the mocked
// ModelRegistry is the class discoverModels instantiates.
vi.doMock("@mariozechner/pi-coding-agent", () => {
class MockAuthStorage {}
class MockModelRegistry {
// Serves the stale fixtures for both providers; anything else is unknown.
find(provider: string, modelId: string) {
if (provider === "openai-codex" && modelId === "gpt-5.4") {
return { ...STALE_CODEX_MODEL };
}
if (provider === "openai" && modelId === "gpt-5.4") {
return { ...OPENAI_MODEL };
}
return null;
}
getAll() {
return [{ ...STALE_CODEX_MODEL }, { ...OPENAI_MODEL }];
}
getAvailable() {
return [{ ...STALE_CODEX_MODEL }];
}
}
return {
AuthStorage: MockAuthStorage,
ModelRegistry: MockModelRegistry,
};
});
// Dynamic import so the vi.doMock registration above takes effect.
const { discoverModels } = await import("./pi-model-discovery.js");
const registry = discoverModels({} as never, "/tmp/openclaw-agent");
// The stale openai-codex entry is rewritten onto the Codex transport...
expect(registry.find("openai-codex", "gpt-5.4")).toMatchObject({
provider: "openai-codex",
api: "openai-codex-responses",
baseUrl: "https://chatgpt.com/backend-api",
});
// ...while the plain openai entry is left exactly as stored.
expect(registry.find("openai", "gpt-5.4")).toMatchObject({
provider: "openai",
api: "openai-responses",
baseUrl: "https://api.openai.com/v1",
});
// List-returning registry methods are normalized element-wise too.
expect(registry.getAll()).toContainEqual(
expect.objectContaining({
provider: "openai-codex",
api: "openai-codex-responses",
baseUrl: "https://chatgpt.com/backend-api",
}),
);
expect(registry.getAvailable()).toContainEqual(
expect.objectContaining({
provider: "openai-codex",
api: "openai-codex-responses",
baseUrl: "https://chatgpt.com/backend-api",
}),
);
});
it("does not rewrite custom openai-codex proxy endpoints", async () => {
vi.doMock("@mariozechner/pi-coding-agent", () => {
class MockAuthStorage {}
class MockModelRegistry {
// A non-OpenAI base URL marks a deliberate proxy configuration.
find() {
return {
...STALE_CODEX_MODEL,
baseUrl: "https://proxy.example.com/v1",
};
}
}
return {
AuthStorage: MockAuthStorage,
ModelRegistry: MockModelRegistry,
};
});
const { discoverModels } = await import("./pi-model-discovery.js");
const registry = discoverModels({} as never, "/tmp/openclaw-agent");
// Transport and base URL must stay untouched for proxy setups.
expect(registry.find("openai-codex", "gpt-5.4")).toMatchObject({
provider: "openai-codex",
api: "openai-responses",
baseUrl: "https://proxy.example.com/v1",
});
});
});

View File

@ -1,11 +1,13 @@
import fs from "node:fs";
import path from "node:path";
import type { Api, Model } from "@mariozechner/pi-ai";
import * as PiCodingAgent from "@mariozechner/pi-coding-agent";
import type {
AuthStorage as PiAuthStorage,
ModelRegistry as PiModelRegistry,
} from "@mariozechner/pi-coding-agent";
import { ensureAuthProfileStore } from "./auth-profiles.js";
import { normalizeResolvedProviderModel } from "./model.provider-normalization.js";
import { resolvePiCredentialMapFromStore, type PiCredentialMap } from "./pi-auth-credentials.js";
const PiAuthStorageClass = PiCodingAgent.AuthStorage;
@ -148,5 +150,44 @@ export function discoverAuthStorage(agentDir: string): PiAuthStorage {
}
export function discoverModels(authStorage: PiAuthStorage, agentDir: string): PiModelRegistry {
// NOTE(review): rendered-diff artifact — the next line is the removed
// pre-change implementation; the two lines after it are its replacement,
// which wraps the registry so every model it returns is normalized.
return new PiModelRegistryClass(authStorage, path.join(agentDir, "models.json"));
const registry = new PiModelRegistryClass(authStorage, path.join(agentDir, "models.json"));
return wrapModelRegistryWithProviderNormalization(registry);
}
/**
 * Best-effort normalization of a single registry entry. Values that are not
 * objects, or that lack a non-empty string `provider`, pass through
 * unchanged; everything else is run through normalizeResolvedProviderModel.
 */
function normalizeRegistryModel(model: unknown): unknown {
  if (typeof model !== "object" || model === null) {
    return model;
  }
  const { provider } = model as { provider?: unknown };
  if (typeof provider !== "string" || provider.trim() === "") {
    return model;
  }
  return normalizeResolvedProviderModel({ provider, model: model as Model<Api> });
}
/**
 * Wraps a ModelRegistry in a Proxy so that every model it hands back has had
 * provider-specific normalization applied (see normalizeRegistryModel).
 * `find` results are normalized directly; array results from `getAll` /
 * `getAvailable` are normalized element-wise. All other members are passed
 * through, with methods bound to the underlying registry so their `this` is
 * intact.
 *
 * Fix: the wrapped methods now forward *all* caller arguments instead of a
 * fixed arity, so the wrapper stays correct if the registry's methods gain
 * optional parameters.
 */
function wrapModelRegistryWithProviderNormalization(registry: PiModelRegistry): PiModelRegistry {
  return new Proxy(registry, {
    get(target, prop, receiver) {
      const value = Reflect.get(target, prop, receiver);
      if (typeof value !== "function" || prop === "constructor") {
        return value;
      }
      if (prop === "find") {
        // Forward every argument (not just provider/modelId).
        return (...args: unknown[]) =>
          normalizeRegistryModel(Reflect.apply(value, target, args));
      }
      if (prop === "getAll" || prop === "getAvailable") {
        return (...args: unknown[]) => {
          const result = Reflect.apply(value, target, args);
          // Only list-shaped results are normalized element-wise.
          return Array.isArray(result)
            ? result.map((model) => normalizeRegistryModel(model))
            : result;
        };
      }
      // Bind remaining methods so `this` inside them is the real registry.
      return value.bind(target);
    },
  });
}

View File

@ -0,0 +1,52 @@
import { describe, expect, it } from "vitest";
import { resolveModelFromRegistry } from "./media-tool-shared.js";
// Fixture: an "openai-codex" registry entry persisted with the plain OpenAI
// transport/baseUrl pair; resolveModelFromRegistry is expected to rewrite it
// onto the Codex transport unless the base URL points at a custom proxy.
const STALE_CODEX_MODEL = {
id: "gpt-5.4",
name: "GPT-5.4",
provider: "openai-codex",
api: "openai-responses",
baseUrl: "https://api.openai.com/v1",
input: ["text", "image"],
reasoning: true,
cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
contextWindow: 1_050_000,
maxTokens: 128_000,
} as const;
describe("resolveModelFromRegistry", () => {
  /** Minimal registry stub whose find() always hands back a copy of `model`. */
  const registryFor = <T extends object>(model: T) => ({
    find: () => ({ ...model }),
  });

  it("normalizes stale openai-codex transport/baseUrl pairs", () => {
    const resolved = resolveModelFromRegistry({
      modelRegistry: registryFor(STALE_CODEX_MODEL),
      provider: "openai-codex",
      modelId: "gpt-5.4",
    });
    // The stale openai-responses/api.openai.com pair must be rewritten onto
    // the Codex transport and its backend URL.
    expect(resolved).toMatchObject({
      provider: "openai-codex",
      api: "openai-codex-responses",
      baseUrl: "https://chatgpt.com/backend-api",
    });
  });

  it("keeps custom openai-codex proxy endpoints unchanged", () => {
    const resolved = resolveModelFromRegistry({
      modelRegistry: registryFor({
        ...STALE_CODEX_MODEL,
        baseUrl: "https://proxy.example.com/v1",
      }),
      provider: "openai-codex",
      modelId: "gpt-5.4",
    });
    // A non-OpenAI base URL marks a deliberate proxy setup; nothing moves.
    expect(resolved).toMatchObject({
      provider: "openai-codex",
      api: "openai-responses",
      baseUrl: "https://proxy.example.com/v1",
    });
  });
});

View File

@ -1,6 +1,7 @@
import { type Api, type Model } from "@mariozechner/pi-ai";
import type { OpenClawConfig } from "../../config/config.js";
import { getDefaultLocalRoots } from "../../media/web-media.js";
import { normalizeResolvedProviderModel } from "../model.provider-normalization.js";
import type { ImageModelConfig } from "./image-tool.helpers.js";
import type { ToolModelConfig } from "./model-config.helpers.js";
import { getApiKeyForModel, normalizeWorkspaceDir, requireApiKey } from "./tool-runtime.helpers.js";
@ -107,7 +108,10 @@ export function resolveModelFromRegistry(params: {
if (!model) {
throw new Error(`Unknown model: ${params.provider}/${params.modelId}`);
}
return model;
return normalizeResolvedProviderModel({
provider: params.provider,
model,
});
}
export async function resolveModelRuntimeApiKey(params: {

View File

@ -147,6 +147,58 @@ describe("describeImageWithModel", () => {
expect(minimaxUnderstandImageMock).not.toHaveBeenCalled();
});
it("normalizes openai-codex models before generic completion", async () => {
discoverModelsMock.mockReturnValue({
find: vi.fn(() => ({
provider: "openai-codex",
id: "gpt-5.4",
api: "openai-responses",
baseUrl: "https://api.openai.com/v1",
input: ["text", "image"],
})),
});
completeMock.mockResolvedValue({
role: "assistant",
api: "openai-codex-responses",
provider: "openai-codex",
model: "gpt-5.4",
stopReason: "stop",
timestamp: Date.now(),
content: [{ type: "text", text: "codex ok" }],
});
const { describeImageWithModel } = await import("./image.js");
const result = await describeImageWithModel({
cfg: {},
agentDir: "/tmp/openclaw-agent",
provider: "openai-codex",
model: "gpt-5.4",
buffer: Buffer.from("png-bytes"),
fileName: "image.png",
mime: "image/png",
prompt: "Describe the image.",
timeoutMs: 1000,
});
expect(result).toEqual({
text: "codex ok",
model: "gpt-5.4",
});
expect(setRuntimeApiKeyMock).toHaveBeenCalledWith("openai-codex", "oauth-test");
expect(completeMock).toHaveBeenCalledWith(
expect.objectContaining({
provider: "openai-codex",
api: "openai-codex-responses",
baseUrl: "https://chatgpt.com/backend-api",
}),
expect.any(Object),
expect.objectContaining({
apiKey: "oauth-test", // pragma: allowlist secret
}),
);
});
it("normalizes deprecated google flash ids before lookup and keeps profile auth selection", async () => {
const findMock = vi.fn((provider: string, modelId: string) => {
expect(provider).toBe("google");

View File

@ -7,6 +7,7 @@ import {
resolveApiKeyForProvider,
} from "../../agents/model-auth.js";
import { normalizeModelRef } from "../../agents/model-selection.js";
import { normalizeResolvedProviderModel } from "../../agents/model.provider-normalization.js";
import { ensureOpenClawModelsJson } from "../../agents/models-config.js";
import { coerceImageAssistantText } from "../../agents/tools/image-tool.helpers.js";
import type {
@ -49,10 +50,17 @@ async function resolveImageRuntime(params: {
const authStorage = discoverAuthStorage(params.agentDir);
const modelRegistry = discoverModels(authStorage, params.agentDir);
const resolvedRef = normalizeModelRef(params.provider, params.model);
const model = modelRegistry.find(resolvedRef.provider, resolvedRef.model) as Model<Api> | null;
if (!model) {
const discoveredModel = modelRegistry.find(
resolvedRef.provider,
resolvedRef.model,
) as Model<Api> | null;
if (!discoveredModel) {
throw new Error(`Unknown model: ${resolvedRef.provider}/${resolvedRef.model}`);
}
const model = normalizeResolvedProviderModel({
provider: resolvedRef.provider,
model: discoveredModel,
});
if (!model.input?.includes("image")) {
throw new Error(`Model does not support images: ${params.provider}/${params.model}`);
}