Merge 579cf19054a7589eaae80f944b737d12f32b0a13 into 8a05c05596ca9ba0735dafd8e359885de4c2c969
This commit is contained in:
commit
4f269df609
86
src/agents/model.provider-normalization.ts
Normal file
86
src/agents/model.provider-normalization.ts
Normal file
@ -0,0 +1,86 @@
|
|||||||
|
import type { Api, Model } from "@mariozechner/pi-ai";
|
||||||
|
import { normalizeModelCompat } from "./model-compat.js";
|
||||||
|
import { normalizeProviderId } from "./model-selection.js";
|
||||||
|
|
||||||
|
export const OPENAI_CODEX_BASE_URL = "https://chatgpt.com/backend-api";
|
||||||
|
|
||||||
|
function isOpenAIApiBaseUrl(baseUrl?: string): boolean {
|
||||||
|
const trimmed = baseUrl?.trim();
|
||||||
|
if (!trimmed) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return /^https?:\/\/api\.openai\.com(?:\/v1)?\/?$/i.test(trimmed);
|
||||||
|
}
|
||||||
|
|
||||||
|
function isOpenAICodexBaseUrl(baseUrl?: string): boolean {
|
||||||
|
const trimmed = baseUrl?.trim();
|
||||||
|
if (!trimmed) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return /^https?:\/\/chatgpt\.com\/backend-api\/?$/i.test(trimmed);
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeOpenAICodexTransport(params: {
|
||||||
|
provider: string;
|
||||||
|
model: Model<Api>;
|
||||||
|
}): Model<Api> {
|
||||||
|
if (normalizeProviderId(params.provider) !== "openai-codex") {
|
||||||
|
return params.model;
|
||||||
|
}
|
||||||
|
|
||||||
|
const useCodexTransport =
|
||||||
|
!params.model.baseUrl ||
|
||||||
|
isOpenAIApiBaseUrl(params.model.baseUrl) ||
|
||||||
|
isOpenAICodexBaseUrl(params.model.baseUrl);
|
||||||
|
|
||||||
|
const nextApi =
|
||||||
|
useCodexTransport && params.model.api === "openai-responses"
|
||||||
|
? ("openai-codex-responses" as const)
|
||||||
|
: params.model.api;
|
||||||
|
const nextBaseUrl =
|
||||||
|
nextApi === "openai-codex-responses" &&
|
||||||
|
(!params.model.baseUrl || isOpenAIApiBaseUrl(params.model.baseUrl))
|
||||||
|
? OPENAI_CODEX_BASE_URL
|
||||||
|
: params.model.baseUrl;
|
||||||
|
|
||||||
|
if (nextApi === params.model.api && nextBaseUrl === params.model.baseUrl) {
|
||||||
|
return params.model;
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
...params.model,
|
||||||
|
api: nextApi,
|
||||||
|
baseUrl: nextBaseUrl,
|
||||||
|
} as Model<Api>;
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeOpenAITransport(params: { provider: string; model: Model<Api> }): Model<Api> {
|
||||||
|
if (normalizeProviderId(params.provider) !== "openai") {
|
||||||
|
return params.model;
|
||||||
|
}
|
||||||
|
|
||||||
|
const useResponsesTransport =
|
||||||
|
params.model.api === "openai-completions" &&
|
||||||
|
(!params.model.baseUrl || isOpenAIApiBaseUrl(params.model.baseUrl));
|
||||||
|
|
||||||
|
if (!useResponsesTransport) {
|
||||||
|
return params.model;
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
...params.model,
|
||||||
|
api: "openai-responses",
|
||||||
|
} as Model<Api>;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function normalizeResolvedProviderModel(params: {
|
||||||
|
provider: string;
|
||||||
|
model: Model<Api>;
|
||||||
|
}): Model<Api> {
|
||||||
|
const normalizedOpenAI = normalizeOpenAITransport(params);
|
||||||
|
const normalizedCodex = normalizeOpenAICodexTransport({
|
||||||
|
provider: params.provider,
|
||||||
|
model: normalizedOpenAI,
|
||||||
|
});
|
||||||
|
return normalizeModelCompat(normalizedCodex);
|
||||||
|
}
|
||||||
120
src/agents/pi-model-discovery.models.test.ts
Normal file
120
src/agents/pi-model-discovery.models.test.ts
Normal file
@ -0,0 +1,120 @@
|
|||||||
|
import { afterEach, describe, expect, it, vi } from "vitest";
|
||||||
|
|
||||||
|
// Fixture: an openai-codex registry entry with a *stale* transport — it still
// uses the generic "openai-responses" API pointed at api.openai.com, which
// discovery is expected to rewrite to the codex transport/base URL.
const STALE_CODEX_MODEL = {
  id: "gpt-5.4",
  name: "GPT-5.4",
  provider: "openai-codex",
  api: "openai-responses",
  baseUrl: "https://api.openai.com/v1",
  input: ["text", "image"],
  reasoning: true,
  cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
  contextWindow: 1_050_000,
  maxTokens: 128_000,
};
|
||||||
|
|
||||||
|
// Fixture: the same model id under the plain "openai" provider with a correct
// transport; normalization must leave this one untouched.
const OPENAI_MODEL = {
  id: "gpt-5.4",
  name: "GPT-5.4",
  provider: "openai",
  api: "openai-responses",
  baseUrl: "https://api.openai.com/v1",
  input: ["text", "image"],
  reasoning: true,
  cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
  contextWindow: 1_050_000,
  maxTokens: 128_000,
};
|
||||||
|
|
||||||
|
// Reset the module cache and drop the package mock after each test so the
// dynamic import of pi-model-discovery.js re-evaluates with fresh mocks.
afterEach(() => {
  vi.resetModules();
  vi.doUnmock("@mariozechner/pi-coding-agent");
});
|
||||||
|
|
||||||
|
describe("discoverModels", () => {
  it("normalizes openai-codex models returned by registry discovery", async () => {
    // Mock the coding-agent package so the underlying registry hands back a
    // stale codex entry alongside a correctly-configured openai entry.
    vi.doMock("@mariozechner/pi-coding-agent", () => {
      class MockAuthStorage {}
      class MockModelRegistry {
        find(provider: string, modelId: string) {
          if (provider === "openai-codex" && modelId === "gpt-5.4") {
            return { ...STALE_CODEX_MODEL };
          }
          if (provider === "openai" && modelId === "gpt-5.4") {
            return { ...OPENAI_MODEL };
          }
          return null;
        }

        getAll() {
          return [{ ...STALE_CODEX_MODEL }, { ...OPENAI_MODEL }];
        }

        getAvailable() {
          return [{ ...STALE_CODEX_MODEL }];
        }
      }

      return {
        AuthStorage: MockAuthStorage,
        ModelRegistry: MockModelRegistry,
      };
    });

    // Dynamic import so the mock above is picked up (modules reset in afterEach).
    const { discoverModels } = await import("./pi-model-discovery.js");
    const registry = discoverModels({} as never, "/tmp/openclaw-agent");

    // The stale codex entry must be rewritten to the codex transport/base URL…
    expect(registry.find("openai-codex", "gpt-5.4")).toMatchObject({
      provider: "openai-codex",
      api: "openai-codex-responses",
      baseUrl: "https://chatgpt.com/backend-api",
    });
    // …while the plain openai entry stays as discovered.
    expect(registry.find("openai", "gpt-5.4")).toMatchObject({
      provider: "openai",
      api: "openai-responses",
      baseUrl: "https://api.openai.com/v1",
    });
    // Normalization must also apply to the list-returning registry methods.
    expect(registry.getAll()).toContainEqual(
      expect.objectContaining({
        provider: "openai-codex",
        api: "openai-codex-responses",
        baseUrl: "https://chatgpt.com/backend-api",
      }),
    );
    expect(registry.getAvailable()).toContainEqual(
      expect.objectContaining({
        provider: "openai-codex",
        api: "openai-codex-responses",
        baseUrl: "https://chatgpt.com/backend-api",
      }),
    );
  });

  it("does not rewrite custom openai-codex proxy endpoints", async () => {
    // Registry returns a codex model pointed at a user-configured proxy.
    vi.doMock("@mariozechner/pi-coding-agent", () => {
      class MockAuthStorage {}
      class MockModelRegistry {
        find() {
          return {
            ...STALE_CODEX_MODEL,
            baseUrl: "https://proxy.example.com/v1",
          };
        }
      }

      return {
        AuthStorage: MockAuthStorage,
        ModelRegistry: MockModelRegistry,
      };
    });

    const { discoverModels } = await import("./pi-model-discovery.js");
    const registry = discoverModels({} as never, "/tmp/openclaw-agent");

    // Custom proxy endpoints must survive normalization unchanged.
    expect(registry.find("openai-codex", "gpt-5.4")).toMatchObject({
      provider: "openai-codex",
      api: "openai-responses",
      baseUrl: "https://proxy.example.com/v1",
    });
  });
});
|
||||||
@ -1,11 +1,13 @@
|
|||||||
import fs from "node:fs";
|
import fs from "node:fs";
|
||||||
import path from "node:path";
|
import path from "node:path";
|
||||||
|
import type { Api, Model } from "@mariozechner/pi-ai";
|
||||||
import * as PiCodingAgent from "@mariozechner/pi-coding-agent";
|
import * as PiCodingAgent from "@mariozechner/pi-coding-agent";
|
||||||
import type {
|
import type {
|
||||||
AuthStorage as PiAuthStorage,
|
AuthStorage as PiAuthStorage,
|
||||||
ModelRegistry as PiModelRegistry,
|
ModelRegistry as PiModelRegistry,
|
||||||
} from "@mariozechner/pi-coding-agent";
|
} from "@mariozechner/pi-coding-agent";
|
||||||
import { ensureAuthProfileStore } from "./auth-profiles.js";
|
import { ensureAuthProfileStore } from "./auth-profiles.js";
|
||||||
|
import { normalizeResolvedProviderModel } from "./model.provider-normalization.js";
|
||||||
import { resolvePiCredentialMapFromStore, type PiCredentialMap } from "./pi-auth-credentials.js";
|
import { resolvePiCredentialMapFromStore, type PiCredentialMap } from "./pi-auth-credentials.js";
|
||||||
|
|
||||||
const PiAuthStorageClass = PiCodingAgent.AuthStorage;
|
const PiAuthStorageClass = PiCodingAgent.AuthStorage;
|
||||||
@ -148,5 +150,44 @@ export function discoverAuthStorage(agentDir: string): PiAuthStorage {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Builds the agent's model registry (backed by <agentDir>/models.json) and
// wraps it so every model returned by find/getAll/getAvailable is passed
// through provider normalization before callers see it.
export function discoverModels(authStorage: PiAuthStorage, agentDir: string): PiModelRegistry {
  const registry = new PiModelRegistryClass(authStorage, path.join(agentDir, "models.json"));
  return wrapModelRegistryWithProviderNormalization(registry);
}
|
||||||
|
|
||||||
|
function normalizeRegistryModel(model: unknown): unknown {
|
||||||
|
if (!model || typeof model !== "object") {
|
||||||
|
return model;
|
||||||
|
}
|
||||||
|
const provider = (model as { provider?: unknown }).provider;
|
||||||
|
if (typeof provider !== "string" || !provider.trim()) {
|
||||||
|
return model;
|
||||||
|
}
|
||||||
|
return normalizeResolvedProviderModel({
|
||||||
|
provider,
|
||||||
|
model: model as Model<Api>,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Wraps a ModelRegistry in a Proxy so models returned by `find`, `getAll`,
// and `getAvailable` are normalized on the way out. All other members are
// forwarded to the underlying registry unchanged.
function wrapModelRegistryWithProviderNormalization(registry: PiModelRegistry): PiModelRegistry {
  return new Proxy(registry, {
    get(target, prop, receiver) {
      const value = Reflect.get(target, prop, receiver);
      // Non-function properties (and the constructor) pass straight through.
      if (typeof value !== "function" || prop === "constructor") {
        return value;
      }
      if (prop === "find") {
        // Normalize the single lookup result; null/undefined fall through
        // normalizeRegistryModel untouched.
        return (provider: string, modelId: string) =>
          normalizeRegistryModel(Reflect.apply(value, target, [provider, modelId]));
      }
      if (prop === "getAll" || prop === "getAvailable") {
        return () => {
          const result = Reflect.apply(value, target, []);
          // Only map when the registry actually returned an array.
          return Array.isArray(result)
            ? result.map((model) => normalizeRegistryModel(model))
            : result;
        };
      }
      // Bind remaining methods so `this` inside them is the real registry,
      // not the proxy.
      return value.bind(target);
    },
  });
}
|
||||||
|
|||||||
52
src/agents/tools/media-tool-shared.test.ts
Normal file
52
src/agents/tools/media-tool-shared.test.ts
Normal file
@ -0,0 +1,52 @@
|
|||||||
|
import { describe, expect, it } from "vitest";
|
||||||
|
import { resolveModelFromRegistry } from "./media-tool-shared.js";
|
||||||
|
|
||||||
|
// Fixture: an openai-codex model with a stale transport (generic
// "openai-responses" API against api.openai.com) that
// resolveModelFromRegistry is expected to normalize.
const STALE_CODEX_MODEL = {
  id: "gpt-5.4",
  name: "GPT-5.4",
  provider: "openai-codex",
  api: "openai-responses",
  baseUrl: "https://api.openai.com/v1",
  input: ["text", "image"],
  reasoning: true,
  cost: { input: 0, output: 0, cacheRead: 0, cacheWrite: 0 },
  contextWindow: 1_050_000,
  maxTokens: 128_000,
} as const;
|
||||||
|
|
||||||
|
describe("resolveModelFromRegistry", () => {
  it("normalizes stale openai-codex transport/baseUrl pairs", () => {
    // Registry returns the stale fixture; resolution must rewrite it to the
    // codex transport and base URL.
    const model = resolveModelFromRegistry({
      modelRegistry: {
        find: () => ({ ...STALE_CODEX_MODEL }),
      },
      provider: "openai-codex",
      modelId: "gpt-5.4",
    });

    expect(model).toMatchObject({
      provider: "openai-codex",
      api: "openai-codex-responses",
      baseUrl: "https://chatgpt.com/backend-api",
    });
  });

  it("keeps custom openai-codex proxy endpoints unchanged", () => {
    // A user-configured proxy base URL must not be rewritten.
    const model = resolveModelFromRegistry({
      modelRegistry: {
        find: () => ({
          ...STALE_CODEX_MODEL,
          baseUrl: "https://proxy.example.com/v1",
        }),
      },
      provider: "openai-codex",
      modelId: "gpt-5.4",
    });

    expect(model).toMatchObject({
      provider: "openai-codex",
      api: "openai-responses",
      baseUrl: "https://proxy.example.com/v1",
    });
  });
});
|
||||||
@ -1,6 +1,7 @@
|
|||||||
import { type Api, type Model } from "@mariozechner/pi-ai";
|
import { type Api, type Model } from "@mariozechner/pi-ai";
|
||||||
import type { OpenClawConfig } from "../../config/config.js";
|
import type { OpenClawConfig } from "../../config/config.js";
|
||||||
import { getDefaultLocalRoots } from "../../media/web-media.js";
|
import { getDefaultLocalRoots } from "../../media/web-media.js";
|
||||||
|
import { normalizeResolvedProviderModel } from "../model.provider-normalization.js";
|
||||||
import type { ImageModelConfig } from "./image-tool.helpers.js";
|
import type { ImageModelConfig } from "./image-tool.helpers.js";
|
||||||
import type { ToolModelConfig } from "./model-config.helpers.js";
|
import type { ToolModelConfig } from "./model-config.helpers.js";
|
||||||
import { getApiKeyForModel, normalizeWorkspaceDir, requireApiKey } from "./tool-runtime.helpers.js";
|
import { getApiKeyForModel, normalizeWorkspaceDir, requireApiKey } from "./tool-runtime.helpers.js";
|
||||||
@ -107,7 +108,10 @@ export function resolveModelFromRegistry(params: {
|
|||||||
if (!model) {
|
if (!model) {
|
||||||
throw new Error(`Unknown model: ${params.provider}/${params.modelId}`);
|
throw new Error(`Unknown model: ${params.provider}/${params.modelId}`);
|
||||||
}
|
}
|
||||||
return model;
|
return normalizeResolvedProviderModel({
|
||||||
|
provider: params.provider,
|
||||||
|
model,
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function resolveModelRuntimeApiKey(params: {
|
export async function resolveModelRuntimeApiKey(params: {
|
||||||
|
|||||||
@ -147,6 +147,58 @@ describe("describeImageWithModel", () => {
|
|||||||
expect(minimaxUnderstandImageMock).not.toHaveBeenCalled();
|
expect(minimaxUnderstandImageMock).not.toHaveBeenCalled();
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// Verifies that a stale openai-codex registry entry (openai-responses API
// pointed at api.openai.com) is normalized to the codex transport/base URL
// before the generic completion call is issued.
it("normalizes openai-codex models before generic completion", async () => {
  discoverModelsMock.mockReturnValue({
    find: vi.fn(() => ({
      provider: "openai-codex",
      id: "gpt-5.4",
      api: "openai-responses",
      baseUrl: "https://api.openai.com/v1",
      input: ["text", "image"],
    })),
  });
  completeMock.mockResolvedValue({
    role: "assistant",
    api: "openai-codex-responses",
    provider: "openai-codex",
    model: "gpt-5.4",
    stopReason: "stop",
    timestamp: Date.now(),
    content: [{ type: "text", text: "codex ok" }],
  });

  const { describeImageWithModel } = await import("./image.js");

  const result = await describeImageWithModel({
    cfg: {},
    agentDir: "/tmp/openclaw-agent",
    provider: "openai-codex",
    model: "gpt-5.4",
    buffer: Buffer.from("png-bytes"),
    fileName: "image.png",
    mime: "image/png",
    prompt: "Describe the image.",
    timeoutMs: 1000,
  });

  expect(result).toEqual({
    text: "codex ok",
    model: "gpt-5.4",
  });
  expect(setRuntimeApiKeyMock).toHaveBeenCalledWith("openai-codex", "oauth-test");
  // The completion must be issued against the normalized codex transport.
  expect(completeMock).toHaveBeenCalledWith(
    expect.objectContaining({
      provider: "openai-codex",
      api: "openai-codex-responses",
      baseUrl: "https://chatgpt.com/backend-api",
    }),
    expect.any(Object),
    expect.objectContaining({
      apiKey: "oauth-test", // pragma: allowlist secret
    }),
  );
});
||||||
|
|
||||||
it("normalizes deprecated google flash ids before lookup and keeps profile auth selection", async () => {
|
it("normalizes deprecated google flash ids before lookup and keeps profile auth selection", async () => {
|
||||||
const findMock = vi.fn((provider: string, modelId: string) => {
|
const findMock = vi.fn((provider: string, modelId: string) => {
|
||||||
expect(provider).toBe("google");
|
expect(provider).toBe("google");
|
||||||
|
|||||||
@ -7,6 +7,7 @@ import {
|
|||||||
resolveApiKeyForProvider,
|
resolveApiKeyForProvider,
|
||||||
} from "../../agents/model-auth.js";
|
} from "../../agents/model-auth.js";
|
||||||
import { normalizeModelRef } from "../../agents/model-selection.js";
|
import { normalizeModelRef } from "../../agents/model-selection.js";
|
||||||
|
import { normalizeResolvedProviderModel } from "../../agents/model.provider-normalization.js";
|
||||||
import { ensureOpenClawModelsJson } from "../../agents/models-config.js";
|
import { ensureOpenClawModelsJson } from "../../agents/models-config.js";
|
||||||
import { coerceImageAssistantText } from "../../agents/tools/image-tool.helpers.js";
|
import { coerceImageAssistantText } from "../../agents/tools/image-tool.helpers.js";
|
||||||
import type {
|
import type {
|
||||||
@ -49,10 +50,17 @@ async function resolveImageRuntime(params: {
|
|||||||
const authStorage = discoverAuthStorage(params.agentDir);
|
const authStorage = discoverAuthStorage(params.agentDir);
|
||||||
const modelRegistry = discoverModels(authStorage, params.agentDir);
|
const modelRegistry = discoverModels(authStorage, params.agentDir);
|
||||||
const resolvedRef = normalizeModelRef(params.provider, params.model);
|
const resolvedRef = normalizeModelRef(params.provider, params.model);
|
||||||
const model = modelRegistry.find(resolvedRef.provider, resolvedRef.model) as Model<Api> | null;
|
const discoveredModel = modelRegistry.find(
|
||||||
if (!model) {
|
resolvedRef.provider,
|
||||||
|
resolvedRef.model,
|
||||||
|
) as Model<Api> | null;
|
||||||
|
if (!discoveredModel) {
|
||||||
throw new Error(`Unknown model: ${resolvedRef.provider}/${resolvedRef.model}`);
|
throw new Error(`Unknown model: ${resolvedRef.provider}/${resolvedRef.model}`);
|
||||||
}
|
}
|
||||||
|
const model = normalizeResolvedProviderModel({
|
||||||
|
provider: resolvedRef.provider,
|
||||||
|
model: discoveredModel,
|
||||||
|
});
|
||||||
if (!model.input?.includes("image")) {
|
if (!model.input?.includes("image")) {
|
||||||
throw new Error(`Model does not support images: ${params.provider}/${params.model}`);
|
throw new Error(`Model does not support images: ${params.provider}/${params.model}`);
|
||||||
}
|
}
|
||||||
|
|||||||
Loading…
x
Reference in New Issue
Block a user