Compare commits

...

4 Commits

Author SHA1 Message Date
Peter Steinberger
087fe1f72a fix: clean up openai-codex implicit provider merge (#39860) (thanks @xdanger) 2026-03-08 13:48:02 +00:00
Kros Dai
c7fddd95c3 Models: fix codex follow-up CI issues 2026-03-08 13:44:38 +00:00
Kros Dai
b77a115f67 Models: scope implicit codex baseUrl override 2026-03-08 13:44:38 +00:00
Kros Dai
3053324110 fix: add implicit openai-codex provider snapshot 2026-03-08 13:44:38 +00:00
5 changed files with 247 additions and 7 deletions

View File

@ -14,6 +14,7 @@ Docs: https://docs.openclaw.ai
- Mattermost replies: keep `root_id` pinned to the existing thread root when an agent replies inside a thread, while still using reply-target threading for top-level posts. (#27744) thanks @hnykda.
- Agents/failover: detect Amazon Bedrock `Too many tokens per day` quota errors as rate limits across fallback, cron retry, and memory embeddings while keeping context-window `too many tokens per request` errors out of the rate-limit lane. (#39377) Thanks @gambletan.
- Android/Play distribution: remove self-update, background location, `screen.record`, and background mic capture from the Android app, narrow the foreground service to `dataSync` only, and clean up the legacy `location.enabledMode=always` preference migration. (#39660) Thanks @obviyus.
- Models/openai-codex snapshot merge: synthesize the implicit `openai-codex` runtime provider from OAuth presence and replace stale agent `models.json` `baseUrl` values only when the provider API surface has changed, while preserving matching agent-local base URL overrides. (#39860) Thanks @xdanger.
## 2026.3.7

View File

@ -22,7 +22,7 @@ enum HostEnvSecurityPolicy {
"PS4",
"GCONV_PATH",
"IFS",
"SSLKEYLOGFILE",
"SSLKEYLOGFILE"
]
static let blockedOverrideKeys: Set<String> = [
@ -50,17 +50,17 @@ enum HostEnvSecurityPolicy {
"OPENSSL_ENGINES",
"PYTHONSTARTUP",
"WGETRC",
"CURL_HOME",
"CURL_HOME"
]
static let blockedOverridePrefixes: [String] = [
"GIT_CONFIG_",
"NPM_CONFIG_",
"NPM_CONFIG_"
]
static let blockedPrefixes: [String] = [
"DYLD_",
"LD_",
"BASH_FUNC_",
"BASH_FUNC_"
]
}

View File

@ -0,0 +1,193 @@
import fs from "node:fs/promises";
import path from "node:path";
import { describe, expect, it } from "vitest";
import { resolveOpenClawAgentDir } from "./agent-paths.js";
import {
installModelsConfigTestHooks,
MODELS_CONFIG_IMPLICIT_ENV_VARS,
unsetEnv,
withModelsTempHome,
withTempEnv,
} from "./models-config.e2e-harness.js";
import { ensureOpenClawModelsJson } from "./models-config.js";
import { resolveImplicitProviders } from "./models-config.providers.js";
import { readGeneratedModelsJson } from "./models-config.test-utils.js";
// Register the shared models-config harness hooks (presumably per-test
// setup/teardown for temp homes and env state — provided by the e2e harness).
installModelsConfigTestHooks();
/**
 * Seed `auth-profiles.json` under `agentDir` with a single Codex OAuth
 * profile so implicit-provider resolution sees valid Codex credentials.
 * Creates the directory if needed; tokens are fixed fakes, expiry is one
 * minute in the future.
 */
async function writeCodexOauthProfile(agentDir: string) {
  const profileId = "openai-codex:default";
  const payload = {
    version: 1,
    profiles: {
      [profileId]: {
        type: "oauth",
        provider: "openai-codex",
        access: "access-token",
        refresh: "refresh-token",
        // Keep the token valid for the duration of the test run.
        expires: Date.now() + 60_000,
      },
    },
    order: {
      "openai-codex": [profileId],
    },
  };
  await fs.mkdir(agentDir, { recursive: true });
  const target = path.join(agentDir, "auth-profiles.json");
  await fs.writeFile(target, JSON.stringify(payload, null, 2), "utf8");
}
// E2E coverage for implicit `openai-codex` provider synthesis (#39860):
// a Codex OAuth profile on disk should inject the provider, and the
// models.json merge should replace an agent-local baseUrl only when the
// resolved provider API surface has changed underneath it.
describe("openai-codex implicit provider", () => {
  it("injects an implicit provider when Codex OAuth exists", async () => {
    await withModelsTempHome(async () => {
      await withTempEnv(MODELS_CONFIG_IMPLICIT_ENV_VARS, async () => {
        // Clear provider-related env vars so only the OAuth profile can
        // trigger synthesis.
        unsetEnv(MODELS_CONFIG_IMPLICIT_ENV_VARS);
        const agentDir = resolveOpenClawAgentDir();
        await writeCodexOauthProfile(agentDir);
        const providers = await resolveImplicitProviders({ agentDir });
        // Synthesized entry pins the canonical Codex endpoint + API surface.
        expect(providers?.["openai-codex"]).toMatchObject({
          baseUrl: "https://chatgpt.com/backend-api",
          api: "openai-codex-responses",
          models: [],
        });
        // OAuth-backed: no static apiKey may appear in the snapshot.
        expect(providers?.["openai-codex"]).not.toHaveProperty("apiKey");
      });
    });
  });
  it("replaces stale openai-codex baseUrl in generated models.json", async () => {
    await withModelsTempHome(async () => {
      await withTempEnv(MODELS_CONFIG_IMPLICIT_ENV_VARS, async () => {
        unsetEnv(MODELS_CONFIG_IMPLICIT_ENV_VARS);
        const agentDir = resolveOpenClawAgentDir();
        await writeCodexOauthProfile(agentDir);
        // Pre-seed models.json with an outdated baseUrl AND a different api
        // surface ("openai-responses") — the mismatch means the old baseUrl
        // must not be preserved by the merge.
        await fs.writeFile(
          path.join(agentDir, "models.json"),
          JSON.stringify(
            {
              providers: {
                "openai-codex": {
                  baseUrl: "https://api.openai.com/v1",
                  api: "openai-responses",
                  models: [
                    {
                      id: "gpt-5.4",
                      name: "GPT-5.4",
                      api: "openai-responses",
                      contextWindow: 1_000_000,
                      maxTokens: 100_000,
                    },
                  ],
                },
              },
            },
            null,
            2,
          ),
          "utf8",
        );
        await ensureOpenClawModelsJson({});
        const parsed = await readGeneratedModelsJson<{
          providers: Record<string, { baseUrl?: string; api?: string }>;
        }>();
        // The stale endpoint is replaced with the canonical Codex one.
        expect(parsed.providers["openai-codex"]).toMatchObject({
          baseUrl: "https://chatgpt.com/backend-api",
          api: "openai-codex-responses",
        });
      });
    });
  });
  it("preserves existing baseUrl when the api surface already matches", async () => {
    await withModelsTempHome(async () => {
      await withTempEnv(MODELS_CONFIG_IMPLICIT_ENV_VARS, async () => {
        unsetEnv(MODELS_CONFIG_IMPLICIT_ENV_VARS);
        const agentDir = resolveOpenClawAgentDir();
        await writeCodexOauthProfile(agentDir);
        // Agent-local proxy override with the SAME api surface — merge mode
        // must keep this customization intact.
        await fs.writeFile(
          path.join(agentDir, "models.json"),
          JSON.stringify(
            {
              providers: {
                "openai-codex": {
                  baseUrl: "https://proxy.example/codex",
                  api: "openai-codex-responses",
                  models: [],
                },
              },
            },
            null,
            2,
          ),
          "utf8",
        );
        await ensureOpenClawModelsJson({});
        const parsed = await readGeneratedModelsJson<{
          providers: Record<string, { baseUrl?: string; api?: string }>;
        }>();
        // Matching api surface → the local proxy baseUrl survives the merge.
        expect(parsed.providers["openai-codex"]).toMatchObject({
          baseUrl: "https://proxy.example/codex",
          api: "openai-codex-responses",
        });
      });
    });
  });
  it("preserves an existing baseUrl for explicit openai-codex config without oauth synthesis", async () => {
    await withModelsTempHome(async () => {
      await withTempEnv(MODELS_CONFIG_IMPLICIT_ENV_VARS, async () => {
        unsetEnv(MODELS_CONFIG_IMPLICIT_ENV_VARS);
        const agentDir = resolveOpenClawAgentDir();
        // No OAuth profile here — only an explicit on-disk provider config.
        await fs.mkdir(agentDir, { recursive: true });
        await fs.writeFile(
          path.join(agentDir, "models.json"),
          JSON.stringify(
            {
              providers: {
                "openai-codex": {
                  baseUrl: "https://chatgpt.com/backend-api",
                  api: "openai-codex-responses",
                  models: [],
                },
              },
            },
            null,
            2,
          ),
          "utf8",
        );
        // Explicit merge config with an empty baseUrl must not clobber the
        // existing non-empty agent-local value.
        await ensureOpenClawModelsJson({
          models: {
            mode: "merge",
            providers: {
              "openai-codex": {
                baseUrl: "",
                api: "openai-codex-responses",
                models: [],
              },
            },
          },
        });
        const parsed = await readGeneratedModelsJson<{
          providers: Record<string, { baseUrl?: string; api?: string }>;
        }>();
        expect(parsed.providers["openai-codex"]).toMatchObject({
          baseUrl: "https://chatgpt.com/backend-api",
          api: "openai-codex-responses",
        });
      });
    });
  });
});

View File

@ -207,6 +207,8 @@ const NVIDIA_DEFAULT_COST = {
cacheWrite: 0,
};
const OPENAI_CODEX_BASE_URL = "https://chatgpt.com/backend-api";
const log = createSubsystemLogger("agents/model-providers");
interface OllamaModel {
@ -994,6 +996,16 @@ function buildOpenrouterProvider(): ProviderConfig {
};
}
/**
 * Build the implicit `openai-codex` provider snapshot. Like Copilot, Codex
 * resolves OAuth credentials from auth-profiles at runtime, so the snapshot
 * carries only the canonical endpoint and API surface — no key, no models.
 */
function buildOpenAICodexProvider(): ProviderConfig {
  const provider: ProviderConfig = {
    baseUrl: OPENAI_CODEX_BASE_URL,
    api: "openai-codex-responses",
    models: [],
  };
  return provider;
}
async function buildVllmProvider(params?: {
baseUrl?: string;
apiKey?: string;
@ -1302,6 +1314,11 @@ export async function resolveImplicitProviders(params: {
providers.openrouter = { ...buildOpenrouterProvider(), apiKey: openrouterKey };
}
const openaiCodexProfiles = listProfilesForProvider(authStore, "openai-codex");
if (openaiCodexProfiles.length > 0) {
providers["openai-codex"] = buildOpenAICodexProvider();
}
const nvidiaKey = resolveProviderApiKey("nvidia").apiKey;
if (nvidiaKey) {
providers.nvidia = { ...buildNvidiaProvider(), apiKey: nvidiaKey };

View File

@ -165,6 +165,32 @@ async function resolveProvidersForModelsJson(params: {
return providers;
}
/**
 * Decide whether an agent-local `baseUrl` should survive a merge with the
 * freshly-resolved provider entry.
 *
 * Returns false when the provider's baseUrl was set explicitly elsewhere,
 * when there is no non-empty existing baseUrl to keep, or when the resolved
 * provider's API surface differs from the existing one (a changed api means
 * the old endpoint is stale and must be replaced).
 */
function shouldPreserveExistingBaseUrl(params: {
  key: string;
  existing: NonNullable<ModelsConfig["providers"]>[string] & { baseUrl?: string; api?: string };
  nextProvider: ProviderConfig;
  explicitBaseUrlProviders: ReadonlySet<string>;
}): boolean {
  const { key, existing, nextProvider, explicitBaseUrlProviders } = params;
  // Explicit per-provider overrides always win over the agent-local value.
  if (explicitBaseUrlProviders.has(key)) {
    return false;
  }
  // Nothing to preserve unless a non-empty string baseUrl already exists.
  const existingBaseUrl = typeof existing.baseUrl === "string" ? existing.baseUrl : "";
  if (!existingBaseUrl) {
    return false;
  }
  const previousApi = typeof existing.api === "string" ? existing.api.trim() : "";
  const resolvedApi = typeof nextProvider.api === "string" ? nextProvider.api.trim() : "";
  // Merge mode keeps agent-local baseUrl customizations, except when the
  // resolved provider API surface has changed out from under them.
  if (previousApi && resolvedApi && previousApi !== resolvedApi) {
    return false;
  }
  return true;
}
function mergeWithExistingProviderSecrets(params: {
nextProviders: Record<string, ProviderConfig>;
existingProviders: Record<string, NonNullable<ModelsConfig["providers"]>[string]>;
@ -198,9 +224,12 @@ function mergeWithExistingProviderSecrets(params: {
preserved.apiKey = existing.apiKey;
}
if (
!explicitBaseUrlProviders.has(key) &&
typeof existing.baseUrl === "string" &&
existing.baseUrl
shouldPreserveExistingBaseUrl({
key,
existing,
nextProvider: newEntry,
explicitBaseUrlProviders,
})
) {
preserved.baseUrl = existing.baseUrl;
}