Compare commits

...

3 Commits

Author SHA1 Message Date
Peter Steinberger
931c040a30 fix: normalize control UI model picker cache (#47581) (thanks @chrishham) 2026-03-16 03:59:28 +00:00
Christopher Chamaletsos
5cc35a2e95 fix: format default model label as 'model · provider' for consistency
The default option showed 'Default (openai/gpt-5.2)' while individual
options used the friendlier 'gpt-5.2 · openai' format.
2026-03-16 03:59:28 +00:00
Christopher Chamaletsos
c0237d8544 fix: control UI sends correct provider prefix when switching models
The model selector was using just the model ID (e.g. "gpt-5.2") as the
option value. When sent to sessions.patch, the server would fall back to
the session's current provider ("anthropic") yielding "anthropic/gpt-5.2"
instead of "openai/gpt-5.2".

Now option values use "provider/model" format, and resolveModelOverrideValue
and resolveDefaultModelValue also return the full provider-prefixed key so
selected state stays consistent.
2026-03-16 03:59:28 +00:00
8 changed files with 125 additions and 25 deletions

View File

@@ -78,6 +78,7 @@ Docs: https://docs.openclaw.ai
- Gateway/watch mode: restart on bundled-plugin package and manifest metadata changes, rebuild `dist` for extension source and `tsdown.config.ts` changes, and still ignore extension docs. (#47571) thanks @gumadeiras. - Gateway/watch mode: restart on bundled-plugin package and manifest metadata changes, rebuild `dist` for extension source and `tsdown.config.ts` changes, and still ignore extension docs. (#47571) thanks @gumadeiras.
- Gateway/watch mode: recreate bundled plugin runtime metadata after clean or stale `dist` states, so `pnpm gateway:watch` no longer fails on missing `dist/extensions/*/openclaw.plugin.json` manifests after a rebuild. Thanks @gumadeiras. - Gateway/watch mode: recreate bundled plugin runtime metadata after clean or stale `dist` states, so `pnpm gateway:watch` no longer fails on missing `dist/extensions/*/openclaw.plugin.json` manifests after a rebuild. Thanks @gumadeiras.
- Plugins/context engines: enforce owner-aware context-engine registration on both loader and public SDK paths so plugins cannot spoof privileged ownership, claim the core `legacy` engine id, or overwrite an existing engine id through direct SDK imports. (#47595) Thanks @vincentkoc. - Plugins/context engines: enforce owner-aware context-engine registration on both loader and public SDK paths so plugins cannot spoof privileged ownership, claim the core `legacy` engine id, or overwrite an existing engine id through direct SDK imports. (#47595) Thanks @vincentkoc.
- Control UI/model picker: normalize cached bare `/model` overrides to provider-qualified selector keys so the dropdown stays aligned with the real catalog entry across model changes and refreshes. (#47581) Thanks @chrishham.
## 2026.3.13 ## 2026.3.13

View File

@@ -93,7 +93,7 @@ describe("handleSendChat", () => {
ts: 0, ts: 0,
path: "", path: "",
count: 0, count: 0,
defaults: { model: "gpt-5", contextTokens: null }, defaults: { modelProvider: "openai", model: "gpt-5", contextTokens: null },
sessions: [], sessions: [],
}; };
} }

View File

@@ -518,27 +518,65 @@ function resolveActiveSessionRow(state: AppViewState) {
return state.sessionsResult?.sessions?.find((row) => row.key === state.sessionKey); return state.sessionsResult?.sessions?.find((row) => row.key === state.sessionKey);
} }
/**
 * Builds a provider-qualified selector key ("provider/model") from a model id
 * and an optional provider. Falls back to the bare model id when no provider
 * is supplied, and to "" when the model id is blank.
 */
function buildQualifiedModelValue(model: string, provider?: string | null): string {
  const modelId = model.trim();
  if (modelId === "") {
    return "";
  }
  const providerId = provider?.trim() ?? "";
  if (providerId === "") {
    return modelId;
  }
  return `${providerId}/${modelId}`;
}
/**
 * Normalizes a bare model id (no provider prefix) to its provider-qualified
 * catalog key, e.g. "gpt-5-mini" -> "openai/gpt-5-mini".
 *
 * Empty values and values that already contain a "/" pass through (trimmed)
 * unchanged. When the catalog maps the id to more than one distinct qualified
 * key, the bare value is kept — qualification would be ambiguous.
 */
function normalizeModelOptionValue(rawValue: string, catalog: ModelCatalogEntry[]): string {
  const value = rawValue.trim();
  if (value === "" || value.includes("/")) {
    return value;
  }
  const target = value.toLowerCase();
  let resolved = "";
  for (const entry of catalog) {
    if (entry.id.trim().toLowerCase() !== target) {
      continue;
    }
    const qualified = buildQualifiedModelValue(entry.id, entry.provider);
    if (resolved === "") {
      resolved = qualified;
    } else if (resolved.toLowerCase() !== qualified.toLowerCase()) {
      // Catalog entries disagree on the provider — leave the bare id alone.
      return value;
    }
  }
  return resolved === "" ? value : resolved;
}
function resolveModelOverrideValue(state: AppViewState): string { function resolveModelOverrideValue(state: AppViewState): string {
// Prefer the local cache — it reflects in-flight patches before sessionsResult refreshes. // Prefer the local cache — it reflects in-flight patches before sessionsResult refreshes.
const cached = state.chatModelOverrides[state.sessionKey]; const cached = state.chatModelOverrides[state.sessionKey];
if (typeof cached === "string") { if (typeof cached === "string") {
return cached.trim(); return normalizeModelOptionValue(cached, state.chatModelCatalog ?? []);
} }
// cached === null means explicitly cleared to default. // cached === null means explicitly cleared to default.
if (cached === null) { if (cached === null) {
return ""; return "";
} }
// No local override recorded yet — fall back to server data. // No local override recorded yet — fall back to server data.
// Include provider prefix so the value matches option keys (provider/model).
const activeRow = resolveActiveSessionRow(state); const activeRow = resolveActiveSessionRow(state);
if (activeRow) { if (activeRow && typeof activeRow.model === "string" && activeRow.model.trim()) {
return typeof activeRow.model === "string" ? activeRow.model.trim() : ""; return buildQualifiedModelValue(activeRow.model, activeRow.modelProvider);
} }
return ""; return "";
} }
function resolveDefaultModelValue(state: AppViewState): string { function resolveDefaultModelValue(state: AppViewState): string {
const model = state.sessionsResult?.defaults?.model; const defaults = state.sessionsResult?.defaults;
return typeof model === "string" ? model.trim() : ""; const model = defaults?.model;
if (typeof model !== "string" || !model.trim()) {
return "";
}
return buildQualifiedModelValue(model, defaults?.modelProvider);
} }
function buildChatModelOptions( function buildChatModelOptions(
@@ -563,7 +601,8 @@ function buildChatModelOptions(
for (const entry of catalog) { for (const entry of catalog) {
const provider = entry.provider?.trim(); const provider = entry.provider?.trim();
addOption(entry.id, provider ? `${entry.id} · ${provider}` : entry.id); const value = buildQualifiedModelValue(entry.id, provider);
addOption(value, provider ? `${entry.id} · ${provider}` : entry.id);
} }
if (currentOverride) { if (currentOverride) {
@@ -583,7 +622,10 @@ function renderChatModelSelect(state: AppViewState) {
currentOverride, currentOverride,
defaultModel, defaultModel,
); );
const defaultLabel = defaultModel ? `Default (${defaultModel})` : "Default model"; const defaultDisplay = defaultModel.includes("/")
? `${defaultModel.slice(defaultModel.indexOf("/") + 1)} · ${defaultModel.slice(0, defaultModel.indexOf("/"))}`
: defaultModel;
const defaultLabel = defaultModel ? `Default (${defaultDisplay})` : "Default model";
const busy = const busy =
state.chatLoading || state.chatSending || Boolean(state.chatRunId) || state.chatStream !== null; state.chatLoading || state.chatSending || Boolean(state.chatRunId) || state.chatStream !== null;
const disabled = const disabled =

View File

@@ -235,7 +235,7 @@ describe("executeSlashCommand directives", () => {
const request = vi.fn(async (method: string, _payload?: unknown) => { const request = vi.fn(async (method: string, _payload?: unknown) => {
if (method === "sessions.list") { if (method === "sessions.list") {
return { return {
defaults: { model: "default-model" }, defaults: { modelProvider: "openai", model: "default-model" },
sessions: [ sessions: [
row("agent:main:main", { row("agent:main:main", {
model: "gpt-4.1-mini", model: "gpt-4.1-mini",

View File

@@ -316,6 +316,7 @@ export type PresenceEntry = {
}; };
export type GatewaySessionsDefaults = { export type GatewaySessionsDefaults = {
modelProvider: string | null;
model: string | null; model: string | null;
contextTokens: number | null; contextTokens: number | null;
}; };

View File

@@ -31,7 +31,7 @@ function createProps(overrides: Partial<ChatProps> = {}): ChatProps {
ts: 0, ts: 0,
path: "", path: "",
count: 1, count: 1,
defaults: { model: "gpt-5", contextTokens: null }, defaults: { modelProvider: "openai", model: "gpt-5", contextTokens: null },
sessions: [ sessions: [
{ {
key: "main", key: "main",

View File

@@ -14,7 +14,7 @@ function createSessions(): SessionsListResult {
ts: 0, ts: 0,
path: "", path: "",
count: 0, count: 0,
defaults: { model: null, contextTokens: null }, defaults: { modelProvider: null, model: null, contextTokens: null },
sessions: [], sessions: [],
}; };
} }
@ -27,6 +27,7 @@ function createChatHeaderState(
} = {}, } = {},
): { state: AppViewState; request: ReturnType<typeof vi.fn> } { ): { state: AppViewState; request: ReturnType<typeof vi.fn> } {
let currentModel = overrides.model ?? null; let currentModel = overrides.model ?? null;
let currentModelProvider = currentModel ? "openai" : undefined;
const omitSessionFromList = overrides.omitSessionFromList ?? false; const omitSessionFromList = overrides.omitSessionFromList ?? false;
const catalog = overrides.models ?? [ const catalog = overrides.models ?? [
{ id: "gpt-5", name: "GPT-5", provider: "openai" }, { id: "gpt-5", name: "GPT-5", provider: "openai" },
@@ -34,7 +35,26 @@
]; ];
const request = vi.fn(async (method: string, params: Record<string, unknown>) => { const request = vi.fn(async (method: string, params: Record<string, unknown>) => {
if (method === "sessions.patch") { if (method === "sessions.patch") {
currentModel = (params.model as string | null | undefined) ?? null; const nextModel = (params.model as string | null | undefined) ?? null;
if (!nextModel) {
currentModel = null;
currentModelProvider = undefined;
} else {
const normalized = nextModel.trim();
const slashIndex = normalized.indexOf("/");
if (slashIndex > 0) {
currentModelProvider = normalized.slice(0, slashIndex);
currentModel = normalized.slice(slashIndex + 1);
} else {
currentModel = normalized;
const matchingProviders = catalog
.filter((entry) => entry.id === normalized)
.map((entry) => entry.provider)
.filter(Boolean);
currentModelProvider =
matchingProviders.length === 1 ? matchingProviders[0] : currentModelProvider;
}
}
return { ok: true, key: "main" }; return { ok: true, key: "main" };
} }
if (method === "chat.history") { if (method === "chat.history") {
@@ -45,10 +65,18 @@
ts: 0, ts: 0,
path: "", path: "",
count: omitSessionFromList ? 0 : 1, count: omitSessionFromList ? 0 : 1,
defaults: { model: "gpt-5", contextTokens: null }, defaults: { modelProvider: "openai", model: "gpt-5", contextTokens: null },
sessions: omitSessionFromList sessions: omitSessionFromList
? [] ? []
: [{ key: "main", kind: "direct", updatedAt: null, model: currentModel }], : [
{
key: "main",
kind: "direct",
updatedAt: null,
modelProvider: currentModelProvider,
model: currentModel,
},
],
}; };
} }
if (method === "models.list") { if (method === "models.list") {
@@ -64,10 +92,18 @@
ts: 0, ts: 0,
path: "", path: "",
count: omitSessionFromList ? 0 : 1, count: omitSessionFromList ? 0 : 1,
defaults: { model: "gpt-5", contextTokens: null }, defaults: { modelProvider: "openai", model: "gpt-5", contextTokens: null },
sessions: omitSessionFromList sessions: omitSessionFromList
? [] ? []
: [{ key: "main", kind: "direct", updatedAt: null, model: currentModel }], : [
{
key: "main",
kind: "direct",
updatedAt: null,
modelProvider: currentModelProvider,
model: currentModel,
},
],
}, },
chatModelOverrides: {}, chatModelOverrides: {},
chatModelCatalog: catalog, chatModelCatalog: catalog,
@@ -565,13 +601,13 @@ describe("chat view", () => {
expect(modelSelect).not.toBeNull(); expect(modelSelect).not.toBeNull();
expect(modelSelect?.value).toBe(""); expect(modelSelect?.value).toBe("");
modelSelect!.value = "gpt-5-mini"; modelSelect!.value = "openai/gpt-5-mini";
modelSelect!.dispatchEvent(new Event("change", { bubbles: true })); modelSelect!.dispatchEvent(new Event("change", { bubbles: true }));
await flushTasks(); await flushTasks();
expect(request).toHaveBeenCalledWith("sessions.patch", { expect(request).toHaveBeenCalledWith("sessions.patch", {
key: "main", key: "main",
model: "gpt-5-mini", model: "openai/gpt-5-mini",
}); });
expect(request).not.toHaveBeenCalledWith("chat.history", expect.anything()); expect(request).not.toHaveBeenCalledWith("chat.history", expect.anything());
expect(state.sessionsResult?.sessions[0]?.model).toBe("gpt-5-mini"); expect(state.sessionsResult?.sessions[0]?.model).toBe("gpt-5-mini");
@@ -593,7 +629,7 @@
'select[data-chat-model-select="true"]', 'select[data-chat-model-select="true"]',
); );
expect(modelSelect).not.toBeNull(); expect(modelSelect).not.toBeNull();
expect(modelSelect?.value).toBe("gpt-5-mini"); expect(modelSelect?.value).toBe("openai/gpt-5-mini");
modelSelect!.value = ""; modelSelect!.value = "";
modelSelect!.dispatchEvent(new Event("change", { bubbles: true })); modelSelect!.dispatchEvent(new Event("change", { bubbles: true }));
@@ -637,7 +673,7 @@
); );
expect(modelSelect).not.toBeNull(); expect(modelSelect).not.toBeNull();
modelSelect!.value = "gpt-5-mini"; modelSelect!.value = "openai/gpt-5-mini";
modelSelect!.dispatchEvent(new Event("change", { bubbles: true })); modelSelect!.dispatchEvent(new Event("change", { bubbles: true }));
await flushTasks(); await flushTasks();
render(renderChatSessionSelect(state), container); render(renderChatSessionSelect(state), container);
@@ -645,10 +681,30 @@
const rerendered = container.querySelector<HTMLSelectElement>( const rerendered = container.querySelector<HTMLSelectElement>(
'select[data-chat-model-select="true"]', 'select[data-chat-model-select="true"]',
); );
expect(rerendered?.value).toBe("gpt-5-mini"); expect(rerendered?.value).toBe("openai/gpt-5-mini");
vi.unstubAllGlobals(); vi.unstubAllGlobals();
}); });
// Regression test for #47581: an override cached via `/model` as a bare model
// id (no provider prefix) must resolve to the provider-qualified option key.
it("normalizes cached bare /model overrides to the matching catalog option", () => {
const { state } = createChatHeaderState();
// Simulate an in-flight override stored without the "openai/" prefix.
state.chatModelOverrides = { main: "gpt-5-mini" };
const container = document.createElement("div");
render(renderChatSessionSelect(state), container);
const modelSelect = container.querySelector<HTMLSelectElement>(
'select[data-chat-model-select="true"]',
);
expect(modelSelect).not.toBeNull();
// Selected value is the qualified key, not the bare cached id.
expect(modelSelect?.value).toBe("openai/gpt-5-mini");
const optionValues = Array.from(modelSelect?.querySelectorAll("option") ?? []).map(
(option) => option.value,
);
// Only the provider-qualified option exists — no duplicate bare-id option
// should be appended for the cached override.
expect(optionValues).toContain("openai/gpt-5-mini");
expect(optionValues).not.toContain("gpt-5-mini");
});
it("prefers the session label over displayName in the grouped chat session selector", () => { it("prefers the session label over displayName in the grouped chat session selector", () => {
const { state } = createChatHeaderState({ omitSessionFromList: true }); const { state } = createChatHeaderState({ omitSessionFromList: true });
state.sessionKey = "agent:main:subagent:4f2146de-887b-4176-9abe-91140082959b"; state.sessionKey = "agent:main:subagent:4f2146de-887b-4176-9abe-91140082959b";
@@ -657,7 +713,7 @@
ts: 0, ts: 0,
path: "", path: "",
count: 1, count: 1,
defaults: { model: "gpt-5", contextTokens: null }, defaults: { modelProvider: "openai", model: "gpt-5", contextTokens: null },
sessions: [ sessions: [
{ {
key: state.sessionKey, key: state.sessionKey,
@@ -707,7 +763,7 @@
ts: 0, ts: 0,
path: "", path: "",
count: 1, count: 1,
defaults: { model: "gpt-5", contextTokens: null }, defaults: { modelProvider: "openai", model: "gpt-5", contextTokens: null },
sessions: [ sessions: [
{ {
key: state.sessionKey, key: state.sessionKey,
@@ -736,7 +792,7 @@
ts: 0, ts: 0,
path: "", path: "",
count: 2, count: 2,
defaults: { model: "gpt-5", contextTokens: null }, defaults: { modelProvider: "openai", model: "gpt-5", contextTokens: null },
sessions: [ sessions: [
{ {
key: "agent:main:subagent:4f2146de-887b-4176-9abe-91140082959b", key: "agent:main:subagent:4f2146de-887b-4176-9abe-91140082959b",

View File

@@ -8,7 +8,7 @@ function buildResult(session: SessionsListResult["sessions"][number]): SessionsL
ts: Date.now(), ts: Date.now(),
path: "(multiple)", path: "(multiple)",
count: 1, count: 1,
defaults: { model: null, contextTokens: null }, defaults: { modelProvider: null, model: null, contextTokens: null },
sessions: [session], sessions: [session],
}; };
} }