Compare commits

3 Commits: `main...fix/contro`

| Author | SHA1 | Date |
|---|---|---|
|  | 931c040a30 |  |
|  | 5cc35a2e95 |  |
|  | c0237d8544 |  |
```diff
@@ -78,6 +78,7 @@ Docs: https://docs.openclaw.ai
 - Gateway/watch mode: restart on bundled-plugin package and manifest metadata changes, rebuild `dist` for extension source and `tsdown.config.ts` changes, and still ignore extension docs. (#47571) thanks @gumadeiras.
 - Gateway/watch mode: recreate bundled plugin runtime metadata after clean or stale `dist` states, so `pnpm gateway:watch` no longer fails on missing `dist/extensions/*/openclaw.plugin.json` manifests after a rebuild. Thanks @gumadeiras.
 - Plugins/context engines: enforce owner-aware context-engine registration on both loader and public SDK paths so plugins cannot spoof privileged ownership, claim the core `legacy` engine id, or overwrite an existing engine id through direct SDK imports. (#47595) Thanks @vincentkoc.
+- Control UI/model picker: normalize cached bare `/model` overrides to provider-qualified selector keys so the dropdown stays aligned with the real catalog entry across model changes and refreshes. (#47581) Thanks @chrishham.
 
 ## 2026.3.13
```
```diff
@@ -93,7 +93,7 @@ describe("handleSendChat", () => {
     ts: 0,
     path: "",
     count: 0,
-    defaults: { model: "gpt-5", contextTokens: null },
+    defaults: { modelProvider: "openai", model: "gpt-5", contextTokens: null },
     sessions: [],
   };
 }
```
```diff
@@ -518,27 +518,65 @@ function resolveActiveSessionRow(state: AppViewState)
   return state.sessionsResult?.sessions?.find((row) => row.key === state.sessionKey);
 }
 
+function buildQualifiedModelValue(model: string, provider?: string | null): string {
+  const trimmedModel = model.trim();
+  if (!trimmedModel) {
+    return "";
+  }
+  const trimmedProvider = provider?.trim();
+  return trimmedProvider ? `${trimmedProvider}/${trimmedModel}` : trimmedModel;
+}
+
+function normalizeModelOptionValue(rawValue: string, catalog: ModelCatalogEntry[]): string {
+  const trimmed = rawValue.trim();
+  if (!trimmed || trimmed.includes("/")) {
+    return trimmed;
+  }
+
+  let matchedValue = "";
+  for (const entry of catalog) {
+    if (entry.id.trim().toLowerCase() !== trimmed.toLowerCase()) {
+      continue;
+    }
+    const candidate = buildQualifiedModelValue(entry.id, entry.provider);
+    if (!matchedValue) {
+      matchedValue = candidate;
+      continue;
+    }
+    if (matchedValue.toLowerCase() !== candidate.toLowerCase()) {
+      return trimmed;
+    }
+  }
+
+  return matchedValue || trimmed;
+}
+
 function resolveModelOverrideValue(state: AppViewState): string {
   // Prefer the local cache — it reflects in-flight patches before sessionsResult refreshes.
   const cached = state.chatModelOverrides[state.sessionKey];
   if (typeof cached === "string") {
-    return cached.trim();
+    return normalizeModelOptionValue(cached, state.chatModelCatalog ?? []);
   }
   // cached === null means explicitly cleared to default.
   if (cached === null) {
     return "";
   }
   // No local override recorded yet — fall back to server data.
+  // Include provider prefix so the value matches option keys (provider/model).
   const activeRow = resolveActiveSessionRow(state);
-  if (activeRow) {
-    return typeof activeRow.model === "string" ? activeRow.model.trim() : "";
+  if (activeRow && typeof activeRow.model === "string" && activeRow.model.trim()) {
+    return buildQualifiedModelValue(activeRow.model, activeRow.modelProvider);
   }
   return "";
 }
 
 function resolveDefaultModelValue(state: AppViewState): string {
-  const model = state.sessionsResult?.defaults?.model;
-  return typeof model === "string" ? model.trim() : "";
+  const defaults = state.sessionsResult?.defaults;
+  const model = defaults?.model;
+  if (typeof model !== "string" || !model.trim()) {
+    return "";
+  }
+  return buildQualifiedModelValue(model, defaults?.modelProvider);
 }
 
 function buildChatModelOptions(
@@ -563,7 +601,8 @@ function buildChatModelOptions(
 
   for (const entry of catalog) {
     const provider = entry.provider?.trim();
-    addOption(entry.id, provider ? `${entry.id} · ${provider}` : entry.id);
+    const value = buildQualifiedModelValue(entry.id, provider);
+    addOption(value, provider ? `${entry.id} · ${provider}` : entry.id);
   }
 
   if (currentOverride) {
@@ -583,7 +622,10 @@ function renderChatModelSelect(state: AppViewState) {
     currentOverride,
     defaultModel,
   );
-  const defaultLabel = defaultModel ? `Default (${defaultModel})` : "Default model";
+  const defaultDisplay = defaultModel.includes("/")
+    ? `${defaultModel.slice(defaultModel.indexOf("/") + 1)} · ${defaultModel.slice(0, defaultModel.indexOf("/"))}`
+    : defaultModel;
+  const defaultLabel = defaultModel ? `Default (${defaultDisplay})` : "Default model";
   const busy =
     state.chatLoading || state.chatSending || Boolean(state.chatRunId) || state.chatStream !== null;
   const disabled =
```
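Taken together, the two new helpers normalize whatever the UI has on hand into the provider-qualified key that `buildChatModelOptions` now uses as the `<option>` value. Below is a minimal, self-contained sketch of that behavior, restated from the diff; the catalog entries and the `console.log` calls are illustrative only and are not part of the change:

```ts
// Standalone sketch of the normalization introduced in the diff above.
// Catalog contents here are made-up sample data.
type ModelCatalogEntry = { id: string; name?: string; provider?: string | null };

function buildQualifiedModelValue(model: string, provider?: string | null): string {
  const trimmedModel = model.trim();
  if (!trimmedModel) return "";
  const trimmedProvider = provider?.trim();
  return trimmedProvider ? `${trimmedProvider}/${trimmedModel}` : trimmedModel;
}

function normalizeModelOptionValue(rawValue: string, catalog: ModelCatalogEntry[]): string {
  const trimmed = rawValue.trim();
  // Already provider-qualified (or empty): leave it alone.
  if (!trimmed || trimmed.includes("/")) return trimmed;
  let matchedValue = "";
  for (const entry of catalog) {
    if (entry.id.trim().toLowerCase() !== trimmed.toLowerCase()) continue;
    const candidate = buildQualifiedModelValue(entry.id, entry.provider);
    if (!matchedValue) {
      matchedValue = candidate;
      continue;
    }
    // Same id offered by more than one provider: keep the bare value rather than guess.
    if (matchedValue.toLowerCase() !== candidate.toLowerCase()) return trimmed;
  }
  return matchedValue || trimmed;
}

const catalog: ModelCatalogEntry[] = [
  { id: "gpt-5", provider: "openai" },
  { id: "gpt-5-mini", provider: "openai" },
];

console.log(normalizeModelOptionValue("gpt-5-mini", catalog)); // "openai/gpt-5-mini"
console.log(normalizeModelOptionValue("openai/gpt-5-mini", catalog)); // unchanged
console.log(normalizeModelOptionValue("unknown-model", catalog)); // no catalog match, stays bare
```

Because the dropdown options are built with the same helper, a cached bare override like `gpt-5-mini` and the catalog option `openai/gpt-5-mini` now resolve to the same selector key, which is what keeps the picker aligned after refreshes.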
```diff
@@ -235,7 +235,7 @@ describe("executeSlashCommand directives", () => {
   const request = vi.fn(async (method: string, _payload?: unknown) => {
     if (method === "sessions.list") {
       return {
-        defaults: { model: "default-model" },
+        defaults: { modelProvider: "openai", model: "default-model" },
         sessions: [
           row("agent:main:main", {
             model: "gpt-4.1-mini",
```
```diff
@@ -316,6 +316,7 @@ export type PresenceEntry = {
 };
 
 export type GatewaySessionsDefaults = {
+  modelProvider: string | null;
   model: string | null;
   contextTokens: number | null;
 };
```
```diff
@@ -31,7 +31,7 @@ function createProps(overrides: Partial<ChatProps> = {}): ChatProps {
     ts: 0,
     path: "",
     count: 1,
-    defaults: { model: "gpt-5", contextTokens: null },
+    defaults: { modelProvider: "openai", model: "gpt-5", contextTokens: null },
     sessions: [
       {
         key: "main",
```
```diff
@@ -14,7 +14,7 @@ function createSessions(): SessionsListResult {
     ts: 0,
     path: "",
     count: 0,
-    defaults: { model: null, contextTokens: null },
+    defaults: { modelProvider: null, model: null, contextTokens: null },
     sessions: [],
   };
 }
```
```diff
@@ -27,6 +27,7 @@ function createChatHeaderState(
   } = {},
 ): { state: AppViewState; request: ReturnType<typeof vi.fn> } {
   let currentModel = overrides.model ?? null;
+  let currentModelProvider = currentModel ? "openai" : undefined;
   const omitSessionFromList = overrides.omitSessionFromList ?? false;
   const catalog = overrides.models ?? [
     { id: "gpt-5", name: "GPT-5", provider: "openai" },
@@ -34,7 +35,26 @@ function createChatHeaderState(
   ];
   const request = vi.fn(async (method: string, params: Record<string, unknown>) => {
     if (method === "sessions.patch") {
-      currentModel = (params.model as string | null | undefined) ?? null;
+      const nextModel = (params.model as string | null | undefined) ?? null;
+      if (!nextModel) {
+        currentModel = null;
+        currentModelProvider = undefined;
+      } else {
+        const normalized = nextModel.trim();
+        const slashIndex = normalized.indexOf("/");
+        if (slashIndex > 0) {
+          currentModelProvider = normalized.slice(0, slashIndex);
+          currentModel = normalized.slice(slashIndex + 1);
+        } else {
+          currentModel = normalized;
+          const matchingProviders = catalog
+            .filter((entry) => entry.id === normalized)
+            .map((entry) => entry.provider)
+            .filter(Boolean);
+          currentModelProvider =
+            matchingProviders.length === 1 ? matchingProviders[0] : currentModelProvider;
+        }
+      }
       return { ok: true, key: "main" };
     }
     if (method === "chat.history") {
@@ -45,10 +65,18 @@ function createChatHeaderState(
         ts: 0,
         path: "",
         count: omitSessionFromList ? 0 : 1,
-        defaults: { model: "gpt-5", contextTokens: null },
+        defaults: { modelProvider: "openai", model: "gpt-5", contextTokens: null },
         sessions: omitSessionFromList
           ? []
-          : [{ key: "main", kind: "direct", updatedAt: null, model: currentModel }],
+          : [
+              {
+                key: "main",
+                kind: "direct",
+                updatedAt: null,
+                modelProvider: currentModelProvider,
+                model: currentModel,
+              },
+            ],
       };
     }
     if (method === "models.list") {
@@ -64,10 +92,18 @@ function createChatHeaderState(
       ts: 0,
       path: "",
      count: omitSessionFromList ? 0 : 1,
-      defaults: { model: "gpt-5", contextTokens: null },
+      defaults: { modelProvider: "openai", model: "gpt-5", contextTokens: null },
       sessions: omitSessionFromList
         ? []
-        : [{ key: "main", kind: "direct", updatedAt: null, model: currentModel }],
+        : [
+            {
+              key: "main",
+              kind: "direct",
+              updatedAt: null,
+              modelProvider: currentModelProvider,
+              model: currentModel,
+            },
+          ],
     },
     chatModelOverrides: {},
     chatModelCatalog: catalog,
@@ -565,13 +601,13 @@ describe("chat view", () => {
     expect(modelSelect).not.toBeNull();
     expect(modelSelect?.value).toBe("");
 
-    modelSelect!.value = "gpt-5-mini";
+    modelSelect!.value = "openai/gpt-5-mini";
     modelSelect!.dispatchEvent(new Event("change", { bubbles: true }));
     await flushTasks();
 
     expect(request).toHaveBeenCalledWith("sessions.patch", {
       key: "main",
-      model: "gpt-5-mini",
+      model: "openai/gpt-5-mini",
     });
     expect(request).not.toHaveBeenCalledWith("chat.history", expect.anything());
     expect(state.sessionsResult?.sessions[0]?.model).toBe("gpt-5-mini");
@@ -593,7 +629,7 @@ describe("chat view", () => {
       'select[data-chat-model-select="true"]',
     );
     expect(modelSelect).not.toBeNull();
-    expect(modelSelect?.value).toBe("gpt-5-mini");
+    expect(modelSelect?.value).toBe("openai/gpt-5-mini");
 
     modelSelect!.value = "";
     modelSelect!.dispatchEvent(new Event("change", { bubbles: true }));
@@ -637,7 +673,7 @@ describe("chat view", () => {
     );
     expect(modelSelect).not.toBeNull();
 
-    modelSelect!.value = "gpt-5-mini";
+    modelSelect!.value = "openai/gpt-5-mini";
     modelSelect!.dispatchEvent(new Event("change", { bubbles: true }));
     await flushTasks();
     render(renderChatSessionSelect(state), container);
@@ -645,10 +681,30 @@ describe("chat view", () => {
     const rerendered = container.querySelector<HTMLSelectElement>(
       'select[data-chat-model-select="true"]',
     );
-    expect(rerendered?.value).toBe("gpt-5-mini");
+    expect(rerendered?.value).toBe("openai/gpt-5-mini");
     vi.unstubAllGlobals();
   });
 
+  it("normalizes cached bare /model overrides to the matching catalog option", () => {
+    const { state } = createChatHeaderState();
+    state.chatModelOverrides = { main: "gpt-5-mini" };
+
+    const container = document.createElement("div");
+    render(renderChatSessionSelect(state), container);
+
+    const modelSelect = container.querySelector<HTMLSelectElement>(
+      'select[data-chat-model-select="true"]',
+    );
+    expect(modelSelect).not.toBeNull();
+    expect(modelSelect?.value).toBe("openai/gpt-5-mini");
+
+    const optionValues = Array.from(modelSelect?.querySelectorAll("option") ?? []).map(
+      (option) => option.value,
+    );
+    expect(optionValues).toContain("openai/gpt-5-mini");
+    expect(optionValues).not.toContain("gpt-5-mini");
+  });
+
   it("prefers the session label over displayName in the grouped chat session selector", () => {
     const { state } = createChatHeaderState({ omitSessionFromList: true });
     state.sessionKey = "agent:main:subagent:4f2146de-887b-4176-9abe-91140082959b";
@@ -657,7 +713,7 @@ describe("chat view", () => {
       ts: 0,
       path: "",
       count: 1,
-      defaults: { model: "gpt-5", contextTokens: null },
+      defaults: { modelProvider: "openai", model: "gpt-5", contextTokens: null },
       sessions: [
         {
           key: state.sessionKey,
@@ -707,7 +763,7 @@ describe("chat view", () => {
       ts: 0,
       path: "",
       count: 1,
-      defaults: { model: "gpt-5", contextTokens: null },
+      defaults: { modelProvider: "openai", model: "gpt-5", contextTokens: null },
       sessions: [
         {
           key: state.sessionKey,
@@ -736,7 +792,7 @@ describe("chat view", () => {
       ts: 0,
      path: "",
       count: 2,
-      defaults: { model: "gpt-5", contextTokens: null },
+      defaults: { modelProvider: "openai", model: "gpt-5", contextTokens: null },
       sessions: [
         {
           key: "agent:main:subagent:4f2146de-887b-4176-9abe-91140082959b",
```
```diff
@@ -8,7 +8,7 @@ function buildResult(session: SessionsListResult["sessions"][number]): SessionsL
     ts: Date.now(),
     path: "(multiple)",
     count: 1,
-    defaults: { model: null, contextTokens: null },
+    defaults: { modelProvider: null, model: null, contextTokens: null },
     sessions: [session],
   };
 }
```