fix(onboard): restore openai-responses API for all Azure URLs

Regression introduced in 91104ac740 broke local vLLM endpoints that rely
on openai-responses API compatibility.

The commit narrowed Azure detection from 'isAzure' to 'isAzureOpenAi'
(*.openai.azure.com only), causing requests to non-Azure OpenAI-compatible
endpoints to be routed to /v1/chat/completions instead of /responses.
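
A minimal sketch of the two detection predicates, assuming simple
hostname-suffix checks (only the predicate names and the two URL shapes
come from this commit; the actual implementation may differ):

function isAzureOpenAi(baseUrl: string): boolean {
  // Narrowed check from 91104ac740: *.openai.azure.com only.
  return new URL(baseUrl).hostname.endsWith(".openai.azure.com");
}

function isAzure(baseUrl: string): boolean {
  // Broader check restored by this commit: both Azure URL shapes.
  const host = new URL(baseUrl).hostname;
  return (
    host.endsWith(".openai.azure.com") ||
    host.endsWith(".services.ai.azure.com")
  );
}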

This resulted in 404 errors for local vLLM setups configured with
openai-responses mode.

Fix: revert the providerApi logic to use 'isAzure' (which covers both
*.services.ai.azure.com and *.openai.azure.com), preserving backward
compatibility.
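
Under the sketched predicates above, the base URL exercised by the updated
test illustrates the regression and the fix:

// Narrow check (regression): the URL is not treated as Azure, so the
// provider falls through to resolveProviderApi(params.compatibility).
isAzureOpenAi("https://my-resource.services.ai.azure.com/openai/v1"); // false
// Restored broad check: the URL is forced back onto openai-responses.
isAzure("https://my-resource.services.ai.azure.com/openai/v1");       // true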

Fixes #50719
OpenClaw 2026-03-20 08:31:31 +07:00
parent 55e12bd236
commit ba811b4629
2 changed files with 2 additions and 2 deletions


@@ -506,7 +506,7 @@ describe("applyCustomApiConfig", () => {
 const provider = result.config.models?.providers?.[providerId];
 expect(provider?.baseUrl).toBe("https://my-resource.services.ai.azure.com/openai/v1");
-expect(provider?.api).toBe("openai-completions");
+expect(provider?.api).toBe("openai-responses");
 expect(provider?.authHeader).toBe(false);
 expect(provider?.headers).toEqual({ "api-key": "key123" });


@@ -686,7 +686,7 @@ export function applyCustomApiConfig(params: ApplyCustomApiConfigParams): Custom
 normalizeOptionalProviderApiKey(params.apiKey) ??
 normalizeOptionalProviderApiKey(existingApiKey);
-const providerApi = isAzureOpenAi
+const providerApi = isAzure
 ? ("openai-responses" as const)
 : resolveProviderApi(params.compatibility);
 const azureHeaders = isAzure && normalizedApiKey ? { "api-key": normalizedApiKey } : undefined;