diff --git a/src/agents/openai-ws-connection.test.ts b/src/agents/openai-ws-connection.test.ts index 2a7b95f7eb9..4f3f2d4e706 100644 --- a/src/agents/openai-ws-connection.test.ts +++ b/src/agents/openai-ws-connection.test.ts @@ -167,6 +167,8 @@ function buildManager(opts?: ConstructorParameters + new MockWebSocket(url, options as Record) as never, ...opts, }); } @@ -232,6 +234,22 @@ describe("OpenAIWebSocketManager", () => { await connectPromise; }); + it("adds OpenClaw attribution headers on the native OpenAI websocket", async () => { + const manager = buildManager(); + const connectPromise = manager.connect("sk-test-key"); + + const sock = lastSocket(); + expect(sock.options).toMatchObject({ + headers: expect.objectContaining({ + originator: "openclaw", + "User-Agent": expect.stringMatching(/^openclaw\//), + }), + }); + + sock.simulateOpen(); + await connectPromise; + }); + it("resolves when the connection opens", async () => { const manager = buildManager(); const connectPromise = manager.connect("sk-test"); diff --git a/src/agents/openai-ws-connection.ts b/src/agents/openai-ws-connection.ts index 2d9c6ffe7e6..1765eb00172 100644 --- a/src/agents/openai-ws-connection.ts +++ b/src/agents/openai-ws-connection.ts @@ -15,6 +15,7 @@ import { EventEmitter } from "node:events"; import WebSocket from "ws"; +import { resolveProviderAttributionHeaders } from "./provider-attribution.js"; // ───────────────────────────────────────────────────────────────────────────── // WebSocket Event Types (Server → Client) @@ -251,6 +252,14 @@ const MAX_RETRIES = 5; /** Backoff delays in ms: 1s, 2s, 4s, 8s, 16s */ const BACKOFF_DELAYS_MS = [1000, 2000, 4000, 8000, 16000] as const; +function isOpenAIPublicWebSocketUrl(url: string): boolean { + try { + return new URL(url).hostname.toLowerCase() === "api.openai.com"; + } catch { + return url.toLowerCase().includes("api.openai.com"); + } +} + export interface OpenAIWebSocketManagerOptions { /** Override the default WebSocket URL (useful for 
testing) */ url?: string; @@ -258,6 +267,8 @@ export interface OpenAIWebSocketManagerOptions { maxRetries?: number; /** Custom backoff delays in ms (default: [1000, 2000, 4000, 8000, 16000]) */ backoffDelaysMs?: readonly number[]; + /** Custom socket factory for tests. */ + socketFactory?: (url: string, options: ConstructorParameters<typeof WebSocket>[1]) => WebSocket; } type InternalEvents = { @@ -297,12 +308,18 @@ export class OpenAIWebSocketManager extends EventEmitter { private readonly wsUrl: string; private readonly maxRetries: number; private readonly backoffDelaysMs: readonly number[]; + private readonly socketFactory: ( + url: string, + options: ConstructorParameters<typeof WebSocket>[1], + ) => WebSocket; constructor(options: OpenAIWebSocketManagerOptions = {}) { super(); this.wsUrl = options.url ?? OPENAI_WS_URL; this.maxRetries = options.maxRetries ?? MAX_RETRIES; this.backoffDelaysMs = options.backoffDelaysMs ?? BACKOFF_DELAYS_MS; + this.socketFactory = + options.socketFactory ?? ((url, socketOptions) => new WebSocket(url, socketOptions)); } // ─── Public API ──────────────────────────────────────────────────────────── @@ -382,10 +399,13 @@ export class OpenAIWebSocketManager extends EventEmitter { return; } - const socket = new WebSocket(this.wsUrl, { + const socket = this.socketFactory(this.wsUrl, { headers: { Authorization: `Bearer ${this.apiKey}`, "OpenAI-Beta": "responses-websocket=v1", + ...(isOpenAIPublicWebSocketUrl(this.wsUrl) ?
resolveProviderAttributionHeaders("openai") : undefined), }, }); diff --git a/src/agents/pi-embedded-runner/extra-params.openai.test.ts b/src/agents/pi-embedded-runner/extra-params.openai.test.ts new file mode 100644 index 00000000000..92e26c95ee0 --- /dev/null +++ b/src/agents/pi-embedded-runner/extra-params.openai.test.ts @@ -0,0 +1,110 @@ +import type { StreamFn } from "@mariozechner/pi-agent-core"; +import type { Context, Model } from "@mariozechner/pi-ai"; +import { createAssistantMessageEventStream } from "@mariozechner/pi-ai"; +import { afterEach, describe, expect, it } from "vitest"; +import { captureEnv } from "../../test-utils/env.js"; +import { applyExtraParamsToAgent } from "./extra-params.js"; + +type CapturedCall = { + headers?: Record<string, string>; +}; + +function applyAndCapture(params: { + provider: string; + modelId: string; + baseUrl?: string; + callerHeaders?: Record<string, string>; +}): CapturedCall { + const captured: CapturedCall = {}; + const baseStreamFn: StreamFn = (model, _context, options) => { + captured.headers = options?.headers; + options?.onPayload?.({}, model); + return createAssistantMessageEventStream(); + }; + const agent = { streamFn: baseStreamFn }; + + applyExtraParamsToAgent(agent, undefined, params.provider, params.modelId); + + const model = { + api: "openai-responses", + provider: params.provider, + id: params.modelId, + baseUrl: params.baseUrl, + } as Model<"openai-responses">; + const context: Context = { messages: [] }; + + void agent.streamFn?.(model, context, { headers: params.callerHeaders }); + + return captured; +} + +describe("extra-params: OpenAI attribution", () => { + const envSnapshot = captureEnv(["OPENCLAW_VERSION"]); + + afterEach(() => { + envSnapshot.restore(); + }); + + it("injects originator and release-based user agent for native OpenAI", () => { + process.env.OPENCLAW_VERSION = "2026.3.14"; + + const { headers } = applyAndCapture({ + provider: "openai", + modelId: "gpt-5.4", + baseUrl: "https://api.openai.com/v1", + }); +
expect(headers).toEqual({ + originator: "openclaw", + "User-Agent": "openclaw/2026.3.14", + }); + }); + + it("overrides caller-supplied OpenAI attribution headers", () => { + process.env.OPENCLAW_VERSION = "2026.3.14"; + + const { headers } = applyAndCapture({ + provider: "openai", + modelId: "gpt-5.4", + baseUrl: "https://api.openai.com/v1", + callerHeaders: { + originator: "spoofed", + "User-Agent": "spoofed/0.0.0", + "X-Custom": "1", + }, + }); + + expect(headers).toEqual({ + originator: "openclaw", + "User-Agent": "openclaw/2026.3.14", + "X-Custom": "1", + }); + }); + + it("does not inject attribution on non-native OpenAI-compatible base URLs", () => { + process.env.OPENCLAW_VERSION = "2026.3.14"; + + const { headers } = applyAndCapture({ + provider: "openai", + modelId: "gpt-5.4", + baseUrl: "https://proxy.example.com/v1", + }); + + expect(headers).toBeUndefined(); + }); + + it("injects attribution for ChatGPT-backed OpenAI Codex traffic", () => { + process.env.OPENCLAW_VERSION = "2026.3.14"; + + const { headers } = applyAndCapture({ + provider: "openai-codex", + modelId: "gpt-5.4", + baseUrl: "https://chatgpt.com/backend-api", + }); + + expect(headers).toEqual({ + originator: "openclaw", + "User-Agent": "openclaw/2026.3.14", + }); + }); +}); diff --git a/src/agents/pi-embedded-runner/extra-params.ts b/src/agents/pi-embedded-runner/extra-params.ts index 7a73280802c..e3aa8b1dbcc 100644 --- a/src/agents/pi-embedded-runner/extra-params.ts +++ b/src/agents/pi-embedded-runner/extra-params.ts @@ -26,6 +26,7 @@ import { shouldApplySiliconFlowThinkingOffCompat, } from "./moonshot-stream-wrappers.js"; import { + createOpenAIAttributionHeadersWrapper, createOpenAIDefaultTransportWrapper, createOpenAIFastModeWrapper, createOpenAIResponsesContextManagementWrapper, @@ -303,9 +304,12 @@ export function applyExtraParamsToAgent( }, }) ?? merged; - if (provider === "openai") { - // Default OpenAI Responses to WebSocket-first with transparent SSE fallback. 
- agent.streamFn = createOpenAIDefaultTransportWrapper(agent.streamFn); + if (provider === "openai" || provider === "openai-codex") { + if (provider === "openai") { + // Default OpenAI Responses to WebSocket-first with transparent SSE fallback. + agent.streamFn = createOpenAIDefaultTransportWrapper(agent.streamFn); + } + agent.streamFn = createOpenAIAttributionHeadersWrapper(agent.streamFn); } const wrappedStreamFn = createStreamFnWithExtraParams( agent.streamFn, diff --git a/src/agents/pi-embedded-runner/openai-stream-wrappers.ts b/src/agents/pi-embedded-runner/openai-stream-wrappers.ts index 8542f329cbe..4131a33f08d 100644 --- a/src/agents/pi-embedded-runner/openai-stream-wrappers.ts +++ b/src/agents/pi-embedded-runner/openai-stream-wrappers.ts @@ -1,6 +1,7 @@ import type { StreamFn } from "@mariozechner/pi-agent-core"; import type { SimpleStreamOptions } from "@mariozechner/pi-ai"; import { streamSimple } from "@mariozechner/pi-ai"; +import { resolveProviderAttributionHeaders } from "../provider-attribution.js"; import { log } from "./logger.js"; import { streamWithPayloadPatch } from "./stream-payload-utils.js"; @@ -42,6 +43,40 @@ function isOpenAIPublicApiBaseUrl(baseUrl: unknown): boolean { } } +function isOpenAICodexBaseUrl(baseUrl: unknown): boolean { + if (typeof baseUrl !== "string" || !baseUrl.trim()) { + return false; + } + + try { + return new URL(baseUrl).hostname.toLowerCase() === "chatgpt.com"; + } catch { + return baseUrl.toLowerCase().includes("chatgpt.com"); + } +} + +function shouldApplyOpenAIAttributionHeaders(model: { + api?: unknown; + provider?: unknown; + baseUrl?: unknown; +}): "openai" | "openai-codex" | undefined { + if ( + model.provider === "openai" && + (model.api === "openai-completions" || model.api === "openai-responses") && + isOpenAIPublicApiBaseUrl(model.baseUrl) + ) { + return "openai"; + } + if ( + model.provider === "openai-codex" && + (model.api === "openai-codex-responses" || model.api === "openai-responses") && + 
isOpenAICodexBaseUrl(model.baseUrl) + ) { + return "openai-codex"; + } + return undefined; +} + function shouldForceResponsesStore(model: { api?: unknown; provider?: unknown; @@ -357,3 +392,22 @@ export function createOpenAIDefaultTransportWrapper(baseStreamFn: StreamFn | und return underlying(model, context, mergedOptions); }; } + +export function createOpenAIAttributionHeadersWrapper( + baseStreamFn: StreamFn | undefined, +): StreamFn { + const underlying = baseStreamFn ?? streamSimple; + return (model, context, options) => { + const attributionProvider = shouldApplyOpenAIAttributionHeaders(model); + if (!attributionProvider) { + return underlying(model, context, options); + } + return underlying(model, context, { + ...options, + headers: { + ...options?.headers, + ...resolveProviderAttributionHeaders(attributionProvider), + }, + }); + }; +} diff --git a/src/agents/provider-attribution.test.ts b/src/agents/provider-attribution.test.ts index 693e165ba21..04c7d040b17 100644 --- a/src/agents/provider-attribution.test.ts +++ b/src/agents/provider-attribution.test.ts @@ -52,18 +52,44 @@ describe("provider attribution", () => { }); }); - it("tracks SDK-hook-only providers without enabling them", () => { + it("returns a hidden-spec OpenAI attribution policy", () => { expect(resolveProviderAttributionPolicy("openai", { OPENCLAW_VERSION: "2026.3.14" })).toEqual({ provider: "openai", - enabledByDefault: false, - verification: "vendor-sdk-hook-only", - hook: "default-headers", + enabledByDefault: true, + verification: "vendor-hidden-api-spec", + hook: "request-headers", reviewNote: - "OpenAI JS SDK exposes defaultHeaders, but public app attribution support is not yet verified.", + "OpenAI native traffic supports hidden originator/User-Agent attribution. 
Verified against the Codex wire contract.", product: "OpenClaw", version: "2026.3.14", + headers: { + originator: "openclaw", + "User-Agent": "openclaw/2026.3.14", + }, + }); + expect(resolveProviderAttributionHeaders("openai", { OPENCLAW_VERSION: "2026.3.14" })).toEqual({ + originator: "openclaw", + "User-Agent": "openclaw/2026.3.14", + }); + }); + + it("returns a hidden-spec OpenAI Codex attribution policy", () => { + expect( + resolveProviderAttributionPolicy("openai-codex", { OPENCLAW_VERSION: "2026.3.14" }), + ).toEqual({ + provider: "openai-codex", + enabledByDefault: true, + verification: "vendor-hidden-api-spec", + hook: "request-headers", + reviewNote: + "OpenAI Codex ChatGPT-backed traffic supports the same hidden originator/User-Agent attribution contract.", + product: "OpenClaw", + version: "2026.3.14", + headers: { + originator: "openclaw", + "User-Agent": "openclaw/2026.3.14", + }, }); - expect(resolveProviderAttributionHeaders("openai")).toBeUndefined(); }); it("lists the current attribution support matrix", () => { @@ -76,11 +102,12 @@ describe("provider attribution", () => { ]), ).toEqual([ ["openrouter", true, "vendor-documented", "request-headers"], + ["openai", true, "vendor-hidden-api-spec", "request-headers"], + ["openai-codex", true, "vendor-hidden-api-spec", "request-headers"], ["anthropic", false, "vendor-sdk-hook-only", "default-headers"], ["google", false, "vendor-sdk-hook-only", "user-agent-extra"], ["groq", false, "vendor-sdk-hook-only", "default-headers"], ["mistral", false, "vendor-sdk-hook-only", "custom-user-agent"], - ["openai", false, "vendor-sdk-hook-only", "default-headers"], ["together", false, "vendor-sdk-hook-only", "default-headers"], ]); }); diff --git a/src/agents/provider-attribution.ts b/src/agents/provider-attribution.ts index 52fe5c8d4c7..f1111a8e5bd 100644 --- a/src/agents/provider-attribution.ts +++ b/src/agents/provider-attribution.ts @@ -4,6 +4,7 @@ import { normalizeProviderId } from "./model-selection.js"; export 
type ProviderAttributionVerification = | "vendor-documented" + | "vendor-hidden-api-spec" | "vendor-sdk-hook-only" | "internal-runtime"; @@ -28,6 +29,7 @@ export type ProviderAttributionPolicy = { export type ProviderAttributionIdentity = Pick<ProviderAttributionPolicy, "product" | "version">; const OPENCLAW_ATTRIBUTION_PRODUCT = "OpenClaw"; +const OPENCLAW_ATTRIBUTION_ORIGINATOR = "openclaw"; export function resolveProviderAttributionIdentity( env: RuntimeVersionEnv = process.env as RuntimeVersionEnv, @@ -58,6 +60,44 @@ function buildOpenRouterAttributionPolicy( }; } +function buildOpenAIAttributionPolicy( + env: RuntimeVersionEnv = process.env as RuntimeVersionEnv, +): ProviderAttributionPolicy { + const identity = resolveProviderAttributionIdentity(env); + return { + provider: "openai", + enabledByDefault: true, + verification: "vendor-hidden-api-spec", + hook: "request-headers", + reviewNote: + "OpenAI native traffic supports hidden originator/User-Agent attribution. Verified against the Codex wire contract.", + ...identity, + headers: { + originator: OPENCLAW_ATTRIBUTION_ORIGINATOR, + "User-Agent": `${OPENCLAW_ATTRIBUTION_ORIGINATOR}/${identity.version}`, + }, + }; +} + +function buildOpenAICodexAttributionPolicy( + env: RuntimeVersionEnv = process.env as RuntimeVersionEnv, +): ProviderAttributionPolicy { + const identity = resolveProviderAttributionIdentity(env); + return { + provider: "openai-codex", + enabledByDefault: true, + verification: "vendor-hidden-api-spec", + hook: "request-headers", + reviewNote: + "OpenAI Codex ChatGPT-backed traffic supports the same hidden originator/User-Agent attribution contract.", + ...identity, + headers: { + originator: OPENCLAW_ATTRIBUTION_ORIGINATOR, + "User-Agent": `${OPENCLAW_ATTRIBUTION_ORIGINATOR}/${identity.version}`, + }, + }; +} + function buildSdkHookOnlyPolicy( provider: string, hook: ProviderAttributionHook, @@ -79,6 +119,8 @@ export function listProviderAttributionPolicies( ): ProviderAttributionPolicy[] { return [ buildOpenRouterAttributionPolicy(env), +
buildOpenAIAttributionPolicy(env), + buildOpenAICodexAttributionPolicy(env), buildSdkHookOnlyPolicy( "anthropic", "default-headers", @@ -103,12 +145,6 @@ export function listProviderAttributionPolicies( "Mistral JS SDK exposes a custom userAgent option, but app attribution is not yet verified.", env, ), - buildSdkHookOnlyPolicy( - "openai", - "default-headers", - "OpenAI JS SDK exposes defaultHeaders, but public app attribution support is not yet verified.", - env, - ), buildSdkHookOnlyPolicy( "together", "default-headers",