// openclaw/src/pairing/pairing-store.ts
import crypto from "node:crypto";
import fs from "node:fs";
import os from "node:os";
import path from "node:path";
import lockfile from "proper-lockfile";
import type { ChannelId, ChannelPairingAdapter } from "../channels/plugins/types.js";
import { getPairingAdapter } from "../channels/plugins/pairing.js";
import { resolveOAuthDir, resolveStateDir } from "../config/paths.js";
const PAIRING_CODE_LENGTH = 8;
const PAIRING_CODE_ALPHABET = "ABCDEFGHJKLMNPQRSTUVWXYZ23456789";
const PAIRING_PENDING_TTL_MS = 60 * 60 * 1000;
const PAIRING_PENDING_MAX = 3;
2026-01-07 05:06:04 +01:00
const PAIRING_STORE_LOCK_OPTIONS = {
retries: {
retries: 10,
factor: 2,
minTimeout: 100,
maxTimeout: 10_000,
randomize: true,
},
stale: 30_000,
} as const;
/** Channel identifier used to key per-channel pairing state on disk. */
export type PairingChannel = ChannelId;

/** A pending pairing request awaiting operator approval. */
export type PairingRequest = {
  /** Normalized peer identifier that asked to pair. */
  id: string;
  /** Human-friendly approval code shown to the operator. */
  code: string;
  /** ISO-8601 timestamp of when the request was first recorded. */
  createdAt: string;
  /** ISO-8601 timestamp of the most recent sighting of this peer. */
  lastSeenAt: string;
  /** Optional channel-specific metadata (string values only). */
  meta?: Record<string, string>;
};

/** On-disk shape of `<channel>-pairing.json`. */
type PairingStore = {
  version: 1;
  requests: PairingRequest[];
};

/** On-disk shape of `<channel>-allowFrom.json`. */
type AllowFromStore = {
  version: 1;
  allowFrom: string[];
};
/** Directory where the per-channel pairing/allowFrom JSON files live. */
function resolveCredentialsDir(env: NodeJS.ProcessEnv = process.env): string {
  return resolveOAuthDir(env, resolveStateDir(env, os.homedir));
}
/**
 * Sanitize a channel ID for use in filenames (prevents path traversal).
 * Lowercases, replaces path-hostile characters and ".." with "_",
 * and rejects inputs that collapse to nothing.
 */
function safeChannelKey(channel: PairingChannel): string {
  const lowered = String(channel).trim().toLowerCase();
  if (!lowered) {
    throw new Error("invalid pairing channel");
  }
  const sanitized = lowered.replace(/[\\/:*?"<>|]/g, "_").replace(/\.\./g, "_");
  if (!sanitized || sanitized === "_") {
    throw new Error("invalid pairing channel");
  }
  return sanitized;
}
/** Path of the pending-pairing store file for a channel. */
function resolvePairingPath(channel: PairingChannel, env: NodeJS.ProcessEnv = process.env): string {
  const dir = resolveCredentialsDir(env);
  return path.join(dir, `${safeChannelKey(channel)}-pairing.json`);
}
/** Path of the allowFrom store file for a channel. */
function resolveAllowFromPath(
  channel: PairingChannel,
  env: NodeJS.ProcessEnv = process.env,
): string {
  const dir = resolveCredentialsDir(env);
  return path.join(dir, `${safeChannelKey(channel)}-allowFrom.json`);
}
/** Parse JSON, returning null instead of throwing on malformed input. */
function safeParseJson<T>(raw: string): T | null {
  try {
    return JSON.parse(raw) as T;
  } catch {
    return null;
  }
}

/**
 * Read and parse a JSON file.
 *
 * @returns `value` — the parsed content, or `fallback` when the file is
 *   missing, unreadable, or contains invalid JSON; `exists` — whether the
 *   file could be read at all (a corrupt-but-present file reports `true`).
 */
async function readJsonFile<T>(
  filePath: string,
  fallback: T,
): Promise<{ value: T; exists: boolean }> {
  try {
    const raw = await fs.promises.readFile(filePath, "utf-8");
    const parsed = safeParseJson<T>(raw);
    // Invalid JSON still counts as "exists" — the file is there, just bad.
    return { value: parsed ?? fallback, exists: true };
  } catch {
    // ENOENT and any other read failure behave identically: use fallback.
    // (The original special-cased ENOENT but returned the same value in
    // both branches — the dead conditional is collapsed here.)
    return { value: fallback, exists: false };
  }
}
async function writeJsonFile(filePath: string, value: unknown): Promise<void> {
2026-01-07 05:06:04 +01:00
const dir = path.dirname(filePath);
await fs.promises.mkdir(dir, { recursive: true, mode: 0o700 });
const tmp = path.join(dir, `${path.basename(filePath)}.${crypto.randomUUID()}.tmp`);
2026-01-07 05:06:04 +01:00
await fs.promises.writeFile(tmp, `${JSON.stringify(value, null, 2)}\n`, {
encoding: "utf-8",
});
await fs.promises.chmod(tmp, 0o600);
await fs.promises.rename(tmp, filePath);
}
/** Seed `filePath` with `fallback` content if it does not already exist. */
async function ensureJsonFile(filePath: string, fallback: unknown) {
  const readable = await fs.promises.access(filePath).then(
    () => true,
    () => false,
  );
  if (!readable) {
    await writeJsonFile(filePath, fallback);
  }
}
/**
 * Run `fn` while holding an exclusive proper-lockfile lock on `filePath`.
 * The file is seeded from `fallback` first so the lockfile has a target;
 * unlock failures are deliberately ignored (stale locks expire on their own).
 */
async function withFileLock<T>(
  filePath: string,
  fallback: unknown,
  fn: () => Promise<T>,
): Promise<T> {
  await ensureJsonFile(filePath, fallback);
  const release = await lockfile.lock(filePath, PAIRING_STORE_LOCK_OPTIONS);
  try {
    return await fn();
  } finally {
    try {
      await release();
    } catch {
      // ignore unlock errors
    }
  }
}
function parseTimestamp(value: string | undefined): number | null {
if (!value) {
return null;
}
2026-01-07 05:06:04 +01:00
const parsed = Date.parse(value);
if (!Number.isFinite(parsed)) {
return null;
}
2026-01-07 05:06:04 +01:00
return parsed;
}
function isExpired(entry: PairingRequest, nowMs: number): boolean {
const createdAt = parseTimestamp(entry.createdAt);
if (!createdAt) {
return true;
}
2026-01-07 05:06:04 +01:00
return nowMs - createdAt > PAIRING_PENDING_TTL_MS;
}
/** Drop expired requests, reporting whether anything was removed. */
function pruneExpiredRequests(reqs: PairingRequest[], nowMs: number) {
  const requests = reqs.filter((req) => !isExpired(req, nowMs));
  return { requests, removed: requests.length !== reqs.length };
}
/** Best-effort activity timestamp: lastSeenAt, falling back to createdAt, then 0. */
function resolveLastSeenAt(entry: PairingRequest): number {
  const seen = parseTimestamp(entry.lastSeenAt);
  if (seen != null) {
    return seen;
  }
  return parseTimestamp(entry.createdAt) ?? 0;
}
/** Keep only the `maxPending` most-recently-seen requests (non-positive = unlimited). */
function pruneExcessRequests(reqs: PairingRequest[], maxPending: number) {
  if (maxPending <= 0 || reqs.length <= maxPending) {
    return { requests: reqs, removed: false };
  }
  // Sort a copy oldest-first, then keep the newest `maxPending` entries.
  const byAge = [...reqs].sort((a, b) => resolveLastSeenAt(a) - resolveLastSeenAt(b));
  return { requests: byAge.slice(-maxPending), removed: true };
}
function randomCode(): string {
// Human-friendly: 8 chars, upper, no ambiguous chars (0O1I).
let out = "";
2026-01-07 05:06:04 +01:00
for (let i = 0; i < PAIRING_CODE_LENGTH; i++) {
const idx = crypto.randomInt(0, PAIRING_CODE_ALPHABET.length);
out += PAIRING_CODE_ALPHABET[idx];
}
return out;
}
function generateUniqueCode(existing: Set<string>): string {
for (let attempt = 0; attempt < 500; attempt += 1) {
const code = randomCode();
if (!existing.has(code)) {
return code;
}
2026-01-07 05:06:04 +01:00
}
throw new Error("failed to generate unique pairing code");
}
/** Canonicalize a peer id to a trimmed string. */
function normalizeId(value: string | number): string {
  return `${value}`.trim();
}
function normalizeAllowEntry(channel: PairingChannel, entry: string): string {
const trimmed = entry.trim();
if (!trimmed) {
return "";
}
if (trimmed === "*") {
return "";
}
const adapter = getPairingAdapter(channel);
const normalized = adapter?.normalizeAllowEntry ? adapter.normalizeAllowEntry(trimmed) : trimmed;
Move provider to a plugin-architecture (#661) * refactor: introduce provider plugin registry * refactor: move provider CLI to plugins * docs: add provider plugin implementation notes * refactor: shift provider runtime logic into plugins * refactor: add plugin defaults and summaries * docs: update provider plugin notes * feat(commands): add /commands slash list * Auto-reply: tidy help message * Auto-reply: fix status command lint * Tests: align google shared expectations * Auto-reply: tidy help message * Auto-reply: fix status command lint * refactor: move provider routing into plugins * test: align agent routing expectations * docs: update provider plugin notes * refactor: route replies via provider plugins * docs: note route-reply plugin hooks * refactor: extend provider plugin contract * refactor: derive provider status from plugins * refactor: unify gateway provider control * refactor: use plugin metadata in auto-reply * fix: parenthesize cron target selection * refactor: derive gateway methods from plugins * refactor: generalize provider logout * refactor: route provider logout through plugins * refactor: move WhatsApp web login methods into plugin * refactor: generalize provider log prefixes * refactor: centralize default chat provider * refactor: derive provider lists from registry * refactor: move provider reload noops into plugins * refactor: resolve web login provider via alias * refactor: derive CLI provider options from plugins * refactor: derive prompt provider list from plugins * style: apply biome lint fixes * fix: resolve provider routing edge cases * docs: update provider plugin refactor notes * fix(gateway): harden agent provider routing * refactor: move provider routing into plugins * refactor: move provider CLI to plugins * refactor: derive provider lists from registry * fix: restore slash command parsing * refactor: align provider ids for schema * refactor: unify outbound target resolution * fix: keep outbound labels stable * feat: add msteams 
to cron surfaces * fix: clean up lint build issues * refactor: localize chat provider alias normalization * refactor: drive gateway provider lists from plugins * docs: update provider plugin notes * style: format message-provider * fix: avoid provider registry init cycles * style: sort message-provider imports * fix: relax provider alias map typing * refactor: move provider routing into plugins * refactor: add plugin pairing/config adapters * refactor: route pairing and provider removal via plugins * refactor: align auto-reply provider typing * test: stabilize telegram media mocks * docs: update provider plugin refactor notes * refactor: pluginize outbound targets * refactor: pluginize provider selection * refactor: generalize text chunk limits * docs: update provider plugin notes * refactor: generalize group session/config * fix: normalize provider id for room detection * fix: avoid provider init in system prompt * style: formatting cleanup * refactor: normalize agent delivery targets * test: update outbound delivery labels * chore: fix lint regressions * refactor: extend provider plugin adapters * refactor: move elevated/block streaming defaults to plugins * refactor: defer outbound send deps to plugins * docs: note plugin-driven streaming/elevated defaults * refactor: centralize webchat provider constant * refactor: add provider setup adapters * refactor: delegate provider add config to plugins * docs: document plugin-driven provider add * refactor: add plugin state/binding metadata * refactor: build agent provider status from plugins * docs: note plugin-driven agent bindings * refactor: centralize internal provider constant usage * fix: normalize WhatsApp targets for groups and E.164 (#631) (thanks @imfing) * refactor: centralize default chat provider * refactor: centralize WhatsApp target normalization * refactor: move provider routing into plugins * refactor: normalize agent delivery targets * chore: fix lint regressions * fix: normalize WhatsApp targets for 
groups and E.164 (#631) (thanks @imfing) * feat: expand provider plugin adapters * refactor: route auto-reply via provider plugins * fix: align WhatsApp target normalization * fix: normalize WhatsApp targets for groups and E.164 (#631) (thanks @imfing) * refactor: centralize WhatsApp target normalization * feat: add /config chat config updates * docs: add /config get alias * feat(commands): add /commands slash list * refactor: centralize default chat provider * style: apply biome lint fixes * chore: fix lint regressions * fix: clean up whatsapp allowlist typing * style: format config command helpers * refactor: pluginize tool threading context * refactor: normalize session announce targets * docs: note new plugin threading and announce hooks * refactor: pluginize message actions * docs: update provider plugin actions notes * fix: align provider action adapters * refactor: centralize webchat checks * style: format message provider helpers * refactor: move provider onboarding into adapters * docs: note onboarding provider adapters * feat: add msteams onboarding adapter * style: organize onboarding imports * fix: normalize msteams allowFrom types * feat: add plugin text chunk limits * refactor: use plugin chunk limit fallbacks * feat: add provider mention stripping hooks * style: organize provider plugin type imports * refactor: generalize health snapshots * refactor: update macOS health snapshot handling * docs: refresh health snapshot notes * style: format health snapshot updates * refactor: drive security warnings via plugins * docs: note provider security adapter * style: format provider security adapters * refactor: centralize provider account defaults * refactor: type gateway client identity constants * chore: regen gateway protocol swift * fix: degrade health on failed provider probe * refactor: centralize pairing approve hint * docs: add plugin CLI command references * refactor: route auth and tool sends through plugins * docs: expand provider plugin hooks * 
refactor: document provider docking touchpoints * refactor: normalize internal provider defaults * refactor: streamline outbound delivery wiring * refactor: make provider onboarding plugin-owned * refactor: support provider-owned agent tools * refactor: move telegram draft chunking into telegram module * refactor: infer provider tool sends via extractToolSend * fix: repair plugin onboarding imports * refactor: de-dup outbound target normalization * style: tidy plugin and agent imports * refactor: data-drive provider selection line * fix: satisfy lint after provider plugin rebase * test: deflake gateway-cli coverage * style: format gateway-cli coverage test * refactor(provider-plugins): simplify provider ids * test(pairing-cli): avoid provider-specific ternary * style(macos): swiftformat HealthStore * refactor(sandbox): derive provider tool denylist * fix(sandbox): avoid plugin init in defaults * refactor(provider-plugins): centralize provider aliases * style(test): satisfy biome * refactor(protocol): v3 providers.status maps * refactor(ui): adapt to protocol v3 * refactor(macos): adapt to protocol v3 * test: update providers.status v3 fixtures * refactor(gateway): map provider runtime snapshot * test(gateway): update reload runtime snapshot * refactor(whatsapp): normalize heartbeat provider id * docs(refactor): update provider plugin notes * style: satisfy biome after rebase * fix: describe sandboxed elevated in prompt * feat(gateway): add agent image attachments + live probe * refactor: derive CLI provider options from plugins * fix(gateway): harden agent provider routing * fix(gateway): harden agent provider routing * refactor: align provider ids for schema * fix(protocol): keep agent provider string * fix(gateway): harden agent provider routing * fix(protocol): keep agent provider string * refactor: normalize agent delivery targets * refactor: support provider-owned agent tools * refactor(config): provider-keyed elevated allowFrom * style: satisfy biome * 
fix(gateway): appease provider narrowing * style: satisfy biome * refactor(reply): move group intro hints into plugin * fix(reply): avoid plugin registry init cycle * refactor(providers): add lightweight provider dock * refactor(gateway): use typed client id in connect * refactor(providers): document docks and avoid init cycles * refactor(providers): make media limit helper generic * fix(providers): break plugin registry import cycles * style: satisfy biome * refactor(status-all): build providers table from plugins * refactor(gateway): delegate web login to provider plugin * refactor(provider): drop web alias * refactor(provider): lazy-load monitors * style: satisfy lint/format * style: format status-all providers table * style: swiftformat gateway discovery model * test: make reload plan plugin-driven * fix: avoid token stringification in status-all * refactor: make provider IDs explicit in status * feat: warn on signal/imessage provider runtime errors * test: cover gateway provider runtime warnings in status * fix: add runtime kind to provider status issues * test: cover health degradation on probe failure * fix: keep routeReply lightweight * style: organize routeReply imports * refactor(web): extract auth-store helpers * refactor(whatsapp): lazy login imports * refactor(outbound): route replies via plugin outbound * docs: update provider plugin notes * style: format provider status issues * fix: make sandbox scope warning wrap-safe * refactor: load outbound adapters from provider plugins * docs: update provider plugin outbound notes * style(macos): fix swiftformat lint * docs: changelog for provider plugins * fix(macos): satisfy swiftformat * fix(macos): open settings via menu action * style: format after rebase * fix(macos): open Settings via menu action --------- Co-authored-by: LK <luke@kyohere.com> Co-authored-by: Luke K (pr-0f3t) <2609441+lc0rp@users.noreply.github.com> Co-authored-by: Xin <xin@imfing.com>
2026-01-11 11:45:25 +00:00
return String(normalized).trim();
}
/**
 * Read the persisted allowFrom list for a channel, normalizing every entry
 * and dropping blanks. A missing or corrupt file yields an empty list.
 */
export async function readChannelAllowFromStore(
  channel: PairingChannel,
  env: NodeJS.ProcessEnv = process.env,
): Promise<string[]> {
  const filePath = resolveAllowFromPath(channel, env);
  const fallback: AllowFromStore = { version: 1, allowFrom: [] };
  const { value } = await readJsonFile<AllowFromStore>(filePath, fallback);
  const entries = Array.isArray(value.allowFrom) ? value.allowFrom : [];
  const normalized: string[] = [];
  for (const entry of entries) {
    const clean = normalizeAllowEntry(channel, String(entry));
    if (clean) {
      normalized.push(clean);
    }
  }
  return normalized;
}
export async function addChannelAllowFromStoreEntry(params: {
channel: PairingChannel;
entry: string | number;
env?: NodeJS.ProcessEnv;
}): Promise<{ changed: boolean; allowFrom: string[] }> {
const env = params.env ?? process.env;
const filePath = resolveAllowFromPath(params.channel, env);
2026-01-07 05:06:04 +01:00
return await withFileLock(
filePath,
{ version: 1, allowFrom: [] } satisfies AllowFromStore,
async () => {
const { value } = await readJsonFile<AllowFromStore>(filePath, {
version: 1,
allowFrom: [],
});
const current = (Array.isArray(value.allowFrom) ? value.allowFrom : [])
.map((v) => normalizeAllowEntry(params.channel, String(v)))
2026-01-07 05:06:04 +01:00
.filter(Boolean);
const normalized = normalizeAllowEntry(params.channel, normalizeId(params.entry));
if (!normalized) {
return { changed: false, allowFrom: current };
}
if (current.includes(normalized)) {
return { changed: false, allowFrom: current };
}
2026-01-07 05:06:04 +01:00
const next = [...current, normalized];
await writeJsonFile(filePath, {
2026-01-21 05:31:07 +00:00
version: 1,
allowFrom: next,
} satisfies AllowFromStore);
return { changed: true, allowFrom: next };
},
);
}
export async function removeChannelAllowFromStoreEntry(params: {
channel: PairingChannel;
entry: string | number;
env?: NodeJS.ProcessEnv;
}): Promise<{ changed: boolean; allowFrom: string[] }> {
const env = params.env ?? process.env;
const filePath = resolveAllowFromPath(params.channel, env);
return await withFileLock(
filePath,
{ version: 1, allowFrom: [] } satisfies AllowFromStore,
async () => {
const { value } = await readJsonFile<AllowFromStore>(filePath, {
version: 1,
allowFrom: [],
});
const current = (Array.isArray(value.allowFrom) ? value.allowFrom : [])
.map((v) => normalizeAllowEntry(params.channel, String(v)))
.filter(Boolean);
const normalized = normalizeAllowEntry(params.channel, normalizeId(params.entry));
if (!normalized) {
return { changed: false, allowFrom: current };
}
2026-01-21 05:31:07 +00:00
const next = current.filter((entry) => entry !== normalized);
if (next.length === current.length) {
return { changed: false, allowFrom: current };
}
2026-01-21 05:31:07 +00:00
await writeJsonFile(filePath, {
2026-01-07 05:06:04 +01:00
version: 1,
allowFrom: next,
} satisfies AllowFromStore);
return { changed: true, allowFrom: next };
},
);
}
export async function listChannelPairingRequests(
channel: PairingChannel,
env: NodeJS.ProcessEnv = process.env,
): Promise<PairingRequest[]> {
const filePath = resolvePairingPath(channel, env);
2026-01-07 05:06:04 +01:00
return await withFileLock(
filePath,
{ version: 1, requests: [] } satisfies PairingStore,
async () => {
const { value } = await readJsonFile<PairingStore>(filePath, {
version: 1,
requests: [],
});
const reqs = Array.isArray(value.requests) ? value.requests : [];
const nowMs = Date.now();
const { requests: prunedExpired, removed: expiredRemoved } = pruneExpiredRequests(
reqs,
nowMs,
);
const { requests: pruned, removed: cappedRemoved } = pruneExcessRequests(
prunedExpired,
PAIRING_PENDING_MAX,
);
if (expiredRemoved || cappedRemoved) {
2026-01-07 05:06:04 +01:00
await writeJsonFile(filePath, {
version: 1,
requests: pruned,
} satisfies PairingStore);
}
return pruned
.filter(
(r) =>
r &&
typeof r.id === "string" &&
typeof r.code === "string" &&
typeof r.createdAt === "string",
)
.slice()
.toSorted((a, b) => a.createdAt.localeCompare(b.createdAt));
2026-01-07 05:06:04 +01:00
},
);
}
export async function upsertChannelPairingRequest(params: {
channel: PairingChannel;
id: string | number;
meta?: Record<string, string | undefined | null>;
env?: NodeJS.ProcessEnv;
/** Extension channels can pass their adapter directly to bypass registry lookup. */
pairingAdapter?: ChannelPairingAdapter;
}): Promise<{ code: string; created: boolean }> {
const env = params.env ?? process.env;
const filePath = resolvePairingPath(params.channel, env);
2026-01-07 05:06:04 +01:00
return await withFileLock(
filePath,
{ version: 1, requests: [] } satisfies PairingStore,
async () => {
const { value } = await readJsonFile<PairingStore>(filePath, {
version: 1,
requests: [],
});
const now = new Date().toISOString();
const nowMs = Date.now();
const id = normalizeId(params.id);
const meta =
params.meta && typeof params.meta === "object"
? Object.fromEntries(
Object.entries(params.meta)
.map(([k, v]) => [k, String(v ?? "").trim()] as const)
.filter(([_, v]) => Boolean(v)),
)
: undefined;
2026-01-07 05:06:04 +01:00
let reqs = Array.isArray(value.requests) ? value.requests : [];
const { requests: prunedExpired, removed: expiredRemoved } = pruneExpiredRequests(
reqs,
nowMs,
);
reqs = prunedExpired;
2026-01-07 05:06:04 +01:00
const existingIdx = reqs.findIndex((r) => r.id === id);
const existingCodes = new Set(
reqs.map((req) =>
String(req.code ?? "")
.trim()
.toUpperCase(),
),
);
2026-01-07 05:06:04 +01:00
if (existingIdx >= 0) {
const existing = reqs[existingIdx];
const existingCode =
existing && typeof existing.code === "string" ? existing.code.trim() : "";
2026-01-07 05:06:04 +01:00
const code = existingCode || generateUniqueCode(existingCodes);
const next: PairingRequest = {
id,
code,
createdAt: existing?.createdAt ?? now,
lastSeenAt: now,
meta: meta ?? existing?.meta,
};
reqs[existingIdx] = next;
const { requests: capped } = pruneExcessRequests(reqs, PAIRING_PENDING_MAX);
2026-01-07 05:06:04 +01:00
await writeJsonFile(filePath, {
version: 1,
requests: capped,
2026-01-07 05:06:04 +01:00
} satisfies PairingStore);
return { code, created: false };
}
const { requests: capped, removed: cappedRemoved } = pruneExcessRequests(
reqs,
PAIRING_PENDING_MAX,
);
reqs = capped;
if (PAIRING_PENDING_MAX > 0 && reqs.length >= PAIRING_PENDING_MAX) {
if (expiredRemoved || cappedRemoved) {
await writeJsonFile(filePath, {
version: 1,
requests: reqs,
} satisfies PairingStore);
}
return { code: "", created: false };
}
2026-01-07 05:06:04 +01:00
const code = generateUniqueCode(existingCodes);
const next: PairingRequest = {
id,
code,
createdAt: now,
lastSeenAt: now,
...(meta ? { meta } : {}),
};
await writeJsonFile(filePath, {
version: 1,
requests: [...reqs, next],
} satisfies PairingStore);
return { code, created: true };
},
);
}
export async function approveChannelPairingCode(params: {
channel: PairingChannel;
code: string;
env?: NodeJS.ProcessEnv;
}): Promise<{ id: string; entry?: PairingRequest } | null> {
const env = params.env ?? process.env;
const code = params.code.trim().toUpperCase();
if (!code) {
return null;
}
const filePath = resolvePairingPath(params.channel, env);
2026-01-07 05:06:04 +01:00
return await withFileLock(
filePath,
{ version: 1, requests: [] } satisfies PairingStore,
async () => {
const { value } = await readJsonFile<PairingStore>(filePath, {
version: 1,
requests: [],
});
const reqs = Array.isArray(value.requests) ? value.requests : [];
const nowMs = Date.now();
const { requests: pruned, removed } = pruneExpiredRequests(reqs, nowMs);
const idx = pruned.findIndex((r) => String(r.code ?? "").toUpperCase() === code);
2026-01-07 05:06:04 +01:00
if (idx < 0) {
if (removed) {
await writeJsonFile(filePath, {
version: 1,
requests: pruned,
} satisfies PairingStore);
}
return null;
}
const entry = pruned[idx];
if (!entry) {
return null;
}
2026-01-07 05:06:04 +01:00
pruned.splice(idx, 1);
await writeJsonFile(filePath, {
version: 1,
requests: pruned,
} satisfies PairingStore);
await addChannelAllowFromStoreEntry({
channel: params.channel,
2026-01-07 05:06:04 +01:00
entry: entry.id,
env,
});
return { id: entry.id, entry };
},
);
}