2026-01-04 04:05:18 +01:00
|
|
|
import { randomUUID } from "node:crypto";
|
2026-01-18 22:49:55 +00:00
|
|
|
import { listAgentIds } from "../../agents/agent-scope.js";
|
2026-02-14 19:18:15 -08:00
|
|
|
import { BARE_SESSION_RESET_PROMPT } from "../../auto-reply/reply/session-reset-prompt.js";
|
2026-02-01 10:03:47 +09:00
|
|
|
import { agentCommand } from "../../commands/agent.js";
|
2026-01-04 04:16:38 +01:00
|
|
|
import { loadConfig } from "../../config/config.js";
|
2026-01-06 18:25:37 +00:00
|
|
|
import {
|
|
|
|
|
resolveAgentIdFromSessionKey,
|
2026-01-18 22:49:55 +00:00
|
|
|
resolveExplicitAgentSessionKey,
|
2026-01-06 18:25:37 +00:00
|
|
|
resolveAgentMainSessionKey,
|
|
|
|
|
type SessionEntry,
|
2026-01-15 23:06:42 +00:00
|
|
|
updateSessionStore,
|
2026-01-06 18:25:37 +00:00
|
|
|
} from "../../config/sessions.js";
|
2026-01-04 04:05:18 +01:00
|
|
|
import { registerAgentRunContext } from "../../infra/agent-events.js";
|
2026-01-17 06:54:12 +00:00
|
|
|
import {
|
|
|
|
|
resolveAgentDeliveryPlan,
|
|
|
|
|
resolveAgentOutboundTarget,
|
|
|
|
|
} from "../../infra/outbound/agent-delivery.js";
|
2026-02-22 11:20:33 +01:00
|
|
|
import { resolveMessageChannelSelection } from "../../infra/outbound/channel-selection.js";
|
2026-02-15 12:46:14 -03:00
|
|
|
import { classifySessionKeyShape, normalizeAgentId } from "../../routing/session-key.js";
|
2026-01-04 04:05:18 +01:00
|
|
|
import { defaultRuntime } from "../../runtime.js";
|
2026-02-13 02:01:53 +01:00
|
|
|
import { normalizeInputProvenance, type InputProvenance } from "../../sessions/input-provenance.js";
|
2026-01-04 04:05:18 +01:00
|
|
|
import { resolveSendPolicy } from "../../sessions/send-policy.js";
|
2026-01-17 06:01:30 +00:00
|
|
|
import { normalizeSessionDeliveryFields } from "../../utils/delivery-context.js";
|
2026-01-09 23:00:23 +01:00
|
|
|
import {
|
2026-01-13 06:16:43 +00:00
|
|
|
INTERNAL_MESSAGE_CHANNEL,
|
|
|
|
|
isDeliverableMessageChannel,
|
|
|
|
|
isGatewayMessageChannel,
|
|
|
|
|
normalizeMessageChannel,
|
|
|
|
|
} from "../../utils/message-channel.js";
|
2026-02-01 10:03:47 +09:00
|
|
|
import { resolveAssistantIdentity } from "../assistant-identity.js";
|
2026-01-10 20:34:34 +00:00
|
|
|
import { parseMessageWithAttachments } from "../chat-attachments.js";
|
2026-02-01 10:03:47 +09:00
|
|
|
import { resolveAssistantAvatarUrl } from "../control-ui-shared.js";
|
2026-02-04 17:12:16 -05:00
|
|
|
import { GATEWAY_CLIENT_CAPS, hasGatewayClientCap } from "../protocol/client-info.js";
|
2026-01-04 04:05:18 +01:00
|
|
|
import {
|
|
|
|
|
ErrorCodes,
|
|
|
|
|
errorShape,
|
|
|
|
|
formatValidationErrors,
|
2026-01-22 06:47:37 +00:00
|
|
|
validateAgentIdentityParams,
|
2026-01-04 04:05:18 +01:00
|
|
|
validateAgentParams,
|
|
|
|
|
validateAgentWaitParams,
|
|
|
|
|
} from "../protocol/index.js";
|
2026-02-13 16:42:24 -03:00
|
|
|
import {
|
|
|
|
|
canonicalizeSpawnedByForAgent,
|
|
|
|
|
loadSessionEntry,
|
|
|
|
|
pruneLegacyStoreKeys,
|
|
|
|
|
resolveGatewaySessionStoreTarget,
|
|
|
|
|
} from "../session-utils.js";
|
2026-01-04 04:16:38 +01:00
|
|
|
import { formatForLog } from "../ws-log.js";
|
2026-01-04 04:05:18 +01:00
|
|
|
import { waitForAgentJob } from "./agent-job.js";
|
2026-02-01 10:03:47 +09:00
|
|
|
import { injectTimestamp, timestampOptsFromConfig } from "./agent-timestamp.js";
|
2026-02-15 13:30:37 +00:00
|
|
|
import { normalizeRpcAttachmentsToChatAttachments } from "./attachment-normalize.js";
|
2026-02-14 19:18:15 -08:00
|
|
|
import { sessionsHandlers } from "./sessions.js";
|
2026-02-18 01:34:35 +00:00
|
|
|
import type { GatewayRequestHandlerOptions, GatewayRequestHandlers } from "./types.js";
|
2026-02-14 19:18:15 -08:00
|
|
|
|
|
|
|
|
// Matches a leading "/new" or "/reset" command (case-insensitive).
// Group 1: the command word ("new" | "reset").
// Group 2: optional trailing text after whitespace (may span newlines via [\s\S]).
const RESET_COMMAND_RE = /^\/(new|reset)(?:\s+([\s\S]*))?$/i;
|
|
|
|
|
|
|
|
|
|
function isGatewayErrorShape(value: unknown): value is { code: string; message: string } {
|
|
|
|
|
if (!value || typeof value !== "object") {
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
const candidate = value as { code?: unknown; message?: unknown };
|
|
|
|
|
return typeof candidate.code === "string" && typeof candidate.message === "string";
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
 * Invoke the `sessions.reset` handler on behalf of an in-band `/new` or
 * `/reset` command received through the `agent` method, and adapt its
 * callback-style `respond(ok, payload, error)` API into a single awaited
 * result.
 *
 * Resolution rules (the promise never rejects):
 * - `respond(false, …)` -> `{ ok: false, error }`, reusing the handler's
 *   error when it matches the gateway error shape, otherwise wrapping it
 *   in an UNAVAILABLE `errorShape`.
 * - `respond(true, payload)` -> `{ ok: true, key, sessionId? }`, where `key`
 *   falls back to the requested key if the payload omits one, and
 *   `sessionId` is taken from `payload.entry.sessionId` when present.
 * - Handler completes without ever calling `respond` -> UNAVAILABLE error.
 * - Handler throws/rejects -> UNAVAILABLE error with the stringified cause.
 *
 * A `settled` latch makes the first outcome win; later `respond` calls or
 * the completion watchdog are ignored.
 */
async function runSessionResetFromAgent(params: {
  // Session key whose conversation should be reset.
  key: string;
  // Which command triggered the reset ("new" vs "reset"); forwarded verbatim.
  reason: "new" | "reset";
  // Caller's idempotency key; namespaced with ":reset" for the synthetic request id.
  idempotencyKey: string;
  context: GatewayRequestHandlerOptions["context"];
  client: GatewayRequestHandlerOptions["client"];
  isWebchatConnect: GatewayRequestHandlerOptions["isWebchatConnect"];
}): Promise<
  | { ok: true; key: string; sessionId?: string }
  | { ok: false; error: ReturnType<typeof errorShape> }
> {
  return await new Promise((resolve) => {
    // First settle() wins; guards against respond being invoked more than
    // once and against the post-completion fallback racing a late respond.
    let settled = false;
    const settle = (
      result:
        | { ok: true; key: string; sessionId?: string }
        | { ok: false; error: ReturnType<typeof errorShape> },
    ) => {
      if (settled) {
        return;
      }
      settled = true;
      resolve(result);
    };

    // Bridge: translate the handler's respond() callback into a settle().
    const respond: GatewayRequestHandlerOptions["respond"] = (ok, payload, error) => {
      if (!ok) {
        settle({
          ok: false,
          error: isGatewayErrorShape(error)
            ? error
            : errorShape(ErrorCodes.UNAVAILABLE, String(error ?? "sessions.reset failed")),
        });
        return;
      }
      // Payload shape is not statically known here; probe the two fields we
      // need defensively.
      const payloadObj = payload as
        | {
            key?: unknown;
            entry?: {
              sessionId?: unknown;
            };
          }
        | undefined;
      // Prefer the (possibly canonicalized) key echoed back by the handler.
      const key = typeof payloadObj?.key === "string" ? payloadObj.key : params.key;
      const sessionId =
        payloadObj?.entry && typeof payloadObj.entry.sessionId === "string"
          ? payloadObj.entry.sessionId
          : undefined;
      settle({ ok: true, key, sessionId });
    };

    // Kick off the real handler with a synthetic request frame. Note: the
    // handler is expected to report its outcome via respond(), not via its
    // return value.
    const resetResult = sessionsHandlers["sessions.reset"]({
      req: {
        type: "req",
        id: `${params.idempotencyKey}:reset`,
        method: "sessions.reset",
      },
      params: {
        key: params.key,
        reason: params.reason,
      },
      context: params.context,
      client: params.client,
      isWebchatConnect: params.isWebchatConnect,
      respond,
    });

    // Fire-and-forget watchdog: await the handler's promise so that
    // (a) a handler that finishes without responding still settles with an
    // error instead of hanging the caller, and (b) a thrown/rejected error
    // is surfaced as an UNAVAILABLE result rather than an unhandled rejection.
    void (async () => {
      try {
        await resetResult;
        if (!settled) {
          settle({
            ok: false,
            error: errorShape(
              ErrorCodes.UNAVAILABLE,
              "sessions.reset completed without returning a response",
            ),
          });
        }
      } catch (err: unknown) {
        settle({
          ok: false,
          error: errorShape(ErrorCodes.UNAVAILABLE, String(err)),
        });
      }
    })();
  });
}
|
2026-01-04 04:05:18 +01:00
|
|
|
|
|
|
|
|
export const agentHandlers: GatewayRequestHandlers = {
|
2026-02-14 19:18:15 -08:00
|
|
|
agent: async ({ params, respond, context, client, isWebchatConnect }) => {
|
2026-01-31 16:03:28 +09:00
|
|
|
const p = params;
|
2026-01-04 04:05:18 +01:00
|
|
|
if (!validateAgentParams(p)) {
|
|
|
|
|
respond(
|
|
|
|
|
false,
|
|
|
|
|
undefined,
|
|
|
|
|
errorShape(
|
|
|
|
|
ErrorCodes.INVALID_REQUEST,
|
|
|
|
|
`invalid agent params: ${formatValidationErrors(validateAgentParams.errors)}`,
|
|
|
|
|
),
|
|
|
|
|
);
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
const request = p as {
|
|
|
|
|
message: string;
|
2026-01-18 22:49:55 +00:00
|
|
|
agentId?: string;
|
2026-01-04 04:05:18 +01:00
|
|
|
to?: string;
|
2026-01-18 22:49:55 +00:00
|
|
|
replyTo?: string;
|
2026-01-04 04:05:18 +01:00
|
|
|
sessionId?: string;
|
|
|
|
|
sessionKey?: string;
|
|
|
|
|
thinking?: string;
|
|
|
|
|
deliver?: boolean;
|
2026-01-10 20:34:34 +00:00
|
|
|
attachments?: Array<{
|
|
|
|
|
type?: string;
|
|
|
|
|
mimeType?: string;
|
|
|
|
|
fileName?: string;
|
|
|
|
|
content?: unknown;
|
|
|
|
|
}>;
|
2026-01-13 06:16:43 +00:00
|
|
|
channel?: string;
|
2026-01-18 22:49:55 +00:00
|
|
|
replyChannel?: string;
|
2026-01-17 02:09:32 +00:00
|
|
|
accountId?: string;
|
2026-01-18 22:49:55 +00:00
|
|
|
replyAccountId?: string;
|
2026-01-20 17:22:07 +00:00
|
|
|
threadId?: string;
|
2026-01-24 05:49:23 +00:00
|
|
|
groupId?: string;
|
|
|
|
|
groupChannel?: string;
|
|
|
|
|
groupSpace?: string;
|
2026-01-04 04:05:18 +01:00
|
|
|
lane?: string;
|
|
|
|
|
extraSystemPrompt?: string;
|
|
|
|
|
idempotencyKey: string;
|
|
|
|
|
timeout?: number;
|
feat(sessions): expose label in sessions.list and support label lookup in sessions_send
- Add `label` field to session entries and expose it in `sessions.list`
- Display label column in the web UI sessions table
- Support `label` parameter in `sessions_send` for lookup by label instead of sessionKey
- `sessions.patch`: Accept and store `label` field
- `sessions.list`: Return `label` in session entries
- `sessions_spawn`: Pass label through to registry and announce flow
- `sessions_send`: Accept optional `label` param, lookup session by label if sessionKey not provided
- `agent` method: Accept `label` and `spawnedBy` params (stored in session entry)
- Add `label` column to sessions table in web UI
- Changed session store writes to merge with existing entry (`{ ...existing, ...new }`)
to preserve fields like `label` that might be set separately
We attempted to implement label persistence "properly" by passing the label
through the `agent` call and storing it during session initialization. However,
the auto-reply flow has multiple write points that overwrite the session entry,
and making all of them merge-aware proved unreliable.
The working solution patches the label in the `finally` block of
`runSubagentAnnounceFlow`, after all other session writes complete.
This is a workaround but robust - the patch happens at the very end,
just before potential cleanup.
A future refactor could make session writes consistently merge-based,
which would allow the cleaner approach of setting label at spawn time.
```typescript
// Spawn with label
sessions_spawn({ task: "...", label: "my-worker" })
// Later, find by label
sessions_send({ label: "my-worker", message: "continue..." })
// Or use sessions_list to see labels
sessions_list() // includes label field in response
```
2026-01-08 23:17:08 +00:00
|
|
|
label?: string;
|
|
|
|
|
spawnedBy?: string;
|
2026-02-13 02:01:53 +01:00
|
|
|
inputProvenance?: InputProvenance;
|
2026-01-04 04:05:18 +01:00
|
|
|
};
|
2026-01-18 22:49:55 +00:00
|
|
|
const cfg = loadConfig();
|
2026-01-04 04:05:18 +01:00
|
|
|
const idem = request.idempotencyKey;
|
2026-01-24 05:49:23 +00:00
|
|
|
const groupIdRaw = typeof request.groupId === "string" ? request.groupId.trim() : "";
|
|
|
|
|
const groupChannelRaw =
|
|
|
|
|
typeof request.groupChannel === "string" ? request.groupChannel.trim() : "";
|
|
|
|
|
const groupSpaceRaw = typeof request.groupSpace === "string" ? request.groupSpace.trim() : "";
|
|
|
|
|
let resolvedGroupId: string | undefined = groupIdRaw || undefined;
|
|
|
|
|
let resolvedGroupChannel: string | undefined = groupChannelRaw || undefined;
|
|
|
|
|
let resolvedGroupSpace: string | undefined = groupSpaceRaw || undefined;
|
|
|
|
|
let spawnedByValue =
|
|
|
|
|
typeof request.spawnedBy === "string" ? request.spawnedBy.trim() : undefined;
|
2026-02-13 02:01:53 +01:00
|
|
|
const inputProvenance = normalizeInputProvenance(request.inputProvenance);
|
2026-01-04 04:05:18 +01:00
|
|
|
const cached = context.dedupe.get(`agent:${idem}`);
|
|
|
|
|
if (cached) {
|
|
|
|
|
respond(cached.ok, cached.payload, cached.error, {
|
|
|
|
|
cached: true,
|
|
|
|
|
});
|
|
|
|
|
return;
|
|
|
|
|
}
|
2026-02-15 13:30:37 +00:00
|
|
|
const normalizedAttachments = normalizeRpcAttachmentsToChatAttachments(request.attachments);
|
2026-01-10 20:34:34 +00:00
|
|
|
|
2026-02-22 13:20:53 +01:00
|
|
|
let message = (request.message ?? "").trim();
|
2026-01-10 20:34:34 +00:00
|
|
|
let images: Array<{ type: "image"; data: string; mimeType: string }> = [];
|
|
|
|
|
if (normalizedAttachments.length > 0) {
|
|
|
|
|
try {
|
2026-01-14 14:31:43 +00:00
|
|
|
const parsed = await parseMessageWithAttachments(message, normalizedAttachments, {
|
|
|
|
|
maxBytes: 5_000_000,
|
|
|
|
|
log: context.logGateway,
|
|
|
|
|
});
|
2026-01-10 20:34:34 +00:00
|
|
|
message = parsed.message.trim();
|
|
|
|
|
images = parsed.images;
|
|
|
|
|
} catch (err) {
|
2026-01-14 14:31:43 +00:00
|
|
|
respond(false, undefined, errorShape(ErrorCodes.INVALID_REQUEST, String(err)));
|
2026-01-10 20:34:34 +00:00
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
}
|
feat(gateway): inject timestamps into agent handler messages
Messages arriving through the gateway agent method (TUI, web, spawned
subagents, sessions_send, heartbeats) now get a timestamp prefix
automatically. This gives all agent contexts date/time awareness
without modifying the system prompt (which is cached for stability).
Channel messages (Discord, Telegram, etc.) already have timestamps
via envelope formatting in a separate code path and never reach
the agent handler, so there is no double-stamping risk.
Cron jobs also inject their own 'Current time:' prefix and are
detected and skipped.
Extracted as a pure function (injectTimestamp) with 12 unit tests
covering: timezone handling, 12/24h format, midnight boundaries,
envelope detection, cron detection, and empty messages.
Integration test verifies the agent handler wires it in correctly.
Closes #3658
Refs: #1897, #1928, #2108
2026-01-28 21:31:08 -05:00
|
|
|
|
2026-01-18 22:49:55 +00:00
|
|
|
const isKnownGatewayChannel = (value: string): boolean => isGatewayMessageChannel(value);
|
|
|
|
|
const channelHints = [request.channel, request.replyChannel]
|
|
|
|
|
.filter((value): value is string => typeof value === "string")
|
|
|
|
|
.map((value) => value.trim())
|
|
|
|
|
.filter(Boolean);
|
|
|
|
|
for (const rawChannel of channelHints) {
|
2026-01-13 06:16:43 +00:00
|
|
|
const normalized = normalizeMessageChannel(rawChannel);
|
2026-01-15 05:12:29 +00:00
|
|
|
if (normalized && normalized !== "last" && !isKnownGatewayChannel(normalized)) {
|
2026-01-09 23:09:07 +01:00
|
|
|
respond(
|
|
|
|
|
false,
|
|
|
|
|
undefined,
|
|
|
|
|
errorShape(
|
|
|
|
|
ErrorCodes.INVALID_REQUEST,
|
2026-01-15 05:11:54 +00:00
|
|
|
`invalid agent params: unknown channel: ${String(normalized)}`,
|
2026-01-09 23:09:07 +01:00
|
|
|
),
|
|
|
|
|
);
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
}
|
2026-01-04 04:05:18 +01:00
|
|
|
|
2026-01-18 22:49:55 +00:00
|
|
|
const agentIdRaw = typeof request.agentId === "string" ? request.agentId.trim() : "";
|
|
|
|
|
const agentId = agentIdRaw ? normalizeAgentId(agentIdRaw) : undefined;
|
|
|
|
|
if (agentId) {
|
|
|
|
|
const knownAgents = listAgentIds(cfg);
|
|
|
|
|
if (!knownAgents.includes(agentId)) {
|
|
|
|
|
respond(
|
|
|
|
|
false,
|
|
|
|
|
undefined,
|
|
|
|
|
errorShape(
|
|
|
|
|
ErrorCodes.INVALID_REQUEST,
|
|
|
|
|
`invalid agent params: unknown agent id "${request.agentId}"`,
|
|
|
|
|
),
|
|
|
|
|
);
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
const requestedSessionKeyRaw =
|
2026-01-04 04:05:18 +01:00
|
|
|
typeof request.sessionKey === "string" && request.sessionKey.trim()
|
|
|
|
|
? request.sessionKey.trim()
|
|
|
|
|
: undefined;
|
2026-02-15 12:46:14 -03:00
|
|
|
if (
|
|
|
|
|
requestedSessionKeyRaw &&
|
|
|
|
|
classifySessionKeyShape(requestedSessionKeyRaw) === "malformed_agent"
|
|
|
|
|
) {
|
|
|
|
|
respond(
|
|
|
|
|
false,
|
|
|
|
|
undefined,
|
|
|
|
|
errorShape(
|
|
|
|
|
ErrorCodes.INVALID_REQUEST,
|
|
|
|
|
`invalid agent params: malformed session key "${requestedSessionKeyRaw}"`,
|
|
|
|
|
),
|
|
|
|
|
);
|
|
|
|
|
return;
|
|
|
|
|
}
|
2026-02-14 19:18:15 -08:00
|
|
|
let requestedSessionKey =
|
2026-01-18 22:49:55 +00:00
|
|
|
requestedSessionKeyRaw ??
|
|
|
|
|
resolveExplicitAgentSessionKey({
|
|
|
|
|
cfg,
|
|
|
|
|
agentId,
|
|
|
|
|
});
|
|
|
|
|
if (agentId && requestedSessionKeyRaw) {
|
|
|
|
|
const sessionAgentId = resolveAgentIdFromSessionKey(requestedSessionKeyRaw);
|
|
|
|
|
if (sessionAgentId !== agentId) {
|
|
|
|
|
respond(
|
|
|
|
|
false,
|
|
|
|
|
undefined,
|
|
|
|
|
errorShape(
|
|
|
|
|
ErrorCodes.INVALID_REQUEST,
|
|
|
|
|
`invalid agent params: agent "${request.agentId}" does not match session key agent "${sessionAgentId}"`,
|
|
|
|
|
),
|
|
|
|
|
);
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
}
|
2026-01-04 04:05:18 +01:00
|
|
|
let resolvedSessionId = request.sessionId?.trim() || undefined;
|
|
|
|
|
let sessionEntry: SessionEntry | undefined;
|
|
|
|
|
let bestEffortDeliver = false;
|
|
|
|
|
let cfgForAgent: ReturnType<typeof loadConfig> | undefined;
|
2026-02-13 16:42:24 -03:00
|
|
|
let resolvedSessionKey = requestedSessionKey;
|
2026-02-14 19:18:15 -08:00
|
|
|
let skipTimestampInjection = false;
|
|
|
|
|
|
|
|
|
|
const resetCommandMatch = message.match(RESET_COMMAND_RE);
|
|
|
|
|
if (resetCommandMatch && requestedSessionKey) {
|
|
|
|
|
const resetReason = resetCommandMatch[1]?.toLowerCase() === "new" ? "new" : "reset";
|
|
|
|
|
const resetResult = await runSessionResetFromAgent({
|
|
|
|
|
key: requestedSessionKey,
|
|
|
|
|
reason: resetReason,
|
|
|
|
|
idempotencyKey: idem,
|
|
|
|
|
context,
|
|
|
|
|
client,
|
|
|
|
|
isWebchatConnect,
|
|
|
|
|
});
|
|
|
|
|
if (!resetResult.ok) {
|
|
|
|
|
respond(false, undefined, resetResult.error);
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
requestedSessionKey = resetResult.key;
|
|
|
|
|
resolvedSessionId = resetResult.sessionId ?? resolvedSessionId;
|
|
|
|
|
const postResetMessage = resetCommandMatch[2]?.trim() ?? "";
|
|
|
|
|
if (postResetMessage) {
|
|
|
|
|
message = postResetMessage;
|
|
|
|
|
} else {
|
|
|
|
|
// Keep bare /new and /reset behavior aligned with chat.send:
|
|
|
|
|
// reset first, then run a fresh-session greeting prompt in-place.
|
|
|
|
|
message = BARE_SESSION_RESET_PROMPT;
|
|
|
|
|
skipTimestampInjection = true;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Inject timestamp into user-authored messages that don't already have one.
|
|
|
|
|
// Channel messages (Discord, Telegram, etc.) get timestamps via envelope
|
|
|
|
|
// formatting in a separate code path — they never reach this handler.
|
|
|
|
|
// See: https://github.com/moltbot/moltbot/issues/3658
|
|
|
|
|
if (!skipTimestampInjection) {
|
|
|
|
|
message = injectTimestamp(message, timestampOptsFromConfig(cfg));
|
|
|
|
|
}
|
2026-01-04 04:05:18 +01:00
|
|
|
|
|
|
|
|
if (requestedSessionKey) {
|
2026-01-15 23:06:42 +00:00
|
|
|
const { cfg, storePath, entry, canonicalKey } = loadSessionEntry(requestedSessionKey);
|
2026-01-04 04:05:18 +01:00
|
|
|
cfgForAgent = cfg;
|
|
|
|
|
const now = Date.now();
|
|
|
|
|
const sessionId = entry?.sessionId ?? randomUUID();
|
feat(sessions): expose label in sessions.list and support label lookup in sessions_send
- Add `label` field to session entries and expose it in `sessions.list`
- Display label column in the web UI sessions table
- Support `label` parameter in `sessions_send` for lookup by label instead of sessionKey
- `sessions.patch`: Accept and store `label` field
- `sessions.list`: Return `label` in session entries
- `sessions_spawn`: Pass label through to registry and announce flow
- `sessions_send`: Accept optional `label` param, lookup session by label if sessionKey not provided
- `agent` method: Accept `label` and `spawnedBy` params (stored in session entry)
- Add `label` column to sessions table in web UI
- Changed session store writes to merge with existing entry (`{ ...existing, ...new }`)
to preserve fields like `label` that might be set separately
We attempted to implement label persistence "properly" by passing the label
through the `agent` call and storing it during session initialization. However,
the auto-reply flow has multiple write points that overwrite the session entry,
and making all of them merge-aware proved unreliable.
The working solution patches the label in the `finally` block of
`runSubagentAnnounceFlow`, after all other session writes complete.
This is a workaround but robust - the patch happens at the very end,
just before potential cleanup.
A future refactor could make session writes consistently merge-based,
which would allow the cleaner approach of setting label at spawn time.
```typescript
// Spawn with label
sessions_spawn({ task: "...", label: "my-worker" })
// Later, find by label
sessions_send({ label: "my-worker", message: "continue..." })
// Or use sessions_list to see labels
sessions_list() // includes label field in response
```
2026-01-08 23:17:08 +00:00
|
|
|
const labelValue = request.label?.trim() || entry?.label;
|
2026-02-13 16:42:24 -03:00
|
|
|
const sessionAgent = resolveAgentIdFromSessionKey(canonicalKey);
|
|
|
|
|
spawnedByValue = canonicalizeSpawnedByForAgent(
|
|
|
|
|
cfg,
|
|
|
|
|
sessionAgent,
|
|
|
|
|
spawnedByValue || entry?.spawnedBy,
|
|
|
|
|
);
|
2026-01-24 05:49:23 +00:00
|
|
|
let inheritedGroup:
|
|
|
|
|
| { groupId?: string; groupChannel?: string; groupSpace?: string }
|
|
|
|
|
| undefined;
|
|
|
|
|
if (spawnedByValue && (!resolvedGroupId || !resolvedGroupChannel || !resolvedGroupSpace)) {
|
|
|
|
|
try {
|
|
|
|
|
const parentEntry = loadSessionEntry(spawnedByValue)?.entry;
|
|
|
|
|
inheritedGroup = {
|
|
|
|
|
groupId: parentEntry?.groupId,
|
|
|
|
|
groupChannel: parentEntry?.groupChannel,
|
|
|
|
|
groupSpace: parentEntry?.space,
|
|
|
|
|
};
|
|
|
|
|
} catch {
|
|
|
|
|
inheritedGroup = undefined;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
resolvedGroupId = resolvedGroupId || inheritedGroup?.groupId;
|
|
|
|
|
resolvedGroupChannel = resolvedGroupChannel || inheritedGroup?.groupChannel;
|
|
|
|
|
resolvedGroupSpace = resolvedGroupSpace || inheritedGroup?.groupSpace;
|
2026-01-17 06:01:30 +00:00
|
|
|
const deliveryFields = normalizeSessionDeliveryFields(entry);
|
2026-01-06 18:25:37 +00:00
|
|
|
const nextEntry: SessionEntry = {
|
2026-01-04 04:05:18 +01:00
|
|
|
sessionId,
|
|
|
|
|
updatedAt: now,
|
|
|
|
|
thinkingLevel: entry?.thinkingLevel,
|
|
|
|
|
verboseLevel: entry?.verboseLevel,
|
2026-01-07 06:16:38 +01:00
|
|
|
reasoningLevel: entry?.reasoningLevel,
|
2026-01-04 04:05:18 +01:00
|
|
|
systemSent: entry?.systemSent,
|
|
|
|
|
sendPolicy: entry?.sendPolicy,
|
|
|
|
|
skillsSnapshot: entry?.skillsSnapshot,
|
2026-01-17 06:01:30 +00:00
|
|
|
deliveryContext: deliveryFields.deliveryContext,
|
|
|
|
|
lastChannel: deliveryFields.lastChannel ?? entry?.lastChannel,
|
|
|
|
|
lastTo: deliveryFields.lastTo ?? entry?.lastTo,
|
|
|
|
|
lastAccountId: deliveryFields.lastAccountId ?? entry?.lastAccountId,
|
2026-01-06 22:30:29 +00:00
|
|
|
modelOverride: entry?.modelOverride,
|
|
|
|
|
providerOverride: entry?.providerOverride,
|
feat(sessions): expose label in sessions.list and support label lookup in sessions_send
- Add `label` field to session entries and expose it in `sessions.list`
- Display label column in the web UI sessions table
- Support `label` parameter in `sessions_send` for lookup by label instead of sessionKey
- `sessions.patch`: Accept and store `label` field
- `sessions.list`: Return `label` in session entries
- `sessions_spawn`: Pass label through to registry and announce flow
- `sessions_send`: Accept optional `label` param, lookup session by label if sessionKey not provided
- `agent` method: Accept `label` and `spawnedBy` params (stored in session entry)
- Add `label` column to sessions table in web UI
- Changed session store writes to merge with existing entry (`{ ...existing, ...new }`)
to preserve fields like `label` that might be set separately
We attempted to implement label persistence "properly" by passing the label
through the `agent` call and storing it during session initialization. However,
the auto-reply flow has multiple write points that overwrite the session entry,
and making all of them merge-aware proved unreliable.
The working solution patches the label in the `finally` block of
`runSubagentAnnounceFlow`, after all other session writes complete.
This is a workaround but robust - the patch happens at the very end,
just before potential cleanup.
A future refactor could make session writes consistently merge-based,
which would allow the cleaner approach of setting label at spawn time.
```typescript
// Spawn with label
sessions_spawn({ task: "...", label: "my-worker" })
// Later, find by label
sessions_send({ label: "my-worker", message: "continue..." })
// Or use sessions_list to see labels
sessions_list() // includes label field in response
```
2026-01-08 23:17:08 +00:00
|
|
|
label: labelValue,
|
|
|
|
|
spawnedBy: spawnedByValue,
|
Agents: add nested subagent orchestration controls and reduce subagent token waste (#14447)
* Agents: add subagent orchestration controls
* Agents: add subagent orchestration controls (WIP uncommitted changes)
* feat(subagents): add depth-based spawn gating for sub-sub-agents
* feat(subagents): tool policy, registry, and announce chain for nested agents
* feat(subagents): system prompt, docs, changelog for nested sub-agents
* fix(subagents): prevent model fallback override, show model during active runs, and block context overflow fallback
Bug 1: When a session has an explicit model override (e.g., gpt/openai-codex),
the fallback candidate logic in resolveFallbackCandidates silently appended the
global primary model (opus) as a backstop. On reinjection/steer with a transient
error, the session could fall back to opus which has a smaller context window
and crash. Fix: when storedModelOverride is set, pass fallbacksOverride ?? []
instead of undefined, preventing the implicit primary backstop.
Bug 2: Active subagents showed 'model n/a' in /subagents list because
resolveModelDisplay only read entry.model/modelProvider (populated after run
completes). Fix: fall back to modelOverride/providerOverride fields which are
populated at spawn time via sessions.patch.
Bug 3: Context overflow errors (prompt too long, context_length_exceeded) could
theoretically escape runEmbeddedPiAgent and be treated as failover candidates
in runWithModelFallback, causing a switch to a model with a smaller context
window. Fix: in runWithModelFallback, detect context overflow errors via
isLikelyContextOverflowError and rethrow them immediately instead of trying the
next model candidate.
* fix(subagents): track spawn depth in session store and fix announce routing for nested agents
* Fix compaction status tracking and dedupe overflow compaction triggers
* fix(subagents): enforce depth block via session store and implement cascade kill
* fix: inject group chat context into system prompt
* fix(subagents): always write model to session store at spawn time
* Preserve spawnDepth when agent handler rewrites session entry
* fix(subagents): suppress announce on steer-restart
* fix(subagents): fallback spawned session model to runtime default
* fix(subagents): enforce spawn depth when caller key resolves by sessionId
* feat(subagents): implement active-first ordering for numeric targets and enhance task display
- Added a test to verify that subagents with numeric targets follow an active-first list ordering.
- Updated `resolveSubagentTarget` to sort subagent runs based on active status and recent activity.
- Enhanced task display in command responses to prevent truncation of long task descriptions.
- Introduced new utility functions for compacting task text and managing subagent run states.
* fix(subagents): show model for active runs via run record fallback
When the spawned model matches the agent's default model, the session
store's override fields are intentionally cleared (isDefault: true).
The model/modelProvider fields are only populated after the run
completes. This left active subagents showing 'model n/a'.
Fix: store the resolved model on SubagentRunRecord at registration
time, and use it as a fallback in both display paths (subagents tool
and /subagents command) when the session store entry has no model info.
Changes:
- SubagentRunRecord: add optional model field
- registerSubagentRun: accept and persist model param
- sessions-spawn-tool: pass resolvedModel to registerSubagentRun
- subagents-tool: pass run record model as fallback to resolveModelDisplay
- commands-subagents: pass run record model as fallback to resolveModelDisplay
* feat(chat): implement session key resolution and reset on sidebar navigation
- Added functions to resolve the main session key and reset chat state when switching sessions from the sidebar.
- Updated the `renderTab` function to handle session key changes when navigating to the chat tab.
- Introduced a test to verify that the session resets to "main" when opening chat from the sidebar navigation.
* fix: subagent timeout=0 passthrough and fallback prompt duplication
Bug 1: runTimeoutSeconds=0 now means 'no timeout' instead of applying 600s default
- sessions-spawn-tool: default to undefined (not 0) when neither timeout param
is provided; use != null check so explicit 0 passes through to gateway
- agent.ts: accept 0 as valid timeout (resolveAgentTimeoutMs already handles
0 → MAX_SAFE_TIMEOUT_MS)
Bug 2: model fallback no longer re-injects the original prompt as a duplicate
- agent.ts: track fallback attempt index; on retries use a short continuation
message instead of the full original prompt since the session file already
contains it from the first attempt
- Also skip re-sending images on fallback retries (already in session)
* feat(subagents): truncate long task descriptions in subagents command output
- Introduced a new utility function to format task previews, limiting their length to improve readability.
- Updated the command handler to use the new formatting function, ensuring task descriptions are truncated appropriately.
- Adjusted related tests to verify that long task descriptions are now truncated in the output.
* refactor(subagents): update subagent registry path resolution and improve command output formatting
- Replaced direct import of STATE_DIR with a utility function to resolve the state directory dynamically.
- Enhanced the formatting of command output for active and recent subagents, adding separators for better readability.
- Updated related tests to reflect changes in command output structure.
* fix(subagent): default sessions_spawn to no timeout when runTimeoutSeconds omitted
The previous fix (75a791106) correctly handled the case where
runTimeoutSeconds was explicitly set to 0 ("no timeout"). However,
when models omit the parameter entirely (which is common since the
schema marks it as optional), runTimeoutSeconds resolved to undefined.
undefined flowed through the chain as:
sessions_spawn → timeout: undefined (since undefined != null is false)
→ gateway agent handler → agentCommand opts.timeout: undefined
→ resolveAgentTimeoutMs({ overrideSeconds: undefined })
→ DEFAULT_AGENT_TIMEOUT_SECONDS (600s = 10 minutes)
This caused subagents to be killed at exactly 10 minutes even though
the user's intent (via TOOLS.md) was for subagents to run without a
timeout.
Fix: default runTimeoutSeconds to 0 (no timeout) when neither
runTimeoutSeconds nor timeoutSeconds is provided by the caller.
Subagent spawns are long-running by design and should not inherit the
600s agent-command default timeout.
* fix(subagent): accept timeout=0 in agent-via-gateway path (second 600s default)
* fix: thread timeout override through getReplyFromConfig dispatch path
getReplyFromConfig called resolveAgentTimeoutMs({ cfg }) with no override,
always falling back to the config default (600s). Add timeoutOverrideSeconds
to GetReplyOptions and pass it through as overrideSeconds so callers of the
dispatch chain can specify a custom timeout (0 = no timeout).
This complements the existing timeout threading in agentCommand and the
cron isolated-agent runner, which already pass overrideSeconds correctly.
* feat(model-fallback): normalize OpenAI Codex model references and enhance fallback handling
- Added normalization for OpenAI Codex model references, specifically converting "gpt-5.3-codex" to "openai-codex" before execution.
- Updated the `resolveFallbackCandidates` function to utilize the new normalization logic.
- Enhanced tests to verify the correct behavior of model normalization and fallback mechanisms.
- Introduced a new test case to ensure that the normalization process works as expected for various input formats.
* feat(tests): add unit tests for steer failure behavior in openclaw-tools
- Introduced a new test file to validate the behavior of subagents when steer replacement dispatch fails.
- Implemented tests to ensure that the announce behavior is restored correctly and that the suppression reason is cleared as expected.
- Enhanced the subagent registry with a new function to clear steer restart suppression.
- Updated related components to support the new test scenarios.
* fix(subagents): replace stop command with kill in slash commands and documentation
- Updated the `/subagents` command to replace `stop` with `kill` for consistency in controlling sub-agent runs.
- Modified related documentation to reflect the change in command usage.
- Removed legacy timeoutSeconds references from the sessions-spawn-tool schema and tests to streamline timeout handling.
- Enhanced tests to ensure correct behavior of the updated commands and their interactions.
* feat(tests): add unit tests for readLatestAssistantReply function
- Introduced a new test file for the `readLatestAssistantReply` function to validate its behavior with various message scenarios.
- Implemented tests to ensure the function correctly retrieves the latest assistant message and handles cases where the latest message has no text.
- Mocked the gateway call to simulate different message histories for comprehensive testing.
* feat(tests): enhance subagent kill-all cascade tests and announce formatting
- Added a new test to verify that the `kill-all` command cascades through ended parents to active descendants in subagents.
- Updated the subagent announce formatting tests to reflect changes in message structure, including the replacement of "Findings:" with "Result:" and the addition of new expectations for message content.
- Improved the handling of long findings and stats in the announce formatting logic to ensure concise output.
- Refactored related functions to enhance clarity and maintainability in the subagent registry and tools.
* refactor(subagent): update announce formatting and remove unused constants
- Modified the subagent announce formatting to replace "Findings:" with "Result:" and adjusted related expectations in tests.
- Removed constants for maximum announce findings characters and summary words, simplifying the announcement logic.
- Updated the handling of findings to retain full content instead of truncating, ensuring more informative outputs.
- Cleaned up unused imports in the commands-subagents file to enhance code clarity.
* feat(tests): enhance billing error handling in user-facing text
- Added tests to ensure that normal text mentioning billing plans is not rewritten, preserving user context.
- Updated the `isBillingErrorMessage` and `sanitizeUserFacingText` functions to improve handling of billing-related messages.
- Introduced new test cases for various scenarios involving billing messages to ensure accurate processing and output.
- Enhanced the subagent announce flow to correctly manage active descendant runs, preventing premature announcements.
* feat(subagent): enhance workflow guidance and auto-announcement clarity
- Added a new guideline in the subagent system prompt to emphasize trust in push-based completion, discouraging busy polling for status updates.
- Updated documentation to clarify that sub-agents will automatically announce their results, improving user understanding of the workflow.
- Enhanced tests to verify the new guidance on avoiding polling loops and to ensure the accuracy of the updated prompts.
* fix(cron): avoid announcing interim subagent spawn acks
* chore: clean post-rebase imports
* fix(cron): fall back to child replies when parent stays interim
* fix(subagents): make active-run guidance advisory
* fix(subagents): update announce flow to handle active descendants and enhance test coverage
- Modified the announce flow to defer announcements when active descendant runs are present, ensuring accurate status reporting.
- Updated tests to verify the new behavior, including scenarios where no fallback requester is available and ensuring proper handling of finished subagents.
- Enhanced the announce formatting to include an `expectFinal` flag for better clarity in the announcement process.
* fix(subagents): enhance announce flow and formatting for user updates
- Updated the announce flow to provide clearer instructions for user updates based on active subagent runs and requester context.
- Refactored the announcement logic to improve clarity and ensure internal context remains private.
- Enhanced tests to verify the new message expectations and formatting, including updated prompts for user-facing updates.
- Introduced a new function to build reply instructions based on session context, improving the overall announcement process.
* fix: resolve prep blockers and changelog placement (#14447) (thanks @tyler6204)
* fix: restore cron delivery-plan import after rebase (#14447) (thanks @tyler6204)
* fix: resolve test failures from rebase conflicts (#14447) (thanks @tyler6204)
* fix: apply formatting after rebase (#14447) (thanks @tyler6204)
2026-02-14 22:03:45 -08:00
|
|
|
spawnDepth: entry?.spawnDepth,
|
2026-01-24 05:49:23 +00:00
|
|
|
channel: entry?.channel ?? request.channel?.trim(),
|
|
|
|
|
groupId: resolvedGroupId ?? entry?.groupId,
|
|
|
|
|
groupChannel: resolvedGroupChannel ?? entry?.groupChannel,
|
|
|
|
|
space: resolvedGroupSpace ?? entry?.space,
|
2026-01-25 19:56:04 +01:00
|
|
|
cliSessionIds: entry?.cliSessionIds,
|
|
|
|
|
claudeCliSessionId: entry?.claudeCliSessionId,
|
2026-01-04 04:05:18 +01:00
|
|
|
};
|
2026-01-06 18:25:37 +00:00
|
|
|
sessionEntry = nextEntry;
|
2026-01-04 04:05:18 +01:00
|
|
|
const sendPolicy = resolveSendPolicy({
|
|
|
|
|
cfg,
|
|
|
|
|
entry,
|
2026-02-13 16:42:24 -03:00
|
|
|
sessionKey: canonicalKey,
|
2026-01-13 06:16:43 +00:00
|
|
|
channel: entry?.channel,
|
2026-01-04 04:05:18 +01:00
|
|
|
chatType: entry?.chatType,
|
|
|
|
|
});
|
|
|
|
|
if (sendPolicy === "deny") {
|
|
|
|
|
respond(
|
|
|
|
|
false,
|
|
|
|
|
undefined,
|
2026-01-14 14:31:43 +00:00
|
|
|
errorShape(ErrorCodes.INVALID_REQUEST, "send blocked by session policy"),
|
2026-01-04 04:05:18 +01:00
|
|
|
);
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
resolvedSessionId = sessionId;
|
2026-01-12 01:21:44 +00:00
|
|
|
const canonicalSessionKey = canonicalKey;
|
2026-02-13 16:42:24 -03:00
|
|
|
resolvedSessionKey = canonicalSessionKey;
|
2026-01-12 01:05:43 +00:00
|
|
|
const agentId = resolveAgentIdFromSessionKey(canonicalSessionKey);
|
|
|
|
|
const mainSessionKey = resolveAgentMainSessionKey({ cfg, agentId });
|
2026-01-15 23:06:42 +00:00
|
|
|
if (storePath) {
|
|
|
|
|
await updateSessionStore(storePath, (store) => {
|
2026-02-13 16:42:24 -03:00
|
|
|
const target = resolveGatewaySessionStoreTarget({
|
|
|
|
|
cfg,
|
|
|
|
|
key: requestedSessionKey,
|
|
|
|
|
store,
|
|
|
|
|
});
|
|
|
|
|
pruneLegacyStoreKeys({
|
|
|
|
|
store,
|
|
|
|
|
canonicalKey: target.canonicalKey,
|
|
|
|
|
candidates: target.storeKeys,
|
|
|
|
|
});
|
2026-01-15 23:06:42 +00:00
|
|
|
store[canonicalSessionKey] = nextEntry;
|
|
|
|
|
});
|
2026-01-11 07:06:25 +00:00
|
|
|
}
|
2026-01-14 14:31:43 +00:00
|
|
|
if (canonicalSessionKey === mainSessionKey || canonicalSessionKey === "global") {
|
2026-01-04 04:05:18 +01:00
|
|
|
context.addChatRun(idem, {
|
2026-02-13 16:42:24 -03:00
|
|
|
sessionKey: canonicalSessionKey,
|
2026-01-04 04:05:18 +01:00
|
|
|
clientRunId: idem,
|
|
|
|
|
});
|
|
|
|
|
bestEffortDeliver = true;
|
|
|
|
|
}
|
2026-02-13 16:42:24 -03:00
|
|
|
registerAgentRunContext(idem, { sessionKey: canonicalSessionKey });
|
2026-01-04 04:05:18 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
const runId = idem;
|
2026-02-04 17:12:16 -05:00
|
|
|
const connId = typeof client?.connId === "string" ? client.connId : undefined;
|
|
|
|
|
const wantsToolEvents = hasGatewayClientCap(
|
|
|
|
|
client?.connect?.caps,
|
|
|
|
|
GATEWAY_CLIENT_CAPS.TOOL_EVENTS,
|
|
|
|
|
);
|
|
|
|
|
if (connId && wantsToolEvents) {
|
|
|
|
|
context.registerToolEventRecipient(runId, connId);
|
2026-02-10 19:17:21 -06:00
|
|
|
// Register for any other active runs *in the same session* so
|
|
|
|
|
// late-joining clients (e.g. page refresh mid-response) receive
|
|
|
|
|
// in-progress tool events without leaking cross-session data.
|
|
|
|
|
for (const [activeRunId, active] of context.chatAbortControllers) {
|
|
|
|
|
if (activeRunId !== runId && active.sessionKey === requestedSessionKey) {
|
|
|
|
|
context.registerToolEventRecipient(activeRunId, connId);
|
|
|
|
|
}
|
|
|
|
|
}
|
2026-02-04 17:12:16 -05:00
|
|
|
}
|
2026-01-04 04:05:18 +01:00
|
|
|
|
2026-01-17 06:01:30 +00:00
|
|
|
const wantsDelivery = request.deliver === true;
|
2026-01-17 04:24:47 +00:00
|
|
|
const explicitTo =
|
2026-01-18 22:49:55 +00:00
|
|
|
typeof request.replyTo === "string" && request.replyTo.trim()
|
|
|
|
|
? request.replyTo.trim()
|
|
|
|
|
: typeof request.to === "string" && request.to.trim()
|
|
|
|
|
? request.to.trim()
|
|
|
|
|
: undefined;
|
2026-01-20 17:22:07 +00:00
|
|
|
const explicitThreadId =
|
|
|
|
|
typeof request.threadId === "string" && request.threadId.trim()
|
|
|
|
|
? request.threadId.trim()
|
|
|
|
|
: undefined;
|
2026-01-17 06:38:15 +00:00
|
|
|
const deliveryPlan = resolveAgentDeliveryPlan({
|
|
|
|
|
sessionEntry,
|
2026-01-18 22:49:55 +00:00
|
|
|
requestedChannel: request.replyChannel ?? request.channel,
|
2026-01-17 06:01:30 +00:00
|
|
|
explicitTo,
|
2026-01-20 17:22:07 +00:00
|
|
|
explicitThreadId,
|
2026-01-18 22:49:55 +00:00
|
|
|
accountId: request.replyAccountId ?? request.accountId,
|
2026-01-17 06:38:15 +00:00
|
|
|
wantsDelivery,
|
2026-01-17 06:01:30 +00:00
|
|
|
});
|
2026-01-09 22:32:59 +01:00
|
|
|
|
2026-02-22 11:20:33 +01:00
|
|
|
let resolvedChannel = deliveryPlan.resolvedChannel;
|
|
|
|
|
let deliveryTargetMode = deliveryPlan.deliveryTargetMode;
|
|
|
|
|
let resolvedAccountId = deliveryPlan.resolvedAccountId;
|
2026-01-17 06:38:15 +00:00
|
|
|
let resolvedTo = deliveryPlan.resolvedTo;
|
2026-02-22 11:20:33 +01:00
|
|
|
let effectivePlan = deliveryPlan;
|
|
|
|
|
|
|
|
|
|
if (wantsDelivery && resolvedChannel === INTERNAL_MESSAGE_CHANNEL) {
|
|
|
|
|
const cfgResolved = cfgForAgent ?? cfg;
|
|
|
|
|
try {
|
|
|
|
|
const selection = await resolveMessageChannelSelection({ cfg: cfgResolved });
|
|
|
|
|
resolvedChannel = selection.channel;
|
|
|
|
|
deliveryTargetMode = deliveryTargetMode ?? "implicit";
|
|
|
|
|
effectivePlan = {
|
|
|
|
|
...deliveryPlan,
|
|
|
|
|
resolvedChannel,
|
|
|
|
|
deliveryTargetMode,
|
|
|
|
|
resolvedAccountId,
|
|
|
|
|
};
|
|
|
|
|
} catch (err) {
|
|
|
|
|
respond(false, undefined, errorShape(ErrorCodes.INVALID_REQUEST, String(err)));
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
}
|
2026-01-09 23:00:23 +01:00
|
|
|
|
2026-01-13 06:16:43 +00:00
|
|
|
if (!resolvedTo && isDeliverableMessageChannel(resolvedChannel)) {
|
2026-01-18 22:49:55 +00:00
|
|
|
const cfgResolved = cfgForAgent ?? cfg;
|
2026-01-17 06:54:12 +00:00
|
|
|
const fallback = resolveAgentOutboundTarget({
|
2026-01-18 22:49:55 +00:00
|
|
|
cfg: cfgResolved,
|
2026-02-22 11:20:33 +01:00
|
|
|
plan: effectivePlan,
|
|
|
|
|
targetMode: deliveryTargetMode ?? "implicit",
|
2026-01-17 06:54:12 +00:00
|
|
|
validateExplicitTarget: false,
|
2026-01-11 11:45:25 +00:00
|
|
|
});
|
2026-01-17 06:54:12 +00:00
|
|
|
if (fallback.resolvedTarget?.ok) {
|
|
|
|
|
resolvedTo = fallback.resolvedTo;
|
2026-01-04 04:05:18 +01:00
|
|
|
}
|
2026-01-11 11:45:25 +00:00
|
|
|
}
|
2026-01-04 04:05:18 +01:00
|
|
|
|
2026-02-22 11:20:33 +01:00
|
|
|
if (wantsDelivery && resolvedChannel === INTERNAL_MESSAGE_CHANNEL) {
|
|
|
|
|
respond(
|
|
|
|
|
false,
|
|
|
|
|
undefined,
|
|
|
|
|
errorShape(
|
|
|
|
|
ErrorCodes.INVALID_REQUEST,
|
|
|
|
|
"delivery channel is required: pass --channel/--reply-channel or use a main session with a previous channel",
|
|
|
|
|
),
|
|
|
|
|
);
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
2026-01-14 14:31:43 +00:00
|
|
|
const deliver = request.deliver === true && resolvedChannel !== INTERNAL_MESSAGE_CHANNEL;
|
2026-01-04 04:05:18 +01:00
|
|
|
|
|
|
|
|
const accepted = {
|
|
|
|
|
runId,
|
|
|
|
|
status: "accepted" as const,
|
|
|
|
|
acceptedAt: Date.now(),
|
|
|
|
|
};
|
|
|
|
|
// Store an in-flight ack so retries do not spawn a second run.
|
|
|
|
|
context.dedupe.set(`agent:${idem}`, {
|
|
|
|
|
ts: Date.now(),
|
|
|
|
|
ok: true,
|
|
|
|
|
payload: accepted,
|
|
|
|
|
});
|
|
|
|
|
respond(true, accepted, undefined, { runId });
|
|
|
|
|
|
2026-01-20 17:22:07 +00:00
|
|
|
const resolvedThreadId = explicitThreadId ?? deliveryPlan.resolvedThreadId;
|
|
|
|
|
|
2026-01-04 04:05:18 +01:00
|
|
|
void agentCommand(
|
|
|
|
|
{
|
|
|
|
|
message,
|
2026-01-10 20:34:34 +00:00
|
|
|
images,
|
2026-01-11 11:45:25 +00:00
|
|
|
to: resolvedTo,
|
2026-01-04 04:05:18 +01:00
|
|
|
sessionId: resolvedSessionId,
|
2026-02-13 16:42:24 -03:00
|
|
|
sessionKey: resolvedSessionKey,
|
2026-01-04 04:05:18 +01:00
|
|
|
thinking: request.thinking,
|
|
|
|
|
deliver,
|
2026-01-11 11:45:25 +00:00
|
|
|
deliveryTargetMode,
|
2026-01-13 06:16:43 +00:00
|
|
|
channel: resolvedChannel,
|
2026-01-17 02:09:32 +00:00
|
|
|
accountId: resolvedAccountId,
|
2026-01-20 17:22:07 +00:00
|
|
|
threadId: resolvedThreadId,
|
2026-01-19 00:45:03 +00:00
|
|
|
runContext: {
|
|
|
|
|
messageChannel: resolvedChannel,
|
|
|
|
|
accountId: resolvedAccountId,
|
2026-01-24 05:49:23 +00:00
|
|
|
groupId: resolvedGroupId,
|
|
|
|
|
groupChannel: resolvedGroupChannel,
|
|
|
|
|
groupSpace: resolvedGroupSpace,
|
2026-01-20 17:22:07 +00:00
|
|
|
currentThreadTs: resolvedThreadId != null ? String(resolvedThreadId) : undefined,
|
2026-01-19 00:45:03 +00:00
|
|
|
},
|
2026-01-24 05:49:23 +00:00
|
|
|
groupId: resolvedGroupId,
|
|
|
|
|
groupChannel: resolvedGroupChannel,
|
|
|
|
|
groupSpace: resolvedGroupSpace,
|
|
|
|
|
spawnedBy: spawnedByValue,
|
2026-01-04 04:05:18 +01:00
|
|
|
timeout: request.timeout?.toString(),
|
|
|
|
|
bestEffortDeliver,
|
2026-01-13 06:16:43 +00:00
|
|
|
messageChannel: resolvedChannel,
|
2026-01-04 04:05:18 +01:00
|
|
|
runId,
|
|
|
|
|
lane: request.lane,
|
|
|
|
|
extraSystemPrompt: request.extraSystemPrompt,
|
2026-02-13 02:01:53 +01:00
|
|
|
inputProvenance,
|
2026-01-04 04:05:18 +01:00
|
|
|
},
|
|
|
|
|
defaultRuntime,
|
|
|
|
|
context.deps,
|
|
|
|
|
)
|
2026-01-06 03:30:27 +01:00
|
|
|
.then((result) => {
|
2026-01-04 04:05:18 +01:00
|
|
|
const payload = {
|
|
|
|
|
runId,
|
|
|
|
|
status: "ok" as const,
|
|
|
|
|
summary: "completed",
|
2026-01-06 03:30:27 +01:00
|
|
|
result,
|
2026-01-04 04:05:18 +01:00
|
|
|
};
|
|
|
|
|
context.dedupe.set(`agent:${idem}`, {
|
|
|
|
|
ts: Date.now(),
|
|
|
|
|
ok: true,
|
|
|
|
|
payload,
|
|
|
|
|
});
|
|
|
|
|
// Send a second res frame (same id) so TS clients with expectFinal can wait.
|
|
|
|
|
// Swift clients will typically treat the first res as the result and ignore this.
|
|
|
|
|
respond(true, payload, undefined, { runId });
|
|
|
|
|
})
|
|
|
|
|
.catch((err) => {
|
|
|
|
|
const error = errorShape(ErrorCodes.UNAVAILABLE, String(err));
|
|
|
|
|
const payload = {
|
|
|
|
|
runId,
|
|
|
|
|
status: "error" as const,
|
|
|
|
|
summary: String(err),
|
|
|
|
|
};
|
|
|
|
|
context.dedupe.set(`agent:${idem}`, {
|
|
|
|
|
ts: Date.now(),
|
|
|
|
|
ok: false,
|
|
|
|
|
payload,
|
|
|
|
|
error,
|
|
|
|
|
});
|
|
|
|
|
respond(false, payload, error, {
|
|
|
|
|
runId,
|
|
|
|
|
error: formatForLog(err),
|
|
|
|
|
});
|
|
|
|
|
});
|
|
|
|
|
},
|
2026-01-22 06:47:37 +00:00
|
|
|
"agent.identity.get": ({ params, respond }) => {
|
|
|
|
|
if (!validateAgentIdentityParams(params)) {
|
|
|
|
|
respond(
|
|
|
|
|
false,
|
|
|
|
|
undefined,
|
|
|
|
|
errorShape(
|
|
|
|
|
ErrorCodes.INVALID_REQUEST,
|
|
|
|
|
`invalid agent.identity.get params: ${formatValidationErrors(
|
|
|
|
|
validateAgentIdentityParams.errors,
|
|
|
|
|
)}`,
|
|
|
|
|
),
|
|
|
|
|
);
|
|
|
|
|
return;
|
|
|
|
|
}
|
2026-01-31 16:03:28 +09:00
|
|
|
const p = params;
|
2026-01-22 06:47:37 +00:00
|
|
|
const agentIdRaw = typeof p.agentId === "string" ? p.agentId.trim() : "";
|
|
|
|
|
const sessionKeyRaw = typeof p.sessionKey === "string" ? p.sessionKey.trim() : "";
|
|
|
|
|
let agentId = agentIdRaw ? normalizeAgentId(agentIdRaw) : undefined;
|
|
|
|
|
if (sessionKeyRaw) {
|
2026-02-15 12:46:14 -03:00
|
|
|
if (classifySessionKeyShape(sessionKeyRaw) === "malformed_agent") {
|
|
|
|
|
respond(
|
|
|
|
|
false,
|
|
|
|
|
undefined,
|
|
|
|
|
errorShape(
|
|
|
|
|
ErrorCodes.INVALID_REQUEST,
|
|
|
|
|
`invalid agent.identity.get params: malformed session key "${sessionKeyRaw}"`,
|
|
|
|
|
),
|
|
|
|
|
);
|
|
|
|
|
return;
|
|
|
|
|
}
|
2026-01-22 06:47:37 +00:00
|
|
|
const resolved = resolveAgentIdFromSessionKey(sessionKeyRaw);
|
|
|
|
|
if (agentId && resolved !== agentId) {
|
|
|
|
|
respond(
|
|
|
|
|
false,
|
|
|
|
|
undefined,
|
|
|
|
|
errorShape(
|
|
|
|
|
ErrorCodes.INVALID_REQUEST,
|
|
|
|
|
`invalid agent.identity.get params: agent "${agentIdRaw}" does not match session key agent "${resolved}"`,
|
|
|
|
|
),
|
|
|
|
|
);
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
agentId = resolved;
|
|
|
|
|
}
|
|
|
|
|
const cfg = loadConfig();
|
|
|
|
|
const identity = resolveAssistantIdentity({ cfg, agentId });
|
2026-01-22 21:57:02 +00:00
|
|
|
const avatarValue =
|
|
|
|
|
resolveAssistantAvatarUrl({
|
|
|
|
|
avatar: identity.avatar,
|
|
|
|
|
agentId: identity.agentId,
|
|
|
|
|
basePath: cfg.gateway?.controlUi?.basePath,
|
|
|
|
|
}) ?? identity.avatar;
|
2026-01-22 15:16:31 -05:00
|
|
|
respond(true, { ...identity, avatar: avatarValue }, undefined);
|
2026-01-22 06:47:37 +00:00
|
|
|
},
|
2026-01-04 04:05:18 +01:00
|
|
|
"agent.wait": async ({ params, respond }) => {
|
|
|
|
|
if (!validateAgentWaitParams(params)) {
|
|
|
|
|
respond(
|
|
|
|
|
false,
|
|
|
|
|
undefined,
|
|
|
|
|
errorShape(
|
|
|
|
|
ErrorCodes.INVALID_REQUEST,
|
|
|
|
|
`invalid agent.wait params: ${formatValidationErrors(validateAgentWaitParams.errors)}`,
|
|
|
|
|
),
|
|
|
|
|
);
|
|
|
|
|
return;
|
|
|
|
|
}
|
2026-01-31 16:03:28 +09:00
|
|
|
const p = params;
|
2026-02-22 13:20:53 +01:00
|
|
|
const runId = (p.runId ?? "").trim();
|
2026-01-04 04:05:18 +01:00
|
|
|
const timeoutMs =
|
|
|
|
|
typeof p.timeoutMs === "number" && Number.isFinite(p.timeoutMs)
|
|
|
|
|
? Math.max(0, Math.floor(p.timeoutMs))
|
|
|
|
|
: 30_000;
|
|
|
|
|
|
|
|
|
|
const snapshot = await waitForAgentJob({
|
|
|
|
|
runId,
|
|
|
|
|
timeoutMs,
|
|
|
|
|
});
|
|
|
|
|
if (!snapshot) {
|
|
|
|
|
respond(true, {
|
|
|
|
|
runId,
|
|
|
|
|
status: "timeout",
|
|
|
|
|
});
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
respond(true, {
|
|
|
|
|
runId,
|
2026-01-05 05:55:02 +01:00
|
|
|
status: snapshot.status,
|
2026-01-04 04:05:18 +01:00
|
|
|
startedAt: snapshot.startedAt,
|
|
|
|
|
endedAt: snapshot.endedAt,
|
|
|
|
|
error: snapshot.error,
|
|
|
|
|
});
|
|
|
|
|
},
|
|
|
|
|
};
|