profile id fix

This commit is contained in:
Alexander Davydov 2026-03-17 14:52:21 +03:00
parent ea8ef0697e
commit 4b10bac423
6 changed files with 178 additions and 8 deletions

View File

@ -230,10 +230,7 @@ export function resolveGigachatAuthProfileMetadata(
);
for (const profileId of profileIds) {
const credential = store.profiles[profileId];
if (
credential?.type === "api_key" &&
(credential as ApiKeyCredential).provider === "gigachat"
) {
if (credential?.type === "api_key" && credential.provider === "gigachat") {
return credential.metadata;
}
}
@ -1938,7 +1935,7 @@ export async function runEmbeddedAttempt(
const gigachatStore = ensureAuthProfileStore(agentDir, { allowKeychainPrompt: false });
const gigachatMeta = resolveGigachatAuthProfileMetadata(
gigachatStore,
params.attempt.authProfileId,
params.authProfileId,
);
const gigachatStreamFn = createGigachatStreamFn({

View File

@ -15,6 +15,7 @@ const {
buildThreadingToolContext,
buildEmbeddedRunBaseParams,
buildEmbeddedRunContexts,
normalizeFollowupRun,
resolveModelFallbackOptions,
resolveProviderScopedAuthProfile,
} = await import("./agent-runner-utils.js");
@ -157,6 +158,27 @@ describe("agent-runner-utils", () => {
});
});
// Verifies that normalizeFollowupRun rebuilds the nested `run` object from a
// legacy flattened followup payload (run fields spread at the top level next
// to prompt/enqueuedAt, with no `run` key) and that the auth profile
// selection (id + source) survives the normalization.
// NOTE(review): `makeRun` is defined elsewhere in this test file — presumably
// it returns a run-shaped object; confirm it has no nested `run` property,
// otherwise this would not exercise the legacy path.
it("normalizes legacy flattened followup runs", () => {
const run = makeRun({
authProfileId: "profile-openai",
authProfileIdSource: "auto",
});
// Legacy shape: run fields flattened beside prompt/enqueuedAt.
const normalized = normalizeFollowupRun({
prompt: "hello",
enqueuedAt: Date.now(),
...run,
} as unknown as FollowupRun);
// The rebuilt nested run must carry the core fields over verbatim.
expect(normalized.run).toMatchObject({
sessionId: run.sessionId,
provider: run.provider,
model: run.model,
authProfileId: "profile-openai",
authProfileIdSource: "auto",
});
});
it("prefers OriginatingChannel over Provider for messageProvider", () => {
const run = makeRun();

View File

@ -154,6 +154,61 @@ export const appendUsageLine = (payloads: ReplyPayload[], line: string): ReplyPa
export const resolveEnforceFinalTag = (run: FollowupRun["run"], provider: string) =>
Boolean(run.enforceFinalTag || isReasoningTagProvider(provider));
/**
 * Runtime guard for the nested `run` payload: true for any non-null object.
 * NOTE(review): shallow check only — individual run fields are not verified,
 * so callers trust the `value is FollowupRun["run"]` narrowing.
 */
function isFollowupRunObject(value: unknown): value is FollowupRun["run"] {
  return typeof value === "object" && value !== null;
}
/**
 * Normalize a queued followup run to the current nested shape.
 *
 * Newer producers enqueue `{ run: {...} }`; older ones enqueued the run
 * fields flattened onto the followup itself. When the nested `run` object is
 * missing, rebuild it from the flattened top-level fields so downstream code
 * only ever sees the nested shape. The flattened fields remain on the
 * returned object; an already-nested followup is returned unchanged.
 */
export function normalizeFollowupRun<T extends FollowupRun>(followupRun: T): T {
  if (isFollowupRunObject(followupRun.run)) {
    // Already in the current nested shape — pass through untouched.
    return followupRun;
  }
  const legacy = followupRun as T & Partial<FollowupRun["run"]>;
  // The exact set of fields that made up the legacy flattened run payload.
  // Copied key-by-key so followup-level fields (prompt, enqueuedAt, ...) do
  // not leak into the rebuilt `run` object.
  const runKeys = [
    "agentId",
    "agentDir",
    "sessionId",
    "sessionKey",
    "messageProvider",
    "agentAccountId",
    "groupId",
    "groupChannel",
    "groupSpace",
    "senderId",
    "senderName",
    "senderUsername",
    "senderE164",
    "senderIsOwner",
    "sessionFile",
    "workspaceDir",
    "config",
    "skillsSnapshot",
    "provider",
    "model",
    "authProfileId",
    "authProfileIdSource",
    "thinkLevel",
    "verboseLevel",
    "reasoningLevel",
    "elevatedLevel",
    "execOverrides",
    "bashElevated",
    "timeoutMs",
    "blockReplyBreak",
    "ownerNumbers",
    "inputProvenance",
    "extraSystemPrompt",
    "enforceFinalTag",
  ] as const;
  const rebuiltRun: Record<string, unknown> = {};
  for (const key of runKeys) {
    rebuiltRun[key] = legacy[key];
  }
  return {
    ...followupRun,
    run: rebuiltRun as FollowupRun["run"],
  };
}
export function resolveModelFallbackOptions(run: FollowupRun["run"]) {
return {
cfg: run.config,

View File

@ -314,6 +314,97 @@ describe("runReplyAgent authProfileId fallback scoping", () => {
expect(call.authProfileId).toBeUndefined();
expect(call.authProfileIdSource).toBeUndefined();
});
// End-to-end (mocked) regression test for the "profile id fix": runReplyAgent
// must accept a followup in the legacy flattened shape — run fields at the
// top level instead of under `run` — and still forward sessionId, provider,
// and the user-selected authProfileId to the embedded agent.
it("accepts legacy flattened followup runs", async () => {
// Model fallback resolves by invoking the provided run callback directly
// with a fixed provider/model pair.
runWithModelFallbackMock.mockImplementationOnce(
async ({ run }: RunWithModelFallbackParams) => ({
result: await run("anthropic", "claude-opus"),
provider: "anthropic",
model: "claude-opus",
}),
);
// Embedded agent succeeds with a trivial payload; the assertion below only
// inspects the arguments it was called with.
runEmbeddedPiAgentMock.mockResolvedValue({ payloads: [{ text: "ok" }], meta: {} });
const typing = createMockTypingController();
const sessionCtx = {
Provider: "telegram",
OriginatingTo: "chat",
AccountId: "primary",
MessageSid: "msg",
Surface: "telegram",
} as unknown as TemplateContext;
const sessionKey = "main";
const sessionEntry = {
sessionId: "session",
updatedAt: Date.now(),
totalTokens: 1,
compactionCount: 0,
};
await runReplyAgent({
commandBody: "hello",
// Legacy flattened followup: note the absence of a nested `run` object —
// normalizeFollowupRun inside runReplyAgent must rebuild it from these
// top-level fields.
followupRun: {
prompt: "hello",
summaryLine: "hello",
enqueuedAt: Date.now(),
agentId: "main",
agentDir: "/tmp/agent",
sessionId: "session",
sessionKey,
messageProvider: "telegram",
sessionFile: "/tmp/session.jsonl",
workspaceDir: "/tmp",
config: {},
skillsSnapshot: {},
provider: "anthropic",
model: "claude-opus",
authProfileId: "anthropic:openclaw",
authProfileIdSource: "user",
thinkLevel: "low",
verboseLevel: "off",
elevatedLevel: "off",
bashElevated: {
enabled: false,
allowed: false,
defaultLevel: "off",
},
timeoutMs: 5_000,
blockReplyBreak: "message_end",
} as unknown as FollowupRun,
queueKey: sessionKey,
resolvedQueue: { mode: "interrupt" } as unknown as QueueSettings,
shouldSteer: false,
shouldFollowup: false,
isActive: false,
isStreaming: false,
typing,
sessionCtx,
sessionEntry,
sessionStore: { [sessionKey]: sessionEntry },
sessionKey,
storePath: undefined,
defaultModel: "anthropic/claude-opus-4-5",
agentCfgContextTokens: 100_000,
resolvedVerboseLevel: "off",
isNewSession: false,
blockStreamingEnabled: false,
resolvedBlockStreamingBreak: "message_end",
shouldInjectGroupIntro: false,
typingMode: "instant",
});
// The embedded agent must have been invoked exactly once, with the run
// fields recovered from the flattened payload — in particular the
// user-selected auth profile id.
expect(runEmbeddedPiAgentMock).toHaveBeenCalledTimes(1);
const call = runEmbeddedPiAgentMock.mock.calls[0]?.[0] as {
authProfileId?: unknown;
sessionId?: unknown;
provider?: unknown;
};
expect(call.sessionId).toBe("session");
expect(call.provider).toBe("anthropic");
expect(call.authProfileId).toBe("anthropic:openclaw");
});
});
describe("runReplyAgent auto-compaction token update", () => {

View File

@ -44,7 +44,11 @@ import {
hasSessionRelatedCronJobs,
hasUnbackedReminderCommitment,
} from "./agent-runner-reminder-guard.js";
import { appendUsageLine, formatResponseUsageLine } from "./agent-runner-utils.js";
import {
appendUsageLine,
formatResponseUsageLine,
normalizeFollowupRun,
} from "./agent-runner-utils.js";
import { createAudioAsVoiceBuffer, createBlockReplyPipeline } from "./block-reply-pipeline.js";
import { resolveEffectiveBlockStreamingConfig } from "./block-streaming.js";
import { createFollowupRunner } from "./followup-runner.js";
@ -91,9 +95,9 @@ export async function runReplyAgent(params: {
shouldInjectGroupIntro: boolean;
typingMode: TypingMode;
}): Promise<ReplyPayload | ReplyPayload[] | undefined> {
const followupRun = normalizeFollowupRun(params.followupRun);
const {
commandBody,
followupRun,
queueKey,
resolvedQueue,
shouldSteer,

View File

@ -15,7 +15,7 @@ import { stripHeartbeatToken } from "../heartbeat.js";
import type { OriginatingChannelType } from "../templating.js";
import { isSilentReplyText, SILENT_REPLY_TOKEN } from "../tokens.js";
import type { GetReplyOptions, ReplyPayload } from "../types.js";
import { resolveRunAuthProfile } from "./agent-runner-utils.js";
import { normalizeFollowupRun, resolveRunAuthProfile } from "./agent-runner-utils.js";
import {
resolveOriginAccountId,
resolveOriginMessageProvider,
@ -131,6 +131,7 @@ export function createFollowupRunner(params: {
return async (queued: FollowupRun) => {
try {
queued = normalizeFollowupRun(queued);
const runId = crypto.randomUUID();
const shouldSurfaceToControlUi = isInternalMessageChannel(
resolveOriginMessageProvider({