Fix the thinking loader so it stops after the chat completes, and minimize the chain-of-thought after the streaming message ends.

This commit is contained in:
Mark 2026-02-13 18:40:46 -08:00
parent 2f788ad4a2
commit 4a291e7901
4 changed files with 46 additions and 9 deletions

View File

@ -516,9 +516,8 @@ function groupToolSteps(tools: ToolPart[]): VisualItem[] {
/* ─── Main component ─── */
export function ChainOfThought({ parts }: { parts: ChainPart[] }) {
export function ChainOfThought({ parts, isStreaming }: { parts: ChainPart[]; isStreaming?: boolean }) {
const [isOpen, setIsOpen] = useState(true);
const prevActiveRef = useRef(true);
const isActive = parts.some(
(p) =>
@ -556,12 +555,17 @@ export function ChainOfThought({ parts }: { parts: ChainPart[] }) {
return rem > 0 ? `${m}m ${rem}s` : `${m}m`;
}, []);
// Collapse only when the parent stream truly ends — not on intermediate
// isActive flickers (e.g. gap between reasoning end and tool start).
const wasStreamingRef = useRef(false);
useEffect(() => {
if (prevActiveRef.current && !isActive && parts.length > 0) {
if (isStreaming) {
wasStreamingRef.current = true;
} else if (wasStreamingRef.current && parts.length > 0) {
wasStreamingRef.current = false;
setIsOpen(false);
}
prevActiveRef.current = isActive;
}, [isActive, parts.length]);
}, [isStreaming, parts.length]);
const statusParts = parts.filter(
(p): p is Extract<ChainPart, { kind: "status" }> =>

View File

@ -56,8 +56,17 @@ function groupParts(parts: UIMessage["parts"]): MessageSegment[] {
const segments: MessageSegment[] = [];
let chain: ChainPart[] = [];
const flush = () => {
const flush = (textFollows?: boolean) => {
if (chain.length > 0) {
// If text content follows this chain, all tools must have
// completed — force any stuck "running" tools to "done".
if (textFollows) {
for (const cp of chain) {
if (cp.kind === "tool" && cp.status === "running") {
cp.status = "done";
}
}
}
segments.push({ type: "chain", parts: [...chain] });
chain = [];
}
@ -65,7 +74,7 @@ function groupParts(parts: UIMessage["parts"]): MessageSegment[] {
for (const part of parts) {
if (part.type === "text") {
flush();
flush(true);
const text = (part as { type: "text"; text: string }).text;
if (hasReportBlocks(text)) {
segments.push(
@ -504,7 +513,7 @@ const mdComponents: Components = {
/* ─── Chat message ─── */
export function ChatMessage({ message }: { message: UIMessage }) {
export function ChatMessage({ message, isStreaming }: { message: UIMessage; isStreaming?: boolean }) {
const isUser = message.role === "user";
const segments = groupParts(message.parts);
@ -649,6 +658,7 @@ export function ChatMessage({ message }: { message: UIMessage }) {
<ChainOfThought
key={index}
parts={segment.parts}
isStreaming={isStreaming}
/>
);
})}

View File

@ -1315,10 +1315,11 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
<div
className={`${compact ? "" : "max-w-3xl mx-auto"} py-3`}
>
{messages.map((message) => (
{messages.map((message, i) => (
<ChatMessage
key={message.id}
message={message}
isStreaming={isStreaming && i === messages.length - 1}
/>
))}
<div ref={messagesEndRef} />

View File

@ -64,6 +64,28 @@ export function handleMessageUpdate(
: undefined;
const evtType = typeof assistantRecord?.type === "string" ? assistantRecord.type : "";
// Handle native extended thinking events (Anthropic API thinking blocks).
// These arrive as thinking_delta / thinking_start / thinking_end from the
// provider adapter and must be forwarded to the agent event bus so that
// active-runs.ts (web UI) and other consumers receive them.
if (evtType === "thinking_delta" || evtType === "thinking_start" || evtType === "thinking_end") {
if (evtType === "thinking_delta") {
const thinkingDelta = typeof assistantRecord?.delta === "string" ? assistantRecord.delta : "";
if (thinkingDelta) {
emitAgentEvent({
runId: ctx.params.runId,
stream: "thinking",
data: { delta: thinkingDelta },
});
void ctx.params.onAgentEvent?.({
stream: "thinking",
data: { delta: thinkingDelta },
});
}
}
return;
}
if (evtType !== "text_delta" && evtType !== "text_start" && evtType !== "text_end") {
return;
}