feat: async I/O, tags field type, rich chat messages, deploy verification

- Convert sync filesystem and DuckDB operations to async across API routes,
  workspace lib, and active-runs to prevent event loop blocking during tree
  discovery, object lookups, and database queries
- Add "tags" field type for free-form string arrays with parse-tags utility,
  TagsBadges/TagsInput UI components, filter operators, and CRM skill docs
- Preserve rich text formatting (bold, italic, code, @mentions) in user chat
  messages by sending HTML alongside plain text through the transport layer
- Detect empty-stream errors, improve agent error emission, and add file
  mutation queues for concurrent write safety in active-runs
- Add pre-publish standalone node_modules verification to the deploy script,
  checking that all serverExternalPackages are present
- Extract syncManagedSkills and discoverWorkspaceDirs to support multi-workspace
  skill syncing, and add ensureSeedAssets for the runtime app dir
- Bump version 2.1.1 → 2.1.4
This commit is contained in:
kumarabhirup 2026-03-08 19:53:18 -07:00
parent 870650dbda
commit 039cbe6a43
No known key found for this signature in database
GPG Key ID: DB7CA2289CAB0167
39 changed files with 1824 additions and 466 deletions

View File

@ -45,7 +45,8 @@ export async function POST(req: Request) {
sessionId,
sessionKey,
distinctId,
}: { messages: UIMessage[]; sessionId?: string; sessionKey?: string; distinctId?: string } = await req.json();
userHtml,
}: { messages: UIMessage[]; sessionId?: string; sessionKey?: string; distinctId?: string; userHtml?: string } = await req.json();
const lastUserMessage = messages.filter((m) => m.role === "user").pop();
const userText =
@ -106,16 +107,17 @@ export async function POST(req: Request) {
task: info.task,
});
}
persistSubscribeUserMessage(sessionKey, {
await persistSubscribeUserMessage(sessionKey, {
id: lastUserMessage.id,
text: userText,
});
reactivateSubscribeRun(sessionKey, agentMessage);
} else if (sessionId && lastUserMessage) {
persistUserMessage(sessionId, {
await persistUserMessage(sessionId, {
id: lastUserMessage.id,
content: userText,
parts: lastUserMessage.parts as unknown[],
html: userHtml,
});
try {
startRun({

View File

@ -89,9 +89,9 @@ describe("Workspace DB & Reports API", () => {
});
it("executes query and returns rows", async () => {
const { safeResolvePath, duckdbQueryOnFile } = await import("@/lib/workspace");
const { safeResolvePath, duckdbQueryOnFileAsync } = await import("@/lib/workspace");
vi.mocked(safeResolvePath).mockReturnValue("/ws/test.duckdb");
vi.mocked(duckdbQueryOnFile).mockReturnValue([{ id: 1, name: "test" }]);
vi.mocked(duckdbQueryOnFileAsync).mockResolvedValue([{ id: 1, name: "test" }]);
const { POST } = await import("./db/query/route.js");
const req = new Request("http://localhost/api/workspace/db/query", {
@ -106,9 +106,9 @@ describe("Workspace DB & Reports API", () => {
});
it("returns empty rows for empty result", async () => {
const { safeResolvePath, duckdbQueryOnFile } = await import("@/lib/workspace");
const { safeResolvePath, duckdbQueryOnFileAsync } = await import("@/lib/workspace");
vi.mocked(safeResolvePath).mockReturnValue("/ws/test.duckdb");
vi.mocked(duckdbQueryOnFile).mockReturnValue([]);
vi.mocked(duckdbQueryOnFileAsync).mockResolvedValue([]);
const { POST } = await import("./db/query/route.js");
const req = new Request("http://localhost/api/workspace/db/query", {
@ -190,8 +190,8 @@ describe("Workspace DB & Reports API", () => {
it("executes report query successfully", async () => {
const { checkSqlSafety } = await import("@/lib/report-filters");
vi.mocked(checkSqlSafety).mockReturnValue(null);
const { duckdbQuery } = await import("@/lib/workspace");
vi.mocked(duckdbQuery).mockReturnValue([{ count: 42 }]);
const { duckdbQueryAsync } = await import("@/lib/workspace");
vi.mocked(duckdbQueryAsync).mockResolvedValue([{ count: 42 }]);
const { POST } = await import("./reports/execute/route.js");
const req = new Request("http://localhost/api/workspace/reports/execute", {
@ -210,8 +210,8 @@ describe("Workspace DB & Reports API", () => {
vi.mocked(checkSqlSafety).mockReturnValue(null);
vi.mocked(buildFilterClauses).mockReturnValue(['"Status" = \'Active\'']);
vi.mocked(injectFilters).mockReturnValue("SELECT * FROM filtered");
const { duckdbQuery } = await import("@/lib/workspace");
vi.mocked(duckdbQuery).mockReturnValue([{ count: 10 }]);
const { duckdbQueryAsync } = await import("@/lib/workspace");
vi.mocked(duckdbQueryAsync).mockResolvedValue([{ count: 10 }]);
const { POST } = await import("./reports/execute/route.js");
const req = new Request("http://localhost/api/workspace/reports/execute", {
@ -244,8 +244,8 @@ describe("Workspace DB & Reports API", () => {
});
it("executes query and returns rows", async () => {
const { duckdbQuery } = await import("@/lib/workspace");
vi.mocked(duckdbQuery).mockReturnValue([{ id: 1 }]);
const { duckdbQueryAsync } = await import("@/lib/workspace");
vi.mocked(duckdbQueryAsync).mockResolvedValue([{ id: 1 }]);
const { POST } = await import("./query/route.js");
const req = new Request("http://localhost/api/workspace/query", {

View File

@ -1,4 +1,4 @@
import { safeResolvePath, duckdbQueryOnFile } from "@/lib/workspace";
import { safeResolvePath, duckdbQueryOnFileAsync } from "@/lib/workspace";
export const dynamic = "force-dynamic";
export const runtime = "nodejs";
@ -51,6 +51,6 @@ export async function POST(request: Request) {
);
}
const rows = duckdbQueryOnFile(absPath, sql);
const rows = await duckdbQueryOnFileAsync(absPath, sql);
return Response.json({ rows, sql });
}

View File

@ -8,13 +8,18 @@ vi.mock("node:child_process", () => ({
// Mock workspace
vi.mock("@/lib/workspace", () => ({
duckdbPath: vi.fn(() => null),
duckdbPathAsync: vi.fn(async () => null),
duckdbQueryOnFile: vi.fn(() => []),
duckdbQueryOnFileAsync: vi.fn(async () => []),
duckdbExecOnFile: vi.fn(() => true),
duckdbExecOnFileAsync: vi.fn(async () => true),
findDuckDBForObject: vi.fn(() => null),
findDuckDBForObjectAsync: vi.fn(async () => null),
getObjectViews: vi.fn(() => ({ views: [], activeView: null })),
parseRelationValue: vi.fn((v: string | null) => (v ? [v] : [])),
resolveDuckdbBin: vi.fn(() => null),
discoverDuckDBPaths: vi.fn(() => []),
discoverDuckDBPathsAsync: vi.fn(async () => []),
}));
describe("Workspace Objects API", () => {
@ -25,13 +30,18 @@ describe("Workspace Objects API", () => {
}));
vi.mock("@/lib/workspace", () => ({
duckdbPath: vi.fn(() => null),
duckdbPathAsync: vi.fn(async () => null),
duckdbQueryOnFile: vi.fn(() => []),
duckdbQueryOnFileAsync: vi.fn(async () => []),
duckdbExecOnFile: vi.fn(() => true),
duckdbExecOnFileAsync: vi.fn(async () => true),
findDuckDBForObject: vi.fn(() => null),
findDuckDBForObjectAsync: vi.fn(async () => null),
getObjectViews: vi.fn(() => ({ views: [], activeView: null })),
parseRelationValue: vi.fn((v: string | null) => (v ? [v] : [])),
resolveDuckdbBin: vi.fn(() => null),
discoverDuckDBPaths: vi.fn(() => []),
discoverDuckDBPathsAsync: vi.fn(async () => []),
}));
});
@ -67,10 +77,10 @@ describe("Workspace Objects API", () => {
});
it("returns 404 when object not found", async () => {
const { findDuckDBForObject, resolveDuckdbBin, duckdbPath: mockDuckdbPath } = await import("@/lib/workspace");
const { findDuckDBForObjectAsync, resolveDuckdbBin, duckdbPathAsync: mockDuckdbPath } = await import("@/lib/workspace");
vi.mocked(resolveDuckdbBin).mockReturnValue("/opt/homebrew/bin/duckdb");
vi.mocked(findDuckDBForObject).mockReturnValue(null);
vi.mocked(mockDuckdbPath).mockReturnValue(null);
vi.mocked(findDuckDBForObjectAsync).mockResolvedValue(null);
vi.mocked(mockDuckdbPath).mockResolvedValue(null);
const { GET } = await import("./objects/[name]/route.js");
const res = await GET(
@ -81,14 +91,14 @@ describe("Workspace Objects API", () => {
});
it("returns object schema and entries when found", async () => {
const { findDuckDBForObject, duckdbQueryOnFile, resolveDuckdbBin, discoverDuckDBPaths } = await import("@/lib/workspace");
vi.mocked(findDuckDBForObject).mockReturnValue("/ws/workspace.duckdb");
const { findDuckDBForObjectAsync, duckdbQueryOnFileAsync, resolveDuckdbBin, discoverDuckDBPathsAsync } = await import("@/lib/workspace");
vi.mocked(findDuckDBForObjectAsync).mockResolvedValue("/ws/workspace.duckdb");
vi.mocked(resolveDuckdbBin).mockReturnValue("/opt/homebrew/bin/duckdb");
vi.mocked(discoverDuckDBPaths).mockReturnValue(["/ws/workspace.duckdb"]);
vi.mocked(discoverDuckDBPathsAsync).mockResolvedValue(["/ws/workspace.duckdb"]);
// Mock different queries with a call counter
let queryCall = 0;
vi.mocked(duckdbQueryOnFile).mockImplementation(() => {
vi.mocked(duckdbQueryOnFileAsync).mockImplementation(async () => {
queryCall++;
if (queryCall === 1) {
// Object row
@ -120,18 +130,64 @@ describe("Workspace Objects API", () => {
expect(json.fields).toBeDefined();
});
it("loads same-db schema queries sequentially (prevents oscillating empty fields during live refresh)", async () => {
const {
findDuckDBForObjectAsync,
duckdbQueryOnFileAsync,
resolveDuckdbBin,
discoverDuckDBPathsAsync,
} = await import("@/lib/workspace");
vi.mocked(findDuckDBForObjectAsync).mockResolvedValue("/ws/workspace.duckdb");
vi.mocked(resolveDuckdbBin).mockReturnValue("/opt/homebrew/bin/duckdb");
vi.mocked(discoverDuckDBPathsAsync).mockResolvedValue(["/ws/workspace.duckdb"]);
let inFlight = 0;
vi.mocked(duckdbQueryOnFileAsync).mockImplementation(async (_dbFile, sql) => {
inFlight += 1;
const concurrent = inFlight > 1;
await new Promise((resolve) => setTimeout(resolve, 5));
inFlight -= 1;
if (sql.includes("SELECT * FROM objects WHERE name")) {
return [{ id: "obj1", name: "company", description: "Company object" }] as never;
}
if (sql.includes("SELECT * FROM fields")) {
return concurrent
? ([] as never)
: ([{ id: "f1", name: "Company Name", type: "text", sort_order: 0 }] as never);
}
if (sql.includes("SELECT * FROM statuses")) {
return concurrent
? ([] as never)
: ([{ id: "status1", name: "Active", sort_order: 0 }] as never);
}
return [] as never;
});
const { GET } = await import("./objects/[name]/route.js");
const res = await GET(
new Request("http://localhost/api/workspace/objects/company"),
{ params: Promise.resolve({ name: "company" }) },
);
expect(res.status).toBe(200);
const json = await res.json();
expect(json.fields).toHaveLength(1);
expect(json.statuses).toHaveLength(1);
});
it("returns saved views and active view from object yaml metadata", async () => {
const {
findDuckDBForObject,
duckdbQueryOnFile,
findDuckDBForObjectAsync,
duckdbQueryOnFileAsync,
resolveDuckdbBin,
discoverDuckDBPaths,
discoverDuckDBPathsAsync,
getObjectViews,
} = await import("@/lib/workspace");
vi.mocked(findDuckDBForObject).mockReturnValue("/ws/workspace.duckdb");
vi.mocked(findDuckDBForObjectAsync).mockResolvedValue("/ws/workspace.duckdb");
vi.mocked(resolveDuckdbBin).mockReturnValue("/opt/homebrew/bin/duckdb");
vi.mocked(discoverDuckDBPaths).mockReturnValue(["/ws/workspace.duckdb"]);
vi.mocked(discoverDuckDBPathsAsync).mockResolvedValue(["/ws/workspace.duckdb"]);
vi.mocked(getObjectViews).mockReturnValue({
views: [
{
@ -151,7 +207,7 @@ describe("Workspace Objects API", () => {
});
let queryCall = 0;
vi.mocked(duckdbQueryOnFile).mockImplementation(() => {
vi.mocked(duckdbQueryOnFileAsync).mockImplementation(async () => {
queryCall += 1;
if (queryCall === 1) {
return [{ id: "obj1", name: "leads", description: "Leads object", icon: "star" }];
@ -179,8 +235,8 @@ describe("Workspace Objects API", () => {
});
it("accepts underscored names", async () => {
const { findDuckDBForObject } = await import("@/lib/workspace");
vi.mocked(findDuckDBForObject).mockReturnValue(null);
const { findDuckDBForObjectAsync } = await import("@/lib/workspace");
vi.mocked(findDuckDBForObjectAsync).mockResolvedValue(null);
const { GET } = await import("./objects/[name]/route.js");
const res = await GET(

View File

@ -1,6 +1,14 @@
import { duckdbPath, parseRelationValue, resolveDuckdbBin, findDuckDBForObject, duckdbQueryOnFile, discoverDuckDBPaths, getObjectViews } from "@/lib/workspace";
import {
duckdbPathAsync,
parseRelationValue,
resolveDuckdbBin,
findDuckDBForObjectAsync,
duckdbQueryOnFileAsync,
discoverDuckDBPathsAsync,
getObjectViews,
duckdbExecOnFileAsync,
} from "@/lib/workspace";
import { deserializeFilters, buildWhereClause, buildOrderByClause, type FieldMeta } from "@/lib/object-filters";
import { execSync } from "node:child_process";
export const dynamic = "force-dynamic";
export const runtime = "nodejs";
@ -49,29 +57,25 @@ type EavRow = {
// --- Schema migration (idempotent, runs once per process) ---
const migratedDbs = new Set<string>();
const migratedDbs = new Map<string, Promise<void>>();
/** Ensure the display_field column exists on a specific DB file. */
function ensureDisplayFieldColumn(dbFile: string) {
if (migratedDbs.has(dbFile)) {return;}
const bin = resolveDuckdbBin();
if (!bin) {return;}
try {
execSync(
`'${bin}' '${dbFile}' 'ALTER TABLE objects ADD COLUMN IF NOT EXISTS display_field VARCHAR'`,
{ encoding: "utf-8", timeout: 5_000, shell: "/bin/sh" },
);
} catch {
// migration might fail on DBs that don't have the objects table — skip
}
migratedDbs.add(dbFile);
async function ensureDisplayFieldColumn(dbFile: string): Promise<void> {
const existing = migratedDbs.get(dbFile);
if (existing) {return existing;}
const promise = duckdbExecOnFileAsync(
dbFile,
"ALTER TABLE objects ADD COLUMN IF NOT EXISTS display_field VARCHAR",
).then(() => undefined);
migratedDbs.set(dbFile, promise);
return promise;
}
// --- Helpers ---
/** Scoped query helper: queries a specific DB file. */
function q<T = Record<string, unknown>>(dbFile: string, sql: string): T[] {
return duckdbQueryOnFile<T>(dbFile, sql);
async function q<T = Record<string, unknown>>(dbFile: string, sql: string): Promise<T[]> {
return duckdbQueryOnFileAsync<T>(dbFile, sql);
}
/**
@ -141,14 +145,14 @@ function resolveDisplayField(
* Resolve relation field values to human-readable display labels.
* All queries target the same DB file where the object lives.
*/
function resolveRelationLabels(
async function resolveRelationLabels(
dbFile: string,
fields: FieldRow[],
entries: Record<string, unknown>[],
): {
): Promise<{
labels: Record<string, Record<string, string>>;
relatedObjectNames: Record<string, string>;
} {
}> {
const labels: Record<string, Record<string, string>> = {};
const relatedObjectNames: Record<string, string> = {};
@ -157,14 +161,14 @@ function resolveRelationLabels(
);
for (const rf of relationFields) {
const relatedObjs = q<ObjectRow>(dbFile,
const relatedObjs = await q<ObjectRow>(dbFile,
`SELECT * FROM objects WHERE id = '${sqlEscape(rf.related_object_id!)}' LIMIT 1`,
);
if (relatedObjs.length === 0) {continue;}
const relObj = relatedObjs[0];
relatedObjectNames[rf.name] = relObj.name;
const relFields = q<FieldRow>(dbFile,
const relFields = await q<FieldRow>(dbFile,
`SELECT * FROM fields WHERE object_id = '${sqlEscape(relObj.id)}' ORDER BY sort_order`,
);
const displayFieldName = resolveDisplayField(relObj, relFields);
@ -196,7 +200,7 @@ function resolveRelationLabels(
const idList = Array.from(entryIds)
.map((id) => `'${sqlEscape(id)}'`)
.join(",");
const displayRows = q<{ entry_id: string; value: string }>(dbFile,
const displayRows = await q<{ entry_id: string; value: string }>(dbFile,
`SELECT e.id as entry_id, ef.value
FROM entries e
JOIN entry_fields ef ON ef.entry_id = e.id
@ -232,12 +236,12 @@ type ReverseRelation = {
* Find reverse relations: other objects with relation fields pointing TO this object.
* Searches across ALL discovered databases to catch cross-DB relations.
*/
function findReverseRelations(objectId: string): ReverseRelation[] {
const dbPaths = discoverDuckDBPaths();
async function findReverseRelations(objectId: string): Promise<ReverseRelation[]> {
const dbPaths = await discoverDuckDBPathsAsync();
const result: ReverseRelation[] = [];
for (const db of dbPaths) {
const reverseFields = q<
const reverseFields = await q<
FieldRow & { source_object_id: string; source_object_name: string }
>(db,
`SELECT f.*, f.object_id as source_object_id, o.name as source_object_name
@ -248,17 +252,17 @@ function findReverseRelations(objectId: string): ReverseRelation[] {
);
for (const rrf of reverseFields) {
const sourceObjs = q<ObjectRow>(db,
const sourceObjs = await q<ObjectRow>(db,
`SELECT * FROM objects WHERE id = '${sqlEscape(rrf.source_object_id)}' LIMIT 1`,
);
if (sourceObjs.length === 0) {continue;}
const sourceFields = q<FieldRow>(db,
const sourceFields = await q<FieldRow>(db,
`SELECT * FROM fields WHERE object_id = '${sqlEscape(rrf.source_object_id)}' ORDER BY sort_order`,
);
const displayFieldName = resolveDisplayField(sourceObjs[0], sourceFields);
const refRows = q<{ source_entry_id: string; target_value: string }>(db,
const refRows = await q<{ source_entry_id: string; target_value: string }>(db,
`SELECT ef.entry_id as source_entry_id, ef.value as target_value
FROM entry_fields ef
WHERE ef.field_id = '${sqlEscape(rrf.id)}'
@ -270,7 +274,7 @@ function findReverseRelations(objectId: string): ReverseRelation[] {
const sourceEntryIds = [...new Set(refRows.map((r) => r.source_entry_id))];
const idList = sourceEntryIds.map((id) => `'${sqlEscape(id)}'`).join(",");
const displayRows = q<{ entry_id: string; value: string }>(db,
const displayRows = await q<{ entry_id: string; value: string }>(db,
`SELECT ef.entry_id, ef.value
FROM entry_fields ef
JOIN fields f ON f.id = ef.field_id
@ -333,10 +337,10 @@ export async function GET(
}
// Find which DuckDB file contains this object (searches all discovered DBs)
const dbFile = findDuckDBForObject(name);
const dbFile = await findDuckDBForObjectAsync(name);
if (!dbFile) {
// Fall back to primary DB check for a friendlier error message
if (!duckdbPath()) {
if (!await duckdbPathAsync()) {
return Response.json(
{ error: "DuckDB database not found" },
{ status: 404 },
@ -349,10 +353,10 @@ export async function GET(
}
// Ensure display_field column exists on this specific DB
ensureDisplayFieldColumn(dbFile);
await ensureDisplayFieldColumn(dbFile);
// All queries below target the specific DB that owns this object
const objects = q<ObjectRow>(dbFile,
const objects = await q<ObjectRow>(dbFile,
`SELECT * FROM objects WHERE name = '${name}' LIMIT 1`,
);
@ -365,11 +369,15 @@ export async function GET(
const obj = objects[0];
const fields = q<FieldRow>(dbFile,
// Keep same-DB schema reads sequential: parallel DuckDB CLI processes against
// one file can intermittently return empty results, which makes the object
// page oscillate between full and partial schemas during live refreshes.
const fields = await q<FieldRow>(
dbFile,
`SELECT * FROM fields WHERE object_id = '${obj.id}' ORDER BY sort_order`,
);
const statuses = q<StatusRow>(dbFile,
const statuses = await q<StatusRow>(
dbFile,
`SELECT * FROM statuses WHERE object_id = '${obj.id}' ORDER BY sort_order`,
);
@ -429,18 +437,18 @@ export async function GET(
try {
// Get total count with same WHERE clause but no LIMIT/OFFSET
const countResult = q<{ cnt: number }>(dbFile,
const countResult = await q<{ cnt: number }>(dbFile,
`SELECT COUNT(*) as cnt FROM v_${name}${whereClause}`,
);
totalCount = countResult[0]?.cnt ?? 0;
const pivotEntries = q(dbFile,
const pivotEntries = await q(dbFile,
`SELECT * FROM v_${name}${whereClause}${orderByClause}${limitClause}`,
);
entries = pivotEntries;
} catch {
// Pivot view might not exist or filter SQL may not apply; fall back
const rawRows = q<EavRow>(dbFile,
const rawRows = await q<EavRow>(dbFile,
`SELECT e.id as entry_id, e.created_at, e.updated_at,
f.name as field_name, ef.value
FROM entries e
@ -460,7 +468,7 @@ export async function GET(
}));
const { labels: relationLabels, relatedObjectNames } =
resolveRelationLabels(dbFile, fields, entries);
await resolveRelationLabels(dbFile, fields, entries);
const enrichedFields = parsedFields.map((f) => ({
...f,
@ -468,7 +476,7 @@ export async function GET(
f.type === "relation" ? relatedObjectNames[f.name] : undefined,
}));
const reverseRelations = findReverseRelations(obj.id);
const reverseRelations = await findReverseRelations(obj.id);
const effectiveDisplayField = resolveDisplayField(obj, fields);

View File

@ -1,4 +1,4 @@
import { duckdbQuery } from "@/lib/workspace";
import { duckdbQueryAsync } from "@/lib/workspace";
export const dynamic = "force-dynamic";
export const runtime = "nodejs";
@ -38,6 +38,6 @@ export async function POST(req: Request) {
);
}
const rows = duckdbQuery(sql);
const rows = await duckdbQueryAsync(sql);
return Response.json({ rows });
}

View File

@ -1,4 +1,4 @@
import { duckdbQuery } from "@/lib/workspace";
import { duckdbQueryAsync } from "@/lib/workspace";
import { buildFilterClauses, injectFilters, checkSqlSafety } from "@/lib/report-filters";
import type { FilterEntry } from "@/lib/report-filters";
import { trackServer } from "@/lib/telemetry";
@ -44,7 +44,7 @@ export async function POST(req: Request) {
const finalSql = injectFilters(sql, filterClauses);
try {
const rows = duckdbQuery(finalSql);
const rows = await duckdbQueryAsync(finalSql);
trackServer("report_executed");
return Response.json({ rows, sql: finalSql });
} catch (err) {

View File

@ -9,6 +9,15 @@ vi.mock("node:fs", () => ({
statSync: vi.fn(() => ({ isDirectory: () => false, size: 100 })),
}));
vi.mock("node:fs/promises", () => ({
readdir: vi.fn(async () => []),
readFile: vi.fn(async () => ""),
access: vi.fn(async () => {
throw new Error("ENOENT");
}),
stat: vi.fn(async () => ({ isDirectory: () => false, isFile: () => false })),
}));
// Mock node:os
vi.mock("node:os", () => ({
homedir: vi.fn(() => "/home/testuser"),
@ -52,6 +61,14 @@ describe("Workspace Tree & Browse API", () => {
existsSync: vi.fn(() => false),
statSync: vi.fn(() => ({ isDirectory: () => false, size: 100 })),
}));
vi.mock("node:fs/promises", () => ({
readdir: vi.fn(async () => []),
readFile: vi.fn(async () => ""),
access: vi.fn(async () => {
throw new Error("ENOENT");
}),
stat: vi.fn(async () => ({ isDirectory: () => false, isFile: () => false })),
}));
vi.mock("node:os", () => ({
homedir: vi.fn(() => "/home/testuser"),
}));
@ -90,16 +107,15 @@ describe("Workspace Tree & Browse API", () => {
const { resolveWorkspaceRoot, getActiveWorkspaceName } = await import("@/lib/workspace");
vi.mocked(resolveWorkspaceRoot).mockReturnValue("/ws");
vi.mocked(getActiveWorkspaceName).mockReturnValue("default");
const { readdirSync: mockReaddir, existsSync: mockExists } = await import("node:fs");
vi.mocked(mockExists).mockReturnValue(true);
const { readdir: mockReaddir } = await import("node:fs/promises");
vi.mocked(mockReaddir).mockImplementation((dir) => {
if (String(dir) === "/ws") {
return [
return Promise.resolve([
makeDirent("knowledge", true),
makeDirent("readme.md", false),
] as unknown as Dirent[];
] as unknown as Dirent[]);
}
return [] as unknown as Dirent[];
return Promise.resolve([] as unknown as Dirent[]);
});
const { GET } = await import("./tree/route.js");
@ -114,8 +130,6 @@ describe("Workspace Tree & Browse API", () => {
it("includes workspaceRoot in response", async () => {
const { resolveWorkspaceRoot } = await import("@/lib/workspace");
vi.mocked(resolveWorkspaceRoot).mockReturnValue("/ws");
const { existsSync: mockExists } = await import("node:fs");
vi.mocked(mockExists).mockReturnValue(true);
const { GET } = await import("./tree/route.js");
const req = new Request("http://localhost/api/workspace/tree");
@ -127,16 +141,15 @@ describe("Workspace Tree & Browse API", () => {
it("includes root IDENTITY.md in the workspace tree", async () => {
const { resolveWorkspaceRoot } = await import("@/lib/workspace");
vi.mocked(resolveWorkspaceRoot).mockReturnValue("/ws");
const { readdirSync: mockReaddir, existsSync: mockExists } = await import("node:fs");
vi.mocked(mockExists).mockImplementation((p) => String(p) === "/ws");
const { readdir: mockReaddir } = await import("node:fs/promises");
vi.mocked(mockReaddir).mockImplementation((dir) => {
if (String(dir) === "/ws") {
return [
return Promise.resolve([
makeDirent("IDENTITY.md", false),
makeDirent("notes.md", false),
] as unknown as Dirent[];
] as unknown as Dirent[]);
}
return [] as unknown as Dirent[];
return Promise.resolve([] as unknown as Dirent[]);
});
const { GET } = await import("./tree/route.js");
@ -151,27 +164,30 @@ describe("Workspace Tree & Browse API", () => {
it("omits managed crm skill from the virtual skills folder", async () => {
const { resolveWorkspaceRoot } = await import("@/lib/workspace");
vi.mocked(resolveWorkspaceRoot).mockReturnValue("/ws");
const { readdirSync: mockReaddir, existsSync: mockExists } = await import("node:fs");
vi.mocked(mockExists).mockImplementation((p) => {
const { readdir: mockReaddir, access: mockAccess } = await import("node:fs/promises");
vi.mocked(mockAccess).mockImplementation(async (p) => {
const value = String(p);
return (
if (
value === "/ws" ||
value === "/ws/skills" ||
value === "/ws/skills/alpha/SKILL.md" ||
value === "/ws/skills/crm/SKILL.md"
);
) {
return;
}
throw new Error("ENOENT");
});
vi.mocked(mockReaddir).mockImplementation((dir) => {
if (String(dir) === "/ws") {
return [] as unknown as Dirent[];
return Promise.resolve([] as unknown as Dirent[]);
}
if (String(dir) === "/ws/skills") {
return [
return Promise.resolve([
makeDirent("alpha", true),
makeDirent("crm", true),
] as unknown as Dirent[];
] as unknown as Dirent[]);
}
return [] as unknown as Dirent[];
return Promise.resolve([] as unknown as Dirent[]);
});
const { GET } = await import("./tree/route.js");
@ -185,6 +201,41 @@ describe("Workspace Tree & Browse API", () => {
expect(skillPaths).toContain("~skills/alpha/SKILL.md");
expect(skillPaths).not.toContain("~skills/crm/SKILL.md");
});
it("yields before tree discovery completes (prevents UI freeze during active agent runs)", async () => {
const { resolveWorkspaceRoot, duckdbQueryAll, duckdbQueryAllAsync } = await import("@/lib/workspace");
vi.mocked(resolveWorkspaceRoot).mockReturnValue("/ws");
vi.mocked(duckdbQueryAll).mockImplementation(() => {
const start = Date.now();
while (Date.now() - start < 75) {
// busy wait: if the route ever regresses to the sync helper,
// this test should fail on the elapsed-time assertion below.
}
return [];
});
let releaseDuckdb: (rows: Array<{ name: string }>) => void;
const duckdbGate = new Promise<Array<{ name: string }>>((resolve) => {
releaseDuckdb = resolve;
});
vi.mocked(duckdbQueryAllAsync).mockReturnValue(duckdbGate);
const { readdir: mockReaddir } = await import("node:fs/promises");
vi.mocked(mockReaddir).mockResolvedValue([] as unknown as Dirent[]);
const { GET } = await import("./tree/route.js");
const req = new Request("http://localhost/api/workspace/tree");
const startedAt = Date.now();
const responsePromise = GET(req);
const elapsedMs = Date.now() - startedAt;
expect(elapsedMs).toBeLessThan(40);
releaseDuckdb!([]);
const res = await responsePromise;
expect(res.status).toBe(200);
});
});
// ─── GET /api/workspace/browse ──────────────────────────────────

View File

@ -1,6 +1,14 @@
import { readdirSync, readFileSync, existsSync, statSync, type Dirent } from "node:fs";
import type { Dirent } from "node:fs";
import { access, readdir, readFile, stat } from "node:fs/promises";
import { join } from "node:path";
import { resolveWorkspaceRoot, resolveOpenClawStateDir, getActiveWorkspaceName, parseSimpleYaml, duckdbQueryAll, isDatabaseFile } from "@/lib/workspace";
import {
resolveWorkspaceRoot,
resolveOpenClawStateDir,
getActiveWorkspaceName,
parseSimpleYaml,
duckdbQueryAllAsync,
isDatabaseFile,
} from "@/lib/workspace";
export const dynamic = "force-dynamic";
export const runtime = "nodejs";
@ -25,14 +33,24 @@ type DbObject = {
};
/** Read .object.yaml metadata from a directory if it exists. */
function readObjectMeta(
async function pathExists(path: string): Promise<boolean> {
try {
await access(path);
return true;
} catch {
return false;
}
}
/** Read .object.yaml metadata from a directory if it exists. */
async function readObjectMeta(
dirPath: string,
): { icon?: string; defaultView?: string } | null {
): Promise<{ icon?: string; defaultView?: string } | null> {
const yamlPath = join(dirPath, ".object.yaml");
if (!existsSync(yamlPath)) {return null;}
if (!await pathExists(yamlPath)) {return null;}
try {
const content = readFileSync(yamlPath, "utf-8");
const content = await readFile(yamlPath, "utf-8");
const parsed = parseSimpleYaml(content);
return {
icon: parsed.icon as string | undefined,
@ -48,9 +66,9 @@ function readObjectMeta(
* directories even when .object.yaml files are missing.
* Shallower databases win on name conflicts (parent priority).
*/
function loadDbObjects(): Map<string, DbObject> {
async function loadDbObjects(): Promise<Map<string, DbObject>> {
const map = new Map<string, DbObject>();
const rows = duckdbQueryAll<DbObject & { name: string }>(
const rows = await duckdbQueryAllAsync<DbObject & { name: string }>(
"SELECT name, icon, default_view FROM objects",
"name",
);
@ -61,12 +79,15 @@ function loadDbObjects(): Map<string, DbObject> {
}
/** Resolve a dirent's effective type, following symlinks to their target. */
function resolveEntryType(entry: Dirent, absPath: string): "directory" | "file" | null {
async function resolveEntryType(
entry: Dirent,
absPath: string,
): Promise<"directory" | "file" | null> {
if (entry.isDirectory()) {return "directory";}
if (entry.isFile()) {return "file";}
if (entry.isSymbolicLink()) {
try {
const st = statSync(absPath);
const st = await stat(absPath);
if (st.isDirectory()) {return "directory";}
if (st.isFile()) {return "file";}
} catch {
@ -77,17 +98,17 @@ function resolveEntryType(entry: Dirent, absPath: string): "directory" | "file"
}
/** Recursively build a tree from a workspace directory. */
function buildTree(
async function buildTree(
absDir: string,
relativeBase: string,
dbObjects: Map<string, DbObject>,
showHidden = false,
): TreeNode[] {
): Promise<TreeNode[]> {
const nodes: TreeNode[] = [];
let entries: Dirent[];
try {
entries = readdirSync(absDir, { withFileTypes: true });
entries = await readdir(absDir, { withFileTypes: true });
} catch {
return nodes;
}
@ -100,33 +121,33 @@ function buildTree(
});
// Sort: directories first, then files, alphabetical within each group
const sorted = filtered.toSorted((a, b) => {
const absA = join(absDir, a.name);
const absB = join(absDir, b.name);
const typeA = resolveEntryType(a, absA);
const typeB = resolveEntryType(b, absB);
const dirA = typeA === "directory";
const dirB = typeB === "directory";
const typedEntries = await Promise.all(filtered.map(async (entry) => {
const absPath = join(absDir, entry.name);
const effectiveType = await resolveEntryType(entry, absPath);
return { entry, absPath, effectiveType };
}));
const sorted = typedEntries.toSorted((a, b) => {
const dirA = a.effectiveType === "directory";
const dirB = b.effectiveType === "directory";
if (dirA && !dirB) {return -1;}
if (!dirA && dirB) {return 1;}
return a.name.localeCompare(b.name);
return a.entry.name.localeCompare(b.entry.name);
});
for (const entry of sorted) {
for (const { entry, absPath, effectiveType } of sorted) {
// .object.yaml is consumed for metadata; only show it as a visible node when revealing hidden files
if (entry.name === ".object.yaml" && !showHidden) {continue;}
const absPath = join(absDir, entry.name);
const relPath = relativeBase
? `${relativeBase}/${entry.name}`
: entry.name;
const isSymlink = entry.isSymbolicLink();
const effectiveType = resolveEntryType(entry, absPath);
if (effectiveType === "directory") {
const objectMeta = readObjectMeta(absPath);
const objectMeta = await readObjectMeta(absPath);
const dbObject = dbObjects.get(entry.name);
const children = buildTree(absPath, relPath, dbObjects, showHidden);
const children = await buildTree(absPath, relPath, dbObjects, showHidden);
if (objectMeta || dbObject) {
nodes.push({
@ -184,7 +205,7 @@ function parseSkillFrontmatter(content: string): { name?: string; emoji?: string
}
/** Build a virtual "Skills" folder from <workspace>/skills/. */
function buildSkillsVirtualFolder(): TreeNode | null {
async function buildSkillsVirtualFolder(): Promise<TreeNode | null> {
const workspaceRoot = resolveWorkspaceRoot();
if (!workspaceRoot) {
return null;
@ -195,19 +216,19 @@ function buildSkillsVirtualFolder(): TreeNode | null {
const seen = new Set<string>();
for (const dir of dirs) {
if (!existsSync(dir)) {continue;}
if (!await pathExists(dir)) {continue;}
try {
const entries = readdirSync(dir, { withFileTypes: true });
const entries = await readdir(dir, { withFileTypes: true });
for (const entry of entries) {
if (!entry.isDirectory() || seen.has(entry.name)) {continue;}
if (entry.name === "crm" || entry.name === "browser") {continue;}
const skillMdPath = join(dir, entry.name, "SKILL.md");
if (!existsSync(skillMdPath)) {continue;}
if (!await pathExists(skillMdPath)) {continue;}
seen.add(entry.name);
let displayName = entry.name;
try {
const content = readFileSync(skillMdPath, "utf-8");
const content = await readFile(skillMdPath, "utf-8");
const meta = parseSkillFrontmatter(content);
if (meta.name) {displayName = meta.name;}
if (meta.emoji) {displayName = `${meta.emoji} ${displayName}`;}
@ -249,16 +270,16 @@ export async function GET(req: Request) {
const root = resolveWorkspaceRoot();
if (!root) {
const tree: TreeNode[] = [];
const skillsFolder = buildSkillsVirtualFolder();
const skillsFolder = await buildSkillsVirtualFolder();
if (skillsFolder) {tree.push(skillsFolder);}
return Response.json({ tree, exists: false, workspaceRoot: null, openclawDir, workspace });
}
const dbObjects = loadDbObjects();
const dbObjects = await loadDbObjects();
const tree = buildTree(root, "", dbObjects, showHidden);
const tree = await buildTree(root, "", dbObjects, showHidden);
const skillsFolder = buildSkillsVirtualFolder();
const skillsFolder = await buildSkillsVirtualFolder();
if (skillsFolder) {tree.push(skillsFolder);}
return Response.json({ tree, exists: true, workspaceRoot: root, openclawDir, workspace });

View File

@ -757,7 +757,7 @@ function FeedbackButtons({ messageId, sessionId }: { messageId: string; sessionI
/* ─── Chat message ─── */
export const ChatMessage = memo(function ChatMessage({ message, isStreaming, onSubagentClick, onFilePathClick, sessionId }: { message: UIMessage; isStreaming?: boolean; onSubagentClick?: (task: string) => void; onFilePathClick?: FilePathClickHandler; sessionId?: string | null }) {
export const ChatMessage = memo(function ChatMessage({ message, isStreaming, onSubagentClick, onFilePathClick, sessionId, userHtmlMap }: { message: UIMessage; isStreaming?: boolean; onSubagentClick?: (task: string) => void; onFilePathClick?: FilePathClickHandler; sessionId?: string | null; userHtmlMap?: Map<string, string> }) {
const isUser = message.role === "user";
const segments = groupParts(message.parts);
const markdownComponents = useMemo(
@ -766,7 +766,6 @@ export const ChatMessage = memo(function ChatMessage({ message, isStreaming, onS
);
if (isUser) {
// User: right-aligned subtle pill
const textContent = segments
.filter(
(s): s is { type: "text"; text: string } =>
@ -775,16 +774,18 @@ export const ChatMessage = memo(function ChatMessage({ message, isStreaming, onS
.map((s) => s.text)
.join("\n");
// Parse attachment prefix from sent messages
const attachmentInfo = parseAttachments(textContent);
const richHtml = userHtmlMap?.get(message.id) ?? userHtmlMap?.get(textContent) ?? userHtmlMap?.get(attachmentInfo?.message ?? "");
const bubbleContent = richHtml
? <div className="chat-user-html-content" dangerouslySetInnerHTML={{ __html: richHtml }} />
: <p className="whitespace-pre-wrap break-words">{attachmentInfo?.message ?? textContent}</p>;
if (attachmentInfo) {
return (
<div className="flex flex-col items-end gap-1.5 py-2">
{/* Attachment previews — standalone above the text bubble */}
<AttachedFilesCard paths={attachmentInfo.paths} />
{/* Text bubble */}
{attachmentInfo.message && (
{(attachmentInfo.message || richHtml) && (
<div
className="max-w-[80%] w-fit rounded-2xl rounded-br-sm px-3 py-2 text-sm leading-6 break-words chat-message-font"
style={{
@ -792,9 +793,7 @@ export const ChatMessage = memo(function ChatMessage({ message, isStreaming, onS
color: "var(--color-user-bubble-text)",
}}
>
<p className="whitespace-pre-wrap break-words">
{attachmentInfo.message}
</p>
{bubbleContent}
</div>
)}
</div>
@ -810,9 +809,7 @@ export const ChatMessage = memo(function ChatMessage({ message, isStreaming, onS
color: "var(--color-user-bubble-text)",
}}
>
<p className="whitespace-pre-wrap break-words text-right">
{textContent}
</p>
{bubbleContent}
</div>
</div>
);

View File

@ -765,6 +765,7 @@ type FileScopedSession = {
type QueuedMessage = {
id: string;
text: string;
html: string;
mentionedFiles: Array<{ name: string; path: string }>;
attachedFiles: AttachedFile[];
createdAt: number;
@ -856,6 +857,9 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
const [isReconnecting, setIsReconnecting] = useState(false);
const reconnectAbortRef = useRef<AbortController | null>(null);
// ── Stream-level error (empty response detection) ──
const [streamError, setStreamError] = useState<string | null>(null);
// Track persisted messages to avoid double-saves
const savedMessageIdsRef = useRef<Set<string>>(new Set());
// Set when /new or + triggers a new session
@ -868,6 +872,10 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
FileScopedSession[]
>([]);
// ── Rich HTML for user messages (keyed by message ID or text fallback) ──
const userHtmlMapRef = useRef(new Map<string, string>());
const pendingHtmlRef = useRef<string | null>(null);
// ── Message queue (messages to send after current run completes) ──
const [queuedMessages, setQueuedMessages] = useState<QueuedMessage[]>([]);
const [rawView, _setRawView] = useState(false);
@ -926,10 +934,16 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
new DefaultChatTransport({
api: "/api/chat",
body: () => {
const extra: Record<string, unknown> = {};
const sk = subagentSessionKeyRef.current;
if (sk) {return { sessionKey: sk };}
if (sk) {extra.sessionKey = sk;}
const sid = sessionIdRef.current;
return sid ? { sessionId: sid } : {};
if (sid) {extra.sessionId = sid;}
if (pendingHtmlRef.current) {
extra.userHtml = pendingHtmlRef.current;
pendingHtmlRef.current = null;
}
return extra;
},
}),
[],
@ -1195,11 +1209,10 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
role: "user" | "assistant";
content: string;
parts?: Array<Record<string, unknown>>;
html?: string;
_streaming?: boolean;
}> = msgData.messages || [];
// Filter out in-progress streaming messages
// (will be rebuilt from the live SSE stream)
const hasStreaming = sessionMessages.some(
(m) => m._streaming,
);
@ -1209,6 +1222,12 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
)
: sessionMessages;
for (const msg of completedMessages) {
if (msg.role === "user" && msg.html) {
userHtmlMapRef.current.set(msg.id, msg.html);
}
}
const uiMessages = completedMessages.map(
(msg) => {
savedMessageIdsRef.current.add(msg.id);
@ -1224,9 +1243,9 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
};
},
);
if (!cancelled) {
setMessages(uiMessages);
}
if (!cancelled) {
setMessages(uiMessages);
}
if (!cancelled) {
await attemptReconnect(
@ -1292,6 +1311,7 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
role: "user" | "assistant";
content: string;
parts?: Array<Record<string, unknown>>;
html?: string;
_streaming?: boolean;
}> = msgData.messages || [];
@ -1300,6 +1320,11 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
: sessionMessages;
if (completedMessages.length > 0) {
for (const msg of completedMessages) {
if (msg.role === "user" && msg.html) {
userHtmlMapRef.current.set(msg.id, msg.html);
}
}
const uiMessages = completedMessages.map((msg) => {
savedMessageIdsRef.current.add(msg.id);
return {
@ -1313,7 +1338,6 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
setMessages(baseMessages);
}
}
} else {
// No persisted session file — use task message only
}
@ -1427,6 +1451,36 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
onSessionsChange,
]);
// ── Empty-stream error detection ──
// When the stream completes (submitted/streaming → ready) but no
// assistant message was produced, surface an error so the user knows
// the request was lost.
useEffect(() => {
const wasActive =
prevStatusRef.current === "streaming" ||
prevStatusRef.current === "submitted";
const isNowReady = status === "ready";
if (wasActive && isNowReady) {
const lastMsg = messages[messages.length - 1];
const hasAssistantContent =
lastMsg?.role === "assistant" &&
lastMsg.parts.some(
(p) =>
(p.type === "text" && (p as { text: string }).text.trim().length > 0) ||
p.type === "tool-invocation",
);
if (!hasAssistantContent && !error) {
setStreamError("No response received from agent.");
} else {
setStreamError(null);
}
}
if (status === "submitted") {
setStreamError(null);
}
}, [status, messages, error]);
// ── Actions ──
// Ref for handleNewSession so handleEditorSubmit doesn't depend on the hook order
@ -1438,6 +1492,7 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
async (
text: string,
mentionedFiles: Array<{ name: string; path: string }>,
html: string,
overrideAttachments?: AttachedFile[],
) => {
const hasText = text.trim().length > 0;
@ -1466,7 +1521,6 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
// Queue the message if the agent is still running.
if (isStreaming) {
// Clear attachment strip but keep blob URLs alive for queue thumbnails
if (!overrideAttachments) {
setAttachedFiles([]);
}
@ -1475,6 +1529,7 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
{
id: crypto.randomUUID(),
text: userText,
html,
mentionedFiles,
attachedFiles: currentAttachments,
createdAt: Date.now(),
@ -1505,13 +1560,13 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
onActiveSessionChange?.(sessionId);
onSessionsChange?.();
if (filePath) {
void fetchFileSessionsRef.current?.().then(
(sessions) => {
setFileSessions(sessions);
},
);
}
if (filePath) {
void fetchFileSessionsRef.current?.().then(
(sessions) => {
setFileSessions(sessions);
},
);
}
}
// Build message with optional attachment prefix
@ -1535,10 +1590,13 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
isFirstFileMessageRef.current = false;
}
// Reset scroll lock so we auto-scroll to the new user message
userScrolledAwayRef.current = false;
void sendMessage({ text: messageText });
},
// Store HTML for display and pipe to server via transport
userHtmlMapRef.current.set(messageText, html);
pendingHtmlRef.current = html;
userScrolledAwayRef.current = false;
void sendMessage({ text: messageText });
},
[
attachedFiles,
isStreaming,
@ -1570,7 +1628,7 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
}
// Use a microtask so React can settle the status update first.
queueMicrotask(() => {
void handleEditorSubmit(next.text, next.mentionedFiles, next.attachedFiles);
void handleEditorSubmit(next.text, next.mentionedFiles, next.html, next.attachedFiles);
});
}
}, [status, queuedMessages, handleEditorSubmit]);
@ -1607,6 +1665,7 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
role: "user" | "assistant";
content: string;
parts?: Array<Record<string, unknown>>;
html?: string;
_streaming?: boolean;
}> = data.messages || [];
@ -1619,39 +1678,46 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
)
: sessionMessages;
const uiMessages = completedMessages.map(
(msg) => {
savedMessageIdsRef.current.add(msg.id);
return {
id: msg.id,
role: msg.role,
parts: (msg.parts ?? [
{
type: "text" as const,
text: msg.content,
},
]) as UIMessage["parts"],
};
},
);
userHtmlMapRef.current.clear();
for (const msg of completedMessages) {
if (msg.role === "user" && msg.html) {
userHtmlMapRef.current.set(msg.id, msg.html);
}
}
setMessages(uiMessages);
const uiMessages = completedMessages.map(
(msg) => {
savedMessageIdsRef.current.add(msg.id);
return {
id: msg.id,
role: msg.role,
parts: (msg.parts ?? [
{
type: "text" as const,
text: msg.content,
},
]) as UIMessage["parts"],
};
},
);
// Clear loading state *before* reconnecting — the
// persisted messages are now visible. attemptReconnect
// manages its own `isReconnecting` state which shows
// "Resuming stream..." instead of "Loading session...".
setLoadingSession(false);
setMessages(uiMessages);
// Always try to reconnect -- the stream endpoint
// returns 404 gracefully if no active run exists,
// and this avoids missing runs whose _streaming
// flag hasn't been persisted yet.
await attemptReconnect(sessionId, uiMessages);
} catch (err) {
console.error("Error loading session:", err);
setLoadingSession(false);
}
// Clear loading state *before* reconnecting — the
// persisted messages are now visible. attemptReconnect
// manages its own `isReconnecting` state which shows
// "Resuming stream..." instead of "Loading session...".
setLoadingSession(false);
// Always try to reconnect -- the stream endpoint
// returns 404 gracefully if no active run exists,
// and this avoids missing runs whose _streaming
// flag hasn't been persisted yet.
await attemptReconnect(sessionId, uiMessages);
} catch (err) {
console.error("Error loading session:", err);
setLoadingSession(false);
}
},
[
currentSessionId,
@ -1662,19 +1728,19 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
],
);
const handleNewSession = useCallback(() => {
reconnectAbortRef.current?.abort();
void stop();
const handleNewSession = useCallback(() => {
reconnectAbortRef.current?.abort();
void stop();
setIsReconnecting(false);
setCurrentSessionId(null);
sessionIdRef.current = null;
onActiveSessionChange?.(null);
setMessages([]);
savedMessageIdsRef.current.clear();
userHtmlMapRef.current.clear();
isFirstFileMessageRef.current = true;
newSessionPendingRef.current = false;
setQueuedMessages([]);
// Focus the chat input after state updates so "New Chat" is ready to type.
requestAnimationFrame(() => {
editorRef.current?.focus();
});
@ -1758,7 +1824,7 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
await handleStop();
// Submit the message after a short delay to let status settle.
setTimeout(() => {
void handleEditorSubmit(msg.text, msg.mentionedFiles, msg.attachedFiles);
void handleEditorSubmit(msg.text, msg.mentionedFiles, msg.html, msg.attachedFiles);
}, 100);
},
[queuedMessages, handleStop, handleEditorSubmit],
@ -2403,6 +2469,7 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
onSubagentClick={onSubagentClick}
onFilePathClick={onFilePathClick}
sessionId={currentSessionId}
userHtmlMap={userHtmlMapRef.current}
/>
))}
{showInlineSpinner && (
@ -2419,8 +2486,8 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
)}
</div>
{/* Transport-level error display */}
{error && (
{/* Transport / stream-level error display */}
{(error || streamError) && (
<div
className="px-3 py-2 flex items-center gap-2 sticky bottom-[72px] z-10"
style={{
@ -2454,7 +2521,7 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
y2="16"
/>
</svg>
<p className="text-xs">{error.message}</p>
<p className="text-xs">{error?.message ?? streamError}</p>
</div>
)}
</div>

View File

@ -37,7 +37,7 @@ export type ChatEditorHandle = {
type ChatEditorProps = {
/** Called when user presses Enter (without Shift). */
onSubmit: (text: string, mentionedFiles: Array<{ name: string; path: string }>) => void;
onSubmit: (text: string, mentionedFiles: Array<{ name: string; path: string }>, html: string) => void;
/** Called on every content change. */
onChange?: (isEmpty: boolean) => void;
/** Called when native files (e.g. from Finder/Desktop) are dropped onto the editor. */
@ -102,39 +102,40 @@ function serializeContent(editor: ReturnType<typeof useEditor>): {
if (!editor) {return { text: "", mentionedFiles: [] };}
const mentionedFiles: Array<{ name: string; path: string }> = [];
const parts: string[] = [];
const lines: string[] = [];
editor.state.doc.descendants((node) => {
if (node.type.name === "chatFileMention") {
const label = node.attrs.label as string;
const path = node.attrs.path as string;
const mType = node.attrs.mentionType as string;
const objectName = node.attrs.objectName as string;
mentionedFiles.push({ name: label, path });
if (mType === "object") {
parts.push(`[object: ${label}]`);
} else if (mType === "entry") {
parts.push(`[entry: ${objectName ? `${objectName}/` : ""}${label}]`);
} else {
parts.push(`[file: ${path}]`);
}
return false;
editor.state.doc.forEach((node) => {
if (node.type.name === "paragraph" || node.type.name === "hardBreak") {
let lineText = "";
node.descendants((child) => {
if (child.type.name === "chatFileMention") {
const label = child.attrs.label as string;
const path = child.attrs.path as string;
const mType = child.attrs.mentionType as string;
const objectName = child.attrs.objectName as string;
mentionedFiles.push({ name: label, path });
if (mType === "object") {
lineText += `[object: ${label}]`;
} else if (mType === "entry") {
lineText += `[entry: ${objectName ? `${objectName}/` : ""}${label}]`;
} else {
lineText += `[file: ${path}]`;
}
return false;
}
if (child.isText && child.text) {
lineText += child.text;
}
if (child.type.name === "hardBreak") {
lineText += "\n";
}
return true;
});
lines.push(lineText);
}
if (node.isText && node.text) {
parts.push(node.text);
}
if (node.type.name === "paragraph" && parts.length > 0) {
const lastPart = parts[parts.length - 1];
if (lastPart !== undefined && lastPart !== "\n") {
parts.push("\n");
}
}
return true;
});
return { text: parts.join("").trim(), mentionedFiles };
return { text: lines.join("\n").trim(), mentionedFiles };
}
// ── File mention suggestion extension (wired to the async popup) ──
@ -359,14 +360,14 @@ export const ChatEditor = forwardRef<ChatEditorHandle, ChatEditorProps>(
const handleKeyDown = (event: KeyboardEvent) => {
if (event.key === "Enter" && !event.shiftKey && !event.isComposing) {
// Check if suggestion popup is active by checking if the plugin has active state
const suggestState = chatFileMentionPluginKey.getState(editor.state);
if (suggestState?.active) {return;} // Let suggestion handle it
if (suggestState?.active) {return;}
event.preventDefault();
const { text, mentionedFiles } = serializeContent(editor);
if (text.trim() || mentionedFiles.length > 0) {
submitRef.current(text, mentionedFiles);
const html = editor.getHTML();
submitRef.current(text, mentionedFiles, html);
editor.commands.clearContent(true);
}
}
@ -411,7 +412,8 @@ export const ChatEditor = forwardRef<ChatEditorHandle, ChatEditorProps>(
if (!editor) {return;}
const { text, mentionedFiles } = serializeContent(editor);
if (text.trim() || mentionedFiles.length > 0) {
submitRef.current(text, mentionedFiles);
const html = editor.getHTML();
submitRef.current(text, mentionedFiles, html);
editor.commands.clearContent(true);
}
},

View File

@ -3,6 +3,8 @@
import { useEffect, useState, useCallback, useRef } from "react";
import { RelationSelect } from "./relation-select";
import { FormattedFieldValue } from "./formatted-field-value";
import { formatWorkspaceFieldValue } from "@/lib/workspace-cell-format";
import { parseTagsValue } from "@/lib/parse-tags";
function safeString(val: unknown): string {
@ -211,6 +213,116 @@ function RelationChips({
);
}
/**
 * Read-only display of a "tags" field: each parsed tag renders as a rounded
 * chip. Tags that the workspace formatter recognizes as links (URLs, emails,
 * files, …) become clickable anchors; everything else is plain text.
 * Renders the shared empty placeholder when the value parses to no tags.
 */
function TagsBadges({ value }: { value: unknown }) {
  const parsed = parseTagsValue(value);
  if (parsed.length === 0) {
    return <EmptyValue />;
  }
  // Shared chip appearance; per-chip text color is layered on top below.
  const chipStyle = { background: "rgba(148, 163, 184, 0.12)", border: "1px solid var(--color-border)" };
  const chips = parsed.map((tag) => {
    const fmt = formatWorkspaceFieldValue(tag);
    if (fmt.kind === "link" && fmt.href) {
      // URLs and file links open in a new tab; mailto/tel links stay in-page.
      const external = fmt.linkType === "url" || fmt.linkType === "file";
      return (
        <a
          key={tag}
          href={fmt.href}
          target={external ? "_blank" : undefined}
          rel={external ? "noopener noreferrer" : undefined}
          onClick={(e) => e.stopPropagation()}
          className="inline-flex items-center px-2.5 py-1 rounded-full text-xs font-medium underline-offset-2 hover:underline"
          style={{ ...chipStyle, color: "var(--color-accent)" }}
        >
          {fmt.text}
        </a>
      );
    }
    return (
      <span
        key={tag}
        className="inline-flex items-center px-2.5 py-1 rounded-full text-xs font-medium"
        style={{ ...chipStyle, color: "var(--color-text-muted)" }}
      >
        {tag}
      </span>
    );
  });
  return <span className="flex items-center gap-1.5 flex-wrap">{chips}</span>;
}
/**
 * Inline editor for a "tags" field: existing tags render as removable chips
 * and a trailing text input appends new ones.
 *
 * The field value round-trips through `onChange` as a JSON string array
 * ("" when the last tag is removed). Enter or "," commits the draft text;
 * Backspace on an empty draft removes the last chip; blur commits any
 * pending draft so typed text is not silently lost.
 */
function TagsEditInput({
  value,
  onChange,
  autoFocus,
}: {
  value: string;
  onChange: (val: string) => void;
  autoFocus?: boolean;
}) {
  const tags = parseTagsValue(value);
  const [draft, setDraft] = useState("");
  const fieldRef = useRef<HTMLInputElement>(null);

  useEffect(() => {
    if (autoFocus && fieldRef.current) {
      fieldRef.current.focus();
    }
  }, [autoFocus]);

  /** Append a trimmed, non-duplicate tag; only then clear the draft. */
  const commitTag = (raw: string) => {
    const trimmed = raw.trim();
    if (!trimmed || tags.includes(trimmed)) {
      return;
    }
    onChange(JSON.stringify([...tags, trimmed]));
    setDraft("");
  };

  /** Remove one tag; an empty result is stored as "" rather than "[]". */
  const dropTag = (tag: string) => {
    const remaining = tags.filter((t) => t !== tag);
    onChange(remaining.length > 0 ? JSON.stringify(remaining) : "");
  };

  return (
    <div className="flex items-center gap-1.5 flex-wrap min-h-[1.75rem]">
      {tags.map((tag) => (
        <span
          key={tag}
          className="inline-flex items-center gap-0.5 px-2.5 py-1 rounded-full text-xs font-medium"
          style={{ background: "rgba(148, 163, 184, 0.12)", color: "var(--color-text-muted)", border: "1px solid var(--color-border)" }}
        >
          {tag}
          <button
            type="button"
            onClick={() => dropTag(tag)}
            className="ml-0.5 hover:opacity-70"
            style={{ color: "var(--color-text-muted)" }}
          >
            <svg width="10" height="10" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2.5"><path d="M18 6 6 18" /><path d="m6 6 12 12" /></svg>
          </button>
        </span>
      ))}
      <input
        ref={fieldRef}
        type="text"
        value={draft}
        onChange={(e) => setDraft(e.target.value)}
        onKeyDown={(e) => {
          if ((e.key === "Enter" || e.key === ",") && draft.trim()) {
            e.preventDefault();
            commitTag(draft);
          }
          if (e.key === "Backspace" && !draft && tags.length > 0) {
            const last = tags[tags.length - 1];
            if (last !== undefined) {
              dropTag(last);
            }
          }
        }}
        onBlur={() => {
          if (draft.trim()) {
            commitTag(draft);
          }
        }}
        placeholder={tags.length === 0 ? "Type and press Enter..." : ""}
        className="flex-1 min-w-[80px] text-sm outline-none bg-transparent"
        style={{ color: "var(--color-text)" }}
      />
    </div>
  );
}
function EmptyValue() {
return (
<span style={{ color: "var(--color-text-muted)", opacity: 0.5 }}>--</span>
@ -311,6 +423,8 @@ function FieldValue({
onNavigateEntry={onNavigateEntry}
/>
);
case "tags":
return <TagsBadges value={value} />;
case "email":
case "number":
case "date":
@ -319,7 +433,7 @@ function FieldValue({
case "file":
return <FormattedFieldValue value={value} fieldType={field.type} mode="detail" />;
case "richtext":
return <span className="whitespace-pre-wrap">{safeString(value)}</span>;
return <FormattedFieldValue value={value} fieldType={field.type} mode="detail" />;
default:
return <FormattedFieldValue value={value} fieldType={field.type} mode="detail" />;
}
@ -556,7 +670,20 @@ export function EntryDetailModal({
style={{ color: "var(--color-text)" }}
>
{editingField === field.name ? (
field.type === "relation" && field.related_object_name ? (
field.type === "tags" ? (
<div className="flex items-center gap-2 w-full">
<div className="flex-1 px-2 py-1 rounded-lg" style={{ background: "var(--color-surface-hover)", border: "2px solid var(--color-accent)" }}>
<TagsEditInput
value={safeString(value)}
onChange={(v) => { void handleSaveField(field.name, v); }}
autoFocus
/>
</div>
<button type="button" onClick={() => setEditingField(null)} className="px-2 py-1 text-xs rounded-lg flex-shrink-0" style={{ color: "var(--color-text-muted)", border: "1px solid var(--color-border)" }}>
Done
</button>
</div>
) : field.type === "relation" && field.related_object_name ? (
<div className="flex items-center gap-2 w-full">
<div className="flex-1">
<RelationSelect

View File

@ -789,6 +789,8 @@ export function FileManagerTree({ tree, activePath, onSelect, onRefresh, compact
const [newItemPrompt, setNewItemPrompt] = useState<{ kind: "file" | "folder"; parentPath: string } | null>(null);
const containerRef = useRef<HTMLDivElement>(null);
const dragExpandTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null);
const currentDragOverRef = useRef<string | null>(null);
// Persist expanded paths to localStorage whenever they change
useEffect(() => {
@ -820,22 +822,32 @@ export function FileManagerTree({ tree, activePath, onSelect, onRefresh, compact
const handleDragOver = useCallback((event: DragOverEvent) => {
const overData = event.over?.data.current as { node?: TreeNode; rootDrop?: boolean } | undefined;
if (overData?.rootDrop) {
if (currentDragOverRef.current !== "__root__") {
if (dragExpandTimerRef.current) clearTimeout(dragExpandTimerRef.current);
currentDragOverRef.current = "__root__";
}
setDragOverPath("__root__");
} else if (overData?.node) {
setDragOverPath(overData.node.path);
// Auto-expand folders on drag hover (300ms delay)
const path = overData.node.path;
if (overData.node.type === "folder" || overData.node.type === "object") {
setTimeout(() => {
setExpandedPaths((prev) => {
if (prev.has(path)) {return prev;}
const next = new Set(prev);
next.add(path);
return next;
});
}, 300);
setDragOverPath(path);
if (currentDragOverRef.current !== path) {
if (dragExpandTimerRef.current) clearTimeout(dragExpandTimerRef.current);
currentDragOverRef.current = path;
if (overData.node.type === "folder" || overData.node.type === "object") {
dragExpandTimerRef.current = setTimeout(() => {
if (currentDragOverRef.current !== path) return;
setExpandedPaths((prev) => {
if (prev.has(path)) return prev;
const next = new Set(prev);
next.add(path);
return next;
});
}, 300);
}
}
} else {
if (dragExpandTimerRef.current) clearTimeout(dragExpandTimerRef.current);
currentDragOverRef.current = null;
setDragOverPath(null);
}
}, []);
@ -844,6 +856,8 @@ export function FileManagerTree({ tree, activePath, onSelect, onRefresh, compact
async (event: DragEndEvent) => {
setActiveNode(null);
setDragOverPath(null);
if (dragExpandTimerRef.current) clearTimeout(dragExpandTimerRef.current);
currentDragOverRef.current = null;
removePointerTracker();
const activeData = event.active.data.current as { node: TreeNode } | undefined;
@ -901,6 +915,8 @@ export function FileManagerTree({ tree, activePath, onSelect, onRefresh, compact
const handleDragCancel = useCallback(() => {
setActiveNode(null);
setDragOverPath(null);
if (dragExpandTimerRef.current) clearTimeout(dragExpandTimerRef.current);
currentDragOverRef.current = null;
removePointerTracker();
}, [removePointerTracker]);

View File

@ -1,5 +1,6 @@
"use client";
import { Fragment } from "react";
import { formatWorkspaceFieldValue } from "@/lib/workspace-cell-format";
type FormattedFieldValueProps = {
@ -53,6 +54,50 @@ function FileEmbed({
);
}
/**
 * Normalize newline representations in a stored field value to "\n".
 *
 * Handles two forms:
 *  - literal two-character escape sequences ("\r\n", "\n", "\r") as they
 *    appear when newlines were stored escaped in flat values, and
 *  - real CR / CRLF characters, so the caller's split-on-"\n" rendering
 *    never leaves a stray "\r" at the end of a line and CR-only newlines
 *    still produce line breaks.
 */
function normalizeNewlines(text: string): string {
  return text
    // Longest alternative first so "\r\n" is consumed as one break.
    .replace(/\\r\\n|\\n|\\r/g, "\n")
    .replace(/\r\n|\r/g, "\n");
}
/**
* Render a single line/segment with auto-detected formatting.
* For text/richtext fields, uses heuristic detection so URLs, emails,
* phone numbers are rendered as clickable links.
*/
/**
 * Render one line of a field value with lightweight auto-formatting.
 *
 * For plain "text"/"richtext" fields (or when no field type is given) the
 * workspace formatter runs in heuristic mode so URLs, emails, and phone
 * numbers become clickable links; for typed fields the declared type is
 * passed through. Numbers/currency get tabular figures; anything else
 * renders as formatted text.
 */
function FormattedSegment({ text, fieldType }: { text: string; fieldType?: string }) {
  const trimmed = text.trim();
  if (!trimmed) {
    return <>{text}</>;
  }
  const isFreeform = !fieldType || fieldType === "text" || fieldType === "richtext";
  const detectType = isFreeform ? undefined : fieldType;
  const fmt = formatWorkspaceFieldValue(trimmed, detectType);
  switch (fmt.kind) {
    case "link": {
      if (!fmt.href) {
        break;
      }
      // In heuristic mode (text/richtext), file-path detection is prone to
      // false positives on prose that happens to contain slashes and dotted
      // words (e.g. "Alternate/legacy domains: getgc.ai, gc.ai").
      // Only trust file links when the line has no spaces (a standalone path).
      if (!detectType && fmt.linkType === "file" && trimmed.includes(" ")) {
        return <>{fmt.text}</>;
      }
      const openInNewTab = fmt.linkType === "url" || fmt.linkType === "file";
      return (
        <a
          href={fmt.href}
          {...(openInNewTab ? { target: "_blank", rel: "noopener noreferrer" } : {})}
          className="underline underline-offset-2"
          style={{ color: "var(--color-accent)" }}
          onClick={(e) => e.stopPropagation()}
        >
          {fmt.text}
        </a>
      );
    }
    case "number":
    case "currency":
      return <span className="tabular-nums">{fmt.text}</span>;
    default:
      break;
  }
  return <>{fmt.text}</>;
}
export function FormattedFieldValue({
value,
fieldType,
@ -61,12 +106,32 @@ export function FormattedFieldValue({
}: FormattedFieldValueProps) {
const formatted = formatWorkspaceFieldValue(value, fieldType);
const isTableMode = mode === "table";
const textClassName = className ?? (isTableMode ? "truncate block max-w-[300px]" : "break-words");
if (formatted.kind === "empty") {
return <EmptyValue />;
}
const displayText = normalizeNewlines(formatted.text);
const hasNewlines = displayText.includes("\n");
if (hasNewlines) {
const lines = displayText.split("\n");
const containerClass = className ?? (isTableMode ? "block max-w-[300px] line-clamp-3" : "break-words");
return (
<span className={containerClass}>
{lines.map((line, i) => (
<Fragment key={i}>
{i > 0 && <br />}
<FormattedSegment text={line} fieldType={fieldType} />
</Fragment>
))}
</span>
);
}
// Single-line: full formatting with embeds
const textClassName = className ?? (isTableMode ? "truncate block max-w-[300px]" : "break-words");
if (formatted.kind === "link" && formatted.href) {
const openInNewTab = formatted.linkType === "url" || formatted.linkType === "file";
const canEmbedInModal = !isTableMode && !!formatted.embedUrl && !!formatted.mediaType;
@ -96,5 +161,5 @@ export function FormattedFieldValue({
return <span className={`tabular-nums ${textClassName}`}>{formatted.text}</span>;
}
return <span className={textClassName}>{formatted.text}</span>;
return <span className={textClassName}>{displayText}</span>;
}

View File

@ -569,7 +569,7 @@ function FilterRuleRow({
{/* Value editor */}
{!noValueNeeded && (
<>
{(fieldType === "text" || fieldType === "richtext" || fieldType === "email") && (
{(fieldType === "text" || fieldType === "richtext" || fieldType === "email" || fieldType === "tags") && (
<TextValueEditor
value={String(rule.value ?? "")}
onChange={(v) => onUpdate({ value: v })}

View File

@ -2,6 +2,8 @@
import { useMemo } from "react";
import { FormattedFieldValue } from "./formatted-field-value";
import { formatWorkspaceFieldValue } from "@/lib/workspace-cell-format";
import { parseTagsValue } from "@/lib/parse-tags";
// ---------------------------------------------------------------------------
// Types
@ -125,6 +127,7 @@ function GalleryCard({
{displayFields.map((field) => {
const val = entry[field.name];
if (val == null || safeString(val) === "") {return null;}
const tags = field.type === "tags" ? parseTagsValue(val) : [];
return (
<div key={field.id} className="flex items-baseline gap-2">
<span
@ -133,8 +136,23 @@ function GalleryCard({
>
{field.name}
</span>
<div className="text-[12px] truncate" style={{ color: "var(--color-text)" }}>
<FormattedFieldValue value={val} fieldType={field.type} mode="table" />
<div className="text-[12px] whitespace-pre-line line-clamp-2" style={{ color: "var(--color-text)" }}>
{field.type === "tags" ? (
<span className="flex items-center gap-0.5 flex-wrap">
{tags.slice(0, 3).map((tag) => {
const fmt = formatWorkspaceFieldValue(tag);
const isLink = fmt.kind === "link" && fmt.href;
return isLink ? (
<a key={tag} href={fmt.href!} target={fmt.linkType === "url" || fmt.linkType === "file" ? "_blank" : undefined} rel={fmt.linkType === "url" || fmt.linkType === "file" ? "noopener noreferrer" : undefined} onClick={(e) => e.stopPropagation()} className="inline-flex items-center px-1.5 py-0 rounded text-[11px] font-medium hover:underline underline-offset-2" style={{ background: "rgba(148, 163, 184, 0.12)", color: "var(--color-accent)" }}>{fmt.text}</a>
) : (
<span key={tag} className="inline-flex items-center px-1.5 py-0 rounded text-[11px] font-medium" style={{ background: "rgba(148, 163, 184, 0.12)", color: "var(--color-text-muted)" }}>{tag}</span>
);
})}
{tags.length > 3 && <span className="text-[10px]" style={{ color: "var(--color-text-muted)" }}>+{tags.length - 3}</span>}
</span>
) : (
<FormattedFieldValue value={val} fieldType={field.type} mode="table" />
)}
</div>
</div>
);

View File

@ -13,6 +13,8 @@ import {
type DragStartEvent,
type DragEndEvent,
} from "@dnd-kit/core";
import { formatWorkspaceFieldValue } from "@/lib/workspace-cell-format";
import { parseTagsValue } from "@/lib/parse-tags";
type Field = {
id: string;
@ -187,6 +189,8 @@ function CardContent({
displayVal = labels.join(", ");
}
const tags = field.type === "tags" ? parseTagsValue(val) : [];
return (
<div key={field.id} className="flex items-center gap-1.5 text-xs">
<span style={{ color: "var(--color-text-muted)" }}>
@ -198,6 +202,37 @@ function CardContent({
enumValues={field.enum_values}
enumColors={field.enum_colors}
/>
) : field.type === "tags" ? (
<span className="flex items-center gap-0.5 flex-wrap">
{tags.slice(0, 3).map((tag) => {
const fmt = formatWorkspaceFieldValue(tag);
const isLink = fmt.kind === "link" && fmt.href;
return isLink ? (
<a
key={tag}
href={fmt.href!}
target={fmt.linkType === "url" || fmt.linkType === "file" ? "_blank" : undefined}
rel={fmt.linkType === "url" || fmt.linkType === "file" ? "noopener noreferrer" : undefined}
onClick={(e) => e.stopPropagation()}
className="inline-flex items-center px-1.5 py-0 rounded text-[11px] font-medium hover:underline underline-offset-2"
style={{ background: "rgba(148, 163, 184, 0.12)", color: "var(--color-accent)" }}
>
{fmt.text}
</a>
) : (
<span
key={tag}
className="inline-flex items-center px-1.5 py-0 rounded text-[11px] font-medium"
style={{ background: "rgba(148, 163, 184, 0.12)", color: "var(--color-text-muted)" }}
>
{tag}
</span>
);
})}
{tags.length > 3 && (
<span style={{ color: "var(--color-text-muted)", opacity: 0.6 }}>+{tags.length - 3}</span>
)}
</span>
) : field.type === "relation" ? (
<span
className="truncate inline-flex items-center gap-0.5"
@ -222,7 +257,7 @@ function CardContent({
</span>
) : (
<span
className="truncate"
className="whitespace-pre-line line-clamp-2"
style={{ color: "var(--color-text)" }}
>
{displayVal}

View File

@ -1,6 +1,8 @@
"use client";
import { useMemo } from "react";
import { formatWorkspaceFieldValue } from "@/lib/workspace-cell-format";
import { parseTagsValue } from "@/lib/parse-tags";
// ---------------------------------------------------------------------------
// Types
@ -94,6 +96,9 @@ function ListRow({
const enumVal = enumField ? safeString(entry[enumField.name]) : null;
const badge = enumField && enumVal ? getEnumBadge(enumVal, enumField) : null;
const tagsField = fields.find((f) => f.type === "tags");
const tagsVal = tagsField ? parseTagsValue(entry[tagsField.name]) : [];
const dateField = fields.find((f) => f.type === "date");
const dateVal = dateField ? safeString(entry[dateField.name]) : null;
@ -127,15 +132,40 @@ function ListRow({
{badge.text}
</span>
)}
{tagsVal.slice(0, 3).map((tag) => {
const fmt = formatWorkspaceFieldValue(tag);
const isLink = fmt.kind === "link" && fmt.href;
return isLink ? (
<a
key={tag}
href={fmt.href!}
target={fmt.linkType === "url" || fmt.linkType === "file" ? "_blank" : undefined}
rel={fmt.linkType === "url" || fmt.linkType === "file" ? "noopener noreferrer" : undefined}
onClick={(e) => e.stopPropagation()}
className="text-[10px] px-1.5 py-0.5 rounded-full flex-shrink-0 hover:underline underline-offset-2"
style={{ background: "rgba(148, 163, 184, 0.12)", color: "var(--color-accent)", border: "1px solid var(--color-border)" }}
>
{fmt.text}
</a>
) : (
<span
key={tag}
className="text-[10px] px-1.5 py-0.5 rounded-full flex-shrink-0"
style={{ background: "rgba(148, 163, 184, 0.12)", color: "var(--color-text-muted)", border: "1px solid var(--color-border)" }}
>
{tag}
</span>
);
})}
</div>
{subtitle && (
<div
className="text-[11px] truncate mt-0.5"
style={{ color: "var(--color-text-muted)" }}
>
{subtitle}
</div>
)}
{subtitle && (
<div
className="text-[11px] whitespace-pre-line line-clamp-1 mt-0.5"
style={{ color: "var(--color-text-muted)" }}
>
{subtitle}
</div>
)}
</div>
{/* Date on the right */}

View File

@ -5,6 +5,8 @@ import { type ColumnDef, type CellContext } from "@tanstack/react-table";
import { DataTable, type RowAction } from "./data-table";
import { RelationSelect } from "./relation-select";
import { FormattedFieldValue } from "./formatted-field-value";
import { formatWorkspaceFieldValue } from "@/lib/workspace-cell-format";
import { parseTagsValue } from "@/lib/parse-tags";
/* ─── Types ─── */
@ -190,6 +192,117 @@ function RelationCell({
);
}
function TagChip({ tag }: { tag: string }) {
  // Render one tag as a pill; tags that format as links become clickable anchors.
  const fmt = formatWorkspaceFieldValue(tag);
  const pillStyle = { background: "rgba(148, 163, 184, 0.12)", border: "1px solid var(--color-border)" };
  if (fmt.kind === "link" && fmt.href) {
    // URLs and file links open in a new tab; other link kinds navigate in place.
    const opensNewTab = fmt.linkType === "url" || fmt.linkType === "file";
    return (
      <a
        href={fmt.href}
        target={opensNewTab ? "_blank" : undefined}
        rel={opensNewTab ? "noopener noreferrer" : undefined}
        onClick={(e) => e.stopPropagation()}
        className="inline-flex items-center px-2 py-0.5 rounded-full text-xs font-medium underline-offset-2 hover:underline"
        style={{ ...pillStyle, color: "var(--color-accent)" }}
      >
        {fmt.text}
      </a>
    );
  }
  return (
    <span
      className="inline-flex items-center px-2 py-0.5 rounded-full text-xs font-medium"
      style={{ ...pillStyle, color: "var(--color-text-muted)" }}
    >
      {tag}
    </span>
  );
}
function TagsCell({ value }: { value: unknown }) {
  // Read-only table cell for a tags field: up to five chips plus an overflow count.
  const tags = parseTagsValue(value);
  if (!tags.length) {
    return <span style={{ color: "var(--color-text-muted)", opacity: 0.5 }}>--</span>;
  }
  const visible = tags.slice(0, 5);
  const hidden = tags.length - visible.length;
  return (
    <span className="flex items-center gap-1 flex-wrap">
      {visible.map((t) => <TagChip key={t} tag={t} />)}
      {hidden > 0 && <span className="text-xs" style={{ color: "var(--color-text-muted)" }}>+{hidden}</span>}
    </span>
  );
}
function TagsInput({
  value,
  onChange,
  autoFocus,
}: {
  value: string;
  onChange: (val: string) => void;
  autoFocus?: boolean;
}) {
  // Chip-style editor for a tags field. `value` is the serialized field value
  // (a JSON array string per parseTagsValue); edits are reported back through
  // `onChange` as JSON, or as "" once the last tag is removed.
  const tags = parseTagsValue(value);
  const [draft, setDraft] = useState("");
  const inputRef = useRef<HTMLInputElement>(null);

  useEffect(() => {
    if (autoFocus && inputRef.current) {inputRef.current.focus();}
  }, [autoFocus]);

  // Commit the draft as a new tag, ignoring blanks and duplicates.
  const commitTag = (raw: string) => {
    const tag = raw.trim();
    if (!tag || tags.includes(tag)) {return;}
    onChange(JSON.stringify([...tags, tag]));
    setDraft("");
  };

  // Remove one tag; an empty list serializes to "" so the field reads as unset.
  const dropTag = (tag: string) => {
    const remaining = tags.filter((t) => t !== tag);
    onChange(remaining.length > 0 ? JSON.stringify(remaining) : "");
  };

  return (
    <div className="flex items-center gap-1 flex-wrap min-h-[1.5em]">
      {tags.map((tag) => (
        <span
          key={tag}
          className="inline-flex items-center gap-0.5 px-2 py-0.5 rounded-full text-xs font-medium"
          style={{ background: "rgba(148, 163, 184, 0.12)", color: "var(--color-text-muted)", border: "1px solid var(--color-border)" }}
        >
          {tag}
          <button
            type="button"
            onClick={() => dropTag(tag)}
            className="ml-0.5 hover:opacity-70"
            style={{ color: "var(--color-text-muted)" }}
          >
            <svg width="10" height="10" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2.5"><path d="M18 6 6 18" /><path d="m6 6 12 12" /></svg>
          </button>
        </span>
      ))}
      <input
        ref={inputRef}
        type="text"
        value={draft}
        onChange={(e) => setDraft(e.target.value)}
        onKeyDown={(e) => {
          // Enter or comma commits the draft as a tag.
          if ((e.key === "Enter" || e.key === ",") && draft.trim()) {
            e.preventDefault();
            commitTag(draft);
          }
          // Backspace in an empty input pops the most recent tag.
          if (e.key === "Backspace" && !draft && tags.length > 0) {
            dropTag(tags[tags.length - 1]);
          }
        }}
        onBlur={() => { if (draft.trim()) {commitTag(draft);} }}
        placeholder={tags.length === 0 ? "Type and press Enter..." : ""}
        className="flex-1 min-w-[80px] text-xs outline-none bg-transparent"
        style={{ color: "var(--color-text)" }}
      />
    </div>
  );
}
function ReverseRelationCell({ links, sourceObjectName, onNavigateObject, onNavigateEntry }: {
links: Array<{ id: string; label: string }>;
sourceObjectName: string;
@ -269,6 +382,7 @@ function EditableCell({
// Non-editable types: render read-only (relations are now editable via dropdown)
const isEditable = !["user"].includes(field.type);
const isRelation = field.type === "relation" && !!field.related_object_name;
const isTags = field.type === "tags";
const save = useCallback(async (val: string) => {
onLocalValueChange?.(val);
@ -327,6 +441,23 @@ function EditableCell({
</div>
);
}
if (isTags) {
return (
<div
className="-mx-3 -my-2 px-3 py-2"
style={{
background: "var(--color-bg)",
boxShadow: "inset 0 0 0 2px var(--color-accent)",
}}
>
<TagsInput
value={safeString(initialValue)}
onChange={(v) => { void save(v); }}
autoFocus
/>
</div>
);
}
if (field.type === "enum" && field.enum_values) {
editInput = (
<select
@ -408,6 +539,19 @@ function EditableCell({
);
}
// Tags fields: show tag chips with double-click to edit
if (isTags) {
return (
<div
onDoubleClick={() => setEditing(true)}
className="cursor-cell min-h-[1.5em]"
title="Double-click to edit"
>
<TagsCell value={displayValue} />
</div>
);
}
return (
<div
onDoubleClick={() => setEditing(true)}
@ -527,7 +671,7 @@ export function ObjectTable({
/>
);
},
size: field.type === "richtext" ? 300 : field.type === "relation" ? 200 : 180,
size: field.type === "richtext" ? 300 : field.type === "relation" || field.type === "tags" ? 200 : 180,
enableSorting: true,
}));
@ -835,23 +979,36 @@ function AddEntryModal({
)}
</label>
{field.type === "enum" && field.enum_values ? (
<select
{field.type === "tags" ? (
<div
className="w-full px-3 py-2 text-sm rounded-lg"
style={{
background: "var(--color-surface)",
border: "1px solid var(--color-border)",
}}
>
<TagsInput
value={values[field.name] ?? ""}
onChange={(e) => updateField(field.name, e.target.value)}
className="w-full px-3 py-2 text-sm rounded-lg outline-none"
style={{
background: "var(--color-surface)",
color: "var(--color-text)",
border: "1px solid var(--color-border)",
}}
>
<option value="">-- Select --</option>
{field.enum_values.map((v) => (
<option key={v} value={v}>{v}</option>
))}
</select>
) : field.type === "boolean" ? (
onChange={(v) => updateField(field.name, v)}
/>
</div>
) : field.type === "enum" && field.enum_values ? (
<select
value={values[field.name] ?? ""}
onChange={(e) => updateField(field.name, e.target.value)}
className="w-full px-3 py-2 text-sm rounded-lg outline-none"
style={{
background: "var(--color-surface)",
color: "var(--color-text)",
border: "1px solid var(--color-border)",
}}
>
<option value="">-- Select --</option>
{field.enum_values.map((v) => (
<option key={v} value={v}>{v}</option>
))}
</select>
) : field.type === "boolean" ? (
<select
value={values[field.name] ?? ""}
onChange={(e) => updateField(field.name, e.target.value)}

View File

@ -215,6 +215,48 @@ body {
font-family: "Bookerly", Georgia, "Times New Roman", serif;
}
/* Styling for user chat messages rendered as sanitized HTML
   (rich text: bold, italic, code, file @mentions). Paragraph margins
   are collapsed so the bubble reads like plain chat text. */
.chat-user-html-content p {
  margin: 0;
}
.chat-user-html-content p + p {
  margin-top: 0.35em;
}
/* Preserve intentional blank lines the user typed. */
.chat-user-html-content p:empty {
  min-height: 1em;
}
.chat-user-html-content strong {
  font-weight: 700;
}
.chat-user-html-content em {
  font-style: italic;
}
/* Inline code: monospace on a translucent pill so it stays legible
   on the colored message bubble. */
.chat-user-html-content code {
  font-family: "SF Mono", "Fira Code", "JetBrains Mono", monospace;
  font-size: 0.85em;
  background: rgba(255,255,255,0.15);
  border-radius: 4px;
  padding: 0.1em 0.3em;
}
/* File-mention chips embedded in the message HTML via the
   data-chat-file-mention attribute. */
.chat-user-html-content span[data-chat-file-mention] {
  display: inline-flex;
  align-items: center;
  gap: 4px;
  padding: 1px 8px 1px 6px;
  margin: 0 1px;
  border-radius: 6px;
  background: var(--mention-bg, rgba(255, 255, 255, 0.18));
  color: inherit;
  font-size: 0.85em;
  font-weight: 500;
  line-height: 1.6;
  white-space: nowrap;
}
/* Prefix each mention chip with a dimmed "@" glyph. */
.chat-user-html-content span[data-chat-file-mention]::before {
  content: "@";
  opacity: 0.5;
  font-size: 0.9em;
}
/* Smooth theme transitions */
*,
*::before,

View File

@ -40,6 +40,19 @@ vi.mock("node:fs", async (importOriginal) => {
};
});
// Mock the promise-based fs API so the async active-runs code never touches
// the real filesystem in tests: access() rejects (every path looks missing),
// reads return empty content, and writes/mkdir succeed as no-ops.
vi.mock("node:fs/promises", async (importOriginal) => {
  const original = await importOriginal<typeof import("node:fs/promises")>();
  return {
    ...original,
    access: vi.fn(async () => {
      throw new Error("ENOENT");
    }),
    readFile: vi.fn(async () => ""),
    writeFile: vi.fn(async () => undefined),
    mkdir: vi.fn(async () => undefined),
  };
});
import type { SseEvent } from "./active-runs.js";
/**
@ -136,6 +149,19 @@ describe("active-runs", () => {
mkdirSync: vi.fn(),
};
});
vi.mock("node:fs/promises", async (importOriginal) => {
const original =
await importOriginal<typeof import("node:fs/promises")>();
return {
...original,
access: vi.fn(async () => {
throw new Error("ENOENT");
}),
readFile: vi.fn(async () => ""),
writeFile: vi.fn(async () => undefined),
mkdir: vi.fn(async () => undefined),
};
});
});
afterEach(() => {

View File

@ -16,6 +16,12 @@ import {
existsSync,
mkdirSync,
} from "node:fs";
import {
access,
mkdir,
readFile,
writeFile,
} from "node:fs/promises";
import { resolveWebChatDir, resolveOpenClawStateDir, resolveActiveAgentId } from "./workspace";
import {
type AgentProcessHandle,
@ -193,6 +199,34 @@ const activeRuns: Map<string, ActiveRun> =
(globalThis as Record<string, unknown>)[GLOBAL_KEY] = activeRuns;
/** Tail of the pending mutation chain per file path; used to serialize writes. */
const fileMutationQueues = new Map<string, Promise<void>>();

/**
 * Non-throwing existence probe: `fs.access` resolves when the path is
 * reachable and rejects otherwise, so map both outcomes onto a boolean.
 */
function pathExistsAsync(path: string): Promise<boolean> {
  return access(path).then(
    () => true,
    () => false,
  );
}

/**
 * Run `mutate` only after every previously queued mutation for the same
 * `filePath` has settled, serializing concurrent writes to one file.
 * The caller receives `mutate`'s own result (or rejection).
 */
async function queueFileMutation<T>(
  filePath: string,
  mutate: () => Promise<T>,
): Promise<T> {
  const tail = fileMutationQueues.get(filePath) ?? Promise.resolve();
  // Chain after the current tail; a failed predecessor must not block us.
  const result = tail.catch(() => {}).then(mutate);
  // Store a settled marker (never rejects) so the map never holds a rejected
  // promise that would surface as an unhandled rejection.
  const marker = result.then(() => undefined, () => undefined);
  fileMutationQueues.set(filePath, marker);
  try {
    return await result;
  } finally {
    // Clean up only if no newer mutation has replaced our marker.
    if (fileMutationQueues.get(filePath) === marker) {
      fileMutationQueues.delete(filePath);
    }
  }
}
// ── Public API ──
/** Retrieve an active or recently-completed run (within the grace period). */
@ -217,18 +251,25 @@ export function getRunningSessionIds(): string[] {
return ids;
}
/** Check if any subagent sessions are still running for a parent web session. */
export function hasRunningSubagentsForParent(parentWebSessionId: string): boolean {
/**
 * True when any in-memory subscribe-only run under the given parent web
 * session is still in the "running" state.
 */
function hasRunningSubagentsInMemory(parentWebSessionId: string): boolean {
  for (const run of activeRuns.values()) {
    const isRunningChild =
      run.isSubscribeOnly &&
      run.parentSessionId === parentWebSessionId &&
      run.status === "running";
    if (isRunningChild) {
      return true;
    }
  }
  return false;
}
/** Check if any subagent sessions are still running for a parent web session. */
export async function hasRunningSubagentsForParent(parentWebSessionId: string): Promise<boolean> {
if (hasRunningSubagentsInMemory(parentWebSessionId)) {
return true;
}
// Fallback: check the gateway disk registry
const registryPath = join(resolveOpenClawStateDir(), "subagents", "runs.json");
if (!existsSync(registryPath)) {return false;}
if (!await pathExistsAsync(registryPath)) {return false;}
try {
const raw = JSON.parse(readFileSync(registryPath, "utf-8")) as {
const raw = JSON.parse(await readFile(registryPath, "utf-8")) as {
runs?: Record<string, Record<string, unknown>>;
};
const runs = raw?.runs;
@ -271,7 +312,14 @@ export function subscribeToRun(
}
// If the run already finished, signal completion immediately.
// Always replay buffered events for errored runs so error messages
// are never silently dropped due to replay:false timing.
if (run.status !== "running" && run.status !== "waiting-for-subagents") {
if (!replay && run.status === "error") {
for (const event of run.eventBuffer) {
callback(event);
}
}
callback(null);
return () => {};
}
@ -348,10 +396,10 @@ export function sendSubagentFollowUp(sessionKey: string, message: string): boole
* Emits a user-message event so reconnecting clients see the message,
* and writes the message to the session JSONL file on disk.
*/
export function persistSubscribeUserMessage(
export async function persistSubscribeUserMessage(
sessionKey: string,
msg: { id?: string; text: string },
): boolean {
): Promise<boolean> {
const run = activeRuns.get(sessionKey);
if (!run) {return false;}
const msgId = msg.id ?? `user-${Date.now()}-${Math.random().toString(36).slice(2)}`;
@ -368,15 +416,18 @@ export function persistSubscribeUserMessage(
// Write the user message to the session JSONL (same as persistUserMessage
// does for parent sessions) so it survives page reloads.
try {
ensureDir();
const fp = join(webChatDir(), `${sessionKey}.jsonl`);
if (!existsSync(fp)) {writeFileSync(fp, "");}
const content = readFileSync(fp, "utf-8");
const lines = content.split("\n").filter((l) => l.trim());
const alreadySaved = lines.some((l) => {
try { return JSON.parse(l).id === msgId; } catch { return false; }
});
if (!alreadySaved) {
await ensureDir();
await queueFileMutation(fp, async () => {
if (!await pathExistsAsync(fp)) {await writeFile(fp, "");}
const content = await readFile(fp, "utf-8");
const lines = content.split("\n").filter((l) => l.trim());
const alreadySaved = lines.some((l) => {
try { return JSON.parse(l).id === msgId; } catch { return false; }
});
if (alreadySaved) {
return;
}
const line = JSON.stringify({
id: msgId,
role: "user",
@ -384,8 +435,8 @@ export function persistSubscribeUserMessage(
parts: [{ type: "text", text: msg.text }],
timestamp: new Date().toISOString(),
});
writeFileSync(fp, [...lines, line].join("\n") + "\n");
}
await writeFile(fp, [...lines, line].join("\n") + "\n");
});
} catch { /* best effort */ }
schedulePersist(run);
@ -416,7 +467,7 @@ export function abortRun(sessionId: string): boolean {
sendGatewayAbort(sessionId);
// Flush persistence to save the partial response (without _streaming).
flushPersistence(run);
flushPersistence(run).catch(() => {});
// Signal subscribers that the stream ended.
for (const sub of run.subscribers) {
@ -550,7 +601,12 @@ export function startSubscribeRun(params: {
// The subscribe process also patches, but this gives us a head start.
void callGatewayRpc(
"sessions.patch",
{ key: sessionKey, verboseLevel: "full", reasoningLevel: "on" },
{
key: sessionKey,
thinkingLevel: "xhigh",
verboseLevel: "full",
reasoningLevel: "on",
},
{ timeoutMs: 4_000 },
).catch(() => {});
@ -892,9 +948,12 @@ function wireSubscribeOnlyProcess(
}
}
}
if (typeof ev.data?.stopReason === "string" && ev.data.stopReason === "error" && typeof ev.data?.errorMessage === "string" && !agentErrorReported) {
if (typeof ev.data?.stopReason === "string" && ev.data.stopReason === "error" && !agentErrorReported) {
agentErrorReported = true;
emitError(parseErrorBody(ev.data.errorMessage));
const errMsg = typeof ev.data?.errorMessage === "string"
? parseErrorBody(ev.data.errorMessage)
: (parseAgentErrorMessage(ev.data) ?? "Agent stopped with an error");
emitError(errMsg);
}
}
@ -1001,14 +1060,14 @@ function wireSubscribeOnlyProcess(
}
if (ev.event === "agent" && ev.stream === "lifecycle" && ev.data?.phase === "error" && !agentErrorReported) {
const msg = parseAgentErrorMessage(ev.data);
if (msg) { agentErrorReported = true; emitError(msg); }
agentErrorReported = true;
emitError(parseAgentErrorMessage(ev.data) ?? "Agent encountered an error");
finalizeSubscribeRun(run, "error");
}
if (ev.event === "error" && !agentErrorReported) {
const msg = parseAgentErrorMessage(ev.data ?? (ev as unknown as Record<string, unknown>));
if (msg) { agentErrorReported = true; emitError(msg); }
agentErrorReported = true;
emitError(parseAgentErrorMessage(ev.data ?? (ev as unknown as Record<string, unknown>)) ?? "An unknown error occurred");
}
};
@ -1071,7 +1130,7 @@ function finalizeSubscribeRun(run: ActiveRun, status: "completed" | "error" = "c
resetSubscribeRetryState(run);
run.status = status;
flushPersistence(run);
flushPersistence(run).catch(() => {});
for (const sub of run.subscribers) {
try { sub(null); } catch { /* ignore */ }
@ -1210,90 +1269,98 @@ export function enrichSubagentSessionFromTranscript(sessionKey: string): void {
// ── Persistence helpers (called from route to persist user messages) ──
/** Save a user message to the session JSONL (called once at run start). */
export function persistUserMessage(
export async function persistUserMessage(
sessionId: string,
msg: { id: string; content: string; parts?: unknown[] },
): void {
ensureDir();
msg: { id: string; content: string; parts?: unknown[]; html?: string },
): Promise<void> {
await ensureDir();
const filePath = join(webChatDir(), `${sessionId}.jsonl`);
if (!existsSync(filePath)) {writeFileSync(filePath, "");}
const line = JSON.stringify({
id: msg.id,
role: "user",
content: msg.content,
...(msg.parts ? { parts: msg.parts } : {}),
...(msg.html ? { html: msg.html } : {}),
timestamp: new Date().toISOString(),
});
// Avoid duplicates (e.g. retry).
const existing = readFileSync(filePath, "utf-8");
const lines = existing.split("\n").filter((l) => l.trim());
const alreadySaved = lines.some((l) => {
try {
return JSON.parse(l).id === msg.id;
} catch {
return false;
let alreadySaved = false;
await queueFileMutation(filePath, async () => {
if (!await pathExistsAsync(filePath)) {await writeFile(filePath, "");}
// Avoid duplicates (e.g. retry).
const existing = await readFile(filePath, "utf-8");
const lines = existing.split("\n").filter((l) => l.trim());
alreadySaved = lines.some((l) => {
try {
return JSON.parse(l).id === msg.id;
} catch {
return false;
}
});
if (!alreadySaved) {
await writeFile(filePath, [...lines, line].join("\n") + "\n");
}
});
if (!alreadySaved) {
writeFileSync(filePath, [...lines, line].join("\n") + "\n");
updateIndex(sessionId, { incrementCount: 1 });
await updateIndex(sessionId, { incrementCount: 1 });
}
}
// ── Internals ──
function ensureDir() {
const dir = webChatDir();
if (!existsSync(dir)) {
mkdirSync(dir, { recursive: true });
}
async function ensureDir() {
await mkdir(webChatDir(), { recursive: true });
}
function updateIndex(
async function updateIndex(
sessionId: string,
opts: { incrementCount?: number; title?: string },
) {
try {
const idxPath = indexFile();
let index: Array<Record<string, unknown>>;
if (!existsSync(idxPath)) {
// Auto-create index with a bootstrap entry for this session so
// orphaned .jsonl files become visible in the sidebar.
index = [{
id: sessionId,
title: opts.title || "New Chat",
createdAt: Date.now(),
updatedAt: Date.now(),
messageCount: opts.incrementCount || 0,
}];
writeFileSync(idxPath, JSON.stringify(index, null, 2));
return;
}
index = JSON.parse(
readFileSync(idxPath, "utf-8"),
) as Array<Record<string, unknown>>;
let session = index.find((s) => s.id === sessionId);
if (!session) {
// Session file exists but wasn't indexed — add it.
session = {
id: sessionId,
title: opts.title || "New Chat",
createdAt: Date.now(),
updatedAt: Date.now(),
messageCount: 0,
};
index.unshift(session);
}
session.updatedAt = Date.now();
if (opts.incrementCount) {
session.messageCount =
((session.messageCount as number) || 0) + opts.incrementCount;
}
if (opts.title) {session.title = opts.title;}
writeFileSync(idxPath, JSON.stringify(index, null, 2));
await ensureDir();
await queueFileMutation(idxPath, async () => {
let index: Array<Record<string, unknown>>;
if (!await pathExistsAsync(idxPath)) {
// Auto-create index with a bootstrap entry for this session so
// orphaned .jsonl files become visible in the sidebar.
index = [{
id: sessionId,
title: opts.title || "New Chat",
createdAt: Date.now(),
updatedAt: Date.now(),
messageCount: opts.incrementCount || 0,
}];
await writeFile(idxPath, JSON.stringify(index, null, 2));
return;
}
index = JSON.parse(
await readFile(idxPath, "utf-8"),
) as Array<Record<string, unknown>>;
let session = index.find((s) => s.id === sessionId);
if (!session) {
// Session file exists but wasn't indexed — add it.
session = {
id: sessionId,
title: opts.title || "New Chat",
createdAt: Date.now(),
updatedAt: Date.now(),
messageCount: 0,
};
index.unshift(session);
}
session.updatedAt = Date.now();
if (opts.incrementCount) {
session.messageCount =
((session.messageCount as number) || 0) + opts.incrementCount;
}
if (opts.title) {session.title = opts.title;}
await writeFile(idxPath, JSON.stringify(index, null, 2));
});
} catch {
/* best-effort */
}
@ -1534,11 +1601,13 @@ function wireChildProcess(run: ActiveRun): void {
if (
typeof ev.data?.stopReason === "string" &&
ev.data.stopReason === "error" &&
typeof ev.data?.errorMessage === "string" &&
!agentErrorReported
) {
agentErrorReported = true;
emitError(parseErrorBody(ev.data.errorMessage));
const errMsg = typeof ev.data?.errorMessage === "string"
? parseErrorBody(ev.data.errorMessage)
: (parseAgentErrorMessage(ev.data) ?? "Agent stopped with an error");
emitError(errMsg);
}
}
@ -1709,23 +1778,19 @@ function wireChildProcess(run: ActiveRun): void {
ev.data?.phase === "error" &&
!agentErrorReported
) {
const msg = parseAgentErrorMessage(ev.data);
if (msg) {
agentErrorReported = true;
emitError(msg);
}
agentErrorReported = true;
emitError(parseAgentErrorMessage(ev.data) ?? "Agent encountered an error");
}
// Top-level error event
if (ev.event === "error" && !agentErrorReported) {
const msg = parseAgentErrorMessage(
ev.data ??
(ev as unknown as Record<string, unknown>),
agentErrorReported = true;
emitError(
parseAgentErrorMessage(
ev.data ??
(ev as unknown as Record<string, unknown>),
) ?? "An unknown error occurred",
);
if (msg) {
agentErrorReported = true;
emitError(msg);
}
}
};
@ -1743,17 +1808,17 @@ function wireChildProcess(run: ActiveRun): void {
openStatusReasoning("Waiting for subagent results...");
waitingStatusAnnounced = true;
}
flushPersistence(run);
flushPersistence(run).catch(() => {});
};
const scheduleWaitingCompletionCheck = () => {
clearWaitingFinalizeTimer(run);
run._waitingFinalizeTimer = setTimeout(() => {
run._waitingFinalizeTimer = setTimeout(async () => {
run._waitingFinalizeTimer = null;
if (run.status !== "waiting-for-subagents") {
return;
}
if (hasRunningSubagentsForParent(run.sessionId)) {
if (await hasRunningSubagentsForParent(run.sessionId)) {
showWaitingStatus();
return;
}
@ -1761,18 +1826,25 @@ function wireChildProcess(run: ActiveRun): void {
}, WAITING_FINALIZE_RECONCILE_MS);
};
const reconcileWaitingState = () => {
if (run.status !== "waiting-for-subagents" && run.status !== "running") {
return;
}
if (hasRunningSubagentsInMemory(run.sessionId)) {
clearWaitingFinalizeTimer(run);
showWaitingStatus();
return;
}
scheduleWaitingCompletionCheck();
};
// Any new parent event means waiting completion should be reconsidered
// from this point forward, not from a prior end/final checkpoint.
clearWaitingFinalizeTimer(run);
processParentEvent(ev);
if (ev.stream === "lifecycle" && ev.data?.phase === "end") {
if (hasRunningSubagentsForParent(run.sessionId)) {
clearWaitingFinalizeTimer(run);
showWaitingStatus();
} else {
scheduleWaitingCompletionCheck();
}
reconcileWaitingState();
}
if (ev.event === "chat") {
const payload = ev.data;
@ -1780,12 +1852,7 @@ function wireChildProcess(run: ActiveRun): void {
const message = asRecord(payload?.message);
const role = typeof message?.role === "string" ? message.role : "";
if (state === "final" && role === "assistant") {
if (hasRunningSubagentsForParent(run.sessionId)) {
clearWaitingFinalizeTimer(run);
showWaitingStatus();
} else {
scheduleWaitingCompletionCheck();
}
reconcileWaitingState();
}
}
};
@ -1847,19 +1914,14 @@ function wireChildProcess(run: ActiveRun): void {
emit({ type: "text-end", id: tid });
accAppendText(errMsg);
} else if (!everSentText && exitedClean) {
const tid = nextId("text");
emit({ type: "text-start", id: tid });
const msg = "No response from agent.";
emit({ type: "text-delta", id: tid, delta: msg });
emit({ type: "text-end", id: tid });
accAppendText(msg);
emitError("No response from agent.");
} else {
closeText();
}
run.exitCode = code;
const hasRunningSubagents = hasRunningSubagentsForParent(run.sessionId);
const hasRunningSubagents = hasRunningSubagentsInMemory(run.sessionId);
// If the CLI exited cleanly and subagents are still running,
// keep the SSE stream open and wait for announcement-triggered
@ -1871,7 +1933,7 @@ function wireChildProcess(run: ActiveRun): void {
openStatusReasoning("Waiting for subagent results...");
waitingStatusAnnounced = true;
}
flushPersistence(run);
flushPersistence(run).catch(() => {});
startParentSubscribeStream(run, parentSessionKey, processParentSubscribeEvent);
return;
}
@ -1880,7 +1942,7 @@ function wireChildProcess(run: ActiveRun): void {
run.status = exitedClean ? "completed" : "error";
// Final persistence flush (removes _streaming flag).
flushPersistence(run);
flushPersistence(run).catch(() => {});
// Signal completion to all subscribers.
for (const sub of run.subscribers) {
@ -1910,7 +1972,7 @@ function wireChildProcess(run: ActiveRun): void {
const message = err instanceof Error ? err.message : String(err);
emitError(`Failed to start agent: ${message}`);
run.status = "error";
flushPersistence(run);
flushPersistence(run).catch(() => {});
for (const sub of run.subscribers) {
try {
sub(null);
@ -2011,7 +2073,7 @@ function finalizeWaitingRun(run: ActiveRun): void {
stopSubscribeProcess(run);
flushPersistence(run);
flushPersistence(run).catch(() => {});
for (const sub of run.subscribers) {
try { sub(null); } catch { /* ignore */ }
@ -2070,11 +2132,11 @@ function schedulePersist(run: ActiveRun) {
const delay = Math.max(0, PERSIST_INTERVAL_MS - elapsed);
run._persistTimer = setTimeout(() => {
run._persistTimer = null;
flushPersistence(run);
flushPersistence(run).catch(() => {});
}, delay);
}
function flushPersistence(run: ActiveRun) {
async function flushPersistence(run: ActiveRun) {
if (run._persistTimer) {
clearTimeout(run._persistTimer);
run._persistTimer = null;
@ -2111,7 +2173,7 @@ function flushPersistence(run: ActiveRun) {
}
try {
upsertMessage(run.sessionId, message);
await upsertMessage(run.sessionId, message);
} catch (err) {
console.error("[active-runs] Persistence error:", err);
}
@ -2121,43 +2183,43 @@ function flushPersistence(run: ActiveRun) {
* Upsert a single message into the session JSONL.
* If a line with the same `id` already exists it is replaced; otherwise appended.
*/
function upsertMessage(
async function upsertMessage(
sessionId: string,
message: Record<string, unknown>,
) {
ensureDir();
await ensureDir();
const fp = join(webChatDir(), `${sessionId}.jsonl`);
if (!existsSync(fp)) {writeFileSync(fp, "");}
const msgId = message.id as string;
const content = readFileSync(fp, "utf-8");
const lines = content.split("\n").filter((l) => l.trim());
let found = false;
const updated = lines.map((line) => {
try {
const parsed = JSON.parse(line);
if (parsed.id === msgId) {
found = true;
return JSON.stringify(message);
await queueFileMutation(fp, async () => {
if (!await pathExistsAsync(fp)) {await writeFile(fp, "");}
const content = await readFile(fp, "utf-8");
const lines = content.split("\n").filter((l) => l.trim());
const updated = lines.map((line) => {
try {
const parsed = JSON.parse(line);
if (parsed.id === msgId) {
found = true;
return JSON.stringify(message);
}
} catch {
/* keep as-is */
}
} catch {
/* keep as-is */
return line;
});
if (!found) {
updated.push(JSON.stringify(message));
}
return line;
await writeFile(fp, updated.join("\n") + "\n");
});
if (!found) {
updated.push(JSON.stringify(message));
}
writeFileSync(fp, updated.join("\n") + "\n");
if (!sessionId.includes(":subagent:")) {
if (!found) {
updateIndex(sessionId, { incrementCount: 1 });
await updateIndex(sessionId, { incrementCount: 1 });
} else {
updateIndex(sessionId, {});
await updateIndex(sessionId, {});
}
}
}

View File

@ -603,6 +603,7 @@ class GatewayProcessHandle
...(sessionKey ? { sessionKey } : {}),
deliver: false,
channel: "webchat",
thinking: "xhigh",
lane: this.params.lane ?? "web",
timeout: 0,
});
@ -668,6 +669,7 @@ class GatewayProcessHandle
try {
const patch = await this.client.request("sessions.patch", {
key: sessionKey,
thinkingLevel: "xhigh",
verboseLevel: "full",
reasoningLevel: "on",
});
@ -982,11 +984,29 @@ export function parseAgentErrorMessage(
// Direct error string
if (typeof data.error === "string") {return parseErrorBody(data.error);}
// Nested error object with message
if (typeof data.error === "object" && data.error !== null) {
const nested = data.error as Record<string, unknown>;
if (typeof nested.message === "string") {return parseErrorBody(nested.message);}
}
// Message field
if (typeof data.message === "string") {return parseErrorBody(data.message);}
// errorMessage field (may contain "402 {json}")
if (typeof data.errorMessage === "string")
{return parseErrorBody(data.errorMessage);}
// Common alternative fields
if (typeof data.detail === "string") {return parseErrorBody(data.detail);}
if (typeof data.reason === "string") {return parseErrorBody(data.reason);}
if (typeof data.description === "string") {return parseErrorBody(data.description);}
// Error code as last-resort hint
if (typeof data.code === "string" && data.code.trim()) {return data.code;}
// Fallback: serialize the entire payload so the error is never silently lost
try {
const json = JSON.stringify(data);
if (json !== "{}" && json.length <= 500) {return json;}
if (json.length > 500) {return `${json.slice(0, 497)}...`;}
} catch { /* ignore */ }
return undefined;
}

View File

@ -266,6 +266,12 @@ const RELATION_OPS: OperatorMeta[] = [
...UNIVERSAL_OPS,
];
// Operators available for "tags" fields (free-form string arrays):
// membership tests plus the universal checks (is_empty / is_not_empty)
// shared by every field type via UNIVERSAL_OPS.
const TAGS_OPS: OperatorMeta[] = [
  { value: "contains", label: "contains" },
  { value: "not_contains", label: "does not contain" },
  ...UNIVERSAL_OPS,
];
/**
* Return the operators valid for a given field type.
*/
@ -286,6 +292,8 @@ export function operatorsForFieldType(fieldType: string): OperatorMeta[] {
case "relation":
case "user":
return RELATION_OPS;
case "tags":
return TAGS_OPS;
default:
return TEXT_OPS;
}
@ -311,6 +319,8 @@ export function defaultOperatorForFieldType(fieldType: string): FilterOperator {
case "relation":
case "user":
return "has_any";
case "tags":
return "contains";
default:
return "contains";
}

View File

@ -0,0 +1,17 @@
/**
 * Normalize an arbitrary stored tags value into a string array.
 *
 * Accepts `null`/`undefined` (→ empty list), real arrays, JSON-encoded
 * array strings (e.g. '["a","b"]'), and bare scalar values (→ a single
 * one-element tag list). Empty strings are dropped from the result; a
 * string that merely looks like JSON but fails to parse is kept verbatim
 * as one tag.
 */
export function parseTagsValue(value: unknown): string[] {
  if (value === null || value === undefined) {
    return [];
  }
  if (Array.isArray(value)) {
    const tags: string[] = [];
    for (const item of value) {
      const tag = String(item);
      if (tag) {
        tags.push(tag);
      }
    }
    return tags;
  }
  const text = String(value).trim();
  if (text === "") {
    return [];
  }
  if (text[0] === "[") {
    // Looks like a JSON array string — try to decode it.
    try {
      const decoded: unknown = JSON.parse(text);
      if (Array.isArray(decoded)) {
        return decoded.map((item) => String(item)).filter((tag) => tag !== "");
      }
    } catch {
      /* not JSON — fall through and treat as a plain string */
    }
  }
  return [text];
}

View File

@ -10,6 +10,13 @@ vi.mock("node:fs", () => ({
mkdirSync: vi.fn(),
}));
vi.mock("node:fs/promises", () => ({
access: vi.fn(async () => {
throw new Error("ENOENT");
}),
readdir: vi.fn(async () => []),
}));
// Mock node:child_process
vi.mock("node:child_process", () => ({
execSync: vi.fn(() => ""),
@ -65,6 +72,12 @@ describe("workspace utilities", () => {
writeFileSync: vi.fn(),
mkdirSync: vi.fn(),
}));
vi.mock("node:fs/promises", () => ({
access: vi.fn(async () => {
throw new Error("ENOENT");
}),
readdir: vi.fn(async () => []),
}));
vi.mock("node:child_process", () => ({
execSync: vi.fn(() => ""),
exec: vi.fn((_cmd: string, _opts: unknown, cb: (err: Error | null, result: { stdout: string }) => void) => {
@ -83,6 +96,7 @@ describe("workspace utilities", () => {
/** Fresh import after mocks are wired. */
async function importWorkspace() {
const { existsSync: es, readFileSync: rfs, readdirSync: rds } = await import("node:fs");
const { access: acc, readdir: rda } = await import("node:fs/promises");
const { execSync: exs } = await import("node:child_process");
const mod = await import("./workspace.js");
return {
@ -90,6 +104,8 @@ describe("workspace utilities", () => {
mockExists: vi.mocked(es),
mockReadFile: vi.mocked(rfs),
mockReaddir: vi.mocked(rds),
mockAccess: vi.mocked(acc),
mockReaddirAsync: vi.mocked(rda),
mockExec: vi.mocked(exs),
};
}
@ -479,7 +495,7 @@ describe("workspace utilities", () => {
describe("duckdbQueryAsync", () => {
it("returns parsed JSON rows on success", async () => {
process.env.OPENCLAW_WORKSPACE = WS_DIR;
const { duckdbQueryAsync, mockExists } = await importWorkspace();
const { duckdbQueryAsync, mockExists, mockAccess } = await importWorkspace();
const { exec: mockExecFn } = await import("node:child_process");
const rootDb = join(WS_DIR, "workspace.duckdb");
const bin = "/opt/homebrew/bin/duckdb";
@ -487,6 +503,10 @@ describe("workspace utilities", () => {
const s = String(p);
return s === WS_DIR || s === rootDb || s === bin;
});
mockAccess.mockImplementation(async (p) => {
if (String(p) === rootDb) {return;}
throw new Error("ENOENT");
});
vi.mocked(mockExecFn).mockImplementation((_cmd: unknown, _opts: unknown, cb: unknown) => {
(cb as (err: null, r: { stdout: string }) => void)(null, { stdout: '[{"id":"1"}]' });
return {} as never;
@ -497,17 +517,21 @@ describe("workspace utilities", () => {
it("returns empty array when no db path", async () => {
delete process.env.OPENCLAW_WORKSPACE;
const { duckdbQueryAsync, mockExists } = await importWorkspace();
const { duckdbQueryAsync, mockExists, mockAccess } = await importWorkspace();
mockExists.mockReturnValue(false);
mockAccess.mockImplementation(async () => {
throw new Error("ENOENT");
});
const result = await duckdbQueryAsync("SELECT 1");
expect(result).toEqual([]);
});
it("returns empty array for empty stdout", async () => {
process.env.OPENCLAW_WORKSPACE = WS_DIR;
const { duckdbQueryAsync, mockExists } = await importWorkspace();
const { duckdbQueryAsync, mockExists, mockAccess } = await importWorkspace();
const { exec: mockExecFn } = await import("node:child_process");
mockExists.mockReturnValue(true);
mockAccess.mockImplementation(async () => undefined);
vi.mocked(mockExecFn).mockImplementation((_cmd: unknown, _opts: unknown, cb: unknown) => {
(cb as (err: null, r: { stdout: string }) => void)(null, { stdout: "" });
return {} as never;
@ -518,9 +542,10 @@ describe("workspace utilities", () => {
it("returns empty array on exec error", async () => {
process.env.OPENCLAW_WORKSPACE = WS_DIR;
const { duckdbQueryAsync, mockExists } = await importWorkspace();
const { duckdbQueryAsync, mockExists, mockAccess } = await importWorkspace();
const { exec: mockExecFn } = await import("node:child_process");
mockExists.mockReturnValue(true);
mockAccess.mockImplementation(async () => undefined);
vi.mocked(mockExecFn).mockImplementation((_cmd: unknown, _opts: unknown, cb: unknown) => {
(cb as (err: Error) => void)(new Error("fail"));
return {} as never;

View File

@ -1,4 +1,5 @@
import { existsSync, readFileSync, readdirSync, writeFileSync, mkdirSync } from "node:fs";
import { access, readdir as readdirAsync } from "node:fs/promises";
import { execSync, exec } from "node:child_process";
import { promisify } from "node:util";
import { join, resolve, normalize, relative } from "node:path";
@ -8,6 +9,15 @@ import { normalizeFilterGroup, type SavedView, type ViewTypeSettings } from "./o
const execAsync = promisify(exec);
/**
 * Non-throwing existence probe: resolves `true` when `fs.promises.access`
 * succeeds for the path, `false` when it rejects (missing path, permission
 * error, etc.).
 */
async function pathExistsAsync(path: string): Promise<boolean> {
  return access(path).then(
    () => true,
    () => false,
  );
}
const UI_STATE_FILENAME = ".dench-ui-state.json";
const FIXED_STATE_DIRNAME = ".openclaw-dench";
const WORKSPACE_PREFIX = "workspace-";
@ -498,6 +508,41 @@ export function discoverDuckDBPaths(root?: string): string[] {
return results.map((r) => r.path);
}
/**
 * Async version of discoverDuckDBPaths — recursively scans the workspace
 * for `workspace.duckdb` files without blocking the event loop.
 *
 * Hidden directories and common non-workspace directories (tmp, exports,
 * node_modules) are skipped; unreadable directories are ignored silently.
 * Results are ordered shallowest-first.
 *
 * @param root Workspace root override; defaults to resolveWorkspaceRoot().
 * @returns Absolute paths of every discovered workspace.duckdb file.
 */
export async function discoverDuckDBPathsAsync(root?: string): Promise<string[]> {
  const workspaceRoot = root ?? resolveWorkspaceRoot();
  if (!workspaceRoot) {return [];}
  const found: Array<{ path: string; depth: number }> = [];
  const skipNames = new Set(["tmp", "exports", "node_modules"]);
  const visit = async (dir: string, depth: number): Promise<void> => {
    const candidate = join(dir, "workspace.duckdb");
    if (await pathExistsAsync(candidate)) {
      found.push({ path: candidate, depth });
    }
    // Unreadable directory → null → stop descending here.
    const entries = await readdirAsync(dir, { withFileTypes: true }).catch(() => null);
    if (!entries) {return;}
    for (const entry of entries) {
      if (!entry.isDirectory()) {continue;}
      if (entry.name.startsWith(".") || skipNames.has(entry.name)) {continue;}
      await visit(join(dir, entry.name), depth + 1);
    }
  };
  await visit(workspaceRoot, 0);
  // Shallowest DBs first; sort() is stable, so discovery order breaks ties.
  found.sort((a, b) => a.depth - b.depth);
  return found.map((entry) => entry.path);
}
/**
* Path to the primary DuckDB database file.
* Checks the workspace root first, then falls back to any workspace.duckdb
@ -516,6 +561,20 @@ export function duckdbPath(): string | null {
return all.length > 0 ? all[0] : null;
}
/** Async version of duckdbPath — avoids sync recursive discovery fallback. */
export async function duckdbPathAsync(): Promise<string | null> {
  const workspaceRoot = resolveWorkspaceRoot();
  if (!workspaceRoot) {return null;}
  // Standard layout: workspace.duckdb sits directly under the workspace root.
  const primary = join(workspaceRoot, "workspace.duckdb");
  if (await pathExistsAsync(primary)) {return primary;}
  // Otherwise fall back to the shallowest DB found in subdirectories.
  const discovered = await discoverDuckDBPathsAsync(workspaceRoot);
  return discovered[0] ?? null;
}
/**
* Compute the workspace-relative directory that a DuckDB file is authoritative for.
* e.g. for `~/.openclaw/workspace/subdir/workspace.duckdb` returns `"subdir"`.
@ -601,7 +660,7 @@ export function duckdbQuery<T = Record<string, unknown>>(
export async function duckdbQueryAsync<T = Record<string, unknown>>(
sql: string,
): Promise<T[]> {
const db = duckdbPath();
const db = await duckdbPathAsync();
if (!db) {return [];}
const bin = resolveDuckdbBin();
@ -681,7 +740,7 @@ export async function duckdbQueryAllAsync<T = Record<string, unknown>>(
sql: string,
dedupeKey?: keyof T,
): Promise<T[]> {
const dbPaths = discoverDuckDBPaths();
const dbPaths = await discoverDuckDBPathsAsync();
if (dbPaths.length === 0) {return [];}
const bin = resolveDuckdbBin();
@ -751,6 +810,33 @@ export function findDuckDBForObject(objectName: string): string | null {
return null;
}
/**
 * Async version of findDuckDBForObject — avoids blocking recursive discovery.
 *
 * Probes every discovered workspace DB (shallowest first, per
 * discoverDuckDBPathsAsync) and returns the path of the first DB whose
 * `objects` table contains a row with the given name, or null when no DB
 * matches, no DBs exist, or the duckdb binary is unavailable.
 */
export async function findDuckDBForObjectAsync(objectName: string): Promise<string | null> {
  const dbPaths = await discoverDuckDBPathsAsync();
  if (dbPaths.length === 0) {return null;}
  const bin = resolveDuckdbBin();
  if (!bin) {return null;}
  // SQL-escape the object name by doubling embedded single quotes.
  const sql = `SELECT id FROM objects WHERE name = '${objectName.replace(/'/g, "''")}' LIMIT 1`;
  // Shell-escape the statement for the single-quoted argv below: each quote
  // becomes the POSIX '\'' sequence.
  // NOTE(review): `bin` and `db` are interpolated without escaping — this
  // assumes neither path contains a single quote; confirm it matches the
  // sync variant's behavior.
  const escapedSql = sql.replace(/'/g, "'\\''");
  for (const db of dbPaths) {
    try {
      const { stdout } = await execAsync(
        `'${bin}' -json '${db}' '${escapedSql}'`,
        { encoding: "utf-8", timeout: 5_000, maxBuffer: 1024 * 1024, shell: "/bin/sh" },
      );
      // Any non-empty JSON result other than "[]" means the object lives here.
      const trimmed = stdout.trim();
      if (trimmed && trimmed !== "[]") {return db;}
    } catch {
      // Query failure (timeout, locked/corrupt DB) — continue to next DB.
    }
  }
  return null;
}
/**
* Execute a DuckDB statement (no JSON output expected).
* Used for INSERT/UPDATE/ALTER operations.
@ -761,6 +847,13 @@ export function duckdbExec(sql: string): boolean {
return duckdbExecOnFile(db, sql);
}
/** Async version of duckdbExec — avoids sync DB discovery fallback. */
export async function duckdbExecAsync(sql: string): Promise<boolean> {
  const dbFile = await duckdbPathAsync();
  // No resolvable database means the statement cannot run.
  return dbFile ? duckdbExecOnFileAsync(dbFile, sql) : false;
}
/**
* Execute a DuckDB statement against a specific database file (no JSON output).
* Used for INSERT/UPDATE/ALTER operations on a targeted DB.
@ -782,6 +875,24 @@ export function duckdbExecOnFile(dbFilePath: string, sql: string): boolean {
}
}
/**
 * Async version of duckdbExecOnFile — does not block the event loop.
 * Runs `sql` against the given database file and reports success as a
 * boolean; any failure (missing binary, timeout, DB error) yields false.
 */
export async function duckdbExecOnFileAsync(dbFilePath: string, sql: string): Promise<boolean> {
  const duckdbBin = resolveDuckdbBin();
  if (!duckdbBin) {return false;}
  // Shell-escape embedded single quotes for the single-quoted argument below.
  const shellSafeSql = sql.replace(/'/g, "'\\''");
  try {
    await execAsync(`'${duckdbBin}' '${dbFilePath}' '${shellSafeSql}'`, {
      encoding: "utf-8",
      timeout: 10_000,
      shell: "/bin/sh",
    });
    return true;
  } catch {
    return false;
  }
}
/**
* Parse a relation field value which may be a single ID or a JSON array of IDs.
* Handles both many_to_one (single ID string) and many_to_many (JSON array).

View File

@ -41,6 +41,8 @@
"@tiptap/react": "^3.19.0",
"@tiptap/starter-kit": "^3.19.0",
"@tiptap/suggestion": "^3.19.0",
"@xterm/addon-fit": "^0.11.0",
"@xterm/xterm": "^6.0.0",
"ai": "^6.0.73",
"chokidar": "^5.0.0",
"clsx": "^2.1.1",
@ -52,6 +54,7 @@
"mammoth": "^1.11.0",
"monaco-editor": "^0.55.1",
"next": "^15.3.3",
"node-pty": "^1.1.0",
"posthog-js": "^1.358.1",
"posthog-node": "^5.27.1",
"react": "^19.1.0",

View File

@ -1,6 +1,6 @@
{
"name": "denchclaw",
"version": "2.1.1",
"version": "2.1.4",
"description": "Fully Managed OpenClaw Framework for managing your CRM, Sales Automation and Outreach agents. The only local productivity tool you need.",
"keywords": [],
"homepage": "https://github.com/DenchHQ/DenchClaw#readme",

View File

@ -1,6 +1,6 @@
{
"name": "dench",
"version": "2.1.1",
"version": "2.1.4",
"description": "Shorthand alias for denchclaw — AI-powered CRM platform CLI",
"license": "MIT",
"repository": {
@ -16,7 +16,7 @@
],
"type": "module",
"dependencies": {
"denchclaw": "^2.1.1"
"denchclaw": "^2.1.4"
},
"engines": {
"node": ">=22.12.0"

31
pnpm-lock.yaml generated
View File

@ -150,6 +150,12 @@ importers:
'@tiptap/suggestion':
specifier: ^3.19.0
version: 3.19.0(@tiptap/core@3.19.0(@tiptap/pm@3.19.0))(@tiptap/pm@3.19.0)
'@xterm/addon-fit':
specifier: ^0.11.0
version: 0.11.0
'@xterm/xterm':
specifier: ^6.0.0
version: 6.0.0
ai:
specifier: ^6.0.73
version: 6.0.86(zod@4.3.6)
@ -183,6 +189,9 @@ importers:
next:
specifier: ^15.3.3
version: 15.5.12(@opentelemetry/api@1.9.0)(react-dom@19.2.4(react@19.2.4))(react@19.2.4)
node-pty:
specifier: ^1.1.0
version: 1.1.0
posthog-js:
specifier: ^1.358.1
version: 1.358.1
@ -3426,6 +3435,12 @@ packages:
resolution: {integrity: sha512-cQzWCtO6C8TQiYl1ruKNn2U6Ao4o4WBBcbL61yJl84x+j5sOWWFU9X7DpND8XZG3daDppSsigMdfAIl2upQBRw==}
engines: {node: '>=10.0.0'}
'@xterm/addon-fit@0.11.0':
resolution: {integrity: sha512-jYcgT6xtVYhnhgxh3QgYDnnNMYTcf8ElbxxFzX0IZo+vabQqSPAjC3c1wJrKB5E19VwQei89QCiZZP86DCPF7g==}
'@xterm/xterm@6.0.0':
resolution: {integrity: sha512-TQwDdQGtwwDt+2cgKDLn0IRaSxYu1tSUjgKarSDkUM0ZNiSRXFpjxEsvc/Zgc5kq5omJ+V0a8/kIM2WD3sMOYg==}
abbrev@1.1.1:
resolution: {integrity: sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==}
@ -5377,6 +5392,9 @@ packages:
sass:
optional: true
node-addon-api@7.1.1:
resolution: {integrity: sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==}
node-addon-api@8.5.0:
resolution: {integrity: sha512-/bRZty2mXUIFY/xU5HLvveNHlswNJej+RnxBjOMkidWfwZzgTbPG1E3K5TOxRLOR+5hX7bSofy8yf1hZevMS8A==}
engines: {node: ^18 || ^20 || >= 21}
@ -5416,6 +5434,9 @@ packages:
typescript:
optional: true
node-pty@1.1.0:
resolution: {integrity: sha512-20JqtutY6JPXTUnL0ij1uad7Qe1baT46lyolh2sSENDd4sTzKZ4nmAFkeAARDKwmlLjPx6XKRlwRUxwjOy+lUg==}
node-readable-to-web-readable-stream@0.4.2:
resolution: {integrity: sha512-/cMZNI34v//jUTrI+UIo4ieHAB5EZRY/+7OmXZgBxaWBMcW2tGdceIw06RFxWxrKZ5Jp3sI2i5TsRo+CBhtVLQ==}
@ -10638,6 +10659,10 @@ snapshots:
'@xmldom/xmldom@0.8.11': {}
'@xterm/addon-fit@0.11.0': {}
'@xterm/xterm@6.0.0': {}
abbrev@1.1.1:
optional: true
@ -12938,6 +12963,8 @@ snapshots:
- '@babel/core'
- babel-plugin-macros
node-addon-api@7.1.1: {}
node-addon-api@8.5.0: {}
node-api-headers@1.8.0: {}
@ -13013,6 +13040,10 @@ snapshots:
transitivePeerDependencies:
- supports-color
node-pty@1.1.0:
dependencies:
node-addon-api: 7.1.1
node-readable-to-web-readable-stream@0.4.2:
optional: true

View File

@ -242,18 +242,71 @@ export NEXT_PUBLIC_POSTHOG_KEY="${POSTHOG_KEY:-}"
# ── build ────────────────────────────────────────────────────────────────────
# The `prepack` script (triggered by `npm publish`) runs the DenchClaw build chain:
# pnpm build && pnpm web:build && pnpm web:prepack
# Running `pnpm build` here is a redundant fail-fast: catch CLI build errors
# before committing to a publish attempt.
# Run the full build chain here so we can verify the standalone output
# before publishing. The `prepack` hook in package.json re-runs the same
# steps during `npm publish` but that's harmless (idempotent).
if [[ "$SKIP_BUILD" != true ]]; then
echo "building..."
pnpm build
echo "building web app (standalone verification)..."
echo "building web app (standalone)..."
pnpm web:build
echo "flattening standalone deps..."
pnpm web:prepack
fi
# ── pre-publish: verify standalone node_modules ──────────────────────────────

STANDALONE_APP_NM="apps/web/.next/standalone/apps/web/node_modules"

# Auto-extract serverExternalPackages from next.config.ts — these are NOT
# bundled by webpack, so they must exist in standalone node_modules or the
# web runtime will crash with "fetch failed" for users.
# Also always verify next/react/react-dom which the standalone server needs.
#
# Optional native accelerators (bufferutil, utf-8-validate) are skipped —
# ws works without them.
OPTIONAL_NATIVE="bufferutil utf-8-validate"
# NOTE(review): importing a .ts file via `node -e "import(...)"` only works on
# Node versions with type stripping (22.6+ experimental / 23+); on older Node
# the .catch(() => {}) swallows the failure and SERVER_EXTERNAL stays empty,
# silently reducing this check to next/react/react-dom — confirm the minimum
# Node version used for releases.
SERVER_EXTERNAL="$(node -e "
import('file://${ROOT_DIR}/apps/web/next.config.ts')
.then(m => (m.default.serverExternalPackages || []).forEach(p => console.log(p)))
.catch(() => {})
" 2>/dev/null)"

STANDALONE_OK=true
CHECKED=""
# Directory-existence pass: every required module must be present in the
# standalone node_modules (optional native accelerators excepted).
for mod in next react react-dom $SERVER_EXTERNAL; do
  [ -z "$mod" ] && continue
  if [ ! -d "${STANDALONE_APP_NM}/${mod}" ]; then
    case " $OPTIONAL_NATIVE " in
      *" $mod "*) continue ;;
    esac
    echo "error: required module '${mod}' missing from standalone build (${STANDALONE_APP_NM}/${mod})"
    STANDALONE_OK=false
  fi
  CHECKED="${CHECKED:+$CHECKED }$mod"
done

if [ "$STANDALONE_OK" != true ]; then
  die "standalone build is missing required node_modules — web chat will crash at runtime.
Run 'pnpm web:build && pnpm web:prepack' and verify the output."
fi

# Quick sanity: try to resolve each server-external package from the standalone dir.
for mod in $SERVER_EXTERNAL; do
  [ -z "$mod" ] && continue
  case " $OPTIONAL_NATIVE " in
    *" $mod "*) continue ;;
  esac
  if ! node -e "require.resolve('${mod}', { paths: ['${STANDALONE_APP_NM}'] })" 2>/dev/null; then
    die "standalone '${mod}' module exists but cannot be resolved — check flatten-standalone-deps output"
  fi
done

echo "standalone node_modules verified ($CHECKED)"
# ── publish ──────────────────────────────────────────────────────────────────
# Always tag as "latest" — npm skips the latest tag for prerelease versions
@ -302,12 +355,10 @@ if [[ "$SKIP_NPX_SMOKE" != true ]]; then
npx --yes "${PACKAGE_NAME}@${VERSION}" stop --help
fi
# Verify the standalone web app was included in the published package.
# `prepack` should have built it; if this file is missing, the web UI
# won't work for users who install globally.
# Post-publish sanity: confirm the standalone server was published.
STANDALONE_SERVER="apps/web/.next/standalone/apps/web/server.js"
if [[ ! -f "$STANDALONE_SERVER" ]]; then
echo "warning: standalone web app build not found after publish ($STANDALONE_SERVER)"
echo "warning: standalone web app server.js not found after publish ($STANDALONE_SERVER)"
echo " users may not get a working Web UI — check the prepack step"
fi

View File

@ -117,6 +117,7 @@ When creating objects with specific use cases, set `default_view` and `view_sett
| enum | is, is_not, is_any_of, is_none_of, is_empty, is_not_empty |
| boolean | is_true, is_false, is_empty, is_not_empty |
| relation/user | has_any, has_none, has_all, is_empty, is_not_empty |
| tags | contains, not_contains, is_empty, is_not_empty |
**System timestamp columns are always available on every object entry**:
@ -685,18 +686,19 @@ YAML
## Field Types Reference
| Type | Description | Storage | Query Cast |
| -------- | ------------------------------------- | ------------------ | ----------- |
| text | General text, names, descriptions | VARCHAR | none |
| email | Email addresses (validated) | VARCHAR | none |
| phone | Phone numbers (normalized) | VARCHAR | none |
| number | Numeric values (prices, scores) | VARCHAR | `::NUMERIC` |
| boolean | Yes/no flags | "true"/"false" | `= 'true'` |
| date | ISO 8601 dates | VARCHAR | `::DATE` |
| richtext | Rich text for Notes fields | VARCHAR | none |
| user | Member ID from workspace_context.yaml | VARCHAR | none |
| enum | Dropdown with predefined values | VARCHAR | none |
| relation | Link to entry in another object | VARCHAR (entry ID) | none |
| Type | Description | Storage | Query Cast |
| -------- | ------------------------------------- | ------------------------- | ----------- |
| text | General text, names, descriptions | VARCHAR | none |
| email | Email addresses (validated) | VARCHAR | none |
| phone | Phone numbers (normalized) | VARCHAR | none |
| number | Numeric values (prices, scores) | VARCHAR | `::NUMERIC` |
| boolean | Yes/no flags | "true"/"false" | `= 'true'` |
| date | ISO 8601 dates | VARCHAR | `::DATE` |
| richtext | Rich text for Notes fields | VARCHAR | none |
| user | Member ID from workspace_context.yaml | VARCHAR | none |
| enum | Dropdown with predefined values | VARCHAR | none |
| relation | Link to entry in another object | VARCHAR (entry ID) | none |
| tags | Free-form string array (labels, tags) | VARCHAR (JSON array str) | none |
### System Timestamp Columns (Always Present)
@ -717,6 +719,8 @@ Important:
**relation fields**: Field stores `related_object_id` and `relationship_type`. Entry stores the related entry ID. `many_to_one` for single select, `many_to_many` for multi-select (JSON array of IDs).
**tags fields**: Free-form string arrays for labels, domains, skills, keywords, etc. Value stored as JSON array string: `'["tag1","tag2","tag3"]'`. No predefined values — users can type any value. Displayed as removable chips in the UI.
## CRM Patterns
### Contact/Customer
@ -1010,6 +1014,7 @@ After creating a `.report.json` file:
- **USER FIELDS**: Resolve member name to ID from `workspace_context.yaml` BEFORE inserting
- **ENUM FIELDS**: Use type "enum" with `enum_values` JSON array
- **RELATION FIELDS**: Use type "relation" with `related_object_id`
- **TAGS FIELDS**: Use type "tags" for free-form string arrays. Value stored as `'["tag1","tag2"]'`
- **KANBAN**: Use `default_view = 'kanban'`, set `view_settings.kanbanField: "Status"`, auto-create Status and Assigned To fields
- **CALENDAR**: Use `default_view = 'calendar'`, set `view_settings.calendarDateField` to the date field
- **TIMELINE**: Use `default_view = 'timeline'`, set `view_settings.timelineStartField` and optionally `timelineEndField`

View File

@ -12,6 +12,15 @@ const promptMocks = vi.hoisted(() => ({
})),
}));
const workspaceSeedMocks = vi.hoisted(() => ({
discoverWorkspaceDirs: vi.fn((stateDir: string) => [`${stateDir}/workspace`]),
syncManagedSkills: vi.fn(() => ({
syncedSkills: ["crm", "browser"],
workspaceDirs: ["/tmp/.openclaw-dench/workspace"],
identityUpdated: true,
})),
}));
const spawnMock = vi.hoisted(() => vi.fn());
const webRuntimeMocks = vi.hoisted(() => ({
DEFAULT_WEB_APP_PORT: 3100,
@ -54,6 +63,11 @@ vi.mock("@clack/prompts", () => ({
spinner: promptMocks.spinner,
}));
vi.mock("./workspace-seed.js", () => ({
discoverWorkspaceDirs: workspaceSeedMocks.discoverWorkspaceDirs,
syncManagedSkills: workspaceSeedMocks.syncManagedSkills,
}));
vi.mock("node:child_process", () => ({
spawn: spawnMock,
}));
@ -117,6 +131,15 @@ describe("updateWebRuntimeCommand", () => {
promptMocks.isCancel.mockReset();
promptMocks.isCancel.mockImplementation(() => false);
workspaceSeedMocks.discoverWorkspaceDirs.mockReset();
workspaceSeedMocks.discoverWorkspaceDirs.mockReturnValue(["/tmp/.openclaw-dench/workspace"]);
workspaceSeedMocks.syncManagedSkills.mockReset();
workspaceSeedMocks.syncManagedSkills.mockReturnValue({
syncedSkills: ["crm", "browser"],
workspaceDirs: ["/tmp/.openclaw-dench/workspace"],
identityUpdated: true,
});
webRuntimeMocks.ensureManagedWebRuntime.mockReset();
webRuntimeMocks.ensureManagedWebRuntime.mockImplementation(
async () => ({ ready: true, reason: "ready" }) as { ready: boolean; reason: string },
@ -208,6 +231,26 @@ describe("updateWebRuntimeCommand", () => {
expect(summary.majorGate.required).toBe(true);
});
  // Verifies the update command discovers workspace dirs from the state dir,
  // delegates to the mocked syncManagedSkills with those dirs plus the
  // package root, and surfaces the mock's result as `summary.skillSync`.
  it("syncs managed skills during update and includes result in summary", async () => {
    const runtime = runtimeStub();
    const summary = await updateWebRuntimeCommand(
      { nonInteractive: true },
      runtime,
    );
    expect(workspaceSeedMocks.discoverWorkspaceDirs).toHaveBeenCalledWith("/tmp/.openclaw-dench");
    expect(workspaceSeedMocks.syncManagedSkills).toHaveBeenCalledWith({
      workspaceDirs: ["/tmp/.openclaw-dench/workspace"],
      packageRoot: "/tmp/pkg",
    });
    expect(summary.skillSync).toEqual({
      syncedSkills: ["crm", "browser"],
      workspaceDirs: ["/tmp/.openclaw-dench/workspace"],
      identityUpdated: true,
    });
  });
it("skips OpenClaw update on minor upgrades while still refreshing runtime (avoids unnecessary blocking)", async () => {
webRuntimeMocks.evaluateMajorVersionTransition.mockReturnValue({
previousMajor: 2,

View File

@ -24,6 +24,7 @@ import {
stopManagedWebRuntime,
waitForWebRuntime,
} from "./web-runtime.js";
import { discoverWorkspaceDirs, syncManagedSkills, type SkillSyncResult } from "./workspace-seed.js";
type SpawnResult = {
code: number;
@ -68,6 +69,7 @@ export type UpdateWebRuntimeSummary = {
reason: string;
gatewayRestarted: boolean;
gatewayError?: string;
skillSync: SkillSyncResult;
};
export type StopWebRuntimeSummary = {
@ -409,6 +411,9 @@ export async function updateWebRuntimeCommand(
json: Boolean(opts.json),
});
const workspaceDirs = discoverWorkspaceDirs(stateDir);
const skillSyncResult = syncManagedSkills({ workspaceDirs, packageRoot });
const ensureResult = await ensureManagedWebRuntime({
stateDir,
packageRoot,
@ -432,6 +437,7 @@ export async function updateWebRuntimeCommand(
reason: ensureResult.reason,
gatewayRestarted: gatewayResult.restarted,
gatewayError: gatewayResult.error,
skillSync: skillSyncResult,
};
if (!opts.json) {
@ -452,6 +458,7 @@ export async function updateWebRuntimeCommand(
),
);
}
runtime.log(`Skills synced: ${summary.skillSync.syncedSkills.join(", ")} (${summary.skillSync.workspaceDirs.length} workspace${summary.skillSync.workspaceDirs.length === 1 ? "" : "s"})`);
runtime.log(`Web runtime: ${summary.ready ? "ready" : "not ready"}`);
if (!summary.ready) {
runtime.log(theme.warn(summary.reason));

View File

@ -636,6 +636,35 @@ function resolveSymlinkedPackage(
}
}
/**
 * Copy assets/seed/ and skills/ from the installed package into the runtime
 * app dir so the web init route can locate them via resolveProjectRoot()
 * (which walks up from process.cwd looking for package.json +
 * assets/seed/workspace.duckdb). Without these, creating a new workspace in
 * the web UI silently skips seeding objects (people, company, task), the
 * DuckDB, and managed skills.
 *
 * Copies are best-effort: a source dir that is missing is skipped, and a
 * failed copy never aborts runtime installation.
 */
function ensureSeedAssets(runtimeAppDir: string, packageRoot: string): void {
  const relativeDirs: string[][] = [
    ["assets", "seed"],
    ["skills"],
  ];
  for (const segments of relativeDirs) {
    const source = path.join(packageRoot, ...segments);
    if (!existsSync(source)) {
      continue;
    }
    const destination = path.join(runtimeAppDir, ...segments);
    try {
      mkdirSync(path.dirname(destination), { recursive: true });
      cpSync(source, destination, { recursive: true, dereference: true, force: true });
    } catch {
      // Best-effort: seeding assets must never break runtime installation.
    }
  }
}
/**
* Copy .next/static/ and public/ into the runtime app dir if they aren't
* already present. In production the prepack script copies these into the
@ -695,6 +724,7 @@ export function installManagedWebRuntime(params: {
dereferenceRuntimeNodeModules(runtimeAppDir, standaloneDir);
ensureStaticAssets(runtimeAppDir, params.packageRoot);
ensureSeedAssets(runtimeAppDir, params.packageRoot);
const manifest: ManagedWebRuntimeManifest = {
schemaVersion: 1,

View File

@ -2,7 +2,12 @@ import { existsSync, mkdirSync, readFileSync, rmSync, writeFileSync } from "node
import os from "node:os";
import path from "node:path";
import { afterEach, beforeEach, describe, expect, it } from "vitest";
import { seedWorkspaceFromAssets } from "./workspace-seed.js";
import {
discoverWorkspaceDirs,
MANAGED_SKILLS,
seedWorkspaceFromAssets,
syncManagedSkills,
} from "./workspace-seed.js";
function createTempDir(): string {
const dir = path.join(
@ -129,3 +134,160 @@ describe("seedWorkspaceFromAssets", () => {
expect(result.projectionFiles).toContain("IDENTITY.md");
});
});
// Unit tests for syncManagedSkills: copying the packaged managed skills
// (and IDENTITY.md) into one or more workspace directories on disk.
describe("syncManagedSkills", () => {
  let tempDir: string;

  beforeEach(() => {
    tempDir = createTempDir();
  });

  afterEach(() => {
    // Remove the scratch directory so runs stay isolated.
    rmSync(tempDir, { recursive: true, force: true });
  });

  it("syncs all managed skills and returns their names", () => {
    const packageRoot = createPackageRoot(tempDir);
    const workspaceDir = path.join(tempDir, "workspace-sync");
    const result = syncManagedSkills({ workspaceDirs: [workspaceDir], packageRoot });
    // Every skill in the MANAGED_SKILLS manifest must be reported as synced.
    expect(result.syncedSkills).toEqual(MANAGED_SKILLS.map((s) => s.name));
    expect(result.identityUpdated).toBe(true);
    const skillPath = path.join(workspaceDir, "skills", "crm", "SKILL.md");
    expect(existsSync(skillPath)).toBe(true);
  });

  it("updates IDENTITY.md", () => {
    const packageRoot = createPackageRoot(tempDir);
    const workspaceDir = path.join(tempDir, "workspace-identity");
    syncManagedSkills({ workspaceDirs: [workspaceDir], packageRoot });
    const identityPath = path.join(workspaceDir, "IDENTITY.md");
    expect(existsSync(identityPath)).toBe(true);
    expect(readFileSync(identityPath, "utf-8")).toContain("DenchClaw");
  });

  it("overwrites stale skills with updated content", () => {
    const packageRoot = createPackageRoot(tempDir);
    const workspaceDir = path.join(tempDir, "workspace-overwrite");
    // Pre-seed a stale SKILL.md; sync must replace it, not keep it.
    const skillPath = path.join(workspaceDir, "skills", "crm", "SKILL.md");
    mkdirSync(path.dirname(skillPath), { recursive: true });
    writeFileSync(skillPath, "# old stale skill content\n", "utf-8");
    syncManagedSkills({ workspaceDirs: [workspaceDir], packageRoot });
    const content = readFileSync(skillPath, "utf-8");
    expect(content).toContain("database-crm-system");
    expect(content).not.toContain("old stale skill content");
  });

  it("creates workspace dir if it does not exist", () => {
    const packageRoot = createPackageRoot(tempDir);
    const workspaceDir = path.join(tempDir, "workspace-fresh");
    expect(existsSync(workspaceDir)).toBe(false);
    syncManagedSkills({ workspaceDirs: [workspaceDir], packageRoot });
    expect(existsSync(workspaceDir)).toBe(true);
  });

  it("syncs skills into multiple workspace directories", () => {
    const packageRoot = createPackageRoot(tempDir);
    const wsA = path.join(tempDir, "workspace-a");
    const wsB = path.join(tempDir, "workspace-b");
    const result = syncManagedSkills({ workspaceDirs: [wsA, wsB], packageRoot });
    expect(result.workspaceDirs).toEqual([wsA, wsB]);
    // Each workspace independently receives skills and IDENTITY.md.
    for (const ws of [wsA, wsB]) {
      expect(existsSync(path.join(ws, "skills", "crm", "SKILL.md"))).toBe(true);
      expect(readFileSync(path.join(ws, "IDENTITY.md"), "utf-8")).toContain("DenchClaw");
    }
  });
});
// Unit tests for discoverWorkspaceDirs: reading workspace paths out of
// openclaw.json (agents.defaults.workspace + agents.list[*].workspace),
// with dedup, an existence filter, and a stateDir/workspace fallback.
describe("discoverWorkspaceDirs", () => {
  let tempDir: string;

  beforeEach(() => {
    tempDir = createTempDir();
  });

  afterEach(() => {
    rmSync(tempDir, { recursive: true, force: true });
  });

  it("returns all workspace dirs from agents.list and agents.defaults.workspace", () => {
    const wsDefault = path.join(tempDir, "workspace");
    const wsUser = path.join(tempDir, "workspace-user");
    mkdirSync(wsDefault, { recursive: true });
    mkdirSync(wsUser, { recursive: true });
    writeFileSync(
      path.join(tempDir, "openclaw.json"),
      JSON.stringify({
        agents: {
          defaults: { workspace: wsDefault },
          list: [
            { id: "main", workspace: wsDefault },
            { id: "user", workspace: wsUser },
          ],
        },
      }),
      "utf-8",
    );
    const dirs = discoverWorkspaceDirs(tempDir);
    // Returned paths are resolved to absolute form.
    expect(dirs).toContain(path.resolve(wsDefault));
    expect(dirs).toContain(path.resolve(wsUser));
    expect(dirs).toHaveLength(2);
  });

  it("deduplicates workspace dirs", () => {
    const ws = path.join(tempDir, "workspace");
    mkdirSync(ws, { recursive: true });
    // Same dir appears in both defaults and list — must be returned once.
    writeFileSync(
      path.join(tempDir, "openclaw.json"),
      JSON.stringify({
        agents: {
          defaults: { workspace: ws },
          list: [{ id: "main", workspace: ws }],
        },
      }),
      "utf-8",
    );
    const dirs = discoverWorkspaceDirs(tempDir);
    expect(dirs).toHaveLength(1);
    expect(dirs[0]).toBe(path.resolve(ws));
  });

  it("falls back to stateDir/workspace when no config exists", () => {
    const dirs = discoverWorkspaceDirs(tempDir);
    expect(dirs).toEqual([path.join(tempDir, "workspace")]);
  });

  it("skips workspace dirs that do not exist on disk", () => {
    const wsReal = path.join(tempDir, "workspace-real");
    mkdirSync(wsReal, { recursive: true });
    // "ghost" is referenced by config but never created on disk.
    writeFileSync(
      path.join(tempDir, "openclaw.json"),
      JSON.stringify({
        agents: {
          list: [
            { id: "real", workspace: wsReal },
            { id: "ghost", workspace: path.join(tempDir, "workspace-ghost") },
          ],
        },
      }),
      "utf-8",
    );
    const dirs = discoverWorkspaceDirs(tempDir);
    expect(dirs).toEqual([path.resolve(wsReal)]);
  });
});

View File

@ -214,6 +214,71 @@ export function seedSkill(
}
}
/** Outcome of a managed-skill sync across one or more workspace directories. */
export type SkillSyncResult = {
  /** Names of the managed skills that were synced. */
  syncedSkills: string[];
  /** The workspace directories the sync ran against. */
  workspaceDirs: string[];
  /** Whether the identity file was written as part of the sync. */
  identityUpdated: boolean;
};
/**
 * Read openclaw.json (or legacy config.json) and return all unique workspace
 * directories referenced in `agents.list[*].workspace` and
 * `agents.defaults.workspace`. Falls back to `stateDir/workspace` when no
 * config yields a usable directory.
 */
export function discoverWorkspaceDirs(stateDir: string): string[] {
  type AgentsConfig = {
    agents?: {
      defaults?: { workspace?: string };
      list?: Array<{ workspace?: string }>;
    };
  };

  const found = new Set<string>();
  // Record a workspace only when it is non-empty after trimming and exists on disk.
  const addIfPresent = (ws: string | undefined) => {
    const trimmed = ws?.trim();
    if (trimmed && existsSync(trimmed)) {
      found.add(path.resolve(trimmed));
    }
  };

  // openclaw.json takes priority; config.json is the legacy name.
  for (const candidate of ["openclaw.json", "config.json"]) {
    const configPath = path.join(stateDir, candidate);
    if (!existsSync(configPath)) {
      continue;
    }
    try {
      const parsed = JSON.parse(readFileSync(configPath, "utf-8")) as AgentsConfig;
      addIfPresent(parsed?.agents?.defaults?.workspace);
      (parsed?.agents?.list ?? []).forEach((agent) => addIfPresent(agent.workspace));
      if (found.size > 0) {
        return [...found];
      }
    } catch {
      // Config unreadable; try next candidate.
    }
  }

  // No config produced a usable directory: default to stateDir/workspace.
  return [path.join(stateDir, "workspace")];
}
/**
 * Seed every managed skill (plus the DenchClaw identity file) into each of
 * the given workspace directories, creating any missing directories.
 */
export function syncManagedSkills(params: {
  workspaceDirs: string[];
  packageRoot: string;
}): SkillSyncResult {
  const { workspaceDirs, packageRoot } = params;
  for (const workspaceDir of workspaceDirs) {
    mkdirSync(workspaceDir, { recursive: true });
    MANAGED_SKILLS.forEach((skill) => seedSkill({ workspaceDir, packageRoot }, skill));
    seedDenchClawIdentity(workspaceDir);
  }
  return {
    syncedSkills: MANAGED_SKILLS.map((skill) => skill.name),
    workspaceDirs,
    identityUpdated: true,
  };
}
export function writeIfMissing(filePath: string, content: string): boolean {
if (existsSync(filePath)) {
return false;
@ -242,11 +307,7 @@ export function seedWorkspaceFromAssets(params: {
...MANAGED_SKILLS.map((s) => `skills/${s.name}/SKILL.md`),
];
mkdirSync(workspaceDir, { recursive: true });
for (const skill of MANAGED_SKILLS) {
seedSkill({ workspaceDir, packageRoot: params.packageRoot }, skill);
}
seedDenchClawIdentity(workspaceDir);
syncManagedSkills({ workspaceDirs: [workspaceDir], packageRoot: params.packageRoot });
if (existsSync(dbPath)) {
return {