🚀 RELEASE: chat sesh stop + queue / workspace.duckdb flexibility / drag to root

This commit is contained in:
kumarabhirup 2026-02-15 23:00:25 -08:00
parent 39ba1aeaeb
commit 170231a54f
No known key found for this signature in database
GPG Key ID: DB7CA2289CAB0167
23 changed files with 1099 additions and 429 deletions

View File

@ -0,0 +1,124 @@
---
name: Workspace profile support
overview: Add full workspace profile and custom path support to the Ironclaw web app and the dench SKILL.md, so they respect OPENCLAW_PROFILE, OPENCLAW_HOME, OPENCLAW_STATE_DIR, and per-agent workspace config — matching the CLI's existing resolution logic.
todos:
- id: centralize-helpers
content: Add resolveOpenClawStateDir() to apps/web/lib/workspace.ts and update resolveWorkspaceRoot() with OPENCLAW_PROFILE + OPENCLAW_HOME + OPENCLAW_STATE_DIR support
status: pending
- id: fix-api-routes
content: Replace all hardcoded join(homedir(), '.openclaw', ...) in ~13 web app API routes/lib files with the new shared helpers
status: pending
- id: fix-empty-state-ui
content: Make empty-state.tsx show the resolved workspace path dynamically instead of hardcoded ~/.openclaw/workspace
status: pending
- id: fix-system-prompt
content: Replace hardcoded ~/.openclaw/web-chat/ in system-prompt.ts line 173 with a dynamic path from the state dir context
status: pending
- id: skill-substitution
content: Add workspace path substitution in buildWorkspaceSkillSnapshot() so injected SKILL.md content replaces ~/.openclaw/workspace with the actual resolved workspace dir
status: pending
- id: tree-api-profile
content: Expose active profile name in the tree API response so the UI can show profile-aware workspace labels
status: pending
isProject: false
---
# Full Workspace Profile and Custom Path Support
## Problem
The CLI core (`src/agents/workspace.ts`, `src/config/paths.ts`) already resolves workspace paths dynamically via `OPENCLAW_PROFILE`, `OPENCLAW_HOME`, `OPENCLAW_STATE_DIR`, and per-agent config — but the web app (`apps/web/`) and the injected dench skill (`skills/dench/SKILL.md`) hardcode `~/.openclaw` and `~/.openclaw/workspace` everywhere, ignoring profiles entirely.
**35 hardcoded `~/.openclaw` references** in `SKILL.md`, and **~15 hardcoded paths** across the web app API routes and UI.
## Approach
### 1. Centralize path resolution in the web app
Create two new helpers in [apps/web/lib/workspace.ts](apps/web/lib/workspace.ts) and update the existing `resolveWorkspaceRoot()`:
- **`resolveOpenClawStateDir()`** — mirrors `src/config/paths.ts:resolveStateDir()` logic: checks `OPENCLAW_STATE_DIR` env var, then `OPENCLAW_HOME`, falls back to `~/.openclaw`. Returns the base state directory (e.g. `~/.openclaw`).
- **Update `resolveWorkspaceRoot()`** — add `OPENCLAW_PROFILE` awareness between the `OPENCLAW_WORKSPACE` check and the fallback:
1. `OPENCLAW_WORKSPACE` env var (existing)
2. `OPENCLAW_PROFILE` -> `<stateDir>/workspace-<profile>` (new)
3. `<stateDir>/workspace` (existing, but now uses `resolveOpenClawStateDir()` instead of hardcoded `~/.openclaw`)
```typescript
/**
 * Resolve the OpenClaw state directory, mirroring the CLI's
 * src/config/paths.ts:resolveStateDir() precedence:
 *   1. OPENCLAW_STATE_DIR env var (trimmed, tilde-expanded)
 *   2. OPENCLAW_HOME + "/.openclaw"
 *   3. ~/.openclaw
 */
export function resolveOpenClawStateDir(): string {
  const override = expandTilde(process.env.OPENCLAW_STATE_DIR?.trim());
  if (override) return override;
  const home = process.env.OPENCLAW_HOME?.trim() || homedir();
  return join(home, ".openclaw");
}

/**
 * Expand a leading "~" path segment to the user's home directory.
 * Only "~" and "~/..." are expanded; "~user" forms are left untouched
 * (the previous slice(1) approach wrongly treated "~user" as home-relative).
 */
function expandTilde(path: string | undefined): string | undefined {
  if (!path) return undefined;
  if (path === "~" || path.startsWith("~/")) {
    return join(homedir(), path.slice(1));
  }
  return path;
}

/**
 * Resolve the workspace root directory, or null when no candidate exists
 * on disk. Precedence:
 *   1. OPENCLAW_WORKSPACE env var (trimmed + tilde-expanded, consistent
 *      with the OPENCLAW_STATE_DIR override handling)
 *   2. <stateDir>/workspace-<profile> when OPENCLAW_PROFILE names a
 *      non-"default" profile
 *   3. <stateDir>/workspace
 */
export function resolveWorkspaceRoot(): string | null {
  const stateDir = resolveOpenClawStateDir();
  const profile = process.env.OPENCLAW_PROFILE?.trim();
  const candidates = [
    expandTilde(process.env.OPENCLAW_WORKSPACE?.trim()),
    profile && profile.toLowerCase() !== "default"
      ? join(stateDir, `workspace-${profile}`)
      : null,
    join(stateDir, "workspace"),
  ].filter((dir): dir is string => Boolean(dir)); // type-predicate filter instead of `as string[]`
  for (const dir of candidates) {
    if (existsSync(dir)) return dir;
  }
  return null;
}
```
### 2. Replace all hardcoded paths in web app API routes
Every file below uses `join(homedir(), ".openclaw", ...)` directly. Replace with calls to `resolveOpenClawStateDir()` or `resolveWorkspaceRoot()`:
| File | What to change |
| ------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------ |
| `apps/web/app/api/workspace/tree/route.ts` | `join(home, ".openclaw", "skills")` and `join(home, ".openclaw")` -> `resolveOpenClawStateDir()` |
| `apps/web/app/api/workspace/virtual-file/route.ts` | All 6 hardcoded paths in `resolveVirtualPath()` and `isSafePath()` -> derive from `resolveWorkspaceRoot()` and `resolveOpenClawStateDir()` |
| `apps/web/app/api/skills/route.ts` | `join(openclawDir, "skills")` and `join(openclawDir, "workspace", "skills")` -> use both helpers |
| `apps/web/app/api/sessions/route.ts` | `resolveOpenClawDir()` local helper -> use shared `resolveOpenClawStateDir()` |
| `apps/web/app/api/memories/route.ts` | `join(homedir(), ".openclaw", "workspace")` -> `resolveWorkspaceRoot()` |
| `apps/web/app/api/cron/jobs/route.ts` | Module-level `CRON_DIR` and `agentsDir` -> derive from `resolveOpenClawStateDir()` |
| `apps/web/app/api/cron/runs/search-transcript/route.ts` | agents dir -> `resolveOpenClawStateDir()` |
| `apps/web/app/api/cron/runs/[sessionId]/route.ts` | agents dir -> `resolveOpenClawStateDir()` |
| `apps/web/app/api/cron/jobs/[jobId]/runs/route.ts` | if hardcoded -> `resolveOpenClawStateDir()` |
| `apps/web/app/api/web-sessions/route.ts` | `WEB_CHAT_DIR` -> derive from `resolveOpenClawStateDir()` |
| `apps/web/app/api/web-sessions/[id]/route.ts` | same |
| `apps/web/app/api/web-sessions/[id]/messages/route.ts` | same |
| `apps/web/lib/active-runs.ts` | `WEB_CHAT_DIR` -> derive from `resolveOpenClawStateDir()` |
### 3. Update the UI empty state
In [apps/web/app/components/workspace/empty-state.tsx](apps/web/app/components/workspace/empty-state.tsx) (line 128): the hardcoded `~/.openclaw/workspace` display string should be dynamic. Two options:
- **Option A**: Pass the resolved workspace path from the tree API response (it already returns `workspaceRoot`). The empty state can show that or a user-friendly tilde-collapsed version.
- **Option B**: Add an API endpoint or server component that returns the expected workspace path.
Option A is simplest — the tree API already returns `openclawDir` and `workspaceRoot`. Thread the expected path into the empty state component.
### 4. Fix hardcoded path in system prompt
In [src/agents/system-prompt.ts](src/agents/system-prompt.ts) line 173: the hardcoded `~/.openclaw/web-chat/` should use the `workspaceDir` parameter (or derive from the state dir that's already available in the prompt builder context). Replace with a template string that references the actual state directory.
### 5. Add workspace variable substitution for injected SKILL.md content
The dench `SKILL.md` has **35 instances** of `~/.openclaw/workspace`. Since this content is injected verbatim into the system prompt via `readSkillContent()`, we need a substitution mechanism.
In [src/agents/skills/workspace.ts](src/agents/skills/workspace.ts) around line 271 where `readSkillContent()` is called for injected skills:
```typescript
// After reading content, substitute workspace path placeholders
const content = readSkillContent(entry.skill.filePath);
if (content) {
const resolved = content.replaceAll("~/.openclaw/workspace", workspaceDir);
injectedSkills.push({ name: entry.skill.name, content: resolved });
}
```
This requires threading `workspaceDir` into `buildWorkspaceSkillSnapshot()` — which it already receives as its first argument.
Then update `skills/dench/SKILL.md` to use `~/.openclaw/workspace` as a canonical placeholder (it already does), and the substitution will replace it with the actual resolved path at injection time. No changes needed to the SKILL.md content itself.
### 6. Expose workspace info in the tree API response
The tree API ([apps/web/app/api/workspace/tree/route.ts](apps/web/app/api/workspace/tree/route.ts)) already returns `workspaceRoot` and `openclawDir`. Consider also returning `profile` (from `OPENCLAW_PROFILE`) so the UI can display profile-aware context (e.g. "Workspace (staging)" in the sidebar).

3
.cursor/worktrees.json Normal file
View File

@ -0,0 +1,3 @@
{
"setup-worktree": ["npm install"]
}

View File

@ -1,4 +1,4 @@
import { duckdbQuery, duckdbPath, duckdbExec } from "@/lib/workspace";
import { duckdbQueryOnFile, duckdbExecOnFile, findDuckDBForObject } from "@/lib/workspace";
export const dynamic = "force-dynamic";
export const runtime = "nodejs";
@ -14,13 +14,6 @@ export async function PATCH(
) {
const { name } = await params;
if (!duckdbPath()) {
return Response.json(
{ error: "DuckDB database not found" },
{ status: 404 },
);
}
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
return Response.json(
{ error: "Invalid object name" },
@ -28,6 +21,14 @@ export async function PATCH(
);
}
const dbFile = findDuckDBForObject(name);
if (!dbFile) {
return Response.json(
{ error: "DuckDB database not found" },
{ status: 404 },
);
}
const body = await req.json();
const { displayField } = body;
@ -39,12 +40,12 @@ export async function PATCH(
}
// Ensure display_field column exists
duckdbExec(
duckdbExecOnFile(dbFile,
"ALTER TABLE objects ADD COLUMN IF NOT EXISTS display_field VARCHAR",
);
// Verify the object exists
const objects = duckdbQuery<{ id: string }>(
const objects = duckdbQueryOnFile<{ id: string }>(dbFile,
`SELECT id FROM objects WHERE name = '${name}' LIMIT 1`,
);
if (objects.length === 0) {
@ -56,7 +57,7 @@ export async function PATCH(
// Verify the field exists on this object
const escapedField = displayField.replace(/'/g, "''");
const fieldCheck = duckdbQuery<{ id: string }>(
const fieldCheck = duckdbQueryOnFile<{ id: string }>(dbFile,
`SELECT id FROM fields WHERE object_id = '${objects[0].id}' AND name = '${escapedField}' LIMIT 1`,
);
if (fieldCheck.length === 0) {
@ -67,7 +68,7 @@ export async function PATCH(
}
// Update the display_field
const success = duckdbExec(
const success = duckdbExecOnFile(dbFile,
`UPDATE objects SET display_field = '${escapedField}', updated_at = now() WHERE name = '${name}'`,
);

View File

@ -1,7 +1,8 @@
import {
duckdbQuery,
duckdbExec,
duckdbPath,
duckdbQueryOnFile,
duckdbExecOnFile,
findDuckDBForObject,
discoverDuckDBPaths,
parseRelationValue,
} from "@/lib/workspace";
@ -71,6 +72,11 @@ function resolveDisplayField(
return fields[0]?.name ?? "id";
}
/** Scoped query shorthand: run `sql` against a specific DuckDB file. */
function q<T = Record<string, unknown>>(dbFile: string, sql: string): T[] {
  const rows = duckdbQueryOnFile<T>(dbFile, sql);
  return rows;
}
// --- Route handlers ---
/**
@ -83,13 +89,6 @@ export async function GET(
) {
const { name, id } = await params;
if (!duckdbPath()) {
return Response.json(
{ error: "DuckDB not found" },
{ status: 404 },
);
}
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
return Response.json(
{ error: "Invalid object name" },
@ -103,8 +102,16 @@ export async function GET(
);
}
const dbFile = findDuckDBForObject(name);
if (!dbFile) {
return Response.json(
{ error: "DuckDB not found" },
{ status: 404 },
);
}
// Fetch object
const objects = duckdbQuery<ObjectRow>(
const objects = q<ObjectRow>(dbFile,
`SELECT * FROM objects WHERE name = '${sqlEscape(name)}' LIMIT 1`,
);
if (objects.length === 0) {
@ -116,18 +123,18 @@ export async function GET(
const obj = objects[0];
// Fetch fields
const fields = duckdbQuery<FieldRow>(
const fields = q<FieldRow>(dbFile,
`SELECT * FROM fields WHERE object_id = '${sqlEscape(obj.id)}' ORDER BY sort_order`,
);
// Fetch entry field values
const entryRows = duckdbQuery<{
const entryRows = q<{
entry_id: string;
created_at: string;
updated_at: string;
field_name: string;
value: string | null;
}>(
}>(dbFile,
`SELECT e.id as entry_id, e.created_at, e.updated_at,
f.name as field_name, ef.value
FROM entries e
@ -138,7 +145,7 @@ export async function GET(
);
if (entryRows.length === 0) {
const exists = duckdbQuery<{ cnt: number }>(
const exists = q<{ cnt: number }>(dbFile,
`SELECT COUNT(*) as cnt FROM entries WHERE id = '${sqlEscape(id)}' AND object_id = '${sqlEscape(obj.id)}'`,
);
if (!exists[0] || exists[0].cnt === 0) {
@ -171,8 +178,7 @@ export async function GET(
}));
// Resolve relation labels for this entry
const relationLabels: Record<string, Record<string, string>> =
{};
const relationLabels: Record<string, Record<string, string>> = {};
const relatedObjectNames: Record<string, string> = {};
const relationFields = fields.filter(
@ -180,7 +186,7 @@ export async function GET(
);
for (const rf of relationFields) {
const relatedObjs = duckdbQuery<ObjectRow>(
const relatedObjs = q<ObjectRow>(dbFile,
`SELECT * FROM objects WHERE id = '${sqlEscape(rf.related_object_id!)}' LIMIT 1`,
);
if (relatedObjs.length === 0) {
@ -201,21 +207,15 @@ export async function GET(
continue;
}
const relFields = duckdbQuery<FieldRow>(
const relFields = q<FieldRow>(dbFile,
`SELECT * FROM fields WHERE object_id = '${sqlEscape(relObj.id)}' ORDER BY sort_order`,
);
const displayFieldName = resolveDisplayField(
relObj,
relFields,
);
const displayFieldName = resolveDisplayField(relObj, relFields);
const idList = ids
.map((i) => `'${sqlEscape(i)}'`)
.join(",");
const displayRows = duckdbQuery<{
entry_id: string;
value: string;
}>(
const displayRows = q<{ entry_id: string; value: string }>(dbFile,
`SELECT e.id as entry_id, ef.value
FROM entries e
JOIN entry_fields ef ON ef.entry_id = e.id
@ -246,11 +246,8 @@ export async function GET(
: undefined,
}));
// Find reverse relations for this entry
const reverseRelations = findReverseRelationsForEntry(
obj.id,
id,
);
// Find reverse relations for this entry (search across all DBs)
const reverseRelations = findReverseRelationsForEntry(obj.id, id);
const effectiveDisplayField = resolveDisplayField(obj, fields);
@ -275,12 +272,6 @@ export async function PATCH(
) {
const { name, id } = await params;
if (!duckdbPath()) {
return Response.json(
{ error: "DuckDB not found" },
{ status: 404 },
);
}
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
return Response.json(
{ error: "Invalid object name" },
@ -288,8 +279,16 @@ export async function PATCH(
);
}
const dbFile = findDuckDBForObject(name);
if (!dbFile) {
return Response.json(
{ error: "DuckDB not found" },
{ status: 404 },
);
}
// Find object
const objects = duckdbQuery<{ id: string }>(
const objects = q<{ id: string }>(dbFile,
`SELECT id FROM objects WHERE name = '${sqlEscape(name)}' LIMIT 1`,
);
if (objects.length === 0) {
@ -301,7 +300,7 @@ export async function PATCH(
const objectId = objects[0].id;
// Verify entry exists
const exists = duckdbQuery<{ cnt: number }>(
const exists = q<{ cnt: number }>(dbFile,
`SELECT COUNT(*) as cnt FROM entries WHERE id = '${sqlEscape(id)}' AND object_id = '${sqlEscape(objectId)}'`,
);
if (!exists[0] || exists[0].cnt === 0) {
@ -312,11 +311,10 @@ export async function PATCH(
}
const body = await req.json();
const fieldUpdates: Record<string, string> =
body.fields ?? {};
const fieldUpdates: Record<string, string> = body.fields ?? {};
// Get field IDs by name
const dbFields = duckdbQuery<{ id: string; name: string }>(
const dbFields = q<{ id: string; name: string }>(dbFile,
`SELECT id, name FROM fields WHERE object_id = '${sqlEscape(objectId)}'`,
);
const fieldMap = new Map(dbFields.map((f) => [f.name, f.id]));
@ -330,16 +328,16 @@ export async function PATCH(
value == null ? "NULL" : `'${sqlEscape(String(value))}'`;
// Try update first, then insert if no rows affected
const existingRows = duckdbQuery<{ cnt: number }>(
const existingRows = q<{ cnt: number }>(dbFile,
`SELECT COUNT(*) as cnt FROM entry_fields WHERE entry_id = '${sqlEscape(id)}' AND field_id = '${sqlEscape(fieldId)}'`,
);
if (existingRows[0]?.cnt > 0) {
duckdbExec(
duckdbExecOnFile(dbFile,
`UPDATE entry_fields SET value = ${escapedValue} WHERE entry_id = '${sqlEscape(id)}' AND field_id = '${sqlEscape(fieldId)}'`,
);
} else {
duckdbExec(
duckdbExecOnFile(dbFile,
`INSERT INTO entry_fields (entry_id, field_id, value) VALUES ('${sqlEscape(id)}', '${sqlEscape(fieldId)}', ${escapedValue})`,
);
}
@ -348,7 +346,7 @@ export async function PATCH(
// Touch updated_at on the entry
const now = new Date().toISOString();
duckdbExec(
duckdbExecOnFile(dbFile,
`UPDATE entries SET updated_at = '${now}' WHERE id = '${sqlEscape(id)}'`,
);
@ -365,12 +363,6 @@ export async function DELETE(
) {
const { name, id } = await params;
if (!duckdbPath()) {
return Response.json(
{ error: "DuckDB not found" },
{ status: 404 },
);
}
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
return Response.json(
{ error: "Invalid object name" },
@ -378,8 +370,16 @@ export async function DELETE(
);
}
const dbFile = findDuckDBForObject(name);
if (!dbFile) {
return Response.json(
{ error: "DuckDB not found" },
{ status: 404 },
);
}
// Find object
const objects = duckdbQuery<{ id: string }>(
const objects = q<{ id: string }>(dbFile,
`SELECT id FROM objects WHERE name = '${sqlEscape(name)}' LIMIT 1`,
);
if (objects.length === 0) {
@ -391,10 +391,10 @@ export async function DELETE(
const objectId = objects[0].id;
// Delete field values first, then entry
duckdbExec(
duckdbExecOnFile(dbFile,
`DELETE FROM entry_fields WHERE entry_id = '${sqlEscape(id)}'`,
);
duckdbExec(
duckdbExecOnFile(dbFile,
`DELETE FROM entries WHERE id = '${sqlEscape(id)}' AND object_id = '${sqlEscape(objectId)}'`,
);
@ -411,100 +411,96 @@ type ReverseRelation = {
links: Array<{ id: string; label: string }>;
};
/**
* Find reverse relations for a single entry, searching across ALL discovered databases.
*/
function findReverseRelationsForEntry(
objectId: string,
entryId: string,
): ReverseRelation[] {
const reverseFields = duckdbQuery<{
id: string;
name: string;
object_id: string;
source_object_name: string;
}>(
`SELECT f.id, f.name, f.object_id, o.name as source_object_name
FROM fields f
JOIN objects o ON o.id = f.object_id
WHERE f.type = 'relation'
AND f.related_object_id = '${sqlEscape(objectId)}'`,
);
if (reverseFields.length === 0) {
return [];
}
const dbPaths = discoverDuckDBPaths();
const result: ReverseRelation[] = [];
for (const rrf of reverseFields) {
const refRows = duckdbQuery<{
source_entry_id: string;
target_value: string;
}>(
`SELECT ef.entry_id as source_entry_id, ef.value as target_value
FROM entry_fields ef
WHERE ef.field_id = '${sqlEscape(rrf.id)}'
AND ef.value IS NOT NULL
AND ef.value != ''`,
for (const db of dbPaths) {
const reverseFields = q<{
id: string;
name: string;
object_id: string;
source_object_name: string;
}>(db,
`SELECT f.id, f.name, f.object_id, o.name as source_object_name
FROM fields f
JOIN objects o ON o.id = f.object_id
WHERE f.type = 'relation'
AND f.related_object_id = '${sqlEscape(objectId)}'`,
);
const matchingSourceIds: string[] = [];
for (const row of refRows) {
const targetIds = parseRelationValue(row.target_value);
if (targetIds.includes(entryId)) {
matchingSourceIds.push(row.source_entry_id);
for (const rrf of reverseFields) {
const refRows = q<{
source_entry_id: string;
target_value: string;
}>(db,
`SELECT ef.entry_id as source_entry_id, ef.value as target_value
FROM entry_fields ef
WHERE ef.field_id = '${sqlEscape(rrf.id)}'
AND ef.value IS NOT NULL
AND ef.value != ''`,
);
const matchingSourceIds: string[] = [];
for (const row of refRows) {
const targetIds = parseRelationValue(row.target_value);
if (targetIds.includes(entryId)) {
matchingSourceIds.push(row.source_entry_id);
}
}
if (matchingSourceIds.length === 0) {
continue;
}
const sourceObj = q<ObjectRow>(db,
`SELECT * FROM objects WHERE id = '${sqlEscape(rrf.object_id)}' LIMIT 1`,
);
if (sourceObj.length === 0) {
continue;
}
const sourceFields = q<FieldRow>(db,
`SELECT * FROM fields WHERE object_id = '${sqlEscape(rrf.object_id)}' ORDER BY sort_order`,
);
const displayFieldName = resolveDisplayField(sourceObj[0], sourceFields);
const idList = matchingSourceIds
.map((i) => `'${sqlEscape(i)}'`)
.join(",");
const displayRows = q<{ entry_id: string; value: string }>(db,
`SELECT ef.entry_id, ef.value
FROM entry_fields ef
JOIN fields f ON f.id = ef.field_id
WHERE ef.entry_id IN (${idList})
AND f.name = '${sqlEscape(displayFieldName)}'
AND f.object_id = '${sqlEscape(rrf.object_id)}'`,
);
const displayMap: Record<string, string> = {};
for (const row of displayRows) {
displayMap[row.entry_id] = row.value || row.entry_id;
}
const links = matchingSourceIds.map((sid) => ({
id: sid,
label: displayMap[sid] || sid,
}));
result.push({
fieldName: rrf.name,
sourceObjectName: rrf.source_object_name,
sourceObjectId: rrf.object_id,
displayField: displayFieldName,
links,
});
}
if (matchingSourceIds.length === 0) {
continue;
}
const sourceObj = duckdbQuery<ObjectRow>(
`SELECT * FROM objects WHERE id = '${sqlEscape(rrf.object_id)}' LIMIT 1`,
);
if (sourceObj.length === 0) {
continue;
}
const sourceFields = duckdbQuery<FieldRow>(
`SELECT * FROM fields WHERE object_id = '${sqlEscape(rrf.object_id)}' ORDER BY sort_order`,
);
const displayFieldName = resolveDisplayField(
sourceObj[0],
sourceFields,
);
const idList = matchingSourceIds
.map((i) => `'${sqlEscape(i)}'`)
.join(",");
const displayRows = duckdbQuery<{
entry_id: string;
value: string;
}>(
`SELECT ef.entry_id, ef.value
FROM entry_fields ef
JOIN fields f ON f.id = ef.field_id
WHERE ef.entry_id IN (${idList})
AND f.name = '${sqlEscape(displayFieldName)}'
AND f.object_id = '${sqlEscape(rrf.object_id)}'`,
);
const displayMap: Record<string, string> = {};
for (const row of displayRows) {
displayMap[row.entry_id] = row.value || row.entry_id;
}
const links = matchingSourceIds.map((sid) => ({
id: sid,
label: displayMap[sid] || sid,
}));
result.push({
fieldName: rrf.name,
sourceObjectName: rrf.source_object_name,
sourceObjectId: rrf.object_id,
displayField: displayFieldName,
links,
});
}
return result;

View File

@ -1,4 +1,4 @@
import { duckdbExec, duckdbQuery, duckdbPath } from "@/lib/workspace";
import { duckdbExecOnFile, duckdbQueryOnFile, findDuckDBForObject } from "@/lib/workspace";
export const dynamic = "force-dynamic";
export const runtime = "nodejs";
@ -18,12 +18,6 @@ export async function POST(
) {
const { name } = await params;
if (!duckdbPath()) {
return Response.json(
{ error: "DuckDB not found" },
{ status: 404 },
);
}
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
return Response.json(
{ error: "Invalid object name" },
@ -31,6 +25,14 @@ export async function POST(
);
}
const dbFile = findDuckDBForObject(name);
if (!dbFile) {
return Response.json(
{ error: "DuckDB not found" },
{ status: 404 },
);
}
const body = await req.json();
const entryIds: string[] = body.entryIds;
@ -42,7 +44,7 @@ export async function POST(
}
// Validate object exists
const objects = duckdbQuery<{ id: string }>(
const objects = duckdbQueryOnFile<{ id: string }>(dbFile,
`SELECT id FROM objects WHERE name = '${sqlEscape(name)}' LIMIT 1`,
);
if (objects.length === 0) {
@ -58,10 +60,10 @@ export async function POST(
.join(",");
// Delete field values first, then entries
duckdbExec(
duckdbExecOnFile(dbFile,
`DELETE FROM entry_fields WHERE entry_id IN (${idList})`,
);
duckdbExec(
duckdbExecOnFile(dbFile,
`DELETE FROM entries WHERE id IN (${idList}) AND object_id = '${sqlEscape(objectId)}'`,
);

View File

@ -1,4 +1,4 @@
import { duckdbQuery, duckdbPath } from "@/lib/workspace";
import { duckdbQueryOnFile, findDuckDBForObject } from "@/lib/workspace";
export const dynamic = "force-dynamic";
export const runtime = "nodejs";
@ -44,15 +44,16 @@ export async function GET(
) {
const { name } = await params;
if (!duckdbPath()) {
return Response.json({ error: "DuckDB not found" }, { status: 404 });
}
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
return Response.json({ error: "Invalid object name" }, { status: 400 });
}
const objects = duckdbQuery<ObjectRow>(
const dbFile = findDuckDBForObject(name);
if (!dbFile) {
return Response.json({ error: "DuckDB not found" }, { status: 404 });
}
const objects = duckdbQueryOnFile<ObjectRow>(dbFile,
`SELECT * FROM objects WHERE name = '${sqlEscape(name)}' LIMIT 1`,
);
if (objects.length === 0) {
@ -60,7 +61,7 @@ export async function GET(
}
const obj = objects[0];
const fields = duckdbQuery<FieldRow>(
const fields = duckdbQueryOnFile<FieldRow>(dbFile,
`SELECT * FROM fields WHERE object_id = '${sqlEscape(obj.id)}' ORDER BY sort_order`,
);
const displayFieldName = resolveDisplayField(obj, fields);
@ -70,7 +71,7 @@ export async function GET(
const query = url.searchParams.get("q")?.trim() ?? "";
// Fetch entries with their display field value
const rows = duckdbQuery<{ entry_id: string; label: string | null }>(
const rows = duckdbQueryOnFile<{ entry_id: string; label: string | null }>(dbFile,
`SELECT e.id as entry_id, ef.value as label
FROM entries e
LEFT JOIN entry_fields ef ON ef.entry_id = e.id

View File

@ -1,4 +1,4 @@
import { duckdbExec, duckdbQuery, duckdbPath } from "@/lib/workspace";
import { duckdbExecOnFile, duckdbQueryOnFile, findDuckDBForObject } from "@/lib/workspace";
export const dynamic = "force-dynamic";
export const runtime = "nodejs";
@ -18,12 +18,6 @@ export async function POST(
) {
const { name } = await params;
if (!duckdbPath()) {
return Response.json(
{ error: "DuckDB not found" },
{ status: 404 },
);
}
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
return Response.json(
{ error: "Invalid object name" },
@ -31,8 +25,16 @@ export async function POST(
);
}
const dbFile = findDuckDBForObject(name);
if (!dbFile) {
return Response.json(
{ error: "DuckDB not found" },
{ status: 404 },
);
}
// Find object
const objects = duckdbQuery<{ id: string }>(
const objects = duckdbQueryOnFile<{ id: string }>(dbFile,
`SELECT id FROM objects WHERE name = '${sqlEscape(name)}' LIMIT 1`,
);
if (objects.length === 0) {
@ -44,7 +46,7 @@ export async function POST(
const objectId = objects[0].id;
// Generate UUID for the new entry
const idRows = duckdbQuery<{ id: string }>(
const idRows = duckdbQueryOnFile<{ id: string }>(dbFile,
"SELECT uuid()::VARCHAR as id",
);
const entryId = idRows[0]?.id;
@ -57,7 +59,7 @@ export async function POST(
// Create entry
const now = new Date().toISOString();
const ok = duckdbExec(
const ok = duckdbExecOnFile(dbFile,
`INSERT INTO entries (id, object_id, created_at, updated_at) VALUES ('${sqlEscape(entryId)}', '${sqlEscape(objectId)}', '${now}', '${now}')`,
);
if (!ok) {
@ -77,7 +79,7 @@ export async function POST(
if (body.fields && typeof body.fields === "object") {
// Get field IDs by name
const dbFields = duckdbQuery<{ id: string; name: string }>(
const dbFields = duckdbQueryOnFile<{ id: string; name: string }>(dbFile,
`SELECT id, name FROM fields WHERE object_id = '${sqlEscape(objectId)}'`,
);
const fieldMap = new Map(dbFields.map((f) => [f.name, f.id]));
@ -85,7 +87,7 @@ export async function POST(
for (const [fieldName, value] of Object.entries(body.fields)) {
const fieldId = fieldMap.get(fieldName);
if (!fieldId || value == null) {continue;}
duckdbExec(
duckdbExecOnFile(dbFile,
`INSERT INTO entry_fields (entry_id, field_id, value) VALUES ('${sqlEscape(entryId)}', '${sqlEscape(fieldId)}', '${sqlEscape(String(value))}')`,
);
}

View File

@ -1,4 +1,4 @@
import { duckdbExec, duckdbQuery, duckdbPath } from "@/lib/workspace";
import { duckdbExecOnFile, duckdbQueryOnFile, findDuckDBForObject } from "@/lib/workspace";
export const dynamic = "force-dynamic";
export const runtime = "nodejs";
@ -20,12 +20,6 @@ export async function PATCH(
) {
const { name, fieldId } = await params;
if (!duckdbPath()) {
return Response.json(
{ error: "DuckDB not found" },
{ status: 404 },
);
}
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
return Response.json(
{ error: "Invalid object name" },
@ -33,6 +27,14 @@ export async function PATCH(
);
}
const dbFile = findDuckDBForObject(name);
if (!dbFile) {
return Response.json(
{ error: "DuckDB not found" },
{ status: 404 },
);
}
const body = await req.json();
const oldValue: string = body.oldValue;
const newValue: string = body.newValue;
@ -48,7 +50,7 @@ export async function PATCH(
}
// Validate object exists
const objects = duckdbQuery<{ id: string }>(
const objects = duckdbQueryOnFile<{ id: string }>(dbFile,
`SELECT id FROM objects WHERE name = '${sqlEscape(name)}' LIMIT 1`,
);
if (objects.length === 0) {
@ -60,7 +62,7 @@ export async function PATCH(
const objectId = objects[0].id;
// Validate field exists and is an enum
const fields = duckdbQuery<{ id: string; enum_values: string | null; enum_colors: string | null }>(
const fields = duckdbQueryOnFile<{ id: string; enum_values: string | null; enum_colors: string | null }>(dbFile,
`SELECT id, enum_values, enum_colors FROM fields WHERE id = '${sqlEscape(fieldId)}' AND object_id = '${sqlEscape(objectId)}'`,
);
if (fields.length === 0) {
@ -101,12 +103,12 @@ export async function PATCH(
enumValues[idx] = newValue.trim();
const newEnumJson = JSON.stringify(enumValues);
duckdbExec(
duckdbExecOnFile(dbFile,
`UPDATE fields SET enum_values = '${sqlEscape(newEnumJson)}' WHERE id = '${sqlEscape(fieldId)}'`,
);
// Update all entry_fields with the old value to the new value
const updatedEntries = duckdbExec(
const updatedEntries = duckdbExecOnFile(dbFile,
`UPDATE entry_fields SET value = '${sqlEscape(newValue.trim())}' WHERE field_id = '${sqlEscape(fieldId)}' AND value = '${sqlEscape(oldValue.trim())}'`,
);

View File

@ -1,4 +1,4 @@
import { duckdbExec, duckdbQuery, duckdbPath } from "@/lib/workspace";
import { duckdbExecOnFile, duckdbQueryOnFile, findDuckDBForObject } from "@/lib/workspace";
export const dynamic = "force-dynamic";
export const runtime = "nodejs";
@ -20,12 +20,6 @@ export async function PATCH(
) {
const { name, fieldId } = await params;
if (!duckdbPath()) {
return Response.json(
{ error: "DuckDB not found" },
{ status: 404 },
);
}
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
return Response.json(
{ error: "Invalid object name" },
@ -33,6 +27,14 @@ export async function PATCH(
);
}
const dbFile = findDuckDBForObject(name);
if (!dbFile) {
return Response.json(
{ error: "DuckDB not found" },
{ status: 404 },
);
}
const body = await req.json();
const newName: string = body.name;
@ -48,7 +50,7 @@ export async function PATCH(
}
// Validate object exists
const objects = duckdbQuery<{ id: string }>(
const objects = duckdbQueryOnFile<{ id: string }>(dbFile,
`SELECT id FROM objects WHERE name = '${sqlEscape(name)}' LIMIT 1`,
);
if (objects.length === 0) {
@ -60,7 +62,7 @@ export async function PATCH(
const objectId = objects[0].id;
// Validate field exists and belongs to this object
const fieldExists = duckdbQuery<{ cnt: number }>(
const fieldExists = duckdbQueryOnFile<{ cnt: number }>(dbFile,
`SELECT COUNT(*) as cnt FROM fields WHERE id = '${sqlEscape(fieldId)}' AND object_id = '${sqlEscape(objectId)}'`,
);
if (!fieldExists[0] || fieldExists[0].cnt === 0) {
@ -71,7 +73,7 @@ export async function PATCH(
}
// Check for duplicate name
const duplicateCheck = duckdbQuery<{ cnt: number }>(
const duplicateCheck = duckdbQueryOnFile<{ cnt: number }>(dbFile,
`SELECT COUNT(*) as cnt FROM fields WHERE object_id = '${sqlEscape(objectId)}' AND name = '${sqlEscape(newName.trim())}' AND id != '${sqlEscape(fieldId)}'`,
);
if (duplicateCheck[0]?.cnt > 0) {
@ -81,7 +83,7 @@ export async function PATCH(
);
}
const ok = duckdbExec(
const ok = duckdbExecOnFile(dbFile,
`UPDATE fields SET name = '${sqlEscape(newName.trim())}' WHERE id = '${sqlEscape(fieldId)}'`,
);

View File

@ -1,4 +1,4 @@
import { duckdbExec, duckdbQuery, duckdbPath } from "@/lib/workspace";
import { duckdbExecOnFile, duckdbQueryOnFile, findDuckDBForObject } from "@/lib/workspace";
export const dynamic = "force-dynamic";
export const runtime = "nodejs";
@ -18,12 +18,6 @@ export async function PATCH(
) {
const { name } = await params;
if (!duckdbPath()) {
return Response.json(
{ error: "DuckDB not found" },
{ status: 404 },
);
}
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
return Response.json(
{ error: "Invalid object name" },
@ -31,6 +25,14 @@ export async function PATCH(
);
}
const dbFile = findDuckDBForObject(name);
if (!dbFile) {
return Response.json(
{ error: "DuckDB not found" },
{ status: 404 },
);
}
const body = await req.json();
const fieldOrder: string[] = body.fieldOrder;
@ -42,7 +44,7 @@ export async function PATCH(
}
// Validate object exists
const objects = duckdbQuery<{ id: string }>(
const objects = duckdbQueryOnFile<{ id: string }>(dbFile,
`SELECT id FROM objects WHERE name = '${sqlEscape(name)}' LIMIT 1`,
);
if (objects.length === 0) {
@ -55,7 +57,7 @@ export async function PATCH(
// Update sort_order for each field
for (let i = 0; i < fieldOrder.length; i++) {
duckdbExec(
duckdbExecOnFile(dbFile,
`UPDATE fields SET sort_order = ${i} WHERE id = '${sqlEscape(fieldOrder[i])}' AND object_id = '${sqlEscape(objectId)}'`,
);
}

View File

@ -1,4 +1,5 @@
import { duckdbQuery, duckdbPath, duckdbExec, parseRelationValue, resolveDuckdbBin } from "@/lib/workspace";
import { duckdbPath, parseRelationValue, resolveDuckdbBin, findDuckDBForObject, duckdbQueryOnFile, discoverDuckDBPaths } from "@/lib/workspace";
import { execSync } from "node:child_process";
export const dynamic = "force-dynamic";
export const runtime = "nodejs";
@ -47,18 +48,31 @@ type EavRow = {
// --- Schema migration (idempotent, runs once per process) ---
let schemaMigrated = false;
const migratedDbs = new Set<string>();
function ensureDisplayFieldColumn() {
if (schemaMigrated) {return;}
duckdbExec(
"ALTER TABLE objects ADD COLUMN IF NOT EXISTS display_field VARCHAR",
);
schemaMigrated = true;
/** Ensure the display_field column exists on a specific DB file. */
function ensureDisplayFieldColumn(dbFile: string) {
if (migratedDbs.has(dbFile)) {return;}
const bin = resolveDuckdbBin();
if (!bin) {return;}
try {
execSync(
`'${bin}' '${dbFile}' 'ALTER TABLE objects ADD COLUMN IF NOT EXISTS display_field VARCHAR'`,
{ encoding: "utf-8", timeout: 5_000, shell: "/bin/sh" },
);
} catch {
// migration might fail on DBs that don't have the objects table — skip
}
migratedDbs.add(dbFile);
}
// --- Helpers ---
/** Scoped query helper: queries a specific DB file. */
function q<T = Record<string, unknown>>(dbFile: string, sql: string): T[] {
return duckdbQueryOnFile<T>(dbFile, sql);
}
/**
* Pivot raw EAV rows into one object per entry with field names as keys.
*/
@ -124,9 +138,10 @@ function resolveDisplayField(
/**
* Resolve relation field values to human-readable display labels.
* Returns: { fieldName: { entryId: displayLabel } }
* All queries target the same DB file where the object lives.
*/
function resolveRelationLabels(
dbFile: string,
fields: FieldRow[],
entries: Record<string, unknown>[],
): {
@ -141,20 +156,18 @@ function resolveRelationLabels(
);
for (const rf of relationFields) {
const relatedObjs = duckdbQuery<ObjectRow>(
const relatedObjs = q<ObjectRow>(dbFile,
`SELECT * FROM objects WHERE id = '${sqlEscape(rf.related_object_id!)}' LIMIT 1`,
);
if (relatedObjs.length === 0) {continue;}
const relObj = relatedObjs[0];
relatedObjectNames[rf.name] = relObj.name;
// Get related object's fields for display field resolution
const relFields = duckdbQuery<FieldRow>(
const relFields = q<FieldRow>(dbFile,
`SELECT * FROM fields WHERE object_id = '${sqlEscape(relObj.id)}' ORDER BY sort_order`,
);
const displayFieldName = resolveDisplayField(relObj, relFields);
// Collect all referenced entry IDs from our entries
const entryIds = new Set<string>();
for (const entry of entries) {
const val = entry[rf.name];
@ -169,14 +182,10 @@ function resolveRelationLabels(
continue;
}
// Query display values for the referenced entries
const idList = Array.from(entryIds)
.map((id) => `'${sqlEscape(id)}'`)
.join(",");
const displayRows = duckdbQuery<{
entry_id: string;
value: string;
}>(
const displayRows = q<{ entry_id: string; value: string }>(dbFile,
`SELECT e.id as entry_id, ef.value
FROM entries e
JOIN entry_fields ef ON ef.entry_id = e.id
@ -190,7 +199,6 @@ function resolveRelationLabels(
for (const row of displayRows) {
labelMap[row.entry_id] = row.value || row.entry_id;
}
// Fill in any IDs that didn't get a display label
for (const id of entryIds) {
if (!labelMap[id]) {labelMap[id] = id;}
}
@ -211,97 +219,80 @@ type ReverseRelation = {
/**
* Find reverse relations: other objects with relation fields pointing TO this object.
* For each, resolve the display labels and group by target entry ID.
* Searches across ALL discovered databases to catch cross-DB relations.
*/
function findReverseRelations(objectId: string): ReverseRelation[] {
// Find all relation fields in other objects that reference this object
const reverseFields = duckdbQuery<
FieldRow & { source_object_id: string; source_object_name: string }
>(
`SELECT f.*, f.object_id as source_object_id, o.name as source_object_name
FROM fields f
JOIN objects o ON o.id = f.object_id
WHERE f.type = 'relation'
AND f.related_object_id = '${sqlEscape(objectId)}'`,
);
if (reverseFields.length === 0) {return [];}
const dbPaths = discoverDuckDBPaths();
const result: ReverseRelation[] = [];
for (const rrf of reverseFields) {
// Get source object and its fields
const sourceObjs = duckdbQuery<ObjectRow>(
`SELECT * FROM objects WHERE id = '${sqlEscape(rrf.source_object_id)}' LIMIT 1`,
);
if (sourceObjs.length === 0) {continue;}
const sourceFields = duckdbQuery<FieldRow>(
`SELECT * FROM fields WHERE object_id = '${sqlEscape(rrf.source_object_id)}' ORDER BY sort_order`,
);
const displayFieldName = resolveDisplayField(sourceObjs[0], sourceFields);
// Fetch all source entries that have this relation field set
const refRows = duckdbQuery<{
source_entry_id: string;
target_value: string;
}>(
`SELECT ef.entry_id as source_entry_id, ef.value as target_value
FROM entry_fields ef
WHERE ef.field_id = '${sqlEscape(rrf.id)}'
AND ef.value IS NOT NULL
AND ef.value != ''`,
for (const db of dbPaths) {
const reverseFields = q<
FieldRow & { source_object_id: string; source_object_name: string }
>(db,
`SELECT f.*, f.object_id as source_object_id, o.name as source_object_name
FROM fields f
JOIN objects o ON o.id = f.object_id
WHERE f.type = 'relation'
AND f.related_object_id = '${sqlEscape(objectId)}'`,
);
if (refRows.length === 0) {continue;}
for (const rrf of reverseFields) {
const sourceObjs = q<ObjectRow>(db,
`SELECT * FROM objects WHERE id = '${sqlEscape(rrf.source_object_id)}' LIMIT 1`,
);
if (sourceObjs.length === 0) {continue;}
// Get display labels for the source entries
const sourceEntryIds = [
...new Set(refRows.map((r) => r.source_entry_id)),
];
const idList = sourceEntryIds
.map((id) => `'${sqlEscape(id)}'`)
.join(",");
const displayRows = duckdbQuery<{
entry_id: string;
value: string;
}>(
`SELECT ef.entry_id, ef.value
FROM entry_fields ef
JOIN fields f ON f.id = ef.field_id
WHERE ef.entry_id IN (${idList})
AND f.name = '${sqlEscape(displayFieldName)}'
AND f.object_id = '${sqlEscape(rrf.source_object_id)}'`,
);
const sourceFields = q<FieldRow>(db,
`SELECT * FROM fields WHERE object_id = '${sqlEscape(rrf.source_object_id)}' ORDER BY sort_order`,
);
const displayFieldName = resolveDisplayField(sourceObjs[0], sourceFields);
const displayMap: Record<string, string> = {};
for (const row of displayRows) {
displayMap[row.entry_id] = row.value || row.entry_id;
}
const refRows = q<{ source_entry_id: string; target_value: string }>(db,
`SELECT ef.entry_id as source_entry_id, ef.value as target_value
FROM entry_fields ef
WHERE ef.field_id = '${sqlEscape(rrf.id)}'
AND ef.value IS NOT NULL
AND ef.value != ''`,
);
// Build: target_entry_id -> [{id, label}]
const entriesMap: Record<
string,
Array<{ id: string; label: string }>
> = {};
for (const row of refRows) {
const targetIds = parseRelationValue(row.target_value);
for (const targetId of targetIds) {
if (!entriesMap[targetId]) {entriesMap[targetId] = [];}
entriesMap[targetId].push({
id: row.source_entry_id,
label: displayMap[row.source_entry_id] || row.source_entry_id,
});
if (refRows.length === 0) {continue;}
const sourceEntryIds = [...new Set(refRows.map((r) => r.source_entry_id))];
const idList = sourceEntryIds.map((id) => `'${sqlEscape(id)}'`).join(",");
const displayRows = q<{ entry_id: string; value: string }>(db,
`SELECT ef.entry_id, ef.value
FROM entry_fields ef
JOIN fields f ON f.id = ef.field_id
WHERE ef.entry_id IN (${idList})
AND f.name = '${sqlEscape(displayFieldName)}'
AND f.object_id = '${sqlEscape(rrf.source_object_id)}'`,
);
const displayMap: Record<string, string> = {};
for (const row of displayRows) {
displayMap[row.entry_id] = row.value || row.entry_id;
}
}
result.push({
fieldName: rrf.name,
sourceObjectName: rrf.source_object_name,
sourceObjectId: rrf.source_object_id,
displayField: displayFieldName,
entries: entriesMap,
});
const entriesMap: Record<string, Array<{ id: string; label: string }>> = {};
for (const row of refRows) {
const targetIds = parseRelationValue(row.target_value);
for (const targetId of targetIds) {
if (!entriesMap[targetId]) {entriesMap[targetId] = [];}
entriesMap[targetId].push({
id: row.source_entry_id,
label: displayMap[row.source_entry_id] || row.source_entry_id,
});
}
}
result.push({
fieldName: rrf.name,
sourceObjectName: rrf.source_object_name,
sourceObjectId: rrf.source_object_id,
displayField: displayFieldName,
entries: entriesMap,
});
}
}
return result;
@ -322,26 +313,35 @@ export async function GET(
);
}
if (!duckdbPath()) {
return Response.json(
{ error: "DuckDB database not found" },
{ status: 404 },
);
}
// Sanitize name to prevent injection (only allow alphanumeric + underscore)
if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(name)) {
// Sanitize name to prevent injection (only allow alphanumeric + underscore + hyphen)
if (!/^[a-zA-Z_][a-zA-Z0-9_-]*$/.test(name)) {
return Response.json(
{ error: "Invalid object name" },
{ status: 400 },
);
}
// Ensure display_field column exists (idempotent migration)
ensureDisplayFieldColumn();
// Find which DuckDB file contains this object (searches all discovered DBs)
const dbFile = findDuckDBForObject(name);
if (!dbFile) {
// Fall back to primary DB check for a friendlier error message
if (!duckdbPath()) {
return Response.json(
{ error: "DuckDB database not found" },
{ status: 404 },
);
}
return Response.json(
{ error: `Object '${name}' not found` },
{ status: 404 },
);
}
// Fetch object metadata
const objects = duckdbQuery<ObjectRow>(
// Ensure display_field column exists on this specific DB
ensureDisplayFieldColumn(dbFile);
// All queries below target the specific DB that owns this object
const objects = q<ObjectRow>(dbFile,
`SELECT * FROM objects WHERE name = '${name}' LIMIT 1`,
);
@ -354,27 +354,25 @@ export async function GET(
const obj = objects[0];
// Fetch fields for this object
const fields = duckdbQuery<FieldRow>(
const fields = q<FieldRow>(dbFile,
`SELECT * FROM fields WHERE object_id = '${obj.id}' ORDER BY sort_order`,
);
// Fetch statuses for this object
const statuses = duckdbQuery<StatusRow>(
const statuses = q<StatusRow>(dbFile,
`SELECT * FROM statuses WHERE object_id = '${obj.id}' ORDER BY sort_order`,
);
// Try the PIVOT view first, then fall back to raw EAV query + client-side pivot
let entries: Record<string, unknown>[] = [];
const pivotEntries = duckdbQuery(
const pivotEntries = q(dbFile,
`SELECT * FROM v_${name} ORDER BY created_at DESC LIMIT 200`,
);
if (pivotEntries.length > 0) {
entries = pivotEntries;
} else {
const rawRows = duckdbQuery<EavRow>(
const rawRows = q<EavRow>(dbFile,
`SELECT e.id as entry_id, e.created_at, e.updated_at,
f.name as field_name, ef.value
FROM entries e
@ -388,28 +386,23 @@ export async function GET(
entries = pivotEavRows(rawRows);
}
// Parse enum JSON strings in fields
const parsedFields = fields.map((f) => ({
...f,
enum_values: f.enum_values ? tryParseJson(f.enum_values) : undefined,
enum_colors: f.enum_colors ? tryParseJson(f.enum_colors) : undefined,
}));
// Resolve relation field values to human-readable display labels
const { labels: relationLabels, relatedObjectNames } =
resolveRelationLabels(fields, entries);
resolveRelationLabels(dbFile, fields, entries);
// Enrich fields with related object names for frontend display
const enrichedFields = parsedFields.map((f) => ({
...f,
related_object_name:
f.type === "relation" ? relatedObjectNames[f.name] : undefined,
}));
// Find reverse relations (other objects linking TO this one)
const reverseRelations = findReverseRelations(obj.id);
// Compute the effective display field for this object
const effectiveDisplayField = resolveDisplayField(obj, fields);
return Response.json({

View File

@ -3,8 +3,9 @@ import { join } from "node:path";
import {
resolveWorkspaceRoot,
parseSimpleYaml,
duckdbQueryAsync,
duckdbPath,
duckdbQueryAllAsync,
discoverDuckDBPaths,
duckdbQueryOnFileAsync,
isDatabaseFile,
} from "@/lib/workspace";
@ -167,31 +168,46 @@ function flattenTree(
}
}
/** Fetch all entries from all objects and produce search items. */
/**
* Fetch all entries from all objects across ALL discovered DuckDB files.
* Deduplicates objects by name (shallower DBs win).
*/
async function buildEntryItems(): Promise<SearchIndexItem[]> {
const items: SearchIndexItem[] = [];
const dbPaths = discoverDuckDBPaths();
if (dbPaths.length === 0) {return [];}
const objects = await duckdbQueryAsync<ObjectRow>(
"SELECT * FROM objects ORDER BY name",
);
// Collect all objects across DBs, deduplicating by name (shallowest wins)
const seenNames = new Set<string>();
const objectsWithDb: Array<{ obj: ObjectRow; dbPath: string }> = [];
for (const obj of objects) {
const fields = await duckdbQueryAsync<FieldRow>(
for (const dbPath of dbPaths) {
const objs = await duckdbQueryOnFileAsync<ObjectRow>(dbPath,
"SELECT * FROM objects ORDER BY name",
);
for (const obj of objs) {
if (seenNames.has(obj.name)) {continue;}
seenNames.add(obj.name);
objectsWithDb.push({ obj, dbPath });
}
}
for (const { obj, dbPath } of objectsWithDb) {
const fields = await duckdbQueryOnFileAsync<FieldRow>(dbPath,
`SELECT * FROM fields WHERE object_id = '${sqlEscape(obj.id)}' ORDER BY sort_order`,
);
const displayField = resolveDisplayField(obj, fields);
// Pick the first few text-like fields for searchable preview (max 4)
const previewFields = fields
.filter((f) => !["relation", "richtext"].includes(f.type))
.slice(0, 4);
// Try PIVOT view first, then raw EAV
let entries: Record<string, unknown>[] = await duckdbQueryAsync(
// Try PIVOT view first, then raw EAV (on the same DB)
let entries: Record<string, unknown>[] = await duckdbQueryOnFileAsync(dbPath,
`SELECT * FROM v_${obj.name} ORDER BY created_at DESC LIMIT 500`,
);
if (entries.length === 0) {
const rawRows = await duckdbQueryAsync<EavRow>(
const rawRows = await duckdbQueryOnFileAsync<EavRow>(dbPath,
`SELECT e.id as entry_id, e.created_at, e.updated_at,
f.name as field_name, ef.value
FROM entries e
@ -202,7 +218,6 @@ async function buildEntryItems(): Promise<SearchIndexItem[]> {
LIMIT 2500`,
);
// Pivot manually
const grouped = new Map<string, Record<string, unknown>>();
for (const row of rawRows) {
let entry = grouped.get(row.entry_id);
@ -252,20 +267,21 @@ export async function GET() {
// 1. Files + objects from tree
const root = resolveWorkspaceRoot();
if (root) {
// Aggregate objects from ALL discovered DuckDB files (shallower wins)
const dbObjects = new Map<string, ObjectRow>();
if (duckdbPath()) {
const objs = await duckdbQueryAsync<ObjectRow>(
"SELECT * FROM objects",
);
for (const o of objs) {dbObjects.set(o.name, o);}
}
const objs = await duckdbQueryAllAsync<ObjectRow & { name: string }>(
"SELECT * FROM objects",
"name",
);
for (const o of objs) {dbObjects.set(o.name, o);}
// Scan workspace root (the workspace folder IS the knowledge base)
flattenTree(root, "", dbObjects, items);
}
// 2. Entries from all objects
if (duckdbPath()) {
// 2. Entries from all objects across all discovered DBs
const dbPaths = discoverDuckDBPaths();
if (dbPaths.length > 0) {
items.push(...await buildEntryItems());
}

View File

@ -1,7 +1,7 @@
import { readdirSync, readFileSync, existsSync, type Dirent } from "node:fs";
import { join } from "node:path";
import { homedir } from "node:os";
import { resolveWorkspaceRoot, parseSimpleYaml, duckdbQuery, isDatabaseFile } from "@/lib/workspace";
import { resolveWorkspaceRoot, parseSimpleYaml, duckdbQueryAll, isDatabaseFile } from "@/lib/workspace";
export const dynamic = "force-dynamic";
export const runtime = "nodejs";
@ -43,13 +43,15 @@ function readObjectMeta(
}
/**
* Query DuckDB for all objects so we can identify object directories
* even when .object.yaml files are missing.
* Query ALL discovered DuckDB files for objects so we can identify object
* directories even when .object.yaml files are missing.
* Shallower databases win on name conflicts (parent priority).
*/
function loadDbObjects(): Map<string, DbObject> {
const map = new Map<string, DbObject>();
const rows = duckdbQuery<DbObject>(
const rows = duckdbQueryAll<DbObject & { name: string }>(
"SELECT name, icon, default_view FROM objects",
"name",
);
for (const row of rows) {
map.set(row.name, row);

View File

@ -440,6 +440,15 @@ type FileScopedSession = {
messageCount: number;
};
/** A message waiting to be sent after the current agent run finishes. */
type QueuedMessage = {
  // Client-generated UUID; used to remove or force-send a specific entry.
  id: string;
  // Raw editor text captured at queue time.
  text: string;
  // @-mentioned files captured with the message.
  mentionedFiles: Array<{ name: string; path: string }>;
  // File attachments captured with the message.
  attachedFiles: AttachedFile[];
  // Epoch milliseconds when the message was queued.
  createdAt: number;
};
type ChatPanelProps = {
/** When set, scopes sessions to this file and prepends content as context. */
fileContext?: FileContext;
@ -499,6 +508,9 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
FileScopedSession[]
>([]);
// ── Message queue (messages to send after current run completes) ──
const [queuedMessages, setQueuedMessages] = useState<QueuedMessage[]>([]);
const filePath = fileContext?.path ?? null;
// ── Ref-based session ID for transport ──
@ -877,7 +889,7 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
const hasText = text.trim().length > 0;
const hasMentions = mentionedFiles.length > 0;
const hasFiles = attachedFiles.length > 0;
if ((!hasText && !hasMentions && !hasFiles) || isStreaming) {
if (!hasText && !hasMentions && !hasFiles) {
return;
}
@ -894,6 +906,21 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
return;
}
// Queue the message if the agent is still running.
if (isStreaming) {
setQueuedMessages((prev) => [
...prev,
{
id: crypto.randomUUID(),
text: userText,
mentionedFiles,
attachedFiles: currentAttachments,
createdAt: Date.now(),
},
]);
return;
}
let sessionId = currentSessionId;
if (!sessionId) {
const titleSource =
@ -955,6 +982,24 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
],
);
// ── Queue flush: send the next queued message once the stream finishes ──
const prevFlushStatusRef = useRef(status);
useEffect(() => {
const wasStreaming =
prevFlushStatusRef.current === "streaming" ||
prevFlushStatusRef.current === "submitted";
const isNowReady = status === "ready";
prevFlushStatusRef.current = status;
if (wasStreaming && isNowReady && queuedMessages.length > 0) {
const [next, ...rest] = queuedMessages;
setQueuedMessages(rest);
// Use a microtask so React can settle the status update first.
queueMicrotask(() => {
void handleEditorSubmit(next.text, next.mentionedFiles);
});
}
}, [status, queuedMessages, handleEditorSubmit]);
const handleSessionSelect = useCallback(
async (sessionId: string) => {
@ -972,6 +1017,7 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
onActiveSessionChange?.(sessionId);
savedMessageIdsRef.current.clear();
isFirstFileMessageRef.current = false;
setQueuedMessages([]);
try {
const response = await fetch(
@ -1050,6 +1096,7 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
savedMessageIdsRef.current.clear();
isFirstFileMessageRef.current = true;
newSessionPendingRef.current = false;
setQueuedMessages([]);
if (!filePath) {
setStartingNewSession(true);
@ -1115,6 +1162,29 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
void stop();
}, [currentSessionId, stop]);
// ── Queue handlers ──
const removeQueuedMessage = useCallback((id: string) => {
setQueuedMessages((prev) => prev.filter((m) => m.id !== id));
}, []);
/** Force-send: stop the agent, then immediately submit this queued message. */
const forceSendQueuedMessage = useCallback(
async (id: string) => {
const msg = queuedMessages.find((m) => m.id === id);
if (!msg) {return;}
// Remove it from the queue first.
setQueuedMessages((prev) => prev.filter((m) => m.id !== id));
// Stop the current agent run.
await handleStop();
// Submit the message after a short delay to let status settle.
setTimeout(() => {
void handleEditorSubmit(msg.text, msg.mentionedFiles);
}, 100);
},
[queuedMessages, handleStop, handleEditorSubmit],
);
// ── Attachment handlers ──
const handleFilesSelected = useCallback(
@ -1443,6 +1513,70 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
}
}}
>
{/* Queued messages indicator */}
{queuedMessages.length > 0 && (
<div className={`${compact ? "px-2 pt-2" : "px-3 pt-3"}`}>
<div
className="rounded-xl overflow-hidden"
style={{
border: "1px dashed var(--color-border-strong)",
background: "var(--color-bg-elevated)",
}}
>
<div
className="flex items-center justify-between px-3 py-1.5"
style={{ borderBottom: "1px solid var(--color-border)" }}
>
<span
className="text-[11px] font-medium tracking-wide uppercase"
style={{ color: "var(--color-text-muted)", fontFamily: "var(--font-mono, monospace)" }}
>
Queued ({queuedMessages.length})
</span>
</div>
<div className="flex flex-col gap-1 p-1.5">
{queuedMessages.map((msg) => (
<div
key={msg.id}
className="flex items-start gap-2 rounded-lg px-2.5 py-2 group"
style={{ background: "var(--color-bg-secondary)" }}
>
<p
className="flex-1 text-[13px] leading-[1.45] line-clamp-3"
style={{ color: "var(--color-text)", whiteSpace: "pre-wrap" }}
>
{msg.text || (msg.attachedFiles.length > 0 ? `${msg.attachedFiles.length} file(s)` : "")}
</p>
<div className="flex items-center gap-1 shrink-0 opacity-0 group-hover:opacity-100 transition-opacity">
<button
type="button"
className="rounded-md px-2 py-0.5 text-[11px] font-medium transition-colors hover:bg-[var(--color-bg)]"
style={{ color: "var(--color-accent)" }}
title="Stop agent and send this message now"
onClick={() => forceSendQueuedMessage(msg.id)}
>
Send now
</button>
<button
type="button"
className="rounded-md p-1 transition-colors hover:bg-[var(--color-bg)]"
style={{ color: "var(--color-text-muted)" }}
title="Remove from queue"
onClick={() => removeQueuedMessage(msg.id)}
>
<svg width="12" height="12" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2.5" strokeLinecap="round" strokeLinejoin="round">
<path d="M18 6 6 18" />
<path d="m6 6 12 12" />
</svg>
</button>
</div>
</div>
))}
</div>
</div>
</div>
)}
<ChatEditor
ref={editorRef}
onSubmit={handleEditorSubmit}
@ -1452,13 +1586,14 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
placeholder={
compact && fileContext
? `Ask about ${fileContext.isDirectory ? "this folder" : fileContext.filename}...`
: isStreaming
? "Type to queue a message..."
: attachedFiles.length >
0
? "Add a message or send files..."
: "Type @ to mention files..."
}
disabled={
isStreaming ||
loadingSession ||
startingNewSession
}
@ -1511,8 +1646,9 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
</svg>
</button>
</div>
{/* Send / Stop button */}
{isStreaming ? (
{/* Send / Stop / Queue buttons */}
<div className="flex items-center gap-1.5">
{isStreaming && (
<button
type="button"
onClick={() => handleStop()}
@ -1532,7 +1668,7 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
<rect width="10" height="10" rx="1.5" />
</svg>
</button>
) : (
)}
<button
type="button"
onClick={() => {
@ -1551,11 +1687,33 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
!editorEmpty ||
attachedFiles.length >
0
? "var(--color-accent)"
? isStreaming
? "var(--color-text-muted)"
: "var(--color-accent)"
: "var(--color-border-strong)",
color: "white",
}}
title={isStreaming ? "Queue message" : "Send message"}
>
{isStreaming && !editorEmpty ? (
/* Queue icon: stacked lines */
<svg
width="14"
height="14"
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
strokeWidth="2.5"
strokeLinecap="round"
strokeLinejoin="round"
>
<path d="M16 3h5v5" />
<path d="m21 3-7 7" />
<path d="M12 19V5" />
<path d="m5 12 7-7 7 7" />
</svg>
) : (
/* Normal send arrow */
<svg
width="14"
height="14"
@ -1569,8 +1727,9 @@ export const ChatPanel = forwardRef<ChatPanelHandle, ChatPanelProps>(
<path d="M12 19V5" />
<path d="m5 12 7-7 7 7" />
</svg>
)}
</button>
)}
</div>
</div>
</div>
</div>

View File

@ -27,7 +27,7 @@ export function Breadcrumbs({ path, onNavigate }: BreadcrumbsProps) {
(e.currentTarget as HTMLElement).style.background = "transparent";
}}
>
workspace
All Chats
</button>
{segments.map((segment, idx) => {

View File

@ -56,17 +56,24 @@ type FileManagerTreeProps = {
// --- System file detection (client-side mirror) ---
const SYSTEM_PATTERNS = [
/** Always protected regardless of depth. */
const ALWAYS_SYSTEM_PATTERNS = [
/^\.object\.yaml$/,
/^workspace\.duckdb/,
/^workspace_context\.yaml$/,
/\.wal$/,
/\.tmp$/,
];
/** Only protected at the workspace root (no "/" in the relative path). */
const ROOT_ONLY_SYSTEM_PATTERNS = [
/^workspace\.duckdb/,
/^workspace_context\.yaml$/,
];
/**
 * True when `path` refers to a protected system file.
 * Files matching ALWAYS_SYSTEM_PATTERNS are protected at any depth; files
 * matching ROOT_ONLY_SYSTEM_PATTERNS are protected only at the workspace
 * root (i.e. when the relative path contains no "/").
 */
function isSystemFile(path: string): boolean {
  const base = path.split("/").pop() ?? "";
  if (ALWAYS_SYSTEM_PATTERNS.some((p) => p.test(base))) {return true;}
  const isRoot = !path.includes("/");
  return isRoot && ROOT_ONLY_SYSTEM_PATTERNS.some((p) => p.test(base));
}
// --- Icons (inline SVG, zero-dep) ---
@ -599,6 +606,40 @@ function DraggableNode({
);
}
// --- Root drop zone (allows dropping items back to the top level) ---
function RootDropZone({ isDragging }: { isDragging: boolean }) {
const { setNodeRef, isOver } = useDroppable({
id: "drop-__root__",
data: { rootDrop: true },
});
const showHighlight = isOver && isDragging;
return (
<div
ref={setNodeRef}
className="flex-1 min-h-[48px]"
style={{
margin: isDragging ? "4px 8px" : undefined,
borderRadius: "6px",
border: showHighlight ? "1.5px dashed var(--color-accent)" : isDragging ? "1.5px dashed var(--color-border)" : "1.5px dashed transparent",
background: showHighlight ? "var(--color-accent-light)" : "transparent",
transition: "all 150ms ease",
display: "flex",
alignItems: "center",
justifyContent: "center",
}}
>
{isDragging && (
<span className="text-[11px] select-none" style={{ color: showHighlight ? "var(--color-accent)" : "var(--color-text-muted)", opacity: showHighlight ? 1 : 0.6 }}>
Drop here to move to root
</span>
)}
</div>
);
}
// --- Drag Overlay ---
function DragOverlayContent({ node }: { node: TreeNode }) {
@ -713,8 +754,10 @@ export function FileManagerTree({ tree, activePath, onSelect, onRefresh, compact
}, []);
const handleDragOver = useCallback((event: DragOverEvent) => {
const overData = event.over?.data.current as { node: TreeNode } | undefined;
if (overData?.node) {
const overData = event.over?.data.current as { node?: TreeNode; rootDrop?: boolean } | undefined;
if (overData?.rootDrop) {
setDragOverPath("__root__");
} else if (overData?.node) {
setDragOverPath(overData.node.path);
// Auto-expand folders on drag hover (300ms delay)
const path = overData.node.path;
@ -739,11 +782,25 @@ export function FileManagerTree({ tree, activePath, onSelect, onRefresh, compact
setDragOverPath(null);
const activeData = event.active.data.current as { node: TreeNode } | undefined;
const overData = event.over?.data.current as { node: TreeNode } | undefined;
const overData = event.over?.data.current as { node?: TreeNode; rootDrop?: boolean } | undefined;
if (!activeData?.node || !overData?.node) {return;}
if (!activeData?.node) {return;}
const source = activeData.node;
// Drop onto root level
if (overData?.rootDrop) {
// Already at root? No-op
if (parentPath(source.path) === ".") {return;}
const result = await apiMove(source.path, ".");
if (result.ok) {
onRefresh();
}
return;
}
if (!overData?.node) {return;}
const target = overData.node;
// Only drop onto expandable targets (folders/objects)
@ -1052,7 +1109,7 @@ export function FileManagerTree({ tree, activePath, onSelect, onRefresh, compact
>
<div
ref={containerRef}
className="py-1 outline-none"
className="py-1 outline-none flex flex-col min-h-full"
tabIndex={0}
role="tree"
onKeyDown={handleKeyDown}
@ -1109,6 +1166,8 @@ export function FileManagerTree({ tree, activePath, onSelect, onRefresh, compact
workspaceRoot={workspaceRoot}
/>
))}
{/* Root-level drop zone: fills remaining space so items can be moved to root */}
<RootDropZone isDragging={!!activeNode} />
</div>
{/* Drag overlay (ghost) */}

View File

@ -29,6 +29,8 @@ type WorkspaceSidebarProps = {
onFileSearchSelect?: (item: SuggestItem) => void;
/** Absolute path of the workspace root folder, used to render it as a special entry in browse mode. */
workspaceRoot?: string | null;
/** Navigate to the main chat / home panel. */
onGoToChat?: () => void;
};
function WorkspaceLogo() {
@ -401,6 +403,7 @@ export function WorkspaceSidebar({
onGoHome,
onFileSearchSelect,
workspaceRoot,
onGoToChat,
}: WorkspaceSidebarProps) {
const isBrowsing = browseDir != null;
@ -462,15 +465,23 @@ export function WorkspaceSidebar({
</>
) : (
<>
<span
className="w-8 h-8 rounded-lg flex items-center justify-center flex-shrink-0"
<button
type="button"
onClick={onGoToChat}
className="w-8 h-8 rounded-lg flex items-center justify-center flex-shrink-0 cursor-pointer transition-opacity"
style={{
background: "var(--color-accent-light)",
color: "var(--color-accent)",
}}
title="All Chats"
onMouseEnter={(e) => { (e.currentTarget as HTMLElement).style.opacity = "0.7"; }}
onMouseLeave={(e) => { (e.currentTarget as HTMLElement).style.opacity = "1"; }}
>
<WorkspaceLogo />
</span>
<svg width="20" height="20" viewBox="0 0 24 24" fill="none" stroke="currentColor" strokeWidth="2" strokeLinecap="round" strokeLinejoin="round">
<path d="m3 9 9-7 9 7v11a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2z" />
<polyline points="9 22 9 12 15 12 15 22" />
</svg>
</button>
<div className="flex-1 min-w-0">
<div
className="text-sm font-medium truncate"

View File

@ -534,6 +534,13 @@ function WorkspacePageInner() {
setBrowseDir(null);
}, [setBrowseDir]);
// Navigate to the main chat / home panel
const handleGoToChat = useCallback(() => {
setActivePath(null);
setContent({ kind: "none" });
router.replace("/workspace", { scroll: false });
}, [router]);
// Handle file search selection: navigate sidebar to the file's location and open it
const handleFileSearchSelect = useCallback(
(item: { name: string; path: string; type: string }) => {
@ -811,6 +818,7 @@ function WorkspacePageInner() {
onGoHome={handleGoHome}
onFileSearchSelect={handleFileSearchSelect}
workspaceRoot={workspaceRoot}
onGoToChat={handleGoToChat}
/>
{/* Main content */}

View File

@ -1,4 +1,4 @@
import { existsSync, readFileSync } from "node:fs";
import { existsSync, readFileSync, readdirSync } from "node:fs";
import { execSync, exec } from "node:child_process";
import { promisify } from "node:util";
import { join, resolve, normalize, relative } from "node:path";
@ -50,12 +50,79 @@ export function resolveAgentWorkspacePrefix(): string | null {
return root;
}
/** Path to the DuckDB database file, or null if workspace doesn't exist. */
// ---------------------------------------------------------------------------
// Hierarchical DuckDB discovery
//
// Supports multiple workspace.duckdb files in a tree structure. Each
// subdirectory may contain its own workspace.duckdb that is authoritative
// for the objects in that subtree. Shallower (closer to workspace root)
// databases take priority when objects share the same name.
// ---------------------------------------------------------------------------
/**
 * Recursively discover all workspace.duckdb files under `root`.
 * Returns absolute paths sorted by depth (shallowest first) so that
 * root-level databases have priority over deeper ones.
 *
 * @param root Optional workspace root; defaults to resolveWorkspaceRoot().
 */
export function discoverDuckDBPaths(root?: string): string[] {
  const wsRoot = root ?? resolveWorkspaceRoot();
  if (!wsRoot) {return [];}

  // Directories that never hold workspace databases.
  const skipDirs = new Set(["tmp", "exports", "node_modules"]);
  const found: Array<{ path: string; depth: number }> = [];

  const scan = (dir: string, depth: number): void => {
    const candidate = join(dir, "workspace.duckdb");
    if (existsSync(candidate)) {
      found.push({ path: candidate, depth });
    }
    let children;
    try {
      children = readdirSync(dir, { withFileTypes: true });
    } catch {
      return; // unreadable directory — ignore and keep scanning the rest
    }
    for (const child of children) {
      if (!child.isDirectory()) {continue;}
      // Skip hidden and known non-workspace directories.
      if (child.name.startsWith(".") || skipDirs.has(child.name)) {continue;}
      scan(join(dir, child.name), depth + 1);
    }
  };

  scan(wsRoot, 0);
  // Stable sort: same-depth databases keep traversal (pre-order) order.
  return [...found].sort((a, b) => a.depth - b.depth).map((f) => f.path);
}
/**
* Path to the primary DuckDB database file.
* Checks the workspace root first, then falls back to any workspace.duckdb
* discovered in subdirectories (backward compat with dench/ layout).
*/
export function duckdbPath(): string | null {
  const root = resolveWorkspaceRoot();
  if (!root) {return null;}
  // Try the root-level database first (standard layout).
  // NOTE: the previous single-location lookup (join + existsSync + return)
  // was removed; keeping it here would early-return and make the
  // hierarchical fallback below unreachable dead code.
  const rootDb = join(root, "workspace.duckdb");
  if (existsSync(rootDb)) {return rootDb;}
  // Fallback: the shallowest workspace.duckdb discovered in subdirectories
  // (backward compat with the dench/ layout).
  const all = discoverDuckDBPaths(root);
  return all.length > 0 ? all[0] : null;
}
/**
* Compute the workspace-relative directory that a DuckDB file is authoritative for.
* e.g. for `~/.openclaw/workspace/dench/workspace.duckdb` returns `"dench"`.
* For the root DB returns `""` (empty string).
*/
export function duckdbRelativeScope(dbPath: string): string {
  const workspaceRoot = resolveWorkspaceRoot();
  if (!workspaceRoot) {
    return "";
  }
  // The directory containing the DB file is the subtree it governs.
  const scope = relative(workspaceRoot, resolve(dbPath, ".."));
  // relative() yields "." for the root itself; normalize that to "".
  return scope === "." ? "" : scope;
}
/**
@ -153,6 +220,133 @@ export async function duckdbQueryAsync<T = Record<string, unknown>>(
}
}
// ---------------------------------------------------------------------------
// Multi-DB query helpers — aggregate results from all discovered databases
// ---------------------------------------------------------------------------
/**
* Query ALL discovered workspace.duckdb files and merge results.
* Shallower databases are queried first; use `dedupeKey` to drop duplicates
* from deeper databases (shallower wins).
*/
export function duckdbQueryAll<T = Record<string, unknown>>(
  sql: string,
  dedupeKey?: keyof T,
): T[] {
  const databases = discoverDuckDBPaths();
  if (databases.length === 0) {return [];}
  const bin = resolveDuckdbBin();
  if (!bin) {return [];}
  // Escape for embedding in a single-quoted /bin/sh argument: ' -> '\''.
  // The SQL is constant across databases, so escape it once up front.
  const shellSafeSql = sql.replace(/'/g, "'\\''");
  const seenKeys = new Set<unknown>();
  const rowsOut: T[] = [];
  for (const dbFile of databases) {
    let raw: string;
    try {
      raw = execSync(`'${bin}' -json '${dbFile}' '${shellSafeSql}'`, {
        encoding: "utf-8",
        timeout: 10_000,
        maxBuffer: 10 * 1024 * 1024,
        shell: "/bin/sh",
      }).trim();
    } catch {
      continue; // skip databases that fail to open or query
    }
    if (!raw || raw === "[]") {continue;}
    let parsed: T[];
    try {
      parsed = JSON.parse(raw) as T[];
    } catch {
      continue; // malformed output — treat like a failing DB
    }
    for (const row of parsed) {
      if (dedupeKey !== undefined) {
        const key = row[dedupeKey];
        // Shallower databases are iterated first, so the first key wins.
        if (seenKeys.has(key)) {continue;}
        seenKeys.add(key);
      }
      rowsOut.push(row);
    }
  }
  return rowsOut;
}
/**
* Async version of duckdbQueryAll.
*/
export async function duckdbQueryAllAsync<T = Record<string, unknown>>(
  sql: string,
  dedupeKey?: keyof T,
): Promise<T[]> {
  const databases = discoverDuckDBPaths();
  if (databases.length === 0) {return [];}
  const bin = resolveDuckdbBin();
  if (!bin) {return [];}
  // Escape for embedding in a single-quoted /bin/sh argument: ' -> '\''.
  // The SQL is constant across databases, so escape it once up front.
  const shellSafeSql = sql.replace(/'/g, "'\\''");
  const seenKeys = new Set<unknown>();
  const rowsOut: T[] = [];
  for (const dbFile of databases) {
    let raw: string;
    try {
      const { stdout } = await execAsync(`'${bin}' -json '${dbFile}' '${shellSafeSql}'`, {
        encoding: "utf-8",
        timeout: 10_000,
        maxBuffer: 10 * 1024 * 1024,
        shell: "/bin/sh",
      });
      raw = stdout.trim();
    } catch {
      continue; // skip databases that fail to open or query
    }
    if (!raw || raw === "[]") {continue;}
    let parsed: T[];
    try {
      parsed = JSON.parse(raw) as T[];
    } catch {
      continue; // malformed output — treat like a failing DB
    }
    for (const row of parsed) {
      if (dedupeKey !== undefined) {
        const key = row[dedupeKey];
        // Shallower databases are iterated first, so the first key wins.
        if (seenKeys.has(key)) {continue;}
        seenKeys.add(key);
      }
      rowsOut.push(row);
    }
  }
  return rowsOut;
}
/**
* Find the DuckDB file that contains a specific object by name.
* Returns the absolute path to the database, or null if not found.
* Checks shallower databases first (parent takes priority).
*/
export function findDuckDBForObject(objectName: string): string | null {
  const databases = discoverDuckDBPaths();
  if (databases.length === 0) {return null;}
  const bin = resolveDuckdbBin();
  if (!bin) {return null;}
  // SQL-escape the name (double the single quotes), then shell-escape the
  // whole statement the same way duckdbQuery does (' -> '\'' inside a
  // single-quoted /bin/sh argument).
  const safeName = objectName.replace(/'/g, "''");
  const sql = `SELECT id FROM objects WHERE name = '${safeName}' LIMIT 1`;
  const shellSafeSql = sql.replace(/'/g, "'\\''");
  // Shallower databases are checked first, so a parent DB takes priority.
  for (const dbFile of databases) {
    try {
      const output = execSync(
        `'${bin}' -json '${dbFile}' '${shellSafeSql}'`,
        { encoding: "utf-8", timeout: 5_000, maxBuffer: 1024 * 1024, shell: "/bin/sh" },
      ).trim();
      if (output && output !== "[]") {
        return dbFile;
      }
    } catch {
      // this DB failed or lacks the objects table — try the next one
    }
  }
  return null;
}
/**
* Execute a DuckDB statement (no JSON output expected).
* Used for INSERT/UPDATE/ALTER operations.
@ -160,13 +354,20 @@ export async function duckdbQueryAsync<T = Record<string, unknown>>(
/** Execute a statement against the primary workspace DB; false if no DB exists. */
export function duckdbExec(sql: string): boolean {
  const target = duckdbPath();
  return target ? duckdbExecOnFile(target, sql) : false;
}
/**
* Execute a DuckDB statement against a specific database file (no JSON output).
* Used for INSERT/UPDATE/ALTER operations on a targeted DB.
*/
export function duckdbExecOnFile(dbFilePath: string, sql: string): boolean {
const bin = resolveDuckdbBin();
if (!bin) {return false;}
try {
const escapedSql = sql.replace(/'/g, "'\\''");
execSync(`'${bin}' '${db}' '${escapedSql}'`, {
execSync(`'${bin}' '${dbFilePath}' '${escapedSql}'`, {
encoding: "utf-8",
timeout: 10_000,
shell: "/bin/sh",
@ -343,18 +544,25 @@ export function parseSimpleYaml(
// --- System file protection ---
const SYSTEM_FILE_PATTERNS = [
/** Always protected regardless of depth. */
const ALWAYS_SYSTEM_PATTERNS = [
/^\.object\.yaml$/,
/^workspace\.duckdb/,
/^workspace_context\.yaml$/,
/\.wal$/,
/\.tmp$/,
];
/** Only protected at the workspace root (no "/" in the relative path). */
const ROOT_ONLY_SYSTEM_PATTERNS = [
/^workspace\.duckdb/,
/^workspace_context\.yaml$/,
];
/** Check if a workspace-relative path refers to a protected system file. */
export function isSystemFile(relativePath: string): boolean {
const base = relativePath.split("/").pop() ?? "";
return SYSTEM_FILE_PATTERNS.some((p) => p.test(base));
if (ALWAYS_SYSTEM_PATTERNS.some((p) => p.test(base))) {return true;}
const isRoot = !relativePath.includes("/");
return isRoot && ROOT_ONLY_SYSTEM_PATTERNS.some((p) => p.test(base));
}
/**

File diff suppressed because one or more lines are too long

View File

@ -7,7 +7,7 @@ metadata: { "openclaw": { "inject": true, "always": true, "emoji": "📊" } }
# CRM / Database in Workspace / Guide on handling any data
You manage a Dench workspace stored at `~/.openclaw/workspace`.
All structured data lives in **DuckDB** (`~/.openclaw/workspace/workspace.duckdb`). Documents are **markdown files** in `~/.openclaw/workspace/**`. Organization context is in `~/.openclaw/workspace/workspace_context.yaml` (READ-ONLY).
All structured data lives in **DuckDB**. The primary database is `~/.openclaw/workspace/workspace.duckdb`, but subdirectories may contain their own `workspace.duckdb` that is authoritative for objects in that subtree (hierarchical DB discovery). Shallower databases take priority when objects share the same name. Documents are **markdown files** in `~/.openclaw/workspace/**`. Organization context will be in `~/.openclaw/workspace/workspace_context.yaml` if an organization exists (READ-ONLY).
All actions should read, edit, and operate on `~/.openclaw/workspace/**` by default unless told otherwise. Exceptions to this are `SOUL.md`, `skills/`, `memory/`, `USER.md`, `IDENTITY.md`, `TOOLS.md`, `AGENTS.md`, `MEMORY.md`, and other such files.

View File

@ -228,42 +228,80 @@ async function agentViaGatewayStreamJson(opts: AgentCliOpts, _runtime: RuntimeEn
const channel = normalizeMessageChannel(opts.channel) ?? DEFAULT_CHAT_CHANNEL;
const idempotencyKey = opts.runId?.trim() || randomIdempotencyKey();
const response = await callGateway<GatewayAgentResponse>({
method: "agent",
params: {
message: body,
agentId,
to: opts.to,
replyTo: opts.replyTo,
sessionId: opts.sessionId,
sessionKey,
thinking: opts.thinking,
deliver: Boolean(opts.deliver),
channel,
replyChannel: opts.replyChannel,
replyAccountId: opts.replyAccount,
timeout: timeoutSeconds,
lane: opts.lane,
extraSystemPrompt: opts.extraSystemPrompt,
idempotencyKey,
},
expectFinal: true,
timeoutMs: gatewayTimeoutMs,
clientName: GATEWAY_CLIENT_NAMES.CLI,
mode: GATEWAY_CLIENT_MODES.CLI,
// Request tool-events capability so the gateway streams tool start/result
// events alongside assistant text, thinking, and lifecycle events.
caps: ["tool-events"],
onEvent: (evt) => {
// Emit each gateway event as an NDJSON line (chat deltas, agent tool/lifecycle events).
emitNdjsonLine({ event: evt.event, ...(evt.payload as Record<string, unknown>) });
},
});
// Capture the runId from early gateway events so we can abort the
// correct run when the process receives SIGTERM/SIGINT.
let capturedRunId: string | undefined;
const abortController = new AbortController();
const onSignal = () => {
if (!abortController.signal.aborted) {
abortController.abort();
}
};
process.on("SIGTERM", onSignal);
process.on("SIGINT", onSignal);
// Emit the final result as the last NDJSON line.
emitNdjsonLine({ event: "result", ...response });
try {
const response = await callGateway<GatewayAgentResponse>({
method: "agent",
params: {
message: body,
agentId,
to: opts.to,
replyTo: opts.replyTo,
sessionId: opts.sessionId,
sessionKey,
thinking: opts.thinking,
deliver: Boolean(opts.deliver),
channel,
replyChannel: opts.replyChannel,
replyAccountId: opts.replyAccount,
timeout: timeoutSeconds,
lane: opts.lane,
extraSystemPrompt: opts.extraSystemPrompt,
idempotencyKey,
},
expectFinal: true,
timeoutMs: gatewayTimeoutMs,
clientName: GATEWAY_CLIENT_NAMES.CLI,
mode: GATEWAY_CLIENT_MODES.CLI,
// Request tool-events capability so the gateway streams tool start/result
// events alongside assistant text, thinking, and lifecycle events.
caps: ["tool-events"],
signal: abortController.signal,
onEvent: (evt) => {
// Capture runId from the first event that carries one (lifecycle/accepted).
if (!capturedRunId) {
const payload = evt.payload as Record<string, unknown> | undefined;
if (payload?.runId) {
capturedRunId = String(payload.runId);
}
}
// Emit each gateway event as an NDJSON line (chat deltas, agent tool/lifecycle events).
emitNdjsonLine({ event: evt.event, ...(evt.payload as Record<string, unknown>) });
},
onAbort: async (client) => {
// Best-effort: tell the gateway to abort the agent run before we exit.
if (capturedRunId) {
await client.request("chat.abort", { sessionKey, runId: capturedRunId }).catch(() => {});
}
},
});
return response;
// Emit the final result as the last NDJSON line.
emitNdjsonLine({ event: "result", ...response });
return response;
} catch (err) {
// Re-throw everything except AbortError (expected on SIGTERM/SIGINT).
if (err instanceof DOMException && err.name === "AbortError") {
emitNdjsonLine({ event: "aborted", reason: "signal" });
return {} as GatewayAgentResponse;
}
throw err;
} finally {
process.removeListener("SIGTERM", onSignal);
process.removeListener("SIGINT", onSignal);
}
}
export async function agentCliCommand(opts: AgentCliOpts, runtime: RuntimeEnv, deps?: CliDeps) {

View File

@ -46,6 +46,14 @@ export type CallGatewayOptions = {
onEvent?: (evt: { event: string; payload?: unknown; seq?: number }) => void;
/** Client capabilities to advertise during the WebSocket handshake (e.g. "tool-events"). */
caps?: string[];
/** When aborted, triggers {@link onAbort} then rejects the call with an AbortError. */
signal?: AbortSignal;
/**
* Called with the live GatewayClient when {@link signal} fires.
* Use this to send a best-effort cleanup request (e.g. `chat.abort`)
* before the connection is torn down.
*/
onAbort?: (client: GatewayClient) => Promise<void>;
};
export type GatewayConnectionDetails = {
@ -248,7 +256,7 @@ export async function callGateway<T = Record<string, unknown>>(
return await new Promise<T>((resolve, reject) => {
let settled = false;
let ignoreClose = false;
const stop = (err?: Error, value?: T) => {
let stop = (err?: Error, value?: T) => {
if (settled) {
return;
}
@ -311,6 +319,39 @@ export async function callGateway<T = Record<string, unknown>>(
stop(new Error(formatTimeoutError()));
}, safeTimerTimeoutMs);
// Wire up external abort signal → best-effort onAbort callback, then tear down.
if (opts.signal) {
const handleAbort = () => {
if (settled) {
return;
}
const doAbort = async () => {
if (opts.onAbort) {
try {
await opts.onAbort(client);
} catch {
// best-effort; swallow errors
}
}
ignoreClose = true;
client.stop();
stop(new DOMException("The operation was aborted", "AbortError"));
};
void doAbort();
};
if (opts.signal.aborted) {
handleAbort();
} else {
opts.signal.addEventListener("abort", handleAbort, { once: true });
// Clean up the listener if the call settles normally before abort fires.
const origStop = stop;
stop = (err?: Error, value?: T) => {
opts.signal!.removeEventListener("abort", handleAbort);
origStop(err, value);
};
}
}
client.start();
});
}