Merge pull request #1 from techfitmaster/fix/issue-39446

fix: add retry logic for file lock errors (EBUSY, EACCES, EPERM)
This commit is contained in:
Albert 2026-03-08 23:00:40 +08:00 committed by GitHub
commit e84266e520
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
19 changed files with 245 additions and 83 deletions

View File

@ -219,6 +219,7 @@
## NPM + 1Password (publish/verify)
- Use the 1password skill; all `op` commands must run inside a fresh tmux session.
- Correct 1Password path for npm release auth: `op://Private/Npmjs` (use that item; OTP stays `op://Private/Npmjs/one-time password?attribute=otp`).
- Sign in: `eval "$(op signin --account my.1password.com)"` (app unlocked + integration on).
- OTP: `op read 'op://Private/Npmjs/one-time password?attribute=otp'`.
- Publish: `npm publish --access public --otp="<otp>"` (run from the package dir).

View File

@ -39,6 +39,7 @@ Docs: https://docs.openclaw.ai
### Fixes
- Models/MiniMax: stop advertising removed `MiniMax-M2.5-Lightning` in built-in provider catalogs, onboarding metadata, and docs; keep the supported fast-tier model as `MiniMax-M2.5-highspeed`.
- Security/Config: fail closed when `loadConfig()` hits validation or read errors so invalid configs cannot silently fall back to permissive runtime defaults. (#9040) Thanks @joetomasone.
- Memory/Hybrid search: preserve negative FTS5 BM25 relevance ordering in `bm25RankToScore()` so stronger keyword matches rank above weaker ones instead of collapsing or reversing scores. (#33757) Thanks @lsdcc01.
- LINE/`requireMention` group gating: align inbound and reply-stage LINE group policy resolution across raw, `group:`, and `room:` keys (including account-scoped group config), preserve plugin-backed reply-stage fallback behavior, and add regression coverage for prefixed-only group/room config plus reply-stage policy resolution. (#35847) Thanks @kirisame-wang.

View File

@ -118,6 +118,12 @@ COPY --from=build --chown=node:node /app/extensions ./extensions
COPY --from=build --chown=node:node /app/skills ./skills
COPY --from=build --chown=node:node /app/docs ./docs
# Docker live-test runners invoke `pnpm` inside the runtime image.
# Activate the exact pinned package manager now so the container does not
# rely on a first-run network fetch or missing shims under the non-root user.
RUN corepack enable && \
corepack prepare "$(node -p "require('./package.json').packageManager")" --activate
# Install additional system packages needed by your skills or extensions.
# Example: docker build --build-arg OPENCLAW_DOCKER_APT_PACKAGES="python3 wget" .
ARG OPENCLAW_DOCKER_APT_PACKAGES=""

View File

@ -2186,7 +2186,7 @@ Fix checklist:
2. Make sure MiniMax is configured (wizard or JSON), or that a MiniMax API key
exists in env/auth profiles so the provider can be injected.
3. Use the exact model id (case-sensitive): `minimax/MiniMax-M2.5` or
`minimax/MiniMax-M2.5-highspeed` (legacy: `minimax/MiniMax-M2.5-Lightning`).
`minimax/MiniMax-M2.5-highspeed`.
4. Run:
```bash

View File

@ -353,6 +353,10 @@ These run `pnpm test:live` inside the repo Docker image, mounting your local con
- Gateway networking (two containers, WS auth + health): `pnpm test:docker:gateway-network` (script: `scripts/e2e/gateway-network-docker.sh`)
- Plugins (custom extension load + registry smoke): `pnpm test:docker:plugins` (script: `scripts/e2e/plugins-docker.sh`)
The live-model Docker runners also bind-mount the current checkout read-only and
stage it into a temporary workdir inside the container. This keeps the runtime
image slim while still running Vitest against your exact local source/config.
Manual ACP plain-language thread smoke (not CI):
- `bun scripts/dev/discord-acp-plain-language-smoke.ts --channel <discord-channel-id> ...`

View File

@ -31,8 +31,7 @@ MiniMax highlights these improvements in M2.5:
- **Speed:** `MiniMax-M2.5-highspeed` is the official fast tier in MiniMax docs.
- **Cost:** MiniMax pricing lists the same input cost and a higher output cost for highspeed.
- **Compatibility:** OpenClaw still accepts legacy `MiniMax-M2.5-Lightning` configs, but prefer
`MiniMax-M2.5-highspeed` for new setups.
- **Current model IDs:** use `MiniMax-M2.5` or `MiniMax-M2.5-highspeed`.
## Choose a setup
@ -210,7 +209,6 @@ Make sure the model id is **case-sensitive**:
- `minimax/MiniMax-M2.5`
- `minimax/MiniMax-M2.5-highspeed`
- `minimax/MiniMax-M2.5-Lightning` (legacy)
Then recheck with:

View File

@ -223,9 +223,9 @@
"android:run": "cd apps/android && ./gradlew :app:installDebug && adb shell am start -n ai.openclaw.app/.MainActivity",
"android:test": "cd apps/android && ./gradlew :app:testDebugUnitTest",
"android:test:integration": "OPENCLAW_LIVE_TEST=1 OPENCLAW_LIVE_ANDROID_NODE=1 vitest run --config vitest.live.config.ts src/gateway/android-node.capabilities.live.test.ts",
"build": "pnpm canvas:a2ui:bundle && tsdown && node scripts/copy-plugin-sdk-root-alias.mjs && pnpm build:plugin-sdk:dts && node --import tsx scripts/write-plugin-sdk-entry-dts.ts && node --import tsx scripts/canvas-a2ui-copy.ts && node --import tsx scripts/copy-hook-metadata.ts && node --import tsx scripts/copy-export-html-templates.ts && node --import tsx scripts/write-build-info.ts && node --import tsx scripts/write-cli-startup-metadata.ts && node --import tsx scripts/write-cli-compat.ts",
"build": "pnpm canvas:a2ui:bundle && node scripts/tsdown-build.mjs && node scripts/copy-plugin-sdk-root-alias.mjs && pnpm build:plugin-sdk:dts && node --import tsx scripts/write-plugin-sdk-entry-dts.ts && node --import tsx scripts/canvas-a2ui-copy.ts && node --import tsx scripts/copy-hook-metadata.ts && node --import tsx scripts/copy-export-html-templates.ts && node --import tsx scripts/write-build-info.ts && node --import tsx scripts/write-cli-startup-metadata.ts && node --import tsx scripts/write-cli-compat.ts",
"build:plugin-sdk:dts": "tsc -p tsconfig.plugin-sdk.dts.json",
"build:strict-smoke": "pnpm canvas:a2ui:bundle && tsdown && node scripts/copy-plugin-sdk-root-alias.mjs && pnpm build:plugin-sdk:dts",
"build:strict-smoke": "pnpm canvas:a2ui:bundle && node scripts/tsdown-build.mjs && node scripts/copy-plugin-sdk-root-alias.mjs && pnpm build:plugin-sdk:dts",
"canvas:a2ui:bundle": "bash scripts/bundle-a2ui.sh",
"check": "pnpm format:check && pnpm tsgo && pnpm lint && pnpm lint:tmp:no-random-messaging && pnpm lint:tmp:channel-agnostic-boundaries && pnpm lint:tmp:no-raw-channel-fetch && pnpm lint:agent:ingress-owner && pnpm lint:plugins:no-register-http-handler && pnpm lint:plugins:no-monolithic-plugin-sdk-entry-imports && pnpm lint:webhook:no-low-level-body-read && pnpm lint:auth:no-pairing-store-group && pnpm lint:auth:pairing-account-scope && pnpm check:host-env-policy:swift",
"check:docs": "pnpm format:docs:check && pnpm lint:docs && pnpm docs:check-links",

View File

@ -9,6 +9,7 @@ import { fileURLToPath } from "node:url";
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const projectRoot = path.resolve(__dirname, "..");
const verbose = process.env.OPENCLAW_BUILD_VERBOSE === "1";
const srcDir = path.join(projectRoot, "src", "auto-reply", "reply", "export-html");
const distDir = path.join(projectRoot, "dist", "export-html");
@ -26,12 +27,16 @@ function copyExportHtmlTemplates() {
// Copy main template files
const templateFiles = ["template.html", "template.css", "template.js"];
let copiedCount = 0;
for (const file of templateFiles) {
const srcFile = path.join(srcDir, file);
const distFile = path.join(distDir, file);
if (fs.existsSync(srcFile)) {
fs.copyFileSync(srcFile, distFile);
console.log(`[copy-export-html-templates] Copied ${file}`);
copiedCount += 1;
if (verbose) {
console.log(`[copy-export-html-templates] Copied ${file}`);
}
}
}
@ -48,12 +53,15 @@ function copyExportHtmlTemplates() {
const distFile = path.join(distVendor, file);
if (fs.statSync(srcFile).isFile()) {
fs.copyFileSync(srcFile, distFile);
console.log(`[copy-export-html-templates] Copied vendor/${file}`);
copiedCount += 1;
if (verbose) {
console.log(`[copy-export-html-templates] Copied vendor/${file}`);
}
}
}
}
console.log("[copy-export-html-templates] Done");
console.log(`[copy-export-html-templates] Copied ${copiedCount} export-html assets.`);
}
copyExportHtmlTemplates();

View File

@ -9,6 +9,7 @@ import { fileURLToPath } from "node:url";
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const projectRoot = path.resolve(__dirname, "..");
const verbose = process.env.OPENCLAW_BUILD_VERBOSE === "1";
const srcBundled = path.join(projectRoot, "src", "hooks", "bundled");
const distBundled = path.join(projectRoot, "dist", "bundled");
@ -24,6 +25,7 @@ function copyHookMetadata() {
}
const entries = fs.readdirSync(srcBundled, { withFileTypes: true });
let copiedCount = 0;
for (const entry of entries) {
if (!entry.isDirectory()) {
@ -46,10 +48,13 @@ function copyHookMetadata() {
}
fs.copyFileSync(srcHookMd, distHookMd);
console.log(`[copy-hook-metadata] Copied ${hookName}/HOOK.md`);
copiedCount += 1;
if (verbose) {
console.log(`[copy-hook-metadata] Copied ${hookName}/HOOK.md`);
}
}
console.log("[copy-hook-metadata] Done");
console.log(`[copy-hook-metadata] Copied ${copiedCount} hook metadata files.`);
}
copyHookMetadata();

View File

@ -12,6 +12,27 @@ if [[ -f "$PROFILE_FILE" ]]; then
PROFILE_MOUNT=(-v "$PROFILE_FILE":/home/node/.profile:ro)
fi
read -r -d '' LIVE_TEST_CMD <<'EOF' || true
set -euo pipefail
# Load provider API keys from the mounted ~/.profile, if present (best effort).
[ -f "$HOME/.profile" ] && source "$HOME/.profile" || true
# Stage the read-only /src bind mount into a writable temp workdir so tests
# run against the exact local checkout without mutating it.
tmp_dir="$(mktemp -d)"
cleanup() {
rm -rf "$tmp_dir"
}
trap cleanup EXIT
# Copy sources, skipping VCS data, dependency trees, and build output.
tar -C /src \
--exclude=.git \
--exclude=node_modules \
--exclude=dist \
--exclude=ui/dist \
--exclude=ui/node_modules \
-cf - . | tar -C "$tmp_dir" -xf -
# Reuse the image's preinstalled dependencies and prebuilt dist via symlinks.
ln -s /app/node_modules "$tmp_dir/node_modules"
ln -s /app/dist "$tmp_dir/dist"
cd "$tmp_dir"
pnpm test:live
EOF
echo "==> Build image: $IMAGE_NAME"
docker build -t "$IMAGE_NAME" -f "$ROOT_DIR/Dockerfile" "$ROOT_DIR"
@ -26,8 +47,9 @@ docker run --rm -t \
-e OPENCLAW_LIVE_GATEWAY_PROVIDERS="${OPENCLAW_LIVE_GATEWAY_PROVIDERS:-${CLAWDBOT_LIVE_GATEWAY_PROVIDERS:-}}" \
-e OPENCLAW_LIVE_GATEWAY_MAX_MODELS="${OPENCLAW_LIVE_GATEWAY_MAX_MODELS:-${CLAWDBOT_LIVE_GATEWAY_MAX_MODELS:-24}}" \
-e OPENCLAW_LIVE_GATEWAY_MODEL_TIMEOUT_MS="${OPENCLAW_LIVE_GATEWAY_MODEL_TIMEOUT_MS:-${CLAWDBOT_LIVE_GATEWAY_MODEL_TIMEOUT_MS:-}}" \
-v "$ROOT_DIR":/src:ro \
-v "$CONFIG_DIR":/home/node/.openclaw \
-v "$WORKSPACE_DIR":/home/node/.openclaw/workspace \
"${PROFILE_MOUNT[@]}" \
"$IMAGE_NAME" \
-lc "set -euo pipefail; [ -f \"$HOME/.profile\" ] && source \"$HOME/.profile\" || true; cd /app && pnpm test:live"
-lc "$LIVE_TEST_CMD"

View File

@ -12,6 +12,27 @@ if [[ -f "$PROFILE_FILE" ]]; then
PROFILE_MOUNT=(-v "$PROFILE_FILE":/home/node/.profile:ro)
fi
read -r -d '' LIVE_TEST_CMD <<'EOF' || true
set -euo pipefail
# Load provider API keys from the mounted ~/.profile, if present (best effort).
[ -f "$HOME/.profile" ] && source "$HOME/.profile" || true
# Stage the read-only /src bind mount into a writable temp workdir so tests
# run against the exact local checkout without mutating it.
tmp_dir="$(mktemp -d)"
cleanup() {
rm -rf "$tmp_dir"
}
trap cleanup EXIT
# Copy sources, skipping VCS data, dependency trees, and build output.
tar -C /src \
--exclude=.git \
--exclude=node_modules \
--exclude=dist \
--exclude=ui/dist \
--exclude=ui/node_modules \
-cf - . | tar -C "$tmp_dir" -xf -
# Reuse the image's preinstalled dependencies and prebuilt dist via symlinks.
ln -s /app/node_modules "$tmp_dir/node_modules"
ln -s /app/dist "$tmp_dir/dist"
cd "$tmp_dir"
pnpm test:live
EOF
echo "==> Build image: $IMAGE_NAME"
docker build -t "$IMAGE_NAME" -f "$ROOT_DIR/Dockerfile" "$ROOT_DIR"
@ -27,8 +48,9 @@ docker run --rm -t \
-e OPENCLAW_LIVE_MAX_MODELS="${OPENCLAW_LIVE_MAX_MODELS:-${CLAWDBOT_LIVE_MAX_MODELS:-48}}" \
-e OPENCLAW_LIVE_MODEL_TIMEOUT_MS="${OPENCLAW_LIVE_MODEL_TIMEOUT_MS:-${CLAWDBOT_LIVE_MODEL_TIMEOUT_MS:-}}" \
-e OPENCLAW_LIVE_REQUIRE_PROFILE_KEYS="${OPENCLAW_LIVE_REQUIRE_PROFILE_KEYS:-${CLAWDBOT_LIVE_REQUIRE_PROFILE_KEYS:-}}" \
-v "$ROOT_DIR":/src:ro \
-v "$CONFIG_DIR":/home/node/.openclaw \
-v "$WORKSPACE_DIR":/home/node/.openclaw/workspace \
"${PROFILE_MOUNT[@]}" \
"$IMAGE_NAME" \
-lc "set -euo pipefail; [ -f \"$HOME/.profile\" ] && source \"$HOME/.profile\" || true; cd /app && pnpm test:live"
-lc "$LIVE_TEST_CMD"

19
scripts/tsdown-build.mjs Normal file
View File

@ -0,0 +1,19 @@
#!/usr/bin/env node
import { spawnSync } from "node:child_process";
const logLevel = process.env.OPENCLAW_BUILD_VERBOSE ? "info" : "warn";
const result = spawnSync(
"pnpm",
["exec", "tsdown", "--config-loader", "unrun", "--logLevel", logLevel],
{
stdio: "inherit",
shell: process.platform === "win32",
},
);
if (typeof result.status === "number") {
process.exit(result.status);
}
process.exit(1);

View File

@ -0,0 +1,49 @@
import { mkdtempSync } from "node:fs";
import { writeFile } from "node:fs/promises";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { describe, expect, it } from "vitest";

import { resolveImplicitProviders } from "./models-config.providers.js";

// The catalog must expose exactly these ids, in this order, for both the
// api-key and OAuth MiniMax providers — notably without the removed
// "MiniMax-M2.5-Lightning" entry.
const EXPECTED_MODEL_IDS = ["MiniMax-VL-01", "MiniMax-M2.5", "MiniMax-M2.5-highspeed"];

describe("minimax provider catalog", () => {
  it("does not advertise the removed lightning model for api-key or oauth providers", async () => {
    const agentDir = mkdtempSync(join(tmpdir(), "openclaw-test-"));
    const authProfiles = {
      version: 1,
      profiles: {
        "minimax:default": {
          type: "api_key",
          provider: "minimax",
          key: "sk-minimax-test", // pragma: allowlist secret
        },
        "minimax-portal:default": {
          type: "oauth",
          provider: "minimax-portal",
          access: "access-token",
          refresh: "refresh-token",
          expires: Date.now() + 60_000,
        },
      },
    };
    await writeFile(
      join(agentDir, "auth-profiles.json"),
      JSON.stringify(authProfiles, null, 2),
      "utf8",
    );

    const providers = await resolveImplicitProviders({ agentDir });

    for (const providerId of ["minimax", "minimax-portal"] as const) {
      const modelIds = providers?.[providerId]?.models?.map((model) => model.id);
      expect(modelIds).toEqual(EXPECTED_MODEL_IDS);
    }
  });
});

View File

@ -765,11 +765,6 @@ function buildMinimaxProvider(): ProviderConfig {
name: "MiniMax M2.5 Highspeed",
reasoning: true,
}),
buildMinimaxTextModel({
id: "MiniMax-M2.5-Lightning",
name: "MiniMax M2.5 Lightning",
reasoning: true,
}),
],
};
}
@ -796,11 +791,6 @@ function buildMinimaxPortalProvider(): ProviderConfig {
name: "MiniMax M2.5 Highspeed",
reasoning: true,
}),
buildMinimaxTextModel({
id: "MiniMax-M2.5-Lightning",
name: "MiniMax M2.5 Lightning",
reasoning: true,
}),
],
};
}

View File

@ -123,7 +123,7 @@ describe("directive behavior", () => {
workspace: path.join(home, "openclaw"),
models: {
"minimax/MiniMax-M2.5": {},
"minimax/MiniMax-M2.5-Lightning": {},
"minimax/MiniMax-M2.5-highspeed": {},
"lmstudio/minimax-m2.5-gs32": {},
},
},
@ -157,7 +157,7 @@ describe("directive behavior", () => {
workspace: path.join(home, "openclaw"),
models: {
"minimax/MiniMax-M2.5": {},
"minimax/MiniMax-M2.5-Lightning": {},
"minimax/MiniMax-M2.5-highspeed": {},
},
},
},
@ -170,7 +170,7 @@ describe("directive behavior", () => {
api: "anthropic-messages",
models: [
makeModelDefinition("MiniMax-M2.5", "MiniMax M2.5"),
makeModelDefinition("MiniMax-M2.5-Lightning", "MiniMax M2.5 Lightning"),
makeModelDefinition("MiniMax-M2.5-highspeed", "MiniMax M2.5 Highspeed"),
],
},
},

View File

@ -295,7 +295,7 @@ const BASE_AUTH_CHOICE_OPTIONS: ReadonlyArray<AuthChoiceOption> = [
{
value: "minimax-api-lightning",
label: "MiniMax M2.5 Highspeed",
hint: "Official fast tier (legacy: Lightning)",
hint: "Official fast tier",
},
{ value: "custom-api-key", label: "Custom Provider" },
];

View File

@ -91,7 +91,6 @@ export const ZAI_DEFAULT_COST = {
// Built-in MiniMax text-model catalog: model id -> display metadata.
// "MiniMax-M2.5-highspeed" is the fast tier; all entries are flagged as
// reasoning-capable.
const MINIMAX_MODEL_CATALOG = {
"MiniMax-M2.5": { name: "MiniMax M2.5", reasoning: true },
"MiniMax-M2.5-highspeed": { name: "MiniMax M2.5 Highspeed", reasoning: true },
"MiniMax-M2.5-Lightning": { name: "MiniMax M2.5 Lightning", reasoning: true },
} as const;
// Union of the catalog's model ids.
type MinimaxCatalogId = keyof typeof MINIMAX_MODEL_CATALOG;

View File

@ -2,6 +2,10 @@ import { randomUUID } from "node:crypto";
import fs from "node:fs/promises";
import path from "node:path";
// Error codes treated as transient file-lock failures (e.g. another process
// briefly holding the file); writes that hit these codes are retried.
const FILE_LOCK_ERRORS = new Set(["EBUSY", "EACCES", "EPERM"]);
// Maximum write attempts before the last error is rethrown to the caller.
const MAX_RETRIES = 3;
// Base retry delay; the actual wait scales linearly with the attempt number.
const RETRY_DELAY_MS = 100;
export async function readJsonFile<T>(filePath: string): Promise<T | null> {
try {
const raw = await fs.readFile(filePath, "utf8");
@ -36,24 +40,44 @@ export async function writeTextAtomic(
if (typeof options?.ensureDirMode === "number") {
mkdirOptions.mode = options.ensureDirMode;
}
await fs.mkdir(path.dirname(filePath), mkdirOptions);
const tmp = `${filePath}.${randomUUID()}.tmp`;
try {
await fs.writeFile(tmp, payload, "utf8");
const attemptWrite = async (): Promise<void> => {
await fs.mkdir(path.dirname(filePath), mkdirOptions);
const tmp = `${filePath}.${randomUUID()}.tmp`;
try {
await fs.chmod(tmp, mode);
} catch {
// best-effort; ignore on platforms without chmod
await fs.writeFile(tmp, payload, "utf8");
try {
await fs.chmod(tmp, mode);
} catch {
// best-effort; ignore on platforms without chmod
}
await fs.rename(tmp, filePath);
try {
await fs.chmod(filePath, mode);
} catch {
// best-effort; ignore on platforms without chmod
}
} finally {
await fs.rm(tmp, { force: true }).catch(() => undefined);
}
await fs.rename(tmp, filePath);
};
let lastError: Error | undefined;
for (let attempt = 0; attempt < MAX_RETRIES; attempt++) {
try {
await fs.chmod(filePath, mode);
} catch {
// best-effort; ignore on platforms without chmod
await attemptWrite();
return;
} catch (err) {
lastError = err as Error;
const errWithCode = err as { code?: string };
if (attempt < MAX_RETRIES - 1 && FILE_LOCK_ERRORS.has(errWithCode.code ?? "")) {
await new Promise((resolve) => setTimeout(resolve, RETRY_DELAY_MS * (attempt + 1)));
continue;
}
throw err;
}
} finally {
await fs.rm(tmp, { force: true }).catch(() => undefined);
}
throw lastError;
}
export function createAsyncLock() {

View File

@ -4,6 +4,42 @@ const env = {
NODE_ENV: "production",
};
/**
 * Build the rolldown `inputOptions` for a bundle, filtering out noisy
 * per-plugin timing logs (`PLUGIN_TIMINGS`) unless a verbose build was
 * requested via OPENCLAW_BUILD_VERBOSE=1. Any user-supplied `onLog` handler
 * still receives every non-filtered log.
 */
function buildInputOptions(options: { onLog?: unknown; [key: string]: unknown }) {
  // Verbose builds keep rolldown's default logging untouched.
  if (process.env.OPENCLAW_BUILD_VERBOSE === "1") {
    return undefined;
  }
  const userOnLog = typeof options.onLog === "function" ? options.onLog : undefined;
  const filteringOnLog = (
    level: string,
    log: { code?: string },
    defaultHandler: (level: string, log: { code?: string }) => void,
  ) => {
    if (log.code === "PLUGIN_TIMINGS") {
      return;
    }
    if (userOnLog) {
      userOnLog(level, log, defaultHandler);
    } else {
      defaultHandler(level, log);
    }
  };
  return { ...options, onLog: filteringOnLog };
}
/**
 * Wrap one tsdown entry config with the shared Node build settings:
 * production env constants, non-fixed output extensions, the node platform,
 * and the quiet-by-default log filtering from `buildInputOptions`.
 */
function nodeBuildConfig(config: Record<string, unknown>) {
  const sharedNodeSettings = {
    env,
    fixedExtension: false,
    platform: "node",
    inputOptions: buildInputOptions,
  };
  return { ...config, ...sharedNodeSettings };
}
const pluginSdkEntrypoints = [
"index",
"core",
@ -52,32 +88,20 @@ const pluginSdkEntrypoints = [
] as const;
export default defineConfig([
{
nodeBuildConfig({
entry: "src/index.ts",
env,
fixedExtension: false,
platform: "node",
},
{
}),
nodeBuildConfig({
entry: "src/entry.ts",
env,
fixedExtension: false,
platform: "node",
},
{
}),
nodeBuildConfig({
// Ensure this module is bundled as an entry so legacy CLI shims can resolve its exports.
entry: "src/cli/daemon-cli.ts",
env,
fixedExtension: false,
platform: "node",
},
{
}),
nodeBuildConfig({
entry: "src/infra/warning-filter.ts",
env,
fixedExtension: false,
platform: "node",
},
{
}),
nodeBuildConfig({
// Keep sync lazy-runtime channel modules as concrete dist files.
entry: {
"channels/plugins/agent-tools/whatsapp-login":
@ -91,27 +115,17 @@ export default defineConfig([
"line/send": "src/line/send.ts",
"line/template-messages": "src/line/template-messages.ts",
},
env,
fixedExtension: false,
platform: "node",
},
...pluginSdkEntrypoints.map((entry) => ({
entry: `src/plugin-sdk/${entry}.ts`,
outDir: "dist/plugin-sdk",
env,
fixedExtension: false,
platform: "node" as const,
})),
{
}),
...pluginSdkEntrypoints.map((entry) =>
nodeBuildConfig({
entry: `src/plugin-sdk/${entry}.ts`,
outDir: "dist/plugin-sdk",
}),
),
nodeBuildConfig({
entry: "src/extensionAPI.ts",
env,
fixedExtension: false,
platform: "node",
},
{
}),
nodeBuildConfig({
entry: ["src/hooks/bundled/*/handler.ts", "src/hooks/llm-slug-generator.ts"],
env,
fixedExtension: false,
platform: "node",
},
}),
]);