Merge 892d9605f05f4811af39190b8a3c21a3aade32b8 into 598f1826d8b2bc969aace2c6459824737667218c

This commit is contained in:
Boqian 2026-03-21 05:33:17 +01:00 committed by GitHub
commit 2602c30f4e
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
17 changed files with 416 additions and 0 deletions

4
.github/labeler.yml vendored
View File

@ -269,6 +269,10 @@
- changed-files:
- any-glob-to-any-file:
- "extensions/minimax/**"
"extensions: meganova":
- changed-files:
- any-glob-to-any-file:
- "extensions/meganova/**"
"extensions: modelstudio":
- changed-files:
- any-glob-to-any-file:

View File

@ -1152,6 +1152,7 @@
"providers/openai",
"providers/opencode-go",
"providers/opencode",
"providers/meganova",
"providers/openrouter",
"providers/perplexity-provider",
"providers/qianfan",

View File

@ -35,6 +35,7 @@ Looking for chat channel docs (WhatsApp/Telegram/Discord/Slack/Mattermost (plugi
- [Hugging Face (Inference)](/providers/huggingface)
- [Kilocode](/providers/kilocode)
- [LiteLLM (unified gateway)](/providers/litellm)
- [MegaNova AI](/providers/meganova)
- [MiniMax](/providers/minimax)
- [Mistral](/providers/mistral)
- [Model Studio (Alibaba Cloud)](/providers/modelstudio)

View File

@ -0,0 +1,70 @@
---
summary: "MegaNova AI setup (auth + model selection)"
read_when:
- You want to use MegaNova AI with OpenClaw
- You need the API key env var or CLI auth choice
---
# MegaNova AI
[MegaNova AI](https://meganova.ai) provides access to 15+ models through an
OpenAI-compatible API, including reasoning models (GLM-5, DeepSeek-R1,
Kimi K2 Thinking) and open-source models (DeepSeek, Llama, Qwen).
- Provider: `meganova`
- Auth: `MEGANOVA_API_KEY`
- API: OpenAI-compatible
## Quick start
1. Set the API key (recommended: store it for the Gateway):
```bash
openclaw onboard --auth-choice meganova-api-key
```
2. Set a default model:
```json5
{
agents: {
defaults: {
model: { primary: "meganova/zai-org/GLM-5" },
},
},
}
```
## Non-interactive example
```bash
openclaw onboard --non-interactive \
--mode local \
--auth-choice meganova-api-key \
--meganova-api-key "$MEGANOVA_API_KEY"
```
This will set `meganova/zai-org/GLM-5` as the default model.
## Environment note
If the Gateway runs as a daemon (launchd/systemd), make sure `MEGANOVA_API_KEY`
is available to that process (for example, in `~/.clawdbot/.env` or via
`env.shellEnv`).
## Available models
MegaNova provides access to open-source and reasoning models:
- **GLM 5** (default) - reasoning model, 202K context
- **GLM 4.7 / 4.6** - reasoning models
- **DeepSeek R1-0528 / V3.1** - reasoning models
- **DeepSeek V3.2 / V3-0324** - open-source coding models
- **Kimi K2 Thinking / K2.5** - Moonshot reasoning models
- **Qwen3 235B** - Alibaba multilingual model
- **Llama 3.3 70B Instruct** - Meta open-source model
- **MiniMax M2.1 / M2.5** - MiniMax models
- **MiMo V2 Flash** - Xiaomi model
- **Mistral Nemo Instruct** - Mistral open-source model
All models support standard chat completions and are OpenAI API compatible.

View File

@ -0,0 +1,58 @@
import { definePluginEntry } from "openclaw/plugin-sdk/core";
import { createProviderApiKeyAuthMethod } from "openclaw/plugin-sdk/provider-auth";
import { buildSingleProviderApiKeyCatalog } from "openclaw/plugin-sdk/provider-catalog";
import { applyMeganovaConfig, MEGANOVA_DEFAULT_MODEL_REF } from "./onboard.js";
import { buildMeganovaProvider } from "./provider-catalog.js";
const PROVIDER_ID = "meganova";

/**
 * Bundled plugin entry for the MegaNova AI provider.
 *
 * Registers a single provider with:
 * - API-key auth (env var `MEGANOVA_API_KEY`, CLI flag `--meganova-api-key`,
 *   onboarding wizard choice `meganova-api-key`), and
 * - a "simple" catalog built from the static MegaNova model list.
 */
export default definePluginEntry({
  id: PROVIDER_ID,
  name: "MegaNova Provider",
  description: "Bundled MegaNova AI provider plugin",
  register(api) {
    api.registerProvider({
      id: PROVIDER_ID,
      label: "MegaNova AI",
      docsPath: "/providers/meganova",
      envVars: ["MEGANOVA_API_KEY"],
      auth: [
        createProviderApiKeyAuthMethod({
          providerId: PROVIDER_ID,
          methodId: "api-key",
          label: "MegaNova AI API key",
          // Keep the advertised count in sync with MEGANOVA_MODEL_CATALOG
          // (15 models) and the provider docs page ("15+").
          hint: "API key (15+ models)",
          optionKey: "meganovaApiKey",
          flagName: "--meganova-api-key",
          envVar: "MEGANOVA_API_KEY",
          promptMessage: "Enter MegaNova API key",
          defaultModel: MEGANOVA_DEFAULT_MODEL_REF,
          expectedProviders: ["meganova"],
          applyConfig: (cfg) => applyMeganovaConfig(cfg),
          // Fixed: the previous copy claimed "30+ leading models ... including
          // Claude, GPT-5, Gemini", which does not match the shipped catalog
          // (15 open-source and reasoning models; no Anthropic/OpenAI/Google
          // models) nor the docs page.
          noteMessage: [
            "MegaNova AI provides access to 15+ open-source and reasoning models",
            "through an OpenAI-compatible API, including GLM, DeepSeek, Kimi, Llama, and Qwen.",
            "Get your API key at: https://meganova.ai",
          ].join("\n"),
          noteTitle: "MegaNova AI",
          wizard: {
            choiceId: "meganova-api-key",
            choiceLabel: "MegaNova AI API key",
            groupId: "meganova",
            groupLabel: "MegaNova AI",
            // Same count fix as `hint` above.
            groupHint: "API key (15+ models)",
          },
        }),
      ],
      catalog: {
        order: "simple",
        run: (ctx) =>
          buildSingleProviderApiKeyCatalog({
            ctx,
            providerId: PROVIDER_ID,
            buildProvider: buildMeganovaProvider,
          }),
      },
    });
  },
});

View File

@ -0,0 +1,32 @@
import {
buildMeganovaModelDefinition,
MEGANOVA_BASE_URL,
MEGANOVA_DEFAULT_MODEL_REF,
MEGANOVA_MODEL_CATALOG,
} from "openclaw/plugin-sdk/provider-models";
import {
applyAgentDefaultModelPrimary,
applyProviderConfigWithModelCatalog,
type OpenClawConfig,
} from "openclaw/plugin-sdk/provider-onboard";
export { MEGANOVA_DEFAULT_MODEL_REF };
/**
 * Merge the MegaNova provider (base URL, API flavor, model catalog) into the
 * given config without mutating the caller's object.
 */
export function applyMeganovaProviderConfig(cfg: OpenClawConfig): OpenClawConfig {
  // Shallow-copy existing per-agent model overrides so the merge helper never
  // writes into the caller's config.
  const existingAgentModels = { ...cfg.agents?.defaults?.models };
  const catalogModels = MEGANOVA_MODEL_CATALOG.map(buildMeganovaModelDefinition);
  return applyProviderConfigWithModelCatalog(cfg, {
    agentModels: existingAgentModels,
    providerId: "meganova",
    api: "openai-completions",
    baseUrl: MEGANOVA_BASE_URL,
    catalogModels,
  });
}
/**
 * Full MegaNova onboarding config change: install the provider/catalog, then
 * point the agents' default primary model at the MegaNova default.
 */
export function applyMeganovaConfig(cfg: OpenClawConfig): OpenClawConfig {
  const withProvider = applyMeganovaProviderConfig(cfg);
  return applyAgentDefaultModelPrimary(withProvider, MEGANOVA_DEFAULT_MODEL_REF);
}

View File

@ -0,0 +1,27 @@
{
"id": "meganova",
"providers": ["meganova"],
"providerAuthEnvVars": {
"meganova": ["MEGANOVA_API_KEY"]
},
"providerAuthChoices": [
{
"provider": "meganova",
"method": "api-key",
"choiceId": "meganova-api-key",
"choiceLabel": "MegaNova AI API key",
"groupId": "meganova",
"groupLabel": "MegaNova AI",
"groupHint": "API key (15+ models)",
"optionKey": "meganovaApiKey",
"cliFlag": "--meganova-api-key",
"cliOption": "--meganova-api-key <key>",
"cliDescription": "MegaNova API key"
}
],
"configSchema": {
"type": "object",
"additionalProperties": false,
"properties": {}
}
}

View File

@ -0,0 +1,12 @@
{
"name": "@openclaw/meganova-provider",
"version": "2026.3.14",
"private": true,
"description": "OpenClaw MegaNova AI provider plugin",
"type": "module",
"openclaw": {
"extensions": [
"./index.ts"
]
}
}

View File

@ -0,0 +1,14 @@
import {
buildMeganovaModelDefinition,
MEGANOVA_BASE_URL,
MEGANOVA_MODEL_CATALOG,
type ModelProviderConfig,
} from "openclaw/plugin-sdk/provider-models";
/**
 * Build the ModelProviderConfig for MegaNova from the static model catalog.
 * Async to satisfy the catalog-builder contract; performs no I/O itself.
 */
export async function buildMeganovaProvider(): Promise<ModelProviderConfig> {
  const models = MEGANOVA_MODEL_CATALOG.map(buildMeganovaModelDefinition);
  return {
    baseUrl: MEGANOVA_BASE_URL,
    api: "openai-completions",
    models,
  };
}

2
pnpm-lock.yaml generated
View File

@ -426,6 +426,8 @@ importers:
specifier: ^4.3.6
version: 4.3.6
extensions/meganova: {}
extensions/memory-core:
dependencies:
openclaw:

View File

@ -0,0 +1,165 @@
import type { ModelDefinitionConfig } from "../config/types.models.js";
/** Base URL of MegaNova's OpenAI-compatible REST API. */
export const MEGANOVA_BASE_URL = "https://api.meganova.ai/v1";
/** Catalog id of the model selected by default during onboarding. */
const MEGANOVA_DEFAULT_MODEL_ID = "zai-org/GLM-5";
/** Fully-qualified "provider/model" ref used in config files. */
export const MEGANOVA_DEFAULT_MODEL_REF = `meganova/${MEGANOVA_DEFAULT_MODEL_ID}`;
/**
 * Static model catalog for the MegaNova provider (15 entries).
 * Order matters: it is preserved when the catalog is surfaced to users.
 * NOTE(review): cost figures are presumably USD per 1M tokens, matching the
 * convention of sibling catalogs — TODO confirm against MegaNova pricing.
 */
export const MEGANOVA_MODEL_CATALOG: ModelDefinitionConfig[] = [
  // --- Reasoning models ---
  {
    id: "zai-org/GLM-5",
    name: "GLM 5",
    reasoning: true,
    input: ["text"],
    contextWindow: 202752,
    maxTokens: 32768,
    cost: { input: 0.8, output: 2.56, cacheRead: 0.8, cacheWrite: 2.56 },
  },
  {
    id: "zai-org/GLM-4.7",
    name: "GLM 4.7",
    reasoning: true,
    input: ["text"],
    contextWindow: 202752,
    maxTokens: 8192,
    cost: { input: 0.2, output: 0.8, cacheRead: 0.2, cacheWrite: 0.8 },
  },
  {
    id: "zai-org/GLM-4.6",
    name: "GLM 4.6",
    reasoning: true,
    input: ["text"],
    contextWindow: 202752,
    maxTokens: 8192,
    cost: { input: 0.45, output: 1.9, cacheRead: 0.45, cacheWrite: 1.9 },
  },
  {
    id: "deepseek-ai/DeepSeek-R1-0528",
    name: "DeepSeek R1 0528",
    reasoning: true,
    input: ["text"],
    contextWindow: 163840,
    maxTokens: 32768,
    cost: { input: 0.5, output: 2.15, cacheRead: 0.5, cacheWrite: 2.15 },
  },
  {
    id: "deepseek-ai/DeepSeek-V3.1",
    name: "DeepSeek V3.1",
    reasoning: true,
    input: ["text"],
    contextWindow: 163840,
    maxTokens: 8192,
    cost: { input: 0.27, output: 1.0, cacheRead: 0.27, cacheWrite: 1.0 },
  },
  {
    id: "moonshotai/Kimi-K2-Thinking",
    name: "Kimi K2 Thinking",
    reasoning: true,
    input: ["text"],
    contextWindow: 262144,
    maxTokens: 32768,
    cost: { input: 0.6, output: 2.6, cacheRead: 0.6, cacheWrite: 2.6 },
  },
  // --- Open-source / general models ---
  {
    id: "deepseek-ai/DeepSeek-V3.2",
    name: "DeepSeek V3.2",
    reasoning: false,
    input: ["text"],
    contextWindow: 163840,
    maxTokens: 8192,
    cost: { input: 0.26, output: 0.38, cacheRead: 0.26, cacheWrite: 0.38 },
  },
  {
    id: "deepseek-ai/DeepSeek-V3-0324",
    name: "DeepSeek V3 0324",
    reasoning: false,
    input: ["text"],
    contextWindow: 163840,
    maxTokens: 8192,
    cost: { input: 0.25, output: 0.88, cacheRead: 0.25, cacheWrite: 0.88 },
  },
  {
    id: "meta-llama/Llama-3.3-70B-Instruct",
    name: "Llama 3.3 70B Instruct",
    reasoning: false,
    input: ["text"],
    contextWindow: 131072,
    maxTokens: 10000,
    cost: { input: 0.1, output: 0.3, cacheRead: 0.1, cacheWrite: 0.3 },
  },
  {
    id: "Qwen/Qwen3-235B-A22B-Instruct-2507",
    name: "Qwen3 235B A22B Instruct",
    reasoning: false,
    input: ["text"],
    contextWindow: 262144,
    maxTokens: 32768,
    cost: { input: 0.09, output: 0.57, cacheRead: 0.09, cacheWrite: 0.57 },
  },
  {
    // Only multimodal (text + image) entry in this catalog.
    id: "moonshotai/Kimi-K2.5",
    name: "Kimi K2.5",
    reasoning: false,
    input: ["text", "image"],
    contextWindow: 262144,
    maxTokens: 32768,
    cost: { input: 0.45, output: 2.8, cacheRead: 0.45, cacheWrite: 2.8 },
  },
  {
    id: "MiniMaxAI/MiniMax-M2.1",
    name: "MiniMax M2.1",
    reasoning: false,
    input: ["text"],
    contextWindow: 196608,
    maxTokens: 8192,
    cost: { input: 0.28, output: 1.2, cacheRead: 0.28, cacheWrite: 1.2 },
  },
  {
    id: "MiniMaxAI/MiniMax-M2.5",
    name: "MiniMax M2.5",
    reasoning: false,
    input: ["text"],
    contextWindow: 204800,
    maxTokens: 8192,
    cost: { input: 0.3, output: 1.2, cacheRead: 0.3, cacheWrite: 1.2 },
  },
  {
    id: "XiaomiMiMo/MiMo-V2-Flash",
    name: "MiMo V2 Flash",
    reasoning: false,
    input: ["text"],
    contextWindow: 262144,
    maxTokens: 8192,
    cost: { input: 0.1, output: 0.3, cacheRead: 0.1, cacheWrite: 0.3 },
  },
  {
    id: "mistralai/Mistral-Nemo-Instruct-2407",
    name: "Mistral Nemo Instruct",
    reasoning: false,
    input: ["text"],
    contextWindow: 131072,
    maxTokens: 8192,
    cost: { input: 0.02, output: 0.04, cacheRead: 0.02, cacheWrite: 0.04 },
  },
];
/**
 * Turn a catalog entry into a full model definition by stamping on the
 * OpenAI-completions API flavor and the MegaNova compat flags.
 */
export function buildMeganovaModelDefinition(
  model: (typeof MEGANOVA_MODEL_CATALOG)[number],
): ModelDefinitionConfig {
  const { id, name, reasoning, input, cost, contextWindow, maxTokens } = model;
  return {
    id,
    name,
    api: "openai-completions",
    reasoning,
    input,
    cost,
    contextWindow,
    maxTokens,
    // MegaNova's endpoint does not accept the reasoning_effort parameter.
    compat: {
      supportsReasoningEffort: false,
    },
  };
}

View File

@ -19,6 +19,7 @@ import { SGLANG_DEFAULT_BASE_URL, SGLANG_PROVIDER_LABEL } from "./sglang-default
import { VLLM_DEFAULT_BASE_URL, VLLM_PROVIDER_LABEL } from "./vllm-defaults.js";
export { buildHuggingfaceProvider } from "../../extensions/huggingface/provider-catalog.js";
export { buildKilocodeProviderWithDiscovery } from "../../extensions/kilocode/provider-catalog.js";
export { buildMeganovaProvider } from "../../extensions/meganova/provider-catalog.js";
export { buildVeniceProvider } from "../../extensions/venice/provider-catalog.js";
export { buildVercelAiGatewayProvider } from "../../extensions/vercel-ai-gateway/provider-catalog.js";

View File

@ -7,6 +7,7 @@ import cloudflareAiGatewayPlugin from "../../extensions/cloudflare-ai-gateway/in
import googlePlugin from "../../extensions/google/index.js";
import huggingfacePlugin from "../../extensions/huggingface/index.js";
import kimiCodingPlugin from "../../extensions/kimi-coding/index.js";
import meganovaPlugin from "../../extensions/meganova/index.js";
import minimaxPlugin from "../../extensions/minimax/index.js";
import mistralPlugin from "../../extensions/mistral/index.js";
import moonshotPlugin from "../../extensions/moonshot/index.js";
@ -90,6 +91,7 @@ function createDefaultProviderPlugins() {
googlePlugin,
huggingfacePlugin,
kimiCodingPlugin,
meganovaPlugin,
minimaxPlugin,
mistralPlugin,
moonshotPlugin,

View File

@ -691,6 +691,23 @@ describe("onboard (non-interactive): provider auth", () => {
});
});
// Onboarding should infer the MegaNova auth choice purely from the presence
// of --meganova-api-key, store the key, and set the default model.
it("infers MegaNova auth choice from --meganova-api-key and sets default model", async () => {
  await withOnboardEnv("openclaw-onboard-meganova-infer-", async (env) => {
    const config = await runOnboardingAndReadConfig(env, {
      meganovaApiKey: "meganova-test-key",
    });
    const profile = config.auth?.profiles?.["meganova:default"];
    expect(profile?.provider).toBe("meganova");
    expect(profile?.mode).toBe("api_key");
    expect(config.agents?.defaults?.model?.primary).toBe("meganova/zai-org/GLM-5");
    await expectApiKeyProfile({
      profileId: "meganova:default",
      provider: "meganova",
      key: "meganova-test-key",
    });
  });
}, 60_000);
it("infers QIANFAN auth choice from --qianfan-api-key and sets default model", async () => {
await withOnboardEnv("openclaw-onboard-qianfan-infer-", async (env) => {
const cfg = await runOnboardingAndReadConfig(env, {

View File

@ -47,6 +47,7 @@ export type BuiltInAuthChoice =
| "mistral-api-key"
| "volcengine-api-key"
| "byteplus-api-key"
| "meganova-api-key"
| "qianfan-api-key"
| "modelstudio-api-key-cn"
| "modelstudio-api-key"
@ -75,6 +76,7 @@ export type BuiltInAuthChoiceGroupId =
| "mistral"
| "qwen"
| "together"
| "meganova"
| "huggingface"
| "qianfan"
| "modelstudio"
@ -133,6 +135,7 @@ export type OnboardOptions = {
syntheticApiKey?: string;
veniceApiKey?: string;
togetherApiKey?: string;
meganovaApiKey?: string;
huggingfaceApiKey?: string;
opencodeZenApiKey?: string;
opencodeGoApiKey?: string;

View File

@ -64,6 +64,12 @@ export {
SYNTHETIC_DEFAULT_MODEL_REF,
SYNTHETIC_MODEL_CATALOG,
} from "../agents/synthetic-models.js";
export {
buildMeganovaModelDefinition,
MEGANOVA_BASE_URL,
MEGANOVA_DEFAULT_MODEL_REF,
MEGANOVA_MODEL_CATALOG,
} from "../agents/meganova-models.js";
export {
buildTogetherModelDefinition,
TOGETHER_BASE_URL,

View File

@ -14,6 +14,7 @@ export const BUNDLED_PROVIDER_AUTH_ENV_VAR_CANDIDATES = {
kilocode: ["KILOCODE_API_KEY"],
kimi: ["KIMI_API_KEY", "KIMICODE_API_KEY"],
"kimi-coding": ["KIMI_API_KEY", "KIMICODE_API_KEY"],
meganova: ["MEGANOVA_API_KEY"],
minimax: ["MINIMAX_API_KEY"],
"minimax-portal": ["MINIMAX_OAUTH_TOKEN", "MINIMAX_API_KEY"],
mistral: ["MISTRAL_API_KEY"],