Merge bddd6d823997b0e0d159d4e86ae1462c883c2484 into 9fb78453e088cd7b553d7779faa0de5c83708e70

This commit is contained in:
Varun Chopra 2026-03-20 22:20:40 -07:00 committed by GitHub
commit d26eea6f4b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
2 changed files with 14 additions and 13 deletions

View File

@ -3,10 +3,10 @@ import type { OpenClawConfig } from "../config/config.js";
import { createOllamaEmbeddingProvider } from "./embeddings-ollama.js";
describe("embeddings-ollama", () => {
it("calls /api/embeddings and returns normalized vectors", async () => {
it("calls /api/embed and returns normalized vectors", async () => {
const fetchMock = vi.fn(
async () =>
new Response(JSON.stringify({ embedding: [3, 4] }), {
new Response(JSON.stringify({ embeddings: [[3, 4]] }), {
status: 200,
headers: { "content-type": "application/json" },
}),
@ -31,7 +31,7 @@ describe("embeddings-ollama", () => {
it("resolves baseUrl/apiKey/headers from models.providers.ollama and strips /v1", async () => {
const fetchMock = vi.fn(
async () =>
new Response(JSON.stringify({ embedding: [1, 0] }), {
new Response(JSON.stringify({ embeddings: [[1, 0]] }), {
status: 200,
headers: { "content-type": "application/json" },
}),
@ -60,7 +60,7 @@ describe("embeddings-ollama", () => {
await provider.embedQuery("hello");
expect(fetchMock).toHaveBeenCalledWith(
"http://127.0.0.1:11434/api/embeddings",
"http://127.0.0.1:11434/api/embed",
expect.objectContaining({
method: "POST",
headers: expect.objectContaining({
@ -90,7 +90,7 @@ describe("embeddings-ollama", () => {
it("falls back to env key when models.providers.ollama.apiKey is an unresolved SecretRef", async () => {
const fetchMock = vi.fn(
async () =>
new Response(JSON.stringify({ embedding: [1, 0] }), {
new Response(JSON.stringify({ embeddings: [[1, 0]] }), {
status: 200,
headers: { "content-type": "application/json" },
}),
@ -118,7 +118,7 @@ describe("embeddings-ollama", () => {
await provider.embedQuery("hello");
expect(fetchMock).toHaveBeenCalledWith(
"http://127.0.0.1:11434/api/embeddings",
"http://127.0.0.1:11434/api/embed",
expect.objectContaining({
headers: expect.objectContaining({
Authorization: "Bearer ollama-env",

View File

@ -73,7 +73,7 @@ export async function createOllamaEmbeddingProvider(
options: EmbeddingProviderOptions,
): Promise<{ provider: EmbeddingProvider; client: OllamaEmbeddingClient }> {
const client = resolveOllamaEmbeddingClient(options);
const embedUrl = `${client.baseUrl.replace(/\/$/, "")}/api/embeddings`;
const embedUrl = `${client.baseUrl.replace(/\/$/, "")}/api/embed`;
const embedOne = async (text: string): Promise<number[]> => {
const json = await withRemoteHttpResponse({
@ -82,19 +82,19 @@ export async function createOllamaEmbeddingProvider(
init: {
method: "POST",
headers: client.headers,
body: JSON.stringify({ model: client.model, prompt: text }),
body: JSON.stringify({ model: client.model, input: text }),
},
onResponse: async (res) => {
if (!res.ok) {
throw new Error(`Ollama embeddings HTTP ${res.status}: ${await res.text()}`);
}
return (await res.json()) as { embedding?: number[] };
return (await res.json()) as { embeddings?: number[][] };
},
});
if (!Array.isArray(json.embedding)) {
throw new Error(`Ollama embeddings response missing embedding[]`);
if (!Array.isArray(json.embeddings) || !Array.isArray(json.embeddings[0])) {
throw new Error(`Ollama embeddings response missing embeddings[]`);
}
return sanitizeAndNormalizeEmbedding(json.embedding);
return sanitizeAndNormalizeEmbedding(json.embeddings[0]);
};
const provider: EmbeddingProvider = {
@ -102,7 +102,8 @@ export async function createOllamaEmbeddingProvider(
model: client.model,
embedQuery: embedOne,
embedBatch: async (texts: string[]) => {
// Ollama /api/embeddings accepts one prompt per request.
// Ollama /api/embed supports batched input, but we fan out here to
// keep error handling and response normalization consistent per text.
return await Promise.all(texts.map(embedOne));
},
};