fix(memory): split embedding provider types

This commit is contained in:
Vincent Koc
2026-04-09 08:32:32 +01:00
parent c1969ebf2a
commit 77e0e3bac5
9 changed files with 83 additions and 64 deletions

View File

@@ -1,7 +1,7 @@
import { normalizeLowercaseStringOrEmpty } from "../../shared/string-coerce.js";
import { sanitizeAndNormalizeEmbedding } from "./embedding-vectors.js";
import { debugEmbeddingsLog } from "./embeddings-debug.js";
import type { EmbeddingProvider, EmbeddingProviderOptions } from "./embeddings.js";
import type { EmbeddingProvider, EmbeddingProviderOptions } from "./embeddings.types.js";
// ---------------------------------------------------------------------------
// Types & constants

View File

@@ -13,7 +13,11 @@ import { normalizeOptionalString } from "../../shared/string-coerce.js";
import type { EmbeddingInput } from "./embedding-inputs.js";
import { sanitizeAndNormalizeEmbedding } from "./embedding-vectors.js";
import { debugEmbeddingsLog } from "./embeddings-debug.js";
import type { EmbeddingProvider, EmbeddingProviderOptions } from "./embeddings.js";
import type {
EmbeddingProvider,
EmbeddingProviderOptions,
GeminiTaskType,
} from "./embeddings.types.js";
import { buildRemoteBaseUrlPolicy, withRemoteHttpResponse } from "./remote-http.js";
import { resolveMemorySecretInputString } from "./secret-input.js";
@@ -42,14 +46,7 @@ export const GEMINI_EMBEDDING_2_MODELS = new Set([
const GEMINI_EMBEDDING_2_DEFAULT_DIMENSIONS = 3072;
const GEMINI_EMBEDDING_2_VALID_DIMENSIONS = [768, 1536, 3072] as const;
export type GeminiTaskType =
| "RETRIEVAL_QUERY"
| "RETRIEVAL_DOCUMENT"
| "SEMANTIC_SIMILARITY"
| "CLASSIFICATION"
| "CLUSTERING"
| "QUESTION_ANSWERING"
| "FACT_VERIFICATION";
export type { GeminiTaskType } from "./embeddings.types.js";
export type GeminiTextPart = { text: string };
export type GeminiInlinePart = {

View File

@@ -4,7 +4,7 @@ import {
createRemoteEmbeddingProvider,
resolveRemoteEmbeddingClient,
} from "./embeddings-remote-provider.js";
import type { EmbeddingProvider, EmbeddingProviderOptions } from "./embeddings.js";
import type { EmbeddingProvider, EmbeddingProviderOptions } from "./embeddings.types.js";
export type MistralEmbeddingClient = {
baseUrl: string;

View File

@@ -5,7 +5,7 @@ import {
createRemoteEmbeddingProvider,
resolveRemoteEmbeddingClient,
} from "./embeddings-remote-provider.js";
import type { EmbeddingProvider, EmbeddingProviderOptions } from "./embeddings.js";
import type { EmbeddingProvider, EmbeddingProviderOptions } from "./embeddings.types.js";
export type OpenAiEmbeddingClient = {
baseUrl: string;

View File

@@ -1,7 +1,7 @@
import { requireApiKey, resolveApiKeyForProvider } from "../../agents/model-auth.js";
import type { SsrFPolicy } from "../../infra/net/ssrf.js";
import { normalizeOptionalString } from "../../shared/string-coerce.js";
import type { EmbeddingProviderOptions } from "./embeddings.js";
import type { EmbeddingProviderOptions } from "./embeddings.types.js";
import { buildRemoteBaseUrlPolicy } from "./remote-http.js";
import { resolveMemorySecretInputString } from "./secret-input.js";

View File

@@ -4,7 +4,7 @@ import {
type RemoteEmbeddingProviderId,
} from "./embeddings-remote-client.js";
import { fetchRemoteEmbeddingVectors } from "./embeddings-remote-fetch.js";
import type { EmbeddingProvider, EmbeddingProviderOptions } from "./embeddings.js";
import type { EmbeddingProvider, EmbeddingProviderOptions } from "./embeddings.types.js";
export type RemoteEmbeddingClient = {
baseUrl: string;

View File

@@ -2,7 +2,7 @@ import type { SsrFPolicy } from "../../infra/net/ssrf.js";
import { normalizeEmbeddingModelWithPrefixes } from "./embeddings-model-normalize.js";
import { resolveRemoteEmbeddingBearerClient } from "./embeddings-remote-client.js";
import { fetchRemoteEmbeddingVectors } from "./embeddings-remote-fetch.js";
import type { EmbeddingProvider, EmbeddingProviderOptions } from "./embeddings.js";
import type { EmbeddingProvider, EmbeddingProviderOptions } from "./embeddings.types.js";
export type VoyageEmbeddingClient = {
baseUrl: string;

View File

@@ -1,22 +1,15 @@
import fsSync from "node:fs";
import type { Llama, LlamaEmbeddingContext, LlamaModel } from "node-llama-cpp";
import type { OpenClawConfig } from "../../config/config.js";
import type { SecretInput } from "../../config/types.secrets.js";
import { formatErrorMessage } from "../../infra/errors.js";
import { normalizeOptionalString } from "../../shared/string-coerce.js";
import { resolveUserPath } from "../../utils.js";
import type { EmbeddingInput } from "./embedding-inputs.js";
import { sanitizeAndNormalizeEmbedding } from "./embedding-vectors.js";
import {
createBedrockEmbeddingProvider,
hasAwsCredentials,
type BedrockEmbeddingClient,
} from "./embeddings-bedrock.js";
import {
createGeminiEmbeddingProvider,
type GeminiEmbeddingClient,
type GeminiTaskType,
} from "./embeddings-gemini.js";
import { createGeminiEmbeddingProvider, type GeminiEmbeddingClient } from "./embeddings-gemini.js";
import {
createMistralEmbeddingProvider,
type MistralEmbeddingClient,
@@ -24,6 +17,14 @@ import {
import { createOllamaEmbeddingProvider, type OllamaEmbeddingClient } from "./embeddings-ollama.js";
import { createOpenAiEmbeddingProvider, type OpenAiEmbeddingClient } from "./embeddings-openai.js";
import { createVoyageEmbeddingProvider, type VoyageEmbeddingClient } from "./embeddings-voyage.js";
import type {
EmbeddingProvider,
EmbeddingProviderFallback,
EmbeddingProviderId,
EmbeddingProviderOptions,
EmbeddingProviderRequest,
GeminiTaskType,
} from "./embeddings.types.js";
import { importNodeLlamaCpp } from "./node-llama.js";
export type { GeminiEmbeddingClient } from "./embeddings-gemini.js";
@@ -32,26 +33,14 @@ export type { OpenAiEmbeddingClient } from "./embeddings-openai.js";
export type { VoyageEmbeddingClient } from "./embeddings-voyage.js";
export type { OllamaEmbeddingClient } from "./embeddings-ollama.js";
export type { BedrockEmbeddingClient } from "./embeddings-bedrock.js";
export type EmbeddingProvider = {
id: string;
model: string;
maxInputTokens?: number;
embedQuery: (text: string) => Promise<number[]>;
embedBatch: (texts: string[]) => Promise<number[][]>;
embedBatchInputs?: (inputs: EmbeddingInput[]) => Promise<number[][]>;
};
export type EmbeddingProviderId =
| "openai"
| "local"
| "gemini"
| "voyage"
| "mistral"
| "ollama"
| "bedrock";
export type EmbeddingProviderRequest = EmbeddingProviderId | "auto";
export type EmbeddingProviderFallback = EmbeddingProviderId | "none";
export type {
EmbeddingProvider,
EmbeddingProviderFallback,
EmbeddingProviderId,
EmbeddingProviderOptions,
EmbeddingProviderRequest,
GeminiTaskType,
} from "./embeddings.types.js";
// Remote providers considered for auto-selection when provider === "auto".
// Ollama is intentionally excluded here so that "auto" mode does not
@@ -73,27 +62,6 @@ export type EmbeddingProviderResult = {
bedrock?: BedrockEmbeddingClient;
};
export type EmbeddingProviderOptions = {
config: OpenClawConfig;
agentDir?: string;
provider: EmbeddingProviderRequest;
remote?: {
baseUrl?: string;
apiKey?: SecretInput;
headers?: Record<string, string>;
};
model: string;
fallback: EmbeddingProviderFallback;
local?: {
modelPath?: string;
modelCacheDir?: string;
};
/** Provider-specific output vector dimensions for supported embedding families. */
outputDimensionality?: number;
/** Gemini: override the default task type sent with embedding requests. */
taskType?: GeminiTaskType;
};
export const DEFAULT_LOCAL_MODEL =
"hf:ggml-org/embeddinggemma-300m-qat-q8_0-GGUF/embeddinggemma-300m-qat-Q8_0.gguf";

View File

@@ -0,0 +1,54 @@
import type { OpenClawConfig } from "../../config/config.js";
import type { SecretInput } from "../../config/types.secrets.js";
import type { EmbeddingInput } from "./embedding-inputs.js";
/**
 * Runtime contract implemented by every embedding backend (remote APIs such
 * as OpenAI/Gemini/Voyage, or the local llama.cpp provider). All entry points
 * resolve to plain numeric vectors.
 */
export type EmbeddingProvider = {
/** Provider identifier string (not constrained here to EmbeddingProviderId). */
id: string;
/** Model name the backend embeds with. */
model: string;
/** Per-input token budget advertised by the provider, when known. */
maxInputTokens?: number;
/** Embed a single query string into one vector. */
embedQuery: (text: string) => Promise<number[]>;
/** Embed a batch of plain strings; presumably one vector per input, in order — TODO confirm against implementations. */
embedBatch: (texts: string[]) => Promise<number[][]>;
/** Optional richer batch entry point accepting structured EmbeddingInput values. */
embedBatchInputs?: (inputs: EmbeddingInput[]) => Promise<number[][]>;
};
/**
 * Identifiers of the concrete embedding providers this module can construct.
 * "auto"/"none" sentinels are layered on top via EmbeddingProviderRequest and
 * EmbeddingProviderFallback rather than included here.
 */
export type EmbeddingProviderId =
| "openai"
| "local"
| "gemini"
| "voyage"
| "mistral"
| "ollama"
| "bedrock";
/** Caller-requested provider: a concrete id, or "auto" to let selection logic pick one. */
export type EmbeddingProviderRequest = EmbeddingProviderId | "auto";
/** Fallback provider to try when the requested one is unavailable, or "none" to disable fallback. */
export type EmbeddingProviderFallback = EmbeddingProviderId | "none";
/**
 * Task-type hints sent with Gemini embedding requests (the API's `taskType`
 * field); the chosen value tunes the embedding for retrieval, similarity,
 * classification, etc.
 */
export type GeminiTaskType =
| "RETRIEVAL_QUERY"
| "RETRIEVAL_DOCUMENT"
| "SEMANTIC_SIMILARITY"
| "CLASSIFICATION"
| "CLUSTERING"
| "QUESTION_ANSWERING"
| "FACT_VERIFICATION";
/**
 * Options accepted when resolving/constructing an embedding provider.
 * Combines provider selection (with fallback), model choice, and
 * remote/local/provider-specific tuning knobs.
 */
export type EmbeddingProviderOptions = {
/** Full application config the resolver may consult. */
config: OpenClawConfig;
/** Agent directory, presumably used to resolve relative paths — TODO confirm at call sites. */
agentDir?: string;
/** Requested provider id, or "auto" for automatic selection. */
provider: EmbeddingProviderRequest;
/** Overrides for remote (HTTP) providers. */
remote?: {
/** Custom API base URL. */
baseUrl?: string;
/** API key as a secret input (resolved before use). */
apiKey?: SecretInput;
/** Extra HTTP headers sent with embedding requests. */
headers?: Record<string, string>;
};
/** Embedding model name to use. */
model: string;
/** Provider to fall back to when the requested one is unavailable ("none" disables). */
fallback: EmbeddingProviderFallback;
/** Overrides for the local (node-llama-cpp) provider. */
local?: {
/** Path to a local GGUF model file. */
modelPath?: string;
/** Directory used to cache downloaded models. */
modelCacheDir?: string;
};
/** Provider-specific output vector dimensions for supported embedding families. */
outputDimensionality?: number;
/** Gemini: override the default task type sent with embedding requests. */
taskType?: GeminiTaskType;
};