diff --git a/packages/memory-host-sdk/src/host/node-llama.ts b/packages/memory-host-sdk/src/host/node-llama.ts
index 9327a1c4503..7b54e3fed2f 100644
--- a/packages/memory-host-sdk/src/host/node-llama.ts
+++ b/packages/memory-host-sdk/src/host/node-llama.ts
@@ -1,3 +1,29 @@
+export type LlamaEmbedding = {
+  vector: Float32Array | number[];
+};
+
+export type LlamaEmbeddingContext = {
+  getEmbeddingFor: (text: string) => Promise<LlamaEmbedding>;
+};
+
+export type LlamaModel = {
+  createEmbeddingContext: () => Promise<LlamaEmbeddingContext>;
+};
+
+export type Llama = {
+  loadModel: (params: { modelPath: string }) => Promise<LlamaModel>;
+};
+
+export type NodeLlamaCppModule = {
+  LlamaLogLevel: {
+    error: number;
+  };
+  getLlama: (params: { logLevel: number }) => Promise<Llama>;
+  resolveModelFile: (modelPath: string, cacheDir?: string) => Promise<string>;
+};
+
+const NODE_LLAMA_CPP_MODULE = "node-llama-cpp";
+
 export async function importNodeLlamaCpp() {
-  return import("node-llama-cpp");
+  return import(NODE_LLAMA_CPP_MODULE) as Promise<NodeLlamaCppModule>;
 }
diff --git a/src/memory-host-sdk/host/embeddings.ts b/src/memory-host-sdk/host/embeddings.ts
index ec672e6b5a9..dbf98f261af 100644
--- a/src/memory-host-sdk/host/embeddings.ts
+++ b/src/memory-host-sdk/host/embeddings.ts
@@ -1,8 +1,12 @@
-import type { Llama, LlamaEmbeddingContext, LlamaModel } from "node-llama-cpp";
 import { normalizeOptionalString } from "../../shared/string-coerce.js";
 import { sanitizeAndNormalizeEmbedding } from "./embedding-vectors.js";
 import type { EmbeddingProvider, EmbeddingProviderOptions } from "./embeddings.types.js";
-import { importNodeLlamaCpp } from "./node-llama.js";
+import {
+  importNodeLlamaCpp,
+  type Llama,
+  type LlamaEmbeddingContext,
+  type LlamaModel,
+} from "./node-llama.js";
 
 export type {
   EmbeddingProvider,
diff --git a/src/memory-host-sdk/host/node-llama.ts b/src/memory-host-sdk/host/node-llama.ts
index 9327a1c4503..7b54e3fed2f 100644
--- a/src/memory-host-sdk/host/node-llama.ts
+++ b/src/memory-host-sdk/host/node-llama.ts
@@ -1,3 +1,29 @@
+export type LlamaEmbedding = {
+  vector: Float32Array | number[];
+};
+
+export type LlamaEmbeddingContext = {
+  getEmbeddingFor: (text: string) => Promise<LlamaEmbedding>;
+};
+
+export type LlamaModel = {
+  createEmbeddingContext: () => Promise<LlamaEmbeddingContext>;
+};
+
+export type Llama = {
+  loadModel: (params: { modelPath: string }) => Promise<LlamaModel>;
+};
+
+export type NodeLlamaCppModule = {
+  LlamaLogLevel: {
+    error: number;
+  };
+  getLlama: (params: { logLevel: number }) => Promise<Llama>;
+  resolveModelFile: (modelPath: string, cacheDir?: string) => Promise<string>;
+};
+
+const NODE_LLAMA_CPP_MODULE = "node-llama-cpp";
+
 export async function importNodeLlamaCpp() {
-  return import("node-llama-cpp");
+  return import(NODE_LLAMA_CPP_MODULE) as Promise<NodeLlamaCppModule>;
 }