diff --git a/src/memory/manager.batch.test.ts b/src/memory/manager.batch.test.ts
index 4eed7a1f95..ac1e8b6982 100644
--- a/src/memory/manager.batch.test.ts
+++ b/src/memory/manager.batch.test.ts
@@ -148,9 +148,9 @@ describe("memory indexing with OpenAI batches", () => {
         memorySearch: {
           provider: "openai",
           model: "text-embedding-3-small",
-          store: { path: indexPath },
+          store: { path: indexPath, vector: { enabled: false } },
           sync: { watch: false, onSessionStart: false, onSearch: false },
-          query: { minScore: 0 },
+          query: { minScore: 0, hybrid: { enabled: false } },
           remote: { batch: { enabled: true, wait: true, pollIntervalMs: 1 } },
         },
       },
@@ -261,9 +261,9 @@ describe("memory indexing with OpenAI batches", () => {
         memorySearch: {
           provider: "openai",
           model: "text-embedding-3-small",
-          store: { path: indexPath },
+          store: { path: indexPath, vector: { enabled: false } },
           sync: { watch: false, onSessionStart: false, onSearch: false },
-          query: { minScore: 0 },
+          query: { minScore: 0, hybrid: { enabled: false } },
           remote: { batch: { enabled: true, wait: true, pollIntervalMs: 1 } },
         },
       },
@@ -366,9 +366,9 @@ describe("memory indexing with OpenAI batches", () => {
         memorySearch: {
           provider: "openai",
           model: "text-embedding-3-small",
-          store: { path: indexPath },
+          store: { path: indexPath, vector: { enabled: false } },
           sync: { watch: false, onSessionStart: false, onSearch: false },
-          query: { minScore: 0 },
+          query: { minScore: 0, hybrid: { enabled: false } },
           remote: { batch: { enabled: true, wait: true, pollIntervalMs: 1 } },
         },
       },