diff --git a/AGENTS.md b/AGENTS.md
deleted file mode 100644
index 668f071f..00000000
--- a/AGENTS.md
+++ /dev/null
@@ -1,9 +0,0 @@
-# MCP Reference Server Development Guide
-
-## Contributing Guidelines
-
-Before making updates to this repo, thoroughly review the CONTRIBUTING.md guide at the root of this repo.
-
-## Testing
-
-Use vitest when configuring or adding tests for servers implemented in typescript.
diff --git a/CLAUDE.md b/CLAUDE.md
deleted file mode 100644
index 6fc464b3..00000000
--- a/CLAUDE.md
+++ /dev/null
@@ -1,2 +0,0 @@
-@./AGENTS.md
-
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 7a10a22f..932b2599 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -21,6 +21,10 @@ We're more selective about:
We don't accept:
- **New server implementations** — We encourage you to publish them yourself, and link to them from the README.
+## Testing
+
+When adding or configuring tests for servers implemented in TypeScript, use **vitest** as the test framework. Vitest provides better ESM support, faster test execution, and a more modern testing experience.
+
## Documentation
Improvements to existing documentation is welcome - although generally we'd prefer ergonomic improvements than documenting pain points if possible!
diff --git a/README.md b/README.md
index b1713863..8ac8ebbd 100644
--- a/README.md
+++ b/README.md
@@ -164,7 +164,7 @@ Official integrations are maintained by companies building production ready MCP
-
**[Convex](https://stack.convex.dev/convex-mcp-server)** - Introspect and query your apps deployed to Convex.
-
**[Cortex](https://github.com/cortexapps/cortex-mcp)** - Official MCP server for [Cortex](https://www.cortex.io).
-
**[Couchbase](https://github.com/Couchbase-Ecosystem/mcp-server-couchbase)** - Interact with the data stored in Couchbase clusters.
--
**[Courier](https://github.com/trycourier/courier-mcp)** - Build, update, and send multi-channel notifications across email, sms, push, Slack, and Microsoft Teams.
+-
**[Courier](https://www.courier.com/docs/tools/mcp)** - Build, update, and send multi-channel notifications across email, SMS, push, Slack, and Microsoft Teams.
-
**[CRIC Wuye AI](https://github.com/wuye-ai/mcp-server-wuye-ai)** - Interact with capabilities of the CRIC Wuye AI platform, an intelligent assistant specifically for the property management industry.
-
**[CrowdStrike Falcon](https://github.com/CrowdStrike/falcon-mcp)** - Connects AI agents with the CrowdStrike Falcon platform for intelligent security analysis, providing programmatic access to detections, incidents, behaviors, threat intelligence, hosts, vulnerabilities, and identity protection capabilities.
-
**[CTERA Edge Filer](https://github.com/ctera/mcp-ctera-edge)** - CTERA Edge Filer delivers intelligent edge caching and multiprotocol file access, enabling fast, secure access to files across core and remote sites.
@@ -180,6 +180,7 @@ Official integrations are maintained by companies building production ready MCP
-
**[Defang](https://github.com/DefangLabs/defang/blob/main/src/pkg/mcp/README.md)** - Deploy your project to the cloud seamlessly with the [Defang](https://www.defang.io) platform without leaving your integrated development environment
-
**[Detailer](https://detailer.ginylil.com/)** – Instantly generate rich, AI-powered documentation for your GitHub repositories. Designed for AI agents to gain deep project context before taking action.
-
**[DevCycle](https://docs.devcycle.com/cli-mcp/mcp-getting-started)** - Create and monitor feature flags using natural language in your AI coding assistant.
+-
**[DevExpress Documentation](https://docs.devexpress.com/GeneralInformation/405551/help-resources/dev-express-documentation-mcp-server-configure-an-ai-powered-assistant)** - Get instant, AI-powered access to 300,000+ help topics on [DevExpress](https://www.devexpress.com) UI Component APIs — right in the AI Coding Assistant/IDE of your choice.
-
**[DevHub](https://github.com/devhub/devhub-cms-mcp)** - Manage and utilize website content within the [DevHub](https://www.devhub.com) CMS platform
-
**[DevRev](https://github.com/devrev/mcp-server)** - An MCP server to integrate with DevRev APIs to search through your DevRev Knowledge Graph where objects can be imported from diff. Sources listed [here](https://devrev.ai/docs/import#available-sources).
-
**[DexPaprika (CoinPaprika)](https://github.com/coinpaprika/dexpaprika-mcp)** - Access real-time DEX data, liquidity pools, token information, and trading analytics across multiple blockchain networks with [DexPaprika](https://dexpaprika.com) by CoinPaprika.
@@ -253,6 +254,7 @@ Official integrations are maintained by companies building production ready MCP
-
**[Improve Digital Publisher MCP](https://github.com/azerion/improvedigital-publisher-mcp-server)** - An MCP server that enables publishers to integrate [Improve Digital’s](https://improvedigital.com/) inventory management system with their AI tools or agents.
-
**[Inbox Zero](https://github.com/elie222/inbox-zero/tree/main/apps/mcp-server)** - AI personal assistant for email [Inbox Zero](https://www.getinboxzero.com)
-
**[Inflectra Spira](https://github.com/Inflectra/mcp-server-spira)** - Connect to your instance of the SpiraTest, SpiraTeam or SpiraPlan application lifecycle management platform by [Inflectra](https://www.inflectra.com)
+-
**[Infobip](https://www.infobip.com/)** - MCP server for integrating [Infobip](https://www.infobip.com/) global cloud communication platform. It equips AI agents with communication superpowers, allowing them to send and receive SMS and RCS messages, interact with WhatsApp and Viber, automate communication workflows, and manage customer data, all in a production-ready environment.
-
**[Inkeep](https://github.com/inkeep/mcp-server-python)** - RAG Search over your content powered by [Inkeep](https://inkeep.com)
-
**[Integration App](https://github.com/integration-app/mcp-server)** - Interact with any other SaaS applications on behalf of your customers.
-
**[IP2Location.io](https://github.com/ip2location/mcp-ip2location-io)** - Interact with IP2Location.io API to retrieve the geolocation information for an IP address.
@@ -391,6 +393,7 @@ Official integrations are maintained by companies building production ready MCP
-
**[Polymarket](https://github.com/ozgureyilmaz/polymarket-mcp)** - Real-time prediction market data from Polymarket - search markets, analyze prices, identify trading opportunities.
-
**[Port IO](https://github.com/port-labs/port-mcp-server)** - Access and manage your software catalog to improve service quality and compliance.
- **[PostHog](https://github.com/posthog/mcp)** - Interact with PostHog analytics, feature flags, error tracking and more with the official PostHog MCP server.
+-
**[PostIdentity](https://github.com/PostIdentity/mcp-server)** - Generate AI-powered social media posts from any AI assistant. Manage identities, create posts, track referrals, and browse marketplace templates, powered by [PostIdentity](https://postidentity.com).
- **[Postman API](https://github.com/postmanlabs/postman-api-mcp)** - Manage your Postman resources using the [Postman API](https://www.postman.com/postman/postman-public-workspace/collection/i2uqzpp/postman-api).
-
**[Powerdrill](https://github.com/powerdrillai/powerdrill-mcp)** - An MCP server that provides tools to interact with Powerdrill datasets, enabling smart AI data analysis and insights.
-
**[Prisma](https://www.prisma.io/docs/postgres/mcp-server)** - Create and manage Prisma Postgres databases
@@ -428,6 +431,7 @@ Official integrations are maintained by companies building production ready MCP
-
**[Roblox Studio](https://github.com/Roblox/studio-rust-mcp-server)** - Roblox Studio MCP Server, create and manipulate scenes, scripts in Roblox Studio
-
**[Rodin](https://github.com/DeemosTech/rodin-api-mcp)** - Generate 3D Models with [Hyper3D Rodin](https://hyper3d.ai)
-
**[Root Signals](https://github.com/root-signals/root-signals-mcp)** - Improve and quality control your outputs with evaluations using LLM-as-Judge
+- **[Roundtable](https://github.com/askbudi/roundtable)** - Unified integration layer that bridges multiple AI coding assistants (Codex, Claude Code, Cursor, Gemini) through zero-configuration auto-discovery and enterprise-ready architecture.
- **[Routine](https://github.com/routineco/mcp-server)** - MCP server to interact with [Routine](https://routine.co/): calendars, tasks, notes, etc.
-
**[Rube](https://github.com/ComposioHQ/Rube)** - Rube is a Model Context Protocol (MCP) server that connects your AI tools to 500+ apps like Gmail, Slack, GitHub, and Notion. Simply install it in your AI client, authenticate once with your apps, and start asking your AI to perform real actions like "Send an email" or "Create a task."
-
**[SafeDep](https://github.com/safedep/vet/blob/main/docs/mcp.md)** - SafeDep `vet-mcp` helps in vetting open source packages for security risks—such as vulnerabilities and malicious code—before they're used in your project, especially with AI-generated code suggestions.
@@ -519,8 +523,10 @@ Official integrations are maintained by companies building production ready MCP
-
**[Zapier](https://zapier.com/mcp)** - Connect your AI Agents to 8,000 apps instantly.
-
**[Zenable](https://docs.zenable.io/integrations/mcp/getting-started)** - Clean up sloppy AI code and prevent vulnerabilities
- **[ZenML](https://github.com/zenml-io/mcp-zenml)** - Interact with your MLOps and LLMOps pipelines through your [ZenML](https://www.zenml.io) MCP server
+- **[ZettelkastenSpace](https://github.com/joshylchen/zettelkasten_space)** - Built on the proven [Zettelkasten](https://www.zettelkasten.space/) method, enhanced with Claude Desktop integration via Model Context Protocol
-
**[Zine](https://www.zine.ai)** - Your memory, everywhere AI goes. Think iPhoto for your knowledge - upload and curate. Like ChatGPT but portable - context that travels with you.
-
**[ZIZAI Recruitment](https://github.com/zaiwork/mcp)** - Interact with the next-generation intelligent recruitment platform for employees and employers, powered by [ZIZAI Recruitment](https://zizai.work).
+
### 🌎 Community Servers
A growing set of community-developed and maintained servers demonstrates various applications of MCP across different domains.
@@ -537,6 +543,7 @@ A growing set of community-developed and maintained servers demonstrates various
- **[Actor Critic Thinking](https://github.com/aquarius-wing/actor-critic-thinking-mcp)** - Actor-critic thinking for performance evaluation
- **[Adobe Commerce](https://github.com/rafaelstz/adobe-commerce-dev-mcp)** — MCP to interact with Adobe Commerce GraphQL API, including orders, products, customers, etc.
- **[ADR Analysis](https://github.com/tosin2013/mcp-adr-analysis-server)** - AI-powered Architectural Decision Records (ADR) analysis server that provides architectural insights, technology stack detection, security checks, and TDD workflow enhancement for software development projects.
+- **[Ads MCP](https://github.com/amekala/ads-mcp)** - Remote MCP server for cross-platform ad campaign creation (Google Ads Search & PMax, TikTok). OAuth 2.1 authentication with progress streaming support for long-running operations. [Website](https://www.adspirer.com/)
- **[AgentBay](https://github.com/Michael98671/agentbay)** - An MCP server for providing serverless cloud infrastructure for AI agents.
- **[Agentic Framework](https://github.com/Piotr1215/mcp-agentic-framework)** - Multi-agent collaboration framework enabling AI agents to register, discover each other, exchange asynchronous messages via HTTP transport, and work together on complex tasks with persistent message history.
- **[AgentMode](https://www.agentmode.app)** - Connect to dozens of databases, data warehouses, Github & more, from a single MCP server. Run the Docker image locally, in the cloud, or on-premise.
@@ -621,6 +628,7 @@ A growing set of community-developed and maintained servers demonstrates various
- **[BugBug MCP Server](https://github.com/simplypixi/bugbug-mcp-server)** - Unofficial MCP server for BugBug API.
- **[BVG MCP Server - (Unofficial) ](https://github.com/svkaizoku/mcp-bvg)** - Unofficial MCP server for Berliner Verkehrsbetriebe Api.
- **[Bybit](https://github.com/ethancod1ng/bybit-mcp-server)** - A Model Context Protocol (MCP) server for integrating AI assistants with Bybit cryptocurrency exchange APIs, enabling automated trading, market data access, and account management.
+- **[C64 Bridge](https://github.com/chrisgleissner/c64bridge)** - AI command bridge for Commodore 64 hardware. Control Ultimate 64 and C64 Ultimate devices through REST API with BASIC and assembly program creation, real-time memory inspection, SID audio synthesis, and curated retro computing knowledge via local RAG.
- **[CAD-MCP](https://github.com/daobataotie/CAD-MCP#)** (by daobataotie) - Drawing CAD(Line,Circle,Text,Annotation...) through MCP server, supporting mainstream CAD software.
- **[Calculator](https://github.com/githejie/mcp-server-calculator)** - This server enables LLMs to use calculator for precise numerical calculations.
- **[CalDAV MCP](https://github.com/dominik1001/caldav-mcp)** - A CalDAV MCP server to expose calendar operations as tools for AI assistants.
@@ -911,6 +919,7 @@ A growing set of community-developed and maintained servers demonstrates various
- **[kwrds.ai](https://github.com/mkotsollaris/kwrds_ai_mcp)** - Keyword research, people also ask, SERP and other SEO tools for [kwrds.ai](https://www.kwrds.ai/)
- **[KYC-mcp-server](https://github.com/vishnurudra-ai/KYC-mcp-server)** - Know Your Computer (KYC) - MCP Server compatible with Claude Desktop. Comprehensive system diagnostics for Windows, Mac OS and Linux operating system with AI-powered recommendations.
- **[Langflow-DOC-QA-SERVER](https://github.com/GongRzhe/Langflow-DOC-QA-SERVER)** - A Model Context Protocol server for document Q&A powered by Langflow. It demonstrates core MCP concepts by providing a simple interface to query documents through a Langflow backend.
+- **[Langflow MCP Server](https://github.com/nobrainer-tech/langflow-mcp)** - Comprehensive MCP server providing 90 tools for Langflow workflow automation - manage flows, execute workflows, handle builds, and interact with knowledge bases. Includes Docker support and full API coverage for Langflow 1.6.4.
- **[Language Server](https://github.com/isaacphi/mcp-language-server)** - MCP Language Server helps MCP enabled clients navigate codebases more easily by giving them access to semantic tools like get definition, references, rename, and diagnostics.
- **[Lark(Feishu)](https://github.com/kone-net/mcp_server_lark)** - A Model Context Protocol(MCP) server for Lark(Feishu) sheet, message, doc and etc.
- **[Lazy Toggl MCP](https://github.com/movstox/lazy-toggl-mcp)** - Simple unofficial MCP server to track time via Toggl API
@@ -983,6 +992,7 @@ A growing set of community-developed and maintained servers demonstrates various
- **[mcp-meme-sticky](https://github.com/nkapila6/mcp-meme-sticky)** - Make memes or stickers using MCP server for WhatsApp or Telegram.
- **[mcp-memory-service](https://github.com/doobidoo/mcp-memory-service)** - Universal MCP memory service providing semantic memory search, persistent storage, and autonomous memory consolidation for AI assistants across 13+ AI applications.
- **[MCP-NixOS](https://github.com/utensils/mcp-nixos)** - A Model Context Protocol server that provides AI assistants with accurate, real-time information about NixOS packages, system options, Home Manager settings, and nix-darwin macOS configurations.
+- **[mcp-n8n](https://github.com/gomakers-ai/mcp-n8n)** - Complete n8n API integration with 41 tools for workflow management, execution monitoring, credentials, and 100+ pre-built templates. Control your entire n8n automation infrastructure through AI conversations.
- **[mcp-open-library](https://github.com/8enSmith/mcp-open-library)** - A Model Context Protocol (MCP) server for the Open Library API that enables AI assistants to search for book and author information.
- **[MCP-OpenStack-Ops](https://github.com/call518/MCP-OpenStack-Ops)** - Professional OpenStack operations automation via MCP server. Specialized tools for cluster monitoring, instance management, volume control & network analysis. FastMCP + OpenStack SDK + Bearer auth. Claude Desktop ready. Perfect for DevOps & cloud automation.
- **[MCP-PostgreSQL-Ops](https://github.com/call518/MCP-PostgreSQL-Ops)** - Model Context Protocol (MCP) server for Apache Ambari API integration. This project provides tools for managing Hadoop clusters, including service operations, configuration management, status monitoring, and request tracking.
@@ -1145,6 +1155,7 @@ A growing set of community-developed and maintained servers demonstrates various
- **[Postman](https://github.com/shannonlal/mcp-postman)** - MCP server for running Postman Collections locally via Newman. Allows for simple execution of Postman Server and returns the results of whether the collection passed all the tests.
- **[Powerdrill](https://github.com/powerdrillai/powerdrill-mcp)** - Interact with Powerdrill datasets, authenticated with [Powerdrill](https://powerdrill.ai) User ID and Project API Key.
- **[Prefect](https://github.com/allen-munsch/mcp-prefect)** - MCP Server for workflow orchestration and ELT/ETL with Prefect Server, and Prefect Cloud [https://www.prefect.io/] using the `prefect` python client.
+- **[Producer Pal](https://github.com/adamjmurray/producer-pal)** - MCP server for controlling Ableton Live, embedded in a Max for Live device for easy drag and drop installation.
- **[Productboard](https://github.com/kenjihikmatullah/productboard-mcp)** - Integrate the Productboard API into agentic workflows via MCP.
- **[Prometheus](https://github.com/pab1it0/prometheus-mcp-server)** - Query and analyze Prometheus - open-source monitoring system.
- **[Prometheus (Golang)](https://github.com/tjhop/prometheus-mcp-server/)** - A Prometheus MCP server with full API support for comprehensive management and deep interaction with Prometheus beyond basic query support. Written in go, it is a single binary install that is capable of STDIO, SSE, and HTTP transports for complex deployments.
@@ -1316,6 +1327,7 @@ A growing set of community-developed and maintained servers demonstrates various
- **[Universal MCP Servers](https://github.com/universal-mcp)** - A collection of MCP servers created using the [AgentR Universal MCP SDK](https://github.com/universal-mcp/universal-mcp).
- **[Unleash Integration (Feature Toggle)](https://github.com/cuongtl1992/unleash-mcp)** - A Model Context Protocol (MCP) server implementation that integrates with Unleash Feature Toggle system. Provide a bridge between LLM applications and Unleash feature flag system
- **[Upbit MCP Server](https://github.com/solangii/upbit-mcp-server)** – An MCP server that enables real - time access to cryptocurrency prices, market summaries, and asset listings from the Upbit exchange.
+- **[USA Spending MCP Server](https://github.com/thsmale/usaspending-mcp-server)** – Leverages [USASPENDING.gov](https://www.usaspending.gov/), the official source of government spending data, which enables one to track government spending over time, search government spending by agency, explore government spending in communities, and much more.
- **[use_aws_mcp](https://github.com/runjivu/use_aws_mcp)** - amazon-q-cli's use_aws tool extracted into independent mcp, for general aws api usage.
- **[User Feedback](https://github.com/mrexodia/user-feedback-mcp)** - Simple MCP Server to enable a human-in-the-loop workflow in tools like Cline and Cursor.
- **[USPTO](https://github.com/riemannzeta/patent_mcp_server)** - MCP server for accessing United States Patent & Trademark Office data through its Open Data Protocol (ODP) API.
@@ -1463,6 +1475,7 @@ Additional resources on MCP.
-
**[mkinf](https://mkinf.io)** - An Open Source registry of hosted MCP Servers to accelerate AI agent workflows.
- **[Open-Sourced MCP Servers Directory](https://github.com/chatmcp/mcp-directory)** - A curated list of MCP servers by **[mcpso](https://mcp.so)**
-
**[OpenTools](https://opentools.com)** - An open registry for finding, installing, and building with MCP servers by **[opentoolsteam](https://github.com/opentoolsteam)**
+- **[Programmatic MCP Prototype](https://github.com/domdomegg/programmatic-mcp-prototype)** - Experimental agent prototype demonstrating programmatic MCP tool composition, progressive tool discovery, state persistence, and skill building through TypeScript code execution by **[Adam Jones](https://github.com/domdomegg)**
- **[PulseMCP](https://www.pulsemcp.com)** ([API](https://www.pulsemcp.com/api)) - Community hub & weekly newsletter for discovering MCP servers, clients, articles, and news by **[Tadas Antanavicius](https://github.com/tadasant)**, **[Mike Coughlin](https://github.com/macoughl)**, and **[Ravina Patel](https://github.com/ravinahp)**
- **[r/mcp](https://www.reddit.com/r/mcp)** – A Reddit community dedicated to MCP by **[Frank Fiegel](https://github.com/punkpeye)**
- **[r/modelcontextprotocol](https://www.reddit.com/r/modelcontextprotocol)** – A Model Context Protocol community Reddit page - discuss ideas, get answers to your questions, network with like-minded people, and showcase your projects! by **[Alex Andru](https://github.com/QuantGeekDev)**
diff --git a/package-lock.json b/package-lock.json
index f0fc6496..80a20fb5 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -4284,8 +4284,10 @@
},
"devDependencies": {
"@types/node": "^22",
+ "@vitest/coverage-v8": "^2.1.8",
"shx": "^0.3.4",
- "typescript": "^5.6.2"
+ "typescript": "^5.6.2",
+ "vitest": "^2.1.8"
}
},
"src/postgres": {
diff --git a/src/fetch/README.md b/src/fetch/README.md
index 5324e507..1bf12a15 100644
--- a/src/fetch/README.md
+++ b/src/fetch/README.md
@@ -168,6 +168,48 @@ This can be customized by adding the argument `--user-agent=YourUserAgent` to th
The server can be configured to use a proxy by using the `--proxy-url` argument.
+## Windows Configuration
+
+If you're experiencing timeout issues on Windows, you may need to set the `PYTHONIOENCODING` environment variable to ensure proper character encoding:
+
+
+Windows configuration (uvx)
+
+```json
+{
+ "mcpServers": {
+ "fetch": {
+ "command": "uvx",
+ "args": ["mcp-server-fetch"],
+ "env": {
+ "PYTHONIOENCODING": "utf-8"
+ }
+ }
+ }
+}
+```
+
+
+
+Windows configuration (pip)
+
+```json
+{
+ "mcpServers": {
+ "fetch": {
+ "command": "python",
+ "args": ["-m", "mcp_server_fetch"],
+ "env": {
+ "PYTHONIOENCODING": "utf-8"
+ }
+ }
+ }
+}
+```
+
+
+This addresses character encoding issues that can cause the server to time out on Windows systems.
+
## Debugging
You can use the MCP inspector to debug the server. For uvx installations:
diff --git a/src/filesystem/__tests__/path-utils.test.ts b/src/filesystem/__tests__/path-utils.test.ts
index 8ddee33c..868d8f08 100644
--- a/src/filesystem/__tests__/path-utils.test.ts
+++ b/src/filesystem/__tests__/path-utils.test.ts
@@ -1,4 +1,4 @@
-import { describe, it, expect } from 'vitest';
+import { describe, it, expect, afterEach } from 'vitest';
import { normalizePath, expandHome, convertToWindowsPath } from '../path-utils.js';
describe('Path Utilities', () => {
@@ -10,14 +10,25 @@ describe('Path Utilities', () => {
.toBe('/home/user/some path');
});
- it('converts WSL paths to Windows format', () => {
+ it('never converts WSL paths (they work correctly in WSL with Node.js fs)', () => {
+ // WSL paths should NEVER be converted, regardless of platform
+ // They are valid Linux paths that work with Node.js fs operations inside WSL
expect(convertToWindowsPath('/mnt/c/NS/MyKindleContent'))
- .toBe('C:\\NS\\MyKindleContent');
+ .toBe('/mnt/c/NS/MyKindleContent');
+ expect(convertToWindowsPath('/mnt/d/Documents'))
+ .toBe('/mnt/d/Documents');
});
- it('converts Unix-style Windows paths to Windows format', () => {
- expect(convertToWindowsPath('/c/NS/MyKindleContent'))
- .toBe('C:\\NS\\MyKindleContent');
+ it('converts Unix-style Windows paths only on Windows platform', () => {
+ // On Windows, /c/ style paths should be converted
+ if (process.platform === 'win32') {
+ expect(convertToWindowsPath('/c/NS/MyKindleContent'))
+ .toBe('C:\\NS\\MyKindleContent');
+ } else {
+ // On Linux, leave them unchanged
+ expect(convertToWindowsPath('/c/NS/MyKindleContent'))
+ .toBe('/c/NS/MyKindleContent');
+ }
});
it('leaves Windows paths unchanged but ensures backslashes', () => {
@@ -34,11 +45,20 @@ describe('Path Utilities', () => {
.toBe('C:\\Program Files\\Some App');
});
- it('handles uppercase and lowercase drive letters', () => {
+ it('handles drive letter paths based on platform', () => {
+ // WSL paths should never be converted
expect(convertToWindowsPath('/mnt/d/some/path'))
- .toBe('D:\\some\\path');
- expect(convertToWindowsPath('/d/some/path'))
- .toBe('D:\\some\\path');
+ .toBe('/mnt/d/some/path');
+
+ if (process.platform === 'win32') {
+ // On Windows, Unix-style paths like /d/ should be converted
+ expect(convertToWindowsPath('/d/some/path'))
+ .toBe('D:\\some\\path');
+ } else {
+ // On Linux, /d/ is just a regular Unix path
+ expect(convertToWindowsPath('/d/some/path'))
+ .toBe('/d/some/path');
+ }
});
});
@@ -67,21 +87,33 @@ describe('Path Utilities', () => {
.toBe('C:\\NS\\MyKindleContent');
});
- it('handles WSL paths', () => {
+ it('always preserves WSL paths (they work correctly in WSL)', () => {
+ // WSL paths should ALWAYS be preserved, regardless of platform
+ // This is the fix for issue #2795
expect(normalizePath('/mnt/c/NS/MyKindleContent'))
- .toBe('C:\\NS\\MyKindleContent');
+ .toBe('/mnt/c/NS/MyKindleContent');
+ expect(normalizePath('/mnt/d/Documents'))
+ .toBe('/mnt/d/Documents');
});
it('handles Unix-style Windows paths', () => {
- expect(normalizePath('/c/NS/MyKindleContent'))
- .toBe('C:\\NS\\MyKindleContent');
+ // On Windows, /c/ paths should be converted
+ if (process.platform === 'win32') {
+ expect(normalizePath('/c/NS/MyKindleContent'))
+ .toBe('C:\\NS\\MyKindleContent');
+ } else if (process.platform === 'linux') {
+ // On Linux, /c/ is just a regular Unix path
+ expect(normalizePath('/c/NS/MyKindleContent'))
+ .toBe('/c/NS/MyKindleContent');
+ }
});
it('handles paths with spaces and mixed slashes', () => {
expect(normalizePath('C:/NS/My Kindle Content'))
.toBe('C:\\NS\\My Kindle Content');
+ // WSL paths should always be preserved
expect(normalizePath('/mnt/c/NS/My Kindle Content'))
- .toBe('C:\\NS\\My Kindle Content');
+ .toBe('/mnt/c/NS/My Kindle Content');
expect(normalizePath('C:\\Program Files (x86)\\App Name'))
.toBe('C:\\Program Files (x86)\\App Name');
expect(normalizePath('"C:\\Program Files\\App Name"'))
@@ -91,10 +123,19 @@ describe('Path Utilities', () => {
});
it('preserves spaces in all path formats', () => {
+ // WSL paths should always be preserved
expect(normalizePath('/mnt/c/Program Files/App Name'))
- .toBe('C:\\Program Files\\App Name');
- expect(normalizePath('/c/Program Files/App Name'))
- .toBe('C:\\Program Files\\App Name');
+ .toBe('/mnt/c/Program Files/App Name');
+
+ if (process.platform === 'win32') {
+ // On Windows, Unix-style paths like /c/ should be converted
+ expect(normalizePath('/c/Program Files/App Name'))
+ .toBe('C:\\Program Files\\App Name');
+ } else {
+ // On Linux, /c/ is just a regular Unix path
+ expect(normalizePath('/c/Program Files/App Name'))
+ .toBe('/c/Program Files/App Name');
+ }
expect(normalizePath('C:/Program Files/App Name'))
.toBe('C:\\Program Files\\App Name');
});
@@ -105,15 +146,16 @@ describe('Path Utilities', () => {
.toBe('C:\\NS\\Sub&Folder');
expect(normalizePath('C:/NS/Sub&Folder'))
.toBe('C:\\NS\\Sub&Folder');
+ // WSL paths should always be preserved
expect(normalizePath('/mnt/c/NS/Sub&Folder'))
- .toBe('C:\\NS\\Sub&Folder');
-
+ .toBe('/mnt/c/NS/Sub&Folder');
+
// Test tilde in path (short names in Windows)
expect(normalizePath('C:\\NS\\MYKIND~1'))
.toBe('C:\\NS\\MYKIND~1');
expect(normalizePath('/Users/NEMANS~1/FOLDER~2/SUBFO~1/Public/P12PST~1'))
.toBe('/Users/NEMANS~1/FOLDER~2/SUBFO~1/Public/P12PST~1');
-
+
// Test other special characters
expect(normalizePath('C:\\Path with #hash'))
.toBe('C:\\Path with #hash');
@@ -128,10 +170,19 @@ describe('Path Utilities', () => {
it('capitalizes lowercase drive letters for Windows paths', () => {
expect(normalizePath('c:/windows/system32'))
.toBe('C:\\windows\\system32');
- expect(normalizePath('/mnt/d/my/folder')) // WSL path with lowercase drive
- .toBe('D:\\my\\folder');
- expect(normalizePath('/e/another/folder')) // Unix-style Windows path with lowercase drive
- .toBe('E:\\another\\folder');
+ // WSL paths should always be preserved
+ expect(normalizePath('/mnt/d/my/folder'))
+ .toBe('/mnt/d/my/folder');
+
+ if (process.platform === 'win32') {
+ // On Windows, Unix-style paths should be converted and capitalized
+ expect(normalizePath('/e/another/folder'))
+ .toBe('E:\\another\\folder');
+ } else {
+ // On Linux, /e/ is just a regular Unix path
+ expect(normalizePath('/e/another/folder'))
+ .toBe('/e/another/folder');
+ }
});
it('handles UNC paths correctly', () => {
@@ -145,11 +196,8 @@ describe('Path Utilities', () => {
});
it('returns normalized non-Windows/WSL/Unix-style Windows paths as is after basic normalization', () => {
- // Relative path
- const relativePath = 'some/relative/path';
- expect(normalizePath(relativePath)).toBe(relativePath.replace(/\//g, '\\'));
-
// A path that looks somewhat absolute but isn't a drive or recognized Unix root for Windows conversion
+ // These paths should be preserved as-is (not converted to Windows C:\ format or WSL format)
const otherAbsolutePath = '\\someserver\\share\\file';
expect(normalizePath(otherAbsolutePath)).toBe(otherAbsolutePath);
});
@@ -172,4 +220,146 @@ describe('Path Utilities', () => {
expect(expandHome('C:/test')).toBe('C:/test');
});
});
+
+ describe('WSL path handling (issue #2795 fix)', () => {
+ // Save original platform
+ const originalPlatform = process.platform;
+
+ afterEach(() => {
+ // Restore platform after each test
+ Object.defineProperty(process, 'platform', {
+ value: originalPlatform,
+ writable: true,
+ configurable: true
+ });
+ });
+
+ it('should NEVER convert WSL paths - they work correctly in WSL with Node.js fs', () => {
+ // The key insight: When running `wsl npx ...`, Node.js runs INSIDE WSL (process.platform === 'linux')
+ // and /mnt/c/ paths work correctly with Node.js fs operations in that environment.
+ // Converting them to C:\ format breaks fs operations because Windows paths don't work inside WSL.
+
+ // Mock Linux platform (inside WSL)
+ Object.defineProperty(process, 'platform', {
+ value: 'linux',
+ writable: true,
+ configurable: true
+ });
+
+ // WSL paths should NOT be converted, even inside WSL
+ expect(normalizePath('/mnt/c/Users/username/folder'))
+ .toBe('/mnt/c/Users/username/folder');
+
+ expect(normalizePath('/mnt/d/Documents/project'))
+ .toBe('/mnt/d/Documents/project');
+ });
+
+ it('should also preserve WSL paths when running on Windows', () => {
+ // Mock Windows platform
+ Object.defineProperty(process, 'platform', {
+ value: 'win32',
+ writable: true,
+ configurable: true
+ });
+
+ // WSL paths should still be preserved (though they wouldn't be accessible from Windows Node.js)
+ expect(normalizePath('/mnt/c/Users/username/folder'))
+ .toBe('/mnt/c/Users/username/folder');
+
+ expect(normalizePath('/mnt/d/Documents/project'))
+ .toBe('/mnt/d/Documents/project');
+ });
+
+ it('should convert Unix-style Windows paths (/c/) only when running on Windows (win32)', () => {
+ // Mock process.platform to be 'win32' (Windows)
+ Object.defineProperty(process, 'platform', {
+ value: 'win32',
+ writable: true,
+ configurable: true
+ });
+
+ // Unix-style Windows paths like /c/ should be converted on Windows
+ expect(normalizePath('/c/Users/username/folder'))
+ .toBe('C:\\Users\\username\\folder');
+
+ expect(normalizePath('/d/Documents/project'))
+ .toBe('D:\\Documents\\project');
+ });
+
+ it('should NOT convert Unix-style paths (/c/) when running inside WSL (linux)', () => {
+ // Mock process.platform to be 'linux' (WSL/Linux)
+ Object.defineProperty(process, 'platform', {
+ value: 'linux',
+ writable: true,
+ configurable: true
+ });
+
+ // When on Linux, /c/ is just a regular Unix directory, not a drive letter
+ expect(normalizePath('/c/some/path'))
+ .toBe('/c/some/path');
+
+ expect(normalizePath('/d/another/path'))
+ .toBe('/d/another/path');
+ });
+
+ it('should preserve regular Unix paths on all platforms', () => {
+ // Test on Linux
+ Object.defineProperty(process, 'platform', {
+ value: 'linux',
+ writable: true,
+ configurable: true
+ });
+
+ expect(normalizePath('/home/user/documents'))
+ .toBe('/home/user/documents');
+
+ expect(normalizePath('/var/log/app'))
+ .toBe('/var/log/app');
+
+ // Test on Windows (though these paths wouldn't work on Windows)
+ Object.defineProperty(process, 'platform', {
+ value: 'win32',
+ writable: true,
+ configurable: true
+ });
+
+ expect(normalizePath('/home/user/documents'))
+ .toBe('/home/user/documents');
+
+ expect(normalizePath('/var/log/app'))
+ .toBe('/var/log/app');
+ });
+
+ it('reproduces exact scenario from issue #2795', () => {
+ // Simulate running inside WSL: wsl npx @modelcontextprotocol/server-filesystem /mnt/c/Users/username/folder
+ Object.defineProperty(process, 'platform', {
+ value: 'linux',
+ writable: true,
+ configurable: true
+ });
+
+ // This is the exact path from the issue
+ const inputPath = '/mnt/c/Users/username/folder';
+ const result = normalizePath(inputPath);
+
+ // Should NOT convert to C:\Users\username\folder
+ expect(result).toBe('/mnt/c/Users/username/folder');
+ expect(result).not.toContain('C:');
+ expect(result).not.toContain('\\');
+ });
+
+ it('should handle relative path slash conversion based on platform', () => {
+ // This test verifies platform-specific behavior naturally without mocking
+ // On Windows: forward slashes converted to backslashes
+ // On Linux/Unix: forward slashes preserved
+ const relativePath = 'some/relative/path';
+ const result = normalizePath(relativePath);
+
+ if (originalPlatform === 'win32') {
+ expect(result).toBe('some\\relative\\path');
+ } else {
+ expect(result).toBe('some/relative/path');
+ }
+ });
+ });
});
diff --git a/src/filesystem/path-utils.ts b/src/filesystem/path-utils.ts
index aca7f061..c897b9d2 100644
--- a/src/filesystem/path-utils.ts
+++ b/src/filesystem/path-utils.ts
@@ -8,14 +8,15 @@ import os from 'os';
*/
export function convertToWindowsPath(p: string): string {
// Handle WSL paths (/mnt/c/...)
+ // NEVER convert WSL paths - they are valid Linux paths that work with Node.js fs operations in WSL
+ // Converting them to Windows format (C:\...) breaks fs operations inside WSL
if (p.startsWith('/mnt/')) {
- const driveLetter = p.charAt(5).toUpperCase();
- const pathPart = p.slice(6).replace(/\//g, '\\');
- return `${driveLetter}:${pathPart}`;
+ return p; // Leave WSL paths unchanged
}
-
+
// Handle Unix-style Windows paths (/c/...)
- if (p.match(/^\/[a-zA-Z]\//)) {
+ // Only convert when running on Windows
+ if (p.match(/^\/[a-zA-Z]\//) && process.platform === 'win32') {
const driveLetter = p.charAt(1).toUpperCase();
const pathPart = p.slice(2).replace(/\//g, '\\');
return `${driveLetter}:${pathPart}`;
@@ -38,21 +39,29 @@ export function convertToWindowsPath(p: string): string {
export function normalizePath(p: string): string {
// Remove any surrounding quotes and whitespace
p = p.trim().replace(/^["']|["']$/g, '');
-
- // Check if this is a Unix path (starts with / but not a Windows or WSL path)
- const isUnixPath = p.startsWith('/') &&
- !p.match(/^\/mnt\/[a-z]\//i) &&
- !p.match(/^\/[a-zA-Z]\//);
-
+
+ // Check if this is a Unix path that should not be converted
+ // WSL paths (/mnt/) should ALWAYS be preserved as they work correctly in WSL with Node.js fs
+ // Regular Unix paths should also be preserved
+ const isUnixPath = p.startsWith('/') && (
+ // Always preserve WSL paths (/mnt/c/, /mnt/d/, etc.)
+ p.match(/^\/mnt\/[a-z]\//i) ||
+ // On non-Windows platforms, treat all absolute paths as Unix paths
+ (process.platform !== 'win32') ||
+ // On Windows, preserve Unix paths that aren't Unix-style Windows paths (/c/, /d/, etc.)
+ (process.platform === 'win32' && !p.match(/^\/[a-zA-Z]\//))
+ );
+
if (isUnixPath) {
// For Unix paths, just normalize without converting to Windows format
// Replace double slashes with single slashes and remove trailing slashes
return p.replace(/\/+/g, '/').replace(/\/+$/, '');
}
-
- // Convert WSL or Unix-style Windows paths to Windows format
+
+ // Convert Unix-style Windows paths (/c/, /d/) to Windows format if on Windows
+ // This function will now leave /mnt/ paths unchanged
p = convertToWindowsPath(p);
-
+
// Handle double backslashes, preserving leading UNC \\
if (p.startsWith('\\\\')) {
// For UNC paths, first normalize any excessive leading backslashes to exactly \\
@@ -67,15 +76,15 @@ export function normalizePath(p: string): string {
// For non-UNC paths, normalize all double backslashes
p = p.replace(/\\\\/g, '\\');
}
-
+
// Use Node's path normalization, which handles . and .. segments
let normalized = path.normalize(p);
-
+
// Fix UNC paths after normalization (path.normalize can remove a leading backslash)
if (p.startsWith('\\\\') && !normalized.startsWith('\\\\')) {
normalized = '\\' + normalized;
}
-
+
// Handle Windows paths: convert slashes and ensure drive letter is capitalized
if (normalized.match(/^[a-zA-Z]:/)) {
let result = normalized.replace(/\//g, '\\');
@@ -85,10 +94,15 @@ export function normalizePath(p: string): string {
}
return result;
}
-
- // For all other paths (including relative paths), convert forward slashes to backslashes
- // This ensures relative paths like "some/relative/path" become "some\\relative\\path"
- return normalized.replace(/\//g, '\\');
+
+ // On Windows, convert forward slashes to backslashes for relative paths
+ // On Linux/Unix, preserve forward slashes
+ if (process.platform === 'win32') {
+ return normalized.replace(/\//g, '\\');
+ }
+
+ // On non-Windows platforms, keep the normalized path as-is
+ return normalized;
}
/**
diff --git a/src/memory/__tests__/file-path.test.ts b/src/memory/__tests__/file-path.test.ts
new file mode 100644
index 00000000..d1a16e46
--- /dev/null
+++ b/src/memory/__tests__/file-path.test.ts
@@ -0,0 +1,156 @@
+import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
+import { promises as fs } from 'fs';
+import path from 'path';
+import { fileURLToPath } from 'url';
+import { ensureMemoryFilePath, defaultMemoryPath } from '../index.js';
+
+describe('ensureMemoryFilePath', () => {
+ const testDir = path.dirname(fileURLToPath(import.meta.url));
+ const oldMemoryPath = path.join(testDir, '..', 'memory.json');
+ const newMemoryPath = path.join(testDir, '..', 'memory.jsonl');
+
+ let originalEnv: string | undefined;
+
+ beforeEach(() => {
+ // Save original environment variable
+ originalEnv = process.env.MEMORY_FILE_PATH;
+ // Delete environment variable
+ delete process.env.MEMORY_FILE_PATH;
+ });
+
+ afterEach(async () => {
+ // Restore original environment variable
+ if (originalEnv !== undefined) {
+ process.env.MEMORY_FILE_PATH = originalEnv;
+ } else {
+ delete process.env.MEMORY_FILE_PATH;
+ }
+
+ // Clean up test files
+ try {
+ await fs.unlink(oldMemoryPath);
+ } catch {
+ // Ignore if file doesn't exist
+ }
+ try {
+ await fs.unlink(newMemoryPath);
+ } catch {
+ // Ignore if file doesn't exist
+ }
+ });
+
+ describe('with MEMORY_FILE_PATH environment variable', () => {
+ it('should return absolute path when MEMORY_FILE_PATH is absolute', async () => {
+ const absolutePath = '/tmp/custom-memory.jsonl';
+ process.env.MEMORY_FILE_PATH = absolutePath;
+
+ const result = await ensureMemoryFilePath();
+
+ expect(result).toBe(absolutePath);
+ });
+
+ it('should convert relative path to absolute when MEMORY_FILE_PATH is relative', async () => {
+ const relativePath = 'custom-memory.jsonl';
+ process.env.MEMORY_FILE_PATH = relativePath;
+
+ const result = await ensureMemoryFilePath();
+
+ expect(path.isAbsolute(result)).toBe(true);
+ expect(result).toContain('custom-memory.jsonl');
+ });
+
+ it('should handle Windows absolute paths', async () => {
+ const windowsPath = 'C:\\temp\\memory.jsonl';
+ process.env.MEMORY_FILE_PATH = windowsPath;
+
+ const result = await ensureMemoryFilePath();
+
+ // On Windows, should return as-is; on Unix, will be treated as relative
+ if (process.platform === 'win32') {
+ expect(result).toBe(windowsPath);
+ } else {
+ expect(path.isAbsolute(result)).toBe(true);
+ }
+ });
+ });
+
+ describe('without MEMORY_FILE_PATH environment variable', () => {
+ it('should return default path when no files exist', async () => {
+ const result = await ensureMemoryFilePath();
+
+ expect(result).toBe(defaultMemoryPath);
+ });
+
+ it('should migrate from memory.json to memory.jsonl when only old file exists', async () => {
+ // Create old memory.json file
+ await fs.writeFile(oldMemoryPath, '{"test":"data"}');
+
+ const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
+
+ const result = await ensureMemoryFilePath();
+
+ expect(result).toBe(defaultMemoryPath);
+
+ // Verify migration happened
+ const newFileExists = await fs.access(newMemoryPath).then(() => true).catch(() => false);
+ const oldFileExists = await fs.access(oldMemoryPath).then(() => true).catch(() => false);
+
+ expect(newFileExists).toBe(true);
+ expect(oldFileExists).toBe(false);
+
+ // Verify console messages
+ expect(consoleErrorSpy).toHaveBeenCalledWith(
+ expect.stringContaining('DETECTED: Found legacy memory.json file')
+ );
+ expect(consoleErrorSpy).toHaveBeenCalledWith(
+ expect.stringContaining('COMPLETED: Successfully migrated')
+ );
+
+ consoleErrorSpy.mockRestore();
+ });
+
+ it('should use new file when both old and new files exist', async () => {
+ // Create both files
+ await fs.writeFile(oldMemoryPath, '{"old":"data"}');
+ await fs.writeFile(newMemoryPath, '{"new":"data"}');
+
+ const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});
+
+ const result = await ensureMemoryFilePath();
+
+ expect(result).toBe(defaultMemoryPath);
+
+ // Verify no migration happened (both files should still exist)
+ const newFileExists = await fs.access(newMemoryPath).then(() => true).catch(() => false);
+ const oldFileExists = await fs.access(oldMemoryPath).then(() => true).catch(() => false);
+
+ expect(newFileExists).toBe(true);
+ expect(oldFileExists).toBe(true);
+
+ // Verify no console messages about migration
+ expect(consoleErrorSpy).not.toHaveBeenCalled();
+
+ consoleErrorSpy.mockRestore();
+ });
+
+ it('should preserve file content during migration', async () => {
+ const testContent = '{"entities": [{"name": "test", "type": "person"}]}';
+ await fs.writeFile(oldMemoryPath, testContent);
+
+ await ensureMemoryFilePath();
+
+ const migratedContent = await fs.readFile(newMemoryPath, 'utf-8');
+ expect(migratedContent).toBe(testContent);
+ });
+ });
+
+ describe('defaultMemoryPath', () => {
+ it('should end with memory.jsonl', () => {
+ expect(defaultMemoryPath).toMatch(/memory\.jsonl$/);
+ });
+
+ it('should be an absolute path', () => {
+ expect(path.isAbsolute(defaultMemoryPath)).toBe(true);
+ });
+ });
+});
diff --git a/src/memory/__tests__/knowledge-graph.test.ts b/src/memory/__tests__/knowledge-graph.test.ts
new file mode 100644
index 00000000..a65d527b
--- /dev/null
+++ b/src/memory/__tests__/knowledge-graph.test.ts
@@ -0,0 +1,394 @@
+import { describe, it, expect, beforeEach, afterEach } from 'vitest';
+import { promises as fs } from 'fs';
+import path from 'path';
+import { fileURLToPath } from 'url';
+import { KnowledgeGraphManager, Entity, Relation, KnowledgeGraph } from '../index.js';
+
+describe('KnowledgeGraphManager', () => {
+ let manager: KnowledgeGraphManager;
+ let testFilePath: string;
+
+ beforeEach(async () => {
+ // Create a temporary test file path
+ testFilePath = path.join(
+ path.dirname(fileURLToPath(import.meta.url)),
+ `test-memory-${Date.now()}.jsonl`
+ );
+ manager = new KnowledgeGraphManager(testFilePath);
+ });
+
+ afterEach(async () => {
+ // Clean up test file
+ try {
+ await fs.unlink(testFilePath);
+ } catch (error) {
+ // Ignore errors if file doesn't exist
+ }
+ });
+
+ describe('createEntities', () => {
+ it('should create new entities', async () => {
+ const entities: Entity[] = [
+ { name: 'Alice', entityType: 'person', observations: ['works at Acme Corp'] },
+ { name: 'Bob', entityType: 'person', observations: ['likes programming'] },
+ ];
+
+ const newEntities = await manager.createEntities(entities);
+ expect(newEntities).toHaveLength(2);
+ expect(newEntities).toEqual(entities);
+
+ const graph = await manager.readGraph();
+ expect(graph.entities).toHaveLength(2);
+ });
+
+ it('should not create duplicate entities', async () => {
+ const entities: Entity[] = [
+ { name: 'Alice', entityType: 'person', observations: ['works at Acme Corp'] },
+ ];
+
+ await manager.createEntities(entities);
+ const newEntities = await manager.createEntities(entities);
+
+ expect(newEntities).toHaveLength(0);
+
+ const graph = await manager.readGraph();
+ expect(graph.entities).toHaveLength(1);
+ });
+
+ it('should handle empty entity arrays', async () => {
+ const newEntities = await manager.createEntities([]);
+ expect(newEntities).toHaveLength(0);
+ });
+ });
+
+ describe('createRelations', () => {
+ it('should create new relations', async () => {
+ await manager.createEntities([
+ { name: 'Alice', entityType: 'person', observations: [] },
+ { name: 'Bob', entityType: 'person', observations: [] },
+ ]);
+
+ const relations: Relation[] = [
+ { from: 'Alice', to: 'Bob', relationType: 'knows' },
+ ];
+
+ const newRelations = await manager.createRelations(relations);
+ expect(newRelations).toHaveLength(1);
+ expect(newRelations).toEqual(relations);
+
+ const graph = await manager.readGraph();
+ expect(graph.relations).toHaveLength(1);
+ });
+
+ it('should not create duplicate relations', async () => {
+ await manager.createEntities([
+ { name: 'Alice', entityType: 'person', observations: [] },
+ { name: 'Bob', entityType: 'person', observations: [] },
+ ]);
+
+ const relations: Relation[] = [
+ { from: 'Alice', to: 'Bob', relationType: 'knows' },
+ ];
+
+ await manager.createRelations(relations);
+ const newRelations = await manager.createRelations(relations);
+
+ expect(newRelations).toHaveLength(0);
+
+ const graph = await manager.readGraph();
+ expect(graph.relations).toHaveLength(1);
+ });
+
+ it('should handle empty relation arrays', async () => {
+ const newRelations = await manager.createRelations([]);
+ expect(newRelations).toHaveLength(0);
+ });
+ });
+
+ describe('addObservations', () => {
+ it('should add observations to existing entities', async () => {
+ await manager.createEntities([
+ { name: 'Alice', entityType: 'person', observations: ['works at Acme Corp'] },
+ ]);
+
+ const results = await manager.addObservations([
+ { entityName: 'Alice', contents: ['likes coffee', 'has a dog'] },
+ ]);
+
+ expect(results).toHaveLength(1);
+ expect(results[0].entityName).toBe('Alice');
+ expect(results[0].addedObservations).toHaveLength(2);
+
+ const graph = await manager.readGraph();
+ const alice = graph.entities.find(e => e.name === 'Alice');
+ expect(alice?.observations).toHaveLength(3);
+ });
+
+ it('should not add duplicate observations', async () => {
+ await manager.createEntities([
+ { name: 'Alice', entityType: 'person', observations: ['works at Acme Corp'] },
+ ]);
+
+ await manager.addObservations([
+ { entityName: 'Alice', contents: ['likes coffee'] },
+ ]);
+
+ const results = await manager.addObservations([
+ { entityName: 'Alice', contents: ['likes coffee', 'has a dog'] },
+ ]);
+
+ expect(results[0].addedObservations).toHaveLength(1);
+ expect(results[0].addedObservations).toContain('has a dog');
+
+ const graph = await manager.readGraph();
+ const alice = graph.entities.find(e => e.name === 'Alice');
+ expect(alice?.observations).toHaveLength(3);
+ });
+
+ it('should throw error for non-existent entity', async () => {
+ await expect(
+ manager.addObservations([
+ { entityName: 'NonExistent', contents: ['some observation'] },
+ ])
+ ).rejects.toThrow('Entity with name NonExistent not found');
+ });
+ });
+
+ describe('deleteEntities', () => {
+ it('should delete entities', async () => {
+ await manager.createEntities([
+ { name: 'Alice', entityType: 'person', observations: [] },
+ { name: 'Bob', entityType: 'person', observations: [] },
+ ]);
+
+ await manager.deleteEntities(['Alice']);
+
+ const graph = await manager.readGraph();
+ expect(graph.entities).toHaveLength(1);
+ expect(graph.entities[0].name).toBe('Bob');
+ });
+
+ it('should cascade delete relations when deleting entities', async () => {
+ await manager.createEntities([
+ { name: 'Alice', entityType: 'person', observations: [] },
+ { name: 'Bob', entityType: 'person', observations: [] },
+ { name: 'Charlie', entityType: 'person', observations: [] },
+ ]);
+
+ await manager.createRelations([
+ { from: 'Alice', to: 'Bob', relationType: 'knows' },
+ { from: 'Bob', to: 'Charlie', relationType: 'knows' },
+ ]);
+
+ await manager.deleteEntities(['Bob']);
+
+ const graph = await manager.readGraph();
+ expect(graph.entities).toHaveLength(2);
+ expect(graph.relations).toHaveLength(0);
+ });
+
+ it('should handle deleting non-existent entities', async () => {
+ await manager.deleteEntities(['NonExistent']);
+ const graph = await manager.readGraph();
+ expect(graph.entities).toHaveLength(0);
+ });
+ });
+
+ describe('deleteObservations', () => {
+ it('should delete observations from entities', async () => {
+ await manager.createEntities([
+ { name: 'Alice', entityType: 'person', observations: ['works at Acme Corp', 'likes coffee'] },
+ ]);
+
+ await manager.deleteObservations([
+ { entityName: 'Alice', observations: ['likes coffee'] },
+ ]);
+
+ const graph = await manager.readGraph();
+ const alice = graph.entities.find(e => e.name === 'Alice');
+ expect(alice?.observations).toHaveLength(1);
+ expect(alice?.observations).toContain('works at Acme Corp');
+ });
+
+ it('should handle deleting from non-existent entities', async () => {
+ await manager.deleteObservations([
+ { entityName: 'NonExistent', observations: ['some observation'] },
+ ]);
+ // Should not throw error
+ const graph = await manager.readGraph();
+ expect(graph.entities).toHaveLength(0);
+ });
+ });
+
+ describe('deleteRelations', () => {
+ it('should delete specific relations', async () => {
+ await manager.createEntities([
+ { name: 'Alice', entityType: 'person', observations: [] },
+ { name: 'Bob', entityType: 'person', observations: [] },
+ ]);
+
+ await manager.createRelations([
+ { from: 'Alice', to: 'Bob', relationType: 'knows' },
+ { from: 'Alice', to: 'Bob', relationType: 'works_with' },
+ ]);
+
+ await manager.deleteRelations([
+ { from: 'Alice', to: 'Bob', relationType: 'knows' },
+ ]);
+
+ const graph = await manager.readGraph();
+ expect(graph.relations).toHaveLength(1);
+ expect(graph.relations[0].relationType).toBe('works_with');
+ });
+ });
+
+ describe('readGraph', () => {
+ it('should return empty graph when file does not exist', async () => {
+ const graph = await manager.readGraph();
+ expect(graph.entities).toHaveLength(0);
+ expect(graph.relations).toHaveLength(0);
+ });
+
+ it('should return complete graph with entities and relations', async () => {
+ await manager.createEntities([
+ { name: 'Alice', entityType: 'person', observations: ['works at Acme Corp'] },
+ ]);
+
+ await manager.createRelations([
+ { from: 'Alice', to: 'Alice', relationType: 'self' },
+ ]);
+
+ const graph = await manager.readGraph();
+ expect(graph.entities).toHaveLength(1);
+ expect(graph.relations).toHaveLength(1);
+ });
+ });
+
+ describe('searchNodes', () => {
+ beforeEach(async () => {
+ await manager.createEntities([
+ { name: 'Alice', entityType: 'person', observations: ['works at Acme Corp', 'likes programming'] },
+ { name: 'Bob', entityType: 'person', observations: ['works at TechCo'] },
+ { name: 'Acme Corp', entityType: 'company', observations: ['tech company'] },
+ ]);
+
+ await manager.createRelations([
+ { from: 'Alice', to: 'Acme Corp', relationType: 'works_at' },
+ { from: 'Bob', to: 'Acme Corp', relationType: 'competitor' },
+ ]);
+ });
+
+ it('should search by entity name', async () => {
+ const result = await manager.searchNodes('Alice');
+ expect(result.entities).toHaveLength(1);
+ expect(result.entities[0].name).toBe('Alice');
+ });
+
+ it('should search by entity type', async () => {
+ const result = await manager.searchNodes('company');
+ expect(result.entities).toHaveLength(1);
+ expect(result.entities[0].name).toBe('Acme Corp');
+ });
+
+ it('should search by observation content', async () => {
+ const result = await manager.searchNodes('programming');
+ expect(result.entities).toHaveLength(1);
+ expect(result.entities[0].name).toBe('Alice');
+ });
+
+ it('should be case insensitive', async () => {
+ const result = await manager.searchNodes('ALICE');
+ expect(result.entities).toHaveLength(1);
+ expect(result.entities[0].name).toBe('Alice');
+ });
+
+ it('should include relations between matched entities', async () => {
+ const result = await manager.searchNodes('Acme');
+ expect(result.entities).toHaveLength(2); // Alice and Acme Corp
+ expect(result.relations).toHaveLength(1); // Only Alice -> Acme Corp relation
+ });
+
+ it('should return empty graph for no matches', async () => {
+ const result = await manager.searchNodes('NonExistent');
+ expect(result.entities).toHaveLength(0);
+ expect(result.relations).toHaveLength(0);
+ });
+ });
+
+ describe('openNodes', () => {
+ beforeEach(async () => {
+ await manager.createEntities([
+ { name: 'Alice', entityType: 'person', observations: [] },
+ { name: 'Bob', entityType: 'person', observations: [] },
+ { name: 'Charlie', entityType: 'person', observations: [] },
+ ]);
+
+ await manager.createRelations([
+ { from: 'Alice', to: 'Bob', relationType: 'knows' },
+ { from: 'Bob', to: 'Charlie', relationType: 'knows' },
+ ]);
+ });
+
+ it('should open specific nodes by name', async () => {
+ const result = await manager.openNodes(['Alice', 'Bob']);
+ expect(result.entities).toHaveLength(2);
+ expect(result.entities.map(e => e.name)).toContain('Alice');
+ expect(result.entities.map(e => e.name)).toContain('Bob');
+ });
+
+ it('should include relations between opened nodes', async () => {
+ const result = await manager.openNodes(['Alice', 'Bob']);
+ expect(result.relations).toHaveLength(1);
+ expect(result.relations[0].from).toBe('Alice');
+ expect(result.relations[0].to).toBe('Bob');
+ });
+
+ it('should exclude relations to unopened nodes', async () => {
+ const result = await manager.openNodes(['Bob']);
+ expect(result.relations).toHaveLength(0);
+ });
+
+ it('should handle opening non-existent nodes', async () => {
+ const result = await manager.openNodes(['NonExistent']);
+ expect(result.entities).toHaveLength(0);
+ });
+
+ it('should handle empty node list', async () => {
+ const result = await manager.openNodes([]);
+ expect(result.entities).toHaveLength(0);
+ expect(result.relations).toHaveLength(0);
+ });
+ });
+
+ describe('file persistence', () => {
+ it('should persist data across manager instances', async () => {
+ await manager.createEntities([
+ { name: 'Alice', entityType: 'person', observations: ['persistent data'] },
+ ]);
+
+ // Create new manager instance with same file path
+ const manager2 = new KnowledgeGraphManager(testFilePath);
+ const graph = await manager2.readGraph();
+
+ expect(graph.entities).toHaveLength(1);
+ expect(graph.entities[0].name).toBe('Alice');
+ });
+
+ it('should handle JSONL format correctly', async () => {
+ await manager.createEntities([
+ { name: 'Alice', entityType: 'person', observations: [] },
+ ]);
+ await manager.createRelations([
+ { from: 'Alice', to: 'Alice', relationType: 'self' },
+ ]);
+
+ // Read file directly
+ const fileContent = await fs.readFile(testFilePath, 'utf-8');
+ const lines = fileContent.split('\n').filter(line => line.trim());
+
+ expect(lines).toHaveLength(2);
+ expect(JSON.parse(lines[0])).toHaveProperty('type', 'entity');
+ expect(JSON.parse(lines[1])).toHaveProperty('type', 'relation');
+ });
+ });
+});
diff --git a/src/memory/index.ts b/src/memory/index.ts
index 204968f5..94585a44 100644
--- a/src/memory/index.ts
+++ b/src/memory/index.ts
@@ -11,10 +11,10 @@ import path from 'path';
import { fileURLToPath } from 'url';
// Define memory file path using environment variable with fallback
-const defaultMemoryPath = path.join(path.dirname(fileURLToPath(import.meta.url)), 'memory.jsonl');
+export const defaultMemoryPath = path.join(path.dirname(fileURLToPath(import.meta.url)), 'memory.jsonl');
// Handle backward compatibility: migrate memory.json to memory.jsonl if needed
-async function ensureMemoryFilePath(): Promise<string> {
+export async function ensureMemoryFilePath(): Promise<string> {
if (process.env.MEMORY_FILE_PATH) {
// Custom path provided, use it as-is (with absolute path resolution)
return path.isAbsolute(process.env.MEMORY_FILE_PATH)
@@ -50,28 +50,30 @@ async function ensureMemoryFilePath(): Promise<string> {
let MEMORY_FILE_PATH: string;
// We are storing our memory using entities, relations, and observations in a graph structure
-interface Entity {
+export interface Entity {
name: string;
entityType: string;
observations: string[];
}
-interface Relation {
+export interface Relation {
from: string;
to: string;
relationType: string;
}
-interface KnowledgeGraph {
+export interface KnowledgeGraph {
entities: Entity[];
relations: Relation[];
}
// The KnowledgeGraphManager class contains all operations to interact with the knowledge graph
-class KnowledgeGraphManager {
+export class KnowledgeGraphManager {
+ constructor(private memoryFilePath: string) {}
+
  private async loadGraph(): Promise<KnowledgeGraph> {
try {
- const data = await fs.readFile(MEMORY_FILE_PATH, "utf-8");
+ const data = await fs.readFile(this.memoryFilePath, "utf-8");
const lines = data.split("\n").filter(line => line.trim() !== "");
return lines.reduce((graph: KnowledgeGraph, line) => {
const item = JSON.parse(line);
@@ -89,20 +91,20 @@ class KnowledgeGraphManager {
  private async saveGraph(graph: KnowledgeGraph): Promise<void> {
const lines = [
- ...graph.entities.map(e => JSON.stringify({
- type: "entity",
- name: e.name,
- entityType: e.entityType,
- observations: e.observations
+ ...graph.entities.map(e => JSON.stringify({
+ type: "entity",
+ name: e.name,
+ entityType: e.entityType,
+ observations: e.observations
})),
- ...graph.relations.map(r => JSON.stringify({
- type: "relation",
- from: r.from,
- to: r.to,
- relationType: r.relationType
+ ...graph.relations.map(r => JSON.stringify({
+ type: "relation",
+ from: r.from,
+ to: r.to,
+ relationType: r.relationType
})),
];
- await fs.writeFile(MEMORY_FILE_PATH, lines.join("\n"));
+ await fs.writeFile(this.memoryFilePath, lines.join("\n"));
}
  async createEntities(entities: Entity[]): Promise<Entity[]> {
@@ -222,7 +224,7 @@ class KnowledgeGraphManager {
}
}
-const knowledgeGraphManager = new KnowledgeGraphManager();
+let knowledgeGraphManager: KnowledgeGraphManager;
// The server instance and tools exposed to Claude
@@ -465,7 +467,10 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
async function main() {
// Initialize memory file path with backward compatibility
MEMORY_FILE_PATH = await ensureMemoryFilePath();
-
+
+ // Initialize knowledge graph manager with the memory file path
+ knowledgeGraphManager = new KnowledgeGraphManager(MEMORY_FILE_PATH);
+
const transport = new StdioServerTransport();
await server.connect(transport);
console.error("Knowledge Graph MCP Server running on stdio");
diff --git a/src/memory/package.json b/src/memory/package.json
index bb133ce2..3af1c44d 100644
--- a/src/memory/package.json
+++ b/src/memory/package.json
@@ -16,14 +16,17 @@
"scripts": {
"build": "tsc && shx chmod +x dist/*.js",
"prepare": "npm run build",
- "watch": "tsc --watch"
+ "watch": "tsc --watch",
+ "test": "vitest run --coverage"
},
"dependencies": {
"@modelcontextprotocol/sdk": "^1.19.1"
},
"devDependencies": {
"@types/node": "^22",
+ "@vitest/coverage-v8": "^2.1.8",
"shx": "^0.3.4",
- "typescript": "^5.6.2"
+ "typescript": "^5.6.2",
+ "vitest": "^2.1.8"
}
}
\ No newline at end of file
diff --git a/src/memory/vitest.config.ts b/src/memory/vitest.config.ts
new file mode 100644
index 00000000..d414ec8f
--- /dev/null
+++ b/src/memory/vitest.config.ts
@@ -0,0 +1,14 @@
+import { defineConfig } from 'vitest/config';
+
+export default defineConfig({
+ test: {
+ globals: true,
+ environment: 'node',
+ include: ['**/__tests__/**/*.test.ts'],
+ coverage: {
+ provider: 'v8',
+ include: ['**/*.ts'],
+ exclude: ['**/__tests__/**', '**/dist/**'],
+ },
+ },
+});