Mirror of https://github.com/modelcontextprotocol/servers.git
Synced 2026-02-19 11:54:58 -05:00

Merge branch 'main' into ochafik/zip-tool-outputs

.gitignore (vendored, 1 line changed)

@@ -301,3 +301,4 @@ cython_debug/
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
.claude/settings.local.json

@@ -243,6 +243,7 @@ Official integrations are maintained by companies building production ready MCP
- <img height="12" width="12" src="https://hunter.io/favicon.ico" alt="Hunter Logo" /> **[Hunter](https://github.com/hunter-io/hunter-mcp)** - Interact with the [Hunter API](https://hunter.io) to get B2B data using natural language.
- <img height="12" width="12" src="https://app.hyperbolic.xyz/hyperbolic-logo.svg" alt="Hyperbolic Labs Logo" /> **[Hyperbolic](https://github.com/HyperbolicLabs/hyperbolic-mcp)** - Interact with Hyperbolic's GPU cloud, enabling agents and LLMs to view and rent available GPUs, SSH into them, and run GPU-powered workloads for you.
- <img height="12" width="12" src="https://hyperbrowser-assets-bucket.s3.us-east-1.amazonaws.com/Hyperbrowser-logo.png" alt="Hyperbrowsers23 Logo" /> **[Hyperbrowser](https://github.com/hyperbrowserai/mcp)** - [Hyperbrowser](https://www.hyperbrowser.ai/) is the next-generation platform empowering AI agents and enabling effortless, scalable browser automation.
- **[IBM watsonx.data intelligence](https://github.com/IBM/data-intelligence-mcp-server)** - Find, understand, and work with your data in the watsonx.data intelligence governance & catalog, data quality, data lineage, and data product hub
- **[IBM wxflows](https://github.com/IBM/wxflows/tree/main/examples/mcp/javascript)** - Tool platform by IBM to build, test and deploy tools for any data source
- <img height="12" width="12" src="https://improvedigital.com/favicon.ico" alt="Improve Digital Icon" /> **[Improve Digital Publisher MCP](https://github.com/azerion/improvedigital-publisher-mcp-server)** - An MCP server that enables publishers to integrate [Improve Digital’s](https://improvedigital.com/) inventory management system with their AI tools or agents.
- <img height="12" width="12" src="https://www.getinboxzero.com/icon.png" alt="Inbox Zero Logo" /> **[Inbox Zero](https://github.com/elie222/inbox-zero/tree/main/apps/mcp-server)** - AI personal assistant for email [Inbox Zero](https://www.getinboxzero.com)

@@ -353,6 +354,7 @@ Official integrations are maintained by companies building production ready MCP
- <img height="12" width="12" src="https://raw.githubusercontent.com/oracle/mcp/refs/heads/main/oracle.svg" alt="Oracle Logo" /> **[Oracle](https://docs.oracle.com/en/database/oracle/sql-developer-command-line/25.2/sqcug/starting-and-managing-sqlcl-mcp-server.html#GUID-5F916B5D-8670-42BD-9F8B-D3D2424EC47E)** - Official [Oracle Database: SQLcl ](https://www.oracle.com/database/sqldeveloper/technologies/sqlcl/download/) MCP server enabling all access to any Oracle Database via native MCP support directly in SQLcl.
- <img height="12" width="12" src="https://orshot.com/brand/favicon.svg" alt="Orshot Logo" /> **[Orshot](https://github.com/rishimohan/orshot-mcp-server)** - Official [Orshot](https://orshot.com) MCP server to dynamically generate images from custom design templates.
- <img height="12" width="12" src="https://oxylabs.io/favicon.ico" alt="Oxylabs Logo" /> **[Oxylabs](https://github.com/oxylabs/oxylabs-mcp)** - Scrape websites with Oxylabs Web API, supporting dynamic rendering and parsing for structured data extraction.
- <img src="https://cdn.bfldr.com/7GK1OJLK/at/kq7cwt4vkw5m2x9s4gkvbf7g/android-chrome-512x512-favicon.png?auto=webp&format=png&width=12&height=12" width="12" height="12" alt="PandaDoc"> **[PandaDoc](https://developers.pandadoc.com/docs/use-pandadoc-mcp-server)** - Configure AI development tools to connect to PandaDoc's Model Context Protocol server and leverage AI-powered PandaDoc integrations.
- <img height="12" width="12" src="https://developer.paddle.com/favicon.svg" alt="Paddle Logo" /> **[Paddle](https://github.com/PaddleHQ/paddle-mcp-server)** - Interact with the Paddle API. Manage product catalog, billing and subscriptions, and reports.
- **[PaddleOCR](https://paddlepaddle.github.io/PaddleOCR/latest/en/version3.x/deployment/mcp_server.html)** - An MCP server that brings enterprise-grade OCR and document parsing capabilities to AI applications.
- <img height="12" width="12" src="https://cdn.brandfolder.io/YX9ETPCP/at/266537g8kh6mmvt24jvsjb/P-GreenRGB.svg" alt="PagerDuty Logo" /> **[PagerDuty](https://github.com/PagerDuty/pagerduty-mcp-server)** - Interact with your PagerDuty account, allowing you to manage incidents, services, schedules, and more directly from your MCP-enabled client.

@@ -614,6 +616,7 @@ A growing set of community-developed and maintained servers demonstrates various
- **[Chroma](https://github.com/privetin/chroma)** - Vector database server for semantic document search and metadata filtering, built on Chroma
- **[Chrome history](https://github.com/vincent-pli/chrome-history-mcp)** - Talk with AI about your browser history, get fun ^_^
- **[CIViC](https://github.com/QuentinCody/civic-mcp-server)** - MCP server for the Clinical Interpretation of Variants in Cancer (CIViC) database, providing access to clinical variant interpretations and genomic evidence for cancer research.
- **[claude-faf-mcp](https://github.com/Wolfe-Jam/claude-faf-mcp)** - MCP server for .faf format. Context scoring engine with project context management.
- **[Claude Thread Continuity](https://github.com/peless/claude-thread-continuity)** - Persistent memory system enabling Claude Desktop conversations to resume with full context across sessions. Maintains conversation history, project states, and user preferences for seamless multi-session workflows.
- **[ClaudePost](https://github.com/ZilongXue/claude-post)** - ClaudePost enables seamless email management for Gmail, offering secure features like email search, reading, and sending.
- **[CLDGeminiPDF Analyzer](https://github.com/tfll37/CLDGeminiPDF-Analyzer)** - MCP server tool enabling sharing large PDF files to Google LLMs via API for further/additional analysis and response retrieval to Claude Desktop.

@@ -933,6 +936,7 @@ A growing set of community-developed and maintained servers demonstrates various
- **[MCP Create](https://github.com/tesla0225/mcp-create)** - A dynamic MCP server management service that creates, runs, and manages Model Context Protocol servers on-the-fly.
- **[MCP Documentation Server](https://github.com/andrea9293/mcp-documentation-server)** - Server that provides local-first document management and semantic search via embeddings or Gemini AI (recommended). Optimized for performance with disk persistence, an in-memory index, and caching.
- **[MCP Installer](https://github.com/anaisbetts/mcp-installer)** - An MCP server that installs other MCP servers for you.
- **[MCP on Android TV](https://github.com/MiddlePoint-Solutions/mcp-on-android-tv)** - A Model Context Protocol (MCP) server running directly on your Android TV with bundled access to ADB on-device.
- **[MCP ProjectManage OpenProject](https://github.com/boma086/mcp-projectmanage-openproject)** - This server provides the MCP service for project weekly reports, with project management information supplied by OpenProject.
- **[MCP Proxy Server](https://github.com/TBXark/mcp-proxy)** - An MCP proxy server that aggregates and serves multiple MCP resource servers through a single HTTP server.
- **[MCP Server Creator](https://github.com/GongRzhe/MCP-Server-Creator)** - A powerful Model Context Protocol (MCP) server that creates other MCP servers! This meta-server provides tools for dynamically generating FastMCP server configurations and Python code.

@@ -989,6 +993,7 @@ A growing set of community-developed and maintained servers demonstrates various
- **[Mikrotik](https://github.com/jeff-nasseri/mikrotik-mcp)** - Mikrotik MCP server which covers networking operations (IP, DHCP, firewall, etc.)
- **[Mindmap](https://github.com/YuChenSSR/mindmap-mcp-server)** (by YuChenSSR) - A server that generates mindmaps from input containing markdown code.
- **[Minima](https://github.com/dmayboroda/minima)** - MCP server for RAG on local files
- **[MLflow](https://github.com/kkruglik/mlflow-mcp)** - MLflow MCP server for ML experiment tracking with advanced querying, run comparison, artifact access, and model registry.
- **[Modao Proto MCP](https://github.com/modao-dev/modao-proto-mcp)** - AI-powered HTML prototype generation server that converts natural language descriptions into complete HTML code with modern design and responsive layouts. Supports design description expansion and seamless integration with Modao workspace.
- **[Mobile MCP](https://github.com/mobile-next/mobile-mcp)** (by Mobile Next) - MCP server for mobile (iOS/Android) automation, app scraping and development using physical devices or simulators/emulators.
- **[Monday.com (unofficial)](https://github.com/sakce/mcp-server-monday)** - MCP Server to interact with Monday.com boards and items.

package-lock.json (generated, 1048 lines changed)
File diff suppressed because it is too large.

@@ -6,7 +6,6 @@ import {
CompleteRequestSchema,
CreateMessageRequest,
CreateMessageResultSchema,
ElicitRequest,
ElicitResultSchema,
GetPromptRequestSchema,
ListPromptsRequestSchema,

@@ -264,22 +263,6 @@ export const createServer = () => {

};

const requestElicitation = async (
message: string,
requestedSchema: any,
sendRequest: SendRequest
) => {
const request: ElicitRequest = {
method: 'elicitation/create',
params: {
message,
requestedSchema,
},
};

return await sendRequest(request, ElicitResultSchema);
};

const ALL_RESOURCES: Resource[] = Array.from({ length: 100 }, (_, i) => {
const uri = `test://static/resource/${i + 1}`;
if (i % 2 === 0) {

@@ -557,7 +540,7 @@ export const createServer = () => {
});
if (clientCapabilities!.elicitation) tools.push ({
name: ToolName.ELICITATION,
description: "Demonstrates the Elicitation feature by asking the user to provide information about their favorite color, number, and pets.",
description: "Elicitation test tool that demonstrates how to request user input with various field types (string, boolean, email, uri, date, integer, number, enum)",
inputSchema: zodToJsonSchema(ElicitationSchema) as ToolInput,
});

@@ -752,27 +735,75 @@ export const createServer = () => {
if (name === ToolName.ELICITATION) {
ElicitationSchema.parse(args);

const elicitationResult = await requestElicitation(
'What are your favorite things?',
{
type: 'object',
properties: {
color: { type: 'string', description: 'Favorite color' },
number: {
type: 'integer',
description: 'Favorite number',
minimum: 1,
maximum: 100,
},
pets: {
type: 'string',
enum: ['cats', 'dogs', 'birds', 'fish', 'reptiles'],
description: 'Favorite pets',
const elicitationResult = await extra.sendRequest({
method: 'elicitation/create',
params: {
message: 'Please provide inputs for the following fields:',
requestedSchema: {
type: 'object',
properties: {
name: {
title: 'Full Name',
type: 'string',
description: 'Your full, legal name',
},
check: {
title: 'Agree to terms',
type: 'boolean',
description: 'A boolean check',
},
color: {
title: 'Favorite Color',
type: 'string',
description: 'Favorite color (open text)',
default: 'blue',
},
email: {
title: 'Email Address',
type: 'string',
format: 'email',
description: 'Your email address (will be verified, and never shared with anyone else)',
},
homepage: {
type: 'string',
format: 'uri',
description: 'Homepage / personal site',
},
birthdate: {
title: 'Birthdate',
type: 'string',
format: 'date',
description: 'Your date of birth (will never be shared with anyone else)',
},
integer: {
title: 'Favorite Integer',
type: 'integer',
description: 'Your favorite integer (do not give us your phone number, pin, or other sensitive info)',
minimum: 1,
maximum: 100,
default: 42,
},
number: {
title: 'Favorite Number',
type: 'number',
description: 'Favorite number (there are no wrong answers)',
minimum: 0,
maximum: 1000,
default: 3.14,
},
petType: {
title: 'Pet type',
type: 'string',
enum: ['cats', 'dogs', 'birds', 'fish', 'reptiles'],
enumNames: ['Cats', 'Dogs', 'Birds', 'Fish', 'Reptiles'],
default: 'dogs',
description: 'Your favorite pet type',
},
},
required: ['name'],
},
},
extra.sendRequest
);
}, ElicitResultSchema, { timeout: 10 * 60 * 1000 /* 10 minutes */ });

// Handle different response actions
const content = [];

@@ -780,19 +811,30 @@ export const createServer = () => {
if (elicitationResult.action === 'accept' && elicitationResult.content) {
content.push({
type: "text",
text: `✅ User provided their favorite things!`,
text: `✅ User provided the requested information!`,
});

// Only access elicitationResult.content when action is accept
const { color, number, pets } = elicitationResult.content;
const userData = elicitationResult.content;
const lines = [];
if (userData.name) lines.push(`- Name: ${userData.name}`);
if (userData.check !== undefined) lines.push(`- Agreed to terms: ${userData.check}`);
if (userData.color) lines.push(`- Favorite Color: ${userData.color}`);
if (userData.email) lines.push(`- Email: ${userData.email}`);
if (userData.homepage) lines.push(`- Homepage: ${userData.homepage}`);
if (userData.birthdate) lines.push(`- Birthdate: ${userData.birthdate}`);
if (userData.integer !== undefined) lines.push(`- Favorite Integer: ${userData.integer}`);
if (userData.number !== undefined) lines.push(`- Favorite Number: ${userData.number}`);
if (userData.petType) lines.push(`- Pet Type: ${userData.petType}`);

content.push({
type: "text",
text: `Their favorites are:\n- Color: ${color || 'not specified'}\n- Number: ${number || 'not specified'}\n- Pets: ${pets || 'not specified'}`,
text: `User inputs:\n${lines.join('\n')}`,
});
} else if (elicitationResult.action === 'decline') {
content.push({
type: "text",
text: `❌ User declined to provide their favorite things.`,
text: `❌ User declined to provide the requested information.`,
});
} else if (elicitationResult.action === 'cancel') {
content.push({
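
As an aside (not part of the commit): the handler above only ever reads `action` and, for accepted requests, the keys of `content` that mirror requestedSchema.properties. A rough TypeScript sketch of the result shape it consumes, inferred from this diff alone (the SDK's ElicitResultSchema remains the authoritative definition):

// Illustrative sketch only: approximate shape of the elicitation result handled above.
type ElicitationAction = 'accept' | 'decline' | 'cancel';

interface ElicitationResultSketch {
  action: ElicitationAction;
  // Present when action === 'accept'; keys mirror requestedSchema.properties.
  content?: {
    name?: string;      // the only key listed in requestedSchema.required
    check?: boolean;
    color?: string;
    email?: string;
    homepage?: string;
    birthdate?: string; // 'date'-formatted string
    integer?: number;
    number?: number;
    petType?: string;
  };
}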

@@ -22,7 +22,7 @@
"start:streamableHttp": "node dist/streamableHttp.js"
},
"dependencies": {
"@modelcontextprotocol/sdk": "^1.18.0",
"@modelcontextprotocol/sdk": "^1.19.1",
"cors": "^2.8.5",
"express": "^4.21.1",
"zod": "^3.23.8",

@@ -20,7 +20,7 @@
"test": "jest --config=jest.config.cjs --coverage"
},
"dependencies": {
"@modelcontextprotocol/sdk": "^1.17.0",
"@modelcontextprotocol/sdk": "^1.19.1",
"diff": "^5.1.0",
"glob": "^10.3.10",
"minimatch": "^10.0.1",

@@ -173,14 +173,14 @@ The server can be configured using the following environment variables:
"@modelcontextprotocol/server-memory"
],
"env": {
"MEMORY_FILE_PATH": "/path/to/custom/memory.json"
"MEMORY_FILE_PATH": "/path/to/custom/memory.jsonl"
}
}
}
}
```

- `MEMORY_FILE_PATH`: Path to the memory storage JSON file (default: `memory.json` in the server directory)
- `MEMORY_FILE_PATH`: Path to the memory storage JSONL file (default: `memory.jsonl` in the server directory)

# VS Code Installation Instructions
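
As an aside (not part of the commit): because the storage file is now JSONL, each non-empty line is a standalone JSON record, so the file can be inspected line by line. A minimal TypeScript sketch, assuming only that one-record-per-line layout (see the memory server source for the exact entity/relation record shape):

// Illustrative JSONL reader; assumes one JSON record per non-empty line.
import { promises as fs } from 'fs';

async function dumpMemory(filePath: string): Promise<void> {
  const raw = await fs.readFile(filePath, 'utf-8');
  const records = raw
    .split('\n')
    .filter(line => line.trim() !== '')
    .map(line => JSON.parse(line));
  console.error(`Loaded ${records.length} records from ${filePath}`);
}

// Point it at the same path the server uses via MEMORY_FILE_PATH.
dumpMemory(process.env.MEMORY_FILE_PATH ?? 'memory.jsonl').catch(console.error);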

@@ -11,14 +11,43 @@ import path from 'path';
import { fileURLToPath } from 'url';

// Define memory file path using environment variable with fallback
const defaultMemoryPath = path.join(path.dirname(fileURLToPath(import.meta.url)), 'memory.json');
const defaultMemoryPath = path.join(path.dirname(fileURLToPath(import.meta.url)), 'memory.jsonl');

// If MEMORY_FILE_PATH is just a filename, put it in the same directory as the script
const MEMORY_FILE_PATH = process.env.MEMORY_FILE_PATH
? path.isAbsolute(process.env.MEMORY_FILE_PATH)
? process.env.MEMORY_FILE_PATH
: path.join(path.dirname(fileURLToPath(import.meta.url)), process.env.MEMORY_FILE_PATH)
: defaultMemoryPath;
// Handle backward compatibility: migrate memory.json to memory.jsonl if needed
async function ensureMemoryFilePath(): Promise<string> {
if (process.env.MEMORY_FILE_PATH) {
// Custom path provided, use it as-is (with absolute path resolution)
return path.isAbsolute(process.env.MEMORY_FILE_PATH)
? process.env.MEMORY_FILE_PATH
: path.join(path.dirname(fileURLToPath(import.meta.url)), process.env.MEMORY_FILE_PATH);
}

// No custom path set, check for backward compatibility migration
const oldMemoryPath = path.join(path.dirname(fileURLToPath(import.meta.url)), 'memory.json');
const newMemoryPath = defaultMemoryPath;

try {
// Check if old file exists and new file doesn't
await fs.access(oldMemoryPath);
try {
await fs.access(newMemoryPath);
// Both files exist, use new one (no migration needed)
return newMemoryPath;
} catch {
// Old file exists, new file doesn't - migrate
console.error('DETECTED: Found legacy memory.json file, migrating to memory.jsonl for JSONL format compatibility');
await fs.rename(oldMemoryPath, newMemoryPath);
console.error('COMPLETED: Successfully migrated memory.json to memory.jsonl');
return newMemoryPath;
}
} catch {
// Old file doesn't exist, use new path
return newMemoryPath;
}
}

// Initialize memory file path (will be set during startup)
let MEMORY_FILE_PATH: string;

// We are storing our memory using entities, relations, and observations in a graph structure
interface Entity {

@@ -434,6 +463,9 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
});

async function main() {
// Initialize memory file path with backward compatibility
MEMORY_FILE_PATH = await ensureMemoryFilePath();

const transport = new StdioServerTransport();
await server.connect(transport);
console.error("Knowledge Graph MCP Server running on stdio");

@@ -19,7 +19,7 @@
"watch": "tsc --watch"
},
"dependencies": {
"@modelcontextprotocol/sdk": "1.0.1"
"@modelcontextprotocol/sdk": "^1.19.1"
},
"devDependencies": {
"@types/node": "^22",

src/sequentialthinking/__tests__/lib.test.ts (new file, 440 lines)

@@ -0,0 +1,440 @@
import { describe, it, expect, beforeEach, jest } from '@jest/globals';
import { SequentialThinkingServer, ThoughtData } from '../lib.js';

// Mock chalk to avoid ESM issues in Jest
jest.mock('chalk', () => {
const chalkMock = {
yellow: (str: string) => str,
green: (str: string) => str,
blue: (str: string) => str,
};
return {
default: chalkMock,
__esModule: true,
};
});

describe('SequentialThinkingServer', () => {
let server: SequentialThinkingServer;

beforeEach(() => {
// Disable thought logging for tests
process.env.DISABLE_THOUGHT_LOGGING = 'true';
server = new SequentialThinkingServer();
});

describe('processThought - validation', () => {
it('should reject input with missing thought', () => {
const input = {
thoughtNumber: 1,
totalThoughts: 3,
nextThoughtNeeded: true
};

const result = server.processThought(input);
expect(result.isError).toBe(true);
expect(result.content[0].text).toContain('Invalid thought');
});

it('should reject input with non-string thought', () => {
const input = {
thought: 123,
thoughtNumber: 1,
totalThoughts: 3,
nextThoughtNeeded: true
};

const result = server.processThought(input);
expect(result.isError).toBe(true);
expect(result.content[0].text).toContain('Invalid thought');
});

it('should reject input with missing thoughtNumber', () => {
const input = {
thought: 'Test thought',
totalThoughts: 3,
nextThoughtNeeded: true
};

const result = server.processThought(input);
expect(result.isError).toBe(true);
expect(result.content[0].text).toContain('Invalid thoughtNumber');
});

it('should reject input with non-number thoughtNumber', () => {
const input = {
thought: 'Test thought',
thoughtNumber: '1',
totalThoughts: 3,
nextThoughtNeeded: true
};

const result = server.processThought(input);
expect(result.isError).toBe(true);
expect(result.content[0].text).toContain('Invalid thoughtNumber');
});

it('should reject input with missing totalThoughts', () => {
const input = {
thought: 'Test thought',
thoughtNumber: 1,
nextThoughtNeeded: true
};

const result = server.processThought(input);
expect(result.isError).toBe(true);
expect(result.content[0].text).toContain('Invalid totalThoughts');
});

it('should reject input with non-number totalThoughts', () => {
const input = {
thought: 'Test thought',
thoughtNumber: 1,
totalThoughts: '3',
nextThoughtNeeded: true
};

const result = server.processThought(input);
expect(result.isError).toBe(true);
expect(result.content[0].text).toContain('Invalid totalThoughts');
});

it('should reject input with missing nextThoughtNeeded', () => {
const input = {
thought: 'Test thought',
thoughtNumber: 1,
totalThoughts: 3
};

const result = server.processThought(input);
expect(result.isError).toBe(true);
expect(result.content[0].text).toContain('Invalid nextThoughtNeeded');
});

it('should reject input with non-boolean nextThoughtNeeded', () => {
const input = {
thought: 'Test thought',
thoughtNumber: 1,
totalThoughts: 3,
nextThoughtNeeded: 'true'
};

const result = server.processThought(input);
expect(result.isError).toBe(true);
expect(result.content[0].text).toContain('Invalid nextThoughtNeeded');
});
});

describe('processThought - valid inputs', () => {
it('should accept valid basic thought', () => {
const input = {
thought: 'This is my first thought',
thoughtNumber: 1,
totalThoughts: 3,
nextThoughtNeeded: true
};

const result = server.processThought(input);
expect(result.isError).toBeUndefined();

const data = JSON.parse(result.content[0].text);
expect(data.thoughtNumber).toBe(1);
expect(data.totalThoughts).toBe(3);
expect(data.nextThoughtNeeded).toBe(true);
expect(data.thoughtHistoryLength).toBe(1);
});

it('should accept thought with optional fields', () => {
const input = {
thought: 'Revising my earlier idea',
thoughtNumber: 2,
totalThoughts: 3,
nextThoughtNeeded: true,
isRevision: true,
revisesThought: 1,
needsMoreThoughts: false
};

const result = server.processThought(input);
expect(result.isError).toBeUndefined();

const data = JSON.parse(result.content[0].text);
expect(data.thoughtNumber).toBe(2);
expect(data.thoughtHistoryLength).toBe(1);
});

it('should track multiple thoughts in history', () => {
const input1 = {
thought: 'First thought',
thoughtNumber: 1,
totalThoughts: 3,
nextThoughtNeeded: true
};

const input2 = {
thought: 'Second thought',
thoughtNumber: 2,
totalThoughts: 3,
nextThoughtNeeded: true
};

const input3 = {
thought: 'Final thought',
thoughtNumber: 3,
totalThoughts: 3,
nextThoughtNeeded: false
};

server.processThought(input1);
server.processThought(input2);
const result = server.processThought(input3);

const data = JSON.parse(result.content[0].text);
expect(data.thoughtHistoryLength).toBe(3);
expect(data.nextThoughtNeeded).toBe(false);
});

it('should auto-adjust totalThoughts if thoughtNumber exceeds it', () => {
const input = {
thought: 'Thought 5',
thoughtNumber: 5,
totalThoughts: 3,
nextThoughtNeeded: true
};

const result = server.processThought(input);
const data = JSON.parse(result.content[0].text);

expect(data.totalThoughts).toBe(5);
});
});

describe('processThought - branching', () => {
it('should track branches correctly', () => {
const input1 = {
thought: 'Main thought',
thoughtNumber: 1,
totalThoughts: 3,
nextThoughtNeeded: true
};

const input2 = {
thought: 'Branch A thought',
thoughtNumber: 2,
totalThoughts: 3,
nextThoughtNeeded: true,
branchFromThought: 1,
branchId: 'branch-a'
};

const input3 = {
thought: 'Branch B thought',
thoughtNumber: 2,
totalThoughts: 3,
nextThoughtNeeded: false,
branchFromThought: 1,
branchId: 'branch-b'
};

server.processThought(input1);
server.processThought(input2);
const result = server.processThought(input3);

const data = JSON.parse(result.content[0].text);
expect(data.branches).toContain('branch-a');
expect(data.branches).toContain('branch-b');
expect(data.branches.length).toBe(2);
expect(data.thoughtHistoryLength).toBe(3);
});

it('should allow multiple thoughts in same branch', () => {
const input1 = {
thought: 'Branch thought 1',
thoughtNumber: 1,
totalThoughts: 2,
nextThoughtNeeded: true,
branchFromThought: 1,
branchId: 'branch-a'
};

const input2 = {
thought: 'Branch thought 2',
thoughtNumber: 2,
totalThoughts: 2,
nextThoughtNeeded: false,
branchFromThought: 1,
branchId: 'branch-a'
};

server.processThought(input1);
const result = server.processThought(input2);

const data = JSON.parse(result.content[0].text);
expect(data.branches).toContain('branch-a');
expect(data.branches.length).toBe(1);
});
});

describe('processThought - edge cases', () => {
it('should reject empty thought string', () => {
const input = {
thought: '',
thoughtNumber: 1,
totalThoughts: 1,
nextThoughtNeeded: false
};

const result = server.processThought(input);
expect(result.isError).toBe(true);
expect(result.content[0].text).toContain('Invalid thought');
});

it('should handle very long thought strings', () => {
const input = {
thought: 'a'.repeat(10000),
thoughtNumber: 1,
totalThoughts: 1,
nextThoughtNeeded: false
};

const result = server.processThought(input);
expect(result.isError).toBeUndefined();
});

it('should handle thoughtNumber = 1, totalThoughts = 1', () => {
const input = {
thought: 'Only thought',
thoughtNumber: 1,
totalThoughts: 1,
nextThoughtNeeded: false
};

const result = server.processThought(input);
expect(result.isError).toBeUndefined();

const data = JSON.parse(result.content[0].text);
expect(data.thoughtNumber).toBe(1);
expect(data.totalThoughts).toBe(1);
});

it('should handle nextThoughtNeeded = false', () => {
const input = {
thought: 'Final thought',
thoughtNumber: 3,
totalThoughts: 3,
nextThoughtNeeded: false
};

const result = server.processThought(input);
const data = JSON.parse(result.content[0].text);

expect(data.nextThoughtNeeded).toBe(false);
});
});

describe('processThought - response format', () => {
it('should return correct response structure on success', () => {
const input = {
thought: 'Test thought',
thoughtNumber: 1,
totalThoughts: 1,
nextThoughtNeeded: false
};

const result = server.processThought(input);

expect(result).toHaveProperty('content');
expect(Array.isArray(result.content)).toBe(true);
expect(result.content.length).toBe(1);
expect(result.content[0]).toHaveProperty('type', 'text');
expect(result.content[0]).toHaveProperty('text');
});

it('should return correct error structure on failure', () => {
const input = {
thought: 'Test',
thoughtNumber: 1,
totalThoughts: 1
// missing nextThoughtNeeded
};

const result = server.processThought(input);

expect(result).toHaveProperty('isError', true);
expect(result).toHaveProperty('content');
expect(Array.isArray(result.content)).toBe(true);

const errorData = JSON.parse(result.content[0].text);
expect(errorData).toHaveProperty('error');
expect(errorData).toHaveProperty('status', 'failed');
});

it('should return valid JSON in response', () => {
const input = {
thought: 'Test thought',
thoughtNumber: 1,
totalThoughts: 1,
nextThoughtNeeded: false
};

const result = server.processThought(input);

expect(() => JSON.parse(result.content[0].text)).not.toThrow();
});
});

describe('processThought - with logging enabled', () => {
let serverWithLogging: SequentialThinkingServer;

beforeEach(() => {
// Enable thought logging for these tests
delete process.env.DISABLE_THOUGHT_LOGGING;
serverWithLogging = new SequentialThinkingServer();
});

afterEach(() => {
// Reset to disabled for other tests
process.env.DISABLE_THOUGHT_LOGGING = 'true';
});

it('should format and log regular thoughts', () => {
const input = {
thought: 'Test thought with logging',
thoughtNumber: 1,
totalThoughts: 3,
nextThoughtNeeded: true
};

const result = serverWithLogging.processThought(input);
expect(result.isError).toBeUndefined();
});

it('should format and log revision thoughts', () => {
const input = {
thought: 'Revised thought',
thoughtNumber: 2,
totalThoughts: 3,
nextThoughtNeeded: true,
isRevision: true,
revisesThought: 1
};

const result = serverWithLogging.processThought(input);
expect(result.isError).toBeUndefined();
});

it('should format and log branch thoughts', () => {
const input = {
thought: 'Branch thought',
thoughtNumber: 2,
totalThoughts: 3,
nextThoughtNeeded: false,
branchFromThought: 1,
branchId: 'branch-a'
};

const result = serverWithLogging.processThought(input);
expect(result.isError).toBeUndefined();
});
});
});

@@ -7,135 +7,7 @@ import {
ListToolsRequestSchema,
Tool,
} from "@modelcontextprotocol/sdk/types.js";
// Fixed chalk import for ESM
import chalk from 'chalk';

interface ThoughtData {
thought: string;
thoughtNumber: number;
totalThoughts: number;
isRevision?: boolean;
revisesThought?: number;
branchFromThought?: number;
branchId?: string;
needsMoreThoughts?: boolean;
nextThoughtNeeded: boolean;
}

class SequentialThinkingServer {
private thoughtHistory: ThoughtData[] = [];
private branches: Record<string, ThoughtData[]> = {};
private disableThoughtLogging: boolean;

constructor() {
this.disableThoughtLogging = (process.env.DISABLE_THOUGHT_LOGGING || "").toLowerCase() === "true";
}

private validateThoughtData(input: unknown): ThoughtData {
const data = input as Record<string, unknown>;

if (!data.thought || typeof data.thought !== 'string') {
throw new Error('Invalid thought: must be a string');
}
if (!data.thoughtNumber || typeof data.thoughtNumber !== 'number') {
throw new Error('Invalid thoughtNumber: must be a number');
}
if (!data.totalThoughts || typeof data.totalThoughts !== 'number') {
throw new Error('Invalid totalThoughts: must be a number');
}
if (typeof data.nextThoughtNeeded !== 'boolean') {
throw new Error('Invalid nextThoughtNeeded: must be a boolean');
}

return {
thought: data.thought,
thoughtNumber: data.thoughtNumber,
totalThoughts: data.totalThoughts,
nextThoughtNeeded: data.nextThoughtNeeded,
isRevision: data.isRevision as boolean | undefined,
revisesThought: data.revisesThought as number | undefined,
branchFromThought: data.branchFromThought as number | undefined,
branchId: data.branchId as string | undefined,
needsMoreThoughts: data.needsMoreThoughts as boolean | undefined,
};
}

private formatThought(thoughtData: ThoughtData): string {
const { thoughtNumber, totalThoughts, thought, isRevision, revisesThought, branchFromThought, branchId } = thoughtData;

let prefix = '';
let context = '';

if (isRevision) {
prefix = chalk.yellow('🔄 Revision');
context = ` (revising thought ${revisesThought})`;
} else if (branchFromThought) {
prefix = chalk.green('🌿 Branch');
context = ` (from thought ${branchFromThought}, ID: ${branchId})`;
} else {
prefix = chalk.blue('💭 Thought');
context = '';
}

const header = `${prefix} ${thoughtNumber}/${totalThoughts}${context}`;
const border = '─'.repeat(Math.max(header.length, thought.length) + 4);

return `
┌${border}┐
│ ${header} │
├${border}┤
│ ${thought.padEnd(border.length - 2)} │
└${border}┘`;
}

public processThought(input: unknown): { content: Array<{ type: string; text: string }>; isError?: boolean } {
try {
const validatedInput = this.validateThoughtData(input);

if (validatedInput.thoughtNumber > validatedInput.totalThoughts) {
validatedInput.totalThoughts = validatedInput.thoughtNumber;
}

this.thoughtHistory.push(validatedInput);

if (validatedInput.branchFromThought && validatedInput.branchId) {
if (!this.branches[validatedInput.branchId]) {
this.branches[validatedInput.branchId] = [];
}
this.branches[validatedInput.branchId].push(validatedInput);
}

if (!this.disableThoughtLogging) {
const formattedThought = this.formatThought(validatedInput);
console.error(formattedThought);
}

return {
content: [{
type: "text",
text: JSON.stringify({
thoughtNumber: validatedInput.thoughtNumber,
totalThoughts: validatedInput.totalThoughts,
nextThoughtNeeded: validatedInput.nextThoughtNeeded,
branches: Object.keys(this.branches),
thoughtHistoryLength: this.thoughtHistory.length
}, null, 2)
}]
};
} catch (error) {
return {
content: [{
type: "text",
text: JSON.stringify({
error: error instanceof Error ? error.message : String(error),
status: 'failed'
}, null, 2)
}],
isError: true
};
}
}
}
import { SequentialThinkingServer } from './lib.js';

const SEQUENTIAL_THINKING_TOOL: Tool = {
name: "sequentialthinking",

src/sequentialthinking/jest.config.cjs (new file, 26 lines)

@@ -0,0 +1,26 @@
/** @type {import('ts-jest').JestConfigWithTsJest} */
module.exports = {
preset: 'ts-jest',
testEnvironment: 'node',
extensionsToTreatAsEsm: ['.ts'],
moduleNameMapper: {
'^(\\.{1,2}/.*)\\.js$': '$1',
},
transform: {
'^.+\\.tsx?$': [
'ts-jest',
{
useESM: true,
},
],
},
transformIgnorePatterns: [
'node_modules/(?!(chalk)/)',
],
testMatch: ['**/__tests__/**/*.test.ts'],
collectCoverageFrom: [
'**/*.ts',
'!**/__tests__/**',
'!**/dist/**',
],
}

src/sequentialthinking/lib.ts (new file, 128 lines)

@@ -0,0 +1,128 @@
import chalk from 'chalk';

export interface ThoughtData {
thought: string;
thoughtNumber: number;
totalThoughts: number;
isRevision?: boolean;
revisesThought?: number;
branchFromThought?: number;
branchId?: string;
needsMoreThoughts?: boolean;
nextThoughtNeeded: boolean;
}

export class SequentialThinkingServer {
private thoughtHistory: ThoughtData[] = [];
private branches: Record<string, ThoughtData[]> = {};
private disableThoughtLogging: boolean;

constructor() {
this.disableThoughtLogging = (process.env.DISABLE_THOUGHT_LOGGING || "").toLowerCase() === "true";
}

private validateThoughtData(input: unknown): ThoughtData {
const data = input as Record<string, unknown>;

if (!data.thought || typeof data.thought !== 'string') {
throw new Error('Invalid thought: must be a string');
}
if (!data.thoughtNumber || typeof data.thoughtNumber !== 'number') {
throw new Error('Invalid thoughtNumber: must be a number');
}
if (!data.totalThoughts || typeof data.totalThoughts !== 'number') {
throw new Error('Invalid totalThoughts: must be a number');
}
if (typeof data.nextThoughtNeeded !== 'boolean') {
throw new Error('Invalid nextThoughtNeeded: must be a boolean');
}

return {
thought: data.thought,
thoughtNumber: data.thoughtNumber,
totalThoughts: data.totalThoughts,
nextThoughtNeeded: data.nextThoughtNeeded,
isRevision: data.isRevision as boolean | undefined,
revisesThought: data.revisesThought as number | undefined,
branchFromThought: data.branchFromThought as number | undefined,
branchId: data.branchId as string | undefined,
needsMoreThoughts: data.needsMoreThoughts as boolean | undefined,
};
}

private formatThought(thoughtData: ThoughtData): string {
const { thoughtNumber, totalThoughts, thought, isRevision, revisesThought, branchFromThought, branchId } = thoughtData;

let prefix = '';
let context = '';

if (isRevision) {
prefix = chalk.yellow('🔄 Revision');
context = ` (revising thought ${revisesThought})`;
} else if (branchFromThought) {
prefix = chalk.green('🌿 Branch');
context = ` (from thought ${branchFromThought}, ID: ${branchId})`;
} else {
prefix = chalk.blue('💭 Thought');
context = '';
}

const header = `${prefix} ${thoughtNumber}/${totalThoughts}${context}`;
const border = '─'.repeat(Math.max(header.length, thought.length) + 4);

return `
┌${border}┐
│ ${header} │
├${border}┤
│ ${thought.padEnd(border.length - 2)} │
└${border}┘`;
}

public processThought(input: unknown): { content: Array<{ type: string; text: string }>; isError?: boolean } {
try {
const validatedInput = this.validateThoughtData(input);

if (validatedInput.thoughtNumber > validatedInput.totalThoughts) {
validatedInput.totalThoughts = validatedInput.thoughtNumber;
}

this.thoughtHistory.push(validatedInput);

if (validatedInput.branchFromThought && validatedInput.branchId) {
if (!this.branches[validatedInput.branchId]) {
this.branches[validatedInput.branchId] = [];
}
this.branches[validatedInput.branchId].push(validatedInput);
}

if (!this.disableThoughtLogging) {
const formattedThought = this.formatThought(validatedInput);
console.error(formattedThought);
}

return {
content: [{
type: "text",
text: JSON.stringify({
thoughtNumber: validatedInput.thoughtNumber,
totalThoughts: validatedInput.totalThoughts,
nextThoughtNeeded: validatedInput.nextThoughtNeeded,
branches: Object.keys(this.branches),
thoughtHistoryLength: this.thoughtHistory.length
}, null, 2)
}]
};
} catch (error) {
return {
content: [{
type: "text",
text: JSON.stringify({
error: error instanceof Error ? error.message : String(error),
status: 'failed'
}, null, 2)
}],
isError: true
};
}
}
}
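
For orientation (not part of the commit), a small usage sketch of the extracted lib.ts API above, mirroring what the new tests exercise:

// Illustrative usage of SequentialThinkingServer from lib.ts.
import { SequentialThinkingServer } from './lib.js';

const server = new SequentialThinkingServer();

// processThought validates the input and returns MCP-style text content
// wrapping a JSON summary of the current thinking state.
const result = server.processThought({
  thought: 'Outline the problem before proposing a fix',
  thoughtNumber: 1,
  totalThoughts: 2,
  nextThoughtNeeded: true,
});

if (!result.isError) {
  const summary = JSON.parse(result.content[0].text);
  console.error(summary.thoughtNumber, summary.totalThoughts, summary.thoughtHistoryLength);
}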

@@ -16,17 +16,23 @@
"scripts": {
"build": "tsc && shx chmod +x dist/*.js",
"prepare": "npm run build",
"watch": "tsc --watch"
"watch": "tsc --watch",
"test": "jest --config=jest.config.cjs --coverage"
},
"dependencies": {
"@modelcontextprotocol/sdk": "0.5.0",
"@modelcontextprotocol/sdk": "^1.19.1",
"chalk": "^5.3.0",
"yargs": "^17.7.2"
},
"devDependencies": {
"@jest/globals": "^29.7.0",
"@types/jest": "^29.5.14",
"@types/node": "^22",
"@types/yargs": "^17.0.32",
"jest": "^29.7.0",
"shx": "^0.3.4",
"ts-jest": "^29.1.1",
"ts-node": "^10.9.2",
"typescript": "^5.3.3"
}
}