Mirror of https://github.com/modelcontextprotocol/servers.git, synced 2026-02-19 11:54:58 -05:00

Commit: Updated Brave
@@ -18,4 +18,4 @@ mcp-server-postgres
# ...
```

Each server will operate differently. See the READMEs within [src](src/) for more information.
package-lock.json (generated, 78 changed lines)
@@ -17,6 +17,7 @@
|
||||
"@modelcontextprotocol/server-everything": "*",
|
||||
"@modelcontextprotocol/server-filesystem": "*",
|
||||
"@modelcontextprotocol/server-gdrive": "*",
|
||||
"@modelcontextprotocol/server-memory": "*",
|
||||
"@modelcontextprotocol/server-postgres": "*",
|
||||
"@modelcontextprotocol/server-puppeteer": "*",
|
||||
"@modelcontextprotocol/server-slack": "*"
|
||||
@@ -183,6 +184,14 @@
|
||||
"resolved": "src/gdrive",
|
||||
"link": true
|
||||
},
|
||||
"node_modules/@modelcontextprotocol/server-google-maps": {
|
||||
"resolved": "src/google-maps",
|
||||
"link": true
|
||||
},
|
||||
"node_modules/@modelcontextprotocol/server-memory": {
|
||||
"resolved": "src/memory",
|
||||
"link": true
|
||||
},
|
||||
"node_modules/@modelcontextprotocol/server-postgres": {
|
||||
"resolved": "src/postgres",
|
||||
"link": true
|
||||
@@ -323,11 +332,20 @@
|
||||
"version": "22.9.0",
|
||||
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.9.0.tgz",
|
||||
"integrity": "sha512-vuyHg81vvWA1Z1ELfvLko2c8f34gyA0zaic0+Rllc5lbCnbSyuvb2Oxpm6TAUAC/2xZN3QGqxBNggD1nNR2AfQ==",
|
||||
"devOptional": true,
|
||||
"dependencies": {
|
||||
"undici-types": "~6.19.8"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/node-fetch": {
|
||||
"version": "2.6.12",
|
||||
"resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.12.tgz",
|
||||
"integrity": "sha512-8nneRWKCg3rMtF69nLQJnOYUcbafYeFSjqkw3jCRLsqkWFlHaoQrr5mXmofFGOx3DKn7UfmBMyov8ySvLRVldA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/node": "*",
|
||||
"form-data": "^4.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@types/pg": {
|
||||
"version": "8.11.10",
|
||||
"resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.11.10.tgz",
|
||||
@@ -3322,8 +3340,7 @@
|
||||
"node_modules/undici-types": {
|
||||
"version": "6.19.8",
|
||||
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz",
|
||||
"integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==",
|
||||
"devOptional": true
|
||||
"integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw=="
|
||||
},
|
||||
"node_modules/universalify": {
|
||||
"version": "2.0.1",
|
||||
@@ -3829,12 +3846,54 @@
|
||||
"typescript": "^5.6.2"
|
||||
}
|
||||
},
|
||||
"src/google-maps": {
|
||||
"name": "@modelcontextprotocol/server-google-maps",
|
||||
"version": "0.1.0",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "0.6.0",
|
||||
"@types/node-fetch": "^2.6.12"
|
||||
},
|
||||
"bin": {
|
||||
"mcp-server-google-maps": "dist/index.js"
|
||||
},
|
||||
"devDependencies": {
|
||||
"shx": "^0.3.4",
|
||||
"typescript": "^5.6.2"
|
||||
}
|
||||
},
|
||||
"src/google-maps/node_modules/@modelcontextprotocol/sdk": {
|
||||
"version": "0.6.0",
|
||||
"resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-0.6.0.tgz",
|
||||
"integrity": "sha512-9rsDudGhDtMbvxohPoMMyAUOmEzQsOK+XFchh6gZGqo8sx9sBuZQs+CUttXqa8RZXKDaJRCN2tUtgGof7jRkkw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"content-type": "^1.0.5",
|
||||
"raw-body": "^3.0.0",
|
||||
"zod": "^3.23.8"
|
||||
}
|
||||
},
|
||||
"src/memory": {
|
||||
"name": "@modelcontextprotocol/server-memory",
|
||||
"version": "0.1.0",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "0.5.0"
|
||||
},
|
||||
"bin": {
|
||||
"mcp-server-memory": "dist/index.js"
|
||||
},
|
||||
"devDependencies": {
|
||||
"shx": "^0.3.4",
|
||||
"typescript": "^5.6.2"
|
||||
}
|
||||
},
|
||||
"src/postgres": {
|
||||
"name": "@modelcontextprotocol/server-postgres",
|
||||
"version": "0.1.0",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@modelcontextprotocol/sdk": "0.5.0",
|
||||
"@modelcontextprotocol/sdk": "0.6.0",
|
||||
"pg": "^8.13.0"
|
||||
},
|
||||
"bin": {
|
||||
@@ -3846,6 +3905,17 @@
|
||||
"typescript": "^5.6.2"
|
||||
}
|
||||
},
|
||||
"src/postgres/node_modules/@modelcontextprotocol/sdk": {
|
||||
"version": "0.6.0",
|
||||
"resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-0.6.0.tgz",
|
||||
"integrity": "sha512-9rsDudGhDtMbvxohPoMMyAUOmEzQsOK+XFchh6gZGqo8sx9sBuZQs+CUttXqa8RZXKDaJRCN2tUtgGof7jRkkw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"content-type": "^1.0.5",
|
||||
"raw-body": "^3.0.0",
|
||||
"zod": "^3.23.8"
|
||||
}
|
||||
},
|
||||
"src/puppeteer": {
|
||||
"name": "@modelcontextprotocol/server-puppeteer",
|
||||
"version": "0.1.0",
|
||||
|
||||
package.json
@@ -24,6 +24,7 @@
    "@modelcontextprotocol/server-postgres": "*",
    "@modelcontextprotocol/server-puppeteer": "*",
    "@modelcontextprotocol/server-slack": "*",
    "@modelcontextprotocol/server-brave-search": "*"
    "@modelcontextprotocol/server-brave-search": "*",
    "@modelcontextprotocol/server-memory": "*"
  }
}
src/gdrive/README.md
@@ -2,15 +2,37 @@

This MCP server integrates with Google Drive to allow listing, reading, and searching over files.

## Components

### Tools

- **search**
  - Search for files in Google Drive
  - Input: `query` (string): Search query
  - Returns file names and MIME types of matching files

### Resources

The server provides access to Google Drive files:

- **Files** (`gdrive:///<file_id>`)
  - Supports all file types
  - Google Workspace files are automatically exported:
    - Docs → Markdown
    - Sheets → CSV
    - Presentations → Plain text
    - Drawings → PNG
  - Other files are provided in their native format
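Clients drive both of these components through standard MCP requests. As a rough sketch (the file ID below is a placeholder, not a real Drive ID), a `tools/call` for `search` followed by a `resources/read` for one of the returned files would look like this:

```typescript
// Hypothetical example messages; the file ID is a placeholder.
const searchRequest = {
  jsonrpc: "2.0",
  id: 1,
  method: "tools/call",
  params: {
    name: "search",
    arguments: { query: "quarterly report" },
  },
};

// Reading one of the matching files via the gdrive:/// URI scheme.
const readRequest = {
  jsonrpc: "2.0",
  id: 2,
  method: "resources/read",
  params: { uri: "gdrive:///1A2b3C4d5E6f" },
};
```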
## Getting started

1. Create a new Google Cloud project
2. Enable the Google Drive API
3. Configure an OAuth consent screen ("internal" is fine for testing)
1. [Create a new Google Cloud project](https://console.cloud.google.com/projectcreate)
2. [Enable the Google Drive API](https://console.cloud.google.com/workspace-api/products)
3. [Configure an OAuth consent screen](https://console.cloud.google.com/apis/credentials/consent) ("internal" is fine for testing)
4. Add OAuth scope `https://www.googleapis.com/auth/drive.readonly`
5. Create an OAuth Client ID for application type "Desktop App"
5. [Create an OAuth Client ID](https://console.cloud.google.com/apis/credentials/oauthclient) for application type "Desktop App"
6. Download the JSON file of your client's OAuth keys
7. Rename the key file to `gcp-oauth.keys.json` and place into the root of this repo
7. Rename the key file to `gcp-oauth.keys.json` and place into the root of this repo (i.e. `servers/gcp-oauth.keys.json`)

Make sure to build the server with either `npm run build` or `npm run watch`.

@@ -18,16 +40,18 @@ Make sure to build the server with either `npm run build` or `npm run watch`.

To authenticate and save credentials:

1. Run the server with the `auth` argument: `node build/gdrive auth`
1. Run the server with the `auth` argument: `node ./dist auth`
2. This will open an authentication flow in your system browser
3. Complete the authentication process
4. Credentials will be saved for future use
4. Credentials will be saved in the root of this repo (i.e. `servers/.gdrive-server-credentials.json`)

### Running the server
### Usage with Desktop App

After authenticating:
To integrate this server with the desktop app, add the following to your app's server configuration:

1. Run the server normally: `node build/gdrive`
2. The server will load the saved credentials and start

Note: If you haven't authenticated yet, the server will prompt you to run with the `auth` argument first.
```json
{
  "mcp-server-gdrive": {
    "command": "mcp-server-gdrive"
  }
}
```
src/gdrive/index.ts
@@ -44,7 +44,7 @@ server.setRequestHandler(ListResourcesRequestSchema, async (request) => {

  return {
    resources: files.map((file) => ({
      uri: `gdrive://${file.id}`,
      uri: `gdrive:///${file.id}`,
      mimeType: file.mimeType,
      name: file.name,
    })),
@@ -53,7 +53,7 @@ server.setRequestHandler(ListResourcesRequestSchema, async (request) => {
});

server.setRequestHandler(ReadResourceRequestSchema, async (request) => {
  const fileId = request.params.uri.replace("gdrive://", "");
  const fileId = request.params.uri.replace("gdrive:///", "");

  // First get file metadata to check mime type
  const file = await drive.files.get({
@@ -149,14 +149,16 @@ server.setRequestHandler(ListToolsRequestSchema, async () => {

server.setRequestHandler(CallToolRequestSchema, async (request) => {
  if (request.params.name === "search") {
    const query = request.params.arguments?.query as string;

    const userQuery = request.params.arguments?.query as string;
    const escapedQuery = userQuery.replace(/\\/g, "\\\\").replace(/'/g, "\\'");
    const formattedQuery = `fullText contains '${escapedQuery}'`;

    const res = await drive.files.list({
      q: query,
      q: formattedQuery,
      pageSize: 10,
      fields: "files(id, name, mimeType, modifiedTime, size)",
    });

    const fileList = res.data.files
      ?.map((file: any) => `${file.name} (${file.mimeType})`)
      .join("\n");
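The new escaping step keeps a user-supplied query from breaking out of the single-quoted `fullText contains '...'` expression sent to the Drive API. A quick sketch of the transformation on an illustrative input:

```typescript
// Illustrative only: how a quote-containing query is escaped before being
// interpolated into the Drive search expression.
const userQuery = "Bob's report";
const escapedQuery = userQuery.replace(/\\/g, "\\\\").replace(/'/g, "\\'");
const formattedQuery = `fullText contains '${escapedQuery}'`;
// Resulting characters: fullText contains 'Bob\'s report'
```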
@@ -175,7 +177,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {

const credentialsPath = path.join(
  path.dirname(new URL(import.meta.url).pathname),
  "../../.gdrive-server-credentials.json",
  "../../../.gdrive-server-credentials.json",
);
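The extra `../` presumably accounts for the compiled file now living one directory deeper under `dist/` (the README above switches the run command to `node ./dist auth`). A minimal sketch of the resolution, assuming the build output sits at `servers/src/gdrive/dist/index.js` (an assumed layout, not taken verbatim from the repo):

```typescript
import path from "path";

// Assumed layout:
//   servers/src/gdrive/dist/index.js          <- import.meta.url resolves here
//   servers/.gdrive-server-credentials.json   <- credentials at the repo root
const moduleDir = "/home/user/servers/src/gdrive/dist";
const credentialsPath = path.join(moduleDir, "../../../.gdrive-server-credentials.json");
// -> "/home/user/servers/.gdrive-server-credentials.json"
```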
async function authenticateAndSaveCredentials() {
@@ -183,7 +185,7 @@ async function authenticateAndSaveCredentials() {
  const auth = await authenticate({
    keyfilePath: path.join(
      path.dirname(new URL(import.meta.url).pathname),
      "../../gcp-oauth.keys.json",
      "../../../gcp-oauth.keys.json",
    ),
    scopes: ["https://www.googleapis.com/auth/drive.readonly"],
  });
src/git/.python-version
@@ -1 +1 @@
3.11
3.10
src/git/README.md
@@ -1,9 +1,13 @@
# mcp-git
# mcp-git: A git MCP server

A Model Context Protocol server for Git repository interaction and automation. This server provides tools to read, search, and manipulate Git repositories via Large Language Models.

Please note that mcp-git is currently in early development. The functionality and available tools are subject to change and expansion as we continue to develop and improve the server.

## Available Tools

The current list of tools includes:

- `git_read_file`: Read contents of a file at a specific Git reference
- `git_list_files`: List all files in a repository or subdirectory
- `git_file_history`: Get commit history for a specific file
@@ -12,10 +16,15 @@ A Model Context Protocol server for Git repository interaction and automation. T
- `git_get_diff`: View diffs between Git references
- `git_get_repo_structure`: View repository file structure
- `git_list_repos`: List available Git repositories
- `git_log`: Retrieve commit log for the repository
- `git_list_branches`: List all branches in the repository
- `git_list_tags`: List all tags in the repository

This list is expected to grow as we add more functionality to the server. We welcome contributions from the community to expand and enhance the available tools.
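As an illustration of how these tools are invoked over MCP, here is a rough sketch of the `tools/call` request a client would send for the new `git_log` tool. The argument names follow the `GitLogInput` model added later in this commit; the repository path is a placeholder.

```typescript
// Hypothetical request shape; /path/to/repo is a placeholder.
const gitLogRequest = {
  jsonrpc: "2.0",
  id: 7,
  method: "tools/call",
  params: {
    name: "git_log",
    arguments: { repo_path: "/path/to/repo", max_count: 10, ref: "HEAD" },
  },
};

// Per the server changes in this commit, the reply is text content wrapping a
// JSON payload, e.g.:
// "Here is some JSON that contains the git log: {\"log\": [ ... ]}"
```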
## Installation

### Using uv
### Using uv (recommended)

When using [`uv`](https://docs.astral.sh/uv/) no specific installation is needed. We will
use [`uvx`](https://docs.astral.sh/uv/guides/tools/) to directly run *mcp-git*.
@@ -35,36 +44,6 @@ python -m mcp_git
```

## Configuration
### Configure for Zed

Add to your Zed settings.json:

```json
"experimental.context_servers": {
  "servers": [
    {
      "id": "mcp-git",
      "executable": "uvx",
      "args": ["mcp-git"]
    }
  ]
},
```

Alternatively, if using pip installation:

```json
"experimental.context_servers": {
  "servers": [
    {
      "id": "mcp-git",
      "executable": "python",
      "args": ["-m", "mcp_git"]
    }
  ]
},
```

### Configure for Claude.app

Add to your Claude settings:
@@ -73,7 +52,7 @@ Add to your Claude settings:
"mcpServers": {
  "mcp-git": {
    "command": "uvx",
    "args": ["mcp-git"]
    "args": ["mcp-git", "--repository", "path/to/git/repo"]
  }
}
```
@@ -84,14 +63,41 @@ Alternatively, if using pip installation:
"mcpServers": {
  "mcp-git": {
    "command": "python",
    "args": ["-m", "mcp_git"]
    "args": ["-m", "mcp_git", "--repository", "path/to/git/repo"]
  }
}
```

### Configure for Zed

Add to your Zed settings.json:

```json
"context_servers": [
  "mcp-git": {
    "command": "uvx",
    "args": ["mcp-git"]
  }
],
```

Alternatively, if using pip installation:

```json
"context_servers": {
  "mcp-git": {
    "command": "python",
    "args": ["-m", "mcp-git"]
  }
},
```

## Contributing

For examples of other MCP servers and implementation patterns, see:
https://github.com/modelcontextprotocol/example-servers/
We encourage contributions to help expand and improve mcp-git. Whether you want to add new tools, enhance existing functionality, or improve documentation, your input is valuable.

Pull requests welcome!
For examples of other MCP servers and implementation patterns, see:
https://github.com/modelcontextprotocol/servers

Pull requests are welcome! Feel free to contribute new ideas, bug fixes, or enhancements to make mcp-git even more powerful and useful.
src/git/pyproject.toml
@@ -1,9 +1,9 @@
[project]
name = "mcp-git"
version = "0.1.0"
version = "0.2.0"
description = "A Model Context Protocol server providing tools to read, search, and manipulate Git repositories programmatically via LLMs"
readme = "README.md"
requires-python = ">=3.11"
requires-python = ">=3.10"
authors = [{ name = "Anthropic, PBC." }]
maintainers = [{ name = "David Soria Parra", email = "davidsp@anthropic.com" }]
keywords = ["git", "mcp", "llm", "automation"]
@@ -13,7 +13,7 @@ classifiers = [
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.10",
]
dependencies = [
    "click>=8.1.7",
@@ -30,5 +30,4 @@ requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.uv]
index-strategy = "unsafe-best-match"
dev-dependencies = ["ruff>=0.7.3"]
@@ -1,4 +1,6 @@
|
||||
import logging
|
||||
import json
|
||||
import sys
|
||||
import click
|
||||
import anyio
|
||||
import anyio.lowlevel
|
||||
@@ -7,8 +9,16 @@ from git.types import Sequence
|
||||
from mcp.server import Server
|
||||
from mcp.server.session import ServerSession
|
||||
from mcp.server.stdio import stdio_server
|
||||
from mcp.types import TextContent, Tool, EmbeddedResource, ImageContent, ListRootsResult
|
||||
from enum import StrEnum
|
||||
from mcp.types import (
|
||||
ClientCapabilities,
|
||||
TextContent,
|
||||
Tool,
|
||||
EmbeddedResource,
|
||||
ImageContent,
|
||||
ListRootsResult,
|
||||
RootsCapability,
|
||||
)
|
||||
from enum import Enum
|
||||
import git
|
||||
from git.objects import Blob, Tree
|
||||
|
||||
@@ -66,7 +76,21 @@ class ListReposInput(BaseModel):
|
||||
pass
|
||||
|
||||
|
||||
class GitTools(StrEnum):
|
||||
class GitLogInput(BaseModel):
|
||||
repo_path: str
|
||||
max_count: int = 10
|
||||
ref: str = "HEAD"
|
||||
|
||||
|
||||
class ListBranchesInput(BaseModel):
|
||||
repo_path: str
|
||||
|
||||
|
||||
class ListTagsInput(BaseModel):
|
||||
repo_path: str
|
||||
|
||||
|
||||
class GitTools(str, Enum):
|
||||
READ_FILE = "git_read_file"
|
||||
LIST_FILES = "git_list_files"
|
||||
FILE_HISTORY = "git_file_history"
|
||||
@@ -75,12 +99,19 @@ class GitTools(StrEnum):
|
||||
GET_DIFF = "git_get_diff"
|
||||
GET_REPO_STRUCTURE = "git_get_repo_structure"
|
||||
LIST_REPOS = "git_list_repos"
|
||||
GIT_LOG = "git_log"
|
||||
LIST_BRANCHES = "git_list_branches"
|
||||
LIST_TAGS = "git_list_tags"
|
||||
|
||||
|
||||
def git_read_file(repo: git.Repo, file_path: str, ref: str = "HEAD") -> str:
|
||||
tree = repo.commit(ref).tree
|
||||
blob = tree / file_path
|
||||
return blob.data_stream.read().decode("utf-8", errors="replace")
|
||||
try:
|
||||
return blob.data_stream.read().decode("utf-8", errors="replace")
|
||||
except UnicodeDecodeError:
|
||||
# If it's a binary file, return a message indicating that
|
||||
return "[Binary file content not shown]"
|
||||
|
||||
|
||||
def git_list_files(repo: git.Repo, path: str = "", ref: str = "HEAD") -> Sequence[str]:
|
||||
@@ -122,10 +153,14 @@ def git_search_code(
|
||||
tree = repo.commit(ref).tree
|
||||
for blob in tree.traverse():
|
||||
if isinstance(blob, Blob) and Path(blob.path).match(file_pattern):
|
||||
content = blob.data_stream.read().decode("utf-8")
|
||||
for i, line in enumerate(content.splitlines()):
|
||||
if query in line:
|
||||
results.append(f"{blob.path}:{i+1}: {line}")
|
||||
try:
|
||||
content = blob.data_stream.read().decode("utf-8", errors="replace")
|
||||
for i, line in enumerate(content.splitlines()):
|
||||
if query in line:
|
||||
results.append(f"{blob.path}:{i+1}: {line}")
|
||||
except UnicodeDecodeError:
|
||||
# Skip binary files
|
||||
continue
|
||||
return results
|
||||
|
||||
|
||||
@@ -153,14 +188,35 @@ def git_get_repo_structure(repo: git.Repo, ref: str = "HEAD") -> str:
|
||||
return str(structure)
|
||||
|
||||
|
||||
def git_log(repo: git.Repo, max_count: int = 10, ref: str = "HEAD") -> list[str]:
|
||||
commits = list(repo.iter_commits(ref, max_count=max_count))
|
||||
log = []
|
||||
for commit in commits:
|
||||
log.append(
|
||||
f"Commit: {commit.hexsha}\n"
|
||||
f"Author: {commit.author}\n"
|
||||
f"Date: {commit.authored_datetime}\n"
|
||||
f"Message: {commit.message}\n"
|
||||
)
|
||||
return log
|
||||
|
||||
|
||||
def git_list_branches(repo: git.Repo) -> list[str]:
|
||||
return [str(branch) for branch in repo.branches]
|
||||
|
||||
|
||||
def git_list_tags(repo: git.Repo) -> list[str]:
|
||||
return [str(tag) for tag in repo.tags]
|
||||
|
||||
|
||||
async def serve(repository: Path | None) -> None:
|
||||
# Set up logging
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
if repository is not None:
|
||||
try:
|
||||
git.Repo(repository)
|
||||
logger.info(f"Using repository at {repository}")
|
||||
except git.InvalidGitRepositoryError:
|
||||
logger.error(f"{repository} is not a valid Git repository")
|
||||
return
|
||||
@@ -234,6 +290,28 @@ async def serve(repository: Path | None) -> None:
|
||||
"accessible to the current session.",
|
||||
inputSchema=ListReposInput.schema(),
|
||||
),
|
||||
Tool(
|
||||
name=GitTools.GIT_LOG,
|
||||
description="Retrieves the commit log for the repository, showing the "
|
||||
"history of commits including commit hashes, authors, dates, and "
|
||||
"commit messages. This tool provides an overview of the project's "
|
||||
"development history.",
|
||||
inputSchema=GitLogInput.schema(),
|
||||
),
|
||||
Tool(
|
||||
name=GitTools.LIST_BRANCHES,
|
||||
description="Lists all branches in the Git repository. This tool "
|
||||
"provides an overview of the different lines of development in the "
|
||||
"project.",
|
||||
inputSchema=ListBranchesInput.schema(),
|
||||
),
|
||||
Tool(
|
||||
name=GitTools.LIST_TAGS,
|
||||
description="Lists all tags in the Git repository. This tool "
|
||||
"provides an overview of the tagged versions or releases in the "
|
||||
"project.",
|
||||
inputSchema=ListTagsInput.schema(),
|
||||
),
|
||||
]
|
||||
|
||||
async def list_repos() -> Sequence[str]:
|
||||
@@ -243,7 +321,14 @@ async def serve(repository: Path | None) -> None:
|
||||
"server.request_context.session must be a ServerSession"
|
||||
)
|
||||
|
||||
roots_result: ListRootsResult = await server.request_context.session.list_roots()
|
||||
if not server.request_context.session.check_client_capability(
|
||||
ClientCapabilities(roots=RootsCapability())
|
||||
):
|
||||
return []
|
||||
|
||||
roots_result: ListRootsResult = (
|
||||
await server.request_context.session.list_roots()
|
||||
)
|
||||
logger.debug(f"Roots result: {roots_result}")
|
||||
repo_paths = []
|
||||
for root in roots_result.roots:
|
||||
@@ -269,71 +354,123 @@ async def serve(repository: Path | None) -> None:
|
||||
) -> list[TextContent | ImageContent | EmbeddedResource]:
|
||||
if name == GitTools.LIST_REPOS:
|
||||
result = await list_repos()
|
||||
return [TextContent(type="text", text=str(r)) for r in result]
|
||||
logging.debug(f"repos={result}")
|
||||
return [
|
||||
TextContent(
|
||||
type="text",
|
||||
text=f"Here is some JSON that contains a list of git repositories: {json.dumps(result)}",
|
||||
)
|
||||
]
|
||||
|
||||
repo_path = Path(arguments["repo_path"])
|
||||
repo = git.Repo(repo_path)
|
||||
|
||||
match name:
|
||||
case GitTools.READ_FILE:
|
||||
content = git_read_file(
|
||||
repo, arguments["file_path"], arguments.get("ref", "HEAD")
|
||||
)
|
||||
return [
|
||||
TextContent(
|
||||
type="text",
|
||||
text=git_read_file(
|
||||
repo, arguments["file_path"], arguments.get("ref", "HEAD")
|
||||
)
|
||||
text=f"Here is some JSON that contains the contents of a file: {json.dumps({'content': content})}",
|
||||
)
|
||||
]
|
||||
|
||||
case GitTools.LIST_FILES:
|
||||
files = git_list_files(
|
||||
repo, arguments.get("path", ""), arguments.get("ref", "HEAD")
|
||||
)
|
||||
return [
|
||||
TextContent(type="text", text=str(f))
|
||||
for f in git_list_files(
|
||||
repo, arguments.get("path", ""), arguments.get("ref", "HEAD")
|
||||
TextContent(
|
||||
type="text",
|
||||
text=f"Here is some JSON that contains a list of files: {json.dumps({'files': list(files)})}",
|
||||
)
|
||||
]
|
||||
|
||||
case GitTools.FILE_HISTORY:
|
||||
history = git_file_history(
|
||||
repo, arguments["file_path"], arguments.get("max_entries", 10)
|
||||
)
|
||||
return [
|
||||
TextContent(type="text", text=entry)
|
||||
for entry in git_file_history(
|
||||
repo, arguments["file_path"], arguments.get("max_entries", 10)
|
||||
TextContent(
|
||||
type="text",
|
||||
text=f"Here is some JSON that contains a file's history: {json.dumps({'history': list(history)})}",
|
||||
)
|
||||
]
|
||||
|
||||
case GitTools.COMMIT:
|
||||
result = git_commit(repo, arguments["message"], arguments.get("files"))
|
||||
return [TextContent(type="text", text=result)]
|
||||
return [
|
||||
TextContent(
|
||||
type="text",
|
||||
text=f"Here is some JSON that contains the commit result: {json.dumps({'result': result})}",
|
||||
)
|
||||
]
|
||||
|
||||
case GitTools.SEARCH_CODE:
|
||||
results = git_search_code(
|
||||
repo,
|
||||
arguments["query"],
|
||||
arguments.get("file_pattern", "*"),
|
||||
arguments.get("ref", "HEAD"),
|
||||
)
|
||||
return [
|
||||
TextContent(type="text", text=result)
|
||||
for result in git_search_code(
|
||||
repo,
|
||||
arguments["query"],
|
||||
arguments.get("file_pattern", "*"),
|
||||
arguments.get("ref", "HEAD"),
|
||||
TextContent(
|
||||
type="text",
|
||||
text=f"Here is some JSON that contains code search matches: {json.dumps({'matches': results})}",
|
||||
)
|
||||
]
|
||||
|
||||
case GitTools.GET_DIFF:
|
||||
diff = git_get_diff(
|
||||
repo,
|
||||
arguments["ref1"],
|
||||
arguments["ref2"],
|
||||
arguments.get("file_path"),
|
||||
)
|
||||
return [
|
||||
TextContent(
|
||||
type="text",
|
||||
text=git_get_diff(
|
||||
repo,
|
||||
arguments["ref1"],
|
||||
arguments["ref2"],
|
||||
arguments.get("file_path"),
|
||||
)
|
||||
text=f"Here is some JSON that contains a diff: {json.dumps({'diff': diff})}",
|
||||
)
|
||||
]
|
||||
|
||||
case GitTools.GET_REPO_STRUCTURE:
|
||||
structure = git_get_repo_structure(repo, arguments.get("ref", "HEAD"))
|
||||
return [
|
||||
TextContent(
|
||||
type="text",
|
||||
text=git_get_repo_structure(repo, arguments.get("ref", "HEAD"))
|
||||
text=f"Here is some JSON that contains the repository structure: {json.dumps({'structure': structure})}",
|
||||
)
|
||||
]
|
||||
|
||||
case GitTools.GIT_LOG:
|
||||
log = git_log(
|
||||
repo, arguments.get("max_count", 10), arguments.get("ref", "HEAD")
|
||||
)
|
||||
return [
|
||||
TextContent(
|
||||
type="text",
|
||||
text=f"Here is some JSON that contains the git log: {json.dumps({'log': log})}",
|
||||
)
|
||||
]
|
||||
|
||||
case GitTools.LIST_BRANCHES:
|
||||
branches = git_list_branches(repo)
|
||||
return [
|
||||
TextContent(
|
||||
type="text",
|
||||
text=f"Here is some JSON that contains a list of branches: {json.dumps({'branches': branches})}",
|
||||
)
|
||||
]
|
||||
|
||||
case GitTools.LIST_TAGS:
|
||||
tags = git_list_tags(repo)
|
||||
return [
|
||||
TextContent(
|
||||
type="text",
|
||||
text=f"Here is some JSON that contains a list of tags: {json.dumps({'tags': tags})}",
|
||||
)
|
||||
]
|
||||
|
||||
@@ -348,7 +485,14 @@ async def serve(repository: Path | None) -> None:
|
||||
|
||||
@click.command()
|
||||
@click.option("-r", "--repository", type=click.Path(path_type=Path, dir_okay=True))
|
||||
def main(repository: Path | None):
|
||||
@click.option("-v", "--verbose", count=True)
|
||||
def main(repository: Path | None, verbose: int):
|
||||
logging_level = logging.WARN
|
||||
if verbose == 1:
|
||||
logging_level = logging.INFO
|
||||
elif verbose >= 2:
|
||||
logging_level = logging.DEBUG
|
||||
logging.basicConfig(level=logging_level, stream=sys.stderr)
|
||||
anyio.run(serve, repository)
|
||||
|
||||
|
||||
|
||||
src/git/uv.lock (generated, 42 changed lines)
@@ -1,5 +1,5 @@
|
||||
version = 1
|
||||
requires-python = ">=3.11"
|
||||
requires-python = ">=3.10"
|
||||
resolution-markers = [
|
||||
"python_full_version < '3.13'",
|
||||
"python_full_version >= '3.13'",
|
||||
@@ -19,8 +19,10 @@ name = "anyio"
|
||||
version = "4.6.2.post1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "exceptiongroup", marker = "python_full_version < '3.11'" },
|
||||
{ name = "idna" },
|
||||
{ name = "sniffio" },
|
||||
{ name = "typing-extensions", marker = "python_full_version < '3.11'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/9f/09/45b9b7a6d4e45c6bcb5bf61d19e3ab87df68e0601fa8c5293de3542546cc/anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c", size = 173422 }
|
||||
wheels = [
|
||||
@@ -57,6 +59,15 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "exceptiongroup"
|
||||
version = "1.2.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/09/35/2495c4ac46b980e4ca1f6ad6db102322ef3ad2410b79fdde159a4b0f3b92/exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc", size = 28883 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", size = 16453 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "gitdb"
|
||||
version = "4.0.11"
|
||||
@@ -139,7 +150,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "mcp"
|
||||
version = "0.9.0"
|
||||
version = "0.9.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio" },
|
||||
@@ -149,14 +160,14 @@ dependencies = [
|
||||
{ name = "sse-starlette" },
|
||||
{ name = "starlette" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/cd/bb/fd56a5c331a6c95a4f2ec907683db3382d30b99b808ef6f46fa4f08a4b74/mcp-0.9.0.tar.gz", hash = "sha256:1d7e3f8d78bf5b37c98a233fce8cebbb86c57d8964d2c3b03cf08cdebd103d9a", size = 78343 }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e7/1c/932818470ffd49c33509110c835101a8dc4c9cdd06028b9f647fb3dde237/mcp-0.9.1.tar.gz", hash = "sha256:e8509a37c2ab546095788ed170e0fb4d7ce0cf5a3ee56b6449c78af27321a425", size = 78218 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/6f/07/077116e6a23dd0546391f5caa81b4f52938d8a81f2449c55c0b50c0215bf/mcp-0.9.0-py3-none-any.whl", hash = "sha256:e09aca08eadaf0552541aaa71271b44f99a6a5d16e5b1b03c421366f72b51753", size = 31691 },
|
||||
{ url = "https://files.pythonhosted.org/packages/b3/a0/2ee813d456b57a726d583868417d1ad900fbe12ee3c8cd866e3e804ca486/mcp-0.9.1-py3-none-any.whl", hash = "sha256:7f640fcfb0be486aa510594df309920ae1d375cdca1f8aff21db3a96d837f303", size = 31562 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mcp-git"
|
||||
version = "0.1.0"
|
||||
version = "0.2.0"
|
||||
source = { editable = "." }
|
||||
dependencies = [
|
||||
{ name = "click" },
|
||||
@@ -204,6 +215,18 @@ dependencies = [
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e2/aa/6b6a9b9f8537b872f552ddd46dd3da230367754b6f707b8e1e963f515ea3/pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863", size = 402156 }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/5c/8b/d3ae387f66277bd8104096d6ec0a145f4baa2966ebb2cad746c0920c9526/pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b", size = 1867835 },
|
||||
{ url = "https://files.pythonhosted.org/packages/46/76/f68272e4c3a7df8777798282c5e47d508274917f29992d84e1898f8908c7/pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166", size = 1776689 },
|
||||
{ url = "https://files.pythonhosted.org/packages/cc/69/5f945b4416f42ea3f3bc9d2aaec66c76084a6ff4ff27555bf9415ab43189/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb", size = 1800748 },
|
||||
{ url = "https://files.pythonhosted.org/packages/50/ab/891a7b0054bcc297fb02d44d05c50e68154e31788f2d9d41d0b72c89fdf7/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916", size = 1806469 },
|
||||
{ url = "https://files.pythonhosted.org/packages/31/7c/6e3fa122075d78f277a8431c4c608f061881b76c2b7faca01d317ee39b5d/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07", size = 2002246 },
|
||||
{ url = "https://files.pythonhosted.org/packages/ad/6f/22d5692b7ab63fc4acbc74de6ff61d185804a83160adba5e6cc6068e1128/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232", size = 2659404 },
|
||||
{ url = "https://files.pythonhosted.org/packages/11/ac/1e647dc1121c028b691028fa61a4e7477e6aeb5132628fde41dd34c1671f/pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2", size = 2053940 },
|
||||
{ url = "https://files.pythonhosted.org/packages/91/75/984740c17f12c3ce18b5a2fcc4bdceb785cce7df1511a4ce89bca17c7e2d/pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f", size = 1921437 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a0/74/13c5f606b64d93f0721e7768cd3e8b2102164866c207b8cd6f90bb15d24f/pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3", size = 1966129 },
|
||||
{ url = "https://files.pythonhosted.org/packages/18/03/9c4aa5919457c7b57a016c1ab513b1a926ed9b2bb7915bf8e506bf65c34b/pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071", size = 2110908 },
|
||||
{ url = "https://files.pythonhosted.org/packages/92/2c/053d33f029c5dc65e5cf44ff03ceeefb7cce908f8f3cca9265e7f9b540c8/pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119", size = 1735278 },
|
||||
{ url = "https://files.pythonhosted.org/packages/de/81/7dfe464eca78d76d31dd661b04b5f2036ec72ea8848dd87ab7375e185c23/pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f", size = 1917453 },
|
||||
{ url = "https://files.pythonhosted.org/packages/5d/30/890a583cd3f2be27ecf32b479d5d615710bb926d92da03e3f7838ff3e58b/pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8", size = 1865160 },
|
||||
{ url = "https://files.pythonhosted.org/packages/1d/9a/b634442e1253bc6889c87afe8bb59447f106ee042140bd57680b3b113ec7/pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d", size = 1776777 },
|
||||
{ url = "https://files.pythonhosted.org/packages/75/9a/7816295124a6b08c24c96f9ce73085032d8bcbaf7e5a781cd41aa910c891/pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e", size = 1799244 },
|
||||
@@ -240,6 +263,14 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/16/16/b805c74b35607d24d37103007f899abc4880923b04929547ae68d478b7f4/pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f", size = 2116814 },
|
||||
{ url = "https://files.pythonhosted.org/packages/d1/58/5305e723d9fcdf1c5a655e6a4cc2a07128bf644ff4b1d98daf7a9dbf57da/pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769", size = 1738360 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a5/ae/e14b0ff8b3f48e02394d8acd911376b7b66e164535687ef7dc24ea03072f/pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5", size = 1919411 },
|
||||
{ url = "https://files.pythonhosted.org/packages/13/a9/5d582eb3204464284611f636b55c0a7410d748ff338756323cb1ce721b96/pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5", size = 1857135 },
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/57/faf36290933fe16717f97829eabfb1868182ac495f99cf0eda9f59687c9d/pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec", size = 1740583 },
|
||||
{ url = "https://files.pythonhosted.org/packages/91/7c/d99e3513dc191c4fec363aef1bf4c8af9125d8fa53af7cb97e8babef4e40/pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480", size = 1793637 },
|
||||
{ url = "https://files.pythonhosted.org/packages/29/18/812222b6d18c2d13eebbb0f7cdc170a408d9ced65794fdb86147c77e1982/pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068", size = 1941963 },
|
||||
{ url = "https://files.pythonhosted.org/packages/0f/36/c1f3642ac3f05e6bb4aec3ffc399fa3f84895d259cf5f0ce3054b7735c29/pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801", size = 1915332 },
|
||||
{ url = "https://files.pythonhosted.org/packages/f7/ca/9c0854829311fb446020ebb540ee22509731abad886d2859c855dd29b904/pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728", size = 1957926 },
|
||||
{ url = "https://files.pythonhosted.org/packages/c0/1c/7836b67c42d0cd4441fcd9fafbf6a027ad4b79b6559f80cf11f89fd83648/pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433", size = 2100342 },
|
||||
{ url = "https://files.pythonhosted.org/packages/a9/f9/b6bcaf874f410564a78908739c80861a171788ef4d4f76f5009656672dfe/pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753", size = 1920344 },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -327,6 +358,7 @@ source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "click" },
|
||||
{ name = "h11" },
|
||||
{ name = "typing-extensions", marker = "python_full_version < '3.11'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e0/fc/1d785078eefd6945f3e5bab5c076e4230698046231eb0f3747bc5c8fa992/uvicorn-0.32.0.tar.gz", hash = "sha256:f78b36b143c16f54ccdb8190d0a26b5f1901fe5a3c777e1ab29f26391af8551e", size = 77564 }
|
||||
wheels = [
|
||||
|
||||
src/google-maps/README.md (new file, 65 lines)
@@ -0,0 +1,65 @@
# Google Maps MCP Server

MCP Server for the Google Maps API.

## Tools

1. `geocode`
   - Convert address to coordinates
   - Input: `address` (string)
   - Returns: location, formatted_address, place_id

2. `reverse_geocode`
   - Convert coordinates to address
   - Inputs:
     - `latitude` (number)
     - `longitude` (number)
   - Returns: formatted_address, place_id, address_components

3. `search_places`
   - Search for places using text query
   - Inputs:
     - `query` (string)
     - `location` (optional): { latitude: number, longitude: number }
     - `radius` (optional): number (meters, max 50000)
   - Returns: array of places with names, addresses, locations

4. `get_place_details`
   - Get detailed information about a place
   - Input: `place_id` (string)
   - Returns: name, address, contact info, ratings, reviews, opening hours

5. `get_distance_matrix`
   - Calculate distances and times between points
   - Inputs:
     - `origins` (string[])
     - `destinations` (string[])
     - `mode` (optional): "driving" | "walking" | "bicycling" | "transit"
   - Returns: distances and durations matrix

6. `get_elevation`
   - Get elevation data for locations
   - Input: `locations` (array of {latitude, longitude})
   - Returns: elevation data for each point

7. `get_directions`
   - Get directions between points
   - Inputs:
     - `origin` (string)
     - `destination` (string)
     - `mode` (optional): "driving" | "walking" | "bicycling" | "transit"
   - Returns: route details with steps, distance, duration
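For reference, the server code later in this commit registers these tools under a `maps_` prefix (for example, `geocode` is exposed as `maps_geocode`). A rough sketch of a `tools/call` request and the shape of the reply; the address is only an example:

```typescript
// Sketch of a tools/call request for the geocoding tool.
const geocodeRequest = {
  jsonrpc: "2.0",
  id: 1,
  method: "tools/call",
  params: {
    name: "maps_geocode",
    arguments: { address: "1600 Amphitheatre Parkway, Mountain View, CA" },
  },
};

// Per index.ts below, the handler replies with a single text content item whose
// text is a JSON string containing { location, formatted_address, place_id }.
```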
## Setup

1. Get a Google Maps API key by following the instructions [here](https://developers.google.com/maps/documentation/javascript/get-api-key#create-api-keys).

2. To use this with Claude Desktop, add the following to your `claude_desktop_config.json`:
```json
"mcp-server-google-maps": {
  "command": "mcp-server-google-maps",
  "env": {
    "GOOGLE_MAPS_API_KEY": "<YOUR_API_KEY>"
  }
}
```
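Before wiring the key into Claude Desktop, it can be handy to confirm it works against the same Geocoding endpoint the server uses. A minimal sketch, assuming Node 18+ (global `fetch`), an ES-module context, and that `GOOGLE_MAPS_API_KEY` is exported in your shell:

```typescript
// check-key.ts: quick sanity check for GOOGLE_MAPS_API_KEY (illustrative).
const apiKey = process.env.GOOGLE_MAPS_API_KEY;
if (!apiKey) throw new Error("GOOGLE_MAPS_API_KEY is not set");

const url = new URL("https://maps.googleapis.com/maps/api/geocode/json");
url.searchParams.append("address", "1600 Amphitheatre Parkway, Mountain View, CA");
url.searchParams.append("key", apiKey);

const response = await fetch(url.toString());
const data = (await response.json()) as { status: string; error_message?: string };
// "OK" means the key is accepted; "REQUEST_DENIED" usually points at a key or API enablement problem.
console.log(data.status, data.error_message ?? "");
```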
src/google-maps/index.ts (new file, 710 lines)
@@ -0,0 +1,710 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { Server } from "@modelcontextprotocol/sdk/server/index.js";
|
||||
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
|
||||
import {
|
||||
CallToolRequestSchema,
|
||||
ListToolsRequestSchema,
|
||||
Tool,
|
||||
} from "@modelcontextprotocol/sdk/types.js";
|
||||
import fetch from "node-fetch";
|
||||
|
||||
// Response interfaces
|
||||
interface GoogleMapsResponse {
|
||||
status: string;
|
||||
error_message?: string;
|
||||
}
|
||||
|
||||
interface GeocodeResponse extends GoogleMapsResponse {
|
||||
results: Array<{
|
||||
place_id: string;
|
||||
formatted_address: string;
|
||||
geometry: {
|
||||
location: {
|
||||
lat: number;
|
||||
lng: number;
|
||||
}
|
||||
};
|
||||
address_components: Array<{
|
||||
long_name: string;
|
||||
short_name: string;
|
||||
types: string[];
|
||||
}>;
|
||||
}>;
|
||||
}
|
||||
|
||||
interface PlacesSearchResponse extends GoogleMapsResponse {
|
||||
results: Array<{
|
||||
name: string;
|
||||
place_id: string;
|
||||
formatted_address: string;
|
||||
geometry: {
|
||||
location: {
|
||||
lat: number;
|
||||
lng: number;
|
||||
}
|
||||
};
|
||||
rating?: number;
|
||||
types: string[];
|
||||
}>;
|
||||
}
|
||||
|
||||
interface PlaceDetailsResponse extends GoogleMapsResponse {
|
||||
result: {
|
||||
name: string;
|
||||
place_id: string;
|
||||
formatted_address: string;
|
||||
formatted_phone_number?: string;
|
||||
website?: string;
|
||||
rating?: number;
|
||||
reviews?: Array<{
|
||||
author_name: string;
|
||||
rating: number;
|
||||
text: string;
|
||||
time: number;
|
||||
}>;
|
||||
opening_hours?: {
|
||||
weekday_text: string[];
|
||||
open_now: boolean;
|
||||
};
|
||||
geometry: {
|
||||
location: {
|
||||
lat: number;
|
||||
lng: number;
|
||||
}
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
interface DistanceMatrixResponse extends GoogleMapsResponse {
|
||||
origin_addresses: string[];
|
||||
destination_addresses: string[];
|
||||
rows: Array<{
|
||||
elements: Array<{
|
||||
status: string;
|
||||
duration: {
|
||||
text: string;
|
||||
value: number;
|
||||
};
|
||||
distance: {
|
||||
text: string;
|
||||
value: number;
|
||||
};
|
||||
}>;
|
||||
}>;
|
||||
}
|
||||
|
||||
interface ElevationResponse extends GoogleMapsResponse {
|
||||
results: Array<{
|
||||
elevation: number;
|
||||
location: {
|
||||
lat: number;
|
||||
lng: number;
|
||||
};
|
||||
resolution: number;
|
||||
}>;
|
||||
}
|
||||
|
||||
interface DirectionsResponse extends GoogleMapsResponse {
|
||||
routes: Array<{
|
||||
summary: string;
|
||||
legs: Array<{
|
||||
distance: {
|
||||
text: string;
|
||||
value: number;
|
||||
};
|
||||
duration: {
|
||||
text: string;
|
||||
value: number;
|
||||
};
|
||||
steps: Array<{
|
||||
html_instructions: string;
|
||||
distance: {
|
||||
text: string;
|
||||
value: number;
|
||||
};
|
||||
duration: {
|
||||
text: string;
|
||||
value: number;
|
||||
};
|
||||
travel_mode: string;
|
||||
}>;
|
||||
}>;
|
||||
}>;
|
||||
}
|
||||
|
||||
function getApiKey(): string {
|
||||
const apiKey = process.env.GOOGLE_MAPS_API_KEY;
|
||||
if (!apiKey) {
|
||||
console.error("GOOGLE_MAPS_API_KEY environment variable is not set");
|
||||
process.exit(1);
|
||||
}
|
||||
return apiKey;
|
||||
}
|
||||
|
||||
const GOOGLE_MAPS_API_KEY = getApiKey();
|
||||
|
||||
// Tool definitions
|
||||
const GEOCODE_TOOL: Tool = {
|
||||
name: "maps_geocode",
|
||||
description: "Convert an address into geographic coordinates",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
address: {
|
||||
type: "string",
|
||||
description: "The address to geocode"
|
||||
}
|
||||
},
|
||||
required: ["address"]
|
||||
}
|
||||
};
|
||||
|
||||
const REVERSE_GEOCODE_TOOL: Tool = {
|
||||
name: "maps_reverse_geocode",
|
||||
description: "Convert coordinates into an address",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
latitude: {
|
||||
type: "number",
|
||||
description: "Latitude coordinate"
|
||||
},
|
||||
longitude: {
|
||||
type: "number",
|
||||
description: "Longitude coordinate"
|
||||
}
|
||||
},
|
||||
required: ["latitude", "longitude"]
|
||||
}
|
||||
};
|
||||
|
||||
const SEARCH_PLACES_TOOL: Tool = {
|
||||
name: "maps_search_places",
|
||||
description: "Search for places using Google Places API",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
query: {
|
||||
type: "string",
|
||||
description: "Search query"
|
||||
},
|
||||
location: {
|
||||
type: "object",
|
||||
properties: {
|
||||
latitude: { type: "number" },
|
||||
longitude: { type: "number" }
|
||||
},
|
||||
description: "Optional center point for the search"
|
||||
},
|
||||
radius: {
|
||||
type: "number",
|
||||
description: "Search radius in meters (max 50000)"
|
||||
}
|
||||
},
|
||||
required: ["query"]
|
||||
}
|
||||
};
|
||||
|
||||
const PLACE_DETAILS_TOOL: Tool = {
|
||||
name: "maps_place_details",
|
||||
description: "Get detailed information about a specific place",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
place_id: {
|
||||
type: "string",
|
||||
description: "The place ID to get details for"
|
||||
}
|
||||
},
|
||||
required: ["place_id"]
|
||||
}
|
||||
};
|
||||
|
||||
const DISTANCE_MATRIX_TOOL: Tool = {
|
||||
name: "maps_distance_matrix",
|
||||
description: "Calculate travel distance and time for multiple origins and destinations",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
origins: {
|
||||
type: "array",
|
||||
items: { type: "string" },
|
||||
description: "Array of origin addresses or coordinates"
|
||||
},
|
||||
destinations: {
|
||||
type: "array",
|
||||
items: { type: "string" },
|
||||
description: "Array of destination addresses or coordinates"
|
||||
},
|
||||
mode: {
|
||||
type: "string",
|
||||
description: "Travel mode (driving, walking, bicycling, transit)",
|
||||
enum: ["driving", "walking", "bicycling", "transit"]
|
||||
}
|
||||
},
|
||||
required: ["origins", "destinations"]
|
||||
}
|
||||
};
|
||||
|
||||
const ELEVATION_TOOL: Tool = {
|
||||
name: "maps_elevation",
|
||||
description: "Get elevation data for locations on the earth",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
locations: {
|
||||
type: "array",
|
||||
items: {
|
||||
type: "object",
|
||||
properties: {
|
||||
latitude: { type: "number" },
|
||||
longitude: { type: "number" }
|
||||
},
|
||||
required: ["latitude", "longitude"]
|
||||
},
|
||||
description: "Array of locations to get elevation for"
|
||||
}
|
||||
},
|
||||
required: ["locations"]
|
||||
}
|
||||
};
|
||||
|
||||
const DIRECTIONS_TOOL: Tool = {
|
||||
name: "maps_directions",
|
||||
description: "Get directions between two points",
|
||||
inputSchema: {
|
||||
type: "object",
|
||||
properties: {
|
||||
origin: {
|
||||
type: "string",
|
||||
description: "Starting point address or coordinates"
|
||||
},
|
||||
destination: {
|
||||
type: "string",
|
||||
description: "Ending point address or coordinates"
|
||||
},
|
||||
mode: {
|
||||
type: "string",
|
||||
description: "Travel mode (driving, walking, bicycling, transit)",
|
||||
enum: ["driving", "walking", "bicycling", "transit"]
|
||||
}
|
||||
},
|
||||
required: ["origin", "destination"]
|
||||
}
|
||||
};
|
||||
|
||||
const MAPS_TOOLS = [
|
||||
GEOCODE_TOOL,
|
||||
REVERSE_GEOCODE_TOOL,
|
||||
SEARCH_PLACES_TOOL,
|
||||
PLACE_DETAILS_TOOL,
|
||||
DISTANCE_MATRIX_TOOL,
|
||||
ELEVATION_TOOL,
|
||||
DIRECTIONS_TOOL,
|
||||
] as const;
|
||||
|
||||
// API handlers
|
||||
async function handleGeocode(address: string) {
|
||||
const url = new URL("https://maps.googleapis.com/maps/api/geocode/json");
|
||||
url.searchParams.append("address", address);
|
||||
url.searchParams.append("key", GOOGLE_MAPS_API_KEY);
|
||||
|
||||
const response = await fetch(url.toString());
|
||||
const data = await response.json() as GeocodeResponse;
|
||||
|
||||
if (data.status !== "OK") {
|
||||
return {
|
||||
toolResult: {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Geocoding failed: ${data.error_message || data.status}`
|
||||
}],
|
||||
isError: true
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
toolResult: {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: JSON.stringify({
|
||||
location: data.results[0].geometry.location,
|
||||
formatted_address: data.results[0].formatted_address,
|
||||
place_id: data.results[0].place_id
|
||||
}, null, 2)
|
||||
}],
|
||||
isError: false
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
async function handleReverseGeocode(latitude: number, longitude: number) {
|
||||
const url = new URL("https://maps.googleapis.com/maps/api/geocode/json");
|
||||
url.searchParams.append("latlng", `${latitude},${longitude}`);
|
||||
url.searchParams.append("key", GOOGLE_MAPS_API_KEY);
|
||||
|
||||
const response = await fetch(url.toString());
|
||||
const data = await response.json() as GeocodeResponse;
|
||||
|
||||
if (data.status !== "OK") {
|
||||
return {
|
||||
toolResult: {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Reverse geocoding failed: ${data.error_message || data.status}`
|
||||
}],
|
||||
isError: true
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
toolResult: {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: JSON.stringify({
|
||||
formatted_address: data.results[0].formatted_address,
|
||||
place_id: data.results[0].place_id,
|
||||
address_components: data.results[0].address_components
|
||||
}, null, 2)
|
||||
}],
|
||||
isError: false
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
async function handlePlaceSearch(
|
||||
query: string,
|
||||
location?: { latitude: number; longitude: number },
|
||||
radius?: number
|
||||
) {
|
||||
const url = new URL("https://maps.googleapis.com/maps/api/place/textsearch/json");
|
||||
url.searchParams.append("query", query);
|
||||
url.searchParams.append("key", GOOGLE_MAPS_API_KEY);
|
||||
|
||||
if (location) {
|
||||
url.searchParams.append("location", `${location.latitude},${location.longitude}`);
|
||||
}
|
||||
if (radius) {
|
||||
url.searchParams.append("radius", radius.toString());
|
||||
}
|
||||
|
||||
const response = await fetch(url.toString());
|
||||
const data = await response.json() as PlacesSearchResponse;
|
||||
|
||||
if (data.status !== "OK") {
|
||||
return {
|
||||
toolResult: {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Place search failed: ${data.error_message || data.status}`
|
||||
}],
|
||||
isError: true
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
toolResult: {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: JSON.stringify({
|
||||
places: data.results.map((place) => ({
|
||||
name: place.name,
|
||||
formatted_address: place.formatted_address,
|
||||
location: place.geometry.location,
|
||||
place_id: place.place_id,
|
||||
rating: place.rating,
|
||||
types: place.types
|
||||
}))
|
||||
}, null, 2)
|
||||
}],
|
||||
isError: false
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
async function handlePlaceDetails(place_id: string) {
|
||||
const url = new URL("https://maps.googleapis.com/maps/api/place/details/json");
|
||||
url.searchParams.append("place_id", place_id);
|
||||
url.searchParams.append("key", GOOGLE_MAPS_API_KEY);
|
||||
|
||||
const response = await fetch(url.toString());
|
||||
const data = await response.json() as PlaceDetailsResponse;
|
||||
|
||||
if (data.status !== "OK") {
|
||||
return {
|
||||
toolResult: {
|
||||
content: [{
|
||||
type: "text",
|
||||
text: `Place details request failed: ${data.error_message || data.status}`
|
||||
}],
|
||||
        isError: true
      }
    };
  }

  return {
    toolResult: {
      content: [{
        type: "text",
        text: JSON.stringify({
          name: data.result.name,
          formatted_address: data.result.formatted_address,
          location: data.result.geometry.location,
          formatted_phone_number: data.result.formatted_phone_number,
          website: data.result.website,
          rating: data.result.rating,
          reviews: data.result.reviews,
          opening_hours: data.result.opening_hours
        }, null, 2)
      }],
      isError: false
    }
  };
}

async function handleDistanceMatrix(
  origins: string[],
  destinations: string[],
  mode: "driving" | "walking" | "bicycling" | "transit" = "driving"
) {
  const url = new URL("https://maps.googleapis.com/maps/api/distancematrix/json");
  url.searchParams.append("origins", origins.join("|"));
  url.searchParams.append("destinations", destinations.join("|"));
  url.searchParams.append("mode", mode);
  url.searchParams.append("key", GOOGLE_MAPS_API_KEY);

  const response = await fetch(url.toString());
  const data = await response.json() as DistanceMatrixResponse;

  if (data.status !== "OK") {
    return {
      toolResult: {
        content: [{
          type: "text",
          text: `Distance matrix request failed: ${data.error_message || data.status}`
        }],
        isError: true
      }
    };
  }

  return {
    toolResult: {
      content: [{
        type: "text",
        text: JSON.stringify({
          origin_addresses: data.origin_addresses,
          destination_addresses: data.destination_addresses,
          results: data.rows.map((row) => ({
            elements: row.elements.map((element) => ({
              status: element.status,
              duration: element.duration,
              distance: element.distance
            }))
          }))
        }, null, 2)
      }],
      isError: false
    }
  };
}

async function handleElevation(locations: Array<{ latitude: number; longitude: number }>) {
  const url = new URL("https://maps.googleapis.com/maps/api/elevation/json");
  const locationString = locations
    .map((loc) => `${loc.latitude},${loc.longitude}`)
    .join("|");
  url.searchParams.append("locations", locationString);
  url.searchParams.append("key", GOOGLE_MAPS_API_KEY);

  const response = await fetch(url.toString());
  const data = await response.json() as ElevationResponse;

  if (data.status !== "OK") {
    return {
      toolResult: {
        content: [{
          type: "text",
          text: `Elevation request failed: ${data.error_message || data.status}`
        }],
        isError: true
      }
    };
  }

  return {
    toolResult: {
      content: [{
        type: "text",
        text: JSON.stringify({
          results: data.results.map((result) => ({
            elevation: result.elevation,
            location: result.location,
            resolution: result.resolution
          }))
        }, null, 2)
      }],
      isError: false
    }
  };
}

async function handleDirections(
  origin: string,
  destination: string,
  mode: "driving" | "walking" | "bicycling" | "transit" = "driving"
) {
  const url = new URL("https://maps.googleapis.com/maps/api/directions/json");
  url.searchParams.append("origin", origin);
  url.searchParams.append("destination", destination);
  url.searchParams.append("mode", mode);
  url.searchParams.append("key", GOOGLE_MAPS_API_KEY);

  const response = await fetch(url.toString());
  const data = await response.json() as DirectionsResponse;

  if (data.status !== "OK") {
    return {
      toolResult: {
        content: [{
          type: "text",
          text: `Directions request failed: ${data.error_message || data.status}`
        }],
        isError: true
      }
    };
  }

  return {
    toolResult: {
      content: [{
        type: "text",
        text: JSON.stringify({
          routes: data.routes.map((route) => ({
            summary: route.summary,
            distance: route.legs[0].distance,
            duration: route.legs[0].duration,
            steps: route.legs[0].steps.map((step) => ({
              instructions: step.html_instructions,
              distance: step.distance,
              duration: step.duration,
              travel_mode: step.travel_mode
            }))
          }))
        }, null, 2)
      }],
      isError: false
    }
  };
}

// Server setup
const server = new Server(
  {
    name: "mcp-server/google-maps",
    version: "0.1.0",
  },
  {
    capabilities: {
      tools: {},
    },
  },
);

// Set up request handlers
server.setRequestHandler(ListToolsRequestSchema, async () => ({
  tools: MAPS_TOOLS,
}));

server.setRequestHandler(CallToolRequestSchema, async (request) => {
  try {
    switch (request.params.name) {
      case "maps_geocode": {
        const { address } = request.params.arguments as { address: string };
        return await handleGeocode(address);
      }

      case "maps_reverse_geocode": {
        const { latitude, longitude } = request.params.arguments as {
          latitude: number;
          longitude: number;
        };
        return await handleReverseGeocode(latitude, longitude);
      }

      case "maps_search_places": {
        const { query, location, radius } = request.params.arguments as {
          query: string;
          location?: { latitude: number; longitude: number };
          radius?: number;
        };
        return await handlePlaceSearch(query, location, radius);
      }

      case "maps_place_details": {
        const { place_id } = request.params.arguments as { place_id: string };
        return await handlePlaceDetails(place_id);
      }

      case "maps_distance_matrix": {
        const { origins, destinations, mode } = request.params.arguments as {
          origins: string[];
          destinations: string[];
          mode?: "driving" | "walking" | "bicycling" | "transit";
        };
        return await handleDistanceMatrix(origins, destinations, mode);
      }

      case "maps_elevation": {
        const { locations } = request.params.arguments as {
          locations: Array<{ latitude: number; longitude: number }>;
        };
        return await handleElevation(locations);
      }

      case "maps_directions": {
        const { origin, destination, mode } = request.params.arguments as {
          origin: string;
          destination: string;
          mode?: "driving" | "walking" | "bicycling" | "transit";
        };
        return await handleDirections(origin, destination, mode);
      }

      default:
        return {
          toolResult: {
            content: [{
              type: "text",
              text: `Unknown tool: ${request.params.name}`
            }],
            isError: true
          }
        };
    }
  } catch (error) {
    return {
      toolResult: {
        content: [{
          type: "text",
          text: `Error: ${error instanceof Error ? error.message : String(error)}`
        }],
        isError: true
      }
    };
  }
});

async function runServer() {
  const transport = new StdioServerTransport();
  await server.connect(transport);
  console.error("Google Maps MCP Server running on stdio");
}

runServer().catch((error) => {
  console.error("Fatal error running server:", error);
  process.exit(1);
});
29 src/google-maps/package.json Normal file
@@ -0,0 +1,29 @@
{
  "name": "@modelcontextprotocol/server-google-maps",
  "version": "0.1.0",
  "description": "MCP server for using the Google Maps API",
  "license": "MIT",
  "author": "Anthropic, PBC (https://anthropic.com)",
  "homepage": "https://modelcontextprotocol.io",
  "bugs": "https://github.com/modelcontextprotocol/servers/issues",
  "type": "module",
  "bin": {
    "mcp-server-google-maps": "dist/index.js"
  },
  "files": [
    "dist"
  ],
  "scripts": {
    "build": "tsc && shx chmod +x dist/*.js",
    "prepare": "npm run build",
    "watch": "tsc --watch"
  },
  "dependencies": {
    "@modelcontextprotocol/sdk": "0.6.0",
    "@types/node-fetch": "^2.6.12"
  },
  "devDependencies": {
    "shx": "^0.3.4",
    "typescript": "^5.6.2"
  }
}
10 src/google-maps/tsconfig.json Normal file
@@ -0,0 +1,10 @@
{
  "extends": "../../tsconfig.json",
  "compilerOptions": {
    "outDir": "./dist",
    "rootDir": "."
  },
  "include": [
    "./**/*.ts"
  ]
}
101 src/memory/README.md Normal file
@@ -0,0 +1,101 @@
# Knowledge Graph Memory Server

A basic MCP server implementation that provides persistent memory using a knowledge graph. The server manages entities, their observations, and the relations between them, persisted to a local JSON Lines file.

This lets Claude remember information about the user across chats and projects, without having to rely on a single, ever-growing conversation.

# Core Concepts

## Entities
Entities are the primary nodes in the knowledge graph. Each entity has:
- A unique name (identifier)
- An entity type (e.g., "person", "organization", "event")
- A list of observations

Example:
```json
{
  "name": "John_Smith",
  "entityType": "person",
  "observations": ["Lives in New York", "Works as a software engineer"]
}
```

## Relations
Relations define directed connections between entities. They are always stored in active voice and describe how entities interact or relate to each other.

Example:
```json
{
  "from": "John_Smith",
  "to": "TechCorp",
  "relationType": "works_at"
}
```

## Observations
Observations are discrete pieces of information about an entity. They:

- Are stored as strings
- Are attached to specific entities
- Can be added or removed independently
- Should be atomic (one fact per observation)

Example:
```json
{
  "entityName": "John_Smith",
  "observations": [
    "Speaks fluent Spanish",
    "Graduated in 2019",
    "Prefers morning meetings"
  ]
}
```
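
On disk, the server persists the graph as one JSON object per line, each tagged with a `type` field of either `entity` or `relation` (see `saveGraph` in `index.ts` below). A rough sketch of two stored lines:

```json
{"type":"entity","name":"John_Smith","entityType":"person","observations":["Lives in New York"]}
{"type":"relation","from":"John_Smith","to":"TechCorp","relationType":"works_at"}
```
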
# Tools

## Entity Management

- `create_entities`: Create new entities in the knowledge graph with names, types, and observations
- `delete_entities`: Remove entities and their associated relations from the graph
- `add_observations`: Add new observations to existing entities
- `delete_observations`: Remove specific observations from entities

## Relation Management

- `create_relations`: Establish relationships between entities in active voice
- `delete_relations`: Remove specific relationships between entities

## Query Tools

- `read_graph`: Retrieve the entire knowledge graph
- `search_nodes`: Search for nodes based on names, types, and observation content
- `open_nodes`: Access specific nodes by their names
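
As an illustration, the arguments for a `create_entities` call mirror the entity shape above (the values here are made-up examples, not part of the server):

```json
{
  "entities": [
    {
      "name": "TechCorp",
      "entityType": "organization",
      "observations": ["Based in San Francisco"]
    }
  ]
}
```
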
# Prompts

The prompt for utilizing memory depends on the use case, but here is an example prompt for chat personalization. You could use this prompt in the "Custom Instructions" field of a Project.

```
Follow these steps for each interaction:

1. User Identification:
   - You should assume that you are interacting with default_user
   - If you have not identified default_user, proactively try to do so.

2. Memory Retrieval:
   - Always begin your chat by saying only "Remembering..." and retrieve all relevant information from your knowledge graph
   - Always refer to your knowledge as your "memory"

3. Memory:
   - While conversing with the user, be attentive to any new information that falls into these categories:
     a) Basic Identity (age, gender, location, job title, education level, etc.)
     b) Behaviors (interests, habits, etc.)
     c) Preferences (communication style, preferred language, etc.)
     d) Goals/Psychology (goals, targets, aspirations, etc.)
     e) Relationships (personal and professional relationships up to 3 degrees of separation)

4. Memory Update:
   - If any new information was gathered during the interaction, update your memory as follows:
     a) Create nodes for recurring organizations, people, and significant events, connecting them to the current node.
     b) Store most facts as observations within these nodes.
   - Try to perform all updates in one operation using the create and delete functions.
```
414 src/memory/index.ts Normal file
@@ -0,0 +1,414 @@
#!/usr/bin/env node

import { Server } from "@modelcontextprotocol/sdk/server/index.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import {
  CallToolRequestSchema,
  ListToolsRequestSchema,
} from "@modelcontextprotocol/sdk/types.js";
import { promises as fs } from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';


// Define the path to the JSONL file; you can change this to your desired local path
const __dirname = path.dirname(fileURLToPath(import.meta.url));
const MEMORY_FILE_PATH = path.join(__dirname, 'memory.json');

// We are storing our memory using entities, relations, and observations in a graph structure
interface Entity {
  name: string;
  entityType: string;
  observations: string[];
}

interface Relation {
  from: string;
  to: string;
  relationType: string;
}

interface KnowledgeGraph {
  entities: Entity[];
  relations: Relation[];
}

// The KnowledgeGraphManager class contains all operations to interact with the knowledge graph
class KnowledgeGraphManager {
  private async loadGraph(): Promise<KnowledgeGraph> {
    try {
      const data = await fs.readFile(MEMORY_FILE_PATH, "utf-8");
      const lines = data.split("\n").filter(line => line.trim() !== "");
      return lines.reduce((graph: KnowledgeGraph, line) => {
        const item = JSON.parse(line);
        if (item.type === "entity") graph.entities.push(item as Entity);
        if (item.type === "relation") graph.relations.push(item as Relation);
        return graph;
      }, { entities: [], relations: [] });
    } catch (error) {
      if (error instanceof Error && 'code' in error && (error as any).code === "ENOENT") {
        return { entities: [], relations: [] };
      }
      throw error;
    }
  }

  private async saveGraph(graph: KnowledgeGraph): Promise<void> {
    const lines = [
      ...graph.entities.map(e => JSON.stringify({ type: "entity", ...e })),
      ...graph.relations.map(r => JSON.stringify({ type: "relation", ...r })),
    ];
    await fs.writeFile(MEMORY_FILE_PATH, lines.join("\n"));
  }

  async createEntities(entities: Entity[]): Promise<Entity[]> {
    const graph = await this.loadGraph();
    const newEntities = entities.filter(e => !graph.entities.some(existingEntity => existingEntity.name === e.name));
    graph.entities.push(...newEntities);
    await this.saveGraph(graph);
    return newEntities;
  }

  async createRelations(relations: Relation[]): Promise<Relation[]> {
    const graph = await this.loadGraph();
    const newRelations = relations.filter(r => !graph.relations.some(existingRelation =>
      existingRelation.from === r.from &&
      existingRelation.to === r.to &&
      existingRelation.relationType === r.relationType
    ));
    graph.relations.push(...newRelations);
    await this.saveGraph(graph);
    return newRelations;
  }

  async addObservations(observations: { entityName: string; contents: string[] }[]): Promise<{ entityName: string; addedObservations: string[] }[]> {
    const graph = await this.loadGraph();
    const results = observations.map(o => {
      const entity = graph.entities.find(e => e.name === o.entityName);
      if (!entity) {
        throw new Error(`Entity with name ${o.entityName} not found`);
      }
      const newObservations = o.contents.filter(content => !entity.observations.includes(content));
      entity.observations.push(...newObservations);
      return { entityName: o.entityName, addedObservations: newObservations };
    });
    await this.saveGraph(graph);
    return results;
  }

  async deleteEntities(entityNames: string[]): Promise<void> {
    const graph = await this.loadGraph();
    graph.entities = graph.entities.filter(e => !entityNames.includes(e.name));
    graph.relations = graph.relations.filter(r => !entityNames.includes(r.from) && !entityNames.includes(r.to));
    await this.saveGraph(graph);
  }

  async deleteObservations(deletions: { entityName: string; observations: string[] }[]): Promise<void> {
    const graph = await this.loadGraph();
    deletions.forEach(d => {
      const entity = graph.entities.find(e => e.name === d.entityName);
      if (entity) {
        entity.observations = entity.observations.filter(o => !d.observations.includes(o));
      }
    });
    await this.saveGraph(graph);
  }

  async deleteRelations(relations: Relation[]): Promise<void> {
    const graph = await this.loadGraph();
    graph.relations = graph.relations.filter(r => !relations.some(delRelation =>
      r.from === delRelation.from &&
      r.to === delRelation.to &&
      r.relationType === delRelation.relationType
    ));
    await this.saveGraph(graph);
  }

  async readGraph(): Promise<KnowledgeGraph> {
    return this.loadGraph();
  }

  // Very basic search function
  async searchNodes(query: string): Promise<KnowledgeGraph> {
    const graph = await this.loadGraph();

    // Filter entities
    const filteredEntities = graph.entities.filter(e =>
      e.name.toLowerCase().includes(query.toLowerCase()) ||
      e.entityType.toLowerCase().includes(query.toLowerCase()) ||
      e.observations.some(o => o.toLowerCase().includes(query.toLowerCase()))
    );

    // Create a Set of filtered entity names for quick lookup
    const filteredEntityNames = new Set(filteredEntities.map(e => e.name));

    // Filter relations to only include those between filtered entities
    const filteredRelations = graph.relations.filter(r =>
      filteredEntityNames.has(r.from) && filteredEntityNames.has(r.to)
    );

    const filteredGraph: KnowledgeGraph = {
      entities: filteredEntities,
      relations: filteredRelations,
    };

    return filteredGraph;
  }

  async openNodes(names: string[]): Promise<KnowledgeGraph> {
    const graph = await this.loadGraph();

    // Filter entities
    const filteredEntities = graph.entities.filter(e => names.includes(e.name));

    // Create a Set of filtered entity names for quick lookup
    const filteredEntityNames = new Set(filteredEntities.map(e => e.name));

    // Filter relations to only include those between filtered entities
    const filteredRelations = graph.relations.filter(r =>
      filteredEntityNames.has(r.from) && filteredEntityNames.has(r.to)
    );

    const filteredGraph: KnowledgeGraph = {
      entities: filteredEntities,
      relations: filteredRelations,
    };

    return filteredGraph;
  }
}

const knowledgeGraphManager = new KnowledgeGraphManager();


// The server instance and tools exposed to Claude
const server = new Server({
  name: "memory-server",
  version: "1.0.0",
}, {
  capabilities: {
    tools: {},
  },
});

server.setRequestHandler(ListToolsRequestSchema, async () => {
  return {
    tools: [
      {
        name: "create_entities",
        description: "Create multiple new entities in the knowledge graph",
        inputSchema: {
          type: "object",
          properties: {
            entities: {
              type: "array",
              items: {
                type: "object",
                properties: {
                  name: { type: "string", description: "The name of the entity" },
                  entityType: { type: "string", description: "The type of the entity" },
                  observations: {
                    type: "array",
                    items: { type: "string" },
                    description: "An array of observation contents associated with the entity"
                  },
                },
                required: ["name", "entityType", "observations"],
              },
            },
          },
          required: ["entities"],
        },
      },
      {
        name: "create_relations",
        description: "Create multiple new relations between entities in the knowledge graph. Relations should be in active voice",
        inputSchema: {
          type: "object",
          properties: {
            relations: {
              type: "array",
              items: {
                type: "object",
                properties: {
                  from: { type: "string", description: "The name of the entity where the relation starts" },
                  to: { type: "string", description: "The name of the entity where the relation ends" },
                  relationType: { type: "string", description: "The type of the relation" },
                },
                required: ["from", "to", "relationType"],
              },
            },
          },
          required: ["relations"],
        },
      },
      {
        name: "add_observations",
        description: "Add new observations to existing entities in the knowledge graph",
        inputSchema: {
          type: "object",
          properties: {
            observations: {
              type: "array",
              items: {
                type: "object",
                properties: {
                  entityName: { type: "string", description: "The name of the entity to add the observations to" },
                  contents: {
                    type: "array",
                    items: { type: "string" },
                    description: "An array of observation contents to add"
                  },
                },
                required: ["entityName", "contents"],
              },
            },
          },
          required: ["observations"],
        },
      },
      {
        name: "delete_entities",
        description: "Delete multiple entities and their associated relations from the knowledge graph",
        inputSchema: {
          type: "object",
          properties: {
            entityNames: {
              type: "array",
              items: { type: "string" },
              description: "An array of entity names to delete"
            },
          },
          required: ["entityNames"],
        },
      },
      {
        name: "delete_observations",
        description: "Delete specific observations from entities in the knowledge graph",
        inputSchema: {
          type: "object",
          properties: {
            deletions: {
              type: "array",
              items: {
                type: "object",
                properties: {
                  entityName: { type: "string", description: "The name of the entity containing the observations" },
                  observations: {
                    type: "array",
                    items: { type: "string" },
                    description: "An array of observations to delete"
                  },
                },
                required: ["entityName", "observations"],
              },
            },
          },
          required: ["deletions"],
        },
      },
      {
        name: "delete_relations",
        description: "Delete multiple relations from the knowledge graph",
        inputSchema: {
          type: "object",
          properties: {
            relations: {
              type: "array",
              items: {
                type: "object",
                properties: {
                  from: { type: "string", description: "The name of the entity where the relation starts" },
                  to: { type: "string", description: "The name of the entity where the relation ends" },
                  relationType: { type: "string", description: "The type of the relation" },
                },
                required: ["from", "to", "relationType"],
              },
              description: "An array of relations to delete"
            },
          },
          required: ["relations"],
        },
      },
      {
        name: "read_graph",
        description: "Read the entire knowledge graph",
        inputSchema: {
          type: "object",
          properties: {},
        },
      },
      {
        name: "search_nodes",
        description: "Search for nodes in the knowledge graph based on a query",
        inputSchema: {
          type: "object",
          properties: {
            query: { type: "string", description: "The search query to match against entity names, types, and observation content" },
          },
          required: ["query"],
        },
      },
      {
        name: "open_nodes",
        description: "Open specific nodes in the knowledge graph by their names",
        inputSchema: {
          type: "object",
          properties: {
            names: {
              type: "array",
              items: { type: "string" },
              description: "An array of entity names to retrieve",
            },
          },
          required: ["names"],
        },
      },
    ],
  };
});

server.setRequestHandler(CallToolRequestSchema, async (request) => {
  const { name, arguments: args } = request.params;

  if (!args) {
    throw new Error(`No arguments provided for tool: ${name}`);
  }

  switch (name) {
    case "create_entities":
      return { toolResult: await knowledgeGraphManager.createEntities(args.entities as Entity[]) };
    case "create_relations":
      return { toolResult: await knowledgeGraphManager.createRelations(args.relations as Relation[]) };
    case "add_observations":
      return { toolResult: await knowledgeGraphManager.addObservations(args.observations as { entityName: string; contents: string[] }[]) };
    case "delete_entities":
      await knowledgeGraphManager.deleteEntities(args.entityNames as string[]);
      return { toolResult: "Entities deleted successfully" };
    case "delete_observations":
      await knowledgeGraphManager.deleteObservations(args.deletions as { entityName: string; observations: string[] }[]);
      return { toolResult: "Observations deleted successfully" };
    case "delete_relations":
      await knowledgeGraphManager.deleteRelations(args.relations as Relation[]);
      return { toolResult: "Relations deleted successfully" };
    case "read_graph":
      return { toolResult: await knowledgeGraphManager.readGraph() };
    case "search_nodes":
      return { toolResult: await knowledgeGraphManager.searchNodes(args.query as string) };
    case "open_nodes":
      return { toolResult: await knowledgeGraphManager.openNodes(args.names as string[]) };
    default:
      throw new Error(`Unknown tool: ${name}`);
  }
});

async function main() {
  const transport = new StdioServerTransport();
  await server.connect(transport);
  console.error("Knowledge Graph MCP Server running on stdio");
}

main().catch((error) => {
  console.error("Fatal error in main():", error);
  process.exit(1);
});
28 src/memory/package.json Normal file
@@ -0,0 +1,28 @@
{
  "name": "@modelcontextprotocol/server-memory",
  "version": "0.1.0",
  "description": "MCP server for enabling memory for Claude through a knowledge graph",
  "license": "MIT",
  "author": "Anthropic, PBC (https://anthropic.com)",
  "homepage": "https://modelcontextprotocol.io",
  "bugs": "https://github.com/modelcontextprotocol/servers/issues",
  "type": "module",
  "bin": {
    "mcp-server-memory": "dist/index.js"
  },
  "files": [
    "dist"
  ],
  "scripts": {
    "build": "tsc && shx chmod +x dist/*.js",
    "prepare": "npm run build",
    "watch": "tsc --watch"
  },
  "dependencies": {
    "@modelcontextprotocol/sdk": "0.5.0"
  },
  "devDependencies": {
    "shx": "^0.3.4",
    "typescript": "^5.6.2"
  }
}
11 src/memory/tsconfig.json Normal file
@@ -0,0 +1,11 @@
{
  "extends": "../../tsconfig.json",
  "compilerOptions": {
    "outDir": "./dist",
    "rootDir": "."
  },
  "include": [
    "./**/*.ts"
  ]
}
@@ -1,3 +1,34 @@
# PostgreSQL server
# PostgreSQL

This MCP server provides **resources** and **tools** for interacting with a Postgres database.
A Model Context Protocol server that provides read-only access to PostgreSQL databases. This server enables LLMs to inspect database schemas and execute read-only queries.

## Components

### Tools

- **query**
  - Execute read-only SQL queries against the connected database
  - Input: `sql` (string): The SQL query to execute
  - All queries are executed within a READ ONLY transaction
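
For example, the arguments for a `query` call are just the SQL string (the table and columns below are hypothetical):

```json
{
  "sql": "SELECT id, name FROM users LIMIT 10"
}
```
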
### Resources

The server provides schema information for each table in the database:

- **Table Schemas** (`postgres://<host>/<table>/schema`)
  - JSON schema information for each table
  - Includes column names and data types
  - Automatically discovered from database metadata
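
The exact payload depends on the server implementation, but the schema resource for a hypothetical `users` table might look roughly like:

```json
[
  { "column_name": "id", "data_type": "integer" },
  { "column_name": "name", "data_type": "text" }
]
```
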
## Example Usage with the Desktop App

To use this server with the Claude Desktop app, add the following configuration to the "mcpServers" section of your `claude_desktop_config.json`:

```json
"postgres": {
  "command": "mcp-server-postgres",
  "args": ["postgresql://localhost/mydb"]
}
```

Replace `/mydb` with your database name.
@@ -19,7 +19,7 @@
    "watch": "tsc --watch"
  },
  "dependencies": {
    "@modelcontextprotocol/sdk": "0.5.0",
    "@modelcontextprotocol/sdk": "0.6.0",
    "pg": "^8.13.0"
  },
  "devDependencies": {
@@ -1,9 +1,11 @@
from . import server
import asyncio


def main():
    """Main entry point for the package."""
    asyncio.run(server.main())


# Optionally expose other important items at package level
__all__ = ['main', 'server']
__all__ = ["main", "server"]
@@ -28,7 +28,7 @@ class McpServer(Server):
            if cursor.fetchone()[0] == 0:
                cursor.execute(
                    "INSERT INTO notes (name, content) VALUES (?, ?)",
                    ("example", "This is an example note.")
                    ("example", "This is an example note."),
                )
                conn.commit()

@@ -55,13 +55,13 @@ class McpServer(Server):
        with closing(conn.cursor()) as cursor:
            cursor.execute(
                "INSERT OR REPLACE INTO notes (name, content) VALUES (?, ?)",
                (name, content)
                (name, content),
            )
            conn.commit()

    def __init__(self):
        super().__init__("sqlite")

        # Initialize SQLite database
        self.db_path = "notes.db"
        self._init_database()

@@ -118,10 +118,14 @@ class McpServer(Server):
        """Generate a prompt using notes from the database"""
        if name != "summarize-notes":
            raise ValueError(f"Unknown prompt: {name}")
        notes = "<notes>\n" + "\n".join(
            f"<note name='{name}'>\n{content}\n</note>"
            for name, content in self._get_notes().items()
        ) + "\n</notes>"
        notes = (
            "<notes>\n"
            + "\n".join(
                f"<note name='{name}'>\n{content}\n</note>"
                for name, content in self._get_notes().items()
            )
            + "\n</notes>"
        )
        style = (arguments or {}).get("style", "simple")
        prompt = """
Your task is to provide a summary of the notes provided below.

@@ -207,4 +211,4 @@ async def main():
            experimental_capabilities={},
        ),
        ),
    )
    )