diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 00000000..4f687118
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,212 @@
+name: Automatic Release Creation
+
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: '0 10 * * *'
+
+jobs:
+  create-metadata:
+    runs-on: ubuntu-latest
+    outputs:
+      hash: ${{ steps.last-release.outputs.hash }}
+      version: ${{ steps.create-version.outputs.version }}
+      npm_packages: ${{ steps.create-npm-packages.outputs.npm_packages }}
+      pypi_packages: ${{ steps.create-pypi-packages.outputs.pypi_packages }}
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - name: Get last release hash
+        id: last-release
+        run: |
+          HASH=$(git rev-list --tags --max-count=1)
+          # rev-list exits 0 with empty output when the repo has no tags yet,
+          # so an `|| echo` fallback would never fire; default explicitly.
+          HASH=${HASH:-$(git rev-parse HEAD~1)}
+          echo "hash=${HASH}" >> $GITHUB_OUTPUT
+          echo "Using last release hash: ${HASH}"
+
+      - name: Install uv
+        uses: astral-sh/setup-uv@v5
+
+      - name: Create version name
+        id: create-version
+        run: |
+          VERSION=$(uv run --script scripts/release.py generate-version)
+          echo "version $VERSION"
+          echo "version=$VERSION" >> $GITHUB_OUTPUT
+
+      - name: Create notes
+        run: |
+          HASH="${{ steps.last-release.outputs.hash }}"
+          uv run --script scripts/release.py generate-notes --directory src/ $HASH > RELEASE_NOTES.md
+          cat RELEASE_NOTES.md
+
+      - name: Release notes
+        uses: actions/upload-artifact@v4
+        with:
+          name: release-notes
+          path: RELEASE_NOTES.md
+
+      - name: Create python matrix
+        id: create-pypi-packages
+        run: |
+          HASH="${{ steps.last-release.outputs.hash }}"
+          PYPI=$(uv run --script scripts/release.py generate-matrix --pypi --directory src $HASH)
+          echo "pypi_packages $PYPI"
+          echo "pypi_packages=$PYPI" >> $GITHUB_OUTPUT
+
+      - name: Create npm matrix
+        id: create-npm-packages
+        run: |
+          HASH="${{ steps.last-release.outputs.hash }}"
+          NPM=$(uv run --script scripts/release.py generate-matrix --npm --directory src $HASH)
+          echo "npm_packages $NPM"
+          echo "npm_packages=$NPM" >> $GITHUB_OUTPUT
+
+  update-packages:
+    needs: [create-metadata]
+    if: ${{ needs.create-metadata.outputs.npm_packages != '[]' || needs.create-metadata.outputs.pypi_packages != '[]' }}
+    runs-on: ubuntu-latest
+    outputs:
+      changes_made: ${{ steps.commit.outputs.changes_made }}
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - name: Install uv
+        uses: astral-sh/setup-uv@v5
+
+      - name: Update packages
+        run: |
+          HASH="${{ needs.create-metadata.outputs.hash }}"
+          uv run --script scripts/release.py update-packages --directory src/ $HASH
+
+      - name: Configure git
+        run: |
+          git config --global user.name "GitHub Actions"
+          git config --global user.email "actions@github.com"
+
+      - name: Commit changes
+        id: commit
+        run: |
+          VERSION="${{ needs.create-metadata.outputs.version }}"
+          git add -u
+          if git diff-index --quiet HEAD; then
+            echo "changes_made=false" >> $GITHUB_OUTPUT
+          else
+            git commit -m 'Automatic update of packages'
+            git tag -a "$VERSION" -m "Release $VERSION"
+            git push origin "$VERSION"
+            echo "changes_made=true" >> $GITHUB_OUTPUT
+          fi
+
+  publish-pypi:
+    needs: [update-packages, create-metadata]
+    strategy:
+      fail-fast: false
+      matrix:
+        package: ${{ fromJson(needs.create-metadata.outputs.pypi_packages) }}
+    name: Build ${{ matrix.package }}
+    environment: release
+    permissions:
+      id-token: write # Required for trusted publishing
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          ref: ${{ needs.create-metadata.outputs.version }}
+
+      - name: Install uv
+        uses: astral-sh/setup-uv@v5
+
+      - name: Set up Python
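+        # Assumes each package directory pins its interpreter in a
+        # .python-version file, read via python-version-file below.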
+        uses: actions/setup-python@v5
+        with:
+          python-version-file: "src/${{ matrix.package }}/.python-version"
+
+      - name: Install dependencies
+        working-directory: src/${{ matrix.package }}
+        run: uv sync --frozen --all-extras --dev
+
+      - name: Run pyright
+        working-directory: src/${{ matrix.package }}
+        run: uv run --frozen pyright
+
+      - name: Build package
+        working-directory: src/${{ matrix.package }}
+        run: uv build
+
+      - name: Publish package to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          packages-dir: src/${{ matrix.package }}/dist
+
+  publish-npm:
+    needs: [update-packages, create-metadata]
+    strategy:
+      fail-fast: false
+      matrix:
+        package: ${{ fromJson(needs.create-metadata.outputs.npm_packages) }}
+    name: Build ${{ matrix.package }}
+    environment: release
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          ref: ${{ needs.create-metadata.outputs.version }}
+
+      - uses: actions/setup-node@v4
+        with:
+          node-version: 22
+          cache: npm
+          registry-url: 'https://registry.npmjs.org'
+
+      - name: Install dependencies
+        working-directory: src/${{ matrix.package }}
+        run: npm ci
+
+      - name: Check if version exists on npm
+        working-directory: src/${{ matrix.package }}
+        run: |
+          VERSION=$(jq -r .version package.json)
+          # jq -e sets the exit status from the filter's result; without it the
+          # pipeline exits 0 even when the filter yields false.
+          if npm view --json | jq -e --arg version "$VERSION" '[.[]][0].versions | contains([$version])'; then
+            echo "Version $VERSION already exists on npm"
+            exit 1
+          fi
+          echo "Version $VERSION is new, proceeding with publish"
+
+      - name: Build package
+        working-directory: src/${{ matrix.package }}
+        run: npm run build
+
+      - name: Publish package
+        working-directory: src/${{ matrix.package }}
+        run: |
+          npm publish --access public
+        env:
+          NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
+
+  create-release:
+    needs: [update-packages, create-metadata, publish-pypi, publish-npm]
+    if: needs.update-packages.outputs.changes_made == 'true'
+    runs-on: ubuntu-latest
+    environment: release
+    permissions:
+      contents: write
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Download release notes
+        uses: actions/download-artifact@v4
+        with:
+          name: release-notes
+
+      - name: Create release
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          VERSION="${{ needs.create-metadata.outputs.version }}"
+          gh release create "$VERSION" \
+            --title "Release $VERSION" \
+            --notes-file RELEASE_NOTES.md
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 8c6e176a..28cba5a6 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -10,6 +10,8 @@ The repository contains reference implementations, as well as a list of communit
 We generally don't accept new servers into the repository. We do accept pull requests to the [README.md](./README.md) adding a reference to your servers.
 
+Please keep lists in alphabetical order to minimize merge conflicts when adding new items.
+
 - Check the [modelcontextprotocol.io](https://modelcontextprotocol.io) documentation
 - Ensure your server doesn't duplicate existing functionality
 - Consider whether your server would be generally useful to others
diff --git a/README.md b/README.md
index 499e3e41..3e73c50f 100644
--- a/README.md
+++ b/README.md
@@ -6,6 +6,8 @@ to community built servers and additional resources.
 The servers in this repository showcase the versatility and extensibility of MCP, demonstrating how it can be used to give Large Language Models (LLMs) secure, controlled access to tools and data sources.
 Each MCP server is implemented with either the [Typescript MCP SDK](https://github.com/modelcontextprotocol/typescript-sdk) or [Python MCP SDK](https://github.com/modelcontextprotocol/python-sdk).
 
+> Note: Lists in this README are maintained in alphabetical order to minimize merge conflicts when adding new items.
+
 ## 🌟 Reference Servers
 
 These servers aim to demonstrate MCP features and the Typescript and Python SDK.
@@ -39,19 +41,21 @@ Official integrations are maintained by companies building production ready MCP
 - Axiom Logo **[Axiom](https://github.com/axiomhq/mcp-server-axiom)** - Query and analyze your Axiom logs, traces, and all other event data in natural language
 - Browserbase Logo **[Browserbase](https://github.com/browserbase/mcp-server-browserbase)** - Automate browser interactions in the cloud (e.g. web navigation, data extraction, form filling, and more)
 - **[Cloudflare](https://github.com/cloudflare/mcp-server-cloudflare)** - Deploy, configure & interrogate your resources on the Cloudflare developer platform (e.g. Workers/KV/R2/D1)
-- **[Raygun](https://github.com/MindscapeHQ/mcp-server-raygun)** - Interact with your crash reporting and real using monitoring data on your Raygun account
-- **[Obsidian Markdown Notes](https://github.com/calclavia/mcp-obsidian)** - Read and search through your Obsidian vault or any directory containing Markdown notes
 - E2B Logo **[E2B](https://github.com/e2b-dev/mcp-server)** - Run code in secure sandboxes hosted by [E2B](https://e2b.dev)
 - Exa Logo **[Exa](https://github.com/exa-labs/exa-mcp-server)** - Search Engine made for AIs by [Exa](https://exa.ai)
+- Fireproof Logo **[Fireproof](https://github.com/fireproof-storage/mcp-database-server)** - Immutable ledger database with live synchronization
 - **[JetBrains](https://github.com/JetBrains/mcp-jetbrains)** – Work on your code with JetBrains IDEs
-- **[Needle](https://github.com/JANHMS/needle-mcp)** - Production-ready RAG out of the box to search and retrieve data from your own documents.
-- **[Neon](https://github.com/neondatabase/mcp-server-neon)** - Interact with the Neon serverless Postgres platform
-- Neo4j Logo **[Neo4j](https://github.com/neo4j-contrib/mcp-neo4j/)** - Neo4j graph database server (schema + read/write-cypher) and separate graph database backed memory
-- Tinybird Logo **[Tinybird](https://github.com/tinybirdco/mcp-tinybird)** - Interact with Tinybird serverless ClickHouse platform
-- [Search1API](https://github.com/fatwang2/search1api-mcp) - One API for Search, Crawling, and Sitemaps
-- **[Qdrant](https://github.com/qdrant/mcp-server-qdrant/)** - Implement semantic memory layer on top of the Qdrant vector search engine
+- Kagi Logo **[Kagi Search](https://github.com/kagisearch/kagimcp)** - Search the web using Kagi's search API
+- Meilisearch Logo **[Meilisearch](https://github.com/meilisearch/meilisearch-mcp)** - Interact & query with Meilisearch (Full-text & semantic search API)
 - **[Metoro](https://github.com/metoro-io/metoro-mcp-server)** - Query and interact with kubernetes environments monitored by Metoro
-
+- MotherDuck Logo **[MotherDuck](https://github.com/motherduckdb/mcp-server-motherduck)** - Query and analyze data with MotherDuck and local DuckDB
+- Needle AI Logo **[Needle](https://github.com/needle-ai/needle-mcp)** - Production-ready RAG out of the box to search and retrieve data from your own documents.
+- Neo4j Logo **[Neo4j](https://github.com/neo4j-contrib/mcp-neo4j/)** - Neo4j graph database server (schema + read/write-cypher) and separate graph database backed memory
+- **[Neon](https://github.com/neondatabase/mcp-server-neon)** - Interact with the Neon serverless Postgres platform
+- **[Qdrant](https://github.com/qdrant/mcp-server-qdrant/)** - Implement semantic memory layer on top of the Qdrant vector search engine
+- **[Raygun](https://github.com/MindscapeHQ/mcp-server-raygun)** - Interact with your crash reporting and real user monitoring data on your Raygun account
+- **[Search1API](https://github.com/fatwang2/search1api-mcp)** - One API for Search, Crawling, and Sitemaps
+- Tinybird Logo **[Tinybird](https://github.com/tinybirdco/mcp-tinybird)** - Interact with Tinybird serverless ClickHouse platform
 
 ### 🌎 Community Servers
 
@@ -59,72 +63,105 @@ A growing set of community-developed and maintained servers demonstrates various
 
 > **Note:** Community servers are **untested** and should be used at **your own risk**. They are not affiliated with or endorsed by Anthropic.
 
-- **[MCP Installer](https://github.com/anaisbetts/mcp-installer)** - This server is a server that installs other MCP servers for you.
-- **[NS Travel Information](https://github.com/r-huijts/ns-mcp-server)** - Access Dutch Railways (NS) real-time train travel information and disruptions through the official NS API.
-- **[Spotify](https://github.com/varunneal/spotify-mcp)** - This MCP allows an LLM to play and use Spotify.
-- **[Inoyu](https://github.com/sergehuber/inoyu-mcp-unomi-server)** - Interact with an Apache Unomi CDP customer data platform to retrieve and update customer profiles
-- **[Vega-Lite](https://github.com/isaacwasserman/mcp-vegalite-server)** - Generate visualizations from fetched data using the VegaLite format and renderer.
-- **[Snowflake](https://github.com/isaacwasserman/mcp-snowflake-server)** - This MCP server enables LLMs to interact with Snowflake databases, allowing for secure and controlled data operations.
-- **[MySQL](https://github.com/designcomputer/mysql_mcp_server)** (by DesignComputer) - MySQL database integration in Python with configurable access controls and schema inspection
-- **[MySQL](https://github.com/benborla/mcp-server-mysql)** (by benborla) - MySQL database integration in NodeJS with configurable access controls and schema inspection
-- **[MSSQL](https://github.com/aekanun2020/mcp-server/)** - MSSQL database integration with configurable access controls and schema inspection
+- **[AWS S3](https://github.com/aws-samples/sample-mcp-server-s3)** - A sample MCP server for AWS S3 that flexibly fetches objects from S3 such as PDF documents
+- **[AWS](https://github.com/rishikavikondala/mcp-server-aws)** - Perform operations on your AWS resources using an LLM
+- **[Airtable](https://github.com/domdomegg/airtable-mcp-server)** - Read and write access to [Airtable](https://airtable.com/) databases, with schema inspection.
+- **[Airtable](https://github.com/felores/airtable-mcp)** - Airtable Model Context Protocol Server.
+- **[AlphaVantage](https://github.com/calvernaz/alphavantage)** - MCP server for stock market data API [AlphaVantage](https://www.alphavantage.co)
+- **[Anki](https://github.com/scorzeth/anki-mcp-server)** - An MCP server for interacting with your [Anki](https://apps.ankiweb.net) decks and cards.
+- **[Any Chat Completions](https://github.com/pyroprompts/any-chat-completions-mcp)** - Interact with any OpenAI SDK Compatible Chat Completions API like OpenAI, Perplexity, Groq, xAI and many more.
+- **[Atlassian](https://github.com/sooperset/mcp-atlassian)** - Interact with Atlassian Cloud products (Confluence and Jira) including searching/reading Confluence spaces/pages, accessing Jira issues, and project metadata.
 - **[BigQuery](https://github.com/LucasHild/mcp-server-bigquery)** (by LucasHild) - This server enables LLMs to inspect database schemas and execute queries on BigQuery.
 - **[BigQuery](https://github.com/ergut/mcp-bigquery-server)** (by ergut) - Server implementation for Google BigQuery integration that enables direct BigQuery database access and querying capabilities
-- **[Todoist](https://github.com/abhiz123/todoist-mcp-server)** - Interact with Todoist to manage your tasks.
-- **[Tavily search](https://github.com/RamXX/mcp-tavily)** - An MCP server for Tavily's search & news API, with explicit site inclusions/exclusions
-- **[Linear](https://github.com/jerhadf/linear-mcp-server)** - Allows LLM to interact with Linear's API for project management, including searching, creating, and updating issues.
-- **[Playwright](https://github.com/executeautomation/mcp-playwright)** - This MCP Server will help you run browser automation and webscraping using Playwright
-- **[AWS](https://github.com/rishikavikondala/mcp-server-aws)** - Perform operations on your AWS resources using an LLM
-- **[LlamaCloud](https://github.com/run-llama/mcp-server-llamacloud)** (by marcusschiesser) - Integrate the data stored in a managed index on [LlamaCloud](https://cloud.llamaindex.ai/)
-- **[Any Chat Completions](https://github.com/pyroprompts/any-chat-completions-mcp)** - Interact with any OpenAI SDK Compatible Chat Completions API like OpenAI, Perplexity, Groq, xAI and many more.
-- **[Windows CLI](https://github.com/SimonB97/win-cli-mcp-server)** - MCP server for secure command-line interactions on Windows systems, enabling controlled access to PowerShell, CMD, and Git Bash shells.
-- **[OpenRPC](https://github.com/shanejonas/openrpc-mpc-server)** - Interact with and discover JSON-RPC APIs via [OpenRPC](https://open-rpc.org).
-- **[FireCrawl](https://github.com/vrknetha/mcp-server-firecrawl)** - Advanced web scraping with JavaScript rendering, PDF support, and smart rate limiting
-- **[AlphaVantage](https://github.com/calvernaz/alphavantage)** - MCP server for stock market data API [AlphaVantage](https://www.alphavantage.co)
+- **[ChatSum](https://github.com/mcpso/mcp-server-chatsum)** - Query and summarize chat messages with LLM, by [mcpso](https://mcp.so)
+- **[Chroma](https://github.com/privetin/chroma)** - Vector database server for semantic document search and metadata filtering, built on Chroma
+- **[Cloudinary](https://github.com/felores/cloudinary-mcp-server)** - Cloudinary Model Context Protocol Server to upload media to Cloudinary and get back the media link and details.
+- **[cognee-mcp](https://github.com/topoteretes/cognee-mcp-server)** - GraphRAG memory server with customizable ingestion, data processing and search
+- **[coin_api_mcp](https://github.com/longmans/coin_api_mcp)** - Provides access to [coinmarketcap](https://coinmarketcap.com/) cryptocurrency data.
+- **[Contentful-mcp](https://github.com/ivo-toby/contentful-mcp)** - Read, update, delete, publish content in your [Contentful](https://contentful.com) space(s) from this MCP Server.
+- **[Data Exploration](https://github.com/reading-plus-ai/mcp-server-data-exploration)** - MCP server for autonomous data exploration on .csv-based datasets, providing intelligent insights with minimal effort. NOTE: Will execute arbitrary Python code on your machine, please use with caution!
+- **[Dataset Viewer](https://github.com/privetin/dataset-viewer)** - Browse and analyze Hugging Face datasets with features like search, filtering, statistics, and data export
+- **[DevRev](https://github.com/kpsunil97/devrev-mcp-server)** - An MCP server to integrate with DevRev APIs to search through your DevRev Knowledge Graph where objects can be imported from different sources listed [here](https://devrev.ai/docs/import#available-sources).
+- **[Dify](https://github.com/YanxingLiu/dify-mcp-server)** - A simple implementation of an MCP server for Dify workflows.
 - **[Docker](https://github.com/ckreiling/mcp-server-docker)** - Integrate with Docker to manage containers, images, volumes, and networks.
+- **[Drupal](https://github.com/Omedia/mcp-server-drupal)** - Server for interacting with [Drupal](https://www.drupal.org/project/mcp) using STDIO transport layer.
+- **[Elasticsearch](https://github.com/cr7258/elasticsearch-mcp-server)** - MCP server implementation that provides Elasticsearch interaction.
+- **[Fetch](https://github.com/zcaceres/fetch-mcp)** - A server that flexibly fetches HTML, JSON, Markdown, or plaintext.
+- **[FireCrawl](https://github.com/vrknetha/mcp-server-firecrawl)** - Advanced web scraping with JavaScript rendering, PDF support, and smart rate limiting
+- **[FlightRadar24](https://github.com/sunsetcoder/flightradar24-mcp-server)** - A Claude Desktop MCP server that helps you track flights in real-time using Flightradar24 data.
+- **[Glean](https://github.com/longyi1207/glean-mcp-server)** - A server that uses Glean API to search and chat.
+- **[Google Calendar](https://github.com/v-3/google-calendar)** - Integration with Google Calendar to check schedules, find time, and add/delete events
+- **[Google Tasks](https://github.com/zcaceres/gtasks-mcp)** - Google Tasks API Model Context Protocol Server.
+- **[Home Assistant](https://github.com/tevonsb/homeassistant-mcp)** - Interact with [Home Assistant](https://www.home-assistant.io/) including viewing and controlling lights, switches, sensors, and all other Home Assistant entities.
+- **[HuggingFace Spaces](https://github.com/evalstate/mcp-hfspace)** - Server for using HuggingFace Spaces, supporting Open Source Image, Audio, Text Models and more. Claude Desktop mode for easy integration.
+- **[Inoyu](https://github.com/sergehuber/inoyu-mcp-unomi-server)** - Interact with an Apache Unomi CDP customer data platform to retrieve and update customer profiles
+- **[Keycloak MCP](https://github.com/ChristophEnglisch/keycloak-model-context-protocol)** - This MCP server enables natural language interaction with Keycloak for user and realm management including creating, deleting, and listing users and realms.
 - **[Kubernetes](https://github.com/Flux159/mcp-server-kubernetes)** - Connect to Kubernetes cluster and manage pods, deployments, and services.
+- **[Linear](https://github.com/jerhadf/linear-mcp-server)** - Allows LLM to interact with Linear's API for project management, including searching, creating, and updating issues.
+- **[LlamaCloud](https://github.com/run-llama/mcp-server-llamacloud)** (by marcusschiesser) - Integrate the data stored in a managed index on [LlamaCloud](https://cloud.llamaindex.ai/)
+- **[MCP Installer](https://github.com/anaisbetts/mcp-installer)** - An MCP server that installs other MCP servers for you.
+- **[mcp-k8s-go](https://github.com/strowk/mcp-k8s-go)** - Golang-based Kubernetes server for MCP to browse pods and their logs, events, namespaces and more. Built to be extensible.
+- **[MSSQL](https://github.com/aekanun2020/mcp-server/)** - MSSQL database integration with configurable access controls and schema inspection
+- **[Markdownify](https://github.com/zcaceres/mcp-markdownify-server)** - MCP to convert almost anything to Markdown (PPTX, HTML, PDF, YouTube Transcripts and more)
+- **[Minima](https://github.com/dmayboroda/minima)** - MCP server for RAG on local files
+- **[MongoDB](https://github.com/kiliczsh/mcp-mongo-server)** - A Model Context Protocol Server for MongoDB.
+- **[MySQL](https://github.com/benborla/mcp-server-mysql)** (by benborla) - MySQL database integration in NodeJS with configurable access controls and schema inspection
+- **[MySQL](https://github.com/designcomputer/mysql_mcp_server)** (by DesignComputer) - MySQL database integration in Python with configurable access controls and schema inspection
+- **[NS Travel Information](https://github.com/r-huijts/ns-mcp-server)** - Access Dutch Railways (NS) real-time train travel information and disruptions through the official NS API.
+- **[Notion](https://github.com/suekou/mcp-notion-server)** (by suekou) - Interact with Notion API.
+- **[Notion](https://github.com/v-3/notion-server)** (by v-3) - Notion MCP integration. Search, Read, Update, and Create pages through Claude chat.
+- **[oatpp-mcp](https://github.com/oatpp/oatpp-mcp)** - C++ MCP integration for Oat++. Use [Oat++](https://oatpp.io) to build MCP servers.
+- **[Obsidian Markdown Notes](https://github.com/calclavia/mcp-obsidian)** - Read and search through your Obsidian vault or any directory containing Markdown notes
 - **[OpenAPI](https://github.com/snaggle-ai/openapi-mcp-server)** - Interact with [OpenAPI](https://www.openapis.org/) APIs.
+- **[OpenCTI](https://github.com/Spathodea-Network/opencti-mcp)** - Interact with OpenCTI platform to retrieve threat intelligence data including reports, indicators, malware and threat actors.
+- **[OpenRPC](https://github.com/shanejonas/openrpc-mpc-server)** - Interact with and discover JSON-RPC APIs via [OpenRPC](https://open-rpc.org).
 - **[Pandoc](https://github.com/vivekVells/mcp-pandoc)** - MCP server for seamless document format conversion using Pandoc, supporting Markdown, HTML, and plain text, with other formats like PDF, csv and docx in development.
 - **[Pinecone](https://github.com/sirmews/mcp-pinecone)** - MCP server for searching and uploading records to Pinecone. Allows for simple RAG features, leveraging Pinecone's Inference API.
-- **[HuggingFace Spaces](https://github.com/evalstate/mcp-hfspace)** - Server for using HuggingFace Spaces, supporting Open Source Image, Audio, Text Models and more. Claude Desktop mode for easy integration.
-- **[ChatSum](https://github.com/chatmcp/mcp-server-chatsum)** - Query and Summarize chat messages with LLM. by [mcpso](https://mcp.so)
-- **[Rememberizer AI](https://github.com/skydeckai/mcp-server-rememberizer)** - An MCP server designed for interacting with the Rememberizer data source, facilitating enhanced knowledge retrieval.
-- **[FlightRadar24](https://github.com/sunsetcoder/flightradar24-mcp-server)** - A Claude Desktop MCP server that helps you track flights in real-time using Flightradar24 data.
-- **[X (Twitter)](https://github.com/vidhupv/x-mcp)** (by vidhupv) - Create, manage and publish X/Twitter posts directly through Claude chat.
-- **[X (Twitter)](https://github.com/EnesCinr/twitter-mcp)** (by EnesCinr) - Interact with twitter API. Post tweets and search for tweets by query.
+- **[Placid.app](https://github.com/felores/placid-mcp-server)** - Generate image and video creatives using Placid.app templates
+- **[Playwright](https://github.com/executeautomation/mcp-playwright)** - This MCP Server will help you run browser automation and webscraping using Playwright
+- **[Postman](https://github.com/shannonlal/mcp-postman)** - MCP server for running Postman Collections locally via Newman. Allows for simple execution of Postman Server and returns the results of whether the collection passed all the tests.
 - **[RAG Web Browser](https://github.com/apify/mcp-server-rag-web-browser)** - An MCP server for Apify's RAG Web Browser Actor to perform web searches, scrape URLs, and return content in Markdown.
-- **[XMind](https://github.com/apeyroux/mcp-xmind)** - Read and search through your XMind directory containing XMind files.
-- **[oatpp-mcp](https://github.com/oatpp/oatpp-mcp)** - C++ MCP integration for Oat++. Use [Oat++](https://oatpp.io) to build MCP servers.
-- **[Contentful-mcp](https://github.com/ivo-toby/contentful-mcp)** - Read, update, delete, publish content in your [Contentful](https://contentful.com) space(s) from this MCP Server.
-- **[Home Assistant](https://github.com/tevonsb/homeassistant-mcp)** - Interact with [Home Assistant](https://www.home-assistant.io/) including viewing and controlling lights, switches, sensors, and all other Home Assistant entities.
-- **[cognee-mcp](https://github.com/topoteretes/cognee-mcp-server)** - GraphRAG memory server with customizable ingestion, data processing and search
-- **[Airtable](https://github.com/domdomegg/airtable-mcp-server)** - Read and write access to [Airtable](https://airtable.com/) databases, with schema inspection.
-- **[mcp-k8s-go](https://github.com/strowk/mcp-k8s-go)** - Golang-based Kubernetes server for MCP to browse pods and their logs, events, namespaces and more. Built to be extensible.
-- **[Notion](https://github.com/v-3/notion-server)** (by v-3) - Notion MCP integration. Search, Read, Update, and Create pages through Claude chat.
-- **[Notion](https://github.com/suekou/mcp-notion-server)** (by suekou) - Interact with Notion API.
+- **[Rememberizer AI](https://github.com/skydeckai/mcp-server-rememberizer)** - An MCP server designed for interacting with the Rememberizer data source, facilitating enhanced knowledge retrieval.
+- **[Salesforce MCP](https://github.com/smn2gnt/MCP-Salesforce)** - Interact with Salesforce Data and Metadata
+- **[Scholarly](https://github.com/adityak74/mcp-scholarly)** - An MCP server to search for scholarly and academic articles.
+- **[Snowflake](https://github.com/isaacwasserman/mcp-snowflake-server)** - This MCP server enables LLMs to interact with Snowflake databases, allowing for secure and controlled data operations.
+- **[Spotify](https://github.com/varunneal/spotify-mcp)** - This MCP allows an LLM to play and use Spotify.
 - **[TMDB](https://github.com/Laksh-star/mcp-server-tmdb)** - This MCP server integrates with The Movie Database (TMDB) API to provide movie information, search capabilities, and recommendations.
+- **[Tavily search](https://github.com/RamXX/mcp-tavily)** - An MCP server for Tavily's search & news API, with explicit site inclusions/exclusions
+- **[Todoist](https://github.com/abhiz123/todoist-mcp-server)** - Interact with Todoist to manage your tasks.
+- **[Vega-Lite](https://github.com/isaacwasserman/mcp-vegalite-server)** - Generate visualizations from fetched data using the VegaLite format and renderer.
+- **[Windows CLI](https://github.com/SimonB97/win-cli-mcp-server)** - MCP server for secure command-line interactions on Windows systems, enabling controlled access to PowerShell, CMD, and Git Bash shells.
+- **[X (Twitter)](https://github.com/EnesCinr/twitter-mcp)** (by EnesCinr) - Interact with twitter API. Post tweets and search for tweets by query.
+- **[X (Twitter)](https://github.com/vidhupv/x-mcp)** (by vidhupv) - Create, manage and publish X/Twitter posts directly through Claude chat.
+- **[XMind](https://github.com/apeyroux/mcp-xmind)** - Read and search through your XMind directory containing XMind files.
+
+## 📚 Frameworks
+
+These are high-level frameworks that make it easier to build MCP servers.
+
+- [EasyMCP](https://github.com/zcaceres/easy-mcp/) (TypeScript)
+- [FastMCP](https://github.com/punkpeye/fastmcp) (TypeScript)
 
 ## 📚 Resources
 
 Additional resources on MCP.
+- **[AiMCP](https://www.aimcp.info)** - A collection of MCP clients and servers to find the right MCP tools by **[Hekmon](https://github.com/hekmon8)**
+- **[Awesome Crypto MCP Servers by badkk](https://github.com/badkk/awesome-crypto-mcp-servers)** - A curated list of MCP servers by **[Luke Fan](https://github.com/badkk)**
+- **[Awesome MCP Servers by appcypher](https://github.com/appcypher/awesome-mcp-servers)** - A curated list of MCP servers by **[Stephen Akinyemi](https://github.com/appcypher)**
 - **[Awesome MCP Servers by punkpeye](https://github.com/punkpeye/awesome-mcp-servers)** (**[website](https://glama.ai/mcp/servers)**) - A curated list of MCP servers by **[Frank Fiegel](https://github.com/punkpeye)**
 - **[Awesome MCP Servers by wong2](https://github.com/wong2/awesome-mcp-servers)** (**[website](https://mcpservers.org)**) - A curated list of MCP servers by **[wong2](https://github.com/wong2)**
-- **[Awesome MCP Servers by appcypher](https://github.com/appcypher/awesome-mcp-servers)** - A curated list of MCP servers by **[Stephen Akinyemi](https://github.com/appcypher)**
-- **[Open-Sourced MCP Servers Directory](https://github.com/chatmcp/mcp-directory)** - A curated list of MCP servers by **[mcpso](https://mcp.so)**
 - **[Discord Server](https://glama.ai/mcp/discord)** – A community discord server dedicated to MCP by **[Frank Fiegel](https://github.com/punkpeye)**
-- **[Smithery](https://smithery.ai/)** - A registry of MCP servers to find the right tools for your LLM agents by **[Henry Mao](https://github.com/calclavia)**
-- **[mcp-get](https://mcp-get.com)** - Command line tool for installing and managing MCP servers by **[Michael Latman](https://github.com/michaellatman)**
-- **[mcp-cli](https://github.com/wong2/mcp-cli)** - A CLI inspector for the Model Context Protocol by **[wong2](https://github.com/wong2)**
-- **[r/mcp](https://www.reddit.com/r/mcp)** – A Reddit community dedicated to MCP by **[Frank Fiegel](https://github.com/punkpeye)**
+- **[MCP Badges](https://github.com/mcpx-dev/mcp-badges)** – Quickly highlight your MCP project with clear, eye-catching badges, by **[Ironben](https://github.com/nanbingxyz)**
 - **[MCP X Community](https://x.com/i/communities/1861891349609603310)** – A X community for MCP by **[Xiaoyi](https://x.com/chxy)**
+- **[mcp-cli](https://github.com/wong2/mcp-cli)** - A CLI inspector for the Model Context Protocol by **[wong2](https://github.com/wong2)**
+- **[mcp-get](https://mcp-get.com)** - Command line tool for installing and managing MCP servers by **[Michael Latman](https://github.com/michaellatman)**
 - **[mcp-manager](https://github.com/zueai/mcp-manager)** - Simple Web UI to install and manage MCP servers for Claude Desktop by **[Zue](https://github.com/zueai)**
 - **[MCPHub](https://github.com/Jeamee/MCPHub-Desktop)** – An Open Source MacOS & Windows GUI Desktop app for discovering, installing and managing MCP servers by **[Jeamee](https://github.com/jeamee)**
+- **[Open-Sourced MCP Servers Directory](https://github.com/chatmcp/mcp-directory)** - A curated list of MCP servers by **[mcpso](https://mcp.so)**
+- **[PulseMCP](https://www.pulsemcp.com)** ([API](https://www.pulsemcp.com/api)) - Community hub & weekly newsletter for discovering MCP servers, clients, articles, and news by **[Tadas Antanavicius](https://github.com/tadasant)**, **[Mike Coughlin](https://github.com/macoughl)**, and **[Ravina Patel](https://github.com/ravinahp)**
+- **[r/mcp](https://www.reddit.com/r/mcp)** – A Reddit community dedicated to MCP by **[Frank Fiegel](https://github.com/punkpeye)**
+- **[Smithery](https://smithery.ai/)** - A registry of MCP servers to find the right tools for your LLM agents by **[Henry Mao](https://github.com/calclavia)**
 
 ## 🚀 Getting Started
diff --git a/package-lock.json b/package-lock.json
index 0dcbfbbc..ce8e521f 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -2497,9 +2497,10 @@
       "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ=="
     },
     "node_modules/express": {
-      "version": "4.21.1",
-      "resolved": "https://registry.npmjs.org/express/-/express-4.21.1.tgz",
-      "integrity": "sha512-YSFlK1Ee0/GC8QaO91tHcDxJiE/X4FbpAyQWkxAvG6AXCuR65YzK8ua6D9hvi/TzUfZMpc+BwuM1IPw8fmQBiQ==",
+      "version": "4.21.2",
+      "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz",
+      "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==",
+      "license": "MIT",
       "dependencies": {
         "accepts": "~1.3.8",
         "array-flatten": "1.1.1",
@@ -2520,7 +2521,7 @@
         "methods": "~1.1.2",
         "on-finished": "2.4.1",
         "parseurl": "~1.3.3",
-        "path-to-regexp": "0.1.10",
+        "path-to-regexp": "0.1.12",
         "proxy-addr": "~2.0.7",
         "qs": "6.13.0",
         "range-parser": "~1.2.1",
@@ -2535,6 +2536,10 @@
       },
       "engines": {
         "node": ">= 0.10.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/express"
       }
     },
     "node_modules/extend": {
@@ -3770,9 +3775,10 @@
       "license": "ISC"
     },
     "node_modules/path-to-regexp": {
-      "version": "0.1.10",
-      "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.10.tgz",
-      "integrity": "sha512-7lf7qcQidTku0Gu3YDPc8DJ1q7OOucfa/BSsIwjuh56VU7katFvuM8hULfkwB3Fns/rsVF7PwPKVw1sl5KQS9w=="
+      "version": "0.1.12",
+      "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz",
+      "integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==",
+      "license": "MIT"
     },
     "node_modules/pend": {
       "version": "1.2.0",
diff --git a/scripts/release.py b/scripts/release.py
new file mode 100755
index 00000000..05d76c0a
--- /dev/null
+++ b/scripts/release.py
@@ -0,0 +1,210 @@
+#!/usr/bin/env -S uv run --script
+# /// script
+# requires-python = ">=3.12"
+# dependencies = [
+#     "click>=8.1.8",
+#     "tomlkit>=0.13.2"
+# ]
+# ///
+import sys
+import re
+import click
+from pathlib import Path
+import json
+import tomlkit
+import datetime
+import subprocess
+from dataclasses import dataclass
+from typing import Any, Iterator, NewType, Protocol
+
+
+Version = NewType("Version", str)
+GitHash = NewType("GitHash", str)
+
+
+class GitHashParamType(click.ParamType):
+    name = "git_hash"
+
+    def convert(
+        self, value: Any, param: click.Parameter | None, ctx: click.Context | None
+    ) -> GitHash | None:
+        if value is None:
+            return None
+
+        if not (8 <= len(value) <= 40):
+            self.fail(f"Git hash must be between 8 and 40 characters, got {len(value)}")
+
+        if not re.match(r"^[0-9a-fA-F]+$", value):
+            self.fail("Git hash must contain only hex digits (0-9, a-f)")
+
+        try:
+            # Verify hash exists in repo
+            subprocess.run(
+                ["git", "rev-parse", "--verify", value], check=True, capture_output=True
+            )
+        except subprocess.CalledProcessError:
+            self.fail(f"Git hash {value} not found in repository")
+
+        return GitHash(value.lower())
+
+
+GIT_HASH = GitHashParamType()
+
+
+class Package(Protocol):
+    path: Path
+
+    def package_name(self) -> str: ...
+
+    def update_version(self, version: Version) -> None: ...
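+
+# NpmPackage and PyPiPackage below satisfy this Protocol structurally (no
+# inheritance needed), so find_changed_packages() can yield either kind and
+# callers rely only on package_name() / update_version().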
+
+
+@dataclass
+class NpmPackage:
+    path: Path
+
+    def package_name(self) -> str:
+        with open(self.path / "package.json", "r") as f:
+            return json.load(f)["name"]
+
+    def update_version(self, version: Version):
+        with open(self.path / "package.json", "r+") as f:
+            data = json.load(f)
+            data["version"] = version
+            f.seek(0)
+            json.dump(data, f, indent=2)
+            f.truncate()
+
+
+@dataclass
+class PyPiPackage:
+    path: Path
+
+    def package_name(self) -> str:
+        with open(self.path / "pyproject.toml") as f:
+            toml_data = tomlkit.parse(f.read())
+            name = toml_data.get("project", {}).get("name")
+            if not name:
+                raise Exception("No name in pyproject.toml project section")
+            return str(name)
+
+    def update_version(self, version: Version):
+        # Update version in pyproject.toml
+        with open(self.path / "pyproject.toml") as f:
+            data = tomlkit.parse(f.read())
+            data["project"]["version"] = version
+
+        with open(self.path / "pyproject.toml", "w") as f:
+            f.write(tomlkit.dumps(data))
+
+
+def has_changes(path: Path, git_hash: GitHash) -> bool:
+    """Check if any files changed between current state and git hash"""
+    try:
+        output = subprocess.run(
+            ["git", "diff", "--name-only", git_hash, "--", "."],
+            cwd=path,
+            check=True,
+            capture_output=True,
+            text=True,
+        )
+
+        changed_files = [Path(f) for f in output.stdout.splitlines()]
+        relevant_files = [f for f in changed_files if f.suffix in [".py", ".ts"]]
+        return len(relevant_files) >= 1
+    except subprocess.CalledProcessError:
+        return False
+
+
+def gen_version() -> Version:
+    """Generate version based on current date"""
+    now = datetime.datetime.now()
+    return Version(f"{now.year}.{now.month}.{now.day}")
+
+
+def find_changed_packages(directory: Path, git_hash: GitHash) -> Iterator[Package]:
+    for path in directory.glob("*/package.json"):
+        if has_changes(path.parent, git_hash):
+            yield NpmPackage(path.parent)
+    for path in directory.glob("*/pyproject.toml"):
+        if has_changes(path.parent, git_hash):
+            yield PyPiPackage(path.parent)
+
+
+@click.group()
+def cli():
+    pass
+
+
+@cli.command("update-packages")
+@click.option(
+    "--directory", type=click.Path(exists=True, path_type=Path), default=Path.cwd()
+)
+@click.argument("git_hash", type=GIT_HASH)
+def update_packages(directory: Path, git_hash: GitHash) -> int:
+    # Find packages that changed since the given hash and bump their versions
+    path = directory.resolve(strict=True)
+    version = gen_version()
+
+    for package in find_changed_packages(path, git_hash):
+        name = package.package_name()
+        package.update_version(version)
+
+        click.echo(f"{name}@{version}")
+
+    return 0
+
+
+@cli.command("generate-notes")
+@click.option(
+    "--directory", type=click.Path(exists=True, path_type=Path), default=Path.cwd()
+)
+@click.argument("git_hash", type=GIT_HASH)
+def generate_notes(directory: Path, git_hash: GitHash) -> int:
+    # List the packages that changed since the given hash as release notes
+    path = directory.resolve(strict=True)
+    version = gen_version()
+
+    click.echo(f"# Release : v{version}")
+    click.echo("")
+    click.echo("## Updated packages")
+    for package in find_changed_packages(path, git_hash):
+        name = package.package_name()
+        click.echo(f"- {name}@{version}")
+
+    return 0
+
+
+@cli.command("generate-version")
+def generate_version() -> int:
+    # Print the date-based version used for this release
+    click.echo(gen_version())
+    return 0
+
+
+@cli.command("generate-matrix")
+@click.option(
+    "--directory", type=click.Path(exists=True, path_type=Path), default=Path.cwd()
+)
+@click.option("--npm", is_flag=True, default=False)
+@click.option("--pypi", is_flag=True, default=False)
+@click.argument("git_hash", type=GIT_HASH)
+def generate_matrix(directory: Path, git_hash: GitHash, pypi: bool, npm: bool) -> int:
+    # Emit the changed package directories as a JSON list for the CI matrix
+    path = directory.resolve(strict=True)
+
+    changes = []
+    for package in find_changed_packages(path, git_hash):
+        pkg = package.path.relative_to(path)
+        if npm and isinstance(package, NpmPackage):
+            changes.append(str(pkg))
+        if pypi and isinstance(package, PyPiPackage):
+            changes.append(str(pkg))
+
+    click.echo(json.dumps(changes))
+    return 0
+
+
+if __name__ == "__main__":
+    sys.exit(cli())
diff --git a/src/aws-kb-retrieval-server/Dockerfile b/src/aws-kb-retrieval-server/Dockerfile
index a173a606..80cbb388 100644
--- a/src/aws-kb-retrieval-server/Dockerfile
+++ b/src/aws-kb-retrieval-server/Dockerfile
@@ -1,4 +1,4 @@
-FROM node:22.12-alpine as builder
+FROM node:22.12-alpine AS builder
 
 COPY src/aws-kb-retrieval-server /app
 COPY tsconfig.json /tsconfig.json
diff --git a/src/brave-search/Dockerfile b/src/brave-search/Dockerfile
index fefeca68..7749c243 100644
--- a/src/brave-search/Dockerfile
+++ b/src/brave-search/Dockerfile
@@ -1,4 +1,4 @@
-FROM node:22.12-alpine as builder
+FROM node:22.12-alpine AS builder
 
 # Must be entire project because `prepare` script is run during `npm install` and requires all files.
 COPY src/brave-search /app
diff --git a/src/everart/Dockerfile b/src/everart/Dockerfile
index c27f9542..d853b8ef 100644
--- a/src/everart/Dockerfile
+++ b/src/everart/Dockerfile
@@ -1,4 +1,4 @@
-FROM node:22.12-alpine as builder
+FROM node:22.12-alpine AS builder
 
 COPY src/everart /app
 COPY tsconfig.json /tsconfig.json
diff --git a/src/everything/Dockerfile b/src/everything/Dockerfile
index 56267937..6729298b 100644
--- a/src/everything/Dockerfile
+++ b/src/everything/Dockerfile
@@ -1,4 +1,4 @@
-FROM node:22.12-alpine as builder
+FROM node:22.12-alpine AS builder
 
 COPY src/everything /app
 COPY tsconfig.json /tsconfig.json
diff --git a/src/everything/everything.ts b/src/everything/everything.ts
index e89dd192..84beb942 100644
--- a/src/everything/everything.ts
+++ b/src/everything/everything.ts
@@ -411,7 +411,7 @@ export const createServer = () => {
         maxTokens,
       );
       return {
-        content: [{ type: "text", text: `LLM sampling result: ${result}` }],
+        content: [{ type: "text", text: `LLM sampling result: ${result.content.text}` }],
       };
     }
diff --git a/src/fetch/pyproject.toml b/src/fetch/pyproject.toml
index 1d43cae8..ed76fdcd 100644
--- a/src/fetch/pyproject.toml
+++ b/src/fetch/pyproject.toml
@@ -17,7 +17,7 @@ classifiers = [
 ]
 dependencies = [
     "markdownify>=0.13.1",
-    "mcp>=1.0.0",
+    "mcp>=1.1.3",
     "protego>=0.3.1",
     "pydantic>=2.0.0",
     "readabilipy>=0.2.0",
diff --git a/src/fetch/src/mcp_server_fetch/server.py b/src/fetch/src/mcp_server_fetch/server.py
index 6e831ff6..775d79c8 100644
--- a/src/fetch/src/mcp_server_fetch/server.py
+++ b/src/fetch/src/mcp_server_fetch/server.py
@@ -7,6 +7,7 @@ from mcp.shared.exceptions import McpError
 from mcp.server import Server
 from mcp.server.stdio import stdio_server
 from mcp.types import (
+    ErrorData,
     GetPromptResult,
     Prompt,
     PromptArgument,
@@ -79,15 +80,15 @@ async def check_may_autonomously_fetch_url(url: str, user_agent: str) -> None:
             headers={"User-Agent": user_agent},
         )
     except HTTPError:
-        raise McpError(
-            INTERNAL_ERROR,
-            f"Failed to fetch robots.txt {robot_txt_url} due to a connection issue",
-        )
+        raise McpError(ErrorData(
+            code=INTERNAL_ERROR,
+            message=f"Failed to fetch robots.txt {robot_txt_url} due to a connection issue",
+        ))
     if response.status_code in (401, 403):
-        raise McpError(
-            INTERNAL_ERROR,
-            f"When fetching robots.txt ({robot_txt_url}), received status {response.status_code} so assuming that autonomous fetching is not allowed, the user can try manually fetching by using the fetch prompt",
-        )
+        raise McpError(ErrorData(
+            code=INTERNAL_ERROR,
+            message=f"When fetching robots.txt ({robot_txt_url}), received status {response.status_code} so assuming that autonomous fetching is not allowed; the user can try manually fetching by using the fetch prompt",
+        ))
     elif 400 <= response.status_code < 500:
         return
     robot_txt = response.text
@@ -96,15 +97,15 @@ async def check_may_autonomously_fetch_url(url: str, user_agent: str) -> None:
     )
     robot_parser = Protego.parse(processed_robot_txt)
     if not robot_parser.can_fetch(str(url), user_agent):
-        raise McpError(
-            INTERNAL_ERROR,
-            f"The sites robots.txt ({robot_txt_url}), specifies that autonomous fetching of this page is not allowed, "
+        raise McpError(ErrorData(
+            code=INTERNAL_ERROR,
+            message=f"The site's robots.txt ({robot_txt_url}) specifies that autonomous fetching of this page is not allowed, "
             f"{user_agent}\n"
             f"{url}"
             f"\n{robot_txt}\n\n"
             f"The assistant must let the user know that it failed to view the page. The assistant may provide further guidance based on the above information.\n"
             f"The assistant can tell the user that they can try manually fetching the page by using the fetch prompt within their UI.",
-        )
+        ))
 
 
 async def fetch_url(
@@ -124,12 +125,12 @@ async def fetch_url(
             timeout=30,
         )
     except HTTPError as e:
-        raise McpError(INTERNAL_ERROR, f"Failed to fetch {url}: {e!r}")
+        raise McpError(ErrorData(code=INTERNAL_ERROR, message=f"Failed to fetch {url}: {e!r}"))
     if response.status_code >= 400:
-        raise McpError(
-            INTERNAL_ERROR,
-            f"Failed to fetch {url} - status code {response.status_code}",
-        )
+        raise McpError(ErrorData(
+            code=INTERNAL_ERROR,
+            message=f"Failed to fetch {url} - status code {response.status_code}",
+        ))
 
     page_raw = response.text
 
@@ -221,11 +222,11 @@ Although originally you did not have internet access, and were advised to refuse
         try:
             args = Fetch(**arguments)
         except ValueError as e:
-            raise McpError(INVALID_PARAMS, str(e))
+            raise McpError(ErrorData(code=INVALID_PARAMS, message=str(e)))
 
         url = str(args.url)
         if not url:
-            raise McpError(INVALID_PARAMS, "URL is required")
+            raise McpError(ErrorData(code=INVALID_PARAMS, message="URL is required"))
 
         if not ignore_robots_txt:
             await check_may_autonomously_fetch_url(url, user_agent_autonomous)
@@ -233,15 +234,27 @@ Although originally you did not have internet access, and were advised to refuse
         content, prefix = await fetch_url(
             url, user_agent_autonomous, force_raw=args.raw
         )
-        if len(content) > args.max_length:
-            content = content[args.start_index : args.start_index + args.max_length]
-            content += f"\n\nContent truncated. Call the fetch tool with a start_index of {args.start_index + args.max_length} to get more content."
+        original_length = len(content)
+        if args.start_index >= original_length:
+            content = "No more content available."
+        else:
+            truncated_content = content[args.start_index : args.start_index + args.max_length]
+            if not truncated_content:
+                content = "No more content available."
+            else:
+                content = truncated_content
+                actual_content_length = len(truncated_content)
+                remaining_content = original_length - (args.start_index + actual_content_length)
+                # Only add the prompt to continue fetching if there is still remaining content
+                if actual_content_length == args.max_length and remaining_content > 0:
+                    next_start = args.start_index + actual_content_length
+                    content += f"\n\nContent truncated. Call the fetch tool with a start_index of {next_start} to get more content."
         return [TextContent(type="text", text=f"{prefix}Contents of {url}:\n{content}")]
 
     @server.get_prompt()
     async def get_prompt(name: str, arguments: dict | None) -> GetPromptResult:
         if not arguments or "url" not in arguments:
-            raise McpError(INVALID_PARAMS, "URL is required")
+            raise McpError(ErrorData(code=INVALID_PARAMS, message="URL is required"))
 
         url = arguments["url"]
diff --git a/src/fetch/uv.lock b/src/fetch/uv.lock
index bb114910..219214ff 100644
--- a/src/fetch/uv.lock
+++ b/src/fetch/uv.lock
@@ -48,71 +48,63 @@ wheels = [
 
 [[package]]
 name = "charset-normalizer"
-version = "3.4.0"
+version = "3.4.1"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/f2/4f/e1808dc01273379acc506d18f1504eb2d299bd4131743b9fc54d7be4df1e/charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e", size = 106620 }
+sdist = { url = "https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188 }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/69/8b/825cc84cf13a28bfbcba7c416ec22bf85a9584971be15b21dd8300c65b7f/charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6", size = 196363 },
-    { url = "https://files.pythonhosted.org/packages/23/81/d7eef6a99e42c77f444fdd7bc894b0ceca6c3a95c51239e74a722039521c/charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b", size = 125639 },
-    { url = "https://files.pythonhosted.org/packages/21/67/b4564d81f48042f520c948abac7079356e94b30cb8ffb22e747532cf469d/charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99", size = 120451 },
-    { url = "https://files.pythonhosted.org/packages/c2/72/12a7f0943dd71fb5b4e7b55c41327ac0a1663046a868ee4d0d8e9c369b85/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca", size = 140041 },
-    { url = "https://files.pythonhosted.org/packages/67/56/fa28c2c3e31217c4c52158537a2cf5d98a6c1e89d31faf476c89391cd16b/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d", size = 150333 },
-    { url = "https://files.pythonhosted.org/packages/f9/d2/466a9be1f32d89eb1554cf84073a5ed9262047acee1ab39cbaefc19635d2/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7", size = 142921 },
-    { url = "https://files.pythonhosted.org/packages/f8/01/344ec40cf5d85c1da3c1f57566c59e0c9b56bcc5566c08804a95a6cc8257/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3", size = 144785 },
-    { url = "https://files.pythonhosted.org/packages/73/8b/2102692cb6d7e9f03b9a33a710e0164cadfce312872e3efc7cfe22ed26b4/charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907", size = 146631 },
-    { url = "https://files.pythonhosted.org/packages/d8/96/cc2c1b5d994119ce9f088a9a0c3ebd489d360a2eb058e2c8049f27092847/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b", size = 140867 },
-    { url = "https://files.pythonhosted.org/packages/c9/27/cde291783715b8ec30a61c810d0120411844bc4c23b50189b81188b273db/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912", size = 149273 },
-    { url = "https://files.pythonhosted.org/packages/3a/a4/8633b0fc1a2d1834d5393dafecce4a1cc56727bfd82b4dc18fc92f0d3cc3/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95", size = 152437 },
-    { url = "https://files.pythonhosted.org/packages/64/ea/69af161062166b5975ccbb0961fd2384853190c70786f288684490913bf5/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e", size = 150087 },
-    { url = "https://files.pythonhosted.org/packages/3b/fd/e60a9d9fd967f4ad5a92810138192f825d77b4fa2a557990fd575a47695b/charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe", size = 145142 },
-    { url = "https://files.pythonhosted.org/packages/6d/02/8cb0988a1e49ac9ce2eed1e07b77ff118f2923e9ebd0ede41ba85f2dcb04/charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc", size = 94701 },
-    { url = "https://files.pythonhosted.org/packages/d6/20/f1d4670a8a723c46be695dff449d86d6092916f9e99c53051954ee33a1bc/charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749", size = 102191 },
-    { url = "https://files.pythonhosted.org/packages/9c/61/73589dcc7a719582bf56aae309b6103d2762b526bffe189d635a7fcfd998/charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c", size = 193339 },
-    { url = "https://files.pythonhosted.org/packages/77/d5/8c982d58144de49f59571f940e329ad6e8615e1e82ef84584c5eeb5e1d72/charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944", size = 124366 },
-    { url = "https://files.pythonhosted.org/packages/bf/19/411a64f01ee971bed3231111b69eb56f9331a769072de479eae7de52296d/charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee", size = 118874 },
-    { url = "https://files.pythonhosted.org/packages/4c/92/97509850f0d00e9f14a46bc751daabd0ad7765cff29cdfb66c68b6dad57f/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c", size = 138243 },
-    { url = "https://files.pythonhosted.org/packages/e2/29/d227805bff72ed6d6cb1ce08eec707f7cfbd9868044893617eb331f16295/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6", size = 148676 },
-    { url = "https://files.pythonhosted.org/packages/13/bc/87c2c9f2c144bedfa62f894c3007cd4530ba4b5351acb10dc786428a50f0/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea", size = 141289 },
-    { url = "https://files.pythonhosted.org/packages/eb/5b/6f10bad0f6461fa272bfbbdf5d0023b5fb9bc6217c92bf068fa5a99820f5/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc", size = 142585 },
-    { url = "https://files.pythonhosted.org/packages/3b/a0/a68980ab8a1f45a36d9745d35049c1af57d27255eff8c907e3add84cf68f/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5", size = 144408 },
-    { url = "https://files.pythonhosted.org/packages/d7/a1/493919799446464ed0299c8eef3c3fad0daf1c3cd48bff9263c731b0d9e2/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594", size = 139076 },
-    { url = "https://files.pythonhosted.org/packages/fb/9d/9c13753a5a6e0db4a0a6edb1cef7aee39859177b64e1a1e748a6e3ba62c2/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c", size = 146874 },
-    { url = "https://files.pythonhosted.org/packages/75/d2/0ab54463d3410709c09266dfb416d032a08f97fd7d60e94b8c6ef54ae14b/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365", size = 150871 },
-    { url = "https://files.pythonhosted.org/packages/8d/c9/27e41d481557be53d51e60750b85aa40eaf52b841946b3cdeff363105737/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129", size = 148546 },
-    { url = "https://files.pythonhosted.org/packages/ee/44/4f62042ca8cdc0cabf87c0fc00ae27cd8b53ab68be3605ba6d071f742ad3/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236", size = 143048 },
-    { url = "https://files.pythonhosted.org/packages/01/f8/38842422988b795220eb8038745d27a675ce066e2ada79516c118f291f07/charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99", size = 94389 },
-    { url = "https://files.pythonhosted.org/packages/0b/6e/b13bd47fa9023b3699e94abf565b5a2f0b0be6e9ddac9812182596ee62e4/charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27", size = 101752 },
-    { url = "https://files.pythonhosted.org/packages/d3/0b/4b7a70987abf9b8196845806198975b6aab4ce016632f817ad758a5aa056/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6", size = 194445 },
-    { url = "https://files.pythonhosted.org/packages/50/89/354cc56cf4dd2449715bc9a0f54f3aef3dc700d2d62d1fa5bbea53b13426/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf", size = 125275 },
-    { url = "https://files.pythonhosted.org/packages/fa/44/b730e2a2580110ced837ac083d8ad222343c96bb6b66e9e4e706e4d0b6df/charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db", size = 119020 },
-    { url = "https://files.pythonhosted.org/packages/9d/e4/9263b8240ed9472a2ae7ddc3e516e71ef46617fe40eaa51221ccd4ad9a27/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1", size = 139128 },
-    { url = "https://files.pythonhosted.org/packages/6b/e3/9f73e779315a54334240353eaea75854a9a690f3f580e4bd85d977cb2204/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03", size = 149277 },
-    { url = "https://files.pythonhosted.org/packages/1a/cf/f1f50c2f295312edb8a548d3fa56a5c923b146cd3f24114d5adb7e7be558/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284", size = 142174 },
-    { url = "https://files.pythonhosted.org/packages/16/92/92a76dc2ff3a12e69ba94e7e05168d37d0345fa08c87e1fe24d0c2a42223/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15", size = 143838 },
-    { url = "https://files.pythonhosted.org/packages/a4/01/2117ff2b1dfc61695daf2babe4a874bca328489afa85952440b59819e9d7/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8", size = 146149 },
-    { url = "https://files.pythonhosted.org/packages/f6/9b/93a332b8d25b347f6839ca0a61b7f0287b0930216994e8bf67a75d050255/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2", size = 140043 },
-    { url = "https://files.pythonhosted.org/packages/ab/f6/7ac4a01adcdecbc7a7587767c776d53d369b8b971382b91211489535acf0/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719", size = 148229 },
-    { url = "https://files.pythonhosted.org/packages/9d/be/5708ad18161dee7dc6a0f7e6cf3a88ea6279c3e8484844c0590e50e803ef/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631", size = 151556 },
-    { url = "https://files.pythonhosted.org/packages/5a/bb/3d8bc22bacb9eb89785e83e6723f9888265f3a0de3b9ce724d66bd49884e/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b", size = 149772 },
-    { url = "https://files.pythonhosted.org/packages/f7/fa/d3fc622de05a86f30beea5fc4e9ac46aead4731e73fd9055496732bcc0a4/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565", size = 144800 },
-    { url = "https://files.pythonhosted.org/packages/9a/65/bdb9bc496d7d190d725e96816e20e2ae3a6fa42a5cac99c3c3d6ff884118/charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7", size = 94836 },
-    { url = "https://files.pythonhosted.org/packages/3e/67/7b72b69d25b89c0b3cea583ee372c43aa24df15f0e0f8d3982c57804984b/charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9", size = 102187 },
-    { url = "https://files.pythonhosted.org/packages/f3/89/68a4c86f1a0002810a27f12e9a7b22feb198c59b2f05231349fbce5c06f4/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114", size = 194617 },
-    { url = "https://files.pythonhosted.org/packages/4f/cd/8947fe425e2ab0aa57aceb7807af13a0e4162cd21eee42ef5b053447edf5/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed", size = 125310 },
-    { url = "https://files.pythonhosted.org/packages/5b/f0/b5263e8668a4ee9becc2b451ed909e9c27058337fda5b8c49588183c267a/charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250", size = 119126 },
-    { url = "https://files.pythonhosted.org/packages/ff/6e/e445afe4f7fda27a533f3234b627b3e515a1b9429bc981c9a5e2aa5d97b6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920", size = 139342 },
-    { url = "https://files.pythonhosted.org/packages/a1/b2/4af9993b532d93270538ad4926c8e37dc29f2111c36f9c629840c57cd9b3/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64", size = 149383 },
-    { url = "https://files.pythonhosted.org/packages/fb/6f/4e78c3b97686b871db9be6f31d64e9264e889f8c9d7ab33c771f847f79b7/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23", size = 142214 },
-    { url = "https://files.pythonhosted.org/packages/2b/c9/1c8fe3ce05d30c87eff498592c89015b19fade13df42850aafae09e94f35/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc", size = 144104 },
-    { url = "https://files.pythonhosted.org/packages/ee/68/efad5dcb306bf37db7db338338e7bb8ebd8cf38ee5bbd5ceaaaa46f257e6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d", size = 146255 },
-    { url = "https://files.pythonhosted.org/packages/0c/75/1ed813c3ffd200b1f3e71121c95da3f79e6d2a96120163443b3ad1057505/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88", size = 140251 },
-    { url =
"https://files.pythonhosted.org/packages/7d/0d/6f32255c1979653b448d3c709583557a4d24ff97ac4f3a5be156b2e6a210/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90", size = 148474 }, - { url = "https://files.pythonhosted.org/packages/ac/a0/c1b5298de4670d997101fef95b97ac440e8c8d8b4efa5a4d1ef44af82f0d/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b", size = 151849 }, - { url = "https://files.pythonhosted.org/packages/04/4f/b3961ba0c664989ba63e30595a3ed0875d6790ff26671e2aae2fdc28a399/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d", size = 149781 }, - { url = "https://files.pythonhosted.org/packages/d8/90/6af4cd042066a4adad58ae25648a12c09c879efa4849c705719ba1b23d8c/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482", size = 144970 }, - { url = "https://files.pythonhosted.org/packages/cc/67/e5e7e0cbfefc4ca79025238b43cdf8a2037854195b37d6417f3d0895c4c2/charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67", size = 94973 }, - { url = "https://files.pythonhosted.org/packages/65/97/fc9bbc54ee13d33dc54a7fcf17b26368b18505500fc01e228c27b5222d80/charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b", size = 102308 }, - { url = "https://files.pythonhosted.org/packages/bf/9b/08c0432272d77b04803958a4598a51e2a4b51c06640af8b8f0f908c18bf2/charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079", size = 49446 }, + { url = "https://files.pythonhosted.org/packages/0d/58/5580c1716040bc89206c77d8f74418caf82ce519aae06450393ca73475d1/charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de", size = 198013 }, + { url = "https://files.pythonhosted.org/packages/d0/11/00341177ae71c6f5159a08168bcb98c6e6d196d372c94511f9f6c9afe0c6/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176", size = 141285 }, + { url = "https://files.pythonhosted.org/packages/01/09/11d684ea5819e5a8f5100fb0b38cf8d02b514746607934134d31233e02c8/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037", size = 151449 }, + { url = "https://files.pythonhosted.org/packages/08/06/9f5a12939db324d905dc1f70591ae7d7898d030d7662f0d426e2286f68c9/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f", size = 143892 }, + { url = "https://files.pythonhosted.org/packages/93/62/5e89cdfe04584cb7f4d36003ffa2936681b03ecc0754f8e969c2becb7e24/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a", size = 146123 }, + { url = 
"https://files.pythonhosted.org/packages/a9/ac/ab729a15c516da2ab70a05f8722ecfccc3f04ed7a18e45c75bbbaa347d61/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a", size = 147943 }, + { url = "https://files.pythonhosted.org/packages/03/d2/3f392f23f042615689456e9a274640c1d2e5dd1d52de36ab8f7955f8f050/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247", size = 142063 }, + { url = "https://files.pythonhosted.org/packages/f2/e3/e20aae5e1039a2cd9b08d9205f52142329f887f8cf70da3650326670bddf/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408", size = 150578 }, + { url = "https://files.pythonhosted.org/packages/8d/af/779ad72a4da0aed925e1139d458adc486e61076d7ecdcc09e610ea8678db/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb", size = 153629 }, + { url = "https://files.pythonhosted.org/packages/c2/b6/7aa450b278e7aa92cf7732140bfd8be21f5f29d5bf334ae987c945276639/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d", size = 150778 }, + { url = "https://files.pythonhosted.org/packages/39/f4/d9f4f712d0951dcbfd42920d3db81b00dd23b6ab520419626f4023334056/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807", size = 146453 }, + { url = "https://files.pythonhosted.org/packages/49/2b/999d0314e4ee0cff3cb83e6bc9aeddd397eeed693edb4facb901eb8fbb69/charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f", size = 95479 }, + { url = "https://files.pythonhosted.org/packages/2d/ce/3cbed41cff67e455a386fb5e5dd8906cdda2ed92fbc6297921f2e4419309/charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f", size = 102790 }, + { url = "https://files.pythonhosted.org/packages/72/80/41ef5d5a7935d2d3a773e3eaebf0a9350542f2cab4eac59a7a4741fbbbbe/charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125", size = 194995 }, + { url = "https://files.pythonhosted.org/packages/7a/28/0b9fefa7b8b080ec492110af6d88aa3dea91c464b17d53474b6e9ba5d2c5/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1", size = 139471 }, + { url = "https://files.pythonhosted.org/packages/71/64/d24ab1a997efb06402e3fc07317e94da358e2585165930d9d59ad45fcae2/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3", size = 149831 }, + { url = "https://files.pythonhosted.org/packages/37/ed/be39e5258e198655240db5e19e0b11379163ad7070962d6b0c87ed2c4d39/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd", size = 142335 }, + { url = 
"https://files.pythonhosted.org/packages/88/83/489e9504711fa05d8dde1574996408026bdbdbd938f23be67deebb5eca92/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00", size = 143862 }, + { url = "https://files.pythonhosted.org/packages/c6/c7/32da20821cf387b759ad24627a9aca289d2822de929b8a41b6241767b461/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12", size = 145673 }, + { url = "https://files.pythonhosted.org/packages/68/85/f4288e96039abdd5aeb5c546fa20a37b50da71b5cf01e75e87f16cd43304/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77", size = 140211 }, + { url = "https://files.pythonhosted.org/packages/28/a3/a42e70d03cbdabc18997baf4f0227c73591a08041c149e710045c281f97b/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146", size = 148039 }, + { url = "https://files.pythonhosted.org/packages/85/e4/65699e8ab3014ecbe6f5c71d1a55d810fb716bbfd74f6283d5c2aa87febf/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd", size = 151939 }, + { url = "https://files.pythonhosted.org/packages/b1/82/8e9fe624cc5374193de6860aba3ea8070f584c8565ee77c168ec13274bd2/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6", size = 149075 }, + { url = "https://files.pythonhosted.org/packages/3d/7b/82865ba54c765560c8433f65e8acb9217cb839a9e32b42af4aa8e945870f/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8", size = 144340 }, + { url = "https://files.pythonhosted.org/packages/b5/b6/9674a4b7d4d99a0d2df9b215da766ee682718f88055751e1e5e753c82db0/charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b", size = 95205 }, + { url = "https://files.pythonhosted.org/packages/1e/ab/45b180e175de4402dcf7547e4fb617283bae54ce35c27930a6f35b6bef15/charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76", size = 102441 }, + { url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105 }, + { url = "https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404 }, + { url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423 }, + { url = 
"https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184 }, + { url = "https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268 }, + { url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601 }, + { url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098 }, + { url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520 }, + { url = "https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852 }, + { url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488 }, + { url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192 }, + { url = "https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550 }, + { url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785 }, + { url = "https://files.pythonhosted.org/packages/38/94/ce8e6f63d18049672c76d07d119304e1e2d7c6098f0841b51c666e9f44a0/charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", size = 195698 }, + { url = "https://files.pythonhosted.org/packages/24/2e/dfdd9770664aae179a96561cc6952ff08f9a8cd09a908f259a9dfa063568/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", size = 140162 }, + { url = 
"https://files.pythonhosted.org/packages/24/4e/f646b9093cff8fc86f2d60af2de4dc17c759de9d554f130b140ea4738ca6/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", size = 150263 }, + { url = "https://files.pythonhosted.org/packages/5e/67/2937f8d548c3ef6e2f9aab0f6e21001056f692d43282b165e7c56023e6dd/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", size = 142966 }, + { url = "https://files.pythonhosted.org/packages/52/ed/b7f4f07de100bdb95c1756d3a4d17b90c1a3c53715c1a476f8738058e0fa/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", size = 144992 }, + { url = "https://files.pythonhosted.org/packages/96/2c/d49710a6dbcd3776265f4c923bb73ebe83933dfbaa841c5da850fe0fd20b/charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", size = 147162 }, + { url = "https://files.pythonhosted.org/packages/b4/41/35ff1f9a6bd380303dea55e44c4933b4cc3c4850988927d4082ada230273/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", size = 140972 }, + { url = "https://files.pythonhosted.org/packages/fb/43/c6a0b685fe6910d08ba971f62cd9c3e862a85770395ba5d9cad4fede33ab/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", size = 149095 }, + { url = "https://files.pythonhosted.org/packages/4c/ff/a9a504662452e2d2878512115638966e75633519ec11f25fca3d2049a94a/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", size = 152668 }, + { url = "https://files.pythonhosted.org/packages/6c/71/189996b6d9a4b932564701628af5cee6716733e9165af1d5e1b285c530ed/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", size = 150073 }, + { url = "https://files.pythonhosted.org/packages/e4/93/946a86ce20790e11312c87c75ba68d5f6ad2208cfb52b2d6a2c32840d922/charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", size = 145732 }, + { url = "https://files.pythonhosted.org/packages/cd/e5/131d2fb1b0dddafc37be4f3a2fa79aa4c037368be9423061dccadfd90091/charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", size = 95391 }, + { url = "https://files.pythonhosted.org/packages/27/f2/4f9a69cc7712b9b5ad8fdb87039fd89abba997ad5cbe690d1835d40405b0/charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", size = 102702 }, + { url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767 }, ] [[package]] @@ -169,30 +161,31 @@ wheels = [ [[package]] name = "httpcore" -version = "1.0.7" +version = "1.0.5" source 
= { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6a/41/d7d0a89eb493922c37d343b607bc1b5da7f5be7e383740b4753ad8943e90/httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c", size = 85196 } +sdist = { url = "https://files.pythonhosted.org/packages/17/b0/5e8b8674f8d203335a62fdfcfa0d11ebe09e23613c3391033cbba35f7926/httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61", size = 83234 } wheels = [ - { url = "https://files.pythonhosted.org/packages/87/f5/72347bc88306acb359581ac4d52f23c0ef445b57157adedb9aee0cd689d2/httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd", size = 78551 }, + { url = "https://files.pythonhosted.org/packages/78/d4/e5d7e4f2174f8a4d63c8897d79eb8fe2503f7ecc03282fee1fa2719c2704/httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5", size = 77926 }, ] [[package]] name = "httpx" -version = "0.28.0" +version = "0.27.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "certifi" }, { name = "httpcore" }, { name = "idna" }, + { name = "sniffio" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/10/df/676b7cf674dd1bdc71a64ad393c89879f75e4a0ab8395165b498262ae106/httpx-0.28.0.tar.gz", hash = "sha256:0858d3bab51ba7e386637f22a61d8ccddaeec5f3fe4209da3a6168dbb91573e0", size = 141307 } +sdist = { url = "https://files.pythonhosted.org/packages/78/82/08f8c936781f67d9e6b9eeb8a0c8b4e406136ea4c3d1f89a5db71d42e0e6/httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2", size = 144189 } wheels = [ - { url = "https://files.pythonhosted.org/packages/8f/fb/a19866137577ba60c6d8b69498dc36be479b13ba454f691348ddf428f185/httpx-0.28.0-py3-none-any.whl", hash = "sha256:dc0b419a0cfeb6e8b34e85167c0da2671206f5095f1baa9663d23bcfd6b535fc", size = 73551 }, + { url = "https://files.pythonhosted.org/packages/56/95/9377bcb415797e44274b51d46e3249eba641711cf3348050f76ee7b15ffc/httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0", size = 76395 }, ] [[package]] @@ -310,19 +303,21 @@ wheels = [ [[package]] name = "mcp" -version = "1.0.0" +version = "1.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "httpx" }, { name = "httpx-sse" }, { name = "pydantic" }, + { name = "pydantic-settings" }, { name = "sse-starlette" }, { name = "starlette" }, + { name = "uvicorn" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/97/de/a9ec0a1b6439f90ea59f89004bb2e7ec6890dfaeef809751d9e6577dca7e/mcp-1.0.0.tar.gz", hash = "sha256:dba51ce0b5c6a80e25576f606760c49a91ee90210fed805b530ca165d3bbc9b7", size = 82891 } +sdist = { url = "https://files.pythonhosted.org/packages/ab/a5/b08dc846ebedae9f17ced878e6975826e90e448cd4592f532f6a88a925a7/mcp-1.2.0.tar.gz", hash = "sha256:2b06c7ece98d6ea9e6379caa38d74b432385c338fb530cb82e2c70ea7add94f5", size = 102973 } wheels = [ - { url = "https://files.pythonhosted.org/packages/56/89/900c0c8445ec001d3725e475fc553b0feb2e8a51be018f3bb7de51e683db/mcp-1.0.0-py3-none-any.whl", hash = "sha256:bbe70ffa3341cd4da78b5eb504958355c68381fb29971471cea1e642a2af5b8a", size = 36361 }, + { url = 
"https://files.pythonhosted.org/packages/af/84/fca78f19ac8ce6c53ba416247c71baa53a9e791e98d3c81edbc20a77d6d1/mcp-1.2.0-py3-none-any.whl", hash = "sha256:1d0e77d8c14955a5aea1f5aa1f444c8e531c09355c829b20e42f7a142bc0755f", size = 66468 }, ] [[package]] @@ -347,7 +342,7 @@ dev = [ [package.metadata] requires-dist = [ { name = "markdownify", specifier = ">=0.13.1" }, - { name = "mcp", specifier = ">=1.0.0" }, + { name = "mcp", specifier = ">=1.1.3" }, { name = "protego", specifier = ">=0.3.1" }, { name = "pydantic", specifier = ">=2.0.0" }, { name = "readabilipy", specifier = ">=0.2.0" }, @@ -380,16 +375,16 @@ wheels = [ [[package]] name = "pydantic" -version = "2.10.2" +version = "2.10.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, { name = "pydantic-core" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/41/86/a03390cb12cf64e2a8df07c267f3eb8d5035e0f9a04bb20fb79403d2a00e/pydantic-2.10.2.tar.gz", hash = "sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa", size = 785401 } +sdist = { url = "https://files.pythonhosted.org/packages/c4/bd/7fc610993f616d2398958d0028d15eaf53bde5f80cb2edb7aa4f1feaf3a7/pydantic-2.10.1.tar.gz", hash = "sha256:a4daca2dc0aa429555e0656d6bf94873a7dc5f54ee42b1f5873d666fb3f35560", size = 783717 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d5/74/da832196702d0c56eb86b75bfa346db9238617e29b0b7ee3b8b4eccfe654/pydantic-2.10.2-py3-none-any.whl", hash = "sha256:cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e", size = 456364 }, + { url = "https://files.pythonhosted.org/packages/e0/fc/fda48d347bd50a788dd2a0f318a52160f911b86fc2d8b4c86f4d7c9bceea/pydantic-2.10.1-py3-none-any.whl", hash = "sha256:a8d20db84de64cf4a7d59e899c2caf0fe9d660c7cfc482528e7020d7dd189a7e", size = 455329 }, ] [[package]] @@ -467,6 +462,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/33/72/f881b5e18fbb67cf2fb4ab253660de3c6899dbb2dba409d0b757e3559e3d/pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c", size = 2001864 }, ] +[[package]] +name = "pydantic-settings" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b5/d4/9dfbe238f45ad8b168f5c96ee49a3df0598ce18a0795a983b419949ce65b/pydantic_settings-2.6.1.tar.gz", hash = "sha256:e0f92546d8a9923cb8941689abf85d6601a8c19a23e97a34b2964a2e3f813ca0", size = 75646 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5e/f9/ff95fd7d760af42f647ea87f9b8a383d891cdb5e5dbd4613edaeb094252a/pydantic_settings-2.6.1-py3-none-any.whl", hash = "sha256:7fb0637c786a558d3103436278a7c4f1cfd29ba8973238a50c5bb9a55387da87", size = 28595 }, +] + [[package]] name = "pyright" version = "1.1.389" @@ -480,6 +488,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1b/26/c288cabf8cfc5a27e1aa9e5029b7682c0f920b8074f45d22bf844314d66a/pyright-1.1.389-py3-none-any.whl", hash = "sha256:41e9620bba9254406dc1f621a88ceab5a88af4c826feb4f614d95691ed243a60", size = 18581 }, ] +[[package]] +name = "python-dotenv" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = 
"sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863 }, +] + [[package]] name = "readabilipy" version = "0.2.0" @@ -647,14 +664,14 @@ wheels = [ [[package]] name = "starlette" -version = "0.41.3" +version = "0.41.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1a/4c/9b5764bd22eec91c4039ef4c55334e9187085da2d8a2df7bd570869aae18/starlette-0.41.3.tar.gz", hash = "sha256:0e4ab3d16522a255be6b28260b938eae2482f98ce5cc934cb08dce8dc3ba5835", size = 2574159 } +sdist = { url = "https://files.pythonhosted.org/packages/3e/da/1fb4bdb72ae12b834becd7e1e7e47001d32f91ec0ce8d7bc1b618d9f0bd9/starlette-0.41.2.tar.gz", hash = "sha256:9834fd799d1a87fd346deb76158668cfa0b0d56f85caefe8268e2d97c3468b62", size = 2573867 } wheels = [ - { url = "https://files.pythonhosted.org/packages/96/00/2b325970b3060c7cecebab6d295afe763365822b1306a12eeab198f74323/starlette-0.41.3-py3-none-any.whl", hash = "sha256:44cedb2b7c77a9de33a8b74b2b90e9f50d11fcf25d8270ea525ad71a25374ff7", size = 73225 }, + { url = "https://files.pythonhosted.org/packages/54/43/f185bfd0ca1d213beb4293bed51d92254df23d8ceaf6c0e17146d508a776/starlette-0.41.2-py3-none-any.whl", hash = "sha256:fbc189474b4731cf30fcef52f18a8d070e3f3b46c6a04c97579e85e6ffca942d", size = 73259 }, ] [[package]] @@ -677,16 +694,16 @@ wheels = [ [[package]] name = "uvicorn" -version = "0.32.1" +version = "0.32.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "h11" }, { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6a/3c/21dba3e7d76138725ef307e3d7ddd29b763119b3aa459d02cc05fefcff75/uvicorn-0.32.1.tar.gz", hash = "sha256:ee9519c246a72b1c084cea8d3b44ed6026e78a4a309cbedae9c37e4cb9fbb175", size = 77630 } +sdist = { url = "https://files.pythonhosted.org/packages/e0/fc/1d785078eefd6945f3e5bab5c076e4230698046231eb0f3747bc5c8fa992/uvicorn-0.32.0.tar.gz", hash = "sha256:f78b36b143c16f54ccdb8190d0a26b5f1901fe5a3c777e1ab29f26391af8551e", size = 77564 } wheels = [ - { url = "https://files.pythonhosted.org/packages/50/c1/2d27b0a15826c2b71dcf6e2f5402181ef85acf439617bb2f1453125ce1f3/uvicorn-0.32.1-py3-none-any.whl", hash = "sha256:82ad92fd58da0d12af7482ecdb5f2470a04c9c9a53ced65b9bbb4a205377602e", size = 63828 }, + { url = "https://files.pythonhosted.org/packages/eb/14/78bd0e95dd2444b6caacbca2b730671d4295ccb628ef58b81bee903629df/uvicorn-0.32.0-py3-none-any.whl", hash = "sha256:60b8f3a5ac027dcd31448f411ced12b5ef452c646f76f02f8cc3f25d8d26fd82", size = 63723 }, ] [[package]] diff --git a/src/filesystem/Dockerfile b/src/filesystem/Dockerfile index 12e5bcf4..418b1400 100644 --- a/src/filesystem/Dockerfile +++ b/src/filesystem/Dockerfile @@ -1,4 +1,4 @@ -FROM node:22.12-alpine as builder +FROM node:22.12-alpine AS builder WORKDIR /app diff --git a/src/gdrive/Dockerfile b/src/gdrive/Dockerfile index 8a3458f4..923ffa7e 100644 --- a/src/gdrive/Dockerfile +++ b/src/gdrive/Dockerfile @@ -1,4 +1,4 @@ -FROM node:22.12-alpine as builder +FROM node:22.12-alpine AS builder COPY src/gdrive /app COPY tsconfig.json /tsconfig.json diff --git a/src/git/README.md b/src/git/README.md index 
cb22629e..f0855695 100644 --- a/src/git/README.md +++ b/src/git/README.md @@ -213,7 +213,7 @@ If you are doing local development, there are two ways to test your changes: ```json { "mcpServers": { - "brave-search": { + "git": { "command": "docker", "args": [ "run", diff --git a/src/github/Dockerfile b/src/github/Dockerfile index 4a054d65..a3ad36b8 100644 --- a/src/github/Dockerfile +++ b/src/github/Dockerfile @@ -1,4 +1,4 @@ -FROM node:22.12-alpine as builder +FROM node:22.12-alpine AS builder # Must be entire project because `prepare` script is run during `npm install` and requires all files. COPY src/github /app diff --git a/src/github/README.md b/src/github/README.md index 73cdc584..026dde9b 100644 --- a/src/github/README.md +++ b/src/github/README.md @@ -188,6 +188,95 @@ MCP Server for the GitHub API, enabling file operations, repository management, - `issue_number` (number): Issue number to retrieve - Returns: Github Issue object & details +18. `get_pull_request` + - Get details of a specific pull request + - Inputs: + - `owner` (string): Repository owner + - `repo` (string): Repository name + - `pull_number` (number): Pull request number + - Returns: Pull request details including diff and review status + +19. `list_pull_requests` + - List and filter repository pull requests + - Inputs: + - `owner` (string): Repository owner + - `repo` (string): Repository name + - `state` (optional string): Filter by state ('open', 'closed', 'all') + - `head` (optional string): Filter by head user/org and branch + - `base` (optional string): Filter by base branch + - `sort` (optional string): Sort by ('created', 'updated', 'popularity', 'long-running') + - `direction` (optional string): Sort direction ('asc', 'desc') + - `per_page` (optional number): Results per page (max 100) + - `page` (optional number): Page number + - Returns: Array of pull request details + +20. `create_pull_request_review` + - Create a review on a pull request + - Inputs: + - `owner` (string): Repository owner + - `repo` (string): Repository name + - `pull_number` (number): Pull request number + - `body` (string): Review comment text + - `event` (string): Review action ('APPROVE', 'REQUEST_CHANGES', 'COMMENT') + - `commit_id` (optional string): SHA of commit to review + - `comments` (optional array): Line-specific comments, each with: + - `path` (string): File path + - `position` (number): Line position in diff + - `body` (string): Comment text + - Returns: Created review details + +21. `merge_pull_request` + - Merge a pull request + - Inputs: + - `owner` (string): Repository owner + - `repo` (string): Repository name + - `pull_number` (number): Pull request number + - `commit_title` (optional string): Title for merge commit + - `commit_message` (optional string): Extra detail for merge commit + - `merge_method` (optional string): Merge method ('merge', 'squash', 'rebase') + - Returns: Merge result details + +22. `get_pull_request_files` + - Get the list of files changed in a pull request + - Inputs: + - `owner` (string): Repository owner + - `repo` (string): Repository name + - `pull_number` (number): Pull request number + - Returns: Array of changed files with patch and status details + +23. `get_pull_request_status` + - Get the combined status of all status checks for a pull request + - Inputs: + - `owner` (string): Repository owner + - `repo` (string): Repository name + - `pull_number` (number): Pull request number + - Returns: Combined status check results and individual check details + +24. 
`update_pull_request_branch` + - Update a pull request branch with the latest changes from the base branch (equivalent to GitHub's "Update branch" button) + - Inputs: + - `owner` (string): Repository owner + - `repo` (string): Repository name + - `pull_number` (number): Pull request number + - `expected_head_sha` (optional string): The expected SHA of the pull request's HEAD ref + - Returns: Success message when branch is updated + +25. `get_pull_request_comments` + - Get the review comments on a pull request + - Inputs: + - `owner` (string): Repository owner + - `repo` (string): Repository name + - `pull_number` (number): Pull request number + - Returns: Array of pull request review comments with details like the comment text, author, and location in the diff + +26. `get_pull_request_reviews` + - Get the reviews on a pull request + - Inputs: + - `owner` (string): Repository owner + - `repo` (string): Repository name + - `pull_number` (number): Pull request number + - Returns: Array of pull request reviews with details like the review state (APPROVED, CHANGES_REQUESTED, etc.), reviewer, and review body +
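The nine pull request tools added above share a common addressing scheme (`owner`, `repo`, `pull_number`). As a quick illustration of the documented input shape, here is a minimal TypeScript sketch for `merge_pull_request`; the field names and allowed values come from the list above, while the interface name and the sample values (octocat/hello-world, PR 42) are illustrative assumptions, not part of this diff.

```typescript
// Hypothetical input payload for the merge_pull_request tool documented above.
// Field names mirror the documented inputs; the concrete values are made up.
interface MergePullRequestInput {
  owner: string;                                // Repository owner
  repo: string;                                 // Repository name
  pull_number: number;                          // Pull request number
  commit_title?: string;                        // Optional title for the merge commit
  commit_message?: string;                      // Optional extra detail for the merge commit
  merge_method?: "merge" | "squash" | "rebase"; // Documented merge methods
}

const input: MergePullRequestInput = {
  owner: "octocat",
  repo: "hello-world",
  pull_number: 42,
  commit_title: "Merge feature branch",
  merge_method: "squash",
};
```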
## Search Query Syntax ### Code Search @@ -259,7 +348,7 @@ To use this with Claude Desktop, add the following to your `claude_desktop_confi "@modelcontextprotocol/server-github" ], "env": { - "GITHUB_PERSONAL_ACCESS_TOKEN": "" + "GITHUB_PERSONAL_ACCESS_TOKEN": "" } } } diff --git a/src/github/common/errors.ts b/src/github/common/errors.ts new file mode 100644 index 00000000..5b940f3b --- /dev/null +++ b/src/github/common/errors.ts @@ -0,0 +1,89 @@ +export class GitHubError extends Error { + constructor( + message: string, + public readonly status: number, + public readonly response: unknown + ) { + super(message); + this.name = "GitHubError"; + } +} + +export class GitHubValidationError extends GitHubError { + constructor(message: string, status: number, response: unknown) { + super(message, status, response); + this.name = "GitHubValidationError"; + } +} + +export class GitHubResourceNotFoundError extends GitHubError { + constructor(resource: string) { + super(`Resource not found: ${resource}`, 404, { message: `${resource} not found` }); + this.name = "GitHubResourceNotFoundError"; + } +} + +export class GitHubAuthenticationError extends GitHubError { + constructor(message = "Authentication failed") { + super(message, 401, { message }); + this.name = "GitHubAuthenticationError"; + } +} + +export class GitHubPermissionError extends GitHubError { + constructor(message = "Insufficient permissions") { + super(message, 403, { message }); + this.name = "GitHubPermissionError"; + } +} + +export class GitHubRateLimitError extends GitHubError { + constructor( + message = "Rate limit exceeded", + public readonly resetAt: Date + ) { + super(message, 429, { message, reset_at: resetAt.toISOString() }); + this.name = "GitHubRateLimitError"; + } +} + +export class GitHubConflictError extends GitHubError { + constructor(message: string) { + super(message, 409, { message }); + this.name = "GitHubConflictError"; + } +} + +export function isGitHubError(error: unknown): error is GitHubError { + return error instanceof GitHubError; +} + +export function createGitHubError(status: number, response: any): GitHubError { + switch (status) { + case 401: + return new GitHubAuthenticationError(response?.message); + case 403: + return new GitHubPermissionError(response?.message); + case 404: + return new GitHubResourceNotFoundError(response?.message || "Resource"); + case 409: + return new GitHubConflictError(response?.message || "Conflict occurred"); + case 422: + return new GitHubValidationError( + response?.message || "Validation failed", + status, + response + ); + case 429: + return new GitHubRateLimitError( + response?.message, + new Date(response?.reset_at || Date.now() + 60000) + ); + default: + return new GitHubError( + response?.message || "GitHub API error", + status, + response + ); + } +} \ No newline at end of file
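A short sketch of how a caller might consume these helpers; `raiseForStatus`, `demo`, and the sample URL are hypothetical additions for illustration, while the error classes, `isGitHubError`, and `createGitHubError` come from the file above.

```typescript
import { createGitHubError, isGitHubError, GitHubRateLimitError } from "./errors.js";

// Hypothetical helper: convert a failed fetch Response into a typed GitHubError.
async function raiseForStatus(response: Response): Promise<void> {
  if (!response.ok) {
    const body = await response.json().catch(() => undefined);
    throw createGitHubError(response.status, body);
  }
}

async function demo(): Promise<void> {
  try {
    const res = await fetch("https://api.github.com/repos/octocat/hello-world");
    await raiseForStatus(res);
  } catch (error) {
    if (isGitHubError(error)) {
      // Narrow further for subclasses that carry extra data, e.g. the rate limit reset time.
      if (error instanceof GitHubRateLimitError) {
        console.error(`Rate limited until ${error.resetAt.toISOString()}`);
      } else {
        console.error(`GitHub error ${error.status}: ${error.message}`);
      }
    } else {
      throw error; // not a GitHub API error; rethrow unchanged
    }
  }
}
```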
diff --git a/src/github/common/types.ts b/src/github/common/types.ts new file mode 100644 index 00000000..cca961ba --- /dev/null +++ b/src/github/common/types.ts @@ -0,0 +1,259 @@ +import { z } from "zod"; + +// Base schemas for common types +export const GitHubAuthorSchema = z.object({ + name: z.string(), + email: z.string(), + date: z.string(), +}); + +export const GitHubOwnerSchema = z.object({ + login: z.string(), + id: z.number(), + node_id: z.string(), + avatar_url: z.string(), + url: z.string(), + html_url: z.string(), + type: z.string(), +}); + +export const GitHubRepositorySchema = z.object({ + id: z.number(), + node_id: z.string(), + name: z.string(), + full_name: z.string(), + private: z.boolean(), + owner: GitHubOwnerSchema, + html_url: z.string(), + description: z.string().nullable(), + fork: z.boolean(), + url: z.string(), + created_at: z.string(), + updated_at: z.string(), + pushed_at: z.string(), + git_url: z.string(), + ssh_url: z.string(), + clone_url: z.string(), + default_branch: z.string(), +}); + +export const GithubFileContentLinks = z.object({ + self: z.string(), + git: z.string().nullable(), + html: z.string().nullable() +}); + +export const GitHubFileContentSchema = z.object({ + name: z.string(), + path: z.string(), + sha: z.string(), + size: z.number(), + url: z.string(), + html_url: z.string(), + git_url: z.string(), + download_url: z.string(), + type: z.string(), + content: z.string().optional(), + encoding: z.string().optional(), + _links: GithubFileContentLinks +}); + +export const GitHubDirectoryContentSchema = z.object({ + type: z.string(), + size: z.number(), + name: z.string(), + path: z.string(), + sha: z.string(), + url: z.string(), + git_url: z.string(), + html_url: z.string(), + download_url: z.string().nullable(), +}); + +export const GitHubContentSchema = z.union([ + GitHubFileContentSchema, + z.array(GitHubDirectoryContentSchema), +]); + +export const GitHubTreeEntrySchema = z.object({ + path: z.string(), + mode: z.enum(["100644", "100755", "040000", "160000", "120000"]), + type: z.enum(["blob", "tree", "commit"]), + size: z.number().optional(), + sha: z.string(), + url: z.string(), +}); + +export const GitHubTreeSchema = z.object({ + sha: z.string(), + url: z.string(), + tree: z.array(GitHubTreeEntrySchema), + truncated: z.boolean(), +}); + +export const GitHubCommitSchema = z.object({ + sha: z.string(), + node_id: z.string(), + url: z.string(), + author: GitHubAuthorSchema, + committer: GitHubAuthorSchema, + message: z.string(), + tree: z.object({ + sha: z.string(), + url: z.string(), + }), + parents: z.array( + z.object({ + sha: z.string(), + url: z.string(), + }) + ), +}); + +export const GitHubListCommitsSchema = z.array(z.object({ + sha: z.string(), + node_id: z.string(), + commit: z.object({ + author: GitHubAuthorSchema, + committer: GitHubAuthorSchema, + message: z.string(), + tree: z.object({ + sha: z.string(), + url: z.string() + }), + url: z.string(), + comment_count: z.number(), + }), + url: z.string(), + html_url: z.string(), + comments_url: z.string() +})); + +export const GitHubReferenceSchema = z.object({ + ref: z.string(), + node_id: z.string(), + url: z.string(), + object: z.object({ + sha: z.string(), + type: z.string(), + url: z.string(), + }), +}); + +// User and assignee schemas +export const GitHubIssueAssigneeSchema = z.object({ + login: z.string(), + id: z.number(), + avatar_url: z.string(), + url: z.string(), + html_url: z.string(), +}); + +// Issue-related schemas +export const GitHubLabelSchema = z.object({ + id: z.number(), + node_id: z.string(), + url: z.string(), + name: z.string(), + color: z.string(), + default: z.boolean(), + description: z.string().optional(), +}); + +export const GitHubMilestoneSchema = z.object({ + url: z.string(), + html_url: z.string(), + labels_url: z.string(), + id: z.number(), + node_id: z.string(), + number: z.number(), + title: z.string(), + description: z.string(), + state: z.string(), +}); + +export const GitHubIssueSchema = z.object({ + url: z.string(), + repository_url: z.string(), + labels_url: z.string(), + comments_url: z.string(), + events_url: z.string(), + html_url: z.string(), + id: z.number(), + node_id: z.string(), + number: z.number(), + title: z.string(), + user: GitHubIssueAssigneeSchema, + labels: z.array(GitHubLabelSchema), + state: z.string(), + locked: z.boolean(), + assignee: GitHubIssueAssigneeSchema.nullable(), + assignees: z.array(GitHubIssueAssigneeSchema), + milestone: GitHubMilestoneSchema.nullable(), + comments: z.number(), + created_at: z.string(), + updated_at: z.string(), + closed_at: z.string().nullable(), + body: z.string().nullable(), +}); + +// Search-related schemas +export const GitHubSearchResponseSchema = z.object({ + total_count: z.number(), + incomplete_results: z.boolean(), + items: z.array(GitHubRepositorySchema), +}); + +// Pull request schemas +export const GitHubPullRequestRefSchema = z.object({ + label: z.string(), + ref: z.string(), + sha: z.string(), + user: GitHubIssueAssigneeSchema, + repo: GitHubRepositorySchema, +}); + +export const GitHubPullRequestSchema = z.object({ + url: z.string(), + id: z.number(), + node_id: z.string(), + html_url: z.string(), + diff_url: z.string(), + patch_url: z.string(), + issue_url: z.string(), + number: z.number(), + state: z.string(), + locked: z.boolean(), + title: z.string(), + user: GitHubIssueAssigneeSchema, + body: z.string().nullable(), + created_at: z.string(), + updated_at: z.string(), + closed_at: z.string().nullable(), + merged_at: z.string().nullable(), + merge_commit_sha: z.string().nullable(), + assignee: GitHubIssueAssigneeSchema.nullable(), + assignees: z.array(GitHubIssueAssigneeSchema), + requested_reviewers: z.array(GitHubIssueAssigneeSchema), + labels: z.array(GitHubLabelSchema), + head: GitHubPullRequestRefSchema, + base: GitHubPullRequestRefSchema, +}); + +// Export types +export type GitHubAuthor = z.infer<typeof GitHubAuthorSchema>; +export type GitHubRepository = z.infer<typeof GitHubRepositorySchema>; +export type GitHubFileContent = z.infer<typeof GitHubFileContentSchema>; +export type GitHubDirectoryContent = z.infer<typeof GitHubDirectoryContentSchema>; +export type GitHubContent = z.infer<typeof GitHubContentSchema>; +export type GitHubTree = z.infer<typeof GitHubTreeSchema>; +export type GitHubCommit = z.infer<typeof GitHubCommitSchema>; +export type GitHubListCommits = z.infer<typeof GitHubListCommitsSchema>; +export type GitHubReference = z.infer<typeof GitHubReferenceSchema>; +export type GitHubIssueAssignee = z.infer<typeof GitHubIssueAssigneeSchema>; +export type GitHubLabel = z.infer<typeof GitHubLabelSchema>; +export type GitHubMilestone = z.infer<typeof GitHubMilestoneSchema>; +export type GitHubIssue = z.infer<typeof GitHubIssueSchema>; +export type GitHubSearchResponse = z.infer<typeof GitHubSearchResponseSchema>; +export type GitHubPullRequest = z.infer<typeof GitHubPullRequestSchema>; +export type GitHubPullRequestRef = z.infer<typeof GitHubPullRequestRefSchema>; \ No newline at end of file
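Because every exported type is derived with `z.infer<typeof ...Schema>`, each schema doubles as a runtime validator for untrusted API payloads. A minimal sketch of that pattern; `parsePullRequest` is a hypothetical helper, not part of this diff.

```typescript
import { GitHubPullRequestSchema, type GitHubPullRequest } from "./types.js";

// Hypothetical: validate an untrusted GitHub API payload against the schema,
// so downstream code can rely on the inferred GitHubPullRequest type.
function parsePullRequest(payload: unknown): GitHubPullRequest {
  const result = GitHubPullRequestSchema.safeParse(payload);
  if (!result.success) {
    throw new Error(`Unexpected pull request shape: ${result.error.message}`);
  }
  return result.data;
}
```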
diff --git a/src/github/common/utils.ts b/src/github/common/utils.ts new file mode 100644 index 00000000..21c8aa71 --- /dev/null +++ b/src/github/common/utils.ts @@ -0,0 +1,133 @@ +import { createGitHubError } from "./errors.js"; + +type RequestOptions = { + method?: string; + body?: unknown; + headers?: Record<string, string>; +}; + +async function parseResponseBody(response: Response): Promise<unknown> { + const contentType = response.headers.get("content-type"); + if (contentType?.includes("application/json")) { + return response.json(); + } + return response.text(); +} + +export function buildUrl(baseUrl: string, params: Record<string, string | number | undefined>): string { + const url = new URL(baseUrl); + Object.entries(params).forEach(([key, value]) => { + if (value !== undefined) { + url.searchParams.append(key, value.toString()); + } + }); + return url.toString(); +} + +export async function githubRequest( + url: string, + options: RequestOptions = {} +): Promise<unknown> { + const headers: Record<string, string> = { + "Accept": "application/vnd.github.v3+json", + "Content-Type": "application/json", + ...options.headers, + }; + + if (process.env.GITHUB_PERSONAL_ACCESS_TOKEN) { + headers["Authorization"] = `Bearer ${process.env.GITHUB_PERSONAL_ACCESS_TOKEN}`; + } + + const response = await fetch(url, { + method: options.method || "GET", + headers, + body: options.body ? JSON.stringify(options.body) : undefined, + }); + + const responseBody = await parseResponseBody(response); + + if (!response.ok) { + throw createGitHubError(response.status, responseBody); + } + + return responseBody; + } + +export function validateBranchName(branch: string): string { + const sanitized = branch.trim(); + if (!sanitized) { + throw new Error("Branch name cannot be empty"); + } + if (sanitized.includes("..")) { + throw new Error("Branch name cannot contain '..'"); + } + if (/[\s~^:?*[\\\]]/.test(sanitized)) { + throw new Error("Branch name contains invalid characters"); + } + if (sanitized.startsWith("/") || sanitized.endsWith("/")) { + throw new Error("Branch name cannot start or end with '/'"); + } + if (sanitized.endsWith(".lock")) { + throw new Error("Branch name cannot end with '.lock'"); + } + return sanitized; +} + +export function validateRepositoryName(name: string): string { + const sanitized = name.trim().toLowerCase(); + if (!sanitized) { + throw new Error("Repository name cannot be empty"); + } + if (!/^[a-z0-9_.-]+$/.test(sanitized)) { + throw new Error( + "Repository name can only contain lowercase letters, numbers, hyphens, periods, and underscores" + ); + } + if (sanitized.startsWith(".") || sanitized.endsWith(".")) { + throw new Error("Repository name cannot start or end with a period"); + } + return sanitized; +} + +export function validateOwnerName(owner: string): string { + const sanitized = owner.trim().toLowerCase(); + if (!sanitized) { + throw new Error("Owner name cannot be empty"); + } + if (!/^[a-z0-9](?:[a-z0-9]|-(?=[a-z0-9])){0,38}$/.test(sanitized)) { + throw new Error( + "Owner name must start with a letter or number and can contain up to 39 characters" + ); + } + return sanitized; +} + +export async function checkBranchExists( + owner: string, + repo: string, + branch: string +): Promise<boolean> { + try { + await githubRequest( + `https://api.github.com/repos/${owner}/${repo}/branches/${branch}` + ); + return true; + } catch (error) { + if (error && typeof error === "object" && "status" in error && error.status === 404) { + return false; + } + throw error; + } +} + +export async function checkUserExists(username: string): Promise<boolean> { + try { + await githubRequest(`https://api.github.com/users/${username}`); + return true; + } catch (error) { + if (error &&
typeof error === "object" && "status" in error && error.status === 404) { + return false; + } + throw error; + } +} \ No newline at end of file
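A brief sketch of how the helpers above compose; `listOpenPulls` and its query parameters are illustrative assumptions, while `buildUrl`, `githubRequest`, and the two validators come from the file itself.

```typescript
import { buildUrl, githubRequest, validateOwnerName, validateRepositoryName } from "./utils.js";

// Hypothetical: list open pull requests using the shared request helpers.
// githubRequest attaches the Authorization header automatically when
// GITHUB_PERSONAL_ACCESS_TOKEN is set in the environment.
async function listOpenPulls(owner: string, repo: string): Promise<unknown> {
  const url = buildUrl(
    `https://api.github.com/repos/${validateOwnerName(owner)}/${validateRepositoryName(repo)}/pulls`,
    { state: "open", per_page: 30 } // undefined values would be dropped by buildUrl
  );
  return githubRequest(url);
}
```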
diff --git a/src/github/index.ts b/src/github/index.ts index 3759e8b5..fd77f017 100644 --- a/src/github/index.ts +++ b/src/github/index.ts @@ -5,61 +5,26 @@ import { CallToolRequestSchema, ListToolsRequestSchema, } from "@modelcontextprotocol/sdk/types.js"; -import fetch from "node-fetch"; import { z } from 'zod'; import { zodToJsonSchema } from 'zod-to-json-schema'; + +import * as repository from './operations/repository.js'; +import * as files from './operations/files.js'; +import * as issues from './operations/issues.js'; +import * as pulls from './operations/pulls.js'; +import * as branches from './operations/branches.js'; +import * as search from './operations/search.js'; +import * as commits from './operations/commits.js'; import { - CreateBranchOptionsSchema, - CreateBranchSchema, - CreateIssueOptionsSchema, - CreateIssueSchema, - CreateOrUpdateFileSchema, - CreatePullRequestOptionsSchema, - CreatePullRequestSchema, - CreateRepositoryOptionsSchema, - CreateRepositorySchema, - ForkRepositorySchema, - GetFileContentsSchema, - GetIssueSchema, - GitHubCommitSchema, - GitHubContentSchema, - GitHubCreateUpdateFileResponseSchema, - GitHubForkSchema, - GitHubIssueSchema, - GitHubListCommits, - GitHubListCommitsSchema, - GitHubPullRequestSchema, - GitHubReferenceSchema, - GitHubRepositorySchema, - GitHubSearchResponseSchema, - GitHubTreeSchema, - IssueCommentSchema, - ListCommitsSchema, - ListIssuesOptionsSchema, - PushFilesSchema, - SearchCodeResponseSchema, - SearchCodeSchema, - SearchIssuesResponseSchema, - SearchIssuesSchema, - SearchRepositoriesSchema, - SearchUsersResponseSchema, - SearchUsersSchema, - UpdateIssueOptionsSchema, - type FileOperation, - type GitHubCommit, - type GitHubContent, - type GitHubCreateUpdateFileResponse, - type GitHubFork, - type GitHubIssue, - type GitHubPullRequest, - type GitHubReference, - type GitHubRepository, - type GitHubSearchResponse, - type GitHubTree, - type SearchCodeResponse, - type SearchIssuesResponse, - type SearchUsersResponse -} from './schemas.js'; + GitHubError, + GitHubValidationError, + GitHubResourceNotFoundError, + GitHubAuthenticationError, + GitHubPermissionError, + GitHubRateLimitError, + GitHubConflictError, + isGitHubError, +} from './common/errors.js'; const server = new Server( { @@ -73,646 +38,27 @@ const server = new Server( } ); -const GITHUB_PERSONAL_ACCESS_TOKEN = process.env.GITHUB_PERSONAL_ACCESS_TOKEN; - -if (!GITHUB_PERSONAL_ACCESS_TOKEN) { - console.error("GITHUB_PERSONAL_ACCESS_TOKEN environment variable is not set"); - process.exit(1); -} - -async function forkRepository( - owner: string, - repo: string, - organization?: string -): Promise<GitHubFork> { - const url = organization - ? `https://api.github.com/repos/${owner}/${repo}/forks?organization=${organization}` - : `https://api.github.com/repos/${owner}/${repo}/forks`; - - const response = await fetch(url, { - method: "POST", - headers: { - Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - Accept: "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server", - }, - }); - - if (!response.ok) { - throw new Error(`GitHub API error: ${response.statusText}`); - } - - return GitHubForkSchema.parse(await response.json()); -} - -async function createBranch( - owner: string, - repo: string, - options: z.infer<typeof CreateBranchOptionsSchema> -): Promise<GitHubReference> { - const fullRef = `refs/heads/${options.ref}`; - - const response = await fetch( - `https://api.github.com/repos/${owner}/${repo}/git/refs`, - { - method: "POST", - headers: { - Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - Accept: "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server", - "Content-Type": "application/json", - }, - body: JSON.stringify({ - ref: fullRef, - sha: options.sha, - }), +function formatGitHubError(error: GitHubError): string { + let message = `GitHub API Error: ${error.message}`; + + if (error instanceof GitHubValidationError) { + message = `Validation Error: ${error.message}`; + if (error.response) { + message += `\nDetails: ${JSON.stringify(error.response)}`; } - ); - - if (!response.ok) { - throw new Error(`GitHub API error: ${response.statusText}`); + } else if (error instanceof GitHubResourceNotFoundError) { + message = `Not Found: ${error.message}`; + } else if (error instanceof GitHubAuthenticationError) { + message = `Authentication Failed: ${error.message}`; + } else if (error instanceof GitHubPermissionError) { + message = `Permission Denied: ${error.message}`; + } else if (error instanceof GitHubRateLimitError) { + message = `Rate Limit Exceeded: ${error.message}\nResets at: ${error.resetAt.toISOString()}`; + } else if (error instanceof GitHubConflictError) { + message = `Conflict: ${error.message}`; } - return GitHubReferenceSchema.parse(await response.json()); -} - -async function getDefaultBranchSHA( - owner: string, - repo: string -): Promise<string> { - const response = await fetch( - `https://api.github.com/repos/${owner}/${repo}/git/refs/heads/main`, - { - headers: { - Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - Accept: "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server", - }, - } - ); - - if (!response.ok) { - const masterResponse = await fetch( - `https://api.github.com/repos/${owner}/${repo}/git/refs/heads/master`, - { - headers: { - Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - Accept: "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server", - }, - } - ); - - if (!masterResponse.ok) { - throw new Error( - "Could not find default branch (tried 'main' and 'master')" - ); - } - - const data = GitHubReferenceSchema.parse(await masterResponse.json()); - return data.object.sha; - } - - const data = GitHubReferenceSchema.parse(await response.json()); - return data.object.sha; -} - -async function getFileContents( - owner: string, - repo: string, - path: string, - branch?: string -): Promise<GitHubContent> { - let url = `https://api.github.com/repos/${owner}/${repo}/contents/${path}`; - if (branch) { - url += `?ref=${branch}`; - } - - const response = await fetch(url, { - headers: { - Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - Accept: "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server", - }, - }); - - if (!response.ok) { - throw new Error(`GitHub API error:
${response.statusText}`); - } - - const data = GitHubContentSchema.parse(await response.json()); - - // If it's a file, decode the content - if (!Array.isArray(data) && data.content) { - data.content = Buffer.from(data.content, "base64").toString("utf8"); - } - - return data; -} - -async function createIssue( - owner: string, - repo: string, - options: z.infer -): Promise { - const response = await fetch( - `https://api.github.com/repos/${owner}/${repo}/issues`, - { - method: "POST", - headers: { - Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - Accept: "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server", - "Content-Type": "application/json", - }, - body: JSON.stringify(options), - } - ); - - if (!response.ok) { - throw new Error(`GitHub API error: ${response.statusText}`); - } - - return GitHubIssueSchema.parse(await response.json()); -} - -async function createPullRequest( - owner: string, - repo: string, - options: z.infer -): Promise { - const response = await fetch( - `https://api.github.com/repos/${owner}/${repo}/pulls`, - { - method: "POST", - headers: { - Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - Accept: "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server", - "Content-Type": "application/json", - }, - body: JSON.stringify(options), - } - ); - - if (!response.ok) { - throw new Error(`GitHub API error: ${response.statusText}`); - } - - return GitHubPullRequestSchema.parse(await response.json()); -} - -async function createOrUpdateFile( - owner: string, - repo: string, - path: string, - content: string, - message: string, - branch: string, - sha?: string -): Promise { - const encodedContent = Buffer.from(content).toString("base64"); - - let currentSha = sha; - if (!currentSha) { - try { - const existingFile = await getFileContents(owner, repo, path, branch); - if (!Array.isArray(existingFile)) { - currentSha = existingFile.sha; - } - } catch (error) { - console.error( - "Note: File does not exist in branch, will create new file" - ); - } - } - - const url = `https://api.github.com/repos/${owner}/${repo}/contents/${path}`; - - const body = { - message, - content: encodedContent, - branch, - ...(currentSha ? 
{ sha: currentSha } : {}), - }; - - const response = await fetch(url, { - method: "PUT", - headers: { - Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - Accept: "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server", - "Content-Type": "application/json", - }, - body: JSON.stringify(body), - }); - - if (!response.ok) { - throw new Error(`GitHub API error: ${response.statusText}`); - } - - return GitHubCreateUpdateFileResponseSchema.parse(await response.json()); -} - -async function createTree( - owner: string, - repo: string, - files: FileOperation[], - baseTree?: string -): Promise { - const tree = files.map((file) => ({ - path: file.path, - mode: "100644" as const, - type: "blob" as const, - content: file.content, - })); - - const response = await fetch( - `https://api.github.com/repos/${owner}/${repo}/git/trees`, - { - method: "POST", - headers: { - Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - Accept: "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server", - "Content-Type": "application/json", - }, - body: JSON.stringify({ - tree, - base_tree: baseTree, - }), - } - ); - - if (!response.ok) { - throw new Error(`GitHub API error: ${response.statusText}`); - } - - return GitHubTreeSchema.parse(await response.json()); -} - -async function createCommit( - owner: string, - repo: string, - message: string, - tree: string, - parents: string[] -): Promise { - const response = await fetch( - `https://api.github.com/repos/${owner}/${repo}/git/commits`, - { - method: "POST", - headers: { - Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - Accept: "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server", - "Content-Type": "application/json", - }, - body: JSON.stringify({ - message, - tree, - parents, - }), - } - ); - - if (!response.ok) { - throw new Error(`GitHub API error: ${response.statusText}`); - } - - return GitHubCommitSchema.parse(await response.json()); -} - -async function updateReference( - owner: string, - repo: string, - ref: string, - sha: string -): Promise { - const response = await fetch( - `https://api.github.com/repos/${owner}/${repo}/git/refs/${ref}`, - { - method: "PATCH", - headers: { - Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - Accept: "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server", - "Content-Type": "application/json", - }, - body: JSON.stringify({ - sha, - force: true, - }), - } - ); - - if (!response.ok) { - throw new Error(`GitHub API error: ${response.statusText}`); - } - - return GitHubReferenceSchema.parse(await response.json()); -} - -async function pushFiles( - owner: string, - repo: string, - branch: string, - files: FileOperation[], - message: string -): Promise { - const refResponse = await fetch( - `https://api.github.com/repos/${owner}/${repo}/git/refs/heads/${branch}`, - { - headers: { - Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - Accept: "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server", - }, - } - ); - - if (!refResponse.ok) { - throw new Error(`GitHub API error: ${refResponse.statusText}`); - } - - const ref = GitHubReferenceSchema.parse(await refResponse.json()); - const commitSha = ref.object.sha; - - const tree = await createTree(owner, repo, files, commitSha); - const commit = await createCommit(owner, repo, message, tree.sha, [ - commitSha, - ]); - return await updateReference(owner, repo, `heads/${branch}`, commit.sha); -} - -async function searchRepositories( - query: string, - page: number = 1, - perPage: 
number = 30 -): Promise { - const url = new URL("https://api.github.com/search/repositories"); - url.searchParams.append("q", query); - url.searchParams.append("page", page.toString()); - url.searchParams.append("per_page", perPage.toString()); - - const response = await fetch(url.toString(), { - headers: { - Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - Accept: "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server", - }, - }); - - if (!response.ok) { - throw new Error(`GitHub API error: ${response.statusText}`); - } - - return GitHubSearchResponseSchema.parse(await response.json()); -} - -async function createRepository( - options: z.infer -): Promise { - const response = await fetch("https://api.github.com/user/repos", { - method: "POST", - headers: { - Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - Accept: "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server", - "Content-Type": "application/json", - }, - body: JSON.stringify(options), - }); - - if (!response.ok) { - throw new Error(`GitHub API error: ${response.statusText}`); - } - - return GitHubRepositorySchema.parse(await response.json()); -} - -async function listCommits( - owner: string, - repo: string, - page: number = 1, - perPage: number = 30, - sha?: string, -): Promise { - const url = new URL(`https://api.github.com/repos/${owner}/${repo}/commits`); - url.searchParams.append("page", page.toString()); - url.searchParams.append("per_page", perPage.toString()); - if (sha) { - url.searchParams.append("sha", sha); - } - - const response = await fetch( - url.toString(), - { - method: "GET", - headers: { - "Authorization": `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - "Accept": "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server", - "Content-Type": "application/json" - }, - } - ); - - if (!response.ok) { - throw new Error(`GitHub API error: ${response.statusText}`); - } - - return GitHubListCommitsSchema.parse(await response.json()); -} - -async function listIssues( - owner: string, - repo: string, - options: Omit, 'owner' | 'repo'> -): Promise { - const url = new URL(`https://api.github.com/repos/${owner}/${repo}/issues`); - - // Add query parameters - if (options.state) url.searchParams.append('state', options.state); - if (options.labels) url.searchParams.append('labels', options.labels.join(',')); - if (options.sort) url.searchParams.append('sort', options.sort); - if (options.direction) url.searchParams.append('direction', options.direction); - if (options.since) url.searchParams.append('since', options.since); - if (options.page) url.searchParams.append('page', options.page.toString()); - if (options.per_page) url.searchParams.append('per_page', options.per_page.toString()); - - const response = await fetch(url.toString(), { - headers: { - "Authorization": `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - "Accept": "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server" - } - }); - - if (!response.ok) { - throw new Error(`GitHub API error: ${response.statusText}`); - } - - return z.array(GitHubIssueSchema).parse(await response.json()); -} - -async function updateIssue( - owner: string, - repo: string, - issueNumber: number, - options: Omit, 'owner' | 'repo' | 'issue_number'> -): Promise { - const response = await fetch( - `https://api.github.com/repos/${owner}/${repo}/issues/${issueNumber}`, - { - method: "PATCH", - headers: { - "Authorization": `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - "Accept": "application/vnd.github.v3+json", - "User-Agent": 
"github-mcp-server", - "Content-Type": "application/json" - }, - body: JSON.stringify({ - title: options.title, - body: options.body, - state: options.state, - labels: options.labels, - assignees: options.assignees, - milestone: options.milestone - }) - } - ); - - if (!response.ok) { - throw new Error(`GitHub API error: ${response.statusText}`); - } - - return GitHubIssueSchema.parse(await response.json()); -} - -async function addIssueComment( - owner: string, - repo: string, - issueNumber: number, - body: string -): Promise> { - const response = await fetch( - `https://api.github.com/repos/${owner}/${repo}/issues/${issueNumber}/comments`, - { - method: "POST", - headers: { - "Authorization": `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - "Accept": "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server", - "Content-Type": "application/json" - }, - body: JSON.stringify({ body }) - } - ); - - if (!response.ok) { - throw new Error(`GitHub API error: ${response.statusText}`); - } - - return IssueCommentSchema.parse(await response.json()); -} - -async function searchCode( - params: z.infer -): Promise { - const url = new URL("https://api.github.com/search/code"); - Object.entries(params).forEach(([key, value]) => { - if (value !== undefined && value !== null) { - url.searchParams.append(key, value.toString()); - } - }); - - const response = await fetch(url.toString(), { - headers: { - Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - Accept: "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server", - }, - }); - - if (!response.ok) { - throw new Error(`GitHub API error: ${response.statusText}`); - } - - return SearchCodeResponseSchema.parse(await response.json()); -} - -async function searchIssues( - params: z.infer -): Promise { - const url = new URL("https://api.github.com/search/issues"); - Object.entries(params).forEach(([key, value]) => { - if (value !== undefined && value !== null) { - url.searchParams.append(key, value.toString()); - } - }); - - const response = await fetch(url.toString(), { - headers: { - Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - Accept: "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server", - }, - }); - - if (!response.ok) { - throw new Error(`GitHub API error: ${response.statusText}`); - } - - return SearchIssuesResponseSchema.parse(await response.json()); -} - -async function searchUsers( - params: z.infer -): Promise { - const url = new URL("https://api.github.com/search/users"); - Object.entries(params).forEach(([key, value]) => { - if (value !== undefined && value !== null) { - url.searchParams.append(key, value.toString()); - } - }); - - const response = await fetch(url.toString(), { - headers: { - Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - Accept: "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server", - }, - }); - - if (!response.ok) { - throw new Error(`GitHub API error: ${response.statusText}`); - } - - return SearchUsersResponseSchema.parse(await response.json()); -} - -async function getIssue( - owner: string, - repo: string, - issueNumber: number -): Promise { - const response = await fetch( - `https://api.github.com/repos/${owner}/${repo}/issues/${issueNumber}`, - { - headers: { - Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - Accept: "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server", - }, - } -); - - if (!response.ok) { - throw new Error(`Github API error: ${response.statusText}`); - } - - return GitHubIssueSchema.parse(await 
response.json()); + return message; } server.setRequestHandler(ListToolsRequestSchema, async () => { @@ -721,91 +67,87 @@ server.setRequestHandler(ListToolsRequestSchema, async () => { { name: "create_or_update_file", description: "Create or update a single file in a GitHub repository", - inputSchema: zodToJsonSchema(CreateOrUpdateFileSchema), + inputSchema: zodToJsonSchema(files.CreateOrUpdateFileSchema), }, { name: "search_repositories", description: "Search for GitHub repositories", - inputSchema: zodToJsonSchema(SearchRepositoriesSchema), + inputSchema: zodToJsonSchema(repository.SearchRepositoriesSchema), }, { name: "create_repository", description: "Create a new GitHub repository in your account", - inputSchema: zodToJsonSchema(CreateRepositorySchema), + inputSchema: zodToJsonSchema(repository.CreateRepositoryOptionsSchema), }, { name: "get_file_contents", - description: - "Get the contents of a file or directory from a GitHub repository", - inputSchema: zodToJsonSchema(GetFileContentsSchema), + description: "Get the contents of a file or directory from a GitHub repository", + inputSchema: zodToJsonSchema(files.GetFileContentsSchema), }, { name: "push_files", - description: - "Push multiple files to a GitHub repository in a single commit", - inputSchema: zodToJsonSchema(PushFilesSchema), + description: "Push multiple files to a GitHub repository in a single commit", + inputSchema: zodToJsonSchema(files.PushFilesSchema), }, { name: "create_issue", description: "Create a new issue in a GitHub repository", - inputSchema: zodToJsonSchema(CreateIssueSchema), + inputSchema: zodToJsonSchema(issues.CreateIssueSchema), }, { name: "create_pull_request", description: "Create a new pull request in a GitHub repository", - inputSchema: zodToJsonSchema(CreatePullRequestSchema), + inputSchema: zodToJsonSchema(pulls.CreatePullRequestSchema), }, { name: "fork_repository", - description: - "Fork a GitHub repository to your account or specified organization", - inputSchema: zodToJsonSchema(ForkRepositorySchema), + description: "Fork a GitHub repository to your account or specified organization", + inputSchema: zodToJsonSchema(repository.ForkRepositorySchema), }, { name: "create_branch", description: "Create a new branch in a GitHub repository", - inputSchema: zodToJsonSchema(CreateBranchSchema), + inputSchema: zodToJsonSchema(branches.CreateBranchSchema), }, { name: "list_commits", description: "Get list of commits of a branch in a GitHub repository", - inputSchema: zodToJsonSchema(ListCommitsSchema) + inputSchema: zodToJsonSchema(commits.ListCommitsSchema) }, { name: "list_issues", description: "List issues in a GitHub repository with filtering options", - inputSchema: zodToJsonSchema(ListIssuesOptionsSchema) + inputSchema: zodToJsonSchema(issues.ListIssuesOptionsSchema) }, { name: "update_issue", description: "Update an existing issue in a GitHub repository", - inputSchema: zodToJsonSchema(UpdateIssueOptionsSchema) + inputSchema: zodToJsonSchema(issues.UpdateIssueOptionsSchema) }, { name: "add_issue_comment", description: "Add a comment to an existing issue", - inputSchema: zodToJsonSchema(IssueCommentSchema) + inputSchema: zodToJsonSchema(issues.IssueCommentSchema) }, { name: "search_code", description: "Search for code across GitHub repositories", - inputSchema: zodToJsonSchema(SearchCodeSchema), + inputSchema: zodToJsonSchema(search.SearchCodeSchema), }, { name: "search_issues", - description: - "Search for issues and pull requests across GitHub repositories", - inputSchema: 
zodToJsonSchema(SearchIssuesSchema), + description: "Search for issues and pull requests across GitHub repositories", + inputSchema: zodToJsonSchema(search.SearchIssuesSchema), }, { name: "search_users", description: "Search for users on GitHub", - inputSchema: zodToJsonSchema(SearchUsersSchema), + inputSchema: zodToJsonSchema(search.SearchUsersSchema), }, { name: "get_issue", description: "Get details of a specific issue in a GitHub repository.", - inputSchema: zodToJsonSchema(GetIssueSchema) + inputSchema: zodToJsonSchema(issues.GetIssueSchema) } ], }; @@ -819,55 +161,29 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => { switch (request.params.name) { case "fork_repository": { - const args = ForkRepositorySchema.parse(request.params.arguments); - const fork = await forkRepository( - args.owner, - args.repo, - args.organization - ); + const args = repository.ForkRepositorySchema.parse(request.params.arguments); + const fork = await repository.forkRepository(args.owner, args.repo, args.organization); return { content: [{ type: "text", text: JSON.stringify(fork, null, 2) }], }; } case "create_branch": { - const args = CreateBranchSchema.parse(request.params.arguments); - let sha: string; - if (args.from_branch) { - const response = await fetch( - `https://api.github.com/repos/${args.owner}/${args.repo}/git/refs/heads/${args.from_branch}`, - { - headers: { - Authorization: `token ${GITHUB_PERSONAL_ACCESS_TOKEN}`, - Accept: "application/vnd.github.v3+json", - "User-Agent": "github-mcp-server", - }, - } - ); - - if (!response.ok) { - throw new Error(`Source branch '${args.from_branch}' not found`); - } - - const data = GitHubReferenceSchema.parse(await response.json()); - sha = data.object.sha; - } else { - sha = await getDefaultBranchSHA(args.owner, args.repo); - } - - const branch = await createBranch(args.owner, args.repo, { - ref: args.branch, - sha, - }); - + const args = branches.CreateBranchSchema.parse(request.params.arguments); + const branch = await branches.createBranchFromRef( + args.owner, + args.repo, + args.branch, + args.from_branch + ); return { content: [{ type: "text", text: JSON.stringify(branch, null, 2) }], }; } case "search_repositories": { - const args = SearchRepositoriesSchema.parse(request.params.arguments); - const results = await searchRepositories( + const args = repository.SearchRepositoriesSchema.parse(request.params.arguments); + const results = await repository.searchRepositories( args.query, args.page, args.perPage @@ -878,18 +194,16 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => { } case "create_repository": { - const args = CreateRepositorySchema.parse(request.params.arguments); - const repository = await createRepository(args); + const args = repository.CreateRepositoryOptionsSchema.parse(request.params.arguments); + const result = await repository.createRepository(args); return { - content: [ - { type: "text", text: JSON.stringify(repository, null, 2) }, - ], + content: [{ type: "text", text: JSON.stringify(result, null, 2) }], }; } case "get_file_contents": { - const args = GetFileContentsSchema.parse(request.params.arguments); - const contents = await getFileContents( + const args = files.GetFileContentsSchema.parse(request.params.arguments); + const contents = await files.getFileContents( args.owner, args.repo, args.path, @@ -901,8 +215,8 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => { } case "create_or_update_file": { - const args = 
CreateOrUpdateFileSchema.parse(request.params.arguments); - const result = await createOrUpdateFile( + const args = files.CreateOrUpdateFileSchema.parse(request.params.arguments); + const result = await files.createOrUpdateFile( args.owner, args.repo, args.path, @@ -917,8 +231,8 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => { } case "push_files": { - const args = PushFilesSchema.parse(request.params.arguments); - const result = await pushFiles( + const args = files.PushFilesSchema.parse(request.params.arguments); + const result = await files.pushFiles( args.owner, args.repo, args.branch, @@ -931,83 +245,84 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => { } case "create_issue": { - const args = CreateIssueSchema.parse(request.params.arguments); + const args = issues.CreateIssueSchema.parse(request.params.arguments); const { owner, repo, ...options } = args; - const issue = await createIssue(owner, repo, options); + const issue = await issues.createIssue(owner, repo, options); return { content: [{ type: "text", text: JSON.stringify(issue, null, 2) }], }; } case "create_pull_request": { - const args = CreatePullRequestSchema.parse(request.params.arguments); - const { owner, repo, ...options } = args; - const pullRequest = await createPullRequest(owner, repo, options); + const args = pulls.CreatePullRequestSchema.parse(request.params.arguments); + const pullRequest = await pulls.createPullRequest(args); return { - content: [ - { type: "text", text: JSON.stringify(pullRequest, null, 2) }, - ], + content: [{ type: "text", text: JSON.stringify(pullRequest, null, 2) }], }; } case "search_code": { - const args = SearchCodeSchema.parse(request.params.arguments); - const results = await searchCode(args); + const args = search.SearchCodeSchema.parse(request.params.arguments); + const results = await search.searchCode(args); return { content: [{ type: "text", text: JSON.stringify(results, null, 2) }], }; } case "search_issues": { - const args = SearchIssuesSchema.parse(request.params.arguments); - const results = await searchIssues(args); + const args = search.SearchIssuesSchema.parse(request.params.arguments); + const results = await search.searchIssues(args); return { content: [{ type: "text", text: JSON.stringify(results, null, 2) }], }; } case "search_users": { - const args = SearchUsersSchema.parse(request.params.arguments); - const results = await searchUsers(args); + const args = search.SearchUsersSchema.parse(request.params.arguments); + const results = await search.searchUsers(args); return { content: [{ type: "text", text: JSON.stringify(results, null, 2) }], }; } case "list_issues": { - const args = ListIssuesOptionsSchema.parse(request.params.arguments); + const args = issues.ListIssuesOptionsSchema.parse(request.params.arguments); const { owner, repo, ...options } = args; - const issues = await listIssues(owner, repo, options); - return { toolResult: issues }; + const result = await issues.listIssues(owner, repo, options); + return { toolResult: result }; } case "update_issue": { - const args = UpdateIssueOptionsSchema.parse(request.params.arguments); + const args = issues.UpdateIssueOptionsSchema.parse(request.params.arguments); const { owner, repo, issue_number, ...options } = args; - const issue = await updateIssue(owner, repo, issue_number, options); - return { toolResult: issue }; + const result = await issues.updateIssue(owner, repo, issue_number, options); + return { toolResult: result }; } case "add_issue_comment": { - const args = 
IssueCommentSchema.parse(request.params.arguments);
+        const args = issues.IssueCommentSchema.parse(request.params.arguments);
         const { owner, repo, issue_number, body } = args;
-        const comment = await addIssueComment(owner, repo, issue_number, body);
-        return { toolResult: comment };
+        const result = await issues.addIssueComment(owner, repo, issue_number, body);
+        return { toolResult: result };
       }
 
       case "list_commits": {
-        const args = ListCommitsSchema.parse(request.params.arguments);
-        const results = await listCommits(args.owner, args.repo, args.page, args.perPage, args.sha);
-        return { content: [{ type: "text", text: JSON.stringify(results, null, 2) }] };
+        const args = commits.ListCommitsSchema.parse(request.params.arguments);
+        const results = await commits.listCommits(
+          args.owner,
+          args.repo,
+          args.page,
+          args.perPage,
+          args.sha
+        );
+        return {
+          content: [{ type: "text", text: JSON.stringify(results, null, 2) }],
+        };
       }
 
       case "get_issue": {
-        const args = z.object({
-          owner: z.string(),
-          repo: z.string(),
-          issue_number: z.number()
-        }).parse(request.params.arguments);
-        const issue = await getIssue(args.owner, args.repo, args.issue_number);
+        const args = issues.GetIssueSchema.parse(request.params.arguments);
+        const issue = await issues.getIssue(args.owner, args.repo, args.issue_number);
         return { toolResult: issue };
       }
 
@@ -1016,14 +331,10 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
     }
   } catch (error) {
     if (error instanceof z.ZodError) {
-      throw new Error(
-        `Invalid arguments: ${error.errors
-          .map(
-            (e: z.ZodError["errors"][number]) =>
-              `${e.path.join(".")}: ${e.message}`
-          )
-          .join(", ")}`
-      );
+      throw new Error(`Invalid input: ${JSON.stringify(error.errors)}`);
+    }
+    if (isGitHubError(error)) {
+      throw new Error(formatGitHubError(error));
     }
     throw error;
   }
@@ -1038,4 +349,4 @@ async function runServer() {
 
 runServer().catch((error) => {
   console.error("Fatal error in main():", error);
   process.exit(1);
-});
+});
\ No newline at end of file
diff --git a/src/github/operations/branches.ts b/src/github/operations/branches.ts
new file mode 100644
index 00000000..9b7033b5
--- /dev/null
+++ b/src/github/operations/branches.ts
@@ -0,0 +1,112 @@
+import { z } from "zod";
+import { githubRequest } from "../common/utils.js";
+import { GitHubReferenceSchema } from "../common/types.js";
+
+// Schema definitions
+export const CreateBranchOptionsSchema = z.object({
+  ref: z.string(),
+  sha: z.string(),
+});
+
+export const CreateBranchSchema = z.object({
+  owner: z.string().describe("Repository owner (username or organization)"),
+  repo: z.string().describe("Repository name"),
+  branch: z.string().describe("Name for the new branch"),
+  from_branch: z.string().optional().describe("Optional: source branch to create from (defaults to the repository's default branch)"),
+});
+
+// Type exports
+export type CreateBranchOptions = z.infer<typeof CreateBranchOptionsSchema>;
+
+// Function implementations
+export async function getDefaultBranchSHA(owner: string, repo: string): Promise<string> {
+  try {
+    const response = await githubRequest(
+      `https://api.github.com/repos/${owner}/${repo}/git/refs/heads/main`
+    );
+    const data = GitHubReferenceSchema.parse(response);
+    return data.object.sha;
+  } catch (error) {
+    // githubRequest throws on a 404 rather than returning a falsy value, so the
+    // 'master' fallback must catch as well; otherwise the friendly error below
+    // would be unreachable.
+    try {
+      const masterResponse = await githubRequest(
+        `https://api.github.com/repos/${owner}/${repo}/git/refs/heads/master`
+      );
+      const data = GitHubReferenceSchema.parse(masterResponse);
+      return data.object.sha;
+    } catch {
+      throw new Error("Could not find default branch (tried 'main' and 'master')");
+    }
+  }
+}
+
+export async function createBranch(
+  owner: string,
+  repo: string,
+  options: CreateBranchOptions
+): Promise<z.infer<typeof GitHubReferenceSchema>> {
+  const fullRef = `refs/heads/${options.ref}`;
+
+  const response = await githubRequest(
+    `https://api.github.com/repos/${owner}/${repo}/git/refs`,
+    {
+      method: "POST",
+      body: {
+        ref: fullRef,
+        sha: options.sha,
+      },
+    }
+  );
+
+  return GitHubReferenceSchema.parse(response);
+}
+
+export async function getBranchSHA(
+  owner: string,
+  repo: string,
+  branch: string
+): Promise<string> {
+  const response = await githubRequest(
+    `https://api.github.com/repos/${owner}/${repo}/git/refs/heads/${branch}`
+  );
+
+  const data = GitHubReferenceSchema.parse(response);
+  return data.object.sha;
+}
+
+export async function createBranchFromRef(
+  owner: string,
+  repo: string,
+  newBranch: string,
+  fromBranch?: string
+): Promise<z.infer<typeof GitHubReferenceSchema>> {
+  let sha: string;
+  if (fromBranch) {
+    sha = await getBranchSHA(owner, repo, fromBranch);
+  } else {
+    sha = await getDefaultBranchSHA(owner, repo);
+  }
+
+  return createBranch(owner, repo, {
+    ref: newBranch,
+    sha,
+  });
+}
+
+export async function updateBranch(
+  owner: string,
+  repo: string,
+  branch: string,
+  sha: string
+): Promise<z.infer<typeof GitHubReferenceSchema>> {
+  const response = await githubRequest(
+    `https://api.github.com/repos/${owner}/${repo}/git/refs/heads/${branch}`,
+    {
+      method: "PATCH",
+      body: {
+        sha,
+        force: true,
+      },
+    }
+  );
+
+  return GitHubReferenceSchema.parse(response);
+}
diff --git a/src/github/operations/commits.ts b/src/github/operations/commits.ts
new file mode 100644
index 00000000..b10e1b5f
--- /dev/null
+++ b/src/github/operations/commits.ts
@@ -0,0 +1,26 @@
+import { z } from "zod";
+import { githubRequest, buildUrl } from "../common/utils.js";
+
+export const ListCommitsSchema = z.object({
+  owner: z.string(),
+  repo: z.string(),
+  sha: z.string().optional(),
+  page: z.number().optional(),
+  perPage: z.number().optional()
+});
+
+export async function listCommits(
+  owner: string,
+  repo: string,
+  page?: number,
+  perPage?: number,
+  sha?: string
+) {
+  return githubRequest(
+    buildUrl(`https://api.github.com/repos/${owner}/${repo}/commits`, {
+      page: page?.toString(),
+      per_page: perPage?.toString(),
+      sha
+    })
+  );
+}
\ No newline at end of file
diff --git a/src/github/operations/files.ts b/src/github/operations/files.ts
new file mode 100644
index 00000000..9517946e
--- /dev/null
+++ b/src/github/operations/files.ts
@@ -0,0 +1,219 @@
+import { z } from "zod";
+import { githubRequest } from "../common/utils.js";
+import {
+  GitHubContentSchema,
+  GitHubAuthorSchema,
+  GitHubTreeSchema,
+  GitHubCommitSchema,
+  GitHubReferenceSchema,
+  GitHubFileContentSchema,
+} from "../common/types.js";
+
+// Schema definitions
+export const FileOperationSchema = z.object({
+  path: z.string(),
+  content: z.string(),
+});
+
+export const CreateOrUpdateFileSchema = z.object({
+  owner: z.string().describe("Repository owner (username or organization)"),
+  repo: z.string().describe("Repository name"),
+  path: z.string().describe("Path where to create/update the file"),
+  content: z.string().describe("Content of the file"),
+  message: z.string().describe("Commit message"),
+  branch: z.string().describe("Branch to create/update the file in"),
+  sha: z.string().optional().describe("SHA of the file being replaced (required when updating existing files)"),
+});
+
+export const GetFileContentsSchema = z.object({
+  owner: z.string().describe("Repository owner (username or organization)"),
+  repo: z.string().describe("Repository name"),
+  path:
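+  // Hypothetical usage sketch (illustration only, not part of this diff): reading a
+  // decoded file via getFileContents, which is defined further below —
+  //   const file = await getFileContents("octocat", "Hello-World", "README.md");
+  //   if (!Array.isArray(file)) console.log(file.content); // base64 already decoded to UTF-8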
z.string().describe("Path to the file or directory"), + branch: z.string().optional().describe("Branch to get contents from"), +}); + +export const PushFilesSchema = z.object({ + owner: z.string().describe("Repository owner (username or organization)"), + repo: z.string().describe("Repository name"), + branch: z.string().describe("Branch to push to (e.g., 'main' or 'master')"), + files: z.array(FileOperationSchema).describe("Array of files to push"), + message: z.string().describe("Commit message"), +}); + +export const GitHubCreateUpdateFileResponseSchema = z.object({ + content: GitHubFileContentSchema.nullable(), + commit: z.object({ + sha: z.string(), + node_id: z.string(), + url: z.string(), + html_url: z.string(), + author: GitHubAuthorSchema, + committer: GitHubAuthorSchema, + message: z.string(), + tree: z.object({ + sha: z.string(), + url: z.string(), + }), + parents: z.array( + z.object({ + sha: z.string(), + url: z.string(), + html_url: z.string(), + }) + ), + }), +}); + +// Type exports +export type FileOperation = z.infer; +export type GitHubCreateUpdateFileResponse = z.infer; + +// Function implementations +export async function getFileContents( + owner: string, + repo: string, + path: string, + branch?: string +) { + let url = `https://api.github.com/repos/${owner}/${repo}/contents/${path}`; + if (branch) { + url += `?ref=${branch}`; + } + + const response = await githubRequest(url); + const data = GitHubContentSchema.parse(response); + + // If it's a file, decode the content + if (!Array.isArray(data) && data.content) { + data.content = Buffer.from(data.content, "base64").toString("utf8"); + } + + return data; +} + +export async function createOrUpdateFile( + owner: string, + repo: string, + path: string, + content: string, + message: string, + branch: string, + sha?: string +) { + const encodedContent = Buffer.from(content).toString("base64"); + + let currentSha = sha; + if (!currentSha) { + try { + const existingFile = await getFileContents(owner, repo, path, branch); + if (!Array.isArray(existingFile)) { + currentSha = existingFile.sha; + } + } catch (error) { + console.error("Note: File does not exist in branch, will create new file"); + } + } + + const url = `https://api.github.com/repos/${owner}/${repo}/contents/${path}`; + const body = { + message, + content: encodedContent, + branch, + ...(currentSha ? 
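+    // The GitHub contents API rejects an update that omits the existing blob SHA, so the
+    // lookup above fills it in when the caller does not pass one; the conditional spread
+    // keeps a brand-new file's request body free of a "sha" key.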
{ sha: currentSha } : {}), + }; + + const response = await githubRequest(url, { + method: "PUT", + body, + }); + + return GitHubCreateUpdateFileResponseSchema.parse(response); +} + +async function createTree( + owner: string, + repo: string, + files: FileOperation[], + baseTree?: string +) { + const tree = files.map((file) => ({ + path: file.path, + mode: "100644" as const, + type: "blob" as const, + content: file.content, + })); + + const response = await githubRequest( + `https://api.github.com/repos/${owner}/${repo}/git/trees`, + { + method: "POST", + body: { + tree, + base_tree: baseTree, + }, + } + ); + + return GitHubTreeSchema.parse(response); +} + +async function createCommit( + owner: string, + repo: string, + message: string, + tree: string, + parents: string[] +) { + const response = await githubRequest( + `https://api.github.com/repos/${owner}/${repo}/git/commits`, + { + method: "POST", + body: { + message, + tree, + parents, + }, + } + ); + + return GitHubCommitSchema.parse(response); +} + +async function updateReference( + owner: string, + repo: string, + ref: string, + sha: string +) { + const response = await githubRequest( + `https://api.github.com/repos/${owner}/${repo}/git/refs/${ref}`, + { + method: "PATCH", + body: { + sha, + force: true, + }, + } + ); + + return GitHubReferenceSchema.parse(response); +} + +export async function pushFiles( + owner: string, + repo: string, + branch: string, + files: FileOperation[], + message: string +) { + const refResponse = await githubRequest( + `https://api.github.com/repos/${owner}/${repo}/git/refs/heads/${branch}` + ); + + const ref = GitHubReferenceSchema.parse(refResponse); + const commitSha = ref.object.sha; + + const tree = await createTree(owner, repo, files, commitSha); + const commit = await createCommit(owner, repo, message, tree.sha, [commitSha]); + return await updateReference(owner, repo, `heads/${branch}`, commit.sha); +} diff --git a/src/github/operations/issues.ts b/src/github/operations/issues.ts new file mode 100644 index 00000000..d2907bf7 --- /dev/null +++ b/src/github/operations/issues.ts @@ -0,0 +1,118 @@ +import { z } from "zod"; +import { githubRequest, buildUrl } from "../common/utils.js"; + +export const GetIssueSchema = z.object({ + owner: z.string(), + repo: z.string(), + issue_number: z.number(), +}); + +export const IssueCommentSchema = z.object({ + owner: z.string(), + repo: z.string(), + issue_number: z.number(), + body: z.string(), +}); + +export const CreateIssueOptionsSchema = z.object({ + title: z.string(), + body: z.string().optional(), + assignees: z.array(z.string()).optional(), + milestone: z.number().optional(), + labels: z.array(z.string()).optional(), +}); + +export const CreateIssueSchema = z.object({ + owner: z.string(), + repo: z.string(), + ...CreateIssueOptionsSchema.shape, +}); + +export const ListIssuesOptionsSchema = z.object({ + owner: z.string(), + repo: z.string(), + direction: z.enum(["asc", "desc"]).optional(), + labels: z.array(z.string()).optional(), + page: z.number().optional(), + per_page: z.number().optional(), + since: z.string().optional(), + sort: z.enum(["created", "updated", "comments"]).optional(), + state: z.enum(["open", "closed", "all"]).optional(), +}); + +export const UpdateIssueOptionsSchema = z.object({ + owner: z.string(), + repo: z.string(), + issue_number: z.number(), + title: z.string().optional(), + body: z.string().optional(), + assignees: z.array(z.string()).optional(), + milestone: z.number().optional(), + labels: z.array(z.string()).optional(), + 
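+  // Every update field in this schema is optional, so a partial patch such as
+  // { state: "closed" } alone closes an issue without touching its other fields.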
state: z.enum(["open", "closed"]).optional(),
+});
+
+export async function getIssue(owner: string, repo: string, issue_number: number) {
+  return githubRequest(`https://api.github.com/repos/${owner}/${repo}/issues/${issue_number}`);
+}
+
+export async function addIssueComment(
+  owner: string,
+  repo: string,
+  issue_number: number,
+  body: string
+) {
+  return githubRequest(`https://api.github.com/repos/${owner}/${repo}/issues/${issue_number}/comments`, {
+    method: "POST",
+    body: { body },
+  });
+}
+
+export async function createIssue(
+  owner: string,
+  repo: string,
+  options: z.infer<typeof CreateIssueOptionsSchema>
+) {
+  return githubRequest(
+    `https://api.github.com/repos/${owner}/${repo}/issues`,
+    {
+      method: "POST",
+      body: options,
+    }
+  );
+}
+
+export async function listIssues(
+  owner: string,
+  repo: string,
+  options: Omit<z.infer<typeof ListIssuesOptionsSchema>, "owner" | "repo">
+) {
+  const urlParams: Record<string, string | undefined> = {
+    direction: options.direction,
+    labels: options.labels?.join(","),
+    page: options.page?.toString(),
+    per_page: options.per_page?.toString(),
+    since: options.since,
+    sort: options.sort,
+    state: options.state
+  };
+
+  return githubRequest(
+    buildUrl(`https://api.github.com/repos/${owner}/${repo}/issues`, urlParams)
+  );
+}
+
+export async function updateIssue(
+  owner: string,
+  repo: string,
+  issue_number: number,
+  options: Omit<z.infer<typeof UpdateIssueOptionsSchema>, "owner" | "repo" | "issue_number">
+) {
+  return githubRequest(
+    `https://api.github.com/repos/${owner}/${repo}/issues/${issue_number}`,
+    {
+      method: "PATCH",
+      body: options,
+    }
+  );
+}
\ No newline at end of file
diff --git a/src/github/operations/pulls.ts b/src/github/operations/pulls.ts
new file mode 100644
index 00000000..9b1a5bd7
--- /dev/null
+++ b/src/github/operations/pulls.ts
@@ -0,0 +1,302 @@
+import { z } from "zod";
+import { githubRequest } from "../common/utils.js";
+import {
+  GitHubPullRequestSchema,
+  GitHubIssueAssigneeSchema,
+  GitHubRepositorySchema,
+} from "../common/types.js";
+
+// Schema definitions
+export const PullRequestFileSchema = z.object({
+  sha: z.string(),
+  filename: z.string(),
+  status: z.enum(['added', 'removed', 'modified', 'renamed', 'copied', 'changed', 'unchanged']),
+  additions: z.number(),
+  deletions: z.number(),
+  changes: z.number(),
+  blob_url: z.string(),
+  raw_url: z.string(),
+  contents_url: z.string(),
+  patch: z.string().optional()
+});
+
+export const StatusCheckSchema = z.object({
+  url: z.string(),
+  state: z.enum(['error', 'failure', 'pending', 'success']),
+  description: z.string().nullable(),
+  target_url: z.string().nullable(),
+  context: z.string(),
+  created_at: z.string(),
+  updated_at: z.string()
+});
+
+export const CombinedStatusSchema = z.object({
+  state: z.enum(['error', 'failure', 'pending', 'success']),
+  statuses: z.array(StatusCheckSchema),
+  sha: z.string(),
+  total_count: z.number()
+});
+
+export const PullRequestCommentSchema = z.object({
+  url: z.string(),
+  id: z.number(),
+  node_id: z.string(),
+  pull_request_review_id: z.number().nullable(),
+  diff_hunk: z.string(),
+  path: z.string().nullable(),
+  position: z.number().nullable(),
+  original_position: z.number().nullable(),
+  commit_id: z.string(),
+  original_commit_id: z.string(),
+  user: GitHubIssueAssigneeSchema,
+  body: z.string(),
+  created_at: z.string(),
+  updated_at: z.string(),
+  html_url: z.string(),
+  pull_request_url: z.string(),
+  author_association: z.string(),
+  _links: z.object({
+    self: z.object({ href: z.string() }),
+    html: z.object({ href: z.string() }),
+    pull_request: z.object({ href: z.string() })
+  })
+});
+
+export const
PullRequestReviewSchema = z.object({ + id: z.number(), + node_id: z.string(), + user: GitHubIssueAssigneeSchema, + body: z.string().nullable(), + state: z.enum(['APPROVED', 'CHANGES_REQUESTED', 'COMMENTED', 'DISMISSED', 'PENDING']), + html_url: z.string(), + pull_request_url: z.string(), + commit_id: z.string(), + submitted_at: z.string().nullable(), + author_association: z.string() +}); + +// Input schemas +export const CreatePullRequestSchema = z.object({ + owner: z.string().describe("Repository owner (username or organization)"), + repo: z.string().describe("Repository name"), + title: z.string().describe("Pull request title"), + body: z.string().optional().describe("Pull request body/description"), + head: z.string().describe("The name of the branch where your changes are implemented"), + base: z.string().describe("The name of the branch you want the changes pulled into"), + draft: z.boolean().optional().describe("Whether to create the pull request as a draft"), + maintainer_can_modify: z.boolean().optional().describe("Whether maintainers can modify the pull request") +}); + +export const GetPullRequestSchema = z.object({ + owner: z.string().describe("Repository owner (username or organization)"), + repo: z.string().describe("Repository name"), + pull_number: z.number().describe("Pull request number") +}); + +export const ListPullRequestsSchema = z.object({ + owner: z.string().describe("Repository owner (username or organization)"), + repo: z.string().describe("Repository name"), + state: z.enum(['open', 'closed', 'all']).optional().describe("State of the pull requests to return"), + head: z.string().optional().describe("Filter by head user or head organization and branch name"), + base: z.string().optional().describe("Filter by base branch name"), + sort: z.enum(['created', 'updated', 'popularity', 'long-running']).optional().describe("What to sort results by"), + direction: z.enum(['asc', 'desc']).optional().describe("The direction of the sort"), + per_page: z.number().optional().describe("Results per page (max 100)"), + page: z.number().optional().describe("Page number of the results") +}); + +export const CreatePullRequestReviewSchema = z.object({ + owner: z.string().describe("Repository owner (username or organization)"), + repo: z.string().describe("Repository name"), + pull_number: z.number().describe("Pull request number"), + commit_id: z.string().optional().describe("The SHA of the commit that needs a review"), + body: z.string().describe("The body text of the review"), + event: z.enum(['APPROVE', 'REQUEST_CHANGES', 'COMMENT']).describe("The review action to perform"), + comments: z.array(z.object({ + path: z.string().describe("The relative path to the file being commented on"), + position: z.number().describe("The position in the diff where you want to add a review comment"), + body: z.string().describe("Text of the review comment") + })).optional().describe("Comments to post as part of the review") +}); + +export const MergePullRequestSchema = z.object({ + owner: z.string().describe("Repository owner (username or organization)"), + repo: z.string().describe("Repository name"), + pull_number: z.number().describe("Pull request number"), + commit_title: z.string().optional().describe("Title for the automatic commit message"), + commit_message: z.string().optional().describe("Extra detail to append to automatic commit message"), + merge_method: z.enum(['merge', 'squash', 'rebase']).optional().describe("Merge method to use") +}); + +export const GetPullRequestFilesSchema = 
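+// Hypothetical review sketch (illustration only, not part of this diff) using the
+// schemas above with createPullRequestReview, defined further below:
+//   await createPullRequestReview("octocat", "Hello-World", 1347, { body: "LGTM", event: "APPROVE" });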
z.object({
+  owner: z.string().describe("Repository owner (username or organization)"),
+  repo: z.string().describe("Repository name"),
+  pull_number: z.number().describe("Pull request number")
+});
+
+export const GetPullRequestStatusSchema = z.object({
+  owner: z.string().describe("Repository owner (username or organization)"),
+  repo: z.string().describe("Repository name"),
+  pull_number: z.number().describe("Pull request number")
+});
+
+export const UpdatePullRequestBranchSchema = z.object({
+  owner: z.string().describe("Repository owner (username or organization)"),
+  repo: z.string().describe("Repository name"),
+  pull_number: z.number().describe("Pull request number"),
+  expected_head_sha: z.string().optional().describe("The expected SHA of the pull request's HEAD ref")
+});
+
+export const GetPullRequestCommentsSchema = z.object({
+  owner: z.string().describe("Repository owner (username or organization)"),
+  repo: z.string().describe("Repository name"),
+  pull_number: z.number().describe("Pull request number")
+});
+
+export const GetPullRequestReviewsSchema = z.object({
+  owner: z.string().describe("Repository owner (username or organization)"),
+  repo: z.string().describe("Repository name"),
+  pull_number: z.number().describe("Pull request number")
+});
+
+// Function implementations
+export async function createPullRequest(
+  params: z.infer<typeof CreatePullRequestSchema>
+): Promise<z.infer<typeof GitHubPullRequestSchema>> {
+  const { owner, repo, ...options } = CreatePullRequestSchema.parse(params);
+
+  const response = await githubRequest(
+    `https://api.github.com/repos/${owner}/${repo}/pulls`,
+    {
+      method: "POST",
+      body: options,
+    }
+  );
+
+  return GitHubPullRequestSchema.parse(response);
+}
+
+export async function getPullRequest(
+  owner: string,
+  repo: string,
+  pullNumber: number
+): Promise<z.infer<typeof GitHubPullRequestSchema>> {
+  const response = await githubRequest(
+    `https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}`
+  );
+  return GitHubPullRequestSchema.parse(response);
+}
+
+export async function listPullRequests(
+  owner: string,
+  repo: string,
+  options: Omit<z.infer<typeof ListPullRequestsSchema>, 'owner' | 'repo'>
+): Promise<z.infer<typeof GitHubPullRequestSchema>[]> {
+  const url = new URL(`https://api.github.com/repos/${owner}/${repo}/pulls`);
+
+  if (options.state) url.searchParams.append('state', options.state);
+  if (options.head) url.searchParams.append('head', options.head);
+  if (options.base) url.searchParams.append('base', options.base);
+  if (options.sort) url.searchParams.append('sort', options.sort);
+  if (options.direction) url.searchParams.append('direction', options.direction);
+  if (options.per_page) url.searchParams.append('per_page', options.per_page.toString());
+  if (options.page) url.searchParams.append('page', options.page.toString());
+
+  const response = await githubRequest(url.toString());
+  return z.array(GitHubPullRequestSchema).parse(response);
+}
+
+export async function createPullRequestReview(
+  owner: string,
+  repo: string,
+  pullNumber: number,
+  options: Omit<z.infer<typeof CreatePullRequestReviewSchema>, 'owner' | 'repo' | 'pull_number'>
+): Promise<z.infer<typeof PullRequestReviewSchema>> {
+  const response = await githubRequest(
+    `https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}/reviews`,
+    {
+      method: 'POST',
+      body: options,
+    }
+  );
+  return PullRequestReviewSchema.parse(response);
+}
+
+export async function mergePullRequest(
+  owner: string,
+  repo: string,
+  pullNumber: number,
+  options: Omit<z.infer<typeof MergePullRequestSchema>, 'owner' | 'repo' | 'pull_number'>
+): Promise<any> {
+  return githubRequest(
+    `https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}/merge`,
+    {
+      method: 'PUT',
+      body: options,
+    }
+  );
+}
+
+export async function getPullRequestFiles(
+  owner: string,
+  repo: string,
+  pullNumber: number
+): Promise<z.infer<typeof PullRequestFileSchema>[]> {
+  const response = await githubRequest(
+    `https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}/files`
+  );
+  return z.array(PullRequestFileSchema).parse(response);
+}
+
+export async function updatePullRequestBranch(
+  owner: string,
+  repo: string,
+  pullNumber: number,
+  expectedHeadSha?: string
+): Promise<void> {
+  await githubRequest(
+    `https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}/update-branch`,
+    {
+      method: "PUT",
+      body: expectedHeadSha ? { expected_head_sha: expectedHeadSha } : undefined,
+    }
+  );
+}
+
+export async function getPullRequestComments(
+  owner: string,
+  repo: string,
+  pullNumber: number
+): Promise<z.infer<typeof PullRequestCommentSchema>[]> {
+  const response = await githubRequest(
+    `https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}/comments`
+  );
+  return z.array(PullRequestCommentSchema).parse(response);
+}
+
+export async function getPullRequestReviews(
+  owner: string,
+  repo: string,
+  pullNumber: number
+): Promise<z.infer<typeof PullRequestReviewSchema>[]> {
+  const response = await githubRequest(
+    `https://api.github.com/repos/${owner}/${repo}/pulls/${pullNumber}/reviews`
+  );
+  return z.array(PullRequestReviewSchema).parse(response);
+}
+
+export async function getPullRequestStatus(
+  owner: string,
+  repo: string,
+  pullNumber: number
+): Promise<z.infer<typeof CombinedStatusSchema>> {
+  // First get the PR to get the head SHA
+  const pr = await getPullRequest(owner, repo, pullNumber);
+  const sha = pr.head.sha;
+
+  // Then get the combined status for that SHA
+  const response = await githubRequest(
+    `https://api.github.com/repos/${owner}/${repo}/commits/${sha}/status`
+  );
+  return CombinedStatusSchema.parse(response);
+}
\ No newline at end of file
diff --git a/src/github/operations/repository.ts b/src/github/operations/repository.ts
new file mode 100644
index 00000000..4cf0ab9b
--- /dev/null
+++ b/src/github/operations/repository.ts
@@ -0,0 +1,65 @@
+import { z } from "zod";
+import { githubRequest } from "../common/utils.js";
+import { GitHubRepositorySchema, GitHubSearchResponseSchema } from "../common/types.js";
+
+// Schema definitions
+export const CreateRepositoryOptionsSchema = z.object({
+  name: z.string().describe("Repository name"),
+  description: z.string().optional().describe("Repository description"),
+  private: z.boolean().optional().describe("Whether the repository should be private"),
+  autoInit: z.boolean().optional().describe("Initialize with README.md"),
+});
+
+export const SearchRepositoriesSchema = z.object({
+  query: z.string().describe("Search query (see GitHub search syntax)"),
+  page: z.number().optional().describe("Page number for pagination (default: 1)"),
+  perPage: z.number().optional().describe("Number of results per page (default: 30, max: 100)"),
+});
+
+export const ForkRepositorySchema = z.object({
+  owner: z.string().describe("Repository owner (username or organization)"),
+  repo: z.string().describe("Repository name"),
+  organization: z.string().optional().describe("Optional: organization to fork to (defaults to your personal account)"),
+});
+
+// Type exports
+export type CreateRepositoryOptions = z.infer<typeof CreateRepositoryOptionsSchema>;
+
+// Function implementations
+export async function createRepository(options: CreateRepositoryOptions) {
+  const response = await githubRequest("https://api.github.com/user/repos", {
+    method: "POST",
+    body: options,
+  });
+  return GitHubRepositorySchema.parse(response);
+}
+
+export async function searchRepositories(
+  query: string,
+  page: number = 1,
+  perPage: number = 30
+) {
+  const url = new
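+  // URLSearchParams percent-encodes the query string, so raw GitHub search syntax
+  // such as "language:typescript stars:>100" can be passed to searchRepositories unchanged.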
URL("https://api.github.com/search/repositories"); + url.searchParams.append("q", query); + url.searchParams.append("page", page.toString()); + url.searchParams.append("per_page", perPage.toString()); + + const response = await githubRequest(url.toString()); + return GitHubSearchResponseSchema.parse(response); +} + +export async function forkRepository( + owner: string, + repo: string, + organization?: string +) { + const url = organization + ? `https://api.github.com/repos/${owner}/${repo}/forks?organization=${organization}` + : `https://api.github.com/repos/${owner}/${repo}/forks`; + + const response = await githubRequest(url, { method: "POST" }); + return GitHubRepositorySchema.extend({ + parent: GitHubRepositorySchema, + source: GitHubRepositorySchema, + }).parse(response); +} diff --git a/src/github/operations/search.ts b/src/github/operations/search.ts new file mode 100644 index 00000000..76faa729 --- /dev/null +++ b/src/github/operations/search.ts @@ -0,0 +1,45 @@ +import { z } from "zod"; +import { githubRequest, buildUrl } from "../common/utils.js"; + +export const SearchOptions = z.object({ + q: z.string(), + order: z.enum(["asc", "desc"]).optional(), + page: z.number().min(1).optional(), + per_page: z.number().min(1).max(100).optional(), +}); + +export const SearchUsersOptions = SearchOptions.extend({ + sort: z.enum(["followers", "repositories", "joined"]).optional(), +}); + +export const SearchIssuesOptions = SearchOptions.extend({ + sort: z.enum([ + "comments", + "reactions", + "reactions-+1", + "reactions--1", + "reactions-smile", + "reactions-thinking_face", + "reactions-heart", + "reactions-tada", + "interactions", + "created", + "updated", + ]).optional(), +}); + +export const SearchCodeSchema = SearchOptions; +export const SearchUsersSchema = SearchUsersOptions; +export const SearchIssuesSchema = SearchIssuesOptions; + +export async function searchCode(params: z.infer) { + return githubRequest(buildUrl("https://api.github.com/search/code", params)); +} + +export async function searchIssues(params: z.infer) { + return githubRequest(buildUrl("https://api.github.com/search/issues", params)); +} + +export async function searchUsers(params: z.infer) { + return githubRequest(buildUrl("https://api.github.com/search/users", params)); +} \ No newline at end of file diff --git a/src/github/schemas.ts b/src/github/schemas.ts deleted file mode 100644 index 0a322328..00000000 --- a/src/github/schemas.ts +++ /dev/null @@ -1,719 +0,0 @@ -import { z } from "zod"; - -// Base schemas for common types -export const GitHubAuthorSchema = z.object({ - name: z.string(), - email: z.string(), - date: z.string(), -}); - -// Repository related schemas -export const GitHubOwnerSchema = z.object({ - login: z.string(), - id: z.number(), - node_id: z.string(), - avatar_url: z.string(), - url: z.string(), - html_url: z.string(), - type: z.string(), -}); - -export const GitHubRepositorySchema = z.object({ - id: z.number(), - node_id: z.string(), - name: z.string(), - full_name: z.string(), - private: z.boolean(), - owner: GitHubOwnerSchema, - html_url: z.string(), - description: z.string().nullable(), - fork: z.boolean(), - url: z.string(), - created_at: z.string(), - updated_at: z.string(), - pushed_at: z.string(), - git_url: z.string(), - ssh_url: z.string(), - clone_url: z.string(), - default_branch: z.string(), -}); - -// File content schemas -export const GitHubFileContentSchema = z.object({ - type: z.string(), - encoding: z.string(), - size: z.number(), - name: z.string(), - path: z.string(), - 
content: z.string(), - sha: z.string(), - url: z.string(), - git_url: z.string(), - html_url: z.string(), - download_url: z.string(), -}); - -export const GitHubDirectoryContentSchema = z.object({ - type: z.string(), - size: z.number(), - name: z.string(), - path: z.string(), - sha: z.string(), - url: z.string(), - git_url: z.string(), - html_url: z.string(), - download_url: z.string().nullable(), -}); - -export const GitHubContentSchema = z.union([ - GitHubFileContentSchema, - z.array(GitHubDirectoryContentSchema), -]); - -// Operation schemas -export const FileOperationSchema = z.object({ - path: z.string(), - content: z.string(), -}); - -// Tree and commit schemas -export const GitHubTreeEntrySchema = z.object({ - path: z.string(), - mode: z.enum(["100644", "100755", "040000", "160000", "120000"]), - type: z.enum(["blob", "tree", "commit"]), - size: z.number().optional(), - sha: z.string(), - url: z.string(), -}); - -export const GitHubTreeSchema = z.object({ - sha: z.string(), - url: z.string(), - tree: z.array(GitHubTreeEntrySchema), - truncated: z.boolean(), -}); - -export const GitHubListCommitsSchema = z.array(z.object({ - sha: z.string(), - node_id: z.string(), - commit: z.object({ - author: GitHubAuthorSchema, - committer: GitHubAuthorSchema, - message: z.string(), - tree: z.object({ - sha: z.string(), - url: z.string() - }), - url: z.string(), - comment_count: z.number(), - }), - url: z.string(), - html_url: z.string(), - comments_url: z.string() -})); - -export const GitHubCommitSchema = z.object({ - sha: z.string(), - node_id: z.string(), - url: z.string(), - author: GitHubAuthorSchema, - committer: GitHubAuthorSchema, - message: z.string(), - tree: z.object({ - sha: z.string(), - url: z.string(), - }), - parents: z.array( - z.object({ - sha: z.string(), - url: z.string(), - }) - ), -}); - -// Reference schema -export const GitHubReferenceSchema = z.object({ - ref: z.string(), - node_id: z.string(), - url: z.string(), - object: z.object({ - sha: z.string(), - type: z.string(), - url: z.string(), - }), -}); - -// Input schemas for operations -export const CreateRepositoryOptionsSchema = z.object({ - name: z.string(), - description: z.string().optional(), - private: z.boolean().optional(), - auto_init: z.boolean().optional(), -}); - -export const CreateIssueOptionsSchema = z.object({ - title: z.string(), - body: z.string().optional(), - assignees: z.array(z.string()).optional(), - milestone: z.number().optional(), - labels: z.array(z.string()).optional(), -}); - -export const CreatePullRequestOptionsSchema = z.object({ - title: z.string(), - body: z.string().optional(), - head: z.string(), - base: z.string(), - maintainer_can_modify: z.boolean().optional(), - draft: z.boolean().optional(), -}); - -export const CreateBranchOptionsSchema = z.object({ - ref: z.string(), - sha: z.string(), -}); - -// Response schemas for operations -export const GitHubCreateUpdateFileResponseSchema = z.object({ - content: GitHubFileContentSchema.nullable(), - commit: z.object({ - sha: z.string(), - node_id: z.string(), - url: z.string(), - html_url: z.string(), - author: GitHubAuthorSchema, - committer: GitHubAuthorSchema, - message: z.string(), - tree: z.object({ - sha: z.string(), - url: z.string(), - }), - parents: z.array( - z.object({ - sha: z.string(), - url: z.string(), - html_url: z.string(), - }) - ), - }), -}); - -export const GitHubSearchResponseSchema = z.object({ - total_count: z.number(), - incomplete_results: z.boolean(), - items: z.array(GitHubRepositorySchema), -}); - -// Fork 
-// Fork related schemas
-export const GitHubForkParentSchema = z.object({
-  name: z.string(),
-  full_name: z.string(),
-  owner: z.object({
-    login: z.string(),
-    id: z.number(),
-    avatar_url: z.string(),
-  }),
-  html_url: z.string(),
-});
-
-export const GitHubForkSchema = GitHubRepositorySchema.extend({
-  parent: GitHubForkParentSchema,
-  source: GitHubForkParentSchema,
-});
-
-// Issue related schemas
-export const GitHubLabelSchema = z.object({
-  id: z.number(),
-  node_id: z.string(),
-  url: z.string(),
-  name: z.string(),
-  color: z.string(),
-  default: z.boolean(),
-  description: z.string().optional(),
-});
-
-export const GitHubIssueAssigneeSchema = z.object({
-  login: z.string(),
-  id: z.number(),
-  avatar_url: z.string(),
-  url: z.string(),
-  html_url: z.string(),
-});
-
-export const GitHubMilestoneSchema = z.object({
-  url: z.string(),
-  html_url: z.string(),
-  labels_url: z.string(),
-  id: z.number(),
-  node_id: z.string(),
-  number: z.number(),
-  title: z.string(),
-  description: z.string(),
-  state: z.string(),
-});
-
-export const GitHubIssueSchema = z.object({
-  url: z.string(),
-  repository_url: z.string(),
-  labels_url: z.string(),
-  comments_url: z.string(),
-  events_url: z.string(),
-  html_url: z.string(),
-  id: z.number(),
-  node_id: z.string(),
-  number: z.number(),
-  title: z.string(),
-  user: GitHubIssueAssigneeSchema,
-  labels: z.array(GitHubLabelSchema),
-  state: z.string(),
-  locked: z.boolean(),
-  assignee: GitHubIssueAssigneeSchema.nullable(),
-  assignees: z.array(GitHubIssueAssigneeSchema),
-  milestone: GitHubMilestoneSchema.nullable(),
-  comments: z.number(),
-  created_at: z.string(),
-  updated_at: z.string(),
-  closed_at: z.string().nullable(),
-  body: z.string().nullable(),
-});
-
-// Pull Request related schemas
-export const GitHubPullRequestHeadSchema = z.object({
-  label: z.string(),
-  ref: z.string(),
-  sha: z.string(),
-  user: GitHubIssueAssigneeSchema,
-  repo: GitHubRepositorySchema,
-});
-
-export const GitHubPullRequestSchema = z.object({
-  url: z.string(),
-  id: z.number(),
-  node_id: z.string(),
-  html_url: z.string(),
-  diff_url: z.string(),
-  patch_url: z.string(),
-  issue_url: z.string(),
-  number: z.number(),
-  state: z.string(),
-  locked: z.boolean(),
-  title: z.string(),
-  user: GitHubIssueAssigneeSchema,
-  body: z.string(),
-  created_at: z.string(),
-  updated_at: z.string(),
-  closed_at: z.string().nullable(),
-  merged_at: z.string().nullable(),
-  merge_commit_sha: z.string().nullable(),
-  assignee: GitHubIssueAssigneeSchema.nullable(),
-  assignees: z.array(GitHubIssueAssigneeSchema),
-  head: GitHubPullRequestHeadSchema,
-  base: GitHubPullRequestHeadSchema,
-});
-
-const RepoParamsSchema = z.object({
-  owner: z.string().describe("Repository owner (username or organization)"),
-  repo: z.string().describe("Repository name"),
-});
-
-export const CreateOrUpdateFileSchema = RepoParamsSchema.extend({
-  path: z.string().describe("Path where to create/update the file"),
-  content: z.string().describe("Content of the file"),
-  message: z.string().describe("Commit message"),
-  branch: z.string().describe("Branch to create/update the file in"),
-  sha: z
-    .string()
-    .optional()
-    .describe(
-      "SHA of the file being replaced (required when updating existing files)"
-    ),
-});
-
-export const SearchRepositoriesSchema = z.object({
-  query: z.string().describe("Search query (see GitHub search syntax)"),
-  page: z
-    .number()
-    .optional()
-    .describe("Page number for pagination (default: 1)"),
-  perPage: z
-    .number()
-    .optional()
-    .describe("Number of results per page (default: 30, max: 100)"),
-});
-
-export const ListCommitsSchema = z.object({
-  owner: z.string().describe("Repository owner (username or organization)"),
-  repo: z.string().describe("Repository name"),
-  page: z.number().optional().describe("Page number for pagination (default: 1)"),
-  perPage: z.number().optional().describe("Number of results per page (default: 30, max: 100)"),
-  sha: z.string().optional()
-    .describe("SHA of the file being replaced (required when updating existing files)")
-});
-
-export const CreateRepositorySchema = z.object({
-  name: z.string().describe("Repository name"),
-  description: z.string().optional().describe("Repository description"),
-  private: z
-    .boolean()
-    .optional()
-    .describe("Whether the repository should be private"),
-  autoInit: z.boolean().optional().describe("Initialize with README.md"),
-});
-
-export const GetFileContentsSchema = RepoParamsSchema.extend({
-  path: z.string().describe("Path to the file or directory"),
-  branch: z.string().optional().describe("Branch to get contents from"),
-});
-
-export const PushFilesSchema = RepoParamsSchema.extend({
-  branch: z.string().describe("Branch to push to (e.g., 'main' or 'master')"),
-  files: z
-    .array(
-      z.object({
-        path: z.string().describe("Path where to create the file"),
-        content: z.string().describe("Content of the file"),
-      })
-    )
-    .describe("Array of files to push"),
-  message: z.string().describe("Commit message"),
-});
-
-export const CreateIssueSchema = RepoParamsSchema.extend({
-  title: z.string().describe("Issue title"),
-  body: z.string().optional().describe("Issue body/description"),
-  assignees: z
-    .array(z.string())
-    .optional()
-    .describe("Array of usernames to assign"),
-  labels: z.array(z.string()).optional().describe("Array of label names"),
-  milestone: z.number().optional().describe("Milestone number to assign"),
-});
-
-export const CreatePullRequestSchema = RepoParamsSchema.extend({
-  title: z.string().describe("Pull request title"),
-  body: z.string().optional().describe("Pull request body/description"),
-  head: z
-    .string()
-    .describe("The name of the branch where your changes are implemented"),
-  base: z
-    .string()
-    .describe("The name of the branch you want the changes pulled into"),
-  draft: z
-    .boolean()
-    .optional()
-    .describe("Whether to create the pull request as a draft"),
-  maintainer_can_modify: z
-    .boolean()
-    .optional()
-    .describe("Whether maintainers can modify the pull request"),
-});
-
-export const ForkRepositorySchema = RepoParamsSchema.extend({
-  organization: z
-    .string()
-    .optional()
-    .describe(
-      "Optional: organization to fork to (defaults to your personal account)"
-    ),
-});
-
-export const CreateBranchSchema = RepoParamsSchema.extend({
-  branch: z.string().describe("Name for the new branch"),
-  from_branch: z
-    .string()
-    .optional()
-    .describe(
-      "Optional: source branch to create from (defaults to the repository's default branch)"
-    ),
-});
-
-/**
- * Response schema for a code search result item
- * @see https://docs.github.com/en/rest/search/search?apiVersion=2022-11-28#search-code
- */
-export const SearchCodeItemSchema = z.object({
-  name: z.string().describe("The name of the file"),
-  path: z.string().describe("The path to the file in the repository"),
-  sha: z.string().describe("The SHA hash of the file"),
-  url: z.string().describe("The API URL for this file"),
-  git_url: z.string().describe("The Git URL for this file"),
-  html_url: z.string().describe("The HTML URL to view this file on GitHub"),
-  repository: GitHubRepositorySchema.describe(
-    "The repository where this file was found"
-  ),
-  score: z.number().describe("The search result score"),
-});
-
-/**
- * Response schema for code search results
- */
-export const SearchCodeResponseSchema = z.object({
-  total_count: z.number().describe("Total number of matching results"),
-  incomplete_results: z
-    .boolean()
-    .describe("Whether the results are incomplete"),
-  items: z.array(SearchCodeItemSchema).describe("The search results"),
-});
-
-/**
- * Response schema for an issue search result item
- * @see https://docs.github.com/en/rest/search/search?apiVersion=2022-11-28#search-issues-and-pull-requests
- */
-export const SearchIssueItemSchema = z.object({
-  url: z.string().describe("The API URL for this issue"),
-  repository_url: z
-    .string()
-    .describe("The API URL for the repository where this issue was found"),
-  labels_url: z.string().describe("The API URL for the labels of this issue"),
-  comments_url: z.string().describe("The API URL for comments of this issue"),
-  events_url: z.string().describe("The API URL for events of this issue"),
-  html_url: z.string().describe("The HTML URL to view this issue on GitHub"),
-  id: z.number().describe("The ID of this issue"),
-  node_id: z.string().describe("The Node ID of this issue"),
-  number: z.number().describe("The number of this issue"),
-  title: z.string().describe("The title of this issue"),
-  user: GitHubIssueAssigneeSchema.describe("The user who created this issue"),
-  labels: z.array(GitHubLabelSchema).describe("The labels of this issue"),
-  state: z.string().describe("The state of this issue"),
-  locked: z.boolean().describe("Whether this issue is locked"),
-  assignee: GitHubIssueAssigneeSchema.nullable().describe(
-    "The assignee of this issue"
-  ),
-  assignees: z
-    .array(GitHubIssueAssigneeSchema)
-    .describe("The assignees of this issue"),
-  comments: z.number().describe("The number of comments on this issue"),
-  created_at: z.string().describe("The creation time of this issue"),
-  updated_at: z.string().describe("The last update time of this issue"),
-  closed_at: z.string().nullable().describe("The closure time of this issue"),
-  body: z.string().describe("The body of this issue"),
-  score: z.number().describe("The search result score"),
-  pull_request: z
-    .object({
-      url: z.string().describe("The API URL for this pull request"),
-      html_url: z.string().describe("The HTML URL to view this pull request"),
-      diff_url: z.string().describe("The URL to view the diff"),
-      patch_url: z.string().describe("The URL to view the patch"),
-    })
-    .optional()
-    .describe("Pull request details if this is a PR"),
-});
-
-/**
- * Response schema for issue search results
- */
-export const SearchIssuesResponseSchema = z.object({
-  total_count: z.number().describe("Total number of matching results"),
-  incomplete_results: z
-    .boolean()
-    .describe("Whether the results are incomplete"),
-  items: z.array(SearchIssueItemSchema).describe("The search results"),
-});
-
-/**
- * Response schema for a user search result item
- * @see https://docs.github.com/en/rest/search/search?apiVersion=2022-11-28#search-users
- */
-export const SearchUserItemSchema = z.object({
-  login: z.string().describe("The username of the user"),
-  id: z.number().describe("The ID of the user"),
-  node_id: z.string().describe("The Node ID of the user"),
-  avatar_url: z.string().describe("The avatar URL of the user"),
-  gravatar_id: z.string().describe("The Gravatar ID of the user"),
-  url: z.string().describe("The API URL for this user"),
-  html_url: z.string().describe("The HTML URL to view this user on GitHub"),
-  followers_url: z.string().describe("The API URL for followers of this user"),
-  following_url: z.string().describe("The API URL for following of this user"),
-  gists_url: z.string().describe("The API URL for gists of this user"),
-  starred_url: z
-    .string()
-    .describe("The API URL for starred repositories of this user"),
-  subscriptions_url: z
-    .string()
-    .describe("The API URL for subscriptions of this user"),
-  organizations_url: z
-    .string()
-    .describe("The API URL for organizations of this user"),
-  repos_url: z.string().describe("The API URL for repositories of this user"),
-  events_url: z.string().describe("The API URL for events of this user"),
-  received_events_url: z
-    .string()
-    .describe("The API URL for received events of this user"),
-  type: z.string().describe("The type of this user"),
-  site_admin: z.boolean().describe("Whether this user is a site administrator"),
-  score: z.number().describe("The search result score"),
-});
-
-/**
- * Response schema for user search results
- */
-export const SearchUsersResponseSchema = z.object({
-  total_count: z.number().describe("Total number of matching results"),
-  incomplete_results: z
-    .boolean()
-    .describe("Whether the results are incomplete"),
-  items: z.array(SearchUserItemSchema).describe("The search results"),
-});
-
-/**
- * Input schema for code search
- * @see https://docs.github.com/en/rest/search/search?apiVersion=2022-11-28#search-code--parameters
- */
-export const SearchCodeSchema = z.object({
-  q: z
-    .string()
-    .describe(
-      "Search query. See GitHub code search syntax: https://docs.github.com/en/search-github/searching-on-github/searching-code"
-    ),
-  order: z
-    .enum(["asc", "desc"])
-    .optional()
-    .describe("Sort order (asc or desc)"),
-  per_page: z
-    .number()
-    .min(1)
-    .max(100)
-    .optional()
-    .describe("Results per page (max 100)"),
-  page: z.number().min(1).optional().describe("Page number"),
-});
-
-/**
- * Input schema for issues search
- * @see https://docs.github.com/en/rest/search/search?apiVersion=2022-11-28#search-issues-and-pull-requests--parameters
- */
-export const SearchIssuesSchema = z.object({
-  q: z
-    .string()
-    .describe(
-      "Search query. See GitHub issues search syntax: https://docs.github.com/en/search-github/searching-on-github/searching-issues-and-pull-requests"
-    ),
-  sort: z
-    .enum([
-      "comments",
-      "reactions",
-      "reactions-+1",
-      "reactions--1",
-      "reactions-smile",
-      "reactions-thinking_face",
-      "reactions-heart",
-      "reactions-tada",
-      "interactions",
-      "created",
-      "updated",
-    ])
-    .optional()
-    .describe("Sort field"),
-  order: z
-    .enum(["asc", "desc"])
-    .optional()
-    .describe("Sort order (asc or desc)"),
-  per_page: z
-    .number()
-    .min(1)
-    .max(100)
-    .optional()
-    .describe("Results per page (max 100)"),
-  page: z.number().min(1).optional().describe("Page number"),
-});
-
-/**
- * Input schema for users search
- * @see https://docs.github.com/en/rest/search/search?apiVersion=2022-11-28#search-users--parameters
- */
-export const SearchUsersSchema = z.object({
-  q: z
-    .string()
-    .describe(
-      "Search query. See GitHub users search syntax: https://docs.github.com/en/search-github/searching-on-github/searching-users"
-    ),
-  sort: z
-    .enum(["followers", "repositories", "joined"])
-    .optional()
-    .describe("Sort field"),
-  order: z
-    .enum(["asc", "desc"])
-    .optional()
-    .describe("Sort order (asc or desc)"),
-  per_page: z
-    .number()
-    .min(1)
-    .max(100)
-    .optional()
-    .describe("Results per page (max 100)"),
-  page: z.number().min(1).optional().describe("Page number"),
-});
-
-// Add these schema definitions for issue management
-
-export const ListIssuesOptionsSchema = z.object({
-  owner: z.string(),
-  repo: z.string(),
-  state: z.enum(['open', 'closed', 'all']).optional(),
-  labels: z.array(z.string()).optional(),
-  sort: z.enum(['created', 'updated', 'comments']).optional(),
-  direction: z.enum(['asc', 'desc']).optional(),
-  since: z.string().optional(), // ISO 8601 timestamp
-  page: z.number().optional(),
-  per_page: z.number().optional()
-});
-
-export const UpdateIssueOptionsSchema = z.object({
-  owner: z.string(),
-  repo: z.string(),
-  issue_number: z.number(),
-  title: z.string().optional(),
-  body: z.string().optional(),
-  state: z.enum(['open', 'closed']).optional(),
-  labels: z.array(z.string()).optional(),
-  assignees: z.array(z.string()).optional(),
-  milestone: z.number().optional()
-});
-
-export const IssueCommentSchema = z.object({
-  owner: z.string(),
-  repo: z.string(),
-  issue_number: z.number(),
-  body: z.string()
-});
-
-export const GetIssueSchema = z.object({
-  owner: z.string().describe("Repository owner (username or organization)"),
-  repo: z.string().describe("Repository name"),
-  issue_number: z.number().describe("Issue number")
-});
-
-// Export types
-export type GitHubAuthor = z.infer<typeof GitHubAuthorSchema>;
-export type GitHubFork = z.infer<typeof GitHubForkSchema>;
-export type GitHubIssue = z.infer<typeof GitHubIssueSchema>;
-export type GitHubPullRequest = z.infer<typeof GitHubPullRequestSchema>;
-export type GitHubRepository = z.infer<typeof GitHubRepositorySchema>;
-export type GitHubFileContent = z.infer<typeof GitHubFileContentSchema>;
-export type GitHubDirectoryContent = z.infer<
-  typeof GitHubDirectoryContentSchema
->;
-export type GitHubContent = z.infer<typeof GitHubContentSchema>;
-export type FileOperation = z.infer<typeof FileOperationSchema>;
-export type GitHubTree = z.infer<typeof GitHubTreeSchema>;
-export type GitHubCommit = z.infer<typeof GitHubCommitSchema>;
-export type GitHubListCommits = z.infer<typeof GitHubListCommitsSchema>;
-export type GitHubReference = z.infer<typeof GitHubReferenceSchema>;
-export type CreateRepositoryOptions = z.infer<
-  typeof CreateRepositoryOptionsSchema
->;
-export type CreateIssueOptions = z.infer<typeof CreateIssueOptionsSchema>;
-export type CreatePullRequestOptions = z.infer<
-  typeof CreatePullRequestOptionsSchema
->;
-export type CreateBranchOptions = z.infer<typeof CreateBranchOptionsSchema>;
-export type GitHubCreateUpdateFileResponse = z.infer<
-  typeof GitHubCreateUpdateFileResponseSchema
->;
-export type GitHubSearchResponse = z.infer<typeof GitHubSearchResponseSchema>;
-export type SearchCodeItem = z.infer<typeof SearchCodeItemSchema>;
-export type SearchCodeResponse = z.infer<typeof SearchCodeResponseSchema>;
-export type SearchIssueItem = z.infer<typeof SearchIssueItemSchema>;
-export type SearchIssuesResponse = z.infer<typeof SearchIssuesResponseSchema>;
-export type SearchUserItem = z.infer<typeof SearchUserItemSchema>;
-export type SearchUsersResponse = z.infer<typeof SearchUsersResponseSchema>;
diff --git a/src/gitlab/Dockerfile b/src/gitlab/Dockerfile
index ce8823ca..e119288d 100644
--- a/src/gitlab/Dockerfile
+++ b/src/gitlab/Dockerfile
@@ -1,4 +1,4 @@
-FROM node:22.12-alpine as builder
+FROM node:22.12-alpine AS builder
 
 COPY src/gitlab /app
 COPY tsconfig.json /tsconfig.json
diff --git a/src/google-maps/Dockerfile b/src/google-maps/Dockerfile
index 62949680..3808d7fc 100644
--- a/src/google-maps/Dockerfile
+++ b/src/google-maps/Dockerfile
@@ -1,4 +1,4 @@
-FROM node:22.12-alpine as builder
+FROM node:22.12-alpine AS builder
 
 # Must be entire project because `prepare` script is run during `npm install` and requires all files.
 COPY src/google-maps /app
diff --git a/src/memory/Dockerfile b/src/memory/Dockerfile
index d5af8471..2f85d0cf 100644
--- a/src/memory/Dockerfile
+++ b/src/memory/Dockerfile
@@ -1,4 +1,4 @@
-FROM node:22.12-alpine as builder
+FROM node:22.12-alpine AS builder
 
 COPY src/memory /app
 COPY tsconfig.json /tsconfig.json
diff --git a/src/postgres/Dockerfile b/src/postgres/Dockerfile
index f390ec0e..59ef9cac 100644
--- a/src/postgres/Dockerfile
+++ b/src/postgres/Dockerfile
@@ -1,4 +1,4 @@
-FROM node:22.12-alpine as builder
+FROM node:22.12-alpine AS builder
 
 COPY src/postgres /app
 COPY tsconfig.json /tsconfig.json
diff --git a/src/sequentialthinking/Dockerfile b/src/sequentialthinking/Dockerfile
index 08a4282a..f1a88195 100644
--- a/src/sequentialthinking/Dockerfile
+++ b/src/sequentialthinking/Dockerfile
@@ -1,4 +1,4 @@
-FROM node:22.12-alpine as builder
+FROM node:22.12-alpine AS builder
 
 COPY src/sequentialthinking /app
 COPY tsconfig.json /tsconfig.json
diff --git a/src/sequentialthinking/README.md b/src/sequentialthinking/README.md
index 77ccb454..3914d8ea 100644
--- a/src/sequentialthinking/README.md
+++ b/src/sequentialthinking/README.md
@@ -83,7 +83,7 @@ Add this to your `claude_desktop_config.json`:
 Docker:
 
 ```bash
-docker build -t mcp/sequentialthinking -f sequentialthinking/Dockerfile .
+docker build -t mcp/sequentialthinking -f src/sequentialthinking/Dockerfile .
 ```
 
 ## License
diff --git a/src/slack/Dockerfile b/src/slack/Dockerfile
index 6df0be52..1f7efa46 100644
--- a/src/slack/Dockerfile
+++ b/src/slack/Dockerfile
@@ -1,4 +1,4 @@
-FROM node:22.12-alpine as builder
+FROM node:22.12-alpine AS builder
 
 # Must be entire project because `prepare` script is run during `npm install` and requires all files.
 COPY src/slack /app
diff --git a/src/slack/index.ts b/src/slack/index.ts
index 8cdc143c..b0124660 100644
--- a/src/slack/index.ts
+++ b/src/slack/index.ts
@@ -102,7 +102,7 @@ const replyToThreadTool: Tool = {
       },
       thread_ts: {
         type: "string",
-        description: "The timestamp of the parent message",
+        description: "The timestamp of the parent message in the format '1234567890.123456'. Timestamps in the format without the period can be converted by adding the period such that 6 numbers come after it.",
       },
       text: {
         type: "string",
@@ -168,7 +168,7 @@ const getThreadRepliesTool: Tool = {
       },
       thread_ts: {
         type: "string",
-        description: "The timestamp of the parent message",
+        description: "The timestamp of the parent message in the format '1234567890.123456'. Timestamps in the format without the period can be converted by adding the period such that 6 numbers come after it.",
       },
     },
     required: ["channel_id", "thread_ts"],