Windows filesystem MCP enhancements (#543)

* fix: comprehensive Windows path handling improvements

- Add path-utils module for consistent path handling
- Handle Windows paths with spaces via proper quoting
- Support Unix-style Windows paths (/c/path)
- Support WSL paths (/mnt/c/path)
- Add comprehensive test coverage
- Fix path normalization for all path formats

Closes #447

* tested locally and working now

* Add filesystem path utils and tests

* Ensure Windows drive letters are capitalized in normalizePath

* Add test for GitHub PR comment

* Add Jest and Windows testing configuration

* Address remaining PR review comments

* Fix bin and bump sdk

* Remove redundant commonjs version of path-utils and import from ts version

* Remove copying cjs file

* Remove copying run-server

* Remove complex args parsing and do other cleanup

* Add missing tools details to Readme

* Move utility functions from index to lib

* Add more tests and handle very small and very large files edge cases

* Finish refactoring and include original security fix comments

* On Windows, also check for drive root

* Check symlink support on restricted Windows environments

* Fix tests

* Bump SDK and package version

* Clean up

---------

Co-authored-by: olaservo <olahungerford@gmail.com>
Co-authored-by: adam jones <adamj+git@anthropic.com>
This commit is contained in:
Michael Casazza
2025-08-18 13:23:40 -04:00
committed by GitHub
parent 7e1d9d9ede
commit 46368832ef
9 changed files with 1291 additions and 402 deletions

2
package-lock.json generated
View File

@@ -6156,7 +6156,7 @@
},
"src/filesystem": {
"name": "@modelcontextprotocol/server-filesystem",
"version": "0.6.2",
"version": "0.6.3",
"license": "MIT",
"dependencies": {
"@modelcontextprotocol/sdk": "^1.17.0",

View File

@@ -73,6 +73,7 @@ The server's directory access control follows this flow:
- `head` (number, optional): First N lines
- `tail` (number, optional): Last N lines
- Always treats the file as UTF-8 text regardless of extension
- Cannot specify both `head` and `tail` simultaneously
- **read_media_file**
- Read an image or audio file
@@ -119,6 +120,23 @@ The server's directory access control follows this flow:
- List directory contents with [FILE] or [DIR] prefixes
- Input: `path` (string)
- **list_directory_with_sizes**
- List directory contents with [FILE] or [DIR] prefixes, including file sizes
- Inputs:
- `path` (string): Directory path to list
- `sortBy` (string, optional): Sort entries by "name" or "size" (default: "name")
- Returns detailed listing with file sizes and summary statistics
- Shows total files, directories, and combined size
- **directory_tree**
- Get a recursive tree view of files and directories as a JSON structure
- Input: `path` (string): Starting directory path
- Returns JSON structure with:
- `name`: File/directory name
- `type`: "file" or "directory"
- `children`: Array of child entries (for directories only)
- Output is formatted with 2-space indentation for readability
- **move_file**
- Move or rename files and directories
- Inputs:

View File

@@ -0,0 +1,701 @@
import { describe, it, expect, beforeEach, afterEach, jest } from '@jest/globals';
import fs from 'fs/promises';
import path from 'path';
import os from 'os';
import {
// Pure utility functions
formatSize,
normalizeLineEndings,
createUnifiedDiff,
// Security & validation functions
validatePath,
setAllowedDirectories,
// File operations
getFileStats,
readFileContent,
writeFileContent,
// Search & filtering functions
searchFilesWithValidation,
// File editing functions
applyFileEdits,
tailFile,
headFile
} from '../lib.js';
// Mock fs module
jest.mock('fs/promises');
const mockFs = fs as jest.Mocked<typeof fs>;
describe('Lib Functions', () => {
beforeEach(() => {
jest.clearAllMocks();
// Set up allowed directories for tests
const allowedDirs = process.platform === 'win32' ? ['C:\\Users\\test', 'C:\\temp', 'C:\\allowed'] : ['/home/user', '/tmp', '/allowed'];
setAllowedDirectories(allowedDirs);
});
afterEach(() => {
jest.restoreAllMocks();
// Clear allowed directories after tests
setAllowedDirectories([]);
});
describe('Pure Utility Functions', () => {
describe('formatSize', () => {
it('formats bytes correctly', () => {
expect(formatSize(0)).toBe('0 B');
expect(formatSize(512)).toBe('512 B');
expect(formatSize(1024)).toBe('1.00 KB');
expect(formatSize(1536)).toBe('1.50 KB');
expect(formatSize(1048576)).toBe('1.00 MB');
expect(formatSize(1073741824)).toBe('1.00 GB');
expect(formatSize(1099511627776)).toBe('1.00 TB');
});
it('handles edge cases', () => {
expect(formatSize(1023)).toBe('1023 B');
expect(formatSize(1025)).toBe('1.00 KB');
expect(formatSize(1048575)).toBe('1024.00 KB');
});
it('handles very large numbers beyond TB', () => {
// The function only supports up to TB, so very large numbers will show as TB
expect(formatSize(1024 * 1024 * 1024 * 1024 * 1024)).toBe('1024.00 TB');
expect(formatSize(Number.MAX_SAFE_INTEGER)).toContain('TB');
});
it('handles negative numbers', () => {
// Negative numbers will result in NaN for the log calculation
expect(formatSize(-1024)).toContain('NaN');
expect(formatSize(-0)).toBe('0 B');
});
it('handles decimal numbers', () => {
expect(formatSize(1536.5)).toBe('1.50 KB');
expect(formatSize(1023.9)).toBe('1023.9 B');
});
it('handles very small positive numbers', () => {
expect(formatSize(1)).toBe('1 B');
expect(formatSize(0.5)).toBe('0.5 B');
expect(formatSize(0.1)).toBe('0.1 B');
});
});
describe('normalizeLineEndings', () => {
it('converts CRLF to LF', () => {
expect(normalizeLineEndings('line1\r\nline2\r\nline3')).toBe('line1\nline2\nline3');
});
it('leaves LF unchanged', () => {
expect(normalizeLineEndings('line1\nline2\nline3')).toBe('line1\nline2\nline3');
});
it('handles mixed line endings', () => {
expect(normalizeLineEndings('line1\r\nline2\nline3\r\n')).toBe('line1\nline2\nline3\n');
});
it('handles empty string', () => {
expect(normalizeLineEndings('')).toBe('');
});
});
describe('createUnifiedDiff', () => {
it('creates diff for simple changes', () => {
const original = 'line1\nline2\nline3';
const modified = 'line1\nmodified line2\nline3';
const diff = createUnifiedDiff(original, modified, 'test.txt');
expect(diff).toContain('--- test.txt');
expect(diff).toContain('+++ test.txt');
expect(diff).toContain('-line2');
expect(diff).toContain('+modified line2');
});
it('handles CRLF normalization', () => {
const original = 'line1\r\nline2\r\n';
const modified = 'line1\nmodified line2\n';
const diff = createUnifiedDiff(original, modified);
expect(diff).toContain('-line2');
expect(diff).toContain('+modified line2');
});
it('handles identical content', () => {
const content = 'line1\nline2\nline3';
const diff = createUnifiedDiff(content, content);
// Should not contain any +/- lines for identical content (excluding header lines)
expect(diff.split('\n').filter((line: string) => line.startsWith('+++') || line.startsWith('---'))).toHaveLength(2);
expect(diff.split('\n').filter((line: string) => line.startsWith('+') && !line.startsWith('+++'))).toHaveLength(0);
expect(diff.split('\n').filter((line: string) => line.startsWith('-') && !line.startsWith('---'))).toHaveLength(0);
});
it('handles empty content', () => {
const diff = createUnifiedDiff('', '');
expect(diff).toContain('--- file');
expect(diff).toContain('+++ file');
});
it('handles default filename parameter', () => {
const diff = createUnifiedDiff('old', 'new');
expect(diff).toContain('--- file');
expect(diff).toContain('+++ file');
});
it('handles custom filename', () => {
const diff = createUnifiedDiff('old', 'new', 'custom.txt');
expect(diff).toContain('--- custom.txt');
expect(diff).toContain('+++ custom.txt');
});
});
});
describe('Security & Validation Functions', () => {
describe('validatePath', () => {
// Use Windows-compatible paths for testing
const allowedDirs = process.platform === 'win32' ? ['C:\\Users\\test', 'C:\\temp'] : ['/home/user', '/tmp'];
beforeEach(() => {
mockFs.realpath.mockImplementation(async (path: any) => path.toString());
});
it('validates allowed paths', async () => {
const testPath = process.platform === 'win32' ? 'C:\\Users\\test\\file.txt' : '/home/user/file.txt';
const result = await validatePath(testPath);
expect(result).toBe(testPath);
});
it('rejects disallowed paths', async () => {
const testPath = process.platform === 'win32' ? 'C:\\Windows\\System32\\file.txt' : '/etc/passwd';
await expect(validatePath(testPath))
.rejects.toThrow('Access denied - path outside allowed directories');
});
it('handles non-existent files by checking parent directory', async () => {
const newFilePath = process.platform === 'win32' ? 'C:\\Users\\test\\newfile.txt' : '/home/user/newfile.txt';
const parentPath = process.platform === 'win32' ? 'C:\\Users\\test' : '/home/user';
// Create an error with the ENOENT code that the implementation checks for
const enoentError = new Error('ENOENT') as NodeJS.ErrnoException;
enoentError.code = 'ENOENT';
mockFs.realpath
.mockRejectedValueOnce(enoentError)
.mockResolvedValueOnce(parentPath);
const result = await validatePath(newFilePath);
expect(result).toBe(path.resolve(newFilePath));
});
it('rejects when parent directory does not exist', async () => {
const newFilePath = process.platform === 'win32' ? 'C:\\Users\\test\\nonexistent\\newfile.txt' : '/home/user/nonexistent/newfile.txt';
// Create errors with the ENOENT code
const enoentError1 = new Error('ENOENT') as NodeJS.ErrnoException;
enoentError1.code = 'ENOENT';
const enoentError2 = new Error('ENOENT') as NodeJS.ErrnoException;
enoentError2.code = 'ENOENT';
mockFs.realpath
.mockRejectedValueOnce(enoentError1)
.mockRejectedValueOnce(enoentError2);
await expect(validatePath(newFilePath))
.rejects.toThrow('Parent directory does not exist');
});
});
});
describe('File Operations', () => {
describe('getFileStats', () => {
it('returns file statistics', async () => {
const mockStats = {
size: 1024,
birthtime: new Date('2023-01-01'),
mtime: new Date('2023-01-02'),
atime: new Date('2023-01-03'),
isDirectory: () => false,
isFile: () => true,
mode: 0o644
};
mockFs.stat.mockResolvedValueOnce(mockStats as any);
const result = await getFileStats('/test/file.txt');
expect(result).toEqual({
size: 1024,
created: new Date('2023-01-01'),
modified: new Date('2023-01-02'),
accessed: new Date('2023-01-03'),
isDirectory: false,
isFile: true,
permissions: '644'
});
});
it('handles directory statistics', async () => {
const mockStats = {
size: 4096,
birthtime: new Date('2023-01-01'),
mtime: new Date('2023-01-02'),
atime: new Date('2023-01-03'),
isDirectory: () => true,
isFile: () => false,
mode: 0o755
};
mockFs.stat.mockResolvedValueOnce(mockStats as any);
const result = await getFileStats('/test/dir');
expect(result.isDirectory).toBe(true);
expect(result.isFile).toBe(false);
expect(result.permissions).toBe('755');
});
});
describe('readFileContent', () => {
it('reads file with default encoding', async () => {
mockFs.readFile.mockResolvedValueOnce('file content');
const result = await readFileContent('/test/file.txt');
expect(result).toBe('file content');
expect(mockFs.readFile).toHaveBeenCalledWith('/test/file.txt', 'utf-8');
});
it('reads file with custom encoding', async () => {
mockFs.readFile.mockResolvedValueOnce('file content');
const result = await readFileContent('/test/file.txt', 'ascii');
expect(result).toBe('file content');
expect(mockFs.readFile).toHaveBeenCalledWith('/test/file.txt', 'ascii');
});
});
describe('writeFileContent', () => {
it('writes file content', async () => {
mockFs.writeFile.mockResolvedValueOnce(undefined);
await writeFileContent('/test/file.txt', 'new content');
expect(mockFs.writeFile).toHaveBeenCalledWith('/test/file.txt', 'new content', { encoding: "utf-8", flag: 'wx' });
});
});
});
describe('Search & Filtering Functions', () => {
describe('searchFilesWithValidation', () => {
beforeEach(() => {
mockFs.realpath.mockImplementation(async (path: any) => path.toString());
});
it('excludes files matching exclude patterns', async () => {
const mockEntries = [
{ name: 'test.txt', isDirectory: () => false },
{ name: 'test.log', isDirectory: () => false },
{ name: 'node_modules', isDirectory: () => true }
];
mockFs.readdir.mockResolvedValueOnce(mockEntries as any);
const testDir = process.platform === 'win32' ? 'C:\\allowed\\dir' : '/allowed/dir';
const allowedDirs = process.platform === 'win32' ? ['C:\\allowed'] : ['/allowed'];
// Mock realpath to return the same path for validation to pass
mockFs.realpath.mockImplementation(async (inputPath: any) => {
const pathStr = inputPath.toString();
// Return the path as-is for validation
return pathStr;
});
const result = await searchFilesWithValidation(
testDir,
'test',
allowedDirs,
{ excludePatterns: ['*.log', 'node_modules'] }
);
const expectedResult = process.platform === 'win32' ? 'C:\\allowed\\dir\\test.txt' : '/allowed/dir/test.txt';
expect(result).toEqual([expectedResult]);
});
it('handles validation errors during search', async () => {
const mockEntries = [
{ name: 'test.txt', isDirectory: () => false },
{ name: 'invalid_file.txt', isDirectory: () => false }
];
mockFs.readdir.mockResolvedValueOnce(mockEntries as any);
// Mock validatePath to throw error for invalid_file.txt
mockFs.realpath.mockImplementation(async (path: any) => {
if (path.toString().includes('invalid_file.txt')) {
throw new Error('Access denied');
}
return path.toString();
});
const testDir = process.platform === 'win32' ? 'C:\\allowed\\dir' : '/allowed/dir';
const allowedDirs = process.platform === 'win32' ? ['C:\\allowed'] : ['/allowed'];
const result = await searchFilesWithValidation(
testDir,
'test',
allowedDirs,
{}
);
// Should only return the valid file, skipping the invalid one
const expectedResult = process.platform === 'win32' ? 'C:\\allowed\\dir\\test.txt' : '/allowed/dir/test.txt';
expect(result).toEqual([expectedResult]);
});
it('handles complex exclude patterns with wildcards', async () => {
const mockEntries = [
{ name: 'test.txt', isDirectory: () => false },
{ name: 'test.backup', isDirectory: () => false },
{ name: 'important_test.js', isDirectory: () => false }
];
mockFs.readdir.mockResolvedValueOnce(mockEntries as any);
const testDir = process.platform === 'win32' ? 'C:\\allowed\\dir' : '/allowed/dir';
const allowedDirs = process.platform === 'win32' ? ['C:\\allowed'] : ['/allowed'];
const result = await searchFilesWithValidation(
testDir,
'test',
allowedDirs,
{ excludePatterns: ['*.backup'] }
);
const expectedResults = process.platform === 'win32' ? [
'C:\\allowed\\dir\\test.txt',
'C:\\allowed\\dir\\important_test.js'
] : [
'/allowed/dir/test.txt',
'/allowed/dir/important_test.js'
];
expect(result).toEqual(expectedResults);
});
});
});
describe('File Editing Functions', () => {
describe('applyFileEdits', () => {
beforeEach(() => {
mockFs.readFile.mockResolvedValue('line1\nline2\nline3\n');
mockFs.writeFile.mockResolvedValue(undefined);
});
it('applies simple text replacement', async () => {
const edits = [
{ oldText: 'line2', newText: 'modified line2' }
];
mockFs.rename.mockResolvedValueOnce(undefined);
const result = await applyFileEdits('/test/file.txt', edits, false);
expect(result).toContain('modified line2');
// Should write to temporary file then rename
expect(mockFs.writeFile).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
'line1\nmodified line2\nline3\n',
'utf-8'
);
expect(mockFs.rename).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
'/test/file.txt'
);
});
it('handles dry run mode', async () => {
const edits = [
{ oldText: 'line2', newText: 'modified line2' }
];
const result = await applyFileEdits('/test/file.txt', edits, true);
expect(result).toContain('modified line2');
expect(mockFs.writeFile).not.toHaveBeenCalled();
});
it('applies multiple edits sequentially', async () => {
const edits = [
{ oldText: 'line1', newText: 'first line' },
{ oldText: 'line3', newText: 'third line' }
];
mockFs.rename.mockResolvedValueOnce(undefined);
await applyFileEdits('/test/file.txt', edits, false);
expect(mockFs.writeFile).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
'first line\nline2\nthird line\n',
'utf-8'
);
expect(mockFs.rename).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
'/test/file.txt'
);
});
it('handles whitespace-flexible matching', async () => {
mockFs.readFile.mockResolvedValue(' line1\n line2\n line3\n');
const edits = [
{ oldText: 'line2', newText: 'modified line2' }
];
mockFs.rename.mockResolvedValueOnce(undefined);
await applyFileEdits('/test/file.txt', edits, false);
expect(mockFs.writeFile).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
' line1\n modified line2\n line3\n',
'utf-8'
);
expect(mockFs.rename).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
'/test/file.txt'
);
});
it('throws error for non-matching edits', async () => {
const edits = [
{ oldText: 'nonexistent line', newText: 'replacement' }
];
await expect(applyFileEdits('/test/file.txt', edits, false))
.rejects.toThrow('Could not find exact match for edit');
});
it('handles complex multi-line edits with indentation', async () => {
mockFs.readFile.mockResolvedValue('function test() {\n console.log("hello");\n return true;\n}');
const edits = [
{
oldText: ' console.log("hello");\n return true;',
newText: ' console.log("world");\n console.log("test");\n return false;'
}
];
mockFs.rename.mockResolvedValueOnce(undefined);
await applyFileEdits('/test/file.js', edits, false);
expect(mockFs.writeFile).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.js\.[a-f0-9]+\.tmp$/),
'function test() {\n console.log("world");\n console.log("test");\n return false;\n}',
'utf-8'
);
expect(mockFs.rename).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.js\.[a-f0-9]+\.tmp$/),
'/test/file.js'
);
});
it('handles edits with different indentation patterns', async () => {
mockFs.readFile.mockResolvedValue(' if (condition) {\n doSomething();\n }');
const edits = [
{
oldText: 'doSomething();',
newText: 'doSomethingElse();\n doAnotherThing();'
}
];
mockFs.rename.mockResolvedValueOnce(undefined);
await applyFileEdits('/test/file.js', edits, false);
expect(mockFs.writeFile).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.js\.[a-f0-9]+\.tmp$/),
' if (condition) {\n doSomethingElse();\n doAnotherThing();\n }',
'utf-8'
);
expect(mockFs.rename).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.js\.[a-f0-9]+\.tmp$/),
'/test/file.js'
);
});
it('handles CRLF line endings in file content', async () => {
mockFs.readFile.mockResolvedValue('line1\r\nline2\r\nline3\r\n');
const edits = [
{ oldText: 'line2', newText: 'modified line2' }
];
mockFs.rename.mockResolvedValueOnce(undefined);
await applyFileEdits('/test/file.txt', edits, false);
expect(mockFs.writeFile).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
'line1\nmodified line2\nline3\n',
'utf-8'
);
expect(mockFs.rename).toHaveBeenCalledWith(
expect.stringMatching(/\/test\/file\.txt\.[a-f0-9]+\.tmp$/),
'/test/file.txt'
);
});
});
describe('tailFile', () => {
it('handles empty files', async () => {
mockFs.stat.mockResolvedValue({ size: 0 } as any);
const result = await tailFile('/test/empty.txt', 5);
expect(result).toBe('');
expect(mockFs.open).not.toHaveBeenCalled();
});
it('calls stat to check file size', async () => {
mockFs.stat.mockResolvedValue({ size: 100 } as any);
// Mock file handle with proper typing
const mockFileHandle = {
read: jest.fn(),
close: jest.fn()
} as any;
mockFileHandle.read.mockResolvedValue({ bytesRead: 0 });
mockFileHandle.close.mockResolvedValue(undefined);
mockFs.open.mockResolvedValue(mockFileHandle);
await tailFile('/test/file.txt', 2);
expect(mockFs.stat).toHaveBeenCalledWith('/test/file.txt');
expect(mockFs.open).toHaveBeenCalledWith('/test/file.txt', 'r');
});
it('handles files with content and returns last lines', async () => {
mockFs.stat.mockResolvedValue({ size: 50 } as any);
const mockFileHandle = {
read: jest.fn(),
close: jest.fn()
} as any;
// Simulate reading file content in chunks
mockFileHandle.read
.mockResolvedValueOnce({ bytesRead: 20, buffer: Buffer.from('line3\nline4\nline5\n') })
.mockResolvedValueOnce({ bytesRead: 0 });
mockFileHandle.close.mockResolvedValue(undefined);
mockFs.open.mockResolvedValue(mockFileHandle);
const result = await tailFile('/test/file.txt', 2);
expect(mockFileHandle.close).toHaveBeenCalled();
});
it('handles read errors gracefully', async () => {
mockFs.stat.mockResolvedValue({ size: 100 } as any);
const mockFileHandle = {
read: jest.fn(),
close: jest.fn()
} as any;
mockFileHandle.read.mockResolvedValue({ bytesRead: 0 });
mockFileHandle.close.mockResolvedValue(undefined);
mockFs.open.mockResolvedValue(mockFileHandle);
await tailFile('/test/file.txt', 5);
expect(mockFileHandle.close).toHaveBeenCalled();
});
});
describe('headFile', () => {
it('opens file for reading', async () => {
// Mock file handle with proper typing
const mockFileHandle = {
read: jest.fn(),
close: jest.fn()
} as any;
mockFileHandle.read.mockResolvedValue({ bytesRead: 0 });
mockFileHandle.close.mockResolvedValue(undefined);
mockFs.open.mockResolvedValue(mockFileHandle);
await headFile('/test/file.txt', 2);
expect(mockFs.open).toHaveBeenCalledWith('/test/file.txt', 'r');
});
it('handles files with content and returns first lines', async () => {
const mockFileHandle = {
read: jest.fn(),
close: jest.fn()
} as any;
// Simulate reading file content with newlines
mockFileHandle.read
.mockResolvedValueOnce({ bytesRead: 20, buffer: Buffer.from('line1\nline2\nline3\n') })
.mockResolvedValueOnce({ bytesRead: 0 });
mockFileHandle.close.mockResolvedValue(undefined);
mockFs.open.mockResolvedValue(mockFileHandle);
const result = await headFile('/test/file.txt', 2);
expect(mockFileHandle.close).toHaveBeenCalled();
});
it('handles files with leftover content', async () => {
const mockFileHandle = {
read: jest.fn(),
close: jest.fn()
} as any;
// Simulate reading file content without final newline
mockFileHandle.read
.mockResolvedValueOnce({ bytesRead: 15, buffer: Buffer.from('line1\nline2\nend') })
.mockResolvedValueOnce({ bytesRead: 0 });
mockFileHandle.close.mockResolvedValue(undefined);
mockFs.open.mockResolvedValue(mockFileHandle);
const result = await headFile('/test/file.txt', 5);
expect(mockFileHandle.close).toHaveBeenCalled();
});
it('handles reaching requested line count', async () => {
const mockFileHandle = {
read: jest.fn(),
close: jest.fn()
} as any;
// Simulate reading exactly the requested number of lines
mockFileHandle.read
.mockResolvedValueOnce({ bytesRead: 12, buffer: Buffer.from('line1\nline2\n') })
.mockResolvedValueOnce({ bytesRead: 0 });
mockFileHandle.close.mockResolvedValue(undefined);
mockFs.open.mockResolvedValue(mockFileHandle);
const result = await headFile('/test/file.txt', 2);
expect(mockFileHandle.close).toHaveBeenCalled();
});
});
});
});

View File

@@ -162,6 +162,12 @@ describe('Path Utilities', () => {
expect(result).not.toContain('~');
});
it('expands bare ~ to home directory', () => {
const result = expandHome('~');
expect(result).not.toContain('~');
expect(result.length).toBeGreaterThan(0);
});
it('leaves other paths unchanged', () => {
expect(expandHome('C:/test')).toBe('C:/test');
});

View File

@@ -4,6 +4,49 @@ import * as fs from 'fs/promises';
import * as os from 'os';
import { isPathWithinAllowedDirectories } from '../path-validation.js';
/**
 * Probe whether the current environment can create symlinks.
 *
 * Creates a throwaway temp directory, writes a target file, and attempts to
 * symlink to it. EPERM (typical on restricted Windows environments without
 * Developer Mode or admin rights) means "not supported"; any other failure
 * is re-thrown since it signals a real problem rather than a missing
 * privilege. The probe directory is always removed.
 */
async function checkSymlinkSupport(): Promise<boolean> {
  const scratchDir = await fs.mkdtemp(path.join(os.tmpdir(), 'symlink-test-'));
  try {
    const target = path.join(scratchDir, 'target.txt');
    const link = path.join(scratchDir, 'link.txt');
    await fs.writeFile(target, 'test');
    await fs.symlink(target, link);
    // Reaching this point means symlink creation succeeded
    return true;
  } catch (error) {
    const code = (error as NodeJS.ErrnoException).code;
    if (code === 'EPERM') {
      // EPERM indicates no symlink permissions
      return false;
    }
    // Anything other than a permission error is unexpected — surface it
    throw error;
  } finally {
    // Clean up the probe directory whether or not the symlink worked
    await fs.rm(scratchDir, { recursive: true, force: true });
  }
}
// Cached probe result so the environment check runs at most once per test run
let symlinkSupported: boolean | null = null;
/**
 * Return whether symlinks are supported, probing the environment on the
 * first call and reusing the cached answer afterwards. Logs a skip notice
 * the one time the probe comes back negative.
 */
async function getSymlinkSupport(): Promise<boolean> {
  if (symlinkSupported !== null) {
    return symlinkSupported;
  }
  symlinkSupported = await checkSymlinkSupport();
  if (!symlinkSupported) {
    console.log('\n⚠ Symlink tests will be skipped - symlink creation not supported in this environment');
    console.log(' On Windows, enable Developer Mode or run as Administrator to enable symlink tests');
  }
  return symlinkSupported;
}
describe('Path Validation', () => {
it('allows exact directory match', () => {
const allowed = ['/home/user/project'];
@@ -587,6 +630,12 @@ describe('Path Validation', () => {
});
it('demonstrates symlink race condition allows writing outside allowed directories', async () => {
const symlinkSupported = await getSymlinkSupport();
if (!symlinkSupported) {
console.log(' ⏭️ Skipping symlink race condition test - symlinks not supported');
return;
}
const allowed = [allowedDir];
await expect(fs.access(testPath)).rejects.toThrow();
@@ -603,6 +652,12 @@ describe('Path Validation', () => {
});
it('shows timing differences between validation approaches', async () => {
const symlinkSupported = await getSymlinkSupport();
if (!symlinkSupported) {
console.log(' ⏭️ Skipping timing validation test - symlinks not supported');
return;
}
const allowed = [allowedDir];
const validation1 = isPathWithinAllowedDirectories(testPath, allowed);
@@ -618,6 +673,12 @@ describe('Path Validation', () => {
});
it('validates directory creation timing', async () => {
const symlinkSupported = await getSymlinkSupport();
if (!symlinkSupported) {
console.log(' ⏭️ Skipping directory creation timing test - symlinks not supported');
return;
}
const allowed = [allowedDir];
const testDir = path.join(allowedDir, 'newdir');
@@ -632,6 +693,12 @@ describe('Path Validation', () => {
});
it('demonstrates exclusive file creation behavior', async () => {
const symlinkSupported = await getSymlinkSupport();
if (!symlinkSupported) {
console.log(' ⏭️ Skipping exclusive file creation test - symlinks not supported');
return;
}
const allowed = [allowedDir];
await fs.symlink(targetFile, testPath);
@@ -644,6 +711,12 @@ describe('Path Validation', () => {
});
it('should use resolved parent paths for non-existent files', async () => {
const symlinkSupported = await getSymlinkSupport();
if (!symlinkSupported) {
console.log(' ⏭️ Skipping resolved parent paths test - symlinks not supported');
return;
}
const allowed = [allowedDir];
const symlinkDir = path.join(allowedDir, 'link');
@@ -662,6 +735,12 @@ describe('Path Validation', () => {
});
it('demonstrates parent directory symlink traversal', async () => {
const symlinkSupported = await getSymlinkSupport();
if (!symlinkSupported) {
console.log(' ⏭️ Skipping parent directory symlink traversal test - symlinks not supported');
return;
}
const allowed = [allowedDir];
const deepPath = path.join(allowedDir, 'sub1', 'sub2', 'file.txt');
@@ -682,6 +761,12 @@ describe('Path Validation', () => {
});
it('should prevent race condition between validatePath and file operation', async () => {
const symlinkSupported = await getSymlinkSupport();
if (!symlinkSupported) {
console.log(' ⏭️ Skipping race condition prevention test - symlinks not supported');
return;
}
const allowed = [allowedDir];
const racePath = path.join(allowedDir, 'race-file.txt');
const targetFile = path.join(forbiddenDir, 'target.txt');
@@ -730,6 +815,12 @@ describe('Path Validation', () => {
});
it('should handle symlinks that point within allowed directories', async () => {
const symlinkSupported = await getSymlinkSupport();
if (!symlinkSupported) {
console.log(' ⏭️ Skipping symlinks within allowed directories test - symlinks not supported');
return;
}
const allowed = [allowedDir];
const targetFile = path.join(allowedDir, 'target.txt');
const symlinkPath = path.join(allowedDir, 'symlink.txt');
@@ -756,6 +847,12 @@ describe('Path Validation', () => {
});
it('should prevent overwriting files through symlinks pointing outside allowed directories', async () => {
const symlinkSupported = await getSymlinkSupport();
if (!symlinkSupported) {
console.log(' ⏭️ Skipping symlink overwrite prevention test - symlinks not supported');
return;
}
const allowed = [allowedDir];
const legitFile = path.join(allowedDir, 'existing.txt');
const targetFile = path.join(forbiddenDir, 'target.txt');
@@ -786,6 +883,12 @@ describe('Path Validation', () => {
});
it('demonstrates race condition in read operations', async () => {
const symlinkSupported = await getSymlinkSupport();
if (!symlinkSupported) {
console.log(' ⏭️ Skipping race condition in read operations test - symlinks not supported');
return;
}
const allowed = [allowedDir];
const legitFile = path.join(allowedDir, 'readable.txt');
const secretFile = path.join(forbiddenDir, 'secret.txt');
@@ -812,6 +915,12 @@ describe('Path Validation', () => {
});
it('verifies rename does not follow symlinks', async () => {
const symlinkSupported = await getSymlinkSupport();
if (!symlinkSupported) {
console.log(' ⏭️ Skipping rename symlink test - symlinks not supported');
return;
}
const allowed = [allowedDir];
const tempFile = path.join(allowedDir, 'temp.txt');
const targetSymlink = path.join(allowedDir, 'target-symlink.txt');

View File

@@ -12,14 +12,23 @@ import {
import fs from "fs/promises";
import { createReadStream } from "fs";
import path from "path";
import os from 'os';
import { randomBytes } from 'crypto';
import { z } from "zod";
import { zodToJsonSchema } from "zod-to-json-schema";
import { diffLines, createTwoFilesPatch } from 'diff';
import { minimatch } from 'minimatch';
import { isPathWithinAllowedDirectories } from './path-validation.js';
import { normalizePath, expandHome } from './path-utils.js';
import { getValidRootDirectories } from './roots-utils.js';
import {
// Function imports
formatSize,
validatePath,
getFileStats,
readFileContent,
writeFileContent,
searchFilesWithValidation,
applyFileEdits,
tailFile,
headFile,
setAllowedDirectories,
} from './lib.js';
// Command line argument parsing
const args = process.argv.slice(2);
@@ -31,25 +40,14 @@ if (args.length === 0) {
console.error("At least one directory must be provided by EITHER method for the server to operate.");
}
// Normalize all paths consistently
/** Canonicalize a path string (collapses '.', '..', and repeated separators). */
function normalizePath(p: string): string {
  const normalized = path.normalize(p);
  return normalized;
}
/**
 * Replace a leading '~' (bare or '~/') with the current user's home
 * directory. Paths that do not start with '~' are returned untouched.
 */
function expandHome(filepath: string): string {
  const isHomeRelative = filepath === '~' || filepath.startsWith('~/');
  if (!isHomeRelative) {
    return filepath;
  }
  // Drop the leading '~' and join the remainder onto the home directory;
  // for a bare '~' the remainder is empty, yielding the home dir itself.
  return path.join(os.homedir(), filepath.slice(1));
}
// Store allowed directories in normalized and resolved form
let allowedDirectories = await Promise.all(
args.map(async (dir) => {
const expanded = expandHome(dir);
const absolute = path.resolve(expanded);
try {
// Resolve symlinks in allowed directories during startup
// Security: Resolve symlinks in allowed directories during startup
// This ensures we know the real paths and can validate against them later
const resolved = await fs.realpath(absolute);
return normalizePath(resolved);
} catch (error) {
@@ -61,9 +59,9 @@ let allowedDirectories = await Promise.all(
);
// Validate that all directories exist and are accessible
await Promise.all(args.map(async (dir) => {
await Promise.all(allowedDirectories.map(async (dir) => {
try {
const stats = await fs.stat(expandHome(dir));
const stats = await fs.stat(dir);
if (!stats.isDirectory()) {
console.error(`Error: ${dir} is not a directory`);
process.exit(1);
@@ -74,47 +72,8 @@ await Promise.all(args.map(async (dir) => {
}
}));
// Security utilities
/**
 * Security gate for file operations: resolve `requestedPath` to an absolute,
 * normalized path and verify it lies inside `allowedDirectories`.
 *
 * Order of checks:
 *  1. Expand '~' and resolve to an absolute path, then check the normalized
 *     form against the allow-list (rejects obvious escapes before any I/O).
 *  2. Resolve symlinks via fs.realpath and re-check the *real* path, so a
 *     link inside an allowed directory cannot point outside it.
 *  3. For paths that do not exist yet (ENOENT), validate the real path of
 *     the parent directory instead, so new files can only be created inside
 *     allowed directories.
 *
 * @param requestedPath - user-supplied path, absolute, relative, or '~'-prefixed
 * @returns the real (symlink-resolved) path for existing files, or the
 *          resolved absolute path for files that do not exist yet
 * @throws Error when the path, its symlink target, or its parent directory
 *         falls outside the allowed directories, or the parent does not exist
 */
async function validatePath(requestedPath: string): Promise<string> {
  const expandedPath = expandHome(requestedPath);
  // Relative inputs are resolved against the server's working directory
  const absolute = path.isAbsolute(expandedPath)
    ? path.resolve(expandedPath)
    : path.resolve(process.cwd(), expandedPath);
  const normalizedRequested = normalizePath(absolute);
  // Check if path is within allowed directories
  const isAllowed = isPathWithinAllowedDirectories(normalizedRequested, allowedDirectories);
  if (!isAllowed) {
    throw new Error(`Access denied - path outside allowed directories: ${absolute} not in ${allowedDirectories.join(', ')}`);
  }
  // Handle symlinks by checking their real path
  try {
    const realPath = await fs.realpath(absolute);
    const normalizedReal = normalizePath(realPath);
    if (!isPathWithinAllowedDirectories(normalizedReal, allowedDirectories)) {
      throw new Error(`Access denied - symlink target outside allowed directories: ${realPath} not in ${allowedDirectories.join(', ')}`);
    }
    return realPath;
  } catch (error) {
    // For new files that don't exist yet, verify parent directory
    if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
      const parentDir = path.dirname(absolute);
      try {
        const realParentPath = await fs.realpath(parentDir);
        const normalizedParent = normalizePath(realParentPath);
        if (!isPathWithinAllowedDirectories(normalizedParent, allowedDirectories)) {
          throw new Error(`Access denied - parent directory outside allowed directories: ${realParentPath} not in ${allowedDirectories.join(', ')}`);
        }
        return absolute;
      } catch {
        // NOTE(review): the bare catch also swallows the 'Access denied'
        // thrown just above for an out-of-bounds parent and re-reports it
        // as 'Parent directory does not exist' — confirm this masking of
        // the denial reason is intended.
        throw new Error(`Parent directory does not exist: ${parentDir}`);
      }
    }
    // Non-ENOENT realpath failures (e.g. permission errors) propagate as-is
    throw error;
  }
}
// Initialize the global allowedDirectories in lib.ts
setAllowedDirectories(allowedDirectories);
// Schema definitions
const ReadTextFileArgsSchema = z.object({
@@ -182,16 +141,6 @@ const GetFileInfoArgsSchema = z.object({
const ToolInputSchema = ToolSchema.shape.inputSchema;
type ToolInput = z.infer<typeof ToolInputSchema>;
interface FileInfo {
size: number;
created: Date;
modified: Date;
accessed: Date;
isDirectory: boolean;
isFile: boolean;
permissions: string;
}
// Server setup
const server = new Server(
{
@@ -205,277 +154,6 @@ const server = new Server(
},
);
// Tool implementations
async function getFileStats(filePath: string): Promise<FileInfo> {
const stats = await fs.stat(filePath);
return {
size: stats.size,
created: stats.birthtime,
modified: stats.mtime,
accessed: stats.atime,
isDirectory: stats.isDirectory(),
isFile: stats.isFile(),
permissions: stats.mode.toString(8).slice(-3),
};
}
async function searchFiles(
rootPath: string,
pattern: string,
excludePatterns: string[] = []
): Promise<string[]> {
const results: string[] = [];
async function search(currentPath: string) {
const entries = await fs.readdir(currentPath, { withFileTypes: true });
for (const entry of entries) {
const fullPath = path.join(currentPath, entry.name);
try {
// Validate each path before processing
await validatePath(fullPath);
// Check if path matches any exclude pattern
const relativePath = path.relative(rootPath, fullPath);
const shouldExclude = excludePatterns.some(pattern => {
const globPattern = pattern.includes('*') ? pattern : `**/${pattern}/**`;
return minimatch(relativePath, globPattern, { dot: true });
});
if (shouldExclude) {
continue;
}
if (entry.name.toLowerCase().includes(pattern.toLowerCase())) {
results.push(fullPath);
}
if (entry.isDirectory()) {
await search(fullPath);
}
} catch (error) {
// Skip invalid paths during search
continue;
}
}
}
await search(rootPath);
return results;
}
// file editing and diffing utilities
function normalizeLineEndings(text: string): string {
return text.replace(/\r\n/g, '\n');
}
function createUnifiedDiff(originalContent: string, newContent: string, filepath: string = 'file'): string {
// Ensure consistent line endings for diff
const normalizedOriginal = normalizeLineEndings(originalContent);
const normalizedNew = normalizeLineEndings(newContent);
return createTwoFilesPatch(
filepath,
filepath,
normalizedOriginal,
normalizedNew,
'original',
'modified'
);
}
async function applyFileEdits(
filePath: string,
edits: Array<{oldText: string, newText: string}>,
dryRun = false
): Promise<string> {
// Read file content and normalize line endings
const content = normalizeLineEndings(await fs.readFile(filePath, 'utf-8'));
// Apply edits sequentially
let modifiedContent = content;
for (const edit of edits) {
const normalizedOld = normalizeLineEndings(edit.oldText);
const normalizedNew = normalizeLineEndings(edit.newText);
// If exact match exists, use it
if (modifiedContent.includes(normalizedOld)) {
modifiedContent = modifiedContent.replace(normalizedOld, normalizedNew);
continue;
}
// Otherwise, try line-by-line matching with flexibility for whitespace
const oldLines = normalizedOld.split('\n');
const contentLines = modifiedContent.split('\n');
let matchFound = false;
for (let i = 0; i <= contentLines.length - oldLines.length; i++) {
const potentialMatch = contentLines.slice(i, i + oldLines.length);
// Compare lines with normalized whitespace
const isMatch = oldLines.every((oldLine, j) => {
const contentLine = potentialMatch[j];
return oldLine.trim() === contentLine.trim();
});
if (isMatch) {
// Preserve original indentation of first line
const originalIndent = contentLines[i].match(/^\s*/)?.[0] || '';
const newLines = normalizedNew.split('\n').map((line, j) => {
if (j === 0) return originalIndent + line.trimStart();
// For subsequent lines, try to preserve relative indentation
const oldIndent = oldLines[j]?.match(/^\s*/)?.[0] || '';
const newIndent = line.match(/^\s*/)?.[0] || '';
if (oldIndent && newIndent) {
const relativeIndent = newIndent.length - oldIndent.length;
return originalIndent + ' '.repeat(Math.max(0, relativeIndent)) + line.trimStart();
}
return line;
});
contentLines.splice(i, oldLines.length, ...newLines);
modifiedContent = contentLines.join('\n');
matchFound = true;
break;
}
}
if (!matchFound) {
throw new Error(`Could not find exact match for edit:\n${edit.oldText}`);
}
}
// Create unified diff
const diff = createUnifiedDiff(content, modifiedContent, filePath);
// Format diff with appropriate number of backticks
let numBackticks = 3;
while (diff.includes('`'.repeat(numBackticks))) {
numBackticks++;
}
const formattedDiff = `${'`'.repeat(numBackticks)}diff\n${diff}${'`'.repeat(numBackticks)}\n\n`;
if (!dryRun) {
// Security: Use atomic rename to prevent race conditions where symlinks
// could be created between validation and write. Rename operations
// replace the target file atomically and don't follow symlinks.
const tempPath = `${filePath}.${randomBytes(16).toString('hex')}.tmp`;
try {
await fs.writeFile(tempPath, modifiedContent, 'utf-8');
await fs.rename(tempPath, filePath);
} catch (error) {
try {
await fs.unlink(tempPath);
} catch {}
throw error;
}
}
return formattedDiff;
}
// Helper functions
function formatSize(bytes: number): string {
const units = ['B', 'KB', 'MB', 'GB', 'TB'];
if (bytes === 0) return '0 B';
const i = Math.floor(Math.log(bytes) / Math.log(1024));
if (i === 0) return `${bytes} ${units[i]}`;
return `${(bytes / Math.pow(1024, i)).toFixed(2)} ${units[i]}`;
}
// Memory-efficient implementation to get the last N lines of a file
async function tailFile(filePath: string, numLines: number): Promise<string> {
const CHUNK_SIZE = 1024; // Read 1KB at a time
const stats = await fs.stat(filePath);
const fileSize = stats.size;
if (fileSize === 0) return '';
// Open file for reading
const fileHandle = await fs.open(filePath, 'r');
try {
const lines: string[] = [];
let position = fileSize;
let chunk = Buffer.alloc(CHUNK_SIZE);
let linesFound = 0;
let remainingText = '';
// Read chunks from the end of the file until we have enough lines
while (position > 0 && linesFound < numLines) {
const size = Math.min(CHUNK_SIZE, position);
position -= size;
const { bytesRead } = await fileHandle.read(chunk, 0, size, position);
if (!bytesRead) break;
// Get the chunk as a string and prepend any remaining text from previous iteration
const readData = chunk.slice(0, bytesRead).toString('utf-8');
const chunkText = readData + remainingText;
// Split by newlines and count
const chunkLines = normalizeLineEndings(chunkText).split('\n');
// If this isn't the end of the file, the first line is likely incomplete
// Save it to prepend to the next chunk
if (position > 0) {
remainingText = chunkLines[0];
chunkLines.shift(); // Remove the first (incomplete) line
}
// Add lines to our result (up to the number we need)
for (let i = chunkLines.length - 1; i >= 0 && linesFound < numLines; i--) {
lines.unshift(chunkLines[i]);
linesFound++;
}
}
return lines.join('\n');
} finally {
await fileHandle.close();
}
}
// New function to get the first N lines of a file
async function headFile(filePath: string, numLines: number): Promise<string> {
const fileHandle = await fs.open(filePath, 'r');
try {
const lines: string[] = [];
let buffer = '';
let bytesRead = 0;
const chunk = Buffer.alloc(1024); // 1KB buffer
// Read chunks and count lines until we have enough or reach EOF
while (lines.length < numLines) {
const result = await fileHandle.read(chunk, 0, chunk.length, bytesRead);
if (result.bytesRead === 0) break; // End of file
bytesRead += result.bytesRead;
buffer += chunk.slice(0, result.bytesRead).toString('utf-8');
const newLineIndex = buffer.lastIndexOf('\n');
if (newLineIndex !== -1) {
const completeLines = buffer.slice(0, newLineIndex).split('\n');
buffer = buffer.slice(newLineIndex + 1);
for (const line of completeLines) {
lines.push(line);
if (lines.length >= numLines) break;
}
}
}
// If there is leftover content and we still need lines, add it
if (buffer.length > 0 && lines.length < numLines) {
lines.push(buffer);
}
return lines.join('\n');
} finally {
await fileHandle.close();
}
}
// Reads a file as a stream of buffers, concatenates them, and then encodes
// the result to a Base64 string. This is a memory-efficient way to handle
// binary data from a stream before the final encoding.
@@ -662,8 +340,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
content: [{ type: "text", text: headContent }],
};
}
const content = await fs.readFile(validPath, "utf-8");
const content = await readFileContent(validPath);
return {
content: [{ type: "text", text: content }],
};
@@ -710,7 +387,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
parsed.data.paths.map(async (filePath: string) => {
try {
const validPath = await validatePath(filePath);
const content = await fs.readFile(validPath, "utf-8");
const content = await readFileContent(validPath);
return `${filePath}:\n${content}\n`;
} catch (error) {
const errorMessage = error instanceof Error ? error.message : String(error);
@@ -729,31 +406,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
throw new Error(`Invalid arguments for write_file: ${parsed.error}`);
}
const validPath = await validatePath(parsed.data.path);
try {
// Security: 'wx' flag ensures exclusive creation - fails if file/symlink exists,
// preventing writes through pre-existing symlinks
await fs.writeFile(validPath, parsed.data.content, { encoding: "utf-8", flag: 'wx' });
} catch (error) {
if ((error as NodeJS.ErrnoException).code === 'EEXIST') {
// Security: Use atomic rename to prevent race conditions where symlinks
// could be created between validation and write. Rename operations
// replace the target file atomically and don't follow symlinks.
const tempPath = `${validPath}.${randomBytes(16).toString('hex')}.tmp`;
try {
await fs.writeFile(tempPath, parsed.data.content, 'utf-8');
await fs.rename(tempPath, validPath);
} catch (renameError) {
try {
await fs.unlink(tempPath);
} catch {}
throw renameError;
}
} else {
throw error;
}
}
await writeFileContent(validPath, parsed.data.content);
return {
content: [{ type: "text", text: `Successfully wrote to ${parsed.data.path}` }],
};
@@ -870,43 +523,43 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
throw new Error(`Invalid arguments for directory_tree: ${parsed.error}`);
}
interface TreeEntry {
name: string;
type: 'file' | 'directory';
children?: TreeEntry[];
}
interface TreeEntry {
name: string;
type: 'file' | 'directory';
children?: TreeEntry[];
}
async function buildTree(currentPath: string): Promise<TreeEntry[]> {
const validPath = await validatePath(currentPath);
const entries = await fs.readdir(validPath, {withFileTypes: true});
const result: TreeEntry[] = [];
async function buildTree(currentPath: string): Promise<TreeEntry[]> {
const validPath = await validatePath(currentPath);
const entries = await fs.readdir(validPath, {withFileTypes: true});
const result: TreeEntry[] = [];
for (const entry of entries) {
const entryData: TreeEntry = {
name: entry.name,
type: entry.isDirectory() ? 'directory' : 'file'
};
for (const entry of entries) {
const entryData: TreeEntry = {
name: entry.name,
type: entry.isDirectory() ? 'directory' : 'file'
};
if (entry.isDirectory()) {
const subPath = path.join(currentPath, entry.name);
entryData.children = await buildTree(subPath);
}
result.push(entryData);
if (entry.isDirectory()) {
const subPath = path.join(currentPath, entry.name);
entryData.children = await buildTree(subPath);
}
return result;
result.push(entryData);
}
const treeData = await buildTree(parsed.data.path);
return {
content: [{
type: "text",
text: JSON.stringify(treeData, null, 2)
}],
};
return result;
}
const treeData = await buildTree(parsed.data.path);
return {
content: [{
type: "text",
text: JSON.stringify(treeData, null, 2)
}],
};
}
case "move_file": {
const parsed = MoveFileArgsSchema.safeParse(args);
if (!parsed.success) {
@@ -926,7 +579,7 @@ server.setRequestHandler(CallToolRequestSchema, async (request) => {
throw new Error(`Invalid arguments for search_files: ${parsed.error}`);
}
const validPath = await validatePath(parsed.data.path);
const results = await searchFiles(validPath, parsed.data.pattern, parsed.data.excludePatterns);
const results = await searchFilesWithValidation(validPath, parsed.data.pattern, allowedDirectories, { excludePatterns: parsed.data.excludePatterns });
return {
content: [{ type: "text", text: results.length > 0 ? results.join("\n") : "No matches found" }],
};
@@ -972,6 +625,7 @@ async function updateAllowedDirectoriesFromRoots(requestedRoots: Root[]) {
const validatedRootDirs = await getValidRootDirectories(requestedRoots);
if (validatedRootDirs.length > 0) {
allowedDirectories = [...validatedRootDirs];
setAllowedDirectories(allowedDirectories); // Update the global state in lib.ts
console.error(`Updated allowed directories from MCP roots: ${validatedRootDirs.length} valid directories`);
} else {
console.error("No valid root directories provided by client");

392
src/filesystem/lib.ts Normal file
View File

@@ -0,0 +1,392 @@
import fs from "fs/promises";
import path from "path";
import os from 'os';
import { randomBytes } from 'crypto';
import { diffLines, createTwoFilesPatch } from 'diff';
import { minimatch } from 'minimatch';
import { normalizePath, expandHome } from './path-utils.js';
import { isPathWithinAllowedDirectories } from './path-validation.js';
// Module-level allow-list of directories this server is permitted to touch.
// Populated by the entry point via setAllowedDirectories().
let allowedDirectories: string[] = [];

/**
 * Replace the allow-list. A defensive copy is stored so later mutation of the
 * caller's array cannot change the module state.
 */
export function setAllowedDirectories(directories: string[]): void {
  allowedDirectories = directories.slice();
}

/**
 * Return a defensive copy of the current allow-list so callers cannot mutate
 * internal state.
 */
export function getAllowedDirectories(): string[] {
  return allowedDirectories.slice();
}
// Type definitions

// Snapshot of a file's metadata as returned by getFileStats.
interface FileInfo {
  // Size in bytes (fs.Stats.size).
  size: number;
  // Creation time (fs.Stats.birthtime).
  created: Date;
  // Last modification time (fs.Stats.mtime).
  modified: Date;
  // Last access time (fs.Stats.atime).
  accessed: Date;
  isDirectory: boolean;
  isFile: boolean;
  // Low three octal digits of st_mode, e.g. "644".
  permissions: string;
}

// Options accepted by searchFilesWithValidation.
export interface SearchOptions {
  // Minimatch-style globs evaluated against the path relative to the search
  // root; matching entries are skipped.
  excludePatterns?: string[];
}

// A single search hit.
// NOTE(review): not referenced by the visible code in this module —
// presumably consumed by callers; confirm before removing.
export interface SearchResult {
  path: string;
  isDirectory: boolean;
}
// Pure Utility Functions
/**
 * Render a byte count as a human-readable string, e.g. 1536 -> "1.50 KB".
 * Values below 1 KB are printed verbatim with no decimals; anything at or
 * beyond the largest unit is expressed in TB.
 */
export function formatSize(bytes: number): string {
  const units = ['B', 'KB', 'MB', 'GB', 'TB'];
  if (bytes === 0) return '0 B';
  const exponent = Math.floor(Math.log(bytes) / Math.log(1024));
  // Sub-1KB (and fractional-byte) sizes stay in plain bytes.
  if (exponent <= 0) return `${bytes} ${units[0]}`;
  // Clamp to the largest unit we know about.
  const unitIndex = Math.min(exponent, units.length - 1);
  const scaled = bytes / 1024 ** unitIndex;
  return `${scaled.toFixed(2)} ${units[unitIndex]}`;
}
/** Convert Windows CRLF line endings to Unix LF. Lone \r is left untouched. */
export function normalizeLineEndings(text: string): string {
  return text.split('\r\n').join('\n');
}
/**
 * Produce a unified diff between two versions of a file's content.
 * Both sides are normalized to LF first so CRLF/LF differences never show up
 * as spurious changes; headers label the sides 'original' and 'modified'.
 */
export function createUnifiedDiff(originalContent: string, newContent: string, filepath: string = 'file'): string {
  return createTwoFilesPatch(
    filepath,
    filepath,
    normalizeLineEndings(originalContent),
    normalizeLineEndings(newContent),
    'original',
    'modified'
  );
}
// Security & Validation Functions
/**
 * Validate that `requestedPath` resolves inside the allow-list and return the
 * real (symlink-resolved) path to use for the actual file operation.
 *
 * @param requestedPath - User-supplied path; `~` is expanded, relative paths
 *   are resolved against the process CWD.
 * @returns The resolved real path for existing targets, or the normalized
 *   absolute path for a not-yet-existing file whose parent is allowed.
 * @throws Error with an "Access denied" message when the path, its symlink
 *   target, or (for new files) its parent directory is outside the allow-list;
 *   "Parent directory does not exist" when creating under a missing parent.
 */
export async function validatePath(requestedPath: string): Promise<string> {
  const expandedPath = expandHome(requestedPath);
  const absolute = path.isAbsolute(expandedPath)
    ? path.resolve(expandedPath)
    : path.resolve(process.cwd(), expandedPath);

  const normalizedRequested = normalizePath(absolute);

  // Security: Check if path is within allowed directories before any file operations
  const isAllowed = isPathWithinAllowedDirectories(normalizedRequested, allowedDirectories);
  if (!isAllowed) {
    throw new Error(`Access denied - path outside allowed directories: ${absolute} not in ${allowedDirectories.join(', ')}`);
  }

  // Security: Handle symlinks by checking their real path to prevent symlink attacks
  // This prevents attackers from creating symlinks that point outside allowed directories
  try {
    const realPath = await fs.realpath(absolute);
    const normalizedReal = normalizePath(realPath);
    if (!isPathWithinAllowedDirectories(normalizedReal, allowedDirectories)) {
      throw new Error(`Access denied - symlink target outside allowed directories: ${realPath} not in ${allowedDirectories.join(', ')}`);
    }
    return realPath;
  } catch (error) {
    // Security: For new files that don't exist yet, verify the parent directory
    // so files cannot be created in unauthorized locations.
    if ((error as NodeJS.ErrnoException).code === 'ENOENT') {
      const parentDir = path.dirname(absolute);

      // Bug fix: only the realpath() call may be translated to "Parent
      // directory does not exist". Previously the allow-list check below sat
      // inside this try, so its "Access denied" error was swallowed by the
      // bare catch and misreported as a missing parent directory.
      let realParentPath: string;
      try {
        realParentPath = await fs.realpath(parentDir);
      } catch {
        throw new Error(`Parent directory does not exist: ${parentDir}`);
      }

      const normalizedParent = normalizePath(realParentPath);
      if (!isPathWithinAllowedDirectories(normalizedParent, allowedDirectories)) {
        throw new Error(`Access denied - parent directory outside allowed directories: ${realParentPath} not in ${allowedDirectories.join(', ')}`);
      }
      return absolute;
    }
    throw error;
  }
}
// File Operations
/**
 * Collect metadata for the file or directory at `filePath`: size, timestamps,
 * kind flags, and the permission bits as a three-digit octal string.
 * Rejects (like fs.stat) if the path does not exist.
 */
export async function getFileStats(filePath: string): Promise<FileInfo> {
  const stats = await fs.stat(filePath);
  // The last three octal digits of st_mode are the rwx permission bits.
  const permissions = stats.mode.toString(8).slice(-3);
  return {
    size: stats.size,
    created: stats.birthtime,
    modified: stats.mtime,
    accessed: stats.atime,
    isDirectory: stats.isDirectory(),
    isFile: stats.isFile(),
    permissions,
  };
}
/**
 * Read the entire file at `filePath` as text.
 *
 * @param filePath - Path to the file (should already be validated).
 * @param encoding - Text encoding to decode with; defaults to UTF-8.
 * @returns The decoded file contents.
 * @throws Error if `encoding` is not a Node.js-supported encoding.
 */
export async function readFileContent(filePath: string, encoding: string = 'utf-8'): Promise<string> {
  // Validate the encoding up front. Buffer.isEncoding is a type guard, so this
  // replaces the previous unchecked `as BufferEncoding` cast and turns an
  // invalid encoding into a clear error instead of an obscure fs-layer one.
  if (!Buffer.isEncoding(encoding)) {
    throw new Error(`Unsupported encoding: ${encoding}`);
  }
  return fs.readFile(filePath, encoding);
}
/**
 * Write `content` to `filePath` as UTF-8, creating or replacing the file.
 * Designed to be safe against symlink races (see inline security notes).
 */
export async function writeFileContent(filePath: string, content: string): Promise<void> {
  try {
    // Security: the 'wx' flag requests exclusive creation — the call fails if
    // anything (including a symlink) already exists at this path, preventing
    // writes from being routed through a pre-existing symlink.
    await fs.writeFile(filePath, content, { encoding: "utf-8", flag: 'wx' });
    return;
  } catch (error) {
    if ((error as NodeJS.ErrnoException).code !== 'EEXIST') {
      throw error;
    }
  }

  // The target already exists. Security: write to a unique temp file and
  // atomically rename it over the target. Rename replaces the destination in
  // one step and does not follow symlinks, closing the race where a symlink
  // could be created between validation and write.
  const tempPath = `${filePath}.${randomBytes(16).toString('hex')}.tmp`;
  try {
    await fs.writeFile(tempPath, content, 'utf-8');
    await fs.rename(tempPath, filePath);
  } catch (renameError) {
    // Best-effort cleanup of the temp file; the original error wins.
    await fs.unlink(tempPath).catch(() => {});
    throw renameError;
  }
}
// File Editing Functions
// A single find-and-replace operation consumed by applyFileEdits.
interface FileEdit {
  // Text to locate in the file (exact match first, then whitespace-flexible
  // line matching).
  oldText: string;
  // Replacement text; its indentation is adapted to the matched location.
  newText: string;
}
/**
 * Apply a sequence of text edits to the file at `filePath` and return a
 * fenced unified diff describing the change.
 *
 * Matching strategy, per edit:
 *  1. Exact substring match (after CRLF -> LF normalization) — replaces the
 *     FIRST occurrence only (String.replace with a string pattern).
 *  2. Otherwise, a line-window scan that compares lines with surrounding
 *     whitespace trimmed, re-indenting the replacement to fit the match.
 *
 * @param filePath - Path to the file (should already be validated).
 * @param edits - Ordered replacements; each edit sees the output of the last.
 * @param dryRun - When true, compute and return the diff without writing.
 * @returns The unified diff wrapped in a backtick-fenced "diff" code block.
 * @throws Error if any edit's oldText cannot be located.
 */
export async function applyFileEdits(
  filePath: string,
  edits: FileEdit[],
  dryRun: boolean = false
): Promise<string> {
  // Read file content and normalize line endings to LF so edits written with
  // either convention can match.
  const content = normalizeLineEndings(await fs.readFile(filePath, 'utf-8'));

  // Apply edits sequentially; each operates on the result of the previous one.
  let modifiedContent = content;
  for (const edit of edits) {
    const normalizedOld = normalizeLineEndings(edit.oldText);
    const normalizedNew = normalizeLineEndings(edit.newText);

    // If an exact match exists, use it (first occurrence only).
    if (modifiedContent.includes(normalizedOld)) {
      modifiedContent = modifiedContent.replace(normalizedOld, normalizedNew);
      continue;
    }

    // Otherwise, try line-by-line matching with flexibility for whitespace:
    // a window of lines matches when every line equals its target after trim().
    const oldLines = normalizedOld.split('\n');
    const contentLines = modifiedContent.split('\n');
    let matchFound = false;

    for (let i = 0; i <= contentLines.length - oldLines.length; i++) {
      const potentialMatch = contentLines.slice(i, i + oldLines.length);

      // Compare lines with normalized whitespace
      const isMatch = oldLines.every((oldLine, j) => {
        const contentLine = potentialMatch[j];
        return oldLine.trim() === contentLine.trim();
      });

      if (isMatch) {
        // Preserve original indentation of first line
        const originalIndent = contentLines[i].match(/^\s*/)?.[0] || '';
        const newLines = normalizedNew.split('\n').map((line, j) => {
          if (j === 0) return originalIndent + line.trimStart();
          // For subsequent lines, try to preserve relative indentation: shift
          // each replacement line by the indent delta it had relative to the
          // corresponding oldText line (never out-denting past the match).
          const oldIndent = oldLines[j]?.match(/^\s*/)?.[0] || '';
          const newIndent = line.match(/^\s*/)?.[0] || '';
          if (oldIndent && newIndent) {
            const relativeIndent = newIndent.length - oldIndent.length;
            return originalIndent + ' '.repeat(Math.max(0, relativeIndent)) + line.trimStart();
          }
          return line;
        });

        contentLines.splice(i, oldLines.length, ...newLines);
        modifiedContent = contentLines.join('\n');
        matchFound = true;
        break;
      }
    }

    if (!matchFound) {
      throw new Error(`Could not find exact match for edit:\n${edit.oldText}`);
    }
  }

  // Create unified diff of original vs. fully-edited content.
  const diff = createUnifiedDiff(content, modifiedContent, filePath);

  // Fence the diff with more backticks than any run appearing inside it, so
  // diff content can never break the code fence.
  let numBackticks = 3;
  while (diff.includes('`'.repeat(numBackticks))) {
    numBackticks++;
  }
  const formattedDiff = `${'`'.repeat(numBackticks)}diff\n${diff}${'`'.repeat(numBackticks)}\n\n`;

  if (!dryRun) {
    // Security: Use atomic rename to prevent race conditions where symlinks
    // could be created between validation and write. Rename operations
    // replace the target file atomically and don't follow symlinks.
    const tempPath = `${filePath}.${randomBytes(16).toString('hex')}.tmp`;
    try {
      await fs.writeFile(tempPath, modifiedContent, 'utf-8');
      await fs.rename(tempPath, filePath);
    } catch (error) {
      // Best-effort cleanup of the temp file; the write error wins.
      try {
        await fs.unlink(tempPath);
      } catch {}
      throw error;
    }
  }

  return formattedDiff;
}
// Memory-efficient implementation to get the last N lines of a file:
// reads fixed-size chunks backwards from EOF so only the tail of a large
// file is ever held in memory.
//
// NOTE(review): each chunk is decoded to UTF-8 independently, so a multi-byte
// character that straddles a chunk boundary can decode incorrectly — confirm
// whether non-ASCII content must be supported here.
export async function tailFile(filePath: string, numLines: number): Promise<string> {
  const CHUNK_SIZE = 1024; // Read 1KB at a time
  const stats = await fs.stat(filePath);
  const fileSize = stats.size;
  if (fileSize === 0) return '';

  // Open file for reading
  const fileHandle = await fs.open(filePath, 'r');
  try {
    const lines: string[] = [];
    let position = fileSize;              // Byte offset where the next backwards read ends
    let chunk = Buffer.alloc(CHUNK_SIZE);
    let linesFound = 0;
    let remainingText = '';               // Partial line carried over between chunks

    // Read chunks from the end of the file until we have enough lines
    while (position > 0 && linesFound < numLines) {
      const size = Math.min(CHUNK_SIZE, position);
      position -= size;
      const { bytesRead } = await fileHandle.read(chunk, 0, size, position);
      if (!bytesRead) break;

      // Decode the chunk and append the carried-over text, which comes AFTER
      // this chunk in file order (we are reading backwards).
      const readData = chunk.slice(0, bytesRead).toString('utf-8');
      const chunkText = readData + remainingText;

      // Split by newlines and count
      const chunkLines = normalizeLineEndings(chunkText).split('\n');

      // Unless we've reached the start of the file, the first split piece is
      // likely an incomplete line — save it to prepend to the next (earlier)
      // chunk.
      if (position > 0) {
        remainingText = chunkLines[0];
        chunkLines.shift(); // Remove the first (incomplete) line
      }

      // Add lines to our result (up to the number we need), walking backwards
      // so the newest lines land at the front in file order.
      for (let i = chunkLines.length - 1; i >= 0 && linesFound < numLines; i--) {
        lines.unshift(chunkLines[i]);
        linesFound++;
      }
    }

    return lines.join('\n');
  } finally {
    await fileHandle.close();
  }
}
// Get the first N lines of a file by reading forward in 1KB chunks,
// stopping as soon as enough complete lines have been collected, so a huge
// file is never fully loaded.
export async function headFile(filePath: string, numLines: number): Promise<string> {
  const fileHandle = await fs.open(filePath, 'r');
  try {
    const lines: string[] = [];
    let buffer = '';                  // Text not yet broken into complete lines
    let bytesRead = 0;                // Absolute read offset into the file
    const chunk = Buffer.alloc(1024); // 1KB buffer

    // Read chunks and count lines until we have enough or reach EOF
    while (lines.length < numLines) {
      const result = await fileHandle.read(chunk, 0, chunk.length, bytesRead);
      if (result.bytesRead === 0) break; // End of file
      bytesRead += result.bytesRead;
      buffer += chunk.slice(0, result.bytesRead).toString('utf-8');

      // Extract every complete line currently buffered; keep the trailing
      // partial line (text after the last newline) for the next iteration.
      const newLineIndex = buffer.lastIndexOf('\n');
      if (newLineIndex !== -1) {
        const completeLines = buffer.slice(0, newLineIndex).split('\n');
        buffer = buffer.slice(newLineIndex + 1);
        for (const line of completeLines) {
          lines.push(line);
          if (lines.length >= numLines) break;
        }
      }
    }

    // If there is leftover content (file ended without a trailing newline)
    // and we still need lines, count it as the final line.
    if (buffer.length > 0 && lines.length < numLines) {
      lines.push(buffer);
    }

    return lines.join('\n');
  } finally {
    await fileHandle.close();
  }
}
/**
 * Recursively search `rootPath` for entries whose name contains `pattern`
 * (case-insensitive substring match) and return their absolute paths.
 *
 * Every visited path is re-checked with `validatePath`; entries that fail
 * validation or cannot be read are skipped silently so one bad entry does not
 * abort the whole search. `options.excludePatterns` are minimatch globs
 * evaluated against the path relative to `rootPath`; a bare name (no '*') is
 * wrapped in a recursive directory glob.
 *
 * NOTE(review): the `allowedDirectories` parameter is currently unused —
 * validation relies on the module-level allow-list via `validatePath`.
 * Confirm whether it should be threaded through or dropped from the API.
 */
export async function searchFilesWithValidation(
  rootPath: string,
  pattern: string,
  allowedDirectories: string[],
  options: SearchOptions = {}
): Promise<string[]> {
  const excludePatterns = options.excludePatterns ?? [];
  const needle = pattern.toLowerCase();
  const matches: string[] = [];

  const isExcluded = (relativePath: string): boolean =>
    excludePatterns.some((excludePattern) => {
      const globPattern = excludePattern.includes('*')
        ? excludePattern
        : `**/${excludePattern}/**`;
      return minimatch(relativePath, globPattern, { dot: true });
    });

  async function walk(currentPath: string): Promise<void> {
    const entries = await fs.readdir(currentPath, { withFileTypes: true });
    for (const entry of entries) {
      const fullPath = path.join(currentPath, entry.name);
      try {
        await validatePath(fullPath);
        if (isExcluded(path.relative(rootPath, fullPath))) continue;
        if (entry.name.toLowerCase().includes(needle)) {
          matches.push(fullPath);
        }
        if (entry.isDirectory()) {
          await walk(fullPath);
        }
      } catch {
        // Skip entries that fail validation or cannot be read.
        continue;
      }
    }
  }

  await walk(rootPath);
  return matches;
}

View File

@@ -1,6 +1,6 @@
{
"name": "@modelcontextprotocol/server-filesystem",
"version": "0.6.2",
"version": "0.6.3",
"description": "MCP server for filesystem access",
"license": "MIT",
"author": "Anthropic, PBC (https://anthropic.com)",

View File

@@ -68,10 +68,19 @@ export function isPathWithinAllowedDirectories(absolutePath: string, allowedDire
}
// Special case for root directory to avoid double slash
// On Windows, we need to check if both paths are on the same drive
if (normalizedDir === path.sep) {
return normalizedPath.startsWith(path.sep);
}
// On Windows, also check for drive root (e.g., "C:\")
if (path.sep === '\\' && normalizedDir.match(/^[A-Za-z]:\\?$/)) {
// Ensure both paths are on the same drive
const dirDrive = normalizedDir.charAt(0).toLowerCase();
const pathDrive = normalizedPath.charAt(0).toLowerCase();
return pathDrive === dirDrive && normalizedPath.startsWith(normalizedDir.replace(/\\?$/, '\\'));
}
return normalizedPath.startsWith(normalizedDir + path.sep);
});
}
}