fix(security): restrict skill download target paths

This commit is contained in:
Peter Steinberger
2026-02-16 03:46:32 +01:00
parent c6c53437f7
commit 2363e1b085
9 changed files with 442 additions and 324 deletions

View File

@@ -36,6 +36,7 @@ Docs: https://docs.openclaw.ai
- Gateway/Agent: reject malformed `agent:`-prefixed session keys (for example, `agent:main`) in `agent` and `agent.identity.get` instead of silently resolving them to the default agent, preventing accidental cross-session routing. (#15707) Thanks @rodrigouroz.
- Gateway/Security: redact sensitive session/path details from `status` responses for non-admin clients; full details remain available to `operator.admin`. (#8590) Thanks @fr33d3m0n.
- Web Fetch/Security: cap downloaded response body size before HTML parsing to prevent memory exhaustion from oversized or deeply nested pages. Thanks @xuemian168.
- Skills/Security: restrict `download` installer `targetDir` to the per-skill tools directory to prevent arbitrary file writes. Thanks @Adam55A-code.
- Agents: return an explicit timeout error reply when an embedded run times out before producing any payloads, preventing silent dropped turns during slow cache-refresh transitions. (#16659) Thanks @liaosvcaf and @vignesh07.
- Agents/OpenAI: force `store=true` for direct OpenAI Responses/Codex runs to preserve multi-turn server-side conversation state, while leaving proxy/non-OpenAI endpoints unchanged. (#16803) Thanks @mark9232 and @vignesh07.
- Agents/Security: sanitize workspace paths before embedding into LLM prompts (strip Unicode control/format chars) to prevent instruction injection via malicious directory names. Thanks @aether-ai-agent.

View File

@@ -18,7 +18,7 @@ metadata:
"archive": "tar.bz2",
"extract": true,
"stripComponents": 1,
"targetDir": "~/.openclaw/tools/sherpa-onnx-tts/runtime",
"targetDir": "runtime",
"label": "Download sherpa-onnx runtime (macOS)",
},
{
@@ -29,7 +29,7 @@ metadata:
"archive": "tar.bz2",
"extract": true,
"stripComponents": 1,
"targetDir": "~/.openclaw/tools/sherpa-onnx-tts/runtime",
"targetDir": "runtime",
"label": "Download sherpa-onnx runtime (Linux x64)",
},
{
@@ -40,7 +40,7 @@ metadata:
"archive": "tar.bz2",
"extract": true,
"stripComponents": 1,
"targetDir": "~/.openclaw/tools/sherpa-onnx-tts/runtime",
"targetDir": "runtime",
"label": "Download sherpa-onnx runtime (Windows x64)",
},
{
@@ -49,7 +49,7 @@ metadata:
"url": "https://github.com/k2-fsa/sherpa-onnx/releases/download/tts-models/vits-piper-en_US-lessac-high.tar.bz2",
"archive": "tar.bz2",
"extract": true,
"targetDir": "~/.openclaw/tools/sherpa-onnx-tts/models",
"targetDir": "models",
"label": "Download Piper en_US lessac (high)",
},
],

View File

@@ -0,0 +1,376 @@
import type { ReadableStream as NodeReadableStream } from "node:stream/web";
import fs from "node:fs";
import path from "node:path";
import { Readable } from "node:stream";
import { pipeline } from "node:stream/promises";
import type { SkillInstallResult } from "./skills-install.js";
import type { SkillEntry, SkillInstallSpec } from "./skills.js";
import { extractArchive as extractArchiveSafe } from "../infra/archive.js";
import { fetchWithSsrFGuard } from "../infra/net/fetch-guard.js";
import { isWithinDir, resolveSafeBaseDir } from "../infra/path-safety.js";
import { runCommandWithTimeout } from "../process/exec.js";
import { ensureDir, resolveUserPath } from "../utils.js";
import { hasBinary } from "./skills.js";
import { resolveSkillToolsRootDir } from "./skills/tools-dir.js";
/** Duck-type check: a Node.js readable stream exposes a callable `pipe`. */
function isNodeReadableStream(value: unknown): value is NodeJS.ReadableStream {
  if (!value) {
    return false;
  }
  const candidate = value as NodeJS.ReadableStream;
  return typeof candidate.pipe === "function";
}
/**
 * Condense raw installer output into a single short line for error messages.
 *
 * Prefers the first line starting with "error", then the first line containing
 * an error-ish token (err!/error:/failed), then the last line. Whitespace is
 * collapsed and the result capped at 200 characters.
 *
 * @returns The summary line, or undefined when the text has no usable content.
 */
function summarizeInstallOutput(text: string): string | undefined {
  const raw = text.trim();
  if (!raw) {
    return undefined;
  }
  const lines = raw
    .split("\n")
    .map((line) => line.trim())
    .filter(Boolean);
  if (lines.length === 0) {
    return undefined;
  }
  const preferred =
    lines.find((line) => /^error\b/i.test(line)) ??
    lines.find((line) => /\b(err!|error:|failed)\b/i.test(line)) ??
    lines.at(-1);
  if (!preferred) {
    return undefined;
  }
  const normalized = preferred.replace(/\s+/g, " ").trim();
  const maxLen = 200;
  // Fix: the original sliced to maxLen - 1 (reserving room for an ellipsis)
  // but never appended it, silently emitting a 199-char message with no
  // truncation indicator. Append the ellipsis so truncation is visible.
  return normalized.length > maxLen ? `${normalized.slice(0, maxLen - 1)}…` : normalized;
}
/**
 * Build a one-line "Install failed" message from a command result, including
 * the exit code and a summarized stderr (preferred) or stdout line when one
 * is available.
 */
function formatInstallFailureMessage(result: {
  code: number | null;
  stdout: string;
  stderr: string;
}): string {
  const code = typeof result.code === "number" ? `exit ${result.code}` : "unknown exit";
  const summary = summarizeInstallOutput(result.stderr) ?? summarizeInstallOutput(result.stdout);
  return summary ? `Install failed (${code}): ${summary}` : `Install failed (${code})`;
}
/** True when the path begins with a Windows drive letter plus separator, e.g. "C:\\" or "c:/". */
function isWindowsDrivePath(p: string): boolean {
  const drivePattern = /^[a-zA-Z]:[\\/]/;
  return drivePattern.test(p);
}
/**
 * Resolve the effective download target directory for a skill install spec.
 *
 * Relative paths are anchored at the per-skill tools root; "~"-prefixed,
 * absolute, and Windows drive paths are expanded via resolveUserPath. Any
 * result that escapes the per-skill tools root is rejected with an error.
 */
function resolveDownloadTargetDir(entry: SkillEntry, spec: SkillInstallSpec): string {
  const safeRoot = resolveSkillToolsRootDir(entry);
  const requested = spec.targetDir?.trim();
  if (!requested) {
    return safeRoot;
  }
  // Treat non-absolute paths as relative to the per-skill tools root.
  const isAnchored =
    requested.startsWith("~") || path.isAbsolute(requested) || isWindowsDrivePath(requested);
  const resolved = isAnchored ? resolveUserPath(requested) : path.resolve(safeRoot, requested);
  if (isWithinDir(safeRoot, resolved)) {
    return resolved;
  }
  throw new Error(
    `Refusing to install outside the skill tools directory. targetDir="${requested}" resolves to "${resolved}". Allowed root: "${safeRoot}".`,
  );
}
/**
 * Determine the archive format for a download: an explicit spec.archive wins,
 * otherwise infer from the filename extension. Returns undefined when the
 * format cannot be determined.
 */
function resolveArchiveType(spec: SkillInstallSpec, filename: string): string | undefined {
  const explicit = spec.archive?.trim().toLowerCase();
  if (explicit) {
    return explicit;
  }
  const lower = filename.toLowerCase();
  const suffixToKind: Array<[string[], string]> = [
    [[".tar.gz", ".tgz"], "tar.gz"],
    [[".tar.bz2", ".tbz2"], "tar.bz2"],
    [[".zip"], "zip"],
  ];
  for (const [suffixes, kind] of suffixToKind) {
    if (suffixes.some((suffix) => lower.endsWith(suffix))) {
      return kind;
    }
  }
  return undefined;
}
/** Normalize archive entry separators: every backslash becomes a forward slash. */
function normalizeArchiveEntryPath(raw: string): string {
  return raw.split("\\").join("/");
}
/**
 * Throw when an archive entry path could write outside the target directory.
 * Windows drive paths, "../" escapes (after posix normalization), and
 * absolute paths are all rejected. Bare "." entries pass through as harmless
 * directory markers.
 */
function validateArchiveEntryPath(entryPath: string): void {
  if (!entryPath || entryPath === "." || entryPath === "./") {
    return;
  }
  if (isWindowsDrivePath(entryPath)) {
    throw new Error(`archive entry uses a drive path: ${entryPath}`);
  }
  const normalized = path.posix.normalize(normalizeArchiveEntryPath(entryPath));
  const escapes = normalized === ".." || normalized.startsWith("../");
  if (escapes) {
    throw new Error(`archive entry escapes targetDir: ${entryPath}`);
  }
  const absolute = path.posix.isAbsolute(normalized) || normalized.startsWith("//");
  if (absolute) {
    throw new Error(`archive entry is absolute: ${entryPath}`);
  }
}
/**
 * Apply tar-style --strip-components to an archive entry path.
 *
 * Returns the stripped, posix-normalized relative path, or null when nothing
 * remains (empty entries or pure directory markers).
 */
function stripArchivePath(entryPath: string, stripComponents: number): string | null {
  const raw = normalizeArchiveEntryPath(entryPath);
  if (!raw || raw === "." || raw === "./") {
    return null;
  }
  // Important: tar's --strip-components semantics operate on raw path segments,
  // before any normalization that would collapse "..". We mimic that so we
  // can detect strip-induced escapes like "a/../b" with stripComponents=1.
  const segments = raw.split("/").filter((segment) => segment.length > 0 && segment !== ".");
  const strip = Math.max(0, Math.floor(stripComponents));
  const remaining = strip > 0 ? segments.slice(strip) : segments;
  const result = path.posix.normalize(remaining.join("/"));
  if (!result || result === "." || result === "./") {
    return null;
  }
  return result;
}
/**
 * Throw when rootDir + relPath resolves outside rootDir. Final containment
 * guard applied after strip-components rewriting of archive entry paths.
 */
function validateExtractedPathWithinRoot(params: {
  rootDir: string;
  relPath: string;
  originalPath: string;
}): void {
  const base = resolveSafeBaseDir(params.rootDir);
  const candidate = path.resolve(params.rootDir, params.relPath);
  if (candidate.startsWith(base)) {
    return;
  }
  throw new Error(`archive entry escapes targetDir: ${params.originalPath}`);
}
/**
 * Stream a URL to destPath using the SSRF-guarded fetch helper.
 *
 * @param url - Source URL (validated by fetchWithSsrFGuard).
 * @param destPath - Destination file path; its parent dir is created.
 * @param timeoutMs - Fetch timeout; a 1s floor is enforced.
 * @returns Byte count of the written file (from a post-write stat).
 * @throws On non-OK responses or a missing response body.
 *
 * The guard's release() is always awaited, even when the download fails.
 */
async function downloadFile(
  url: string,
  destPath: string,
  timeoutMs: number,
): Promise<{ bytes: number }> {
  const { response, release } = await fetchWithSsrFGuard({
    url,
    timeoutMs: Math.max(1_000, timeoutMs),
  });
  try {
    if (!response.ok || !response.body) {
      throw new Error(`Download failed (${response.status} ${response.statusText})`);
    }
    await ensureDir(path.dirname(destPath));
    const file = fs.createWriteStream(destPath);
    const body = response.body as unknown;
    // fetch implementations may return either a Node stream or a web
    // ReadableStream; adapt the latter so pipeline() can consume it.
    const readable = isNodeReadableStream(body)
      ? body
      : Readable.fromWeb(body as NodeReadableStream);
    await pipeline(readable, file);
    const stat = await fs.promises.stat(destPath);
    return { bytes: stat.size };
  } finally {
    await release();
  }
}
/**
 * Extract an archive into targetDir, refusing anything that could write
 * outside it.
 *
 * - "zip" and "tar.gz" delegate to extractArchiveSafe, which performs its
 *   own path hygiene.
 * - "tar.bz2" shells out to the system tar, but only after two preflight
 *   listings: a plain listing whose entries are path-validated (both the raw
 *   entry and its post-strip-components form), and a verbose listing used to
 *   reject symlink/hardlink entries outright.
 *
 * Never throws: failures are reported via a non-zero/null `code` plus stderr
 * text, matching the shape of runCommandWithTimeout results.
 */
async function extractArchive(params: {
  archivePath: string;
  archiveType: string;
  targetDir: string;
  stripComponents?: number;
  timeoutMs: number;
}): Promise<{ stdout: string; stderr: string; code: number | null }> {
  const { archivePath, archiveType, targetDir, stripComponents, timeoutMs } = params;
  // Clamp to a non-negative integer; non-finite or missing values mean 0.
  const strip =
    typeof stripComponents === "number" && Number.isFinite(stripComponents)
      ? Math.max(0, Math.floor(stripComponents))
      : 0;
  try {
    if (archiveType === "zip") {
      await extractArchiveSafe({
        archivePath,
        destDir: targetDir,
        timeoutMs,
        kind: "zip",
        stripComponents: strip,
      });
      return { stdout: "", stderr: "", code: 0 };
    }
    if (archiveType === "tar.gz") {
      await extractArchiveSafe({
        archivePath,
        destDir: targetDir,
        timeoutMs,
        kind: "tar",
        stripComponents: strip,
        tarGzip: true,
      });
      return { stdout: "", stderr: "", code: 0 };
    }
    if (archiveType === "tar.bz2") {
      if (!hasBinary("tar")) {
        return { stdout: "", stderr: "tar not found on PATH", code: null };
      }
      // Preflight list to prevent zip-slip style traversal before extraction.
      const listResult = await runCommandWithTimeout(["tar", "tf", archivePath], { timeoutMs });
      if (listResult.code !== 0) {
        return {
          stdout: listResult.stdout,
          stderr: listResult.stderr || "tar list failed",
          code: listResult.code,
        };
      }
      const entries = listResult.stdout
        .split("\n")
        .map((line) => line.trim())
        .filter(Boolean);
      // Second, verbose listing: the type character and "->" marker expose
      // link entries that the plain listing cannot.
      const verboseResult = await runCommandWithTimeout(["tar", "tvf", archivePath], { timeoutMs });
      if (verboseResult.code !== 0) {
        return {
          stdout: verboseResult.stdout,
          stderr: verboseResult.stderr || "tar verbose list failed",
          code: verboseResult.code,
        };
      }
      for (const line of verboseResult.stdout.split("\n")) {
        const trimmed = line.trim();
        if (!trimmed) {
          continue;
        }
        // "l" = symlink, "h" = hardlink in tar's verbose type column; either
        // could be used to redirect later writes outside targetDir.
        const typeChar = trimmed[0];
        if (typeChar === "l" || typeChar === "h" || trimmed.includes(" -> ")) {
          return {
            stdout: verboseResult.stdout,
            stderr: "tar archive contains link entries; refusing to extract for safety",
            code: 1,
          };
        }
      }
      // Validate each entry twice: as-is, and after simulating the same
      // --strip-components transformation tar will apply, so strip-induced
      // escapes (e.g. "a/../b" with strip=1) are caught before extraction.
      for (const entry of entries) {
        validateArchiveEntryPath(entry);
        const relPath = stripArchivePath(entry, strip);
        if (!relPath) {
          continue;
        }
        validateArchiveEntryPath(relPath);
        validateExtractedPathWithinRoot({ rootDir: targetDir, relPath, originalPath: entry });
      }
      const argv = ["tar", "xf", archivePath, "-C", targetDir];
      if (strip > 0) {
        argv.push("--strip-components", String(strip));
      }
      return await runCommandWithTimeout(argv, { timeoutMs });
    }
    return { stdout: "", stderr: `unsupported archive type: ${archiveType}`, code: null };
  } catch (err) {
    // Validation helpers throw; convert to the result shape callers expect.
    const message = err instanceof Error ? err.message : String(err);
    return { stdout: "", stderr: message, code: 1 };
  }
}
/**
 * Execute a `download` install spec for a skill: resolve a safe target
 * directory, download the file, and (optionally) extract it.
 *
 * @param params.entry - Skill whose per-skill tools root bounds the install.
 * @param params.spec - Install spec (url, archive, extract, stripComponents, targetDir).
 * @param params.timeoutMs - Timeout applied to both download and extraction.
 * @returns A SkillInstallResult; all failure paths return ok=false rather
 *   than throwing.
 */
export async function installDownloadSpec(params: {
  entry: SkillEntry;
  spec: SkillInstallSpec;
  timeoutMs: number;
}): Promise<SkillInstallResult> {
  const { entry, spec, timeoutMs } = params;
  const url = spec.url?.trim();
  if (!url) {
    return {
      ok: false,
      message: "missing download url",
      stdout: "",
      stderr: "",
      code: null,
    };
  }
  // Derive a local filename from the URL path; fall back to basename of the
  // raw string for unparseable URLs, then to a fixed name.
  let filename = "";
  try {
    const parsed = new URL(url);
    filename = path.basename(parsed.pathname);
  } catch {
    filename = path.basename(url);
  }
  if (!filename) {
    filename = "download";
  }
  let targetDir = "";
  try {
    // Throws if spec.targetDir escapes the per-skill tools root.
    targetDir = resolveDownloadTargetDir(entry, spec);
    await ensureDir(targetDir);
    // lstat (not stat) so a symlinked targetDir is detected rather than
    // followed. NOTE(review): this check happens after ensureDir, so a small
    // TOCTOU window remains — confirm this is acceptable for the threat model.
    const stat = await fs.promises.lstat(targetDir);
    if (stat.isSymbolicLink()) {
      throw new Error(`targetDir is a symlink: ${targetDir}`);
    }
    if (!stat.isDirectory()) {
      throw new Error(`targetDir is not a directory: ${targetDir}`);
    }
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    return { ok: false, message, stdout: "", stderr: message, code: null };
  }
  const archivePath = path.join(targetDir, filename);
  let downloaded = 0;
  try {
    const result = await downloadFile(url, archivePath, timeoutMs);
    downloaded = result.bytes;
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);
    return { ok: false, message, stdout: "", stderr: message, code: null };
  }
  const archiveType = resolveArchiveType(spec, filename);
  // Default: extract whenever the archive type is recognizable, unless the
  // spec explicitly says otherwise.
  const shouldExtract = spec.extract ?? Boolean(archiveType);
  if (!shouldExtract) {
    return {
      ok: true,
      message: `Downloaded to ${archivePath}`,
      stdout: `downloaded=${downloaded}`,
      stderr: "",
      code: 0,
    };
  }
  if (!archiveType) {
    return {
      ok: false,
      message: "extract requested but archive type could not be detected",
      stdout: "",
      stderr: "",
      code: null,
    };
  }
  const extractResult = await extractArchive({
    archivePath,
    archiveType,
    targetDir,
    stripComponents: spec.stripComponents,
    timeoutMs,
  });
  const success = extractResult.code === 0;
  return {
    ok: success,
    message: success
      ? `Downloaded and extracted to ${targetDir}`
      : formatInstallFailureMessage(extractResult),
    stdout: extractResult.stdout.trim(),
    stderr: extractResult.stderr.trim(),
    code: extractResult.code,
  };
}

View File

@@ -1,15 +1,11 @@
import type { ReadableStream as NodeReadableStream } from "node:stream/web";
import fs from "node:fs";
import path from "node:path";
import { Readable } from "node:stream";
import { pipeline } from "node:stream/promises";
import type { OpenClawConfig } from "../config/config.js";
import { extractArchive as extractArchiveSafe } from "../infra/archive.js";
import { resolveBrewExecutable } from "../infra/brew.js";
import { fetchWithSsrFGuard } from "../infra/net/fetch-guard.js";
import { runCommandWithTimeout } from "../process/exec.js";
import { scanDirectoryWithSummary } from "../security/skill-scanner.js";
import { CONFIG_DIR, ensureDir, resolveUserPath } from "../utils.js";
import { resolveUserPath } from "../utils.js";
import { installDownloadSpec } from "./skills-install-download.js";
import {
hasBinary,
loadWorkspaceSkillEntries,
@@ -18,7 +14,6 @@ import {
type SkillInstallSpec,
type SkillsInstallPreferences,
} from "./skills.js";
import { resolveSkillKey } from "./skills/frontmatter.js";
export type SkillInstallRequest = {
workspaceDir: string;
@@ -37,10 +32,6 @@ export type SkillInstallResult = {
warnings?: string[];
};
function isNodeReadableStream(value: unknown): value is NodeJS.ReadableStream {
return Boolean(value && typeof (value as NodeJS.ReadableStream).pipe === "function");
}
function summarizeInstallOutput(text: string): string | undefined {
const raw = text.trim();
if (!raw) {
@@ -200,304 +191,6 @@ function buildInstallCommand(
}
}
function resolveDownloadTargetDir(entry: SkillEntry, spec: SkillInstallSpec): string {
if (spec.targetDir?.trim()) {
return resolveUserPath(spec.targetDir);
}
const key = resolveSkillKey(entry.skill, entry);
return path.join(CONFIG_DIR, "tools", key);
}
function resolveArchiveType(spec: SkillInstallSpec, filename: string): string | undefined {
const explicit = spec.archive?.trim().toLowerCase();
if (explicit) {
return explicit;
}
const lower = filename.toLowerCase();
if (lower.endsWith(".tar.gz") || lower.endsWith(".tgz")) {
return "tar.gz";
}
if (lower.endsWith(".tar.bz2") || lower.endsWith(".tbz2")) {
return "tar.bz2";
}
if (lower.endsWith(".zip")) {
return "zip";
}
return undefined;
}
function normalizeArchiveEntryPath(raw: string): string {
return raw.replaceAll("\\", "/");
}
function isWindowsDrivePath(p: string): boolean {
return /^[a-zA-Z]:[\\/]/.test(p);
}
function validateArchiveEntryPath(entryPath: string): void {
if (!entryPath || entryPath === "." || entryPath === "./") {
return;
}
if (isWindowsDrivePath(entryPath)) {
throw new Error(`archive entry uses a drive path: ${entryPath}`);
}
const normalized = path.posix.normalize(normalizeArchiveEntryPath(entryPath));
if (normalized === ".." || normalized.startsWith("../")) {
throw new Error(`archive entry escapes targetDir: ${entryPath}`);
}
if (path.posix.isAbsolute(normalized) || normalized.startsWith("//")) {
throw new Error(`archive entry is absolute: ${entryPath}`);
}
}
function resolveSafeBaseDir(rootDir: string): string {
const resolved = path.resolve(rootDir);
return resolved.endsWith(path.sep) ? resolved : `${resolved}${path.sep}`;
}
function stripArchivePath(entryPath: string, stripComponents: number): string | null {
const raw = normalizeArchiveEntryPath(entryPath);
if (!raw || raw === "." || raw === "./") {
return null;
}
// Important: tar's --strip-components semantics operate on raw path segments,
// before any normalization that would collapse "..". We mimic that so we
// can detect strip-induced escapes like "a/../b" with stripComponents=1.
const parts = raw.split("/").filter((part) => part.length > 0 && part !== ".");
const strip = Math.max(0, Math.floor(stripComponents));
const stripped = strip === 0 ? parts.join("/") : parts.slice(strip).join("/");
const result = path.posix.normalize(stripped);
if (!result || result === "." || result === "./") {
return null;
}
return result;
}
function validateExtractedPathWithinRoot(params: {
rootDir: string;
relPath: string;
originalPath: string;
}): void {
const safeBase = resolveSafeBaseDir(params.rootDir);
const outPath = path.resolve(params.rootDir, params.relPath);
if (!outPath.startsWith(safeBase)) {
throw new Error(`archive entry escapes targetDir: ${params.originalPath}`);
}
}
async function downloadFile(
url: string,
destPath: string,
timeoutMs: number,
): Promise<{ bytes: number }> {
const { response, release } = await fetchWithSsrFGuard({
url,
timeoutMs: Math.max(1_000, timeoutMs),
});
try {
if (!response.ok || !response.body) {
throw new Error(`Download failed (${response.status} ${response.statusText})`);
}
await ensureDir(path.dirname(destPath));
const file = fs.createWriteStream(destPath);
const body = response.body as unknown;
const readable = isNodeReadableStream(body)
? body
: Readable.fromWeb(body as NodeReadableStream);
await pipeline(readable, file);
const stat = await fs.promises.stat(destPath);
return { bytes: stat.size };
} finally {
await release();
}
}
async function extractArchive(params: {
archivePath: string;
archiveType: string;
targetDir: string;
stripComponents?: number;
timeoutMs: number;
}): Promise<{ stdout: string; stderr: string; code: number | null }> {
const { archivePath, archiveType, targetDir, stripComponents, timeoutMs } = params;
const strip =
typeof stripComponents === "number" && Number.isFinite(stripComponents)
? Math.max(0, Math.floor(stripComponents))
: 0;
try {
if (archiveType === "zip") {
await extractArchiveSafe({
archivePath,
destDir: targetDir,
timeoutMs,
kind: "zip",
stripComponents: strip,
});
return { stdout: "", stderr: "", code: 0 };
}
if (archiveType === "tar.gz") {
await extractArchiveSafe({
archivePath,
destDir: targetDir,
timeoutMs,
kind: "tar",
stripComponents: strip,
tarGzip: true,
});
return { stdout: "", stderr: "", code: 0 };
}
if (archiveType === "tar.bz2") {
if (!hasBinary("tar")) {
return { stdout: "", stderr: "tar not found on PATH", code: null };
}
// Preflight list to prevent zip-slip style traversal before extraction.
const listResult = await runCommandWithTimeout(["tar", "tf", archivePath], { timeoutMs });
if (listResult.code !== 0) {
return {
stdout: listResult.stdout,
stderr: listResult.stderr || "tar list failed",
code: listResult.code,
};
}
const entries = listResult.stdout
.split("\n")
.map((line) => line.trim())
.filter(Boolean);
const verboseResult = await runCommandWithTimeout(["tar", "tvf", archivePath], { timeoutMs });
if (verboseResult.code !== 0) {
return {
stdout: verboseResult.stdout,
stderr: verboseResult.stderr || "tar verbose list failed",
code: verboseResult.code,
};
}
for (const line of verboseResult.stdout.split("\n")) {
const trimmed = line.trim();
if (!trimmed) {
continue;
}
const typeChar = trimmed[0];
if (typeChar === "l" || typeChar === "h" || trimmed.includes(" -> ")) {
return {
stdout: verboseResult.stdout,
stderr: "tar archive contains link entries; refusing to extract for safety",
code: 1,
};
}
}
for (const entry of entries) {
validateArchiveEntryPath(entry);
const relPath = stripArchivePath(entry, strip);
if (!relPath) {
continue;
}
validateArchiveEntryPath(relPath);
validateExtractedPathWithinRoot({ rootDir: targetDir, relPath, originalPath: entry });
}
const argv = ["tar", "xf", archivePath, "-C", targetDir];
if (strip > 0) {
argv.push("--strip-components", String(strip));
}
return await runCommandWithTimeout(argv, { timeoutMs });
}
return { stdout: "", stderr: `unsupported archive type: ${archiveType}`, code: null };
} catch (err) {
const message = err instanceof Error ? err.message : String(err);
return { stdout: "", stderr: message, code: 1 };
}
}
async function installDownloadSpec(params: {
entry: SkillEntry;
spec: SkillInstallSpec;
timeoutMs: number;
}): Promise<SkillInstallResult> {
const { entry, spec, timeoutMs } = params;
const url = spec.url?.trim();
if (!url) {
return {
ok: false,
message: "missing download url",
stdout: "",
stderr: "",
code: null,
};
}
let filename = "";
try {
const parsed = new URL(url);
filename = path.basename(parsed.pathname);
} catch {
filename = path.basename(url);
}
if (!filename) {
filename = "download";
}
const targetDir = resolveDownloadTargetDir(entry, spec);
await ensureDir(targetDir);
const archivePath = path.join(targetDir, filename);
let downloaded = 0;
try {
const result = await downloadFile(url, archivePath, timeoutMs);
downloaded = result.bytes;
} catch (err) {
const message = err instanceof Error ? err.message : String(err);
return { ok: false, message, stdout: "", stderr: message, code: null };
}
const archiveType = resolveArchiveType(spec, filename);
const shouldExtract = spec.extract ?? Boolean(archiveType);
if (!shouldExtract) {
return {
ok: true,
message: `Downloaded to ${archivePath}`,
stdout: `downloaded=${downloaded}`,
stderr: "",
code: 0,
};
}
if (!archiveType) {
return {
ok: false,
message: "extract requested but archive type could not be detected",
stdout: "",
stderr: "",
code: null,
};
}
const extractResult = await extractArchive({
archivePath,
archiveType,
targetDir,
stripComponents: spec.stripComponents,
timeoutMs,
});
const success = extractResult.code === 0;
return {
ok: success,
message: success
? `Downloaded and extracted to ${targetDir}`
: formatInstallFailureMessage(extractResult),
stdout: extractResult.stdout.trim(),
stderr: extractResult.stderr.trim(),
code: extractResult.code,
};
}
async function resolveBrewBinDir(timeoutMs: number, brewExe?: string): Promise<string | undefined> {
const exe = brewExe ?? (hasBinary("brew") ? "brew" : resolveBrewExecutable());
if (!exe) {

View File

@@ -0,0 +1,11 @@
import path from "node:path";
import type { SkillEntry } from "./types.js";
import { safePathSegmentHashed } from "../../infra/install-safe-path.js";
import { resolveConfigDir } from "../../utils.js";
import { resolveSkillKey } from "./frontmatter.js";
/**
 * Per-skill tools root: <configDir>/tools/<sanitized skill key>.
 *
 * The key is passed through safePathSegmentHashed so hostile or unusual
 * skill names cannot produce traversal sequences or unsafe path segments.
 */
export function resolveSkillToolsRootDir(entry: SkillEntry): string {
  const key = resolveSkillKey(entry.skill, entry);
  const safeKey = safePathSegmentHashed(key);
  return path.join(resolveConfigDir(), "tools", safeKey);
}

View File

@@ -5,6 +5,7 @@ import path from "node:path";
import { Readable, Transform } from "node:stream";
import { pipeline } from "node:stream/promises";
import * as tar from "tar";
import { resolveSafeBaseDir } from "./path-safety.js";
export type ArchiveKind = "tar" | "zip";
@@ -101,11 +102,6 @@ export async function withTimeout<T>(
}
}
function resolveSafeBaseDir(destDir: string): string {
const resolved = path.resolve(destDir);
return resolved.endsWith(path.sep) ? resolved : `${resolved}${path.sep}`;
}
// Path hygiene.
function normalizeArchivePath(raw: string): string {
// Archives may contain Windows separators; treat them as separators.

View File

@@ -1,3 +1,4 @@
import { createHash } from "node:crypto";
import path from "node:path";
export function unscopedPackageName(name: string): string {
@@ -16,6 +17,30 @@ export function safeDirName(input: string): string {
return trimmed.replaceAll("/", "__").replaceAll("\\", "__");
}
/**
 * Turn an arbitrary skill key into a single filesystem-safe path segment.
 *
 * Separators and disallowed characters collapse to "-"; empty or dot-only
 * results fall back to "skill". Whenever the sanitized form differs from the
 * input — or the input is longer than 60 chars — a 10-hex-char sha256 suffix
 * of the original is appended so distinct keys remain distinct.
 */
export function safePathSegmentHashed(input: string): string {
  const trimmed = input.trim();
  let base = trimmed
    .replace(/[\\/]/g, "-")
    .replace(/[^a-zA-Z0-9._-]/g, "-")
    .replace(/-+/g, "-")
    .replace(/^-+/, "")
    .replace(/-+$/, "");
  if (base.length === 0 || base === "." || base === "..") {
    base = "skill";
  }
  const digest = createHash("sha256").update(trimmed).digest("hex").slice(0, 10);
  const unchanged = base === trimmed;
  if (unchanged && base.length <= 60) {
    return base;
  }
  const prefix = base.length > 50 ? base.slice(0, 50) : base;
  return `${prefix}-${digest}`;
}
export function resolveSafeInstallDir(params: {
baseDir: string;
id: string;

20
src/infra/path-safety.ts Normal file
View File

@@ -0,0 +1,20 @@
import path from "node:path";
/**
 * Resolve rootDir and guarantee a trailing separator, making it safe to use
 * as a prefix in startsWith-style containment checks (prevents "/a" from
 * matching "/a-evil").
 */
export function resolveSafeBaseDir(rootDir: string): string {
  const resolved = path.resolve(rootDir);
  if (resolved.endsWith(path.sep)) {
    return resolved;
  }
  return resolved + path.sep;
}
/**
 * True when targetPath is rootDir itself or located underneath it.
 *
 * Uses path.relative segment semantics rather than string prefixes, so
 * siblings like "/a-evil" are never mistaken for children of "/a".
 *
 * Fix: the previous `relative.startsWith("..")` check falsely rejected a
 * legitimate child whose own name begins with ".." (e.g. "/a/..foo" under
 * "/a" yields relative "..foo"). Escapes are exactly ".." or "../…", so
 * compare against the ".." *segment*, not the two-character prefix.
 */
export function isWithinDir(rootDir: string, targetPath: string): boolean {
  const resolvedRoot = path.resolve(rootDir);
  const resolvedTarget = path.resolve(targetPath);
  // Windows paths are effectively case-insensitive; normalize to avoid false negatives.
  if (process.platform === "win32") {
    const relative = path.win32.relative(resolvedRoot.toLowerCase(), resolvedTarget.toLowerCase());
    return isInsideRelative(relative, path.win32.sep, path.win32.isAbsolute);
  }
  const relative = path.relative(resolvedRoot, resolvedTarget);
  return isInsideRelative(relative, path.sep, path.isAbsolute);
}

// A relative path stays inside its base unless it escapes ("..", "../…") or
// is absolute (e.g. a different drive on Windows, where relative() returns
// an absolute path).
function isInsideRelative(
  relative: string,
  sep: string,
  isAbsolute: (p: string) => boolean,
): boolean {
  if (relative === "") {
    return true;
  }
  if (relative === ".." || relative.startsWith(`..${sep}`)) {
    return false;
  }
  return !isAbsolute(relative);
}

View File

@@ -20,6 +20,7 @@ import {
DEFAULT_MAIN_KEY,
normalizeAgentId,
} from "../routing/session-key.js";
import { isWithinDir } from "./path-safety.js";
import {
ensureDir,
existsDir,
@@ -360,11 +361,6 @@ function isDirPath(filePath: string): boolean {
}
}
function isWithinDir(targetPath: string, rootDir: string): boolean {
const relative = path.relative(path.resolve(rootDir), path.resolve(targetPath));
return relative === "" || (!relative.startsWith("..") && !path.isAbsolute(relative));
}
function isLegacyTreeSymlinkMirror(currentDir: string, realTargetDir: string): boolean {
let entries: fs.Dirent[];
try {
@@ -395,7 +391,7 @@ function isLegacyTreeSymlinkMirror(currentDir: string, realTargetDir: string): b
} catch {
return false;
}
if (!isWithinDir(resolvedRealTarget, realTargetDir)) {
if (!isWithinDir(realTargetDir, resolvedRealTarget)) {
return false;
}
continue;