perf(test): reduce lock wait and fixture setup overhead
@@ -1,6 +1,6 @@
 import { type AddressInfo, createServer } from "node:net";
 import { fetch as realFetch } from "undici";
-import { afterEach, beforeEach, describe, expect, it, vi } from "vitest";
+import { afterAll, beforeAll, describe, expect, it, vi } from "vitest";

 let testPort = 0;
 let cdpBaseUrl = "";
@@ -185,10 +185,11 @@ function makeResponse(
 }

 describe("browser control server", () => {
-  beforeEach(async () => {
+  beforeAll(async () => {
     reachable = false;
     cfgAttachOnly = false;
     createTargetId = null;
     launchCalls.length = 0;

     cdpMocks.createTargetViaCdp.mockImplementation(async () => {
       if (createTargetId) {
@@ -262,7 +263,7 @@ describe("browser control server", () => {
     );
   });

-  afterEach(async () => {
+  afterAll(async () => {
     vi.unstubAllGlobals();
     vi.restoreAllMocks();
     if (prevGatewayPort === undefined) {
@@ -274,7 +275,7 @@ describe("browser control server", () => {
     await stopBrowserControlServer();
   });

-  it("covers primary control routes, validation, and profile compatibility", async () => {
+  it("covers primary control routes, validation, and attach-only compatibility", async () => {
     const { startBrowserControlServerFromConfig } = await import("./server.js");
     const started = await startBrowserControlServerFromConfig();
     expect(started?.port).toBe(testPort);
@@ -491,13 +492,6 @@ describe("browser control server", () => {
     };
     expect(stopped.ok).toBe(true);
     expect(stopped.profile).toBe("openclaw");
-  });
-
-  it("covers common error branches", async () => {
-    cfgAttachOnly = true;
-    const { startBrowserControlServerFromConfig } = await import("./server.js");
-    await startBrowserControlServerFromConfig();
-    const base = `http://127.0.0.1:${testPort}`;

     const missing = await realFetch(`${base}/tabs/open`, {
       method: "POST",
@@ -507,15 +501,12 @@ describe("browser control server", () => {
     expect(missing.status).toBe(400);

     reachable = false;
-    const started = (await realFetch(`${base}/start`, {
+    cfgAttachOnly = true;
+    const attachStarted = (await realFetch(`${base}/start`, {
       method: "POST",
     }).then((r) => r.json())) as { error?: string };
-    expect(started.error ?? "").toMatch(/attachOnly/i);
-  });
+    expect(attachStarted.error ?? "").toMatch(/attachOnly/i);

-  it("allows attachOnly servers to ensure reachability via callback", async () => {
-    cfgAttachOnly = true;
-    reachable = false;
     const { startBrowserBridgeServer } = await import("./bridge-server.js");

     const ensured = vi.fn(async () => {
@@ -541,11 +532,11 @@ describe("browser control server", () => {
       onEnsureAttachTarget: ensured,
     });

-    const started = (await realFetch(`${bridge.baseUrl}/start`, {
+    const bridgeStarted = (await realFetch(`${bridge.baseUrl}/start`, {
       method: "POST",
     }).then((r) => r.json())) as { ok?: boolean; error?: string };
-    expect(started.error).toBeUndefined();
-    expect(started.ok).toBe(true);
+    expect(bridgeStarted.error).toBeUndefined();
+    expect(bridgeStarted.ok).toBe(true);
     const status = (await realFetch(`${bridge.baseUrl}/`).then((r) => r.json())) as {
       running?: boolean;
     };
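The thrust of the changes above is hoisting the server lifecycle out of per-test hooks: beforeEach/afterEach become beforeAll/afterAll, the server starts once for the whole suite, and the former standalone tests are folded into the shared-server flow (hence the attachStarted/bridgeStarted renames, which avoid clashing with the existing started variable). A minimal sketch of the same trade-off, with illustrative names rather than the real server API:

import { type AddressInfo, createServer, type Server } from "node:net";
import { afterAll, beforeAll, describe, expect, it } from "vitest";

// Hypothetical shared fixture: one TCP server per suite instead of one per test.
let server: Server;
let port = 0;
let attachOnly = false; // per-test state that the hooks no longer reset

describe("suite with a shared server", () => {
  beforeAll(async () => {
    server = createServer();
    await new Promise<void>((resolve) => server.listen(0, resolve));
    port = (server.address() as AddressInfo).port;
  });

  afterAll(async () => {
    await new Promise<void>((resolve) => server.close(() => resolve()));
  });

  it("uses the shared server", () => {
    expect(port).toBeGreaterThan(0);
  });

  it("sets the state it needs instead of relying on a beforeEach reset", () => {
    attachOnly = true; // inline setup, as the reworked tests above now do
    expect(attachOnly).toBe(true);
  });
});

The saving is one server start and teardown per suite rather than per test; the cost is that tests can no longer assume hook-provided resets, which is why flags such as cfgAttachOnly are now set inside the tests themselves.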
@@ -50,9 +50,8 @@ describe("session store lock (Promise chain mutex)", () => {
       Array.from({ length: N }, (_, i) =>
         updateSessionStore(storePath, async (store) => {
           const entry = store[key] as Record<string, unknown>;
-          // Simulate async work so that without proper serialization
-          // multiple readers would see the same stale value.
-          await sleep(Math.random() * 3);
+          // Keep an async boundary so stale-read races would surface without serialization.
+          await Promise.resolve();
           entry.counter = (entry.counter as number) + 1;
           entry.tag = `writer-${i}`;
         }),
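Replacing sleep(Math.random() * 3) with await Promise.resolve() keeps the property the test needs, an async boundary between read and write, without waiting on timers: a single microtask suspension is already enough for unserialized writers to read the same stale counter. A rough sketch of the idea behind a Promise-chain mutex and the lost-update race it prevents (illustrative code, not the project's updateSessionStore):

// Minimal sketch of a Promise-chain mutex: each caller chains onto the tail
// promise, so critical sections run strictly one after another.
let tail: Promise<void> = Promise.resolve();

function withLock<T>(fn: () => Promise<T>): Promise<T> {
  const run = tail.then(fn);
  // Keep the chain alive even if fn rejects, so later callers still acquire the lock.
  tail = run.then(() => undefined, () => undefined);
  return run;
}

// Without the lock, both increments read 0, yield at the microtask boundary,
// and both write 1 — await Promise.resolve() alone exposes the race.
let counter = 0;
async function unsafeIncrement(): Promise<void> {
  const seen = counter;
  await Promise.resolve(); // async boundary, same as in the test
  counter = seen + 1;
}

async function demo(): Promise<void> {
  await Promise.all([unsafeIncrement(), unsafeIncrement()]);
  console.log(counter); // 1 — lost update
  counter = 0;
  await Promise.all([withLock(unsafeIncrement), withLock(unsafeIncrement)]);
  console.log(counter); // 2 — serialized
}

void demo();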
@@ -74,7 +73,7 @@ describe("session store lock (Promise chain mutex)", () => {
         storePath,
         sessionKey: key,
         update: async () => {
-          await sleep(9);
+          await Promise.resolve();
           return { modelOverride: "model-a" };
         },
       }),
@@ -82,7 +81,7 @@ describe("session store lock (Promise chain mutex)", () => {
         storePath,
         sessionKey: key,
         update: async () => {
-          await sleep(3);
+          await Promise.resolve();
           return { thinkingLevel: "high" as const };
         },
       }),
@@ -90,7 +89,7 @@ describe("session store lock (Promise chain mutex)", () => {
         storePath,
         sessionKey: key,
         update: async () => {
-          await sleep(6);
+          await Promise.resolve();
           return { systemPromptOverride: "custom" };
         },
       }),
@@ -165,17 +164,30 @@ describe("session store lock (Promise chain mutex)", () => {
     });

     const order: string[] = [];
+    let started = 0;
+    let releaseBoth: (() => void) | undefined;
+    const gate = new Promise<void>((resolve) => {
+      releaseBoth = resolve;
+    });
+    const markStarted = () => {
+      started += 1;
+      if (started === 2) {
+        releaseBoth?.();
+      }
+    };

     const opA = updateSessionStore(pathA, async (store) => {
       order.push("a-start");
-      await sleep(12);
+      markStarted();
+      await gate;
       store.a = { ...store.a, modelOverride: "done-a" } as unknown as SessionEntry;
       order.push("a-end");
     });

     const opB = updateSessionStore(pathB, async (store) => {
       order.push("b-start");
-      await sleep(3);
+      markStarted();
+      await gate;
       store.b = { ...store.b, modelOverride: "done-b" } as unknown as SessionEntry;
       order.push("b-end");
     });
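Here the fixed sleeps of 12 ms and 3 ms are replaced by an explicit gate: each operation records that it has started, and both proceed only once the shared promise opens. That turns "the two store files do not serialize each other" from a timing-dependent observation into a structural one; if a lock wrongly spanned both files, the second operation would never start and the gate would never open. A small sketch of the gate idiom, with hypothetical names:

// Sketch of the "gate" idiom used above: a promise that opens only once every
// participant has checked in, replacing wall-clock sleeps.
function createGate(parties: number): { arrive: () => Promise<void> } {
  let arrived = 0;
  let open: (() => void) | undefined;
  const opened = new Promise<void>((resolve) => {
    open = resolve;
  });
  return {
    arrive: () => {
      arrived += 1;
      if (arrived === parties) {
        open?.();
      }
      return opened;
    },
  };
}

// Both tasks must be in flight before either may finish. If a buggy global
// lock serialized them, the second task would never start and this would hang
// instead of passing by luck after a sleep.
async function demo(): Promise<void> {
  const gate = createGate(2);
  const order: string[] = [];
  await Promise.all([
    (async () => {
      order.push("a-start");
      await gate.arrive();
      order.push("a-end");
    })(),
    (async () => {
      order.push("b-start");
      await gate.arrive();
      order.push("b-end");
    })(),
  ]);
  console.log(order); // [ 'a-start', 'b-start', 'a-end', 'b-end' ]
}

void demo();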
@@ -211,7 +223,7 @@ describe("session store lock (Promise chain mutex)", () => {
     });

     // Allow microtask (finally) to run.
-    await sleep(0);
+    await Promise.resolve();

     expect(getSessionStoreLockQueueSizeForTest()).toBe(0);
   });
@@ -223,7 +235,7 @@ describe("session store lock (Promise chain mutex)", () => {
       throw new Error("fail");
     }).catch(() => undefined);

-    await sleep(0);
+    await Promise.resolve();

     expect(getSessionStoreLockQueueSizeForTest()).toBe(0);
   });
@@ -266,21 +278,21 @@ describe("session store lock (Promise chain mutex)", () => {
     const lockHolder = withSessionStoreLockForTest(
       storePath,
       async () => {
-        await sleep(40);
+        await sleep(15);
       },
-      { timeoutMs: 2_000 },
+      { timeoutMs: 1_000 },
     );
     const timedOut = withSessionStoreLockForTest(
       storePath,
       async () => {
         timedOutRan = true;
       },
-      { timeoutMs: 20 },
+      { timeoutMs: 5 },
     );

     await expect(timedOut).rejects.toThrow("timeout waiting for session store lock");
     await lockHolder;
-    await sleep(8);
+    await sleep(2);
     expect(timedOutRan).toBe(false);
   });
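The timeout test keeps the same ordering — the waiter's budget (now 5 ms) expires long before the holder releases (roughly 15 ms) — so only the absolute wait shrinks. The contract being pinned down is that a timed-out waiter rejects with the lock-timeout error and its callback never runs once the lock frees up. A hedged sketch of that contract against a generic Promise-chain lock; withTimedLock is an illustrative stand-in, not the project's helper:

const sleep = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms));

let tail: Promise<void> = Promise.resolve();

function withTimedLock<T>(fn: () => Promise<T> | T, timeoutMs: number): Promise<T> {
  const previous = tail;
  const timedOut = new Promise<never>((_, reject) =>
    setTimeout(() => reject(new Error("timeout waiting for session store lock")), timeoutMs),
  );
  const run = (async () => {
    // Wait for the previous holder, but give up once the budget elapses.
    await Promise.race([previous, timedOut]);
    return await fn();
  })();
  // The next caller proceeds only after the previous holder and this attempt settle.
  tail = Promise.allSettled([previous, run]).then(() => undefined);
  return run;
}

async function demo(): Promise<void> {
  let timedOutRan = false;
  const lockHolder = withTimedLock(() => sleep(15), 1_000); // holds the lock for ~15 ms
  const timedOut = withTimedLock(() => {
    timedOutRan = true;
  }, 5); // gives up after 5 ms, long before the holder releases
  await timedOut.catch((err: Error) => console.log(err.message)); // "timeout waiting for session store lock"
  await lockHolder;
  await sleep(2); // small grace period, as in the test
  console.log(timedOutRan); // false — the timed-out callback never ran
}

void demo();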
@@ -291,7 +303,7 @@ describe("session store lock (Promise chain mutex)", () => {
     });

     const write = updateSessionStore(storePath, async (store) => {
-      await sleep(18);
+      await sleep(8);
       store[key] = { ...store[key], modelOverride: "v" } as unknown as SessionEntry;
     });

@@ -303,7 +315,7 @@ describe("session store lock (Promise chain mutex)", () => {
         lockSeen = true;
         break;
       } catch {
-        await sleep(2);
+        await sleep(1);
       }
     }
     expect(lockSeen).toBe(true);
@@ -11,6 +11,9 @@ let fixtureRoot = "";
 let fixtureFileCount = 0;
 let largeJpegBuffer: Buffer;
 let tinyPngBuffer: Buffer;
+let alphaPngBuffer: Buffer;
+let fallbackPngBuffer: Buffer;
+let fallbackPngCap = 0;

 async function writeTempFile(buffer: Buffer, ext: string): Promise<string> {
   const file = path.join(fixtureRoot, `media-${fixtureFileCount++}${ext}`);
@@ -50,6 +53,29 @@ beforeAll(async () => {
   })
     .png()
     .toBuffer();
+  alphaPngBuffer = await sharp({
+    create: {
+      width: 64,
+      height: 64,
+      channels: 4,
+      background: { r: 255, g: 0, b: 0, alpha: 0.5 },
+    },
+  })
+    .png()
+    .toBuffer();
+  const size = 96;
+  const raw = buildDeterministicBytes(size * size * 4);
+  fallbackPngBuffer = await sharp(raw, { raw: { width: size, height: size, channels: 4 } })
+    .png()
+    .toBuffer();
+  const smallestPng = await optimizeImageToPng(fallbackPngBuffer, 1);
+  fallbackPngCap = Math.max(1, smallestPng.optimizedSize - 1);
+  const jpegOptimized = await optimizeImageToJpeg(fallbackPngBuffer, fallbackPngCap);
+  if (jpegOptimized.buffer.length >= smallestPng.optimizedSize) {
+    throw new Error(
+      `JPEG fallback did not shrink below PNG (jpeg=${jpegOptimized.buffer.length}, png=${smallestPng.optimizedSize})`,
+    );
+  }
 });

 afterAll(async () => {
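The fixture change mirrors the hook change in the first file: the alpha PNG, the deterministic fallback PNG, and the cap just below the smallest PNG the optimizer can produce are now computed once in beforeAll and reused, so each test stops re-encoding images. A small sketch of the same hoisting pattern; the fixture names here are illustrative:

import { beforeAll, describe, expect, it } from "vitest";
import sharp from "sharp";

let solidPng: Buffer;
let alphaPng: Buffer;

beforeAll(async () => {
  // Built once per suite; the sharp encodes are the slow part being hoisted.
  solidPng = await sharp({
    create: { width: 64, height: 64, channels: 3, background: { r: 0, g: 128, b: 255 } },
  })
    .png()
    .toBuffer();
  alphaPng = await sharp({
    create: { width: 64, height: 64, channels: 4, background: { r: 255, g: 0, b: 0, alpha: 0.5 } },
  })
    .png()
    .toBuffer();
});

describe("shared image fixtures", () => {
  it("keeps the alpha channel in the RGBA fixture", async () => {
    const meta = await sharp(alphaPng).metadata();
    expect(meta.hasAlpha).toBe(true);
  });

  it("reuses the same buffer across tests", () => {
    expect(solidPng.length).toBeGreaterThan(0);
  });
});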
@@ -260,18 +286,7 @@ describe("web media loading", () => {
   });

   it("preserves PNG alpha when under the cap", async () => {
-    const buffer = await sharp({
-      create: {
-        width: 64,
-        height: 64,
-        channels: 4,
-        background: { r: 255, g: 0, b: 0, alpha: 0.5 },
-      },
-    })
-      .png()
-      .toBuffer();
-
-    const file = await writeTempFile(buffer, ".png");
+    const file = await writeTempFile(alphaPngBuffer, ".png");

     const result = await loadWebMedia(file, 1024 * 1024);

@@ -282,28 +297,13 @@ describe("web media loading", () => {
   });

   it("falls back to JPEG when PNG alpha cannot fit under cap", async () => {
-    const size = 96;
-    const raw = buildDeterministicBytes(size * size * 4);
-    const pngBuffer = await sharp(raw, { raw: { width: size, height: size, channels: 4 } })
-      .png()
-      .toBuffer();
-    const smallestPng = await optimizeImageToPng(pngBuffer, 1);
-    const cap = Math.max(1, smallestPng.optimizedSize - 1);
-    const jpegOptimized = await optimizeImageToJpeg(pngBuffer, cap);
+    const file = await writeTempFile(fallbackPngBuffer, ".png");

-    if (jpegOptimized.buffer.length >= smallestPng.optimizedSize) {
-      throw new Error(
-        `JPEG fallback did not shrink below PNG (jpeg=${jpegOptimized.buffer.length}, png=${smallestPng.optimizedSize})`,
-      );
-    }
-
-    const file = await writeTempFile(pngBuffer, ".png");
-
-    const result = await loadWebMedia(file, cap);
+    const result = await loadWebMedia(file, fallbackPngCap);

     expect(result.kind).toBe("image");
     expect(result.contentType).toBe("image/jpeg");
-    expect(result.buffer.length).toBeLessThanOrEqual(cap);
+    expect(result.buffer.length).toBeLessThanOrEqual(fallbackPngCap);
   });
 });
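The cap arithmetic is what forces the fallback path: optimizeImageToPng(buffer, 1) yields the smallest PNG the pipeline can produce, and fallbackPngCap sits one byte below it, so no PNG can satisfy the limit and loadWebMedia must re-encode as JPEG; the beforeAll guard throws early if the JPEG would not actually be smaller, so the test cannot pass vacuously. With illustrative numbers:

// Illustrative numbers only: how the cap forces the JPEG fallback path.
const smallestPngSize = 4_200; // hypothetical result of optimizing the PNG as hard as possible
const cap = Math.max(1, smallestPngSize - 1); // 4_199 — no PNG can ever fit
const jpegSize = 2_900; // hypothetical size after the JPEG re-encode

// The beforeAll guard encodes this invariant: the test is only meaningful if
// the JPEG really is smaller than the best PNG, otherwise it throws early.
if (jpegSize >= smallestPngSize) {
  throw new Error("JPEG fallback did not shrink below PNG");
}
console.log(jpegSize <= cap); // true — under this cap the loader should return image/jpeg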