feat: add sharedTexture module to import shared texture (#47317)

feat: add `sharedTexture` module.
This commit is contained in:
reito
2025-11-07 16:50:28 +08:00
committed by GitHub
parent 38be633aa8
commit bab11f8c7b
34 changed files with 1846 additions and 15 deletions

View File

@@ -0,0 +1,281 @@
import { BaseWindow } from 'electron';
import { expect } from 'chai';
import { randomUUID } from 'node:crypto';
import * as path from 'node:path';
import { closeWindow } from './lib/window-helpers';
// Root directory of the spec fixture files used by these tests.
const fixtures = path.resolve(__dirname, 'fixtures');
describe('sharedTexture module', () => {
const {
nativeImage
} = require('electron');
const debugSpec = false;
const dirPath = path.join(fixtures, 'api', 'shared-texture');
const osrPath = path.join(dirPath, 'osr.html');
const imagePath = path.join(dirPath, 'image.png');
const targetImage = nativeImage.createFromPath(imagePath);
describe('import shared texture produced by osr', () => {
const {
app,
BrowserWindow,
sharedTexture,
ipcMain
} = require('electron');
afterEach(async () => {
  // Detach every spec-installed IPC handler, then close each window a test
  // left open, so specs stay isolated from one another.
  ipcMain.removeAllListeners();
  const openWindows = BaseWindow.getAllWindows();
  for (const openWindow of openWindows) {
    await closeWindow(openWindow);
  }
});
it('successfully imported and rendered with subtle api', (done) => {
  // Main-process side of each in-flight transfer, keyed by a UUID, so the
  // texture can be released once the renderer reports its GPU work is done.
  type CapturedTextureHolder = {
    importedSubtle: Electron.SharedTextureImportedSubtle,
    texture: Electron.OffscreenSharedTexture
  }
  const capturedTextures = new Map<string, CapturedTextureHolder>();
  const preloadPath = path.join(dirPath, 'subtle', 'preload.js');
  const htmlPath = path.join(dirPath, 'subtle', 'index.html');
  const createWindow = () => {
    // Window that receives the texture and renders it with WebGPU
    // (hidden unless debugSpec is set).
    const win = new BrowserWindow({
      width: 256,
      height: 256,
      show: debugSpec,
      webPreferences: {
        preload: preloadPath
      }
    });
    // Offscreen window producing shared-texture frames via the OSR pipeline.
    const osr = new BrowserWindow({
      width: 128,
      height: 128,
      show: debugSpec,
      webPreferences: {
        offscreen: {
          useSharedTexture: true
        }
      }
    });
    osr.webContents.setFrameRate(1);
    osr.webContents.on('paint', (event: any) => {
      // Step 1: Input source of shared texture handle.
      const texture = event.texture;
      if (!texture) {
        console.error('No texture, GPU may be unavailable, skipping.');
        done();
        return;
      }
      // Step 2: Import as SharedTextureImported
      console.log(texture.textureInfo);
      const importedSubtle = sharedTexture.subtle.importSharedTexture(texture.textureInfo);
      // Step 3: Prepare for transfer to another process (win's renderer)
      const transfer = importedSubtle.startTransferSharedTexture();
      const id = randomUUID();
      capturedTextures.set(id, { importedSubtle, texture });
      // Step 4: Send the shared texture to the renderer process (goto preload.js)
      win.webContents.send('shared-texture', id, transfer);
    });
    ipcMain.on('shared-texture-done', (event: any, id: string) => {
      // Step 12: Release the shared texture resources at main process
      const data = capturedTextures.get(id);
      if (data) {
        capturedTextures.delete(id);
        const { importedSubtle, texture } = data;
        // Step 13: Release the imported shared texture
        importedSubtle.release(() => {
          // Step 14: Release the shared texture once GPU is done
          texture.release();
        });
        // Step 15: Slightly timeout and capture the node screenshot
        setTimeout(async () => {
          // Step 16: Compare the captured image with the target image.
          // The rect matches the 16px-offset canvas created in common.js.
          const captured = await win.webContents.capturePage({
            x: 16,
            y: 16,
            width: 128,
            height: 128
          });
          // Step 17: Resize the target image to match the captured image size, in case dpr != 1
          const target = targetImage.resize({ ...captured.getSize() });
          // Step 18: nativeImage have error comparing pixel data when color space is different,
          // send to browser for comparison using canvas.
          win.webContents.send('verify-captured-image', {
            captured: captured.toDataURL(),
            target: target.toDataURL()
          });
        }, 300);
      }
    });
    ipcMain.on('verify-captured-image-done', (event: any, result: { difference: number, total: number }) => {
      // Step 22: Verify the result from renderer process
      try {
        // macOS may have tiny color difference after the whole rendering process,
        // and the color may change slightly when resizing at device pixel ratio != 1.
        // Limit error should not be different more than 1% of the whole image.
        const ratio = result.difference / result.total;
        console.log('image difference: ', ratio);
        expect(ratio).to.be.lessThan(0.01);
        done();
      } catch (e) {
        done(e);
      }
    });
    ipcMain.on('webgpu-unavailable', () => {
      // Renderer could not initialize WebGPU; end the spec without failing.
      console.error('WebGPU is not available, skipping.');
      done();
    });
    win.loadFile(htmlPath);
    osr.loadFile(osrPath);
  };
  app.whenReady().then(() => {
    createWindow();
  });
}).timeout(debugSpec ? 100000 : 10000);
// Shared driver for the managed-API specs. Renders a fixture page in an
// offscreen window, imports the resulting shared texture with the managed
// sharedTexture API, transfers it to `win`'s renderer (or to its subframe
// when `iframe` is true) and verifies that the displayed pixels match the
// source image within a 1% tolerance.
const runSharedTextureManagedTest = (done: Mocha.Done, iframe: boolean) => {
  const preloadPath = path.join(dirPath, 'managed', 'preload.js');
  const htmlPath = path.join(dirPath, 'managed', iframe ? 'frame.html' : 'index.html');
  const createWindow = () => {
    // Window that receives the texture and renders it with WebGPU.
    const win = new BrowserWindow({
      width: 256,
      height: 256,
      show: debugSpec,
      webPreferences: {
        preload: preloadPath,
        nodeIntegrationInSubFrames: iframe
      }
    });
    // Offscreen window producing shared-texture frames via the OSR pipeline.
    const osr = new BrowserWindow({
      width: 128,
      height: 128,
      show: debugSpec,
      webPreferences: {
        offscreen: {
          useSharedTexture: true
        }
      }
    });
    osr.webContents.setFrameRate(1);
    osr.webContents.on('paint', async (event: any) => {
      const targetFrame = iframe ? win.webContents.mainFrame.frames[0] : win.webContents.mainFrame;
      if (!targetFrame) {
        done(new Error('Target frame not found'));
        return;
      }
      // Step 1: Input source of shared texture handle.
      const texture = event.texture;
      if (!texture) {
        console.error('No texture, GPU may be unavailable, skipping.');
        done();
        return;
      }
      // Step 2: Import as SharedTextureImported
      console.log(texture.textureInfo);
      const imported = sharedTexture.importSharedTexture({
        textureInfo: texture.textureInfo,
        allReferencesReleased: () => {
          // Release the shared texture source once GPU is done.
          // Will be called when all processes have finished using the shared texture.
          texture.release();
          // Slightly timeout and capture the node screenshot
          setTimeout(async () => {
            // Compare the captured image with the target image.
            // The rect matches the 16px-offset canvas created in common.js.
            const captured = await win.webContents.capturePage({
              x: 16,
              y: 16,
              width: 128,
              height: 128
            });
            // Resize the target image to match the captured image size, in case dpr != 1
            const target = targetImage.resize({ ...captured.getSize() });
            // nativeImage has errors comparing pixel data when color spaces differ,
            // so send to the browser for comparison using canvas.
            targetFrame.send('verify-captured-image', {
              captured: captured.toDataURL(),
              target: target.toDataURL()
            });
          }, 300);
        }
      });
      // Step 3: Transfer to another process (win's renderer)
      await sharedTexture.sendSharedTexture({
        frame: iframe ? targetFrame : win.webContents.mainFrame,
        importedSharedTexture: imported
      });
      // Step 4: Release the imported and wait for signal to release the source
      imported.release();
    });
    ipcMain.on('verify-captured-image-done', (event: any, result: { difference: number, total: number }) => {
      // Verify the result from renderer process
      try {
        // macOS may have tiny color difference after the whole rendering process,
        // and the color may change slightly when resizing at device pixel ratio != 1.
        // Limit error should not be different more than 1% of the whole image.
        const ratio = result.difference / result.total;
        console.log('image difference: ', ratio);
        expect(ratio).to.be.lessThan(0.01);
        done();
      } catch (e) {
        // Report the failure immediately. (This was previously wrapped in a
        // ~16-minute setTimeout — a debugging leftover — which made the spec
        // hit its 10 s timeout and swallow the real assertion error.)
        done(e);
      }
    });
    ipcMain.on('webgpu-unavailable', () => {
      // Renderer could not initialize WebGPU; end the spec without failing.
      console.error('WebGPU is not available, skipping.');
      done();
    });
    win.loadFile(htmlPath);
    osr.loadFile(osrPath);
  };
  app.whenReady().then(() => {
    createWindow();
  });
};
// Managed-API happy path: the texture is sent to the window's main frame.
it('successfully imported and rendered with managed api, without iframe', (done) => {
  runSharedTextureManagedTest(done, false);
}).timeout(debugSpec ? 100000 : 10000);
// Same flow, but the texture is sent to a subframe (requires
// nodeIntegrationInSubFrames so the preload runs in the iframe too).
it('successfully imported and rendered with managed api, with iframe', (done) => {
  runSharedTextureManagedTest(done, true);
}).timeout(debugSpec ? 100000 : 10000);
});
});

View File

@@ -0,0 +1,168 @@
// Compare two data-URL images pixel-by-pixel and report the summed absolute
// per-channel difference (and the maximum possible total) via `result`.
window.verifyCapturedImage = (images, result) => {
  const firstImg = new Image();
  firstImg.src = images.captured;
  firstImg.onload = () => {
    const secondImg = new Image();
    secondImg.src = images.target;
    secondImg.onload = () => {
      // Rasterize both images through one scratch canvas sized to the capture.
      const scratch = document.createElement('canvas');
      scratch.width = firstImg.width;
      scratch.height = firstImg.height;
      const ctx = scratch.getContext('2d');
      const rasterize = (img) => {
        ctx.clearRect(0, 0, scratch.width, scratch.height);
        ctx.drawImage(img, 0, 0);
        return ctx.getImageData(0, 0, scratch.width, scratch.height).data;
      };
      const capturedData = rasterize(firstImg);
      const targetData = rasterize(secondImg);
      // Accumulate |a - b| over every RGBA channel of every pixel.
      let difference = 0;
      for (let i = 0; i < capturedData.length; i++) {
        difference += Math.abs(capturedData[i] - targetData[i]);
      }
      // total = worst case: every channel fully different (255 each).
      result({ difference, total: capturedData.length * 255 });
      scratch.remove();
      firstImg.remove();
      secondImg.remove();
    };
  };
};
// Create a 128x128 WebGPU canvas at (16, 16) — matching the capturePage rect
// used by the specs — and install window.renderFrame, which draws a given
// VideoFrame onto that canvas as a WebGPU external texture. The returned
// promise rejects if no GPU adapter is available, letting the caller report
// "webgpu-unavailable" to the main process.
window.initWebGpu = async () => {
  // Init WebGPU
  const canvas = document.createElement('canvas');
  canvas.width = 128;
  canvas.height = 128;
  canvas.style.width = '128px';
  canvas.style.height = '128px';
  canvas.style.position = 'absolute';
  canvas.style.top = '16px';
  canvas.style.left = '16px';
  document.body.appendChild(canvas);
  const context = canvas.getContext('webgpu');
  // Configure WebGPU context
  const adapter = await navigator.gpu.requestAdapter();
  const device = await adapter.requestDevice();
  const format = navigator.gpu.getPreferredCanvasFormat();
  context.configure({ device, format });
  // Draw `frame` (a VideoFrame) across the whole canvas.
  window.renderFrame = async (frame) => {
    try {
      // Create external texture
      const externalTexture = device.importExternalTexture({ source: frame });
      // Create bind group layout, correctly specifying the external texture type
      const bindGroupLayout = device.createBindGroupLayout({
        entries: [
          {
            binding: 0,
            visibility: window.GPUShaderStage.FRAGMENT,
            externalTexture: {}
          },
          {
            binding: 1,
            visibility: window.GPUShaderStage.FRAGMENT,
            sampler: {}
          }
        ]
      });
      // Create pipeline layout
      const pipelineLayout = device.createPipelineLayout({
        bindGroupLayouts: [bindGroupLayout]
      });
      // Create render pipeline
      const pipeline = device.createRenderPipeline({
        layout: pipelineLayout,
        vertex: {
          // Full-screen quad: two clip-space triangles, no vertex buffer.
          module: device.createShaderModule({
            code: `
@vertex
fn main(@builtin(vertex_index) VertexIndex : u32) -> @builtin(position) vec4<f32> {
  var pos = array<vec2<f32>, 6>(
    vec2<f32>(-1.0, -1.0),
    vec2<f32>(1.0, -1.0),
    vec2<f32>(-1.0, 1.0),
    vec2<f32>(-1.0, 1.0),
    vec2<f32>(1.0, -1.0),
    vec2<f32>(1.0, 1.0)
  );
  return vec4<f32>(pos[VertexIndex], 0.0, 1.0);
}
`
          }),
          entryPoint: 'main'
        },
        fragment: {
          // Samples the external texture; the canvas dimensions are baked
          // into the shader source via template interpolation.
          module: device.createShaderModule({
            code: `
@group(0) @binding(0) var extTex: texture_external;
@group(0) @binding(1) var mySampler: sampler;
@fragment
fn main(@builtin(position) fragCoord: vec4<f32>) -> @location(0) vec4<f32> {
  let texCoord = fragCoord.xy / vec2<f32>(${canvas.width}.0, ${canvas.height}.0);
  return textureSampleBaseClampToEdge(extTex, mySampler, texCoord);
}
`
          }),
          entryPoint: 'main',
          targets: [{ format }]
        },
        primitive: { topology: 'triangle-list' }
      });
      // Create bind group
      const bindGroup = device.createBindGroup({
        layout: bindGroupLayout,
        entries: [
          {
            binding: 0,
            resource: externalTexture
          },
          {
            binding: 1,
            resource: device.createSampler()
          }
        ]
      });
      // Create command encoder and render pass
      const commandEncoder = device.createCommandEncoder();
      const textureView = context.getCurrentTexture().createView();
      const renderPass = commandEncoder.beginRenderPass({
        colorAttachments: [
          {
            view: textureView,
            clearValue: { r: 0.0, g: 0.0, b: 0.0, a: 1.0 },
            loadOp: 'clear',
            storeOp: 'store'
          }
        ]
      });
      // Set pipeline and bind group
      renderPass.setPipeline(pipeline);
      renderPass.setBindGroup(0, bindGroup);
      renderPass.draw(6); // Draw a rectangle composed of two triangles
      renderPass.end();
      // Submit commands
      device.queue.submit([commandEncoder.finish()]);
    } catch (error) {
      console.error('Rendering error:', error);
    }
  };
};

Binary file not shown.

After

Width:  |  Height:  |  Size: 873 B

View File

@@ -0,0 +1,12 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
</head>
<body style="margin: 0;">
<!-- Hosts index.html in a subframe so the spec can exercise
     sendSharedTexture targeting a non-main frame. -->
<iframe src="index.html" style="width: 100%; height: 100%; border: none;"></iframe>
</body>
</html>

View File

@@ -0,0 +1,14 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Hello World!</title>
<!-- common.js sets up the WebGPU canvas; renderer.js wires the received
     shared textures (managed API) into it. -->
<script src="../common.js" defer></script>
<script src="renderer.js" defer></script>
</head>
<body>
</body>
</html>

View File

@@ -0,0 +1,28 @@
const { sharedTexture } = require('electron');
const { contextBridge, ipcRenderer } = require('electron/renderer');

// Isolated-world bridge used by the managed shared-texture spec.
const texturesBridge = {
  onSharedTexture (cb) {
    // Step 0: Register the receiver for transferred shared texture
    sharedTexture.setSharedTextureReceiver(async (data) => {
      // Step 5: Receive the imported shared texture
      const { importedSharedTexture: imported } = data;
      await cb(imported);
      // Release once rendering is done. No release callback is needed: the
      // managed utility tracks GPU completion via sync tokens automatically.
      imported.release();
    });
  },
  webGpuUnavailable () {
    ipcRenderer.send('webgpu-unavailable');
  },
  verifyCapturedImage (verify) {
    // Run the page-side pixel diff and forward its result to the main process.
    ipcRenderer.on('verify-captured-image', (e, images) => {
      verify(images, (result) => ipcRenderer.send('verify-captured-image-done', result));
    });
  }
};

contextBridge.exposeInMainWorld('textures', texturesBridge);

View File

@@ -0,0 +1,21 @@
// Set up WebGPU first; if that fails, ask the main process to skip the spec.
window.initWebGpu().catch((err) => {
  console.error('Failed to initialize WebGPU:', err);
  window.textures.webGpuUnavailable();
});

// Steps 6-8: for each received texture, wrap it in a VideoFrame, draw it
// with WebGPU, then close the frame since it is no longer needed.
window.textures.onSharedTexture(async (imported) => {
  try {
    const videoFrame = imported.getVideoFrame();
    await window.renderFrame(videoFrame);
    videoFrame.close();
  } catch (error) {
    console.error('Error getting VideoFrame:', error);
  }
});

// Hook the shared pixel-diff helper up to verification requests.
window.textures.verifyCapturedImage(window.verifyCapturedImage);

View File

@@ -0,0 +1,13 @@
<!DOCTYPE html>
<html lang="en">
<!-- Fixed: metadata was wrapped in <header> (a body-level sectioning element)
     instead of <head>, and <img> (a void element) had an invalid </img> tag. -->
<head>
<meta charset="UTF-8">
<title>Hello World!</title>
</head>
<body>
<!-- 128x128 source image pinned to the top-left; the offscreen capture of
     this page is the shared texture the specs transfer and verify. -->
<img src="image.png" style="width: 128px; height: 128px; position: absolute; left: 0; top: 0;">
</body>
</html>

View File

@@ -0,0 +1,14 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Hello World!</title>
<!-- common.js sets up the WebGPU canvas; renderer.js wires the received
     shared textures (subtle API) into it. -->
<script src="../common.js" defer></script>
<script src="renderer.js" defer></script>
</head>
<body>
</body>
</html>

View File

@@ -0,0 +1,30 @@
const { sharedTexture } = require('electron');
const { contextBridge, ipcRenderer } = require('electron/renderer');

// Isolated-world bridge used by the subtle shared-texture spec.
const texturesBridge = {
  onSharedTexture (cb) {
    ipcRenderer.on('shared-texture', async (e, id, transfer) => {
      // Step 5: Get the shared texture from the transfer
      const importedSubtle = sharedTexture.subtle.finishTransferSharedTexture(transfer);
      // Step 6: Let the renderer render using WebGPU
      await cb(id, importedSubtle);
      // Step 10: Release the shared texture with a callback
      importedSubtle.release(() => {
        // Step 11: Once the GPU command buffer is done, tell the main
        // process it may release its side of the texture.
        ipcRenderer.send('shared-texture-done', id);
      });
    });
  },
  webGpuUnavailable () {
    ipcRenderer.send('webgpu-unavailable');
  },
  verifyCapturedImage (verify) {
    // Run the page-side pixel diff and forward its result to the main process.
    ipcRenderer.on('verify-captured-image', (e, images) => {
      verify(images, (result) => ipcRenderer.send('verify-captured-image-done', result));
    });
  }
};

contextBridge.exposeInMainWorld('textures', texturesBridge);

View File

@@ -0,0 +1,21 @@
// Set up WebGPU first; if that fails, ask the main process to skip the spec.
window.initWebGpu().catch((err) => {
  console.error('Failed to initialize WebGPU:', err);
  window.textures.webGpuUnavailable();
});

// Steps 7-9: for each received texture, wrap it in a VideoFrame, draw it
// with WebGPU, then close the frame since it is no longer needed.
window.textures.onSharedTexture(async (id, importedSubtle) => {
  try {
    const videoFrame = importedSubtle.getVideoFrame();
    await window.renderFrame(videoFrame);
    videoFrame.close();
  } catch (error) {
    console.error('Error getting VideoFrame:', error);
  }
});

// Hook the shared pixel-diff helper up to verification requests.
window.textures.verifyCapturedImage(window.verifyCapturedImage);