feat(webcodecs): add a WebGL renderer

Simon Chan 2024-03-12 21:18:55 +08:00
parent c7549ad6c8
commit 28d650f63a
No known key found for this signature in database
GPG key ID: A8B69F750B9BCEDD
4 changed files with 159 additions and 35 deletions


@@ -0,0 +1,24 @@
import type { FrameRenderer } from "./renderer.js";

export class BitmapFrameRenderer implements FrameRenderer {
    #context: ImageBitmapRenderingContext;

    constructor(canvas: HTMLCanvasElement) {
        this.#context = canvas.getContext("bitmaprenderer", { alpha: false })!;
    }

    draw(frame: VideoFrame): void {
        createImageBitmap(frame)
            .then((bitmap) => {
                this.#context.transferFromImageBitmap(bitmap);
                frame.close();
            })
            .catch((e) => {
                console.warn(
                    "[@yume-chan/scrcpy-decoder-webcodecs]",
                    "VideoDecoder error",
                    e,
                );
            });
    }
}


@@ -14,6 +14,10 @@ import type {
 } from "@yume-chan/scrcpy-decoder-tinyh264";
 import { WritableStream } from "@yume-chan/stream-extra";
+import { BitmapFrameRenderer } from "./bitmap.js";
+import type { FrameRenderer } from "./renderer.js";
+import { WebGLFrameRenderer } from "./webgl.js";
 
 function toHex(value: number) {
     return value.toString(16).padStart(2, "0").toUpperCase();
 }
@@ -48,9 +52,9 @@ export class WebCodecsDecoder implements ScrcpyVideoDecoder {
         return this.#writable;
     }
 
-    #renderer: HTMLCanvasElement;
+    #canvas: HTMLCanvasElement;
     get renderer() {
-        return this.#renderer;
+        return this.#canvas;
     }
 
     #frameRendered = 0;
@@ -68,9 +72,9 @@ export class WebCodecsDecoder implements ScrcpyVideoDecoder {
         return this.#sizeChanged.event;
     }
 
-    #context: ImageBitmapRenderingContext;
     #decoder: VideoDecoder;
     #config: Uint8Array | undefined;
+    #renderer: FrameRenderer;
 
     #currentFrameRendered = false;
     #animationFrameId = 0;
@@ -78,38 +82,30 @@ export class WebCodecsDecoder implements ScrcpyVideoDecoder {
     constructor(codec: ScrcpyVideoCodecId) {
         this.#codec = codec;
 
-        this.#renderer = document.createElement("canvas");
-        this.#context = this.#renderer.getContext("bitmaprenderer", {
-            alpha: false,
-        })!;
+        this.#canvas = document.createElement("canvas");
+
+        try {
+            this.#renderer = new WebGLFrameRenderer(this.#canvas);
+        } catch {
+            this.#renderer = new BitmapFrameRenderer(this.#canvas);
+        }
 
         this.#decoder = new VideoDecoder({
             output: (frame) => {
-                createImageBitmap(frame)
-                    .then((bitmap) => {
-                        if (this.#currentFrameRendered) {
-                            this.#frameRendered += 1;
-                        } else {
-                            this.#frameSkipped += 1;
-                        }
-                        this.#currentFrameRendered = false;
+                if (this.#currentFrameRendered) {
+                    this.#frameRendered += 1;
+                } else {
+                    this.#frameSkipped += 1;
+                }
+                this.#currentFrameRendered = false;
 
-                        // PERF: H.264 renderer may draw multiple frames in one vertical sync interval to minimize latency.
-                        // When multiple frames are drawn in one vertical sync interval,
-                        // only the last one is visible to users.
-                        // But this ensures users can always see the most up-to-date screen.
-                        // This is also the behavior of official Scrcpy client.
-                        // https://github.com/Genymobile/scrcpy/issues/3679
-                        this.#context.transferFromImageBitmap(bitmap);
-                        frame.close();
-                    })
-                    .catch((e) => {
-                        console.warn(
-                            "[@yume-chan/scrcpy-decoder-webcodecs]",
-                            "createImageBitmap error",
-                            e,
-                        );
-                    });
+                // PERF: H.264 renderer may draw multiple frames in one vertical sync interval to minimize latency.
+                // When multiple frames are drawn in one vertical sync interval,
+                // only the last one is visible to users.
+                // But this ensures users can always see the most up-to-date screen.
+                // This is also the behavior of official Scrcpy client.
+                // https://github.com/Genymobile/scrcpy/issues/3679
+                this.#renderer.draw(frame);
             },
             error(e) {
                 console.warn(
@ -152,8 +148,8 @@ export class WebCodecsDecoder implements ScrcpyVideoDecoder {
croppedHeight, croppedHeight,
} = h264ParseConfiguration(data); } = h264ParseConfiguration(data);
this.#renderer.width = croppedWidth; this.#canvas.width = croppedWidth;
this.#renderer.height = croppedHeight; this.#canvas.height = croppedHeight;
this.#sizeChanged.fire({ this.#sizeChanged.fire({
width: croppedWidth, width: croppedWidth,
height: croppedHeight, height: croppedHeight,
@@ -182,8 +178,8 @@ export class WebCodecsDecoder implements ScrcpyVideoDecoder {
             croppedHeight,
         } = h265ParseConfiguration(data);
 
-        this.#renderer.width = croppedWidth;
-        this.#renderer.height = croppedHeight;
+        this.#canvas.width = croppedWidth;
+        this.#canvas.height = croppedHeight;
         this.#sizeChanged.fire({
             width: croppedWidth,
             height: croppedHeight,


@@ -0,0 +1,3 @@
export interface FrameRenderer {
draw(frame: VideoFrame): void;
}
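
The FrameRenderer interface is the extension point that both renderers in this commit plug into. For illustration only (not part of the commit; the class name is hypothetical), a renderer backed by the 2D canvas API could satisfy the same contract, since drawImage() accepts a VideoFrame in browsers that implement WebCodecs:

// Hypothetical sketch, not part of this commit: a FrameRenderer built on
// CanvasRenderingContext2D instead of WebGL or ImageBitmap.
import type { FrameRenderer } from "./renderer.js";

export class Canvas2DFrameRenderer implements FrameRenderer {
    #context: CanvasRenderingContext2D;

    constructor(canvas: HTMLCanvasElement) {
        this.#context = canvas.getContext("2d", { alpha: false })!;
    }

    draw(frame: VideoFrame): void {
        // drawImage accepts a VideoFrame where WebCodecs is supported.
        this.#context.drawImage(frame, 0, 0);
        frame.close();
    }
}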


@@ -0,0 +1,101 @@
import type { FrameRenderer } from "./renderer.js";

export class WebGLFrameRenderer implements FrameRenderer {
    static vertexShaderSource = `
        attribute vec2 xy;
        varying highp vec2 uv;
        void main(void) {
            gl_Position = vec4(xy, 0.0, 1.0);
            // Map vertex coordinates (-1 to +1) to UV coordinates (0 to 1).
            // UV coordinates are Y-flipped relative to vertex coordinates.
            uv = vec2((1.0 + xy.x) / 2.0, (1.0 - xy.y) / 2.0);
        }
    `;

    static fragmentShaderSource = `
        varying highp vec2 uv;
        uniform sampler2D texture;
        void main(void) {
            gl_FragColor = texture2D(texture, uv);
        }
    `;

    #context: WebGLRenderingContext;

    constructor(canvas: HTMLCanvasElement) {
        const gl =
            canvas.getContext("webgl2", { alpha: false }) ||
            canvas.getContext("webgl", { alpha: false });
        if (!gl) {
            throw new Error("WebGL not supported");
        }
        this.#context = gl;

        const vertexShader = gl.createShader(gl.VERTEX_SHADER)!;
        gl.shaderSource(vertexShader, WebGLFrameRenderer.vertexShaderSource);
        gl.compileShader(vertexShader);
        if (!gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS)) {
            throw new Error(gl.getShaderInfoLog(vertexShader)!);
        }

        const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER)!;
        gl.shaderSource(
            fragmentShader,
            WebGLFrameRenderer.fragmentShaderSource,
        );
        gl.compileShader(fragmentShader);
        if (!gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS)) {
            throw new Error(gl.getShaderInfoLog(fragmentShader)!);
        }

        const shaderProgram = gl.createProgram()!;
        gl.attachShader(shaderProgram, vertexShader);
        gl.attachShader(shaderProgram, fragmentShader);
        gl.linkProgram(shaderProgram);
        if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
            throw new Error(gl.getProgramInfoLog(shaderProgram)!);
        }
        gl.useProgram(shaderProgram);

        // Vertex coordinates, clockwise from bottom-left.
        const vertexBuffer = gl.createBuffer();
        gl.bindBuffer(gl.ARRAY_BUFFER, vertexBuffer);
        gl.bufferData(
            gl.ARRAY_BUFFER,
            new Float32Array([-1.0, -1.0, -1.0, +1.0, +1.0, +1.0, +1.0, -1.0]),
            gl.STATIC_DRAW,
        );
        const xyLocation = gl.getAttribLocation(shaderProgram, "xy");
        gl.vertexAttribPointer(xyLocation, 2, gl.FLOAT, false, 0, 0);
        gl.enableVertexAttribArray(xyLocation);

        // Create one texture to upload frames to.
        const texture = gl.createTexture();
        gl.bindTexture(gl.TEXTURE_2D, texture);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
    }

    draw(frame: VideoFrame) {
        const gl = this.#context;
        gl.texImage2D(
            gl.TEXTURE_2D,
            0,
            gl.RGBA,
            gl.RGBA,
            gl.UNSIGNED_BYTE,
            frame,
        );
        frame.close();

        gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
        gl.drawArrays(gl.TRIANGLE_FAN, 0, 4);
    }
}
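
For context, a minimal usage sketch of the decoder after this change (not part of the commit; the import paths, the ScrcpyMediaStreamPacket and ReadableStream type names, and the attachVideo helper are assumptions based on identifiers visible in the diff):

// Sketch only: wire a Scrcpy video packet stream into the WebCodecs decoder.
import { ScrcpyVideoCodecId, type ScrcpyMediaStreamPacket } from "@yume-chan/scrcpy";
import type { ReadableStream } from "@yume-chan/stream-extra";
import { WebCodecsDecoder } from "@yume-chan/scrcpy-decoder-webcodecs";

async function attachVideo(
    videoPacketStream: ReadableStream<ScrcpyMediaStreamPacket>,
    container: HTMLElement,
): Promise<void> {
    const decoder = new WebCodecsDecoder(ScrcpyVideoCodecId.H264);

    // `renderer` is the decoder's <canvas>; the constructor tries
    // WebGLFrameRenderer first and falls back to BitmapFrameRenderer
    // when a WebGL context cannot be created.
    container.appendChild(decoder.renderer);

    // Pipe encoded Scrcpy video packets into the decoder.
    await videoPacketStream.pipeTo(decoder.writable);
}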