feat(decoder): support offscreen canvas in tiny h264 decoder

parent ac932cc447
commit 10ed1848f5

6 changed files with 100 additions and 76 deletions
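What the commit enables: both decoders can now render into an `OffscreenCanvas`, so decoding and rendering can move into a Web Worker. A minimal main-thread sketch, assuming a hypothetical worker script and message shape (neither is part of this commit):

// main.ts: hand a page canvas to a worker as an OffscreenCanvas (illustrative)
const canvas = document.querySelector<HTMLCanvasElement>("#video")!;
const offscreen = canvas.transferControlToOffscreen();
const worker = new Worker(new URL("./decode-worker.js", import.meta.url), {
    type: "module",
});
// OffscreenCanvas is transferable; after postMessage the worker owns it.
worker.postMessage({ canvas: offscreen }, [offscreen]);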
@@ -7,8 +7,8 @@ import {
     h264ParseConfiguration,
 } from "@yume-chan/scrcpy";
 import { WritableStream } from "@yume-chan/stream-extra";
-import type { default as YuvBuffer } from "yuv-buffer";
-import type { default as YuvCanvas } from "yuv-canvas";
+import YuvBuffer from "yuv-buffer";
+import YuvCanvas from "yuv-canvas";
 
 import type {
     ScrcpyVideoDecoder,
@@ -21,21 +21,23 @@ const NOOP = () => {
     // no-op
 };
 
-let cachedInitializePromise:
-    | Promise<{ YuvBuffer: typeof YuvBuffer; YuvCanvas: typeof YuvCanvas }>
-    | undefined;
-function initialize() {
-    if (!cachedInitializePromise) {
-        cachedInitializePromise = Promise.all([
-            import("yuv-buffer"),
-            import("yuv-canvas"),
-        ]).then(([YuvBuffer, { default: YuvCanvas }]) => ({
-            YuvBuffer,
-            YuvCanvas,
-        }));
-    }
-    return cachedInitializePromise;
-}
+export interface TinyH264DecoderInit {
+    /**
+     * Optional render target canvas element or offscreen canvas.
+     * If not provided, a new `<canvas>` (when DOM is available)
+     * or a `OffscreenCanvas` will be created.
+     */
+    canvas?: HTMLCanvasElement | OffscreenCanvas | undefined;
+}
+
+export function createCanvas() {
+    if (typeof document !== "undefined") {
+        return document.createElement("canvas");
+    }
+    if (typeof OffscreenCanvas !== "undefined") {
+        return new OffscreenCanvas(0, 0);
+    }
+    throw new Error("no canvas input found nor any canvas can be created");
+}
 
 export class TinyH264Decoder implements ScrcpyVideoDecoder {
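On the worker side, the new `TinyH264DecoderInit` option added above accepts the transferred canvas. If it were omitted, `createCanvas()` would fall back to a fresh `OffscreenCanvas`, since `document` is undefined in a worker. A sketch that continues the hypothetical message shape from the previous example:

// decode-worker.ts: receive the transferred canvas and decode into it (illustrative)
import { TinyH264Decoder } from "@yume-chan/scrcpy-decoder-tinyh264";

self.onmessage = (e: MessageEvent<{ canvas: OffscreenCanvas }>) => {
    const decoder = new TinyH264Decoder({ canvas: e.data.canvas });
    // Pipe the scrcpy video packet stream into decoder.writable here.
};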
@@ -47,7 +49,7 @@ export class TinyH264Decoder implements ScrcpyVideoDecoder {
         },
     };
 
-    #renderer: HTMLCanvasElement;
+    #renderer: HTMLCanvasElement | OffscreenCanvas;
     get renderer() {
         return this.#renderer;
     }
@@ -75,10 +77,12 @@ export class TinyH264Decoder implements ScrcpyVideoDecoder {
     #yuvCanvas: YuvCanvas | undefined;
     #initializer: PromiseResolver<TinyH264Wrapper> | undefined;
 
-    constructor() {
-        void initialize();
-
-        this.#renderer = document.createElement("canvas");
+    constructor({ canvas }: TinyH264DecoderInit = {}) {
+        if (canvas) {
+            this.#renderer = canvas;
+        } else {
+            this.#renderer = createCanvas();
+        }
 
         this.#writable = new WritableStream<ScrcpyMediaStreamPacket>({
             write: async (packet) => {
@@ -104,10 +108,21 @@ export class TinyH264Decoder implements ScrcpyVideoDecoder {
         this.dispose();
 
         this.#initializer = new PromiseResolver<TinyH264Wrapper>();
-        const { YuvBuffer, YuvCanvas } = await initialize();
 
         if (!this.#yuvCanvas) {
-            this.#yuvCanvas = YuvCanvas.attach(this.#renderer);
+            // yuv-canvas detects WebGL support by creating a <canvas> itself
+            // not working in worker
+            const canvas = createCanvas();
+            const attributes: WebGLContextAttributes = {
+                // Disallow software rendering.
+                // Other rendering methods are faster than software-based WebGL.
+                failIfMajorPerformanceCaveat: true,
+            };
+            const gl =
+                canvas.getContext("webgl2", attributes) ||
+                canvas.getContext("webgl", attributes);
+            this.#yuvCanvas = YuvCanvas.attach(this.#renderer, {
+                webGL: !!gl,
+            });
         }
 
         const {
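The probe inlined above exists because yuv-canvas's own WebGL detection creates a `<canvas>` through the DOM, which fails in a worker. Extracted into a standalone helper (hypothetical name, same attributes as in the hunk), the pattern looks like this:

// Hypothetical helper mirroring the probe above: requesting a context with
// failIfMajorPerformanceCaveat set returns null when only software WebGL exists.
function probeWebGL(canvas: HTMLCanvasElement | OffscreenCanvas): boolean {
    const attributes: WebGLContextAttributes = {
        failIfMajorPerformanceCaveat: true,
    };
    const gl =
        canvas.getContext("webgl2", attributes) ||
        canvas.getContext("webgl", attributes);
    return !!gl;
}

// Usage: probeWebGL(createCanvas()) works on the main thread and in workers.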
@@ -145,8 +145,15 @@ declare module "yuv-buffer" {
 declare module "yuv-canvas" {
     import type { YUVFrame } from "yuv-buffer";
 
+    export interface YUVCanvasOptions {
+        webGL?: boolean | undefined;
+    }
+
     export default class YUVCanvas {
-        static attach(canvas: HTMLCanvasElement): YUVCanvas;
+        static attach(
+            canvas: HTMLCanvasElement | OffscreenCanvas,
+            options: YUVCanvasOptions,
+        ): YUVCanvas;
 
         drawFrame(data: YUVFrame): void;
     }
@@ -4,14 +4,29 @@ import { ScrcpyVideoCodecId } from "@yume-chan/scrcpy";
 import type {
     ScrcpyVideoDecoder,
     ScrcpyVideoDecoderCapability,
+    TinyH264DecoderInit,
 } from "@yume-chan/scrcpy-decoder-tinyh264";
+import { createCanvas } from "@yume-chan/scrcpy-decoder-tinyh264";
 import type { WritableStreamDefaultController } from "@yume-chan/stream-extra";
 import { WritableStream } from "@yume-chan/stream-extra";
 
 import { Av1Codec, H264Decoder, H265Decoder } from "./codec/index.js";
 import type { CodecDecoder } from "./codec/type.js";
-import type { FrameRenderer } from "./render/index.js";
-import { BitmapFrameRenderer, WebGLFrameRenderer } from "./render/index.js";
+import type { FrameSink } from "./render/index.js";
+import { BitmapFrameSink, WebGLFrameSink } from "./render/index.js";
 
+export interface WebCodecsVideoDecoderInit extends TinyH264DecoderInit {
+    /**
+     * The video codec to decode
+     */
+    codec: ScrcpyVideoCodecId;
+
+    /**
+     * Whether to allow capturing the canvas content using APIs like `readPixels` and `toDataURL`.
+     * Enable this option may reduce performance.
+     */
+    enableCapture?: boolean | undefined;
+}
+
 export class WebCodecsVideoDecoder implements ScrcpyVideoDecoder {
     static isSupported() {
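Since the webcodecs decoder's constructor now takes a single `WebCodecsVideoDecoderInit` object (see the constructor hunk below), main-thread usage would look roughly like the following sketch; the codec choice and the cast to `HTMLCanvasElement` are illustrative:

import { ScrcpyVideoCodecId } from "@yume-chan/scrcpy";
import { WebCodecsVideoDecoder } from "@yume-chan/scrcpy-decoder-webcodecs";

const decoder = new WebCodecsVideoDecoder({
    codec: ScrcpyVideoCodecId.H264,
    // enableCapture: true would set preserveDrawingBuffer on the WebGL context.
    enableCapture: false,
});
// On the main thread, renderer defaults to a newly created <canvas>.
document.body.appendChild(decoder.renderer as HTMLCanvasElement);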
@@ -36,9 +51,9 @@ export class WebCodecsVideoDecoder implements ScrcpyVideoDecoder {
         return this.#writable;
     }
 
-    #canvas: HTMLCanvasElement | OffscreenCanvas;
+    #renderer: HTMLCanvasElement | OffscreenCanvas;
     get renderer() {
-        return this.#canvas;
+        return this.#renderer;
     }
 
     #frameRendered = 0;
@@ -57,45 +72,30 @@ export class WebCodecsVideoDecoder implements ScrcpyVideoDecoder {
     }
 
     #decoder: VideoDecoder;
-    #renderer: FrameRenderer;
+    #frameSink: FrameSink;
 
     #currentFrameRendered = false;
     #animationFrameId = 0;
 
-    /**
-     * Create a new WebCodecs video decoder.
-     * @param codec The video codec to decode
-     * @param enableCapture
-     * Whether to allow capturing the canvas content using APIs like `readPixels` and `toDataURL`.
-     * Enable this option may reduce performance.
-     * @param canvas Optional render target cavas element or offscreen canvas
-     */
-    constructor(
-        codec: ScrcpyVideoCodecId,
-        enableCapture: boolean,
-        canvas?: HTMLCanvasElement | OffscreenCanvas,
-    ) {
+    constructor({ codec, canvas, enableCapture }: WebCodecsVideoDecoderInit) {
         this.#codec = codec;
 
         if (canvas) {
-            this.#canvas = canvas;
-        } else if (typeof document !== "undefined") {
-            this.#canvas = document.createElement("canvas");
-        } else if (typeof OffscreenCanvas !== "undefined") {
-            this.#canvas = new OffscreenCanvas(0, 0);
+            this.#renderer = canvas;
         } else {
-            throw new Error(
-                "no canvas input found nor any canvas can be created",
-            );
+            this.#renderer = createCanvas();
         }
 
         try {
-            this.#renderer = new WebGLFrameRenderer(
-                this.#canvas,
-                enableCapture,
+            this.#frameSink = new WebGLFrameSink(
+                this.#renderer,
+                !!enableCapture,
             );
         } catch {
-            this.#renderer = new BitmapFrameRenderer(this.#canvas);
+            this.#frameSink = new BitmapFrameSink(this.#renderer);
         }
 
         this.#decoder = new VideoDecoder({
@@ -114,7 +114,7 @@ export class WebCodecsVideoDecoder implements ScrcpyVideoDecoder {
                 // This is also the behavior of official Scrcpy client.
                 // https://github.com/Genymobile/scrcpy/issues/3679
                 this.#updateSize(frame.displayWidth, frame.displayHeight);
-                this.#renderer.draw(frame);
+                this.#frameSink.draw(frame);
             },
             error(e) {
                 if (controller) {
@@ -170,9 +170,12 @@ export class WebCodecsVideoDecoder implements ScrcpyVideoDecoder {
     }
 
     #updateSize = (width: number, height: number) => {
-        if (width !== this.#canvas.width || height !== this.#canvas.height) {
-            this.#canvas.width = width;
-            this.#canvas.height = height;
+        if (
+            width !== this.#renderer.width ||
+            height !== this.#renderer.height
+        ) {
+            this.#renderer.width = width;
+            this.#renderer.height = height;
             this.#sizeChanged.fire({
                 width: width,
                 height: height,
@@ -1,6 +1,6 @@
-import type { FrameRenderer } from "./type.js";
+import type { FrameSink } from "./type.js";
 
-export class BitmapFrameRenderer implements FrameRenderer {
+export class BitmapFrameSink implements FrameSink {
     #context: ImageBitmapRenderingContext;
 
     constructor(canvas: HTMLCanvasElement | OffscreenCanvas) {
@@ -1,3 +1,3 @@
-export interface FrameRenderer {
+export interface FrameSink {
     draw(frame: VideoFrame): void;
 }
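The renamed `FrameSink` interface is a single `draw` method, so alternative sinks are straightforward. A hypothetical 2D-canvas sink, written as if it lived next to the built-in sinks; it is not part of this commit:

import type { FrameSink } from "./type.js";

// Hypothetical sink drawing each VideoFrame through a 2D context.
// The caller remains responsible for closing the frame after draw() returns.
export class Canvas2DFrameSink implements FrameSink {
    #context: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D;

    constructor(canvas: HTMLCanvasElement | OffscreenCanvas) {
        const context = canvas.getContext("2d") as
            | CanvasRenderingContext2D
            | OffscreenCanvasRenderingContext2D
            | null;
        if (!context) {
            throw new Error("2d context not supported");
        }
        this.#context = context;
    }

    draw(frame: VideoFrame): void {
        // drawImage accepts a VideoFrame directly in browsers with WebCodecs.
        this.#context.drawImage(frame, 0, 0);
    }
}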
@@ -1,6 +1,6 @@
-import type { FrameRenderer } from "./type.js";
+import type { FrameSink } from "./type.js";
 
-export class WebGLFrameRenderer implements FrameRenderer {
+export class WebGLFrameSink implements FrameSink {
     static vertexShaderSource = `
         attribute vec2 xy;
@@ -37,34 +37,33 @@ export class WebGLFrameRenderer implements FrameRenderer {
         canvas: HTMLCanvasElement | OffscreenCanvas,
         enableCapture: boolean,
     ) {
+        const attributes: WebGLContextAttributes = {
+            // Low-power GPU should be enough for video rendering.
+            powerPreference: "low-power",
+            alpha: false,
+            // Disallow software rendering.
+            // Other rendering methods are faster than software-based WebGL.
+            failIfMajorPerformanceCaveat: true,
+            preserveDrawingBuffer: enableCapture,
+        };
+
         const gl =
-            canvas.getContext("webgl2", {
-                alpha: false,
-                failIfMajorPerformanceCaveat: true,
-                preserveDrawingBuffer: enableCapture,
-            }) ||
-            canvas.getContext("webgl", {
-                alpha: false,
-                failIfMajorPerformanceCaveat: true,
-                preserveDrawingBuffer: enableCapture,
-            });
+            canvas.getContext("webgl2", attributes) ||
+            canvas.getContext("webgl", attributes);
         if (!gl) {
             throw new Error("WebGL not supported");
         }
         this.#context = gl;
 
         const vertexShader = gl.createShader(gl.VERTEX_SHADER)!;
-        gl.shaderSource(vertexShader, WebGLFrameRenderer.vertexShaderSource);
+        gl.shaderSource(vertexShader, WebGLFrameSink.vertexShaderSource);
         gl.compileShader(vertexShader);
         if (!gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS)) {
             throw new Error(gl.getShaderInfoLog(vertexShader)!);
         }
 
         const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER)!;
-        gl.shaderSource(
-            fragmentShader,
-            WebGLFrameRenderer.fragmentShaderSource,
-        );
+        gl.shaderSource(fragmentShader, WebGLFrameSink.fragmentShaderSource);
         gl.compileShader(fragmentShader);
         if (!gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS)) {
             throw new Error(gl.getShaderInfoLog(fragmentShader)!);
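One design note on the hunk above: WebGL context attributes such as `preserveDrawingBuffer` (driven by `enableCapture`) and `failIfMajorPerformanceCaveat` can only be set at context creation time, so hoisting them into a single `WebGLContextAttributes` object shared by the `webgl2` and `webgl` probes keeps the two fallback paths identical by construction.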