mirror of https://github.com/yume-chan/ya-webadb.git
synced 2025-10-03 09:49:24 +02:00

feat(webcodecs): add a renderer based on Insertable Stream API

parent 66a98f89ba
commit 8e4c1ef963

11 changed files with 348 additions and 128 deletions
.changeset/good-steaks-play.md (new file, 5 lines)
@@ -0,0 +1,5 @@
+---
+"@yume-chan/scrcpy-decoder-webcodecs": patch
+---
+
+Add a renderer based on Insertable Stream API
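For orientation, here is a minimal usage sketch of the API this commit moves to: the decoder no longer creates its own canvas, so callers construct a renderer first and pass it through the new `renderer` option shown in the hunks below. This is only a sketch; it assumes the classes below are exported from the package root, that `ScrcpyMediaStreamPacket` comes from `@yume-chan/scrcpy`, that the codec enum member is named `H264`, and `videoPacketStream` is a hypothetical stand-in for an existing Scrcpy video packet stream.

import type { ScrcpyMediaStreamPacket } from "@yume-chan/scrcpy";
import { ScrcpyVideoCodecId } from "@yume-chan/scrcpy";
import {
    InsertableStreamWebCodecsDecoderRenderer,
    WebCodecsVideoDecoder,
} from "@yume-chan/scrcpy-decoder-webcodecs";

// Hypothetical: however you already obtain the Scrcpy video packet stream.
declare const videoPacketStream: ReadableStream<ScrcpyMediaStreamPacket>;

// Construct the new Insertable Stream renderer and show its <video> element.
const renderer = new InsertableStreamWebCodecsDecoderRenderer();
document.body.append(renderer.element);

// Pass the renderer to the decoder instead of a canvas.
const decoder = new WebCodecsVideoDecoder({
    codec: ScrcpyVideoCodecId.H264,
    renderer,
});

// Feed decoded packets into the decoder's writable side.
void videoPacketStream.pipeTo(decoder.writable);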
@@ -35,7 +35,7 @@ export function createCanvas() {
         return document.createElement("canvas");
     }
     if (typeof OffscreenCanvas !== "undefined") {
-        return new OffscreenCanvas(0, 0);
+        return new OffscreenCanvas(1, 1);
     }
     throw new Error("no canvas input found nor any canvas can be created");
 }
@@ -11,7 +11,6 @@ export interface ScrcpyVideoDecoderCapability {
 }
 
 export interface ScrcpyVideoDecoder extends Disposable {
-    readonly renderer: HTMLElement | OffscreenCanvas;
     readonly sizeChanged: Event<{ width: number; height: number }>;
     readonly framesRendered: number;
     readonly framesSkipped: number;
@@ -4,32 +4,110 @@ import { ScrcpyVideoCodecId } from "@yume-chan/scrcpy";
 import type {
     ScrcpyVideoDecoder,
     ScrcpyVideoDecoderCapability,
-    TinyH264DecoderInit,
 } from "@yume-chan/scrcpy-decoder-tinyh264";
-import { createCanvas } from "@yume-chan/scrcpy-decoder-tinyh264";
 import type { WritableStreamDefaultController } from "@yume-chan/stream-extra";
 import { WritableStream } from "@yume-chan/stream-extra";
 
 import { Av1Codec, H264Decoder, H265Decoder } from "./codec/index.js";
 import type { CodecDecoder } from "./codec/type.js";
-import type { FrameSink } from "./render/index.js";
-import { BitmapFrameSink, WebGLFrameSink } from "./render/index.js";
+import type { WebCodecsVideoDecoderRenderer } from "./render/index.js";
 
-export interface WebCodecsVideoDecoderInit extends TinyH264DecoderInit {
+class Pool<T> {
+    #controller!: ReadableStreamDefaultController<T>;
+    #readable = new ReadableStream<T>(
+        {
+            start: (controller) => {
+                this.#controller = controller;
+            },
+            pull: (controller) => {
+                controller.enqueue(this.#initializer());
+            },
+        },
+        { highWaterMark: 0 },
+    );
+    #reader = this.#readable.getReader();
+
+    #initializer: () => T;
+
+    #size = 0;
+    #capacity: number;
+
+    constructor(initializer: () => T, capacity: number) {
+        this.#initializer = initializer;
+        this.#capacity = capacity;
+    }
+
+    async borrow() {
+        const result = await this.#reader.read();
+        return result.value!;
+    }
+
+    return(value: T) {
+        if (this.#size < this.#capacity) {
+            this.#controller.enqueue(value);
+            this.#size += 1;
+        }
+    }
+}
+
+class VideoFrameCapturer {
+    #canvas: OffscreenCanvas | HTMLCanvasElement;
+    #context: ImageBitmapRenderingContext;
+
+    constructor() {
+        if (typeof OffscreenCanvas !== "undefined") {
+            this.#canvas = new OffscreenCanvas(1, 1);
+        } else {
+            this.#canvas = document.createElement("canvas");
+            this.#canvas.width = 1;
+            this.#canvas.height = 1;
+        }
+        this.#context = this.#canvas.getContext("bitmaprenderer", {
+            alpha: false,
+        })!;
+    }
+
+    async capture(frame: VideoFrame): Promise<Blob> {
+        this.#canvas.width = frame.displayWidth;
+        this.#canvas.height = frame.displayHeight;
+
+        const bitmap = await createImageBitmap(frame);
+        this.#context.transferFromImageBitmap(bitmap);
+
+        if (this.#canvas instanceof OffscreenCanvas) {
+            return await this.#canvas.convertToBlob({
+                type: "image/png",
+            });
+        } else {
+            return new Promise((resolve, reject) => {
+                (this.#canvas as HTMLCanvasElement).toBlob((blob) => {
+                    if (!blob) {
+                        reject(new Error("Failed to convert canvas to blob"));
+                    } else {
+                        resolve(blob);
+                    }
+                }, "image/png");
+            });
+        }
+    }
+}
+
+const VideoFrameCapturerPool = /*@__PURE__*/ new Pool(
+    () => new VideoFrameCapturer(),
+    4,
+);
+
+export interface WebCodecsVideoDecoderInit {
     /**
      * The video codec to decode
      */
     codec: ScrcpyVideoCodecId;
 
-    /**
-     * Whether to allow capturing the canvas content using APIs like `readPixels` and `toDataURL`.
-     * Enable this option may reduce performance.
-     */
-    enableCapture?: boolean | undefined;
+    renderer: WebCodecsVideoDecoderRenderer;
 }
 
 export class WebCodecsVideoDecoder implements ScrcpyVideoDecoder {
-    static isSupported() {
+    static get isSupported() {
         return typeof globalThis.VideoDecoder !== "undefined";
     }
 
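The `Pool` helper added above is generic, so a tiny illustration may help: instances are created lazily the first time `borrow()` pulls from the backing stream, and `return()` re-enqueues an instance for reuse until the capacity cap is reached. A sketch of how the class behaves if called directly (it is module-private in the diff, and the pooled element type here is a throwaway stand-in for `VideoFrameCapturer`):

// Hypothetical element type, pooled with a cap of 2 retained instances.
const pool = new Pool(() => ({ scratch: new Uint8Array(16) }), 2);

async function useOnce() {
    const item = await pool.borrow(); // created on demand, or reused if one was returned
    item.scratch.fill(0);             // ... use it ...
    pool.return(item);                // kept for later borrows while under capacity
}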
@@ -37,6 +115,7 @@ export class WebCodecsVideoDecoder implements ScrcpyVideoDecoder {
         {
             h264: {},
             h265: {},
+            av1: {},
         };
 
     #codec: ScrcpyVideoCodecId;
|
||||||
return this.#writable;
|
return this.#writable;
|
||||||
}
|
}
|
||||||
|
|
||||||
#renderer: HTMLCanvasElement | OffscreenCanvas;
|
#error: Error | undefined;
|
||||||
|
#controller!: WritableStreamDefaultController;
|
||||||
|
|
||||||
|
#renderer: WebCodecsVideoDecoderRenderer;
|
||||||
get renderer() {
|
get renderer() {
|
||||||
return this.#renderer;
|
return this.#renderer;
|
||||||
}
|
}
|
||||||
|
|
||||||
#frameRendered = 0;
|
#framesDraw = 0;
|
||||||
|
#framesPresented = 0;
|
||||||
get framesRendered() {
|
get framesRendered() {
|
||||||
return this.#frameRendered;
|
return this.#framesPresented;
|
||||||
}
|
}
|
||||||
|
|
||||||
#frameSkipped = 0;
|
#framesSkipped = 0;
|
||||||
get framesSkipped() {
|
get framesSkipped() {
|
||||||
return this.#frameSkipped;
|
return this.#framesSkipped;
|
||||||
}
|
}
|
||||||
|
|
||||||
#sizeChanged = new EventEmitter<{ width: number; height: number }>();
|
#sizeChanged = new EventEmitter<{ width: number; height: number }>();
|
||||||
|
@@ -72,61 +155,40 @@ export class WebCodecsVideoDecoder implements ScrcpyVideoDecoder {
     }
 
     #decoder: VideoDecoder;
-    #frameSink: FrameSink;
 
-    #currentFrameRendered = false;
+    #drawing = false;
+    #nextFrame: VideoFrame | undefined;
+    #captureFrame: VideoFrame | undefined;
 
     #animationFrameId = 0;
 
     /**
      * Create a new WebCodecs video decoder.
      */
-    constructor({ codec, canvas, enableCapture }: WebCodecsVideoDecoderInit) {
+    constructor({ codec, renderer }: WebCodecsVideoDecoderInit) {
         this.#codec = codec;
 
-        if (canvas) {
-            this.#renderer = canvas;
-        } else {
-            this.#renderer = createCanvas();
-        }
-
-        try {
-            this.#frameSink = new WebGLFrameSink(
-                this.#renderer,
-                !!enableCapture,
-            );
-        } catch {
-            this.#frameSink = new BitmapFrameSink(this.#renderer);
-        }
+        this.#renderer = renderer;
 
         this.#decoder = new VideoDecoder({
             output: (frame) => {
-                if (this.#currentFrameRendered) {
-                    this.#frameRendered += 1;
-                } else {
-                    this.#frameSkipped += 1;
-                }
-                this.#currentFrameRendered = false;
-
-                // PERF: Draw every frame to minimize latency at cost of performance.
-                // When multiple frames are drawn in one vertical sync interval,
-                // only the last one is visible to users.
-                // But this ensures users can always see the most up-to-date screen.
-                // This is also the behavior of official Scrcpy client.
-                // https://github.com/Genymobile/scrcpy/issues/3679
-                this.#updateSize(frame.displayWidth, frame.displayHeight);
-                this.#frameSink.draw(frame);
-            },
-            error(e) {
-                if (controller) {
-                    try {
-                        controller.error(e);
-                    } catch {
-                        // ignore
-                        // `controller` may already in error state
+                this.#captureFrame?.close();
+                // PERF: `VideoFrame#clone` is cheap
+                this.#captureFrame = frame.clone();
+
+                if (this.#drawing) {
+                    if (this.#nextFrame) {
+                        this.#nextFrame.close();
+                        this.#framesSkipped += 1;
                     }
-                } else {
-                    error = e;
+                    this.#nextFrame = frame;
+                    return;
                 }
+
+                void this.#draw(frame);
+            },
+            error: (error) => {
+                this.#setError(error);
             },
         });
 
@@ -151,14 +213,12 @@ export class WebCodecsVideoDecoder implements ScrcpyVideoDecoder {
                 break;
         }
 
-        let error: Error | undefined;
-        let controller: WritableStreamDefaultController | undefined;
         this.#writable = new WritableStream<ScrcpyMediaStreamPacket>({
-            start: (_controller) => {
-                if (error) {
-                    _controller.error(error);
+            start: (controller) => {
+                if (this.#error) {
+                    controller.error(this.#error);
                 } else {
-                    controller = _controller;
+                    this.#controller = controller;
                 }
             },
             write: (packet) => {
@@ -166,32 +226,79 @@ export class WebCodecsVideoDecoder implements ScrcpyVideoDecoder {
             },
         });
 
-        this.#onFramePresented();
+        this.#onVerticalSync();
+    }
+
+    #setError(error: Error) {
+        if (this.#controller) {
+            try {
+                this.#controller.error(error);
+            } catch {
+                // ignore
+            }
+        } else {
+            this.#error = error;
+        }
+    }
+
+    async #draw(frame: VideoFrame) {
+        try {
+            this.#drawing = true;
+            // PERF: Draw every frame to minimize latency at cost of performance.
+            // When multiple frames are drawn in one vertical sync interval,
+            // only the last one is visible to users.
+            // But this ensures users can always see the most up-to-date screen.
+            // This is also the behavior of official Scrcpy client.
+            // https://github.com/Genymobile/scrcpy/issues/3679
+            this.#updateSize(frame.displayWidth, frame.displayHeight);
+            await this.#renderer.draw(frame);
+            this.#framesDraw += 1;
+            frame.close();
+
+            if (this.#nextFrame) {
+                const frame = this.#nextFrame;
+                this.#nextFrame = undefined;
+                await this.#draw(frame);
+            }
+
+            this.#drawing = false;
+        } catch (error) {
+            this.#setError(error as Error);
+        }
     }
 
     #updateSize = (width: number, height: number) => {
-        if (
-            width !== this.#renderer.width ||
-            height !== this.#renderer.height
-        ) {
-            this.#renderer.width = width;
-            this.#renderer.height = height;
-            this.#sizeChanged.fire({
-                width: width,
-                height: height,
-            });
-        }
+        this.#renderer.setSize(width, height);
+        this.#sizeChanged.fire({ width, height });
     };
 
-    #onFramePresented = () => {
-        this.#currentFrameRendered = true;
-        this.#animationFrameId = requestAnimationFrame(this.#onFramePresented);
+    #onVerticalSync = () => {
+        if (this.#framesDraw > 0) {
+            this.#framesPresented += 1;
+            this.#framesSkipped += this.#framesDraw - 1;
+            this.#framesDraw = 0;
+        }
+        this.#animationFrameId = requestAnimationFrame(this.#onVerticalSync);
     };
 
+    async snapshot() {
+        const frame = this.#captureFrame;
+        if (!frame) {
+            return undefined;
+        }
+
+        const capturer = await VideoFrameCapturerPool.borrow();
+        const result = await capturer.capture(frame);
+        VideoFrameCapturerPool.return(capturer);
+        return result;
+    }
+
     dispose() {
         cancelAnimationFrame(this.#animationFrameId);
         if (this.#decoder.state !== "closed") {
             this.#decoder.close();
         }
+        this.#nextFrame?.close();
+        this.#captureFrame?.close();
     }
 }
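The new `snapshot()` method above resolves to a PNG `Blob` of the most recently decoded frame, or `undefined` before any frame has been decoded. A small sketch of how a caller might save it; the `decoder` variable and the download-link plumbing are illustrative, not part of this commit:

// Sketch: save the latest decoded frame as screenshot.png.
const blob = await decoder.snapshot();
if (blob) {
    // Standard browser pattern: temporary object URL plus a synthetic <a> click.
    const url = URL.createObjectURL(blob);
    const link = document.createElement("a");
    link.href = url;
    link.download = "screenshot.png";
    link.click();
    URL.revokeObjectURL(url);
}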
@@ -1,24 +1,18 @@
-import type { FrameSink } from "./type.js";
+import { CanvasWebCodecsVideoDecoderRenderer } from "./canvas.js";
 
-export class BitmapFrameSink implements FrameSink {
+export class BitmapWebCodecsDecoderRenderer extends CanvasWebCodecsVideoDecoderRenderer {
     #context: ImageBitmapRenderingContext;
 
-    constructor(canvas: HTMLCanvasElement | OffscreenCanvas) {
-        this.#context = canvas.getContext("bitmaprenderer", { alpha: false })!;
+    constructor(canvas?: HTMLCanvasElement | OffscreenCanvas) {
+        super(canvas);
+
+        this.#context = this.canvas.getContext("bitmaprenderer", {
+            alpha: false,
+        })!;
     }
 
-    draw(frame: VideoFrame): void {
-        createImageBitmap(frame)
-            .then((bitmap) => {
-                this.#context.transferFromImageBitmap(bitmap);
-                frame.close();
-            })
-            .catch((e) => {
-                console.warn(
-                    "[@yume-chan/scrcpy-decoder-webcodecs]",
-                    "VideoDecoder error",
-                    e,
-                );
-            });
+    async draw(frame: VideoFrame): Promise<void> {
+        const bitmap = await createImageBitmap(frame);
+        this.#context.transferFromImageBitmap(bitmap);
     }
 }
@@ -0,0 +1,29 @@
+import { createCanvas } from "@yume-chan/scrcpy-decoder-tinyh264";
+
+import type { WebCodecsVideoDecoderRenderer } from "./type.js";
+
+export abstract class CanvasWebCodecsVideoDecoderRenderer
+    implements WebCodecsVideoDecoderRenderer
+{
+    #canvas: HTMLCanvasElement | OffscreenCanvas;
+    get canvas() {
+        return this.#canvas;
+    }
+
+    constructor(canvas?: HTMLCanvasElement | OffscreenCanvas) {
+        if (canvas) {
+            this.#canvas = canvas;
+        } else {
+            this.#canvas = createCanvas();
+        }
+    }
+
+    setSize(width: number, height: number): void {
+        if (this.#canvas.width !== width || this.#canvas.height !== height) {
+            this.#canvas.width = width;
+            this.#canvas.height = height;
+        }
+    }
+
+    abstract draw(frame: VideoFrame): Promise<void>;
+}
@@ -1,3 +1,7 @@
+// cspell: ignore insertable
+
 export * from "./bitmap.js";
+export * from "./canvas.js";
+export * from "./insertable-stream.js";
 export * from "./type.js";
 export * from "./webgl.js";
@@ -0,0 +1,62 @@
+// cspell: ignore insertable
+
+import type { WebCodecsVideoDecoderRenderer } from "./type.js";
+
+declare class MediaStreamTrackGenerator extends MediaStreamTrack {
+    constructor(options: { kind: "audio" | "video" });
+
+    writable: WritableStream<VideoFrame>;
+}
+
+export class InsertableStreamWebCodecsDecoderRenderer
+    implements WebCodecsVideoDecoderRenderer
+{
+    static get isSupported() {
+        return typeof MediaStreamTrackGenerator !== "undefined";
+    }
+
+    #element: HTMLVideoElement;
+    get element() {
+        return this.#element;
+    }
+
+    #generator: MediaStreamTrackGenerator;
+    #writer: WritableStreamDefaultWriter<VideoFrame>;
+    #stream: MediaStream;
+
+    constructor(element?: HTMLVideoElement) {
+        if (element) {
+            this.#element = element;
+        } else if (typeof document !== "undefined") {
+            this.#element = document.createElement("video");
+        } else {
+            throw new Error(
+                "no video element input found nor any video element can be created",
+            );
+        }
+        this.#element.muted = true;
+        this.#element.autoplay = true;
+        this.#element.disablePictureInPicture = true;
+        this.#element.disableRemotePlayback = true;
+
+        // The spec replaced `MediaStreamTrackGenerator` with `VideoTrackGenerator`.
+        // But Chrome has not implemented it yet.
+        // https://issues.chromium.org/issues/40058895
+        this.#generator = new MediaStreamTrackGenerator({ kind: "video" });
+        this.#writer = this.#generator.writable.getWriter();
+
+        this.#stream = new MediaStream([this.#generator]);
+        this.#element.srcObject = this.#stream;
+    }
+
+    setSize(width: number, height: number): void {
+        if (this.#element.width !== width || this.#element.height !== height) {
+            this.#element.width = width;
+            this.#element.height = height;
+        }
+    }
+
+    async draw(frame: VideoFrame): Promise<void> {
+        await this.#writer.write(frame);
+    }
+}
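Stripped of the class wrapper, the renderer above rests on the Insertable Streams for MediaStreamTrack API: each decoded `VideoFrame` is written into a `MediaStreamTrackGenerator`, whose track feeds a `MediaStream` attached to a `<video>` element, and the browser's normal video pipeline handles presentation. A standalone sketch of that path (Chromium-only at the time of this commit, and it assumes the `MediaStreamTrackGenerator` declaration from the file above is in scope):

// Core pipeline only; sizing and error handling omitted.
const generator = new MediaStreamTrackGenerator({ kind: "video" });
const writer = generator.writable.getWriter();

const video = document.createElement("video");
video.muted = true;
video.autoplay = true;
video.srcObject = new MediaStream([generator]);
document.body.append(video);

// For every decoded frame (e.g. inside a VideoDecoder `output` callback):
// await writer.write(frame);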
@@ -1,3 +1,5 @@
-export interface FrameSink {
-    draw(frame: VideoFrame): void;
+export interface WebCodecsVideoDecoderRenderer {
+    setSize(width: number, height: number): void;
+
+    draw(frame: VideoFrame): Promise<void>;
 }
@@ -1,6 +1,30 @@
-import type { FrameSink } from "./type.js";
+import { createCanvas } from "@yume-chan/scrcpy-decoder-tinyh264";
 
-export class WebGLFrameSink implements FrameSink {
+import { CanvasWebCodecsVideoDecoderRenderer } from "./canvas.js";
+
+const Resolved = Promise.resolve();
+
+function createContext(
+    canvas: HTMLCanvasElement | OffscreenCanvas,
+    enableCapture?: boolean,
+): WebGLRenderingContext | null {
+    const attributes: WebGLContextAttributes = {
+        // Low-power GPU should be enough for video rendering.
+        powerPreference: "low-power",
+        alpha: false,
+        // Disallow software rendering.
+        // Other rendering methods are faster than software-based WebGL.
+        failIfMajorPerformanceCaveat: true,
+        preserveDrawingBuffer: !!enableCapture,
+    };
+
+    return (
+        canvas.getContext("webgl2", attributes) ||
+        canvas.getContext("webgl", attributes)
+    );
+}
+
+export class WebGLWebCodecsDecoderRenderer extends CanvasWebCodecsVideoDecoderRenderer {
     static vertexShaderSource = `
         attribute vec2 xy;
 
@@ -24,6 +48,11 @@ export class WebGLFrameSink implements FrameSink {
         }
     `;
 
+    static get isSupported() {
+        const canvas = createCanvas();
+        return !!createContext(canvas);
+    }
+
     #context: WebGLRenderingContext;
 
     /**
@@ -34,36 +63,32 @@ export class WebGLFrameSink implements FrameSink {
      * Enable this option may reduce performance.
      */
     constructor(
-        canvas: HTMLCanvasElement | OffscreenCanvas,
-        enableCapture: boolean,
+        canvas?: HTMLCanvasElement | OffscreenCanvas,
+        enableCapture?: boolean,
     ) {
-        const attributes: WebGLContextAttributes = {
-            // Low-power GPU should be enough for video rendering.
-            powerPreference: "low-power",
-            alpha: false,
-            // Disallow software rendering.
-            // Other rendering methods are faster than software-based WebGL.
-            failIfMajorPerformanceCaveat: true,
-            preserveDrawingBuffer: enableCapture,
-        };
+        super(canvas);
 
-        const gl =
-            canvas.getContext("webgl2", attributes) ||
-            canvas.getContext("webgl", attributes);
+        const gl = createContext(this.canvas, enableCapture);
         if (!gl) {
             throw new Error("WebGL not supported");
         }
         this.#context = gl;
 
         const vertexShader = gl.createShader(gl.VERTEX_SHADER)!;
-        gl.shaderSource(vertexShader, WebGLFrameSink.vertexShaderSource);
+        gl.shaderSource(
+            vertexShader,
+            WebGLWebCodecsDecoderRenderer.vertexShaderSource,
+        );
         gl.compileShader(vertexShader);
         if (!gl.getShaderParameter(vertexShader, gl.COMPILE_STATUS)) {
             throw new Error(gl.getShaderInfoLog(vertexShader)!);
         }
 
         const fragmentShader = gl.createShader(gl.FRAGMENT_SHADER)!;
-        gl.shaderSource(fragmentShader, WebGLFrameSink.fragmentShaderSource);
+        gl.shaderSource(
+            fragmentShader,
+            WebGLWebCodecsDecoderRenderer.fragmentShaderSource,
+        );
         gl.compileShader(fragmentShader);
         if (!gl.getShaderParameter(fragmentShader, gl.COMPILE_STATUS)) {
             throw new Error(gl.getShaderInfoLog(fragmentShader)!);
@@ -100,7 +125,7 @@ export class WebGLFrameSink implements FrameSink {
         gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
     }
 
-    draw(frame: VideoFrame) {
+    draw(frame: VideoFrame): Promise<void> {
         const gl = this.#context;
         gl.texImage2D(
             gl.TEXTURE_2D,
@@ -110,9 +135,10 @@ export class WebGLFrameSink implements FrameSink {
             gl.UNSIGNED_BYTE,
             frame,
         );
-        frame.close();
 
         gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
         gl.drawArrays(gl.TRIANGLE_FAN, 0, 4);
+
+        return Resolved;
     }
 }
@@ -48,7 +48,7 @@ async function findTests(path) {
 await findTests(resolve(process.cwd(), "esm"));
 
 const test = run({
-    concurrency: true,
+    concurrency: false,
     files: tests,
 });
 test.on("test:fail", () => {
@@ -124,11 +124,3 @@ filterCoverage
     .pipe(lcov)
     // @ts-expect-error
     .pipe(createWriteStream(resolve(coverageFolder, "lcov.info")));
-
-// run({
-//     concurrency: false,
-//     files: tests,
-// })
-//     // @ts-expect-error
-//     .pipe(Lcov)
-//     .pipe(createWriteStream(resolve(coverageFolder, "lcov.info")));