Initialization

2025-07-30 13:39:32 +08:00
commit d1f2452b28
253 changed files with 32087 additions and 0 deletions

vendor/tinyh264/Canvas.ts vendored Normal file

@@ -0,0 +1,4 @@
export default abstract class Canvas {
    constructor(protected readonly canvas: HTMLCanvasElement) {}

    public abstract decode(buffer: Uint8Array, width: number, height: number): void;
}
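This abstract base is implemented twice later in this commit: YUVWebGLCanvas (GPU path) and YUVCanvas (2D-canvas software path). A minimal sketch of how a caller might choose between them at runtime; the factory itself is not part of this commit and the selection logic is an assumption:

import Canvas from './Canvas';
import YUVWebGLCanvas from './YUVWebGLCanvas';
import YUVCanvas from './YUVCanvas';

// Hypothetical factory: try the WebGL renderer first and fall back to
// the 2D-canvas software converter if WebGL initialization throws.
function createRenderer(el: HTMLCanvasElement): Canvas {
    try {
        return new YUVWebGLCanvas(el);
    } catch {
        return new YUVCanvas(el);
    }
}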


@@ -0,0 +1,3 @@
import { init } from 'tinyh264';
init();

vendor/tinyh264/LICENSE vendored Normal file

@@ -0,0 +1,13 @@
Copyright (c) 2019 Erik De Rijcke

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

vendor/tinyh264/README.md vendored Normal file

@@ -0,0 +1,3 @@
Based on demo code from [udevbe/tinyh264](https://github.com/udevbe/tinyh264/tree/caf7142/demo)
See [License](LICENSE)

vendor/tinyh264/ShaderCompiler.ts vendored Normal file

@@ -0,0 +1,39 @@
/**
 * Represents a WebGL shader object and provides a mechanism to load shaders from HTML
 * script tags.
 */
export default class ShaderCompiler {
    /**
     * @param {WebGLRenderingContext} gl
     * @param {{type: string, source: string}} script
     * @return {WebGLShader}
     */
    static compile(gl: WebGLRenderingContext, script: { type: string; source: string }): WebGLShader | null {
        let shader: WebGLShader | null;
        // Now figure out what type of shader script we have, based on its MIME type.
        if (script.type === 'x-shader/x-fragment') {
            shader = gl.createShader(gl.FRAGMENT_SHADER);
        } else if (script.type === 'x-shader/x-vertex') {
            shader = gl.createShader(gl.VERTEX_SHADER);
        } else {
            throw new Error('Unknown shader type: ' + script.type);
        }
        if (!shader) {
            throw new Error('Failed to create shader');
        }
        // Send the source to the shader object.
        gl.shaderSource(shader, script.source);
        // Compile the shader program.
        gl.compileShader(shader);
        // See if it compiled successfully.
        if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
            throw new Error('An error occurred compiling the shaders: ' + gl.getShaderInfoLog(shader));
        }
        return shader;
    }
}

vendor/tinyh264/ShaderProgram.ts vendored Normal file

@@ -0,0 +1,64 @@
export default class ShaderProgram {
    public program: WebGLProgram | null;

    /**
     * @param {WebGLRenderingContext} gl
     */
    constructor(private gl: WebGLRenderingContext) {
        this.program = this.gl.createProgram();
    }

    /**
     * @param {WebGLShader} shader
     */
    attach(shader: WebGLShader): void {
        if (!this.program) {
            throw Error(`Program type is ${typeof this.program}`);
        }
        this.gl.attachShader(this.program, shader);
    }

    link(): void {
        if (!this.program) {
            throw Error(`Program type is ${typeof this.program}`);
        }
        this.gl.linkProgram(this.program);
        // If creating the shader program failed, alert.
        if (!this.gl.getProgramParameter(this.program, this.gl.LINK_STATUS)) {
            console.error('Unable to initialize the shader program.');
        }
    }

    use(): void {
        this.gl.useProgram(this.program);
    }

    /**
     * @param {string} name
     * @return {number}
     */
    getAttributeLocation(name: string): number {
        if (!this.program) {
            throw Error(`Program type is ${typeof this.program}`);
        }
        return this.gl.getAttribLocation(this.program, name);
    }

    /**
     * @param {string} name
     * @return {WebGLUniformLocation | null}
     */
    getUniformLocation(name: string): WebGLUniformLocation | null {
        if (!this.program) {
            throw Error(`Program type is ${typeof this.program}`);
        }
        return this.gl.getUniformLocation(this.program, name);
    }

    /**
     * @param {WebGLUniformLocation} uniformLocation
     * @param {Array<number>} array
     */
    setUniformM4(uniformLocation: WebGLUniformLocation, array: number[]): void {
        this.gl.uniformMatrix4fv(uniformLocation, false, array);
    }
}
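Taken together, the compile/attach/link flow mirrors what YUVSurfaceShader._initShaders does further down in this commit; a standalone sketch, assuming an existing WebGLRenderingContext named gl:

import ShaderCompiler from './ShaderCompiler';
import ShaderProgram from './ShaderProgram';
import { vertexQuad, fragmentYUV } from './ShaderSources';

declare const gl: WebGLRenderingContext; // assumed to exist

// Compile both stages, link them into one program, and make it current.
const program = new ShaderProgram(gl);
program.attach(ShaderCompiler.compile(gl, vertexQuad) as WebGLShader);
program.attach(ShaderCompiler.compile(gl, fragmentYUV) as WebGLShader);
program.link();
program.use();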

vendor/tinyh264/ShaderSources.ts vendored Normal file

@@ -0,0 +1,50 @@
/**
 * @type {{type: string, source: string}}
 */
export const vertexQuad = {
    type: 'x-shader/x-vertex',
    source: `
        precision mediump float;

        uniform mat4 u_projection;
        attribute vec2 a_position;
        attribute vec2 a_texCoord;
        varying vec2 v_texCoord;

        void main(){
            v_texCoord = a_texCoord;
            gl_Position = u_projection * vec4(a_position, 0.0, 1.0);
        }
    `,
};

/**
 * @type {{type: string, source: string}}
 */
export const fragmentYUV = {
    type: 'x-shader/x-fragment',
    source: `
        precision lowp float;

        varying vec2 v_texCoord;

        uniform sampler2D yTexture;
        uniform sampler2D uTexture;
        uniform sampler2D vTexture;

        const mat4 conversion = mat4(
            1.0,  0.0,    1.402, -0.701,
            1.0, -0.344, -0.714,  0.529,
            1.0,  1.772,  0.0,   -0.886,
            0.0,  0.0,    0.0,    0.0
        );

        void main(void) {
            float yChannel = texture2D(yTexture, v_texCoord).x;
            float uChannel = texture2D(uTexture, v_texCoord).x;
            float vChannel = texture2D(vTexture, v_texCoord).x;
            vec4 channels = vec4(yChannel, uChannel, vChannel, 1.0);
            vec3 rgb = (channels * conversion).xyz;
            gl_FragColor = vec4(rgb, 1.0);
        }
    `,
};
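A note on the matrix: GLSL mat4 constructors are column-major, and `channels * conversion` treats channels as a row vector, so each output component is the dot product of (y, u, v, 1) with one matrix column. Expanding the columns recovers a full-range BT.601-style conversion with chroma centered at 0.5. A CPU re-statement of the same arithmetic, as a sketch for verification only:

// The shader's `channels * conversion` expanded on the CPU.
// y, u, v are texture samples normalized to [0, 1].
function yuvToRgb(y: number, u: number, v: number): [number, number, number] {
    const r = y + 1.402 * (v - 0.5);                     // = y + 1.402*v - 0.701
    const g = y - 0.344 * (u - 0.5) - 0.714 * (v - 0.5); // = y - 0.344*u - 0.714*v + 0.529
    const b = y + 1.772 * (u - 0.5);                     // = y + 1.772*u - 0.886
    return [r, g, b];
}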

vendor/tinyh264/YUVCanvas.ts vendored Normal file

@@ -0,0 +1,45 @@
import Canvas from './Canvas';

export default class YUVCanvas extends Canvas {
    private canvasCtx: CanvasRenderingContext2D;
    private canvasBuffer: ImageData | null = null;

    constructor(canvas: HTMLCanvasElement) {
        super(canvas);
        this.canvasCtx = this.canvas.getContext('2d') as CanvasRenderingContext2D;
    }

    public decode(buffer: Uint8Array, width: number, height: number): void {
        if (!buffer) {
            return;
        }
        if (!this.canvasBuffer) {
            this.canvasBuffer = this.canvasCtx.createImageData(width, height);
        }

        // I420 layout: a full-resolution Y plane followed by quarter-size U and V planes.
        const lumaSize = width * height;
        const chromaSize = lumaSize >> 2;
        const ybuf = buffer.subarray(0, lumaSize);
        const ubuf = buffer.subarray(lumaSize, lumaSize + chromaSize);
        const vbuf = buffer.subarray(lumaSize + chromaSize, lumaSize + 2 * chromaSize);

        for (let y = 0; y < height; y++) {
            for (let x = 0; x < width; x++) {
                const yIndex = x + y * width;
                const uIndex = ~~(y / 2) * ~~(width / 2) + ~~(x / 2);
                const vIndex = ~~(y / 2) * ~~(width / 2) + ~~(x / 2);
                const R = 1.164 * (ybuf[yIndex] - 16) + 1.596 * (vbuf[vIndex] - 128);
                const G = 1.164 * (ybuf[yIndex] - 16) - 0.813 * (vbuf[vIndex] - 128) - 0.391 * (ubuf[uIndex] - 128);
                const B = 1.164 * (ybuf[yIndex] - 16) + 2.018 * (ubuf[uIndex] - 128);
                const rgbIndex = yIndex * 4;
                // ImageData is backed by a Uint8ClampedArray, so out-of-range values are clamped.
                this.canvasBuffer.data[rgbIndex + 0] = R;
                this.canvasBuffer.data[rgbIndex + 1] = G;
                this.canvasBuffer.data[rgbIndex + 2] = B;
                this.canvasBuffer.data[rgbIndex + 3] = 0xff;
            }
        }
        this.canvasCtx.putImageData(this.canvasBuffer, 0, 0);
    }
}
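Note that this software path uses limited-range ("studio swing") BT.601 coefficients (Y scaled by 1.164 after subtracting 16), while the fragment shader above uses a full-range matrix; both are standard conversions. A minimal usage sketch, assuming a decoded I420 frame arrives from elsewhere (the frame variable below is illustrative); unlike the WebGL variant, this class does not resize the canvas itself:

declare const frame: Uint8Array; // assumed: Y plane then U and V planes,
                                 // width*height + 2*(width/2)*(height/2) bytes

const el = document.createElement('canvas');
el.width = 640;
el.height = 480;
document.body.appendChild(el);

const renderer = new YUVCanvas(el);
renderer.decode(frame, 640, 480); // paints one 640x480 frame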

vendor/tinyh264/YUVSurfaceShader.ts vendored Normal file

@@ -0,0 +1,145 @@
import ShaderProgram from './ShaderProgram';
import ShaderCompiler from './ShaderCompiler';
import { fragmentYUV, vertexQuad } from './ShaderSources';
import Texture from '../h264-live-player/Texture';

type ShaderArguments = {
    yTexture: WebGLUniformLocation | null;
    uTexture: WebGLUniformLocation | null;
    vTexture: WebGLUniformLocation | null;
    u_projection: WebGLUniformLocation | null;
    a_position: number;
    a_texCoord: number;
};

export default class YUVSurfaceShader {
    /**
     * @param {WebGLRenderingContext} gl
     * @returns {YUVSurfaceShader}
     */
    static create(gl: WebGLRenderingContext): YUVSurfaceShader {
        const program = this._initShaders(gl);
        const shaderArgs = this._initShaderArgs(gl, program);
        const vertexBuffer = this._initBuffers(gl);
        return new YUVSurfaceShader(gl, vertexBuffer as WebGLBuffer, shaderArgs, program);
    }

    static _initShaders(gl: WebGLRenderingContext): ShaderProgram {
        const program = new ShaderProgram(gl);
        program.attach(ShaderCompiler.compile(gl, vertexQuad) as WebGLShader);
        program.attach(ShaderCompiler.compile(gl, fragmentYUV) as WebGLShader);
        program.link();
        program.use();
        return program;
    }

    static _initShaderArgs(gl: WebGLRenderingContext, program: ShaderProgram): ShaderArguments {
        // find shader arguments
        const shaderArgs: ShaderArguments = {
            yTexture: program.getUniformLocation('yTexture'),
            uTexture: program.getUniformLocation('uTexture'),
            vTexture: program.getUniformLocation('vTexture'),
            u_projection: program.getUniformLocation('u_projection'),
            a_position: program.getAttributeLocation('a_position'),
            a_texCoord: program.getAttributeLocation('a_texCoord'),
        };
        gl.enableVertexAttribArray(shaderArgs.a_position);
        gl.enableVertexAttribArray(shaderArgs.a_texCoord);
        return shaderArgs;
    }

    static _initBuffers(gl: WebGLRenderingContext): WebGLBuffer | null {
        // Create vertex buffer object.
        return gl.createBuffer();
    }

    constructor(
        private gl: WebGLRenderingContext,
        private vertexBuffer: WebGLBuffer,
        private shaderArgs: ShaderArguments,
        private program: ShaderProgram,
    ) {}

    /**
     * @param {Texture} textureY
     * @param {Texture} textureU
     * @param {Texture} textureV
     */
    setTexture(textureY: Texture, textureU: Texture, textureV: Texture): void {
        const gl = this.gl;
        gl.uniform1i(this.shaderArgs.yTexture, 0);
        gl.uniform1i(this.shaderArgs.uTexture, 1);
        gl.uniform1i(this.shaderArgs.vTexture, 2);
        gl.activeTexture(gl.TEXTURE0);
        gl.bindTexture(gl.TEXTURE_2D, textureY.texture);
        gl.activeTexture(gl.TEXTURE1);
        gl.bindTexture(gl.TEXTURE_2D, textureU.texture);
        gl.activeTexture(gl.TEXTURE2);
        gl.bindTexture(gl.TEXTURE_2D, textureV.texture);
    }

    use(): void {
        this.program.use();
    }

    release(): void {
        this.gl.useProgram(null);
    }

    /**
     * @param {{w:number, h:number}} encodedFrameSize
     * @param {{maxXTexCoord:number, maxYTexCoord:number}} h264RenderState
     */
    updateShaderData(
        encodedFrameSize: { w: number; h: number },
        h264RenderState: { maxXTexCoord: number; maxYTexCoord: number },
    ): void {
        const { w, h } = encodedFrameSize;
        this.gl.viewport(0, 0, w, h);
        // Orthographic projection: maps pixel coordinates to clip space,
        // with the Y axis flipped so the origin is the top-left corner.
        // prettier-ignore
        this.program.setUniformM4(this.shaderArgs.u_projection as WebGLUniformLocation, [
            2.0 / w, 0,        0, 0,
            0,       2.0 / -h, 0, 0,
            0,       0,        1, 0,
            -1,      1,        0, 1,
        ]);
        this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.vertexBuffer);
        // Interleaved [x, y, s, t] vertices covering the full frame as two triangles.
        // prettier-ignore
        this.gl.bufferData(this.gl.ARRAY_BUFFER, new Float32Array([
            // First triangle
            // top left:
            0, 0, 0, 0,
            // top right:
            w, 0, h264RenderState.maxXTexCoord, 0,
            // bottom right:
            w, h, h264RenderState.maxXTexCoord, h264RenderState.maxYTexCoord,
            // Second triangle
            // bottom right:
            w, h, h264RenderState.maxXTexCoord, h264RenderState.maxYTexCoord,
            // bottom left:
            0, h, 0, h264RenderState.maxYTexCoord,
            // top left:
            0, 0, 0, 0,
        ]), this.gl.DYNAMIC_DRAW);
        // Stride is 16 bytes (4 floats per vertex); positions at offset 0, texcoords at offset 8.
        this.gl.vertexAttribPointer(this.shaderArgs.a_position, 2, this.gl.FLOAT, false, 16, 0);
        this.gl.vertexAttribPointer(this.shaderArgs.a_texCoord, 2, this.gl.FLOAT, false, 16, 8);
    }

    draw(): void {
        const gl = this.gl;
        gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT | gl.STENCIL_BUFFER_BIT);
        gl.drawArrays(this.gl.TRIANGLE_STRIP, 0, 6);
        gl.bindTexture(gl.TEXTURE_2D, null);
    }
}
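The u_projection array above is column-major, so the transform it applies to a pixel position (x, y) can be written out directly; a sketch for verification only:

// project(0, 0, w, h) -> [-1,  1]  (top-left of clip space)
// project(w, h, w, h) -> [ 1, -1]  (bottom-right of clip space)
function project(x: number, y: number, w: number, h: number): [number, number] {
    return [(2.0 / w) * x - 1.0, (2.0 / -h) * y + 1.0];
}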

vendor/tinyh264/YUVWebGLCanvas.ts vendored Normal file

@@ -0,0 +1,66 @@
/**
 * Based on the tinyh264 demo: https://github.com/udevbe/tinyh264/tree/master/demo
 */
import YUVSurfaceShader from './YUVSurfaceShader';
import Texture from '../h264-live-player/Texture';
import Canvas from './Canvas';

export default class YUVWebGLCanvas extends Canvas {
    private yTexture: Texture;
    private uTexture: Texture;
    private vTexture: Texture;
    private yuvSurfaceShader: YUVSurfaceShader;

    constructor(canvas: HTMLCanvasElement) {
        super(canvas);
        const gl = canvas.getContext('experimental-webgl', {
            preserveDrawingBuffer: true,
        }) as WebGLRenderingContext | null;
        if (!gl) {
            throw new Error('Unable to initialize WebGL. Your browser may not support it.');
        }
        this.yuvSurfaceShader = YUVSurfaceShader.create(gl);
        this.yTexture = Texture.create(gl, gl.LUMINANCE);
        this.uTexture = Texture.create(gl, gl.LUMINANCE);
        this.vTexture = Texture.create(gl, gl.LUMINANCE);
    }

    decode(buffer: Uint8Array, width: number, height: number): void {
        this.canvas.width = width;
        this.canvas.height = height;
        // The decoder returns padded dimensions, so the original frame size would be
        // needed to recover the real image dimensions when uploading to a texture.
        const stride = width;
        // The height is padded with filler rows. If we knew the video size before
        // encoding, we could crop out the black filler pixels; we don't, so we just
        // use the size after encoding.
        const sourceWidth = width;
        const sourceHeight = height;
        const maxXTexCoord = sourceWidth / stride;
        const maxYTexCoord = sourceHeight / height;

        // I420 layout: full-resolution Y plane, then quarter-size U and V planes.
        const lumaSize = stride * height;
        const chromaSize = lumaSize >> 2;
        const yBuffer = buffer.subarray(0, lumaSize);
        const uBuffer = buffer.subarray(lumaSize, lumaSize + chromaSize);
        const vBuffer = buffer.subarray(lumaSize + chromaSize, lumaSize + 2 * chromaSize);
        const chromaHeight = height >> 1;
        const chromaStride = stride >> 1;

        // Upload the entire image, including stride padding and filler rows. The visible
        // image is cropped via texture coordinates (maxXTexCoord, maxYTexCoord).
        this.yTexture.image2dBuffer(yBuffer, stride, height);
        this.uTexture.image2dBuffer(uBuffer, chromaStride, chromaHeight);
        this.vTexture.image2dBuffer(vBuffer, chromaStride, chromaHeight);

        this.yuvSurfaceShader.setTexture(this.yTexture, this.uTexture, this.vTexture);
        this.yuvSurfaceShader.updateShaderData({ w: width, h: height }, { maxXTexCoord, maxYTexCoord });
        this.yuvSurfaceShader.draw();
    }
}
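To tie the pieces together, a minimal wiring sketch. These files only cover rendering; everything about where decoded frames come from is assumed, and onDecodedFrame below is hypothetical:

import YUVWebGLCanvas from './YUVWebGLCanvas';

// Hypothetical callback from whatever drives the H.264 decoder
// (e.g. the tinyh264 worker); not part of this commit.
declare function onDecodedFrame(cb: (data: Uint8Array, w: number, h: number) => void): void;

const el = document.querySelector('canvas') as HTMLCanvasElement;
const renderer = new YUVWebGLCanvas(el);
onDecodedFrame((data, w, h) => renderer.decode(data, w, h));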