I am using the technique described here (code, demo) for using video frames as WebGL textures, combined with a simple scene from here (displaying just a 2D image rather than a 3D spinning cube).
The goal is a Tampermonkey userscript for YouTube (with WebGL shaders, i.e. video effects).
The canvas is filled with gray because of gl.clearColor(0.5, 0.5, 0.5, 1), but the lines of code that should then draw frames from the video have no visible effect. Which part could be wrong? There are no errors.
I tried to shorten the code before posting, but apparently even a simple WebGL scene requires a fair amount of boilerplate.
// ==UserScript==
// @name         tmp
// @namespace    http://tampermonkey.net/
// @version      0.1
// @description  try to take over the world!
// @author       You
// @match        https://www.youtube.com/*
// @icon         https://www.google.com/s2/favicons?domain=youtube.com
// @grant        none
// ==/UserScript==

(function() {
    // will be set to true when the video can be copied to the texture
    var copyVideo = false;

    const video = document.getElementsByTagName("video")[0];

    // immediately after finding the video, create the canvas and set its dimensions
    let canvas = document.createElement('canvas');
    canvas.setAttribute('id', 'glcanvas');
    canvas.setAttribute('width', '300');
    canvas.setAttribute('height', '200');
    canvas.setAttribute('style', 'position: absolute;');
    video.parentElement.appendChild(canvas);

    var playing = false;
    var timeupdate = false;

    // Waiting for these 2 events ensures
    // there is data in the video
    video.addEventListener('playing', function() {
        playing = true;
        checkReady();
    }, true);
    video.addEventListener('timeupdate', function() {
        timeupdate = true;
        checkReady();
    }, true);

    function checkReady() {
        if (playing && timeupdate) {
            copyVideo = true;
        }
    }

    // Initialize the GL context
    const gl = canvas.getContext("webgl");

    // Only continue if WebGL is available and working
    if (gl === null) {
        alert("Unable to initialize WebGL. Your browser or machine may not support it.");
        return;
    }

    // Vertex shader program
    const vsSource = `
        attribute vec2 a_position;
        attribute vec2 a_texCoord;
        uniform vec2 u_resolution;
        varying vec2 v_texCoord;
        void main() {
            // convert the rectangle from pixels to 0.0 to 1.0
            vec2 zeroToOne = a_position / u_resolution;
            // convert from 0->1 to 0->2
            vec2 zeroToTwo = zeroToOne * 2.0;
            // convert from 0->2 to -1->+1 (clipspace)
            vec2 clipSpace = zeroToTwo - 1.0;
            gl_Position = vec4(clipSpace * vec2(1, -1), 0, 1);
            // pass the texCoord to the fragment shader
            // The GPU will interpolate this value between points.
            v_texCoord = a_texCoord;
        }
    `;

    // Fragment shader program
    const fsSource = `
        precision mediump float;
        // our texture
        uniform sampler2D u_image;
        // the texCoords passed in from the vertex shader.
        varying vec2 v_texCoord;
        void main() {
            gl_FragColor = texture2D(u_image, v_texCoord).bgra;
        }
    `;

    // Initialize a shader program, so WebGL knows how to draw our data
    function initShaderProgram(gl, vsSource, fsSource) {
        const vertexShader = loadShader(gl, gl.VERTEX_SHADER, vsSource);
        const fragmentShader = loadShader(gl, gl.FRAGMENT_SHADER, fsSource);

        // Create the shader program
        const shaderProgram = gl.createProgram();
        gl.attachShader(shaderProgram, vertexShader);
        gl.attachShader(shaderProgram, fragmentShader);
        gl.linkProgram(shaderProgram);

        // If creating the shader program failed, alert
        if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
            alert('Unable to initialize the shader program: ' + gl.getProgramInfoLog(shaderProgram));
            return null;
        }
        return shaderProgram;
    }

    // creates a shader of the given type, uploads the source and compiles it.
    function loadShader(gl, type, source) {
        const shader = gl.createShader(type);

        // Send the source to the shader object
        gl.shaderSource(shader, source);

        // Compile the shader program
        gl.compileShader(shader);

        // See if it compiled successfully
        if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
            alert('An error occurred compiling the shaders: ' + gl.getShaderInfoLog(shader));
            gl.deleteShader(shader);
            return null;
        }
        return shader;
    }

    // Initialize a shader program; this is where all the lighting
    // for the vertices and so forth is established.
    const shaderProgram = initShaderProgram(gl, vsSource, fsSource);

    // look up where the vertex data needs to go.
    var positionLocation = gl.getAttribLocation(shaderProgram, "a_position");
    var texcoordLocation = gl.getAttribLocation(shaderProgram, "a_texCoord");

    // Create a buffer to put three 2d clip space points in
    var positionBuffer = gl.createBuffer();

    // Bind it to ARRAY_BUFFER (think of it as ARRAY_BUFFER = positionBuffer)
    gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
    // Set a rectangle the same size as the image.
    setRectangle(gl, 0, 0, video.width, video.height);

    // provide texture coordinates for the rectangle.
    var texcoordBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
        0.0, 0.0,
        1.0, 0.0,
        0.0, 1.0,
        0.0, 1.0,
        1.0, 0.0,
        1.0, 1.0,
    ]), gl.STATIC_DRAW);

    // Create a texture.
    var texture = initTexture(gl);

    function drawScene() {
        // lookup uniforms
        var resolutionLocation = gl.getUniformLocation(shaderProgram, "u_resolution");

        //webglUtils.resizeCanvasToDisplaySize(gl.canvas);

        // Tell WebGL how to convert from clip space to pixels
        gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);

        // Clear the canvas
        gl.clearColor(0.5, 0.5, 0.5, 1);
        gl.clear(gl.COLOR_BUFFER_BIT);

        // Tell it to use our program (pair of shaders)
        gl.useProgram(shaderProgram);

        // Turn on the position attribute
        gl.enableVertexAttribArray(positionLocation);

        // Bind the position buffer.
        gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);

        // Tell the position attribute how to get data out of positionBuffer (ARRAY_BUFFER)
        var size = 2;          // 2 components per iteration
        var type = gl.FLOAT;   // the data is 32bit floats
        var normalize = false; // don't normalize the data
        var stride = 0;        // 0 = move forward size * sizeof(type) each iteration to get the next position
        var offset = 0;        // start at the beginning of the buffer
        gl.vertexAttribPointer(
            positionLocation, size, type, normalize, stride, offset);

        // Turn on the texcoord attribute
        gl.enableVertexAttribArray(texcoordLocation);

        // bind the texcoord buffer.
        gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);

        // Tell the texcoord attribute how to get data out of texcoordBuffer (ARRAY_BUFFER)
        var size = 2;          // 2 components per iteration
        var type = gl.FLOAT;   // the data is 32bit floats
        var normalize = false; // don't normalize the data
        var stride = 0;        // 0 = move forward size * sizeof(type) each iteration to get the next position
        var offset = 0;        // start at the beginning of the buffer
        gl.vertexAttribPointer(
            texcoordLocation, size, type, normalize, stride, offset);

        // set the resolution
        gl.uniform2f(resolutionLocation, gl.canvas.width, gl.canvas.height);

        // Draw the rectangle.
        var primitiveType = gl.TRIANGLES;
        var offset = 0;
        var count = 6;
        gl.drawArrays(primitiveType, offset, count);
    }

    function setRectangle(gl, x, y, width, height) {
        var x1 = x;
        var x2 = x + width;
        var y1 = y;
        var y2 = y + height;
        gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
            x1, y1,
            x2, y1,
            x1, y2,
            x1, y2,
            x2, y1,
            x2, y2,
        ]), gl.STATIC_DRAW);
    }

    var then = 0;

    // Draw the scene repeatedly
    function render(now) {
        now *= 0.001; // convert to seconds
        const deltaTime = now - then;
        then = now;

        if (copyVideo) {
            updateTexture(gl, texture, video);
        }

        drawScene();

        requestAnimationFrame(render);
    }
    requestAnimationFrame(render);

    function initTexture(gl) {
        const texture = gl.createTexture();
        gl.bindTexture(gl.TEXTURE_2D, texture);

        // Because the video has to be downloaded over the internet,
        // it might take a moment until it's ready, so
        // put a single pixel in the texture so we can
        // use it immediately.
        const level = 0;
        const internalFormat = gl.RGBA;
        const width = 1;
        const height = 1;
        const border = 0;
        const srcFormat = gl.RGBA;
        const srcType = gl.UNSIGNED_BYTE;
        const pixel = new Uint8Array([0, 0, 255, 255]); // opaque blue
        gl.texImage2D(gl.TEXTURE_2D, level, internalFormat,
                      width, height, border, srcFormat, srcType,
                      pixel);

        // Turn off mips and set wrapping to clamp to edge so it
        // will work regardless of the dimensions of the video.
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
        gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);

        return texture;
    }

    function updateTexture(gl, texture, video) {
        const level = 0;
        const internalFormat = gl.RGBA;
        const srcFormat = gl.RGBA;
        const srcType = gl.UNSIGNED_BYTE;
        gl.bindTexture(gl.TEXTURE_2D, texture);
        gl.texImage2D(gl.TEXTURE_2D, level, internalFormat,
                      srcFormat, srcType, video);
    }
})();
Accepted answer
Atekihcan correctly pointed out the first problem: you got the NDC coordinate calculation mixed up, and it is actually much easier to send NDC coordinates directly. Moreover, you can derive the texture coordinates from them quite easily, which saves you the setup of a second buffer.
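Concretely, the derivation costs two lines in the vertex shader (equivalent to the lines in the working code below): a clip-space position in [-1, 1] maps to a texture coordinate in [0, 1] via a scale and offset, followed by a y-flip, because clip-space y points up while uploaded image data starts at the top row:

v_texCoord = a_position * 0.5 + 0.5; // map [-1, 1] to [0, 1]
v_texCoord.y = 1.0 - v_texCoord.y;   // flip y so the video is not upside down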
The second problem is that your events did not fire in the chain you expected (at least not for me, reloading on a playing video and running the script). I believe listening for the timeupdate event alone is sufficient, because if the video cannot play, the time will not update. Working code:
// ==UserScript==
// @name         tmp
// @namespace    http://tampermonkey.net/
// @version      0.1
// @description  try to take over the world!
// @author       You
// @match        https://www.youtube.com/*
// @icon         https://www.google.com/s2/favicons?domain=youtube.com
// @grant        none
// ==/UserScript==

(function() {
    // will be set to true when the video can be copied to the texture
    var copyVideo = false;

    const video = document.getElementsByTagName("video")[0];

    // immediately after finding the video, create the canvas and set its dimensions
    let canvas = document.createElement('canvas');
    canvas.setAttribute('id', 'glcanvas');
    canvas.setAttribute('width', '300');
    canvas.setAttribute('height', '200');
    canvas.setAttribute('style', 'position: absolute;');
    video.parentElement.appendChild(canvas);

    video.addEventListener('timeupdate', function() {
        copyVideo = true;
    }, true);

    // Initialize the GL context
    const gl = canvas.getContext("webgl");

    // Only continue if WebGL is available and working
    if (gl === null) {
        alert("Unable to initialize WebGL. Your browser or machine may not support it.");
        return;
    }

    // Vertex shader program
    const vsSource = `
        attribute vec2 a_position;
        varying vec2 v_texCoord;
        void main() {
            gl_Position = vec4(a_position, 0.0, 1.0);
            v_texCoord = a_position * .5 + .5;
            v_texCoord.y = 1. - v_texCoord.y;
        }
    `;

    // Fragment shader program
    const fsSource = `
        precision mediump float;
        uniform sampler2D u_image;
        varying vec2 v_texCoord;
        void main() {
            gl_FragColor = texture2D(u_image, v_texCoord);
        }
    `;

    const positionData = new Float32Array([
        -1.0, -1.0,
         1.0, -1.0,
        -1.0,  1.0,
         1.0, -1.0,
         1.0,  1.0,
        -1.0,  1.0
    ]);

    // Initialize a shader program, so WebGL knows how to draw our data
    function initShaderProgram(gl, vsSource, fsSource) {
        const shaderProgram = gl.createProgram();
        gl.attachShader(shaderProgram, loadShader(gl, gl.VERTEX_SHADER, vsSource));
        gl.attachShader(shaderProgram, loadShader(gl, gl.FRAGMENT_SHADER, fsSource));
        gl.linkProgram(shaderProgram);

        // If creating the shader program failed, alert
        if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
            alert('Unable to initialize the shader program: ' + gl.getProgramInfoLog(shaderProgram));
            return null;
        }
        return shaderProgram;
    }

    // creates a shader of the given type, uploads the source and compiles it.
    function loadShader(gl, type, source) {
        const shader = gl.createShader(type);
        gl.shaderSource(shader, source);
        gl.compileShader(shader);

        // See if it compiled successfully
        if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
            alert('An error occurred compiling the shaders: ' + gl.getShaderInfoLog(shader));
            gl.deleteShader(shader);
            return null;
        }
        return shader;
    }

    // Initialize shader program
    const shaderProgram = initShaderProgram(gl, vsSource, fsSource);

    // look up where the vertex data needs to go.
    var positionLocation = gl.getAttribLocation(shaderProgram, "a_position");
    var textureLoc = gl.getUniformLocation(shaderProgram, "u_image");

    // Create a vertex buffer
    var positionBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
    gl.bufferData(gl.ARRAY_BUFFER, positionData, gl.STATIC_DRAW);

    // Create texture
    var texture = gl.createTexture();
    gl.bindTexture(gl.TEXTURE_2D, texture);
    gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, 1, 1, 0, gl.RGB, gl.UNSIGNED_BYTE, new Uint8Array([0, 0, 255]));
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
    gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);

    // Initialize rendering
    gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);
    gl.clearColor(1.0, 0.0, 0.0, 1.0);

    function drawScene() {
        gl.clear(gl.COLOR_BUFFER_BIT);
        gl.useProgram(shaderProgram);

        // Turn on the vertex attribute
        gl.enableVertexAttribArray(positionLocation);
        gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
        gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0);

        // Draw the rectangle
        gl.drawArrays(gl.TRIANGLES, 0, 6);
    }

    // Draw the scene repeatedly
    function render() {
        if (copyVideo)
            gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, video);
        drawScene();
        requestAnimationFrame(render);
    }
    requestAnimationFrame(render);
})();
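As an aside (not part of the accepted answer's code): where it is supported, HTMLVideoElement.requestVideoFrameCallback fires once per presented video frame and is a more precise signal than timeupdate, which only fires a few times per second. A minimal sketch, assuming a browser that implements this API (Chromium-based browsers and Safari at the time of writing), with a feature check and a fallback to the timeupdate approach used above:

if ('requestVideoFrameCallback' in HTMLVideoElement.prototype) {
    const onFrame = () => {
        copyVideo = true; // a new frame has just been presented
        video.requestVideoFrameCallback(onFrame);
    };
    video.requestVideoFrameCallback(onFrame);
} else {
    // fallback: same behavior as the accepted answer
    video.addEventListener('timeupdate', () => { copyVideo = true; }, true);
}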
Note: I also changed the texture format to RGB (the alpha channel will implicitly be 1), but that does not really matter here.
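If you would rather keep the RGBA format from the question's code, the per-frame upload is the same call with gl.RGBA in both format slots (in WebGL 1 the internal format must match the source format):

gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, video);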
Regarding "javascript - WebGL texture from YouTube video frames", a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/70627240/