javascript - WebGL framebuffer multisampling


I know WebGL antialiases (multisamples) whatever you render to the screen to avoid hard edges, but when I render through a framebuffer it no longer does so and I get a bunch of jagged edges on screen.

How do I get a framebuffer to use multisampling?

Best Answer

This took me a day to figure out, so I thought I'd post an example for others to follow. I borrowed the cube-animation code below from webgl2fundamentals.org; all I added is the code that performs the antialiasing on the rendered texture. Make sure you initialize the context with canvas.getContext("webgl2", {antialias: false}); the technique does not work with the built-in antialiasing enabled.
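For reference, a minimal context setup could look like the following sketch (it assumes a &lt;canvas&gt; element with id "canvas", as in the full example below):

// Grab the canvas and create a WebGL2 context with the built-in
// antialiasing disabled, since we resolve the samples ourselves via blitFramebuffer
const canvas = document.getElementById("canvas");
const gl = canvas.getContext("webgl2", { antialias: false });
if (!gl) {
  throw new Error("WebGL2 is not available in this browser");
}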

To antialias the rendered texture, you need to create one Renderbuffer object and two Framebuffer objects: one to hold the multisampled drawing, the other to receive the resolved (antialiased) image.

// Create and bind the framebuffers
const FRAMEBUFFER = {
  RENDERBUFFER: 0,
  COLORBUFFER: 1
};
const fb = [
  gl.createFramebuffer(),
  gl.createFramebuffer()
];
const colorRenderbuffer = gl.createRenderbuffer();

// Give the renderbuffer multisampled color storage,
// using the maximum sample count the device supports
gl.bindRenderbuffer(gl.RENDERBUFFER, colorRenderbuffer);
gl.renderbufferStorageMultisample(gl.RENDERBUFFER,
                                  gl.getParameter(gl.MAX_SAMPLES),
                                  gl.RGBA8,
                                  targetTextureWidth,
                                  targetTextureHeight);

// Framebuffer 1: the multisampled renderbuffer we draw into
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER,
                           gl.COLOR_ATTACHMENT0,
                           gl.RENDERBUFFER,
                           colorRenderbuffer);

// Framebuffer 2: the texture that receives the resolved (antialiased) image
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.COLORBUFFER]);
gl.framebufferTexture2D(gl.FRAMEBUFFER,
                        gl.COLOR_ATTACHMENT0,
                        gl.TEXTURE_2D,
                        targetTexture, 0);

gl.bindFramebuffer(gl.FRAMEBUFFER, null);
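If nothing shows up, it can help to confirm that both framebuffers are complete before you start rendering. This check is not part of the original answer, just a quick sanity test:

// Optional sanity check: both framebuffers should report FRAMEBUFFER_COMPLETE
for (const f of fb) {
  gl.bindFramebuffer(gl.FRAMEBUFFER, f);
  const status = gl.checkFramebufferStatus(gl.FRAMEBUFFER);
  if (status !== gl.FRAMEBUFFER_COMPLETE) {
    console.error("Framebuffer incomplete, status 0x" + status.toString(16));
  }
}
gl.bindFramebuffer(gl.FRAMEBUFFER, null);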

Before drawing to the texture, bind the first of the two Framebuffer objects.

// render to our targetTexture by binding the framebuffer
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);

Then do the textured draw, followed by the antialiasing resolve, which is where the second framebuffer comes in.

// ... drawing code ...

// "blit" the cube into the color buffer, which adds antialiasing
gl.bindFramebuffer(gl.READ_FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, fb[FRAMEBUFFER.COLORBUFFER]);

gl.clearBufferfv(gl.COLOR, 0, [1.0, 1.0, 1.0, 1.0]);
gl.blitFramebuffer(0, 0, targetTextureWidth, targetTextureHeight,
                   0, 0, targetTextureWidth, targetTextureHeight,
                   gl.COLOR_BUFFER_BIT, gl.LINEAR);

// render the top layer to the framebuffer as well
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);

Once the top layer has been drawn into the buffer, apply the same antialiasing step as before, but this time set the DRAW_FRAMEBUFFER to null; that tells WebGL to draw to the actual canvas.

// this time render to the default buffer, which is just the canvas
gl.bindFramebuffer(gl.READ_FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, null);

gl.clearBufferfv(gl.COLOR, 0, [1.0, 1.0, 1.0, 1.0]);
gl.blitFramebuffer(0, 0, canvas.width, canvas.height,
                   0, 0, canvas.width, canvas.height,
                   gl.COLOR_BUFFER_BIT, gl.LINEAR);
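Note that this example never attaches a depth buffer to the multisampled framebuffer, which happens to look fine for a single convex cube. If your scene needs real depth testing, a second multisampled renderbuffer can be attached as the depth attachment. The sketch below is an extension I'm adding for illustration, not part of the original answer; the sample count must match the color renderbuffer's.

// Hypothetical extension: add a multisampled depth attachment to the draw framebuffer
const depthRenderbuffer = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, depthRenderbuffer);
gl.renderbufferStorageMultisample(gl.RENDERBUFFER,
                                  gl.getParameter(gl.MAX_SAMPLES),
                                  gl.DEPTH_COMPONENT24,
                                  targetTextureWidth,
                                  targetTextureHeight);
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER,
                           gl.DEPTH_ATTACHMENT,
                           gl.RENDERBUFFER,
                           depthRenderbuffer);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);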

The finished result:

"use strict";

var vertexShaderSource = `#version 300 es

// an attribute is an input (in) to a vertex shader.
// It will receive data from a buffer
in vec4 a_position;
in vec2 a_texcoord;

// A matrix to transform the positions by
uniform mat4 u_matrix;

// a varying to pass the texture coordinates to the fragment shader
out vec2 v_texcoord;

// all shaders have a main function
void main() {
// Multiply the position by the matrix.
gl_Position = u_matrix * a_position;

// Pass the texcoord to the fragment shader.
v_texcoord = a_texcoord;
}
`;

var fragmentShaderSource = `#version 300 es

precision mediump float;

// Passed in from the vertex shader.
in vec2 v_texcoord;

// The texture.
uniform sampler2D u_texture;

// we need to declare an output for the fragment shader
out vec4 outColor;

void main() {
outColor = texture(u_texture, v_texcoord);
}
`;

function main() {
// Get A WebGL context
/** @type {HTMLCanvasElement} */
var canvas = document.getElementById("canvas");
var gl = canvas.getContext("webgl2", {
antialias: false
});
if (!gl) {
return;
}

// Use our boilerplate utils to compile the shaders and link into a program
var program = webglUtils.createProgramFromSources(gl, [vertexShaderSource, fragmentShaderSource]);

// look up where the vertex data needs to go.
var positionAttributeLocation = gl.getAttribLocation(program, "a_position");
var texcoordAttributeLocation = gl.getAttribLocation(program, "a_texcoord");

// look up uniform locations
var matrixLocation = gl.getUniformLocation(program, "u_matrix");
var textureLocation = gl.getUniformLocation(program, "u_texture");

// Create a buffer
var positionBuffer = gl.createBuffer();

// Create a vertex array object (attribute state)
var vao = gl.createVertexArray();

// and make it the one we're currently working with
gl.bindVertexArray(vao);

// Turn on the attribute
gl.enableVertexAttribArray(positionAttributeLocation);

// Bind it to ARRAY_BUFFER (think of it as ARRAY_BUFFER = positionBuffer)
gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
// Set Geometry.
setGeometry(gl);

// Tell the attribute how to get data out of positionBuffer (ARRAY_BUFFER)
var size = 3; // 3 components per iteration
var type = gl.FLOAT; // the data is 32bit floats
var normalize = false; // don't normalize the data
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next position
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer(
positionAttributeLocation, size, type, normalize, stride, offset);

// create the texcoord buffer, make it the current ARRAY_BUFFER
// and copy in the texcoord values
var texcoordBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, texcoordBuffer);
setTexcoords(gl);

// Turn on the attribute
gl.enableVertexAttribArray(texcoordAttributeLocation);

// Tell the attribute how to get data out of texcoordBuffer (ARRAY_BUFFER)
var size = 2; // 2 components per iteration
var type = gl.FLOAT; // the data is 32bit floating point values
var normalize = true; // convert from 0-255 to 0.0-1.0
var stride = 0; // 0 = move forward size * sizeof(type) each iteration to get the next color
var offset = 0; // start at the beginning of the buffer
gl.vertexAttribPointer(
texcoordAttributeLocation, size, type, normalize, stride, offset);

// Create a texture.
var texture = gl.createTexture();

// use texture unit 0
gl.activeTexture(gl.TEXTURE0 + 0);

// bind to the TEXTURE_2D bind point of texture unit 0
gl.bindTexture(gl.TEXTURE_2D, texture);

// fill texture with 3x2 pixels
{
const level = 0;
const internalFormat = gl.R8;
const width = 3;
const height = 2;
const border = 0;
const format = gl.RED;
const type = gl.UNSIGNED_BYTE;
const data = new Uint8Array([
128, 64, 128,
0, 192, 0,
]);
gl.pixelStorei(gl.UNPACK_ALIGNMENT, 1);
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat, width, height, border,
format, type, data);
}

// set the filtering so we don't need mips
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);

// Create a texture to render to
const targetTextureWidth = 512;
const targetTextureHeight = 512;
const targetTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, targetTexture);

{
// define size and format of level 0
const level = 0;
const internalFormat = gl.RGBA;
const border = 0;
const format = gl.RGBA;
const type = gl.UNSIGNED_BYTE;
const data = null;
gl.texImage2D(gl.TEXTURE_2D, level, internalFormat,
targetTextureWidth, targetTextureHeight, border,
format, type, data);

// set the filtering so we don't need mips
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
}

// Create and bind the framebuffer
const FRAMEBUFFER = {
RENDERBUFFER: 0,
COLORBUFFER: 1
};
const fb = [gl.createFramebuffer(), gl.createFramebuffer()];
const colorRenderbuffer = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, colorRenderbuffer);
gl.renderbufferStorageMultisample(gl.RENDERBUFFER, gl.getParameter(gl.MAX_SAMPLES), gl.RGBA8, targetTextureWidth, targetTextureHeight);
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.RENDERBUFFER, colorRenderbuffer);
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.COLORBUFFER]);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, targetTexture, 0);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);

function degToRad(d) {
return d * Math.PI / 180;
}

var fieldOfViewRadians = degToRad(60);
var modelXRotationRadians = degToRad(0);
var modelYRotationRadians = degToRad(0);

// Get the starting time.
var then = 0;

requestAnimationFrame(drawScene);

function drawCube(aspect) {
// Tell it to use our program (pair of shaders)
gl.useProgram(program);

// Bind the attribute/buffer set we want.
gl.bindVertexArray(vao);

// Compute the projection matrix
var projectionMatrix =
m4.perspective(fieldOfViewRadians, aspect, 1, 2000);

var cameraPosition = [0, 0, 2];
var up = [0, 1, 0];
var target = [0, 0, 0];

// Compute the camera's matrix using look at.
var cameraMatrix = m4.lookAt(cameraPosition, target, up);

// Make a view matrix from the camera matrix.
var viewMatrix = m4.inverse(cameraMatrix);

var viewProjectionMatrix = m4.multiply(projectionMatrix, viewMatrix);

var matrix = m4.xRotate(viewProjectionMatrix, modelXRotationRadians);
matrix = m4.yRotate(matrix, modelYRotationRadians);

// Set the matrix.
gl.uniformMatrix4fv(matrixLocation, false, matrix);

// Tell the shader to use texture unit 0 for u_texture
gl.uniform1i(textureLocation, 0);

// Draw the geometry.
var primitiveType = gl.TRIANGLES;
var offset = 0;
var count = 6 * 6;
gl.drawArrays(primitiveType, offset, count);
}

// Draw the scene.
function drawScene(time) {
// convert to seconds
time *= 0.001;
// Subtract the previous time from the current time
var deltaTime = time - then;
// Remember the current time for the next frame.
then = time;

// Animate the rotation
modelYRotationRadians += -0.7 * deltaTime;
modelXRotationRadians += -0.4 * deltaTime;

//webglUtils.resizeCanvasToDisplaySize(gl.canvas);

gl.enable(gl.CULL_FACE);
gl.enable(gl.DEPTH_TEST);

{
// render to our targetTexture by binding the framebuffer
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);

// render cube with our 3x2 texture
gl.bindTexture(gl.TEXTURE_2D, texture);

// Tell WebGL how to convert from clip space to pixels
gl.viewport(0, 0, targetTextureWidth, targetTextureHeight);

// Clear the canvas AND the depth buffer.
gl.clearColor(0, 0, 1, 1); // clear to blue
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);

const aspect = targetTextureWidth / targetTextureHeight;
drawCube(aspect);

// "blit" the cube into the color buffer, which adds antialiasing
gl.bindFramebuffer(gl.READ_FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, fb[FRAMEBUFFER.COLORBUFFER]);
gl.clearBufferfv(gl.COLOR, 0, [1.0, 1.0, 1.0, 1.0]);
gl.blitFramebuffer(0, 0, targetTextureWidth, targetTextureHeight,
0, 0, targetTextureWidth, targetTextureHeight,
gl.COLOR_BUFFER_BIT, gl.LINEAR);
}

{
// render the top layer to the frame buffer as well
gl.bindFramebuffer(gl.FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);

// render the cube with the texture we just rendered to
gl.bindTexture(gl.TEXTURE_2D, targetTexture);

// Tell WebGL how to convert from clip space to pixels
gl.viewport(0, 0, targetTextureWidth, targetTextureHeight);

// Clear the canvas AND the depth buffer.
gl.clearColor(0.105, 0.105, 0.105, 1); // clear to dark gray
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);


const aspect = 1;
drawCube(aspect);

// this time render to the default buffer, which is just canvas
gl.bindFramebuffer(gl.READ_FRAMEBUFFER, fb[FRAMEBUFFER.RENDERBUFFER]);
gl.bindFramebuffer(gl.DRAW_FRAMEBUFFER, null);
gl.clearBufferfv(gl.COLOR, 0, [1.0, 1.0, 1.0, 1.0]);
gl.blitFramebuffer(0, 0, canvas.width, canvas.height,
0, 0, canvas.width, canvas.height,
gl.COLOR_BUFFER_BIT, gl.LINEAR);
}

requestAnimationFrame(drawScene);
}
}

// Fill the buffer with the values that define a cube.
function setGeometry(gl) {
var positions = new Float32Array([
  -0.5, -0.5, -0.5,
  -0.5, 0.5, -0.5,
  0.5, -0.5, -0.5,
  -0.5, 0.5, -0.5,
  0.5, 0.5, -0.5,
  0.5, -0.5, -0.5,

  -0.5, -0.5, 0.5,
  0.5, -0.5, 0.5,
  -0.5, 0.5, 0.5,
  -0.5, 0.5, 0.5,
  0.5, -0.5, 0.5,
  0.5, 0.5, 0.5,

  -0.5, 0.5, -0.5,
  -0.5, 0.5, 0.5,
  0.5, 0.5, -0.5,
  -0.5, 0.5, 0.5,
  0.5, 0.5, 0.5,
  0.5, 0.5, -0.5,

  -0.5, -0.5, -0.5,
  0.5, -0.5, -0.5,
  -0.5, -0.5, 0.5,
  -0.5, -0.5, 0.5,
  0.5, -0.5, -0.5,
  0.5, -0.5, 0.5,

  -0.5, -0.5, -0.5,
  -0.5, -0.5, 0.5,
  -0.5, 0.5, -0.5,
  -0.5, -0.5, 0.5,
  -0.5, 0.5, 0.5,
  -0.5, 0.5, -0.5,

  0.5, -0.5, -0.5,
  0.5, 0.5, -0.5,
  0.5, -0.5, 0.5,
  0.5, -0.5, 0.5,
  0.5, 0.5, -0.5,
  0.5, 0.5, 0.5,
]);
gl.bufferData(gl.ARRAY_BUFFER, positions, gl.STATIC_DRAW);
}

// Fill the buffer with texture coordinates for the cube.
function setTexcoords(gl) {
gl.bufferData(
gl.ARRAY_BUFFER,
new Float32Array(
[
0, 0,
0, 1,
1, 0,
0, 1,
1, 1,
1, 0,

0, 0,
0, 1,
1, 0,
1, 0,
0, 1,
1, 1,

0, 0,
0, 1,
1, 0,
0, 1,
1, 1,
1, 0,

0, 0,
0, 1,
1, 0,
1, 0,
0, 1,
1, 1,

0, 0,
0, 1,
1, 0,
0, 1,
1, 1,
1, 0,

0, 0,
0, 1,
1, 0,
1, 0,
0, 1,
1, 1,

]),
gl.STATIC_DRAW);
}

main();
html {
  background-color: #1b1b1b;
}
<canvas id="canvas" width="512" height="512"></canvas>

<!--
for most samples webgl-utils only provides shader compiling/linking and
canvas resizing because why clutter the examples with code that's the same in every sample.
See http://webglfundamentals.org/webgl/lessons/webgl-boilerplate.html
and http://webglfundamentals.org/webgl/lessons/webgl-resizing-the-canvas.html
for webgl-utils, m3, m4, and webgl-lessons-ui.
-->
<script src="https://webgl2fundamentals.org/webgl/resources/webgl-utils.js"></script>
<script src="https://webgl2fundamentals.org/webgl/resources/m4.js"></script>

This answer to "javascript - WebGL framebuffer multisampling" is based on a similar question on Stack Overflow: https://stackoverflow.com/questions/47934444/
