gpt4 book ai didi

javascript - 在 WebGL 中获取鼠标单击的 3D 坐标

转载 作者:行者123 更新时间:2023-12-01 00:10:04 25 4
gpt4 key购买 nike

由于关于 WebGL 的信息出奇地少(或者只是我不知道该如何搜索),我有一个问题:如何将鼠标坐标转换为 3D 坐标,以便知道我在屏幕上点击的确切位置。

所以我的情况是,我有一个非常简单的天空盒,相机位于 [0, 0, 0],我可以通过单击和拖动来查看它的周围。我想要做的是能够单击该天空盒上的某个位置,并知道我单击的位置,因为我需要在该位置上放置注释(某些文本或 html 元素)。当我转向另一边时,该 html 元素必须移动并消失。因此,我需要一种方法来单击鼠标并找出我正在单击立方体的哪一侧以及在什么坐标处,以便我可以正确放置注释。

我使用的是普通的 WebGL,我不使用 THREE.js 或类似的东西。由于它只是一个立方体,我只能假设找到交集不会那么困难,并且不需要额外的库。

最佳答案

嗯,你确实是对的,很难找到一个例子😭

常见的 WebGL 着色器项目使用类似下面的代码进行 3D 变换:

gl_Position = matrix * position;

gl_Position = projection * modelView * position;

gl_Position = projection * view * world * position;

基本上都是一样的东西。它们获取位置并将其乘以矩阵以转换到剪辑空间。要走另一条路,您需要做相反的事情:取剪辑空间中的一个位置,并将其变换回 position 所在的空间,即

inverse (projection * view * world) * clipSpacePosition

因此,使用您的 3D 数学库计算您传递给 WebGL 的矩阵的逆矩阵。例如,下面是一些使用 twgl 的数学库计算绘制所需矩阵的代码:

  // Perspective projection: 30° vertical field of view,
  // matched to the canvas's displayed aspect ratio.
  const fov = 30 * Math.PI / 180;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const zNear = 0.5;
const zFar = 10;
const projection = m4.perspective(fov, aspect, zNear, zFar);

// Camera placed at `eye`, looking at the origin, with +Y up.
const eye = [1, 4, -6];
const target = [0, 0, 0];
const up = [0, 1, 0];
const camera = m4.lookAt(eye, target, up);

// The view matrix is the inverse of the camera matrix.
const view = m4.inverse(camera);
const viewProjection = m4.multiply(projection, view);
// Spin the model around the Y axis over time.
const world = m4.rotationY(time);

因此,对于一个实际执行如下操作的着色器:

  gl_Position = viewProjection * world * position

所以我们需要逆

  // Invert the full clip-space transform so clip-space points can be
  // mapped back into the space of `position` (the raw geometry data).
  const invMat = m4.inverse(m4.multiply(viewProjection, world));

然后我们需要一个剪辑空间射线。我们将从 2D 转到 3D,因此我们将使用 -1 和 +1 作为我们的 Z 值来制作一条从 zNear 开始并在 zFar 结束的穿过平截头体的射线

  // Convert the mouse position into a ray through the view frustum.
  canvas.addEventListener('mousemove', (e) => {
// Mouse position in pixels, relative to the canvas's top-left corner.
const rect = canvas.getBoundingClientRect();
const x = e.clientX - rect.left;
const y = e.clientY - rect.top;

// Normalize to clip space: X in [-1, +1], Y flipped so +1 is the top.
const clipX = x / rect.width * 2 - 1;
const clipY = y / rect.height * -2 + 1;

// Un-project both ends of the ray: clip z = -1 lands on the near
// plane, z = +1 on the far plane.
const start = m4.transformPoint(invMat, [clipX, clipY, -1]);
const end = m4.transformPoint(invMat, [clipX, clipY, 1]);

... do something with start/end
});

start 和 end 现在位于 position(几何体中的数据)所在的空间,因此您现在必须在 JavaScript 中使用一些射线与三角形求交的代码遍历所有三角形,并查看从 start 到 end 的射线是否与一个或多个三角形相交。

请注意,如果您想要的只是世界空间中的光线,而不是位置空间,那么您将使用

  // Inverting only viewProjection (omitting `world`) yields a ray in
  // world space rather than the geometry's local `position` space.
  const invMat = m4.inverse(viewProjection);

"use strict";

// Vertex shader: transforms positions by world then view-projection,
// and passes texcoord/color through to the fragment shader.
const vs = `
uniform mat4 u_world;
uniform mat4 u_viewProjection;

attribute vec4 position;
attribute vec2 texcoord;
attribute vec4 color;

varying vec4 v_position;
varying vec2 v_texcoord;
varying vec4 v_color;

void main() {
v_texcoord = texcoord;
v_color = color;
gl_Position = u_viewProjection * u_world * position;
}
`;

// Fragment shader: modulates a texture lookup by the interpolated color.
const fs = `
precision mediump float;

varying vec2 v_texcoord;
varying vec4 v_color;

uniform sampler2D tex;

void main() {
gl_FragColor = texture2D(tex, v_texcoord) * v_color;
}
`;

// twgl's matrix helpers and the WebGL context from the page's canvas.
const m4 = twgl.m4;
const gl = document.querySelector("#c").getContext("webgl");

// compiles shaders, links, looks up locations
const programInfo = twgl.createProgramInfo(gl, [vs, fs]);

// Unit cube geometry with a single constant vertex color.
const cubeArrays = twgl.primitives.createCubeVertices(1);
cubeArrays.color = {value: [0.2, 0.3, 1, 1]};
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData
// for each array
const cubeBufferInfo = twgl.createBufferInfoFromArrays(gl, cubeArrays);

// Ring buffer of line segments: 2 vertices per line,
// 3 position floats and 4 color floats per vertex.
const numLines = 50;
const positions = new Float32Array(numLines * 3 * 2);
const colors = new Float32Array(numLines * 4 * 2);
// calls gl.createBuffer, gl.bindBuffer, gl.bufferData
// for each array
const linesBufferInfo = twgl.createBufferInfoFromArrays(gl, {
position: positions,
color: colors,
texcoord: { value: [0, 0], },
});

// 2x2 luminance checkerboard texture sampled by the fragment shader.
const tex = twgl.createTexture(gl, {
minMag: gl.NEAREST,
format: gl.LUMINANCE,
src: [
255, 192,
192, 255,
],
});

// Latest mouse position in clip space (updated by the mousemove handler
// below) and the next ring-buffer slot to write a line into.
let clipX = 0;
let clipY = 0;
let lineNdx = 0;

// Render loop: draws the spinning-camera view of the cube, then appends
// one world-space line per frame along the current mouse ray and draws
// the accumulated lines.
function render(time) {
time *= 0.001;  // requestAnimationFrame gives milliseconds; use seconds
twgl.resizeCanvasToDisplaySize(gl.canvas);
gl.viewport(0, 0, gl.canvas.width, gl.canvas.height);

gl.enable(gl.DEPTH_TEST);
gl.enable(gl.CULL_FACE);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);

// Projection matched to the displayed canvas size.
const fov = 30 * Math.PI / 180;
const aspect = gl.canvas.clientWidth / gl.canvas.clientHeight;
const zNear = 1;
const zFar = 10;
const projection = m4.perspective(fov, aspect, zNear, zFar);

// Move the camera in a small circle over time, always looking at the origin.
const eye = [Math.cos(time), Math.sin(time), 6];
const target = [0, 0, 0];
const up = [0, 1, 0];
const camera = m4.lookAt(eye, target, up);

const view = m4.inverse(camera);
const viewProjection = m4.multiply(projection, view);
// Fixed model orientation: 1 radian around Y, then 1 radian around X.
const world = m4.rotateX(m4.rotationY(1), 1);

gl.useProgram(programInfo.program);
// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, programInfo, cubeBufferInfo);
twgl.setUniformsAndBindTextures(programInfo, {
tex,
u_world: world,
u_viewProjection: viewProjection,
color: [0.2, 0.3, 1, 1],
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, cubeBufferInfo);

// add a line in world space
// Inverse of view-projection maps clip space back to world space:
// clip z = -1 un-projects to the near plane, z = +1 to the far plane.
const invMat = m4.inverse(viewProjection);
const start = m4.transformPoint(invMat, [clipX, clipY, -1]);
const end = m4.transformPoint(invMat, [clipX, clipY, 1]);
// Write the new line into its ring-buffer slot (2 vertices per line).
const poffset = lineNdx * 3 * 2;
const coffset = lineNdx * 4 * 2;
const color = [Math.random(), Math.random(), Math.random(), 1];
positions.set(start, poffset);
positions.set(end, poffset + 3);
colors.set(color, coffset);
colors.set(color, coffset + 4);

// Re-upload the full arrays; they are small enough that partial
// updates aren't worth the bookkeeping.
gl.bindBuffer(gl.ARRAY_BUFFER, linesBufferInfo.attribs.position.buffer);
gl.bufferSubData(gl.ARRAY_BUFFER, 0, positions);
gl.bindBuffer(gl.ARRAY_BUFFER, linesBufferInfo.attribs.color.buffer);
gl.bufferSubData(gl.ARRAY_BUFFER, 0, colors);

// Advance the ring buffer; the oldest line is overwritten after numLines frames.
lineNdx = (lineNdx + 1) % numLines;

// calls gl.bindBuffer, gl.enableVertexAttribArray, gl.vertexAttribPointer
twgl.setBuffersAndAttributes(gl, programInfo, linesBufferInfo);
twgl.setUniformsAndBindTextures(programInfo, {
tex,
u_world: m4.identity(),  // line vertices are already in world space
u_viewProjection: viewProjection,
color: [1, 0, 0, 1],
});
// calls gl.drawArrays or gl.drawElements
twgl.drawBufferInfo(gl, linesBufferInfo, gl.LINES);

requestAnimationFrame(render);
}
requestAnimationFrame(render);


// Track the mouse and store its position in clip space (-1..+1), which
// the render loop reads each frame to build the un-projected ray.
gl.canvas.addEventListener('mousemove', (e) => {
  const rect = gl.canvas.getBoundingClientRect();
  // Pixel position relative to the canvas's top-left corner.
  const px = e.clientX - rect.left;
  const py = e.clientY - rect.top;
  // Normalize to clip space; Y is flipped because clip +1 is the top.
  clipX = (px / rect.width) * 2 - 1;
  clipY = (py / rect.height) * -2 + 1;
});
/* Make the canvas fill the entire viewport with no page margin. */
body { margin: 0; }
canvas { width: 100vw; height: 100vh; display: block; }
<!-- Render-target canvas plus the twgl helper library the script uses. -->
<canvas id="c"></canvas>
<script src="https://twgljs.org/dist/4.x/twgl-full.min.js"></script>

至于 WebGL 的更多学习资料,可以在 here(原文链接)找到一些。

关于javascript - 在 WebGL 中获取鼠标单击的 3D 坐标,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/60136758/

25 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com