
java - Raycasting gives unstable values

Reposted. Author: 行者123. Updated: 2023-11-30 01:54:06

Using LWJGL3 and JOML.

I'm trying to work out how to pick a point on the terrain using a raycasting system. I set my character's position to that point so I can see which point is being produced.

I don't think my terrain is causing the problem, because moving the character (with the keyboard, or simply by adding a value each frame) and snapping it to the terrain works fine.

The problems I'm running into:

  • Inverting the projection matrix makes the point flicker between the correct position and some other point, though I'm not sure how that other value is related.
  • Not inverting the projection matrix stops the flickering, but then the point drifts away from the mouse position exponentially.
  • Near the center of the screen the two positions converge.

If I print out the terrain point vector, for some reason it is displayed in scientific notation:

( 5.335E+1  3.849E-2 -9.564E+1)
( 8.804E+1 -6.256E-3 -2.815E+2)
( 5.335E+1 3.849E-2 -9.564E+1)
( 8.804E+1 -6.256E-3 -2.815E+2)
( 5.335E+1 3.849E-2 -9.564E+1)

If I print out each x, y and z value individually, it actually shows the correct position, but it also flicks to another position (the difference between the two grows as the mouse moves farther from the center of the screen):

5.8912144, 0.016174316, -7.771721
6.1992702, 0.01574707, -11.79966
5.8912144, 0.016174316, -7.771721
6.1992702, 0.01574707, -11.79966
6.609352, 0.01815033, -8.793705

The raycasting class:

public class Raycast
{
    private static final int RECURSION_COUNT = 200;
    private static final float RAY_RANGE = 600;

    private Input input;
    private Vector3f currentRay = new Vector3f();

    private Matrix4f projectionMatrix;
    private Matrix4f viewMatrix;

    private Camera camera;
    private Terrain terrain;
    private Vector3f currentTerrainPoint;

    public Raycast(Camera camera, Matrix4f projectionMatrix, Terrain terrain, Input input) {
        this.camera = camera;
        this.projectionMatrix = projectionMatrix;
        this.input = input;
        this.viewMatrix = MathUtils.createViewMatrix(camera);
        this.terrain = terrain;
    }

    public void update()
    {
        viewMatrix = MathUtils.createViewMatrix(camera);
        currentRay = calculateRay();
        if (intersectionInRange(0, RAY_RANGE, currentRay)) {
            currentTerrainPoint = binarySearch(0, 0, RAY_RANGE, currentRay);
        } else {
            currentTerrainPoint = null;
        }
    }

    private Vector3f calculateRay()
    {
        float mouseX = (float) input.getMouseDx();
        float mouseY = (float) input.getMouseDy();
        Vector2f deviceCoords = getNormalizedDeviceCoordinates(mouseX, mouseY);
        //System.out.println(deviceCoords.x+", "+deviceCoords.y);
        Vector4f clipCoords = new Vector4f(deviceCoords.x, deviceCoords.y, -1f, 1f);
        Vector4f eyeCoords = toEyeCoords(clipCoords);
        Vector3f worldRay = toWorldCoords(eyeCoords);
        return worldRay;
    }

    private Vector3f toWorldCoords(Vector4f eyeCoords)
    {
        Matrix4f invertedView = viewMatrix.invert();
        Vector4f rayWorld = invertedView.transform(eyeCoords);
        Vector3f mouseRay = new Vector3f(rayWorld.x, rayWorld.y, rayWorld.z);
        mouseRay.normalize();
        return mouseRay;
    }

    private Vector4f toEyeCoords(Vector4f clipCoords)
    {
        Matrix4f invertedProjection = projectionMatrix.invert();
        Vector4f eyeCoords = invertedProjection.transform(clipCoords);

        return new Vector4f(eyeCoords.x, eyeCoords.y, -1f, 0f);
    }

    private Vector2f getNormalizedDeviceCoordinates(float mouseX, float mouseY)
    {
        float x = (2f * mouseX) / Constants.DISPLAY_WIDTH - 1f;
        float y = (2f * mouseY) / Constants.DISPLAY_HEIGHT - 1f;

        return new Vector2f(x, -y);
    }

    private Vector3f getPointOnRay(Vector3f ray, float distance) {
        //Vector3f camPos = new Vector3f(camera.getPosX(), camera.getPosY(), camera.getPosZ());
        Vector3f start = new Vector3f(camera.getPosX(), camera.getPosY(), camera.getPosZ());
        Vector3f scaledRay = new Vector3f(ray.x * distance, ray.y * distance, ray.z * distance);
        return start.add(scaledRay);
    }

    private Vector3f binarySearch(int count, float start, float finish, Vector3f ray) {
        float half = start + ((finish - start) / 2f);
        if (count >= RECURSION_COUNT) {
            Vector3f endPoint = getPointOnRay(ray, half);
            Terrain terrain = getTerrain(endPoint.x, endPoint.z);
            if (terrain != null) {
                return endPoint;
            } else {
                return null;
            }
        }
        if (intersectionInRange(start, half, ray)) {
            return binarySearch(count + 1, start, half, ray);
        } else {
            return binarySearch(count + 1, half, finish, ray);
        }
    }

    private boolean intersectionInRange(float start, float finish, Vector3f ray) {
        Vector3f startPoint = getPointOnRay(ray, start);
        Vector3f endPoint = getPointOnRay(ray, finish);
        if (!isUnderGround(startPoint) && isUnderGround(endPoint)) {
            return true;
        } else {
            return false;
        }
    }

    private boolean isUnderGround(Vector3f testPoint) {
        Terrain terrain = getTerrain(testPoint.x, testPoint.z);
        float height = 0;
        if (terrain != null) {
            height = terrain.getTerrainHeight(testPoint.x, testPoint.z);
        }
        if (testPoint.y < height) {
            return true;
        } else {
            return false;
        }
    }

    private Terrain getTerrain(float worldX, float worldZ) {
        return terrain;
    }

    public Vector3f getCurrentTerrainPoint() {
        return currentTerrainPoint;
    }

    public Vector3f getCurrentRay() {
        return currentRay;
    }
}

The MathUtils class with the view and projection matrices:

public class MathUtils {

    public static float baryCentric(Vector3f p1, Vector3f p2, Vector3f p3, Vector2f pos) {
        float det = (p2.z - p3.z) * (p1.x - p3.x) + (p3.x - p2.x) * (p1.z - p3.z);
        float l1 = ((p2.z - p3.z) * (pos.x - p3.x) + (p3.x - p2.x) * (pos.y - p3.z)) / det;
        float l2 = ((p3.z - p1.z) * (pos.x - p3.x) + (p1.x - p3.x) * (pos.y - p3.z)) / det;
        float l3 = 1.0f - l1 - l2;
        return l1 * p1.y + l2 * p2.y + l3 * p3.y;
    }

    public static Matrix4f createTransformationMatrix(Vector2f translation, Vector2f scale) {
        Matrix4f matrix = new Matrix4f();
        matrix.identity();
        matrix.translate(translation.x, translation.y, 0f);
        matrix.scale(scale.x, scale.y, 1f);
        return matrix;
    }

    public static Matrix4f createTransformationMatrix(Vector3f translation, float rx, float ry, float rz, float scale) {
        Matrix4f transformationMatrix = new Matrix4f();
        transformationMatrix.identity();
        transformationMatrix.translate(translation);
        transformationMatrix.rotate((float) Math.toRadians(rx), 1, 0, 0);
        transformationMatrix.rotate((float) Math.toRadians(ry), 0, 1, 0);
        transformationMatrix.rotate((float) Math.toRadians(rz), 0, 0, 1);
        transformationMatrix.scale(scale);
        return transformationMatrix;
    }

    public static Matrix4f createViewMatrix(Camera camera) {
        Matrix4f viewMatrix = new Matrix4f();
        viewMatrix.identity();
        viewMatrix = viewMatrix.rotate((float) Math.toRadians(camera.getPitch()), 1, 0, 0);
        viewMatrix = viewMatrix.rotate((float) Math.toRadians(camera.getYaw()), 0, 1, 0);
        Vector3f cameraPos = new Vector3f(camera.getPosX(), camera.getPosY(), camera.getPosZ());
        Vector3f negativeCameraPos = new Vector3f(-cameraPos.x, -cameraPos.y, -cameraPos.z);
        viewMatrix = viewMatrix.translate(negativeCameraPos);
        return viewMatrix;
    }

    public static Matrix4f createProjectionMatrix() {
        Matrix4f projectionMatrix = new Matrix4f();
        float aspectRatio = (float) Constants.DISPLAY_WIDTH / (float) Constants.DISPLAY_HEIGHT;
        float fov = Constants.FOV;
        float near = Constants.NEAR_PLANE;
        float far = Constants.FAR_PLANE;
        projectionMatrix = projectionMatrix.perspective((float) java.lang.Math.toRadians(fov), aspectRatio, near, far);
        return projectionMatrix;
    }
}

Best answer

Coordinates in view space are Cartesian coordinates with 3 components x, y and z. The projection matrix transforms from view space to clip space. Clip-space coordinates are homogeneous coordinates with 4 components x, y, z and w.

Clip-space coordinates can be transformed to normalized device coordinates by the perspective divide. This means the x, y and z components are divided by the w component.
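
For reference, the forward direction (clip space to NDC) is exactly that division; a minimal JOML sketch, where clip is a hypothetical clip-space vector, not taken from the code above:

// Perspective divide: clip space -> normalized device coordinates.
Vector4f clip = new Vector4f(0.5f, 0.25f, -1.0f, 2.0f); // hypothetical clip-space position
Vector3f ndc = new Vector3f(clip.x / clip.w, clip.y / clip.w, clip.z / clip.w);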

If you want to go from normalized device space back to view space, you have to do the inverse operation. That means you have to transform by the inverse projection matrix and divide the x, y and z components of the result by the w component of the result.

private Vector4f toEyeCoords(Vector4f ndcCoords)
{
    Matrix4f invertedProjection = projectionMatrix.invert(new Matrix4f());
    Vector4f eyeCoords = invertedProjection.transform(ndcCoords);

    return new Vector4f(eyeCoords.x/eyeCoords.w, eyeCoords.y/eyeCoords.w, eyeCoords.z/eyeCoords.w, 0.0f);
}
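
Note that JOML's Matrix4f.invert() with no argument inverts the matrix in place, while invert(Matrix4f dest) writes the inverse into dest and leaves the original untouched. The same non-destructive pattern could also be applied to the view matrix in toWorldCoords; a minimal sketch, assuming the fields of the Raycast class from the question:

// Sketch only: invert into a temporary so viewMatrix is not modified every frame.
private Vector3f toWorldCoords(Vector4f eyeCoords)
{
    Matrix4f invertedView = viewMatrix.invert(new Matrix4f());
    // Copy eyeCoords first, since transform(Vector4f) works in place.
    Vector4f rayWorld = invertedView.transform(new Vector4f(eyeCoords));
    Vector3f mouseRay = new Vector3f(rayWorld.x, rayWorld.y, rayWorld.z);
    mouseRay.normalize();
    return mouseRay;
}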

Regarding "java - Raycasting gives unstable values", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/55082890/
