android - OpenGL ES 2.0 : Multiple light sources : Shader issue


Update 3 (thank you very much for your help)

I removed what was suggested. u_IT_MVMatrix also seemed to be part of the problem (whatever it is actually for). Things look much better now, but the floor should glow, and the textured bricks should be receiving light from the colored bricks (blue, red, etc.).

(screenshot of the current render)

Vertex shader for the textured objects (the fragment shader is unchanged):

uniform mat4 u_MVPMatrix;       // A constant representing the combined model/view/projection matrix.
uniform mat4 u_MVMatrix; // A constant representing the combined model/view matrix.

attribute vec4 a_Position; // Per-vertex position information we will pass in.
attribute vec3 a_Normal; // Per-vertex normal information we will pass in.
attribute vec2 a_TexCoordinate; // Per-vertex texture coordinate information we will pass in.

varying vec3 v_Position; // This will be passed into the fragment shader.
varying vec3 v_Normal; // This will be passed into the fragment shader.
varying vec2 v_TexCoordinate; // This will be passed into the fragment shader.

uniform vec4 u_PointLightPositions[3]; // In eye space
uniform vec3 u_PointLightColors[3];
vec4 eyeSpacePosition;
vec3 eyeSpaceNormal;

uniform vec4 v_Color;
varying vec3 lighting;
vec3 materialColor;

vec3 getAmbientLighting();
vec3 getDirectionalLighting();
vec3 getPointLighting();

// The entry point for our vertex shader.
void main()
{
    // materialColor = vec3(v_Color.xyz); // Will be modified by the texture later.
    materialColor = vec3(1.0, 1.0, 1.0);

    // Transform the vertex into eye space.
    v_Position = vec3(u_MVMatrix * a_Position);

    // Pass through the texture coordinate.
    v_TexCoordinate = a_TexCoordinate;

    // Transform the normal's orientation into eye space.
    v_Normal = vec3(u_MVMatrix * vec4(a_Normal, 0.0));

    // gl_Position is a special variable used to store the final position.
    // Multiply the vertex by the matrix to get the final point in normalized screen coordinates.
    eyeSpacePosition = u_MVMatrix * a_Position;

    // The model normals need to be adjusted as per the transpose of the inverse of the modelview matrix.
    eyeSpaceNormal = normalize(vec3(u_MVMatrix * vec4(a_Normal, 0.0)));
    gl_Position = u_MVPMatrix * a_Position;

    lighting = getAmbientLighting();
    lighting += getPointLighting();
}

vec3 getAmbientLighting()
{
    return materialColor * 0.2;
}

vec3 getPointLighting()
{
    vec3 lightingSum = vec3(0.0);

    for (int i = 0; i < 3; i++) {
        vec3 toPointLight = vec3(u_PointLightPositions[i]) - vec3(eyeSpacePosition);
        float distance = length(toPointLight);
        // distance = distance / 5.0;
        toPointLight = normalize(toPointLight);

        float cosine = max(dot(eyeSpaceNormal, toPointLight), 0.0);
        lightingSum += (materialColor * u_PointLightColors[i] * 20.0 * cosine)
                / distance;
    }

    return lightingSum;
}

Vertex shader for the light bricks (no texture):

uniform mat4 u_MVPMatrix; // A constant representing the combined model/view/projection matrix.
uniform mat4 u_MVMatrix; // A constant representing the combined model/view matrix.

attribute vec4 a_Position; // Per-vertex position information we will pass in.
attribute vec3 a_Normal; // Per-vertex normal information we will pass in.

varying vec3 v_Position; // This will be passed into the fragment shader.
varying vec3 v_Normal; // This will be passed into the fragment shader.

uniform vec4 u_PointLightPositions[3]; // In eye space
uniform vec3 u_PointLightColors[3];
vec4 eyeSpacePosition;
vec3 eyeSpaceNormal;

uniform vec4 v_Color;
varying vec3 lighting;

vec3 getAmbientLighting();
vec3 getDirectionalLighting();
vec3 getPointLighting();

// The entry point for our vertex shader.
void main()
{
    // Transform the vertex into eye space.
    v_Position = vec3(u_MVMatrix * a_Position);

    // Transform the normal's orientation into eye space.
    v_Normal = vec3(u_MVMatrix * vec4(a_Normal, 0.0));

    // gl_Position is a special variable used to store the final position.
    // Multiply the vertex by the matrix to get the final point in normalized screen coordinates.
    gl_Position = u_MVPMatrix * a_Position;
    eyeSpacePosition = u_MVMatrix * a_Position;

    // The model normals need to be adjusted as per the transpose of the inverse of the modelview matrix.
    eyeSpaceNormal = normalize(vec3(u_MVMatrix * vec4(a_Normal, 0.0)));

    lighting = getAmbientLighting();
    lighting += getPointLighting();
}

vec3 getAmbientLighting()
{
    return v_Color.xyz * 0.2;
}

vec3 getPointLighting()
{
    vec3 lightingSum = vec3(0.0);

    for (int i = 0; i < 3; i++) {
        vec3 toPointLight = vec3(u_PointLightPositions[i]) - vec3(eyeSpacePosition);
        float distance = length(toPointLight);
        toPointLight = normalize(toPointLight);

        float cosine = max(dot(eyeSpaceNormal, toPointLight), 0.0);
        lightingSum += (v_Color.xyz * u_PointLightColors[i] * 20.0 * cosine)
                / distance;
    }

    return lightingSum;
}

I have always struggled with multiple light sources in shaders, but I found an example in my Android OpenGL ES 2.0 quick-start book.

I thought I would give it a go, but sadly, no matter what I do, I myself seem to be the light source: objects get brighter as I move closer to them. What I want is for 3 different places (like street lamps) to be the light sources.

I define my light positions and colors in my renderer:

// new lighting
public final float[] pointLightPositions = new float[]
        {0f, 1f, 0f, 1f,
         100f, 1f, 0f, 1f,
         50f, 1f, 0f, 1f};

public final float[] pointLightColors = new float[]
        {1.00f, 0.20f, 0.20f,
         0.02f, 0.25f, 0.02f,
         0.02f, 0.20f, 1.00f};

When rendering:

    uPointLightPositionsLocation =
glGetUniformLocation(mProgramHandle, "u_PointLightPositions");
uPointLightColorsLocation =
glGetUniformLocation(mProgramHandle, "u_PointLightColors");

glUniform4fv(uPointLightPositionsLocation, 3, mRenderer.pointLightPositions, 0);
glUniform3fv(uPointLightColorsLocation, 3, mRenderer.pointLightColors, 0);

// not sure why I need this
// lighting
final float[] pointPositionsInEyeSpace = new float[12];
multiplyMV(pointPositionsInEyeSpace, 0, mVMatrix, 0, mRenderer.pointLightPositions, 0);
multiplyMV(pointPositionsInEyeSpace, 4, mVMatrix, 0, mRenderer.pointLightPositions, 4);
multiplyMV(pointPositionsInEyeSpace, 8, mVMatrix, 0, mRenderer.pointLightPositions, 8);

Matrix.multiplyMM(mRenderer.mMVPMatrix, 0, mVMatrix, 0, mRenderer.mModelMatrix, 0);
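
Since the shader declares u_PointLightPositions as being "In eye space", my assumption is that the transformed array is what actually has to be uploaded, something like this (just a sketch reusing the variables from the snippet above, not the book's exact code):

// Assumption: upload the eye-space light positions instead of the world-space array,
// because the shader expects u_PointLightPositions in eye space.
final float[] pointPositionsInEyeSpace = new float[12];
multiplyMV(pointPositionsInEyeSpace, 0, mVMatrix, 0, mRenderer.pointLightPositions, 0);
multiplyMV(pointPositionsInEyeSpace, 4, mVMatrix, 0, mRenderer.pointLightPositions, 4);
multiplyMV(pointPositionsInEyeSpace, 8, mVMatrix, 0, mRenderer.pointLightPositions, 8);
glUniform4fv(uPointLightPositionsLocation, 3, pointPositionsInEyeSpace, 0);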

Vertex shader:

uniform mat4 u_MVPMatrix;       // A constant representing the combined model/view/projection matrix.                  
uniform mat4 u_MVMatrix; // A constant representing the combined model/view matrix.

attribute vec4 a_Position; // Per-vertex position information we will pass in.
attribute vec3 a_Normal; // Per-vertex normal information we will pass in.
attribute vec2 a_TexCoordinate; // Per-vertex texture coordinate information we will pass in.

varying vec3 v_Position; // This will be passed into the fragment shader.
varying vec3 v_Normal; // This will be passed into the fragment shader.
varying vec2 v_TexCoordinate; // This will be passed into the fragment shader.

uniform vec4 u_PointLightPositions[3]; // In eye space
uniform vec3 u_PointLightColors[3];

// The entry point for our vertex shader.
void main()
{
    // Transform the vertex into eye space.
    v_Position = vec3(u_MVMatrix * a_Position);

    // Pass through the texture coordinate.
    v_TexCoordinate = a_TexCoordinate;

    // Transform the normal's orientation into eye space.
    v_Normal = vec3(u_MVMatrix * vec4(a_Normal, 0.0));

    // gl_Position is a special variable used to store the final position.
    // Multiply the vertex by the matrix to get the final point in normalized screen coordinates.
    gl_Position = u_MVPMatrix * a_Position;
}

Fragment shader:

precision mediump float;        // Set the default precision to medium. We don't need as high of a 
// precision in the fragment shader.
uniform vec3 u_LightPos; // The position of the light in eye space.
uniform sampler2D u_Texture; // The input texture.

varying vec3 v_Position; // Interpolated position for this fragment.
varying vec3 v_Normal; // Interpolated normal for this fragment.
varying vec2 v_TexCoordinate; // Interpolated texture coordinate per fragment.

uniform vec4 v_Color;

uniform vec4 u_PointLightPositions[3]; // In eye space
uniform vec3 u_PointLightColors[3];

vec3 getPointLighting();

// The entry point for our fragment shader.
void main()
{
    // Will be used for attenuation.
    float distance = length(u_LightPos - v_Position);

    // Get a lighting direction vector from the light to the vertex.
    vec3 lightVector = normalize(u_LightPos - v_Position);

    // Calculate the dot product of the light vector and vertex normal. If the normal and light vector are
    // pointing in the same direction then it will get max illumination.
    float diffuse = max(dot(v_Normal, lightVector), 0.0);

    // Add attenuation.
    diffuse = diffuse * (1.0 / (1.0 + (0.25 * distance)));

    // Add ambient lighting
    diffuse = diffuse + 0.7;

    // Multiply the color by the diffuse illumination level and texture value to get final output color.
    // gl_FragColor = (diffuse * texture2D(u_Texture, v_TexCoordinate));
    gl_FragColor = diffuse * texture2D(u_Texture, v_TexCoordinate);
    gl_FragColor *= (v_Color * vec4(getPointLighting(), v_Color.w));
}

vec3 getPointLighting()
{
    vec3 lightingSum = vec3(0.0);

    for (int i = 0; i < 3; i++) {
        vec3 toPointLight = vec3(u_PointLightPositions[i])
                - vec3(v_Position);
        float distance = length(toPointLight);
        toPointLight = normalize(toPointLight);

        float cosine = max(dot(v_Normal, toPointLight), 0.0);

        // lightingSum += vec3(0.0, 0.0, 1.0);
        lightingSum += (vec3(v_Color.xyz) * u_PointLightColors[i] * 5.0 * cosine) / distance;
    }

    return lightingSum;
}

I would be really glad if someone could help :)

Update 2

I now have lights with different colors, but they only light things up when I get very close. I'm fairly sure this is related to the u_IT_MVMatrix matrix.

Fragment shader:

    uniform vec3 u_LightPos;        // The position of the light in eye space.
uniform sampler2D u_Texture; // The input texture.

varying vec3 v_Position; // Interpolated position for this fragment.
varying vec3 v_Normal; // Interpolated normal for this fragment.
varying vec2 v_TexCoordinate; // Interpolated texture coordinate per fragment.

uniform vec4 v_Color;
varying vec3 lighting;
// The entry point for our fragment shader.
void main()
{
    gl_FragColor = texture2D(u_Texture, v_TexCoordinate);
    gl_FragColor *= vec4(lighting, 1.0);
}

Vertex shader:

    uniform mat4 u_MVPMatrix;       // A constant representing the combined model/view/projection matrix.
uniform mat4 u_MVMatrix; // A constant representing the combined model/view matrix.

attribute vec4 a_Position; // Per-vertex position information we will pass in.
attribute vec3 a_Normal; // Per-vertex normal information we will pass in.
attribute vec2 a_TexCoordinate; // Per-vertex texture coordinate information we will pass in.

varying vec3 v_Position; // This will be passed into the fragment shader.
varying vec3 v_Normal; // This will be passed into the fragment shader.
varying vec2 v_TexCoordinate; // This will be passed into the fragment shader.

uniform vec4 u_PointLightPositions[3]; // In eye space
uniform vec3 u_PointLightColors[3];

uniform vec3 u_VectorToLight; // In eye space
uniform mat4 u_IT_MVMatrix;
vec4 eyeSpacePosition;
vec3 eyeSpaceNormal;

uniform vec4 v_Color;
varying vec3 lighting;
vec3 materialColor;


vec3 getAmbientLighting();
vec3 getDirectionalLighting();
vec3 getPointLighting();

// The entry point for our vertex shader.
void main()
{
    materialColor = vec3(1.0, 1.0, 1.0); // Will be modified by the texture later.

    // Transform the vertex into eye space.
    v_Position = vec3(u_MVMatrix * a_Position);

    // Pass through the texture coordinate.
    v_TexCoordinate = a_TexCoordinate;

    // Transform the normal's orientation into eye space.
    v_Normal = vec3(u_MVMatrix * vec4(a_Normal, 0.0));

    // gl_Position is a special variable used to store the final position.
    // Multiply the vertex by the matrix to get the final point in normalized screen coordinates.
    eyeSpacePosition = u_MVMatrix * a_Position;

    // The model normals need to be adjusted as per the transpose
    // of the inverse of the modelview matrix.
    eyeSpaceNormal = normalize(vec3(u_IT_MVMatrix * vec4(a_Normal, 0.0)));

    gl_Position = u_MVPMatrix * a_Position;

    lighting = getAmbientLighting();
    lighting += getDirectionalLighting();
    lighting += getPointLighting();
}

vec3 getAmbientLighting()
{
    return materialColor * 0.2;
}

vec3 getDirectionalLighting()
{
    return materialColor * max(dot(eyeSpaceNormal, u_VectorToLight), 0.0);
}

vec3 getPointLighting()
{
    vec3 lightingSum = vec3(0.0);

    for (int i = 0; i < 3; i++) {
        vec3 toPointLight = vec3(u_PointLightPositions[i]) - vec3(eyeSpacePosition);
        float distance = length(toPointLight);
        toPointLight = normalize(toPointLight);

        float cosine = max(dot(eyeSpaceNormal, toPointLight), 0.0);
        lightingSum += (materialColor * u_PointLightColors[i] * 5.0 * cosine)
                / distance;
    }

    return lightingSum;
}

So I think it has something to do with how I set up my positions:

//multiplyMM(mModelMatrix, 0, VMatrix, 0, mModelMatrix, 0);
//invertM(tempMatrix, 0, mModelMatrix, 0);
transposeM(it_modelViewMatrix, 0, VMatrix, 0);
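
My understanding is that u_IT_MVMatrix should hold the transpose of the inverse of the full model-view matrix (as the shader comment says), so it would normally be built roughly like this (a sketch reusing tempMatrix, VMatrix, mModelMatrix and it_modelViewMatrix from the snippet above, plus a new modelViewMatrix temporary; multiplyMM, invertM and transposeM are the android.opengl.Matrix helpers):

// Sketch: build the model-view matrix, invert it, then transpose the result.
final float[] modelViewMatrix = new float[16];
multiplyMM(modelViewMatrix, 0, VMatrix, 0, mModelMatrix, 0); // MV = V * M
invertM(tempMatrix, 0, modelViewMatrix, 0);                  // (MV)^-1
transposeM(it_modelViewMatrix, 0, tempMatrix, 0);            // ((MV)^-1)^T, uploaded as u_IT_MVMatrix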

Best Answer

In your code you actually have four lights; the fourth one is the light at u_LightPos.

I suggest you remove the diffuse variable (the fourth light) completely, along with all references to v_Color (since you also have a texture). Then you should start seeing only the lighting from your three street lamps.

P.S. For performance reasons, I would also move the lighting calculations into the vertex shader.

Regarding android - OpenGL ES 2.0 : Multiple light sources : Shader issue, a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/26213558/
