gpt4 book ai didi

java - 如何将UV纹理从 blender 加载到opengl android java

转载 作者:太空宇宙 更新时间:2023-11-04 14:12:24 31 4
gpt4 key购买 nike

呃呃呃

大家好,我尝试加载我在 Blender 中制作的带 UV 纹理的模型。在 Blender 里一切正常:网格加载正常,材质也加载正常。但纹理显示有问题,我不知道哪里出错了,因为我对 3D 纹理还不熟悉(之前用 OpenGL 一直只画 2D 四边形)。

这是我的简单立方体的 obj 文件

   # Blender v2.72 (sub 0) OBJ File: ''
# www.blender.org
mtllib ncube.mtl
o Cube
v 1.000000 -1.000000 -1.000000
v 1.000000 -1.000000 1.000000
v -1.000000 -1.000000 1.000000
v -1.000000 -1.000000 -1.000000
v 1.000000 1.000000 -0.999999
v 0.999999 1.000000 1.000001
v -1.000000 1.000000 1.000000
v -1.000000 1.000000 -1.000000
vt 0.000100 0.250000
vt 0.250000 0.250000
vt 0.250000 0.499900
vt 0.499900 0.499900
vt 0.499900 0.250000
vt 0.749800 0.250000
vt 0.250000 0.750000
vt 0.250000 0.999900
vt 0.000100 0.999900
vt 0.250000 0.000100
vt 0.499900 0.000100
vt 0.250000 0.749800
vt 0.000100 0.499900
vt 0.749800 0.499900
vt 0.000100 0.750000
vt 0.499900 0.749800
usemtl Material
s off
f 2/1 3/2 4/3
f 8/4 7/5 6/6
f 1/7 5/8 6/9
f 2/10 6/11 7/5
f 7/5 8/4 4/3
f 1/12 4/3 8/4
f 1/13 2/1 4/3
f 5/14 8/4 6/6
f 2/15 1/7 6/9
f 3/2 2/10 7/5
f 3/2 7/5 4/3
f 5/16 1/12 8/4

对于 OpenGL,我做了一个修正:因为 OBJ 的面(face)索引是从 1 开始的,解析时需要 f = f - 1,否则得不到正确的立方体。没有纹理时的面是这样的:f 2 3 4;修正后变成正确的:f 1 2 3。

但是当我导出带纹理的 obj 文件时,面变成了带斜线的形式,比如 1/13 2/1 4/3,我不知道斜线后面的那些索引是做什么用的。

这是我的加载图的类

/**
 * Renders a textured mesh loaded from a Wavefront OBJ file via {@code OBJLoader},
 * using the OpenGL ES 1.x fixed-function pipeline.
 *
 * <p>The constructor converts the loader's arrays into direct NIO buffers
 * (vertex positions, Kd/Ks material colors, UV coordinates, triangle indices);
 * {@link #draw} issues the indexed draw call; {@link #loadTexture} uploads a
 * bitmap resource as the 2D texture.
 */
public class TestUV {
    private FloatBuffer mVertexBuffer;   // xyz vertex positions (3 floats per vertex)
    public FloatBuffer mColorBuffer;     // diffuse (Kd) colors fed to glColorPointer (4 floats per vertex)
    public FloatBuffer mColorBufferKs;   // specular (Ks) colors used as material/light specular
    public FloatBuffer textureBuffer;    // uv texture coordinates (2 floats per vertex)
    private ShortBuffer mIndexBuffer;    // triangle indices for glDrawElements
    private short indicesF[];
    private float vertices[];
    private float textureCoords[];
    private float matcolors[];
    private float matcolorsKs[];
    OBJLoader loader;
    public int tempo[];
    public float tempFloat[];

    /**
     * Loads the OBJ model named {@code filename} and builds the direct,
     * native-order NIO buffers required by the GL client-state pointers.
     *
     * @param context  Android context handed to the OBJ loader for asset access
     * @param filename name of the OBJ file to load
     */
    public TestUV(Context context, String filename) {
        loader = new OBJLoader(context);
        loader.loadVertAndInd(filename);
        tempFloat = loader.indices;

        // The loader exposes indices as floats; glDrawElements needs shorts.
        short indices[] = new short[tempFloat.length];
        for (int i = 0; i < tempFloat.length; i++) {
            indices[i] = (short) tempFloat[i];
        }

        indicesF = indices;
        vertices = loader.vertices;

        matcolors = loader.materialsKd;
        matcolorsKs = loader.materialsKs;
        textureCoords = loader.Vtextures;
        String temp = "" + matcolors.length;
        String temp1 = "" + matcolorsKs.length;
        Log.e("", "LENGTH OF MATCOmatcolors.length " + temp + " KS = " + temp1);

        // Reuse the helper instead of duplicating the buffer-building boilerplate.
        mVertexBuffer = makeFloatBuffer(vertices);
        mColorBuffer = makeFloatBuffer(matcolors);
        mColorBufferKs = makeFloatBuffer(matcolorsKs);
        textureBuffer = makeFloatBuffer(textureCoords);

        // FIX: a short is 2 bytes, not 4 — the original allocated twice the
        // required capacity for the index buffer.
        ByteBuffer ibb = ByteBuffer.allocateDirect(indices.length * 2);
        ibb.order(ByteOrder.nativeOrder());
        mIndexBuffer = ibb.asShortBuffer();
        mIndexBuffer.put(indices);
        mIndexBuffer.position(0);
    }

    // World-space position of the object; applied via glTranslatef in draw().
    public float xPos = 0;
    public float yPos = 0;
    public float zPos = 0;

    public void setX(float x) { xPos = x; }

    public void setY(float y) { yPos = y; }

    /** Sets all three position components at once. */
    public void setXYZ(float x, float y, float z) {
        xPos = x;
        yPos = y;
        zPos = z;
    }

    /** WORK WITH LIGHT **/
    //**********************************************************/
    //float[] pos = {0.0f, 0.0f, -3.0f, 1.0f}; //2
    float lightX = 0;
    float lightY = 0;
    float[] red = {1.0f, 0.0f, 1.0f, 1.0f};
    float[] lightpos = {0, 0, -1, 1};
    float[] lightpos1 = {0, 0, 0, 0};
    float[] lightdir = {-0.5f, -0.6f, -0.7f};
    float[] lightdir1 = {0.5f, 0.6f, 0.7f};
    public final static int SS_SUNLIGHT = GL10.GL_LIGHT0;
    public final static int SS_SUNLIGHT1 = GL10.GL_LIGHT1;
    public final static int SS_SUNLIGHT2 = GL10.GL_LIGHT2;

    /**
     * Wraps a float array in a direct, native-order {@link FloatBuffer}
     * positioned at 0, as required by the GL pointer functions.
     *
     * @param p the values to copy into the buffer
     * @return a direct FloatBuffer containing {@code p}
     */
    public FloatBuffer makeFloatBuffer(float p[]) {
        ByteBuffer vbb = ByteBuffer.allocateDirect(p.length * 4);
        vbb.order(ByteOrder.nativeOrder());
        FloatBuffer fb = vbb.asFloatBuffer();
        fb.put(p);
        fb.position(0);
        return fb;
    }

    /**
     * Draws the object with the fixed-function pipeline.
     *
     * @param gl the GL context
     * @param xR rotation angle (degrees) around the y-axis
     * @param yR unused; kept for interface compatibility with existing callers
     */
    public void draw(GL10 gl, float xR, float yR) {
        gl.glMatrixMode(GL10.GL_MODELVIEW);
        gl.glLoadIdentity();

        gl.glFrontFace(GL10.GL_CW);

        gl.glVertexPointer(3, GL10.GL_FLOAT, 0, mVertexBuffer);
        gl.glColorPointer(4, GL10.GL_FLOAT, 0, mColorBuffer);

        gl.glTranslatef(xPos, yPos, zPos);
        gl.glRotatef(xR, 0, 1, 0); // rotate around y-axis

        gl.glMaterialfv(GL10.GL_FRONT_AND_BACK, GL10.GL_SPECULAR, mColorBufferKs);
        gl.glLightfv(GL10.GL_LIGHT1, GL10.GL_SPECULAR, mColorBufferKs);
        gl.glEnable(GL10.GL_LIGHTING);

        // FIX: texturing must be enabled and the texture bound BEFORE the draw
        // call — the original enabled GL_TEXTURE_2D after glDrawElements, so the
        // geometry was drawn untextured.
        gl.glEnable(GL10.GL_TEXTURE_2D);
        gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);
        // FIX: glTexParameterf affects the currently bound texture object, so
        // the filters are set after binding (the original set them first).
        gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER,
                GL10.GL_NEAREST);
        gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER,
                GL10.GL_NEAREST);

        gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glEnableClientState(GL10.GL_COLOR_ARRAY);
        gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);

        gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, textureBuffer);

        gl.glDrawElements(GL10.GL_TRIANGLES, indicesF.length,
                GL10.GL_UNSIGNED_SHORT, mIndexBuffer);

        gl.glDisableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
        gl.glDisableClientState(GL10.GL_VERTEX_ARRAY);
        gl.glDisableClientState(GL10.GL_COLOR_ARRAY);
    }

    private int[] textures = new int[1];

    /**
     * Decodes the drawable resource {@code count} and uploads it as the 2D
     * texture used by {@link #draw}.
     *
     * @param gl      the GL context
     * @param context Android context for resource access
     * @param count   resource id of the bitmap to decode
     */
    public void loadTexture(GL10 gl, Context context, int count) {
        Bitmap bitmap = BitmapFactory.decodeResource(context.getResources(), count);

        // Generate one texture name and bind it so the parameter/upload calls
        // below target it.
        gl.glGenTextures(1, textures, 0);
        gl.glBindTexture(GL10.GL_TEXTURE_2D, textures[0]);

        gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_NEAREST);
        gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);

        // Use Android GLUtils to upload the bitmap as level 0 of the texture.
        GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bitmap, 0);

        // The GPU now owns a copy; free the CPU-side bitmap.
        bitmap.recycle();
    }
}

代码非常糟糕并且没有清理,对此表示抱歉。
纹理enter image description here并截屏它在设备上的外观 enter image description here

请帮助别人:(我不知道该怎么办我的解析器工作正确。只是无法理解出了什么问题。

最佳答案

对于我认为的提问者来说为时已晚,但对于任何遇到这个问题的人来说都太晚了。你说:

But when i export obj file with texture than faces coming with slash 1/13 2/1 4/3 i dont know for what need other faces

第二个值(斜线之后)是纹理的索引。它们的顺序与顶点索引的顺序不同。

我没有看到 loadVertAndInd() 的实现,但猜测这就是问题所在。您需要组织纹理 uv 坐标的顺序以匹配顶点的索引。一种方法是创建一个顶点缓冲区大小的纹理缓冲区,使用纹理索引从纹理坐标中进行采样,然后使用顶点索引将它们放置在纹理缓冲区中。

关于java - 如何将UV纹理从 blender 加载到opengl android java,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/28161227/

31 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com