OpenGL ES vertex pointer problem
I'm new to OpenGL ES (and to OpenGL too) and I have a problem.
I have a strip struct:
struct Vertex2F
{
    GLfloat x;
    GLfloat y;
};

struct Vertex3F
{
    GLfloat x;
    GLfloat y;
    GLfloat z;
};

struct Color4UB
{
    GLubyte r;
    GLubyte g;
    GLubyte b;
    GLubyte a;
};

struct Vertex
{
    Vertex3F pos;
    Color4UB color;
    Vertex2F tex;
};

struct Strip
{
    Strip() {vertices = 0; count = 0;}
    Strip(int cnt);
    ~Strip();
    void allocate(int cnt);
    void draw();
    Vertex *vertices;
    int count;
};
And I want to render a GL_TRIANGLE_STRIP. Here is the code:
const int size = sizeof(Vertex);
long stripOffset = (long) &strip_;
int diff = offsetof(Vertex, pos); //diff = 0
glVertexPointer(3, GL_FLOAT, size, (void*)(stripOffset + diff));
It shows something strange after rendering with glDrawArrays(GL_TRIANGLE_STRIP, 0, 4),
if it shows anything at all. But this code works as expected:
GLfloat ar[4*3];
for (int i = 0; i < 4; ++i)
{
    ar[3*i + 0] = strip_.vertices[i].pos.x;
    ar[3*i + 1] = strip_.vertices[i].pos.y;
    ar[3*i + 2] = strip_.vertices[i].pos.z;
}
glVertexPointer(3, GL_FLOAT, 0, (void*)(ar));
Please explain to me what I am doing wrong in the first case.
strip_.vertices is a pointer, and I assume it is allocated dynamically. So the vertex data is not stored at the beginning of strip_ itself; it lives somewhere else, and strip_.vertices just points there. So just use
long stripOffset = (long) strip_.vertices;
instead of
long stripOffset = (long) &strip_;
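For reference, here is a minimal sketch of the full interleaved setup with strip_.vertices as the base pointer (a sketch assuming fixed-function OpenGL ES 1.1 with client-side arrays and no VBO bound; strip_.count is the field from your Strip struct):

const GLsizei stride = sizeof(Vertex);
// base of the heap-allocated vertex array, not the Strip object
const GLubyte *base = (const GLubyte *) strip_.vertices;

glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_COLOR_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);

// byte offsets of each member inside the interleaved Vertex struct
glVertexPointer(3, GL_FLOAT, stride, base + offsetof(Vertex, pos));
glColorPointer(4, GL_UNSIGNED_BYTE, stride, base + offsetof(Vertex, color));
glTexCoordPointer(2, GL_FLOAT, stride, base + offsetof(Vertex, tex));

glDrawArrays(GL_TRIANGLE_STRIP, 0, strip_.count);

With this, the offsetof arithmetic from your first snippet works, because the member offsets are added to the address of the actual vertex data rather than to the address of the Strip object.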