开发者

Trouble displaying texture in OpenGL ES with C++

I'm trying to write a small, portable 2D engine for iOS to learn C++ and OpenGL. Right now I'm trying to display a texture that I've loaded in. I've been successful displaying a texture when loading it in with the CoreGraphics libraries, but now I'm trying to load the file in C++ with fread and libpng, and all I see is a white box.

My texture is 64x64 so it's not a power of 2 problem. Also I have enabled GL_TEXTURE_2D.

The first block of code is used to load the png, convert the png to image data, and load the data into OpenGL.

void AssetManager::loadImage(const std::string &filename)
{
    // Loads `filename` (a PNG) from the game's base path, decodes it with
    // libpng into an 8-bit-per-channel buffer, uploads it as a GL texture,
    // and records it in `textures`. Returns silently on any failure,
    // releasing everything acquired up to that point (the original leaked
    // the FILE* and libpng structs on every early return).
    Texture texture(filename);
    png_structp png_ptr = NULL;
    png_infop info_ptr = NULL;
    png_bytep *row_pointers = NULL;
    GLubyte *pixels = NULL;
    int bitDepth, colourType;

    FILE *pngFile = fopen(std::string(Game::environmentData.basePath + "/" + filename).c_str(), "rb");
    if(!pngFile)
        return;

    // Verify the 8-byte PNG signature before handing the stream to libpng.
    png_byte sig[8];
    fread(sig, sizeof(png_byte), 8, pngFile); // size/count order fixed (was swapped; harmless for 8x1 but wrong)
    rewind(pngFile); // libpng re-reads the signature during png_init_io
    if(!png_check_sig(sig, 8))
    {
        fclose(pngFile);
        return;
    }

    png_ptr = png_create_read_struct(PNG_LIBPNG_VER_STRING, NULL, NULL, NULL);
    if(!png_ptr)
    {
        fclose(pngFile);
        return;
    }

    info_ptr = png_create_info_struct(png_ptr);
    if(!info_ptr)
    {
        png_destroy_read_struct(&png_ptr, NULL, NULL);
        fclose(pngFile);
        return;
    }

    // libpng reports decode errors via longjmp: execution lands here,
    // where everything allocated so far can be released safely.
    if(setjmp(png_jmpbuf(png_ptr)))
    {
        png_destroy_read_struct(&png_ptr, &info_ptr, NULL);
        fclose(pngFile);
        free(row_pointers);
        free(pixels);
        return;
    }

    png_init_io(png_ptr, pngFile);
    png_read_info(png_ptr, info_ptr);

    bitDepth = png_get_bit_depth(png_ptr, info_ptr);
    colourType = png_get_color_type(png_ptr, info_ptr);

    // Normalise every input variant to 8-bit RGB(A)/grey(A) samples.
    if(colourType == PNG_COLOR_TYPE_PALETTE)
        png_set_palette_to_rgb(png_ptr);

    /*if(colourType == PNG_COLOR_TYPE_GRAY && bitDepth < 8)
        png_set_gray_1_2_4_to_8(png_ptr);*/

    if(png_get_valid(png_ptr, info_ptr, PNG_INFO_tRNS))
        png_set_tRNS_to_alpha(png_ptr); // expand colour-key transparency into a real alpha channel

    if(bitDepth == 16)
        png_set_strip_16(png_ptr);
    else if(bitDepth < 8)
        png_set_packing(png_ptr);

    png_read_update_info(png_ptr, info_ptr);

    // Re-query dimensions/format after the transforms above took effect.
    png_get_IHDR(png_ptr, info_ptr, &texture.width, &texture.height,
                 &bitDepth, &colourType, NULL, NULL, NULL);

    int components = GetTextureInfo(colourType);
    if(components == -1)
    {
        png_destroy_read_struct(&png_ptr, &info_ptr, NULL);
        fclose(pngFile);
        return;
    }

    pixels = (GLubyte *)malloc(sizeof(GLubyte) * (texture.width * texture.height * components));
    row_pointers = (png_bytep *)malloc(sizeof(png_bytep) * texture.height);

    // Point each PNG row at its offset inside the flat pixel buffer so
    // png_read_image fills the buffer top-to-bottom in one pass.
    for(int i = 0; i < texture.height; ++i)
        row_pointers[i] = (png_bytep)(pixels + (i * texture.width * components));

    png_read_image(png_ptr, row_pointers);
    png_read_end(png_ptr, NULL);

    // Create and bind the GL texture object.
    glGenTextures(1, &texture.name);
    glBindTexture(GL_TEXTURE_2D, texture.name);

    GLuint glcolours = GL_RGBA; // defensive default; components is 1..4 at this point
    switch (components) {
        case 1:
            glcolours = GL_LUMINANCE;
            break;
        case 2:
            glcolours = GL_LUMINANCE_ALPHA;
            break;
        case 3:
            glcolours = GL_RGB;
            break;
        case 4:
            glcolours = GL_RGBA;
            break;
    }

    // OpenGL ES 1.1 requires internalFormat to be one of the symbolic
    // formats AND to equal `format`; passing the numeric component count
    // raises GL_INVALID_OPERATION and leaves the texture white.
    glTexImage2D(GL_TEXTURE_2D, 0, glcolours, texture.width, texture.height, 0, glcolours, GL_UNSIGNED_BYTE, pixels);

    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    // The original set LINEAR and then immediately overrode it with
    // NEAREST; only the effective (NEAREST) setting is kept.
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);

    png_destroy_read_struct(&png_ptr, &info_ptr, NULL);
    fclose(pngFile);
    free(row_pointers);
    free(pixels);

    textures.push_back(texture);
}

Here is the code for my Texture class:

// A texture record: the GL texture-object name plus the image dimensions
// and the file it was loaded from. `name` stays 0 until the texture has
// been created with glGenTextures (0 is never a valid GL texture name).
class Texture
{
public:

    // All numeric members are zero-initialized; the original left them
    // uninitialized, so reading width/height before the loader filled
    // them in was undefined behavior.
    explicit Texture(const std::string &filename)
        : name(0), size(0), width(0), height(0), filename(filename) {}

    unsigned int name;   // GL texture object name (0 = not yet created)
    unsigned int size;

    unsigned int width;  // in pixels, filled in by the loader
    unsigned int height; // in pixels, filled in by the loader

    std::string filename;
};

Here is how I setup my view:

// Configures the viewport, projection matrix and fixed-function state for
// rendering. With Orthographic, screen units map 1:1 to the view rect;
// otherwise a perspective frustum is derived from kFieldOfView.
void Renderer::Setup(Rectangle rect, CameraType cameraType)
{
_viewRect = rect;

glViewport(0,0,_viewRect.width,_viewRect.height);

if(cameraType == Renderer::Orthographic)
{

    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    // Maps world coordinates directly to view-rect coordinates.
    glOrthof(_viewRect.x,_viewRect.width,_viewRect.y,_viewRect.height,kZNear,kZFar);
    glMatrixMode(GL_MODELVIEW);
}
else
{
    // Half-height of the near plane for the requested vertical FOV.
    GLfloat size = kZNear * tanf(DegreesToRadians(kFieldOfView) / 2.0); 

    glEnable(GL_DEPTH_TEST);

    glMatrixMode(GL_PROJECTION); 
    glLoadIdentity();
    // NOTE(review): if Rectangle stores width/height as integers, the
    // aspect ratio `_viewRect.width / _viewRect.height` truncates
    // (e.g. 480/320 -> 1) — confirm the field types.
    glFrustumf(-size, size, -size / (_viewRect.width / _viewRect.height), size / (_viewRect.width / _viewRect.height), kZNear, kZFar); 
    glMatrixMode(GL_MODELVIEW);
}

glEnable(GL_TEXTURE_2D);
// NOTE(review): (GL_ONE, GL_SRC_COLOR) is not standard alpha blending;
// for PNG alpha you likely want (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
// — confirm this blend mode is intentional.
glBlendFunc(GL_ONE, GL_SRC_COLOR);
glEnable(GL_BLEND); 
}

Now here is where I draw the texture:

void Renderer::DrawTexture(int x, int y, Texture &texture)
{   
GLfloat vertices[] = {
    x,  _viewRect.height - y, 0.0,
    x,  _viewRect.height - (y + texture.height), 0.0,
    x + texture.width, _viewRect.height - y, 0.0,
    x + texture.width, _viewRect.height - (y + texture.height), 0.0
};


static const GLfloat normals[] = {
    0.0, 0.0, 1.0,
    0.0, 0.0, 1.0,
    0.0, 0.0, 1.0,
    0.0, 0.0, 1.0
};

GLfloat texCoords[] = {
 0.0, 1.0,
 0.0, 0.0,
 1.0, 1.0,      
 1.0, 0.0
 };


glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_NORMAL_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);

glLoadIdentity();
glTranslatef(0.0, 0.0, -3.0);

glBindTexture(GL_TEXTURE_2D, texture.name);

glVertexPointer(3, GL_FLOAT, 0, vertices);
glNormalPointer(GL_FLOAT, 0, normals);
glTexCoordPointer(2, GL_FLOAT, 0, texCoords);
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_NORMAL_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
}

UPDATE:

I changed the function I use to generate a texture. It now creates a random test texture. I still see a white texture no matter what. Going to keep digging into how I'm rendering.

Here's the new function:

void AssetManager::CreateNoisyTexture(const char * key)
{    
Texture texture(key, 64, 64);
const unsigned int components = 4;


GLubyte *pixels = (GLubyte *)malloc(sizeof(GLubyte) * (texture.width * texture.height * components));
GLubyte *pitr1 = pixels;    
GLubyte *pitr2 = pixels + (texture.width * texture.height * components);

while (pitr1 != pitr2) {

    *pitr1 = rand() * 0xFF;
    *(pitr1 + 1) = rand() * 0xFF;
    *(pitr1 + 2) = rand() * 0xFF;
    *(pitr1 + 3)  = 0xFF;

    pitr1 += 4;
}

glGenTextures(1, &texture.name);    
glBindTexture(GL_TEXTURE_2D, texture.name); 
glTexImage2D(GL_TEXTURE_2D, 0, components, texture.width, texture.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels);

glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT); 
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);

free(pixels);

printf("Created texture with key: %s  name: %d", texture.key, texture.name);

textures.push_back(texture);
}


Ok I figured it out. The problem was that I was using the wrong internal pixel format.

glTexImage2D(GL_TEXTURE_2D, 0, 4, texture.width, texture.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels);

Should be:

glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, texture.width, texture.height, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels);

I needed to pass in GL_RGBA instead of 4. In the docs I read this:

internalFormat - Specifies the number of color components in the texture. Must be 1, 2, 3, or 4, or one of the following symbolic constants:

I figured it didn't matter but I guess it does. One other thing to note, I figured this out by using glGetError() to figure out where the error was occurring. When I call glTexImage2D() the error was GL_INVALID_OPERATION.

Thanks for your help IronMensan!

UPDATE:

So the reason why I couldn't send 4 is because it is not allowed and internalFormat and format need to be the same in the OpenGL ES 1.1 spec. You can only send GL_ALPHA, GL_RGB, GL_RGBA, GL_LUMINANCE, or GL_LUMINANCE_ALPHA. Lesson learned.

0

上一篇:

下一篇:

精彩评论

暂无评论...
验证码 换一张
取 消

最新问答

问答排行榜