Cg Shader Semantics with OpenGL 3.x?
I used to have Cg shaders working with vertex arrays in OpenGL 2.x, but I've updated to use VBOs and VAOs in OpenGL 3.x, and now none of the semantics seem to work except POSITION. Cg doesn't report any compile errors, but if I set the output color in my fragment shader to the input normal value, I just get solid black. Another answer links to a page that says to use cgGLEnableClientState (which did nothing by itself) and cgGLSetParameterPointer (which seems redundant, since I'm already sending the data to OpenGL; why push another copy through Cg?). So what am I missing?
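For reference, the cgGLSetParameterPointer route that page describes would look roughly like the snippet below. This is just my reading of it, not code I'm actually using; the parameter name and byte offsets are assumptions based on my VertexData layout.

//Sketch of the Cg-managed attribute path suggested by that page (not used here).
//"IN.normal" and the offset 12 are assumptions matching my vertex layout.
CGparameter normalParam = cgGetNamedParameter(m_program, "IN.normal");
glBindBuffer(GL_ARRAY_BUFFER, m_vertexVBO);
cgGLSetParameterPointer(normalParam, 3, GL_FLOAT, sizeof(VertexData), (char*)12);
cgGLEnableClientState(normalParam);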
Vertex Shader:
struct input
{
    in uniform float4x4 worldViewProjMatrix;
    in uniform float4x4 invTransWorldMatrix;
    in uniform float4x4 worldMatrix;
    in uniform float3 lightDir;
    in uniform float3 eyePosition;
    in varying float4 position : POSITION;
    in varying float4 normal : NORMAL;
    in varying float2 texCoord : TEXCOORD;
};

struct output
{
    out varying float4 position : POSITION;
    out varying float2 texCoord : TEXCOORD0;
    out varying float3 light : TEXCOORD1;
    out varying float3 normal : TEXCOORD2;
    out varying float3 view : TEXCOORD3;
};

output main(input IN)
{
    output OUT = output(0);
    OUT.position = mul(IN.worldViewProjMatrix, IN.position);
    OUT.texCoord = IN.texCoord;
    OUT.light = IN.lightDir;
    float3 worldPosition = normalize(mul(IN.worldMatrix, IN.position)).xyz;
    OUT.view = IN.eyePosition - worldPosition;
    OUT.normal = normalize(mul(IN.invTransWorldMatrix, IN.normal)).xyz;
    return OUT;
}
Fragment Shader:
struct input
{
    in varying float2 texCoord : TEXCOORD0;
    in varying float3 light : TEXCOORD1;
    in varying float3 normal : TEXCOORD2;
    in varying float3 view : TEXCOORD3;
    in uniform float3 diffuse;
    in uniform float3 ambient;
    in uniform float3 specular;
    in uniform float shininess;
    in uniform sampler2D colorMapSampler;
};

float4 main(input IN) : COLOR
{
    float4 color = tex2D(IN.colorMapSampler, IN.texCoord);
    float3 normal = normalize(IN.normal);
    float3 lightDir = normalize(IN.light);
    float3 viewDir = normalize(IN.view);
    float3 diff = saturate(dot(normal, lightDir));
    float3 reflect = normalize(2 * diff * normal - lightDir);
    float3 specular = pow(saturate(dot(reflect, viewDir)), IN.shininess);
    float4 result;
    //result = float4(color.rgb * (IN.ambient + IN.diffuse * diff) + IN.specular * specular, 1.0f);
    result = float4(IN.normal, 1.0f);
    return result;
}
I found a place that listed these as the indices for glVertexAttribPointer, but they could easily be wrong (these are the Shader::POSITION, Shader::NORMAL, etc. used in the VBO setup function):
enum GenericVertexInputIndices
{
    POSITION = 0,
    BLENDWEIGHT = 1,
    NORMAL = 2,
    DIFFUSE = 3, COLOR0 = 3,
    SPECULAR = 4, COLOR1 = 4,
    TESSFACTOR = 5, FOGCOORD = 5,
    PSIZE = 6,
    BLENDINDICES = 7,
    TEXCOORD0 = 8,
    TEXCOORD1 = 9,
    TEXCOORD2 = 10,
    TEXCOORD3 = 11,
    TEXCOORD4 = 12,
    TEXCOORD5 = 13,
    TEXCOORD6 = 14, TANGENT = 14,
    TEXCOORD7 = 15, BINORMAL = 15,
};
VBO setup function below:
void MeshObject::initVBO(const unsigned int& _indexVBO, unsigned int& _indexOffset)
{
    glGenVertexArrays(1, &m_vao);
    glBindVertexArray(m_vao);

    //sub in this section of the index data
    m_indexOffset = _indexOffset;
    _indexOffset = _indexOffset + m_indices.size();
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _indexVBO);
    glBufferSubData(GL_ELEMENT_ARRAY_BUFFER, m_indexOffset * sizeof(unsigned short), m_indices.size() * sizeof(unsigned short), &(m_indices[0]));

    //init vertex data
    glGenBuffers(1, &m_vertexVBO);
    glBindBuffer(GL_ARRAY_BUFFER, m_vertexVBO);
    {
        glBufferData(GL_ARRAY_BUFFER, m_data.size() * sizeof(VertexData), &(m_data[0]), GL_STATIC_DRAW);

        glEnableVertexAttribArray(Shader::POSITION);
        glVertexAttribPointer(Shader::POSITION, 3, GL_FLOAT, GL_FALSE, sizeof(VertexData), (char*)0);

        glEnableVertexAttribArray(Shader::NORMAL);
        glVertexAttribPointer(Shader::NORMAL, 3, GL_FLOAT, GL_FALSE, sizeof(VertexData), (char*)12);

        glEnableVertexAttribArray(Shader::TEXCOORD0);
        glVertexAttribPointer(Shader::TEXCOORD0, 2, GL_FLOAT, GL_FALSE, sizeof(VertexData), (char*)24);
    }
}
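The draw call isn't shown above, but for completeness this is roughly how the VAO and index offset get used at render time (a sketch; MeshObject::render is an assumed name, and the real code may differ):

//Sketch of how the VAO and shared index VBO above are consumed when drawing.
//The element array binding was captured by the VAO in initVBO.
void MeshObject::render()
{
    glBindVertexArray(m_vao);
    glDrawElements(GL_TRIANGLES,
                   static_cast<GLsizei>(m_indices.size()),
                   GL_UNSIGNED_SHORT,
                   reinterpret_cast<const void*>(m_indexOffset * sizeof(unsigned short)));
    glBindVertexArray(0);
}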
Shader bind function below:
void Shader::bind(const matrix4 &_worldTransform, const Material::MaterialInfo &_info)
{
    CGerror error;

    //bind to the shader
    CGprofile profile = renderGlobals.shaderMgr.getProfile(static_cast<Shader::ShaderType>(m_shaderType));
    cgGLEnableProfile(profile);
    error = cgGetError();
    cgGLBindProgram(m_program);
    error = cgGetError();

    switch (m_shaderType)
    {
        case VERTEX:
        {
            //get vertex parameters
            CGparameter worldMatrix = cgGetNamedParameter(m_program, "IN.worldMatrix");
            CGparameter worldViewProjMatrix = cgGetNamedParameter(m_program, "IN.worldViewProjMatrix");
            CGparameter invTransWorldMatrix = cgGetNamedParameter(m_program, "IN.invTransWorldMatrix");
            CGparameter light = cgGetNamedParameter(m_program, "IN.lightDir");
            CGparameter eyePosition = cgGetNamedParameter(m_program, "IN.eyePosition");
            error = cgGetError();

            //set vertex parameters
            matrix4 worldViewProj = *(renderGlobals.debugCamera.getViewProjectionMatrix()) * _worldTransform;
            cgGLSetMatrixParameterfc(worldViewProjMatrix, worldViewProj.m16);
            matrix4 invTransWorld = _worldTransform.getInverse().getTranspose();
            if (invTransWorldMatrix != NULL)
            {
                cgGLSetMatrixParameterfc(invTransWorldMatrix, invTransWorld.m16);
            }
            if (worldMatrix != NULL)
            {
                cgGLSetMatrixParameterfc(worldMatrix, _worldTransform.m16);
            }

            vector3 lightPos = *renderGlobals.debugCamera.getPosition();
            //vector3 lightPos = vector3(0.0f, 0.0f, 0.0f);
            vector3 lightDir = lightPos - _worldTransform.wAxis;
            if (light != NULL)
            {
                cgGLSetParameter3fv(light, lightDir.v);
            }
            if (eyePosition != NULL)
            {
                cgGLSetParameter3fv(eyePosition, renderGlobals.debugCamera.getPosition()->v);
            }
            error = cgGetError();
            break;
        }
        case FRAGMENT:
        {
            //set up material info
            CGparameter diffuse = cgGetNamedParameter(m_program, "IN.diffuse");
            CGparameter ambient = cgGetNamedParameter(m_program, "IN.ambient");
            CGparameter specular = cgGetNamedParameter(m_program, "IN.specular");
            CGparameter shininess = cgGetNamedParameter(m_program, "IN.shininess");
            if (diffuse != NULL)
            {
                cgGLSetParameter3fv(diffuse, _info.diffuse.rgb);
            }
            if (ambient != NULL)
            {
                cgGLSetParameter3fv(ambient, _info.ambient.rgb);
            }
            if (specular != NULL)
            {
                cgGLSetParameter3fv(specular, _info.specular.rgb);
            }
            if (shininess != NULL)
            {
                cgGLSetParameter1f(shininess, _info.shininess);
            }

            //set up textures
            CGparameter colorMapSampler = cgGetNamedParameter(m_program, "IN.colorMapSampler");
            if (colorMapSampler != NULL)
            {
                if (_info.textureInfo[0].size() > 0)
                {
                    Index<Texture> texture = _info.textureInfo[0][0].texture;
                    cgGLSetTextureParameter(colorMapSampler, texture->getID());
                    cgGLEnableTextureParameter(colorMapSampler);
                }
                else
                {
                    cgGLDisableTextureParameter(colorMapSampler);
                }
            }
            break;
        }
        default:
        {
            //ERROR: trying to bind a shader with an unknown type
            assert(0);
            unbind();
            return;
        }
    }
}
I changed the semantics in the vertex input structure to ATTR*, matching the indices in GenericVertexInputIndices, and voilà, it works. I had previously tried changing all the semantics to ATTR* in both the vertex and fragment shaders and got a bunch of domain-conflict errors, but I didn't notice that Cg never complained about the ones in the vertex input structure. Apparently ATTR* semantics are only valid on vertex inputs. Another small detail that totally screws everything up.
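In other words, the vertex input struct ends up looking roughly like this (a sketch; the ATTR numbers simply mirror the GenericVertexInputIndices values above, and the uniforms are unchanged):

//Vertex input struct with ATTR* semantics matching the generic attribute indices
//(POSITION = 0, NORMAL = 2, TEXCOORD0 = 8).
struct input
{
    in uniform float4x4 worldViewProjMatrix;
    in uniform float4x4 invTransWorldMatrix;
    in uniform float4x4 worldMatrix;
    in uniform float3 lightDir;
    in uniform float3 eyePosition;
    in varying float4 position : ATTR0;
    in varying float4 normal : ATTR2;
    in varying float2 texCoord : ATTR8;
};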