I am trying to read from an integer texture in my geometry shader, running on a GeForce 330m. It seems to return values other than what I load into the texture. This is (mainly) the code I use to create and load the texture:
// create the texture and allocate GL_RGBA32I storage
glGenTextures( 1, &textureId );
glBindTexture( GL_TEXTURE_2D, textureId );
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT );
glTexParameterf( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT );
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA32I, w, h, 0, GL_RGBA, GL_UNSIGNED_BYTE, 0);
glBindTexture( GL_TEXTURE_2D, 0);

// fill the whole level with zeros
glBindTexture( GL_TEXTURE_2D, textureId );
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA32I, w, h, 0, GL_RGBA, GL_INT, <some 0-initialized large enough buffer>);
glBindTexture( GL_TEXTURE_2D, 0);

// write a single texel at (0, 3); its w component is 400
glBindTexture( GL_TEXTURE_2D, textureId );
int data[] = { 3, 0, 36, 400 };
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 3, 1, 1, GL_RGBA, GL_INT, data);
glBindTexture( GL_TEXTURE_2D, 0);
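
For reference, a quick error check right after these uploads could look like this (a sketch; error handling is not part of my original snippet):

GLenum err;
while ((err = glGetError()) != GL_NO_ERROR) {
    /* prints any error raised by the texture calls above, e.g. GL_INVALID_ENUM or GL_INVALID_OPERATION */
    fprintf(stderr, "GL error after texture upload: 0x%x\n", err);
}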
The idea is that one pixel, representing a certain state, has a w value of 400. Then I use this in my geometry shader:
#version 330
#extension GL_EXT_gpu_shader4 : require
uniform isampler2D models;
void main()
{
    ivec4 modelstats4 = texelFetch2D(models, ivec2(0, 3), 0);
    if (modelstats4.w > height) {
        // ...
    }
}
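
For context, the texture is bound to the models sampler before drawing, roughly like this (a sketch; program and the texture unit are assumptions, since that code is not shown above):

glUseProgram(program);
glUniform1i(glGetUniformLocation(program, "models"), 0); /* sampler "models" reads texture unit 0 */
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, textureId);
/* ... draw call ... */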
The condition never behaves the way I expect (i.e. as if the branch were never taken): the texel I fetch does not seem to contain the values I uploaded. Am I creating or uploading the integer texture incorrectly, or is this a driver issue with how this GPU handles integer textures in OpenGL?