Hi there

I don't read all the messages here, but if the problem is still there - your uniform
for the sampler is initialized wrong.
You should use  state->addUniform(new osg::Uniform("lut", int(TEXTURE_UNIT)));
- notice the int cast there. Alternatively, you can create the uniform with an
explicit type specification of osg::Uniform::INT, or declare TEXTURE_UNIT as an int.
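
For example, any of these will give you an INT uniform (a quick sketch reusing the
"lut" name, state pointer and texture unit 1 from your code below; variable names
are just for illustration):

Code:

    // option 1: cast the unit to int so osg::Uniform deduces the INT type
    state->addUniform(new osg::Uniform("lut", int(TEXTURE_UNIT)));

    // option 2: create the uniform with an explicit INT type, then set the value
    osg::Uniform* lutUniform = new osg::Uniform(osg::Uniform::INT, "lut");
    lutUniform->set(int(TEXTURE_UNIT));
    state->addUniform(lutUniform);

    // option 3: declare the unit as int in the first place
    const int TEXTURE_UNIT = 1;
    state->addUniform(new osg::Uniform("lut", TEXTURE_UNIT));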
 
In your case osg detects the uniform type from the constructor argument, which is
unsigned, while OpenGL uses INT uniforms for samplers. So osg ends up setting the
sampler uniform through the OpenGL call that only works for unsigned uniforms, and
that is where your error comes from.
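
At the GL level the rule is that sampler uniforms may only be loaded through the
signed-int entry point; roughly speaking (just a sketch - "programId" here is a
placeholder for your linked program handle):

Code:

    GLint loc = glGetUniformLocation(programId, "lut");
    glUniform1i(loc, 1);     // OK: samplers must be set with the signed-int call
    // glUniform1ui(loc, 1); // any other call raises GL_INVALID_OPERATION, which
                             // is the "invalid operation" warning you see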

Cheers,
Sergey.

17.02.2012, 18:30, "Ethan Fahy" <ethanf...@gmail.com>:
> Thanks for the reply Alex.  My setup is exactly as you describe but I'm still 
> having troubles.  I've taken the essential bits of my code to create a sample 
> main() to illustrate exactly what I'm doing:
>
> Code:
>
> int main(int argc, char* argv[])
> {
>         //INITIAL SETUP
>         //create root
>         osg::ref_ptr<osg::Group> root = new osg::Group();
>         //load osg-dem generated ive terrain complex
>         osg::ref_ptr<osg::Node> terrainNode = osgDB::readNodeFile("terrain.ive");
>         //attach terrain node to root
>         root->addChild(terrainNode);
>         //create stateSet from terrainNode
>         osg::StateSet *state = terrainNode->getOrCreateStateSet();
>         //create program
>         osg::Program *program = new osg::Program;
>         //create shaders
>         osg::Shader *vertObj = new osg::Shader( osg::Shader::VERTEX );
>         osg::Shader *fragObj = new osg::Shader( osg::Shader::FRAGMENT );
>         //add shaders to program
>         program->addShader( fragObj );
>         program->addShader( vertObj );
>         //load shader src files into shaders
>         vertObj->loadShaderSourceFromFile( "shader.vert" );
>         fragObj->loadShaderSourceFromFile( "shader.frag" );
>
>         //CREATE LOOKUP TABLE AND IMAGE
>         //allocate memory for 4 by 4 lookup table
>         int height=4;
>         int width=4;
>         const int size = width*height*4;//*4 for rgba channels
>         unsigned char* data = (unsigned char*)calloc(size, sizeof(unsigned char));
>         //Store arbitrary value of unsigned char 101 in each rgba channel
>         //in a flattened 1D data array
>         int dataIndex;
>         for( int i=0 ; i < height ; i++ ){
>                 for( int j=0 ; j < width ; j++ ){
>                         dataIndex = i*width*4 + j*4;
>                         data[dataIndex] = 101;//red
>                         data[dataIndex+1] = 101;//green
>                         data[dataIndex+2] = 101;//blue
>                         data[dataIndex+3] = 101;//alpha
>                 }
>         }
>         //create image
>         osg::ref_ptr<osg::Image> image = new osg::Image;
>         image->setOrigin(osg::Image::BOTTOM_LEFT);
>         image->setImage(width, height, 1, GL_RGBA, GL_RGBA, GL_UNSIGNED_BYTE,
>                         (unsigned char*)data, osg::Image::NO_DELETE);
>
>         //create texture2D and add image to it
>         osg::ref_ptr<osg::Texture2D> lutTexture = new osg::Texture2D;
>         //unsure if the next two calls are needed or if they're inherited from
>         //the setImage call below
>         lutTexture->setTextureSize(width, height);
>         lutTexture->setInternalFormat(GL_RGBA);
>         lutTexture->setFilter(osg::Texture::MIN_FILTER, osg::Texture::NEAREST);
>         lutTexture->setFilter(osg::Texture::MAG_FILTER, osg::Texture::NEAREST);
>         lutTexture->setImage(image);
>
>         //assign texture to hardcoded texture unit 1 so that it can be
>         //accessed in the shader
>         const unsigned int TEXTURE_UNIT = 1;
>         state->setTextureAttributeAndModes(TEXTURE_UNIT, lutTexture,
>                                            osg::StateAttribute::ON);
>         state->addUniform(new osg::Uniform("lut", TEXTURE_UNIT));
>
>         //attach the program to the stateSet
>         state->setAttributeAndModes(program, osg::StateAttribute::ON);
>
>         //open an osgviewer to see the root
>         osgViewer::Viewer viewer;
>         viewer.setSceneData(root);
>         viewer.setLightingMode(osg::View::NO_LIGHT);
>         viewer.setCameraManipulator(new osgGA::TrackballManipulator);
>         viewer.home();
>         return viewer.run();
> }
>
> and here are the vert and frag shaders:
> shader.vert
>
> Code:
>
> void main(void)
> {
>     gl_TexCoord[0] = gl_MultiTexCoord0;
>     gl_TexCoord[1] = gl_MultiTexCoord1; //unsure if this is needed?
>     gl_FrontColor = gl_Color;
>     gl_Position = ftransform();
> }
>
> shader.frag
>
> Code:
>
> uniform sampler2D lut;
> uniform sampler2D baseTexture;
> void main(void)
> {
>         //get color of the terrain's texture
>         vec4 color = texture2D(baseTexture, gl_TexCoord[0].st);
>         //find index values from the data in the rgb channels of the terrain's
>         //texture (I left out the complicated equations since they aren't
>         //relevant; I've tested that their values are between 0 and 1)
>         float index1 = some indexing logic here;
>         float index2 = some more index logic here;
>         //Look up new color values from lookup texture based on indices
>         vec2 lutCoord = vec2(index1, index2);
>         gl_FragColor = texture2D(lut, lutCoord);
> }
>
> So, if I create an image with every rgba value set to unsigned char 101, I would
> expect any lookup into that texture to return rgba=(101,101,101,101) as long as
> both of my lutCoord components are between 0.0 and 1.0.  However, when I actually
> run this code the terrain texture is not all the same color and I get this error
> on the command line:
> "Warning: detected OpenGL error 'invalid operation' at After Renderer:compile"
> By adding/removing things from my frag shader I have seen that this error shows
> up whenever I try to use the values I get back from texture2D(lut, lutCoord) in
> any computational sense, e.g. if I assign that color to gl_FragColor.  If I do
> this instead I don't get errors:
>
> Code:
>
> vec4 test = texture2D(lut, lutCoord);
> gl_FragColor = color;
>
> Once I figure out why I'm getting this OpenGL error I will replace the lookup
> table numbers with meaningful values.  I will also use GL_LUMINANCE to store a
> single 32-bit float instead of filling the rgba channels with 4 unsigned chars,
> but for testing purposes I wanted to use something simple and easy to understand
> conceptually.
>
> So, I know this is a lot of code, but anyone have any thoughts on this?  I'm 
> hoping that I just made some small and obvious error...
>
> ------------------
> Read this topic online here:
> http://forum.openscenegraph.org/viewtopic.php?p=45537#45537
>
_______________________________________________
osg-users mailing list
osg-users@lists.openscenegraph.org
http://lists.openscenegraph.org/listinfo.cgi/osg-users-openscenegraph.org
