I render a normal image and a depth image of a scene. Then I want to reuse these two images for further texture/image processing in a second fragment shader. I use a framebuffer with 3 textures attached to it: 2 for the normal and depth textures, and one that is supposed to contain the final processed image. The problem is that I can't get the first two images into the second fragment shader to use them as texture samplers.
Here is my code. First I create an FBO and attach 3 textures to it:
// create FBO
glGenFramebuffers(1, &Framebuffer);
glBindFramebuffer(GL_FRAMEBUFFER, Framebuffer);

// color attachment 0: the normal image
glGenTextures(1, &renderedNormalTexture);
// glBindTexture, glTexImage2D, glTexParameteri ... left out for clarity
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, renderedNormalTexture, 0);

// color attachment 1: the depth image (stored as a grayscale color)
glGenTextures(1, &renderedDepthTexture);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT1, GL_TEXTURE_2D, renderedDepthTexture, 0);

// color attachment 2: the final processed image
glGenTextures(1, &edgeTexture);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT2, GL_TEXTURE_2D, edgeTexture, 0);

// enable all three attachments as draw targets
GLenum ndbuffers[3];
ndbuffers[0] = GL_COLOR_ATTACHMENT0;
ndbuffers[1] = GL_COLOR_ATTACHMENT1;
ndbuffers[2] = GL_COLOR_ATTACHMENT2;
glDrawBuffers(3, ndbuffers);
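For reference, the omitted per-texture setup follows the usual pattern, sketched here for one texture (GL_RGB, GL_UNSIGNED_BYTE and nearest filtering are assumptions, not necessarily my exact parameters); checking completeness after attaching everything also seems sensible:

// sketch of the omitted setup, shown for renderedNormalTexture
glBindTexture(GL_TEXTURE_2D, renderedNormalTexture);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, width, height, 0, GL_RGB, GL_UNSIGNED_BYTE, 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);

// sanity check after all three attachments are in place
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE)
    fprintf(stderr, "FBO is not complete\n");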
Fragment shader 1: This is the first fragment shader, which writes the normal and depth images to output locations 0 and 1:
#version 330 core

in vec3 position_worldspace;
in vec3 normal_cameraspace;
in vec4 vpos_to_fragment;

uniform float zmin;
uniform float zmax;

layout(location = 0) out vec3 normalcolor;
layout(location = 1) out vec3 depthcolor;

void main() {
    // pack the camera-space normal from [-1,1] into [0,1]
    normalcolor = normalize(normal_cameraspace) * 0.5 + 0.5; // normal out

    // divide by w, then map the depth linearly to [0,1] via zmin/zmax
    vec4 v = vpos_to_fragment / vpos_to_fragment.w;
    float gray = (-v.z - zmin) / (zmax - zmin);
    depthcolor = vec3(gray); // depth out
}
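For context, the inputs come from a vertex shader along these lines (a sketch; the matrix uniform names M, V, MVP are assumptions, and vpos_to_fragment is meant to be the camera-space position):

#version 330 core

layout(location = 0) in vec3 vertexPosition_modelspace;
layout(location = 1) in vec3 vertexNormal_modelspace;

uniform mat4 MVP; // assumed uniform names
uniform mat4 V;
uniform mat4 M;

out vec3 position_worldspace;
out vec3 normal_cameraspace;
out vec4 vpos_to_fragment;

void main() {
    gl_Position = MVP * vec4(vertexPosition_modelspace, 1.0);
    position_worldspace = (M * vec4(vertexPosition_modelspace, 1.0)).xyz;
    normal_cameraspace = (V * M * vec4(vertexNormal_modelspace, 0.0)).xyz;
    vpos_to_fragment = V * M * vec4(vertexPosition_modelspace, 1.0); // camera space, w stays 1
}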
Fragment shader 2: And the second fragment shader, which is supposed to sample the two textures from texture units 0 and 1 and do something with them:
#version 330 core

uniform sampler2D normalImage;
uniform sampler2D depthImage;
uniform float width;
uniform float height;

in vec2 UV;

layout(location = 2) out vec3 color;

void main() {
    // texture() replaces the deprecated texture2D() in GLSL 330 core
    vec3 irgb = texture(normalImage, UV).rgb;
    // do something here...
    color = irgb;
}
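UV comes from a simple pass-through vertex shader for the second pass; a minimal sketch of what I mean (the attribute name is mine):

#version 330 core

layout(location = 0) in vec2 vertexPosition_ndc; // fullscreen-quad corners in NDC

out vec2 UV;

void main() {
    gl_Position = vec4(vertexPosition_ndc, 0.0, 1.0);
    UV = vertexPosition_ndc * 0.5 + 0.5; // map [-1,1] to [0,1] texture coordinates
}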
And finally the rendering step. Maybe I am mistaken here: I render the geometry/scene (once?!) and then apply the two fragment shaders one after the other.
// pass 1: render the scene, writing normals and depth to attachments 0 and 1
glUseProgram(FragmentShader1);
SetMVPUniforms();
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
RenderScene(); // render geometry

// pass 2: bind the two rendered textures and point the samplers at their units
glUseProgram(FragmentShader2);
GLint nID = glGetUniformLocation(FragmentShader2, "normalImage"); // GLint, since the result may be -1
GLint dID = glGetUniformLocation(FragmentShader2, "depthImage");

glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, renderedNormalTexture);
glProgramUniform1i(FragmentShader2, nID, 0); // normalImage -> texture unit 0

glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, renderedDepthTexture);
glProgramUniform1i(FragmentShader2, dID, 1); // depthImage -> texture unit 1
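I assume the second pass also needs an actual draw call so that FragmentShader2 runs for every pixel; what I have in mind is a screen-filling quad along these lines (quadVAO is a hypothetical VAO holding two triangles that cover the screen):

// hypothetical second-pass draw: a quad covering the whole screen
glBindVertexArray(quadVAO);       // assumed fullscreen-quad geometry
glDrawArrays(GL_TRIANGLES, 0, 6); // two triangles, six vertices
glBindVertexArray(0);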
Now what I get is either nothing or a wrongly colored image.