I'm attempting to create a basic Phong lighting shader to learn about lighting in shaders; I'm using openFrameworks. I've created three cubes with a camera rotating around them. The lighting appears to be mostly working, but the cubes have unwanted transparency, which you can see here:
Here is my code, which is based on this tutorial
testApp.h
#pragma once
#include "ofMain.h"
// Demo app: three cubes lit by a single point light through a custom
// Phong-style shader, viewed by a camera orbiting the origin.
class testApp : public ofBaseApp{
public:
ofCamera camera; // orbiting camera; repositioned every frame in update()
ofLight pointLight; // point light feeding the shader's "lightLocation" uniform
float camAngle; // orbit angle (consumed as radians by sin/cos in update())
float camX; // camera position components, recomputed from camAngle
float camY;
float camZ;
ofShader lightShader; // loaded from "shaders/lightShader" in setup()
ofBoxPrimitive box1; // the three cubes rendered in draw()
ofBoxPrimitive box2;
ofBoxPrimitive box3;
void setup();
void update();
void draw();
};
testApp.cpp
#include "testApp.h"
//--------------------------------------------------------------
// One-time initialization: GL state, background, frame rate, camera clip
// planes, orbit parameters, light type, and the lighting shader.
void testApp::setup()
{
glEnable(GL_DEPTH_TEST);
// NOTE(review): GL_LIGHTING is fixed-function state; with a #version 150
// shader doing the lighting it should have no effect — confirm it's needed.
glEnable(GL_LIGHTING);
ofBackground(100, 100, 100);
ofSetFrameRate(30);
camera.setNearClip(0.1);
camera.setFarClip(1200);
// Start the orbit at angle 0; update() recomputes camX/camZ every frame,
// so the initial camX/camZ values only matter before the first update().
camAngle = 0;
camX = 200;
camY = 150;
camZ = 200;
pointLight.setPointLight();
// Loads the shaders/lightShader.vert + .frag pair from the data folder.
lightShader.load("shaders/lightShader");
//boxes setup here, not necessary to show
}
//--------------------------------------------------------------
void testApp::update()
{
camAngle += 0.01f;
if (camAngle >= 360)
{
camAngle = 0;
}
camX = 300 * sin(camAngle);
camZ = 300 * cos(camAngle);
camera.lookAt(ofVec3f(0, 0, 0));
camera.setPosition(ofVec3f(camX, camY, camZ));
pointLight.setPosition(-50, -20, 200);
}
//--------------------------------------------------------------
// Render the scene: draw the three cubes through the lighting shader from
// the orbiting camera's point of view. begin()/end() pairs must nest.
void testApp::draw()
{
lightShader.begin();
camera.begin();
pointLight.enable();
pointLight.draw();
// Hand the light's position to the fragment shader.
// NOTE(review): this is a world-space position, but the fragment shader
// computes fragPosition with modelViewMatrix (eye space) — the two are in
// different coordinate spaces, which will skew the lighting; verify and
// convert one of them so both are in the same space.
ofVec3f lightLocation = pointLight.getPosition();
lightShader.setUniform3f("lightLocation", lightLocation.x, lightLocation.y, lightLocation.z);
// First cube at the origin, untransformed.
box1.draw();
// Second cube: offset and tilted 45 degrees about the X axis.
ofPushMatrix();
ofTranslate(60, 50);
ofRotate(45, 1.0, 0.0, 0.0);
box2.draw();
ofPopMatrix();
// Third cube: offset the other way, rotated about an axis in the XZ plane.
ofPushMatrix();
ofTranslate(-70,70);
ofRotate(110, 1.0, 0.0, 0.6);
box3.draw();
ofPopMatrix();
pointLight.disable();
camera.end();
lightShader.end();
}
lightShader.vert
// vertex shader
#version 150
//these are passed in by openframeworks
uniform mat4 modelViewProjectionMatrix;
in vec4 position;
in vec4 color;
in vec3 normal;
in vec2 texcoord;
out vec4 vertPosition;
out vec4 vertColor;
out vec3 vertNormal;
out vec2 texCoordVar;
void main()
{
    // Pure pass-through stage: forward every per-vertex attribute
    // untouched, so the fragment shader can do all the lighting math.
    vertPosition = position;
    vertNormal   = normal;
    vertColor    = color;
    texCoordVar  = texcoord;

    // Standard clip-space transform.
    gl_Position = modelViewProjectionMatrix * position;
}
lightShader.frag
// fragment shader
#version 150
uniform mat4 modelViewMatrix;
uniform mat4 modelViewProjectionMatrix;
uniform vec3 lightLocation;
uniform vec3 lightIntensity;
in vec2 texCoordVar;
in vec3 vertNormal;
in vec4 vertPosition;
in vec4 vertColor;
out vec4 outputColor;
void main()
{
    // Transform the normal with the normal matrix (inverse-transpose of the
    // upper 3x3 modelView) so non-uniform scaling doesn't skew it.
    // Note: modelViewMatrix puts these in eye space, not world space.
    mat3 normalMatrix = transpose(inverse(mat3(modelViewMatrix)));
    vec3 vertexNormal = normalize(normalMatrix * vertNormal);
    // Fragment position in the same (eye) space as the normal.
    vec3 fragPosition = vec3(modelViewMatrix * vertPosition);
    // Direction from this fragment's surface toward the light source.
    vec3 surfaceToLight = normalize(lightLocation - fragPosition);
    // Cosine of the angle of incidence, clamped to zero for back-facing light.
    float brightness = max(0.0, dot(vertexNormal, surfaceToLight));
    // Scale only the RGB channels by brightness. The original multiplied the
    // whole vec4, which also scaled ALPHA by brightness — dimly lit fragments
    // became translucent, causing the unwanted transparency. Alpha must stay 1.
    outputColor = vec4(brightness * vertColor.rgb, 1.0);
}
Through some experimentation, I discovered that if I change the last line of the fragment shader to this:
outputColor = brightness * (vec4(vertColor.rgb, 1.0) * modelViewProjectionMatrix);
I get this:
The colors are wrong and the lighting becomes very dark, but the objects are opaque, as they should be. So maybe I should be using the modelViewProjectionMatrix somehow? I'm not sure how, though.
outputColor = vec4(brightness * vertColor.rgb, 1.0);
– Myrtlemyrvyn