Shaders

jPCT supports shaders for compiled objects. However, there is no generic shader framework yet. You have to write the shader code (GLSL) yourself and attach it to the compiled object by implementing the IRenderHook interface.


Most basic shader

Here's an example of a simple fragment shader (a pixel shader in DirectX terms) that paints all fragments green:

import java.nio.*;
import com.threed.jpct.*;
import org.lwjgl.*;
import org.lwjgl.opengl.*;

public class MyFirstShader implements IRenderHook {

	private String myShaderSource="void main() {gl_FragColor = vec4(0.0,1.0,0.0,1.0);}";
	private int prg=0;
	private int fragShade=0;
	private boolean init=false;
	
	public void beforeRendering(int polyID) {
		if (!init) {
			init();
		}
		ARBShaderObjects.glUseProgramObjectARB(prg);
	}

	public void afterRendering(int polyID) {
		ARBShaderObjects.glUseProgramObjectARB(0);
	}

	public void onDispose() {
		ARBShaderObjects.glDeleteObjectARB(fragShade);
		ARBShaderObjects.glDeleteObjectARB(prg);
	}

	public boolean repeatRendering() {
		return false;
	}
	
	private void init() {
		// Create a program object and a fragment shader object
		prg=ARBShaderObjects.glCreateProgramObjectARB();
		fragShade=ARBShaderObjects.glCreateShaderObjectARB(ARBFragmentShader.GL_FRAGMENT_SHADER_ARB);
		
		// Upload the shader source as a direct ByteBuffer
		byte[] src=myShaderSource.getBytes();
		ByteBuffer shader = BufferUtils.createByteBuffer(src.length);
		shader.put(src);
		shader.flip();
		
		ARBShaderObjects.glShaderSourceARB(fragShade, shader);
		
		// Compile the shader, attach it to the program and link
		ARBShaderObjects.glCompileShaderARB(fragShade);
		ARBShaderObjects.glAttachObjectARB(prg, fragShade);
		ARBShaderObjects.glLinkProgramARB(prg);
		
		Logger.log("Shader compiled!", Logger.MESSAGE);
		
		init=true;
	}
}
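
To use this hook, it has to be attached to a compiled Object3D via setRenderHook(). Here's a minimal sketch of the attachment; the "box" and "world" instances are hypothetical and not part of the example above (the normal mapping demo further down shows the same call in a complete application):

Object3D box = Primitives.getBox(10, 10);
box.build();
world.addObject(box);
box.compile();                          // shaders only work on compiled objects
box.setRenderHook(new MyFirstShader()); // attach the shader hook from above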


Normal mapping

Here's a more advanced example, based on a tutorial from Ozone3D with some modifications to compute the tangent vector in the shader. This example consists of four different parts:

The demo application:

import com.threed.jpct.*;
import com.threed.jpct.util.Light;

public class NormalMappingTest {

	private World world;
	private FrameBuffer buffer;
	private Object3D sphere;
	private Light light;

	public static void main(String[] args) throws Exception {
		new NormalMappingTest().loop();
	}

	public NormalMappingTest() throws Exception {
		Config.maxPolysVisible=1000;
		Config.lightMul=1;
		Config.glTrilinear=true;
		Config.specTerm=10;
		Config.specPow=4;
		
		world = new World();

		TextureManager.getInstance().addTexture("stones", new Texture("stones.jpg"));
		TextureManager.getInstance().addTexture("normals", new Texture("normals.jpg"));

		sphere = Loader.loadASC("terra.asc", 15, false);
		
		TextureInfo ti=new TextureInfo(TextureManager.getInstance().getTextureID("stones"));
		ti.add(TextureManager.getInstance().getTextureID("normals"), TextureInfo.MODE_MODULATE);
		
		sphere.setTexture(ti);
		sphere.build();
		world.addObject(sphere);
		
		world.getCamera().setPosition(50, -50, -5);
		world.getCamera().lookAt(sphere.getTransformedCenter());
		
		sphere.compile();
		sphere.setSpecularLighting(true);
		sphere.strip();
		sphere.setRenderHook(new NormalMapping());
		
		light=new Light(world);
		light.enable();
		light.setIntensity(new SimpleVector(100,100,255));
		light.setPosition(new SimpleVector(-80,120,-370));
		light.setAttenuation(-1);
	}

	private void loop() throws Exception {
		buffer = new FrameBuffer(800, 600, FrameBuffer.SAMPLINGMODE_GL_AA_2X);
		buffer.disableRenderer(IRenderer.RENDERER_SOFTWARE);
		buffer.enableRenderer(IRenderer.RENDERER_OPENGL);
		
		long time=System.nanoTime()/1000000L;
		int fps=0;
		
		while (!org.lwjgl.opengl.Display.isCloseRequested()) {
			buffer.clear(java.awt.Color.BLUE);
			world.renderScene(buffer);
			world.draw(buffer);
			buffer.update();
			buffer.displayGLOnly();
			sphere.rotateX(0.00003f);
			
			light.rotate(new SimpleVector(0,0.0001f,0), sphere.getTransformedCenter());
			
			fps++;
			long now=System.nanoTime()/1000000L;
			if (now-time>=1000) {
				time=now;
				System.out.println(fps+" fps");
				fps=0;
			}
		}
		buffer.disableRenderer(IRenderer.RENDERER_OPENGL);
		buffer.dispose();
		System.exit(0);
	}
}


The shader "framework" as an implementation of IRenderHook. It is a very basic implementation. It contains no logging, no tests if the features are actually available etc...it's up to you to add this, if you want to use this code for production.

import java.nio.*;
import com.threed.jpct.*;
import org.lwjgl.*;
import org.lwjgl.opengl.*;

public class NormalMapping implements IRenderHook {

	private String fragSource=Loader.loadTextFile("glsl/normalmapping/fragmentshader.glsl");
	private String vertexSource=Loader.loadTextFile("glsl/normalmapping/vertexshader.glsl");
	private int prg=0;
	private int fragShade=0;
	private int vertShade=0;
	private boolean init=false;
	
	private int locColor=0;
	private int locNormal=0;
	private int locRadius=0;
	
	public void beforeRendering(int polyID) {
		if (!init) {
			init();
		}
		ARBShaderObjects.glUseProgramObjectARB(prg);
		// Bind the samplers to texture units 0 (color map) and 1 (normal map)
		// and set the inverse light radius used for attenuation
		ARBShaderObjects.glUniform1iARB(locColor, 0);
		ARBShaderObjects.glUniform1iARB(locNormal, 1);
		ARBShaderObjects.glUniform1fARB(locRadius, 0.0005f);
	}

	public void afterRendering(int polyID) {
		ARBShaderObjects.glUseProgramObjectARB(0);
	}

	public void onDispose() {
		ARBShaderObjects.glDeleteObjectARB(fragShade);
		ARBShaderObjects.glDeleteObjectARB(vertShade);
		ARBShaderObjects.glDeleteObjectARB(prg);
	}

	public boolean repeatRendering() {
		return false;
	}
	
	private void init() {
		prg=ARBShaderObjects.glCreateProgramObjectARB();
		fragShade=ARBShaderObjects.glCreateShaderObjectARB(ARBFragmentShader.GL_FRAGMENT_SHADER_ARB);
		
		byte[] src=fragSource.getBytes();
		ByteBuffer shader = BufferUtils.createByteBuffer(src.length);
		shader.put(src);
		shader.flip();
		
		ARBShaderObjects.glShaderSourceARB(fragShade, shader);
		
		ARBShaderObjects.glCompileShaderARB(fragShade);
		ARBShaderObjects.glAttachObjectARB(prg, fragShade);
		
		vertShade=ARBShaderObjects.glCreateShaderObjectARB(ARBVertexShader.GL_VERTEX_SHADER_ARB);
		
		src=vertexSource.getBytes();
		ByteBuffer shader2 = BufferUtils.createByteBuffer(src.length);
		shader2.put(src);
		shader2.flip();
		
		ARBShaderObjects.glShaderSourceARB(vertShade, shader2);
		
		ARBShaderObjects.glCompileShaderARB(vertShade);
		ARBShaderObjects.glAttachObjectARB(prg, vertShade);
		
		ARBShaderObjects.glLinkProgramARB(prg);
		
		Logger.log("Shader compiled!", Logger.MESSAGE);
		
		locColor=getLocation("colorMap");
		locNormal=getLocation("normalMap");
		locRadius=getLocation("invRadius");
		
		init=true;
	}
	
	private int getLocation(String n) {
		// Uniform names have to be passed as null-terminated byte buffers
		byte[] nb=n.getBytes();
		ByteBuffer name = BufferUtils.createByteBuffer(nb.length+1);
		name.put(nb);
		name.put((byte)0);
		name.flip();
		return ARBShaderObjects.glGetUniformLocationARB(prg, name);
	}
}
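
As noted above, this implementation doesn't check whether compiling and linking actually succeeded. Here's a minimal sketch of such a check that could be added to the class and called after glCompileShaderARB (with GL_OBJECT_COMPILE_STATUS_ARB) and after glLinkProgramARB (with GL_OBJECT_LINK_STATUS_ARB). The helper name checkStatus is made up for this sketch, and it assumes LWJGL's buffer-based glGetObjectParameterARB and the String-returning glGetInfoLogARB overload:

	// Hypothetical helper: logs the info log if compiling/linking failed.
	private void checkStatus(int handle, int statusPname) {
		IntBuffer status = BufferUtils.createIntBuffer(1);
		ARBShaderObjects.glGetObjectParameterARB(handle, statusPname, status);
		if (status.get(0) == 0) {
			String log = ARBShaderObjects.glGetInfoLogARB(handle, 4096);
			Logger.log("Shader error: " + log, Logger.ERROR);
		}
	}

For example, calling checkStatus(fragShade, ARBShaderObjects.GL_OBJECT_COMPILE_STATUS_ARB); right after compiling the fragment shader would report compile errors instead of failing silently.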


The vertex shader:

varying vec3 lightVec; 
varying vec3 eyeVec;
varying vec2 texCoord;					 

void main(void)
{
	gl_Position = ftransform();
	texCoord = gl_MultiTexCoord0.xy;
	
	// jPCT doesn't provide the tangent vector...compute it (this isn't 100% accurate...) 
		
	vec3 c1 = cross(gl_Normal, vec3(0.0, 0.0, 1.0)); 
	vec3 c2 = cross(gl_Normal, vec3(0.0, 1.0, 0.0));
	
	vec3 vTangent=c1;
	if (length(c2)>length(vTangent)) {
		vTangent=c2;
	}
	
	vTangent = normalize(vTangent);

	vec3 n = normalize(gl_NormalMatrix * gl_Normal);
	vec3 t = normalize(gl_NormalMatrix * vTangent);
	vec3 b = cross(n, t);
	
	vec3 vVertex = vec3(gl_ModelViewMatrix * gl_Vertex);
	vec3 tmpVec = gl_LightSource[0].position.xyz - vVertex;

	lightVec.x = dot(tmpVec, t);
	lightVec.y = dot(tmpVec, b);
	lightVec.z = dot(tmpVec, n);

	tmpVec = -vVertex;
	eyeVec.x = dot(tmpVec, t);
	eyeVec.y = dot(tmpVec, b);
	eyeVec.z = dot(tmpVec, n);
}


and finally the fragment-/pixel-shader:

varying vec3 lightVec;
varying vec3 eyeVec;
varying vec2 texCoord;

uniform sampler2D colorMap;
uniform sampler2D normalMap;
uniform float invRadius;

void main (void)
{
	float distSqr = dot(lightVec, lightVec);
	float att = clamp(1.0 - invRadius * sqrt(distSqr), 0.0, 1.0);
	vec3 lVec = lightVec * inversesqrt(distSqr);

	vec3 vVec = normalize(eyeVec);
	vec4 base = texture2D(colorMap, texCoord);
	vec3 bump = normalize(texture2D(normalMap, texCoord).xyz * 2.0 - 1.0);

	vec4 vAmbient = gl_LightSource[0].ambient * gl_FrontMaterial.ambient;

	float diffuse = max(dot(lVec, bump), 0.0);
	vec4 vDiffuse = gl_LightSource[0].diffuse * gl_FrontMaterial.diffuse * diffuse;

	float specular = pow(clamp(dot(reflect(-lVec, bump), vVec), 0.0, 1.0), gl_FrontMaterial.shininess);
	vec4 vSpecular = gl_LightSource[0].specular * gl_FrontMaterial.specular * specular;	
	
	gl_FragColor = (vAmbient*base + vDiffuse*base + vSpecular) * att;
}

And this is how it looks (screenshots: Bumpmap.jpg and Bumpmap2.jpg).