www.jpct.net
jPCT — a 3d engine for Java => Support => Topic started by: AGP on January 22, 2014, 04:58:53 pm

Sorry about the shader question. I made a slight change to this in order to update it with the camera's position. Ray, of course, is always the center of the ocean plane. All I get is a black plane in jpct. The first method is how I update the shader (once per gameloop iteration, naturally).
public void update(SimpleVector cameraPosition) {
    // Advance the shader's animation clock, then push the current camera position.
    timeF += .1f;
    shader.setUniform("iGlobalTime", timeF);
    shader.setUniform("pos", cameraPosition);
}
// License Creative Commons Attribution-NonCommercial-ShareAlike 3.0 Unported License.
uniform vec3 iResolution;
uniform vec3 iChannelResolution[4];
uniform vec3 pos;
uniform vec3 ray;
uniform float iGlobalTime;
uniform float iChannelTime[4];
uniform sampler2D iChannel0;
uniform sampler2D iChannel1;
uniform sampler2D iChannel2;
uniform sampler2D iChannel3;
const float tau = 6.28318530717958647692;
// Gamma correction
#define GAMMA (2.2)
vec3 ToLinear( in vec3 c ) {
    // simulate a monitor: decode display colour values into linear light values
    return pow( c, vec3(GAMMA) );
}
vec3 ToGamma( in vec3 c ) {
    // inverse of ToLinear: re-encode linear light as display colour values
    return pow( c, vec3(1.0/GAMMA) );
}
// per-fragment view-space ray; written by CamPolar, read by main for the vignette
vec3 localRay;
// Set up a camera looking at the scene.
// origin  camera is positioned relative to, and looking at, this point
// distance  how far camera is from origin
// rotation  about x & y axes, by lefthand screw rule, relative to camera looking along +z
// zoom  the relative length of the lens
// Set up a camera looking at the scene.
// origin - camera is positioned relative to, and looking at, this point
// distance - how far camera is from origin
// rotation - about x & y axes, by left-hand screw rule, relative to camera looking along +z
// zoom - the relative length of the lens
// NOTE(review): the forum transcription stripped the '-' characters; they are restored below.
void CamPolar( out vec3 pos, out vec3 ray, in vec3 origin, in vec2 rotation, in float distance, in float zoom ) {
    // get rotation coefficients
    vec2 c = vec2(cos(rotation.x),cos(rotation.y));
    vec4 s;
    s.xy = vec2(sin(rotation.x),sin(rotation.y)); // worth testing if this is faster as sin or sqrt(1.0-cos);
    s.zw = -s.xy; // negated sines, used to rotate the opposite way below
    // ray in view space
    ray.xy = gl_FragCoord.xy - iResolution.xy*.5;
    ray.z = iResolution.y*zoom;
    ray = normalize(ray);
    localRay = ray;
    // rotate ray
    ray.yz = ray.yz*c.xx + ray.zy*s.zx;
    ray.xz = ray.xz*c.yy + ray.zx*s.yw;
    // position camera
    pos = origin - distance*vec3(c.x*s.y,s.z,c.x*c.y);
}
// Noise functions, distinguished by variable types
// Value-noise lookups into a 256x256 RGBA noise texture (iChannel0), distinguished
// by argument type. The -100.0 LOD bias forces the base mip level.
// NOTE(review): the stripped '-' characters (smoothstep weights, LOD bias) are restored.
vec2 Noise( in vec3 x ) {
    vec3 p = floor(x);
    vec3 f = fract(x);
    f = f*f*(3.0-2.0*f); // smoothstep interpolation weights
    // vec3 f2 = f*f; f = f*f2*(10.0-15.0*f+6.0*f2);
    vec2 uv = (p.xy+vec2(37.0,17.0)*p.z) + f.xy;
    // hardware interpolation lacks precision, so fetch the 4 texels and blend manually
    // vec4 rg = texture2D( iChannel0, (uv+0.5)/256.0, -100.0 );
    vec4 rg = mix( mix(
        texture2D( iChannel0, (floor(uv)+0.5)/256.0, -100.0 ),
        texture2D( iChannel0, (floor(uv)+vec2(1,0)+0.5)/256.0, -100.0 ),
        fract(uv.x) ),
        mix(
        texture2D( iChannel0, (floor(uv)+vec2(0,1)+0.5)/256.0, -100.0 ),
        texture2D( iChannel0, (floor(uv)+1.5)/256.0, -100.0 ),
        fract(uv.x) ),
        fract(uv.y) );
    return mix( rg.yw, rg.xz, f.z );
}
vec4 Noise( in vec2 x ) {
    vec2 p = floor(x.xy);
    vec2 f = fract(x.xy);
    f = f*f*(3.0-2.0*f);
    // vec3 f2 = f*f; f = f*f2*(10.0-15.0*f+6.0*f2);
    vec2 uv = p.xy + f.xy;
    return texture2D( iChannel0, (uv+0.5)/256.0, -100.0 );
}
vec4 Noise( in ivec2 x ) {
    return texture2D( iChannel0, (vec2(x)+0.5)/256.0, -100.0 );
}
vec2 Noise( in ivec3 x ) {
    vec2 uv = vec2(x.xy)+vec2(37.0,17.0)*float(x.z);
    return texture2D( iChannel0, (uv+0.5)/256.0, -100.0 ).xz;
}
// Fractal (fBm-style) wave height functions at three levels of detail.
// NOTE(review): stripped '-' characters restored: vec3(1,-1,1) makes the per-octave
// transform the 45-degree rotation the comments describe, Noise(pos).x-.5 centres the
// noise, and (.5-f) flips the result ('.5f' is not even a valid GLSL literal).
float Waves( vec3 pos ) {
    pos *= .2*vec3(1,1,1);
    const int octaves = 6;
    float f = 0.0;
    // need to do the octaves from large to small, otherwise things don't line up
    // (because I rotate by 45 degrees on each octave)
    pos += iGlobalTime*vec3(0,.1,.1);
    for ( int i=0; i < octaves; i++ ) {
        pos = (pos.yzx + pos.zyx*vec3(1,-1,1))/sqrt(2.0);
        f = f*2.0+abs(Noise(pos).x-.5)*2.0;
        pos *= 2.0;
    }
    f /= exp2(float(octaves));
    return (.5-f)*1.0;
}
float WavesDetail( vec3 pos ) {
    pos *= .2*vec3(1,1,1);
    const int octaves = 8; // more octaves than Waves() for normal computation
    float f = 0.0;
    // need to do the octaves from large to small, otherwise things don't line up
    // (because I rotate by 45 degrees on each octave)
    pos += iGlobalTime*vec3(0,.1,.1);
    for ( int i=0; i < octaves; i++ ) {
        pos = (pos.yzx + pos.zyx*vec3(1,-1,1))/sqrt(2.0);
        f = f*2.0+abs(Noise(pos).x-.5)*2.0;
        pos *= 2.0;
    }
    f /= exp2(float(octaves));
    return (.5-f)*1.0;
}
float WavesSmooth( vec3 pos ) {
    pos *= .2*vec3(1,1,1);
    const int octaves = 2; // very coarse version
    float f = 0.0;
    // need to do the octaves from large to small, otherwise things don't line up
    // (because I rotate by 45 degrees on each octave)
    pos += iGlobalTime*vec3(0,.1,.1);
    for ( int i=0; i < octaves; i++ ) {
        pos = (pos.yzx + pos.zyx*vec3(1,-1,1))/sqrt(2.0);
        //f = f*2.0+abs(Noise(pos).x-.5)*2.0;
        f = f*2.0+sqrt(pow(Noise(pos).x-.5,2.0)+.01)*2.0; // softened |x-.5| to avoid a derivative kink
        pos *= 2.0;
    }
    f /= exp2(float(octaves));
    return (.5-f)*1.0;
}
// Foam mask: high-frequency octaves sharpen the crests, then a per-pixel dither
// hides banding. Returns ~1 on crests, ~0 elsewhere.
// NOTE(review): stripped '-' characters restored; the pos.y assignment sign is a
// reconstruction — confirm against the original ShaderToy source.
float WaveCrests( vec3 ipos ) {
    vec3 pos = ipos;
    pos *= .2*vec3(1,1,1);
    const int octaves1 = 6;
    const int octaves2 = 16;
    float f = 0.0;
    // need to do the octaves from large to small, otherwise things don't line up
    // (because I rotate by 45 degrees on each octave)
    pos += iGlobalTime*vec3(0,.1,.1);
    vec3 pos2 = pos;
    for ( int i=0; i < octaves1; i++ ) {
        pos = (pos.yzx + pos.zyx*vec3(1,-1,1))/sqrt(2.0);
        f = f*1.5+abs(Noise(pos).x-.5)*2.0;
        pos *= 2.0;
    }
    pos = pos2 * exp2(float(octaves1));
    pos.y = -.05*iGlobalTime; // drift the fine octaves over time
    for ( int i=octaves1; i < octaves2; i++ ) {
        pos = (pos.yzx + pos.zyx*vec3(1,-1,1))/sqrt(2.0);
        f = f*1.5+pow(abs(Noise(pos).x-.5)*2.0,1.0);
        pos *= 2.0;
    }
    f /= 1500.0;
    f -= Noise(ivec2(gl_FragCoord.xy)).x*.01; // per-pixel dither
    return pow(smoothstep(.4,-.1,f),6.0);
}
// Constant sky colour (used directly and as the reflection fallback).
vec3 Sky( vec3 ray ) {
    return vec3(.4,.45,.5);
}
// Signed height of pos above the wave surface (the '-' was lost in transcription).
float OceanDistanceField( vec3 pos ) {
    return pos.y - Waves(pos);
}
float OceanDistanceFieldDetail( vec3 pos ) {
    return pos.y - WavesDetail(pos);
}
// Surface normal via central differences of the detailed distance field;
// the step size scales with distance from the origin to stay stable far away.
vec3 OceanNormal( vec3 pos ) {
    vec3 norm;
    vec2 d = vec2(.01*length(pos),0);
    norm.x = OceanDistanceFieldDetail( pos+d.xyy ) - OceanDistanceFieldDetail( pos-d.xyy );
    norm.y = OceanDistanceFieldDetail( pos+d.yxy ) - OceanDistanceFieldDetail( pos-d.yxy );
    norm.z = OceanDistanceFieldDetail( pos+d.yyx ) - OceanDistanceFieldDetail( pos-d.yyx );
    return normalize(norm);
}
// Sphere-trace the ocean height field along ray from pos.
// Returns the hit parameter t, or 0.0 on a miss.
// NOTE(review): the '||' was lost in transcription ('h < .01  t > 100.0').
float TraceOcean( vec3 pos, vec3 ray ) {
    float h = 1.0;
    float t = 0.0;
    for ( int i=0; i < 100; i++ ) {
        if ( h < .01 || t > 100.0 ) // converged, or marched past the far limit
            break;
        h = OceanDistanceField( pos+t*ray );
        t += h;
    }
    if ( h > .1 ) // never got close to the surface
        return 0.0;
    return t;
}
// Shade a surface point: deep-water colour, Schlick fresnel blend toward the sky
// reflection, and foam on wave crests.
// NOTE(review): stripped '-' characters restored; the refraction expression's inner
// signs are a reconstruction — confirm against the original ShaderToy source.
vec3 ShadeOcean( vec3 pos, vec3 ray ) {
    vec3 norm = OceanNormal(pos);
    float ndotr = dot(ray,norm);
    float fresnel = pow(1.0-abs(ndotr),5.0); // Schlick approximation
    vec3 reflectedRay = ray-2.0*norm*ndotr;
    vec3 refractedRay = ray+(cos(1.33*acos(ndotr))-ndotr)*norm; // 1.33 = refractive index of water
    refractedRay = normalize(refractedRay); // currently unused (no sea floor in this cut-down shader)
    const float crackFudge = .0; // left over from the full shader; unused here
    // reflection
    vec3 reflection = Sky(reflectedRay);
    vec3 col = vec3(0,.04,.04); // undersea colour
    col = mix( col, reflection, fresnel );
    // foam
    col = mix( col, vec3(1), WaveCrests(pos) );
    return col;
}
void main(void) {
    // pos and ray now arrive as uniforms; the original CamPolar setup is kept for reference:
    // vec2 camRot = vec2(.5,.5)+vec2(.35,4.5)*(iMouse.yx/iResolution.yx);
    // vec3 pos, ray;
    // CamPolar( pos, ray, vec3(0), camRot, 3.0, 1.0 );
    float t = TraceOcean( pos, ray );
    // hit the water -> shade it; otherwise fall back to the sky colour
    vec3 result = ( t > 0.0 ) ? ShadeOcean( pos+ray*t, ray ) : Sky( ray );
    // vignette effect (localRay is only written inside CamPolar, so it may be stale here)
    result *= 1.1*smoothstep( .35, 1.0, localRay.z );
    gl_FragColor = vec4(ToGamma(result),1.0);
}

Most likely the values that you are giving it for cam and ray aren't the ones that they are supposed to be. I'm not sure what ray is supposed to be, but from the name of it, i would assume that it's the view vector, i.e. the (normalized) vector from the camera to the plane's center, not the center itself.

I see what you mean, but help me out, please. What would you pass it?

shader.setUniform("ray", plane.getTransformedCenter().calcSub(cameraPosition).normalize());
..or something. But that's just a guess based on the name and the expectation that there has to be some view vector in the equation.

That had been my guess, too. Alas, it's still black. Very frustrating.

Do you happen to have another suggestion?
And, by the way, could you make a setUniform method for type samplerCube (which from what I can see would be your SkyBox class)?

...not sure. The shader is pretty complex. I have no idea what cam and ray are supposed to be exactly. But if it works without setting them from the outside, the issue has to be that they are not the values they are supposed to be when setting them. I would somehow start from there. If nothing else helps, i would convert the shader method that calculates them to Java and see what the actual outcome is to get a feel for it.
About cube mapping: There's no point in adding such a uniform to a shader if the backend itself doesn't support cube mapping and i have no plans nor time to add support for it anytime soon, i'm afraid.

I found this other shader that seems to need it. I think it uses to blend the colors of the ocean with the sky.
//http://members.gamedev.net/rootevilgames/mwhite/OceanWater.htm
uniform samplerCube skyDome;
uniform sampler2D water;
uniform vec4 waterColour;
varying mat3 tanSpace;
// Fragment shader: blends the water colour with a cube-map sky reflection.
// NOTE(review): stripped '-' characters restored ('ref -=' and '- waterColour').
void main(void) {
    // average the per-vertex reflection vector with the bump-map sample
    vec3 ref = ((gl_TexCoord[1] + texture2D(water, gl_TexCoord[0].xy)) * 0.5).xyz;
    // unpack from [0,1] back to [-1,1]
    ref *= 2.0;
    ref -= vec3(1.0, 1.0, 1.0);
    ref = tanSpace * ref;
    vec4 sample = waterColour
        + ( (textureCube(skyDome, ref) - waterColour)
        * (dot(ref,vec3(0.0,0.0,1.0)) * waterColour.w));
    gl_FragColor = vec4( sample.xyz, 0.825);
}
uniform float fTime0_X;
uniform float waveHeight;
uniform vec2 windDir;
uniform float roughness;
uniform vec4 vViewPosition;
varying mat3 tanSpace;
// Vertex shader: sine-based wave displacement plus a packed reflection vector
// and a (fake) tangent-space basis for the fragment stage.
// NOTE(review): the stripped '-' in 'vViewPosition - pos.xzyw' is restored.
void main(void) {
    // two wind/time-driven height contributions, sharpened by 'roughness'
    float height = sin( 1.0 * (gl_Vertex.x + (windDir.x * fTime0_X)));
    height += 1.0;
    height = pow( max(0.0, height), roughness);
    float height2 = sin( 0.01 * (gl_Vertex.y + (windDir.y * fTime0_X)));
    height2 += 1.0;
    height2 = pow( max(0.0, height2), roughness);
    vec4 pos = gl_Vertex;
    pos.z = waveHeight * ((height + height2) / 2.0);
    gl_Position = gl_ModelViewProjectionMatrix * pos.xzyw;
    // view-dependent reflection vector, packed into [0,1]
    vec4 ref = normalize(reflect( (vViewPosition - pos.xzyw), vec4(0.0, 0.0, 1.0, 1.0)));
    ref += vec4(1.0,1.0,1.0,1.0);
    ref *= 0.5;
    gl_TexCoord[1] = ref;
    gl_TexCoord[0].xy = 4.0 * (gl_MultiTexCoord0.yx + vec2(0.0, fTime0_X * 0.01));
    //
    // Find Surface Normal
    vec3 binormal = normalize(vec3( cos(1.0 * (gl_Vertex.x + (windDir.x * fTime0_X))),
        1.0,
        0.0));
    vec3 tangent = normalize(
        vec3( 0.0,
        1.0,
        0.01 * cos(0.01 * (gl_Vertex.y + (windDir.y * fTime0_X))))
        );
    vec3 normal = cross(binormal, tangent);
    // pack the normal into [0,1] for the fragment stage
    normal += vec3(1.0,1.0,1.0);
    normal *= 0.5;
    tanSpace = mat3( vec3(1.0, 0.0, 0.0)
        , normal,
        vec3(0.0, 0.0, 1.0));
}

Yes, most likely. But to inject this uniform isn't the problem. That's a simple int just like with sampler2D. However, i would have to change a lot in the engine to actually support cube mapping and as said, i have no plans to do so ATM.

I posted a question on Shader Toy. Let's hope that the author answers it.

And here is his response:
Ray needs to be different for each pixel. If you look at the code generating ray you can see it's based on gl_FragCoord.
You'd want something like ray = localRay*cameraMatrix (with localRay computed the way it is now), then pass cameraMatrix as a uniform variable.
So from what I understand we have to further multiply our current ray with the camera matrix. The part I don't get is why I would have to pass the cameraMatrix as a uniform variable and how I would do that. Also, is the multiplication ray.matMul(camera.getProjectionMatrix())?

You can pass a matrix to a uniform...that's not the problem. I'm not sure which matrix he's talking about though. It might be the projection matrix. Or maybe it's the model view matrix (in which case you already have it via gl_ModelViewMatrix in the shader). It seems to me that you then have to calculate the ray in the shader as before like in CamPolar
ray.xy = gl_FragCoord.xy  iResolution.xy*.5;
ray.z = iResolution.y*zoom;
ray = normalize(ray);
localRay = ray;
(that would be localRay then) and transform it by using that matrix (that seems to replace the rest that CamPolar does). The ray that you are passing into the uniform seems not to be needed. In addition, you seem to need "pos" which is the camera's position.
This is all wild guessing...this shader doesn't seem to be made to be used in an actual 3d scene. I would scrap it...

Every shader in ShaderToy is a scene of their own. It would be much more useful if this weren't the case. But why would you scrap it if it can come to work and it looks great?

Every shader in ShaderToy is a scene of their own. It would be much more useful if this weren't the case. But why would you scrap it if it can come to work and it looks great?
I never understood how stuff like ShaderToy works without a proper 3d scene behind it...now i know...it doesn't... :P. If that's the way it is, there has to be some guide on how to convert the results to something less detached from the real world.
Why i would scrap it? Because it's pretty complicated and i personally think that it's almost impossible to adapt a shader of which you don't know how to set it up correctly. Anyway, this is what i would do if i had to:
 try to figure out which matrix he means when talking about "cameraMatrix"
 provide that matrix and pass it into the shader
 pass the camera position to pos
 calculate localRay like above
 multiply localRay with that matrix to calculate ray
 hope for the best, but expect the worst...

BTW: When passing matrices and vectors to a shader, keep in mind that the coordinate systems differ. For a vector, you have to negate y and z. For a matrix, you can use Matrix.transformToGL().

What does
float n = v;
mean when v is a vec3? It doesn't compile on jpct and I don't really think it should. Yet this is supposedly a finished and functional shader.

The gpu driver compiles the shader, not the engine. This isn't supposed to work if v is a vec3.

You'd be surprised how many shaders online have these kinds of dumb errors (yet they have videos showing them working).
I would really like a proper glsl tutorial or teacher, but all I can ever find that's reasonable are hlsl and cg stuff.

Here's a direct question: how do we make your CheapWater into a glsl shader? Or to start with a smaller question: is that in the vertex shader (I would think so) or in the fragment one?
class CheapWater {
private World world;
private FrameBuffer buffer;
protected Object3D water;
private WaterController wc = null;
private Time waterTime;
public CheapWater(World world, FrameBuffer buffer) throws Exception {
Config.glForceEnvMapToSecondStage = true;
Config.glUseVBO = true;
this.world = world;
this.buffer = buffer;
waterTime = new Time();
TextureManager tm = TextureManager.getInstance();
tm.addTexture("water", new Texture("water3.jpg"));
tm.addTexture("envmap", new Texture("environment.jpg"));
water = Primitives.getPlane(40, 1000);
TextureInfo ti = new TextureInfo(tm.getTextureID("water"));
ti.add(tm.getTextureID("envmap"), TextureInfo.MODE_MODULATE);
wc = new WaterController(water, 15, 5, false);
water.setTexture(ti);
water.setEnvmapped(Object3D.ENVMAP_ENABLED);
water.rotateX((float) Math.PI / 2f);
water.rotateMesh();
water.clearRotation();
water.build();
water.compile(true);
water.setTransparency(2);
water.getMesh().setVertexController(wc, false);
water.setTextureMatrix(new Matrix());
water.translate(0, 56f, 0);//WHY IS 18 DOWN INSTEAD OF UP?
}
public void perform() throws Exception {
wc.update((float)waterTime.deltaTime());
water.getMesh().applyVertexController();
}
private static class WaterController extends GenericVertexController {
private float scale = 0;
private float damping = 0;
private SimpleVector[] preCalcNormals = null;
private SimpleVector[] preCalcNormalsNeg = null;
private float[] lastHeight = null;
private static final long serialVersionUID = 1L;
private float degreeAdd = 0;
private Object3D water = null;
private float lastUpdate = 0;
private boolean realNormals = false;
public WaterController(Object3D water, float waveScale, float damping, boolean realNormals) {
this.scale = waveScale;
this.water = water;
this.realNormals = realNormals;
this.damping = damping;
water.setTextureMatrix(new Matrix());
}
/**
* This calculates some normals...these are rather fake and in no way
* comparable to real surface normals. But they should do the trick...
*/
public boolean setup() {
SimpleVector ax = new SimpleVector(1, 0, 1).normalize();
preCalcNormals = new SimpleVector[(int) (100f * scale)];
preCalcNormalsNeg = new SimpleVector[(int) (100f * scale)];
int end = preCalcNormals.length;
for (int i = 0; i < end; i++) {
float height = 1f + (((float) i) / (end / 2f));
SimpleVector n = new SimpleVector(0, 1, 0);
SimpleVector n2 = new SimpleVector(0, 1, 0);
Matrix m = new Matrix();
Matrix m2 = new Matrix();
if (height <= 0) {
float val = (float) Math.sqrt((height + 1) * (Math.PI / 7f));
m.rotateAxis(ax, val);
m2.rotateAxis(ax, val);
} else {
float val = (float) Math.sqrt((1  height) * (Math.PI / 7f));
m.rotateAxis(ax, val);
m2.rotateAxis(ax, val);
}
n.rotate(m);
n2.rotate(m);
preCalcNormals[i] = n;
preCalcNormalsNeg[i] = n2;
}
SimpleVector[] source = this.getSourceMesh();
lastHeight = new float[source.length];
for (int i = 0; i < source.length; i++)
lastHeight[i] = 0;
return true;
}
public void update(float inc) {
degreeAdd += inc;
lastUpdate = inc;
}
public void apply() {
SimpleVector[] source = this.getSourceMesh();
SimpleVector[] destination = this.getDestinationMesh();
SimpleVector[] destNormals = this.getDestinationNormals();
int end = source.length;
int nEnd = preCalcNormals.length;
for (int i = 0; i < end; i++) {
SimpleVector sourceVector = source[i];
SimpleVector destinationVector = destination[i];
float sin = (float) Math.sin((degreeAdd + sourceVector.x + sourceVector.z) / damping);
destinationVector.set(sourceVector.x, sourceVector.y + sin * scale, sourceVector.z);
int iHeight = (int) ((sin + 1) * (nEnd / 2));
if (lastHeight[i] > sin) {
destNormals[i].set(preCalcNormalsNeg[iHeight]);
}
else destNormals[i].set(preCalcNormals[iHeight]);
lastHeight[i] = sin;
}
water.touch();
if (realNormals)
water.calcNormals();
float tr = lastUpdate / 333f;
water.getTextureMatrix().translate(tr, tr, 0);
}
}
}

That's the vertex one. You would have to do something similar to what the controller does in the shader (i.e. some sin() calculations to the object's vertices).

How do I fill a uniform vec2? Do I need something like setStaticUniform(String, awt.Point)?
Also, is there a way in which to fill a samplerCube so that it has ANY kind of usable value (even if it's just one of the images in a SKyBox)?

This, by the way, is how simple the fragment shader at hand is:
uniform samplerCube skyDome;
uniform sampler2D water;
uniform vec4 waterColour;
varying mat3 tanSpace;
// Fragment shader (same as above, single-line blend expression).
// NOTE(review): stripped '-' characters restored ('ref -=' and '- waterColour').
void main(void) {
    vec3 ref = ((gl_TexCoord[1] + texture2D(water, gl_TexCoord[0].xy)) * 0.5).xyz;
    // unpack from [0,1] back to [-1,1]
    ref *= 2.0;
    ref -= vec3(1.0, 1.0, 1.0);
    ref = tanSpace * ref;
    vec4 sample = waterColour + ( (textureCube(skyDome, ref) - waterColour) * (dot(ref,vec3(0.0,0.0,1.0)) * waterColour.w));
    gl_FragColor = vec4( sample.xyz, 0.825);
}

Ok, but that's just to make the water reflect the skycube...

You answered neither of my questions. : )
The most important of them (since I think that I should just be able to replace samplerCube with sampler2D) is how do I pass a vec2.

Figured it out. I was being thick: shader.setStaticUniform("windDir", new float[]{10, 0}) does it.
The shader now looks like this:
uniform sampler2D skyDome;//samplerCube
uniform sampler2D water;
uniform vec4 waterColour;
varying mat3 tanSpace;
// Fragment shader, cube map replaced by a 2D texture.
// NOTE(review): stripped '-' characters restored. texture() requires GLSL 1.30+;
// the thread later replaces it with texture2D for exactly that reason.
void main(void) {
    vec3 ref = ((gl_TexCoord[1] + texture2D(water, gl_TexCoord[0].xy)) * 0.5).xyz;
    // unpack from [0,1] back to [-1,1]
    ref *= 2.0;
    ref -= vec3(1.0, 1.0, 1.0);
    ref = tanSpace * ref;
    vec4 sample = waterColour + ( (texture(skyDome, vec2(ref.x, ref.y)) - waterColour) * (dot(ref,vec3(0.0,0.0,1.0)) * waterColour.w));//waterColour+((textureCube(skyDome, ref)...
    gl_FragColor = vec4( sample.xyz, 0.825);
}
I hope that I'm right with the texture(sample2D...) thing.

Oh, sorry...i haven't noticed your actual questions... :[
Yes, a float[2] is the way to go. Replacing the cube map with a single texture might look good enough.

It's not compiling, though. I'm getting the following message:
Fragment shader failed to compile with the following errors:
ERROR: 0:1: error(#132) Syntax error: '<' parse error
ERROR: error(#273) 1 compilation errors. No code generated
Vertex shader failed to compile with the following errors:
ERROR: 0:1: error(#132) Syntax error: 'Fragment' parse error
ERROR: error(#273) 1 compilation errors. No code generated
[ Mon Jan 27 16:09:01 EST 2014 ]  ERROR: Vertex and Fragment shader(s) were not
successfully compiled before glLinkProgram() was called. Link failed.
Tangent handle not found (tangents needed: false)!
Shader compiled!
The problem with these error messages is that I never exactly know where the problem is. I noticed that the method I was using didn't really exist (even reference documentation on glsl is bizarrely wanting) and replaced it now with texture2D. The following is the fragment shader, now. Would you like to have a look at the vertex shader, just in case?
uniform sampler2D skyDome;//samplerCube
uniform sampler2D water;
uniform vec4 waterColour;
varying mat3 tanSpace;
// Fragment shader, final working version using texture2D.
// NOTE(review): stripped '-' characters restored ('ref -=' and '- waterColour').
void main(void) {
    vec3 ref = ((gl_TexCoord[1] + texture2D(water, gl_TexCoord[0].xy)) * 0.5).xyz;
    // unpack from [0,1] back to [-1,1]
    ref *= 2.0;
    ref -= vec3(1.0, 1.0, 1.0);
    ref = tanSpace * ref;
    vec4 sample = waterColour + ( (texture2D(skyDome, vec2(ref.x, ref.y)) - waterColour) * (dot(ref,vec3(0.0,0.0,1.0)) * waterColour.w));
    gl_FragColor = vec4( sample.xyz, 0.825);
}

And that thing compiles now? Or does it still have problems? The error messages are generated by the driver. Depending on the vendor of your graphics card, they are more or less useful.

I think it should, but it isn't. I guess I'll try it right now on another computer and report back here.

It works, I was being stupid. But it's not perfect yet. Even though the first line of the vertex shader is attribute vec4 tangent, jpct tells me
Tangent handle not found (tangents needed: true)!
Also, it's in the wrong place, and I suspect it's because I didn't fill a uniform vec4 vViewPosition variable. The following is the short vertexshader code. Please help me find how to fill vViewPosition (I don't suppose it's just the cameraPosition because of that pesky fourth value). Is it, perhaps, a 2x2 Matrix?
attribute vec4 tangent;
uniform float fTime0_X;
uniform float waveHeight;
uniform vec2 windDir;
uniform float roughness;
uniform vec4 vViewPosition;
varying mat3 tanSpace;
// Vertex shader (compact form of the one above).
// NOTE(review): the stripped '-' in 'vViewPosition - pos.xzyw' is restored.
void main(void) {
    // two wind/time-driven height contributions, sharpened by 'roughness'
    float height = sin( 1.0 * (gl_Vertex.x + (windDir.x * fTime0_X)));
    height += 1.0;
    height = pow( max(0.0, height), roughness);
    float height2 = sin( 0.01 * (gl_Vertex.y + (windDir.y * fTime0_X)));
    height2 += 1.0;
    height2 = pow( max(0.0, height2), roughness);
    vec4 pos = gl_Vertex;
    pos.z = waveHeight * ((height + height2) / 2.0);
    gl_Position = gl_ModelViewProjectionMatrix * pos.xzyw;
    // view-dependent reflection vector, packed into [0,1]
    vec4 ref = normalize(reflect( (vViewPosition - pos.xzyw), vec4(0.0, 0.0, 1.0, 1.0)));
    ref += vec4(1.0,1.0,1.0,1.0);
    ref *= 0.5;
    gl_TexCoord[1] = ref;
    gl_TexCoord[0].xy = 4.0 * (gl_MultiTexCoord0.yx + vec2(0.0, fTime0_X * 0.01));
    // Find Surface Normal
    vec3 binormal = normalize(vec3( cos(1.0 * (gl_Vertex.x + (windDir.x * fTime0_X))), 1.0, 0.0));
    vec3 tangent = normalize(vec3( 0.0, 1.0, 0.01 * cos(0.01 * (gl_Vertex.y + (windDir.y * fTime0_X)))));
    vec3 normal = cross(binormal, tangent);
    // pack the normal into [0,1] for the fragment stage
    normal += vec3(1.0,1.0,1.0);
    normal *= 0.5;
    tanSpace = mat3( vec3(1.0, 0.0, 0.0), normal, vec3(0.0, 0.0, 1.0));
}

Even though the first line of the vertex shader is attribute vec4 tangent, jpct tells me
Tangent handle not found (tangents needed: true)!
No, of course not. That's because you aren't using them in the shader. The engine detects that you actually want them (that's the "tangents needed: true" part of the message), but if you don't use them, the shader compiler will simply remove the attribute. So after compilation, the engine can't access the attribute, which is the other part of the message. The same is true for uniforms: If you don't use them in the shader's code, you usually can't access their handles.
I don't suppose it's just the cameraPosition because of that pesky fourth value).
I would estimate that it is. The fourth component is the w coordinate (http://en.wikipedia.org/wiki/Homogeneous_coordinates (http://en.wikipedia.org/wiki/Homogeneous_coordinates)...if you are masochistic... ;)). For a vector, you can simply put 1 into it.

So do I convert the SimpleVector (from camera.getPosition()) into OpenGL's coordinate system (then put it into an array of floats with 1 at the end)? If so, how do I convert it? Do you also suppose that the ocean is appearing in midair because I didn't fill vViewPosition?

Negate z and y of the position and add 1 as the forth component. However, i don't see a relation to the position of the ocean. The ocean's position is calculated at the line with the "gl_Position". It doesn't seem to take vViewPosition into account but some calculated position based on the normal vertex position with some height stuff added...

That is bizarre, then, because my ocean is not where it should be. Hey, it just occurred to me: does using a shader cancel the parent's transformations?

does using a shader cancel the parent's transformations?
No, why should it?

All I know are the facts: with the shader, the ocean is floating in the sky. Without it, it's where it should be.

Then that's because the shader modifies the position (which it does). As mentioned, "gl_Position" defines the position.

I often find you resistant to pointing out things that you think are obvious (and that aren't always). What I don't understand is that once removed from its regular place, the ocean stays still (only the waves move). Also, who set the gl_ModelViewProjectionMatrix (or otherwise, how is it calculated)? Isn't that likely the problem?

Sorry...but the thing is that i always stated that i'm not giving full support for writing shaders. Simply because i'm not the best person to do this and there should be plenty of information out there that's better than what i can contribute. I also said, that i find it almost impossible to adapt a shader without knowing some basics about shaders and how they work. And i'm not the person to teach this. I try to help, but i can't be bothered to rewrite some random shader from the internet to make it work.
Anyway, gl_ModelViewProjectionMatrix is a buildin uniform of desktop OpenGL (it's absent in OpenGL ES). It will be populated automatically, so there's no room for it to be wrong (as long as you've compiled the Object3D). The same applies to gl_Vertex. So if you do a
gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;
You'll get the same position in space that you'll get if you weren't using any shader at all.

All I can say is that I've been looking, man. I've shown you some of the crap that's online. Nearly no one explains their work, and it's almost as if the community purposely wants to confuse us (if you can show your shader working on a video, it works, and it shouldn't have float = vec3 errors). Besides, this isn't a random shader. It's like the 20th one I experimented with and the first one to actually work, despite this weird displacement. It's also short and fairly easy to understand (once I can see it working perfectly).

In my tests, I can rotate it to my heart's content, but I can't translate it. This is obviously a product of the way in which jpct stores the child/parent's translation: ModelViewProjectionMatrix doesn't see that the child was translated at all. Would you be willing to make me that method now (detachFromParentButKeepTransformations() or so)?

The matrix is injected into the shader by OpenGL. It's the exact same matrix that OpenGL uses when no shader has been assigned. Make sure that you've compiled the object and try to transform the object without any shader assigned. If that works, replace the gl_Position line of the shader with the one that i've posted above and try again. If that doesn't work anymore...then you have created a miracle.

Translating without setting render hook works.
I hadn't realized that you changed it. Your line works, thank you very much. So how exactly aren't you the best person to help with this? : )

OK, but my line disables a part of the actual shader. I'll look at the shader again later to see why this is the case. Has to be something really simple...

I see now that you effectively skipped the waveHeight line.

Yes. The shader completely replaces the z value of the vertex. I would try something like this instead:
vec4 pos = gl_Vertex;
pos.z += waveHeight * ((height + height2) / 2.0);
gl_Position = gl_ModelViewProjectionMatrix * pos;

It works, thank you very much. The result is a little weird, though. You know I actually had tried the += instead of the =. The difference is that I hadn't replaced the * pos.xzyw with * pos. Is the fact that this worked and not * pos.xzyw the difference in jpct's and OpenGL's orientation?

Then i've solved this by accident... ;) I actually haven't noticed that it wasn't xyzw but xzyw...i just removed it, because i though it was xyzw and saw no point in that. However, i still see no point in using xzyw...it effectively rotates the object 90° around x without using the rotation matrix for this. This shouldn't be done IMHO because it's nothing but confusing. Looks more like a hack to me. jPCT's coordinate system doesn't matter in a shader. In a shader, only OpenGL's system exist, so no...this has nothing to do with it.

In yet another shader question (honestly, you're the best guy I know for these): how do I rotate the effects of a shader by a camera? I have a simple lake water shader that I've part copied, part written myself, which makes for a very convincing lake if you don't turn the camera. How do I rotate everything along with the camera? Below is the fragment shader (which is the whole thing, really):
uniform float iGlobalTime;
uniform vec3 iResolution;
uniform sampler2D iChannel0;
uniform sampler2D iChannel1;
uniform sampler2D iChannel2;
uniform sampler2D iChannel3;
#ifdef GL_ES
precision highp float;
#endif
const float PI = 3.1415926535897932;
// play with these parameters to customize the effect
// ===================================================
//speed
const float speed = 0.1;
const float speed_x = 0.1;//.15
const float speed_y = 0.1;//.15
// refraction
const float emboss = 0.50;//.5
const float intensity = .5;//2.4
const int steps = 8;
const float frequency = 6.0;
const int angle = 7; // better when a prime
// reflection
const float delta = 60.;//60
const float intence = 700.;//700
const float reflectionCutOff = 0.012;
const float reflectionIntence = 200000.;
// ===================================================
float time = iGlobalTime*1.3;
// Sum of 'steps' rotated, drifting cosine waves; returns a pseudo-random
// height in [-1,1] used as the water turbulence field.
// NOTE(review): the stripped '-' characters ('adjc.y -= ... - ...' and
// 'cos(theta) - adjc.y*sin(theta)') are restored from the original shader.
float col(vec2 coord) {
    float delta_theta = 2.0 * PI / float(angle);
    float col = 0.0;
    float theta = 0.0;
    for (int i = 0; i < steps; i++) {
        vec2 adjc = coord;
        theta = delta_theta*float(i);
        adjc.x += cos(theta)*time*speed + time * speed_x;
        adjc.y -= sin(theta)*time*speed - time * speed_y;
        col = col + cos( (adjc.x*cos(theta) - adjc.y*sin(theta))*frequency)*intensity;
    }
    return cos(col);
}
// Lake water: finite differences of the turbulence field give a refraction
// offset and a brightness term; two textures are blended for the final colour.
// NOTE(review): stripped '-' characters restored; the 'c1.y = -(...)' sign is a
// reconstruction from the original ShaderToy source — confirm.
void main(void) {
    vec2 p = (gl_FragCoord.xy) / iResolution.xy, c1 = p, c2 = p;
    float cc1 = col(c1);
    // slope in x
    c2.x += iResolution.x/delta;
    float dx = emboss*(cc1-col(c2))/delta;
    // slope in y
    c2.x = p.x;
    c2.y += iResolution.y/delta;
    float dy = emboss*(cc1-col(c2))/delta;
    // displace the sample position by the slopes (refraction)
    c1.x += dx*2.;
    c1.y = -(c1.y+dy*2.);
    float alpha = 1.+dot(dx,dy)*intence;
    // bright highlight where both slopes exceed the cutoff
    float ddx = dx - reflectionCutOff;
    float ddy = dy - reflectionCutOff;
    if (ddx > 0. && ddy > 0.)
        alpha = pow(alpha, ddx*ddy*reflectionIntence);
    vec4 col = texture2D(iChannel0,c1)*(alpha);
    vec4 col2 = texture2D(iChannel1,c1)*(alpha);
    vec4 col3 = mix(col, col2, .4);
    gl_FragColor = col3;
}

Rotations happen in the vertex shader, not in the fragment shader. In desktop OpenGL, all you have to do is something like
gl_Position = ftransform();

What does "Negate z and y of the position and add 1 as the fourth component" mean? Do I switch z and y or do I just multiply them by -1 and make a vec4 with vec4(x, -y, -z, 1)? I'm trying out yet another shader...

...or do I just multiply them by -1 and make a vec4 with vec4(x, -y, -z, 1)?
Yes, that!

Cool.
Is there a way for me to pass an Object3D to the shader? I would like for my ocean (which is now looking pretty good, in my opinion) to move a boat...

Is there a way for me to pass an Object3D to the shader? I would like for my ocean (which is now looking pretty good, in my opinion) to move a boat...
No. How should that be possible? A shader is a program that works on the geometry of an object. If you want a boat on the ocean, that's simply another object.

Shaders render and move other objects: look for the buoy shader in ShaderToy for an example (in that case, an object that was created inside the very shader).
Here's a question: how expensive is it to get all the ocean vertices under the boat every frame (outside of the shader)? I'm thinking about just rotating the boat by the largest slope within the area of the boat (and doing this, obviously, for every frame)...

You can write almost everything in a shader, but that's not the point. Shaders work on vertices and fragments that you give to them one after the other. You can manipulate them in a way that they form new geometry, but you can't create additional geometry out of thin air. That's just not how a shader works. One vertex goes in and one goes out...there's no way around this.
For the second question: You can't do this. There's no reasonable way to access the data of the shader. That's why game play relevant physics are still done on the CPU. You somehow have to approximate this in code.

You can write almost everything in a shader, but that's not the point.
How's that not the point?
For the second question: You can't do this. There's no reasonable way to access the data of the shader. That's why game play relevant physics are still done on the CPU. You somehow have to approximate this in code.
That's terrible. How does a game like Assassin's Creed 4 do it, then? Is its ocean not a shader?

How's that not the point?
It's not, because it doesn't really help. People wrote Frogger in shader code, but even then...you can't break the basic rule of how a shader fits into the pipeline, and that is vertex in -> vertex out and fragment in -> fragment out. It's not vertex in -> 10 vertices out. That's simply not possible.
That's terrible. How does a game like Assassin's Creed 4 do it, then? Is its ocean not a shader?
By spending millions of dollars and dozens of people on the water simulation alone? Fact is: You can't access the modified vertices from the shader. However, if you know how your shader does things, you actually know where a wave is at a given time and you can do your calculations in code accordingly. Or you apply the same vertex manipulation that you are doing to the ocean to the boat...if that's feasible.

And those "millions of dollars" buy a solution, right?
However, if you know how your shader does things, you actually know where a wave is at a given time and you can do your calculations in code accordingly. Or you apply the same vertex manipulation that you are doing to the ocean to the boat...if that's feasible.
Then wouldn't it be better to just do the water entirely out of the shader? Why do the same calculations twice?

And those "millions of dollars" buy a solution, right?
They buy time and people...i guess that's a good thing to come to a solution. Personally, i don't have one...just ideas what one could do. I'm not sure how well it will work or how much work it will be.
Then wouldn't it be better to just do the water entirely out of the shader? Why do the same calculations twice?
Because it's much faster that way. I guess one would have a physics simulation of the ocean and the ships that controls the actual entities and that runs on the cpu and some shaders that add to the visuals. The tricky thing is to sync them.

Do we have the ability to read from the depth buffer?

No, not really. You can grab the depth buffer from the GPU, but as with all reads from the graphics card's context, it's much too slow to do in a real-time application.

Is there no way to improve that that doesn't change it significantly (to get a raymarched water shader to work with Objec3Ds)?

No. GPUs are optimized for accessing their local memory. It's already much slower to transfer data to them and it's really slow to read from them.

I know how you love these shader questions, and I'm sorry about that. The following is a really good wholescene shader that renders, entirely in the fragment shader, a phenomenal ocean (and its containing scene, including the sky). This may seem like a stupid question, but I would like for it to consider the whole scene but only render the ocean (that is to say, for the godrays and clouds to affect the ocean as they currently do, but to only paint the ocean on a jpct plane). Is that too hard a question or do you see a simple solution (I've looked and, with my very limited understanding of shaders, haven't found one).
// Clouds: slice based volumetric heightclouds with godrays, density, sunradiance/shadow
// and
// Water: simple reflecting sky/sun and cloud shaded heightmodulated waves
//
// Created by Frank Hugenroth 03/2013
//
// License Creative Commons AttributionNonCommercialShareAlike 3.0 Unported License.
//
// noise and raymarching based on concepts and code from shaders by inigo quilez
//
// some variables to change :)
uniform vec3 iResolution; //VIEWPORT RESOLUTION (IN PIXELS)
uniform float iGlobalTime; //SHADER PLAYBACK TIME (IN SECONDS)
//uniform float iChannelTime[4]; //CHANNEL PLAYBACK TIME (IN SECONDS)
//uniform vec3 iChannelResolution[4]; //CHANNEL RESOLUTION (IN PIXELS)
uniform sampler2D iChannel0;
uniform sampler2D iChannel1;
uniform sampler2D iChannel2;
uniform sampler2D iChannel3;
//uniform vec4 iDate;
//#define RENDER_GODRAYS 1 // set this to 1 to enable godrays
#define RENDER_GODRAYS 0 // disable godrays
#define RENDER_CLOUDS 1//1
#define RENDER_WATER 1
float waterlevel = 70.0; // height of the water
float wavegain = 1.0; // change to adjust the general water wave level
float small_waveheight = 1.0; // change to adjust the small waves
// NOTE(review): the color constants below came through a lossy forum paste;
// values look plausible but verify against the original Shadertoy source.
float large_waveheight = 1.0; // change to adjust the "heavy" waves (set to 0.0 to have a very still ocean :)
vec3 fogcolor = vec3( 0.5, 0.7, 1.1 ); // distance-fog tint
vec3 skybottom = vec3( 0.6, 0.8, 1.2 ); // sky gradient near the horizon
vec3 skytop = vec3(0.05, 0.2, 0.5); // sky gradient at the zenith
vec3 reflskycolor= vec3(0.025, 0.10, 0.20); // sky color as reflected by water
vec3 watercolor = vec3(0.2, 0.25, 0.3); // body color of the water
vec3 light = normalize( vec3( 0.1, 0.25, 0.9 ) ); // direction toward the sun
// Cheap deterministic hash: the fractional part of a large-amplitude cosine.
// Not a great hash statistically, but fast and good enough for value noise.
float hash( float n ) {
    float wave = cos(n)*41415.92653;
    return fract(wave);
}
// 2D value noise: hash the four lattice corners around x and bilinearly
// blend them with a smoothstep-shaped fraction.
float noise( in vec2 x ) {
    vec2 cell = floor(x);
    vec2 blend = smoothstep(0.0, 1.0, fract(x));
    // Corners are indexed on a 57-wide virtual grid.
    float corner = cell.x + cell.y*57.0;
    float bottom = mix( hash(corner + 0.0), hash(corner + 1.0), blend.x);
    float top = mix( hash(corner + 57.0), hash(corner + 58.0), blend.x);
    return mix(bottom, top, blend.y);
}
// 3D value noise: hash the eight lattice corners around x and trilinearly
// blend them with a smoothstep-shaped fraction.
float noise( in vec3 x ) {
    vec3 cell = floor(x);
    vec3 blend = smoothstep(0.0, 1.0, fract(x));
    // Corners are indexed on a 57x113 virtual grid.
    float base = cell.x + cell.y*57.0 + 113.0*cell.z;
    float nearFace = mix(mix( hash(base + 0.0), hash(base + 1.0), blend.x),
                         mix( hash(base + 57.0), hash(base + 58.0), blend.x), blend.y);
    float farFace = mix(mix( hash(base + 113.0), hash(base + 114.0), blend.x),
                        mix( hash(base + 170.0), hash(base + 171.0), blend.x), blend.y);
    return mix(nearFace, farFace, blend.z);
}
// Octave rotation/scale matrix for 3D fbm. NOTE(review): the minus signs were
// stripped by the forum paste; restored to iq's standard fbm matrix (the
// classic mat3(0,.8,.6, -.8,.36,-.48, -.6,-.48,.64) scaled by 2), whose
// magnitudes exactly match the values that survived. Without the signs the
// matrix is no longer a rotation and the noise shears badly.
mat3 m = mat3( 0.00, 1.60, 1.20, -1.60, 0.72, -0.96, -1.20, -0.96, 1.28 );
// Fractional Brownian motion: four octaves of 3D value noise, each rotated
// and roughly doubled in frequency, with halving amplitudes.
float fbm( vec3 p ) {
    float f = 0.5000*noise( p ); p = m*p*1.1;
    f += 0.2500*noise( p ); p = m*p*1.2;
    f += 0.1666*noise( p ); p = m*p;
    f += 0.0834*noise( p );
    return f;
}
// Octave rotation/scale matrix for 2D fbm. NOTE(review): restored the '-'
// dropped by the forum paste — iq's standard 2D fbm matrix is
// mat2(1.6, 1.2, -1.2, 1.6) (a scaled rotation); without the sign the
// matrix degenerates and the octaves align instead of decorrelating.
mat2 m2 = mat2(1.6, 1.2, -1.2, 1.6);
// Fractional Brownian motion: four octaves of 2D value noise, each rotated
// and scaled by m2, with halving amplitudes.
float fbm( vec2 p ) {
    float f = 0.5000*noise( p ); p = m2*p;
    f += 0.2500*noise( p ); p = m2*p;
    f += 0.1666*noise( p ); p = m2*p;
    f += 0.0834*noise( p );
    return f;
}
// Water surface height at horizontal position p: waterlevel plus several
// slow crossing sine waves (scaled by large_waveheight) plus two fbm layers
// of small ripple (scaled by small_waveheight). trace() marches against this.
float water( vec2 p ) {
    float height = waterlevel;
    // Time-based drift for the two wave families.
    vec2 shift1 = 0.001*vec2( iGlobalTime*160.0*2.0, iGlobalTime*120.0*2.0 );
    vec2 shift2 = 0.001*vec2( iGlobalTime*190.0*2.0, iGlobalTime*130.0*2.0 );
    // coarse crossing 'ocean' waves...
    float wave = 0.0;
    wave += sin(p.x*0.021 + shift2.x)*4.5;
    wave += sin(p.x*0.0172+p.y*0.010 + shift2.x*1.121)*4.0;
    // NOTE(review): '-=' restored — the paste left a bare '=' that would have
    // discarded the two components above; the original subtracts this term so
    // the coarse waves partially cancel.
    wave -= sin(p.x*0.00104+p.y*0.005 + shift2.x*0.121)*4.0;
    // ...added by some smaller faster waves...
    wave += sin(p.x*0.02221+p.y*0.01233+shift2.x*3.437)*5.0;
    wave += sin(p.x*0.03112+p.y*0.01122+shift2.x*4.269)*2.5 ;
    wave *= large_waveheight;
    // ...plus some distorted random fbm waves (makes the water look like water :)
    wave += fbm(p*0.01+shift1)*small_waveheight*8.0;
    wave += fbm(p*0.022+shift2)*small_waveheight*6.0;
    height += wave;
    return height;
}
// Cloud occlusion raycast: marches 12 thin cloud layers along rDirection
// from rStart, accumulating density; returns 1.0 for a clear path down to
// ~0.0 for fully occluded. Used for cloud shadows on water and god rays.
float trace_fog(in vec3 rStart, in vec3 rDirection ) {
#if RENDER_CLOUDS
    // makes the clouds move...
    vec2 shift = vec2( iGlobalTime*80.0, iGlobalTime*60.0 );
    float sum = 0.0;
    // use only 12 cloud layers ;)
    // this improves performance but results in "godrays shining through clouds" effect (sometimes)...
    for (int q=1000; q<1012; q++) {
        // NOTE(review): 'q-1000' and the other '-' operators restored — the
        // forum paste fused them away ('q1000' does not even compile).
        float c = (float(q-1000)*50.0+350.0-rStart.y) / rDirection.y; // cloud distance
        vec3 cpos = rStart + c*rDirection + vec3(831.0, 321.0+float(q-1000)*.75-shift.x*0.2, 1330.0+shift.y*3.0); // cloud position
        float alpha = smoothstep(0.5, 1.0, fbm( cpos*0.0015 )); // cloud density
        if (alpha > 0.8) // dense enough: fully shadowed, stop early
            break;
        sum += (1.0-sum)*alpha; // alpha saturation
    }
    return clamp( 1.0-sum, 0.0, 1.0 );
#else
    return 1.0;
#endif
}
// fog and water intersection function.
// 1st: collects fog/god-ray intensity while traveling
// 2nd: checks if the ray hits the water surface; outputs the hit distance
//      in `dist` and the accumulated god-ray amount in `fog`.
bool trace(in vec3 rStart, in vec3 rDirection, in float sundot, out float fog, out float dist) {
    float h = 20.0; // current height above the water surface
    float t = 0.0; // distance traveled along the ray
    float st = 1.0; // step-size multiplier
    float alpha = 0.1;
    float asum = 0.0;
    vec3 p = rStart;
    for (int j = 1000; j < 1120; j++) {
        // some speedup if all is far away...
        // NOTE(review): reordered largest-first — with the original ascending
        // tests, 't > 500.0' always matched, so the 5.0 and 12.0 branches
        // were unreachable.
        if (t > 1000.0)
            st = 12.0;
        else if (t > 800.0)
            st = 5.0;
        else if (t > 500.0)
            st = 2.0;
        p = rStart + t*rDirection; // calc current ray position
#if RENDER_GODRAYS
        if (p.y > 50.0 && sundot > 0.001 && t>300.0 && t < 2400.0) {
            // God-ray contribution, attenuated by cloud shadow toward the sun.
            // ('-' operators restored: the paste dropped them.)
            alpha = sundot * clamp((p.y-waterlevel)/waterlevel, 0.0, 1.0) * 20.0 * st * 0.0012*smoothstep(0.90, 1.0, trace_fog(p,light));
            asum += (1.0-asum)*alpha;
            if (asum > 0.9)
                break;
        }
#endif
        h = p.y - water(p.xz); // '-' restored: height of the ray above the waves
        if ( h < 0.1 ) { // hit the water?
            dist = t;
            fog = asum;
            return true;
        }
        if (p.y > 450.0) // lost in space? quit...
            break;
        // speed up ray if possible...
        if (rDirection.y > 0.0) // look up (sky!) -> make large steps
            t += 30.0 * st;
        else if (p.y < waterlevel+20.0) // close to the surface: step by height
            t += max(1.0,2.0*h)*st;
        else
            t += max(1.0,1.0*h)*st;
    }
    dist = t;
    fog = asum;
    if (h<10.0) // ended close to the surface: count it as a hit
        return true;
    return false;
}
// Camera flight path: sweeps sideways on a sine while advancing steadily
// along +z; y is overridden by the caller.
vec3 camera( float time ) {
    float sideways = 500.0 * sin(1.5 + 1.57*time);
    float forward = 1200.0*time;
    return vec3( sideways, 0.0, forward );
}
// Fragment entry point: builds a flying camera, raymarches against the water
// height field, and shades either sky+clouds (miss) or reflecting water (hit).
// NOTE(review): the many '-' operators below were stripped by the forum paste
// (e.g. '1.00.7*rd.y', 'camtarcampos', 'q1000') and have been restored.
void main(void) {
    // Pixel -> [-1,1] NDC ('-1.0' restored), stretched for aspect.
    vec2 xy = -1.0 + 2.0*gl_FragCoord.xy / iResolution.xy;
    vec2 s = xy*vec2(1.75,1.0);
    // get camera position and view direction
    float time = (iGlobalTime+13.5)*.05;
    vec3 campos = camera( time );
    vec3 camtar = camera( time + 0.4 ); // look-ahead point on the same path
    campos.y = max(waterlevel+30.0, waterlevel+90.0 + 60.0*sin(time*2.0));
    camtar.y = campos.y*0.9;
    float roll = 0.14*sin(time*1.2); // gentle banking
    vec3 cw = normalize(camtar-campos); // '-' restored: forward axis
    vec3 cp = vec3(sin(roll), cos(roll),0.0);
    vec3 cu = normalize(cross(cw,cp)); // camera right
    vec3 cv = normalize(cross(cu,cw)); // camera up
    vec3 rd = normalize( s.x*cu + s.y*cv + 1.6*cw ); // per-pixel ray
    float sundot = clamp(dot(rd,light),0.0,1.0);
    vec3 col;
    float fog=0.0, dist=0.0;
    if (!trace(campos,rd,sundot, fog, dist)) {
        // render sky: vertical gradient ('1.0-0.7*rd.y' restored)
        float t = pow(1.0-0.7*rd.y, 15.0);
        col = 0.8*(skybottom*t + skytop*(1.0-t));
        // sun
        col += 0.47*vec3(1.6,1.4,1.0)*pow( sundot, 350.0 );
        // sun haze
        col += 0.4*vec3(0.8,0.9,1.0)*pow( sundot, 2.0 );
#if RENDER_CLOUDS
        // CLOUDS
        vec2 shift = vec2( iGlobalTime*80.0, iGlobalTime*60.0 );
        vec4 sum = vec4(0,0,0,0);
        for (int q=1000; q<1120; q++) { // 120 layers
            float c = (float(q-1000)*10.0+350.0-campos.y) / rd.y; // cloud height ('q-1000' restored)
            vec3 cpos = campos + c*rd + vec3(831.0, 321.0+float(q-1000)*.15-shift.x*0.2, 1330.0+shift.y*3.0); // cloud position
            float alpha = smoothstep(0.5, 1.0, fbm( cpos*0.0015 ))*.9; // fractal cloud density
            vec3 localcolor = mix(vec3( 1.1, 1.05, 1.0 ), 0.7*vec3( 0.4,0.4,0.3 ), alpha); // density color white->gray
            alpha = (1.0-sum.w)*alpha; // alpha/density saturation (the denser a layer, the more it hides the ones behind)
            sum += vec4(localcolor*alpha, alpha); // sum up weighted color
            if (sum.w>0.98) // opaque enough, stop
                break;
        }
        float alpha = smoothstep(0.7, 1.0, sum.w);
        sum.rgb /= sum.w+0.0001;
        // Darken dense cloud parts when in front of (or near) the sun
        // (simulates cloud self-shadow). '-=' restored: the comment's
        // "darken" confirms a subtraction.
        sum.rgb -= 0.6*vec3(0.8, 0.75, 0.7)*pow(sundot,13.0)*alpha;
        // Brighten the low-density edges (simulates light scattering in fog).
        sum.rgb += 0.2*vec3(1.3, 1.2, 1.0)* pow(sundot,5.0)*(1.0-alpha);
        col = mix( col, sum.rgb , sum.w*(1.0-t) );
#endif
        // add godrays
        col += vec3(0.5, 0.4, 0.3)*fog;
    }
    else {
#if RENDER_WATER
        // render water
        vec3 wpos = campos + dist*rd; // position where the ray meets the water
        // calculate water mirror: central differences of the height field
        vec2 xdiff = vec2(1.0, 0.0)*wavegain;
        vec2 ydiff = vec2(0.0, 1.0)*wavegain;
        // reflect the view ray off the approximate surface normal ('-' restored x4)
        rd = reflect(rd, vec3(water(wpos.xz-xdiff) - water(wpos.xz+xdiff), 1.0, water(wpos.xz-ydiff) - water(wpos.xz+ydiff)));
        float sh = smoothstep(0.2, 1.0, trace_fog(wpos+20.0*rd,rd))*.7+.3; // cloud shadow on the water
        // water reflects more the lower the reflecting angle is...
        float refl = 1.0-clamp(dot(rd,vec3(0.0, 1.0, 0.0)),0.0,1.0);
        float wsky = refl*sh; // reflecting (skycolor) amount
        float wwater = (1.0-refl)*sh; // watercolor amount
        float sundot = clamp(dot(rd,light),0.0,1.0); // sun seen via the reflected ray
        // watercolor
        col = wsky*reflskycolor; // reflecting skycolor
        col += wwater*watercolor;
        // Sun glitter: three specular lobes of different tightness
        float wsunrefl = wsky*(0.25*pow( sundot, 10.0 )+0.25*pow( sundot, 1.5)+0.25*pow( sundot, 200.0));
        col += vec3(1.5,1.3,1.0)*wsunrefl; // sun reflection
#endif
        // global depth fog ('1.0-exp(-...)' restored)
        float fo = 1.0-exp(-pow(0.0003*dist, 1.5));
        vec3 fco = fogcolor + 0.6*vec3(0.6,0.5,0.4)*pow( sundot, 4.0 );
        col = mix( col, fco, fo );
        // add godrays
        col += vec3(0.5, 0.4, 0.3)*fog;
    }
    gl_FragColor=vec4(col,1.0);
}

I don't see how...this kind of shader actually abuses the shader concept to use it as a drawing canvas IMHO. If you apply it to a plane, you would end up with an image of an ocean projected onto a plane. Just like a perspective painting that has fallen over...

I did apply it to a plane. The result is very strange: depending on where I am over the plane I can be looking either at the sky or at the ocean. But the ocean sure looks great.

As said, i don't see a way to use this on a plane unless you really understand what it does and can adjust it so that it works on real 3d geometry.

Here's a question: how expensive is it to get all the ocean vertices under the boat every frame (outside of the shader)? I'm thinking about just rotating the boat by the largest slope within the area of the boat (and doing this, obviously, for every frame)...
...
I guess one would have a physics simulation of the ocean and the ships that controls the actual entities and that runs on the cpu and some shaders that add to the visuals. The tricky thing is to sync them.
There's a technique that involves swapping between two buffers called transform feedback. I'm playing with it right now. If I should have any success, I will post the solution here.