Mevia
Mevia

Reputation: 1564

Port shader program with buffers from shadertoy into three.js

I have lately been experimenting with and learning WebGL and shaders, as I am trying to build an animated background for a website. I was able to port simple examples from Shadertoy into three.js to play with them. Now I am trying to understand more advanced examples, and I am struggling with this particular example:

https://www.shadertoy.com/view/4sfBWj

I am aware that to port shader program to three.js you need to:

  1. create three.js basic setup with new THREE.PlaneGeometry()
  2. create uniforms for iTime and iResolution
  3. create vertex and fragment shaders script tags
  4. populate fragment shader with shadertoy contents (image section)
  5. populate vertex shader with generic script
  6. change names to gl_FragColor and gl_FragCoord
  7. change name of the function to void main(void)

if there is some texture used in one or more channels then

  1. load texture with new THREE.TextureLoader() and create uniform for iChannel0

Basic examples will be good to go with the above. However, the example I linked also uses two buffer passes (the Buf A and Buf B tabs on Shadertoy), and both of these contain their own shader programs and main functions that run them. How do I deal with that in order to port it to three.js?

my current progress:

// Multi-pass Shadertoy port: scene0 renders the procedural fire,
// scene1 blurs it, scene2 composites both passes to the canvas.
var container;
var camera, scene0, scene1, scene2, renderer;
// One uniforms object per pass (iTime / iResolution / iChannelN).
var uniforms0, uniforms1, uniforms2;
var startTime;
// Off-screen buffers that carry one pass's output into the next pass.
var renderTarget0, renderTarget1;

// Drives the iTime uniforms via getDelta() once per frame.
var clock = new THREE.Clock();

init();
animate();

// Build the three passes: fire -> renderTarget0, blur -> renderTarget1,
// composite -> canvas. Each pass gets its own scene so a pass never
// samples the render target it is currently writing to.
function init() {
	container = document.getElementById( 'container' );
	startTime = Date.now();

	// A bare THREE.Camera has an identity projection matrix, which is all
	// a full-screen quad pass needs.
	camera = new THREE.Camera();
	camera.position.z = 1;

	scene0 = new THREE.Scene();
	scene1 = new THREE.Scene();
	scene2 = new THREE.Scene();

	renderTarget0 = new THREE.WebGLRenderTarget(window.innerWidth, window.innerHeight);
	renderTarget1 = new THREE.WebGLRenderTarget(window.innerWidth, window.innerHeight);

	/* scene0: procedural fire (no texture inputs) */
	var geometry0 = new THREE.PlaneGeometry(700, 394, 1, 1);
	uniforms0 = {
		iTime: { type: "f", value: 1.0 },
		// FIX: a THREE.Vector2 uniform is type "v2", not "v1".
		iResolution: { type: "v2", value: new THREE.Vector2() }
	};

	var material0 = new THREE.ShaderMaterial( {
		uniforms: uniforms0,
		vertexShader: document.getElementById( 'vs0' ).textContent,
		fragmentShader: document.getElementById( 'fs0' ).textContent
	});

	var mesh0 = new THREE.Mesh( geometry0, material0 );

	/* scene1: Gaussian blur of scene0's output */
	var geometry1 = new THREE.PlaneGeometry(700, 394, 1, 1);
	uniforms1 = {
		iTime: { type: "f", value: 1.0 },
		iResolution: { type: "v2", value: new THREE.Vector2() },
		// FIX: samplers are uniforms — they must live inside the `uniforms`
		// object. Passing iChannel0 as an extra ShaderMaterial option (as
		// before) is silently ignored, so the shader never got its texture.
		iChannel0: { type: "t", value: renderTarget0.texture }
	};

	var material1 = new THREE.ShaderMaterial( {
		uniforms: uniforms1,
		vertexShader: document.getElementById( 'vs1' ).textContent,
		fragmentShader: document.getElementById( 'fs1' ).textContent
	});

	var mesh1 = new THREE.Mesh( geometry1, material1 );

	/* scene2: final composite of fire + blurred glow, drawn to the canvas */
	var geometry2 = new THREE.PlaneGeometry(700, 394, 1, 1);
	uniforms2 = {
		iTime: { type: "f", value: 1.0 },
		iResolution: { type: "v2", value: new THREE.Vector2() },
		iChannel0: { type: "t", value: renderTarget0.texture },
		iChannel1: { type: "t", value: renderTarget1.texture }
	};

	var material2 = new THREE.ShaderMaterial( {
		uniforms: uniforms2,	// FIX: was uniforms1 (copy/paste error)
		vertexShader: document.getElementById( 'vs2' ).textContent,
		fragmentShader: document.getElementById( 'fs2' ).textContent
	});

	var mesh2 = new THREE.Mesh( geometry2, material2 );

	scene0.add( mesh0 );
	scene1.add( mesh1 );
	scene2.add( mesh2 );

	renderer = new THREE.WebGLRenderer();
	container.appendChild( renderer.domElement );
	onWindowResize();
	window.addEventListener( 'resize', onWindowResize, false );
}

// Keep every pass's iResolution uniform and the canvas size in sync with
// the browser window.
function onWindowResize( event ) {
	var width = window.innerWidth;
	var height = window.innerHeight;
	[ uniforms0, uniforms1, uniforms2 ].forEach( function ( passUniforms ) {
		passUniforms.iResolution.value.x = width;
		passUniforms.iResolution.value.y = height;
	} );
	renderer.setSize( width, height );
}
// Main loop: schedule the next frame, then draw the current one.
function animate() {
	requestAnimationFrame( animate );
	render();
}

// One frame: advance time, then run the three passes in dependency order.
function render() {
	// FIX: getDelta() returns the elapsed time since the PREVIOUS getDelta()
	// call, so it must be read once per frame and shared — calling it three
	// times in a row advanced uniforms1/uniforms2 by almost zero.
	var delta = clock.getDelta();
	uniforms0.iTime.value += delta;
	uniforms1.iTime.value += delta;
	uniforms2.iTime.value += delta;

	// Pass 1: fire -> renderTarget0.  (These render calls were commented
	// out, so nothing was ever drawn.)
	renderer.render( scene0, camera, renderTarget0 );

	// Pass 2: blur reads the fire texture, writes renderTarget1.
	// FIX: was `rendertarget0` (lowercase t) — an undefined variable.
	// NOTE(review): iChannel0/iChannel1 must exist in the uniforms objects
	// built in init() for these assignments to take effect.
	uniforms1.iChannel0.value = renderTarget0.texture;
	renderer.render( scene1, camera, renderTarget1 );

	// Pass 3: composite both buffers to the canvas.
	uniforms2.iChannel0.value = renderTarget0.texture;
	uniforms2.iChannel1.value = renderTarget1.texture;
	renderer.render( scene2, camera );
}
/* Full-window canvas: strip the default page margins and hide scrollbars. */
body {
   margin:0;
   padding:0;
   overflow:hidden;
}
<script src="//threejs.org/build/three.min.js"></script>
<div id="container"></div>

<!--  BREAK --->

<script id="vs0" type="x-shader/x-vertex">
  // Pass-through vertex shader: project the plane with the standard
  // three.js matrices (identity projection for a bare THREE.Camera).
  void main() {
    vec4 mvPosition = modelViewMatrix * vec4(position, 1.0 );
    gl_Position = projectionMatrix * mvPosition;
  }
</script>
<script id="fs0" type="x-shader/x-fragment">

    uniform vec2 iResolution;
    uniform float iTime;

	// Pass 1 (the Shadertoy image source): procedural fire built from
	// trilinear value noise warped by two layers of fBm.

	// Constant matrices used to rotate/decorrelate successive fBm octaves.
	const mat2 m = mat2( 0.8,  0.6, -0.6,  0.8 );
	const mat3 m3 = mat3( 0.8,  0.6, 0.0, -0.6,  0.80, 0.0, 0.0, 0.0, 1.0) *
					mat3( 1.0,  0.0, 0.0, 0.0, -0.60,  0.80, 0.0, 0.8, 0.6) *
					mat3( 0.8, 0.6, 0.0, -0.6,  0.80, 0.0, 0.0, 0.0, 1.0) *
					mat3( 1.0,  0.0, 0.0, 0.0, -0.60,  0.80, 0.0, 0.8, 0.6);

	// Scaled copy of iTime; written once at the top of main().
	float time;

	// Three cheap 1-D hashes (fract-of-sine) with different frequencies
	// and phases, combined below into a 3-D hash.
	float n1f0(float p) {
		return fract(sin(p * 1.7227636) * 8.03e2);
	}

	float n1f1(float p) {
		return fract(sin(p * 1.42736 + 1.12) * 5.1e2);
	}

	float n1f2(float p) {
		return fract(sin(p * 1.22712 + 12.161) * 5.2e2);
	}


	// 3-D hash at a lattice point, built from the 1-D hashes above.
	float n3f(vec3 p) {
		return fract(n1f0(p.x) + n1f1(p.y) + n1f2(p.z) + n1f0(p.x * 1.613) + n1f1(p.y * 3.112) + n1f2(p.z * 4.112));
	}

	// 3-D value noise: hash the 8 corners of the containing lattice cell
	// and blend them trilinearly with a smoothstep fade.
	float n3(vec3 p) {
		vec3 b = floor(p);
		vec3 e = b + vec3(1.0);
		vec3 f = smoothstep(vec3(0.0), vec3(1.0), fract(p));
		float c000 = n3f(b);
		float c001 = n3f(vec3(b.x, b.y, e.z));
		float c010 = n3f(vec3(b.x, e.y, b.z));
		float c011 = n3f(vec3(b.x, e.y, e.z));
		float c100 = n3f(vec3(e.x, b.y, b.z));
		float c101 = n3f(vec3(e.x, b.y, e.z));
		float c110 = n3f(vec3(e.x, e.y, b.z));
		float c111 = n3f(e);
		vec4 z = mix(vec4(c000, c100, c010, c110), vec4(c001, c101, c011, c111),  f.z);
		vec2 yz = mix(z.xy, z.zw, f.y);
		return mix(yz.x, yz.y, f.x);

	}


	// 4-octave fractional Brownian motion; each octave halves the
	// amplitude and roughly doubles the frequency (rotated by m3).
	float fbm4( vec3 p )
	{
		float f = 0.0;
		p = m3 * p;
		f +=     0.5000*n3( p ); p = m3*p*2.02;
		f +=     0.2500*n3( p ); p = m3*p*2.03;
		f +=     0.1250*n3( p ); p = m3*p*2.01;
		f +=     0.0625*n3( p );
		return f/0.9375;
	}

	// 2-D convenience overload: uses the global `time` as the third axis.
	float fbm4( vec2 p )
	{
		return fbm4(vec3(p, time));
	}

	// 6-octave variant of fbm4 (finer detail for the final lookup).
	float fbm6( vec3 p )
	{
		float f = 0.0;
		p = m3 * p;
		f +=     0.500000*n3( p ); p = m3*p*2.02;
		f +=     0.250000*n3( p ); p = m3*p*2.03;
		f +=     0.125000*n3( p ); p = m3*p*2.01;
		f +=     0.062500*n3( p ); p = m3*p*2.04;
		f +=     0.031250*n3( p ); p = m3*p*2.01;
		f +=     0.015625*n3( p );
		return f/0.984375;
	}


	float fbm6( vec2 p )
	{
		return fbm6(vec3(p, time));
	}

	// Debug helper: anti-aliased unit grid (only used by the commented-out
	// line near the end of main()).
	float grid(vec2 p) {
		p = sin(p * 3.1415);
		return smoothstep(-0.01, 0.01, p.x * p.y);
	}

    void main(void) {

		time = iTime * 0.7;

		// q: [0,1] screen coords; p: centered, aspect-corrected coords,
		// squashed vertically and scrolled downward over time.
		vec2 q = gl_FragCoord.xy / iResolution.xy;
		vec2 p = -1.0 + 2.0 * q;
		p.x *= iResolution.x/iResolution.y;
		p.y *= 0.3;
		p.y -= time * 1.5;
		float tc = time * 1.2;
		float tw1 = time * 2.5;
		float tw2 = time * 0.6;

		// Two independent fBm-based domain-warp offsets (fast + slow).
		vec3 vw1 = vec3(p, tw1);
		vw1.y *= 2.8;
		vec2 ofs1 = vec2(fbm4(vw1), fbm4(vw1 + vec3(10.0, 20.0, 50.0)));
		ofs1.y *= 0.3;
		ofs1.x *= 1.3;

		vec3 vw2 = vec3(p, tw2);
		vw2.y *= 0.8;
		vec2 ofs2 = vec2(fbm4(vw2), fbm4(vw2 + vec3(10.0, 20.0, 50.0)));
		ofs2.y *= 0.3;
		ofs2.x *= 1.3;

		// Warped sample position fed into the 6-octave noise.
		vec2 vs = (p + ofs1 * 0.5 + ofs2 * 0.9) * 4.0;
		vec3 vc = vec3(vs, tc);
		float l;
		l = fbm6(vc);
		l = smoothstep(0.0, 1.0, l);
		// Fade the flame out toward the top of the screen.
		l = max(0.0, (l - pow(q.y * 0.8, 0.6)) * 1.8);
		// Fire palette: red falls off slowest, blue fastest.
		float r = pow(l , 1.5);
		float g = pow(l , 3.0);
		float b = pow(l , 6.0);

		//r = grid(vs);
		gl_FragColor = vec4( r, g, b, 1.0 );

    }

</script>

<!--  BREAK --->

<script id="vs1" type="x-shader/x-vertex">
  // Pass-through vertex shader: project the plane with the standard
  // three.js matrices (identity projection for a bare THREE.Camera).
  void main() {
    vec4 mvPosition = modelViewMatrix * vec4(position, 1.0 );
    gl_Position = projectionMatrix * mvPosition;
  }
</script>
<script id="fs1" type="x-shader/x-fragment">

	// Pass 2 (Shadertoy "Buf B" equivalent): 2-D Gaussian blur of the fire
	// pass, read from iChannel0 (renderTarget0's texture).

	// FIX: the precision declaration must appear before the first float
	// declaration (the uniforms below) when this is compiled as a raw WebGL
	// ES fragment shader. three.js prepends its own precision line to
	// ShaderMaterial sources, which makes this #ifdef harmless there.
	#ifdef GL_ES
	precision mediump float;
	#endif

    uniform vec2 iResolution;
    uniform float iTime;
	uniform sampler2D iChannel0;

	#define SIGMA 5.0

	// Gaussian density; the discrete kernel is re-normalized with Z below,
	// so the continuous 0.39894 (1/sqrt(2*pi)) prefactor cancels out.
	float normpdf(in float x, in float sigma)
	{
		return 0.39894*exp(-0.5*x*x/(sigma*sigma))/sigma;
	}

    void main(void) {

		// Center texel — only used by the commented-out composite below.
		vec3 c = texture2D(iChannel0, gl_FragCoord.xy / iResolution.xy).rgb;

		// Kernel size derived from SIGMA (mSize = 7 for SIGMA = 5.0).
		const int mSize = int(SIGMA * 11.0/7.0);
		const int kSize = (mSize-1)/2;
		float kernel[mSize];
		vec3 finalColor = vec3(0.0);

		// Build the symmetric 1-D kernel.
		float sigma = SIGMA;
		float Z = 0.0;
		for (int j = 0; j <= kSize; ++j)
		{
			kernel[kSize+j] = kernel[kSize-j] = normpdf(float(j), sigma);
		}

		// Normalization factor (the Gaussian tails are truncated).
		for (int j = 0; j < mSize; ++j)
		{
			Z += kernel[j];
		}

		// Accumulate the (separable) kernel over a full 2-D neighborhood.
		for (int i=-kSize; i <= kSize; ++i)
		{
			for (int j=-kSize; j <= kSize; ++j)
			{
				finalColor += kernel[kSize+j]*kernel[kSize+i]*texture2D(iChannel0, (gl_FragCoord.xy+vec2(float(i),float(j))) / iResolution.xy).rgb;

			}
		}

		// Z*Z because the 1-D normalization applies on both axes.
		finalColor /= Z*Z;

		//finalColor = c + finalColor * 0.3;

		gl_FragColor = vec4(finalColor, 1.0);

    }

</script>

<!--  BREAK --->

<script id="vs2" type="x-shader/x-vertex">
  // Pass-through vertex shader: project the plane with the standard
  // three.js matrices (identity projection for a bare THREE.Camera).
  void main() {
    vec4 mvPosition = modelViewMatrix * vec4(position, 1.0 );
    gl_Position = projectionMatrix * mvPosition;
  }
</script>
<script id="fs2" type="x-shader/x-fragment">

	// Pass 3 (final image): composite the sharp fire (iChannel0,
	// renderTarget0) with a blur of the blurred pass (iChannel1,
	// renderTarget1) to add a soft glow.

	// FIX: the precision declaration must appear before the first float
	// declaration (the uniforms below) when this is compiled as a raw WebGL
	// ES fragment shader. three.js prepends its own precision line to
	// ShaderMaterial sources, which makes this #ifdef harmless there.
	#ifdef GL_ES
	precision mediump float;
	#endif

    uniform vec2 iResolution;
    uniform float iTime;
	uniform sampler2D iChannel0;
	uniform sampler2D iChannel1;

	#define SIGMA 5.0

	// Gaussian density; the discrete kernel is re-normalized with Z below,
	// so the continuous 0.39894 (1/sqrt(2*pi)) prefactor cancels out.
	float normpdf(in float x, in float sigma)
	{
		return 0.39894*exp(-0.5*x*x/(sigma*sigma))/sigma;
	}

    void main(void) {

		// Sharp fire color from pass 1.
		vec3 c = texture2D(iChannel0, gl_FragCoord.xy / iResolution.xy).rgb;
		//gl_FragColor = vec4(c, 1.0);
		//return;

		// Kernel size derived from SIGMA (mSize = 7 for SIGMA = 5.0).
		const int mSize = int(SIGMA * 11.0/7.0);
		const int kSize = (mSize-1)/2;
		float kernel[mSize];
		vec3 finalColor = vec3(0.0);

		// Build the symmetric 1-D kernel.
		float sigma = SIGMA;
		float Z = 0.0;
		for (int j = 0; j <= kSize; ++j)
		{
			kernel[kSize+j] = kernel[kSize-j] = normpdf(float(j), sigma);
		}

		// Normalization factor (the Gaussian tails are truncated).
		for (int j = 0; j < mSize; ++j)
		{
			Z += kernel[j];
		}

		// Blur the already-blurred pass (iChannel1) once more for the glow.
		for (int i=-kSize; i <= kSize; ++i)
		{
			for (int j=-kSize; j <= kSize; ++j)
			{
				finalColor += kernel[kSize+j]*kernel[kSize+i]*texture2D(iChannel1, (gl_FragCoord.xy+vec2(float(i),float(j))) / iResolution.xy).rgb;

			}
		}

		// Z*Z because the 1-D normalization applies on both axes.
		finalColor /= Z*Z;

		// Composite: sharp fire plus a boosted (sqrt) glow term.
		finalColor = c + pow(finalColor, vec3(0.5)) * 0.5;

		gl_FragColor = vec4(finalColor, 1.0);

    }

</script>

Upvotes: 5

Views: 1479

Answers (1)

M -
M -

Reputation: 28482

This example uses multiple renders per frame. It works like this:

  1. render shaderA to buffer
  2. pass output to shaderB
  3. render shaderB to buffer
  4. pass output to shaderC
  5. render shaderC to canvas

To replicate this in Three.js, you'll need a WebGLRenderTarget as an intermediary to pass the output of one render as a texture into the next shader. Here's the pseudocode with only 2 renders; you'll need to extend it if you need more:

var renderer = new WebGLRenderer(w, h, ...);

var scene0 = new Scene();
var scene1 = new Scene();

var plane0 = new THREE.PlaneBufferGeometry(1, 1);
var plane1 = new THREE.PlaneBufferGeometry(1, 1);

// ... continue building materials, shaders, etc

// Add plane mesh to its corresponding scene
scene0.add(planeMesh0);
scene1.add(planeMesh1);

// You should only need one camera if they're both in the same position.
var cam = new Camera();

// renderTarget will store the first buffer
var renderTarget = new WebGLRenderTarget(w, h);

update() {
    // This pass will render the first shader
    // Its output will be drawn on the rendertarget's texture
    renderer.render(scene0, cam, renderTarget);

    // We assign the output of the first render pass to the second plane
    plane1.uniforms.texture.value = rendertarget.texture;

    // Now we render the second shader to the canvas
    renderer.render(scene1, cam);
}

Keep in mind you have to render separate scenes in each pass to avoid recursion issues, so you'll have to add each plane to a separate scene. To learn more about WebGLRenderTarget you can read about it in the docs

Upvotes: 3

Related Questions