A little WebGL experiment using Three.js, inspired by a NASA animation about the properties of plasma. See: https://www.youtube.com/watch?v=adiN2F6ysLE
HTML:
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/88/three.min.js"></script>
<script id="vertexShader" type="x-shader/x-vertex">
void main() {
gl_Position = vec4( position, 1.0 );
}
</script>
<script id="fragmentShader" type="x-shader/x-fragment">
uniform vec2 u_resolution;
uniform vec2 u_mouse;
uniform float u_time;
uniform sampler2D u_noise;
uniform sampler2D u_environment;
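// u_noise is a tiling RGB noise texture used as a cheap hash source
// (hash2() below assumes it is 256x256); u_environment is the lat-lon
// environment map loaded by the JavaScript.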
vec2 movement; // intended camera offset; never assigned at runtime (the assignment in main() is commented out)
// Global zoom, oscillating between 1 and 7. Note: strict WebGL implementations
// may reject non-constant global initializers like this; if so, set it in main().
float scale = 4. + sin(u_time * .2) * 3.;
vec2 hash2(vec2 p)
{
vec2 o = texture2D( u_noise, (p+0.5)/256.0, -100.0 ).xy;
return o;
}
const int octaves = 5;
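// Domain-folding "sine noise": each octave offsets p with time-varying
// sin/cos waves, inflates it slightly and rotates its xy plane; the
// length of the warped point is the noise value.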
float sinnoise(vec3 p){
float s = (sin(u_time) * .5 + .5);
float _c = cos(float(p.x * .1));
float _s = sin(float(p.x) * .1);
mat2 mat = mat2(_c, -_s, _s, _c);
for (int i=0; i<octaves; i++){
p += cos( p.yxz * 3. + vec3(0., u_time, 10.6)) * (.25 + s * .2);
p += sin( p.yxz + vec3(u_time, .1, 0.)) * (.5 - s * .1) ;
p *= 1. + s * .1;
p.xy *= mat;
}
// return dot(p, p);
return length(p);
}
// Naive environment mapping. Pass the reflected vector and pull back the texture position for that ray.
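// (A true lat-lon lookup would convert rd to spherical coordinates first;
// using rd.xy directly as a UV is cheaper and close enough for this effect.)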
vec3 envMap(vec3 rd, vec3 sn){
rd.xy -= u_time*.2; // This just sort of compensates for the camera movement
// rd.xy -= movement;
rd /= scale; // scale the whole thing down a bit from the scaled UVs
vec3 col = texture2D(u_environment, rd.xy - .5).rgb;
col *= normalize(col);
// col *= vec3(1., 1., 1.2);
// col *= vec3(hash2(rd.xy).y * .5 + .5);
return col;
}
float bumpMap(vec2 uv, float height) {
// uv.x += cos(u_time * .2) * 3.;
// uv.y += sin(u_time * .2) * 2.;
float bump = sinnoise(vec3(uv, 1.));
return bump * height;
}
vec4 renderPass(vec2 uv, vec2 uvoffset) {
vec3 surfacePos = vec3(uv, 0.0);
vec3 ray = normalize(vec3(uv - movement, 1.));
// vec3 lightPos = vec3(cos(u_time / 2.) * 2., sin(u_time / 2.) * 2., -3.);
vec3 lightPos = vec3(cos(u_time * .5 + 2.) * 2., 1. + sin(u_time * .5 + 2.) * 2., -3.) - vec3(movement, 0.);
vec3 normal = vec3(0., 0., -1);
vec2 sampleDistance = vec2(.001, -0.00);
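// Finite differences: sample the height field at small x/y offsets so its
// gradient can be estimated for the bump normal below.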
float fx = bumpMap(surfacePos.xy-sampleDistance.xy + uvoffset, 1.);
float fy = bumpMap(surfacePos.xy-sampleDistance.yx + uvoffset, 1.);
float f = bumpMap(surfacePos.xy + uvoffset, 1.);
float freq = (f + fx + fy);
freq = freq * freq;
// return vec4( 1. - smoothstep(80., 110., freq) );
// return vec4( freq / 155. );
fx = (fx-f)/sampleDistance.x;
fy = (fy-f)/sampleDistance.x;
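// Perturb the flat surface normal by the estimated height-field gradient (bump mapping).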
normal = normalize( normal + vec3(fx, fy, 0) * 0.2 );
vec3 lightV = lightPos - surfacePos;
float lightDist = max(length(lightV), 0.001);
lightV /= lightDist;
vec3 lightColour = vec3(.8, .8, 1.);
float shininess = 0.5;
float brightness = 1.;
float falloff = 0.1;
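// Inverse-square-style distance attenuation.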
float attenuation = 1./(1.0 + lightDist*lightDist*falloff);
float diffuse = max(dot(normal, lightV), 0.);
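// Phong specular: the reflected light direction compared against the view ray.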
float specular = pow(max(dot( reflect(-lightV, normal), -ray), 0.), 52.) * shininess;
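// Plasma colour ramps from white to blue-violet as the folded-noise frequency rises.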
vec3 plasma = mix(vec3(1.), vec3(.5, .8, 2.5), smoothstep(80., 100., freq));
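// Animated film grain pulled from the noise texture.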
vec2 n = hash2(uv * 200. + u_time * 5000.);
plasma += hash2(n).x;
plasma *= .8;
// vec3 tex = texture2D(u_environment, (reflect(vec3(uv, -1.), normal)).xy ).rgb;
vec3 reflect_ray = reflect(vec3(uv - movement, 1.), normal * 1.);
// The reflect ray is the ray we use to determine the reflection:
// the UV less the movement (to account for the "environment"), reflected about the surface normal.
vec3 tex = envMap(reflect_ray, normal); // Fake environment mapping.
vec3 texCol = (vec3(.5, .4, .2) + tex * brightness) * .5;
vec3 colour = (texCol * (diffuse*vec3(1, .97, .92)*2. + 0.5) + lightColour*specular * f * 2.)*attenuation*1.5;
colour *= 2.;
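// Low-frequency regions show the plasma; high-frequency regions keep the lit, reflective surface.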
colour = mix(colour, plasma, 1. - smoothstep(80., 110., freq));
return vec4(colour, 1.);
}
void main() {
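// Centre the UVs on the screen and normalise by the smaller dimension to correct for aspect ratio.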
vec2 uv = (gl_FragCoord.xy - 0.5 * u_resolution.xy) / min(u_resolution.y, u_resolution.x);
uv *= scale;
// movement = vec2(u_time, 0.);
// uv += movement;
// vec4 render = renderPass(uv, vec2(cos(u_time), sin(u_time)));
vec4 render = renderPass(uv, vec2(0.));
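// Cheap bloom-like curve: adding the squared colour lifts the highlights.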
render += render * render * .5;
gl_FragColor = render;
}
</script>
<div id="container" touch-action="none"></div>
CSS:
body {
margin: 0;
padding: 0;
}
#container {
position: fixed;
touch-action: none;
}
JAVASCRIPT:
/*
Most of the stuff in here is just bootstrapping. Essentially it's
setting Three.js up so that it renders a flat surface upon which to draw
the shader. The only thing really worth noting here is the set of
uniforms sent to the shader. Apart from that, all of the magic happens
in the HTML panel, in the fragment shader.
*/
let container;
let camera, scene, renderer;
let uniforms;
let loader=new THREE.TextureLoader();
let texture, environment;
loader.setCrossOrigin("anonymous");
loader.load(
'https://s3-us-west-2.amazonaws.com/s.cdpn.io/982762/noise.png',
function do_something_with_texture(tex) {
texture = tex;
texture.wrapS = THREE.RepeatWrapping;
texture.wrapT = THREE.RepeatWrapping;
texture.minFilter = THREE.LinearFilter;
loader.load(
'https://s3-us-west-2.amazonaws.com/s.cdpn.io/982762/env_lat-lon.png',
function environment_load(tex) {
environment = tex;
environment.wrapS = THREE.RepeatWrapping;
environment.wrapT = THREE.RepeatWrapping;
environment.minFilter = THREE.LinearFilter;
init();
animate();
}
);
}
);
function init() {
container = document.getElementById( 'container' );
camera = new THREE.Camera();
camera.position.z = 1;
scene = new THREE.Scene();
var geometry = new THREE.PlaneBufferGeometry( 2, 2 );
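// A 2x2 plane exactly fills clip space, since the vertex shader passes
// positions through untransformed.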
uniforms = {
u_time: { type: "f", value: 1.0 },
u_resolution: { type: "v2", value: new THREE.Vector2() },
u_noise: { type: "t", value: texture },
u_environment: { type: "t", value: environment },
u_mouse: { type: "v2", value: new THREE.Vector2() }
};
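// The `type` hints are legacy three.js syntax; newer versions infer the
// uniform type from the value alone.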
var material = new THREE.ShaderMaterial( {
uniforms: uniforms,
vertexShader: document.getElementById( 'vertexShader' ).textContent,
fragmentShader: document.getElementById( 'fragmentShader' ).textContent
} );
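// Enables the standard-derivatives extension (not actually used by this shader).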
material.extensions.derivatives = true;
var mesh = new THREE.Mesh( geometry, material );
scene.add( mesh );
renderer = new THREE.WebGLRenderer();
renderer.setPixelRatio( window.devicePixelRatio );
container.appendChild( renderer.domElement );
onWindowResize();
window.addEventListener( 'resize', onWindowResize, false );
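// Track the pointer in centred, aspect-corrected coordinates. (u_mouse is
// declared in the shader but not currently used there.)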
document.addEventListener('pointermove', (e)=> {
let ratio = window.innerHeight / window.innerWidth;
uniforms.u_mouse.value.x = (e.pageX - window.innerWidth / 2) / window.innerWidth / ratio;
uniforms.u_mouse.value.y = (e.pageY - window.innerHeight / 2) / window.innerHeight * -1;
e.preventDefault();
});
}
function onWindowResize( event ) {
renderer.setSize( window.innerWidth, window.innerHeight );
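// Use the drawing-buffer size (device pixels) so u_resolution matches gl_FragCoord.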
uniforms.u_resolution.value.x = renderer.domElement.width;
uniforms.u_resolution.value.y = renderer.domElement.height;
}
function animate(timestamp) {
requestAnimationFrame( animate );
render(timestamp);
}
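// CCapture (https://github.com/spite/ccapture.js) records the canvas to
// webm. It is assumed to be loaded as an extra external script; it is not
// included in the HTML above.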
let capturer = new CCapture( {
verbose: true,
framerate: 30,
// motionBlurFrames: 4,
quality: 90,
format: 'webm',
workersPath: 'js/'
} );
let capturing = false;
isCapturing = function(val) {
// Start or stop the recorder only when the state actually changes.
// (`capturing` is a `let` binding, so it must be checked directly
// rather than as a property of `window`.)
if(val === false && capturing === true) {
capturer.stop();
capturer.save();
} else if(val === true && capturing === false) {
capturer.start();
}
capturing = val;
}
toggleCapture = function() {
isCapturing(!capturing);
}
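// Press "d" (keyCode 68) to toggle recording.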
window.addEventListener('keyup', function(e) { if(e.keyCode == 68) toggleCapture(); });
function render(timestamp) {
// requestAnimationFrame passes a high-resolution timestamp in milliseconds;
// scale it so u_time advances at half a unit per second.
uniforms.u_time.value = timestamp * 0.0005;
renderer.render( scene, camera );
if(capturing) {
capturer.capture( renderer.domElement );
}
}
Source code:
https://codepen.io/shubniggurath/pen/MGdMbo
Live demo:
https://codepen.io/shubniggurath/full/MGdMbo