你們好,這裏是 CSS 魔法使——alphardex。
如下是最終實現的效果圖
撒,哈吉馬路由!
筆者的three.js模板:點擊右下角的fork便可複製一份
在個人上一篇博文中,講到了如何將HTML世界和webgl的世界同步起來,本文也是一樣的思路,先同步好兩個世界,再進行特效創做
首先搭建HTML和JS
<!-- Fullscreen wrapper: a hidden <img> that serves as the particle texture
     source, plus the container the WebGL renderer mounts into. -->
<div class="relative w-screen h-screen">
  <div class="absolute w-screen h-screen flex-center opacity-0">
    <img
      src="https://i.loli.net/2021/03/08/uYcvELkr4dqFj9w.jpg"
      class="w-60 cursor-pointer"
      alt=""
      crossorigin="anonymous"
    />
  </div>
  <div class="particle-explode w-full h-full bg-black"></div>
</div>
class ParticleExplode extends Base { // 初始化 async init() { this.createScene(); this.createPerspectiveCamera(); this.createRenderer(); this.createParticleExplodeMaterial(); await preloadImages(); this.createPoints(); this.createClickEffect(); this.createLight(); this.trackMousePos(); this.createOrbitControls(); this.addListeners(); this.setLoop(); } } const start = () => { const particleExplode = new ParticleExplode(".particle-explode", true); particleExplode.init(); }; start(); 複製代碼
class ParticleExplode extends Base {
  constructor(sel: string, debug: boolean) {
    ...
    // Camera distance chosen so one world unit maps to one CSS pixel at z = 0
    // (see getScreenFov below).
    this.cameraPosition = new THREE.Vector3(0, 0, 1500);
    const fov = this.getScreenFov();
    this.perspectiveCameraParams = {
      fov,
      near: 0.1,
      far: 5000
    };
  }
  // Vertical fov (degrees) at which the viewport's pixel height exactly fills
  // the frustum at the camera's z distance, keeping DOM and WebGL in sync.
  getScreenFov() {
    return ky.rad2deg(
      2 * Math.atan(window.innerHeight / 2 / this.cameraPosition.z)
    );
  }
}
import imagesLoaded from "https://cdn.skypack.dev/imagesloaded@4.1.4";

// Resolve once every element matched by `sel` (including CSS background
// images) has finished loading, so they are safe to use as textures.
const preloadImages = (sel = "img") => {
  return new Promise((resolve) => {
    imagesLoaded(sel, { background: true }, resolve);
  });
};
建立DOMMeshObject,用來同步HTML和webgl世界的數據
這裏有一點要注意的是,因爲本文建立的是粒子特效,所以不用Mesh
,用的是Points
,它能將Geometry
以點陣的形式表示
class DOMMeshObject { el!: Element; rect!: DOMRect; mesh!: THREE.Mesh | THREE.Points; constructor( el: Element, scene: THREE.Scene, material: THREE.Material = new THREE.MeshBasicMaterial({ color: 0xff0000 }), isPoints = false ) { this.el = el; const rect = el.getBoundingClientRect(); this.rect = rect; const { width, height } = rect; const geometry = new THREE.PlaneBufferGeometry( width, height, width, height ); const mesh = isPoints ? new THREE.Points(geometry, material) : new THREE.Mesh(geometry, material); scene.add(mesh); this.mesh = mesh; } setPosition() { const { mesh, rect } = this; const { top, left, width, height } = rect; const x = left + width / 2 - window.innerWidth / 2; const y = -(top + height / 2 - window.innerHeight / 2) + window.scrollY; mesh.position.set(x, y, 0); } } class ParticleExplode extends Base { // 建立材質 createParticleExplodeMaterial() { const particleExplodeMaterial = new THREE.ShaderMaterial({ vertexShader: particleExplodeVertexShader, fragmentShader: particleExplodeFragmentShader, side: THREE.DoubleSide, uniforms: { uTime: { value: 0 }, uMouse: { value: new THREE.Vector2(0, 0) }, uResolution: { value: new THREE.Vector2(window.innerWidth, window.innerHeight) }, uProgress: { value: 0 }, uTexture: { value: null } } }); this.particleExplodeMaterial = particleExplodeMaterial; } // 建立點 createPoints() { const image = document.querySelector("img")!; this.image = image; const texture = new THREE.Texture(image); texture.needsUpdate = true; const material = this.particleExplodeMaterial.clone(); material.uniforms.uTexture.value = texture; const imageDOMMeshObj = new DOMMeshObject( image, this.scene, material, true ); imageDOMMeshObj.setPosition(); this.imageDOMMeshObj = imageDOMMeshObj; } } 複製代碼
建立完點陣後,畫面上依舊一片黑,爲何呢?由於咱們忘了在頂點着色器中設置點的大小了,在particleExplodeVertexShader
中加入這一行
// Give every vertex a visible footprint; without this, Points renders nothing.
gl_PointSize = 2.0;
能夠看到畫面上總算有顯示了,其實這不是一張平面,而是由成千上萬的點組成的「平面」
如今,你能夠將你喜歡的圖片貼上去了,片元着色器particleExplodeFragmentShader
代碼以下
uniform sampler2D uTexture;

varying vec2 vUv;

void main() {
    vec4 color = texture2D(uTexture, vUv);
    // Drop near-black texels so the image's dark background doesn't
    // show up as particles.
    if (color.r < 0.1 && color.g < 0.1 && color.b < 0.1) {
        discard;
    }
    gl_FragColor = color;
}
接下來又到了激動人心的時刻——爆炸特效的實現了!
爆炸,簡而言之就是大量的微粒在必定空間內進行不規則的大幅運動而造成的奇觀。說到「不規則」,咱們首先就能想到一個詞——「噪聲」。
噪聲有不少種,最多見的有perlin noise
、simplex noise
等,本文用的是基於simplex noise
的curl noise
,在google上搜索curl noise glsl
,很容易就能將下面的噪聲代碼搞到手(谷歌:關鍵時刻仍是得靠勞資)
// Simplex / curl noise, based on Ashima Arts' webgl-noise and the widely
// circulated curlNoise GLSL snippet.
vec4 permute(vec4 x) { return mod(((x * 34.0) + 1.0) * x, 289.0); }
vec4 taylorInvSqrt(vec4 r) { return 1.79284291400159 - 0.85373472095314 * r; }

// 3D simplex noise; returns a value in roughly [-1, 1].
float snoise(vec3 v) {
    const vec2 C = vec2(1.0 / 6.0, 1.0 / 3.0);
    const vec4 D = vec4(0.0, 0.5, 1.0, 2.0);

    // First corner
    vec3 i = floor(v + dot(v, C.yyy));
    vec3 x0 = v - i + dot(i, C.xxx);

    // Other corners
    vec3 g = step(x0.yzx, x0.xyz);
    vec3 l = 1.0 - g;
    vec3 i1 = min(g.xyz, l.zxy);
    vec3 i2 = max(g.xyz, l.zxy);

    // x0 = x0 - 0. + 0.0 * C
    vec3 x1 = x0 - i1 + 1.0 * C.xxx;
    vec3 x2 = x0 - i2 + 2.0 * C.xxx;
    vec3 x3 = x0 - 1.0 + 3.0 * C.xxx;

    // Permutations
    i = mod(i, 289.0);
    vec4 p = permute(permute(permute(
                 i.z + vec4(0.0, i1.z, i2.z, 1.0))
               + i.y + vec4(0.0, i1.y, i2.y, 1.0))
               + i.x + vec4(0.0, i1.x, i2.x, 1.0));

    // Gradients (N*N points uniformly over a square, mapped onto an octahedron)
    float n_ = 1.0 / 7.0; // N = 7
    vec3 ns = n_ * D.wyz - D.xzx;

    vec4 j = p - 49.0 * floor(p * ns.z * ns.z); // mod(p, N*N)

    vec4 x_ = floor(j * ns.z);
    vec4 y_ = floor(j - 7.0 * x_); // mod(j, N)

    vec4 x = x_ * ns.x + ns.yyyy;
    vec4 y = y_ * ns.x + ns.yyyy;
    vec4 h = 1.0 - abs(x) - abs(y);

    vec4 b0 = vec4(x.xy, y.xy);
    vec4 b1 = vec4(x.zw, y.zw);

    vec4 s0 = floor(b0) * 2.0 + 1.0;
    vec4 s1 = floor(b1) * 2.0 + 1.0;
    vec4 sh = -step(h, vec4(0.0));

    vec4 a0 = b0.xzyw + s0.xzyw * sh.xxyy;
    vec4 a1 = b1.xzyw + s1.xzyw * sh.zzww;

    vec3 p0 = vec3(a0.xy, h.x);
    vec3 p1 = vec3(a0.zw, h.y);
    vec3 p2 = vec3(a1.xy, h.z);
    vec3 p3 = vec3(a1.zw, h.w);

    // Normalise gradients
    vec4 norm = taylorInvSqrt(vec4(dot(p0, p0), dot(p1, p1), dot(p2, p2), dot(p3, p3)));
    p0 *= norm.x;
    p1 *= norm.y;
    p2 *= norm.z;
    p3 *= norm.w;

    // Mix final noise value
    vec4 m = max(0.6 - vec4(dot(x0, x0), dot(x1, x1), dot(x2, x2), dot(x3, x3)), 0.0);
    m = m * m;
    return 42.0 * dot(m * m, vec4(dot(p0, x0), dot(p1, x1), dot(p2, x2), dot(p3, x3)));
}

// Three decorrelated noise samples, one per axis.
// FIX: the transcribed version scattered "* 2. - 1." inconsistently — applied
// to the *input* of the 1st and 3rd components but the *output* of the 2nd.
// snoise already returns values in [-1, 1], so no remapping is needed;
// restored the canonical form of this snippet.
vec3 snoiseVec3(vec3 x) {
    float s = snoise(vec3(x));
    float s1 = snoise(vec3(x.y - 19.1, x.z + 33.4, x.x + 47.2));
    float s2 = snoise(vec3(x.z + 74.2, x.x - 124.5, x.y + 99.4));
    return vec3(s, s1, s2);
}

// Curl of the noise field, approximated by central differences. The curl is
// divergence-free, which makes particle motion look like incompressible flow.
vec3 curlNoise(vec3 p) {
    const float e = 0.1;
    vec3 dx = vec3(e, 0.0, 0.0);
    vec3 dy = vec3(0.0, e, 0.0);
    vec3 dz = vec3(0.0, 0.0, e);

    vec3 p_x0 = snoiseVec3(p - dx);
    vec3 p_x1 = snoiseVec3(p + dx);
    vec3 p_y0 = snoiseVec3(p - dy);
    vec3 p_y1 = snoiseVec3(p + dy);
    vec3 p_z0 = snoiseVec3(p - dz);
    vec3 p_z1 = snoiseVec3(p + dz);

    float x = p_y1.z - p_y0.z - p_z1.y + p_z0.y;
    float y = p_z1.x - p_z0.x - p_x1.z + p_x0.z;
    float z = p_x1.y - p_x0.y - p_y1.x + p_y0.x;

    const float divisor = 1.0 / (2.0 * e);
    return normalize(vec3(x, y, z) * divisor);
}
將噪聲函數弄來後,立馬就能應用到咱們的片元着色器裏,思路也很簡單粗暴:將位置信息傳遞給噪聲,再給原先的位置加上噪聲就OK了,因爲噪聲的值很大,須要慢慢地對值進行調試,這裏花的時間相對較多一些
uniform float uTime;
uniform float uProgress;

varying vec2 vUv;

void main() {
    // Sample a divergence-free noise field; the scale factors tame its
    // magnitude relative to the pixel-sized position values.
    vec3 noise = curlNoise(vec3(position.x * 0.02, position.y * 0.008, uTime * 0.05));
    // uProgress (0 = assembled, larger = exploded) drives the displacement.
    vec3 distortion = vec3(position.x * 2.0, position.y, 1.0) * noise * uProgress;
    vec3 newPos = position + distortion;

    vec4 modelPosition = modelMatrix * vec4(newPos, 1.0);
    vec4 viewPosition = viewMatrix * modelPosition;
    vec4 projectedPosition = projectionMatrix * viewPosition;
    gl_Position = projectedPosition;

    gl_PointSize = 2.0;
    vUv = uv;
}
將噪聲的值調成你預想中的效果後,監聽好點擊事件,用gsap
來改變爆炸的過程值,這樣爆炸效果就實現了
import gsap from "https://cdn.skypack.dev/gsap@3.6.0"; class ParticleExplode extends Base { // 建立點擊效果 createClickEffect() { const material = this.imageDOMMeshObj.mesh.material as any; const image = this.image; image.addEventListener("click", () => { if (!this.isOpen) { gsap.to(material.uniforms.uProgress, { value: 3, duration: 1 }); this.isOpen = true; } else { gsap.to(material.uniforms.uProgress, { value: 0, duration: 1 }); this.isOpen = false; } }); } // 動畫 update() { const elapsedTime = this.clock.getElapsedTime(); if (this.imageDOMMeshObj) { const material = this.imageDOMMeshObj.mesh.material as any; material.uniforms.uTime.value = elapsedTime; } } } 複製代碼