class GPGPU {
  constructor(renderer, textureSize) {
    const gl = renderer.getContext()
    // bail out early if the client cannot do GPGPU: we need float textures
    // and a vertex shader that can sample them
    if (!gl.getExtension('OES_texture_float')) {
      alert('client support error - float textures not supported')
      throw new Error('float textures not supported')
    }
    if (gl.getParameter(gl.MAX_VERTEX_TEXTURE_IMAGE_UNITS) === 0) {
      alert('client support error - vertex shader cannot read textures')
      throw new Error('vertex shader cannot read textures')
    }
    this.renderer = renderer
    // positionsA holds the initial particle data (xyz = position, a = lifetime);
    // positionsB has the same layout but starts empty
    this.positionsA = new THREE.DataTexture(
      preloadedDataBuffer(textureSize),
      textureSize, textureSize,
      THREE.RGBAFormat, THREE.FloatType)
    this.positionsB = new THREE.DataTexture(
      null,
      textureSize, textureSize,
      THREE.RGBAFormat, THREE.FloatType)
    this.positionsA.needsUpdate = true
    this.scene = new THREE.Scene()
    this.orthoCamera = new THREE.OrthographicCamera(-1, 1, 1, -1, 0, 1)
    // create the render targets the simulation will ping-pong between
    const options = {
      wrapS: THREE.ClampToEdgeWrapping,
      wrapT: THREE.ClampToEdgeWrapping,
      stencilBuffer: false,
      minFilter: THREE.NearestFilter, // important: we want exact texel values, not interpolated ones
      magFilter: THREE.NearestFilter,
      format: THREE.RGBAFormat,
      // iOS devices typically cannot render to full-float textures, so fall back to half-float
      type: (/(iPad|iPhone|iPod)/g.test(navigator.userAgent)) ? THREE.HalfFloatType : THREE.FloatType,
    }
    // each frame we read from one target and write into the other
    this.rtts = [
      new THREE.WebGLRenderTarget(textureSize, textureSize, options),
      new THREE.WebGLRenderTarget(textureSize, textureSize, options)
    ]
    this.rttIndex = 0
    // the simulation:
    // create a bi-unit (full-screen) quad that uses the simulation material
    // to update the float texture
    // note: addAttribute was renamed setAttribute in newer three.js releases
    let geom = new THREE.BufferGeometry()
    geom.addAttribute('position', new THREE.BufferAttribute(new Float32Array([
      -1, -1, 0,
      1, -1, 0,
      1, 1, 0,
      -1, -1, 0,
      1, 1, 0,
      -1, 1, 0
    ]), 3))
    geom.addAttribute('uv', new THREE.BufferAttribute(new Float32Array([0,1, 1,1, 1,0, 0,1, 1,0, 0,0]), 2))
    this.simulationMaterial = new THREE.ShaderMaterial({
      uniforms: {
        positions: { type: 't', value: this.positionsA },
        elapsedTime: { type: 'f', value: 0 },
        delta: { type: 'f', value: 0 },
      },
      vertexShader: `
        varying vec2 vUv;
        void main() {
          vUv = vec2(uv.x, uv.y);
          gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
        }
      `,
      fragmentShader: `
        #define PI 3.1415926536
        #define PI2 6.28318530718
        uniform float elapsedTime;
        uniform float delta;
        uniform sampler2D positions; // Data Texture containing original positions
        varying vec2 vUv;
        void main() {
          vec4 sample = texture2D(positions, vec2(vUv.x, 1.0 - vUv.y));
          vec3 pos = sample.xyz;
          float lifetime = sample.a;
          // increase lifetime by delta, cap it at 1 and start over
          lifetime = mod(lifetime + delta, 1.0);
          float radius = 125.0;
          float radius2 = 250.0;
          // per-particle index (unused in this variant)
          float index = vUv.x * ${textureSize}.0 + vUv.y * ${textureSize}.0 * ${textureSize}.0;
          /* *************************** */
          // lay the particles out on a flat grid centred on the origin
          pos.x = vUv.x * radius2 - radius;
          pos.y = vUv.y * radius2 - radius;
          //pos.x += sin(lifetime * 16.1) * 4.0;
          //pos.z += sin(lifetime * 22.5) * 4.0;
          //pos.y += lifetime * 25.0;
          gl_FragColor = vec4(pos.rgb, lifetime);
        }
      `
    })
    this.mesh = new THREE.Mesh(geom, this.simulationMaterial)
    this.scene.add(this.mesh)
  }
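  // advance the simulation one step: render the full-screen quad with the
  // simulation material into the render target we are not currently reading
  // from, then feed the result back in as `positions` for the next step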
  simulate (delta, elapsedTime) {
    this.rttIndex = (this.rttIndex + 1) % 2
    this.simulationMaterial.uniforms.elapsedTime.value = elapsedTime
    this.simulationMaterial.uniforms.delta.value = delta
    this.renderer.setRenderTarget(this.rtts[this.rttIndex])
    this.renderer.clear()
    this.renderer.render(this.scene, this.orthoCamera)
    this.renderer.setRenderTarget(null)
    const texture = this.rtts[this.rttIndex].texture
    this.simulationMaterial.uniforms.positions.value = texture
    return texture
  }
}
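
// The snippets below are not part of the class above: they are a minimal
// sketch of how it might be wired up. The data layout (xyz = position,
// a = lifetime) follows the fragment shader, but preloadedDataBuffer is only
// referenced above, so this version of it is an assumption, and the
// renderer / scene / camera / particleMaterial names are placeholders.

// a possible preloadedDataBuffer: one RGBA float texel per particle,
// random positions inside a 50-unit cube and a random starting lifetime
function preloadedDataBuffer (textureSize) {
  const count = textureSize * textureSize
  const data = new Float32Array(count * 4)
  for (let i = 0; i < count; i++) {
    data[i * 4 + 0] = (Math.random() - 0.5) * 50 // x
    data[i * 4 + 1] = (Math.random() - 0.5) * 50 // y
    data[i * 4 + 2] = (Math.random() - 0.5) * 50 // z
    data[i * 4 + 3] = Math.random()              // lifetime in [0, 1)
  }
  return data
}

// driving the simulation: each frame simulate() returns the texture with the
// updated positions, which a particle material can sample in its vertex shader
// (particleMaterial and its `positions` uniform are assumed to exist)
const gpgpu = new GPGPU(renderer, 256)
const clock = new THREE.Clock()

function animate () {
  requestAnimationFrame(animate)
  const delta = clock.getDelta()
  const positionsTexture = gpgpu.simulate(delta, clock.elapsedTime)
  particleMaterial.uniforms.positions.value = positionsTexture
  renderer.render(scene, camera)
}
animate()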