diff --git a/Images/tofugif.gif b/Images/tofugif.gif
new file mode 100644
index 0000000..444e317
Binary files /dev/null and b/Images/tofugif.gif differ
diff --git a/README.md b/README.md
index c636328..eb4fe6b 100644
--- a/README.md
+++ b/README.md
@@ -1,9 +1,15 @@
 # HW 0: Noisy Planet Part 1 (Intro to Javascript and WebGL)
+Live demo: https://jakelem.github.io/hw00-webgl-intro
+

-
+![drawing](Images/tofugif.gif)

-

(source: Ken Perlin)

+

+
+
+For this project, I made a jiggly tofu. The cube's vertices are displaced along a sine wave whose direction and amplitude are determined by the position of the mouse on the screen. The normals are offset using the analytic derivatives of 3D FBM noise, as described by IQ [in this article](https://www.iquilezles.org/www/articles/morenoise/morenoise.htm). FBM is also used to interpolate between the two albedo colors and to build a specular map for the tofu.
+
 ## Objective
 - Check that the tools and build configuration we will be using for the class works.
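
The jiggle described in the README text above comes down to a single displacement in the vertex shader further down in this diff. Below is a minimal GLSL sketch of the idea, not the exact shader code: the phase direction here is just the 2D perpendicular of the wave direction, whereas `lambert-vert.glsl` below derives it from the camera direction.

```glsl
// Sketch of the mouse-driven jiggle (simplified relative to lambert-vert.glsl).
vec3 jiggle(vec3 pos, vec2 mousePos01, float time) {
    vec2 m    = 2.0 * (mousePos01 - 0.5);   // remap mouse from [0, 1]^2 to [-1, 1]^2
    vec2 dir  = normalize(m);               // vertices oscillate along the mouse direction
    float amp = length(m) + 1.0;            // larger offsets as the mouse leaves the center
    vec2 perp = vec2(-dir.y, dir.x);        // simplified phase direction
    float phase = dot(perp, pos.xy) + 0.1 * time;
    pos.xy += amp * 0.3 * dir * sin(phase); // phase varies across the cube, so it ripples
    return pos;
}
```

Because the phase varies with position while the displacement direction stays fixed, the cube ripples instead of translating rigidly.
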
diff --git a/src/geometry/Cube.ts b/src/geometry/Cube.ts
new file mode 100644
index 0000000..f6735cb
--- /dev/null
+++ b/src/geometry/Cube.ts
@@ -0,0 +1,117 @@
+import {vec3, vec4} from 'gl-matrix';
+import Drawable from '../rendering/gl/Drawable';
+import {gl} from '../globals';
+
+class Cube extends Drawable {
+  indices: Uint32Array;
+  positions: Float32Array;
+  normals: Float32Array;
+
+  color: vec4;
+  colors: Float32Array;
+  center: vec4;
+
+  constructor(center: vec3) {
+    super(); // Call the constructor of the super class. This is required.
+    this.center = vec4.fromValues(center[0], center[1], center[2], 1);
+  }
+
+  create() {
+    // Two triangles per face, four unique vertices per face.
+    this.indices = new Uint32Array([0, 1, 2,
+                                    0, 2, 3,
+                                    4, 5, 6,
+                                    4, 6, 7,
+                                    8, 9, 10,
+                                    8, 10, 11,
+                                    12, 13, 14,
+                                    12, 14, 15,
+                                    16, 17, 18,
+                                    16, 18, 19,
+                                    20, 21, 22,
+                                    20, 22, 23]);
+
+    this.normals = new Float32Array([0, 0, -1, 0, // FRONT
+                                     0, 0, -1, 0,
+                                     0, 0, -1, 0,
+                                     0, 0, -1, 0,
+
+                                     0, 0, 1, 0, // BACK
+                                     0, 0, 1, 0,
+                                     0, 0, 1, 0,
+                                     0, 0, 1, 0,
+
+                                     -1, 0, 0, 0, // LEFT
+                                     -1, 0, 0, 0,
+                                     -1, 0, 0, 0,
+                                     -1, 0, 0, 0,
+
+                                     1, 0, 0, 0, // RIGHT
+                                     1, 0, 0, 0,
+                                     1, 0, 0, 0,
+                                     1, 0, 0, 0,
+
+                                     0, -1, 0, 0, // BOTTOM
+                                     0, -1, 0, 0,
+                                     0, -1, 0, 0,
+                                     0, -1, 0, 0,
+
+                                     0, 1, 0, 0, // TOP
+                                     0, 1, 0, 0,
+                                     0, 1, 0, 0,
+                                     0, 1, 0, 0]);
+
+    this.positions = new Float32Array([-0.5, -0.5, 0, 1, // FRONT
+                                       0.5, -0.5, 0, 1,
+                                       0.5, 0.5, 0, 1,
+                                       -0.5, 0.5, 0, 1,
+
+                                       -0.5, -0.5, 1, 1, // BACK
+                                       0.5, -0.5, 1, 1,
+                                       0.5, 0.5, 1, 1,
+                                       -0.5, 0.5, 1, 1,
+
+                                       -0.5, -0.5, 0, 1, // LEFT
+                                       -0.5, -0.5, 1, 1,
+                                       -0.5, 0.5, 1, 1,
+                                       -0.5, 0.5, 0, 1,
+
+                                       0.5, -0.5, 0, 1, // RIGHT
+                                       0.5, -0.5, 1, 1,
+                                       0.5, 0.5, 1, 1,
+                                       0.5, 0.5, 0, 1,
+
+                                       -0.5, -0.5, 0, 1, // BOTTOM
+                                       -0.5, -0.5, 1, 1,
+                                       0.5, -0.5, 1, 1,
+                                       0.5, -0.5, 0, 1,
+
+                                       -0.5, 0.5, 0, 1, // TOP
+                                       -0.5, 0.5, 1, 1,
+                                       0.5, 0.5, 1, 1,
+                                       0.5, 0.5, 0, 1]);
+
+    this.generateIdx();
+    this.generatePos();
+    this.generateNor();
+
+    this.count = this.indices.length;
+    gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.bufIdx);
+    gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, this.indices, gl.STATIC_DRAW);
+
+    gl.bindBuffer(gl.ARRAY_BUFFER, this.bufNor);
+    gl.bufferData(gl.ARRAY_BUFFER, this.normals, gl.STATIC_DRAW);
+
+    gl.bindBuffer(gl.ARRAY_BUFFER, this.bufPos);
+    gl.bufferData(gl.ARRAY_BUFFER, this.positions, gl.STATIC_DRAW);
+
+    console.log(`Created cube`);
+  }
+};
+
+export default Cube;
diff --git a/src/main.ts b/src/main.ts
index 65a9461..059373a 100644
--- a/src/main.ts
+++ b/src/main.ts
@@ -1,8 +1,9 @@
-import {vec3} from 'gl-matrix';
+import {vec2, vec3, vec4} from 'gl-matrix';
 const Stats = require('stats-js');
 import * as DAT from 'dat.gui';
 import Icosphere from './geometry/Icosphere';
 import Square from './geometry/Square';
+import Cube from './geometry/Cube';
 import OpenGLRenderer from './rendering/gl/OpenGLRenderer';
 import Camera from './Camera';
 import {setGL} from './globals';
@@ -13,17 +14,30 @@ import ShaderProgram, {Shader} from './rendering/gl/ShaderProgram';
 const controls = {
   tesselations: 5,
   'Load Scene': loadScene, // A function pointer, essentially
+  'Color': [255,245,233],
+  'Secondary Color': [247,240,221],
+  'Use Numerical Normals' : true
 };
 
 let icosphere: Icosphere;
 let square: Square;
+let cube: Cube;
+let time : number = 0;
 let prevTesselations: number = 5;
+let mouseX = 0;
+let mouseY = 0;
 
 function loadScene() {
   icosphere = new Icosphere(vec3.fromValues(0, 0, 0), 1, controls.tesselations);
   icosphere.create();
   square = new Square(vec3.fromValues(0, 0, 0));
   square.create();
+  cube = new Cube(vec3.fromValues(0, 0, 0));
+  cube.create();
+}
+
+// Convert a dat.GUI [0, 255] RGB color array into a normalized vec4.
+function colArrayToVec4(colArr : number[]) : vec4 {
+  return vec4.fromValues(colArr[0] / 255.0, colArr[1] / 255.0, colArr[2] / 255.0, 1);
 }
 
 function main() {
@@ -38,6 +52,9 @@ function main() {
   // Add controls to the gui
   const gui = new DAT.GUI();
   gui.add(controls, 'tesselations', 0, 8).step(1);
+  gui.addColor(controls, 'Color');
+  gui.addColor(controls, 'Secondary Color');
+  //gui.add(controls, 'Use Numerical Normals', true);
   gui.add(controls, 'Load Scene');
 
   // get canvas and webgl context
@@ -56,7 +73,7 @@ function main() {
   const camera = new Camera(vec3.fromValues(0, 0, 5), vec3.fromValues(0, 0, 0));
 
   const renderer = new OpenGLRenderer(canvas);
-  renderer.setClearColor(0.2, 0.2, 0.2, 1);
+  renderer.setClearColor(234 / 255.0, 182 / 255.0, 118.0 / 255.0, 1);
   gl.enable(gl.DEPTH_TEST);
 
   const lambert = new ShaderProgram([
@@ -70,18 +87,27 @@ function main() {
     stats.begin();
     gl.viewport(0, 0, window.innerWidth, window.innerHeight);
     renderer.clear();
+
     if(controls.tesselations != prevTesselations)
     {
       prevTesselations = controls.tesselations;
       icosphere = new Icosphere(vec3.fromValues(0, 0, 0), 1, prevTesselations);
       icosphere.create();
     }
+
+    lambert.setCamPos(vec4.fromValues(camera.controls.eye[0], camera.controls.eye[1], camera.controls.eye[2], 1));
+    lambert.setGeometryColor(colArrayToVec4(controls["Color"]));
+    lambert.setSecondaryColor(colArrayToVec4(controls["Secondary Color"]));
+    lambert.setNumericalNorm(controls["Use Numerical Normals"]);
+
+    lambert.setTime(time);
+    lambert.setMousePos(vec2.fromValues(mouseX, mouseY));
     renderer.render(camera, lambert, [
-      icosphere,
-      // square,
+      cube,
     ]);
     stats.end();
+    ++time;
 
     // Tell the browser to call `tick` again whenever it renders a new frame
     requestAnimationFrame(tick);
   }
@@ -100,4 +126,9 @@ function main() {
   tick();
 }
 
+// Track the mouse in normalized [0, 1] screen coordinates (origin at the bottom left).
+document.addEventListener('mousemove', function(event) {
+  mouseX = event.pageX / window.innerWidth;
+  mouseY = (window.innerHeight - event.pageY) / window.innerHeight;
+});
+
 main();
diff --git a/src/rendering/gl/OpenGLRenderer.ts b/src/rendering/gl/OpenGLRenderer.ts
index 7e527c2..5155535 100644
--- a/src/rendering/gl/OpenGLRenderer.ts
+++ b/src/rendering/gl/OpenGLRenderer.ts
@@ -25,13 +25,11 @@ class OpenGLRenderer {
   render(camera: Camera, prog: ShaderProgram, drawables: Array<Drawable>) {
     let model = mat4.create();
     let viewProj = mat4.create();
-    let color = vec4.fromValues(1, 0, 0, 1);
 
     mat4.identity(model);
     mat4.multiply(viewProj, camera.projectionMatrix, camera.viewMatrix);
     prog.setModelMatrix(model);
     prog.setViewProjMatrix(viewProj);
-    prog.setGeometryColor(color);
 
     for (let drawable of drawables) {
       prog.draw(drawable);
diff --git a/src/rendering/gl/ShaderProgram.ts b/src/rendering/gl/ShaderProgram.ts
index 67fef40..90b1396 100644
--- a/src/rendering/gl/ShaderProgram.ts
+++ b/src/rendering/gl/ShaderProgram.ts
@@ -1,4 +1,4 @@
-import {vec4, mat4} from 'gl-matrix';
+import {vec2, vec4, mat4} from 'gl-matrix';
 import Drawable from './Drawable';
 import {gl} from '../../globals';
 
@@ -28,7 +28,16 @@ class ShaderProgram {
   unifModel: WebGLUniformLocation;
   unifModelInvTr: WebGLUniformLocation;
   unifViewProj: WebGLUniformLocation;
+  unifViewProjInv: WebGLUniformLocation;
   unifColor: WebGLUniformLocation;
+  unifSecondaryColor: WebGLUniformLocation;
+
+  unifNumericalNorm: WebGLUniformLocation;
+
+  unifTime: WebGLUniformLocation;
+  unifMousePos: WebGLUniformLocation;
+  unifCamPos: WebGLUniformLocation;
 
   constructor(shaders: Array<Shader>) {
     this.prog = gl.createProgram();
@@ -47,7 +56,14 @@ class ShaderProgram {
     this.unifModel      = gl.getUniformLocation(this.prog, "u_Model");
     this.unifModelInvTr = gl.getUniformLocation(this.prog, "u_ModelInvTr");
     this.unifViewProj   = gl.getUniformLocation(this.prog, "u_ViewProj");
+    this.unifViewProjInv = gl.getUniformLocation(this.prog, "u_ViewProjInv");
     this.unifColor      = gl.getUniformLocation(this.prog, "u_Color");
+    this.unifSecondaryColor = gl.getUniformLocation(this.prog, "u_SecondaryColor");
+    this.unifNumericalNorm = gl.getUniformLocation(this.prog, "u_NumericalNorm");
+    this.unifTime = gl.getUniformLocation(this.prog, "u_Time");
+    this.unifMousePos = gl.getUniformLocation(this.prog, "u_MousePos");
+    this.unifCamPos = gl.getUniformLocation(this.prog, "u_CameraPos");
   }
 
   use() {
@@ -74,7 +90,18 @@ class ShaderProgram {
   setViewProjMatrix(vp: mat4) {
     this.use();
     if (this.unifViewProj !== -1) {
+      let viewprojinv: mat4 = mat4.create();
+      mat4.invert(viewprojinv, vp);
       gl.uniformMatrix4fv(this.unifViewProj, false, vp);
+      gl.uniformMatrix4fv(this.unifViewProjInv, false, viewprojinv);
+    }
+  }
+
+  setInverseViewProjMatrix(vp: mat4) {
+    this.use();
+    if (this.unifViewProjInv !== -1) {
+      gl.uniformMatrix4fv(this.unifViewProjInv, false, vp);
     }
   }
 
@@ -85,6 +112,43 @@ class ShaderProgram {
     }
   }
 
+  setSecondaryColor(color: vec4) {
+    this.use();
+    if (this.unifSecondaryColor !== -1) {
+      gl.uniform4fv(this.unifSecondaryColor, color);
+    }
+  }
+
+  setNumericalNorm(n: boolean) {
+    this.use();
+    if (this.unifNumericalNorm !== -1) {
+      gl.uniform1i(this.unifNumericalNorm, Number(n));
+    }
+  }
+
+  setTime(time: number) {
+    this.use();
+    if (this.unifTime !== -1) {
+      gl.uniform1f(this.unifTime, time);
+    }
+  }
+
+  setMousePos(pos: vec2) {
+    this.use();
+    if (this.unifMousePos !== -1) {
+      gl.uniform2fv(this.unifMousePos, pos);
+    }
+  }
+
+  setCamPos(pos: vec4) {
+    this.use();
+    if (this.unifCamPos !== -1) {
+      gl.uniform4fv(this.unifCamPos, pos);
+    }
+  }
+
   draw(d: Drawable) {
     this.use();
diff --git a/src/shaders/lambert-frag.glsl b/src/shaders/lambert-frag.glsl
index 2b8e11b..96abf0a 100644
--- a/src/shaders/lambert-frag.glsl
+++ b/src/shaders/lambert-frag.glsl
@@ -12,32 +12,120 @@ precision highp float;
 
 uniform vec4 u_Color; // The color with which to render this instance of geometry.
+uniform vec4 u_SecondaryColor; // A second color, blended with u_Color using FBM noise.
+
+uniform vec4 u_CameraPos; // The camera's world-space position, used for specular shading.
+uniform int u_NumericalNorm;
 
 // These are the interpolated values out of the rasterizer, so you can't know
 // their specific values without knowing the vertices that contributed to them
 in vec4 fs_Nor;
 in vec4 fs_LightVec;
 in vec4 fs_Col;
+in vec4 fs_WorldPos;
+in vec4 fs_LightPos;
+in vec4 fs_Pos;
+
+uniform vec2 u_MousePos;
+uniform float u_Time;
 
 out vec4 out_Col; // This is the final output color that you will see on your
-                  // screen for the pixel that is currently being processed.
+// screen for the pixel that is currently being processed.
 
-void main()
+
+// Cheap hash: map a 3D lattice point to a pseudo-random value in [0, 1).
+float hash3(vec3 v)
 {
-    // Material base color (before shading)
-    vec4 diffuseColor = u_Color;
+    return fract(sin(dot(v, vec3(24.51853, 4815.44774, 32555.33333))) * 3942185.3);
+}
 
-    // Calculate the diffuse term for Lambert shading
-    float diffuseTerm = dot(normalize(fs_Nor), normalize(fs_LightVec));
-    // Avoid negative lighting values
-    // diffuseTerm = clamp(diffuseTerm, 0, 1);
+// Value noise with analytic derivatives.
+// Adapted from IQ: https://www.iquilezles.org/www/articles/morenoise/morenoise.htm
+vec4 noise3(vec3 v)
+{
+    vec3 intV = floor(v);
+    vec3 fractV = fract(v);
+    vec3 u = fractV*fractV*fractV*(fractV*(fractV*6.0-15.0)+10.0);
+    vec3 du = 30.0*fractV*fractV*(fractV*(fractV-2.0)+1.0);
+
+    float a = hash3( intV+vec3(0.f,0.f,0.f) );
+    float b = hash3( intV+vec3(1.f,0.f,0.f) );
+    float c = hash3( intV+vec3(0.f,1.f,0.f) );
+    float d = hash3( intV+vec3(1.f,1.f,0.f) );
+    float e = hash3( intV+vec3(0.f,0.f,1.f) );
+    float f = hash3( intV+vec3(1.f,0.f,1.f) );
+    float g = hash3( intV+vec3(0.f,1.f,1.f) );
+    float h = hash3( intV+vec3(1.f,1.f,1.f) );
+
+    float k0 = a;
+    float k1 = b - a;
+    float k2 = c - a;
+    float k3 = e - a;
+    float k4 = a - b - c + d;
+    float k5 = a - c - e + g;
+    float k6 = a - b - e + f;
+    float k7 = -a + b + c - d + e - f - g + h;
+
+    vec3 dv = 2.0 * du * vec3( k1 + k4*u.y + k6*u.z + k7*u.y*u.z,
+                               k2 + k5*u.z + k4*u.x + k7*u.z*u.x,
+                               k3 + k6*u.x + k5*u.y + k7*u.x*u.y );
+
+    return vec4(-1.f+2.f*(k0 + k1*u.x + k2*u.y + k3*u.z + k4*u.x*u.y + k5*u.y*u.z + k6*u.z*u.x + k7*u.x*u.y*u.z), dv);
+}
 
-    float ambientTerm = 0.2;
+vec4 fbm3(vec3 v, int octaves, float amp, float freq, float pers, float freq_power)
+{
+    float sum = 0.f;
+    vec3 dv = vec3(0.f, 0.f, 0.f);
+    for(int i = 0; i < octaves; ++i)
+    {
+        amp *= pers;
+        freq *= freq_power;
+        vec4 noise = noise3(v * freq);
+        sum += amp * noise.x;
+        dv += amp * noise.yzw;
+    }
+    return vec4(sum, dv);
+}
 
-    float lightIntensity = diffuseTerm + ambientTerm;   //Add a small float value to the color multiplier
-                                                        //to simulate ambient lighting. This ensures that faces that are not
-                                                        //lit by our point light are not completely black.
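+
+// For reference: noise3 above evaluates the trilinear blend of the eight corner
+// hashes, P(u) = k0 + k1*u.x + k2*u.y + k3*u.z + k4*u.x*u.y + k5*u.y*u.z
+//             + k6*u.z*u.x + k7*u.x*u.y*u.z, remapped to [-1, 1] as 2*P(u) - 1.
+// The returned gradient applies the chain rule through the quintic fade: each
+// component of dv is 2 * du * dP/du, which is why the factor of 2 and the k terms
+// paired with the matching u components appear in the dv expression. fbm3 then
+// just sums the values and gradients of several octaves with decaying amplitude.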
+float getBias(float t, float bias)
+{
+    // Schlick-style bias curve used to shape the specular map.
+    return t / ((1.f / bias - 2.f) * (1.f - t) + 1.f);
+}
 
-    // Compute final shaded color
-    out_Col = vec4(diffuseColor.rgb * lightIntensity, diffuseColor.a);
+void main()
+{
+    // Material base color (before shading)
+    vec4 diffuseColor = u_SecondaryColor;
+    vec3 offset = vec3(20.f); //sin(u_Time * 0.002) * (1.f + 3.f * fs_Nor.xyz);
+    vec4 fbm_col = fbm3(fs_Pos.xyz, 4, 0.8f, 1.6f, 0.8f, 2.f);
+    vec4 fbm_norm = fbm3(fs_Pos.xyz, 6, 0.8f, 2.0f, 0.8f, 2.f);
+    vec4 fbm_spec = fbm3(fs_Pos.xyz + offset, 6, 0.8f, 4.5f, 0.8f, 1.6f);
+
+    fbm_spec.x = getBias(fbm_spec.x * 0.5f + 0.5f, 0.3f) * 1.4f + 0.2f;
+    fbm_spec.x = clamp(fbm_spec.x, 0.f, 4.f);
+
+    vec4 norm = normalize(vec4(fbm_norm.yzw, 0.f));
+    float light_dist = distance(fs_LightPos, fs_WorldPos);
+    float pointlightIntensity = 23.f / (light_dist * light_dist);
+    float fbm_lerp = 0.04f;
+    if(fbm_norm.x > 0.45)
+        fbm_lerp = fbm_norm.x;
+    norm = normalize(mix(fs_Nor, norm, fbm_lerp));
+
+    vec4 lightVec = normalize(fs_LightPos - fs_WorldPos);
+    diffuseColor.xyz = mix(u_SecondaryColor.xyz, u_Color.xyz, clamp((fbm_col.x + 1.0) * 0.5, 0.0, 1.f));
+    float diffuseTerm = pointlightIntensity * dot(normalize(norm), normalize(lightVec));
+    // Avoid negative lighting values
+    diffuseTerm = clamp(diffuseTerm, 0.f, 1.f);
+    float ambientTerm = 0.7;
+    vec4 viewVec = normalize(fs_WorldPos - u_CameraPos);
+    vec4 h = normalize(lightVec - viewVec);
+    float specularIntensity = pointlightIntensity * max(pow(max(dot(h, norm), 0.f), 1024.f), 0.f);
+
+    float lightIntensity = clamp((diffuseTerm + ambientTerm + specularIntensity * fbm_spec.x), 0.f, 3.f);
+    vec4 lightColor = vec4(255.f, 245.f, 228.f, 255.f) / 255.f;
+    out_Col = vec4(diffuseColor.xyz * lightIntensity * lightColor.xyz, diffuseColor.a);
+
 }
diff --git a/src/shaders/lambert-vert.glsl b/src/shaders/lambert-vert.glsl
index 7f95a37..9ea9c80 100644
--- a/src/shaders/lambert-vert.glsl
+++ b/src/shaders/lambert-vert.glsl
@@ -7,17 +7,25 @@
 //This simultaneous transformation allows your program to run much faster, especially when rendering
 //geometry with millions of vertices.
 
+uniform float u_Time;
+uniform vec2 u_MousePos;
+uniform vec4 u_CameraPos;
+
 uniform mat4 u_Model;       // The matrix that defines the transformation of the
-                            // object we're rendering. In this assignment,
-                            // this will be the result of traversing your scene graph.
+// object we're rendering. In this assignment,
+// this will be the result of traversing your scene graph.
 
 uniform mat4 u_ModelInvTr;  // The inverse transpose of the model matrix.
-                            // This allows us to transform the object's normals properly
-                            // if the object has been non-uniformly scaled.
+// This allows us to transform the object's normals properly
+// if the object has been non-uniformly scaled.
 
 uniform mat4 u_ViewProj;    // The matrix that defines the camera's transformation.
-                            // We've written a static matrix for you to use for HW2,
-                            // but in HW3 you'll have to generate one yourself
+// We've written a static matrix for you to use for HW2,
+// but in HW3 you'll have to generate one yourself
+
+uniform mat4 u_ViewProjInv; // The inverse of the view-projection matrix.
 
 in vec4 vs_Pos;             // The array of vertex positions passed to the shader
 
@@ -26,28 +34,50 @@ in vec4 vs_Nor;             // The array of vertex normals passed to the shader
 
 in vec4 vs_Col;             // The array of vertex colors passed to the shader.
 
 out vec4 fs_Nor;            // The array of normals that has been transformed by u_ModelInvTr. This is implicitly passed to the fragment shader.
+out vec4 fs_WorldPos;
+out vec4 fs_Pos;
+
 out vec4 fs_LightVec;       // The direction in which our virtual light lies, relative to each vertex. This is implicitly passed to the fragment shader.
+out vec4 fs_LightPos;       // The world-space position of our virtual light. This is implicitly passed to the fragment shader.
+
 out vec4 fs_Col;            // The color of each vertex. This is implicitly passed to the fragment shader.
 
-const vec4 lightPos = vec4(5, 5, 3, 1); //The position of our virtual light, which is used to compute the shading of
-                                        //the geometry in the fragment shader.
+const vec4 lightPos = vec4(1, 2.0, 9.5, 1); //The position of our virtual light, which is used to compute the shading of
+//the geometry in the fragment shader.
 
 void main()
 {
     fs_Col = vs_Col;                         // Pass the vertex colors to the fragment shader for interpolation
-
+    vec2 center_MousePos = 2.f * (u_MousePos - 0.5f); // Remap the mouse from [0, 1]^2 to [-1, 1]^2
     mat3 invTranspose = mat3(u_ModelInvTr);
+
     fs_Nor = vec4(invTranspose * vec3(vs_Nor), 0);          // Pass the vertex normals to the fragment shader for interpolation.
-                                                            // Transform the geometry's normals by the inverse transpose of the
-                                                            // model matrix. This is necessary to ensure the normals remain
-                                                            // perpendicular to the surface after the surface is transformed by
-                                                            // the model matrix.
-
-
+    // Transform the geometry's normals by the inverse transpose of the
+    // model matrix. This is necessary to ensure the normals remain
+    // perpendicular to the surface after the surface is transformed by
+    // the model matrix.
+
     vec4 modelposition = u_Model * vs_Pos;   // Temporarily store the transformed vertex positions for use below
-
+
+    float amp = length(center_MousePos) + 1.f;    // Jiggle harder as the mouse moves away from the screen center
+    vec2 dir = normalize(center_MousePos);        // Vertices oscillate along the mouse direction
+    vec3 camDir = normalize(u_CameraPos).xyz;
+    vec3 perp_dir = cross(vec3(dir, 0), camDir);  // Phase direction, perpendicular to dir and the view direction
+    float amt = dot(perp_dir.xy, modelposition.xy);
+
+    fs_Pos = modelposition;
+    fs_LightPos = lightPos;
+    modelposition.xy += amp * 0.3 * dir * sin(amt + u_Time * 0.1);
+    fs_WorldPos = modelposition;
     fs_LightVec = lightPos - modelposition;  // Compute the direction in which the light source lies
-
+
     gl_Position = u_ViewProj * modelposition;// gl_Position is a built-in variable of OpenGL which is
-                                             // used to render the final positions of the geometry's vertices
+    // used to render the final positions of the geometry's vertices
+
 }
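
For reference, the shading assembled in `lambert-frag.glsl` above can be written as a single expression:

$$\mathrm{out} = \mathrm{albedo}\cdot L\cdot\mathrm{clamp}\!\Big(\mathrm{clamp}\big(I\,(\hat n\cdot\hat l),\,0,\,1\big) + 0.7 + I\,s_{\mathrm{fbm}}\,\max(\hat n\cdot\hat h,\,0)^{1024},\;0,\;3\Big),\qquad I=\frac{23}{d^{2}},$$

where $d$ is the distance to the point light, $\hat l$ and $\hat h$ are the light and Blinn-Phong half vectors, $L$ is the warm light color, the albedo mixes the two GUI colors keyed by low-frequency FBM, and $s_{\mathrm{fbm}}$ is the bias-shaped FBM specular map.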