diff --git a/README.md b/README.md
index c636328..28e81a4 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,16 @@
 # HW 0: Noisy Planet Part 1 (Intro to Javascript and WebGL)
 
+## Description of project
+The six faces of a cube start far apart, fly inward to interlock into a cube, hold that shape for a few seconds, then clip past one another and fly back out. This is achieved by offsetting each face along its normal with a tangent function, gated by a sine wave of the same period so that the faces pause together in the middle for a moment.
+
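+In the vertex shader (`src/shaders/perlin-vert.glsl`), this boils down to a few lines, roughly:
+
+```glsl
+float move = sin(u_Time * .02);                     // same period as the tangent offset below
+if (move > 0.) {                                    // only offset during half of each period
+    modelposition += tan(u_Time * .02) * fs_Nor;    // push each face out along its normal
+}
+```
+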
+As the faces approach each other they start out white, rapidly develop spots of the selected color and of black, and then transition into a psychedelic red-green-blue static. High-octave-count FBM noise produces the black, flickering static that oscillates while the cube sits at a standstill.
+
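+A condensed view of the coloring logic in `src/shaders/perlin-frag.glsl`:
+
+```glsl
+vec4  diffuseColor = u_Color;             // the color picked in the GUI
+float flash        = sin(u_Time * .02);   // negative while the cube is assembled and "still"
+vec3  flashColor   = rand(vec3(fs_Pos));  // pure red, green, or blue chosen per position
+if (flash < 0.0) {
+    // 50-octave fbm produces the black, flickering static on the assembled cube
+    out_Col = vec4(vec3(fbm(50.0, vec3(fs_Pos.xy * u_Time, fs_Pos.z))) * flashColor, 1.0);
+} else {
+    // while the faces are moving, the base color is mixed toward the RGB static
+    float rampUp = .5 / tan(u_Time * .02);
+    diffuseColor = mix(diffuseColor, vec4(flashColor, 1.0), 1. - smoothstep(0., 1., flash));
+    out_Col = vec4(vec3(smoothstep(-1., 1., fbm(50.0, vec3(fs_Pos) * rampUp))), 1.0) * diffuseColor;
+}
+```
+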
+Live demo: https://ruthchu.github.io/hw00-webgl-intro/
+
+
+
+
+
 
   
 
diff --git a/images/blasting_off.png b/images/blasting_off.png
new file mode 100644
index 0000000..e49c939
Binary files /dev/null and b/images/blasting_off.png differ
diff --git a/images/coming_together.png b/images/coming_together.png
new file mode 100644
index 0000000..acbff17
Binary files /dev/null and b/images/coming_together.png differ
diff --git a/images/static_effect.png b/images/static_effect.png
new file mode 100644
index 0000000..24bbccc
Binary files /dev/null and b/images/static_effect.png differ
diff --git a/src/geometry/Cube.ts b/src/geometry/Cube.ts
new file mode 100644
index 0000000..d534c5f
--- /dev/null
+++ b/src/geometry/Cube.ts
@@ -0,0 +1,120 @@
+import {vec3, vec4} from 'gl-matrix';
+import Drawable from '../rendering/gl/Drawable';
+import {gl} from '../globals';
+
+class Cube extends Drawable {
+  indices: Uint32Array;
+  positions: Float32Array;
+  normals: Float32Array;
+  center: vec4;
+
+  constructor(center: vec3) {
+    super();
+    this.center = vec4.fromValues(center[0], center[1], center[2], 1);
+  }
+
+  create() {
+    this.positions = new Float32Array([
+        //faces positive x direction
+        1, -1, 1, 1,
+        1, 1, 1, 1,
+        1, 1, -1, 1,
+        1, -1, -1, 1,
+
+        //faces negative x direction
+        -1, 1, -1, 1,
+        -1, -1, -1, 1,
+        -1, -1, 1, 1,
+        -1, 1, 1, 1,
+
+        //faces positive y direction
+        -1, 1, -1, 1,
+        -1, 1, 1, 1,
+        1, 1, 1, 1,
+        1, 1, -1, 1,
+
+        //faces negative y direction
+        1, 1, -1, 1,
+        1, -1, -1, 1,
+        -1, -1, -1, 1,
+        -1, 1, -1, 1,
+
+        //faces positive z direction
+        -1, 1, 1, 1,
+        -1, -1, 1, 1,
+        1, -1, 1, 1,
+        1, 1, 1, 1,
+
+        //faces negative z direction
+        -1, -1, 1, 1,
+        -1, -1, -1, 1,
+        1, -1, -1, 1,
+        1, -1, 1, 1
+    ]);
+
+    this.normals = new Float32Array([
+        //faces positive x direction
+        1, 0, 0, 0,
+        1, 0, 0, 0,
+        1, 0, 0, 0,
+        1, 0, 0, 0,
+
+        //faces negative x direction
+        -1, 0, 0, 0,
+        -1, 0, 0, 0,
+        -1, 0, 0, 0,
+        -1, 0, 0, 0,
+
+        //faces positive y direction
+        0, 1, 0, 0,
+        0, 1, 0, 0,
+        0, 1, 0, 0,
+        0, 1, 0, 0,
+
+        //faces negative y direction
+        0, -1, 0, 0,
+        0, -1, 0, 0,
+        0, -1, 0, 0,
+        0, -1, 0, 0,
+
+        //faces positive z direction
+        0, 0, 1, 0,
+        0, 0, 1, 0,
+        0, 0, 1, 0,
+        0, 0, 1, 0,
+
+        //faces negative z direction
+        0, 0, -1, 0,
+        0, 0, -1, 0,
+        0, 0, -1, 0,
+        0, 0, -1, 0
+    ]);
+
+    this.indices = new Uint32Array([
+        0, 1, 2, 0, 2, 3,
+        4, 5, 6, 4, 6, 7,
+        8, 9, 10, 8, 10, 11,
+        12, 13, 14, 12, 14, 15,
+        16, 17, 18, 16, 18, 19,
+        20, 21, 22, 20, 22, 23
+    ]);
+      
+    this.generateIdx();
+    this.generatePos();
+    this.generateNor();
+
+    this.count = this.indices.length;
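+    // Upload the index, normal, and position data to the GPU buffers generated above.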
+    gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, this.bufIdx);
+    gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, this.indices, gl.STATIC_DRAW);
+
+    gl.bindBuffer(gl.ARRAY_BUFFER, this.bufNor);
+    gl.bufferData(gl.ARRAY_BUFFER, this.normals, gl.STATIC_DRAW);
+
+    gl.bindBuffer(gl.ARRAY_BUFFER, this.bufPos);
+    gl.bufferData(gl.ARRAY_BUFFER, this.positions, gl.STATIC_DRAW);
+
+    console.log(`Created cube`);
+  }
+};
+
+export default Cube;
\ No newline at end of file
diff --git a/src/geometry/Icosphere.ts b/src/geometry/Icosphere.ts
index 412995b..a763c7f 100644
--- a/src/geometry/Icosphere.ts
+++ b/src/geometry/Icosphere.ts
@@ -176,4 +176,4 @@ class Icosphere extends Drawable {
   }
 };
 
-export default Icosphere;
+export default Icosphere;
\ No newline at end of file
diff --git a/src/main.ts b/src/main.ts
index 65a9461..0ef7bb6 100644
--- a/src/main.ts
+++ b/src/main.ts
@@ -1,4 +1,4 @@
-import {vec3} from 'gl-matrix';
+import {vec3, vec4} from 'gl-matrix';
 const Stats = require('stats-js');
 import * as DAT from 'dat.gui';
 import Icosphere from './geometry/Icosphere';
@@ -7,23 +7,34 @@ import OpenGLRenderer from './rendering/gl/OpenGLRenderer';
 import Camera from './Camera';
 import {setGL} from './globals';
 import ShaderProgram, {Shader} from './rendering/gl/ShaderProgram';
+import Cube from './geometry/Cube';
 
 // Define an object with application parameters and button callbacks
 // This will be referred to by dat.GUI's functions that add GUI elements.
 const controls = {
   tesselations: 5,
   'Load Scene': loadScene, // A function pointer, essentially
+  terrainFreq: 1,
+  cloudHeight: .9,
+};
+
+const palette = {
+  color: [ 163, 120, 191 ]
 };
 
 let icosphere: Icosphere;
 let square: Square;
-let prevTesselations: number = 5;
+let cube: Cube;
+let prevTesselations: number = 6;
+let u_tick: number = 0.0;
 
 function loadScene() {
   icosphere = new Icosphere(vec3.fromValues(0, 0, 0), 1, controls.tesselations);
   icosphere.create();
   square = new Square(vec3.fromValues(0, 0, 0));
   square.create();
+  //cube = new Cube(vec3.fromValues(0, 0, 0));
+  //cube.create();
 }
 
 function main() {
@@ -39,6 +50,9 @@ function main() {
   const gui = new DAT.GUI();
   gui.add(controls, 'tesselations', 0, 8).step(1);
   gui.add(controls, 'Load Scene');
+  gui.add(controls, 'terrainFreq', 0, 2).step(.1);
+  gui.add(controls, 'cloudHeight', .6, 1.1).step(.05);
+  gui.addColor(palette, 'color');
 
   // get canvas and webgl context
   const canvas =  document.getElementById('canvas');
@@ -58,17 +72,38 @@ function main() {
   const renderer = new OpenGLRenderer(canvas);
   renderer.setClearColor(0.2, 0.2, 0.2, 1);
   gl.enable(gl.DEPTH_TEST);
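+  // Blend the semi-transparent cloud shell over the planet, and cull back faces
+  // (this geometry uses clockwise front-face winding).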
+  gl.enable(gl.BLEND);
+  gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);
+  gl.enable(gl.CULL_FACE);
+  gl.frontFace(gl.CW);
 
   const lambert = new ShaderProgram([
     new Shader(gl.VERTEX_SHADER, require('./shaders/lambert-vert.glsl')),
     new Shader(gl.FRAGMENT_SHADER, require('./shaders/lambert-frag.glsl')),
   ]);
 
+  const perlin = new ShaderProgram([
+    new Shader(gl.VERTEX_SHADER, require('./shaders/perlin-vert.glsl')),
+    new Shader(gl.FRAGMENT_SHADER, require('./shaders/perlin-frag.glsl')),
+  ])
+
+  const planet = new ShaderProgram([
+    new Shader(gl.VERTEX_SHADER, require('./shaders/planet-vert.glsl')),
+    new Shader(gl.FRAGMENT_SHADER, require('./shaders/planet-frag.glsl')),
+  ])
+
+  const raymarch = new ShaderProgram([
+    new Shader(gl.VERTEX_SHADER, require('./shaders/raymarch-vert.glsl')),
+    new Shader(gl.FRAGMENT_SHADER, require('./shaders/raymarch-frag.glsl')),
+  ])
+
   // This function will be called every frame
   function tick() {
+    u_tick++;
     camera.update();
     stats.begin();
     gl.viewport(0, 0, window.innerWidth, window.innerHeight);
+
     renderer.clear();
     if(controls.tesselations != prevTesselations)
     {
@@ -76,10 +111,23 @@ function main() {
       icosphere = new Icosphere(vec3.fromValues(0, 0, 0), 1, prevTesselations);
       icosphere.create();
     }
-    renderer.render(camera, lambert, [
+    renderer.render(camera, planet, [
       icosphere,
-      // square,
-    ]);
+      //square,
+      //cube,
+    ], vec4.fromValues(palette.color[0] / 255., 
+      palette.color[1] / 255., 
+      palette.color[2] / 255., 1), 
+      u_tick, controls.terrainFreq, controls.cloudHeight);
+    renderer.render(camera, raymarch, [
+      icosphere,
+      //square,
+      //cube,
+    ], vec4.fromValues(palette.color[0] / 255., 
+      palette.color[1] / 255., 
+      palette.color[2] / 255., 1), 
+      u_tick, controls.terrainFreq, controls.cloudHeight);
+
     stats.end();
 
     // Tell the browser to call `tick` again whenever it renders a new frame
diff --git a/src/rendering/gl/OpenGLRenderer.ts b/src/rendering/gl/OpenGLRenderer.ts
index 7e527c2..bda94bd 100644
--- a/src/rendering/gl/OpenGLRenderer.ts
+++ b/src/rendering/gl/OpenGLRenderer.ts
@@ -22,16 +22,23 @@ class OpenGLRenderer {
     gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
   }
 
-  render(camera: Camera, prog: ShaderProgram, drawables: Array) {
+  render(camera: Camera, prog: ShaderProgram, drawables: Array,
+     u_color: vec4, u_time: number, u_terrain: number, u_cloud: number) {
     let model = mat4.create();
     let viewProj = mat4.create();
-    let color = vec4.fromValues(1, 0, 0, 1);
+    let time = u_time;
+    let color = u_color;
+    let terrain = u_terrain;
+    let cloud = u_cloud;
 
     mat4.identity(model);
     mat4.multiply(viewProj, camera.projectionMatrix, camera.viewMatrix);
     prog.setModelMatrix(model);
     prog.setViewProjMatrix(viewProj);
+    prog.setTime(time);
     prog.setGeometryColor(color);
+    prog.setTerrain(terrain);
+    prog.setCloud(cloud);
 
     for (let drawable of drawables) {
       prog.draw(drawable);
diff --git a/src/rendering/gl/ShaderProgram.ts b/src/rendering/gl/ShaderProgram.ts
index 67fef40..17fa6b2 100644
--- a/src/rendering/gl/ShaderProgram.ts
+++ b/src/rendering/gl/ShaderProgram.ts
@@ -29,6 +29,9 @@ class ShaderProgram {
   unifModelInvTr: WebGLUniformLocation;
   unifViewProj: WebGLUniformLocation;
   unifColor: WebGLUniformLocation;
+  unifTime: WebGLUniformLocation;
+  unifTerrain: WebGLUniformLocation;
+  unifCloud: WebGLUniformLocation;
 
   constructor(shaders: Array) {
     this.prog = gl.createProgram();
@@ -48,6 +51,9 @@ class ShaderProgram {
     this.unifModelInvTr = gl.getUniformLocation(this.prog, "u_ModelInvTr");
     this.unifViewProj   = gl.getUniformLocation(this.prog, "u_ViewProj");
     this.unifColor      = gl.getUniformLocation(this.prog, "u_Color");
+    this.unifTime       = gl.getUniformLocation(this.prog, "u_Time");
+    this.unifTerrain    = gl.getUniformLocation(this.prog, "u_Terrain");
+    this.unifCloud      = gl.getUniformLocation(this.prog, "u_Cloud");
   }
 
   use() {
@@ -77,6 +83,13 @@ class ShaderProgram {
       gl.uniformMatrix4fv(this.unifViewProj, false, vp);
     }
   }
+  
+  setTime(time: number) {
+    this.use();
+    if (this.unifTime !== -1) {
+      gl.uniform1f(this.unifTime, time);
+    }
+  }
 
   setGeometryColor(color: vec4) {
     this.use();
@@ -85,6 +98,20 @@ class ShaderProgram {
     }
   }
 
+  setTerrain(terrain: number) {
+    this.use();
+    if (this.unifTerrain !== -1) {
+      gl.uniform1f(this.unifTerrain, terrain);
+    }
+  }
+
+  setCloud(cloud: number) {
+    this.use();
+    if (this.unifCloud !== -1) {
+      gl.uniform1f(this.unifCloud, cloud);
+    }
+  }
+
   draw(d: Drawable) {
     this.use();
 
diff --git a/src/shaders/perlin-frag.glsl b/src/shaders/perlin-frag.glsl
new file mode 100644
index 0000000..2b438fe
--- /dev/null
+++ b/src/shaders/perlin-frag.glsl
@@ -0,0 +1,160 @@
+#version 300 es
+
+// This is a fragment shader. If you've opened this file first, please
+// open and read lambert.vert.glsl before reading on.
+// Unlike the vertex shader, the fragment shader actually does compute
+// the shading of geometry. For every pixel in your program's output
+// screen, the fragment shader is run for every bit of geometry that
+// particular pixel overlaps. By implicitly interpolating the position
+// data passed into the fragment shader by the vertex shader, the fragment shader
+// can compute what color to apply to its pixel based on things like vertex
+// position, light position, and vertex color.
+precision highp float;
+
+uniform vec4 u_Color; // The color with which to render this instance of geometry.
+uniform highp float u_Time;
+
+// These are the interpolated values out of the rasterizer, so you can't know
+// their specific values without knowing the vertices that contributed to them
+
+in vec4 fs_Pos;
+in vec4 fs_Nor;
+in vec4 fs_LightVec;
+in vec4 fs_Col;
+
+out vec4 out_Col; // This is the final output color that you will see on your
+                  // screen for the pixel that is currently being processed.
+
+// Pseudorandom output modified from https://stackoverflow.com/questions/4200224/random-noise-functions-for-glsl
+// Outputs red, green, or blue, based on which value is the largest
+vec3 rand(vec3 co){
+    float a = fract(sin(dot(co, vec3(12.9898, 78.233, 34.252))) * 43758.5453);
+    float b = fract(sin(dot(co, vec3(78.233, 34.252, 12.9898))) * 43758.5453);
+    float c = fract(sin(dot(co, vec3(34.252, 78.233, 12.9898))) * 43758.5453);
+    if (a > b && a > c) {
+        return vec3(1.0, 0.0, 0.0);
+    } else if (b > a && b > c) {
+        return vec3(0.0, 1.0, 0.0);
+    } else if (c > b && c > a) {
+        return vec3(0.0, 0.0, 1.0);
+    }
+    return vec3(a, b, c);
+}
+
+// Taken from cis460 sky shader (not sure where it came from originally)
+vec4 permute(vec4 x){return mod(((x*34.0)+1.0)*x, 289.0);}
+vec4 taylorInvSqrt(vec4 r){return 1.79284291400159 - 0.85373472095314 * r;}
+
+float snoise(vec3 v){
+    const vec2  C = vec2(1.0/6.0, 1.0/3.0) ;
+    const vec4  D = vec4(0.0, 0.5, 1.0, 2.0);
+
+    // First corner
+    vec3 i  = floor(v + dot(v, C.yyy) );
+    vec3 x0 =   v - i + dot(i, C.xxx) ;
+
+    // Other corners
+    vec3 g = step(x0.yzx, x0.xyz);
+    vec3 l = 1.0 - g;
+    vec3 i1 = min( g.xyz, l.zxy );
+    vec3 i2 = max( g.xyz, l.zxy );
+
+    //  x0 = x0 - 0. + 0.0 * C
+    vec3 x1 = x0 - i1 + 1.0 * C.xxx;
+    vec3 x2 = x0 - i2 + 2.0 * C.xxx;
+    vec3 x3 = x0 - 1. + 3.0 * C.xxx;
+
+    // Permutations
+    i = mod(i, 289.0 );
+    vec4 p = permute( permute( permute(
+                                   i.z + vec4(0.0, i1.z, i2.z, 1.0 ))
+                               + i.y + vec4(0.0, i1.y, i2.y, 1.0 ))
+                      + i.x + vec4(0.0, i1.x, i2.x, 1.0 ));
+
+    // Gradients
+    // ( N*N points uniformly over a square, mapped onto an octahedron.)
+    float n_ = 1.0/7.0; // N=7
+    vec3  ns = n_ * D.wyz - D.xzx;
+
+    vec4 j = p - 49.0 * floor(p * ns.z *ns.z);  //  mod(p,N*N)
+
+    vec4 x_ = floor(j * ns.z);
+    vec4 y_ = floor(j - 7.0 * x_ );    // mod(j,N)
+
+    vec4 x = x_ *ns.x + ns.yyyy;
+    vec4 y = y_ *ns.x + ns.yyyy;
+    vec4 h = 1.0 - abs(x) - abs(y);
+
+    vec4 b0 = vec4( x.xy, y.xy );
+    vec4 b1 = vec4( x.zw, y.zw );
+
+    vec4 s0 = floor(b0)*2.0 + 1.0;
+    vec4 s1 = floor(b1)*2.0 + 1.0;
+    vec4 sh = -step(h, vec4(0.0));
+
+    vec4 a0 = b0.xzyw + s0.xzyw*sh.xxyy ;
+    vec4 a1 = b1.xzyw + s1.xzyw*sh.zzww ;
+
+    vec3 p0 = vec3(a0.xy,h.x);
+    vec3 p1 = vec3(a0.zw,h.y);
+    vec3 p2 = vec3(a1.xy,h.z);
+    vec3 p3 = vec3(a1.zw,h.w);
+
+    //Normalise gradients
+    vec4 norm = taylorInvSqrt(vec4(dot(p0,p0), dot(p1,p1), dot(p2, p2), dot(p3,p3)));
+    p0 *= norm.x;
+    p1 *= norm.y;
+    p2 *= norm.z;
+    p3 *= norm.w;
+
+    // Mix final noise value
+    vec4 m = max(0.6 - vec4(dot(x0,x0), dot(x1,x1), dot(x2,x2), dot(x3,x3)), 0.0);
+    m = m * m;
+    return 42.0 * dot( m*m, vec4( dot(p0,x0), dot(p1,x1),
+                                  dot(p2,x2), dot(p3,x3) ) );
+}
+
+// Self-written referencing noise 2021 slide deck. https://cis566-procedural-graphics.github.io/noise-2021.pdf
+float fbm(float nOctaves, vec3 pos) {
+    float total = 0.;
+    float persistence = 1.f / 2.f;
+
+    for (float i = 0.f; i < nOctaves; ++i) {
+        float frequency = pow(2.f, i);
+        float amplitude = pow(persistence, i);
+
+        total += amplitude * snoise(pos * frequency);
+    }
+    return total;
+}
+
+void main()
+{
+    // Material base color (before shading)
+    vec4 diffuseColor = u_Color;
+
+    // Scaling factor that controls how quickly the colors change
+    float rampUp = .5 / tan(u_Time * .02);
+
+    // To keep track of when the cube is formed vs moving
+    float flash = sin(u_Time * .02);
+    
+    // random red, blue, or green based on position
+    vec3 flashColor = rand(vec3(fs_Pos));
+
+    if (flash < 0.0 ) { // Color when the sides of the box are "still"
+        // Flashes through very high octave fbm
+        float stopFlash = fbm(50.0, vec3((fs_Pos.xy * u_Time), fs_Pos.z));
+        out_Col = vec4(vec3(stopFlash) * flashColor, 1.0);
+    }
+    else { // Color when the sides of the box are moving
+        // Modify noise in relation to movement of object
+        float fbmBase = fbm(50.0, vec3(fs_Pos) * rampUp);
+        vec3  fbmRemap = vec3(smoothstep(-1., 1., fbmBase));
+
+        // Modify base color in relation to movement of object
+        diffuseColor *= (1.0 - smoothstep(0., 1., flash));
+        diffuseColor =  mix(diffuseColor, vec4(flashColor, 1.0), 1. - smoothstep(0., 1., flash));
+        out_Col = vec4(fbmRemap, 1.0) * diffuseColor;
+    }
+}
\ No newline at end of file
diff --git a/src/shaders/perlin-vert.glsl b/src/shaders/perlin-vert.glsl
new file mode 100644
index 0000000..7a20718
--- /dev/null
+++ b/src/shaders/perlin-vert.glsl
@@ -0,0 +1,63 @@
+#version 300 es
+
+//This is a vertex shader. While it is called a "shader" due to outdated conventions, this file
+//is used to apply matrix transformations to the arrays of vertex data passed to it.
+//Since this code is run on your GPU, each vertex is transformed simultaneously.
+//If it were run on your CPU, each vertex would have to be processed in a FOR loop, one at a time.
+//This simultaneous transformation allows your program to run much faster, especially when rendering
+//geometry with millions of vertices.
+
+uniform mat4 u_Model;       // The matrix that defines the transformation of the
+                            // object we're rendering. In this assignment,
+                            // this will be the result of traversing your scene graph.
+
+uniform mat4 u_ModelInvTr;  // The inverse transpose of the model matrix.
+                            // This allows us to transform the object's normals properly
+                            // if the object has been non-uniformly scaled.
+
+uniform mat4 u_ViewProj;    // The matrix that defines the camera's transformation.
+                            // We've written a static matrix for you to use for HW2,
+                            // but in HW3 you'll have to generate one yourself
+uniform highp float u_Time;
+
+
+in vec4 vs_Pos;             // The array of vertex positions passed to the shader
+
+in vec4 vs_Nor;             // The array of vertex normals passed to the shader
+
+in vec4 vs_Col;             // The array of vertex colors passed to the shader.
+
+
+out vec4 fs_Pos;
+out vec4 fs_Nor;            // The array of normals that has been transformed by u_ModelInvTr. This is implicitly passed to the fragment shader.
+out vec4 fs_LightVec;       // The direction in which our virtual light lies, relative to each vertex. This is implicitly passed to the fragment shader.
+out vec4 fs_Col;            // The color of each vertex. This is implicitly passed to the fragment shader.
+
+const vec4 lightPos = vec4(5, 5, 3, 1); //The position of our virtual light, which is used to compute the shading of
+                                        //the geometry in the fragment shader.
+
+void main()
+{
+    fs_Col = vs_Col;                         // Pass the vertex colors to the fragment shader for interpolation
+    fs_Pos = vs_Pos;
+
+    mat3 invTranspose = mat3(u_ModelInvTr);
+    fs_Nor = vec4(invTranspose * vec3(vs_Nor), 0);          // Pass the vertex normals to the fragment shader for interpolation.
+                                                            // Transform the geometry's normals by the inverse transpose of the
+                                                            // model matrix. This is necessary to ensure the normals remain
+                                                            // perpendicular to the surface after the surface is transformed by
+                                                            // the model matrix.
+
+    vec4 modelposition = u_Model * vs_Pos;   // Temporarily store the transformed vertex positions for use below
+    
+    float move = sin(u_Time * .02);          // Gate: the box only moves during the positive half of each period
+    
+    if (move > 0.) {                         // Moves the sides of the box in the normal direction using a tangent function
+        modelposition += tan(u_Time * .02) * fs_Nor; 
+    }
+
+    fs_LightVec = lightPos - modelposition;  // Compute the direction in which the light source lies
+
+    gl_Position = u_ViewProj * modelposition;// gl_Position is a built-in variable of OpenGL which is
+                                             // used to render the final positions of the geometry's vertices
+}
\ No newline at end of file
diff --git a/src/shaders/planet-frag.glsl b/src/shaders/planet-frag.glsl
new file mode 100644
index 0000000..f850411
--- /dev/null
+++ b/src/shaders/planet-frag.glsl
@@ -0,0 +1,298 @@
+#version 300 es
+
+// This is a fragment shader. If you've opened this file first, please
+// open and read lambert.vert.glsl before reading on.
+// Unlike the vertex shader, the fragment shader actually does compute
+// the shading of geometry. For every pixel in your program's output
+// screen, the fragment shader is run for every bit of geometry that
+// particular pixel overlaps. By implicitly interpolating the position
+// data passed into the fragment shader by the vertex shader, the fragment shader
+// can compute what color to apply to its pixel based on things like vertex
+// position, light position, and vertex color.
+precision highp float;
+
+uniform vec4 u_Color; // The color with which to render this instance of geometry.
+uniform highp float u_Time;
+uniform float u_Terrain;
+
+// These are the interpolated values out of the rasterizer, so you can't know
+// their specific values without knowing the vertices that contributed to them
+
+in vec4 fs_Pos;
+in vec4 fs_Nor;
+in vec4 fs_LightVec;
+in vec4 fs_Col;
+
+out vec4 out_Col; // This is the final output color that you will see on your
+                  // screen for the pixel that is currently being processed.
+
+// Taken from cis460 sky shader (not sure where it came from originally)
+vec4 permute(vec4 x){return mod(((x*34.0)+1.0)*x, 289.0);}
+vec4 taylorInvSqrt(vec4 r){return 1.79284291400159 - 0.85373472095314 * r;}
+
+float snoise(vec3 v){
+    const vec2  C = vec2(1.0/6.0, 1.0/3.0) ;
+    const vec4  D = vec4(0.0, 0.5, 1.0, 2.0);
+
+    // First corner
+    vec3 i  = floor(v + dot(v, C.yyy) );
+    vec3 x0 =   v - i + dot(i, C.xxx) ;
+
+    // Other corners
+    vec3 g = step(x0.yzx, x0.xyz);
+    vec3 l = 1.0 - g;
+    vec3 i1 = min( g.xyz, l.zxy );
+    vec3 i2 = max( g.xyz, l.zxy );
+
+    //  x0 = x0 - 0. + 0.0 * C
+    vec3 x1 = x0 - i1 + 1.0 * C.xxx;
+    vec3 x2 = x0 - i2 + 2.0 * C.xxx;
+    vec3 x3 = x0 - 1. + 3.0 * C.xxx;
+
+    // Permutations
+    i = mod(i, 289.0 );
+    vec4 p = permute( permute( permute(
+                                   i.z + vec4(0.0, i1.z, i2.z, 1.0 ))
+                               + i.y + vec4(0.0, i1.y, i2.y, 1.0 ))
+                      + i.x + vec4(0.0, i1.x, i2.x, 1.0 ));
+
+    // Gradients
+    // ( N*N points uniformly over a square, mapped onto an octahedron.)
+    float n_ = 1.0/7.0; // N=7
+    vec3  ns = n_ * D.wyz - D.xzx;
+
+    vec4 j = p - 49.0 * floor(p * ns.z *ns.z);  //  mod(p,N*N)
+
+    vec4 x_ = floor(j * ns.z);
+    vec4 y_ = floor(j - 7.0 * x_ );    // mod(j,N)
+
+    vec4 x = x_ *ns.x + ns.yyyy;
+    vec4 y = y_ *ns.x + ns.yyyy;
+    vec4 h = 1.0 - abs(x) - abs(y);
+
+    vec4 b0 = vec4( x.xy, y.xy );
+    vec4 b1 = vec4( x.zw, y.zw );
+
+    vec4 s0 = floor(b0)*2.0 + 1.0;
+    vec4 s1 = floor(b1)*2.0 + 1.0;
+    vec4 sh = -step(h, vec4(0.0));
+
+    vec4 a0 = b0.xzyw + s0.xzyw*sh.xxyy ;
+    vec4 a1 = b1.xzyw + s1.xzyw*sh.zzww ;
+
+    vec3 p0 = vec3(a0.xy,h.x);
+    vec3 p1 = vec3(a0.zw,h.y);
+    vec3 p2 = vec3(a1.xy,h.z);
+    vec3 p3 = vec3(a1.zw,h.w);
+
+    //Normalise gradients
+    vec4 norm = taylorInvSqrt(vec4(dot(p0,p0), dot(p1,p1), dot(p2, p2), dot(p3,p3)));
+    p0 *= norm.x;
+    p1 *= norm.y;
+    p2 *= norm.z;
+    p3 *= norm.w;
+
+    // Mix final noise value
+    vec4 m = max(0.6 - vec4(dot(x0,x0), dot(x1,x1), dot(x2,x2), dot(x3,x3)), 0.0);
+    m = m * m;
+    return 42.0 * dot( m*m, vec4( dot(p0,x0), dot(p1,x1),
+                                  dot(p2,x2), dot(p3,x3) ) );
+}
+
+// Self-written referencing noise 2021 slide deck. https://cis566-procedural-graphics.github.io/noise-2021.pdf
+float fbm(float nOctaves, vec3 pos) {
+    float total = 0.;
+    float persistence = 1.f / 2.f;
+
+    for (float i = 0.f; i < nOctaves; ++i) {
+        float frequency = pow(2.f, i);
+        float amplitude = pow(persistence, i);
+
+        total += amplitude * snoise(pos * frequency);
+    }
+    return total;
+}
+
+float bias(float time, float bias)
+{
+  return (time / ((((1.0/bias) - 2.0)*(1.0 - time))+1.0));
+}
+
+float gain(float time, float gain)
+{
+  if (time < 0.5) {
+    return bias(time * 2.0,gain)/2.0;
+  }
+  else {
+    return bias(time * 2.0 - 1.0,1.0 - gain)/2.0 + 0.5;
+  }
+}
+
+float easeInQuart(float x)
+{
+    return x * x * x * x;
+}
+
+float easeInOutCubic(float x)
+{
+  return x < 0.5 ? 4.0 * x * x * x : 1.0 - pow(-2.0 * x + 2.0, 3.0) / 2.0;
+}
+
+vec3 hash33(vec3 p3) {
+	vec3 p = fract(p3 * vec3(.1031,.11369,.13787));
+    p += dot(p, p.yxz+19.19);
+    return -1.0 + 2.0 * fract(vec3((p.x + p.y)*p.z, (p.x+p.z)*p.y, (p.y+p.z)*p.x));
+}
+
+float worley(vec3 p, float scale){
+
+    vec3 id = floor(p*scale);
+    vec3 fd = fract(p*scale);
+
+    float n = 0.;
+
+    float minimalDist = 1.;
+
+
+    for(float x = -1.; x <=1.; x++){
+        for(float y = -1.; y <=1.; y++){
+            for(float z = -1.; z <=1.; z++){
+
+                vec3 coord = vec3(x,y,z);
+                vec3 rId = hash33(mod(id+coord,scale))*0.5+0.5;
+
+                vec3 r = coord + rId - fd; 
+
+                float d = dot(r,r);
+
+                if(d < minimalDist){
+                    minimalDist = d;
+                }
+
+            }//z
+        }//y
+    }//x
+    
+    return 1.0-minimalDist;
+}
+
+vec2 height(vec3 value) 
+{   
+    value *= u_Terrain; 
+    // noise range is -1.338 to 1.3
+    float terrainType = 0.0; // DEFAULT
+    float baseNoise = (fbm(7.0, value) + 1.338) / 2.638; // fbm mapped from 0 to 1
+    baseNoise = gain(baseNoise, .8); // makes the peaks more dramatic
+    float noiseVal = clamp(baseNoise, .5, 1.0); // takes everything below .5 and clamps it flat (water)
+    if (noiseVal <= .5) {
+        terrainType = 3.0; //WATERRRR
+    }
+    if (noiseVal > .5 && noiseVal < .525) {
+        noiseVal = mix(.5, .525, easeInQuart(smoothstep(.5, .525, noiseVal)));
+        terrainType = 1.0;// COASTLINE
+    }
+    return vec2(noiseVal, terrainType);
+}
+
+const float DELTA = 1e-4;
+
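+// Recomputes the normal of the displaced terrain: sample the height field at small
+// offsets along the tangent and bitangent and cross the resulting differences.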
+vec4 calcNewNor(vec3 oldNor) {
+    vec3 mid_Nor = oldNor;
+    vec3 tangent = normalize(cross(vec3(0.0, 1.0, 0.0), vec3(mid_Nor)));
+    vec3 bitangent = cross(vec3(mid_Nor), tangent);
+
+    float px = height(fs_Pos.xyz + DELTA * tangent).x;
+    float nx = height(fs_Pos.xyz - DELTA * tangent).x;
+    float py = height(fs_Pos.xyz + DELTA * bitangent).x;
+    float ny = height(fs_Pos.xyz - DELTA * bitangent).x;
+
+    vec3 p1 = fs_Pos.xyz + DELTA * tangent + px * mid_Nor.xyz;
+    vec3 p2 = fs_Pos.xyz + DELTA * bitangent + py * mid_Nor.xyz;
+    vec3 p3 = fs_Pos.xyz - DELTA * tangent + nx * mid_Nor.xyz;
+    vec3 p4 = fs_Pos.xyz - DELTA * bitangent + ny * mid_Nor.xyz;
+
+    return vec4(normalize(cross(normalize(p1 - p3), normalize(p2 - p4))), 0.0);
+}
+
+float waterNoise(float boundary)
+{
+    float modTime = u_Time * .0001;
+    vec3 movedVal = (fs_Pos.xyz + 1.0) / 2.0;
+    float waveNoise;
+    if (boundary < .28) {
+        waveNoise = snoise(vec3(movedVal.x * 2.3 - sin(modTime), movedVal.y * 11.0, movedVal.z + sin(modTime)) * 10.0);
+    }
+    else {
+        waveNoise = snoise(vec3(movedVal.x, movedVal.y * 30.0 + sin(modTime), movedVal.z + sin(modTime)) * 40.0);
+    }
+    return waveNoise;
+}
+
+float waves(float val)
+{
+    float modTime = u_Time * .0001;
+    float colorBoi = snoise(vec3(fs_Pos.x + sin(modTime), fs_Pos.y + sin(modTime), fs_Pos.z + sin(modTime)) * 50.0);
+    if (colorBoi > 0.3 && colorBoi < 0.6) {
+        colorBoi = 1.0 * bias(smoothstep(0.4, 0.5, val), .3);
+    }
+    else {
+        colorBoi = 0.0;
+    }
+    return colorBoi;
+}
+
+void main()
+{
+    vec4 final_Nor = calcNewNor(fs_Nor.xyz);
+   
+    // Material base color (before shading)
+    vec4 diffuseColor = vec4(1.0);
+    vec2 terrainColor = height(fs_Pos.xyz); // fbm mapped from 0 to 1
+    
+    // WATER
+    if (terrainColor.y == 3.0) {
+        vec3 lightBlue = vec3(102.0, 207.0, 255.0) / 255.0;
+        vec3 darkBlue = vec3(46.0, 108.0, 217.0) / 255.0;
+        vec3 blue = mix(darkBlue, lightBlue, smoothstep(.2, .5, clamp((fbm(7.0, fs_Pos.xyz * u_Terrain) + 1.338) / 2.638, .3, .5))); // interpolation of light to dark blue for the base water color
+       
+        float waveBound = (fbm(10.0, fs_Pos.xyz * u_Terrain) + 1.338) / 2.638; // boundary for where the shoreline waves begin
+        float deepWaterBound = (fbm(7.0, fs_Pos.xyz * u_Terrain) + 1.338) / 2.638; // clamp wave bound
+        float deepWaterWaves = waterNoise(deepWaterBound) + terrainColor.x; // boundary for what to color as deep water waves   
+       
+        if (waveBound < 0.5 && waveBound > 0.35) { // adds waves close to shoreline
+            float waveColor = waves(waveBound);
+            blue += vec3(waveColor);
+        }
+        if (waveBound > .49) {
+            blue += vec3(.75);
+        }
+        if (deepWaterWaves > 0.5 && deepWaterWaves < 0.6) { // adds fake waves to deeper water
+            blue += vec3(.3);
+        }
+        diffuseColor = vec4(blue, 1.0);
+    }
+
+    // LAND
+    else {
+        if (terrainColor.x < .51) { // COASTLINE
+            vec3 yellow = vec3(230.0, 218.0, 170.0) / 255.0;
+            vec3 pink = vec3(235.0, 227.0, 195.0) / 255.0;
+            diffuseColor = vec4(mix(yellow, pink, smoothstep(.3, .5, terrainColor.x)), 1.0);
+        }
+        else {
+            diffuseColor = vec4(vec3(u_Color), 1.0);
+        }
+    } 
+    float diffuseTerm = dot(normalize(final_Nor), normalize(fs_LightVec));
+    // Avoid negative lighting values
+    diffuseTerm = clamp(diffuseTerm, 0.0, 1.0);
+
+    float ambientTerm = 0.2;
+
+    float lightIntensity = diffuseTerm + ambientTerm;   // Add a small float value to the color multiplier
+                                                        // to simulate ambient lighting. This ensures that faces that are not
+                                                        // lit by our point light are not completely black.
+
+    // Compute final shaded color
+    out_Col = vec4(diffuseColor.rgb * lightIntensity, diffuseColor.a);
+}
\ No newline at end of file
diff --git a/src/shaders/planet-vert.glsl b/src/shaders/planet-vert.glsl
new file mode 100644
index 0000000..6ed092c
--- /dev/null
+++ b/src/shaders/planet-vert.glsl
@@ -0,0 +1,221 @@
+#version 300 es
+
+//This is a vertex shader. While it is called a "shader" due to outdated conventions, this file
+//is used to apply matrix transformations to the arrays of vertex data passed to it.
+//Since this code is run on your GPU, each vertex is transformed simultaneously.
+//If it were run on your CPU, each vertex would have to be processed in a FOR loop, one at a time.
+//This simultaneous transformation allows your program to run much faster, especially when rendering
+//geometry with millions of vertices.
+
+uniform mat4 u_Model;       // The matrix that defines the transformation of the
+                            // object we're rendering. In this assignment,
+                            // this will be the result of traversing your scene graph.
+
+uniform mat4 u_ModelInvTr;  // The inverse transpose of the model matrix.
+                            // This allows us to transform the object's normals properly
+                            // if the object has been non-uniformly scaled.
+
+uniform mat4 u_ViewProj;    // The matrix that defines the camera's transformation.
+                            // We've written a static matrix for you to use for HW2,
+                            // but in HW3 you'll have to generate one yourself
+uniform highp float u_Time;
+uniform float u_Terrain;
+
+in vec4 vs_Pos;             // The array of vertex positions passed to the shader
+
+in vec4 vs_Nor;             // The array of vertex normals passed to the shader
+
+in vec4 vs_Col;             // The array of vertex colors passed to the shader.
+
+
+out vec4 fs_Pos;
+out vec4 fs_Nor;            // The array of normals that has been transformed by u_ModelInvTr. This is implicitly passed to the fragment shader.
+out vec4 fs_LightVec;       // The direction in which our virtual light lies, relative to each vertex. This is implicitly passed to the fragment shader.
+out vec4 fs_Col;            // The color of each vertex. This is implicitly passed to the fragment shader.
+
+const vec4 lightPos = vec4(5, 5, 3, 1); //The position of our virtual light, which is used to compute the shading of
+                                        //the geometry in the fragment shader.
+
+// Taken from cis460 sky shader (not sure where it came from originally)
+vec4 permute(vec4 x){return mod(((x*34.0)+1.0)*x, 289.0);}
+vec4 taylorInvSqrt(vec4 r){return 1.79284291400159 - 0.85373472095314 * r;}
+
+float snoise(vec3 v){
+    const vec2  C = vec2(1.0/6.0, 1.0/3.0) ;
+    const vec4  D = vec4(0.0, 0.5, 1.0, 2.0);
+
+    // First corner
+    vec3 i  = floor(v + dot(v, C.yyy) );
+    vec3 x0 =   v - i + dot(i, C.xxx) ;
+
+    // Other corners
+    vec3 g = step(x0.yzx, x0.xyz);
+    vec3 l = 1.0 - g;
+    vec3 i1 = min( g.xyz, l.zxy );
+    vec3 i2 = max( g.xyz, l.zxy );
+
+    //  x0 = x0 - 0. + 0.0 * C
+    vec3 x1 = x0 - i1 + 1.0 * C.xxx;
+    vec3 x2 = x0 - i2 + 2.0 * C.xxx;
+    vec3 x3 = x0 - 1. + 3.0 * C.xxx;
+
+    // Permutations
+    i = mod(i, 289.0 );
+    vec4 p = permute( permute( permute(
+                                   i.z + vec4(0.0, i1.z, i2.z, 1.0 ))
+                               + i.y + vec4(0.0, i1.y, i2.y, 1.0 ))
+                      + i.x + vec4(0.0, i1.x, i2.x, 1.0 ));
+
+    // Gradients
+    // ( N*N points uniformly over a square, mapped onto an octahedron.)
+    float n_ = 1.0/7.0; // N=7
+    vec3  ns = n_ * D.wyz - D.xzx;
+
+    vec4 j = p - 49.0 * floor(p * ns.z *ns.z);  //  mod(p,N*N)
+
+    vec4 x_ = floor(j * ns.z);
+    vec4 y_ = floor(j - 7.0 * x_ );    // mod(j,N)
+
+    vec4 x = x_ *ns.x + ns.yyyy;
+    vec4 y = y_ *ns.x + ns.yyyy;
+    vec4 h = 1.0 - abs(x) - abs(y);
+
+    vec4 b0 = vec4( x.xy, y.xy );
+    vec4 b1 = vec4( x.zw, y.zw );
+
+    vec4 s0 = floor(b0)*2.0 + 1.0;
+    vec4 s1 = floor(b1)*2.0 + 1.0;
+    vec4 sh = -step(h, vec4(0.0));
+
+    vec4 a0 = b0.xzyw + s0.xzyw*sh.xxyy ;
+    vec4 a1 = b1.xzyw + s1.xzyw*sh.zzww ;
+
+    vec3 p0 = vec3(a0.xy,h.x);
+    vec3 p1 = vec3(a0.zw,h.y);
+    vec3 p2 = vec3(a1.xy,h.z);
+    vec3 p3 = vec3(a1.zw,h.w);
+
+    //Normalise gradients
+    vec4 norm = taylorInvSqrt(vec4(dot(p0,p0), dot(p1,p1), dot(p2, p2), dot(p3,p3)));
+    p0 *= norm.x;
+    p1 *= norm.y;
+    p2 *= norm.z;
+    p3 *= norm.w;
+
+    // Mix final noise value
+    vec4 m = max(0.6 - vec4(dot(x0,x0), dot(x1,x1), dot(x2,x2), dot(x3,x3)), 0.0);
+    m = m * m;
+    return 42.0 * dot( m*m, vec4( dot(p0,x0), dot(p1,x1),
+                                  dot(p2,x2), dot(p3,x3) ) );
+}
+
+// Self-written referencing noise 2021 slide deck. https://cis566-procedural-graphics.github.io/noise-2021.pdf
+float fbm(float nOctaves, vec3 pos) {
+    float total = 0.;
+    float persistence = 1.f / 2.f;
+
+    for (float i = 0.f; i < nOctaves; ++i) {
+        float frequency = pow(2.f, i);
+        float amplitude = pow(persistence, i);
+
+        total += amplitude * snoise(pos * frequency);
+    }
+    return total;
+}
+
+float bias(float time, float bias)
+{
+  return (time / ((((1.0/bias) - 2.0)*(1.0 - time))+1.0));
+}
+
+float gain(float time, float gain)
+{
+  if (time < 0.5) {
+    return bias(time * 2.0,gain)/2.0;
+  }
+  else {
+    return bias(time * 2.0 - 1.0,1.0 - gain)/2.0 + 0.5;
+  }
+} 
+
+float easeInQuart(float x)
+{
+    return x * x * x * x;
+}
+
+float easeOutQuart(float x) {
+  return 1.0 - pow(1.0 - x, 4.0);
+}
+
+// worley and hash taken from https://www.shadertoy.com/view/3d3fWN
+vec3 hash33(vec3 p3) {
+	vec3 p = fract(p3 * vec3(.1031,.11369,.13787));
+    p += dot(p, p.yxz+19.19);
+    return -1.0 + 2.0 * fract(vec3((p.x + p.y)*p.z, (p.x+p.z)*p.y, (p.y+p.z)*p.x));
+}
+
+float worley(vec3 p, float scale){
+    vec3 id = floor(p*scale);
+    vec3 fd = fract(p*scale);
+
+    float n = 0.;
+
+    float minimalDist = 1.;
+
+    for(float x = -1.; x <=1.; x++){
+        for(float y = -1.; y <=1.; y++){
+            for(float z = -1.; z <=1.; z++){
+
+                vec3 coord = vec3(x,y,z);
+                vec3 rId = hash33(mod(id+coord,scale))*0.5+0.5;
+
+                vec3 r = coord + rId - fd; 
+
+                float d = dot(r,r);
+
+                if(d < minimalDist){
+                    minimalDist = d;
+                }
+            }//z
+        }//y
+    }//x
+    return 1.0-minimalDist;
+}
+
+float height(vec3 value) 
+{
+    // noise range is -1.338 to 1.3
+    float baseNoise = (fbm(7.0, value) + 1.338) / 2.638; // fbm mapped from 0 to 1
+    baseNoise = gain(baseNoise, .8); // makes the peaks more dramatic
+    float noiseVal = clamp(baseNoise, .5, 1.0); // takes everything below .5 and clamps it flat (water)
+    if (noiseVal > .5 && noiseVal < .525) { // coastline boundary
+        noiseVal = mix(.5, .525, easeInQuart(smoothstep(.5, .525, noiseVal)));
+    }
+    return noiseVal;
+}
+
+void main()
+{
+    fs_Col = vs_Col;                         // Pass the vertex colors to the fragment shader for interpolation
+    fs_Pos = vs_Pos;
+    fs_Nor = vs_Nor;
+
+    mat3 invTranspose = mat3(u_ModelInvTr);
+    vec4 mid_Nor = vec4(invTranspose * vec3(vs_Nor), 0);   // Transform the geometry's normals by the inverse transpose of the
+                                                            // model matrix so they remain perpendicular to the surface after
+                                                            // the surface is transformed. This transformed normal is only used
+                                                            // below to displace the vertex; the raw normal is passed as fs_Nor.
+    
+ 
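+    // Displace the vertex along its transformed normal by the terrain height so the
+    // continents and mountains rise out of the base sphere.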
+    float noiseHeight = height(vec3(fs_Pos * u_Terrain));
+
+    vec4 noisedPos = noiseHeight * mid_Nor;
+    vec4 modelposition = u_Model * vs_Pos;   // Temporarily store the transformed vertex positions for use below
+    modelposition += noisedPos;
+
+    fs_LightVec = lightPos - modelposition;  // Compute the direction in which the light source lies
+
+    gl_Position = u_ViewProj * modelposition;// gl_Position is a built-in variable of OpenGL which is
+                                             // used to render the final positions of the geometry's vertices
+}
\ No newline at end of file
diff --git a/src/shaders/raymarch-frag.glsl b/src/shaders/raymarch-frag.glsl
new file mode 100644
index 0000000..1dddec2
--- /dev/null
+++ b/src/shaders/raymarch-frag.glsl
@@ -0,0 +1,142 @@
+#version 300 es
+
+precision highp float;
+
+uniform vec4 u_Color; // The color with which to render this instance of geometry.
+uniform highp float u_Time;
+uniform mat4 u_ViewProj;    // The matrix that defines the camera's transformation.
+uniform float u_Terrain;
+uniform float u_Cloud;
+
+in vec4 fs_Pos;
+in vec4 fs_Nor;
+in vec4 fs_LightVec;
+in vec4 fs_Col;
+
+in vec4 old_Pos;
+
+out vec4 out_Col; // This is the final output color that you will see on your
+                  // screen for the pixel that is currently being processed.
+
+// Taken from cis460 sky shader (not sure where it came from originally)
+vec4 permute(vec4 x){return mod(((x*34.0)+1.0)*x, 289.0);}
+vec4 taylorInvSqrt(vec4 r){return 1.79284291400159 - 0.85373472095314 * r;}
+
+float snoise(vec3 v){
+    const vec2  C = vec2(1.0/6.0, 1.0/3.0) ;
+    const vec4  D = vec4(0.0, 0.5, 1.0, 2.0);
+
+    // First corner
+    vec3 i  = floor(v + dot(v, C.yyy) );
+    vec3 x0 =   v - i + dot(i, C.xxx) ;
+
+    // Other corners
+    vec3 g = step(x0.yzx, x0.xyz);
+    vec3 l = 1.0 - g;
+    vec3 i1 = min( g.xyz, l.zxy );
+    vec3 i2 = max( g.xyz, l.zxy );
+
+    //  x0 = x0 - 0. + 0.0 * C
+    vec3 x1 = x0 - i1 + 1.0 * C.xxx;
+    vec3 x2 = x0 - i2 + 2.0 * C.xxx;
+    vec3 x3 = x0 - 1. + 3.0 * C.xxx;
+
+    // Permutations
+    i = mod(i, 289.0 );
+    vec4 p = permute( permute( permute(
+                                   i.z + vec4(0.0, i1.z, i2.z, 1.0 ))
+                               + i.y + vec4(0.0, i1.y, i2.y, 1.0 ))
+                      + i.x + vec4(0.0, i1.x, i2.x, 1.0 ));
+
+    // Gradients
+    // ( N*N points uniformly over a square, mapped onto an octahedron.)
+    float n_ = 1.0/7.0; // N=7
+    vec3  ns = n_ * D.wyz - D.xzx;
+
+    vec4 j = p - 49.0 * floor(p * ns.z *ns.z);  //  mod(p,N*N)
+
+    vec4 x_ = floor(j * ns.z);
+    vec4 y_ = floor(j - 7.0 * x_ );    // mod(j,N)
+
+    vec4 x = x_ *ns.x + ns.yyyy;
+    vec4 y = y_ *ns.x + ns.yyyy;
+    vec4 h = 1.0 - abs(x) - abs(y);
+
+    vec4 b0 = vec4( x.xy, y.xy );
+    vec4 b1 = vec4( x.zw, y.zw );
+
+    vec4 s0 = floor(b0)*2.0 + 1.0;
+    vec4 s1 = floor(b1)*2.0 + 1.0;
+    vec4 sh = -step(h, vec4(0.0));
+
+    vec4 a0 = b0.xzyw + s0.xzyw*sh.xxyy ;
+    vec4 a1 = b1.xzyw + s1.xzyw*sh.zzww ;
+
+    vec3 p0 = vec3(a0.xy,h.x);
+    vec3 p1 = vec3(a0.zw,h.y);
+    vec3 p2 = vec3(a1.xy,h.z);
+    vec3 p3 = vec3(a1.zw,h.w);
+
+    //Normalise gradients
+    vec4 norm = taylorInvSqrt(vec4(dot(p0,p0), dot(p1,p1), dot(p2, p2), dot(p3,p3)));
+    p0 *= norm.x;
+    p1 *= norm.y;
+    p2 *= norm.z;
+    p3 *= norm.w;
+
+    // Mix final noise value
+    vec4 m = max(0.6 - vec4(dot(x0,x0), dot(x1,x1), dot(x2,x2), dot(x3,x3)), 0.0);
+    m = m * m;
+    return 42.0 * dot( m*m, vec4( dot(p0,x0), dot(p1,x1),
+                                  dot(p2,x2), dot(p3,x3) ) );
+}
+
+float fbm(float nOctaves, vec3 pos) {
+    float total = 0.;
+    float persistence = 1.f / 2.f;
+
+    for (float i = 0.f; i < nOctaves; ++i) {
+        float frequency = pow(2.f, i);
+        float amplitude = pow(persistence, i);
+
+        total += amplitude * snoise(pos * frequency);
+    }
+    return total;
+}
+
+float bias(float time, float bias)
+{
+  return (time / ((((1.0/bias) - 2.0)*(1.0 - time))+1.0));
+}
+
+float gain(float time, float gain)
+{
+  if (time < 0.5) {
+    return bias(time * 2.0,gain)/2.0;
+  }
+  else {
+    return bias(time * 2.0 - 1.0,1.0 - gain)/2.0 + 0.5;
+  }
+} 
+
+float easeInOutCustom(float x) {
+    return -(4.0 *pow((x - .5),2.0)) + 1.0;
+}
+
+void main()
+{
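+    // Clouds only appear over a band of terrain heights: reuse the planet's fbm height
+    // and keep fragments whose value falls between bounds derived from u_Cloud.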
+    float baseNoise = (fbm(7.0, old_Pos.xyz * u_Terrain) + 1.338) / 2.638;
+    baseNoise = gain(baseNoise, .8);
+    float lowBound = u_Cloud - .35;
+    float highBound = u_Cloud - .12;
+    if (baseNoise > lowBound && baseNoise < highBound) {
+        float modTime = sin(u_Time * .0007);
+        float cloudNoise = (fbm(20.0, vec3(fs_Pos.x + modTime, fs_Pos.y, fs_Pos.z + modTime) * 5.0) + 1.338) / 2.638;
+        float remapped = ((baseNoise - lowBound) / .23);
+        cloudNoise = mix(0.0, cloudNoise, easeInOutCustom(remapped));
+        out_Col = vec4(vec3(1.0), cloudNoise);
+    }
+    else {
+        out_Col = vec4(0.0);
+    }
+}
\ No newline at end of file
diff --git a/src/shaders/raymarch-vert.glsl b/src/shaders/raymarch-vert.glsl
new file mode 100644
index 0000000..a56da3f
--- /dev/null
+++ b/src/shaders/raymarch-vert.glsl
@@ -0,0 +1,66 @@
+#version 300 es
+
+//This is a vertex shader. While it is called a "shader" due to outdated conventions, this file
+//is used to apply matrix transformations to the arrays of vertex data passed to it.
+//Since this code is run on your GPU, each vertex is transformed simultaneously.
+//If it were run on your CPU, each vertex would have to be processed in a FOR loop, one at a time.
+//This simultaneous transformation allows your program to run much faster, especially when rendering
+//geometry with millions of vertices.
+
+uniform mat4 u_Model;       // The matrix that defines the transformation of the
+                            // object we're rendering. In this assignment,
+                            // this will be the result of traversing your scene graph.
+
+uniform mat4 u_ModelInvTr;  // The inverse transpose of the model matrix.
+                            // This allows us to transform the object's normals properly
+                            // if the object has been non-uniformly scaled.
+
+uniform mat4 u_ViewProj;    // The matrix that defines the camera's transformation.
+                            // We've written a static matrix for you to use for HW2,
+                            // but in HW3 you'll have to generate one yourself
+uniform highp float u_Time;
+
+uniform float u_Terrain;
+uniform float u_Cloud;
+
+
+in vec4 vs_Pos;             // The array of vertex positions passed to the shader
+
+in vec4 vs_Nor;             // The array of vertex normals passed to the shader
+
+in vec4 vs_Col;             // The array of vertex colors passed to the shader.
+
+
+out vec4 fs_Pos;
+out vec4 fs_Nor;            // The array of normals that has been transformed by u_ModelInvTr. This is implicitly passed to the fragment shader.
+out vec4 fs_LightVec;       // The direction in which our virtual light lies, relative to each vertex. This is implicitly passed to the fragment shader.
+out vec4 fs_Col;            // The color of each vertex. This is implicitly passed to the fragment shader.
+
+out vec4 old_Pos;
+
+const vec4 lightPos = vec4(5, 5, 3, 1); //The position of our virtual light, which is used to compute the shading of
+                                        //the geometry in the fragment shader.
+
+void main()
+{
+    fs_Col = vs_Col;                         // Pass the vertex colors to the fragment shader for interpolation
+    fs_Pos = vs_Pos;
+
+    mat3 invTranspose = mat3(u_ModelInvTr);
+    fs_Nor = vec4(invTranspose * vec3(vs_Nor), 0);          // Pass the vertex normals to the fragment shader for interpolation.
+                                                            // Transform the geometry's normals by the inverse transpose of the
+                                                            // model matrix. This is necessary to ensure the normals remain
+                                                            // perpendicular to the surface after the surface is transformed by
+                                                            // the model matrix.
+
+
+    vec4 modelposition = u_Model * vs_Pos;   // Temporarily store the transformed vertex positions for use below
+
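+    // Inflate the sphere along its normals by u_Cloud so the cloud layer sits above the terrain.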
+    modelposition += vs_Nor * u_Cloud;
+
+    fs_LightVec = lightPos - modelposition;  // Compute the direction in which the light source lies
+
+    gl_Position = u_ViewProj * modelposition;// gl_Position is a built-in variable of OpenGL which is
+                                             // used to render the final positions of the geometry's vertices
+    old_Pos = vs_Pos;
+}
\ No newline at end of file