
  1. Week 9 - Friday

2. What did we talk about last time?
• Bump mapping
• Radiometry
• Photometry
• Colorimetry
• Lighting with shader code
  • Ambient
  • Directional (diffuse and specular)

3. Adding a specular component to the diffuse shader requires incorporating the view vector
• It is declared in the shader file and set as a parameter from the C# code, as sketched below
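A minimal sketch of that C# side, assuming an already loaded Effect named effect and Vector3 values cameraPosition and lightDirection (all hypothetical names):

    // Pass the camera location and light settings into the shader's parameters
    effect.Parameters["Camera"].SetValue(cameraPosition);
    effect.Parameters["DiffuseLightDirection"].SetValue(lightDirection);
    effect.Parameters["SpecularColor"].SetValue(Color.White.ToVector4());
    effect.Parameters["SpecularIntensity"].SetValue(0.5f);
    effect.Parameters["Shininess"].SetValue(20f);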

4. The camera location is added to the declarations
• As are specular colors and a shininess parameter

float4x4 World;
float4x4 View;
float4x4 Projection;
float4x4 WorldInverseTranspose;
float3 Camera;

static const float PI = 3.14159265f;

float4 AmbientColor = float4(1, 1, 1, 1);
float AmbientIntensity = 0.1;

float3 DiffuseLightDirection;
float4 DiffuseColor = float4(1, 1, 1, 1);
float DiffuseIntensity = 0.7;

float Shininess = 20;
float4 SpecularColor = float4(1, 1, 1, 1);
float SpecularIntensity = 0.5;
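The WorldInverseTranspose matrix used below for transforming normals can be built on the C# side with XNA's matrix helpers; a sketch, assuming a Matrix named world (hypothetical):

    // The inverse transpose of the world matrix transforms normals correctly
    // even when the world transform contains non-uniform scaling
    Matrix worldInverseTranspose = Matrix.Transpose(Matrix.Invert(world));
    effect.Parameters["WorldInverseTranspose"].SetValue(worldInverseTranspose);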

5. The output adds a normal so that the reflection vector can be computed in the pixel shader
• A world position lets us compute the view vector to the camera

struct VertexShaderInput
{
    float4 Position : POSITION0;
    float3 Normal : NORMAL0;
};

struct VertexShaderOutput
{
    float4 Position : POSITION0;
    float4 Color : COLOR0;
    float3 Normal : NORMAL0;
    float4 WorldPosition : POSITIONT;
};

6. The same computations as the diffuse shader, but we store the normal and the transformed world position in the output

VertexShaderOutput VertexShaderFunction(VertexShaderInput input)
{
    VertexShaderOutput output;
    float4 worldPosition = mul(input.Position, World);
    output.WorldPosition = worldPosition;
    float4 viewPosition = mul(worldPosition, View);
    output.Position = mul(viewPosition, Projection);
    float3 normal = normalize(mul(input.Normal, (float3x3)WorldInverseTranspose));
    float lightIntensity = dot(normal, normalize(DiffuseLightDirection));
    output.Color = saturate(DiffuseColor * DiffuseIntensity * lightIntensity);
    output.Normal = normal;
    return output;
}

7. Here we finally have a real computation because we need to use the pixel normal (interpolated from the vertex normals) in combination with the view vector
• The technique is the same

float4 PixelShaderFunction(VertexShaderOutput input) : COLOR0
{
    float3 light = normalize(DiffuseLightDirection);
    float3 normal = normalize(input.Normal);
    float3 reflect = normalize(2 * dot(light, normal) * normal - light);
    float3 view = normalize(Camera - (float3)input.WorldPosition);
    float dotProduct = dot(reflect, view);
    float4 specular = (8 + Shininess) / (8 * PI) * SpecularIntensity * SpecularColor *
        pow(saturate(dotProduct), Shininess);
    return saturate(input.Color + AmbientColor * AmbientIntensity + specular);
}
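Not shown on the slides is the technique block that ties the two functions together at the end of the effect file; a minimal sketch, with hypothetical technique and pass names (the compile targets depend on the platform, e.g. vs_2_0/ps_2_0 for XNA or vs_4_0_level_9_1/ps_4_0_level_9_1 for MonoGame on DirectX):

    technique Specular
    {
        pass Pass1
        {
            VertexShader = compile vs_2_0 VertexShaderFunction();
            PixelShader = compile ps_2_0 PixelShaderFunction();
        }
    }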

8. Point lights model omni lights at a specific position
• They generally attenuate (get dimmer) over a distance and have a maximum range
• DirectX has a constant attenuation, a linear attenuation, and a quadratic attenuation
• You can choose attenuation levels through shaders (a sketch follows this list)
• They are more computationally expensive than directional lights because a light vector has to be computed for every pixel
• It is possible to implement point lights in a deferred shader, lighting only those pixels that actually get used
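A minimal HLSL sketch of the classic constant/linear/quadratic attenuation model, with hypothetical coefficient names (the shader on the following slides instead uses a squared falloff against LightRadius):

    float ConstantAttenuation = 1.0;
    float LinearAttenuation = 0.1;
    float QuadraticAttenuation = 0.01;

    // d is the distance from the surface point to the light
    float Attenuate(float d)
    {
        return 1.0 / (ConstantAttenuation + LinearAttenuation * d +
            QuadraticAttenuation * d * d);
    }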

9. We add light position and radius

float4x4 World;
float4x4 View;
float4x4 Projection;
float4x4 WorldInverseTranspose;
float3 LightPosition;
float LightRadius = 100;
float3 Camera;

static const float PI = 3.14159265f;

float4 AmbientColor = float4(1, 1, 1, 1);
float AmbientIntensity = 0.1;

float4 DiffuseColor = float4(1, 1, 1, 1);
float DiffuseIntensity = 0.7;

float Shininess = 20;
float4 SpecularColor = float4(1, 1, 1, 1);
float SpecularIntensity = 0.5;

10. We no longer need color in the output
• We do need the vector from the surface location to the camera, which the pixel shader computes
• For that, we keep the world position at that fragment

struct VertexShaderInput
{
    float4 Position : POSITION0;
    float3 Normal : NORMAL0;
};

struct VertexShaderOutput
{
    float4 Position : POSITION0;
    float3 Normal : NORMAL0;
    float4 WorldPosition : POSITIONT;
};

11. We compute the normal and the world position

VertexShaderOutput VertexShaderFunction(VertexShaderInput input)
{
    VertexShaderOutput output;
    float4 worldPosition = mul(input.Position, World);
    output.WorldPosition = worldPosition;
    float4 viewPosition = mul(worldPosition, View);
    output.Position = mul(viewPosition, Projection);
    float3 normal = normalize(mul(input.Normal, (float3x3)WorldInverseTranspose));
    output.Normal = normal;
    return output;
}

12. Lots of junk in here

float4 PixelShaderFunction(VertexShaderOutput input) : COLOR0
{
    float3 lightDirection = LightPosition - (float3)input.WorldPosition;
    float3 normal = normalize(input.Normal);
    // Squared falloff: full strength at the light, zero at LightRadius
    float intensity = pow(1 - saturate(length(lightDirection) / LightRadius), 2);
    lightDirection = normalize(lightDirection);
    float3 view = normalize(Camera - (float3)input.WorldPosition);
    float lightIntensity = dot(normal, lightDirection);
    float4 diffuse = DiffuseColor * DiffuseIntensity * lightIntensity * intensity;
    float3 reflect = normalize(2 * lightIntensity * normal - lightDirection);
    float dotProduct = dot(reflect, view);
    float4 specular = (8 + Shininess) / (8 * PI) * SpecularIntensity * SpecularColor *
        pow(saturate(dotProduct), Shininess) * intensity;
    return saturate(diffuse + AmbientColor * AmbientIntensity + specular);
}

13. The bidirectional reflectance distribution function is a function that describes the ratio of outgoing radiance to incoming irradiance
• This function changes based on:
  • Wavelength
  • Angle of light to surface
  • Angle of viewer from surface
• For point or directional lights, we do not need differentials and can write the BRDF equation:

$L_o(\mathbf{v}) = f(\mathbf{l}, \mathbf{v}) \otimes E_L \cos\theta_i$

14. We've been talking about lighting models
• Lambertian, specular, etc.
• A BRDF is an attempt to model the physics slightly better
• A big difference is that different wavelengths are absorbed and reflected differently by different materials
• Rendering models in real time with (more) accurate BRDFs is still an open research problem

15. They also have global lighting (shadows and reflections)
• [Rendered example images taken from www.kevinbeason.com]

16. The BRDF is supposed to account for all the light interactions we discussed in Chapter 5 (reflection and refraction)
• We can see the similarity to the lighting equation from Chapter 5, now with a BRDF:

$L_o(\mathbf{v}) = \sum_{k=1}^{n} f(\mathbf{l}_k, \mathbf{v}) \otimes E_{L_k} \cos\theta_{i_k}$

17. If the subsurface scattering effects are great, the size of the pixel may matter
• Then, a bidirectional surface scattering reflectance distribution function (BSSRDF) is needed
• Or if the surface characteristics change in different areas, you need a spatially varying BRDF
• And so on…

18. Helmholtz reciprocity:
• $f(\mathbf{l}, \mathbf{v}) = f(\mathbf{v}, \mathbf{l})$
• Conservation of energy:
  • Outgoing energy cannot be greater than incoming energy
• The simplest BRDF is Lambertian shading
  • We assume that energy is scattered equally in all directions
  • Integrating over the hemisphere gives a factor of π
  • Dividing by π gives us exactly what we saw before:

$L_o(\mathbf{v}) = \frac{\mathbf{c}_{\text{diff}}}{\pi} \otimes \sum_{k=1}^{n} E_{L_k} \cos\theta_{i_k}$
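A quick worked example with made-up numbers: a gray Lambertian surface with $c_{\text{diff}} = 0.6$ lit by a single light of irradiance $E_L = \pi$ arriving at $\theta_i = 60^\circ$ gives

$L_o = \frac{0.6}{\pi} \cdot \pi \cdot \cos 60^\circ = 0.6 \cdot 0.5 = 0.3$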

19. We'll start with our specular shader for directional light and add textures to it

20. [Image: the texture for the ship]

21. We add a Texture2D variable called ModelTexture
• We also add a SamplerState structure that specifies how to filter the texture

Texture2D ModelTexture;
SamplerState ModelTextureSampler
{
    MinFilter = Linear;
    MagFilter = Linear;
    MipFilter = Linear;
    AddressU = Clamp;
    AddressV = Clamp;
};
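On the C# side, the texture can be loaded through the content pipeline and handed to the shader; a sketch, assuming a hypothetical content asset named "ShipTexture":

    // Load the texture and bind it to the shader's ModelTexture parameter
    Texture2D shipTexture = Content.Load<Texture2D>("ShipTexture");
    effect.Parameters["ModelTexture"].SetValue(shipTexture);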

22. We add a texture coordinate to the input and the output of the vertex shader

struct VertexShaderInput
{
    float4 Position : POSITION0;
    float3 Normal : NORMAL0;
    float2 Texture : TEXCOORD0;
};

struct VertexShaderOutput
{
    float4 Position : POSITION0;
    float4 Color : COLOR0;
    float3 Normal : NORMAL0;
    float4 WorldPosition : POSITIONT;
    float2 Texture : TEXCOORD0;
};

23. Almost nothing changes here except that we copy the input texture coordinate into the output

VertexShaderOutput VertexShaderFunction(VertexShaderInput input)
{
    VertexShaderOutput output;
    float4 worldPosition = mul(input.Position, World);
    output.WorldPosition = worldPosition;
    float4 viewPosition = mul(worldPosition, View);
    output.Position = mul(viewPosition, Projection);
    float3 normal = normalize(mul(input.Normal, (float3x3)WorldInverseTranspose));
    float lightIntensity = dot(normal, normalize(DiffuseLightDirection));
    output.Color = saturate(DiffuseColor * DiffuseIntensity * lightIntensity);
    output.Normal = normal;
    output.Texture = input.Texture;
    return output;
}

24. We have to pull the color from the texture and set its alpha to 1
• Then scale the components of the color by the texture color

float4 PixelShaderFunction(VertexShaderOutput input) : COLOR0
{
    float3 light = normalize(DiffuseLightDirection);
    float3 normal = normalize(input.Normal);
    float3 reflect = normalize(2 * dot(light, normal) * normal - light);
    float3 view = normalize(Camera - (float3)input.WorldPosition);
    float dotProduct = dot(reflect, view);
    float4 textureColor = ModelTexture.Sample(ModelTextureSampler, input.Texture);
    textureColor.a = 1;
    float4 specular = (8 + Shininess) / (8 * PI) * SpecularIntensity * SpecularColor *
        pow(saturate(dotProduct), Shininess);
    return saturate(textureColor * input.Color + AmbientColor * AmbientIntensity + specular);
}
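To see the textured, lit model on screen, each mesh part of the loaded model can be pointed at the custom effect before drawing; a minimal sketch, assuming a loaded Model named model and that the matrix and light parameters have already been set (names hypothetical):

    // Replace each part's default BasicEffect with our custom effect, then draw
    foreach (ModelMesh mesh in model.Meshes)
    {
        foreach (ModelMeshPart part in mesh.MeshParts)
            part.Effect = effect;
        mesh.Draw();
    }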
