You can not select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
 
 
 
 
 

731 lines
34 KiB

#pragma kernel RaytracingAreaShadowPrepass
#pragma kernel RaytracingAreaShadowNewSample
#pragma kernel RaytracingDirectionalShadowSample
#pragma kernel RaytracingPointShadowSample SEGMENT_SHADOW_SAMPLE=RaytracingPointShadowSample POINT_LIGHT
#pragma kernel RaytracingSpotShadowSample SEGMENT_SHADOW_SAMPLE=RaytracingSpotShadowSample SPOT_LIGHT
#pragma kernel RaytracingProjectorPyramidShadowSample SEGMENT_SHADOW_SAMPLE=RaytracingProjectorPyramidShadowSample PYRAMID_LIGHT
#pragma kernel RaytracingProjectorBoxShadowSample
#pragma kernel ClearShadowTexture
#pragma kernel OutputShadowTexture OUTPUT_SHADOW_TEXTURE=OutputShadowTexture
#pragma kernel OutputColorShadowTexture OUTPUT_SHADOW_TEXTURE=OutputColorShadowTexture COLOR_SHADOW
#pragma kernel OutputSpecularShadowTexture OUTPUT_SHADOW_TEXTURE=OutputSpecularShadowTexture SPECULAR_SHADOW
// Given that the algorithm requires BSDF evaluation, we need to define this macro
#define HAS_LIGHTLOOP
// Skip the rasterized area-shadow evaluation, since this pass replaces it with the ray traced result
#define SKIP_RASTERIZED_AREA_SHADOWS
// Given that this pass does not use the shadow algorithm multi-compile, we need to define SHADOW_LOW to quiet the shadow algorithm error
#define SHADOW_LOW
// Include and define the shader pass
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/ShaderPass/ShaderPass.cs.hlsl"
#define SHADERPASS SHADERPASS_RAYTRACING
// HDRP generic includes
#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Common.hlsl"
#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Color.hlsl"
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/ShaderLibrary/ShaderVariables.hlsl"
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Lighting.hlsl"
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/Material/Material.hlsl"
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/LightLoop/LightLoopDef.hlsl"
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/Material/Lit/Lit.hlsl"
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDStencilUsage.cs.hlsl"
// Raytracing includes
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/ShaderVariablesRaytracing.hlsl"
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/RaytracingSampling.hlsl"
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/Shadows/SphericalQuad.hlsl"
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/Shadows/SphericalSphere.hlsl"
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/Shadows/SphericalCone.hlsl"
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/Shadows/SphericalPyramid.hlsl"
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/Shadows/RaytracingMIS.hlsl"
#include "Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/Shadows/RayTracingShadowUtilities.hlsl"
#pragma only_renderers d3d11 xboxseries ps5
// #pragma enable_d3d11_debug_symbols
// Tile size of this compute
#define RAYTRACING_SHADOW_TILE_SIZE 8
#define AREA_SHADOW_CLAMP_VALUE 10.0
// Depth buffer used to reconstruct the world space position of each pixel
TEXTURE2D_X(_DepthTexture);
// Flag value that defines if a given pixel is deferred or not
TEXTURE2D_X_UINT2(_StencilTexture);
// Output buffers of the shadows raytrace shader
RW_TEXTURE2D_X(float2, _AnalyticProbBuffer);
RW_TEXTURE2D_X(float2, _RaytracedAreaShadowSample);
RW_TEXTURE2D_X(float2, _RaytracedAreaShadowIntegration);
RW_TEXTURE2D_X(float4, _RaytracingDirectionBuffer);
RW_TEXTURE2D_X(float, _RayTracingLengthBuffer);
// Prepass that evaluates the data required for ray traced area-light shadows:
// clears the per-pixel buffers, computes the analytic (unoccluded) lighting value U,
// draws the first sample of the frame and stores the resulting ray direction/length
// for the subsequent ray tracing pass.
[numthreads(RAYTRACING_SHADOW_TILE_SIZE, RAYTRACING_SHADOW_TILE_SIZE, 1)]
void RaytracingAreaShadowPrepass(uint3 dispatchThreadId : SV_DispatchThreadID, uint2 groupThreadId : SV_GroupThreadID, uint2 groupId : SV_GroupID)
{
    UNITY_XR_ASSIGN_VIEW_INDEX(dispatchThreadId.z);

    // Compute the pixel position to process
    uint2 currentCoord = groupId * RAYTRACING_SHADOW_TILE_SIZE + groupThreadId;

    // The value -1.0 is used to identify an invalid pixel (skipped by the later passes)
    _AnalyticProbBuffer[COORD_TEXTURE2D_X(currentCoord)] = float2(-1.0, -1.0);
    _RaytracedAreaShadowIntegration[COORD_TEXTURE2D_X(currentCoord)] = float2(0.0, 0.0);

    // Values that need to be defined per sample
    _RaytracedAreaShadowSample[COORD_TEXTURE2D_X(currentCoord)] = float2(0.0, 0.0);
    _RaytracingDirectionBuffer[COORD_TEXTURE2D_X(currentCoord)] = float4(0.0, 0.0, 0.0, 0.0);
    _RayTracingLengthBuffer[COORD_TEXTURE2D_X(currentCoord)] = 0.0;

    // Read the depth and stencil values; background or unlit pixels keep the invalid defaults written above
    float depthValue = LOAD_TEXTURE2D_X(_DepthTexture, currentCoord).x;
    uint stencilValue = GetStencilValue(LOAD_TEXTURE2D_X(_StencilTexture, currentCoord));
    if (depthValue == UNITY_RAW_FAR_CLIP_VALUE || (stencilValue & STENCILUSAGE_IS_UNLIT) != 0)
        return;

    // Compute the position input structure
    PositionInputs posInput = GetPositionInput(currentCoord, _ScreenSize.zw, depthValue, UNITY_MATRIX_I_VP, GetWorldToViewMatrix(), 0);

    // Convert this to a world space position
    const float3 positionWS = posInput.positionWS;

    // Compute the view vector on the surface
    const float3 viewWS = GetWorldSpaceNormalizeViewDir(posInput.positionWS);

    // Fetch the data of the light
    LightData lightData = _LightDatas[_RaytracingTargetLight];

    // Compute the current sample index (first sample of this frame's sequence)
    uint globalSampleIndex = _RaytracingFrameIndex * _RaytracingNumSamples;

    // Generate the new sample (following values of the sequence)
    float2 noiseValue;
    noiseValue.x = GetBNDSequenceSample(currentCoord, globalSampleIndex, 0);
    noiseValue.y = GetBNDSequenceSample(currentCoord, globalSampleIndex, 1);

    // Sampled point on the light; needs to be overridden by the sampling technique below
    float3 outputPosition = float3(0.0, 0.0, 0.0);

    // Is this pixel shaded by the deferred path (i.e. gbuffer data is available to decode)?
    bool pixelIsDeferred = (stencilValue & STENCILUSAGE_REQUIRES_DEFERRED_LIGHTING) != 0;
    if (pixelIsDeferred)
    {
        // Let's now decode the BSDF data from the gbuffer
        BSDFData bsdfData;
        ZERO_INITIALIZE(BSDFData, bsdfData);
        BuiltinData builtinData;
        ZERO_INITIALIZE(BuiltinData, builtinData);
        uint featureFlags = MATERIALFEATUREFLAGS_LIT_STANDARD;
        DecodeFromGBuffer(posInput.positionSS, featureFlags, bsdfData, builtinData);

        // Structure that holds all the input data for the MIS
        MISSamplingInput misInput;
        ZERO_INITIALIZE(MISSamplingInput, misInput);
        misInput.roughness = PerceptualRoughnessToRoughness(bsdfData.perceptualRoughness);
        misInput.viewWS = viewWS;
        misInput.positionWS = positionWS;
        misInput.rectDimension = lightData.size.xy;
        misInput.rectWSPos = lightData.positionRWS;
        misInput.noiseValue = noiseValue;

        // Setup and check the spherical rectangle
        SphQuad squad;
        ZERO_INITIALIZE(SphQuad, squad);
        if (!InitSphericalQuad(lightData, positionWS, bsdfData.normalWS, squad))
        {
            // We want this to be flagged as a proper shadow, and not a 0/0 case
            _RaytracedAreaShadowSample[COORD_TEXTURE2D_X(currentCoord)] = float2(0.0, 0.0);
            _AnalyticProbBuffer[COORD_TEXTURE2D_X(currentCoord)] = float2(-1.0, -1.0);
            return;
        }

        // Compute the local frame that matches the normal
        misInput.localToWorld = GetLocalFrame(bsdfData.normalWS);

        // Beyond a certain value of smoothness, we clamp due to the invalidity of the ratio BRDF / MIS.
        // TODO: investigate this and find a way to bypass it
        bsdfData.perceptualRoughness = ClampPerceptualRoughnessForRaytracing(bsdfData.perceptualRoughness);
        bsdfData.roughnessT = ClampRoughnessForRaytracing(bsdfData.roughnessT);
        bsdfData.roughnessB = ClampRoughnessForRaytracing(bsdfData.roughnessB);

        // Compute the prelight data
        PreLightData preLightData = GetPreLightData(viewWS, posInput, bsdfData);

        // Compute the direct lighting of the light (used for MIS)
        LightLoopContext context;
        // Given that the approximation used for LTC is completely different from what we would get from a real integration, we only rely on the non-textured intensity.
        // To achieve that, we disable the cookie so that EvaluateBSDF_Area does not sample it, and keep the previous cookie mode around to restore it after the evaluation.
        int cookieMode = lightData.cookieMode;
        lightData.cookieMode = COOKIEMODE_NONE;
        DirectLighting lighting = EvaluateBSDF_Area(context, viewWS, posInput, preLightData, lightData, bsdfData, builtinData);
        lighting.diffuse = lighting.diffuse * bsdfData.diffuseColor;
        lightData.cookieMode = cookieMode;

        // Compute the non-occluded analytic luminance value (clamped to limit fireflies)
        float U = clamp(Luminance(lighting.diffuse + lighting.specular) * GetCurrentExposureMultiplier(), 0.0, AREA_SHADOW_CLAMP_VALUE);

        // NOTE: Due to a VGPR optimisation we need to restore the previous light data (position, dimmer and other fields were overridden during the evaluation above)
        lightData = _LightDatas[_RaytracingTargetLight];

        // Here we need to evaluate the diffuse probability and the unshadowed lighting
        if (U < ANALYTIC_RADIANCE_THRESHOLD || !EvaluateMISProbabilties(lighting, bsdfData.perceptualRoughness, misInput.brdfProb))
        {
            // We want this to be flagged as a proper shadow, and not a 0/0 case
            _RaytracedAreaShadowSample[COORD_TEXTURE2D_X(currentCoord)] = float2(0.0, 0.0);
            _AnalyticProbBuffer[COORD_TEXTURE2D_X(currentCoord)] = float2(-1.0, -1.0);
            return;
        }

        // Structure that holds all the output data from the MIS
        MISSamplingOuput misOutput;
        ZERO_INITIALIZE(MISSamplingOuput, misOutput);

        // Pick the sampling technique
        EvaluateMISTechnique(misInput);

        // Generate the right MIS Sample
        bool validity = GenerateMISSample(misInput, squad, viewWS, misOutput);
        outputPosition = misOutput.pos;

        // If we could not sample, or the sample is not in the hemisphere, or the sample is on the backface of the light
        if (!validity || dot(misOutput.dir, bsdfData.normalWS) <= 0.0 || dot(misOutput.dir, lightData.forward) >= 0.0)
        {
            _RaytracedAreaShadowSample[COORD_TEXTURE2D_X(currentCoord)] = float2(0.0, 0.0);
            _AnalyticProbBuffer[COORD_TEXTURE2D_X(currentCoord)] = float2(U, misInput.brdfProb);
            return;
        }

        // Evaluate the lighting
        CBSDF cbsdf = EvaluateBSDF(viewWS, misOutput.dir, preLightData, bsdfData);
        float3 diffuseLighting = cbsdf.diffR;
        float3 specularLighting = cbsdf.specR;

        // Combine the light color with the light cookie color (if any)
        float3 lightColor = lightData.color;
        if (lightData.cookieMode != COOKIEMODE_NONE)
        {
            // Pick a cookie mip proportional to the surface roughness (pre-filtered cookie lookup)
            float cookieWidth = lightData.cookieScaleOffset.x * _CookieAtlasSize.x;
            float cookieSizePOT = round(LOG2_E * log(cookieWidth));
            lightColor *= SampleCookie2D(misOutput.sampleUV, lightData.cookieScaleOffset, bsdfData.perceptualRoughness * cookieSizePOT);
        }
        diffuseLighting *= bsdfData.diffuseColor * lightData.diffuseDimmer * lightColor;
        specularLighting *= lightData.specularDimmer * lightColor;

        // Compute the MIS weight
        float misPDF = lerp(misOutput.lightPDF, misOutput.brdfPDF, misInput.brdfProb);
        float3 radiance = misPDF > 0.0 ? (diffuseLighting + specularLighting) / misPDF : 0.0;

        // Accumulate (clamped to limit fireflies)
        float3 Un = clamp(radiance * GetCurrentExposureMultiplier(), 0.0, AREA_SHADOW_CLAMP_VALUE);

        // Compute luminance of Un, averaged over the total sample count
        float UnL = Luminance(Un) / _RaytracingNumSamples;

        // To avoid huge values on low PDFs (leading to potential precision issues),
        // we clip them proportionally to the unoccluded analytic value
        const float unoccludedThreshold = 10.0 * U;
        if (UnL > unoccludedThreshold)
        {
            UnL = unoccludedThreshold;
        }

        // Pass on the values to the output buffer (Sn, Un) and (U)
        _RaytracedAreaShadowSample[COORD_TEXTURE2D_X(currentCoord)] = float2(UnL, UnL);
        _AnalyticProbBuffer[COORD_TEXTURE2D_X(currentCoord)] = float2(U, misInput.brdfProb);
    }
    else
    {
        // Forward pixel: no gbuffer to decode, so fall back to pure light sampling using the normal buffer
        // Decode the world space normal
        NormalData normalData;
        DecodeFromNormalBuffer(currentCoord, normalData);

        // Setup and check the spherical rectangle
        SphQuad squad;
        if (!InitSphericalQuad(lightData, positionWS, normalData.normalWS, squad))
        {
            // We want this to be flagged as a proper shadow, and not a 0/0 case
            _RaytracedAreaShadowSample[COORD_TEXTURE2D_X(currentCoord)] = float2(0.0, 0.0);
            _AnalyticProbBuffer[COORD_TEXTURE2D_X(currentCoord)] = float2(-1.0, -1.0);
            return;
        }

        // Structure that holds all the output data from the light sampling
        LightSamplingOutput lightSamplingOutput;
        ZERO_INITIALIZE(LightSamplingOutput, lightSamplingOutput);

        // Generate the light sample
        GenerateLightSample(positionWS, noiseValue, squad, viewWS, lightSamplingOutput);
        outputPosition = lightSamplingOutput.pos;

        // If the sample is not in the hemisphere, or the sample is on the backface of the light
        if (dot(lightSamplingOutput.dir, normalData.normalWS) <= 0.0 || dot(lightSamplingOutput.dir, lightData.forward) >= 0.0)
        {
            _RaytracedAreaShadowSample[COORD_TEXTURE2D_X(currentCoord)] = float2(0.0, 0.0);
            _AnalyticProbBuffer[COORD_TEXTURE2D_X(currentCoord)] = float2(1.0, 1.0);
            return;
        }

        // Pass on the values to the output buffer (Sn, Un) and (U)
        _RaytracedAreaShadowSample[COORD_TEXTURE2D_X(currentCoord)] = float2(1.0, 1.0);
        _AnalyticProbBuffer[COORD_TEXTURE2D_X(currentCoord)] = float2(1.0, 1.0);
    }

    // Store the ray direction and the distance from the shading point to the sampled light position
    // (no bias is applied here; any offset is the responsibility of the ray tracing pass)
    float3 rayOrigin = positionWS;
    float3 rayDestination = outputPosition;
    float rayDistance = length(rayDestination - rayOrigin);
    float3 rayDirection = (rayDestination - rayOrigin) / rayDistance;
    _RaytracingDirectionBuffer[COORD_TEXTURE2D_X(currentCoord)] = float4(rayDirection, 1.0f);
    _RayTracingLengthBuffer[COORD_TEXTURE2D_X(currentCoord)] = rayDistance;
}
// Per-sample pass: for every sample after the first, regenerates a new MIS / light
// sample for the area light, reusing the analytic value (U, in .x) and BRDF probability
// (in .y) stored into _AnalyticProbBuffer by RaytracingAreaShadowPrepass.
[numthreads(RAYTRACING_SHADOW_TILE_SIZE, RAYTRACING_SHADOW_TILE_SIZE, 1)]
void RaytracingAreaShadowNewSample(uint3 dispatchThreadId : SV_DispatchThreadID, uint2 groupThreadId : SV_GroupThreadID, uint2 groupId : SV_GroupID)
{
    UNITY_XR_ASSIGN_VIEW_INDEX(dispatchThreadId.z);

    // Compute the pixel position to process
    uint2 currentCoord = groupId * RAYTRACING_SHADOW_TILE_SIZE + groupThreadId;

    // Read the depth and stencil values; skip background, unlit, and pixels flagged invalid (-1.0) by the prepass
    float depthValue = LOAD_TEXTURE2D_X(_DepthTexture, currentCoord).x;
    uint stencilValue = GetStencilValue(LOAD_TEXTURE2D_X(_StencilTexture, currentCoord));
    if (depthValue == UNITY_RAW_FAR_CLIP_VALUE || (stencilValue & STENCILUSAGE_IS_UNLIT) != 0 || _AnalyticProbBuffer[COORD_TEXTURE2D_X(currentCoord)].x < 0.0)
    {
        _RaytracedAreaShadowSample[COORD_TEXTURE2D_X(currentCoord)] = float2(0.0, 0.0);
        return;
    }

    // Compute the position input structure
    PositionInputs posInput = GetPositionInput(currentCoord, _ScreenSize.zw, depthValue, UNITY_MATRIX_I_VP, GetWorldToViewMatrix(), 0);

    // Convert this to a world space position
    const float3 positionWS = posInput.positionWS;

    // Compute the view vector on the surface
    const float3 viewWS = GetWorldSpaceNormalizeViewDir(positionWS);

    // Fetch the data of the light
    LightData lightData = _LightDatas[_RaytracingTargetLight];

    // Compute the current sample index (this pass is dispatched once per additional sample)
    uint globalSampleIndex = _RaytracingFrameIndex * _RaytracingNumSamples + _RaytracingSampleIndex;

    // Generate the new sample (following values of the sequence)
    float2 noiseValue;
    noiseValue.x = GetBNDSequenceSample(currentCoord, globalSampleIndex, 0);
    noiseValue.y = GetBNDSequenceSample(currentCoord, globalSampleIndex, 1);

    // Sampled point on the light; needs to be overridden by the sampling technique below
    float3 outputPosition = float3(0.0, 0.0, 0.0);

    // Is this pixel shaded by the deferred path (i.e. gbuffer data is available to decode)?
    bool pixelIsDeferred = (stencilValue & STENCILUSAGE_REQUIRES_DEFERRED_LIGHTING) != 0;
    if (pixelIsDeferred)
    {
        // Let's now decode the BSDF data from the gbuffer
        BSDFData bsdfData;
        ZERO_INITIALIZE(BSDFData, bsdfData);
        BuiltinData builtinData;
        ZERO_INITIALIZE(BuiltinData, builtinData);
        // Decode BSDF Data
        uint featureFlags = MATERIALFEATUREFLAGS_LIT_STANDARD;
        DecodeFromGBuffer(posInput.positionSS, featureFlags, bsdfData, builtinData);

        // Beyond a certain value of smoothness, we clamp due to the invalidity of the ratio BRDF / MIS.
        // TODO: investigate this and find a way to bypass it
        bsdfData.perceptualRoughness = ClampPerceptualRoughnessForRaytracing(bsdfData.perceptualRoughness);
        bsdfData.roughnessT = ClampRoughnessForRaytracing(bsdfData.roughnessT);
        bsdfData.roughnessB = ClampRoughnessForRaytracing(bsdfData.roughnessB);

        // Compute the prelight data
        PreLightData preLightData = GetPreLightData(viewWS, posInput, bsdfData);

        // Our shader only processes luminance; reuse the analytic value computed by the prepass
        float U = _AnalyticProbBuffer[COORD_TEXTURE2D_X(currentCoord)].x;

        // Structure that holds all the input data for the MIS
        MISSamplingInput misInput;
        ZERO_INITIALIZE(MISSamplingInput, misInput);
        misInput.roughness = PerceptualRoughnessToRoughness(bsdfData.perceptualRoughness);
        misInput.viewWS = viewWS;
        misInput.positionWS = positionWS;
        misInput.rectDimension = lightData.size.xy;
        misInput.rectWSPos = lightData.positionRWS;
        misInput.brdfProb = _AnalyticProbBuffer[COORD_TEXTURE2D_X(currentCoord)].y;
        misInput.noiseValue = noiseValue;

        // Setup the spherical rectangle (validity was already established by the prepass, so the unchecked overload is used)
        SphQuad squad;
        InitSphericalQuad(lightData, positionWS, squad);

        // Compute the local frame that matches the normal
        misInput.localToWorld = GetLocalFrame(bsdfData.normalWS);

        // Structure that holds all the output data from the MIS
        MISSamplingOuput misOutput;
        ZERO_INITIALIZE(MISSamplingOuput, misOutput);

        // Pick the sampling technique
        EvaluateMISTechnique(misInput);

        // Generate the right MIS Sample
        bool validity = GenerateMISSample(misInput, squad, viewWS, misOutput);
        outputPosition = misOutput.pos;

        // If we could not sample, or the sample is not in the hemisphere, or the sample is on the backface of the light
        if (!validity || dot(misOutput.dir, bsdfData.normalWS) <= 0.0 || dot(misOutput.dir, lightData.forward) >= 0.0)
        {
            _RaytracedAreaShadowSample[COORD_TEXTURE2D_X(currentCoord)] = float2(0.0, 0.0);
            return;
        }

        // Evaluate the lighting
        CBSDF cbsdf = EvaluateBSDF(viewWS, misOutput.dir, preLightData, bsdfData);
        float3 diffuseLighting = cbsdf.diffR;
        float3 specularLighting = cbsdf.specR;

        // Combine the light color with the light cookie color (if any)
        float3 lightColor = lightData.color;
        if (lightData.cookieMode != COOKIEMODE_NONE)
        {
            // Pick a cookie mip proportional to the surface roughness (pre-filtered cookie lookup)
            float cookieWidth = lightData.cookieScaleOffset.x * _CookieAtlasSize.x;
            float cookieSizePOT = round(LOG2_E * log(cookieWidth));
            lightColor *= SampleCookie2D(misOutput.sampleUV, lightData.cookieScaleOffset, bsdfData.perceptualRoughness * cookieSizePOT);
        }
        diffuseLighting *= bsdfData.diffuseColor * lightData.diffuseDimmer * lightColor;
        specularLighting *= lightData.specularDimmer * lightColor;

        // Compute the MIS weight
        float misPDF = lerp(misOutput.lightPDF, misOutput.brdfPDF, misInput.brdfProb);
        float3 radiance = misPDF > 0.0 ? (diffuseLighting + specularLighting) / misPDF : 0.0;

        // Accumulate (clamped to limit fireflies)
        float3 Un = clamp(radiance * GetCurrentExposureMultiplier(), 0.0, AREA_SHADOW_CLAMP_VALUE);

        // Compute luminance of Un, averaged over the total sample count
        float UnL = Luminance(Un) / _RaytracingNumSamples;

        // To avoid huge values on low PDFs (leading to potential precision issues),
        // we clip them proportionally to the unoccluded analytic value
        const float unoccludedThreshold = 10.0 * U;
        if (UnL > unoccludedThreshold)
        {
            UnL = unoccludedThreshold;
        }

        // Pass on the values to the output buffer (Sn, Un) and (U)
        _RaytracedAreaShadowSample[COORD_TEXTURE2D_X(currentCoord)] = float2(UnL, UnL);
    }
    else
    {
        // Forward pixel: no gbuffer to decode, so fall back to pure light sampling using the normal buffer
        // Decode the world space normal
        NormalData normalData;
        DecodeFromNormalBuffer(currentCoord, normalData);

        // Setup and check the spherical rectangle
        SphQuad squad;
        if (!InitSphericalQuad(lightData, positionWS, normalData.normalWS, squad))
        {
            // We want this to be flagged as a proper shadow, and not a 0/0 case
            _RaytracedAreaShadowSample[COORD_TEXTURE2D_X(currentCoord)] = float2(0.0, 0.0);
            return;
        }

        // Structure that holds all the output data from the light sampling
        LightSamplingOutput lightSamplingOutput;
        ZERO_INITIALIZE(LightSamplingOutput, lightSamplingOutput);

        // Generate the light sample
        GenerateLightSample(positionWS, noiseValue, squad, viewWS, lightSamplingOutput);
        outputPosition = lightSamplingOutput.pos;

        // If the sample is not in the hemisphere, or the sample is on the backface of the light
        if (dot(lightSamplingOutput.dir, normalData.normalWS) <= 0.0 || dot(lightSamplingOutput.dir, lightData.forward) >= 0.0)
        {
            _RaytracedAreaShadowSample[COORD_TEXTURE2D_X(currentCoord)] = float2(0.0, 0.0);
            return;
        }

        // Pass on the values to the output buffer (Sn, Un) and (U)
        _RaytracedAreaShadowSample[COORD_TEXTURE2D_X(currentCoord)] = float2(1.0, 1.0);
    }

    // Store the ray direction and the distance from the shading point to the sampled light position
    // (no bias is applied here; any offset is the responsibility of the ray tracing pass)
    float3 rayOrigin = positionWS;
    float3 rayDestination = outputPosition;
    float rayDistance = length(rayDestination - rayOrigin);
    float3 rayDirection = (rayDestination - rayOrigin) / rayDistance;
    _RaytracingDirectionBuffer[COORD_TEXTURE2D_X(currentCoord)] = float4(rayDirection, 1.0f);
    _RayTracingLengthBuffer[COORD_TEXTURE2D_X(currentCoord)] = rayDistance;
}
// Generates, for every valid pixel, the cone-sampled direction used to ray trace
// directional (sun) light shadows, writing it into _RaytracingDirectionBuffer
// (xyz = direction, w = 1.0 for valid pixels, -1.0 for background/unlit pixels).
[numthreads(RAYTRACING_SHADOW_TILE_SIZE, RAYTRACING_SHADOW_TILE_SIZE, 1)]
void RaytracingDirectionalShadowSample(uint3 dispatchThreadId : SV_DispatchThreadID, uint2 groupThreadId : SV_GroupThreadID, uint2 groupId : SV_GroupID)
{
    UNITY_XR_ASSIGN_VIEW_INDEX(dispatchThreadId.z);

    // Compute the pixel position to process
    uint2 currentCoord = groupId * RAYTRACING_SHADOW_TILE_SIZE + groupThreadId;

    // Read the depth and stencil values; background and unlit pixels are flagged invalid (w = -1.0)
    float depthValue = LOAD_TEXTURE2D_X(_DepthTexture, currentCoord).x;
    uint stencilValue = GetStencilValue(LOAD_TEXTURE2D_X(_StencilTexture, currentCoord));
    if (depthValue == UNITY_RAW_FAR_CLIP_VALUE || (stencilValue & STENCILUSAGE_IS_UNLIT) != 0)
    {
        _RaytracingDirectionBuffer[COORD_TEXTURE2D_X(currentCoord)] = float4(0.0, 0.0, 0.0, -1.0);
        return;
    }

    // NOTE: the previous implementation also reconstructed the world space position (GetPositionInput)
    // and decoded the normal buffer here, but neither value was used; those dead per-pixel loads
    // have been removed.

    // Fetch the data of the light
    DirectionalLightData lightData = _DirectionalLightDatas[_DirectionalShadowIndex];

    // Compute the current sample index
    int globalSampleIndex = _RaytracingFrameIndex * _RaytracingNumSamples + _RaytracingSampleIndex;

    // Generate the new sample (following values of the sequence)
    float2 noiseValue;
    noiseValue.x = GetBNDSequenceSample(currentCoord, globalSampleIndex, 0);
    noiseValue.y = GetBNDSequenceSample(currentCoord, globalSampleIndex, 1);

    // Create the local ortho basis around the (reversed) light direction
    float3x3 localToWorld = GetLocalFrame(-lightData.forward);

    // Sample the cone subtended by the light; angularDiameter is a full angle, so halve it to get the cone half-angle
    float3 localDir = SampleConeUniform(noiseValue.x, noiseValue.y, cos(lightData.angularDiameter * 0.5));
    float3 wsDir = mul(localDir, localToWorld);

    // Output the direction to the target uav
    _RaytracingDirectionBuffer[COORD_TEXTURE2D_X(currentCoord)] = float4(wsDir, 1.0f);
}
// Used by Point, Cone (Spot) and Pyramid shaped lights. This kernel is instantiated once per
// shape through the SEGMENT_SHADOW_SAMPLE define and the POINT_LIGHT / SPOT_LIGHT / PYRAMID_LIGHT
// keywords (see the #pragma kernel lines at the top of the file). It generates the shadow ray
// (direction, length and pdf) towards a sampled point on the light.
[numthreads(RAYTRACING_SHADOW_TILE_SIZE, RAYTRACING_SHADOW_TILE_SIZE, 1)]
void SEGMENT_SHADOW_SAMPLE(uint3 dispatchThreadId : SV_DispatchThreadID, uint2 groupThreadId : SV_GroupThreadID, uint2 groupId : SV_GroupID)
{
    UNITY_XR_ASSIGN_VIEW_INDEX(dispatchThreadId.z);

    // Compute the pixel position to process
    uint2 currentCoord = groupId * RAYTRACING_SHADOW_TILE_SIZE + groupThreadId;

    // Read the depth value and if this is a background pixel, early exit (fully shadowed sentinel)
    float depthValue = LOAD_TEXTURE2D_X(_DepthTexture, currentCoord).x;
    if (depthValue == UNITY_RAW_FAR_CLIP_VALUE)
    {
        _RaytracingDirectionBuffer[COORD_TEXTURE2D_X(currentCoord)] = float4(0.0, 0.0, 0.0, POINT_BLACK_PDF);
        return;
    }

    // Read the stencil value, and if this is unlit, early exit (fully visible sentinel).
    uint stencilValue = GetStencilValue(LOAD_TEXTURE2D_X(_StencilTexture, currentCoord));
    if ((stencilValue & STENCILUSAGE_IS_UNLIT) != 0)
    {
        _RaytracingDirectionBuffer[COORD_TEXTURE2D_X(currentCoord)] = float4(0.0, 0.0, 0.0, POINT_WHITE_PDF);
        return;
    }

    // Compute the position input structure
    PositionInputs posInput = GetPositionInput(currentCoord, _ScreenSize.zw, depthValue, UNITY_MATRIX_I_VP, GetWorldToViewMatrix(), 0);

    // Decode the world space normal
    NormalData normalData;
    DecodeFromNormalBuffer(currentCoord, normalData);

    // Fetch the data of the target light
    LightData lightData = _LightDatas[_RaytracingTargetLight];

    // Evaluate the squared distance of the point to the light
    float dist2 = DistSqrToLight(lightData, posInput.positionWS);

    // If the point is inside the volume of the light, we will consider it as a non occlusion.
    if (dist2 < _RaytracingLightRadius * _RaytracingLightRadius)
    {
        _RaytracingDirectionBuffer[COORD_TEXTURE2D_X(currentCoord)] = float4(0.0, 0.0, 0.0, POINT_WHITE_PDF);
        return;
    }

    // If the point is outside the culling region of the light, we need to skip it.
    // Note the different sentinels: out-of-range point lights are treated as fully lit,
    // out-of-range spot/pyramid lights as fully shadowed.
    #if POINT_LIGHT
    if (!PositionInPointRange(lightData, dist2))
    {
        _RaytracingDirectionBuffer[COORD_TEXTURE2D_X(currentCoord)] = float4(0.0, 0.0, 0.0, POINT_WHITE_PDF);
        return;
    }
    #endif
    #if SPOT_LIGHT
    if (!PositionInSpotRange(lightData, _RaytracingLightAngle, posInput.positionWS, dist2))
    {
        _RaytracingDirectionBuffer[COORD_TEXTURE2D_X(currentCoord)] = float4(0.0, 0.0, 0.0, POINT_BLACK_PDF);
        return;
    }
    #endif
    #if PYRAMID_LIGHT
    if (!PositionInPyramidRange(lightData, _RaytracingLightSizeX, _RaytracingLightSizeY, posInput.positionWS, dist2))
    {
        _RaytracingDirectionBuffer[COORD_TEXTURE2D_X(currentCoord)] = float4(0.0, 0.0, 0.0, POINT_BLACK_PDF);
        return;
    }
    #endif

    // Compute the current sample index
    int globalSampleIndex = _RaytracingFrameIndex * _RaytracingNumSamples + _RaytracingSampleIndex;

    // Generate the new sample (following values of the sequence)
    float2 noiseValue;
    noiseValue.x = GetBNDSequenceSample(currentCoord, globalSampleIndex, 0);
    noiseValue.y = GetBNDSequenceSample(currentCoord, globalSampleIndex, 1);

    // Generate the sample on the light shape; a negligible radius falls back to the light center (hard shadow, pdf 1.0)
    float3 lightPosition = float3(0.0, 0.0, 0.0);
    float samplePDF = 1.0;
    if (_RaytracingLightRadius > 0.001)
    {
        #if POINT_LIGHT
        SampleSphericalSphere(lightData.positionRWS, _RaytracingLightRadius, noiseValue.x, noiseValue.y, posInput.positionWS, lightPosition, samplePDF);
        #elif SPOT_LIGHT
        SampleSphericalCone(lightData.positionRWS, _RaytracingLightRadius, lightData.forward, _RaytracingLightAngle, noiseValue.x, noiseValue.y, lightPosition, samplePDF);
        #elif PYRAMID_LIGHT
        SampleSphericalPyramid(lightData.positionRWS, _RaytracingLightRadius, lightData.forward, normalize(lightData.right), normalize(lightData.up), _RaytracingLightSizeX, _RaytracingLightSizeY, noiseValue.x, noiseValue.y, lightPosition, samplePDF);
        #endif
    }
    else
    {
        lightPosition = lightData.positionRWS;
    }

    // Compute the ray length and ray direction
    float3 rayDirection = lightPosition - posInput.positionWS;
    float rayLength = length(rayDirection);
    rayDirection = rayDirection / rayLength;

    // If the normal of this pixel cannot face the light, we invalidate it
    float finalPDF = dot(normalData.normalWS, rayDirection) > 0.0 ? samplePDF : POINT_BACK_FACE_PDF;

    // Output the direction to the target uav
    _RaytracingDirectionBuffer[COORD_TEXTURE2D_X(currentCoord)] = float4(rayDirection, finalPDF);
    _RayTracingLengthBuffer[COORD_TEXTURE2D_X(currentCoord)] = rayLength;
}
// Used by Box shaped Spot Lights: every valid pixel gets a parallel shadow ray
// aimed along the light axis towards the plane of the light.
[numthreads(RAYTRACING_SHADOW_TILE_SIZE, RAYTRACING_SHADOW_TILE_SIZE, 1)]
void RaytracingProjectorBoxShadowSample(uint3 dispatchThreadId : SV_DispatchThreadID, uint2 groupThreadId : SV_GroupThreadID, uint2 groupId : SV_GroupID)
{
    UNITY_XR_ASSIGN_VIEW_INDEX(dispatchThreadId.z);

    // Pixel processed by this thread
    uint2 pixelCoord = groupId * RAYTRACING_SHADOW_TILE_SIZE + groupThreadId;

    // Background pixels are tagged as fully shadowed
    float depth = LOAD_TEXTURE2D_X(_DepthTexture, pixelCoord).x;
    if (depth == UNITY_RAW_FAR_CLIP_VALUE)
    {
        _RaytracingDirectionBuffer[COORD_TEXTURE2D_X(pixelCoord)] = float4(0.0, 0.0, 0.0, POINT_BLACK_PDF);
        return;
    }

    // Unlit pixels are tagged as fully visible
    uint stencil = GetStencilValue(LOAD_TEXTURE2D_X(_StencilTexture, pixelCoord));
    if ((stencil & STENCILUSAGE_IS_UNLIT) != 0)
    {
        _RaytracingDirectionBuffer[COORD_TEXTURE2D_X(pixelCoord)] = float4(0.0, 0.0, 0.0, POINT_WHITE_PDF);
        return;
    }

    // Reconstruct the world space position and fetch the surface normal
    PositionInputs posInput = GetPositionInput(pixelCoord, _ScreenSize.zw, depth, UNITY_MATRIX_I_VP, GetWorldToViewMatrix(), 0);
    NormalData normalData;
    DecodeFromNormalBuffer(pixelCoord, normalData);

    // Fetch the data of the target light
    LightData lightData = _LightDatas[_RaytracingTargetLight];

    // Box lights emit parallel rays: the direction is the reversed light forward axis, and the
    // ray length is the distance to the plane through the light origin perpendicular to that axis.
    float3 dirToLight = normalize(-lightData.forward);
    float distToLightPlane = dot(lightData.positionRWS - posInput.positionWS, dirToLight);
    float sqDist = distToLightPlane * distToLightPlane;

    // Pixels outside of the light's box range are fully shadowed
    if (!PositionInBoxRange(lightData, _RaytracingLightSizeX, _RaytracingLightSizeY, posInput.positionWS, sqDist))
    {
        _RaytracingDirectionBuffer[COORD_TEXTURE2D_X(pixelCoord)] = float4(0.0, 0.0, 0.0, POINT_BLACK_PDF);
        return;
    }

    // Surfaces that face away from the light are invalidated
    float pdf = dot(normalData.normalWS, dirToLight) > 0.0 ? 1.0f : POINT_BACK_FACE_PDF;

    // Output the ray description to the target UAVs
    _RaytracingDirectionBuffer[COORD_TEXTURE2D_X(pixelCoord)] = float4(dirToLight, pdf);
    _RayTracingLengthBuffer[COORD_TEXTURE2D_X(pixelCoord)] = distToLightPlane;
}
RW_TEXTURE2D_X(float4, _RaytracedShadowIntegration);
// Resets the shadow integration buffer to zero before the accumulation passes run.
[numthreads(RAYTRACING_SHADOW_TILE_SIZE, RAYTRACING_SHADOW_TILE_SIZE, 1)]
void ClearShadowTexture(uint3 dispatchThreadId : SV_DispatchThreadID, uint2 groupThreadId : SV_GroupThreadID, uint2 groupId : SV_GroupID)
{
    UNITY_XR_ASSIGN_VIEW_INDEX(dispatchThreadId.z);
    // Pixel this thread is responsible for
    const uint2 pixelCoord = groupId * RAYTRACING_SHADOW_TILE_SIZE + groupThreadId;
    // Clear the accumulation value
    _RaytracedShadowIntegration[COORD_TEXTURE2D_X(pixelCoord)] = 0.0;
}
// Slot in which a shadow texture should be copied
RWTexture2DArray<float4> _ScreenSpaceShadowsTextureRW;

// Composites the accumulated shadow integration into its slot of the screen space shadow
// texture array. Instantiated three times via OUTPUT_SHADOW_TEXTURE: default (one channel
// selected by _RaytracingChannelMask), COLOR_SHADOW (rgb) and SPECULAR_SHADOW (two channels
// selected by _RaytracingChannelMask0/1).
[numthreads(RAYTRACING_SHADOW_TILE_SIZE, RAYTRACING_SHADOW_TILE_SIZE, 1)]
void OUTPUT_SHADOW_TEXTURE(uint3 dispatchThreadId : SV_DispatchThreadID, uint2 groupThreadId : SV_GroupThreadID, uint2 groupId : SV_GroupID)
{
    UNITY_XR_ASSIGN_VIEW_INDEX(dispatchThreadId.z);

    // Compute the pixel position to process
    uint2 currentCoord = groupId * RAYTRACING_SHADOW_TILE_SIZE + groupThreadId;

    // Offset shadow slot based on XR setup
    uint shadowSlot = INDEX_TEXTURE2D_ARRAY_X(_RaytracingShadowSlot);

    // Fetch the previously stored values
    float4 previousShadowValues = _ScreenSpaceShadowsTextureRW[uint3(currentCoord, shadowSlot)];

    #if defined(COLOR_SHADOW)
    // If this is a color shadow, it must be stored in the first three channels; override those while keeping the previous value in w
    previousShadowValues.xyz = _RaytracedShadowIntegration[COORD_TEXTURE2D_X(currentCoord)].xyz;
    _ScreenSpaceShadowsTextureRW[uint3(currentCoord, shadowSlot)] = previousShadowValues;
    #elif defined(SPECULAR_SHADOW)
    // Specular shadows occupy two channels, selected by the two channel masks. The max with 0 discards invalid values that may come from rthandle resizing
    _ScreenSpaceShadowsTextureRW[uint3(currentCoord, shadowSlot)] = (1.0 - _RaytracingChannelMask) * max(0, previousShadowValues)
    + _RaytracingChannelMask0 * _RaytracedShadowIntegration[COORD_TEXTURE2D_X(currentCoord)].x
    + _RaytracingChannelMask1 * _RaytracedShadowIntegration[COORD_TEXTURE2D_X(currentCoord)].y;
    #else
    // Otherwise we only override the channel we are interested in. We use a max with 0 to avoid using invalid values that may come from rthandle resizing
    _ScreenSpaceShadowsTextureRW[uint3(currentCoord, shadowSlot)] = (1.0 - _RaytracingChannelMask) * max(0, previousShadowValues)
    + _RaytracingChannelMask * _RaytracedShadowIntegration[COORD_TEXTURE2D_X(currentCoord)].x;
    #endif
}