HBAO_AO.cginc

//----------------------------------------------------------------------------------
//
// Copyright (c) 2014, NVIDIA CORPORATION. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//  * Redistributions of source code must retain the above copyright
//    notice, this list of conditions and the following disclaimer.
//  * Redistributions in binary form must reproduce the above copyright
//    notice, this list of conditions and the following disclaimer in the
//    documentation and/or other materials provided with the distribution.
//  * Neither the name of NVIDIA CORPORATION nor the names of its
//    contributors may be used to endorse or promote products derived
//    from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
//----------------------------------------------------------------------------------

#ifndef HBAO_AO_INCLUDED
#define HBAO_AO_INCLUDED

#include "UnityCG.cginc"
#include "HBAO_Common.cginc"
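
// Reconstructs the view-space position of a pixel in the current deinterleaved
// layer from its linearized depth and the UV-to-view transform.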
inline float3 FetchLayerViewPos(float2 uv) {
    float depth = LinearizeDepth(SAMPLE_DEPTH_TEXTURE(_DepthTex, uv));
#if ORTHOGRAPHIC_PROJECTION
    return float3((uv * _UVToView.xy + _UVToView.zw), depth);
#else
    return float3((uv * _UVToView.xy + _UVToView.zw) * depth, depth);
#endif
}
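
// Quadratic distance falloff: 1 at the shaded point, reaching 0 at the AO radius
// (assuming _NegInvRadius2 holds -1 / radius^2).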
inline float Falloff(float distanceSquare) {
    // 1 scalar mad instruction
    return distanceSquare * _NegInvRadius2 + 1.0;
}
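
// Occlusion contribution of sample S as seen from P with surface normal N:
// the cosine of the horizon angle (biased by _AngleBias) attenuated by the distance falloff.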
inline float ComputeAO(float3 P, float3 N, float3 S) {
    float3 V = S - P;
    float VdotV = dot(V, V);
    float NdotV = dot(N, V) * rsqrt(VdotV);

    // Use saturate(x) instead of max(x,0.f) because that is faster on Kepler
    return saturate(NdotV - _AngleBias) * saturate(Falloff(VdotV));
}
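
// Rotates a 2D sampling direction by the (cos, sin) pair stored in rot.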
inline float2 RotateDirections(float2 dir, float2 rot) {
    return float2(dir.x * rot.x - dir.y * rot.y,
                  dir.x * rot.y + dir.y * rot.x);
}
inline float InterleavedGradientNoise(float2 screenPos) {
    // http://www.iryoku.com/downloads/Next-Generation-Post-Processing-in-Call-of-Duty-Advanced-Warfare-v18.pptx (slide 123)
    float3 magic = float3(0.06711056, 0.00583715, 52.9829189);
    return frac(magic.z * frac(dot(screenPos, magic.xy)));
}
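
// Returns the per-pixel (rotation, jitter) pair used to decorrelate the sampling
// pattern between neighboring pixels.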
inline float2 FetchNoise(float2 screenPos) {
#if INTERLEAVED_GRADIENT_NOISE
    // Use Jorge Jimenez's IGN noise and GTAO spatial offsets distribution
    // https://blog.selfshadow.com/publications/s2016-shading-course/activision/s2016_pbs_activision_occlusion.pdf (slide 93)
    return float2(InterleavedGradientNoise(screenPos), UNITY_SAMPLE_TEX2D(_NoiseTex, screenPos / 4.0).g);
#else
    // (cos(alpha), sin(alpha), jitter)
    return UNITY_SAMPLE_TEX2D(_NoiseTex, screenPos / 4.0).rg;
#endif
}
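
// Main AO pass: marches STEPS samples along DIRECTIONS screen-space directions
// around each pixel and accumulates horizon-based occlusion (and, optionally,
// color bleeding gathered from the sampled scene color).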
float4 AO_Frag(Varyings input) : SV_Target
{
    UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(input);

#ifdef DEINTERLEAVED
    float3 P = FetchLayerViewPos(input.uv);
#else
    float3 P = FetchViewPos(input.uv);
#endif

#ifndef DEBUG_VIEWNORMALS
    clip(_MaxDistance - P.z);
#endif

#if ORTHOGRAPHIC_PROJECTION
    float stepSize = min(_Radius, _MaxRadiusPixels) / (STEPS + 1.0);
#else
    float stepSize = min((_Radius / P.z), _MaxRadiusPixels) / (STEPS + 1.0);
#endif

#ifdef DEINTERLEAVED
    float3 N = UNITY_SAMPLE_SCREENSPACE_TEXTURE(_NormalsTex, input.uv).rgb * 2.0 - 1.0;
    float2 rand = _Jitter; // angle, jitter
#else
    float3 N = FetchViewNormals(input.uv, _AO_TexelSize.xy, P);
    float2 rand = FetchNoise(input.uv * _AO_TexelSize.zw); // angle, jitter
#endif
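
    // Angular increment between sampled directions; the per-pixel noise rotates
    // and jitters the whole pattern.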
    const float alpha = 2.0 * UNITY_PI / DIRECTIONS;
    float ao = 0;

#if COLOR_BLEEDING
    static float2 cbUVs[DIRECTIONS * STEPS];
    static float cbContribs[DIRECTIONS * STEPS];
#endif
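
    // For each direction, march STEPS samples along the ray and accumulate the
    // occlusion contribution of every sample.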
    UNITY_UNROLL
    for (int d = 0; d < DIRECTIONS; ++d) {
        float angle = alpha * (float(d) + rand.x + _TemporalParams.x);

        // Compute normalized 2D direction
        float cosA, sinA;
        sincos(angle, sinA, cosA);
        float2 direction = float2(cosA, sinA);

        // Jitter starting sample within the first step
        float rayPixels = (frac(rand.y + _TemporalParams.y) * stepSize + 1.0);

        UNITY_UNROLL
        for (int s = 0; s < STEPS; ++s) {
#ifdef DEINTERLEAVED
            float2 snappedUV = round(rayPixels * direction) * _DeinterleavedAO_TexelSize.xy + input.uv;
            float3 S = FetchLayerViewPos(snappedUV);
#else
            float2 snappedUV = round(rayPixels * direction) * _Input_TexelSize.xy + input.uv;
            float3 S = FetchViewPos(snappedUV);
#endif
            rayPixels += stepSize;
            float contrib = ComputeAO(P, N, S);

#if OFFSCREEN_SAMPLES_CONTRIBUTION
            float2 offscreenAmount = _OffscreenSamplesContrib * (snappedUV - saturate(snappedUV) != 0 ? 1 : 0);
            contrib = max(contrib, offscreenAmount.x);
            contrib = max(contrib, offscreenAmount.y);
#endif
            ao += contrib;

#if COLOR_BLEEDING
            // Map (direction, step) to a unique slot so every sample's UV and
            // contribution are available to the color-bleeding pass below.
            int sampleIdx = d * STEPS + s;
            cbUVs[sampleIdx] = snappedUV;
            cbContribs[sampleIdx] = contrib;
#endif
        }
    }
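
    // Debug mode outputs the reconstructed view-space normals instead of AO.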
#ifdef DEBUG_VIEWNORMALS
    N = float3(N.x, -N.y, N.z);
    return float4(N * 0.5 + 0.5, 1);
#else
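    // Color bleeding: gather scene color at the stored sample positions, weighted
    // by each sample's occlusion contribution and a brightness mask.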
#if COLOR_BLEEDING
    half3 col = half3(0, 0, 0);
    UNITY_UNROLL
    for (int s = 0; s < DIRECTIONS * STEPS; s += 2) {
        half3 emission = UNITY_SAMPLE_SCREENSPACE_TEXTURE(_MainTex, cbUVs[s]).rgb;
        half average = (emission.x + emission.y + emission.z) / 3;
        half scaledAverage = saturate((average - _ColorBleedBrightnessMaskRange.x) / (_ColorBleedBrightnessMaskRange.y - _ColorBleedBrightnessMaskRange.x + 1e-6));
        half maskMultiplier = 1 - (scaledAverage * _ColorBleedBrightnessMask);
        col += emission * cbContribs[s] * maskMultiplier;
    }

#if DEFERRED_SHADING
    half3 albedo = UNITY_SAMPLE_SCREENSPACE_TEXTURE(_CameraGBufferTexture0, input.uv).rgb * 0.8 + 0.2;
    albedo *= _AlbedoMultiplier;
#else
    half3 albedo = half3(1, 1, 1);
#endif
    float4 aoOutput = float4(col, ao);
#else
    float aoOutput = ao;
#endif
    // Normalize by the sample count and apply the AO multiplier
    aoOutput *= (_AOmultiplier / (STEPS * DIRECTIONS));

    // Fade the effect out between (_MaxDistance - _DistanceFalloff) and _MaxDistance
    float fallOffStart = _MaxDistance - _DistanceFalloff;
    float distFactor = saturate((P.z - fallOffStart) / (_MaxDistance - fallOffStart));
#if COLOR_BLEEDING
    //aoOutput.rgb = saturate(1 - lerp(dot(aoOutput.rgb, 0.333).xxx, aoOutput.rgb * albedo, _ColorBleedSaturation));
    aoOutput.rgb = saturate(lerp(dot(aoOutput.rgb, 0.333).xxx, aoOutput.rgb * albedo, _ColorBleedSaturation));
    aoOutput = lerp(saturate(float4(aoOutput.rgb, 1 - aoOutput.a)), float4(0, 0, 0, 1), distFactor);
    return aoOutput;
#else
    aoOutput = lerp(saturate(1 - aoOutput), 1, distFactor);
    return float4(EncodeFloatRG(saturate(P.z * (1.0 / _ProjectionParams.z))), 1.0, aoOutput);
#endif
#endif // DEBUG_VIEWNORMALS
}
#endif // HBAO_AO_INCLUDED