/* This struct holds all the information about the reflected ray while it is marched.
   - rayViewStart, rayViewEnd: start/end position of the ray in View Space.
   - rayFragStart, rayFragEnd, rayFragCurr: start/end/current fragment position of the ray in
     Screen Space, expressed in viewport pixels (WIDTH, HEIGHT).
   - rayFragStartDepth, rayFragEndDepth: clip-space depth (z after perspective divide) at the
     start and end fragments; the current depth is interpolated between them.
   - rayCoveredPart: fraction of the ray already covered during marching, clamped to [0, 1].
   - hit: 1 if the ray hit an object during marching, 0 otherwise (also used as the base
     visibility factor in MAIN).
   - objHitViewPos: View Space position of the object the ray may have hit at the current
     fragment. */
struct RayData{
vec3 rayViewStart, rayViewEnd;
vec2 rayFragStart, rayFragEnd, rayFragCurr;
float rayFragStartDepth, rayFragEndDepth;
float rayCoveredPart;
int hit;
vec3 objHitViewPos;
} rayData;
/* This function takes a pixel position in the viewport of (WIDTH, HEIGHT) and returns the
corresponding fragment position in View Space, reconstructed from DEPTH_TEXTURE */
vec3 viewPosFromScreen(vec2 fragPos, vec2 size);
/* This function returns the View Space depth of the current ray fragment (rayData.rayFragCurr) */
float rayViewDepthFromScreen(vec2 size);
/* This function takes texture coordinates and changes the y component according to the selected graphics API */
vec2 correctTextureCoordinates(vec2 uv);
/* This function checks if the ray fragment position is outside the screen or not */
bool rayFragOutOfBound(vec2 rayFrag, vec2 size);
/* This function follows the ray in Screen Space and tries to find if the ray hit an object. Each iteration the ray fragment position is incremented by
the rayStepVector and the ray fragment position is converted to View Space to check if there is an object at this position or not. */
void rayMarch(vec2 rayStepVector, vec2 size);
/* This function is called by rayMarch when the ray hit an object. It tries to find the exact location where the hit happened */
void refinementStep(vec2 rayStepVector, vec2 size);
/* Entry point: computes a screen-space reflection for the fragment and blends it with
   materialColor according to the computed visibility. */
void MAIN()
{
/* Viewport size in pixels; all Screen Space ray positions use this scale */
vec2 size = vec2(textureSize(SCREEN_TEXTURE, 0));
/* Calculate the reflected vector of the fragment view space position around the normal.
The reflected vector is calculated in World Space then get converted to View Space.
Calculations are not done directly in View Space to avoid using the inverse function
to transform the normal. */
vec3 unitPos = normalize(-VIEW_VECTOR);
vec3 unitNormal = normalize(VAR_WORLD_NORMAL);
vec3 reflected = reflect(unitPos, unitNormal);
/* w = 0.0: transform as a direction (rotation only, no translation) */
reflected = vec4(VIEW_MATRIX * vec4(reflected, 0.0)).xyz;
reflected = normalize(reflected);
/* Convert the fragment position from World Space to View Space */
vec3 fragViewPos = vec4(VIEW_MATRIX * vec4(VAR_WORLD_POSITION , 1.0)).xyz;
/* Calculate the starting and ending point of the reflected ray in View Space.
   rayMaxDistance is a material property controlling how far reflections reach. */
rayData.rayViewStart = fragViewPos;
rayData.rayViewEnd = fragViewPos + (reflected * rayMaxDistance);
/* Convert the start position from view space to Screen Space in the size of the viewport (WIDTH, HEIGHT):
   project, perspective-divide, remap NDC xy from [-1, 1] to [0, 1], then scale to pixels */
vec4 rayClipStart = PROJECTION_MATRIX * vec4(rayData.rayViewStart, 1.0);
rayClipStart /= rayClipStart.w;
rayData.rayFragStart = rayClipStart.xy * 0.5 + 0.5;
rayData.rayFragStart *= size;
rayData.rayFragStartDepth = rayClipStart.z;
/* Convert the end position from View Space to Screen Space in the size of the viewport (WIDTH, HEIGHT) */
vec4 rayClipEnd = PROJECTION_MATRIX * vec4(rayData.rayViewEnd, 1.0);
rayClipEnd /= rayClipEnd.w;
rayData.rayFragEnd = rayClipEnd.xy * 0.5 + 0.5;
rayData.rayFragEnd *= size;
rayData.rayFragEndDepth = rayClipEnd.z;
/* Calculate the difference between the start and end fragment */
vec2 diff = rayData.rayFragEnd - rayData.rayFragStart;
/* Calculate the value of each step. The step depends on the marchSteps property set from the QML.
Increasing marchSteps will result in a better quality but affects performance.
max(marchSteps, 1) guards against a zero (or negative) step count. */
vec2 rayStepVector = diff / max(marchSteps, 1);
/* Start ray marching */
rayData.rayFragCurr = rayData.rayFragStart;
rayData.hit = 0;
rayMarch(rayStepVector, size);
/* If the resulting fragment Screen Space position is outside the screen return the material color. */
bool isOutOfBound = rayFragOutOfBound(rayData.rayFragCurr, size);
if(isOutOfBound)
{
BASE_COLOR = materialColor;
return;
}
//! [visibilitycheck]
/* hit is 0 or 1, so visibility starts as "did the ray hit anything" */
float visibility = rayData.hit;
/* Check if the ray hit an object behind the camera. This means information about the object can not be obtained from SCREEN_TEXTURE.
Start fading the visibility according to how much the reflected ray is moving toward the opposite direction of the camera */
visibility *= (1 - max(dot(-normalize(fragViewPos), reflected), 0));
/* Fade out visibility according how far is the hit object from the fragment */
visibility *= (1 - clamp(length(rayData.objHitViewPos - rayData.rayViewStart) / rayMaxDistance, 0, 1));
visibility = clamp(visibility, 0, 1);
//! [visibilitycheck]
/* Calculate the reflection color from the SCREEN_TEXTURE */
//! [reflectioncolor]
vec2 uv = rayData.rayFragCurr / size;
uv = correctTextureCoordinates(uv);
vec3 reflectionColor = texture(SCREEN_TEXTURE, uv).rgb;
/* specular is a material property scaling the reflection intensity */
reflectionColor *= specular;
vec3 mixedColor = mix(materialColor.rgb, reflectionColor, visibility);
BASE_COLOR = vec4(mixedColor, materialColor.a);
//! [reflectioncolor]
}
//! [viewposfromscreen]
/* Reconstructs the View Space position of the pixel at fragPos (viewport pixels)
   by sampling DEPTH_TEXTURE and unprojecting through INVERSE_PROJECTION_MATRIX. */
vec3 viewPosFromScreen(vec2 fragPos, vec2 size)
{
    vec2 normCoord = fragPos / size;
    float sampledDepth = textureLod(DEPTH_TEXTURE, correctTextureCoordinates(normCoord), 0).r;
    /* When the NDC near plane sits at -1 (NEAR_CLIP_VALUE < 0), remap the
       sampled [0, 1] depth into [-1, 1] before unprojecting */
    if(NEAR_CLIP_VALUE < 0.0)
        sampledDepth = 2 * sampledDepth - 1.0;
    vec4 unprojected = INVERSE_PROJECTION_MATRIX * vec4(2 * normCoord - 1, sampledDepth, 1.0);
    return unprojected.xyz / unprojected.w;
}
//! [viewposfromscreen]
//! [rayviewposfromscreen]
/* Returns the View Space depth of the current ray fragment. The Screen Space depth
   is interpolated between the ray's start and end depths by how far the march has
   progressed (rayCoveredPart), then unprojected back to View Space. */
float rayViewDepthFromScreen(vec2 size)
{
    vec2 normCoord = rayData.rayFragCurr / size;
    float interpolatedDepth = mix(rayData.rayFragStartDepth, rayData.rayFragEndDepth, rayData.rayCoveredPart);
    vec4 unprojected = INVERSE_PROJECTION_MATRIX * vec4(2 * normCoord - 1, interpolatedDepth, 1.0);
    return unprojected.z / unprojected.w;
}
//! [rayviewposfromscreen]
//! [rayoutofbound]
/* Returns true when rayFrag lies outside the viewport rectangle [0, size] on either axis. */
bool rayFragOutOfBound(vec2 rayFrag, vec2 size)
{
    return any(lessThan(rayFrag, vec2(0.0))) || any(greaterThan(rayFrag, size));
}
//! [rayoutofbound]
//! [refinementStep]
/* Binary search around the position where rayMarch detected a hit: each iteration
   halves the step, then moves the ray fragment backward while it is still inside an
   object and forward otherwise, converging on the exact intersection point.
   refinementSteps is a material property controlling the number of iterations. */
void refinementStep(vec2 rayStepVector, vec2 size)
{
    for(int i = 0; i < refinementSteps; i++)
    {
        float marchedLength = length(rayData.rayFragCurr - rayData.rayFragStart);
        float totalLength = length(rayData.rayFragEnd - rayData.rayFragStart);
        rayData.rayCoveredPart = clamp(marchedLength / totalLength, 0.0, 1.0);
        float rayDepth = rayViewDepthFromScreen(size);
        rayData.objHitViewPos = viewPosFromScreen(rayData.rayFragCurr, size);
        float deltaDepth = rayDepth - rayData.objHitViewPos.z;
        rayStepVector *= 0.5;
        /* Inside an object when the ray is behind the stored surface by less than depthBias */
        bool insideObject = deltaDepth > 0 && deltaDepth < depthBias;
        rayData.rayFragCurr += insideObject ? -rayStepVector : rayStepVector;
    }
}
//! [refinementStep]
//! [rayMarch]
/* Marches the ray through Screen Space one rayStepVector at a time. At each step the
   current fragment is unprojected to View Space and compared against the scene depth;
   when the ray ends up just behind a surface (within depthBias) a hit is recorded and
   the exact intersection is located by refinementStep. */
void rayMarch(vec2 rayStepVector, vec2 size)
{
    /* The full Screen Space ray length is invariant over the loop */
    float totalLength = length(rayData.rayFragEnd - rayData.rayFragStart);
    for(int i = 0; i < marchSteps; i++)
    {
        rayData.rayFragCurr += rayStepVector;
        float marchedLength = length(rayData.rayFragCurr - rayData.rayFragStart);
        rayData.rayCoveredPart = clamp(marchedLength / totalLength, 0.0, 1.0);
        float rayDepth = rayViewDepthFromScreen(size);
        rayData.objHitViewPos = viewPosFromScreen(rayData.rayFragCurr, size);
        float deltaDepth = rayDepth - rayData.objHitViewPos.z;
        if(deltaDepth > 0 && deltaDepth < depthBias)
        {
            rayData.hit = 1;
            refinementStep(rayStepVector, size);
            return;
        }
    }
}
//! [rayMarch]
//! [correctTexture]
/* Flips the v coordinate when the framebuffer and NDC Y directions disagree
   (depends on which graphics API Qt is rendering with). */
vec2 correctTextureCoordinates(vec2 uv)
{
    bool flipY = FRAMEBUFFER_Y_UP < 0 && NDC_Y_UP == 1;
    return flipY ? vec2(uv.x, 1 - uv.y) : uv;
}
//! [correctTexture]