maierfelix / webgpu

WebGPU for Node [Deprecated, Unmaintained]
MIT License
244 stars 17 forks source link

gbuffer+ray tracing has a bug when read from gbuffer in .rgen shader #19

Closed yyc-git closed 4 years ago

yyc-git commented 4 years ago

I want to use hybrid render with gbuffer+ray tracing, so there is three passes:

gbuffer pass's shader code is: gbuffer.vert

#version 450
#pragma shader_stage(vertex)

#include "../../shaders/camera.glsl"

layout(location = 0) in vec3 position;

layout(location = 0) out vec3 vPosition;

layout(std140, set = 0, binding = 0) uniform Model {
  mat4 modelMatrix;
}
uModel;

void main() {
  vec4 worldPosition = uModel.modelMatrix * vec4(position, 1.0);
  vPosition = vec3(worldPosition);

  gl_Position = getProjectionMatrix() * getViewMatrix() * worldPosition;
  gl_Position.y = -gl_Position.y;
}

gbuffer.frag

#version 450
#pragma shader_stage(fragment)

layout(location = 0) in vec3 vPosition;
layout(location = 1) in vec3 vNormal;

layout(location = 0) out vec4 gPosition;

void main() {
  //write to gbuffer->MRT
  gPosition = vec4(vPosition, 0.0);
}

ray tracing pass's shader code is: ray-generation.rgen

#version 460
#extension GL_NV_ray_tracing : require
#pragma shader_stage(raygen)

layout(location = 0) rayPayloadNV vec3 hitValue;

layout(binding = 0) uniform sampler2D gPositionTexture;

layout(set = 1, binding = 0) uniform accelerationStructureNV topLevelAS;
layout(std140, set = 1, binding = 1) buffer PixelBuffer { vec4 pixels[]; }
pixelBuffer;

vec2 getLanuchIndex(uvec3 launchIDNV, uvec3 launchSizeNV) {
  const vec2 pixelCenter = vec2(launchIDNV.xy) + vec2(0.5);
  const vec2 inUV = pixelCenter / vec2(launchSizeNV.xy);

  return inUV * 2.0 - 1.0;
}

void main() {
  vec2 lanuchIndex = getLanuchIndex(gl_LaunchIDNV, gl_LaunchSizeNV);

  //read from gbuffer->MRT
  vec4 worldPosition = texture(gPositionTexture, lanuchIndex);

  //directly show worldPosition
  hitValue = vec3(worldPosition);

  const uint pixelIndex = gl_LaunchIDNV.y * gl_LaunchSizeNV.x + gl_LaunchIDNV.x;

  pixelBuffer.pixels[pixelIndex] = vec4(hitValue, 1.0);
}

blit pass's shader code is(is the same as ray tracing example): screen.vert

#version 450
#pragma shader_stage(vertex)

layout(location = 0) out vec2 uv;

void main() {
  uv = vec2((gl_VertexIndex << 1) & 2, gl_VertexIndex & 2);
  gl_Position = vec4(uv * 2.0 - 1.0, 0.0, 1.0);
}

screen.frag

#version 450
#pragma shader_stage(fragment)

layout(location = 0) in vec2 uv;
layout(location = 0) out vec4 outColor;

layout(std140, set = 0, binding = 0) buffer PixelBuffer { vec4 pixels[]; }
pixelBuffer;

layout(set = 0, binding = 1) uniform ScreenDimension { vec2 resolution; };

void main() {
  const ivec2 bufferCoord = ivec2(floor(uv * resolution));
  const vec2 fragCoord = (uv * resolution);
  const uint pixelIndex = bufferCoord.y * uint(resolution.x) + bufferCoord.x;

  vec4 pixelColor = pixelBuffer.pixels[pixelIndex];
  outColor = pixelColor;
}

the output should be(the scene has 1 triangle + 1 plane):

but the output actually is:

maierfelix commented 4 years ago

Since the output is repeated 4 times, I'd try checking the function where you generate the UVs to read from the GBuffer (mainly the inUV * 2.0 - 1.0 part).

For example try inUV or inUV * 0.5 + 0.5

yyc-git commented 4 years ago

use inUV fix the bug! Thanks very much!