PRBonn / semantic_suma

SuMa++: Efficient LiDAR-based Semantic SLAM (Chen et al IROS 2019)
MIT License
907 stars 205 forks source link

a process function problem of "glDrawArrays(GL_POINTS, 0, 1);" #43

Closed rainlord closed 3 years ago

rainlord commented 3 years ago

hello,

I have a problem with the function "Preprocessing::process": in the steps "avgVertexmap", "filterVertexmap", and "generate normal map" it just calls "glDrawArrays(GL_POINTS, 0, 1);". This call draws only one vertex, and although the "quad.geom" shader emits 4 vertices, that is still only 4 vertices in total. So how can the fragment shader, for example "gen_normalmap.frag", generate normal vectors for all vertices?
please help me,Thanks a lot !!!!!

` glDisable(GL_DEPTH_TEST);

glow::GlTextureRectangle erode_semanticmap(width, height_, TextureFormat::RGBA_FLOAT);

semanticbuffer_.attach(FramebufferAttachment::COLOR0, frame.normalmap); semanticbuffer_.attach(FramebufferAttachment::COLOR1, erode_semanticmap); semanticbuffer_.bind();

glActiveTexture(GL_TEXTURE0); if (filterVertexmap) tempvertices.bind(); else frame.vertex_map.bind();

glActiveTexture(GL_TEXTURE1); frame.semantic_map.bind();

sampler.bind(0); sampler.bind(1);

vao_nopoints.bind(); normalprogram.bind();

glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // reset depth/normalmap
glDrawArrays(GL_POINTS, 0, 1);

normalprogram.release(); vao_nopoints.release(); semanticbuffer_.release();

glActiveTexture(GL_TEXTURE0); if (filterVertexmap) tempvertices.release(); else frame.vertex_map.release();

glActiveTexture(GL_TEXTURE1); frame.semantic_map.release();

sampler.release(0); sampler.release(1);`

jbehley commented 3 years ago

The geometry shader generates two triangle faces that cover the full viewport. The fragment shader then has to determine the color of each pixel, and that is where the computation happens.

rainlord commented 3 years ago

Thanks a lot, but I am still a little confused about how "gen_normalmap.frag" works. In the code of "quad.geom", texCoords only takes 4 coordinates — (0,0), (1,1), (0,1), (1,0) — and 4 vertices are output; so won't "gen_normalmap.frag" just be called 4 times, once for each of the 4 vertices?

This line of code, "vec2 pos = texCoords * textureSize(vertex_map);", would then only generate 4 positions: (0,0), (data_width,data_height), (0,data_height), (data_width,0). I am confused about how "gen_normalmap.frag" computes the normal vectors of all surfels, or of all points of a lidar scan.

`in vec2 texCoords;

uniform int width; uniform int height;

uniform sampler2DRect vertex_map; uniform sampler2DRect semantic_map;

layout (location = 0) out vec4 normal; layout (location = 1) out vec4 eroded_semantic_map;

uniform int normal_radius;

#include "shader/color_map.glsl"

// Wrap x into the half-open interval [0, dim) by repeatedly adding or
// subtracting dim. Used to wrap the horizontal texture coordinate so
// the neighborhood lookup goes around the 360-degree scan.
float wrap(float x, float dim) {
  float wrapped = x;
  while (wrapped >= dim) wrapped -= dim;
  while (wrapped < 0) wrapped += dim;
  return wrapped;
}

// A normal/vertex sample is considered valid when its w flag is set (> 0.5).
bool valid(vec4 v) {
  return v.w > 0.5;
}

// Sentinel value written to both outputs for pixels without a usable result.
vec4 invalid = vec4(0.0, 0.0, 0.0, 1.0);

// Fragment shader entry point of gen_normalmap.frag.
// The geometry shader (quad.geom) emits a full-screen quad, so this runs
// once per pixel of the output textures; texCoords is the rasterizer-
// interpolated coordinate in [0,1]x[0,1], so `pos` sweeps every texel of
// the vertex map. Outputs: `normal` (COLOR0), `eroded_semantic_map` (COLOR1).
// NOTE(review): the local float `width` shadows the `uniform int width`
// declared above.
void main() { float width = textureSize(vertex_map).x; vec2 pos = texCoords * textureSize(vertex_map); normal = invalid; eroded_semantic_map = invalid; // for invalid points

// Only compute a normal where the vertex map holds a valid point (w > 0).
if(texture(vertex_map, pos).w > 0.0f) { normal.w = 1.0f;

// Center point p and its 4-neighborhood. The x coordinate wraps around
// (the scan covers the full horizontal field of view); y is not wrapped —
// presumably the sampler handles out-of-range rows (TODO confirm).
vec4 p = texture(vertex_map, pos);
vec4 u = texture(vertex_map, vec2(wrap(pos.x + 1, width), pos.y));
vec4 v = texture(vertex_map, vec2(pos.x, pos.y + 1));
vec4 s = texture(vertex_map, vec2(wrap(pos.x - 1, width), pos.y));
vec4 t = texture(vertex_map, vec2(pos.x, pos.y - 1));

// Unit direction vectors from p towards its neighbors
// (forward differences u/v, backward differences s/t).
u.xyz = normalize(u.xyz - p.xyz);
v.xyz = normalize(v.xyz - p.xyz);
s.xyz = normalize(p.xyz - s.xyz);
t.xyz = normalize(p.xyz - t.xyz);

// Invalidate the normal when neighbor information is missing
// (w < 1 marks an invalid neighbor sample).
if(u.w < 1.0f && v.w < 1.0f) normal.w = 0;
if(s.w < 1.0f && t.w < 1.0f) normal.w = 0;

if(!valid(u) || !valid(v)) normal.w = 0;

// floodfill erosion
// Erode the semantic map: if any neighbor within the kernel carries a
// different non-zero label than the center texel, mark the texel invalid.
// With kernel_size = 2 the loop body runs exactly once (offset = 1).
int kernel_size = 2;
eroded_semantic_map = texture(semantic_map, pos);

for(int offset = 1; offset < kernel_size; offset++)
{
  float p_label = texture(semantic_map, pos).x;
  float u_label = texture(semantic_map, vec2(wrap(pos.x + offset, width), pos.y)).x;
  float v_label = texture(semantic_map, vec2(pos.x, pos.y + offset)).x;
  float s_label = texture(semantic_map, vec2(wrap(pos.x - offset, width), pos.y)).x;
  float t_label = texture(semantic_map, vec2(pos.x, pos.y - offset)).x;

  if((p_label != u_label && u_label != 0.0) ||
     (p_label != v_label && v_label != 0.0) ||
     (p_label != s_label && s_label != 0.0) ||
     (p_label != t_label && t_label != 0.0))
    eroded_semantic_map = invalid;
}

// TODO: check if distances in x/y-direction are similar.
//if(abs(length(u) - length(s)) / max(length(u), length(s)) > 0.5) normal.w = 0;
//if(abs(length(v) - length(t)) / max(length(t), length(v)) > 0.5) normal.w = 0;

// The normal is the normalized cross product of the two forward-difference
// directions; w flags degenerate (near zero-length) cross products.
if(normal.w > 0.0f)
{
  vec3 w = cross(u.xyz, v.xyz);
  float len = length(w);
  normal = vec4(w / len, 1.0);
  normal.w = int(len > 0.0000001);
}

} } `

jbehley commented 3 years ago

As the triangles generated in the geometry shader cover the full screen, the fragment shader has to produce a value for each pixel. Therefore the fragment shader is invoked for every pixel, with a texCoord value interpolated from the three vertices of the triangle.

texCoord will take all values in [0,1]x[0,1].

It's a trick that is also used to render post-processing effects or in deferred rendering (see LearnOpenGL, e.g. the renderQuad in the bloom tutorial).

Instead of explicitly rendering a quad by supplying 6 vertices, I exploit the geometry shader to generate the two triangle faces. Note that the vertex shader is even empty; the draw call only needs to invoke the geometry shader once, and the rest is handled internally by the graphics pipeline.

By having the interpolated texture coordinates one can query the textures (vertex map) and compute the output values (the normals from neighboring points from the vertex map).

rainlord commented 3 years ago

I think I overlooked the interpolation step. So thanks again, sincerely.