afraidofdark closed this issue 7 months ago
Hello,
I think that for people like me who create an OpenGL ES context under Windows, a define could be introduced to prevent unnecessary OpenGL warnings.
This seems like an XY problem. We should focus on fixing the underlying problem instead:
Under Windows I don't define any of these, because otherwise it starts asking for headers and functions to link against.
That's the point of those defines. You should clarify how you are using GL ES 2 under Windows in your main code (what does your other code look like in terms of includes and init code, which library are you using, how are you compiling/linking), and we should strive to make any valid ES 2 setup work with our backend.
Here is the minimal code that reproduces the problem. Link to my glad file: gles2.h
#include "SDL.h"
#include <ImGui/imgui.h>
#include <ImGui/backends/imgui_impl_opengl3.h>
#include <ImGui/backends/imgui_impl_sdl2.h>
#define GLAD_GLES2_IMPLEMENTATION
#include "glad/gles2.h"
#include <iostream>
#include <functional>
#include <string>
bool g_running = true;
SDL_Window* g_window = nullptr;
SDL_GLContext g_context = nullptr;
void GLDebugMessageCallback(GLenum source,
GLenum type,
GLuint id,
GLenum severity,
GLsizei length,
const GLchar* msg,
const void* data)
{
std::cout << msg << std::endl;
}
int main(int argc, char* argv[])
{
if (SDL_Init(SDL_INIT_VIDEO | SDL_INIT_EVENTS | SDL_INIT_GAMECONTROLLER) < 0)
{
g_running = false;
}
else
{
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_ES);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 2);
SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24);
SDL_GL_SetAttribute(SDL_GL_STENCIL_SIZE, 8);
SDL_GL_SetAttribute(SDL_GL_FRAMEBUFFER_SRGB_CAPABLE, 1);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_FLAGS, SDL_GL_CONTEXT_DEBUG_FLAG);
g_window =
SDL_CreateWindow("ToolKit",
SDL_WINDOWPOS_UNDEFINED,
SDL_WINDOWPOS_UNDEFINED,
1280,
800,
SDL_WINDOW_OPENGL | SDL_WINDOW_RESIZABLE | SDL_WINDOW_SHOWN | SDL_WINDOW_ALLOW_HIGHDPI);
if (g_window == nullptr)
{
g_running = false;
}
else
{
g_context = SDL_GL_CreateContext(g_window);
if (g_context == nullptr)
{
g_running = false;
}
else
{
SDL_GL_MakeCurrent(g_window, g_context);
SDL_GL_SetSwapInterval(0);
}
}
IMGUI_CHECKVERSION();
ImGui::CreateContext();
ImGuiIO& io = ImGui::GetIO();
io.ConfigFlags |= ImGuiConfigFlags_DockingEnable | ImGuiConfigFlags_ViewportsEnable;
io.ConfigWindowsMoveFromTitleBarOnly = true;
ImGui_ImplSDL2_InitForOpenGL(g_window, g_context);
ImGui_ImplOpenGL3_Init("#version 300 es");
// Load gl and error reporters
gladLoadGLES2((GLADloadfunc)SDL_GL_GetProcAddress);
if (glDebugMessageCallback != NULL)
{
glEnable(GL_DEBUG_OUTPUT);
glEnable(GL_DEBUG_OUTPUT_SYNCHRONOUS);
glDebugMessageCallback(&GLDebugMessageCallback, nullptr);
glDebugMessageControl(GL_DONT_CARE, GL_DONT_CARE, GL_DEBUG_SEVERITY_NOTIFICATION, 0, nullptr, GL_FALSE);
glDebugMessageControl(GL_DONT_CARE, GL_DONT_CARE, GL_DEBUG_SEVERITY_LOW, 0, nullptr, GL_FALSE);
glDebugMessageControl(GL_DONT_CARE, GL_DONT_CARE, GL_DEBUG_SEVERITY_MEDIUM, 0, nullptr, GL_TRUE);
glDebugMessageControl(GL_DONT_CARE, GL_DONT_CARE, GL_DEBUG_SEVERITY_HIGH, 0, nullptr, GL_TRUE);
}
while (true)
{
SDL_Event sdlEvent;
while (SDL_PollEvent(&sdlEvent))
{
ImGui_ImplSDL2_ProcessEvent(&sdlEvent);
}
ImGui_ImplOpenGL3_NewFrame();
ImGui_ImplSDL2_NewFrame();
ImGui::NewFrame();
ImGui::ShowDemoWindow();
ImGui::Render();
ImGui_ImplOpenGL3_RenderDrawData(ImGui::GetDrawData());
ImGui::EndFrame();
ImGui::UpdatePlatformWindows();
ImGui::RenderPlatformWindowsDefault();
SDL_GL_MakeCurrent(g_window, g_context);
SDL_GL_SwapWindow(g_window);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
}
return 0;
}
}
Thank you. The GLAD stuff has no effect really, so I believe the only meaningful setup line here is the use of SDL_GL_CONTEXT_PROFILE_ES. I must investigate the SDL code to see what it means and how we could somehow detect it on our end.
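For reference, here is a rough application-side sketch of what SDL itself exposes; the backend has no SDL dependency, so this is only an illustration of the information available to the application, not a proposed backend change:

// Illustration only, application side: after SDL_GL_CreateContext()/SDL_GL_MakeCurrent(),
// SDL can report the profile of the context it actually created.
int profile_mask = 0;
bool is_es_context = false;
if (SDL_GL_GetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, &profile_mask) == 0)
    is_es_context = (profile_mask & SDL_GL_CONTEXT_PROFILE_ES) != 0;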
I tried to detect the OpenGL version; the macros do not consider the case of loading OpenGL ES 3.0 on desktop, so the settings in ImGui_ImplOpenGL3_Data are not generated correctly.
Here is a diff that fixes the problem while still assigning the OpenGL version properly.
IMHO, adding one more macro that indicates DESKTOP_OPENGL_ES3 would be much better, because the codebase already has suitable checks for GL ES 3.0 everywhere.
diff --git a/backends/imgui_impl_opengl3.cpp b/backends/imgui_impl_opengl3.cpp
index 0be98b63..46cf80d9 100644
--- a/backends/imgui_impl_opengl3.cpp
+++ b/backends/imgui_impl_opengl3.cpp
@@ -319,6 +319,14 @@ bool ImGui_ImplOpenGL3_Init(const char* glsl_version)
 #if defined(IMGUI_IMPL_OPENGL_ES3)
     bd->GlProfileIsES3 = true;
+#else
+
+    const GLubyte* version = glGetString(GL_VERSION);
+    if (strstr((const char*) version, "OpenGL ES"))
+    {
+        bd->GlProfileIsES3 = true;
+    }
+
 #endif
     bd->UseBufferSubData = false;
@@ -424,7 +432,10 @@ static void ImGui_ImplOpenGL3_SetupRenderState(ImDrawData* draw_data, int fb_wid
     glDisable(GL_PRIMITIVE_RESTART);
 #endif
 #ifdef IMGUI_IMPL_OPENGL_HAS_POLYGON_MODE
-    glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
+    if (!bd->GlProfileIsES3)
+    {
+        glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
+    }
 #endif
     // Support for GL 4.5 rarely used glClipControl(GL_UPPER_LEFT)
@@ -513,7 +524,12 @@ void ImGui_ImplOpenGL3_RenderDrawData(ImDrawData* draw_data)
     GLuint last_vertex_array_object; glGetIntegerv(GL_VERTEX_ARRAY_BINDING, (GLint*)&last_vertex_array_object);
 #endif
 #ifdef IMGUI_IMPL_OPENGL_HAS_POLYGON_MODE
-    GLint last_polygon_mode[2]; glGetIntegerv(GL_POLYGON_MODE, last_polygon_mode);
+    GLint last_polygon_mode[2];
+    if (!bd->GlProfileIsES3)
+    {
+        glGetIntegerv(GL_POLYGON_MODE, last_polygon_mode);
+    }
+
 #endif
     GLint last_viewport[4]; glGetIntegerv(GL_VIEWPORT, last_viewport);
     GLint last_scissor_box[4]; glGetIntegerv(GL_SCISSOR_BOX, last_scissor_box);
@@ -653,14 +669,17 @@ void ImGui_ImplOpenGL3_RenderDrawData(ImDrawData* draw_data)
 #ifdef IMGUI_IMPL_OPENGL_HAS_POLYGON_MODE
     // Desktop OpenGL 3.0 and OpenGL 3.1 had separate polygon draw modes for front-facing and back-facing faces of polygons
-    if (bd->GlVersion <= 310 || bd->GlProfileIsCompat)
+    if (!bd->GlProfileIsES3)
     {
-        glPolygonMode(GL_FRONT, (GLenum)last_polygon_mode[0]);
-        glPolygonMode(GL_BACK, (GLenum)last_polygon_mode[1]);
-    }
-    else
-    {
-        glPolygonMode(GL_FRONT_AND_BACK, (GLenum)last_polygon_mode[0]);
+        if (bd->GlVersion <= 310 || bd->GlProfileIsCompat)
+        {
+            glPolygonMode(GL_FRONT, (GLenum) last_polygon_mode[0]);
+            glPolygonMode(GL_BACK, (GLenum) last_polygon_mode[1]);
+        }
+        else
+        {
+            glPolygonMode(GL_FRONT_AND_BACK, (GLenum) last_polygon_mode[0]);
+        }
     }
 #endif // IMGUI_IMPL_OPENGL_HAS_POLYGON_MODE
Thank you, this is super useful as until now I had no idea how to run an ES context on desktop (I've requested that help a few times before).
Committed a backend-side fix as 9ec299e
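As a side note for anyone stuck on an older version, here is a rough sketch of the runtime-detection idea discussed above. It is only an illustration of the approach (not necessarily the exact code of that commit), and it assumes GL function pointers are already loaded and <cstring> is included:

// Hypothetical helper: detect an OpenGL ES context at runtime.
// ES contexts prefix the GL_VERSION string with "OpenGL ES"; desktop GL does not.
static bool IsOpenGLESContext()
{
    const char* version = (const char*)glGetString(GL_VERSION);
    return version != nullptr && strncmp(version, "OpenGL ES", 9) == 0;
}

Prefix-matching with strncmp() is slightly stricter than the strstr() check in the diff above, but both express the same idea.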
I have reduced your repro to the following changes relative to the existing example:
Include:
#define GLAD_GLES2_IMPLEMENTATION
#include "glad/gles2.h"
Init:
// Decide GL+GLSL versions
const char* glsl_version = "#version 300 es";
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_ES);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 2);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_FLAGS, SDL_GL_CONTEXT_DEBUG_FLAG);
Init:
// Setup Glad
gladLoadGLES2((GLADloadfunc)SDL_GL_GetProcAddress);
// Setup debug message callback
if (glDebugMessageCallback != NULL)
{
    glEnable(GL_DEBUG_OUTPUT);
    glEnable(GL_DEBUG_OUTPUT_SYNCHRONOUS);
    glDebugMessageCallback([](GLenum, GLenum, GLuint, GLenum, GLsizei, const GLchar* msg, const void*) { printf("%s\n", msg); }, nullptr);
    glDebugMessageControl(GL_DONT_CARE, GL_DONT_CARE, GL_DEBUG_SEVERITY_NOTIFICATION, 0, nullptr, GL_FALSE);
    glDebugMessageControl(GL_DONT_CARE, GL_DONT_CARE, GL_DEBUG_SEVERITY_LOW, 0, nullptr, GL_FALSE);
    glDebugMessageControl(GL_DONT_CARE, GL_DONT_CARE, GL_DEBUG_SEVERITY_MEDIUM, 0, nullptr, GL_TRUE);
    glDebugMessageControl(GL_DONT_CARE, GL_DONT_CARE, GL_DEBUG_SEVERITY_HIGH, 0, nullptr, GL_TRUE);
}
IMHO, adding one more macro that indicates DESKTOP_OPENGL_ES3 would be much better, because the codebase already has suitable checks for GL ES 3.0 everywhere.
My problem is that a DESKTOP_OPENGL_ES3-like macro would imply we know which headers to pull, and I'm not sure there is a standard for desktop ES. We currently use our own local loader, which happens to work with the SDL setup. In fact, being able to do it without a compile-time define is a benefit.
If there is a better way to detect an ES context I'd always be happy to hear it.
Thanks for your help!
Version/Branch of Dear ImGui:
Version 1.90.5 WIP, Branch: docking
Back-ends:
imgui_impl_sdl2.h + imgui_impl_opengl3.h
Compiler, OS:
Windows 11 - VS 2022
Full config/build information:
Details:
Dear ImGui disables glPolygonMode() if IMGUI_IMPL_OPENGL_ES3 or IMGUI_IMPL_OPENGL_ES2 is defined. Under Windows I don't define either of these, because otherwise it starts asking for headers and functions to link against. I use glad's gles2 loader generated with 3.2 features and create an SDL OpenGL ES 3.0 context, so I don't actually have polygon mode. But since I am under Windows and neither IMGUI_IMPL_OPENGL_ES3 nor IMGUI_IMPL_OPENGL_ES2 can be defined, I always end up with IMGUI_IMPL_OPENGL_HAS_POLYGON_MODE defined, which causes invalid enum errors:
[Error] GL_INVALID_ENUM error generated. <pname> requires feature(s) disabled in the current profile.
[Error] GL_INVALID_ENUM error generated. <mode> is not a valid polygon mode.
My solution is to create another macro and modify imgui_impl_opengl3.cpp (around line 182) so that the polygon-mode path is effectively disabled when "TK_NO_POLYGON_MODE" is defined in imconfig.h.
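The exact patch isn't included here; roughly, the idea is a guard like the following (TK_NO_POLYGON_MODE is my own define from imconfig.h, not an official Dear ImGui macro, so treat this as an illustration only):

// Illustration only: skip the desktop-only polygon-mode path when the user
// opts out via imconfig.h, even though neither IMGUI_IMPL_OPENGL_ES2 nor
// IMGUI_IMPL_OPENGL_ES3 is defined.
#if !defined(IMGUI_IMPL_OPENGL_ES2) && !defined(IMGUI_IMPL_OPENGL_ES3) && !defined(TK_NO_POLYGON_MODE)
#define IMGUI_IMPL_OPENGL_HAS_POLYGON_MODE
#endif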
I think that for people like me who create an OpenGL ES context under Windows, a define could be introduced to prevent unnecessary OpenGL warnings.
Screenshots/Video:
No response
Minimal, Complete and Verifiable Example code: