OpenGL glReadPixels doesn't work with EGL but works with GLX

I would like to use OpenGL with an EGL context for offscreen rendering into memory. I managed to get the code running without any errors reported by OpenGL or EGL, but when I read the pixels back with glReadPixels after rendering, the buffer contains no rendered image. The same code with a GLX context instead of EGL works as expected. I'd like to find out where the error is, because I want to use EGL and not drag along the burden of unnecessarily requiring a window-system library. I'd also like to avoid anything beyond OpenGL 1.5, i.e. no framebuffer objects.

Here is the complete code; you can switch between EGL and GLX by just changing the value of USE_EGL between 1 and 0. (I compile with gcc test.c -lGL -lEGL -lX11; the libraries have to come after the source file, and -lGLX is not needed since the glX entry points are exported by libGL.)

#include <stdint.h>
#include <stdio.h>

#include <GL/gl.h>
#include <EGL/egl.h>

#include <GL/glx.h>

#define USE_EGL 1

#define IMG_W 16
#define IMG_H 16

uint8_t image[4 * IMG_W * IMG_H]; /* 4 bytes per pixel reserved, though the GL_RGB read below uses only 3 */

int main()
{

#if USE_EGL
  static const EGLint configAttribs[] =
  {
    EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
    EGL_BLUE_SIZE, 8,
    EGL_GREEN_SIZE, 8,
    EGL_RED_SIZE, 8,
    EGL_DEPTH_SIZE, 8,
    EGL_RENDERABLE_TYPE, EGL_OPENGL_BIT,
    EGL_NONE
  };    
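
  /* note on the attribs above: EGL_DEPTH_SIZE 8 is an unusually small
     request (16 or 24 are typical), but it is only a minimum and depth
     testing is not used below anyway */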

  static const EGLint pbufferAttribs[] =
  {
    EGL_WIDTH, IMG_W,
    EGL_HEIGHT, IMG_H,
    EGL_NONE
  };

  EGLDisplay eglDpy = eglGetDisplay(EGL_DEFAULT_DISPLAY);

  EGLint major, minor;

  printf("%d",eglInitialize(eglDpy, &major, &minor) == EGL_TRUE);

  EGLint numConfigs;
  EGLConfig eglCfg;

  printf("%d",eglChooseConfig(eglDpy, configAttribs, &eglCfg, 1, &numConfigs) == EGL_TRUE);

  EGLSurface eglSurf = eglCreatePbufferSurface(eglDpy, eglCfg, pbufferAttribs);

  printf("%d",eglSurf != EGL_NO_SURFACE);

  printf("%d",eglBindAPI(EGL_OPENGL_API) == EGL_TRUE);

  EGLContext eglCtx = eglCreateContext(eglDpy, eglCfg, EGL_NO_CONTEXT, NULL);

  printf("%d",eglMakeCurrent(eglDpy, eglSurf, eglSurf, eglCtx) == EGL_TRUE);
#else
  // glx

  Display *glxDpy = XOpenDisplay(0);

  int attributeList[] = {
    GLX_RGBA,
    GLX_DOUBLEBUFFER,
    GLX_RED_SIZE, 1,
    GLX_GREEN_SIZE, 1,
    GLX_BLUE_SIZE, 1, 
    None };

  XVisualInfo *vi = glXChooseVisual(glxDpy, DefaultScreen(glxDpy),attributeList);

  XSetWindowAttributes swa;
  swa.colormap = XCreateColormap(glxDpy, RootWindow(glxDpy, vi->screen),
                                 vi->visual, AllocNone);
  swa.border_pixel = 0;
  swa.event_mask = StructureNotifyMask;

  Window win = XCreateWindow(glxDpy, RootWindow(glxDpy, vi->screen),
    0, 0, IMG_W, IMG_H, 0, vi->depth, InputOutput, vi->visual,
    CWBorderPixel | CWColormap | CWEventMask, &swa);

  XMapWindow(glxDpy, win);

  GLXContext ctx = glXCreateContext(glxDpy, vi, 0, GL_TRUE);
  glXMakeCurrent(glxDpy, win, ctx);
#endif

  glClearColor(0.0, 0.0, 0.0, 0.0);
  glClear(GL_COLOR_BUFFER_BIT);
  glColor3f(1.0, 0.7, 1.0);
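  /* note: glOrtho below modifies the current matrix, GL_MODELVIEW by
     default; that is harmless here since both matrix stacks are still
     identity, but glMatrixMode(GL_PROJECTION) first would be the
     conventional form */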
  glOrtho(0.0, 1.0, 0.0, 1.0, -1.0, 1.0);

  printf("%d",glGetError() == GL_NO_ERROR);

  glBegin(GL_POLYGON);
    glVertex3f(0.2, 0.2, 0.0);
    glVertex3f(0.2, 0.7, 0.0);
    glVertex3f(0.7, 0.2, 0.0);
  glEnd();

  glFinish(); /* make sure rendering has completed before the read-back below */

  glReadPixels(0, 0, IMG_W, IMG_H, GL_RGB, GL_UNSIGNED_BYTE, image); /* row size 16 * 3 bytes is a multiple of the default GL_PACK_ALIGNMENT (4) */

  printf("%d",glGetError() == GL_NO_ERROR);

  printf("\n");

  for (int i = 0; i < IMG_W; ++i) // print to terminal (transposed; fine for a square test image)
  {
    for (int j = 0; j < IMG_H; ++j)
      printf("%c ", image[3 * (j * IMG_W + i)] != 0 ? '#' : '.'); // test the red channel

    printf("\n");
  }

  #if USE_EGL
    eglTerminate(eglDpy);
  #else
    ctx = glXGetCurrentContext(); 
    glXDestroyContext(glxDpy, ctx); 
  #endif

  return 0;
}
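
One more check that might help narrow this down (a sketch I have not run, using only the standard eglQuerySurface and glGetIntegerv calls) would be to verify, right after eglMakeCurrent, that the pbuffer really has the requested size and which color buffer glReadPixels reads from. A pbuffer is single-buffered, but EGL nominally calls its one buffer a back buffer, so it is not obvious whether the context comes up with GL_FRONT or GL_BACK as the read buffer:

EGLint w, h;
eglQuerySurface(eglDpy, eglSurf, EGL_WIDTH, &w);
eglQuerySurface(eglDpy, eglSurf, EGL_HEIGHT, &h);
printf("pbuffer size: %dx%d\n", w, h);

GLint readBuffer;
glGetIntegerv(GL_READ_BUFFER, &readBuffer);
printf("read buffer: 0x%x (GL_FRONT = 0x%x, GL_BACK = 0x%x)\n",
       readBuffer, GL_FRONT, GL_BACK);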

With USE_EGL 0 (the GLX version) I get the expected result:

11
. . . . . . . . . . . . . . . .
. . . . . . . . . . . . . . . .
. . . . . . . . . . . . . . . .
. . . # # # # # # # # . . . . .
. . . # # # # # # # . . . . . .
. . . # # # # # # . . . . . . .
. . . # # # # # . . . . . . . .
. . . # # # # . . . . . . . . .
. . . # # # . . . . . . . . . .
. . . # # . . . . . . . . . . .
. . . # . . . . . . . . . . . .
. . . . . . . . . . . . . . . .
. . . . . . . . . . . . . . . .
. . . . . . . . . . . . . . . .
. . . . . . . . . . . . . . . .
. . . . . . . . . . . . . . . .

while with USE_EGL 1 I get:

1111111
. . . . . . . . . . . . . . . .
. . . . . . . . . . . . . . . .
. . . . . . . . . . . . . . . .
. . . . . . . . . . . . . . . .
. . . . . . . . . . . . . . . .
. . . . . . . . . . . . . . . .
. . . . . . . . . . . . . . . .
. . . . . . . . . . . . . . . .
. . . . . . . . . . . . . . . .
. . . . . . . . . . . . . . . .
. . . . . . . . . . . . . . . .
. . . . . . . . . . . . . . . .
. . . . . . . . . . . . . . . .
. . . . . . . . . . . . . . . .
. . . . . . . . . . . . . . . .
. . . . . . . . . . . . . . . .

My system is Devuan GNU/Linux with an Intel GPU; the OpenGL version string is 2.1 Mesa 18.3.6. Is there something I'm doing wrong, or could this be a driver bug?
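
In case it matters which driver the EGL context actually ends up on (in principle it could land on a different renderer than the GLX path), something like this right after eglMakeCurrent would show it:

printf("GL_VENDOR:   %s\n", (const char *)glGetString(GL_VENDOR));
printf("GL_RENDERER: %s\n", (const char *)glGetString(GL_RENDERER));
printf("GL_VERSION:  %s\n", (const char *)glGetString(GL_VERSION));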
