EGL_BAD_CONFIG using OpenGL ES 2 on Samsung Galaxy Tab 2 (works on Sony Xperia)


guich

Oct 16, 2012, 12:58:05 PM
to android-ndk
Hi,

I have code that works fine on a Sony Xperia S with Android 2.3.7. I
just installed it on my Galaxy Tab 2 and it fails: the error is
EGL_BAD_CONFIG when calling eglCreateContext.

This is the code:

bool initGLES(ScreenSurface screen)
{
   int32 i;
   const EGLint attribs[] =
   {
      EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
      EGL_BLUE_SIZE, 8,
      EGL_GREEN_SIZE, 8,
      EGL_RED_SIZE, 8,
      EGL_ALPHA_SIZE, 8,
      EGL_NONE
   };
   EGLint context_attribs[] = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE };
   EGLDisplay display;
   EGLConfig config;
   EGLint numConfigs;
   EGLint format;
   EGLSurface surface;
   EGLContext context;
   EGLint width;
   EGLint height;

   if ((display = eglGetDisplay(EGL_DEFAULT_DISPLAY)) == EGL_NO_DISPLAY)
      {debug("eglGetDisplay() returned error %d", eglGetError()); return false;}
   if (!eglInitialize(display, 0, 0))
      {debug("eglInitialize() returned error %d", eglGetError()); return false;}
   if (!eglChooseConfig(display, attribs, &config, 1, &numConfigs))
      {debug("eglChooseConfig() returned error %d", eglGetError()); destroyEGL(); return false;}
   if (!eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &format))
      {debug("eglGetConfigAttrib() returned error %d", eglGetError()); destroyEGL(); return false;}

   int ret = ANativeWindow_setBuffersGeometry(window, 0, 0, format);
   debug("format: %d, display: %d, config: %d, nconf: %d, ret: %d", format, display, config, numConfigs, ret);

   if (!(surface = eglCreateWindowSurface(display, config, window, 0)))
      {debug("eglCreateWindowSurface() returned error %d", eglGetError()); destroyEGL(); return false;}
   debug("display: %d, window: %d", display, window);
   if (!(context = eglCreateContext(display, config, EGL_NO_CONTEXT, context_attribs)))
      {debug("eglCreateContext() returned error %d", eglGetError()); destroyEGL(); return false;}
   if (!eglMakeCurrent(display, surface, surface, context))
      {debug("eglMakeCurrent() returned error %d", eglGetError()); destroyEGL(); return false;}
   if (!eglQuerySurface(display, surface, EGL_WIDTH, &width) ||
       !eglQuerySurface(display, surface, EGL_HEIGHT, &height))
      {debug("eglQuerySurface() returned error %d", eglGetError()); destroyEGL(); return false;}

   _display = display;
   _surface = surface;
   _context = context;
   return setupGL(width, height);
}

This is the output on the Xperia:
format: 1, display: 1, config: 16, nconf: 1, ret: 0
display: 1, window: 2846880

And this is on the Tab 2:
format: 5, display: 1, config: 1, nconf: 1, ret: 0
display: 1, window: 16193944
eglCreateContext() returned error 12293
eglMakeCurrent:573 error 3008 (EGL_BAD_DISPLAY)

Error 12293 is EGL_BAD_CONFIG.

I know that OpenGL ES 2 works fine from Java on the Galaxy Tab 2
because I could run some Java samples on it.

Any help is greatly appreciated.

guich

Fredrik Ehnbom

Oct 16, 2012, 1:04:50 PM
to andro...@googlegroups.com
You're missing EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT in your attribs array.
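I.e. something like this (a sketch, keeping your other values as they are):

   const EGLint attribs[] =
   {
      EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
      EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
      EGL_BLUE_SIZE, 8,
      EGL_GREEN_SIZE, 8,
      EGL_RED_SIZE, 8,
      EGL_ALPHA_SIZE, 8,
      EGL_NONE
   };

Without EGL_RENDERABLE_TYPE, eglChooseConfig may hand back a config that only supports ES 1.x, and eglCreateContext with EGL_CONTEXT_CLIENT_VERSION 2 against such a config fails with EGL_BAD_CONFIG on some devices, which matches what you see on the Tab 2.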

/f

guich

Oct 16, 2012, 1:12:14 PM
to android-ndk
THANKS!!!!

guich

Oct 16, 2012, 1:51:36 PM
to android-ndk
I'm now getting a strange problem. I have a GL program that draws a
point. It works on the Xperia but not on the Galaxy. This is the code:

#define LRP_VERTEX_CODE \
   "attribute vec4 a_Position;" \
   "uniform mat4 projectionMatrix;" \
   "void main() {gl_Position = a_Position*projectionMatrix;}"

#define LRP_FRAGMENT_CODE \
   "precision mediump float;" \
   "uniform vec4 a_Color;" \
   "void main() {gl_FragColor = a_Color;}"

static GLuint lrpProgram;
static GLuint lrpPosition;
static GLuint lrpColor;

void initLineRectPoint()
{
   lrpProgram = createProgram(LRP_VERTEX_CODE, LRP_FRAGMENT_CODE);
   setCurrentProgram(lrpProgram);
   lrpColor = glGetUniformLocation(lrpProgram, "a_Color");
   lrpPosition = glGetAttribLocation(lrpProgram, "a_Position"); // handle to the vertex shader's a_Position attribute
   glEnableVertexAttribArray(lrpPosition); // since this is the only attribute used, keep it enabled all the time
}

void glDrawPixel(int32 x, int32 y, int32 rgb)
{
   GLfloat* coords = glcoords;
   PixelConv pc;
   pc.pixel = rgb;
   coords[0] = x;
   coords[1] = y;

   setCurrentProgram(lrpProgram);
   glUniform4f(lrpColor, f255[pc.r], f255[pc.g], f255[pc.b], 1); // set the draw color (not cached)
   glVertexAttribPointer(lrpPosition, COORDS_PER_VERTEX, GL_FLOAT,
      GL_FALSE, COORDS_PER_VERTEX * sizeof(float), coords); // prepare the point coordinate data
   glDrawArrays(GL_POINTS, 0, 1);
}

I already checked the output of each function call in the functions
above, and none of them returns an error.
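(The kind of check I mean, roughly, after each call; a sketch, not the exact code:)

   GLenum err = glGetError();
   if (err != GL_NO_ERROR)
      debug("GL error 0x%x after glDrawArrays", err);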

Again, the code works fine on the Xperia (2.3.7) but not on the
Galaxy Tab 2 (4.0.4).

thanks

guich

Fabien R

Oct 17, 2012, 3:46:48 AM
to andro...@googlegroups.com
On 16/10/2012 19:51, guich wrote:
> I'm now getting a strange problem. I have a GL program that draws a
> point. It works on the Xperia but not on the Galaxy. This is the code:
>
> #define LRP_VERTEX_CODE \
>    "attribute vec4 a_Position;" \
>    "uniform mat4 projectionMatrix;" \
>    "void main() {gl_Position = a_Position*projectionMatrix;}"
>
You'd better write:

gl_Position = projectionMatrix * a_Position;

> #define LRP_FRAGMENT_CODE \
>    "precision mediump float;" \
>    "uniform vec4 a_Color;" \
>    "void main() {gl_FragColor = a_Color;}"
>
> static GLuint lrpProgram;
> static GLuint lrpPosition;
> static GLuint lrpColor;
>
> void initLineRectPoint()
> {
>    lrpProgram = createProgram(LRP_VERTEX_CODE, LRP_FRAGMENT_CODE);
>    setCurrentProgram(lrpProgram);
>    lrpColor = glGetUniformLocation(lrpProgram, "a_Color");
>    lrpPosition = glGetAttribLocation(lrpProgram, "a_Position");
>
Where is the call to glGetUniformLocation() for projectionMatrix?

>    glEnableVertexAttribArray(lrpPosition);
> }
>
> void glDrawPixel(int32 x, int32 y, int32 rgb)
> {
>    GLfloat* coords = glcoords;
>    PixelConv pc;
>    pc.pixel = rgb;
>    coords[0] = x;
>    coords[1] = y;
>
>    setCurrentProgram(lrpProgram);
>    glUniform4f(lrpColor, f255[pc.r], f255[pc.g], f255[pc.b], 1);
>    glVertexAttribPointer(lrpPosition, COORDS_PER_VERTEX, GL_FLOAT,
>       GL_FALSE, COORDS_PER_VERTEX * sizeof(float), coords);
>    glDrawArrays(GL_POINTS, 0, 1);
> }
>
You don't send projectionMatrix to the vertex shader.
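For example, roughly (a sketch; "lrpMatrix" is just an illustrative name, and the matrix values have to come from wherever you compute your projection):

   // once, after linking the program
   GLint lrpMatrix = glGetUniformLocation(lrpProgram, "projectionMatrix");

   // whenever the projection changes, with the program current
   GLfloat projMatrix[16] = { 1,0,0,0,  0,1,0,0,  0,0,1,0,  0,0,0,1 }; // replace with your real projection
   setCurrentProgram(lrpProgram);
   glUniformMatrix4fv(lrpMatrix, 1, GL_FALSE, projMatrix);

If the uniform is never set it stays at its default of all zeros, so gl_Position collapses to (0,0,0,0); whether anything still appears in that case is up to the driver, which could explain the Xperia/Galaxy difference.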

You seem to be a newbie in OpenGL.
Read some tutorials:
http://en.wikibooks.org/wiki/OpenGL_Programming
http://www.arcsynthesis.org/gltut/

-
Fabien
