osx - C++ + SDL + OpenGL 3.3 doesn't work on Mac OS X?


I'm starting to develop with OpenGL 3 (I'm used to OpenGL 1, so it's quite a change), and I'm using SDL as my windowing/image/sound/event framework. I have the following code (taken from opengl.org and modified):

#include <stdio.h>
#include <stdlib.h>
/* If using gl3.h */
/* Ensure we are using OpenGL's core profile */
#define GL3_PROTOTYPES 1
#include <OpenGL/gl3.h>

#include <SDL2/SDL.h>
#define PROGRAM_NAME "Tutorial1"

/* A simple function that prints a message, the error code returned by SDL,
 * and quits the application */
void sdldie(const char *msg)
{
    printf("%s: %s\n", msg, SDL_GetError());
    SDL_Quit();
    exit(1);
}

void checkSDLError(int line = -1)
{
#ifndef NDEBUG
    const char *error = SDL_GetError();
    if (*error != '\0')
    {
        printf("SDL Error: %s\n", error);
        if (line != -1)
            printf(" + line: %i\n", line);
        SDL_ClearError();
    }
#endif
}

void render(SDL_Window *win)
{
    glClearColor(1.0, 0.0, 0.0, 1.0);
    glClear(GL_COLOR_BUFFER_BIT);
    SDL_GL_SwapWindow(win);
}

/* Our program's entry point */
int main(int argc, char *argv[])
{
    SDL_Window *mainwindow;    /* Our window handle */
    SDL_GLContext maincontext; /* Our opengl context handle */

    if (SDL_Init(SDL_INIT_VIDEO) < 0) /* Initialize SDL's video subsystem */
        sdldie("Unable to initialize SDL"); /* Or die on error */

    /* Request an OpenGL 3.2 context.
     * SDL doesn't have the ability to choose which profile at this time of writing,
     * but it should default to the core profile */
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 2);

    /* Turn on double buffering with a 24bit Z buffer.
     * You may need to change this to 16 or 32 for your system */
    SDL_GL_SetAttribute(SDL_GL_DOUBLEBUFFER, 1);
    SDL_GL_SetAttribute(SDL_GL_DEPTH_SIZE, 24);

    /* Create our window centered at 512x512 resolution */
    mainwindow = SDL_CreateWindow(PROGRAM_NAME, SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED,
                                  512, 512, SDL_WINDOW_OPENGL | SDL_WINDOW_SHOWN);
    if (!mainwindow) /* Die if creation failed */
        sdldie("Unable to create window");

    checkSDLError(__LINE__);

    /* Create our opengl context and attach it to our window */
    maincontext = SDL_GL_CreateContext(mainwindow);
    checkSDLError(__LINE__);

    /* This makes our buffer swap synchronized with the monitor's vertical refresh */
    SDL_GL_SetSwapInterval(1);

    render(mainwindow);
    SDL_Delay(2000);

    /* Delete our opengl context, destroy our window, and shutdown SDL */
    SDL_GL_DeleteContext(maincontext);
    SDL_DestroyWindow(mainwindow);
    SDL_Quit();

    return 0;
}
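(As an aside: the comment above about SDL not being able to choose a profile is outdated. SDL2 exposes SDL_GL_CONTEXT_PROFILE_MASK, and on OS X a core profile must be requested for any context of version 3.2 or higher. A minimal sketch of the explicit request, assuming an SDL2 release that has this attribute:

/* Explicitly request a core profile; OS X will not create a 3.2+
 * context unless the core profile is selected. */
SDL_GL_SetAttribute(SDL_GL_CONTEXT_PROFILE_MASK, SDL_GL_CONTEXT_PROFILE_CORE);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 3);
SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 2);
)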

and it works well. But if I change this line:

SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 2);

to this:

SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 3);

so that it uses OpenGL 3.3 instead of 3.2, I get an EXC_BAD_ACCESS error on every single OpenGL call. But I want to use OpenGL 3.3. My computer: MacBook Pro Retina (late 2012), Mac OS X Mountain Lion, Intel i7 2.7 GHz, Intel HD 4000 / NVIDIA GeForce G

Does anyone know whether this is an OS X problem, an SDL problem, or something wrong in my code? (I know the SDL code may not be the best; it's mostly SDL 1.2-style code.)

That's because OS X only has OpenGL capabilities up to 3.2.

If you had checked SDL_GetError(), it would have told you what went wrong.
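In this code that means testing the result of SDL_GL_CreateContext before making any GL calls. A sketch of what that check might look like, reusing the sdldie() helper from the question:

maincontext = SDL_GL_CreateContext(mainwindow);
if (!maincontext) {
    /* The 3.3 request fails on Mountain Lion, so the context is null and
     * every later GL call crashes; failing here reports the SDL error instead. */
    sdldie("Unable to create OpenGL context");
}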

Edit: OS X Mavericks (10.9) supports OpenGL up to 4.1.
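Either way, it's worth verifying which version the driver actually handed you once the context exists. A minimal sketch, to be placed after SDL_GL_CreateContext succeeds:

/* Query the created context's actual GL and GLSL versions. */
printf("OpenGL version: %s\n", (const char *)glGetString(GL_VERSION));
printf("GLSL version:   %s\n", (const char *)glGetString(GL_SHADING_LANGUAGE_VERSION));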

