#define WIN32_LEAN_AND_MEAN
#include "windows.h"
#include "gl/gl.h"
#pragma comment(lib, "opengl32.lib")
#include "SDL.h"
#ifndef NDEBUG
#pragma comment(lib, "SDLd.lib")
#pragma comment(lib, "SDLmaind.lib")
#else
#pragma comment(lib, "SDL.lib")
#pragma comment(lib, "SDLmainnostdio.lib")
#endif
bool g_exit=false;
extern "C" int SDL_main(int argc, char* argv[])
{
int delay_ms = 10;
if(SDL_Init(SDL_INIT_VIDEO)<0)
return -1;
const SDL_VideoInfo* info = SDL_GetVideoInfo();
if(!info)
return -1;
int flags=0;
flags |= SDL_OPENGL;
//flags |= SDL_GL_DOUBLEBUFFER;
//flags |= SDL_HWPALETTE;
//flags |= SDL_RESIZABLE;
//flags |= (info->hw_available)?SDL_HWSURFACE:SDL_SWSURFACE;
//flags |= (info->blit_hw)?SDL_HWACCEL:0;
int bpp = info->vfmt->BitsPerPixel;
SDL_Surface* context = SDL_SetVideoMode(640, 480, bpp, flags);
SDL_WM_SetCaption("Access Violation Test", "?");
if(true)
{
while(!g_exit)
{
//glClear(0);
//SDL_GL_SwapBuffers();
SDL_Event event;
while(SDL_PollEvent(&event))
{
switch(event.type)
{
case SDL_KEYDOWN:
break;
case SDL_VIDEORESIZE:
break;
case SDL_QUIT:
g_exit=true;
break;
}
}
}
}
SDL_FreeSurface(context); context=0;
SDL_Quit();
return 0;
}
SDL Access Violation on exit
I just noticed that whenever my SDL app exits, there's an avalanche of access violations. I trimmed the app down to the core SDL code, and it still occurs. Does this happen to anyone else? Did I mess up building the DLL? I double-checked that the run-time libraries (multithreaded DLL) are the same.
- The trade-off between price and quality does not exist in Japan. Rather, the idea that high quality brings on cost reduction is widely accepted.-- Tajima & Matsubara
Compiled, linked and ran your code against my build of the SDL 1.2.7 and it doesn't cause any access violations for me.
October 27, 2004 04:34 AM
From the SDL docs: "The surface returned (by SDL_SetVideoMode) is freed by SDL_Quit() and should not be freed by the caller".
Oh, and instead of using SDL_GetVideoInfo to get the current bits per pixel you could just pass '0' to SDL_SetVideoMode.
Oh, and instead of using SDL_GetVideoInfo to get the current bits per pixel you could just pass '0' to SDL_SetVideoMode.
BTW: Instead of this:
Do this:
On Windows, that header does the same thing as what you're doing.
But it'll do the right thing on other OSes too, so your code is more portable. (And cleaner, in my opinion)
#define WIN32_LEAN_AND_MEAN#include "windows.h"#include "gl/gl.h"#pragma comment(lib, "opengl32.lib")#include "SDL.h"
Do this:
#include "SDL.h"#include "SDL_opengl.h"
On Windows, that header does the same thing as what you're doing.
But it'll do the right thing on other OSes too, so your code is more portable. (And cleaner, in my opinion)
This topic is closed to new replies.
Advertisement
Popular Topics
Advertisement