/* Demo.c — Android native-activity sample (packaged with AndroidManifest.xml). */
#include#include#include#include#include#include#include#defineLOGI(...) ((void)__Android_log_print(ANDROID_LOG_INFO, "native-activity", __VA_ARGS__))#defineLOGW(...) ((void)__Android_log_print(ANDROID_LOG_WARN, "native-activity", __VA_ARGS__))/**
* Our saved state data.*/structsaved_state {floatangle;
int32_t x;
int32_t y;
};/**
* Shared state for our app.*/structengine {structAndroid_app*app;
ASensorManager*sensorManager;constASensor*accelerometerSensor;
ASensorEventQueue*sensorEventQueue;intanimating;
EGLDisplay display;
EGLSurface surface;
EGLContext context;
int32_t width;
int32_t height;structsaved_state state;
};/**
* Initialize an EGL context for the current display.*/staticintengine_init_display(structengine*engine) {//initialize OpenGL ES and EGL/** Here specify the attributes of the desired configuration.
* Below, we select an EGLConfig with at least 8 bits per color
* component compatible with on-screen windows*/constEGLint attribs[]={
EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
EGL_BLUE_SIZE,8,
EGL_GREEN_SIZE,8,
EGL_RED_SIZE,8,
EGL_NONE
};
EGLint w, h, dummy, format;
EGLint numConfigs;
EGLConfig config;
EGLSurface surface;
EGLContext context;
EGLDisplay display=eglGetDisplay(EGL_DEFAULT_DISPLAY);
eglInitialize(display,0,0);/*Here, the application chooses the configuration it desires. In this
* sample, we have a very simplified selection process, where we pick
* the first EGLConfig that matches our criteria*/eglChooseConfig(display, attribs,&config,1,&numConfigs);/*EGL_NATIVE_VISUAL_ID is an attribute of the EGLConfig that is
* guaranteed to be accepted by ANativeWindow_setBuffersGeometry().
* As soon as we picked a EGLConfig, we can safely reconfigure the
* ANativeWindow buffers to match, using EGL_NATIVE_VISUAL_ID.*/eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID,&format);
ANativeWindow_setBuffersGeometry(engine->app->window,0,0, format);
surface=eglCreateWindowSurface(display, config, engine->app->window, NULL);
context=eglCreateContext(display, config, NULL, NULL);if(eglMakeCurrent(display, surface, surface, context)==EGL_FALSE) {
LOGW("Unable to eglMakeCurrent");return-1;
}
eglQuerySurface(display, surface, EGL_WIDTH,&w);
eglQuerySurface(display, surface, EGL_HEIGHT,&h);
engine->display=display;
engine->context=context;
engine->surface=surface;
engine->width=w;
engine->height=h;
engine->state.angle=0;//Initialize GL state.glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_FASTEST);
glEnable(GL_CULL_FACE);
glShadeModel(GL_SMOOTH);
glDisable(GL_DEPTH_TEST);return0;
}/**
* Just the current frame in the display.*/staticvoidengine_draw_frame(structengine*engine) {if(engine->display==NULL) {//No display.return;
}//Just fill the screen with a color.glClearColor(((float)engine->state.x)/engine->width, engine->state.angle,
((float)engine->state.y)/engine->height,1);
glClear(GL_COLOR_BUFFER_BIT);
eglSwapBuffers(engine->display, engine->surface);
}/**
* Tear down the EGL context currently associated with the display.*/staticvoidengine_term_display(structengine*engine) {if(engine->display!=EGL_NO_DISPLAY) {
eglMakeCurrent(engine->display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);if(engine->context!=EGL_NO_CONTEXT) {
eglDestroyContext(engine->display, engine->context);
}if(engine->surface!=EGL_NO_SURFACE) {
eglDestroySurface(engine->display, engine->surface);
}
eglTerminate(engine->display);
}
engine->animating=0;
engine->display=EGL_NO_DISPLAY;
engine->context=EGL_NO_CONTEXT;
engine->surface=EGL_NO_SURFACE;
}/**
* Process the next input event.*/staticint32_t engine_handle_input(structAndroid_app*app, AInputEvent*event) {structengine*engine=(structengine*)app->userData;if(AInputEvent_getType(event)==AINPUT_EVENT_TYPE_MOTION) {
engine->animating=1;
engine->state.x=AMotionEvent_getX(event,0);
engine->state.y=AMotionEvent_getY(event,0);return1;
}return0;
}/**
* Process the next main command.*/staticvoidengine_handle_cmd(structAndroid_app*app, int32_t cmd) {structengine*engine=(structengine*)app->userData;switch(cmd) {caseAPP_CMD_SAVE_STATE://The system has asked us to save our current state. Do so.engine->app->savedState=malloc(sizeof(structsaved_state));*((structsaved_state*)engine->app->savedState)=engine->state;
engine->app->savedStateSize=sizeof(structsaved_state);break;caseAPP_CMD_INIT_WINDOW://The window is being shown, get it ready.if(engine->app->window!=NULL) {
engine_init_display(engine);
engine_draw_frame(engine);
}break;caseAPP_CMD_TERM_WINDOW://The window is being hidden or closed, clean it up.engine_term_display(engine);break;caseAPP_CMD_GAINED_FOCUS://When our app gains focus, we start monitoring the accelerometer.if(engine->accelerometerSensor!=NULL) {
ASensorEventQueue_enableSensor(engine->sensorEventQueue,
engine->accelerometerSensor);//We'd like to get 60 events per second (in us).ASensorEventQueue_setEventRate(engine->sensorEventQueue,
engine->accelerometerSensor, (1000L/60)*1000);
}break;caseAPP_CMD_LOST_FOCUS://When our app loses focus, we stop monitoring the accelerometer.//This is to avoid consuming battery while not being used.if(engine->accelerometerSensor!=NULL) {
ASensorEventQueue_disableSensor(engine->sensorEventQueue,
engine->accelerometerSensor);
}//Also stop animating.engine->animating=0;
engine_draw_frame(engine);break;
}
}/**
* This is the main entry point of a native application that is using
* Android_native_app_glue. It runs in its own thread, with its own
* event loop for receiving input events and doing other things.*/voidAndroid_main(structandroid_app*state) {structengine engine;//Make sure glue isn't stripped.app_dummy();
memset(&engine,0,sizeof(engine));
state->userData=&engine;
state->onAppCmd=engine_handle_cmd;
state->onInputEvent=engine_handle_input;
engine.app=state;//Prepare to monitor accelerometerengine.sensorManager=ASensorManager_getInstance();
engine.accelerometerSensor=ASensorManager_getDefaultSensor(engine.sensorManager,
ASENSOR_TYPE_ACCELEROMETER);
engine.sensorEventQueue=ASensorManager_createEventQueue(engine.sensorManager,
state->looper, LOOPER_ID_USER, NULL, NULL);if(state->savedState!=NULL) {//We are starting with a previous saved state; restore from it.engine.state=*(structsaved_state*)state->savedState;
}//loop waiting for stuff to do.while(1) {//Read all pending events.intident;intevents;structAndroid_poll_source*source;//If not animating, we will block forever waiting for events.//If animating, we loop until all events are read, then continue//to draw the next frame of animation.while((ident=ALooper_pollAll(engine.animating?0:-1, NULL,&events,
(void**)&source))>=0) {//Process this event.if(source!=NULL) {
source->process(state, source);
}//If a sensor has data, process it now.if(ident==LOOPER_ID_USER) {if(engine.accelerometerSensor!=NULL) {
ASensorEventevent;while(ASensorEventQueue_getEvents(engine.sensorEventQueue,&event,1)>0) {
LOGI("accelerometer: x=%f y=%f z=%f",event.acceleration.x,event.acceleration.y,event.acceleration.z);
}
}
}//Check if we are exiting.if(state->destroyRequested!=0) {
engine_term_display(&engine);return;
}
}if(engine.animating) {//Done with events; draw next animation frame.engine.state.angle+=.01f;if(engine.state.angle>1) {
engine.state.angle=0;
}//Drawing is throttled to the screen update rate, so there//is no need to do timing here.engine_draw_frame(&engine);
}
}
}