Android NDK 更新:不再需要 Java(NativeActivity 纯 C 示例)

涉及文件:AndroidManifest.xml、Demo.c

#include#include#include#include#include#include#include#defineLOGI(...) ((void)__Android_log_print(ANDROID_LOG_INFO, "native-activity", __VA_ARGS__))#defineLOGW(...) ((void)__Android_log_print(ANDROID_LOG_WARN, "native-activity", __VA_ARGS__))/**

/**
 * Our saved state data.
 *
 * Persisted across activity restarts via APP_CMD_SAVE_STATE: the current
 * animation angle and the last touch position.
 */
struct saved_state {
    float angle;   /* animation phase in [0, 1] */
    int32_t x;     /* last touch x, pixels */
    int32_t y;     /* last touch y, pixels */
};

* Shared state for our app.*/structengine {structAndroid_app*app;

ASensorManager*sensorManager;constASensor*accelerometerSensor;

ASensorEventQueue*sensorEventQueue;intanimating;

EGLDisplay display;

EGLSurface surface;

EGLContext context;

int32_t width;

int32_t height;structsaved_state state;

};/**

* Initialize an EGL context for the current display.*/staticintengine_init_display(structengine*engine) {//initialize OpenGL ES and EGL/** Here specify the attributes of the desired configuration.

* Below, we select an EGLConfig with at least 8 bits per color

* component compatible with on-screen windows*/constEGLint attribs[]={

EGL_SURFACE_TYPE, EGL_WINDOW_BIT,

EGL_BLUE_SIZE,8,

EGL_GREEN_SIZE,8,

EGL_RED_SIZE,8,

EGL_NONE

};

EGLint w, h, dummy, format;

EGLint numConfigs;

EGLConfig config;

EGLSurface surface;

EGLContext context;

EGLDisplay display=eglGetDisplay(EGL_DEFAULT_DISPLAY);

eglInitialize(display,0,0);/*Here, the application chooses the configuration it desires. In this

* sample, we have a very simplified selection process, where we pick

* the first EGLConfig that matches our criteria*/eglChooseConfig(display, attribs,&config,1,&numConfigs);/*EGL_NATIVE_VISUAL_ID is an attribute of the EGLConfig that is

* guaranteed to be accepted by ANativeWindow_setBuffersGeometry().

* As soon as we picked a EGLConfig, we can safely reconfigure the

* ANativeWindow buffers to match, using EGL_NATIVE_VISUAL_ID.*/eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID,&format);

ANativeWindow_setBuffersGeometry(engine->app->window,0,0, format);

surface=eglCreateWindowSurface(display, config, engine->app->window, NULL);

context=eglCreateContext(display, config, NULL, NULL);if(eglMakeCurrent(display, surface, surface, context)==EGL_FALSE) {

LOGW("Unable to eglMakeCurrent");return-1;

}

eglQuerySurface(display, surface, EGL_WIDTH,&w);

eglQuerySurface(display, surface, EGL_HEIGHT,&h);

engine->display=display;

engine->context=context;

engine->surface=surface;

engine->width=w;

engine->height=h;

engine->state.angle=0;//Initialize GL state.glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_FASTEST);

glEnable(GL_CULL_FACE);

glShadeModel(GL_SMOOTH);

glDisable(GL_DEPTH_TEST);return0;

}/**

* Just the current frame in the display.*/staticvoidengine_draw_frame(structengine*engine) {if(engine->display==NULL) {//No display.return;

}//Just fill the screen with a color.glClearColor(((float)engine->state.x)/engine->width, engine->state.angle,

((float)engine->state.y)/engine->height,1);

glClear(GL_COLOR_BUFFER_BIT);

eglSwapBuffers(engine->display, engine->surface);

}/**

* Tear down the EGL context currently associated with the display.*/staticvoidengine_term_display(structengine*engine) {if(engine->display!=EGL_NO_DISPLAY) {

eglMakeCurrent(engine->display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);if(engine->context!=EGL_NO_CONTEXT) {

eglDestroyContext(engine->display, engine->context);

}if(engine->surface!=EGL_NO_SURFACE) {

eglDestroySurface(engine->display, engine->surface);

}

eglTerminate(engine->display);

}

engine->animating=0;

engine->display=EGL_NO_DISPLAY;

engine->context=EGL_NO_CONTEXT;

engine->surface=EGL_NO_SURFACE;

}/**

* Process the next input event.*/staticint32_t engine_handle_input(structAndroid_app*app, AInputEvent*event) {structengine*engine=(structengine*)app->userData;if(AInputEvent_getType(event)==AINPUT_EVENT_TYPE_MOTION) {

engine->animating=1;

engine->state.x=AMotionEvent_getX(event,0);

engine->state.y=AMotionEvent_getY(event,0);return1;

}return0;

}/**

* Process the next main command.*/staticvoidengine_handle_cmd(structAndroid_app*app, int32_t cmd) {structengine*engine=(structengine*)app->userData;switch(cmd) {caseAPP_CMD_SAVE_STATE://The system has asked us to save our current state. Do so.engine->app->savedState=malloc(sizeof(structsaved_state));*((structsaved_state*)engine->app->savedState)=engine->state;

engine->app->savedStateSize=sizeof(structsaved_state);break;caseAPP_CMD_INIT_WINDOW://The window is being shown, get it ready.if(engine->app->window!=NULL) {

engine_init_display(engine);

engine_draw_frame(engine);

}break;caseAPP_CMD_TERM_WINDOW://The window is being hidden or closed, clean it up.engine_term_display(engine);break;caseAPP_CMD_GAINED_FOCUS://When our app gains focus, we start monitoring the accelerometer.if(engine->accelerometerSensor!=NULL) {

ASensorEventQueue_enableSensor(engine->sensorEventQueue,

engine->accelerometerSensor);//We'd like to get 60 events per second (in us).ASensorEventQueue_setEventRate(engine->sensorEventQueue,

engine->accelerometerSensor, (1000L/60)*1000);

}break;caseAPP_CMD_LOST_FOCUS://When our app loses focus, we stop monitoring the accelerometer.//This is to avoid consuming battery while not being used.if(engine->accelerometerSensor!=NULL) {

ASensorEventQueue_disableSensor(engine->sensorEventQueue,

engine->accelerometerSensor);

}//Also stop animating.engine->animating=0;

engine_draw_frame(engine);break;

}

}/**

* This is the main entry point of a native application that is using

* Android_native_app_glue. It runs in its own thread, with its own

* event loop for receiving input events and doing other things.*/voidAndroid_main(structandroid_app*state) {structengine engine;//Make sure glue isn't stripped.app_dummy();

memset(&engine,0,sizeof(engine));

state->userData=&engine;

state->onAppCmd=engine_handle_cmd;

state->onInputEvent=engine_handle_input;

engine.app=state;//Prepare to monitor accelerometerengine.sensorManager=ASensorManager_getInstance();

engine.accelerometerSensor=ASensorManager_getDefaultSensor(engine.sensorManager,

ASENSOR_TYPE_ACCELEROMETER);

engine.sensorEventQueue=ASensorManager_createEventQueue(engine.sensorManager,

state->looper, LOOPER_ID_USER, NULL, NULL);if(state->savedState!=NULL) {//We are starting with a previous saved state; restore from it.engine.state=*(structsaved_state*)state->savedState;

}//loop waiting for stuff to do.while(1) {//Read all pending events.intident;intevents;structAndroid_poll_source*source;//If not animating, we will block forever waiting for events.//If animating, we loop until all events are read, then continue//to draw the next frame of animation.while((ident=ALooper_pollAll(engine.animating?0:-1, NULL,&events,

(void**)&source))>=0) {//Process this event.if(source!=NULL) {

source->process(state, source);

}//If a sensor has data, process it now.if(ident==LOOPER_ID_USER) {if(engine.accelerometerSensor!=NULL) {

ASensorEventevent;while(ASensorEventQueue_getEvents(engine.sensorEventQueue,&event,1)>0) {

LOGI("accelerometer: x=%f y=%f z=%f",event.acceleration.x,event.acceleration.y,event.acceleration.z);

}

}

}//Check if we are exiting.if(state->destroyRequested!=0) {

engine_term_display(&engine);return;

}

}if(engine.animating) {//Done with events; draw next animation frame.engine.state.angle+=.01f;if(engine.state.angle>1) {

engine.state.angle=0;

}//Drawing is throttled to the screen update rate, so there//is no need to do timing here.engine_draw_frame(&engine);

}

}

}

Android NDK中,我们可以使用JNI(Java Native Interface)来实现C/C++代码和Java代码的相互调用。下面是一个简单的例子,展示了如何在NDK中调用Java层的方法。

首先,在Java层中创建一个类,并在其中声明一个需要被C/C++回调的方法:

```java
public class MyCallback {
    public void processData(byte[] data) {
        // 处理数据的逻辑
    }
}
```

然后,在C/C++代码中,我们需要使用JNI来获取Java层的MyCallback对象,并调用其processData方法。具体步骤如下:

1. 首先,需要在C/C++代码中引入JNI头文件:

```c++
#include <jni.h>
```

2. 获取Java层的MyCallback对象:

```c++
JNIEnv* env;
JavaVM* jvm;
// 获取当前线程的JNIEnv指针
jvm->AttachCurrentThread(&env, NULL);
// 获取MyCallback类
jclass myCallbackClass = env->FindClass("com/example/MyCallback");
// 获取MyCallback对象
jmethodID constructor = env->GetMethodID(myCallbackClass, "<init>", "()V");
jobject myCallbackObj = env->NewObject(myCallbackClass, constructor);
```

3. 调用MyCallback对象的processData方法:

```c++
// 获取processData方法的ID
jmethodID processDataMethod = env->GetMethodID(myCallbackClass, "processData", "([B)V");
// 构造byte[]对象
jbyteArray data = env->NewByteArray(size);
env->SetByteArrayRegion(data, 0, size, (jbyte*)buf);
// 调用processData方法
env->CallVoidMethod(myCallbackObj, processDataMethod, data);
```

最后,记得在C/C++代码中释放JNI相关资源。注意:必须先通过env释放局部引用,再调用DetachCurrentThread();一旦线程与JVM分离,env指针即失效,不能再使用:

```c++
env->DeleteLocalRef(data);
env->DeleteLocalRef(myCallbackObj);
env->DeleteLocalRef(myCallbackClass);
jvm->DetachCurrentThread();
```

以上就是在NDK中实现线程回调Java层方法的基本步骤。需要注意的是,在调用Java层方法时,需要使用JNIEnv指针。此外,如果在多线程环境下操作JNI,需要使用jvm->AttachCurrentThread()方法获取当前线程的JNIEnv指针。
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值