Android NDK: Handling User Interaction Events
In the android_main(struct android_app* state) function, register the input event handler:

    state->onInputEvent = &handleInput;  // set the input event handler, e.g. for touch events
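For context, here is a minimal sketch of where this registration sits inside android_main. The handler name handleInput matches the line above; in the complete example further below, the same role is played by engine_handle_input.

    #include <android_native_app_glue.h>

    // Sketch: registering the input callback in android_main.
    static int32_t handleInput(struct android_app* app, AInputEvent* event);  // forward declaration

    void android_main(struct android_app* state) {
        state->onInputEvent = &handleInput;   // called by the glue for every input event
        // ... initialize EGL, run the event loop, render frames ...
    }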
Function reference (illustrated in the sketch below):

AMotionEvent_getX(): returns the absolute x coordinate, with the origin at the top-left corner of the screen.
AMotionEvent_getY(): returns the absolute y coordinate, with the origin at the top-left corner of the screen.
AMotionEvent_getPointerCount(): multi-touch function; returns the number of active touch points, which depends on the hardware.
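A minimal sketch of such a handler, showing the three functions in use (the handler name and the per-point processing are illustrative, not part of the original sample):

    #include <android/input.h>
    #include <android_native_app_glue.h>

    static int32_t handleInput(struct android_app* app, AInputEvent* event) {
        if (AInputEvent_getType(event) == AINPUT_EVENT_TYPE_MOTION) {
            size_t count = AMotionEvent_getPointerCount(event);   // number of active touch points
            for (size_t i = 0; i < count; ++i) {
                float x = AMotionEvent_getX(event, i);            // absolute x, origin at top-left
                float y = AMotionEvent_getY(event, i);            // absolute y, origin at top-left
                // use (x, y) here, e.g. append it to a vertex list for drawing
            }
            return 1;  // event handled
        }
        return 0;      // let the default handling run (e.g. key events)
    }

The complete example below (based on the NDK native-activity sample) does exactly this: each touch point is appended to a global std::vector and drawn as a GL_LINE_STRIP.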
    #include <jni.h>
    #include <errno.h>
    #include <stdlib.h>
    #include <stdio.h>
    #include <string.h>   // for memset
    #include <EGL/egl.h>
    #include <GLES/gl.h>
    #include <vector>
    #include <string>
    #include <map>
    #include <android/sensor.h>
    #include <android/log.h>
    #include <android_native_app_glue.h>

    #define LOGI(...) ((void)__android_log_print(ANDROID_LOG_ERROR, "native-activity", __VA_ARGS__))
    #define LOGW(...) ((void)__android_log_print(ANDROID_LOG_WARN, "native-activity", __VA_ARGS__))

    /**
     * Our saved state data.
     */
    struct saved_state {
        float angle;
        int32_t x;
        int32_t y;
    };

    /**
     * Shared state for our app.
     */
    struct engine {
        struct android_app* app;

        ASensorManager* sensorManager;
        const ASensor* accelerometerSensor;
        ASensorEventQueue* sensorEventQueue;

        int animating;
        EGLDisplay display;
        EGLSurface surface;
        EGLContext context;
        int32_t width;
        int32_t height;
        struct saved_state state;
    };

    class float3 {
    public:
        float x, y, z;
    };

    // Touch points collected in the input callback and drawn each frame.
    std::vector<float3> g_arVertex;

    /**
     * Initialize an EGL context for the current display.
     */
    static int engine_init_display(struct engine* engine) {
        // initialize OpenGL ES and EGL

        /*
         * Here specify the attributes of the desired configuration.
         * Below, we select an EGLConfig with at least 8 bits per color
         * component compatible with on-screen windows
         */
        const EGLint attribs[] = {
            EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
            EGL_BLUE_SIZE, 8,
            EGL_GREEN_SIZE, 8,
            EGL_RED_SIZE, 8,
            EGL_NONE
        };
        EGLint w, h, dummy, format;
        EGLint numConfigs;
        EGLConfig config;
        EGLSurface surface;
        EGLContext context;

        EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
        eglInitialize(display, 0, 0);

        /* Here, the application chooses the configuration it desires. In this
         * sample, we have a very simplified selection process, where we pick
         * the first EGLConfig that matches our criteria */
        eglChooseConfig(display, attribs, &config, 1, &numConfigs);

        /* EGL_NATIVE_VISUAL_ID is an attribute of the EGLConfig that is
         * guaranteed to be accepted by ANativeWindow_setBuffersGeometry().
         * As soon as we picked a EGLConfig, we can safely reconfigure the
         * ANativeWindow buffers to match, using EGL_NATIVE_VISUAL_ID. */
        eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &format);

        ANativeWindow_setBuffersGeometry(engine->app->window, 0, 0, format);

        surface = eglCreateWindowSurface(display, config, engine->app->window, NULL);
        context = eglCreateContext(display, config, NULL, NULL);

        if (eglMakeCurrent(display, surface, surface, context) == EGL_FALSE) {
            LOGW("Unable to eglMakeCurrent");
            return -1;
        }

        eglQuerySurface(display, surface, EGL_WIDTH, &w);
        eglQuerySurface(display, surface, EGL_HEIGHT, &h);

        engine->display = display;
        engine->context = context;
        engine->surface = surface;
        engine->width = w;
        engine->height = h;
        engine->state.angle = 0;

        // Initialize GL state.
        glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_FASTEST);
        glEnable(GL_CULL_FACE);
        glShadeModel(GL_SMOOTH);
        glDisable(GL_DEPTH_TEST);

        // Map GL coordinates to screen pixels with the origin at the top-left,
        // matching the coordinates returned by AMotionEvent_getX/getY.
        glViewport(0, 0, w, h);
        glOrthof(0, w, h, 0, -100, 100);

        return 0;
    }

    /**
     * Just the current frame in the display.
     */
    static void engine_draw_frame(struct engine* engine) {
        if (engine->display == NULL) {
            // No display.
            return;
        }

        // Just fill the screen with a color.
        glClearColor(((float)engine->state.x)/engine->width, engine->state.angle,
                     ((float)engine->state.y)/engine->height, 1);
        glClear(GL_COLOR_BUFFER_BIT);

        // Draw the collected touch points as a connected line strip.
        glEnableClientState(GL_VERTEX_ARRAY);
        if (g_arVertex.size() >= 2) {
            glColor4f(1, 1, 1, 1);
            glVertexPointer(3, GL_FLOAT, 0, &g_arVertex[0]);
            glDrawArrays(GL_LINE_STRIP, 0, g_arVertex.size());
        }

        eglSwapBuffers(engine->display, engine->surface);
    }

    /**
     * Tear down the EGL context currently associated with the display.
     */
    static void engine_term_display(struct engine* engine) {
        if (engine->display != EGL_NO_DISPLAY) {
            eglMakeCurrent(engine->display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
            if (engine->context != EGL_NO_CONTEXT) {
                eglDestroyContext(engine->display, engine->context);
            }
            if (engine->surface != EGL_NO_SURFACE) {
                eglDestroySurface(engine->display, engine->surface);
            }
            eglTerminate(engine->display);
        }
        engine->animating = 0;
        engine->display = EGL_NO_DISPLAY;
        engine->context = EGL_NO_CONTEXT;
        engine->surface = EGL_NO_SURFACE;
    }

    /**
     * Process the next input event.
     */
    static int32_t engine_handle_input(struct android_app* app, AInputEvent* event) {
        struct engine* engine = (struct engine*)app->userData;
        int32_t evtType = AInputEvent_getType(event);
        switch (evtType) {
        case AINPUT_EVENT_TYPE_KEY:
            break;
        case AINPUT_EVENT_TYPE_MOTION:
            switch (AInputEvent_getSource(event)) {
            case AINPUT_SOURCE_TOUCHSCREEN: {
                // Mask off the pointer-index bits to get the plain action code.
                int32_t id = AMotionEvent_getAction(event) & AMOTION_EVENT_ACTION_MASK;
                switch (id) {
                case AMOTION_EVENT_ACTION_MOVE: {
                    // Record every active pointer while the finger(s) move.
                    size_t cnt = AMotionEvent_getPointerCount(event);
                    for (size_t i = 0; i < cnt; ++i) {
                        float x = AMotionEvent_getX(event, i);
                        float y = AMotionEvent_getY(event, i);
                        LOGI("x = %f y = %f", x, y);
                        float3 pt;
                        pt.x = x;
                        pt.y = y;
                        pt.z = 0;
                        g_arVertex.push_back(pt);
                    }
                    break;
                }
                case AMOTION_EVENT_ACTION_DOWN: {
                    // First finger down: record the initial touch point.
                    float x = AMotionEvent_getX(event, 0);
                    float y = AMotionEvent_getY(event, 0);
                    LOGI("x = %f y = %f", x, y);
                    float3 pt;
                    pt.x = x;
                    pt.y = y;
                    pt.z = 0;
                    g_arVertex.push_back(pt);
                    break;
                }
                case AMOTION_EVENT_ACTION_UP:
                    break;
                }
                break;
            }
            case AINPUT_SOURCE_TRACKBALL:
                break;
            }
            break;
        }

        if (AInputEvent_getType(event) == AINPUT_EVENT_TYPE_MOTION) {
            engine->animating = 1;
            engine->state.x = AMotionEvent_getX(event, 0);
            engine->state.y = AMotionEvent_getY(event, 0);
            return 1;
        }
        return 0;
    }

    /**
     * Process the next main command.
     */
    static void engine_handle_cmd(struct android_app* app, int32_t cmd) {
        struct engine* engine = (struct engine*)app->userData;
        switch (cmd) {
        case APP_CMD_SAVE_STATE:
            break;
        case APP_CMD_INIT_WINDOW:
            // The window is being shown, get it ready.
            if (engine->app->window != NULL) {
                engine_init_display(engine);
            }
            break;
        case APP_CMD_TERM_WINDOW:
            // The window is being hidden or closed, clean it up.
            engine_term_display(engine);
            break;
        case APP_CMD_GAINED_FOCUS:
            break;
        case APP_CMD_LOST_FOCUS:
            break;
        }
    }

    /**
     * This is the main entry point of a native application that is using
     * android_native_app_glue. It runs in its own thread, with its own
     * event loop for receiving input events and doing other things.
     */
    void android_main(struct android_app* state) {
        struct engine engine;

        // Make sure glue isn't stripped.
        app_dummy();

        memset(&engine, 0, sizeof(engine));
        state->userData = &engine;
        state->onAppCmd = engine_handle_cmd;
        state->onInputEvent = engine_handle_input;
        engine.app = state;

        // Prepare to monitor accelerometer
        engine.sensorManager = ASensorManager_getInstance();
        engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager,
                ASENSOR_TYPE_ACCELEROMETER);

        if (state->savedState != NULL) {
            // We are starting with a previous saved state; restore from it.
            engine.state = *(struct saved_state*)state->savedState;
        }

        int ident, events;
        struct android_poll_source* source;

        while (true) {
            // Drain all pending events, then render a frame.
            while ((ident = ALooper_pollAll(0, NULL, &events, (void**)&source)) >= 0) {
                if (source != NULL)
                    source->process(state, source);
                if (state->destroyRequested != 0) {
                    engine_term_display(&engine);
                    return;
                }
            }
            engine_draw_frame(&engine);
        }
    }
The result is shown in the figure.
APK download: download