In the android_main(struct android_app* state) function, register the input-event handler:

state->onInputEvent = engine_handle_input; // register the handler for input events such as touch

Key functions (a short usage sketch follows the list):

AMotionEvent_getX(): returns the pointer's X coordinate in absolute screen coordinates, with the origin at the top-left corner of the screen.

AMotionEvent_getY(): returns the pointer's Y coordinate in absolute screen coordinates, with the origin at the top-left corner of the screen.

AMotionEvent_getPointerCount(): returns the number of active pointers in a multi-touch event; the maximum number of simultaneous touches depends on the hardware.
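
As a quick illustration of these three calls, here is a minimal sketch of an input handler that does nothing but log every active pointer. The handler name log_touches is invented for this sketch, and it assumes the LOGI macro defined in the full listing below; the real handler used by the sample (engine_handle_input) appears further down.

static int32_t log_touches(struct android_app* app, AInputEvent* event)
{
    (void)app;  // unused in this sketch
    if (AInputEvent_getType(event) == AINPUT_EVENT_TYPE_MOTION) {
        size_t cnt = AMotionEvent_getPointerCount(event);    // number of active pointers (hardware-dependent maximum)
        for (size_t i = 0; i < cnt; ++i) {
            float x = AMotionEvent_getX(event, i);           // absolute X, origin at the top-left of the screen
            float y = AMotionEvent_getY(event, i);           // absolute Y, origin at the top-left of the screen
            LOGI("pointer %d: x = %f y = %f", (int)i, x, y);
        }
        return 1;   // event consumed
    }
    return 0;       // let the framework handle other event types
}

The complete sample, adapted from the NDK native-activity template, follows.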

#include <jni.h>
#include <errno.h>
#include <stdlib.h>
#include <stdio.h>

#include <EGL/egl.h>
#include <GLES/gl.h>

#include <vector>
#include <string>
#include <map>

#include <android/sensor.h>
#include <android/log.h>
#include <android_native_app_glue.h>

#define LOGI(...) ((void)__android_log_print(ANDROID_LOG_ERROR, "native-activity", __VA_ARGS__))
#define LOGW(...) ((void)__android_log_print(ANDROID_LOG_WARN, "native-activity", __VA_ARGS__))

/**
 * Our saved state data.
 */
struct saved_state {
    float angle;
    int32_t x;
    int32_t y;
};

/**
 * Shared state for our app.
 */
struct engine {
    struct android_app* app;

    ASensorManager* sensorManager;
    const ASensor* accelerometerSensor;
    ASensorEventQueue* sensorEventQueue;

    int animating;
    EGLDisplay display;
    EGLSurface surface;
    EGLContext context;
    int32_t width;
    int32_t height;
    struct saved_state state;
};

// Simple 3D vertex type for the points collected from touch input.
class float3
{
public:
    float x, y, z;
};

// Vertices of the line strip drawn by the user's finger.
std::vector<float3> g_arVertex;
/**
 * Initialize an EGL context for the current display.
 */
static int engine_init_display(struct engine* engine) {
    // initialize OpenGL ES and EGL

    /*
     * Here specify the attributes of the desired configuration.
     * Below, we select an EGLConfig with at least 8 bits per color
     * component compatible with on-screen windows
     */
    const EGLint attribs[] =
    {
        EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
        EGL_BLUE_SIZE, 8,
        EGL_GREEN_SIZE, 8,
        EGL_RED_SIZE, 8,
        EGL_NONE
    };
    EGLint w, h, dummy, format;
    EGLint numConfigs;
    EGLConfig config;
    EGLSurface surface;
    EGLContext context;

    EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    eglInitialize(display, 0, 0);

    /* Here, the application chooses the configuration it desires. In this
     * sample, we have a very simplified selection process, where we pick
     * the first EGLConfig that matches our criteria */
    eglChooseConfig(display, attribs, &config, 1, &numConfigs);

    /* EGL_NATIVE_VISUAL_ID is an attribute of the EGLConfig that is
     * guaranteed to be accepted by ANativeWindow_setBuffersGeometry().
     * As soon as we picked a EGLConfig, we can safely reconfigure the
     * ANativeWindow buffers to match, using EGL_NATIVE_VISUAL_ID. */
    eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &format);

    ANativeWindow_setBuffersGeometry(engine->app->window, 0, 0, format);

    surface = eglCreateWindowSurface(display, config, engine->app->window, NULL);
    context = eglCreateContext(display, config, NULL, NULL);

    if (eglMakeCurrent(display, surface, surface, context) == EGL_FALSE) {
        LOGW("Unable to eglMakeCurrent");
        return -1;
    }

    eglQuerySurface(display, surface, EGL_WIDTH, &w);
    eglQuerySurface(display, surface, EGL_HEIGHT, &h);

    engine->display = display;
    engine->context = context;
    engine->surface = surface;
    engine->width = w;
    engine->height = h;
    engine->state.angle = 0;

    // Initialize GL state.
    glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_FASTEST);
    glEnable(GL_CULL_FACE);
    glShadeModel(GL_SMOOTH);
    glDisable(GL_DEPTH_TEST);

    // Map the projection to screen pixels (origin at the top-left), so the
    // coordinates reported by AMotionEvent_getX()/getY() can be used directly
    // as vertex positions.
    glViewport(0, 0, w, h);
    glOrthof(0, w, h, 0, -1, 1);

    return 0;
}

/**
 * Just draw the current frame in the display.
 */
static void engine_draw_frame(struct engine* engine) {
    if (engine->display == NULL) {
        // No display.
        return;
    }

    // Just fill the screen with a color.
    glClearColor(((float)engine->state.x)/engine->width, engine->state.angle,
            ((float)engine->state.y)/engine->height, 1);
    glClear(GL_COLOR_BUFFER_BIT);

    // Draw the collected touch points as a line strip (needs at least two vertices).
    glEnableClientState(GL_VERTEX_ARRAY);
    if (g_arVertex.size() >= 2)
    {
        glColor4f(1, 1, 1, 1);
        glVertexPointer(3, GL_FLOAT, 0, &g_arVertex[0]);
        glDrawArrays(GL_LINE_STRIP, 0, g_arVertex.size());
    }

    eglSwapBuffers(engine->display, engine->surface);
}

/**
 * Tear down the EGL context currently associated with the display.
 */
static void engine_term_display(struct engine* engine) {
    if (engine->display != EGL_NO_DISPLAY) {
        eglMakeCurrent(engine->display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
        if (engine->context != EGL_NO_CONTEXT) {
            eglDestroyContext(engine->display, engine->context);
        }
        if (engine->surface != EGL_NO_SURFACE) {
            eglDestroySurface(engine->display, engine->surface);
        }
        eglTerminate(engine->display);
    }
    engine->animating = 0;
    engine->display = EGL_NO_DISPLAY;
    engine->context = EGL_NO_CONTEXT;
    engine->surface = EGL_NO_SURFACE;
}

/**
 * Process the next input event.
 */
static int32_t engine_handle_input(struct android_app* app, AInputEvent* event)
{
    struct engine* engine = (struct engine*)app->userData;

    int32_t evtType = AInputEvent_getType(event);
    switch (evtType)
    {
    case AINPUT_EVENT_TYPE_KEY:
        break;

    case AINPUT_EVENT_TYPE_MOTION:
    {
        switch (AInputEvent_getSource(event))
        {
        case AINPUT_SOURCE_TOUCHSCREEN:
        {
            int32_t id = AMotionEvent_getAction(event);
            switch (id)
            {
            case AMOTION_EVENT_ACTION_MOVE:
            {
                // While the finger moves, record every active pointer.
                size_t cnt = AMotionEvent_getPointerCount(event);
                for (size_t i = 0; i < cnt; ++i)
                {
                    float x = AMotionEvent_getX(event, i);
                    float y = AMotionEvent_getY(event, i);
                    LOGI("x = %f y = %f", x, y);
                    float3 pt;
                    pt.x = x;
                    pt.y = y;
                    pt.z = 0;
                    g_arVertex.push_back(pt);
                }
            }
            break;

            case AMOTION_EVENT_ACTION_DOWN:
            {
                // When the finger first goes down, record the primary pointer.
                float x = AMotionEvent_getX(event, 0);
                float y = AMotionEvent_getY(event, 0);
                LOGI("x = %f y = %f", x, y);
                float3 pt;
                pt.x = x;
                pt.y = y;
                pt.z = 0;
                g_arVertex.push_back(pt);
            }
            break;

            case AMOTION_EVENT_ACTION_UP:
                break;
            }
        }
        break;

        case AINPUT_SOURCE_TRACKBALL:
            break;
        }
    }
    break;
    }

    if (AInputEvent_getType(event) == AINPUT_EVENT_TYPE_MOTION)
    {
        engine->animating = 1;
        engine->state.x = AMotionEvent_getX(event, 0);
        engine->state.y = AMotionEvent_getY(event, 0);
        return 1;
    }
    return 0;
}

/**
 * Process the next main command.
 */
static void engine_handle_cmd(struct android_app* app, int32_t cmd) {
    struct engine* engine = (struct engine*)app->userData;
    switch (cmd) {
        case APP_CMD_SAVE_STATE:
            break;
        case APP_CMD_INIT_WINDOW:
            // The window is being shown, get it ready.
            if (engine->app->window != NULL) {
                engine_init_display(engine);
            }
            break;
        case APP_CMD_TERM_WINDOW:
            // The window is being hidden or closed, clean it up.
            engine_term_display(engine);
            break;
        case APP_CMD_GAINED_FOCUS:
            break;
        case APP_CMD_LOST_FOCUS:
            break;
    }
}

/**
 * This is the main entry point of a native application that is using
 * android_native_app_glue. It runs in its own thread, with its own
 * event loop for receiving input events and doing other things.
 */
void android_main(struct android_app* state) {
    struct engine engine;

    // Make sure glue isn't stripped.
    app_dummy();

    memset(&engine, 0, sizeof(engine));
    state->userData = &engine;
    state->onAppCmd = engine_handle_cmd;
    state->onInputEvent = engine_handle_input;
    engine.app = state;

    // Prepare to monitor accelerometer
    engine.sensorManager = ASensorManager_getInstance();
    engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager,
            ASENSOR_TYPE_ACCELEROMETER);

    if (state->savedState != NULL) {
        // We are starting with a previous saved state; restore from it.
        engine.state = *(struct saved_state*)state->savedState;
    }

    int ident, events;
    struct android_poll_source* source;

    while (true)
    {
        // Drain all pending events without blocking, then render a frame.
        while ((ident = ALooper_pollAll(0, NULL, &events, (void**)&source)) >= 0)
        {
            if (source != NULL)
                source->process(state, source);

            if (state->destroyRequested != 0)
                return;
        }

        engine_draw_frame(&engine);
    }
}

The result is shown in the figure.

APK download: download
