Android Opens the Door to Apps Written in Pure C/C++

There have been clear signs that the Android platform is opening up to applications written in pure C/C++. We have touched on this before; today let's look at the specifics. The sample named native-activity in the NDK Samples confirms it, although Android123 reminds everyone that, as far as the display system goes, the only C/C++ interface Android currently exposes is the OpenGL family. Note that such an app only runs on Android 2.2 or newer firmware. Let's start with the definition in AndroidManifest.xml: minSdkVersion is set to 8 below, and android:hasCode="false" on the application element declares that this app contains no Java code at all.

<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
        package="com.example.native_activity">

    <uses-sdk android:minSdkVersion="8" />

    <application android:label="@string/app_name" android:hasCode="false">
        <activity android:name="android.app.NativeActivity"
                android:label="@string/app_name"
                android:configChanges="orientation|keyboardHidden">
            <!-- Tell NativeActivity which shared library to load -->
            <meta-data android:name="android.app.lib_name"
                    android:value="native-activity" />
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>
</manifest>

As you can see above, the activity's android:name must be exactly android.app.NativeActivity, for the same reason that a Service exposed over AIDL is declared with its fully qualified name: otherwise the framework cannot find the class. The android.app.lib_name meta-data entry names the native library (here libnative-activity.so) that NativeActivity will load. A short note on that loading contract follows, and then the complete pure C source.
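
When the activity starts, the framework loads the library named by android.app.lib_name and calls its exported ANativeActivity_onCreate function. In this sample that entry point is supplied for you by the android_native_app_glue static library, which registers the lifecycle callbacks and then spawns a thread that runs your android_main. Purely for illustration, and assuming you wanted to bypass the glue, the exported hook would look roughly like this (member names come from <android/native_activity.h>):

#include <android/native_activity.h>

/* Illustrative sketch only: in the real sample android_native_app_glue
 * already provides this function, so you never write it yourself. */
void ANativeActivity_onCreate(ANativeActivity* activity,
                              void* savedState, size_t savedStateSize) {
    (void)savedState;
    (void)savedStateSize;
    /* Hook the lifecycle callbacks you care about, then hand control to your
     * own code; the glue does exactly this before calling android_main. */
    activity->callbacks->onNativeWindowCreated = NULL; /* set real handlers here */
    activity->callbacks->onNativeWindowDestroyed = NULL;
    activity->callbacks->onDestroy = NULL;
}

With the glue in place, all you actually implement is android_main, shown in the full sample source below.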

#include <jni.h>
#include <errno.h>

#include <EGL/egl.h> // native display support is still limited: only the GL family is exposed
#include <GLES/gl.h>

#include <android/sensor.h> // sensor (accelerometer) support
#include <android/log.h>
#include <android_native_app_glue.h> // Android123 note: a C program normally starts at main(); this header supplies the improved pseudo entry point android_main instead, so it must be included

#define LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO, "native-activity", __VA_ARGS__))
#define LOGW(...) ((void)__android_log_print(ANDROID_LOG_WARN, "native-activity", __VA_ARGS__))

// Saved state that survives across pause/resume.
struct saved_state {
    float angle;
    int32_t x;
    int32_t y;
};

// Shared state for the app.
struct engine {
    struct android_app* app;

    ASensorManager* sensorManager;
    const ASensor* accelerometerSensor;
    ASensorEventQueue* sensorEventQueue;

    int animating;
    EGLDisplay display;
    EGLSurface surface;
    EGLContext context;
    int32_t width;
    int32_t height;
    struct saved_state state;
};

Initialize OpenGL ES and EGL:

static int engine_init_display(struct engine* engine) {
    // Request an on-screen window config with 8 bits per color component.
    const EGLint attribs[] = {
        EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
        EGL_BLUE_SIZE, 8,
        EGL_GREEN_SIZE, 8,
        EGL_RED_SIZE, 8,
        EGL_NONE
    };
    EGLint w, h, dummy, format;
    EGLint numConfigs;
    EGLConfig config;
    EGLSurface surface;
    EGLContext context;

    EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);

    eglInitialize(display, 0, 0);

    // Pick the first configuration that matches the attributes above.
    eglChooseConfig(display, attribs, &config, 1, &numConfigs);

    eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &format);

    ANativeWindow_setBuffersGeometry(engine->app->window, 0, 0, format);

    surface = eglCreateWindowSurface(display, config, engine->app->window, NULL);
    context = eglCreateContext(display, config, NULL, NULL);

    if (eglMakeCurrent(display, surface, surface, context) == EGL_FALSE) {
        LOGW("Unable to eglMakeCurrent");
        return -1;
    }

    eglQuerySurface(display, surface, EGL_WIDTH, &w);
    eglQuerySurface(display, surface, EGL_HEIGHT, &h);

    engine->display = display;
    engine->context = context;
    engine->surface = surface;
    engine->width = w;
    engine->height = h;
    engine->state.angle = 0;

    // Initialize GL state.
    glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_FASTEST);
    glEnable(GL_CULL_FACE);
    glShadeModel(GL_SMOOTH);
    glDisable(GL_DEPTH_TEST);

    return 0;
}
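
For brevity the sample does not check any of the EGL calls above. If you wanted to fail gracefully, a minimal sketch of the kind of checks one might add inside engine_init_display (illustrative only, not part of the NDK sample) could look like this:

/* Illustrative only: error checking the sample leaves out. */
EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
if (display == EGL_NO_DISPLAY || eglInitialize(display, 0, 0) == EGL_FALSE) {
    LOGW("failed to get/initialize the EGL display");
    return -1;
}

EGLint numConfigs = 0;
if (eglChooseConfig(display, attribs, &config, 1, &numConfigs) == EGL_FALSE
        || numConfigs < 1) {
    LOGW("no matching EGLConfig found");
    return -1;
}

surface = eglCreateWindowSurface(display, config, engine->app->window, NULL);
if (surface == EGL_NO_SURFACE) {
    LOGW("eglCreateWindowSurface failed: 0x%x", (unsigned)eglGetError());
    return -1;
}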

static void engine_draw_frame(struct engine* engine) {
    if (engine->display == NULL) {
        // No display.
        return;
    }

    // Just fill the screen with a color derived from the touch position.
    glClearColor(((float)engine->state.x)/engine->width, engine->state.angle,
            ((float)engine->state.y)/engine->height, 1);
    glClear(GL_COLOR_BUFFER_BIT);

    eglSwapBuffers(engine->display, engine->surface);
}

static void engine_term_display(struct engine* engine) {
    if (engine->display != EGL_NO_DISPLAY) {
        eglMakeCurrent(engine->display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
        if (engine->context != EGL_NO_CONTEXT) {
            eglDestroyContext(engine->display, engine->context);
        }
        if (engine->surface != EGL_NO_SURFACE) {
            eglDestroySurface(engine->display, engine->surface);
        }
        eglTerminate(engine->display);
    }
    engine->animating = 0;
    engine->display = EGL_NO_DISPLAY;
    engine->context = EGL_NO_CONTEXT;
    engine->surface = EGL_NO_SURFACE;
}

static int32_t engine_handle_input(struct android_app* app, AInputEvent* event) {
    struct engine* engine = (struct engine*)app->userData;
    if (AInputEvent_getType(event) == AINPUT_EVENT_TYPE_MOTION) {
        engine->animating = 1;
        engine->state.x = AMotionEvent_getX(event, 0);
        engine->state.y = AMotionEvent_getY(event, 0);
        return 1;
    }
    return 0;
}
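
The handler above only reacts to motion (touch) events and returns 1 to mark them as consumed. If key events also mattered, the same callback could branch on AINPUT_EVENT_TYPE_KEY; a hedged sketch of such an extension (not part of the NDK sample):

/* Illustrative extension, not in the sample: log key presses as well. */
if (AInputEvent_getType(event) == AINPUT_EVENT_TYPE_KEY) {
    if (AKeyEvent_getAction(event) == AKEY_EVENT_ACTION_DOWN) {
        LOGI("key down: %d", AKeyEvent_getKeyCode(event));
    }
    return 0; // return 0 so the system still handles keys such as BACK
}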

static void engine_handle_cmd(struct android_app* app, int32_t cmd) {
    struct engine* engine = (struct engine*)app->userData;
    switch (cmd) {
        case APP_CMD_SAVE_STATE:
            // The system asked us to save our current state; do so.
            engine->app->savedState = malloc(sizeof(struct saved_state));
            *((struct saved_state*)engine->app->savedState) = engine->state;
            engine->app->savedStateSize = sizeof(struct saved_state);
            break;
        case APP_CMD_INIT_WINDOW:
            // The window is being shown; get it ready.
            if (engine->app->window != NULL) {
                engine_init_display(engine);
                engine_draw_frame(engine);
            }
            break;
        case APP_CMD_TERM_WINDOW:
            // The window is being hidden or closed; clean it up.
            engine_term_display(engine);
            break;
        case APP_CMD_GAINED_FOCUS:
            // Start monitoring the accelerometer when we gain focus.
            if (engine->accelerometerSensor != NULL) {
                ASensorEventQueue_enableSensor(engine->sensorEventQueue,
                        engine->accelerometerSensor);
                // 60 events per second (the rate is given in microseconds).
                ASensorEventQueue_setEventRate(engine->sensorEventQueue,
                        engine->accelerometerSensor, (1000L/60)*1000);
            }
            break;
        case APP_CMD_LOST_FOCUS:
            // Stop monitoring the accelerometer to save power; stop animating.
            if (engine->accelerometerSensor != NULL) {
                ASensorEventQueue_disableSensor(engine->sensorEventQueue,
                        engine->accelerometerSensor);
            }
            engine->animating = 0;
            engine_draw_frame(engine);
            break;
    }
}

void android_main(struct android_app* state) { // the pseudo main entry point; all initialization really does start here
    struct engine engine;

    // Make sure the glue code isn't stripped by the linker.
    app_dummy();

    memset(&engine, 0, sizeof(engine));
    state->userData = &engine;
    state->onAppCmd = engine_handle_cmd;
    state->onInputEvent = engine_handle_input;
    engine.app = state;

    // Prepare to monitor the accelerometer.
    engine.sensorManager = ASensorManager_getInstance();
    engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager,
            ASENSOR_TYPE_ACCELEROMETER);
    engine.sensorEventQueue = ASensorManager_createEventQueue(engine.sensorManager,
            state->looper, LOOPER_ID_USER, NULL, NULL);

    if (state->savedState != NULL) {
        // We are starting with a previous saved state; restore from it.
        engine.state = *(struct saved_state*)state->savedState;
    }

    // Loop waiting for stuff to do.
    while (1) {
        // Read all pending events.
        int ident;
        int events;
        struct android_poll_source* source;

        // If not animating, we will block forever waiting for events.
        // If animating, we loop until all events are read, then continue
        // to draw the next frame of animation.
        while ((ident=ALooper_pollAll(engine.animating ? 0 : -1, NULL, &events,
                (void**)&source)) >= 0) {

            // Process this event.
            if (source != NULL) {
                source->process(state, source);
            }

            // If a sensor has data, process it now.
            if (ident == LOOPER_ID_USER) {
                if (engine.accelerometerSensor != NULL) {
                    ASensorEvent event;
                    while (ASensorEventQueue_getEvents(engine.sensorEventQueue,
                            &event, 1) > 0) {
                        LOGI("accelerometer: x=%f y=%f z=%f",
                                event.acceleration.x, event.acceleration.y,
                                event.acceleration.z);
                    }
                }
            }

            // Check if we are exiting.
            if (state->destroyRequested != 0) {
                engine_term_display(&engine);
                return;
            }
        }

        if (engine.animating) {
            // Done with events; draw next animation frame.
            engine.state.angle += .01f;
            if (engine.state.angle > 1) {
                engine.state.angle = 0;
            }

            // Drawing is throttled to the screen update rate, so there
            // is no need to do timing here.
            engine_draw_frame(&engine);
        }
    }
}