Files
panopainter/android/src/cpp/main.cpp
2019-08-01 18:22:31 +02:00

1157 lines
39 KiB
C++

/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
//BEGIN_INCLUDE(all)
#include <mutex>
#include <initializer_list>
#include <memory>
#include <jni.h>
#include <errno.h>
#include <cassert>
#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <android/window.h>
#include <android/asset_manager_jni.h>
#include <sys/prctl.h> // for prctl( PR_SET_NAME )
#include <unistd.h>
#include "pch.h"
#include "app.h"
#include "asset.h"
#include "keymap.h"
#include "main.h"
#include "com_omixlab_panopainter_MainActivity.h"
#ifdef __QUEST__
#include "oculus_vr.h"
#elif __FOCUS__
#include "wave_vr.h"
#endif
// Function-pointer types matching the GL_KHR_debug callback entry points.
// Declared locally because the bundled GLES headers may predate the
// extension; the real function is resolved at runtime via eglGetProcAddress.
typedef void (*GLDEBUGPROC)(GLenum source,
                            GLenum type,
                            GLuint id,
                            GLenum severity,
                            GLsizei length,
                            const GLchar* message,
                            const void* userParam);
typedef void (*fnDebugMessageCallback)(GLDEBUGPROC callback, const void* userParam);
// GL_KHR_debug enum values, defined here for the same header-age reason.
#define GL_DEBUG_SEVERITY_HIGH 0x9146
#define GL_DEBUG_SEVERITY_MEDIUM 0x9147
#define GL_DEBUG_SEVERITY_LOW 0x9148
#define GL_DEBUG_SEVERITY_NOTIFICATION 0x826B
#define GL_DEBUG_OUTPUT 0x92E0
#define GL_DEBUG_OUTPUT_SYNCHRONOUS 0x8242
// EGL display and context are kept in globals so they survive surface
// teardown/recreation across pause/resume (see engine_init_display()).
EGLDisplay g_display = EGL_NO_DISPLAY;
EGLContext g_context = EGL_NO_CONTEXT;
// Recursive mutex serializing GL access between threads; mutex_count tracks
// the recursion depth so eglMakeCurrent() is only issued on the outermost
// lock/unlock (see android_async_lock/unlock).
std::recursive_mutex mutex;
int mutex_count = 0;
// Global engine state shared with the android_native_app_glue callbacks.
struct engine g_engine;
// Per-thread JNI environment pointer, set by android_attach_jni().
thread_local JNIEnv* jni;
// Called by the Android runtime when this native library is loaded.
// Only reports the required JNI version; per-thread attachment is done
// later via android_attach_jni().
jint JNI_OnLoad(JavaVM* vm, void* /*reserved*/)
{
    LOG("JNI_OnLoad");
    return JNI_VERSION_1_6;
}
// Encode a single Unicode code point as a UTF-8 byte sequence.
// Returns an empty string for values that have no UTF-8 encoding:
// negative inputs, UTF-16 surrogate halves (U+D800..U+DFFF) and anything
// above U+10FFFF. Previously a negative cp fell into the ASCII branch and
// produced a single garbage byte.
std::string utf8chr(int cp)
{
    char c[5] = { 0x00, 0x00, 0x00, 0x00, 0x00 };
    if (cp < 0) {} // invalid: no encoding, return ""
    else if (cp <= 0x7F) { c[0] = cp; } // 1 byte: plain ASCII
    else if (cp <= 0x7FF) { c[0] = (cp >> 6) + 192; c[1] = (cp & 63) + 128; } // 2 bytes
    else if (0xd800 <= cp && cp <= 0xdfff) {} // surrogates are not encodable in UTF-8
    else if (cp <= 0xFFFF) { c[0] = (cp >> 12) + 224; c[1] = ((cp >> 6) & 63) + 128; c[2] = (cp & 63) + 128; } // 3 bytes
    else if (cp <= 0x10FFFF) { c[0] = (cp >> 18) + 240; c[1] = ((cp >> 12) & 63) + 128; c[2] = ((cp >> 6) & 63) + 128; c[3] = (cp & 63) + 128; } // 4 bytes
    return std::string(c);
}
// see https://stackoverflow.com/questions/21124051/receive-complete-android-unicode-input-in-c-c
// see http://www.zedwood.com/article/cpp-utf8-char-to-codepoint
int GetUnicodeChar(int eventType, int keyCode, int metaState)
{
#define COMBINING_ACCENT 0x80000000
#define COMBINING_ACCENT_MASK 0x7fffffff
jclass class_key_event = jni->FindClass("android/view/KeyEvent");
int unicodeKey;
if(metaState == 0)
{
jmethodID method_get_unicode_char = jni->GetMethodID(class_key_event, "getUnicodeChar", "()I");
jmethodID eventConstructor = jni->GetMethodID(class_key_event, "<init>", "(II)V");
jobject eventObj = jni->NewObject(class_key_event, eventConstructor, eventType, keyCode);
unicodeKey = jni->CallIntMethod(eventObj, method_get_unicode_char);
}
else
{
jmethodID method_get_unicode_char = jni->GetMethodID(class_key_event, "getUnicodeChar", "(I)I");
jmethodID eventConstructor = jni->GetMethodID(class_key_event, "<init>", "(II)V");
jobject eventObj = jni->NewObject(class_key_event, eventConstructor, eventType, keyCode);
unicodeKey = jni->CallIntMethod(eventObj, method_get_unicode_char, metaState);
}
if ((unicodeKey & COMBINING_ACCENT) != 0)
{
unicodeKey = unicodeKey & COMBINING_ACCENT_MASK;
}
LOG("Unicode key is: %d", unicodeKey);
return unicodeKey;
}
// Attach the calling thread to the JVM and store its JNIEnv in the
// thread_local `jni` pointer used by every JNI helper in this file.
void android_attach_jni()
{
    g_engine.app->activity->vm->AttachCurrentThread(&jni, nullptr);
}
// Detach the calling thread from the JVM. The thread_local `jni` pointer is
// left dangling afterwards; do not call JNI helpers on this thread again
// without re-attaching.
void android_detach_jni()
{
    g_engine.app->activity->vm->DetachCurrentThread();
}
// Acquire the global GL lock. On the outermost acquisition the EGL context
// is bound to this thread; nested (recursive) acquisitions only bump the
// depth counter, which is safe to touch because the lock is already held.
void android_async_lock()
{
    mutex.lock();
    if (mutex_count++ == 0)
        eglMakeCurrent(g_engine.display, g_engine.surface, g_engine.surface, g_engine.context);
}
// Non-blocking variant of android_async_lock(): returns false when the lock
// is contended by another thread; on success the EGL context is bound if
// this was the outermost acquisition, and true is returned.
bool android_async_trylock()
{
    if (!mutex.try_lock())
        return false;
    if (mutex_count++ == 0)
        eglMakeCurrent(g_engine.display, g_engine.surface, g_engine.surface, g_engine.context);
    return true;
}
// Present the current back buffer. Caller must hold the GL lock (i.e. be
// inside android_async_lock()/unlock()) so the surface is current.
void android_async_swap()
{
    eglSwapBuffers(g_engine.display, g_engine.surface);
}
// Release one level of the global GL lock; the outermost release unbinds
// the EGL context from this thread so another thread may bind it.
void android_async_unlock()
{
    if (--mutex_count == 0)
        eglMakeCurrent(g_engine.display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
    mutex.unlock();
}
// see https://groups.google.com/forum/#!topic/android-ndk/Tk3g00wLKhk
// Show (pShow=true) or hide (pShow=false) the Android soft keyboard by
// driving the Java InputMethodManager through JNI: it is fetched via
// getSystemService(INPUT_METHOD_SERVICE) and pointed at the activity's
// decor view / window token. Must run on a JNI-attached thread.
// NOTE(review): the local references created here (classes, manager, views)
// are never DeleteLocalRef'd and the boolean results are ignored — fine for
// occasional calls from a Java-entered frame, but verify this is never
// called in a tight loop from a permanently attached thread.
void displayKeyboard(bool pShow)
{
    // Retrieves NativeActivity.
    jobject lNativeActivity = g_engine.app->activity->clazz;
    jclass ClassNativeActivity = jni->GetObjectClass(lNativeActivity);
    // Retrieves Context.INPUT_METHOD_SERVICE.
    jclass ClassContext = jni->FindClass("android/content/Context");
    jfieldID FieldINPUT_METHOD_SERVICE =
        jni->GetStaticFieldID(ClassContext, "INPUT_METHOD_SERVICE", "Ljava/lang/String;");
    jobject INPUT_METHOD_SERVICE =
        jni->GetStaticObjectField(ClassContext,
            FieldINPUT_METHOD_SERVICE);
    //jniCheck(INPUT_METHOD_SERVICE);
    // Runs getSystemService(Context.INPUT_METHOD_SERVICE).
    jclass ClassInputMethodManager = jni->FindClass(
        "android/view/inputmethod/InputMethodManager");
    jmethodID MethodGetSystemService = jni->GetMethodID(
        ClassNativeActivity, "getSystemService",
        "(Ljava/lang/String;)Ljava/lang/Object;");
    jobject lInputMethodManager = jni->CallObjectMethod(
        lNativeActivity, MethodGetSystemService,
        INPUT_METHOD_SERVICE);
    // Runs getWindow().getDecorView().
    jmethodID MethodGetWindow = jni->GetMethodID(
        ClassNativeActivity, "getWindow",
        "()Landroid/view/Window;");
    jobject lWindow = jni->CallObjectMethod(lNativeActivity,
        MethodGetWindow);
    jclass ClassWindow = jni->FindClass(
        "android/view/Window");
    jmethodID MethodGetDecorView = jni->GetMethodID(
        ClassWindow, "getDecorView", "()Landroid/view/View;");
    jobject lDecorView = jni->CallObjectMethod(lWindow,
        MethodGetDecorView);
    if (pShow) {
        // Runs lInputMethodManager.showSoftInput(...).
        jmethodID MethodShowSoftInput = jni->GetMethodID(
            ClassInputMethodManager, "showSoftInput",
            "(Landroid/view/View;I)Z");
        jboolean lResult = jni->CallBooleanMethod(
            lInputMethodManager, MethodShowSoftInput,
            lDecorView, 0);
    } else {
        // Runs lWindow.getViewToken()
        jclass ClassView = jni->FindClass(
            "android/view/View");
        jmethodID MethodGetWindowToken = jni->GetMethodID(
            ClassView, "getWindowToken", "()Landroid/os/IBinder;");
        jobject lBinder = jni->CallObjectMethod(lDecorView,
            MethodGetWindowToken);
        // lInputMethodManager.hideSoftInput(...).
        jmethodID MethodHideSoftInput = jni->GetMethodID(
            ClassInputMethodManager, "hideSoftInputFromWindow",
            "(Landroid/os/IBinder;I)Z");
        jboolean lRes = jni->CallBooleanMethod(
            lInputMethodManager, MethodHideSoftInput,
            lBinder, 0);
    }
}
/*
 * Class:     com_omixlab_panopainter_MainActivity
 * Method:    pickFileCallback
 * Signature: (Ljava/lang/String;)V
 */
// pick_file_callback: one-shot user callback registered by
// android_pick_file(); receives the path chosen in the Java file picker.
// pick_file_callback_context: deferred closure built on the JNI callback
// thread and executed later by the main loop in android_main(), so the user
// callback runs on the native main thread.
std::function<void(std::string)> pick_file_callback;
std::function<void()> pick_file_callback_context;
extern "C"
{
#ifdef __FOCUS__
// JNI entry (Focus builds): stores the Java-side AAssetManager and
// initializes the Wave VR runtime with null handles.
// NOTE(review): a Java native method named "init_vr" normally mangles to
// Java_..._MainActivity_init_1vr ('_' encodes as "_1"); confirm this symbol
// matches the generated com_omixlab_panopainter_MainActivity.h.
JNIEXPORT void JNICALL Java_com_omixlab_panopainter_MainActivity_init_vr(JNIEnv *env, jobject activity, jobject am)
{
    Asset::m_am = AAssetManager_fromJava(env, am);
    wave_init(0, 0, 0);
}
#endif
// JNI entry: Java delivers the path selected in the file picker. The path
// is copied out of the jstring and wrapped into the deferred closure
// pick_file_callback_context, which the main loop runs later on the native
// main thread; the stored pick_file_callback fires once and is cleared.
JNIEXPORT void JNICALL Java_com_omixlab_panopainter_MainActivity_pickFileCallback(JNIEnv *env, jobject, jstring path)
{
    const char* path_utf = env->GetStringUTFChars(path, nullptr);
    std::string file_path = path_utf; // create a copy
    env->ReleaseStringUTFChars(path, path_utf);
    LOG("received %s", file_path.c_str());
    pick_file_callback_context = [file_path]
    {
        if (pick_file_callback)
        {
            LOG("callback");
            pick_file_callback(file_path);
            pick_file_callback = nullptr; // one-shot: clear after firing
        }
    };
}
// JNI entry: Java reports the external storage directory. It becomes the
// app's data/work root (frames are recorded under "<path>/frames") and
// logging is (re)initialized there.
JNIEXPORT void JNICALL Java_com_omixlab_panopainter_MainActivity_pickExternalCallback(JNIEnv *env, jobject, jstring path)
{
    const char* path_utf = env->GetStringUTFChars(path, nullptr);
    std::string file_path = path_utf; // create a copy
    env->ReleaseStringUTFChars(path, path_utf);
    LOG("data_path %s", file_path.c_str());
    App::I->data_path = file_path;
    App::I->work_path = file_path;
    App::I->rec_path = file_path + "/frames";
    App::I->initLog();
}
// JNI entry: Java reports the window size and the visible content rect
// (e.g. when the soft keyboard appears). The app's drawable height becomes
// the rect height and off_y the hidden bottom part. Ignored on Quest.
// NOTE(review): parameter name "end" looks like a typo for "env" (unused).
JNIEXPORT void JNICALL Java_com_omixlab_panopainter_MainActivity_contentRectChanged(JNIEnv *end, jobject,
    jint wnd_w, jint wnd_h, jint rect_left, jint rect_top, jint rect_right, jint rect_bottom)
{
#ifndef __QUEST__
    LOG("resize wnd [%d %d] rect [%d %d %d %d]", wnd_w, wnd_h, rect_left, rect_top, rect_right, rect_bottom);
    App::I->width = wnd_w;
    App::I->height = (rect_bottom - rect_top);
    App::I->off_x = 0;
    App::I->off_y = wnd_h - (rect_bottom - rect_top);
#endif
}
}
void android_pick_file(std::function<void(std::string)> callback)
{
pick_file_callback = callback;
jclass clazz = jni->GetObjectClass(g_engine.app->activity->clazz);
jmethodID method = jni->GetMethodID(clazz, "pickFile", "()V");
jni->CallVoidMethod(g_engine.app->activity->clazz, method);
}
float get_display_density()
{
jclass clazz = jni->GetObjectClass(g_engine.app->activity->clazz);
jmethodID method = jni->GetMethodID(clazz, "getDensity", "()F");
return jni->CallFloatMethod(g_engine.app->activity->clazz, method);
}
std::string get_data_path()
{
jclass clazz = jni->GetObjectClass(g_engine.app->activity->clazz);
jmethodID method = jni->GetMethodID(clazz, "getDataPath", "()Ljava/lang/String;");
jstring js = (jstring)jni->CallObjectMethod(g_engine.app->activity->clazz, method);
const char* utf = jni->GetStringUTFChars(js, nullptr);
std::string str = utf; // create a copy
jni->ReleaseStringUTFChars(js, utf);
return str;
}
// source: https://github.com/opencollab/giws/issues/4
// Convert a std::string holding UTF-8 bytes into a java.lang.String.
// NewStringUTF cannot be used directly because it expects *modified* UTF-8,
// which differs from standard UTF-8 for NUL and supplementary characters,
// so the raw bytes are handed to new String(byte[], "UTF-8") instead.
// The charset is now passed explicitly rather than relying on the platform
// default charset. Returns 0 on failure.
jstring JniStringFromUTF8(const std::string& s)
{
    int len = s.size();
    jbyteArray bytes = jni->NewByteArray(len);
    if (bytes == 0)
    {
        LOG("jni->NewByteArray failed");
        return 0;
    }
    jni->SetByteArrayRegion(bytes, 0, len, (jbyte *)s.c_str());
    jclass string_class = jni->FindClass("java/lang/String");
    // String(byte[] bytes, String charsetName)
    jmethodID stringConstructor = jni->GetMethodID(string_class, "<init>", "([BLjava/lang/String;)V");
    if (stringConstructor == NULL)
    {
        LOG("JniStringFromUTF8 new String(byte[]) failed");
        jni->DeleteLocalRef(bytes);
        return 0;
    }
    // "UTF-8" is pure ASCII, so NewStringUTF is safe here.
    jstring charset = jni->NewStringUTF("UTF-8");
    jstring result = (jstring)jni->NewObject(string_class, stringConstructor, bytes, charset);
    jni->DeleteLocalRef(charset);
    jni->DeleteLocalRef(bytes);
    return result; //NOTE: jstring must be freed using: curEnv->DeleteLocalRef(result);
}
// Fetch the current clipboard text from the Java activity
// (getClipboardText()). Returns an empty string when Java returns null
// (empty/unavailable clipboard) — previously a null jstring was passed
// straight to GetStringUTFChars, which crashes.
std::string android_get_clipboard()
{
    jclass clazz = jni->GetObjectClass(g_engine.app->activity->clazz);
    jmethodID method = jni->GetMethodID(clazz, "getClipboardText", "()Ljava/lang/String;");
    jstring js = (jstring)jni->CallObjectMethod(g_engine.app->activity->clazz, method);
    if (!js)
        return std::string();
    const char* utf = jni->GetStringUTFChars(js, nullptr);
    std::string str = utf; // create a copy
    jni->ReleaseStringUTFChars(js, utf);
    return str;
}
bool android_set_clipboard(const std::string& s)
{
jclass clazz = jni->GetObjectClass(g_engine.app->activity->clazz);
jmethodID method = jni->GetMethodID(clazz, "setClipboardText", "(Ljava/lang/String;)Z");
jstring js = JniStringFromUTF8(s);
if (!js) return false;
jboolean success = jni->CallBooleanMethod(g_engine.app->activity->clazz, method, js);
jni->DeleteLocalRef(js);
return success;
}
// Run flag polled by the detached HMD render thread below.
// NOTE(review): plain bool read/written across threads without
// synchronization — std::atomic<bool> would be safer, but changing the
// type alters the symbol; confirm no other TU externs it first.
bool vr_running = false;
// Body of the detached HMD render thread: after syncing with the UI it
// spins, submitting a UI draw + Oculus frame per iteration (Quest builds
// only; on other builds the loop body is empty), until vr_running is
// cleared elsewhere.
static void engine_vr_loop()
{
    LOG("start hmd render thread");
    vr_running = true;
    App::I->ui_sync();
    while (vr_running)
    {
#ifdef __QUEST__
        App::I->render_task([] {
            App::I->vr_draw_ui();
            oculus_draw(0);
        });
#endif
    }
    LOG("hmd renderer terminated");
}
// Launch engine_vr_loop() on a detached thread; it runs until vr_running
// is cleared (the thread is never joined).
static void engine_start_vr_thread()
{
    std::thread(engine_vr_loop).detach();
}
// Hand the freshly created EGL display/context/window to the platform VR
// runtime (Oculus on Quest, Wave on Focus; no-op elsewhere).
static void engine_start_vr_mode()
{
#ifdef __QUEST__
    LOG("QUEST init VR");
    oculus_init_vr(g_engine.display, g_engine.context, g_engine.app->window);
#elif __FOCUS__
    wave_init_vr(g_engine.display, g_engine.context, g_engine.app->window);
#endif
}
/**
 * Initialize an EGL context for the current display.
 *
 * Picks an EGLConfig matching the platform (8/8/8 color everywhere; on
 * Quest additionally 8-bit alpha and no depth, elsewhere 24-bit depth and
 * no alpha), creates a window surface, and — unless a context survived a
 * previous pause (g_context) — creates a GLES context, trying
 * debug+forward-compatible flags first and progressively weaker attribute
 * sets on failure. On a fresh start it then logs GL/system info, installs
 * the GL_KHR_debug callback when available, and starts the render/UI (and,
 * on Quest, VR) threads. Returns 0 on success, -1 on failure.
 */
static int engine_init_display(struct engine* engine) {
    // initialize OpenGL ES and EGL
    /*
     * Here specify the attributes of the desired configuration.
     * Below, we select an EGLConfig with at least 8 bits per color
     * component compatible with on-screen windows
     */
#ifdef __QUEST__
    EGLint egl_depth = 0;
    EGLint egl_alpha = 8;
    const EGLint attribs[] = {
        EGL_RENDERABLE_TYPE, EGL_OPENGL_ES3_BIT_KHR,
        EGL_SURFACE_TYPE, EGL_WINDOW_BIT | EGL_PBUFFER_BIT,
        EGL_BLUE_SIZE, 8,
        EGL_GREEN_SIZE, 8,
        EGL_RED_SIZE, 8,
        EGL_ALPHA_SIZE, 8,
        EGL_DEPTH_SIZE, 0,
        EGL_STENCIL_SIZE, 0,
        EGL_SAMPLES, 0,
        EGL_NONE
    };
#else
    EGLint egl_depth = 24;
    EGLint egl_alpha = 0;
    const EGLint attribs[] = {
        EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
        EGL_BLUE_SIZE, 8,
        EGL_GREEN_SIZE, 8,
        EGL_RED_SIZE, 8,
        EGL_DEPTH_SIZE, 24,
        EGL_STENCIL_SIZE, 0,
        EGL_SAMPLES, 0,
        EGL_NONE
    };
#endif
    EGLint w, h, format;
    EGLint numConfigs;
    EGLConfig config = nullptr;
    EGLSurface surface = nullptr;
    EGLContext context = nullptr;
    // Display (and context, below) live in globals across pause/resume.
    EGLDisplay display = g_display;
    if (g_display == EGL_NO_DISPLAY)
    {
        LOG("DYSPLAY CREATE");
        display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
        eglInitialize(display, nullptr, nullptr);
    }
    else LOG("DISPLAY RESUME");
    /* Here, the application chooses the configuration it desires.
     * find the best match if possible, otherwise use the very first one
     */
    // First query how many configs match, then fetch them all.
    eglChooseConfig(display, attribs, nullptr, 0, &numConfigs);
    if (numConfigs == 0) {
        // Previously only caught by assert(); indexing supportedConfigs[0]
        // below would be out of bounds in a release build.
        LOG("eglChooseConfig: no matching config");
        return -1;
    }
    std::unique_ptr<EGLConfig[]> supportedConfigs(new EGLConfig[numConfigs]);
    eglChooseConfig(display, attribs, supportedConfigs.get(), numConfigs, &numConfigs);
    // Prefer an exact 8/8/8 + platform alpha/depth match; fall back to [0].
    auto i = 0;
    for (; i < numConfigs; i++) {
        auto& cfg = supportedConfigs[i];
        EGLint r, g, b, a, d;
        if (eglGetConfigAttrib(display, cfg, EGL_RED_SIZE, &r) &&
            eglGetConfigAttrib(display, cfg, EGL_GREEN_SIZE, &g) &&
            eglGetConfigAttrib(display, cfg, EGL_BLUE_SIZE, &b) &&
            eglGetConfigAttrib(display, cfg, EGL_ALPHA_SIZE, &a) &&
            eglGetConfigAttrib(display, cfg, EGL_DEPTH_SIZE, &d) &&
            r == 8 && g == 8 && b == 8 && a == egl_alpha && d == egl_depth ) {
            config = supportedConfigs[i];
            break;
        }
    }
    if (i == numConfigs) {
        config = supportedConfigs[0];
    }
    /* EGL_NATIVE_VISUAL_ID is an attribute of the EGLConfig that is
     * guaranteed to be accepted by ANativeWindow_setBuffersGeometry().
     * As soon as we picked a EGLConfig, we can safely reconfigure the
     * ANativeWindow buffers to match, using EGL_NATIVE_VISUAL_ID. */
    eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &format);
    surface = eglCreateWindowSurface(display, config, engine->app->window, nullptr);
#ifdef __QUEST__
    const int gles_version = 3;
#else
    const int gles_version = 2;
#endif
    // Preferred context attributes: debug + forward-compatible. On failure
    // the cascade below retries with weaker flag sets.
    const EGLint attribs_test[] = {
        EGL_CONTEXT_CLIENT_VERSION, gles_version,
        EGL_CONTEXT_FLAGS_KHR, EGL_CONTEXT_OPENGL_DEBUG_BIT_KHR | EGL_CONTEXT_OPENGL_FORWARD_COMPATIBLE_BIT_KHR,
        EGL_CONTEXT_OPENGL_PROFILE_MASK_KHR, EGL_CONTEXT_OPENGL_CORE_PROFILE_BIT_KHR,
        EGL_NONE
    };
    bool resuming_context = true;
    context = g_context;
    if (g_context == EGL_NO_CONTEXT)
    {
        LOG("CONTEXT CREATE");
        context = eglCreateContext(display, config, EGL_NO_CONTEXT, attribs_test);
        resuming_context = false;
    }
    else LOG("CONTEXT RESUME");
    if (context == EGL_NO_CONTEXT)
    {
        LOG("EGL: debug and forward context failed");
        const EGLint attribs_test[] = {
            EGL_CONTEXT_CLIENT_VERSION, gles_version,
            EGL_CONTEXT_FLAGS_KHR, EGL_CONTEXT_OPENGL_FORWARD_COMPATIBLE_BIT_KHR,
            EGL_NONE
        };
        context = eglCreateContext(display, config, EGL_NO_CONTEXT, attribs_test);
        if (context == EGL_NO_CONTEXT)
        {
            LOG("EGL: only forward context failed");
            const EGLint attribs_test[] = {
                EGL_CONTEXT_CLIENT_VERSION, gles_version,
                EGL_CONTEXT_FLAGS_KHR, EGL_CONTEXT_OPENGL_DEBUG_BIT_KHR,
                //EGL_CONTEXT_OPENGL_PROFILE_MASK_KHR, EGL_CONTEXT_OPENGL_CORE_PROFILE_BIT_KHR,
                EGL_NONE
            };
            context = eglCreateContext(display, config, EGL_NO_CONTEXT, attribs_test);
            if (context == EGL_NO_CONTEXT)
            {
                LOG("EGL: only debug context failed");
                const EGLint attribs_test[] = {
                    EGL_CONTEXT_CLIENT_VERSION, gles_version,
                    EGL_NONE
                };
                context = eglCreateContext(display, config, EGL_NO_CONTEXT, attribs_test);
                if (context == EGL_NO_CONTEXT)
                {
                    LOG("EGL: all the context creation failed");
                }
                else
                {
                    LOG("EGL: created simple context");
                }
            }
            else
            {
                LOG("EGL: created only debug context");
            }
        }
        else
        {
            LOG("EGL: created only forward context");
        }
    }
    else
    {
        LOG("EGL: created debug and forward context");
    }
    if (eglMakeCurrent(display, surface, surface, context) == EGL_FALSE) {
        LOG("Unable to eglMakeCurrent");
        return -1;
    }
    eglQuerySurface(display, surface, EGL_WIDTH, &w);
    eglQuerySurface(display, surface, EGL_HEIGHT, &h);
    engine->display = display;
    engine->context = context;
    engine->surface = surface;
    engine->width = w;
    engine->height = h;
    engine->state.angle = 0;
    // Persist for the next pause/resume cycle.
    g_display = display;
    g_context = context;
    if (resuming_context)
    {
        // Resume path: the GL context survived, only the surface was
        // recreated. Release the context from this thread and unlock the GL
        // mutex taken by engine_term_display() so render threads continue.
        LOG("RESUME APP");
        App::I->and_app = engine->app;
        LOG("release egl context");
        eglMakeCurrent(engine->display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
        mutex.unlock();
        engine_start_vr_mode();
        return 0;
    }
    // Check openGL on the system
    auto opengl_info = { GL_VENDOR, GL_RENDERER, GL_VERSION/*, GL_EXTENSIONS*/ };
    for (auto name : opengl_info) {
        auto info = glGetString(name);
        LOG("OpenGL Info: %s", info);
    }
    // Collect the extension list to detect GL_KHR_debug.
    GLint n_exts;
    std::map<std::string, bool> ext_map;
    glGetIntegerv(GL_NUM_EXTENSIONS, &n_exts);
    for (int i = 0; i < n_exts; i++)
    {
        ext_map.emplace((char*)glGetStringi(GL_EXTENSIONS, i), true);
    }
    if (ext_map.count("GL_KHR_debug"))
    {
        // Route GL debug messages into the app log.
        LOG("GL_KHR_debug supported");
        auto glDebugMessageCallback = (fnDebugMessageCallback)eglGetProcAddress("glDebugMessageCallbackKHR");
        if (glDebugMessageCallback)
        {
            LOG("glDebugMessageCallback proc found at %p", glDebugMessageCallback);
            glDebugMessageCallback([](GLenum source, GLenum type, GLuint id,
                GLenum severity, GLsizei length, const GLchar* message, const void* userParam)
            {
                //if (severity == GL_DEBUG_SEVERITY_MEDIUM || severity == GL_DEBUG_SEVERITY_HIGH)
                {
                    LOG("OPENGL: %.*s", length, message);
                }
            }, nullptr);
            glEnable(GL_DEBUG_OUTPUT);
            glEnable(GL_DEBUG_OUTPUT_SYNCHRONOUS);
        }
        else
        {
            LOG("glDebugMessageCallback proc NOT FOUND");
        }
    }
    // Read system properties via `getprop`; each line looks like
    // "[key]: [value]". Fields are accumulated into std::string instead of
    // the previous fixed 64-byte stack buffer, which could overflow on long
    // values (e.g. ro.product.cpu.abilist) within the 100-char line buffer.
    FILE* file = popen("getprop", "r");
    std::map<std::string, std::string> os_props;
    if (file)
    {
        char output[100];
        while (fgets(output, sizeof(output), file) != nullptr)
        {
            const char* p = output;
            // Extract the next "[...]" field starting at p.
            auto read_bracketed = [&p]() {
                std::string field;
                while (*p && *p != '[') p++;
                if (*p) p++;
                while (*p && *p != ']') field.push_back(*p++);
                return field;
            };
            std::string key = read_bracketed();
            os_props[key] = read_bracketed();
            //LOG("PROP: %s -> %s", key.c_str(), os_props[key].c_str());
        }
        pclose(file);
    }
    LOG("PROP Android Version: %s", os_props["ro.build.version.release"].c_str());
    LOG("PROP Android SDK: %s", os_props["ro.build.version.sdk"].c_str());
    LOG("PROP Country Code: %s", os_props["ro.csc.country_code"].c_str());
    LOG("PROP ABI: %s", os_props["ro.product.cpu.abilist"].c_str());
    LOG("PROP Brand: %s", os_props["ro.product.brand"].c_str());
    LOG("PROP Maker: %s", os_props["ro.product.manufacturer"].c_str());
    LOG("PROP Mode: %s", os_props["ro.product.model"].c_str());
    Asset::m_am = engine->app->activity->assetManager;
    App::I->and_app = engine->app;
    App::I->and_engine = engine;
    //std::string base_path = engine->app->activity->externalDataPath ?
    //    engine->app->activity->externalDataPath : get_data_path(engine->app);
    if (App::I->data_path.empty() || App::I->data_path == ".")
        App::I->data_path = get_data_path();
    LOG("data_path %s", App::I->data_path.c_str());
#ifdef __QUEST__
    App::I->zoom = 1.f;
    App::I->width = 1024;
    App::I->height = 1024;
    App::I->redraw = true;
    App::I->vr_active = true;
    App::I->has_vr = true;
    App::I->vr_only = true;
    // give control to the render thread
    LOG("release egl context");
    eglMakeCurrent(engine->display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
    LOG("start render threads");
    App::I->render_thread_start();
    LOG("start ui thread");
    App::I->ui_thread_start();
    LOG("start vr thread");
    engine_start_vr_mode();
    engine_start_vr_thread();
#else
    float density = get_display_density();
    LOG("density %f", density);
    App::I->zoom = density / 1.5;
    App::I->width = w;
    App::I->height = h;
    App::I->redraw = true;
    // give control to the render thread
    LOG("release egl context");
    eglMakeCurrent(engine->display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
    LOG("start render threads");
    App::I->render_thread_start();
    LOG("start ui thread");
    App::I->ui_thread_start();
#endif
    return 0;
}
/**
 * Tear down the EGL context currently associated with the display.
 *
 * Destroys only the window surface; the display and context themselves are
 * kept alive in g_display/g_context (see the commented-out destroy calls)
 * so they can be re-attached to a new surface on resume.
 *
 * NOTE(review): `mutex` is locked here and deliberately NOT unlocked — it
 * keeps the render threads blocked while no surface exists and is released
 * by the resume path in engine_init_display(); confirm this cannot run
 * twice without an intervening resume.
 */
static void engine_term_display(struct engine* engine) {
    LOG("flush render thread");
    App::I->render_sync();
    mutex.lock();
    if (engine->display != EGL_NO_DISPLAY) {
        eglMakeCurrent(engine->display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
        // if (engine->context != EGL_NO_CONTEXT) {
        //     eglDestroyContext(engine->display, engine->context);
        // }
        if (engine->surface != EGL_NO_SURFACE) {
            eglDestroySurface(engine->display, engine->surface);
        }
        // eglTerminate(engine->display);
    }
    engine->animating = 0;
    engine->display = EGL_NO_DISPLAY;
    engine->context = EGL_NO_CONTEXT;
    engine->surface = EGL_NO_SURFACE;
#ifdef __QUEST__
    oculus_release_vr();
#endif
}
/**
 * Process the next input event.
 *
 * Touch handling tracks up to two pointers in function-static state
 * (p0/p1/tracked): one tracked pointer drives mouse_* events (stylus or
 * finger), and when a second pointer appears the in-flight mouse press is
 * cancelled and a two-finger gesture (gesture_start/move/end) takes over.
 * Key events are translated through GetUnicodeChar() and forwarded as
 * key_down/key_up/key_char. All App callbacks are posted asynchronously to
 * the UI thread. Returns 1 when the event was consumed, 0 otherwise.
 */
static int32_t engine_handle_input(struct android_app* app, AInputEvent* event) {
    struct engine* engine = (struct engine*)app->userData;
    int32_t eventType = AInputEvent_getType(event);
    //LOG("event type: %d", eventType);
    //locker _locker{engine};
    // Any input should wake the renderer.
    App::I->redraw = true;
    switch (eventType) {
    case AINPUT_EVENT_TYPE_MOTION:
        // switch (AInputEvent_getSource(event)) {
        //     case AINPUT_SOURCE_STYLUS:
        //     case AINPUT_SOURCE_TOUCHSCREEN:
        {
            // NOTE(review): AKeyEvent_getAction on a motion event reads the
            // same underlying field as AMotionEvent_getAction, but the
            // motion accessor would be clearer.
            int action = AKeyEvent_getAction(event) & AMOTION_EVENT_ACTION_MASK;
            // Index of the pointer that triggered this action.
            int32_t index = (action & AMOTION_EVENT_ACTION_POINTER_INDEX_MASK)
                >> AMOTION_EVENT_ACTION_POINTER_INDEX_SHIFT;
            int pointer_id = AMotionEvent_getPointerId(event, index);
            int32_t count = AMotionEvent_getPointerCount(event);
            // Map a stable pointer id back to its index in this event (-1 if
            // absent).
            auto findPointer = [](int id, AInputEvent* event)
            {
                int32_t count = AMotionEvent_getPointerCount(event);
                int ret = -1;
                for (int i = 0; i < count; i++)
                {
                    //LOG("pointer %d id %d == %d", i, id, AMotionEvent_getPointerId(event, i));
                    if (AMotionEvent_getPointerId(event, i) == id)
                        ret = i;
                }
                return ret;
                return -1; // NOTE(review): unreachable statement
            };
            // Last known state of the two tracked pointers; `tracked` is the
            // current mode: 0 = idle, 1 = mouse, 2 = two-finger gesture.
            struct Pointer
            {
                int id = -1;
                int idx;
                glm::vec2 pos;
            };
            static Pointer p0, p1;
            static int tracked = 0;
            //LOG("event source: %d", AInputEvent_getSource(event));
            //LOG("pointer id %d count %d", pointer_id, count);
            MouseEvent e; // NOTE(review): unused
            switch (action) {
            case AMOTION_EVENT_ACTION_DOWN:
            {
                // First pointer down: begin a mouse interaction.
                float x = AMotionEvent_getX(event, 0);
                float y = AMotionEvent_getY(event, 0);
                p0.id = AMotionEvent_getPointerId(event, 0);
                p0.pos = {x, y};
                p0.idx = index;
                int tool_type = AMotionEvent_getToolType(event, index);
                float pressure = AMotionEvent_getPressure(event, 0);
                kEventSource source = tool_type == AMOTION_EVENT_TOOL_TYPE_STYLUS ?
                    kEventSource::Stylus : kEventSource::Touch;
                App::I->ui_task_async([=]{
                    App::I->mouse_down(0, x, y, pressure, source, 0);
                });
                tracked = 1;
                //LOG("first down");
                return 1;
            }
            case AMOTION_EVENT_ACTION_POINTER_DOWN:
            {
                //LOG("pointer down index %d", index);
                if (count == 2)
                {
                    // Second pointer down: cancel any in-flight mouse press
                    // and start a two-finger gesture from both positions.
                    float x = AMotionEvent_getX(event, 1);
                    float y = AMotionEvent_getY(event, 1);
                    p1.id = AMotionEvent_getPointerId(event, 1);
                    p1.idx = index;
                    p1.pos = {x, y};
                    p0.pos.x = AMotionEvent_getX(event, 0);
                    p0.pos.y = AMotionEvent_getY(event, 0);
                    //LOG("second down");
                    App::I->ui_task_async([t=tracked, _p0=p0, _p1=p1] {
                        if (t == 1)
                            App::I->mouse_cancel(0);
                        App::I->gesture_start(_p0.pos, _p1.pos);
                    });
                    tracked = 2;
                }
                return 1;
            }
            case AMOTION_EVENT_ACTION_UP:
            {
                // Last pointer up: finish the mouse interaction (if one was
                // tracked) and reset all tracking state.
                float y = AMotionEvent_getY(event, 0);
                float x = AMotionEvent_getX(event, 0);
                p0.id = -1;
                p1.id = -1;
                int tool_type = AMotionEvent_getToolType(event, index);
                float pressure = AMotionEvent_getPressure(event, 0);
                kEventSource source = tool_type == AMOTION_EVENT_TOOL_TYPE_STYLUS ?
                    kEventSource::Stylus : kEventSource::Touch;
                if (tracked == 1)
                {
                    App::I->ui_task_async([=] {
                        App::I->mouse_up(0, x, y, source, 0);
                    });
                }
                tracked = 0;
                //LOG("first up");
                return 1;
            }
            case AMOTION_EVENT_ACTION_POINTER_UP:
                // Secondary pointer lifted: end the two-finger gesture.
                if (p1.id == AMotionEvent_getPointerId(event, 1))
                {
                    p1.id = -1;
                    //LOG("second up");
                    App::I->ui_task_async([=] {
                        App::I->gesture_end();
                    });
                }
                return 1;
            case AMOTION_EVENT_ACTION_HOVER_MOVE: // pen move before touching
            {
                float y = AMotionEvent_getY(event, 0);
                float x = AMotionEvent_getX(event, 0);
                App::I->ui_task_async([=] {
                    App::I->mouse_move(x, y, 0, kEventSource::Stylus, 0);
                });
                //LOG("single move");
                return 1;
            }
            case AMOTION_EVENT_ACTION_MOVE:
                if (count == 1 && tracked == 1)
                {
                    // Single-pointer drag: forward as mouse move + pressure.
                    float y = AMotionEvent_getY(event, 0);
                    float x = AMotionEvent_getX(event, 0);
                    int tool_type = AMotionEvent_getToolType(event, index);
                    float pressure = AMotionEvent_getPressure(event, 0);
                    kEventSource source = tool_type == AMOTION_EVENT_TOOL_TYPE_STYLUS ?
                        kEventSource::Stylus : kEventSource::Touch;
                    App::I->ui_task_async([=] {
                        App::I->mouse_move(x, y, pressure, source, 0);
                    });
                    //LOG("single move");
                }
                else if (count == 2)
                {
                    // Two-pointer drag: refresh whichever pointer moved and
                    // forward both positions to the gesture handler.
                    int idx = findPointer(pointer_id, event);
                    //LOG("pointer move index %d", idx);
                    if (p0.idx == idx)
                    {
                        //LOG("first move");
                        float y = AMotionEvent_getY(event, 0);
                        float x = AMotionEvent_getX(event, 0);
                        p0.pos = {x, y};
                    }
                    if (p1.idx == idx)
                    {
                        //LOG("second move");
                        float x = AMotionEvent_getX(event, 1);
                        float y = AMotionEvent_getY(event, 1);
                        p1.pos = {x, y};
                    }
                    App::I->ui_task_async([_p0=p0, _p1=p1] {
                        App::I->gesture_move(_p0.pos, _p1.pos);
                    });
                }
                return 1;
            default:
                //LOG("motion action: %d", action);
                break;
            }
        }
        //     break;
        // } // end switch
        break;
    case AINPUT_EVENT_TYPE_KEY:
    {
        int action = AKeyEvent_getAction(event);
        int32_t key_val = AKeyEvent_getKeyCode(event);
        int key = AKeyEvent_getKeyCode(event);
        int metaState = AKeyEvent_getMetaState(event);
        // Translate keycode + meta state into a Unicode code point via Java.
        int uniValue = GetUnicodeChar(action, key, metaState);
        switch (action)
        {
        case AKEY_EVENT_ACTION_MULTIPLE:
            LOG("Received key multi event: %d\n", key_val);
            if (uniValue > 32 && uniValue < 127) //printable ascii range
                App::I->ui_task_async([=] {
                    App::I->key_char(uniValue);
                });
            break;
        case AKEY_EVENT_ACTION_DOWN:
            LOG("Received key down event: %d\n", key_val);
            App::I->ui_task_async([=] {
                App::I->key_down(convert_key(key_val));
            });
            break;
        case AKEY_EVENT_ACTION_UP:
            // key_up and (for printable ASCII) key_char are delivered in the
            // same UI task on key release.
            LOG("Received key up event: %d\n", key_val);
            App::I->ui_task_async([=] {
                App::I->key_up(convert_key(key_val));
                if (uniValue > 32 && uniValue < 127) //printable ascii range
                    App::I->key_char(uniValue);
            });
            break;
        }
        return 1;
    }
    } // end switch
    return 0;
}
/**
 * Process the next main command.
 *
 * Dispatched by android_native_app_glue on the main thread. Window
 * init/teardown drive engine_init_display()/engine_term_display(); most
 * other commands just flag a redraw and wake the looper so the main loop
 * spins once.
 */
static void engine_handle_cmd(struct android_app* app, int32_t cmd) {
    struct engine* engine = (struct engine*)app->userData;
    switch (cmd) {
    case APP_CMD_RESUME:
        LOG("APP_CMD_RESUME");
        App::I->redraw = true;
        ALooper_wake(engine->app->looper);
        break;
    case APP_CMD_SAVE_STATE:
        // The system has asked us to save our current state. Do so.
        // (The glue owns the malloc'd buffer and frees it afterwards.)
        engine->app->savedState = malloc(sizeof(struct saved_state));
        *((struct saved_state*)engine->app->savedState) = engine->state;
        engine->app->savedStateSize = sizeof(struct saved_state);
        LOG("SAVE STATE");
        break;
    case APP_CMD_INIT_WINDOW:
        // The window is being shown, get it ready.
        if (engine->app->window != NULL)
            engine_init_display(engine);
        break;
    case APP_CMD_TERM_WINDOW:
        // The window is being hidden or closed, clean it up.
        //App::I->terminate();
        engine_term_display(engine);
        //exit(0);
        break;
    case APP_CMD_GAINED_FOCUS:
        /*
        // When our app gains focus, we start monitoring the accelerometer.
        if (engine->accelerometerSensor != NULL) {
            ASensorEventQueue_enableSensor(engine->sensorEventQueue,
                engine->accelerometerSensor);
            // We'd like to get 60 events per second (in us).
            ASensorEventQueue_setEventRate(engine->sensorEventQueue,
                engine->accelerometerSensor,
                (1000L/60)*1000);
        }
        engine->animating = 1;
        */
        break;
    case APP_CMD_LOST_FOCUS:
        /*
        // When our app loses focus, we stop monitoring the accelerometer.
        // This is to avoid consuming battery while not being used.
        if (engine->accelerometerSensor != NULL) {
            ASensorEventQueue_disableSensor(engine->sensorEventQueue,
                engine->accelerometerSensor);
        }
        // Also stop animating.
        engine->animating = 0;
        */
        break;
    case APP_CMD_WINDOW_REDRAW_NEEDED:
        LOG("APP_CMD_WINDOW_REDRAW_NEEDED");
        App::I->redraw = true;
        ALooper_wake(engine->app->looper);
        break;
    case APP_CMD_WINDOW_RESIZED:
        LOG("APP_CMD_WINDOW_RESIZED");
        App::I->redraw = true;
        ALooper_wake(engine->app->looper);
        break;
    case APP_CMD_CONTENT_RECT_CHANGED:
        // Actual resize values arrive via the contentRectChanged JNI export;
        // here we only schedule a redraw.
        LOG("APP_CMD_CONTENT_RECT_CHANGED");
        //App::I->width = engine->app->contentRect.right - engine->app->contentRect.left;
        //App::I->height = engine->app->contentRect.bottom - engine->app->contentRect.top;
        //LOG("content rect %f %f", App::I->width, App::I->height);
        App::I->redraw = true;
        ALooper_wake(engine->app->looper);
        break;
    }
}
/**
 * This is the main entry point of a native application that is using
 * android_native_app_glue. It runs in its own thread, with its own
 * event loop for receiving input events and doing other things.
 *
 * Sets up the App singleton and glue callbacks, attaches this thread to the
 * JVM, initializes the platform VR runtime, then loops pumping looper
 * events. While a display exists it polls without blocking (timeout 0) so
 * frames keep flowing; otherwise it blocks until an event arrives.
 * Deferred file-picker closures are executed here on the main thread.
 */
void android_main(struct android_app* state) {
    // Make sure glue isn't stripped.
    // DON'T REMOVE, even if the compiler say it's deprecated
    app_dummy();
    App::I = new App;
    memset(&g_engine, 0, sizeof(g_engine));
    state->userData = &g_engine;
    state->onAppCmd = engine_handle_cmd;
    state->onInputEvent = engine_handle_input;
    g_engine.app = state;
    // BEGIN OVR
    ANativeActivity_setWindowFlags(state->activity, AWINDOW_FLAG_KEEP_SCREEN_ON, 0 );
    // Attach the main thread so the thread_local `jni` pointer works here.
    state->activity->vm->AttachCurrentThread(&jni, nullptr);
#ifdef __QUEST__
    oculus_init(state->activity->vm, jni, state->activity->clazz);
#elif __FOCUS__
    wave_init(state->activity->vm, jni, state->activity->clazz);
#endif
    // Note that AttachCurrentThread will reset the thread name.
    prctl(PR_SET_NAME, (long)"PP Main", 0, 0, 0);
    // Prepare to monitor accelerometer
    /*
    g_engine.sensorManager = ASensorManager_getInstance();
    g_engine.accelerometerSensor = ASensorManager_getDefaultSensor(
        g_engine.sensorManager, ASENSOR_TYPE_ACCELEROMETER);
    g_engine.sensorEventQueue = ASensorManager_createEventQueue(
        g_engine.sensorManager, state->looper, LOOPER_ID_USER, NULL, NULL);
    */
    LOG("START MAIN");
    if (state->savedState != NULL) {
        // We are starting with a previous saved state; restore from it.
        g_engine.state = *(struct saved_state*)state->savedState;
    }
    //App::I->create();
    App::I->redraw = true;
    // loop waiting for stuff to do.
    while (1) {
        // Read all pending events.
        int ident;
        int events;
        struct android_poll_source* source;
        // If not animating, we will block forever waiting for events.
        // If animating, we loop until all events are read, then continue
        // to draw the next frame of animation.
        bool used = false;
        // Block only while there is no display (nothing to render).
        int timeout = g_engine.display != EGL_NO_DISPLAY ? 0 : -1;
        while (!used && (ident=ALooper_pollOnce(timeout, NULL, &events,
            (void**)&source)) != ALOOPER_POLL_ERROR) {
            // Process this event.
            // If a sensor has data, process it now.
            /*
            if (ident == LOOPER_ID_USER) {
                if (g_engine.accelerometerSensor != NULL) {
                    ASensorEvent event;
                    while (ASensorEventQueue_getEvents(g_engine.sensorEventQueue, &event, 1) > 0) {
                        // LOGI("accelerometer: x=%f y=%f z=%f",
                        //     event.acceleration.x, event.acceleration.y,
                        //     event.acceleration.z);
                    }
                }
            }
            */
            if (source != NULL) {
                // Dispatch to engine_handle_cmd / engine_handle_input.
                source->process(state, source);
                used = true;
            }
            if (g_engine.display == EGL_NO_DISPLAY || ident == ALOOPER_POLL_CALLBACK)
                continue;
            if (ident == ALOOPER_POLL_TIMEOUT)
                App::I->redraw = true;
            // Check if we are exiting.
            if (state->destroyRequested != 0) {
                engine_term_display(&g_engine);
                return;
            }
        }
        //if (engine.animating)
        if (g_engine.display != EGL_NO_DISPLAY)
        {
            // Run any deferred file-picker result on the main thread.
            if (pick_file_callback_context)
            {
                pick_file_callback_context();
                pick_file_callback_context = nullptr;
            }
        }
    }
}
//END_INCLUDE(all)