// panopainter/android/focus/src/main/cpp/wave_vr.cpp
#include "pch.h"
#include "wave_vr.h"
#include "log.h"
#include "rtt.h"
#include "app.h"
#include "canvas.h"
#include <unistd.h>
extern "C" bool WVR_EXPORT WVR_IsATWActive();
extern "C" void WVR_EXPORT WVR_SetATWActive(bool isActive, void *anativeWindow = nullptr);
extern "C" void WVR_EXPORT WVR_PauseATW(); // New Api to replace SetATWActive(false)
extern "C" void WVR_EXPORT WVR_ResumeATW(); // New Api to replace SetATWActive(true)
extern "C" void WVR_EXPORT WVR_OnDisable(); // New Api to replace SetATWActive(false) on Unity3D OnDisable
extern "C" void WVR_EXPORT WVR_OnApplicationQuit(); // New Api to handle OnApplicationQuit case
extern "C" void WVR_EXPORT WVR_SetRenderThreadId(int tid);
void android_async_lock();
bool android_async_trylock();
void android_async_swap();
void android_async_unlock();
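// Implemented in the Android platform glue (an assumption from the naming and
// the lock/unlock pairing in vr_main below); they serialize GL access between
// the platform thread and this render thread.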
RTT ovr_eyes[3][2];
struct WaveController : public VRController
{
    void update_state(double predictedDisplayTime, glm::vec3 head_pos)
    {
        /*
        vrapi_GetInputTrackingState(ovr_context, id, predictedDisplayTime, &tracking);
        glm::vec3 c_pos = glm::make_vec3((float*)&tracking.HeadPose.Pose.Position);
        auto c_rot_ovr = ovrMatrix4f_CreateFromQuaternion(&tracking.HeadPose.Pose.Orientation);
        auto c_rot_ovr_tp = ovrMatrix4f_Transpose(&c_rot_ovr);
        glm::mat4 c_rot = glm::make_mat4((float*)&c_rot_ovr_tp);
        m_mat = glm::translate(c_pos) * c_rot;
        // update controllers
        state.Header.ControllerType = ovrControllerType_TrackedRemote;
        vrapi_GetCurrentInputState(ovr_context, id, &state.Header);
        update_analog(kButton::Trigger, {state.IndexTrigger, 0});
        update_analog(kButton::Grip, {state.GripTrigger, 0});
        update_analog(kButton::Pad, glm::make_vec2((float*)&state.Joystick));
        update_digital(kButton::Trigger, state.Buttons & ovrButton_Trigger, {state.IndexTrigger, 0});
        update_digital(kButton::Pad, state.Buttons & ovrButton_Joystick, glm::make_vec2((float*)&state.Joystick));
        update_digital(kButton::A, state.Buttons & ovrButton_A);
        */
    }
    void update_digital(kButton b, bool pressed, glm::vec2 axis = {0, 0})
    {
        /*
        if (pressed && !m_buttons[(int)b])
        {
            m_buttons[(int)b] = true;
            App::I.vr_digital(*this, b, kAction::Press, axis);
        }
        if (!pressed && m_buttons[(int)b])
        {
            m_buttons[(int)b] = false;
            App::I.vr_digital(*this, b, kAction::Release, axis);
        }
        */
    }
    void update_analog(kButton b, glm::vec2 force)
    {
        /*
        float l = glm::compMax(glm::abs(force));
        const float zero = 0.01f;
        if (l > zero && !m_analog_buttons[(int)b])
        {
            m_analog_buttons[(int)b] = true;
            App::I.vr_analog(*this, b, kAction::Press, force);
        }
        if (l <= zero && m_analog_buttons[(int)b])
        {
            m_analog_buttons[(int)b] = false;
            App::I.vr_analog(*this, b, kAction::Release, force);
        }
        */
    }
    float get_trigger_value() const override
    {
        /*
        if (id == -1)
            return 0.f;
        return state.IndexTrigger;
        */
        return 0;
    }
};
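// Sketch of a Wave-side replacement for the vrapi polling commented out
// above, using the per-frame query API declared in the Wave SDK headers
// (WVR_IsDeviceConnected, WVR_GetInputButtonState, WVR_GetInputAnalogAxis).
// Untested illustration; the kButton mapping is an assumption, and nothing
// in this file calls it yet.
static void wave_poll_controller(WaveController& c, WVR_DeviceType dev)
{
    if (!WVR_IsDeviceConnected(dev))
        return;
    // Digital trigger maps to a press/release edge.
    c.update_digital(kButton::Trigger,
                     WVR_GetInputButtonState(dev, WVR_InputId_Alias1_Digital_Trigger));
    // Touchpad reports both a click state and a 2D axis.
    WVR_Axis_t pad = WVR_GetInputAnalogAxis(dev, WVR_InputId_Alias1_Touchpad);
    c.update_digital(kButton::Pad,
                     WVR_GetInputButtonState(dev, WVR_InputId_Alias1_Touchpad),
                     {pad.x, pad.y});
    c.update_analog(kButton::Pad, {pad.x, pad.y});
}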
WaveController controllers[2];
extern struct engine g_engine;
int vr_main(int argc, char *argv[])
{
    LOG("vr_main");
    LOG("WVR_SetATWActive");
    WVR_SetATWActive(true, g_engine.app->window);
    // We already have the display config from the device service, so we don't need this logic.
    WVR_RenderInitParams_t pp = { WVR_GraphicsApiType_OpenGL, WVR_RenderConfig_Default };
    WVR_RenderError render_error = WVR_RenderError_LibNotSupported;
    render_error = WVR_RenderInit(&pp);
    if (render_error != WVR_RenderError_None)
        LOG("WVR Render Init Failed");
    // Load the WVR runtime.
    WVR_InitError eError = WVR_InitError_None;
    LOG("initVR():start call WVR_Init");
    eError = WVR_Init(WVR_AppType_VRContent);
    LOG("initVR():start call WVR_Init end");
    if (eError != WVR_InitError_None) {
        LOG("Unable to init VR runtime: %s", WVR_GetInitErrorString(eError));
        return 1; // false
    }
    LOG("WVR_SetRenderThreadId");
    WVR_SetRenderThreadId((int)gettid());
    WVR_InputAttribute inputIdAndTypes[] = {
        {WVR_InputId_Alias1_Menu, WVR_InputType_Button, WVR_AnalogType_None},
        {WVR_InputId_Alias1_Touchpad, WVR_InputType_Button | WVR_InputType_Touch | WVR_InputType_Analog, WVR_AnalogType_2D},
        {WVR_InputId_Alias1_Trigger, WVR_InputType_Button, WVR_AnalogType_None},
        {WVR_InputId_Alias1_Digital_Trigger, WVR_InputType_Button, WVR_AnalogType_None}
    };
    WVR_SetInputRequest(WVR_DeviceType_HMD, inputIdAndTypes, sizeof(inputIdAndTypes) / sizeof(*inputIdAndTypes));
    WVR_SetInputRequest(WVR_DeviceType_Controller_Right, inputIdAndTypes, sizeof(inputIdAndTypes) / sizeof(*inputIdAndTypes));
    WVR_SetInputRequest(WVR_DeviceType_Controller_Left, inputIdAndTypes, sizeof(inputIdAndTypes) / sizeof(*inputIdAndTypes));
    // Must initialize render runtime before all OpenGL code.
    // (Second WVR_RenderInit call, now requesting asynchronous timewarp.)
    WVR_RenderInitParams_t param;
    param = { WVR_GraphicsApiType_OpenGL, WVR_RenderConfig_Timewarp_Asynchronous };
    WVR_RenderError pError = WVR_RenderInit(&param);
    if (pError != WVR_RenderError_None) {
        LOG("Present init failed - Error[%d]", pError);
    }
    auto mInteractionMode = WVR_GetInteractionMode();
    auto mGazeTriggerType = WVR_GetGazeTriggerType();
    LOG("initVR() mInteractionMode: %d, mGazeTriggerType: %d", mInteractionMode, mGazeTriggerType);
    uint32_t w, h;
    WVR_GetRenderTargetSize(&w, &h);
    std::vector<RTT> rtt_eyes[2];
    WVR_TextureQueueHandle_t tq[2];
    for (int eye = 0; eye < 2; eye++)
    {
        tq[eye] = WVR_ObtainTextureQueue(WVR_TextureTarget_2D, WVR_TextureFormat_RGBA,
                                         WVR_TextureType_UnsignedByte, w, h, 0);
        for (int i = 0; i < WVR_GetTextureQueueLength(tq[eye]); i++)
        {
            auto t = WVR_GetTexture(tq[eye], i);
            rtt_eyes[eye].emplace_back();
            rtt_eyes[eye].back().create(w, h, (int)t.id);
            rtt_eyes[eye].back().bindFramebuffer();
            glClearColor(1, 0, 0, 1); // clear to red so unrendered frames are obvious
            glClear(GL_COLOR_BUFFER_BIT);
            rtt_eyes[eye].back().unbindFramebuffer();
            LOG("WVR create texture %d for eye %d", i, eye);
        }
    }
    android_async_lock();
    LOG("init app");
    App::I->initLog();
    App::I->zoom = 1.f;
    App::I->width = 1024;
    App::I->height = 1024;
    App::I->redraw = true;
    App::I->init();
    //App::I.resize(1024, 1024);
    App::I->vr_active = true;
    App::I->has_vr = true;
    App::I->vr_only = true;
    LOG("init app done");
    android_async_unlock();
    LOG("WVR enter render loop");
    bool running = true;
    while (running)
    {
        android_async_lock();
        WVR_DevicePosePair_t devices[WVR_DEVICE_COUNT_LEVEL_1];
        WVR_GetSyncPose(WVR_PoseOriginModel_OriginOnHead, devices, WVR_DEVICE_COUNT_LEVEL_1);
        LOG("WVR RENDER FRAME");
        for (int eye = 0; eye < 2; eye++)
        {
            int i = WVR_GetAvailableTextureIndex(tq[eye]);
            auto t = WVR_GetTexture(tq[eye], i);
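            // The scene would be rendered into rtt_eyes[eye][i] here before
            // submitting; as written, each texture still holds the red clear
            // from initialization.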
            WVR_SubmitError e;
            e = WVR_SubmitFrame((WVR_Eye)eye, &t, &devices[WVR_DEVICE_HMD].pose);
            if (e != WVR_SubmitError_None)
            {
                LOG("submit error: %d", (int)e);
                running = false;
                break;
            }
        }
        glClearColor(0, 1, 0, 1);
        glClear(GL_COLOR_BUFFER_BIT);
        glFinish();
        android_async_unlock();
    }
    LOG("WVR render loop finished");
    return 0;
}
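
// Sketch: converting a WVR_GetSyncPose result into the glm matrices the app
// uses elsewhere. Assumes WVR_Matrix4f_t (float m[4][4]) is row-major, the
// same convention the commented vrapi code below transposes; a hypothetical
// helper, not called by the render loop above.
static glm::mat4 wave_pose_to_mat4(const WVR_PoseState_t& pose)
{
    glm::mat4 m = glm::make_mat4(&pose.poseMatrix.m[0][0]);
    return glm::transpose(m); // row-major -> column-major
}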
void wave_init(JavaVM* vm, JNIEnv* jni, jobject activity_class)
{
    //vr_main(0, nullptr);
    /*
    java.Vm = vm;
    java.Env = jni;
    java.ActivityObject = activity_class;
    LOG("init OVR");
    const ovrInitParms initParms = vrapi_DefaultInitParms( &java );
    int32_t initResult = vrapi_Initialize( &initParms );
    if ( initResult != VRAPI_INITIALIZE_SUCCESS )
    {
        // If initialization failed, vrapi_* function calls will not be available.
        exit( 0 );
    }
    auto result = ovr_PlatformInitializeAndroid("1687982804637910", activity_class, jni);
    LOG("ovr_PlatformInitializeAndroid result %d", result);
    */
    LOG("WVR register main");
    //WVR_RegisterMain(vr_main);
}
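
// The input requests registered in vr_main also surface as queued events; a
// minimal polling sketch, assuming WVR_PollEventQueue and the WVR_Event_t
// union from the Wave SDK headers. Not wired into the loop in this file.
static void wave_poll_events(bool& running)
{
    WVR_Event_t ev;
    while (WVR_PollEventQueue(&ev))
    {
        switch (ev.common.type)
        {
        case WVR_EventType_Quit:
            running = false; // runtime asked us to shut down
            break;
        case WVR_EventType_ButtonPressed:
            LOG("button %d pressed", (int)ev.input.inputId);
            break;
        default:
            break;
        }
    }
}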
void wave_init_vr(EGLDisplay display, EGLContext context, ANativeWindow* surface)
{
    vr_main(0, nullptr);
    /*
    LOG("init swapchain");
    int rtt_w = vrapi_GetSystemPropertyInt(&java, VRAPI_SYS_PROP_SUGGESTED_EYE_TEXTURE_WIDTH);
    int rtt_h = vrapi_GetSystemPropertyInt(&java, VRAPI_SYS_PROP_SUGGESTED_EYE_TEXTURE_HEIGHT);
    LOG("ovr suggested texture size %d %d", rtt_w, rtt_h);
    for (int eye = 0; eye < 2; eye++)
    {
        swap_chain[eye] = vrapi_CreateTextureSwapChain3(VRAPI_TEXTURE_TYPE_2D, GL_RGBA8, rtt_w, rtt_h, 1, 3);
        swap_chain_count = vrapi_GetTextureSwapChainLength(swap_chain[eye]);
        for (int i = 0; i < swap_chain_count; i++)
        {
            auto texid = vrapi_GetTextureSwapChainHandle(swap_chain[eye], i);
            if (ovr_eyes[i][eye].create(rtt_w, rtt_h, texid, GL_RGBA8, true))
            {
                ovr_eyes[i][eye].bindTexture();
                glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
                glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
                ovr_eyes[i][eye].unbindTexture();
                LOG("create eye %d", eye);
            }
            else
            {
                LOG("FAILED create fb for eye %d", eye);
            }
        }
    }
    LOG("vrapi_DefaultModeParms");
    ovrModeParms parms = vrapi_DefaultModeParms( &java );
    // No need to reset the FLAG_FULLSCREEN window flag when using a View
    //parms.Flags &= ~VRAPI_MODE_FLAG_RESET_WINDOW_FULLSCREEN;
    parms.Flags |= VRAPI_MODE_FLAG_NATIVE_WINDOW;
    parms.Display = (size_t)display;
    parms.WindowSurface = (size_t)surface;
    parms.ShareContext = (size_t)context;
    LOG("enter vr mode");
    ovr_context = vrapi_EnterVrMode(&parms);
    if (!ovr_context)
    {
        LOG("EnterVRMode FAILED");
    }
    LOG("vr mode entered");
    vrapi_SetClockLevels(ovr_context, 2, 2);
    vrapi_SetPerfThread(ovr_context, VRAPI_PERF_THREAD_TYPE_MAIN, gettid());
    vrapi_SetPerfThread(ovr_context, VRAPI_PERF_THREAD_TYPE_RENDERER, 0);
    */
}
void wave_release_vr()
{
    /*
    vrapi_LeaveVrMode(ovr_context);
    ovr_context = nullptr;
    for (int eye = 0; eye < 2; eye++)
    {
        vrapi_DestroyTextureSwapChain(swap_chain[eye]);
        swap_chain[eye] = nullptr;
        for (int i = 0; i < swap_chain_count; i++)
            ovr_eyes[i][eye].destroy(); // index order matches ovr_eyes[3][2] as in init
    }
    swap_chain_index = 0;
    swap_chain_count = 0;
    LOG("leave vr mode");
    */
}
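
// Sketch: Wave-side per-eye matrices corresponding to the vrapi
// Eye[].ProjectionMatrix / ViewMatrix usage commented out in wave_draw below,
// built from WVR_GetProjection and WVR_GetTransformFromEyeToHead. The
// row-major assumption and the 0.1/100 clip planes are placeholders, not
// values taken from this project.
static void wave_eye_matrices(WVR_Eye eye, const glm::mat4& head_pose,
                              glm::mat4& proj, glm::mat4& view)
{
    WVR_Matrix4f_t p = WVR_GetProjection(eye, 0.1f, 100.f);
    proj = glm::transpose(glm::make_mat4(&p.m[0][0]));
    WVR_Matrix4f_t e = WVR_GetTransformFromEyeToHead(eye, WVR_NumDoF_6DoF);
    glm::mat4 eye_to_head = glm::transpose(glm::make_mat4(&e.m[0][0]));
    // view = inverse(head pose combined with the per-eye offset)
    view = glm::inverse(head_pose * eye_to_head);
}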
void wave_draw(float dt)
{
    /*
    const double predictedDisplayTime = vrapi_GetPredictedDisplayTime(ovr_context, ovr_frame);
    const ovrTracking2 tracking = vrapi_GetPredictedTracking2(ovr_context, predictedDisplayTime);
    auto pose_ovr = ovrMatrix4f_CreateFromQuaternion(&tracking.HeadPose.Pose.Orientation);
    auto pose_ovr_tp = ovrMatrix4f_Transpose(&pose_ovr);
    glm::mat4 pose = glm::make_mat4(reinterpret_cast<float*>(&pose_ovr_tp));
    glm::vec3 head_pos = glm::make_vec3((float*)&tracking.HeadPose.Pose.Position);
    if (controllers[0].id == -1)
    {
        // init controller
        ovrInputCapabilityHeader capsHeader;
        for (int i = 0; ; i++)
        {
            if ( vrapi_EnumerateInputDevices( ovr_context, i, &capsHeader ) >= 0 )
            {
                if ( capsHeader.Type == ovrControllerType_TrackedRemote )
                {
                    ovrInputTrackedRemoteCapabilities remoteCaps;
                    remoteCaps.Header = capsHeader;
                    if ( vrapi_GetInputDeviceCapabilities( ovr_context, &remoteCaps.Header ) >= 0 )
                    {
                        // remote is connected
                        if (remoteCaps.ControllerCapabilities & ovrControllerCaps_RightHand)
                        {
                            // right controller found
                            controllers[0].id = capsHeader.DeviceID;
                            LOG("found controller id %d", (int)controllers[0].id);
                            break;
                        }
                    }
                }
            }
            else
            {
                break;
            }
        }
    }
    if (controllers[0].id != -1)
    {
        controllers[0].update_state(predictedDisplayTime, head_pos);
        App::I.vr_controllers[0] = controllers[0];
    }
    App::I.vr_head = pose;
    App::I.vr_update(dt);
    // update hmd
    auto layer = vrapi_DefaultLayerProjection2();
    //ovrVector4f red = {1, 1, 0, 1};
    //auto layer = vrapi_DefaultLayerSolidColorProjection2(&red);
    layer.HeadPose = tracking.HeadPose;
    for (int eye = 0; eye < 2; eye++)
    {
        auto& rtt = ovr_eyes[swap_chain_index][eye];
        rtt.bindFramebuffer();
        rtt.clear({1, 0, 1, 1});
        glViewport(0, 0, rtt.getWidth(), rtt.getHeight());
        auto proj_ovr = ovrMatrix4f_Transpose(&tracking.Eye[eye].ProjectionMatrix);
        glm::mat4 proj = glm::make_mat4(reinterpret_cast<const float*>(&proj_ovr));
        auto view_ovr = ovrMatrix4f_Transpose(&tracking.Eye[eye].ViewMatrix);
        glm::mat4 view = glm::make_mat4(reinterpret_cast<const float*>(&view_ovr));
        App::I.vr_draw(proj, view, pose);
        rtt.unbindFramebuffer();
        layer.Textures[eye].ColorSwapChain = swap_chain[eye];
        layer.Textures[eye].SwapChainIndex = swap_chain_index;
        layer.Textures[eye].TexCoordsFromTanAngles =
            ovrMatrix4f_TanAngleMatrixFromProjection(&tracking.Eye[eye].ProjectionMatrix);
    }
    layer.Header.Flags |= VRAPI_FRAME_LAYER_FLAG_CHROMATIC_ABERRATION_CORRECTION;
    const ovrLayerHeader2* layers[] = { &layer.Header };
    ovrSubmitFrameDescription2 frameDesc = { 0 };
    frameDesc.Flags = 0;
    frameDesc.SwapInterval = 1;
    frameDesc.FrameIndex = ovr_frame;
    frameDesc.DisplayTime = predictedDisplayTime;
    frameDesc.LayerCount = 1;
    frameDesc.Layers = layers;
    vrapi_SubmitFrame2(ovr_context, &frameDesc);
    ovr_frame++;
    swap_chain_index = (swap_chain_index + 1) % swap_chain_count;
    */
}