The AndroidManifest.xml file requires the following feature declaration to enable the keyboard overlay:

<!-- Enable overlay keyboard -->
<uses-feature
    android:name="oculus.software.overlay_keyboard"
    android:required="false" />
The android:required="false" setting ensures that the app will still run even if the feature is missing.

Create a texture with the GL_TEXTURE_EXTERNAL_OES target and store the returned GLuint texture name. The following snippet is written in C, but you can set this up from any OpenGL library that lets you specify the GL_TEXTURE_EXTERNAL_OES target.

GLuint* t0 = &display->Program.Textures[0];
GL(glActiveTexture(GL_TEXTURE0));
GL(glGenTextures(1, t0));
ALOGV("Texture created, texName %d", *t0);
GL(glBindTexture(GL_TEXTURE_EXTERNAL_OES, *t0));
GL(glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR));
GL(glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR));
GL(glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE));
GL(glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE));
GL(glBindTexture(GL_TEXTURE_EXTERNAL_OES, 0));
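On the Java side, the texture name can then be wrapped in a SurfaceTexture and a Surface that the Android UI renders into. The following is a minimal sketch, not taken from the sample: the class and member names (VirtualDisplayBridge, onNativeTextureCreated, mSurfaceTexture, mSurface) are illustrative, and the texture name is assumed to be passed up from native code, for example through a JNI call.

import android.graphics.SurfaceTexture;
import android.view.Surface;

public class VirtualDisplayBridge {
    private SurfaceTexture mSurfaceTexture;
    private Surface mSurface;

    // Called once the native side has generated the GL_TEXTURE_EXTERNAL_OES texture.
    public void onNativeTextureCreated(int glTextureName, int widthPx, int heightPx) {
        mSurfaceTexture = new SurfaceTexture(glTextureName);
        mSurfaceTexture.setDefaultBufferSize(widthPx, heightPx);
        mSurface = new Surface(mSurfaceTexture);
        // mSurface can now back the virtual display or Presentation that hosts the Android UI.
    }
}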
In the fragment shader, sample the external texture that backs the SurfaceTexture using the GL_OES_EGL_image_external extension. The following sample code demonstrates the implementation.

static const char VIRTUAL_DISPLAY_FRAGMENT_SHADER[] =
"#extension GL_OES_EGL_image_external : require\n"
"#extension GL_OES_EGL_image_external_essl3 : require\n"
"uniform samplerExternalOES Texture0;\n"
"uniform vec2 CursorUV;\n"
"in mediump vec2 fragTexCoord;\n"
"out lowp vec4 outColor;\n"
"void main() {\n"
" vec2 invertedYFragTexCoord = vec2(fragTexCoord.x, 1.0 - fragTexCoord.y);\n"
" outColor = texture(Texture0, invertedYFragTexCoord);\n"
" float cursorDistance = distance(CursorUV, invertedYFragTexCoord);\n"
" float textureWidth = float(textureSize(Texture0, 0).x);\n"
" if (cursorDistance * textureWidth < 2.0) {\n"
" outColor = vec4(1.0 - outColor.r, 1.0 - outColor.g, 1.0 - outColor.b, 1.0);\n"
" }\n"
"}\n";
Generate MotionEvents based on your preferred input hit testing. Each frame, before sampling the texture, update its contents by calling the SurfaceTexture.updateTexImage() method. Note that SurfaceTexture does not currently support Vulkan; however, you can achieve the same result with VK_ANDROID_external_memory_android_hardware_buffer.
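As a minimal sketch of how the updateTexImage() call might be driven (this is not part of the sample, and the class and method names are illustrative), the frame-available callback can set a flag that the render thread consumes, since updateTexImage() must be called on the thread that owns the GL context the external texture belongs to:

import android.graphics.SurfaceTexture;
import java.util.concurrent.atomic.AtomicBoolean;

public class SurfaceTextureUpdater implements SurfaceTexture.OnFrameAvailableListener {
    private final SurfaceTexture mSurfaceTexture;
    private final AtomicBoolean mFrameAvailable = new AtomicBoolean(false);

    public SurfaceTextureUpdater(SurfaceTexture surfaceTexture) {
        mSurfaceTexture = surfaceTexture;
        mSurfaceTexture.setOnFrameAvailableListener(this);
    }

    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        // May be invoked on an arbitrary thread; defer the actual update to the render thread.
        mFrameAvailable.set(true);
    }

    // Call on the render thread, with the GL context current, before sampling the texture.
    public void updateIfFrameAvailable() {
        if (mFrameAvailable.getAndSet(false)) {
            mSurfaceTexture.updateTexImage();
        }
    }
}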
To interact with the UI, dispatch MotionEvents into the Window on top of the Android UI View hierarchy. Below is an example of hit-testing a ray pointing straight out from the controller pose provided by VrApi or OpenXR.

static void ovrVirtualDisplay_HandleInput(
    ovrVirtualDisplay* display,
    const ovrRigidBodyPosef* controllerPose) {
    Triangle triangle;
    memcpy(&triangle[0], &ovrGeometry_QuadVertexPositions[0], sizeof(float) * 3); // upper-left
    memcpy(&triangle[1], &ovrGeometry_QuadVertexPositions[1], sizeof(float) * 3); // lower-left
    memcpy(&triangle[2], &ovrGeometry_QuadVertexPositions[3], sizeof(float) * 3); // upper-right

    ovrVector2f* uv = &display->CursorUV;
    IntersectLineQuad(controllerPose, triangle, &display->InstanceTransform, uv);

    display->Java->Env->CallVoidMethod(
        display->Java->ActivityObject, display->OnMoveMethod, uv->x, uv->y);
    JAVA_CHECK_EXCEPTION(display->Java->Env, "failed to trigger onMove method");
}
IntersectLineQuad converts the provided API types into standard 3-vectors: the origin of the ray, the direction the ray points from that origin, and the three vertices of the upper-left triangle, plus an out parameter that receives barycentric coordinates. Those coordinates are then converted to UV coordinates by an additional ConvertBarycentricToUV function. The latter takes the shortcut of assuming the geometry the Android UI is drawn on is perpendicular to the Z-axis, so you will need to update it if the quad can be oriented anywhere in render space.

// adapted from GLM_GTX_intersect IntersectLineTriangle
static bool IntersectLineQuad(
    const MathCommon::Vector3f& orig,
    const MathCommon::Vector3f& dir,
    const Triangle& upperLeftTriangle,
    MathCommon::Vector3f& position) {
    // float Epsilon = std::numeric_limits<float>::min();

    const MathCommon::Vector3f& vert0 = upperLeftTriangle[0];
    const MathCommon::Vector3f& vert1 = upperLeftTriangle[1];
    const MathCommon::Vector3f& vert2 = upperLeftTriangle[2];

    MathCommon::Vector3f edge1 = vert1 - vert0;
    MathCommon::Vector3f edge2 = vert2 - vert0;

    MathCommon::Vector3f Perpendicular = dir.Cross(edge2);
    float det = edge1.Dot(Perpendicular);
    // if (det > -Epsilon && det < Epsilon)
    //     return false;
    float inv_det = 1.0f / det;

    MathCommon::Vector3f Tengant = orig - vert0;
    position.y = Tengant.Dot(Perpendicular) * inv_det;
    // if (position.y < 0.0f || position.y > 1.0f)
    //     return false;

    MathCommon::Vector3f Cotengant = Tengant.Cross(edge1);
    position.z = dir.Dot(Cotengant) * inv_det;
    // if (position.z < 0.0f || position.y + position.z > 1.0f)
    //     return false;

    position.x = edge2.Dot(Cotengant) * inv_det;
    return true;
}

// assumes plane is perpendicular to Z
static ovrVector2f ConvertBarycentricToUV(const MathCommon::Vector3f& barycentricCoords3D) {
    MathCommon::Vector2f uv0{0.0f, 0.0f};
    MathCommon::Vector2f uv1{0.0f, 1.0f};
    MathCommon::Vector2f uv2{1.0f, 0.0f};
    MathCommon::Vector2f result{
        uv0.x * barycentricCoords3D.x + uv1.x * barycentricCoords3D.y + uv2.x * barycentricCoords3D.z,
        uv0.y * barycentricCoords3D.x + uv1.y * barycentricCoords3D.y + uv2.y * barycentricCoords3D.z};
    return ovrVector2f{result.x, result.y};
}
Send simulated ACTION_HOVER* MotionEvents for motion, and use the preferred button input (for example, the trigger) to fire simulated ACTION_DOWN and ACTION_UP MotionEvents. The following code demonstrates how to submit simulated input to the Android UI.

private synchronized void submitMotionEvent(final float u, final float v, final int action) {
    if (!getTestDialogCreated()) {
        return;
    }
    final float width = mTestDialog.getWidth();
    final float height = mTestDialog.getHeight();
    final float majorDimension = width > height ? width : height;
    final float x = u * majorDimension;
    final float y = v * majorDimension;
    final long now = SystemClock.uptimeMillis();
    final MotionEvent event = MotionEvent.obtain(mLastDown, now, action, x, y, 0);
    event.setSource(InputDevice.SOURCE_CLASS_POINTER);
    runOnUiThread(
        new Runnable() {
            @Override
            public void run() {
                mTestDialog.dispatchTouchEvent(event);
                event.recycle();
            }
        });
}

public void onTrigger(final float u, final float v, final boolean down) {
    final int action = down ? MotionEvent.ACTION_DOWN : MotionEvent.ACTION_UP;
    if (down) {
        mLastDown = SystemClock.uptimeMillis();
        submitMotionEvent(u, v, action);
    } else {
        submitMotionEvent(u, v, action);
        mLastDown = 0;
    }
}

public void onMove(final float u, final float v) {
    if (mLastDown != 0) {
        submitMotionEvent(u, v, MotionEvent.ACTION_MOVE);
    } else {
        submitMotionEvent(u, v, MotionEvent.ACTION_HOVER_MOVE);
    }
}
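The onMove handler above only generates ACTION_HOVER_MOVE. If you also want the View hierarchy to see hover enter and exit transitions when the controller ray enters or leaves the panel, you can track that state yourself. The following sketch is not part of the sample: mHovering and onHoverStateChanged are illustrative names, and it reuses the submitMotionEvent helper shown above.

private boolean mHovering = false;

public void onHoverStateChanged(final float u, final float v, final boolean hitPanel) {
    if (hitPanel && !mHovering) {
        // Ray just entered the panel.
        mHovering = true;
        submitMotionEvent(u, v, MotionEvent.ACTION_HOVER_ENTER);
    } else if (!hitPanel && mHovering) {
        // Ray just left the panel.
        mHovering = false;
        submitMotionEvent(u, v, MotionEvent.ACTION_HOVER_EXIT);
    }
}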