By neerajshetty121
on Thu, 03/31/2016 - 12:32
I am trying to detect the tap on a 3d object in Android.
I have used the Dominoes example as a reference to code my application, i.e. by creating a bounding box around my 3d object (which is also a cube).
This is what I have so far.
Buttons.cpp
JNIEXPORT void JNICALL Java_com_vuforia_samples_Dominoes_ButtonsRenderer_nativeSetModelViewMatrix(JNIEnv* env, jobject obj, Vuforia::Matrix44F matrix) { modelViewMatrix = matrix; } JNIEXPORT void JNICALL Java_com_vuforia_samples_Dominoes_ButtonsRenderer_nativeAddButton(JNIEnv* env, jobject obj, jstring buttonID, jfloat x, jfloat y, jfloat z, jfloat lenX, jfloat lenY, jfloat lenZ){ // If we've reached the max domino count, return if (buttonCount == MAX_BUTTONS) { return; } // Get the next available domino structure Button *button = &buttonArray[buttonCount]; // Initialize the domino button->id = uniqueId; button->buttonID = buttonID; button->posX = x; button->posY = y; button->posZ = z; button->lenX = lenX; button->lenY = lenY; button->lenZ = lenZ; // Calculate the initial transforms from the position and pivot updatePickingTransform(button); buttonCount++; uniqueId++; } JNIEXPORT void JNICALL Java_com_vuforia_samples_Dominoes_ButtonsRenderer_nativeHandleTouches(JNIEnv* env, jobject obj) { if (tap) { // Find the start and end points in world space for the tap // These will lie on the near and far plane and can be used for picking Vuforia::Vec3F intersection, lineStart, lineEnd; projectScreenPointToPlane(Vuforia::Vec2F(tapX, tapY), Vuforia::Vec3F(0, 0, 0), Vuforia::Vec3F(0, 0, 1), intersection, lineStart, lineEnd); Button* button; // For each domino, check for intersection with our picking line for (int i = 0; i < buttonCount; i++) { button = &buttonArray[i]; bool intersection = checkIntersectionLine(button->pickingTransform, lineStart, lineEnd); if (intersection) { selectedButtonIndex = i; tap = false; buttonClickDetected(); break; } } } } void buttonClickDetected(){ Button selectedButton = buttonArray[selectedButtonIndex]; displayMessage("Touch Detected"); } void updatePickingTransform(Button* button) { // Reset the picking transform to the identity matrix button->pickingTransform = SampleMath::Matrix44FIdentity(); float* transformPtr = 
&button->pickingTransform.data[0]; // The following transformations happen in reverse order // For picking, we want a slightly wider target to improve responsiveness // We can also skip the tilting transformation, since picking only occurs when the dominoes are upright SampleUtils::translatePoseMatrix(button->posX, button->posY, button->posZ, transformPtr); SampleUtils::scalePoseMatrix(button->lenX, button->lenY, button->lenZ, transformPtr); } void projectScreenPointToPlane(Vuforia::Vec2F point, Vuforia::Vec3F planeCenter, Vuforia::Vec3F planeNormal, Vuforia::Vec3F &intersection, Vuforia::Vec3F &lineStart, Vuforia::Vec3F &lineEnd) { // Window Coordinates to Normalized Device Coordinates Vuforia::VideoBackgroundConfig config = Vuforia::Renderer::getInstance().getVideoBackgroundConfig(); float halfScreenWidth = screenWidth / 2.0f; float halfScreenHeight = screenHeight / 2.0f; float halfViewportWidth = config.mSize.data[0] / 2.0f; float halfViewportHeight = config.mSize.data[1] / 2.0f; float x = (point.data[0] - halfScreenWidth) / halfViewportWidth; float y = (point.data[1] - halfScreenHeight) / halfViewportHeight * -1; Vuforia::Vec4F ndcNear(x, y, -1, 1); Vuforia::Vec4F ndcFar(x, y, 1, 1); // Normalized Device Coordinates to Eye Coordinates Vuforia::Vec4F pointOnNearPlane = SampleMath::Vec4FTransform(ndcNear, inverseProjMatrix); Vuforia::Vec4F pointOnFarPlane = SampleMath::Vec4FTransform(ndcFar, inverseProjMatrix); pointOnNearPlane = SampleMath::Vec4FDiv(pointOnNearPlane, pointOnNearPlane.data[3]); pointOnFarPlane = SampleMath::Vec4FDiv(pointOnFarPlane, pointOnFarPlane.data[3]); // Eye Coordinates to Object Coordinates Vuforia::Matrix44F inverseModelViewMatrix = SampleMath::Matrix44FInverse(modelViewMatrix); Vuforia::Vec4F nearWorld = SampleMath::Vec4FTransform(pointOnNearPlane, inverseModelViewMatrix); Vuforia::Vec4F farWorld = SampleMath::Vec4FTransform(pointOnFarPlane, inverseModelViewMatrix); lineStart = Vuforia::Vec3F(nearWorld.data[0], nearWorld.data[1], 
nearWorld.data[2]); lineEnd = Vuforia::Vec3F(farWorld.data[0], farWorld.data[1], farWorld.data[2]); linePlaneIntersection(lineStart, lineEnd, planeCenter, planeNormal, intersection); } bool linePlaneIntersection(Vuforia::Vec3F lineStart, Vuforia::Vec3F lineEnd, Vuforia::Vec3F pointOnPlane, Vuforia::Vec3F planeNormal, Vuforia::Vec3F &intersection) { Vuforia::Vec3F lineDir = SampleMath::Vec3FSub(lineEnd, lineStart); lineDir = SampleMath::Vec3FNormalize(lineDir); Vuforia::Vec3F planeDir = SampleMath::Vec3FSub(pointOnPlane, lineStart); float n = SampleMath::Vec3FDot(planeNormal, planeDir); float d = SampleMath::Vec3FDot(planeNormal, lineDir); if (fabs(d) < 0.00001) { // Line is parallel to plane return false; } float dist = n / d; Vuforia::Vec3F offset = SampleMath::Vec3FScale(lineDir, dist); intersection = SampleMath::Vec3FAdd(lineStart, offset); } // ---------------------------------------------------------------------------- // Picking // ---------------------------------------------------------------------------- bool checkIntersectionLine(Vuforia::Matrix44F transformA, Vuforia::Vec3F pointA, Vuforia::Vec3F pointB) { // Use the separating axis theorem to determine whether or not // the line intersects the object-oriented bounding box transformA = SampleMath::Matrix44FTranspose(transformA); Vuforia::Vec3F lineDir = SampleMath::Vec3FSub(pointB, pointA); Vuforia::Vec3F normalA1 = SampleMath::Vec3FTransformNormal(buttonNormals[0], transformA); Vuforia::Vec3F normalA2 = SampleMath::Vec3FTransformNormal(buttonNormals[1], transformA); Vuforia::Vec3F normalA3 = SampleMath::Vec3FTransformNormal(buttonNormals[2], transformA); Vuforia::Vec3F crossA1 = SampleMath::Vec3FCross(normalA1, lineDir); Vuforia::Vec3F crossA2 = SampleMath::Vec3FCross(normalA2, lineDir); Vuforia::Vec3F crossA3 = SampleMath::Vec3FCross(normalA3, lineDir); for (int i = 0; i < 8; i++) { buttonTransformedVerticesA[i] = SampleMath::Vec3FTransform(buttonBaseVertices[i], transformA); } if 
(isSeparatingAxisLine(normalA1, pointA, pointB)) return false; if (isSeparatingAxisLine(normalA2, pointA, pointB)) return false; if (isSeparatingAxisLine(normalA3, pointA, pointB)) return false; if (isSeparatingAxisLine(crossA1, pointA, pointB)) return false; if (isSeparatingAxisLine(crossA2, pointA, pointB)) return false; if (isSeparatingAxisLine(crossA3, pointA, pointB)) return false; return true; } bool isSeparatingAxisLine(Vuforia::Vec3F axis, Vuforia::Vec3F pointA, Vuforia::Vec3F pointB) { // Determine whether or not the given axis separates // the globally stored transformed vertices of the bounding box // and the given line float magnitude = axis.data[0] * axis.data[0] + axis.data[1] * axis.data[1] + axis.data[2] * axis.data[2]; if (magnitude < 0.00001) return false; float minA, maxA, minB, maxB; minA = maxA = SampleMath::Vec3FDot(buttonTransformedVerticesA[0], axis); float p; for (int i = 1; i < 8; i++) { p = SampleMath::Vec3FDot(buttonTransformedVerticesA[i], axis); if (p < minA) minA = p; if (p > maxA) maxA = p; } minB = maxB = SampleMath::Vec3FDot(pointA, axis); p = SampleMath::Vec3FDot(pointB, axis); if (p < minB) minB = p; if (p > maxB) maxB = p; if (maxA < minB) return true; if (minA > maxB) return true; return false; }
ButtonsRenderer.java
private native boolean nativeClearButtons(); private native boolean nativeSetModelViewMatrix(Matrix44F modelViewMatrix); private native boolean nativeAddButton(String buttonID, float x, float y, float z, float lenX, float lenY, float lenZ); private native boolean nativeHandleTouches(); private native boolean clearNativeTouch(); // The render function. private void renderFrame() { GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT); State state = mRenderer.begin(); mRenderer.drawVideoBackground(); GLES20.glEnable(GLES20.GL_DEPTH_TEST); // handle face culling, we need to detect if we are using reflection // to determine the direction of the culling GLES20.glEnable(GLES20.GL_CULL_FACE); GLES20.glEnable(GLES20.GL_BLEND); GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA); if (Renderer.getInstance().getVideoBackgroundConfig().getReflection() == VIDEO_BACKGROUND_REFLECTION.VIDEO_BACKGROUND_REFLECTION_ON) GLES20.glFrontFace(GLES20.GL_CW); // Front camera else GLES20.glFrontFace(GLES20.GL_CCW); // Back camera // Set the viewport int[] viewport = vuforiaAppSession.getViewport(); GLES20.glViewport(viewport[0], viewport[1], viewport[2], viewport[3]); //Clear Buttons Array in Native nativeClearButtons(); // did we find any trackables this frame? 
for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++) { TrackableResult result = state.getTrackableResult(tIdx); Trackable trackable = result.getTrackable(); if (!result.isOfType(ObjectTargetResult.getClassType())) continue; ObjectTarget objectTarget = (ObjectTarget) trackable; Matrix44F modelViewMatrix_Vuforia = Tool .convertPose2GLMatrix(result.getPose()); float[] modelViewMatrix = modelViewMatrix_Vuforia.getData(); //Set Model View Matrix in Native nativeSetModelViewMatrix(modelViewMatrix_Vuforia); // deal with the modelview and projection matrices float[] modelViewProjection = new float[16]; float[] objectSize = objectTarget.getSize().getData(); Matrix.translateM(modelViewMatrix, 0, objectSize[0] / 2, objectSize[1] / 2, objectSize[2] / 2); Matrix.scaleM(modelViewMatrix, 0, objectSize[0] / 2, objectSize[1] / 2, objectSize[2] / 2); Matrix.multiplyMM(modelViewProjection, 0, vuforiaAppSession.getProjectionMatrix().getData(), 0, modelViewMatrix, 0); // activatrigidBodyTarget.xmle the shader program and bind the vertex/normal/tex coords GLES20.glUseProgram(shaderProgramID); GLES20.glVertexAttribPointer(vertexHandle, 3, GLES20.GL_FLOAT, false, 0, mCubeObject.getVertices()); GLES20.glUniform1f(opacityHandle, 0.3f); GLES20.glUniform3f(colorHandle, 0.0f, 0.0f, 0.0f); GLES20.glVertexAttribPointer(textureCoordHandle, 2, GLES20.GL_FLOAT, false, 0, mCubeObject.getTexCoords()); GLES20.glVertexAttribPointer(normalHandle, 3, GLES20.GL_FLOAT, false, 0, mCubeObject.getNormals()); GLES20.glEnableVertexAttribArray(vertexHandle); GLES20.glEnableVertexAttribArray(normalHandle); GLES20.glEnableVertexAttribArray(textureCoordHandle); GLES20.glActiveTexture(GLES20.GL_TEXTURE0); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures.get(0).mTextureID[0]); GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, modelViewProjection, 0); GLES20.glUniform1i(texSampler2DHandle, 0); // pass the model view matrix to the shader GLES20.glUniformMatrix4fv(mvpMatrixHandle, 1, false, 
modelViewProjection, 0); // finally render GLES20.glDrawElements(GLES20.GL_TRIANGLES, mCubeObject.getNumObjectIndex(), GLES20.GL_UNSIGNED_SHORT, mCubeObject.getIndices()); // disable the enabled arrays GLES20.glDisableVertexAttribArray(vertexHandle); GLES20.glDisableVertexAttribArray(normalHandle); GLES20.glDisableVertexAttribArray(textureCoordHandle); SampleUtils.checkGLError("Render Frame"); //Add Button to array in native String action = "default"; String buttonID = trackable.getName() + "_" + action; float posX = objectSize[0] / 2; float posY = objectSize[1] / 2; float posZ = objectSize[2] / 2; float lenX = objectSize[0] / 2; float lenY = objectSize[1] / 2; float lenZ = objectSize[2] / 2; nativeAddButton(buttonID, posX, posY, posZ, lenX, lenY, lenZ); //Call Handle Touch for this trackable nativeHandleTouches(); } clearNativeTouch(); //Set Native "Tap" to false GLES20.glDisable(GLES20.GL_DEPTH_TEST); GLES20.glDisable(GLES20.GL_BLEND); mRenderer.end(); }
Right now, I get the "Touch Detected" message any time my trackable is in view and I tap anywhere on the screen, even when the tap does not land on the rendered cube.
I want the message to be shown only when I tap on the cube. Need help on this urgently.