By 365solution
on Wed, 10/02/2013 - 08:05
Hi, this is the code that I got from https://developer.vuforia.com/forum/android/interaction-between-markersframemarkers and I installed it on my phone. But it shows a blue screen after I press the START button. Below is the code.
Is there anything else that I forgot to do? BTW, where do I need to assign which marker renders which object?
/*============================================================================== Copyright (c) 2010-2013 QUALCOMM Austria Research Center GmbH. All Rights Reserved. @file ImageTargets.cpp @brief Sample for ImageTargets ==============================================================================*/ #include <jni.h> #include <android/log.h> #include <stdio.h> #include <string.h> #include <assert.h> #ifdef USE_OPENGL_ES_1_1 #include <GLES/gl.h> #include <GLES/glext.h> #else #include <GLES2/gl2.h> #include <GLES2/gl2ext.h> #endif #include <QCAR/QCAR.h> #include <QCAR/CameraDevice.h> #include <QCAR/Renderer.h> #include <QCAR/VideoBackgroundConfig.h> #include <QCAR/Trackable.h> #include <QCAR/TrackableResult.h> #include <QCAR/Tool.h> #include <QCAR/Tracker.h> #include <QCAR/TrackerManager.h> #include <QCAR/ImageTracker.h> #include <QCAR/CameraCalibration.h> #include <QCAR/UpdateCallback.h> #include <QCAR/DataSet.h> #include "SampleUtils.h" #include "SampleMath.h" #include "Texture.h" #include "CubeShaders.h" #include "Teapot.h" #ifdef __cplusplus extern "C" { #endif // Textures: int textureCount = 0; Texture** textures = 0; // OpenGL ES 2.0 specific: #ifdef USE_OPENGL_ES_2_0 unsigned int shaderProgramID = 0; GLint vertexHandle = 0; GLint normalHandle = 0; GLint textureCoordHandle = 0; GLint mvpMatrixHandle = 0; GLint texSampler2DHandle = 0; #endif // Screen dimensions: unsigned int screenWidth = 0; unsigned int screenHeight = 0; // Indicates whether screen is in portrait (true) or landscape (false) mode bool isActivityInPortraitMode = false; // The projection matrix used for rendering virtual objects: QCAR::Matrix44F projectionMatrix; // Constants: static const float kObjectScale = 3.f; QCAR::DataSet* dataSetStonesAndChips = 0; QCAR::DataSet* dataSetTarmac = 0; bool switchDataSetAsap = false; // Object to receive update callbacks from QCAR SDK class ImageTargets_UpdateCallback : public QCAR::UpdateCallback { virtual void QCAR_onUpdate(QCAR::State& /*state*/) { if 
(switchDataSetAsap) { switchDataSetAsap = false; // Get the image tracker: QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance(); QCAR::ImageTracker* imageTracker = static_cast<QCAR::ImageTracker*>( trackerManager.getTracker(QCAR::Tracker::IMAGE_TRACKER)); if (imageTracker == 0 || dataSetStonesAndChips == 0 || dataSetTarmac == 0 || imageTracker->getActiveDataSet() == 0) { LOG("Failed to switch data set."); return; } if (imageTracker->getActiveDataSet() == dataSetStonesAndChips) { imageTracker->deactivateDataSet(dataSetStonesAndChips); imageTracker->activateDataSet(dataSetTarmac); } else { imageTracker->deactivateDataSet(dataSetTarmac); imageTracker->activateDataSet(dataSetStonesAndChips); } } } }; ImageTargets_UpdateCallback updateCallback; JNIEXPORT int JNICALL Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_getOpenGlEsVersionNative(JNIEnv *, jobject) { #ifdef USE_OPENGL_ES_1_1 return 1; #else return 2; #endif } JNIEXPORT void JNICALL Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_setActivityPortraitMode(JNIEnv *, jobject, jboolean isPortrait) { isActivityInPortraitMode = isPortrait; } JNIEXPORT void JNICALL Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_switchDatasetAsap(JNIEnv *, jobject) { switchDataSetAsap = true; } JNIEXPORT int JNICALL Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initTracker(JNIEnv *, jobject) { LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initTracker"); // Initialize the image tracker: QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance(); QCAR::Tracker* tracker = trackerManager.initTracker(QCAR::Tracker::IMAGE_TRACKER); if (tracker == NULL) { LOG("Failed to initialize ImageTracker."); return 0; } LOG("Successfully initialized ImageTracker."); return 1; } JNIEXPORT void JNICALL Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_deinitTracker(JNIEnv *, jobject) { LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_deinitTracker"); // 
Deinit the image tracker: QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance(); trackerManager.deinitTracker(QCAR::Tracker::IMAGE_TRACKER); } JNIEXPORT int JNICALL Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_loadTrackerData(JNIEnv *, jobject) { LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_loadTrackerData"); // Get the image tracker: QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance(); QCAR::ImageTracker* imageTracker = static_cast<QCAR::ImageTracker*>( trackerManager.getTracker(QCAR::Tracker::IMAGE_TRACKER)); if (imageTracker == NULL) { LOG("Failed to load tracking data set because the ImageTracker has not" " been initialized."); return 0; } // Create the data sets: dataSetStonesAndChips = imageTracker->createDataSet(); if (dataSetStonesAndChips == 0) { LOG("Failed to create a new tracking data."); return 0; } dataSetTarmac = imageTracker->createDataSet(); if (dataSetTarmac == 0) { LOG("Failed to create a new tracking data."); return 0; } // Load the data sets: if (!dataSetStonesAndChips->load("StonesAndChips.xml", QCAR::DataSet::STORAGE_APPRESOURCE)) { LOG("Failed to load data set."); return 0; } if (!dataSetTarmac->load("Tarmac.xml", QCAR::DataSet::STORAGE_APPRESOURCE)) { LOG("Failed to load data set."); return 0; } // Activate the data set: if (!imageTracker->activateDataSet(dataSetStonesAndChips)) { LOG("Failed to activate data set."); return 0; } LOG("Successfully loaded and activated data set."); return 1; } JNIEXPORT int JNICALL Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_destroyTrackerData(JNIEnv *, jobject) { LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_destroyTrackerData"); // Get the image tracker: QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance(); QCAR::ImageTracker* imageTracker = static_cast<QCAR::ImageTracker*>( trackerManager.getTracker(QCAR::Tracker::IMAGE_TRACKER)); if (imageTracker == NULL) { LOG("Failed to destroy the 
tracking data set because the ImageTracker has not" " been initialized."); return 0; } if (dataSetStonesAndChips != 0) { if (imageTracker->getActiveDataSet() == dataSetStonesAndChips && !imageTracker->deactivateDataSet(dataSetStonesAndChips)) { LOG("Failed to destroy the tracking data set StonesAndChips because the data set " "could not be deactivated."); return 0; } if (!imageTracker->destroyDataSet(dataSetStonesAndChips)) { LOG("Failed to destroy the tracking data set StonesAndChips."); return 0; } LOG("Successfully destroyed the data set StonesAndChips."); dataSetStonesAndChips = 0; } if (dataSetTarmac != 0) { if (imageTracker->getActiveDataSet() == dataSetTarmac && !imageTracker->deactivateDataSet(dataSetTarmac)) { LOG("Failed to destroy the tracking data set Tarmac because the data set " "could not be deactivated."); return 0; } if (!imageTracker->destroyDataSet(dataSetTarmac)) { LOG("Failed to destroy the tracking data set Tarmac."); return 0; } LOG("Successfully destroyed the data set Tarmac."); dataSetTarmac = 0; } return 1; } JNIEXPORT void JNICALL Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_onQCARInitializedNative(JNIEnv *, jobject) { // Register the update callback where we handle the data set swap: QCAR::registerCallback(&updateCallback); // Comment in to enable tracking of up to 2 targets simultaneously and // split the work over multiple frames: QCAR::setHint(QCAR::HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 2); QCAR::setHint(QCAR::HINT_IMAGE_TARGET_MULTI_FRAME_ENABLED, 1); } JNIEXPORT void JNICALL Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_renderFrame(JNIEnv *, jobject) { // Clear color and depth buffer glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); // Render video background: QCAR::State state = QCAR::Renderer::getInstance().begin(); #ifdef USE_OPENGL_ES_1_1 // Set GL11 flags: glEnableClientState(GL_VERTEX_ARRAY); glDisableClientState(GL_NORMAL_ARRAY); glDisableClientState(GL_TEXTURE_COORD_ARRAY); 
glDisable(GL_LIGHTING); #endif glDisable(GL_TEXTURE_2D); glEnable(GL_DEPTH_TEST); glEnable(GL_CULL_FACE); QCAR::Matrix44F mainModelViewMatrix; QCAR::Vec3F targetCenters[2]; // make this big enough to hold all your targets // Did we find any trackables this frame? for(int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++) { // Get the trackable: const QCAR::TrackableResult* result = state.getTrackableResult(tIdx); const QCAR::Trackable& trackable = result->getTrackable(); QCAR::Matrix44F modelViewMatrix = QCAR::Tool::convertPose2GLMatrix(result->getPose()); if (tIdx == 0) { // Make the first visible target our world center (0, 0, 0) // Store its modelViewMatrix and continue looking for other targets mainModelViewMatrix = modelViewMatrix; targetCenters[0].data[0] = 0.0f; targetCenters[0].data[1] = 0.0f; targetCenters[0].data[2] = 0.0f; } else { // This is another visible target // Find its center point in relation to the first target // To do this we use the matrix inverse function (SampleMath.h from the Dominoes project) QCAR::Matrix44F mainModelViewInverse = SampleMath::Matrix44FInverse(mainModelViewMatrix); QCAR::Matrix44F modelViewTranspose = SampleMath::Matrix44FTranspose(modelViewMatrix); // let's work with row-major matrices QCAR::Matrix44F offsetMatrix = QCAR::Tool::multiply(mainModelViewInverse, modelViewTranspose); // Transform a point on the second target by this offset matrix // (0, 0, 0) is the local center of the target QCAR::Vec4F position(0.0f, 0.0f, 0.0f, 1.0f); position = SampleMath::Vec4FTransform(position, offsetMatrix); // Add this position to our array targetCenters[1].data[0] = position.data[0]; targetCenters[1].data[1] = position.data[1]; targetCenters[1].data[2] = position.data[2]; } } if (state.getNumTrackableResults() > 1) { #ifdef USE_OPENGL_ES_1_1 // Load projection matrix: glMatrixMode(GL_PROJECTION); glLoadMatrixf(projectionMatrix.data); // Load model view matrix: glMatrixMode(GL_MODELVIEW); 
glLoadMatrixf(mainModelViewMatrix.data); // Set the color to red: glColor4f(1.0f, 0.0f, 0.0f, 1.0f); // Draw object: glVertexPointer(3, GL_FLOAT, 0, (const GLvoid*) &targetCenters[0].data[0]); glDrawArrays(GL_LINES, 0, 2); #else QCAR::Matrix44F modelViewProjection; SampleUtils::multiplyMatrix(&projectionMatrix.data[0], &mainModelViewMatrix.data[0], &modelViewProjection.data[0]); glUseProgram(shaderProgramID); glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0, (const GLvoid*) &targetCenters[0].data[0]); glEnableVertexAttribArray(vertexHandle); glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE, (GLfloat*) &modelViewProjection.data[0]); glDrawArrays(GL_LINES, 0, 2); #endif } glDisable(GL_DEPTH_TEST); #ifdef USE_OPENGL_ES_1_1 glDisable(GL_TEXTURE_2D); glDisableClientState(GL_VERTEX_ARRAY); glDisableClientState(GL_NORMAL_ARRAY); glDisableClientState(GL_TEXTURE_COORD_ARRAY); #else glEnable(GL_TEXTURE_2D); glDisableVertexAttribArray(vertexHandle); glDisableVertexAttribArray(normalHandle); glDisableVertexAttribArray(textureCoordHandle); #endif QCAR::Renderer::getInstance().end(); } void configureVideoBackground() { // Get the default video mode: QCAR::CameraDevice& cameraDevice = QCAR::CameraDevice::getInstance(); QCAR::VideoMode videoMode = cameraDevice. 
getVideoMode(QCAR::CameraDevice::MODE_DEFAULT); // Configure the video background QCAR::VideoBackgroundConfig config; config.mEnabled = true; config.mSynchronous = true; config.mPosition.data[0] = 0.0f; config.mPosition.data[1] = 0.0f; if (isActivityInPortraitMode) { //LOG("configureVideoBackground PORTRAIT"); config.mSize.data[0] = videoMode.mHeight * (screenHeight / (float)videoMode.mWidth); config.mSize.data[1] = screenHeight; if(config.mSize.data[0] < screenWidth) { LOG("Correcting rendering background size to handle missmatch between screen and video aspect ratios."); config.mSize.data[0] = screenWidth; config.mSize.data[1] = screenWidth * (videoMode.mWidth / (float)videoMode.mHeight); } } else { //LOG("configureVideoBackground LANDSCAPE"); config.mSize.data[0] = screenWidth; config.mSize.data[1] = videoMode.mHeight * (screenWidth / (float)videoMode.mWidth); if(config.mSize.data[1] < screenHeight) { LOG("Correcting rendering background size to handle missmatch between screen and video aspect ratios."); config.mSize.data[0] = screenHeight * (videoMode.mWidth / (float)videoMode.mHeight); config.mSize.data[1] = screenHeight; } } LOG("Configure Video Background : Video (%d,%d), Screen (%d,%d), mSize (%d,%d)", videoMode.mWidth, videoMode.mHeight, screenWidth, screenHeight, config.mSize.data[0], config.mSize.data[1]); // Set the config: QCAR::Renderer::getInstance().setVideoBackgroundConfig(config); } JNIEXPORT void JNICALL Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initApplicationNative( JNIEnv* env, jobject obj, jint width, jint height) { LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initApplicationNative"); // Store screen dimensions screenWidth = width; screenHeight = height; // Handle to the activity class: jclass activityClass = env->GetObjectClass(obj); jmethodID getTextureCountMethodID = env->GetMethodID(activityClass, "getTextureCount", "()I"); if (getTextureCountMethodID == 0) { LOG("Function getTextureCount() not found."); 
return; } textureCount = env->CallIntMethod(obj, getTextureCountMethodID); if (!textureCount) { LOG("getTextureCount() returned zero."); return; } textures = new Texture*[textureCount]; jmethodID getTextureMethodID = env->GetMethodID(activityClass, "getTexture", "(I)Lcom/qualcomm/QCARSamples/ImageTargets/Texture;"); if (getTextureMethodID == 0) { LOG("Function getTexture() not found."); return; } // Register the textures for (int i = 0; i < textureCount; ++i) { jobject textureObject = env->CallObjectMethod(obj, getTextureMethodID, i); if (textureObject == NULL) { LOG("GetTexture() returned zero pointer"); return; } textures[i] = Texture::create(env, textureObject); } LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initApplicationNative finished"); } JNIEXPORT void JNICALL Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_deinitApplicationNative( JNIEnv* env, jobject obj) { LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_deinitApplicationNative"); // Release texture resources if (textures != 0) { for (int i = 0; i < textureCount; ++i) { delete textures[i]; textures[i] = NULL; } delete[]textures; textures = NULL; textureCount = 0; } } JNIEXPORT void JNICALL Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_startCamera(JNIEnv *, jobject) { LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_startCamera"); // Select the camera to open, set this to QCAR::CameraDevice::CAMERA_FRONT // to activate the front camera instead. 
QCAR::CameraDevice::CAMERA camera = QCAR::CameraDevice::CAMERA_DEFAULT; // Initialize the camera: if (!QCAR::CameraDevice::getInstance().init(camera)) return; // Configure the video background configureVideoBackground(); // Select the default mode: if (!QCAR::CameraDevice::getInstance().selectVideoMode( QCAR::CameraDevice::MODE_DEFAULT)) return; // Start the camera: if (!QCAR::CameraDevice::getInstance().start()) return; // Uncomment to enable flash //if(QCAR::CameraDevice::getInstance().setFlashTorchMode(true)) // LOG("IMAGE TARGETS : enabled torch"); // Uncomment to enable infinity focus mode, or any other supported focus mode // See CameraDevice.h for supported focus modes //if(QCAR::CameraDevice::getInstance().setFocusMode(QCAR::CameraDevice::FOCUS_MODE_INFINITY)) // LOG("IMAGE TARGETS : enabled infinity focus"); // Start the tracker: QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance(); QCAR::Tracker* imageTracker = trackerManager.getTracker(QCAR::Tracker::IMAGE_TRACKER); if(imageTracker != 0) imageTracker->start(); } JNIEXPORT void JNICALL Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_stopCamera(JNIEnv *, jobject) { LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_stopCamera"); // Stop the tracker: QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance(); QCAR::Tracker* imageTracker = trackerManager.getTracker(QCAR::Tracker::IMAGE_TRACKER); if(imageTracker != 0) imageTracker->stop(); QCAR::CameraDevice::getInstance().stop(); QCAR::CameraDevice::getInstance().deinit(); } JNIEXPORT void JNICALL Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_setProjectionMatrix(JNIEnv *, jobject) { LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_setProjectionMatrix"); // Cache the projection matrix: const QCAR::CameraCalibration& cameraCalibration = QCAR::CameraDevice::getInstance().getCameraCalibration(); projectionMatrix = QCAR::Tool::getProjectionGL(cameraCalibration, 2.0f, 2500.0f); } // 
---------------------------------------------------------------------------- // Activates Camera Flash // ---------------------------------------------------------------------------- JNIEXPORT jboolean JNICALL Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_activateFlash(JNIEnv*, jobject, jboolean flash) { return QCAR::CameraDevice::getInstance().setFlashTorchMode((flash==JNI_TRUE)) ? JNI_TRUE : JNI_FALSE; } JNIEXPORT jboolean JNICALL Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_autofocus(JNIEnv*, jobject) { return QCAR::CameraDevice::getInstance().setFocusMode(QCAR::CameraDevice::FOCUS_MODE_TRIGGERAUTO) ? JNI_TRUE : JNI_FALSE; } JNIEXPORT jboolean JNICALL Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_setFocusMode(JNIEnv*, jobject, jint mode) { int qcarFocusMode; switch ((int)mode) { case 0: qcarFocusMode = QCAR::CameraDevice::FOCUS_MODE_NORMAL; break; case 1: qcarFocusMode = QCAR::CameraDevice::FOCUS_MODE_CONTINUOUSAUTO; break; case 2: qcarFocusMode = QCAR::CameraDevice::FOCUS_MODE_INFINITY; break; case 3: qcarFocusMode = QCAR::CameraDevice::FOCUS_MODE_MACRO; break; default: return JNI_FALSE; } return QCAR::CameraDevice::getInstance().setFocusMode(qcarFocusMode) ? JNI_TRUE : JNI_FALSE; } JNIEXPORT void JNICALL Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_initRendering( JNIEnv* env, jobject obj) { LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_initRendering"); // Define clear color glClearColor(0.0f, 0.0f, 0.0f, QCAR::requiresAlpha() ? 
0.0f : 1.0f); // Now generate the OpenGL texture objects and add settings for (int i = 0; i < textureCount; ++i) { glGenTextures(1, &(textures[i]->mTextureID)); glBindTexture(GL_TEXTURE_2D, textures[i]->mTextureID); glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, textures[i]->mWidth, textures[i]->mHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE, (GLvoid*) textures[i]->mData); } #ifndef USE_OPENGL_ES_1_1 shaderProgramID = SampleUtils::createProgramFromBuffer(cubeMeshVertexShader, cubeFragmentShader); vertexHandle = glGetAttribLocation(shaderProgramID, "vertexPosition"); normalHandle = glGetAttribLocation(shaderProgramID, "vertexNormal"); textureCoordHandle = glGetAttribLocation(shaderProgramID, "vertexTexCoord"); mvpMatrixHandle = glGetUniformLocation(shaderProgramID, "modelViewProjectionMatrix"); texSampler2DHandle = glGetUniformLocation(shaderProgramID, "texSampler2D"); #endif } JNIEXPORT void JNICALL Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_updateRendering( JNIEnv* env, jobject obj, jint width, jint height) { LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_updateRendering"); // Update screen dimensions screenWidth = width; screenHeight = height; // Reconfigure the video background configureVideoBackground(); } #ifdef __cplusplus } #endif