I am integrating image targets with cloud recognition sample in android and application working properly, i mean i see the teapot on targets that stored locally, and after pressing start scan button, it recognizes the target that is stored in cloud but nothing rendered, and then application freezes and shows some error in log
I am also sending my image targets sample, can you check it please, am i missing something?
Thanks
/*==============================================================================
Copyright (c) 2010-2013 QUALCOMM Austria Research Center GmbH.
All Rights Reserved.
Qualcomm Confidential and Proprietary
@file
ImageTargets.cpp
@brief
Sample for ImageTargets
==============================================================================*/
#include <jni.h>
#include <android/log.h>
#include <stdio.h>
#include <string.h>
#include <assert.h>
#include "SampleMath.cpp"
#include "Tomcat.h"
#include "Teapot.h"
#include <math.h>
#ifdef USE_OPENGL_ES_1_1
#include <GLES/gl.h>
#include <GLES/glext.h>
#else
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#endif
#include <QCAR/QCAR.h>
#include <QCAR/CameraDevice.h>
#include <QCAR/Renderer.h>
#include <QCAR/VideoBackgroundConfig.h>
#include <QCAR/Trackable.h>
#include <QCAR/TrackableResult.h>
#include <QCAR/Tool.h>
#include <QCAR/Tracker.h>
#include <QCAR/TrackerManager.h>
#include <QCAR/ImageTracker.h>
#include <QCAR/CameraCalibration.h>
#include <QCAR/UpdateCallback.h>
#include <QCAR/DataSet.h>
#include "SampleUtils.h"
#include "Texture.h"
#include "CubeShaders.h"
#include <QCAR/ImageTargetResult.h>
#include <QCAR/ImageTarget.h>
#include <QCAR/TargetFinder.h>
#include <QCAR/TargetSearchResult.h>
#include <QCAR/TrackableSource.h>
#include <QCAR/Image.h>
#include <sys/time.h>
#ifdef __cplusplus
extern "C" {
#endif
// Textures:
int textureCount = 0;
Texture** textures = 0;
#define TOTAL_ANIMATION_DURATION 10.0f //seconds
// OpenGL ES 2.0 specific:
#ifdef USE_OPENGL_ES_2_0
unsigned int shaderProgramID = 0;
GLint vertexHandle = 0;
GLint normalHandle = 0;
GLint textureCoordHandle = 0;
GLint mvpMatrixHandle = 0;
GLint texSampler2DHandle = 0;
#endif
// Screen dimensions:
unsigned int screenWidth = 0;
unsigned int screenHeight = 0;
// Indicates whether screen is in portrait (true) or landscape (false) mode
bool isActivityInPortraitMode = false;
// The projection matrix used for rendering virtual objects:
QCAR::Matrix44F projectionMatrix;
// Constants:
static const float kObjectScale = 2.f;
QCAR::DataSet* dataSetStonesAndChips = 0;
QCAR::DataSet* dataSetTarmac = 0;
bool switchDataSetAsap = false;
bool scanningMode = false;
bool showStartScanButton = false;
static const size_t CONTENT_MAX = 256;
char lastTargetId[CONTENT_MAX];
static const char* kAccessKey = "b7f1a5db757132fd97f45050ea7fc568ffd2a0e7";
static const char* kSecretKey = "d51fdfaae3df6487e3951ece5d00affb1e59d92a";
QCAR::Vec3F targetCumulatedDisplacement(0.0f, 0.0f, 0.0f);
void initStateVariables() {
lastTargetId[0] = '\0';
scanningMode = true;
}
JNIEXPORT int JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initCloudReco(
JNIEnv *, jobject)
{
LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initCloudReco");
QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance();
QCAR::ImageTracker* imageTracker = static_cast<QCAR::ImageTracker*>(
trackerManager.getTracker(QCAR::Tracker::IMAGE_TRACKER));
assert(imageTracker != NULL);
//Get the TargetFinder:
QCAR::TargetFinder* targetFinder = imageTracker->getTargetFinder();
assert(targetFinder != NULL);
// Start initialization:
if (targetFinder->startInit(kAccessKey, kSecretKey))
{
targetFinder->waitUntilInitFinished();
}
int resultCode = targetFinder->getInitState();
if ( resultCode != QCAR::TargetFinder::INIT_SUCCESS)
{
LOG("Failed to initialize target finder.");
return resultCode;
}
return resultCode;
}
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_printString(
JNIEnv * env, jobject, jstring myJavaString)
{
const char *cstr = env->GetStringUTFChars( myJavaString, 0 );
LOG("Hello, this is my string %s", cstr);
env->ReleaseStringUTFChars(myJavaString, cstr);
}
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_printInteger(JNIEnv * , jobject, jint value)
{
LOG("Failed to initialize target finder. %d", value);
}
JNIEXPORT int JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_deinitCloudReco(
JNIEnv *, jobject)
{
// Get the image tracker:
QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance();
QCAR::ImageTracker* imageTracker = static_cast<QCAR::ImageTracker*>(
trackerManager.getTracker(QCAR::Tracker::IMAGE_TRACKER));
if (imageTracker == NULL)
{
LOG("Failed to deinit CloudReco as the ImageTracker was not initialized.");
return 0;
}
// Deinitialize Cloud Reco:
QCAR::TargetFinder* finder = imageTracker->getTargetFinder();
finder->deinit();
return 1;
}
// Use the following calls if you would like to customize the color of the UI
// targetFinder->setUIScanlineColor(1.0, 0.0, 0.0);
// targetFinder->setUIPointColor(0.0, 0.0, 1.0);
// Object to receive update callbacks from QCAR SDK
class ImageTargets_UpdateCallback: public QCAR::UpdateCallback {
virtual void QCAR_onUpdate(QCAR::State& state) { //NEW code for Cloud Reco
QCAR::TrackerManager& trackerManager =
QCAR::TrackerManager::getInstance();
QCAR::ImageTracker* imageTracker =
static_cast<QCAR::ImageTracker*>(trackerManager.getTracker(
QCAR::Tracker::IMAGE_TRACKER));
// Get the target finder:
QCAR::TargetFinder* targetFinder = imageTracker->getTargetFinder();
// Check if there are new results available:
const int statusCode = targetFinder->updateSearchResults();
if (statusCode < 0) {
char errorMessage[80];
sprintf(errorMessage, "Error with status code %d at frame %d",
statusCode, state.getFrame().getTimeStamp());
} else if (statusCode == QCAR::TargetFinder::UPDATE_RESULTS_AVAILABLE) {
// Process new search results
if (targetFinder->getResultCount() > 0) {
const QCAR::TargetSearchResult* result =
targetFinder->getResult(0);
// Check if this target is suitable for tracking:
if (result->getTrackingRating() > 0) {
// Create a new Trackable from the result:
QCAR::Trackable* newTrackable =
targetFinder->enableTracking(*result);
if (newTrackable != 0) {
LOG(
"Successfully created new trackable '%s' with rating '%d'.", newTrackable->getName(), result->getTrackingRating());
// Checks if the targets has changed
LOG(
"Comparing Strings. currentTargetId: %s lastTargetId: %s", result->getUniqueTargetId(), lastTargetId);
if (strcmp(result->getUniqueTargetId(), lastTargetId)
!= 0) {
LOG(
"Recognized target name is %d", result->getUniqueTargetId());
//snprintf(targetMetadata, CONTENT_MAX, "%s", result->getMetaData());
// If the target has changed...
// app-specific: do something
// (e.g. generate new 3D model or texture)
}
strcpy(lastTargetId, result->getUniqueTargetId());
// // Stop Cloud Reco scanning
targetFinder->stop();
//
scanningMode = false;
//
showStartScanButton = true;
}
}
}
}
}
};
ImageTargets_UpdateCallback updateCallback;
JNIEXPORT int JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_getOpenGlEsVersionNative(JNIEnv *, jobject)
{
#ifdef USE_OPENGL_ES_1_1
return 1;
#else
return 2;
#endif
}
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_setActivityPortraitMode(JNIEnv *, jobject, jboolean isPortrait)
{
isActivityInPortraitMode = isPortrait;
}
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_switchDatasetAsap(JNIEnv *, jobject)
{
switchDataSetAsap = true;
}
JNIEXPORT int JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initTracker(JNIEnv *, jobject)
{
LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initTracker");
// Initialize the image tracker:
QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance();
QCAR::Tracker* tracker = trackerManager.initTracker(QCAR::Tracker::IMAGE_TRACKER);
if (tracker == NULL)
{
LOG("Failed to initialize ImageTracker.");
return 0;
}
LOG("Successfully initialized ImageTracker.");
return 1;
}
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_deinitTracker(JNIEnv *, jobject)
{
LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_deinitTracker");
// Deinit the image tracker:
QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance();
trackerManager.deinitTracker(QCAR::Tracker::IMAGE_TRACKER);
}
JNIEXPORT int JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_loadTrackerData(JNIEnv *, jobject)
{
LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_loadTrackerData");
// Get the image tracker:
QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance();
QCAR::ImageTracker* imageTracker = static_cast<QCAR::ImageTracker*>(
trackerManager.getTracker(QCAR::Tracker::IMAGE_TRACKER));
if (imageTracker == NULL)
{
LOG("Failed to load tracking data set because the ImageTracker has not"
" been initialized.");
return 0;
}
// Create the data sets:
dataSetStonesAndChips = imageTracker->createDataSet();
if (dataSetStonesAndChips == 0)
{
LOG("Failed to create a new tracking data.");
return 0;
}
dataSetTarmac = imageTracker->createDataSet();
if (dataSetTarmac == 0)
{
LOG("Failed to create a new tracking data.");
return 0;
}
// Load the data sets:
if (!dataSetStonesAndChips->load("StonesAndChips.xml", QCAR::DataSet::STORAGE_APPRESOURCE))
{
LOG("Failed to load data set.");
return 0;
}
if (!dataSetTarmac->load("Tarmac.xml", QCAR::DataSet::STORAGE_APPRESOURCE))
{
LOG("Failed to load data set.");
return 0;
}
// Activate the data set:
if (!imageTracker->activateDataSet(dataSetStonesAndChips))
{
LOG("Failed to activate data set.");
return 0;
}
LOG("Successfully loaded and activated data set.");
return 1;
}
JNIEXPORT int JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_destroyTrackerData(JNIEnv *, jobject)
{
LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_destroyTrackerData");
// Get the image tracker:
QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance();
QCAR::ImageTracker* imageTracker = static_cast<QCAR::ImageTracker*>(
trackerManager.getTracker(QCAR::Tracker::IMAGE_TRACKER));
if (imageTracker == NULL)
{
LOG("Failed to destroy the tracking data set because the ImageTracker has not"
" been initialized.");
return 0;
}
if (dataSetStonesAndChips != 0)
{
if (imageTracker->getActiveDataSet() == dataSetStonesAndChips &&
!imageTracker->deactivateDataSet(dataSetStonesAndChips))
{
LOG("Failed to destroy the tracking data set StonesAndChips because the data set "
"could not be deactivated.");
return 0;
}
if (!imageTracker->destroyDataSet(dataSetStonesAndChips))
{
LOG("Failed to destroy the tracking data set StonesAndChips.");
return 0;
}
LOG("Successfully destroyed the data set StonesAndChips.");
dataSetStonesAndChips = 0;
}
if (dataSetTarmac != 0)
{
if (imageTracker->getActiveDataSet() == dataSetTarmac &&
!imageTracker->deactivateDataSet(dataSetTarmac))
{
LOG("Failed to destroy the tracking data set Tarmac because the data set "
"could not be deactivated.");
return 0;
}
if (!imageTracker->destroyDataSet(dataSetTarmac))
{
LOG("Failed to destroy the tracking data set Tarmac.");
return 0;
}
LOG("Successfully destroyed the data set Tarmac.");
dataSetTarmac = 0;
}
return 1;
}
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_onQCARInitializedNative(JNIEnv *, jobject)
{
// Register the update callback where we handle the data set swap:
QCAR::registerCallback(&updateCallback);
// Comment in to enable tracking of up to 2 targets simultaneously and
// split the work over multiple frames:
QCAR::setHint(QCAR::HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 3);
}
int lastTrackableId = -1;
float animationTime = 0;
//
//void computeTargetTranslationFromScreenVector(float screenDeltaX,
// float screenDeltaY, QCAR::Matrix44F & modelViewMatrix,
// QCAR::Vec3F & result) {
// QCAR::Vec3F screenAlignedDisplacement;
// screenAlignedDisplacement.data[0] = screenDeltaX;
// screenAlignedDisplacement.data[1] = screenDeltaY;
// screenAlignedDisplacement.data[2] = 0.0f;
//
// // Compute matrix to pass from Eye Coordinates to Object Coordinates
// QCAR::Matrix44F inverseModelViewMatrix = SampleMath::Matrix44FInverse(
// modelViewMatrix);
//
// // Convert the screen-aligned displacement vector to Object Coordinates
// // (i.e. in the target local reference frame)
// QCAR::Vec3F localTargetDisplacement = SampleMath::Vec3FTransformNormal(
// screenAlignedDisplacement, inverseModelViewMatrix);
//
// // Compute a speed factor based on the distance of the target from the camera
// float distanceObjectToCamera = fabs(modelViewMatrix.data[14]);
// float speed = 0.001f * distanceObjectToCamera; // TODO adjust value to your needs
//
// // set the result taking the speed factor into account
// result.data[0] = speed * localTargetDisplacement.data[0];
// result.data[1] = speed * localTargetDisplacement.data[1];
// result.data[2] = speed * localTargetDisplacement.data[2];
//}
unsigned long getCurrentTimeMilliseconds() {
struct timeval tv;
gettimeofday(&tv, NULL);
unsigned long s = tv.tv_sec * 1000;
unsigned long us = tv.tv_usec / 1000;
return s + us;
}
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_renderFrame(JNIEnv * env, jobject obj)
{
//LOG("Java_com_qualcomm_QCARSamples_ImageTargets_GLRenderer_renderFrame");
//New code for Cloud Reco
if (showStartScanButton)
{
jclass javaClass = env->GetObjectClass(obj);
jmethodID method = env->GetMethodID(javaClass,"showStartScanButton", "()V");
env->CallVoidMethod(obj, method);
showStartScanButton = false;
}
// Clear color and depth buffer
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Get the state from QCAR and mark the beginning of a rendering section
QCAR::State state = QCAR::Renderer::getInstance().begin();
// Explicitly render the Video Background
QCAR::Renderer::getInstance().drawVideoBackground();
#ifdef USE_OPENGL_ES_1_1
// Set GL11 flags:
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_NORMAL_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
glEnable(GL_TEXTURE_2D);
glDisable(GL_LIGHTING);
#endif
glEnable(GL_DEPTH_TEST);
// We must detect if background reflection is active and adjust the culling direction.
// If the reflection is active, this means the post matrix has been reflected as well,
// therefore standard counter clockwise face culling will result in "inside out" models.
glEnable(GL_CULL_FACE);
glCullFace(GL_BACK);
if(QCAR::Renderer::getInstance().getVideoBackgroundConfig().mReflection == QCAR::VIDEO_BACKGROUND_REFLECTION_ON)
glFrontFace(GL_CW);//Front camera
else
glFrontFace(GL_CCW);//Back camera
// Did we find any trackables this frame?
static float lastTime = 0.001f * getCurrentTimeMilliseconds();
// if no targets => reset animation time and lastTrackableId
if (state.getNumTrackableResults() == 0)
{
lastTime = 0.001f * getCurrentTimeMilliseconds();
animationTime = 0;
lastTrackableId = -1;
}
for(int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++)
{
// Get the trackable:
const QCAR::TrackableResult* result = state.getTrackableResult(tIdx);
const QCAR::Trackable& trackable = result->getTrackable();
//////////////////////////////////////////////////
// NEW code for animation
if (trackable.getId() != lastTrackableId)
{
// reset animation time
animationTime = 0;
// Update lastTrackableId
lastTrackableId = trackable.getId();
}
float currentTime = 0.001f * getCurrentTimeMilliseconds();
float elapsedTime = currentTime - lastTime;
animationTime += elapsedTime;
lastTime = currentTime;
// clamp to total duration
// if (animationTime > TOTAL_ANIMATION_DURATION)
// {
// animationTime = TOTAL_ANIMATION_DURATION;
// }
// /////////////////////////////////////////////////
if (animationTime > TOTAL_ANIMATION_DURATION)
{
animationTime = 0;
}
QCAR::Matrix44F modelViewMatrix =
QCAR::Tool::convertPose2GLMatrix(result->getPose());
/////////////////////////
if (trackable.getId() != lastTrackableId) {
jstring js = env->NewStringUTF(trackable.getName());
jclass javaClass = env->GetObjectClass(obj);
jmethodID method = env->GetMethodID(javaClass,"displayMessage", "(Ljava/lang/String;)V");
env->CallVoidMethod(obj, method, js);
lastTrackableId = trackable.getId();
}
// Choose the texture based on the target name:
// int textureIndex;
// if (strcmp(trackable.getName(), "chips") == 0)
// {
// textureIndex = 0;
// }
// else if (strcmp(trackable.getName(), "stones") == 0)
// {
// textureIndex = 1;
// }
// else
// {
// textureIndex = 2;
// }
//
// const Texture* const thisTexture = textures[textureIndex];
int textureIndex;
const float* thisVertices;
const float* thisTexCoords;
const float* thisNormals;
const unsigned short* thisIndices;
long NUM_TEAPOT_OBJECT_INDEXC;
if (!strcmp(trackable.getName(), "stones"))
{
textureIndex = 0;
thisVertices = teapotVertices;
thisTexCoords = teapotTexCoords;
thisNormals = teapotNormals;
thisIndices = teapotIndices;
NUM_TEAPOT_OBJECT_INDEXC = NUM_TEAPOT_OBJECT_INDEX;
}
else
if (!strcmp(trackable.getName(), "chips"))
{
textureIndex = 1;
thisVertices = teapotVerticesA;
thisTexCoords = teapotTexCoordsA;
thisNormals = teapotNormalsA;
thisIndices = teapotIndicesA;
NUM_TEAPOT_OBJECT_INDEXC = NUM_TEAPOT_OBJECT_INDEXA;
}
else
{
textureIndex = 2;
}
const Texture* const thisTexture = textures[textureIndex];
#ifdef USE_OPENGL_ES_1_1
// Load projection matrix:
glMatrixMode(GL_PROJECTION);
glLoadMatrixf(projectionMatrix.data);
// Load model view matrix:
glMatrixMode(GL_MODELVIEW);
glLoadMatrixf(modelViewMatrix.data);
glTranslatef(0.f, 0.f, kObjectScale);
glScalef(kObjectScale, kObjectScale, kObjectScale);
// Draw object:
glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
glTexCoordPointer(2, GL_FLOAT, 0, (const GLvoid*) &thisTexCoords[0]);
glVertexPointer(3, GL_FLOAT, 0, (const GLvoid*) &thisVertices[0]);
glNormalPointer(GL_FLOAT, 0, (const GLvoid*) &thisNormals[0]);
glDrawElements(GL_TRIANGLES, NUM_TEAPOT_OBJECT_INDEXC, GL_UNSIGNED_SHORT,
(const GLvoid*) &thisIndices[0]);
/////////////////////////////////////////////////////////////////////////////////////
// glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
// glTexCoordPointer(2, GL_FLOAT, 0, (const GLvoid*) &teapotTexCoordsA[0]);
// glVertexPointer(3, GL_FLOAT, 0, (const GLvoid*) &teapotVerticesA[0]);
// glNormalPointer(GL_FLOAT, 0, (const GLvoid*) &teapotNormalsA[0]);
// glDrawElements(GL_TRIANGLES, NUM_TEAPOT_OBJECT_INDEXA, GL_UNSIGNED_SHORT,
// (const GLvoid*) &teapotIndicesA[0]);
//
//
#else
QCAR::Matrix44F modelViewProjection;
SampleUtils::translatePoseMatrix(0.0f, 0.0f, kObjectScale,
&modelViewMatrix.data[0]);
//////////////////////////////////////////////////////
// NEW code for animation
// Perform rotation here based on animationTime:
float totalRotationAngle = 90.0f;// use your custom value here
//float currentRotationAngle = totalRotationAngle * (animationTime / TOTAL_ANIMATION_DURATION);
float currentRotationAngle = 360.0f * (animationTime / TOTAL_ANIMATION_DURATION);
SampleUtils::rotatePoseMatrix(currentRotationAngle, 0.f, 0.f, 1.f,
&modelViewMatrix.data[0]);
////////////////////////////////////////////////////
SampleUtils::scalePoseMatrix(kObjectScale, kObjectScale, kObjectScale,
&modelViewMatrix.data[0]);
SampleUtils::multiplyMatrix(&projectionMatrix.data[0],
&modelViewMatrix.data[0] ,
&modelViewProjection.data[0]);
glUseProgram(shaderProgramID);
///////////////////////////////////////////////////////////////// From this to /////////////////////////
// glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0,
// (const GLvoid*) &teapotVertices[0]);
// glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0,
// (const GLvoid*) &teapotNormals[0]);
// glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0,
// (const GLvoid*) &teapotTexCoords[0]);
//
// glEnableVertexAttribArray(vertexHandle);
// glEnableVertexAttribArray(normalHandle);
// glEnableVertexAttribArray(textureCoordHandle);
//
// glActiveTexture(GL_TEXTURE0);
// glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
// glUniform1i(texSampler2DHandle, 0 /*GL_TEXTURE0*/);
// glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE,
// (GLfloat*)&modelViewProjection.data[0] );
//
//
////////////////////////////////////////////////////until this line///////////////////////////////
//
// glDrawElements(GL_TRIANGLES, NUM_TEAPOT_OBJECT_INDEX, GL_UNSIGNED_SHORT,
// (const GLvoid*) &teapotIndices[0]);
//
/////////////////////////////////////////For draw diffent models starts here//////////////////////////
glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0,
(const GLvoid*) &thisVertices[0]);
glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0,
(const GLvoid*) &thisNormals[0]);
glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0,
(const GLvoid*) &thisTexCoords[0]);
glEnableVertexAttribArray(vertexHandle);
glEnableVertexAttribArray(normalHandle);
glEnableVertexAttribArray(textureCoordHandle);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
glUniform1i(texSampler2DHandle, 0 /*GL_TEXTURE0*/);
glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE,
(GLfloat*)&modelViewProjection.data[0] );
glDrawElements(GL_TRIANGLES, NUM_TEAPOT_OBJECT_INDEXC, GL_UNSIGNED_SHORT,
(const GLvoid*) &thisIndices[0]);
SampleUtils::checkGlError("ImageTargets renderFrame");
#endif
}
glDisable(GL_DEPTH_TEST);
#ifdef USE_OPENGL_ES_1_1
glDisable(GL_TEXTURE_2D);
glDisableClientState(GL_VERTEX_ARRAY);
glDisableClientState(GL_NORMAL_ARRAY);
glDisableClientState(GL_TEXTURE_COORD_ARRAY);
#else
glDisableVertexAttribArray(vertexHandle);
glDisableVertexAttribArray(normalHandle);
glDisableVertexAttribArray(textureCoordHandle);
#endif
QCAR::Renderer::getInstance().end();
// for(int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++)
// {
// // Get the target:
// const QCAR::TrackableResult* result = state.getTrackableResult( tIdx );
// const QCAR::Trackable& trackable = result->getTrackable();
//
// // Compare this target's id to a globally stored id
// // If this is a new target, find the displayMessage java method and
// // call it with the target's name
//
// if (trackable.getId() != lastTrackableId) {
// jstring js = env->NewStringUTF(trackable.getName());
// jclass javaClass = env->GetObjectClass(obj);
// jmethodID method = env->GetMethodID(javaClass,"displayMessage", "(Ljava/lang/String;)V");
// env->CallVoidMethod(obj, method, js);
// lastTrackableId = trackable.getId();
// }
//
//
//
// }
}
void configureVideoBackground() {
// Get the default video mode:
QCAR::CameraDevice& cameraDevice = QCAR::CameraDevice::getInstance();
QCAR::VideoMode videoMode = cameraDevice.getVideoMode(
QCAR::CameraDevice::MODE_DEFAULT);
// Configure the video background
QCAR::VideoBackgroundConfig config;
config.mEnabled = true;
config.mSynchronous = true;
config.mPosition.data[0] = 0.0f;
config.mPosition.data[1] = 0.0f;
if (isActivityInPortraitMode) {
//LOG("configureVideoBackground PORTRAIT");
config.mSize.data[0] = videoMode.mHeight
* (screenHeight / (float) videoMode.mWidth);
config.mSize.data[1] = screenHeight;
if (config.mSize.data[0] < screenWidth) {
LOG(
"Correcting rendering background size to handle missmatch between screen and video aspect ratios.");
config.mSize.data[0] = screenWidth;
config.mSize.data[1] = screenWidth
* (videoMode.mWidth / (float) videoMode.mHeight);
}
} else {
//LOG("configureVideoBackground LANDSCAPE");
config.mSize.data[0] = screenWidth;
config.mSize.data[1] = videoMode.mHeight
* (screenWidth / (float) videoMode.mWidth);
if (config.mSize.data[1] < screenHeight) {
LOG(
"Correcting rendering background size to handle missmatch between screen and video aspect ratios.");
config.mSize.data[0] = screenHeight
* (videoMode.mWidth / (float) videoMode.mHeight);
config.mSize.data[1] = screenHeight;
}
}
LOG(
"Configure Video Background : Video (%d,%d), Screen (%d,%d), mSize (%d,%d)", videoMode.mWidth, videoMode.mHeight, screenWidth, screenHeight, config.mSize.data[0], config.mSize.data[1]);
// Set the config:
QCAR::Renderer::getInstance().setVideoBackgroundConfig(config);
}
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initApplicationNative(
JNIEnv* env, jobject obj, jint width, jint height)
{
LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initApplicationNative");
// Store screen dimensions
screenWidth = width;
screenHeight = height;
// Handle to the activity class:
jclass activityClass = env->GetObjectClass(obj);
jmethodID getTextureCountMethodID = env->GetMethodID(activityClass,
"getTextureCount", "()I");
if (getTextureCountMethodID == 0)
{
LOG("Function getTextureCount() not found.");
return;
}
textureCount = env->CallIntMethod(obj, getTextureCountMethodID);
if (!textureCount)
{
LOG("getTextureCount() returned zero.");
return;
}
textures = new Texture*[textureCount];
jmethodID getTextureMethodID = env->GetMethodID(activityClass,
"getTexture", "(I)Lcom/qualcomm/QCARSamples/ImageTargets/Texture;");
if (getTextureMethodID == 0)
{
LOG("Function getTexture() not found.");
return;
}
// Register the textures
for (int i = 0; i < textureCount; ++i)
{
jobject textureObject = env->CallObjectMethod(obj, getTextureMethodID, i);
if (textureObject == NULL)
{
LOG("GetTexture() returned zero pointer");
return;
}
textures[i] = Texture::create(env, textureObject);
}
LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initApplicationNative finished");
}
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_deinitApplicationNative(
JNIEnv* env, jobject obj)
{
LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_deinitApplicationNative");
// Release texture resources
if (textures != 0)
{
for (int i = 0; i < textureCount; ++i)
{
delete textures[i];
textures[i] = NULL;
}
delete[]textures;
textures = NULL;
textureCount = 0;
}
}
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_startCamera(JNIEnv *,
jobject)
{
LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_startCamera");
// Select the camera to open, set this to QCAR::CameraDevice::CAMERA_FRONT
// to activate the front camera instead.
QCAR::CameraDevice::CAMERA camera = QCAR::CameraDevice::CAMERA_DEFAULT;
// Initialize the camera:
if (!QCAR::CameraDevice::getInstance().init(camera))
return;
// Configure the video background
configureVideoBackground();
// Select the default mode:
if (!QCAR::CameraDevice::getInstance().selectVideoMode(
QCAR::CameraDevice::MODE_DEFAULT))
return;
// Start the camera:
if (!QCAR::CameraDevice::getInstance().start())
return;
// Uncomment to enable flash
//if(QCAR::CameraDevice::getInstance().setFlashTorchMode(true))
// LOG("IMAGE TARGETS : enabled torch");
// Uncomment to enable infinity focus mode, or any other supported focus mode
// See CameraDevice.h for supported focus modes
//if(QCAR::CameraDevice::getInstance().setFocusMode(QCAR::CameraDevice::FOCUS_MODE_INFINITY))
// LOG("IMAGE TARGETS : enabled infinity focus");
// Start the tracker:
QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance();
QCAR::ImageTracker* imageTracker = static_cast<QCAR::ImageTracker*>( trackerManager.getTracker( QCAR::Tracker::IMAGE_TRACKER ) );
if(imageTracker != 0)
imageTracker->start();
if (scanningMode)
{
QCAR::TargetFinder* targetFinder = imageTracker->getTargetFinder();
assert (targetFinder != 0);
targetFinder->startRecognition();
}
}
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_stopCamera(JNIEnv *, jobject)
{
LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_stopCamera");
// Stop the tracker:
QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance();
QCAR::ImageTracker* imageTracker = static_cast<QCAR::ImageTracker*>( trackerManager.getTracker( QCAR::Tracker::IMAGE_TRACKER ) );
if(imageTracker != 0)
imageTracker->stop();
QCAR::TargetFinder* targetFinder = imageTracker->getTargetFinder();
assert (targetFinder != 0);
targetFinder->stop();
// Clears the trackables
targetFinder->clearTrackables();
QCAR::CameraDevice::getInstance().stop();
QCAR::CameraDevice::getInstance().deinit();
initStateVariables();
}
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_enterScanningModeNative(
JNIEnv*, jobject)
{
QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance();
QCAR::ImageTracker* imageTracker = static_cast<QCAR::ImageTracker*>(
trackerManager.getTracker(QCAR::Tracker::IMAGE_TRACKER));
assert(imageTracker != 0);
QCAR::TargetFinder* targetFinder = imageTracker->getTargetFinder();
assert (targetFinder != 0);
// Start Cloud Reco
targetFinder->startRecognition();
// Clear all trackables created previously:
targetFinder->clearTrackables();
scanningMode = true;
}
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_setProjectionMatrix(JNIEnv *, jobject)
{
LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_setProjectionMatrix");
// Cache the projection matrix:
const QCAR::CameraCalibration& cameraCalibration =
QCAR::CameraDevice::getInstance().getCameraCalibration();
projectionMatrix = QCAR::Tool::getProjectionGL(cameraCalibration, 2.0f, 2500.0f);
}
// ----------------------------------------------------------------------------
// Activates Camera Flash
// ----------------------------------------------------------------------------
JNIEXPORT jboolean JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_activateFlash(JNIEnv*, jobject, jboolean flash)
{
return QCAR::CameraDevice::getInstance().setFlashTorchMode((flash==JNI_TRUE)) ? JNI_TRUE : JNI_FALSE;
}
JNIEXPORT jboolean JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_autofocus(JNIEnv*, jobject)
{
return QCAR::CameraDevice::getInstance().setFocusMode(QCAR::CameraDevice::FOCUS_MODE_TRIGGERAUTO) ? JNI_TRUE : JNI_FALSE;
}
JNIEXPORT jboolean JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_setFocusMode(JNIEnv*, jobject, jint mode)
{
int qcarFocusMode;
switch ((int)mode)
{
case 0:
qcarFocusMode = QCAR::CameraDevice::FOCUS_MODE_NORMAL;
break;
case 1:
qcarFocusMode = QCAR::CameraDevice::FOCUS_MODE_CONTINUOUSAUTO;
break;
case 2:
qcarFocusMode = QCAR::CameraDevice::FOCUS_MODE_INFINITY;
break;
case 3:
qcarFocusMode = QCAR::CameraDevice::FOCUS_MODE_MACRO;
break;
default:
return JNI_FALSE;
}
return QCAR::CameraDevice::getInstance().setFocusMode(qcarFocusMode) ? JNI_TRUE : JNI_FALSE;
}
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_initRendering(
JNIEnv* env, jobject obj)
{
LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_initRendering");
// Define clear color
glClearColor(0.0f, 0.0f, 0.0f, QCAR::requiresAlpha() ? 0.0f : 1.0f);
// Now generate the OpenGL texture objects and add settings
for (int i = 0; i < textureCount; ++i)
{
glGenTextures(1, &(textures[i]->mTextureID));
glBindTexture(GL_TEXTURE_2D, textures[i]->mTextureID);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, textures[i]->mWidth,
textures[i]->mHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE,
(GLvoid*) textures[i]->mData);
}
#ifndef USE_OPENGL_ES_1_1
shaderProgramID = SampleUtils::createProgramFromBuffer(cubeMeshVertexShader,
cubeFragmentShader);
vertexHandle = glGetAttribLocation(shaderProgramID,
"vertexPosition");
normalHandle = glGetAttribLocation(shaderProgramID,
"vertexNormal");
textureCoordHandle = glGetAttribLocation(shaderProgramID,
"vertexTexCoord");
mvpMatrixHandle = glGetUniformLocation(shaderProgramID,
"modelViewProjectionMatrix");
texSampler2DHandle = glGetUniformLocation(shaderProgramID,
"texSampler2D");
#endif
}
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_updateRendering(
JNIEnv* env, jobject obj, jint width, jint height)
{
LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_updateRendering");
// Update screen dimensions
screenWidth = width;
screenHeight = height;
// Reconfigure the video background
configureVideoBackground();
}
#ifdef __cplusplus
}
#endif
Seems, this link is not avail now.