Log in or register to post comments

Image targets via cloud

June 25, 2013 - 11:06pm #1

I am integrating image targets with the Cloud Recognition sample in Android. The application mostly works: I can see the teapot rendered on the targets that are stored locally, and after pressing the "start scan" button it recognizes the target stored in the cloud — but nothing is rendered for it. The application then freezes and shows an error in the log.

I am also sending my image targets sample, can you check  it please, am i missing something? 

 

Thanks

 

/*==============================================================================
 Copyright (c) 2010-2013 QUALCOMM Austria Research Center GmbH.
 All Rights Reserved.
 Qualcomm Confidential and Proprietary
 
 @file
 ImageTargets.cpp
 
 @brief
 Sample for ImageTargets
 
 ==============================================================================*/
 
#include <jni.h>
#include <android/log.h>
#include <stdio.h>
#include <string.h>
#include <assert.h>
#include "SampleMath.cpp"
#include "Tomcat.h"
#include "Teapot.h"
 
#include <math.h>
 
#ifdef USE_OPENGL_ES_1_1
#include <GLES/gl.h>
#include <GLES/glext.h>
#else
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#endif
 
#include <QCAR/QCAR.h>
#include <QCAR/CameraDevice.h>
#include <QCAR/Renderer.h>
#include <QCAR/VideoBackgroundConfig.h>
#include <QCAR/Trackable.h>
#include <QCAR/TrackableResult.h>
 
#include <QCAR/Tool.h>
#include <QCAR/Tracker.h>
#include <QCAR/TrackerManager.h>
#include <QCAR/ImageTracker.h>
#include <QCAR/CameraCalibration.h>
#include <QCAR/UpdateCallback.h>
#include <QCAR/DataSet.h>
 
#include "SampleUtils.h"
#include "Texture.h"
#include "CubeShaders.h"
 
#include <QCAR/ImageTargetResult.h>
#include <QCAR/ImageTarget.h>
#include <QCAR/TargetFinder.h>
#include <QCAR/TargetSearchResult.h>
#include <QCAR/TrackableSource.h>
#include <QCAR/Image.h>
#include <sys/time.h>
 
#ifdef __cplusplus
extern "C" {
#endif
 
// Textures: array of texture objects handed down from the Java activity
// (populated in initApplicationNative, released in deinitApplicationNative).
int textureCount = 0;
Texture** textures = 0;
// Duration of one full animation cycle (model rotation) in seconds.
#define TOTAL_ANIMATION_DURATION  10.0f //seconds
// OpenGL ES 2.0 specific: shader program and its attribute/uniform handles,
// resolved in initRendering and used every frame in renderFrame.
#ifdef USE_OPENGL_ES_2_0
unsigned int shaderProgramID = 0;
GLint vertexHandle = 0;
GLint normalHandle = 0;
GLint textureCoordHandle = 0;
GLint mvpMatrixHandle = 0;
GLint texSampler2DHandle = 0;
#endif
 
// Screen dimensions:
unsigned int screenWidth = 0;
unsigned int screenHeight = 0;
 
// Indicates whether screen is in portrait (true) or landscape (false) mode
bool isActivityInPortraitMode = false;
 
// The projection matrix used for rendering virtual objects:
QCAR::Matrix44F projectionMatrix;
 
// Constants:
// Uniform scale (and Z offset) applied to the rendered model.
static const float kObjectScale = 2.f;
 
// Locally stored (device database) data sets, created/loaded in loadTrackerData.
QCAR::DataSet* dataSetStonesAndChips = 0;
QCAR::DataSet* dataSetTarmac = 0;
 
// Set from Java to request a data set swap on the next update callback.
bool switchDataSetAsap = false;
 
// Cloud Reco state: whether we are currently scanning, and whether the
// renderer should ask the Java UI to show the "start scan" button.
bool scanningMode = false;
bool showStartScanButton = false;
// Buffer holding the unique id of the last cloud-recognized target,
// used to detect when a different target is recognized.
static const size_t CONTENT_MAX = 256;
char lastTargetId[CONTENT_MAX];
 
// Cloud Reco server credentials.
// NOTE(review): secret keys hard-coded in a shipped native library can be
// extracted from the APK — consider provisioning these at runtime.
static const char* kAccessKey = "b7f1a5db757132fd97f45050ea7fc568ffd2a0e7";
static const char* kSecretKey = "d51fdfaae3df6487e3951ece5d00affb1e59d92a";
 
// Accumulated target displacement (currently unused by the active code path;
// see the commented-out computeTargetTranslationFromScreenVector below).
QCAR::Vec3F targetCumulatedDisplacement(0.0f, 0.0f, 0.0f);
 
// Resets the Cloud Reco related state to its initial configuration:
// no target remembered, scanning active.
void initStateVariables()
{
    *lastTargetId = '\0';
    scanningMode = true;
}
 
// Initializes the Cloud Reco target finder using the application's server
// credentials. Blocks until initialization finishes and returns the finder's
// init state (QCAR::TargetFinder::INIT_SUCCESS on success).
JNIEXPORT int JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initCloudReco(
    JNIEnv *, jobject)
{
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initCloudReco");

    // Look up the image tracker that owns the cloud-based target finder.
    QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance();
    QCAR::ImageTracker* imageTracker = static_cast<QCAR::ImageTracker*>(
        trackerManager.getTracker(QCAR::Tracker::IMAGE_TRACKER));
    assert(imageTracker != NULL);

    QCAR::TargetFinder* targetFinder = imageTracker->getTargetFinder();
    assert(targetFinder != NULL);

    // Kick off the cloud service initialization and wait for it to complete.
    if (targetFinder->startInit(kAccessKey, kSecretKey))
        targetFinder->waitUntilInitFinished();

    const int resultCode = targetFinder->getInitState();
    if (resultCode != QCAR::TargetFinder::INIT_SUCCESS)
        LOG("Failed to initialize target finder.");

    return resultCode;
}
 
// Debug helper: logs a Java string passed down over JNI.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_printString(
    JNIEnv * env, jobject, jstring myJavaString)
{
    // Convert to UTF-8, log, then release the JNI-owned copy.
    const char* utf8Chars = env->GetStringUTFChars(myJavaString, 0);
    LOG("Hello, this is my string  %s", utf8Chars);
    env->ReleaseStringUTFChars(myJavaString, utf8Chars);
}
 
// Debug helper: logs an integer passed down from Java.
//
// BUG FIX: the log message was copy-pasted from initCloudReco
// ("Failed to initialize target finder. %d") and falsely reported an
// initialization failure whenever this helper was called.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_printInteger(JNIEnv * , jobject, jint value)
{
    LOG("printInteger: value = %d", value);
}
 
// Shuts down the Cloud Reco target finder.
// Returns 1 on success, 0 when the ImageTracker was never initialized.
JNIEXPORT int JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_deinitCloudReco(
    JNIEnv *, jobject)
{
    // Locate the image tracker; without it there is nothing to deinitialize.
    QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance();
    QCAR::ImageTracker* imageTracker = static_cast<QCAR::ImageTracker*>(
        trackerManager.getTracker(QCAR::Tracker::IMAGE_TRACKER));
    if (imageTracker == NULL)
    {
        LOG("Failed to deinit CloudReco as the ImageTracker was not initialized.");
        return 0;
    }

    // Deinitialize Cloud Reco:
    imageTracker->getTargetFinder()->deinit();
    return 1;
}
 
// Use the following calls if you would like to customize the color of the UI
// targetFinder->setUIScanlineColor(1.0, 0.0, 0.0);
// targetFinder->setUIPointColor(0.0, 0.0, 1.0);
 
// Object to receive update callbacks from QCAR SDK
 
class ImageTargets_UpdateCallback: public QCAR::UpdateCallback {
 
virtual void QCAR_onUpdate(QCAR::State& state) { //NEW code for Cloud Reco
QCAR::TrackerManager& trackerManager =
QCAR::TrackerManager::getInstance();
 
QCAR::ImageTracker* imageTracker =
static_cast<QCAR::ImageTracker*>(trackerManager.getTracker(
QCAR::Tracker::IMAGE_TRACKER));
 
// Get the target finder:
QCAR::TargetFinder* targetFinder = imageTracker->getTargetFinder();
 
// Check if there are new results available:
const int statusCode = targetFinder->updateSearchResults();
 
if (statusCode < 0) {
char errorMessage[80];
 
sprintf(errorMessage, "Error with status code %d at frame %d",
statusCode, state.getFrame().getTimeStamp());
} else if (statusCode == QCAR::TargetFinder::UPDATE_RESULTS_AVAILABLE) {
// Process new search results
 
if (targetFinder->getResultCount() > 0) {
 
const QCAR::TargetSearchResult* result =
targetFinder->getResult(0);
 
// Check if this target is suitable for tracking:
if (result->getTrackingRating() > 0) {
// Create a new Trackable from the result:
 
QCAR::Trackable* newTrackable =
targetFinder->enableTracking(*result);
 
if (newTrackable != 0) {
LOG(
"Successfully created new trackable '%s' with rating '%d'.", newTrackable->getName(), result->getTrackingRating());
 
// Checks if the targets has changed
LOG(
"Comparing Strings. currentTargetId: %s  lastTargetId: %s", result->getUniqueTargetId(), lastTargetId);
 
if (strcmp(result->getUniqueTargetId(), lastTargetId)
!= 0) {
 
LOG(
"Recognized target name is %d", result->getUniqueTargetId());
//snprintf(targetMetadata, CONTENT_MAX, "%s", result->getMetaData());
 
// If the target has changed...
// app-specific: do something
// (e.g. generate new 3D model or texture)
 
}
 
strcpy(lastTargetId, result->getUniqueTargetId());
// // Stop Cloud Reco scanning
targetFinder->stop();
//
scanningMode = false;
//
showStartScanButton = true;
}
}
}
}
}
};
 
ImageTargets_UpdateCallback updateCallback;
 
// Reports which OpenGL ES version this native library was built for:
// 1 for ES 1.1 builds, 2 for ES 2.0 builds.
JNIEXPORT int JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_getOpenGlEsVersionNative(JNIEnv *, jobject)
{
#ifdef USE_OPENGL_ES_1_1
    return 1;
#else
    return 2;
#endif
}
 
// Records whether the Java activity is in portrait orientation;
// consulted later by configureVideoBackground.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_setActivityPortraitMode(JNIEnv *, jobject, jboolean isPortrait)
{
    isActivityInPortraitMode = (isPortrait != JNI_FALSE);
}
 
// Requests a data set swap; the flag is consumed by the SDK update callback.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_switchDatasetAsap(JNIEnv *, jobject)
{
    switchDataSetAsap = true;
}
 
// Creates the image tracker. Returns 1 on success, 0 on failure.
JNIEXPORT int JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initTracker(JNIEnv *, jobject)
{
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initTracker");

    // Ask the tracker manager to create the image tracker.
    QCAR::Tracker* tracker =
        QCAR::TrackerManager::getInstance().initTracker(QCAR::Tracker::IMAGE_TRACKER);
    if (tracker == NULL)
    {
        LOG("Failed to initialize ImageTracker.");
        return 0;
    }

    LOG("Successfully initialized ImageTracker.");
    return 1;
}
 
// Tears down the image tracker created by initTracker.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_deinitTracker(JNIEnv *, jobject)
{
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_deinitTracker");

    QCAR::TrackerManager::getInstance().deinitTracker(QCAR::Tracker::IMAGE_TRACKER);
}
 
JNIEXPORT int JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_loadTrackerData(JNIEnv *, jobject)
{
LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_loadTrackerData");
 
// Get the image tracker:
QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance();
QCAR::ImageTracker* imageTracker = static_cast<QCAR::ImageTracker*>(
trackerManager.getTracker(QCAR::Tracker::IMAGE_TRACKER));
if (imageTracker == NULL)
{
LOG("Failed to load tracking data set because the ImageTracker has not"
" been initialized.");
return 0;
}
 
// Create the data sets:
dataSetStonesAndChips = imageTracker->createDataSet();
if (dataSetStonesAndChips == 0)
{
LOG("Failed to create a new tracking data.");
return 0;
}
 
dataSetTarmac = imageTracker->createDataSet();
if (dataSetTarmac == 0)
{
LOG("Failed to create a new tracking data.");
return 0;
}
 
// Load the data sets:
if (!dataSetStonesAndChips->load("StonesAndChips.xml", QCAR::DataSet::STORAGE_APPRESOURCE))
{
LOG("Failed to load data set.");
return 0;
}
 
if (!dataSetTarmac->load("Tarmac.xml", QCAR::DataSet::STORAGE_APPRESOURCE))
{
LOG("Failed to load data set.");
return 0;
}
 
// Activate the data set:
if (!imageTracker->activateDataSet(dataSetStonesAndChips))
{
LOG("Failed to activate data set.");
return 0;
}
 
LOG("Successfully loaded and activated data set.");
return 1;
}
 
JNIEXPORT int JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_destroyTrackerData(JNIEnv *, jobject)
{
LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_destroyTrackerData");
 
// Get the image tracker:
QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance();
QCAR::ImageTracker* imageTracker = static_cast<QCAR::ImageTracker*>(
trackerManager.getTracker(QCAR::Tracker::IMAGE_TRACKER));
if (imageTracker == NULL)
{
LOG("Failed to destroy the tracking data set because the ImageTracker has not"
" been initialized.");
return 0;
}
 
if (dataSetStonesAndChips != 0)
{
if (imageTracker->getActiveDataSet() == dataSetStonesAndChips &&
!imageTracker->deactivateDataSet(dataSetStonesAndChips))
{
LOG("Failed to destroy the tracking data set StonesAndChips because the data set "
"could not be deactivated.");
return 0;
}
 
if (!imageTracker->destroyDataSet(dataSetStonesAndChips))
{
LOG("Failed to destroy the tracking data set StonesAndChips.");
return 0;
}
 
LOG("Successfully destroyed the data set StonesAndChips.");
dataSetStonesAndChips = 0;
}
 
if (dataSetTarmac != 0)
{
if (imageTracker->getActiveDataSet() == dataSetTarmac &&
!imageTracker->deactivateDataSet(dataSetTarmac))
{
LOG("Failed to destroy the tracking data set Tarmac because the data set "
"could not be deactivated.");
return 0;
}
 
if (!imageTracker->destroyDataSet(dataSetTarmac))
{
LOG("Failed to destroy the tracking data set Tarmac.");
return 0;
}
 
LOG("Successfully destroyed the data set Tarmac.");
dataSetTarmac = 0;
}
 
return 1;
}
 
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_onQCARInitializedNative(JNIEnv *, jobject)
{
// Register the update callback where we handle the Cloud Reco results and
// the data set swap:
QCAR::registerCallback(&updateCallback);
 
// Allow tracking of up to 3 targets simultaneously, splitting the detection
// work over multiple frames. (NOTE: the original comment said "up to 2"
// while the hint actually requests 3.)
QCAR::setHint(QCAR::HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 3);
}
// Id of the trackable handled in the previous frame (-1 when none in view).
int lastTrackableId = -1;
// Seconds accumulated for the rotation animation of the current trackable.
float animationTime = 0;
 
//
//void computeTargetTranslationFromScreenVector(float screenDeltaX,
// float screenDeltaY, QCAR::Matrix44F & modelViewMatrix,
// QCAR::Vec3F & result) {
// QCAR::Vec3F screenAlignedDisplacement;
// screenAlignedDisplacement.data[0] = screenDeltaX;
// screenAlignedDisplacement.data[1] = screenDeltaY;
// screenAlignedDisplacement.data[2] = 0.0f;
//
// // Compute matrix to pass from Eye Coordinates to Object Coordinates
// QCAR::Matrix44F inverseModelViewMatrix = SampleMath::Matrix44FInverse(
// modelViewMatrix);
//
// // Convert the screen-aligned displacement vector to Object Coordinates
// // (i.e. in the target local reference frame)
// QCAR::Vec3F localTargetDisplacement = SampleMath::Vec3FTransformNormal(
// screenAlignedDisplacement, inverseModelViewMatrix);
//
// // Compute a speed factor based on the distance of the target from the camera
// float distanceObjectToCamera = fabs(modelViewMatrix.data[14]);
// float speed = 0.001f * distanceObjectToCamera; // TODO adjust value to your needs
//
// // set the result taking the speed factor into account
// result.data[0] = speed * localTargetDisplacement.data[0];
// result.data[1] = speed * localTargetDisplacement.data[1];
// result.data[2] = speed * localTargetDisplacement.data[2];
//}
 
// Returns the current wall-clock time in milliseconds since the Unix epoch.
//
// BUG FIX: `tv.tv_sec * 1000` was evaluated in time_t (signed, 32-bit on many
// Android targets) before being stored, which is signed-overflow UB for any
// date after ~Jan 1970 + 24.8 days' worth of milliseconds. Widen to unsigned
// long long before multiplying. The return type stays `unsigned long` for API
// compatibility; callers here only use differences between two calls, which
// remain correct even where unsigned long is 32 bits.
unsigned long getCurrentTimeMilliseconds() {
    struct timeval tv;
    gettimeofday(&tv, NULL);
    unsigned long long ms = (unsigned long long) tv.tv_sec * 1000ULL
                          + (unsigned long long) tv.tv_usec / 1000ULL;
    return (unsigned long) ms;
}
// Renders one frame: clears the buffers, draws the camera background, then
// draws an animated (rotating) model on every currently tracked target.
// Also relays Cloud Reco UI state ("start scan" button) back to Java.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_renderFrame(JNIEnv * env, jobject obj)
{
    // New code for Cloud Reco: when the update callback requested it, tell
    // the Java side to show the "start scan" button again.
    if (showStartScanButton)
    {
        jclass javaClass = env->GetObjectClass(obj);
        jmethodID method = env->GetMethodID(javaClass, "showStartScanButton", "()V");
        env->CallVoidMethod(obj, method);
        showStartScanButton = false;
    }

    // Clear color and depth buffer
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Get the state from QCAR and mark the beginning of a rendering section
    QCAR::State state = QCAR::Renderer::getInstance().begin();

    // Explicitly render the Video Background
    QCAR::Renderer::getInstance().drawVideoBackground();

#ifdef USE_OPENGL_ES_1_1
    // Set GL11 flags:
    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_NORMAL_ARRAY);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);
    glEnable(GL_TEXTURE_2D);
    glDisable(GL_LIGHTING);
#endif

    glEnable(GL_DEPTH_TEST);

    // We must detect if background reflection is active and adjust the culling
    // direction. If the reflection is active the post matrix has been
    // reflected as well, so standard CCW culling would render "inside out".
    glEnable(GL_CULL_FACE);
    glCullFace(GL_BACK);
    if (QCAR::Renderer::getInstance().getVideoBackgroundConfig().mReflection == QCAR::VIDEO_BACKGROUND_REFLECTION_ON)
        glFrontFace(GL_CW);  // Front camera
    else
        glFrontFace(GL_CCW); // Back camera

    // Time base used to advance the rotation animation.
    static float lastTime = 0.001f * getCurrentTimeMilliseconds();

    // If no targets are in view, reset the animation clock and target memory.
    if (state.getNumTrackableResults() == 0)
    {
        lastTime = 0.001f * getCurrentTimeMilliseconds();
        animationTime = 0;
        lastTrackableId = -1;
    }

    for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++)
    {
        // Get the trackable:
        const QCAR::TrackableResult* result = state.getTrackableResult(tIdx);
        const QCAR::Trackable& trackable = result->getTrackable();

        // A different target came into view: restart the animation and notify
        // the Java side.
        if (trackable.getId() != lastTrackableId)
        {
            animationTime = 0;

            // BUG FIX: the original had a second `trackable.getId() !=
            // lastTrackableId` check later in the loop that could never fire,
            // because lastTrackableId was already updated here — so the
            // displayMessage callback was dead code. Invoke it here instead.
            jstring js = env->NewStringUTF(trackable.getName());
            jclass javaClass = env->GetObjectClass(obj);
            jmethodID method = env->GetMethodID(javaClass, "displayMessage",
                    "(Ljava/lang/String;)V");
            env->CallVoidMethod(obj, method, js);

            lastTrackableId = trackable.getId();
        }

        // Advance the animation clock by the real elapsed time.
        float currentTime = 0.001f * getCurrentTimeMilliseconds();
        float elapsedTime = currentTime - lastTime;
        animationTime += elapsedTime;
        lastTime = currentTime;

        // Wrap (rather than clamp) so the rotation loops forever.
        if (animationTime > TOTAL_ANIMATION_DURATION)
        {
            animationTime = 0;
        }

        QCAR::Matrix44F modelViewMatrix =
                QCAR::Tool::convertPose2GLMatrix(result->getPose());

        // Choose the model geometry and texture based on the target name:
        int textureIndex;
        const float* thisVertices;
        const float* thisTexCoords;
        const float* thisNormals;
        const unsigned short* thisIndices;
        long NUM_TEAPOT_OBJECT_INDEXC;

        if (!strcmp(trackable.getName(), "stones"))
        {
            textureIndex = 0;
            thisVertices = teapotVertices;
            thisTexCoords = teapotTexCoords;
            thisNormals = teapotNormals;
            thisIndices = teapotIndices;
            NUM_TEAPOT_OBJECT_INDEXC = NUM_TEAPOT_OBJECT_INDEX;
        }
        else if (!strcmp(trackable.getName(), "chips"))
        {
            textureIndex = 1;
            thisVertices = teapotVerticesA;
            thisTexCoords = teapotTexCoordsA;
            thisNormals = teapotNormalsA;
            thisIndices = teapotIndicesA;
            NUM_TEAPOT_OBJECT_INDEXC = NUM_TEAPOT_OBJECT_INDEXA;
        }
        else
        {
            // Any other target — in particular a trackable created at runtime
            // by Cloud Reco (its name is neither "stones" nor "chips").
            //
            // BUG FIX: the original set only textureIndex here and left the
            // geometry pointers and the index count UNINITIALIZED, so drawing
            // a cloud-recognized target dereferenced garbage pointers — which
            // matches the reported "nothing rendered, then the app freezes".
            // Fall back to the default teapot geometry.
            textureIndex = 2;
            thisVertices = teapotVertices;
            thisTexCoords = teapotTexCoords;
            thisNormals = teapotNormals;
            thisIndices = teapotIndices;
            NUM_TEAPOT_OBJECT_INDEXC = NUM_TEAPOT_OBJECT_INDEX;
        }

        // NOTE(review): textureIndex 2 assumes the Java activity registered at
        // least 3 textures — confirm getTextureCount() >= 3.
        const Texture* const thisTexture = textures[textureIndex];

#ifdef USE_OPENGL_ES_1_1
        // Load projection matrix:
        glMatrixMode(GL_PROJECTION);
        glLoadMatrixf(projectionMatrix.data);

        // Load model view matrix:
        glMatrixMode(GL_MODELVIEW);
        glLoadMatrixf(modelViewMatrix.data);
        glTranslatef(0.f, 0.f, kObjectScale);
        glScalef(kObjectScale, kObjectScale, kObjectScale);

        // Draw object:
        glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
        glTexCoordPointer(2, GL_FLOAT, 0, (const GLvoid*) &thisTexCoords[0]);
        glVertexPointer(3, GL_FLOAT, 0, (const GLvoid*) &thisVertices[0]);
        glNormalPointer(GL_FLOAT, 0, (const GLvoid*) &thisNormals[0]);
        glDrawElements(GL_TRIANGLES, NUM_TEAPOT_OBJECT_INDEXC, GL_UNSIGNED_SHORT,
                (const GLvoid*) &thisIndices[0]);
#else
        QCAR::Matrix44F modelViewProjection;

        SampleUtils::translatePoseMatrix(0.0f, 0.0f, kObjectScale,
                &modelViewMatrix.data[0]);

        // Animation: rotate the model around its Z axis, one full revolution
        // per TOTAL_ANIMATION_DURATION seconds.
        float currentRotationAngle = 360.0f * (animationTime / TOTAL_ANIMATION_DURATION);
        SampleUtils::rotatePoseMatrix(currentRotationAngle, 0.f, 0.f, 1.f,
                &modelViewMatrix.data[0]);

        SampleUtils::scalePoseMatrix(kObjectScale, kObjectScale, kObjectScale,
                &modelViewMatrix.data[0]);

        SampleUtils::multiplyMatrix(&projectionMatrix.data[0],
                &modelViewMatrix.data[0],
                &modelViewProjection.data[0]);

        glUseProgram(shaderProgramID);

        // Bind the selected model's attributes and draw it.
        glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0,
                (const GLvoid*) &thisVertices[0]);
        glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0,
                (const GLvoid*) &thisNormals[0]);
        glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0,
                (const GLvoid*) &thisTexCoords[0]);

        glEnableVertexAttribArray(vertexHandle);
        glEnableVertexAttribArray(normalHandle);
        glEnableVertexAttribArray(textureCoordHandle);

        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
        glUniform1i(texSampler2DHandle, 0 /*GL_TEXTURE0*/);

        glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE,
                (GLfloat*) &modelViewProjection.data[0]);
        glDrawElements(GL_TRIANGLES, NUM_TEAPOT_OBJECT_INDEXC, GL_UNSIGNED_SHORT,
                (const GLvoid*) &thisIndices[0]);

        SampleUtils::checkGlError("ImageTargets renderFrame");
#endif
    }

    glDisable(GL_DEPTH_TEST);

#ifdef USE_OPENGL_ES_1_1
    glDisable(GL_TEXTURE_2D);
    glDisableClientState(GL_VERTEX_ARRAY);
    glDisableClientState(GL_NORMAL_ARRAY);
    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
#else
    glDisableVertexAttribArray(vertexHandle);
    glDisableVertexAttribArray(normalHandle);
    glDisableVertexAttribArray(textureCoordHandle);
#endif

    QCAR::Renderer::getInstance().end();
}
 
// Configures how the camera image is mapped onto the screen. The video is
// scaled so it fully covers the screen in the current orientation; when the
// aspect ratios of the screen and the camera differ, the size is corrected
// so no uncovered border remains (the video overflows instead).
void configureVideoBackground() {
// Get the default video mode:
QCAR::CameraDevice& cameraDevice = QCAR::CameraDevice::getInstance();
QCAR::VideoMode videoMode = cameraDevice.getVideoMode(
QCAR::CameraDevice::MODE_DEFAULT);
// Configure the video background
QCAR::VideoBackgroundConfig config;
config.mEnabled = true;
config.mSynchronous = true;
config.mPosition.data[0] = 0.0f;
config.mPosition.data[1] = 0.0f;
if (isActivityInPortraitMode) {
//LOG("configureVideoBackground PORTRAIT");
// Portrait: the camera image is rotated 90°, so the video's width axis
// maps to the screen's height axis. Scale so the height fills the screen.
config.mSize.data[0] = videoMode.mHeight
* (screenHeight / (float) videoMode.mWidth);
config.mSize.data[1] = screenHeight;
// If the scaled width does not cover the screen width, rescale using the
// width instead (video then overflows vertically).
if (config.mSize.data[0] < screenWidth) {
LOG(
"Correcting rendering background size to handle missmatch between screen and video aspect ratios.");
config.mSize.data[0] = screenWidth;
config.mSize.data[1] = screenWidth
* (videoMode.mWidth / (float) videoMode.mHeight);
}
} else {
//LOG("configureVideoBackground LANDSCAPE");
// Landscape: scale so the width fills the screen.
config.mSize.data[0] = screenWidth;
config.mSize.data[1] = videoMode.mHeight
* (screenWidth / (float) videoMode.mWidth);
// If the scaled height does not cover the screen height, rescale using
// the height instead (video then overflows horizontally).
if (config.mSize.data[1] < screenHeight) {
LOG(
"Correcting rendering background size to handle missmatch between screen and video aspect ratios.");
config.mSize.data[0] = screenHeight
* (videoMode.mWidth / (float) videoMode.mHeight);
config.mSize.data[1] = screenHeight;
}
}
 
LOG(
"Configure Video Background : Video (%d,%d), Screen (%d,%d), mSize (%d,%d)", videoMode.mWidth, videoMode.mHeight, screenWidth, screenHeight, config.mSize.data[0], config.mSize.data[1]);
// Set the config:
QCAR::Renderer::getInstance().setVideoBackgroundConfig(config);
}
 
// Stores the screen size and pulls the texture objects from the Java
// activity (via getTextureCount()/getTexture(i)) into the native `textures`
// array for later upload in initRendering.
//
// BUG FIX: the original allocated the `textures` array before looking up
// getTexture(), leaking it if the lookup failed, and leaked every
// already-created Texture (plus a partially filled array) when a mid-loop
// getTexture(i) call returned null.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initApplicationNative(
        JNIEnv* env, jobject obj, jint width, jint height)
{
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initApplicationNative");

    // Store screen dimensions
    screenWidth = width;
    screenHeight = height;

    // Handle to the activity class:
    jclass activityClass = env->GetObjectClass(obj);

    jmethodID getTextureCountMethodID = env->GetMethodID(activityClass,
            "getTextureCount", "()I");
    if (getTextureCountMethodID == 0)
    {
        LOG("Function getTextureCount() not found.");
        return;
    }

    textureCount = env->CallIntMethod(obj, getTextureCountMethodID);
    if (!textureCount)
    {
        LOG("getTextureCount() returned zero.");
        return;
    }

    // Resolve getTexture() BEFORE allocating, so the early return cannot leak.
    jmethodID getTextureMethodID = env->GetMethodID(activityClass,
            "getTexture", "(I)Lcom/qualcomm/QCARSamples/ImageTargets/Texture;");
    if (getTextureMethodID == 0)
    {
        LOG("Function getTexture() not found.");
        return;
    }

    textures = new Texture*[textureCount];

    // Register the textures
    for (int i = 0; i < textureCount; ++i)
    {
        jobject textureObject = env->CallObjectMethod(obj, getTextureMethodID, i);
        if (textureObject == NULL)
        {
            LOG("GetTexture() returned zero pointer");
            // Release everything created so far instead of leaking it and
            // leaving a partially initialized array behind.
            for (int j = 0; j < i; ++j)
                delete textures[j];
            delete[] textures;
            textures = NULL;
            textureCount = 0;
            return;
        }

        textures[i] = Texture::create(env, textureObject);
    }

    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initApplicationNative finished");
}
 
// Releases the native texture objects created in initApplicationNative.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_deinitApplicationNative(
        JNIEnv* env, jobject obj)
{
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_deinitApplicationNative");

    if (textures == 0)
        return;

    // Delete each texture, then the array itself, and reset the bookkeeping.
    for (int idx = 0; idx < textureCount; ++idx)
    {
        delete textures[idx];
        textures[idx] = NULL;
    }
    delete[] textures;
    textures = NULL;
    textureCount = 0;
}
 
// Opens and starts the camera, configures the video background, and starts
// the image tracker. When scanning mode is active, also (re)starts the
// Cloud Reco recognition.
//
// BUG FIX: the original null-checked imageTracker before calling start(),
// but then unconditionally dereferenced it in the scanningMode branch —
// a null-pointer crash if the tracker was never initialized.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_startCamera(JNIEnv *,
        jobject)
{
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_startCamera");

    // Select the camera to open, set this to QCAR::CameraDevice::CAMERA_FRONT
    // to activate the front camera instead.
    QCAR::CameraDevice::CAMERA camera = QCAR::CameraDevice::CAMERA_DEFAULT;

    // Initialize the camera:
    if (!QCAR::CameraDevice::getInstance().init(camera))
        return;

    // Configure the video background
    configureVideoBackground();

    // Select the default mode:
    if (!QCAR::CameraDevice::getInstance().selectVideoMode(
            QCAR::CameraDevice::MODE_DEFAULT))
        return;

    // Start the camera:
    if (!QCAR::CameraDevice::getInstance().start())
        return;

    // Uncomment to enable flash
    //if(QCAR::CameraDevice::getInstance().setFlashTorchMode(true))
    // LOG("IMAGE TARGETS : enabled torch");

    // Uncomment to enable infinity focus mode, or any other supported focus mode
    // See CameraDevice.h for supported focus modes
    //if(QCAR::CameraDevice::getInstance().setFocusMode(QCAR::CameraDevice::FOCUS_MODE_INFINITY))
    // LOG("IMAGE TARGETS : enabled infinity focus");

    // Start the tracker:
    QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance();
    QCAR::ImageTracker* imageTracker = static_cast<QCAR::ImageTracker*>(
            trackerManager.getTracker(QCAR::Tracker::IMAGE_TRACKER));
    if (imageTracker == 0)
        return;  // tracker was never initialized; nothing more to start

    imageTracker->start();

    // Resume Cloud Reco scanning if we were in scanning mode.
    if (scanningMode)
    {
        QCAR::TargetFinder* targetFinder = imageTracker->getTargetFinder();
        assert(targetFinder != 0);

        targetFinder->startRecognition();
    }
}
 
// Stops the tracker, Cloud Reco and the camera, and resets the Cloud Reco
// state variables.
//
// BUG FIX: the original null-checked imageTracker before calling stop(), but
// then unconditionally called imageTracker->getTargetFinder() — a
// null-pointer crash if the tracker was never initialized.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_stopCamera(JNIEnv *, jobject)
{
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_stopCamera");

    // Stop the tracker:
    QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance();
    QCAR::ImageTracker* imageTracker = static_cast<QCAR::ImageTracker*>(
            trackerManager.getTracker(QCAR::Tracker::IMAGE_TRACKER));

    if (imageTracker != 0)
    {
        imageTracker->stop();

        // Stop Cloud Reco and drop the trackables it created.
        QCAR::TargetFinder* targetFinder = imageTracker->getTargetFinder();
        assert(targetFinder != 0);

        targetFinder->stop();
        targetFinder->clearTrackables();
    }

    QCAR::CameraDevice::getInstance().stop();
    QCAR::CameraDevice::getInstance().deinit();

    initStateVariables();
}
 
// Re-enters Cloud Reco scanning mode: restarts recognition, drops any
// trackables created from previous search results, and flags scanning mode.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_enterScanningModeNative(
        JNIEnv*, jobject)
{
    QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance();
    QCAR::ImageTracker* imageTracker = static_cast<QCAR::ImageTracker*>(
            trackerManager.getTracker(QCAR::Tracker::IMAGE_TRACKER));
    assert(imageTracker != 0);

    QCAR::TargetFinder* targetFinder = imageTracker->getTargetFinder();
    assert(targetFinder != 0);

    // Start Cloud Reco
    targetFinder->startRecognition();

    // Clear all trackables created previously:
    targetFinder->clearTrackables();

    scanningMode = true;
}
 
// Computes and caches the GL projection matrix from the camera calibration,
// with near/far clip planes at 2 and 2500 scene units.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_setProjectionMatrix(JNIEnv *, jobject)
{
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_setProjectionMatrix");

    const QCAR::CameraCalibration& cameraCalibration =
            QCAR::CameraDevice::getInstance().getCameraCalibration();
    projectionMatrix = QCAR::Tool::getProjectionGL(cameraCalibration, 2.0f, 2500.0f);
}
 
// ----------------------------------------------------------------------------
// Activates Camera Flash
// ----------------------------------------------------------------------------
// ----------------------------------------------------------------------------
// Activates (or deactivates) the camera torch; returns JNI_TRUE on success.
// ----------------------------------------------------------------------------
JNIEXPORT jboolean JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_activateFlash(JNIEnv*, jobject, jboolean flash)
{
    const bool enable = (flash == JNI_TRUE);
    if (QCAR::CameraDevice::getInstance().setFlashTorchMode(enable))
        return JNI_TRUE;
    return JNI_FALSE;
}
 
// Triggers a one-shot autofocus; returns JNI_TRUE if the camera accepted it.
JNIEXPORT jboolean JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_autofocus(JNIEnv*, jobject)
{
    if (QCAR::CameraDevice::getInstance().setFocusMode(
            QCAR::CameraDevice::FOCUS_MODE_TRIGGERAUTO))
        return JNI_TRUE;
    return JNI_FALSE;
}
 
// Maps a Java-side focus-mode index (0..3) onto the corresponding QCAR
// focus-mode constant and applies it. Returns JNI_FALSE for unknown indices
// or when the camera rejects the mode.
JNIEXPORT jboolean JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_setFocusMode(JNIEnv*, jobject, jint mode)
{
    int qcarFocusMode;

    if (mode == 0)
        qcarFocusMode = QCAR::CameraDevice::FOCUS_MODE_NORMAL;
    else if (mode == 1)
        qcarFocusMode = QCAR::CameraDevice::FOCUS_MODE_CONTINUOUSAUTO;
    else if (mode == 2)
        qcarFocusMode = QCAR::CameraDevice::FOCUS_MODE_INFINITY;
    else if (mode == 3)
        qcarFocusMode = QCAR::CameraDevice::FOCUS_MODE_MACRO;
    else
        return JNI_FALSE;  // unknown mode index

    return QCAR::CameraDevice::getInstance().setFocusMode(qcarFocusMode)
            ? JNI_TRUE : JNI_FALSE;
}
 
// One-time GL setup: clear color, texture-object creation/upload, and
// (for ES 2.0) shader program compilation plus attribute/uniform lookup.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_initRendering(
        JNIEnv* env, jobject obj)
{
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_initRendering");

    // Define clear color; fully transparent when the SDK needs an alpha channel.
    glClearColor(0.0f, 0.0f, 0.0f, QCAR::requiresAlpha() ? 0.0f : 1.0f);

    // Generate one GL texture object per registered texture and upload its
    // RGBA pixel data with linear filtering.
    for (int texIdx = 0; texIdx < textureCount; ++texIdx)
    {
        Texture* tex = textures[texIdx];
        glGenTextures(1, &(tex->mTextureID));
        glBindTexture(GL_TEXTURE_2D, tex->mTextureID);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, tex->mWidth, tex->mHeight,
                0, GL_RGBA, GL_UNSIGNED_BYTE, (GLvoid*) tex->mData);
    }

#ifndef USE_OPENGL_ES_1_1
    // Build the shader program and cache its attribute/uniform handles.
    shaderProgramID = SampleUtils::createProgramFromBuffer(cubeMeshVertexShader,
            cubeFragmentShader);

    vertexHandle = glGetAttribLocation(shaderProgramID, "vertexPosition");
    normalHandle = glGetAttribLocation(shaderProgramID, "vertexNormal");
    textureCoordHandle = glGetAttribLocation(shaderProgramID, "vertexTexCoord");
    mvpMatrixHandle = glGetUniformLocation(shaderProgramID,
            "modelViewProjectionMatrix");
    texSampler2DHandle = glGetUniformLocation(shaderProgramID, "texSampler2D");
#endif
}
 
// Called from Java whenever the rendering surface size changes.
// Records the new dimensions and reconfigures the camera video background
// so it matches the new screen geometry.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_updateRendering(
JNIEnv* env, jobject obj, jint width, jint height)
{
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_updateRendering");

    // Cache the new surface dimensions for later viewport computations.
    screenHeight = height;
    screenWidth = width;

    // The video background layout depends on the screen size, so rebuild it.
    configureVideoBackground();
}
 
#ifdef __cplusplus
}
#endif
 

Image targets via cloud

February 21, 2016 - 11:16pm #29

It seems this link is not available now.

Image targets via cloud

August 7, 2013 - 7:41am #28

Hello Al,

Thanks, it worked...

Binu Paul

Image targets via cloud

July 11, 2013 - 12:51am #27

Hi, this small tutorial could provide some useful info;

https://developer.vuforia.com/forum/faq/android-how-integrate-cloudreco-imagetargets-sample

It explains how to integrate CloudReco on top of ImageTargets, with steps and code snippets; it also explains in good detail how to enter and exit the scanning mode, which should help answer your last question.

Image targets via cloud

July 10, 2013 - 10:48pm #26

Hello Al,

I have fixed the issues with the media player. I am entering into the code cleaning phase now.

 

One issue I noted is that if the image is changed to a new one, the scan mode is not activated automatically. How can I fix that? Any reference docs?

Thanks,

Binu Paul

Image targets via cloud

July 8, 2013 - 11:55pm #25

Hello,

I am getting some errors when the video is played first time. I think it is due to some code/setting that was present for the normal video player code. The video is getting played from the cloud the second time.

Please see the log file attached. How to fix this error.

 

---

    public void createProductTexture(String data)
    {    
    DebugLog.LOGI("Cloud response obtained is: " +  data);
   
    try {
   
    String url  =  data;
   
    if (mRenderer != null) {
   
     for (int i = 0; i < NUM_TARGETS; i++) {
   
     mMovieName[i] = url;
   
         if (!mReturningFromFullScreen) {
       
         if (mMovieName[i].length() > 0) {
       
         mRenderer.requestLoad(i, mMovieName[i], mSeekPosition[i], false);
       
         }
             
         } else {
       
         if (mMovieName[i].length() > 0) {
       
         mRenderer.requestLoad(i, mMovieName[i], mSeekPosition[i], mWasPlaying[i]);
       
         }
             
         }
         
      }
     
  }
   
    } catch(Exception e) {
   
    DebugLog.LOGI("IOException captured while playing from cloud... " +  e.toString());
   
    }
    }

---

 

Thanks,

Binu Paul

AttachmentSize
Plain text icon log.txt12.5 KB

Image targets via cloud

July 8, 2013 - 7:27am #24

Hello Al,

I was able to play the video from the server after the target is detected using the cloud.

But, I am having some errors with the media player and old code. I am working on it and I think a code clean up will fix them.

Thanks,

Binu Paul

Image targets via cloud

July 7, 2013 - 7:43pm #23

Hi,

Can I use the same concept applied in the onResume() method to load the video from the url that is returned by the cloud service?

The customizations applicable to my case are:

1. NUM_TARGETS will be 1 always.

2. mMovieName will be having only one video.

3. There will not be a video to load until the cloud response is obtained. There will be nothing to load in the case of initApplicationAR() method.

4. Can I use the following code for my cloud response method to load the video?

---

// Reload all the movies
  if (mRenderer != null)
  {
      for (int i = 0; i < NUM_TARGETS; i++)
      {
          if (!mReturningFromFullScreen)
          {
              mRenderer.requestLoad(
                  i, mMovieName[i], mSeekPosition[i], false);
          }
          else
          {
              mRenderer.requestLoad(
                i, mMovieName[i], mSeekPosition[i], mWasPlaying[i]);
          }
       }
   }
 
---
 
5. I am using the mMediaPlayer.setDataSource(url); for loading the video from the cloud response url.
 
Will my concept work?
 
Thanks,
Binu Paul

Image targets via cloud

July 7, 2013 - 1:52pm #22

You may want to have a look at the "Sample Specific Notes" section of the VideoPlayback sample:

https://developer.vuforia.com/resources/sample-apps/video-playback-sample-app

You can find them at the bottom of the page, see "Sample Specific Notes"; this should help.

 

Image targets via cloud

July 7, 2013 - 8:12am #21

Hi,

I was able to get the video url from the server. But, how can I set them to the media player? I am not able to get this implemented correctly. Is there any code block I can refer to?

Thanks,

Binu Paul

Image targets via cloud

July 5, 2013 - 11:42am #20

Nice to see you're making good progress.

Image targets via cloud

July 5, 2013 - 9:05am #19

Hi AlessandroB,

As per your instructions, I checked and integrated the code for getting the JSON response. I was able to get the response correctly on the CPP and Java side after some initial issues. Thanks a lot.

Now, I need to set the obtained URL to the media player for playing the video dynamically.

Thanks, 

Binu Paul

 

Image targets via cloud

July 5, 2013 - 1:23am #18

Hi,

I was able to integrate the splash screen start button click to start the cloud scan. Thus I was able to get rid of the extra scan button. I am getting the JSON response from the cloud.

I will be working on the concept to get the JSON details to the Java side soon.

I also have to do a code clean up to get rid of the unwanted code segments. :)

Thanks,

Binu Paul

Image targets via cloud

July 4, 2013 - 7:55am #17

For the JSON part, have a look at the GetBookData class defined inside CloudReco.java.

 

Image targets via cloud

July 4, 2013 - 5:19am #16

Hello AlessandroB,

Thanks, your solution worked. I was able to fix the error and move forward with the integration. When deployed, I got the start scan button at the top left corner, and when it was clicked, the cloud scan was initiated.

Currently, I am also getting the normal startup screen, which should be replaced with the cloud scan. I will be working on that.

I should also find out a method to get the file name from the server using the JSON response.

Which function can be used to capture the response in CPP and Java side?

Thanks,

Binu Paul

Image targets via cloud

July 4, 2013 - 4:14am #15

The definition of R.layout.camera_overlay is automatically generated based on the content of the /res/layout subfolder (under your project directory);

this is where the file called camera_overlay.xml is located (in the ImageTargets project);

so, you basically need to copy that file to the /res/layout/ directory of your project (e.g. Videoplayback )

 

Image targets via cloud

July 4, 2013 - 3:08am #14

Hi,

I am working on it. But, when the initApplicationAR() function is modified, I am getting some errors. The line that throws the error is:

---

LayoutInflater inflater = LayoutInflater.from(this);
mUILayout = (RelativeLayout) inflater.inflate(R.layout.camera_overlay, null, false);
 
---
 
The error is: R.layout.camera_overlay cannot be resolved or is not a field.
 
When I cross checked, I found that the R.java (auto generated file) in the video playback project is not having the definition, but is present in the image targets project. Since this is an auto generated file, how can I add this definition to my video playback code?
 
Thanks,
Binu Paul
AttachmentSize
Image icon 2013-07-04_153340.png42.42 KB
Plain text icon R.java_.txt1.5 KB

Image targets via cloud

July 4, 2013 - 2:23am #13

Hi, i did not implement it fully

Image targets via cloud

July 3, 2013 - 9:15am #12

Hello,

No, I have not tried it yet. I will check that tomorrow. 

Did you implement the video + cloud app fully?

Thanks,

Binu Paul

 

Image targets via cloud

July 3, 2013 - 5:04am #11

Also, did you try to render different models on different targets? If yes, then you can do it with the cloud as well.

Image targets via cloud

July 3, 2013 - 5:02am #10

Hello Binu Paul, i think it's possible and  i am also trying to make such thing like you asked

Image targets via cloud

July 3, 2013 - 3:52am #9

Hi,

Adding the code to ' ImageTargetsRender.java' fixed my error. Thanks for pointing this out,

I need to select the 3D object based on the value returned from the cloud service request. How is that possible? Right now, it is displaying the default teapot object for all images. Is there any code reference available?

I am on ImageTargets.cpp file. The code block is:

---

if (newTrackable != 0)
                {
                    LOG("Successfully created new trackable '%s' with rating '%d'.",
 
                    newTrackable->getName(), result->getTrackingRating());
 
                    if (strcmp(result->getUniqueTargetId(), lastTargetId) != 0)
                    {
                         // If the target has changed...
                         // app-specific: do something
                         // (e.g. generate new 3D model or texture)
                     }
 
                     strcpy(lastTargetId, result->getUniqueTargetId());
 
                     // Stop Cloud Reco scanning
                     targetFinder->stop();
 
                     scanningMode = false;
                     showStartScanButton = true;
                 }

---

Thanks,

Binu Paul

Image targets via cloud

July 2, 2013 - 11:38pm #8

You are right, we can integrate Cloud support with the Video Playback feature, and of course we need to take care of some special code segments; also, in this forum there are many good references to solve this issue.

Image targets via cloud

July 2, 2013 - 11:28pm #7

Hi,

I commented out that code earlier. I will check and will try now. 

Also, can we implement the cloud support for video playback the same way? Do we need to take care of any special code segments?

Thanks,

Binu Paul

Image targets via cloud

July 2, 2013 - 11:22pm #6

Hi Binu Paul,  did you declare this method inside ImageTargetsRender.java  

public void showStartScanButton()
{
    mActivity.showStartScanButton();
}
 
 

Image targets via cloud

July 2, 2013 - 10:51pm #5

Hi,

I am also working on the cloud + image target integration. I followed the steps mentioned by AlessandroB and was able to do the integration. The application is now communicating with the cloud (please see below):

---
07-03 11:14:51.205: D/AR(15836): ARHttpClient: Executing post!
07-03 11:14:53.557: I/AR(15836): Completed CloudReco transaction with ID 'f0a05f71cfe84fb397f92b07d8891e2c'
07-03 11:14:53.619: D/dalvikvm(15836): GC_CONCURRENT freed 383K, 13% free 9637K/11015K, paused 13ms+5ms, total 54ms
07-03 11:14:53.690: I/AR(15836): ImageTracker: Successfully created dataset
07-03 11:14:53.846: I/AR(15836): Successfully created ImageTarget.
07-03 11:14:53.861: I/QCAR(15836): Successfully created new trackable 'Chips' with rating '4'.
07-03 11:14:53.861: A/libc(15836): Fatal signal 11 (SIGSEGV) at 0x00000004 (code=1), thread 15854 (Thread-4133)
---
 
When my custom image from cloud is detected, the application crashes (last line) and returns to the splash screen. This is the case with the 'chips' and 'stones' images also. How to fix the error?
 
Thanks,
Binu Paul
 
 

Image targets via cloud

June 26, 2013 - 10:24pm #4

Glad to hear that!

Image targets via cloud

June 26, 2013 - 9:39pm #3

Hi AlessandroB,  it works,   thanks for your quick responses, best regards 

Image targets via cloud

June 26, 2013 - 12:06am #2

hi, we cannot check every single line of code (also, please avoid posting huge code snippets in the Forum, as this makes it difficult to read the thread).

I would suggest checking once again this article and trying to debug the issue:

https://developer.vuforia.com/forum/faq/android-how-integrate-cloudreco-imagetargets-sample

 

Log in or register to post comments