Log in or register to post comments

How to change the teapot into a text in vuforia native

July 17, 2012 - 5:01pm #1

Does anyone have a sample showing how to change the teapot object to some info text? I want to create an AR app where, when my Android device detects a marker, it shows the info text. Each marker should have a different info text.

How to change the teapot into a text in vuforia native

July 23, 2012 - 12:00pm #10

Hello,

The link in my previous post should help you then. I suggest you create a texture image with the text you want. You then need to follow the instruction in the other post to create a 3D plane to render you 2D texture onto. This will create the effect of having text in 3D space.

Thank you,

-Peter

How to change the teapot into a text in vuforia native

July 19, 2012 - 7:54pm #9

i want text to track on the target

How to change the teapot into a text in vuforia native

July 19, 2012 - 10:13am #8

Hello,

There needs to be some sort of 3D plane to track onto. See this thread for more information https://ar.qualcomm.at/content/using-2d-images . Do you want the text to track on the target or just display on screen?

Thank you,

-Peter

How to change the teapot into a text in vuforia native

July 19, 2012 - 3:49am #7

im not using 3D text...

How to change the teapot into a text in vuforia native

July 18, 2012 - 6:43pm #6

Do you want to render 3D text? Does the text change?

Take a look at the link I'd posted below.

How to change the teapot into a text in vuforia native

July 17, 2012 - 7:19pm #5

Can anyone help me change it? I'm a newbie here.

How to change the teapot into a text in vuforia native

July 17, 2012 - 7:16pm #4

// Textures:
// Loaded from the Java side in initApplicationNative; OpenGL texture objects
// are generated from them in initRendering.
int textureCount = 0;
Texture** textures = 0;

// OpenGL ES 2.0 specific:
// Shader program and attribute/uniform handles, resolved in initRendering.
#ifdef USE_OPENGL_ES_2_0
unsigned int shaderProgramID = 0;
GLint vertexHandle = 0;
GLint normalHandle = 0;
GLint textureCoordHandle = 0;
GLint mvpMatrixHandle = 0;
#endif

// Screen dimensions:
// Set from Java in initApplicationNative and updateRendering.
unsigned int screenWidth = 0;
unsigned int screenHeight = 0;

// Indicates whether screen is in portrait (true) or landscape (false) mode
bool isActivityInPortraitMode = false;

// The projection matrix used for rendering virtual objects:
QCAR::Matrix44F projectionMatrix;

// Constants:
// Uniform scale applied to the rendered model (teapot) on each target.
static const float kObjectScale = 3.f;

// The three tracking data sets created/loaded in loadTrackerData and
// destroyed in destroyTrackerData. Exactly one is active at a time.
QCAR::DataSet* dataSetStonesAndChips = 0;
QCAR::DataSet* dataSetTarmac = 0;
QCAR::DataSet* dataSetbatik = 0;

// Set by switchDatasetAsap (Java side); consumed by the QCAR update callback,
// which performs the actual data-set switch on the camera thread.
bool switchDataSetAsap = false;

// Object to receive update callbacks from QCAR SDK
class ImageTargets_UpdateCallback : public QCAR::UpdateCallback
{
virtual void QCAR_onUpdate(QCAR::State& /*state*/)
{
if (switchDataSetAsap)
{
switchDataSetAsap = false;

// Get the image tracker:
QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance();
QCAR::ImageTracker* imageTracker = static_cast<:imagetracker>(
trackerManager.getTracker(QCAR::Tracker::IMAGE_TRACKER));
if (imageTracker == 0 || dataSetStonesAndChips == 0 || dataSetTarmac == 0 ||
dataSetbatik == 0 || imageTracker->getActiveDataSet() == 0)
{
LOG("Failed to switch data set.");
return;
}

if (imageTracker->getActiveDataSet() == dataSetStonesAndChips)
{
imageTracker->deactivateDataSet(dataSetStonesAndChips);
imageTracker->deactivateDataSet(dataSetbatik);
imageTracker->activateDataSet(dataSetTarmac);
}
else if (imageTracker->getActiveDataSet() == dataSetbatik)
{
imageTracker->deactivateDataSet(dataSetStonesAndChips);
imageTracker->deactivateDataSet(dataSetTarmac);
imageTracker->activateDataSet(dataSetbatik);
}
else
{
imageTracker->deactivateDataSet(dataSetTarmac);
imageTracker->deactivateDataSet(dataSetbatik);
imageTracker->activateDataSet(dataSetStonesAndChips);
}
}
}
};

ImageTargets_UpdateCallback updateCallback;

// Reports to the Java side which OpenGL ES version this native library was
// built against: 1 when compiled with USE_OPENGL_ES_1_1, otherwise 2.
JNIEXPORT int JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_getOpenGlEsVersionNative(JNIEnv *, jobject)
{
#ifdef USE_OPENGL_ES_1_1
return 1;
#else
return 2;
#endif
}

// Records the activity orientation (true = portrait, false = landscape).
// Read by configureVideoBackground to size the video background correctly.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_setActivityPortraitMode(JNIEnv *, jobject, jboolean isPortrait)
{
isActivityInPortraitMode = isPortrait;
}

// Requests a data-set switch. The switch itself is performed later inside
// ImageTargets_UpdateCallback::QCAR_onUpdate, where it is safe to do so.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_switchDatasetAsap(JNIEnv *, jobject)
{
switchDataSetAsap = true;
}

// Creates the image tracker via the QCAR tracker manager.
// Returns 1 on success, 0 if the tracker could not be initialized.
JNIEXPORT int JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initTracker(JNIEnv *, jobject)
{
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initTracker");

    // Ask the tracker manager singleton for a new image tracker instance.
    QCAR::Tracker* imageTracker =
        QCAR::TrackerManager::getInstance().initTracker(QCAR::Tracker::IMAGE_TRACKER);

    if (!imageTracker)
    {
        LOG("Failed to initialize ImageTracker.");
        return 0;
    }

    LOG("Successfully initialized ImageTracker.");
    return 1;
}

// Tears down the image tracker created in initTracker.
// Call destroyTrackerData first so the data sets are released cleanly.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_deinitTracker(JNIEnv *, jobject)
{
LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_deinitTracker");

// Deinit the image tracker:
QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance();
trackerManager.deinitTracker(QCAR::Tracker::IMAGE_TRACKER);
}

// Creates the three tracking data sets (StonesAndChips, Tarmac, batik), loads
// each from its XML descriptor in the app resources, and activates the batik
// data set. Returns 1 on success, 0 on any failure.
// Fixes the garbled `static_cast<:imagetracker>` cast from the original.
// NOTE(review): on failure the already-created data sets are not destroyed
// here; destroyTrackerData handles cleanup — confirm the Java side calls it.
JNIEXPORT int JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_loadTrackerData(JNIEnv *, jobject)
{
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_loadTrackerData");

    // Get the image tracker:
    QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance();
    QCAR::ImageTracker* imageTracker = static_cast<QCAR::ImageTracker*>(
        trackerManager.getTracker(QCAR::Tracker::IMAGE_TRACKER));
    if (imageTracker == NULL)
    {
        LOG("Failed to load tracking data set because the ImageTracker has not"
            " been initialized.");
        return 0;
    }

    // Create the data sets:
    dataSetStonesAndChips = imageTracker->createDataSet();
    if (dataSetStonesAndChips == 0)
    {
        LOG("Failed to create a new tracking data.");
        return 0;
    }

    dataSetTarmac = imageTracker->createDataSet();
    if (dataSetTarmac == 0)
    {
        LOG("Failed to create a new tracking data.");
        return 0;
    }

    dataSetbatik = imageTracker->createDataSet();
    if (dataSetbatik == 0)
    {
        LOG("Failed to create a new tracking data.");
        return 0;
    }

    // Load the data sets from XML descriptors bundled as app resources:
    if (!dataSetStonesAndChips->load("StonesAndChips.xml", QCAR::DataSet::STORAGE_APPRESOURCE))
    {
        LOG("Failed to load data set.");
        return 0;
    }

    if (!dataSetTarmac->load("Tarmac.xml", QCAR::DataSet::STORAGE_APPRESOURCE))
    {
        LOG("Failed to load data set.");
        return 0;
    }

    if (!dataSetbatik->load("batik.xml", QCAR::DataSet::STORAGE_APPRESOURCE))
    {
        LOG("Failed to load data set.");
        return 0;
    }

    // Activate the data set (batik targets are tracked first; the update
    // callback cycles to the other sets on request):
    if (!imageTracker->activateDataSet(dataSetbatik))
    {
        LOG("Failed to activate data set.");
        return 0;
    }

    LOG("Successfully loaded and activated data set.");
    return 1;
}

JNIEXPORT int JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_destroyTrackerData(JNIEnv *, jobject)
{
LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_destroyTrackerData");

// Get the image tracker:
QCAR::TrackerManager& trackerManager = QCAR::TrackerManager::getInstance();
QCAR::ImageTracker* imageTracker = static_cast<:imagetracker>(
trackerManager.getTracker(QCAR::Tracker::IMAGE_TRACKER));
if (imageTracker == NULL)
{
LOG("Failed to destroy the tracking data set because the ImageTracker has not"
" been initialized.");
return 0;
}

if (dataSetStonesAndChips != 0)
{
if (imageTracker->getActiveDataSet() == dataSetStonesAndChips &&
!imageTracker->deactivateDataSet(dataSetStonesAndChips))
{
LOG("Failed to destroy the tracking data set StonesAndChips because the data set "
"could not be deactivated.");
return 0;
}

if (!imageTracker->destroyDataSet(dataSetStonesAndChips))
{
LOG("Failed to destroy the tracking data set StonesAndChips.");
return 0;
}

LOG("Successfully destroyed the data set StonesAndChips.");
dataSetStonesAndChips = 0;
}

if (dataSetTarmac != 0)
{
if (imageTracker->getActiveDataSet() == dataSetTarmac &&
!imageTracker->deactivateDataSet(dataSetTarmac))
{
LOG("Failed to destroy the tracking data set Tarmac because the data set "
"could not be deactivated.");
return 0;
}

if (!imageTracker->destroyDataSet(dataSetTarmac))
{
LOG("Failed to destroy the tracking data set Tarmac.");
return 0;
}

LOG("Successfully destroyed the data set Tarmac.");
dataSetTarmac = 0;
}

if (dataSetbatik != 0)
{
if (imageTracker->getActiveDataSet() == dataSetbatik &&
!imageTracker->deactivateDataSet(dataSetbatik))
{
LOG("Failed to destroy the tracking data set batik because the data set "
"could not be deactivated.");
return 0;
}

if (!imageTracker->destroyDataSet(dataSetbatik))
{
LOG("Failed to destroy the tracking data set batik.");
return 0;
}

LOG("Successfully destroyed the data set batik.");
dataSetbatik = 0;
}
return 1;
}

// Called from Java once QCAR has finished initializing; registers the update
// callback that performs the deferred data-set swap.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_onQCARInitializedNative(JNIEnv *, jobject)
{
// Register the update callback where we handle the data set swap:
QCAR::registerCallback(&updateCallback);

// Comment in to enable tracking of up to 2 targets simultaneously and
// split the work over multiple frames:
// QCAR::setHint(QCAR::HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 2);
// QCAR::setHint(QCAR::HINT_IMAGE_TARGET_MULTI_FRAME_ENABLED, 1);
}

// Per-frame render callback (GL thread). Clears the frame, draws the camera
// video background, then renders a textured teapot on every tracked target,
// choosing the texture by target name.
// Fixes the truncated loop header from the original
// (`for(int tIdx = 0; tIdx {`), restored to iterate over the active
// trackables reported by the QCAR state.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_renderFrame(JNIEnv *, jobject)
{
    //LOG("Java_com_qualcomm_QCARSamples_ImageTargets_GLRenderer_renderFrame");

    // Clear color and depth buffer
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

    // Get the state from QCAR and mark the beginning of a rendering section
    QCAR::State state = QCAR::Renderer::getInstance().begin();

    // Explicitly render the Video Background
    QCAR::Renderer::getInstance().drawVideoBackground();

#ifdef USE_OPENGL_ES_1_1
    // Set GL11 flags:
    glEnableClientState(GL_VERTEX_ARRAY);
    glEnableClientState(GL_NORMAL_ARRAY);
    glEnableClientState(GL_TEXTURE_COORD_ARRAY);

    glEnable(GL_TEXTURE_2D);
    glDisable(GL_LIGHTING);
#endif

    glEnable(GL_DEPTH_TEST);
    glEnable(GL_CULL_FACE);

    // Render an augmentation for every trackable found in this frame:
    for (int tIdx = 0; tIdx < state.getNumActiveTrackables(); tIdx++)
    {
        // Get the trackable and convert its pose into an OpenGL model-view
        // matrix:
        const QCAR::Trackable* trackable = state.getActiveTrackable(tIdx);
        QCAR::Matrix44F modelViewMatrix =
            QCAR::Tool::convertPose2GLMatrix(trackable->getPose());

        // Choose the texture based on the target name:
        // chips/tambal/uwiri -> texture 0, stones/ceplok -> texture 1,
        // anything else -> texture 2.
        int textureIndex;
        if (strcmp(trackable->getName(), "chips") == 0)
        {
            textureIndex = 0;
        }
        else if (strcmp(trackable->getName(), "stones") == 0)
        {
            textureIndex = 1;
        }
        else if (strcmp(trackable->getName(), "tambal") == 0)
        {
            textureIndex = 0;
        }
        else if (strcmp(trackable->getName(), "ceplok") == 0)
        {
            textureIndex = 1;
        }
        else if (strcmp(trackable->getName(), "uwiri") == 0)
        {
            textureIndex = 0;
        }
        else
        {
            textureIndex = 2;
        }

        const Texture* const thisTexture = textures[textureIndex];

#ifdef USE_OPENGL_ES_1_1
        // Load projection matrix:
        glMatrixMode(GL_PROJECTION);
        glLoadMatrixf(projectionMatrix.data);

        // Load model view matrix and place/scale the object on the target:
        glMatrixMode(GL_MODELVIEW);
        glLoadMatrixf(modelViewMatrix.data);
        glTranslatef(0.f, 0.f, kObjectScale);
        glScalef(kObjectScale, kObjectScale, kObjectScale);

        // Draw object:
        glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
        glTexCoordPointer(2, GL_FLOAT, 0, (const GLvoid*) &teapotTexCoords[0]);
        glVertexPointer(3, GL_FLOAT, 0, (const GLvoid*) &teapotVertices[0]);
        glNormalPointer(GL_FLOAT, 0, (const GLvoid*) &teapotNormals[0]);
        glDrawElements(GL_TRIANGLES, NUM_TEAPOT_OBJECT_INDEX, GL_UNSIGNED_SHORT,
                       (const GLvoid*) &teapotIndices[0]);
#else
        // ES 2.0 path: build MVP = projection * (modelView * translate * scale)
        QCAR::Matrix44F modelViewProjection;

        SampleUtils::translatePoseMatrix(0.0f, 0.0f, kObjectScale,
                                         &modelViewMatrix.data[0]);
        SampleUtils::scalePoseMatrix(kObjectScale, kObjectScale, kObjectScale,
                                     &modelViewMatrix.data[0]);
        SampleUtils::multiplyMatrix(&projectionMatrix.data[0],
                                    &modelViewMatrix.data[0] ,
                                    &modelViewProjection.data[0]);

        glUseProgram(shaderProgramID);

        glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0,
                              (const GLvoid*) &teapotVertices[0]);
        glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0,
                              (const GLvoid*) &teapotNormals[0]);
        glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0,
                              (const GLvoid*) &teapotTexCoords[0]);

        glEnableVertexAttribArray(vertexHandle);
        glEnableVertexAttribArray(normalHandle);
        glEnableVertexAttribArray(textureCoordHandle);

        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
        glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE,
                           (GLfloat*)&modelViewProjection.data[0] );
        glDrawElements(GL_TRIANGLES, NUM_TEAPOT_OBJECT_INDEX, GL_UNSIGNED_SHORT,
                       (const GLvoid*) &teapotIndices[0]);

        SampleUtils::checkGlError("ImageTargets renderFrame");
#endif
    }

    glDisable(GL_DEPTH_TEST);

#ifdef USE_OPENGL_ES_1_1
    glDisable(GL_TEXTURE_2D);
    glDisableClientState(GL_VERTEX_ARRAY);
    glDisableClientState(GL_NORMAL_ARRAY);
    glDisableClientState(GL_TEXTURE_COORD_ARRAY);
#else
    glDisableVertexAttribArray(vertexHandle);
    glDisableVertexAttribArray(normalHandle);
    glDisableVertexAttribArray(textureCoordHandle);
#endif

    QCAR::Renderer::getInstance().end();
}

// Sizes and positions the camera video background so it fills the screen,
// scaling the video to the screen's long axis and then correcting if that
// leaves the short axis uncovered.
// Fixes the two truncated `if` conditions from the original
// (`if(config.mSize.data[0] {` / `if(config.mSize.data[1] {`), restored to
// compare against screenWidth/screenHeight as in the stock sample, and fixes
// the "missmatch" typo in the log message.
void
configureVideoBackground()
{
    // Get the default video mode:
    QCAR::CameraDevice& cameraDevice = QCAR::CameraDevice::getInstance();
    QCAR::VideoMode videoMode = cameraDevice.
        getVideoMode(QCAR::CameraDevice::MODE_DEFAULT);

    // Configure the video background
    QCAR::VideoBackgroundConfig config;
    config.mEnabled = true;
    config.mSynchronous = true;
    config.mPosition.data[0] = 0.0f;
    config.mPosition.data[1] = 0.0f;

    if (isActivityInPortraitMode)
    {
        //LOG("configureVideoBackground PORTRAIT");
        // Scale video to fill the screen height; note width/height are
        // swapped because the camera delivers landscape frames.
        config.mSize.data[0] = videoMode.mHeight
            * (screenHeight / (float)videoMode.mWidth);
        config.mSize.data[1] = screenHeight;

        if (config.mSize.data[0] < screenWidth)
        {
            LOG("Correcting rendering background size to handle mismatch between screen and video aspect ratios.");
            config.mSize.data[0] = screenWidth;
            config.mSize.data[1] = screenWidth *
                (videoMode.mWidth / (float)videoMode.mHeight);
        }
    }
    else
    {
        //LOG("configureVideoBackground LANDSCAPE");
        config.mSize.data[0] = screenWidth;
        config.mSize.data[1] = videoMode.mHeight
            * (screenWidth / (float)videoMode.mWidth);

        if (config.mSize.data[1] < screenHeight)
        {
            LOG("Correcting rendering background size to handle mismatch between screen and video aspect ratios.");
            config.mSize.data[0] = screenHeight
                * (videoMode.mWidth / (float)videoMode.mHeight);
            config.mSize.data[1] = screenHeight;
        }
    }

    LOG("Configure Video Background : Video (%d,%d), Screen (%d,%d), mSize (%d,%d)", videoMode.mWidth, videoMode.mHeight, screenWidth, screenHeight, config.mSize.data[0], config.mSize.data[1]);

    // Set the config:
    QCAR::Renderer::getInstance().setVideoBackgroundConfig(config);
}

// One-time application init: stores the screen size and pulls the texture
// objects from the Java activity (via getTextureCount()/getTexture(int))
// into the global `textures` array.
// Fixes the truncated loop header from the original
// (`for (int i = 0; i {`), restored to iterate i < textureCount.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initApplicationNative(
                            JNIEnv* env, jobject obj, jint width, jint height)
{
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initApplicationNative");

    // Store screen dimensions
    screenWidth = width;
    screenHeight = height;

    // Handle to the activity class:
    jclass activityClass = env->GetObjectClass(obj);

    jmethodID getTextureCountMethodID = env->GetMethodID(activityClass,
                                                    "getTextureCount", "()I");
    if (getTextureCountMethodID == 0)
    {
        LOG("Function getTextureCount() not found.");
        return;
    }

    textureCount = env->CallIntMethod(obj, getTextureCountMethodID);
    if (!textureCount)
    {
        LOG("getTextureCount() returned zero.");
        return;
    }

    textures = new Texture*[textureCount];

    jmethodID getTextureMethodID = env->GetMethodID(activityClass,
        "getTexture", "(I)Lcom/qualcomm/QCARSamples/ImageTargets/Texture;");

    if (getTextureMethodID == 0)
    {
        LOG("Function getTexture() not found.");
        return;
    }

    // Register the textures
    for (int i = 0; i < textureCount; ++i)
    {
        jobject textureObject = env->CallObjectMethod(obj, getTextureMethodID, i);
        if (textureObject == NULL)
        {
            LOG("GetTexture() returned zero pointer");
            return;
        }

        textures[i] = Texture::create(env, textureObject);
    }
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_initApplicationNative finished");
}

// Releases the texture objects allocated in initApplicationNative.
// Fixes the truncated loop header from the original
// (`for (int i = 0; i {`), restored to iterate i < textureCount.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_deinitApplicationNative(
                                                        JNIEnv* env, jobject obj)
{
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_deinitApplicationNative");

    // Release texture resources
    if (textures != 0)
    {
        for (int i = 0; i < textureCount; ++i)
        {
            delete textures[i];
            textures[i] = NULL;
        }

        delete[] textures;
        textures = NULL;

        textureCount = 0;
    }
}

// Initializes and starts the camera, configures the video background, and
// starts the image tracker. Returns silently on any camera failure.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_startCamera(JNIEnv *,
                                                                    jobject)
{
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_startCamera");

    QCAR::CameraDevice& camera = QCAR::CameraDevice::getInstance();

    // Initialize the camera:
    if (!camera.init())
        return;

    // Configure the video background
    configureVideoBackground();

    // Select the default mode:
    if (!camera.selectVideoMode(QCAR::CameraDevice::MODE_DEFAULT))
        return;

    // Start the camera:
    if (!camera.start())
        return;

    // Uncomment to enable flash
    //if(QCAR::CameraDevice::getInstance().setFlashTorchMode(true))
    // LOG("IMAGE TARGETS : enabled torch");

    // Uncomment to enable infinity focus mode, or any other supported focus mode
    // See CameraDevice.h for supported focus modes
    //if(QCAR::CameraDevice::getInstance().setFocusMode(QCAR::CameraDevice::FOCUS_MODE_INFINITY))
    // LOG("IMAGE TARGETS : enabled infinity focus");

    // Start the tracker:
    QCAR::Tracker* imageTracker =
        QCAR::TrackerManager::getInstance().getTracker(QCAR::Tracker::IMAGE_TRACKER);
    if (imageTracker != 0)
        imageTracker->start();
}

// Stops the image tracker, then stops and deinitializes the camera.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_stopCamera(JNIEnv *, jobject)
{
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_stopCamera");

    // Stop the tracker first so it no longer consumes camera frames:
    QCAR::Tracker* imageTracker =
        QCAR::TrackerManager::getInstance().getTracker(QCAR::Tracker::IMAGE_TRACKER);
    if (imageTracker != 0)
        imageTracker->stop();

    // Then shut the camera down:
    QCAR::CameraDevice& camera = QCAR::CameraDevice::getInstance();
    camera.stop();
    camera.deinit();
}

// Computes and caches the OpenGL projection matrix from the camera
// calibration, with near/far clip planes at 2 and 2000 (scene units).
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_setProjectionMatrix(JNIEnv *, jobject)
{
LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_setProjectionMatrix");

// Cache the projection matrix:
const QCAR::CameraCalibration& cameraCalibration =
QCAR::CameraDevice::getInstance().getCameraCalibration();
projectionMatrix = QCAR::Tool::getProjectionGL(cameraCalibration, 2.0f,
2000.0f);
}

// Turns the camera torch on or off. Returns JNI_TRUE if the camera accepted
// the request, JNI_FALSE otherwise.
JNIEXPORT jboolean JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_activateFlash(JNIEnv*, jobject, jboolean flash)
{
    const bool enable = (flash == JNI_TRUE);
    const bool accepted = QCAR::CameraDevice::getInstance().setFlashTorchMode(enable);
    return accepted ? JNI_TRUE : JNI_FALSE;
}

// Triggers a single autofocus sweep. Returns JNI_TRUE if the camera accepted
// the focus-mode change, JNI_FALSE otherwise.
JNIEXPORT jboolean JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_autofocus(JNIEnv*, jobject)
{
return QCAR::CameraDevice::getInstance().setFocusMode(QCAR::CameraDevice::FOCUS_MODE_TRIGGERAUTO) ? JNI_TRUE : JNI_FALSE;
}

// Maps the Java-side focus-mode constant (0..3) onto the corresponding QCAR
// focus mode and applies it: 0=NORMAL, 1=CONTINUOUSAUTO, 2=INFINITY, 3=MACRO.
// Returns JNI_FALSE for out-of-range modes or if the camera rejects the mode.
JNIEXPORT jboolean JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargets_setFocusMode(JNIEnv*, jobject, jint mode)
{
    // Lookup table indexed by the Java-side constant.
    static const int kFocusModes[] =
    {
        QCAR::CameraDevice::FOCUS_MODE_NORMAL,
        QCAR::CameraDevice::FOCUS_MODE_CONTINUOUSAUTO,
        QCAR::CameraDevice::FOCUS_MODE_INFINITY,
        QCAR::CameraDevice::FOCUS_MODE_MACRO
    };

    const int idx = (int)mode;
    if (idx < 0 || idx > 3)
        return JNI_FALSE;

    return QCAR::CameraDevice::getInstance().setFocusMode(kFocusModes[idx])
        ? JNI_TRUE : JNI_FALSE;
}

// GL-thread rendering init: sets the clear color, uploads each loaded Texture
// as a GL texture object, and (on ES 2.0) builds the shader program and
// resolves its attribute/uniform handles.
// Fixes the truncated loop header from the original
// (`for (int i = 0; i {`), restored to iterate i < textureCount.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_initRendering(
                                                    JNIEnv* env, jobject obj)
{
    LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_initRendering");

    // Define clear color; alpha 0 when QCAR needs to composite below us.
    glClearColor(0.0f, 0.0f, 0.0f, QCAR::requiresAlpha() ? 0.0f : 1.0f);

    // Now generate the OpenGL texture objects and add settings
    for (int i = 0; i < textureCount; ++i)
    {
        glGenTextures(1, &(textures[i]->mTextureID));
        glBindTexture(GL_TEXTURE_2D, textures[i]->mTextureID);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, textures[i]->mWidth,
                     textures[i]->mHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE,
                     (GLvoid*) textures[i]->mData);
    }

#ifndef USE_OPENGL_ES_1_1
    // Build the shader program and look up its handles once:
    shaderProgramID = SampleUtils::createProgramFromBuffer(cubeMeshVertexShader,
                                                           cubeFragmentShader);

    vertexHandle = glGetAttribLocation(shaderProgramID,
                                       "vertexPosition");
    normalHandle = glGetAttribLocation(shaderProgramID,
                                       "vertexNormal");
    textureCoordHandle = glGetAttribLocation(shaderProgramID,
                                             "vertexTexCoord");
    mvpMatrixHandle = glGetUniformLocation(shaderProgramID,
                                           "modelViewProjectionMatrix");
#endif
}

// Called when the GL surface size changes (e.g. rotation): stores the new
// screen dimensions and resizes the video background to match.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_updateRendering(
                        JNIEnv* env, jobject obj, jint width, jint height)
{
LOG("Java_com_qualcomm_QCARSamples_ImageTargets_ImageTargetsRenderer_updateRendering");

// Update screen dimensions
screenWidth = width;
screenHeight = height;

// Reconfigure the video background
configureVideoBackground();
}

#ifdef __cplusplus
}
#endif

This is my code (ImageTargets.cpp). Where do I add or change the object, and how do I implement it?

How to change the teapot into a text in vuforia native

July 17, 2012 - 5:59pm #3

It's a challenge to render text dynamically w/ OpenGL. If the text is static (i.e. it won't change ) then you could render it as a 2D bitmap on a plane. You could also present the text using an Android View via Java, though this wouldn't be positioned on the target.

Here is a description of some of the OpenGL approaches - http://stackoverflow.com/questions/1339136/draw-text-in-opengl-es-android

How to change the teapot into a text in vuforia native

July 17, 2012 - 5:11pm #2

I'm trying to build from the ImageTargets sample... any idea how to change the teapot to text?

 

Log in or register to post comments