Hi,
I have a problem rendering the Vuforia video background into a framebuffer-attached texture. I don't know whether the problem is related to Vuforia or to something I'm doing wrong. I'm testing on two different devices: on a Galaxy S2 with Android 4.1.2 the code works as expected, but on a Nexus 5 with Android 4.4 it doesn't. I stripped down part of the project I'm working on and made a test case starting from your example on video background access. The file is copied here so you can give it a try as quickly as possible.
P.S. You can try commenting out line 715 and running it: on the S2 the screen correctly becomes black, while on the Nexus 5 the background is still visible, as if the framebuffer attachment operation were completely ignored.
Thanks in advance,
Andrea
<pre class="brush: cpp">
/*==============================================================================
Copyright (c) 2011-2013 QUALCOMM Austria Research Center GmbH.
All Rights Reserved.
@file
BackgroundTextureAccess.cpp
@brief
Sample for BackgroundTextureAccess
==============================================================================*/
#include <jni.h>
#include <android/log.h>
#include <stdio.h>
#include <string.h>
#include <assert.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <QCAR/QCAR.h>
#include <QCAR/CameraDevice.h>
#include <QCAR/Renderer.h>
#include <QCAR/VideoBackgroundConfig.h>
#include <QCAR/Trackable.h>
#include <QCAR/TrackableResult.h>
#include <QCAR/Tool.h>
#include <QCAR/Tracker.h>
#include <QCAR/TrackerManager.h>
#include <QCAR/ImageTracker.h>
#include <QCAR/CameraCalibration.h>
#include <QCAR/DataSet.h>
#include <QCAR/VideoBackgroundTextureInfo.h>
#include "SampleUtils.h"
#include "Texture.h"
#include "CubeShaders.h"
#include "Shaders.h"
#include "Teapot.h"
#include "GLUtils.h"
#ifdef __cplusplus
extern "C"
{
#endif
// Textures handed over from the Java activity (see initApplicationNative):
int textureCount = 0;
Texture** textures = 0;
// OpenGL ES 2.0 specific: program id and cached locations for the teapot shader.
unsigned int shaderProgramID = 0;
GLint vertexHandle = 0;
GLint normalHandle = 0;
GLint textureCoordHandle = 0;
GLint mvpMatrixHandle = 0;
GLint texSampler2DHandle = 0;
// These values will hold the GL viewport used for the video background
int viewportPosition_x = 0;
int viewportPosition_y = 0;
int viewportSize_x = 0;
int viewportSize_y = 0;
// Screen dimensions in pixels, set from Java at init and on resize:
unsigned int screenWidth = 0;
unsigned int screenHeight = 0;
// Indicates whether screen is in portrait (true) or landscape (false) mode
bool isActivityInPortraitMode = false;
// The projection matrix used for rendering virtual objects:
QCAR::Matrix44F projectionMatrix;
// Constants:
static const float kObjectScale = 3.f;
// Tracking data set loaded by loadTrackerData(); 0 while unloaded.
QCAR::DataSet* dataSetTarmac = 0;
// This is the OpenGL ES index for our vertex/fragment program of the video background rendering
unsigned int vbShaderProgramID = 0;
// These handles are required to pass the values to the videobackground shaders
GLint vbVertexPositionHandle = 0, vbVertexTexCoordHandle = 0;
GLint vbTexSampler2DHandle = 0, vbProjectionMatrixHandle = 0;
GLint vbTouchLocationXHandle = 0, vbTouchLocationYHandle = 0;
// This flag indicates whether the shaders have been initialized
bool vbShadersInitialized = false;
// This flag indicates whether the mesh values have been initialized
bool vbMeshInitialized = false;
// These values indicate how many rows and columns we want for our video background texture polygon
const int vbNumVertexCols = 10;
const int vbNumVertexRows = 10;
// These are the variables for the vertices, coords and indices
const int vbNumVertexValues = vbNumVertexCols*vbNumVertexRows*3; // Each vertex has three values: X, Y, Z
const int vbNumTexCoord = vbNumVertexCols*vbNumVertexRows*2; // Each texture coordinate has 2 values: U and V
const int vbNumIndices = (vbNumVertexCols-1)*(vbNumVertexRows-1)*6; // Each square is composed of 2 triangles which in turn
// have 3 vertices each, so we need 6 indices
// These are the data containers for the vertices, texcoords and indices in the CPU
float vbOrthoQuadVertices [vbNumVertexValues];
float vbOrthoQuadTexCoords [vbNumTexCoord];
// NOTE(review): GLbyte is signed (max 127); this is only safe while the grid
// stays 10x10 (max vertex index 99). A larger mesh would overflow — confirm
// before changing vbNumVertexCols/Rows.
GLbyte vbOrthoQuadIndices [vbNumIndices];
// This will hold the data for the projection matrix passed to the vertex shader
float vbOrthoProjMatrix [16];
// These mark the spot where the user touches the screen (normalized; -100.0
// is used as an "ignore" sentinel by nativeTouchEvent)
float touchLocation_x, touchLocation_y;
/************************************************************/
/* Simple Shader */
/************************************************************/
// Vertex shader for the debug quads: plain MVP transform, UVs passed through.
const char* VertexSimple = " \
\
attribute vec4 vertexPosition; \
attribute vec2 vertexTexCoord; \
\
varying vec2 texCoord; \
\
uniform mat4 modelViewProjectionMatrix; \
\
void main() \
{ \
gl_Position = modelViewProjectionMatrix * vertexPosition; \
texCoord = vertexTexCoord; \
} \
";
// Fragment shader: straight texture lookup, no effects.
const char* FragmentSimple = " \
\
precision mediump float; \
\
varying vec2 texCoord; \
\
uniform sampler2D texSampler2D; \
\
void main() \
{ \
gl_FragColor = texture2D(texSampler2D, texCoord); \
} \
";
// Attribute names, in the order they appear in VertexSimple.
const char *attrSimple[] = {
"vertexPosition",
"vertexTexCoord",
};
// Uniform names used by the simple program.
const char *unifSimple[] = {
"modelViewProjectionMatrix",
"texSampler2D",
};
// Simple Shader vars: program id and cached attribute/uniform locations,
// filled in by SimpleShaderBuilder().
unsigned int simpleShaderProgramID = 0;
GLint vertexSimpleHandle = 0;
GLint textureSimpleCoordHandle = 0;
GLint mvpMatrixSimpleHandle = 0;
GLint texSamplerSimple2DHandle = 0;
/*
* Registering the Shader
*/
void SimpleShaderBuilder()
{
    LOG("Simple Shader Builder");
    // Compile and link the simple vertex/fragment pair declared above.
    simpleShaderProgramID =
        SampleUtils::createProgramFromBuffer(VertexSimple, FragmentSimple);
    // Cache attribute/uniform locations once so the render loop never has
    // to query them again.
    vertexSimpleHandle       = glGetAttribLocation(simpleShaderProgramID, "vertexPosition");
    textureSimpleCoordHandle = glGetAttribLocation(simpleShaderProgramID, "vertexTexCoord");
    mvpMatrixSimpleHandle    = glGetUniformLocation(simpleShaderProgramID, "modelViewProjectionMatrix");
    texSamplerSimple2DHandle = glGetUniformLocation(simpleShaderProgramID, "texSampler2D");
}
// Framebuffer related variables
GLuint fbId;          // offscreen framebuffer object (render-to-texture target)
GLint oldfbId;        // framebuffer that was bound before attachfb()
GLuint dbId;          // depth renderbuffer attached to fbId
GLint oldviewport[4]; // viewport saved by attachfb(), restored by detachfb()
int width, height;    // size of the offscreen buffer (screen size at createfb() time)
int channelCount;     // bytes per pixel used for the debug textures
GLuint debugTexture;  // color attachment of fbId; also drawn by writeToDebugBox()
/*
* Framebuffer generation to render on a texture
* it is attached to a texture as rendering surface
* and a depth buffer
*/
void createfb()
{
width = screenWidth;
height = screenHeight;
LOG(" FrameBuffer width [%d] height[%d]\n", width, height);
glGenFramebuffers(1, &fbId);
// Create depth renderbuffer
glGenRenderbuffers(1, &dbId);
glBindRenderbuffer(GL_RENDERBUFFER, dbId);
glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, width, height);
// texture setup
glGenTextures(1, &debugTexture);
channelCount = 4;
glBindTexture(GL_TEXTURE_2D, debugTexture);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, (GLvoid *) NULL);
// Bind to the framebuffer
glGetIntegerv(GL_FRAMEBUFFER_BINDING, &oldfbId);
glBindFramebuffer(GL_FRAMEBUFFER, fbId);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, debugTexture, 0);
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, dbId);
SampleUtils::checkGlError(" end ScreenTexture");
GLenum status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
if(status != GL_FRAMEBUFFER_COMPLETE) LOG(" FrameBuffer [%d] is not in the right state: error flag [%d]\n", fbId, status);
glBindFramebuffer(GL_FRAMEBUFFER, oldfbId);
}
/*
* Attach the fb for texture rendering
*/
void attachfb()
{
    // Remember the currently bound framebuffer so detachfb() can restore it.
    glGetIntegerv(GL_FRAMEBUFFER_BINDING, &oldfbId);
    // Redirect subsequent draws into the offscreen framebuffer.
    glBindFramebuffer(GL_FRAMEBUFFER, fbId);
    // Save the on-screen viewport, then size the viewport to the offscreen buffer.
    glGetIntegerv(GL_VIEWPORT, oldviewport);
    glViewport(0, 0, width, height);
    SampleUtils::checkGlError(" end attachfb");
}
/*
* Detach the fb for texture rendering
*/
void detachfb()
{
    // Restore the viewport saved by attachfb()...
    glViewport(oldviewport[0], oldviewport[1], oldviewport[2], oldviewport[3]);
    // ...and rebind whatever framebuffer was active before (normally the
    // system/window framebuffer).
    glBindFramebuffer(GL_FRAMEBUFFER, oldfbId);
    SampleUtils::checkGlError(" end detachfb");
}
/*
 * Vertex coords (x,y pixel pairs) for the big debug quad that shows the
 * offscreen texture; spans the whole 400x800 logical 2D canvas.
 */
float qdbg[] = {
0,0,
400,0,
400,800,
0,800
};
/*
 * Texture coords (u,v) for both debug quads — the full [0,1] range.
 */
float qdbgtxt[] = {
0,0,
1,0,
1,1,
0,1
};
/*
 * Indices: two triangles forming one quad.
 */
unsigned short qidx[] = {
0,1,2,
0,2,3
};
// Orthographic projection for the 2D debug quads, built in configViewportMode().
float ortho_projection[16];
/*
* Standard Orthoprojection matrix builder to render in 2D
*/
void buildOrhtoProjectionMatrix(float *Matrix, const float left, const float right, const float bottom, const float top, const float near, const float far)
{
    // Start from an all-zero matrix, then fill in the non-zero entries of a
    // standard column-major orthographic projection (glOrtho-equivalent).
    for (int k = 0; k < 16; ++k)
        Matrix[k] = 0.0;
    // Diagonal scale terms
    Matrix[0]  = 2.0 / (right - left);
    Matrix[5]  = 2.0 / (top - bottom);
    Matrix[10] = -2.0 / (far - near);
    Matrix[15] = 1;
    // Translation terms (fourth column)
    Matrix[12] = -(right + left) / (right - left);
    Matrix[13] = -(top + bottom) / (top - bottom);
    Matrix[14] = -(far + near) / (far - near);
}
/*
 * A small solid-red quad used as a visual marker to identify the fb:
 * texture id, RGBA color, and its vertex coords (a 16x16-pixel square
 * at (32,32) in the 400x800 debug canvas).
 */
GLuint redquad;
unsigned char redcolor[] = {0x80, 0x00, 0x00, 0xFF};
float qred[] = {
32,32,
48,32,
48,48,
32,48
};
/*
* Debug quad one-time configuration
*/
void configViewportMode()
{
    // Fill a 16x16 RGBA pixel buffer with the debug color.
    unsigned char buf[16 * 16 * 4];
    for (int px = 0; px < 16 * 16; ++px)
        memcpy(buf + px * 4, redcolor, 4);
    // Upload it as the red marker texture.
    glGenTextures(1, &redquad);
    channelCount = 4;
    glBindTexture(GL_TEXTURE_2D, redquad);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 16, 16, 0, GL_RGBA, GL_UNSIGNED_BYTE, (GLvoid *) buf);
    // 2D projection spanning the 400x800 logical canvas used by the debug quads.
    buildOrhtoProjectionMatrix(ortho_projection, 0, 400, 0, 800, -1, 1);
}
/*
* Renderer function to draw the debug quad on screen
*/
void writeToDebugBox(GLuint texID)
{
    // Save and override the viewport: the debug quads live in a fixed
    // 400x800 logical 2D canvas.
    GLint oldviewport[4];
    glGetIntegerv(GL_VIEWPORT, oldviewport);
    glViewport(0, 0, 400, 800);
    glDisable(GL_DEPTH_TEST);
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, texID);
    glUseProgram(simpleShaderProgramID);
    // Select the quad geometry. Default to the big quad: previously
    // quadcoords was left uninitialized when texID matched neither known
    // texture, which passed an indeterminate pointer to GL (undefined
    // behaviour).
    float *quadcoords = qdbg;
    if (texID == redquad)
        quadcoords = qred;
    glVertexAttribPointer(vertexSimpleHandle, 2, GL_FLOAT, GL_FALSE, 0,
        (const GLvoid*) quadcoords);
    glVertexAttribPointer(textureSimpleCoordHandle, 2, GL_FLOAT, GL_FALSE, 0,
        (const GLvoid*) qdbgtxt);
    glEnableVertexAttribArray(vertexSimpleHandle);
    glEnableVertexAttribArray(textureSimpleCoordHandle);
    glUniformMatrix4fv(mvpMatrixSimpleHandle, 1, GL_FALSE,
        (GLfloat*)ortho_projection);
    // Explicitly point the sampler at unit 0; the original relied on the
    // uniform's default value of 0.
    glUniform1i(texSamplerSimple2DHandle, 0 /*GL_TEXTURE0*/);
    glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT,
        (const GLvoid*)qidx);
    glDisableVertexAttribArray(vertexSimpleHandle);
    glDisableVertexAttribArray(textureSimpleCoordHandle);
    // Restore the GL state we changed.
    glEnable(GL_DEPTH_TEST);
    glViewport(oldviewport[0], oldviewport[1], oldviewport[2], oldviewport[3]);
}
////////////////////////////////////////////////////////////////////////////////
// This function creates the shader program with the vertex and fragment shaders
// defined in Shader.h. It also gets handles to the position of the variables
// for later usage. It also defines a standard orthographic projection matrix
void
VBSetupShaderProgram()
{
    // Throw away any previously linked program before rebuilding.
    if (vbShaderProgramID != 0)
        glDeleteProgram(vbShaderProgramID);
    vbShaderProgramID = createShaderProgramFromBuffers(vertexShaderSrc, fragmentShaderSrc);
    if (vbShaderProgramID == 0)
    {
        LOG("Could not initialize video background shader for the effects");
        return;
    }
    // Activate the program while querying its variable locations.
    glUseProgram(vbShaderProgramID);
    // Cache attribute locations...
    vbVertexPositionHandle   = glGetAttribLocation(vbShaderProgramID, "vertexPosition");
    vbVertexTexCoordHandle   = glGetAttribLocation(vbShaderProgramID, "vertexTexCoord");
    // ...and uniform locations used every frame by renderFrame().
    vbTexSampler2DHandle     = glGetUniformLocation(vbShaderProgramID, "texSampler2D");
    vbProjectionMatrixHandle = glGetUniformLocation(vbShaderProgramID, "projectionMatrix");
    vbTouchLocationXHandle   = glGetUniformLocation(vbShaderProgramID, "touchLocation_x");
    vbTouchLocationYHandle   = glGetUniformLocation(vbShaderProgramID, "touchLocation_y");
    checkGLError("Getting the handles to the shader variables");
    // The background mesh spans [-1,1] in X/Y, so a unit ortho matrix suffices.
    setOrthoMatrix(-1.0, 1.0, -1.0, 1.0, -1.0, 1.0, vbOrthoProjMatrix);
    // Stop using the program and mark the setup as done.
    glUseProgram(0);
    vbShadersInitialized = true;
}
////////////////////////////////////////////////////////////////////////////////
// This function adds the values to the vertex, coord and indices variables.
// Essentially it defines a mesh from -1 to 1 in X and Y with
// vbNumVertexRows rows and vbNumVertexCols columns. Thus, if we were to assign
// vbNumVertexRows=10 and vbNumVertexCols=10 we would have a mesh composed of
// 100 little squares (notice, however, that we work with triangles so it is
// actually not composed of 100 squares but of 200 triangles). The example
// below shows 4 triangles composing 2 squares.
// D---E---F
// | \ | \ |
// A---B---C
void
CreateVideoBackgroundMesh()
{
// Get the texture and image dimensions from QCAR
const QCAR::VideoBackgroundTextureInfo texInfo = QCAR::Renderer::getInstance().getVideoBackgroundTextureInfo();
// Detect if the renderer is reporting reflected pose info, possibly due to usage of the front camera.
// If so, we need to reflect the image of the video background to match the pose.
const QCAR::VIDEO_BACKGROUND_REFLECTION reflection = QCAR::Renderer::getInstance().getVideoBackgroundConfig().mReflection;
const float reflectionOffset = vbNumVertexCols - 1;
// If there is no image data yet then return (vbMeshInitialized stays false,
// so renderFrame() will retry on the next frame);
if ((texInfo.mImageSize.data[0] == 0)||(texInfo.mImageSize.data[1] == 0)) return;
// These calculate a slope for the texture coords: the camera image usually
// occupies only part of the (power-of-two) texture, so UVs are scaled by the
// image/texture size ratio.
float uRatio = ((float)texInfo.mImageSize.data[0]/(float)texInfo.mTextureSize.data[0]);
float vRatio = ((float)texInfo.mImageSize.data[1]/(float)texInfo.mTextureSize.data[1]);
float uSlope = uRatio/(vbNumVertexCols-1);
float vSlope = vRatio/(vbNumVertexRows-1);
// These calculate a slope for the vertex values in this case we have a span of 2, from -1 to 1
float totalSpan = 2.0f;
float colSlope = totalSpan/(vbNumVertexCols-1);
float rowSlope = totalSpan/(vbNumVertexRows-1);
// Some helper variables: running write positions into the index, vertex and
// texcoord arrays, plus the current vertex number.
int currentIndexPosition = 0;
int currentVertexPosition = 0;
int currentCoordPosition = 0;
int currentVertexIndex = 0;
for (int j = 0; j<vbNumVertexRows; j++)
{
for (int i = 0; i<vbNumVertexCols; i++)
{
// We populate the mesh with a regular grid
vbOrthoQuadVertices[currentVertexPosition /*X*/] = ((colSlope*i)-(totalSpan/2.0f)); // We subtract this because the values range from -totalSpan/2 to totalSpan/2
vbOrthoQuadVertices[currentVertexPosition+1 /*Y*/] = ((rowSlope*j)-(totalSpan/2.0f));
vbOrthoQuadVertices[currentVertexPosition+2 /*Z*/] = 0.0f; // It is all a flat polygon orthogonal to the view vector
// We also populate its associated texture coordinate. In portrait mode the
// U/V axes are swapped relative to landscape; reflection (front camera)
// additionally mirrors one axis.
if (isActivityInPortraitMode)
{
vbOrthoQuadTexCoords[currentCoordPosition /*U*/] = uRatio - (uSlope*j);
vbOrthoQuadTexCoords[currentCoordPosition+1 /*V*/] = vRatio - ((reflection == QCAR::VIDEO_BACKGROUND_REFLECTION_ON) ? vSlope*(reflectionOffset - i) : vSlope*i);
}
else /*Landscape*/
{
vbOrthoQuadTexCoords[currentCoordPosition /*U*/] = (reflection == QCAR::VIDEO_BACKGROUND_REFLECTION_ON) ? uSlope*(reflectionOffset - i) : uSlope*i;
vbOrthoQuadTexCoords[currentCoordPosition+1 /*V*/] = vRatio - (vSlope*j);
}
// Now we populate the triangles that compose the mesh
// First triangle is the upper right of the vertex
if (j<vbNumVertexRows-1)
{
if (i<vbNumVertexCols-1) // In the example above this would make triangles ABD and BCE
{
vbOrthoQuadIndices[currentIndexPosition ] = currentVertexIndex;
vbOrthoQuadIndices[currentIndexPosition+1] = currentVertexIndex+1;
vbOrthoQuadIndices[currentIndexPosition+2] = currentVertexIndex+vbNumVertexCols;
currentIndexPosition += 3;
}
if (i>0) // In the example above this would make triangles BED and CFE
{
vbOrthoQuadIndices[currentIndexPosition ] = currentVertexIndex;
vbOrthoQuadIndices[currentIndexPosition+1] = currentVertexIndex+vbNumVertexCols;
vbOrthoQuadIndices[currentIndexPosition+2] = currentVertexIndex+vbNumVertexCols-1;
currentIndexPosition += 3;
}
}
currentVertexPosition += 3; // Three values per vertex (x,y,z)
currentCoordPosition += 2; // Two texture coordinates per vertex (u,v)
currentVertexIndex += 1; // Vertex index increased by one
}
}
// Mark the mesh as ready so renderFrame() stops calling this function.
vbMeshInitialized = true;
}
// Cache the current orientation flag from the Java side; the video-background
// mesh and viewport code read it later.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_setActivityPortraitMode(JNIEnv *, jobject, jboolean isPortrait)
{
    isActivityInPortraitMode = isPortrait ? true : false;
}
// Initialize the image tracker; returns 1 on success, 0 on failure.
JNIEXPORT int JNICALL
Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_initTracker(JNIEnv *, jobject)
{
    LOG("Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_initTracker");
    // Ask the tracker manager to create an image tracker instance.
    QCAR::Tracker* tracker =
        QCAR::TrackerManager::getInstance().initTracker(QCAR::Tracker::IMAGE_TRACKER);
    if (tracker == NULL)
    {
        LOG("Failed to initialize ImageTracker.");
        return 0;
    }
    LOG("Successfully initialized ImageTracker.");
    return 1;
}
// Tear down the image tracker created by initTracker().
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_deinitTracker(JNIEnv *, jobject)
{
    LOG("Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_deinitTracker");
    QCAR::TrackerManager::getInstance().deinitTracker(QCAR::Tracker::IMAGE_TRACKER);
}
// Create, load and activate the "Tarmac" tracking data set.
// Returns 1 on success, 0 on any failure (each failure is logged).
JNIEXPORT int JNICALL
Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_loadTrackerData(JNIEnv *, jobject)
{
    LOG("Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_loadTrackerData");
    // The image tracker must have been created by initTracker() first.
    QCAR::ImageTracker* tracker = static_cast<QCAR::ImageTracker*>(
        QCAR::TrackerManager::getInstance().getTracker(QCAR::Tracker::IMAGE_TRACKER));
    if (tracker == NULL)
    {
        LOG("Failed to load tracking data set because the ImageTracker has not been initialized.");
        return 0;
    }
    // Create an empty data set owned by the tracker.
    dataSetTarmac = tracker->createDataSet();
    if (dataSetTarmac == 0)
    {
        LOG("Failed to create a new tracking data.");
        return 0;
    }
    // Fill it from the XML bundled in the app resources.
    if (!dataSetTarmac->load("Tarmac.xml", QCAR::DataSet::STORAGE_APPRESOURCE))
    {
        LOG("Failed to load data set.");
        return 0;
    }
    // Make it the active data set so targets can be detected.
    if (!tracker->activateDataSet(dataSetTarmac))
    {
        LOG("Failed to activate data set.");
        return 0;
    }
    LOG("Successfully loaded and activated data set.");
    return 1;
}
// Deactivate and destroy the "Tarmac" data set loaded by loadTrackerData().
// Returns 1 on success, 0 on failure or when there is nothing to destroy.
JNIEXPORT int JNICALL
Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_destroyTrackerData(JNIEnv *, jobject)
{
    LOG("Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_destroyTrackerData");
    QCAR::ImageTracker* tracker = static_cast<QCAR::ImageTracker*>(
        QCAR::TrackerManager::getInstance().getTracker(QCAR::Tracker::IMAGE_TRACKER));
    if (tracker == NULL)
    {
        LOG("Failed to destroy the tracking data set because the ImageTracker has not been initialized.");
        return 0;
    }
    if (dataSetTarmac == 0)
    {
        LOG("No tracker data set to destroy.");
        return 0;
    }
    // A data set must be deactivated before it can be destroyed.
    if (!tracker->deactivateDataSet(dataSetTarmac))
    {
        LOG("Failed to destroy the tracking data set Tarmac because the data set could not be deactivated.");
        return 0;
    }
    if (!tracker->destroyDataSet(dataSetTarmac))
    {
        LOG("Failed to destroy the tracking data set Tarmac.");
        return 0;
    }
    LOG("Successfully destroyed the data set Tarmac.");
    dataSetTarmac = 0;
    return 1;
}
// Hook called from Java once QCAR has finished initializing. Intentionally
// empty in this sample; the commented-out hint below shows how to enable
// multi-target tracking.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_onQCARInitializedNative(JNIEnv *, jobject)
{
// Comment in to enable tracking of up to 2 targets simultaneously and
// split the work over multiple frames:
// QCAR::setHint(QCAR::HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 2);
}
// Per-frame render callback (GL thread). Order matters here:
// 1. attachfb()  — redirect rendering into the offscreen framebuffer
// 2. draw the camera video background through the custom shader
// 3. writeToDebugBox(redquad) — red marker drawn INTO the offscreen buffer
// 4. detachfb()  — back to the window framebuffer
// 5. writeToDebugBox(debugTexture) — show the offscreen result on screen
// 6. draw the augmentation (teapot) for each tracked target
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccessRenderer_renderFrame(JNIEnv *, jobject)
{
attachfb(); // LINE ADDED BY BLOOM
// Clear color and depth buffer
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Get the state from QCAR and mark the beginning of a rendering section
QCAR::State state = QCAR::Renderer::getInstance().begin();
/////////////////////////////////////////////////////////////////
// This section renders the video background with a
// custom shader defined in Shaders.h
GLuint vbVideoTextureUnit = 0;
// Bind the camera image to texture unit 0.
QCAR::Renderer::getInstance().bindVideoBackground(vbVideoTextureUnit);
glDisable(GL_DEPTH_TEST);
glDisable(GL_CULL_FACE);
// Set the viewport
glViewport(viewportPosition_x, viewportPosition_y, viewportSize_x, viewportSize_y);
// We need a finer mesh for this background
// We have to create it here because it will request the texture info of the video background
// (which is only valid once the camera is running).
if (!vbMeshInitialized)
CreateVideoBackgroundMesh();
// Load the shader and upload the vertex/texcoord/index data
glUseProgram(vbShaderProgramID);
glVertexAttribPointer(vbVertexPositionHandle, 3, GL_FLOAT, GL_FALSE, 0, vbOrthoQuadVertices);
glVertexAttribPointer(vbVertexTexCoordHandle, 2, GL_FLOAT, GL_FALSE, 0, vbOrthoQuadTexCoords);
glUniform1i(vbTexSampler2DHandle, vbVideoTextureUnit);
glUniformMatrix4fv(vbProjectionMatrixHandle, 1, GL_FALSE, &vbOrthoProjMatrix[0]);
// Touch position forwarded to the shader (see nativeTouchEvent for the
// coordinate convention and the -100.0 "ignore" sentinel).
glUniform1f(vbTouchLocationXHandle, (touchLocation_x*2.0)-1.0);
glUniform1f(vbTouchLocationYHandle, (2.0-(touchLocation_y*2.0))-1.0);
// Render the video background with the custom shader
// First, we enable the vertex arrays
glEnableVertexAttribArray(vbVertexPositionHandle);
glEnableVertexAttribArray(vbVertexTexCoordHandle);
// Then, we issue the render call
glDrawElements(GL_TRIANGLES, vbNumIndices, GL_UNSIGNED_BYTE, vbOrthoQuadIndices);
// Finally, we disable the vertex arrays
glDisableVertexAttribArray(vbVertexPositionHandle);
glDisableVertexAttribArray(vbVertexTexCoordHandle);
// Wrap up this rendering
glUseProgram(0);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, 0);
SampleUtils::checkGlError("Rendering of the background failed");
//
/////////////////////////////////////////////////////////////////
writeToDebugBox(redquad); // LINE ADDED BY BLOOM
detachfb(); // LINE ADDED BY BLOOM
writeToDebugBox(debugTexture);// LINE ADDED BY BLOOM
/////////////////////////////////////////////////////////////////
// The following section is similar to image targets
// we still render the teapot on top of the targets
glEnable(GL_DEPTH_TEST);
// We must detect if background reflection is active and adjust the culling direction.
// If the reflection is active, this means the post matrix has been reflected as well,
// therefore standard counter clockwise face culling will result in "inside out" models.
glEnable(GL_CULL_FACE);
glCullFace(GL_BACK);
if(QCAR::Renderer::getInstance().getVideoBackgroundConfig().mReflection == QCAR::VIDEO_BACKGROUND_REFLECTION_ON)
glFrontFace(GL_CW); //Front camera
else
glFrontFace(GL_CCW); //Back camera
// Did we find any trackables this frame?
for(int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++)
{
// Get the trackable:
const QCAR::TrackableResult* trackableResult = state.getTrackableResult(tIdx);
QCAR::Matrix44F modelViewMatrix =
QCAR::Tool::convertPose2GLMatrix(trackableResult->getPose());
// Choose the texture based on the target name:
// (this sample always uses the first texture)
int textureIndex = 0;
const Texture* const thisTexture = textures[textureIndex];
QCAR::Matrix44F modelViewProjection;
SampleUtils::translatePoseMatrix(0.0f, 0.0f, kObjectScale,
&modelViewMatrix.data[0]);
SampleUtils::scalePoseMatrix(kObjectScale, kObjectScale, kObjectScale,
&modelViewMatrix.data[0]);
SampleUtils::multiplyMatrix(&projectionMatrix.data[0],
&modelViewMatrix.data[0] ,
&modelViewProjection.data[0]);
// Draw the textured teapot at the target's pose.
glUseProgram(shaderProgramID);
glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0,
(const GLvoid*) &teapotVertices[0]);
glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0,
(const GLvoid*) &teapotNormals[0]);
glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0,
(const GLvoid*) &teapotTexCoords[0]);
glEnableVertexAttribArray(vertexHandle);
glEnableVertexAttribArray(normalHandle);
glEnableVertexAttribArray(textureCoordHandle);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, thisTexture->mTextureID);
glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE,
(GLfloat*)&modelViewProjection.data[0] );
glUniform1i(texSampler2DHandle, 0 /*GL_TEXTURE0*/);
glDrawElements(GL_TRIANGLES, NUM_TEAPOT_OBJECT_INDEX, GL_UNSIGNED_SHORT,
(const GLvoid*) &teapotIndices[0]);
glDisableVertexAttribArray(vertexHandle);
glDisableVertexAttribArray(normalHandle);
glDisableVertexAttribArray(textureCoordHandle);
SampleUtils::checkGlError("BackgroundTextureAccess renderFrame");
}
glDisable(GL_DEPTH_TEST);
//
/////////////////////////////////////////////////////////////////
// It is always important to tell the QCAR Renderer
// that we are finished
QCAR::Renderer::getInstance().end();
}
// Configures the QCAR video background so the camera image fills the screen
// while preserving the video aspect ratio, and records the resulting
// viewport (position/size) for renderFrame().
void
configureVideoBackground()
{
// Get the default video mode:
QCAR::CameraDevice& cameraDevice = QCAR::CameraDevice::getInstance();
QCAR::VideoMode videoMode = cameraDevice.
getVideoMode(QCAR::CameraDevice::MODE_DEFAULT);
// Configure the video background
QCAR::VideoBackgroundConfig config;
config.mEnabled = true;
config.mSynchronous = true;
config.mPosition.data[0] = 0.0f;
config.mPosition.data[1] = 0.0f;
// Scale the video so one axis matches the screen exactly; if the other axis
// then falls short, scale up so the screen is fully covered (cropping video).
if (isActivityInPortraitMode)
{
//LOG("configureVideoBackground PORTRAIT");
// In portrait the video is rotated 90 degrees, hence width/height swap.
config.mSize.data[0] = videoMode.mHeight
* (screenHeight / (float)videoMode.mWidth);
config.mSize.data[1] = screenHeight;
if(config.mSize.data[0] < screenWidth)
{
LOG("Correcting rendering background size to handle missmatch between screen and video aspect ratios.");
config.mSize.data[0] = screenWidth;
config.mSize.data[1] = screenWidth *
(videoMode.mWidth / (float)videoMode.mHeight);
}
}
else
{
//LOG("configureVideoBackground LANDSCAPE");
config.mSize.data[0] = screenWidth;
config.mSize.data[1] = videoMode.mHeight
* (screenWidth / (float)videoMode.mWidth);
if(config.mSize.data[1] < screenHeight)
{
LOG("Correcting rendering background size to handle missmatch between screen and video aspect ratios.");
config.mSize.data[0] = screenHeight
* (videoMode.mWidth / (float)videoMode.mHeight);
config.mSize.data[1] = screenHeight;
}
}
// NOTE(review): mSize.data is logged with %d — fine if the SDK declares it
// as integers; verify against the VideoBackgroundConfig header.
LOG("Configure Video Background : Video (%d,%d), Screen (%d,%d), mSize (%d,%d)", videoMode.mWidth, videoMode.mHeight, screenWidth, screenHeight, config.mSize.data[0], config.mSize.data[1]);
// Center the background on screen and remember the viewport for renderFrame().
viewportPosition_x = (((int)(screenWidth - config.mSize.data[0])) / (int) 2) + config.mPosition.data[0];
viewportPosition_y = (((int)(screenHeight - config.mSize.data[1])) / (int) 2) + config.mPosition.data[1];
viewportSize_x = config.mSize.data[0];
viewportSize_y = config.mSize.data[1];
// Set the config:
QCAR::Renderer::getInstance().setVideoBackgroundConfig(config);
}
// Called from Java with the screen size; stores it and pulls the texture
// objects across JNI via the activity's getTextureCount()/getTexture(int).
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_initApplicationNative(
JNIEnv* env, jobject obj, jint width, jint height)
{
LOG("Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_initApplicationNative");
// Store screen dimensions
screenWidth = width;
screenHeight = height;
// Handle to the activity class:
jclass activityClass = env->GetObjectClass(obj);
jmethodID getTextureCountMethodID = env->GetMethodID(activityClass,
"getTextureCount", "()I");
// NOTE(review): when GetMethodID fails it leaves a pending Java exception;
// returning without ExceptionClear() here relies on the caller handling it —
// confirm against the Java side.
if (getTextureCountMethodID == 0)
{
LOG("Function getTextureCount() not found.");
return;
}
textureCount = env->CallIntMethod(obj, getTextureCountMethodID);
if (!textureCount)
{
LOG("getTextureCount() returned zero.");
return;
}
textures = new Texture*[textureCount];
jmethodID getTextureMethodID = env->GetMethodID(activityClass,
"getTexture", "(I)Lcom/qualcomm/QCARSamples/BackgroundTextureAccess/Texture;");
if (getTextureMethodID == 0)
{
LOG("Function getTexture() not found.");
return;
}
// Register the textures
// NOTE(review): if getTexture() returns null mid-loop, textures created so
// far stay allocated until deinitApplicationNative — presumably acceptable
// for this sample, but verify.
for (int i = 0; i < textureCount; ++i)
{
jobject textureObject = env->CallObjectMethod(obj, getTextureMethodID, i);
if (textureObject == NULL)
{
LOG("GetTexture() returned zero pointer");
return;
}
textures[i] = Texture::create(env, textureObject);
}
LOG("Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_initApplicationNative finished");
}
// Frees every Texture wrapper created in initApplicationNative and resets
// the bookkeeping globals.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_deinitApplicationNative(
JNIEnv* env, jobject obj)
{
    LOG("Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_deinitApplicationNative");
    if (textures == 0)
        return;
    // Release texture resources one by one, then the pointer array itself.
    for (int idx = 0; idx < textureCount; ++idx)
    {
        delete textures[idx];
        textures[idx] = NULL;
    }
    delete[] textures;
    textures = NULL;
    textureCount = 0;
}
// Opens and starts the camera, configures the video background, and starts
// the image tracker. Bails out silently on any camera failure (as the
// original sample does).
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_startCamera(JNIEnv *,
jobject)
{
    LOG("Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_startCamera");
    // Select the camera to open, set this to QCAR::CameraDevice::CAMERA_FRONT
    // to activate the front camera instead.
    QCAR::CameraDevice::CAMERA camera = QCAR::CameraDevice::CAMERA_DEFAULT;
    QCAR::CameraDevice& cameraDevice = QCAR::CameraDevice::getInstance();
    // Initialize the camera:
    if (!cameraDevice.init(camera))
        return;
    // Configure the video background
    configureVideoBackground();
    // Select the default mode:
    if (!cameraDevice.selectVideoMode(QCAR::CameraDevice::MODE_DEFAULT))
        return;
    // Start the camera:
    if (!cameraDevice.start())
        return;
    // Start the tracker:
    QCAR::Tracker* imageTracker =
        QCAR::TrackerManager::getInstance().getTracker(QCAR::Tracker::IMAGE_TRACKER);
    if (imageTracker != 0)
        imageTracker->start();
}
// Stops the tracker and the camera, and marks the video-background shader
// and mesh for re-initialization on the next resume.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_stopCamera(JNIEnv *,
jobject)
{
    LOG("Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_stopCamera");
    // Stop the tracker first so it no longer consumes camera frames.
    QCAR::Tracker* imageTracker =
        QCAR::TrackerManager::getInstance().getTracker(QCAR::Tracker::IMAGE_TRACKER);
    if (imageTracker != 0)
        imageTracker->stop();
    // Then shut the camera down.
    QCAR::CameraDevice::getInstance().stop();
    QCAR::CameraDevice::getInstance().deinit();
    // Force the reinitialization of shaders and mesh when we resume.
    vbShadersInitialized = false;
    vbMeshInitialized = false;
}
// Caches the GL projection matrix derived from the camera calibration,
// with near/far planes of 2..2500 scene units.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_setProjectionMatrix(JNIEnv *, jobject)
{
    LOG("Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_setProjectionMatrix");
    projectionMatrix = QCAR::Tool::getProjectionGL(
        QCAR::CameraDevice::getInstance().getCameraCalibration(), 2.0f, 2500.0f);
}
// Triggers a one-shot autofocus; reports whether the camera accepted it.
JNIEXPORT jboolean JNICALL
Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_autofocus(JNIEnv*, jobject)
{
    if (QCAR::CameraDevice::getInstance().setFocusMode(QCAR::CameraDevice::FOCUS_MODE_TRIGGERAUTO))
        return JNI_TRUE;
    return JNI_FALSE;
}
// Maps the Java-side focus-mode index (0..3) onto the QCAR focus-mode
// constant and applies it; returns JNI_FALSE for unknown indices or if the
// camera rejects the mode.
JNIEXPORT jboolean JNICALL
Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_setFocusMode(JNIEnv*, jobject, jint mode)
{
    int qcarFocusMode;
    if (mode == 0)
        qcarFocusMode = QCAR::CameraDevice::FOCUS_MODE_NORMAL;
    else if (mode == 1)
        qcarFocusMode = QCAR::CameraDevice::FOCUS_MODE_CONTINUOUSAUTO;
    else if (mode == 2)
        qcarFocusMode = QCAR::CameraDevice::FOCUS_MODE_INFINITY;
    else if (mode == 3)
        qcarFocusMode = QCAR::CameraDevice::FOCUS_MODE_MACRO;
    else
        return JNI_FALSE;
    return QCAR::CameraDevice::getInstance().setFocusMode(qcarFocusMode) ? JNI_TRUE : JNI_FALSE;
}
// GL-thread initialization: uploads the Java-supplied textures, builds the
// teapot shader, and (re)builds the video-background and debug shaders.
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccessRenderer_initRendering(
JNIEnv* env, jobject obj)
{
LOG("Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccessRenderer_initRendering");
// Define clear color (transparent when QCAR needs alpha, opaque black otherwise)
glClearColor(0.0f, 0.0f, 0.0f, QCAR::requiresAlpha() ? 0.0f : 1.0f);
// Now generate the OpenGL texture objects and add settings
for (int i = 0; i < textureCount; ++i)
{
glGenTextures(1, &(textures[i]->mTextureID));
glBindTexture(GL_TEXTURE_2D, textures[i]->mTextureID);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, textures[i]->mWidth,
textures[i]->mHeight, 0, GL_RGBA, GL_UNSIGNED_BYTE,
(GLvoid*) textures[i]->mData);
}
// Build the teapot shader and cache its attribute/uniform locations.
shaderProgramID = SampleUtils::createProgramFromBuffer(cubeMeshVertexShader,
cubeFragmentShader);
vertexHandle = glGetAttribLocation(shaderProgramID,
"vertexPosition");
normalHandle = glGetAttribLocation(shaderProgramID,
"vertexNormal");
textureCoordHandle = glGetAttribLocation(shaderProgramID,
"vertexTexCoord");
mvpMatrixHandle = glGetUniformLocation(shaderProgramID,
"modelViewProjectionMatrix");
texSampler2DHandle = glGetUniformLocation(shaderProgramID,
"texSampler2D");
// The video-background shader is only rebuilt after stopCamera() cleared
// the flag (or on first run).
if (!vbShadersInitialized)
VBSetupShaderProgram();
SimpleShaderBuilder();// LINE ADDED BY BLOOM
}
// Surface-size callback: records the new dimensions, then rebuilds everything
// that depends on them (video background config, debug quads, offscreen fb).
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccessRenderer_updateRendering(
JNIEnv* env, jobject obj, jint width, jint height)
{
    LOG("Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccessRenderer_updateRendering");
    screenWidth  = width;
    screenHeight = height;
    // Reconfigure the video background for the new surface size.
    configureVideoBackground();
    configViewportMode();// LINE ADDED BY BLOOM
    createfb();// LINE ADDED BY BLOOM
}
// Stores the latest touch position for the Loupe effect. Coordinates are
// expected in [-1,1]; some devices report touches beyond the screen, so
// anything outside that range is replaced by the -100.0 sentinel, which the
// shader treats as "no touch".
JNIEXPORT void JNICALL
Java_com_qualcomm_QCARSamples_BackgroundTextureAccess_BackgroundTextureAccess_nativeTouchEvent(JNIEnv *, jobject, jfloat x, jfloat y)
{
    touchLocation_x = ((x >= -1.0) && (x <= 1.0)) ? x : -100.0;
    touchLocation_y = ((y >= -1.0) && (y <= 1.0)) ? y : -100.0;
}
#ifdef __cplusplus
}
#endif
</pre>
Great. Thanks for sharing your final notes.