Hi,
I am working on rendering video on a target using the Cloud Recognition sample application (latest SDK 2.0.31). I am getting video on the target, but it flickers and is not positioned exactly at the center of the target. I need detailed steps for the changes required to fix it. Please check the code — I made my changes in the CloudReco renderFrameQCAR() method. Thanks in advance.
// NOTE(review): Forum-pasted excerpt of the CloudReco sample's per-frame
// render callback, modified by the poster to add VideoPlayback-style video
// rendering on a cloud-recognized target. The "..........." lines mark code
// elided by the poster (frame begin, trackable-result iteration, extraction
// of uniqueTargetId / trackableResult, etc.), so this method is incomplete
// as shown and relies on state set up in the elided portion.
- (void)renderFrameQCAR
{
..........
..........
// New target (ID differs from the last one scanned) and no network request
// in flight: remember the ID and switch the app into content mode.
if (![[CRQCARutils getInstance].lastTargetIDScanned isEqualToString:uniqueTargetId] && NO == [[BooksManager sharedInstance] isNetworkOperationInProgress])
{
[CRQCARutils getInstance].lastTargetIDScanned = uniqueTargetId;
// [self createContent:imageTargetTrackable];
enterContentMode();
}
else
{
// Already-known target: render the video playback augmentation.
// NOTE(review): playerIndex is hard-coded to 0, so only the first
// video player is ever driven even though videoData / videoPlayerHelper
// are arrays — confirm this is intended for the single-video case.
int playerIndex = 0;
// Mark this video (target) as active
videoData[playerIndex].isActive = YES;
// Get the target size (used to determine if taps are within the target)
// Fetched once and cached: both halved dimensions start at 0.0f, so this
// branch runs only until the target size has been read.
if (0.0f == videoData[playerIndex].targetPositiveDimensions.data[0] ||
0.0f == videoData[playerIndex].targetPositiveDimensions.data[1]) {
const QCAR::ImageTarget& imageTarget = (const QCAR::ImageTarget&) trackableResult->getTrackable();
videoData[playerIndex].targetPositiveDimensions = imageTarget.getSize();
// The pose delivers the centre of the target, thus the dimensions
// go from -width / 2 to width / 2, and -height / 2 to height / 2
videoData[playerIndex].targetPositiveDimensions.data[0] /= 2.0f;
videoData[playerIndex].targetPositiveDimensions.data[1] /= 2.0f;
}
// Get the current trackable pose
const QCAR::Matrix34F& trackablePose = trackableResult->getPose();
// This matrix is used to calculate the location of the screen tap
videoData[playerIndex].modelViewMatrix = QCAR::Tool::convertPose2GLMatrix(trackablePose);
float aspectRatio;
const GLvoid* texCoords;
GLuint frameTextureID;
BOOL displayVideoFrame = YES;
// Retain value between calls
// NOTE(review): this static is written only while it is still 0 (see the
// PLAYING case below) and reset to 0 in the switch default; if the player
// or its texture is ever recreated for a new target, a stale texture ID
// could persist here — worth verifying as a possible flicker source.
static GLuint videoTextureID[NUM_VIDEO_TARGETS] = {0};
MEDIA_STATE currentStatus = [videoPlayerHelper[playerIndex] getStatus];
// --- INFORMATION ---
// One could trigger automatic playback of a video at this point. This
// could be achieved by calling the play method of the VideoPlayerHelper
// object if currentStatus is not PLAYING. You should also call
// getStatus again after making the call to play, in order to update the
// value held in currentStatus.
// --- END INFORMATION ---
// Poster's change: auto-start playback as soon as the player reports
// READY. NOTE(review): assumes the BOOL argument of play: is the
// fullscreen flag (NO = on-texture playback) — confirm against the
// VideoPlayerHelper header in the VideoPlayback sample.
if(currentStatus ==READY)
{
[videoPlayerHelper[playerIndex] play:NO fromPosition:VIDEO_PLAYBACK_CURRENT_POSITION];
}
// Refresh the status so the switch below sees the post-play state.
currentStatus= [videoPlayerHelper[playerIndex] getStatus];
switch (currentStatus) {
case PLAYING: {
// If the tracking lost timer is scheduled, terminate it
if (nil != trackingLostTimer) {
// Timer termination must occur on the same thread on which
// it was installed
[self performSelectorOnMainThread:@selector(terminateTrackingLostTimer) withObject:nil waitUntilDone:YES];
}
// Upload the decoded video data for the latest frame to OpenGL
// and obtain the video texture ID
GLuint videoTexID = [videoPlayerHelper[playerIndex] updateVideoData];
if (0 == videoTextureID[playerIndex]) {
videoTextureID[playerIndex] = videoTexID;
}
// Fallthrough
}
// Intentional fallthrough from PLAYING: both states render the cached
// video texture when one is available.
case PAUSED:
if (0 == videoTextureID[playerIndex]) {
// No video texture available, display keyframe
displayVideoFrame = NO;
}
else {
// Display the texture most recently returned from the call
// to [videoPlayerHelper updateVideoData]
frameTextureID = videoTextureID[playerIndex];
}
break;
default:
// Any other state (NOT_READY, ERROR, STOPPED, ...): drop the cached
// texture and fall back to the keyframe image.
videoTextureID[playerIndex] = 0;
displayVideoFrame = NO;
break;
}
if (YES == displayVideoFrame) {
// ---- Display the video frame -----
aspectRatio = (float)[videoPlayerHelper[playerIndex] getVideoHeight] / (float)[videoPlayerHelper[playerIndex] getVideoWidth];
texCoords = videoQuadTextureCoords;
}
else {
// ----- Display the keyframe -----
Object3D* obj3D = [objects3D objectAtIndex:OBJECT_KEYFRAME_1 + playerIndex];
frameTextureID = [[obj3D texture] textureID];
aspectRatio = (float)[[obj3D texture] height] / (float)[[obj3D texture] width];
texCoords = quadTexCoords;
}
// If the current status is valid (not NOT_READY or ERROR), render the
// video quad with the texture we've just selected
if (NOT_READY != currentStatus) {
// Convert trackable pose to matrix for use with OpenGL
QCAR::Matrix44F modelViewMatrixVideo = QCAR::Tool::convertPose2GLMatrix(trackablePose);
QCAR::Matrix44F modelViewProjectionVideo;
// NOTE(review): the quad is translated along +Z by half the target
// *width* (data[0]) and scaled by the half-width on all three axes
// (height via the aspect ratio). If the video appears lifted off the
// target plane or off-centre, compare this translate/scale against the
// stock VideoPlayback sample — a Z offset of the half-width pushes the
// quad well above the target surface. TODO confirm with the original.
ShaderUtils::translatePoseMatrix(0.0f, 0.0f, videoData[playerIndex].targetPositiveDimensions.data[0],
&modelViewMatrixVideo.data[0]);
ShaderUtils::scalePoseMatrix(videoData[playerIndex].targetPositiveDimensions.data[0],
videoData[playerIndex].targetPositiveDimensions.data[0] * aspectRatio,
videoData[playerIndex].targetPositiveDimensions.data[0],
&modelViewMatrixVideo.data[0]);
ShaderUtils::multiplyMatrix(&qUtils.projectionMatrix.data[0],
&modelViewMatrixVideo.data[0] ,
&modelViewProjectionVideo.data[0]);
// Draw the textured quad (video frame or keyframe) over the target.
glUseProgram(shaderProgramID);
glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0, quadVertices);
glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0, quadNormals);
glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0, texCoords);
glEnableVertexAttribArray(vertexHandle);
glEnableVertexAttribArray(normalHandle);
glEnableVertexAttribArray(textureCoordHandle);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, frameTextureID);
glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE, (GLfloat*)&modelViewProjectionVideo.data[0]);
glUniform1i(texSampler2DHandle, 0 /*GL_TEXTURE0*/);
glDrawElements(GL_TRIANGLES, NUM_QUAD_INDEX, GL_UNSIGNED_SHORT, quadIndices);
glDisableVertexAttribArray(vertexHandle);
glDisableVertexAttribArray(normalHandle);
glDisableVertexAttribArray(textureCoordHandle);
glUseProgram(0);
}
// If the current status is not PLAYING, render an icon
if (PLAYING != currentStatus) {
GLuint iconTextureID;
// Choose the overlay icon texture from the player state.
switch (currentStatus) {
case READY:
case REACHED_END:
case PAUSED:
case STOPPED: {
// ----- Display play icon -----
Object3D* obj3D = [objects3D objectAtIndex:OBJECT_PLAY_ICON];
iconTextureID = [[obj3D texture] textureID];
break;
}
case ERROR: {
// ----- Display error icon -----
Object3D* obj3D = [objects3D objectAtIndex:OBJECT_ERROR_ICON];
iconTextureID = [[obj3D texture] textureID];
break;
}
default: {
// ----- Display busy icon -----
Object3D* obj3D = [objects3D objectAtIndex:OBJECT_BUSY_ICON];
iconTextureID = [[obj3D texture] textureID];
break;
}
}
// Convert trackable pose to matrix for use with OpenGL
QCAR::Matrix44F modelViewMatrixButton = QCAR::Tool::convertPose2GLMatrix(trackablePose);
QCAR::Matrix44F modelViewProjectionButton;
// Icon is lifted slightly above the video quad (SCALE_ICON_TRANSLATION)
// and scaled relative to the target's half-height (SCALE_ICON).
ShaderUtils::translatePoseMatrix(0.0f, 0.0f, videoData[playerIndex].targetPositiveDimensions.data[1] / SCALE_ICON_TRANSLATION, &modelViewMatrixButton.data[0]);
ShaderUtils::scalePoseMatrix(videoData[playerIndex].targetPositiveDimensions.data[1] / SCALE_ICON,
videoData[playerIndex].targetPositiveDimensions.data[1] / SCALE_ICON,
videoData[playerIndex].targetPositiveDimensions.data[1] / SCALE_ICON,
&modelViewMatrixButton.data[0]);
ShaderUtils::multiplyMatrix(&qUtils.projectionMatrix.data[0],
&modelViewMatrixButton.data[0] ,
&modelViewProjectionButton.data[0]);
// LEQUAL lets the icon draw at the same depth as the quad beneath it.
glDepthFunc(GL_LEQUAL);
glUseProgram(shaderProgramID);
glVertexAttribPointer(vertexHandle, 3, GL_FLOAT, GL_FALSE, 0, quadVertices);
glVertexAttribPointer(normalHandle, 3, GL_FLOAT, GL_FALSE, 0, quadNormals);
glVertexAttribPointer(textureCoordHandle, 2, GL_FLOAT, GL_FALSE, 0, quadTexCoords);
glEnableVertexAttribArray(vertexHandle);
glEnableVertexAttribArray(normalHandle);
glEnableVertexAttribArray(textureCoordHandle);
// Blend the icon over the background
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, iconTextureID);
glUniformMatrix4fv(mvpMatrixHandle, 1, GL_FALSE, (GLfloat*)&modelViewProjectionButton.data[0] );
glDrawElements(GL_TRIANGLES, NUM_QUAD_INDEX, GL_UNSIGNED_SHORT, quadIndices);
glDisable(GL_BLEND);
glDisableVertexAttribArray(vertexHandle);
glDisableVertexAttribArray(normalHandle);
glDisableVertexAttribArray(textureCoordHandle);
glUseProgram(0);
// Restore the default depth test used by the rest of the frame.
glDepthFunc(GL_LESS);
}
}
// Restore GL state for subsequent rendering.
glDisable(GL_BLEND);
glEnable(GL_DEPTH_TEST);
glEnable(GL_CULL_FACE);
}
Thanks & Regards,
P.Uthaman
Video Flickering on Target Image using Cloud Recognition
The easiest thing to do would be to PM me with a link to your project so I can download / build and see the problem, and then we can take it from there.
N
Video Flickering on Target Image using Cloud Recognition
Hi Nalin,
Please check the link below to download the project. I am using the same CloudReco media targets (target_awesomeaugmentions, ...) for recognition.
http://220.225.216.219:4000/Vid/Compressed/CloudReco-2-0-7-VT.zip
Issues:-
Video Flickering on Target Image using Cloud Recognition
Hi uthamanp
I think this is a good attempt at combining video playback and cloud recognition, so well done for attempting this.
1 - I saw no flicker when testing on an iPhone 5, so maybe the issue is specific to a particular device.
I am trying the Vuforia iOS SDK in Swift
I am trying the Vuforia iOS SDK in Swift. I have converted the image-recognition sample to Swift and it is working fine, but now I want to combine cloud recognition and video playback in my project, so that I can recognize an image from the cloud and play a video if that image is in the cloud database.
Hi Uthanmanp,
Hi Uthanmanp,
Can you please share the code for the cloud recognition + video playback project you are asking about?
The link you shared is no longer working.