By dewsxc
on Wed, 03/19/2014 - 03:10
Hi !
I tried to combine the two samples VideoPlayback and ImageTargets from Vuforia Sample Apps 2-8-8, using version 2-8-7 of the Vuforia SDK.
I tried other devices like the Samsung Galaxy S2, ASUS Transformer, Sony ST21i2, etc., and it works fine.
Only Samsung Galaxy S4 will have black texture when playing video on texture in my app, but Vuforia Sample App works fine.
Could someone help me a little? It's been driving me crazy for a long time.
I would appreciate even a small piece of feedback.
Here is code at rendering :
/**
 * Activates the shader program appropriate for the given model and issues the draw call.
 *
 * @param takeModelIndex index of the model to draw in {@code models}
 * @param textureID      GL texture name to sample from (an external OES texture when
 *                       {@code isVideoFrame} is true, a regular 2D texture otherwise)
 * @param projection     model-view-projection matrix uploaded to the shader
 * @param isVideoFrame   selects the video shader/handles and external-OES binding
 * @param alphaMode      enables standard src-alpha blending for the duration of the draw
 * @param isCover        temporarily relaxes the depth test to GL_LEQUAL while drawing
 */
private void shaderAndRenderModel(int takeModelIndex, int textureID, float[] projection,
        boolean isVideoFrame, boolean alphaMode, boolean isCover) {
    ModelMesh mesh = models.get(takeModelIndex);
    int indexCount = mesh.getNumIndex();
    // Geometry buffers for the mesh.
    Buffer vertexBuf = mesh.getVertices();
    Buffer normalBuf = mesh.getNormals();
    Buffer indexBuf = mesh.getIndices();
    // Video frames use a dedicated set of texture coordinates.
    Buffer texCoordBuf = isVideoFrame ? mesh.getVideoTextureCoords() : mesh.getTextureCoords();

    // Select the shader program and its attribute/uniform handles.
    final int program;
    final int hVertex;
    final int hNormal;
    final int hTexCoord;
    final int hSampler;
    final int hMvp;
    if (isVideoFrame) {
        program = shaderProgramIDForVideo; // shader program from the VideoPlayback sample app
        hVertex = videoVertexHandle;
        hNormal = videoNormalHandle;
        hTexCoord = videoVertexCoordHandle;
        hSampler = videoTexSampler2DHandle;
        hMvp = videoMvpMatrixHandle;
    } else {
        program = shaderProgramID; // shader program from the ImageTargets sample app
        hVertex = vertexHandle;
        hNormal = normalHandle;
        hTexCoord = vertexCoordHandle;
        hSampler = texSampler2DHandle;
        hMvp = mvpMatrixHandle;
    }

    if (isCover) {
        GLES20.glDepthFunc(GLES20.GL_LEQUAL);
    }
    GLES20.glUseProgram(program);
    GLES20.glVertexAttribPointer(hVertex, 3, GLES20.GL_FLOAT, false, 0, vertexBuf);
    GLES20.glVertexAttribPointer(hNormal, 3, GLES20.GL_FLOAT, false, 0, normalBuf);
    GLES20.glVertexAttribPointer(hTexCoord, 2, GLES20.GL_FLOAT, false, 0, texCoordBuf);
    GLES20.glEnableVertexAttribArray(hVertex);
    GLES20.glEnableVertexAttribArray(hNormal);
    GLES20.glEnableVertexAttribArray(hTexCoord);
    if (alphaMode) {
        GLES20.glEnable(GLES20.GL_BLEND);
        GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA);
    }
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    // Video frames come from a SurfaceTexture and must be bound to the external OES target,
    // not GL_TEXTURE_2D.
    if (isVideoFrame) {
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textureID);
    } else {
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureID);
    }
    GLES20.glUniformMatrix4fv(hMvp, 1, false, projection, 0);
    GLES20.glUniform1i(hSampler, 0);
    GLES20.glDrawElements(GLES20.GL_TRIANGLES, indexCount, GLES20.GL_UNSIGNED_SHORT, indexBuf);
    // Restore GL state in reverse order of the setup above.
    if (alphaMode) {
        GLES20.glDisable(GLES20.GL_BLEND);
    }
    GLES20.glDisableVertexAttribArray(hVertex);
    GLES20.glDisableVertexAttribArray(hNormal);
    GLES20.glDisableVertexAttribArray(hTexCoord);
    GLES20.glUseProgram(0);
    if (isCover) {
        GLES20.glDepthFunc(GLES20.GL_LESS);
    }
}
Here is init Shader
private void initShader() { // Normal used. shaderProgramID = SampleUtils.createProgramFromShaderSrc(CubeShaders.CUBE_MESH_VERTEX_SHADER, CubeShaders.CUBE_MESH_FRAGMENT_SHADER); vertexHandle = GLES20.glGetAttribLocation(shaderProgramID, ARParameter.AR_SHADER_VERTEXT_POSITION); normalHandle = GLES20.glGetAttribLocation(shaderProgramID, ARParameter.AR_SHADER_NORMAL_POSITION); vertexCoordHandle = GLES20.glGetAttribLocation(shaderProgramID, ARParameter.AR_SHADER_VERTEXT_TEXTURE_COORDINATE); texSampler2DHandle = GLES20.glGetUniformLocation(shaderProgramID, ARParameter.AR_SHADER_TEX_SAMPLER_2D); mvpMatrixHandle = GLES20.glGetUniformLocation(shaderProgramID, ARParameter.AR_SHADER_MODEL_VIEW_PROJECTION_MATRIX); // Video shaderProgramIDForVideo =SampleUtils.createProgramFromShaderSrc(VideoPlaybackShaders.VIDEO_PLAYBACK_VERTEX_SHADER, VideoPlaybackShaders.VIDEO_PLAYBACK_FRAGMENT_SHADER); videoVertexHandle = GLES20.glGetAttribLocation(shaderProgramIDForVideo, ARParameter.AR_SHADER_VERTEXT_POSITION); videoNormalHandle = GLES20.glGetAttribLocation(shaderProgramIDForVideo, ARParameter.AR_SHADER_NORMAL_POSITION);; videoVertexCoordHandle = GLES20.glGetAttribLocation(shaderProgramIDForVideo, ARParameter.AR_SHADER_VERTEXT_TEXTURE_COORDINATE); videoTexSampler2DHandle = GLES20.glGetUniformLocation(shaderProgramIDForVideo, ARParameter.AR_SHADER_TEX_SAMPLER_2D_VIDEO); // Different. videoMvpMatrixHandle = GLES20.glGetUniformLocation(shaderProgramIDForVideo, ARParameter.AR_SHADER_MODEL_VIEW_PROJECTION_MATRIX); }
Here is loading texture
public void loadToMemory(ArrayList<ARTarget> textures) { for (ARTarget tex : textures) { GLES20.glGenTextures(1, tex.textureID, 0); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex.textureID[0]); GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, tex.width, tex.height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, tex.pixelData); // Generate texture ID for video, it's VGLSurfaceView required. if (tex.getReactType() == ARTarget.REACTTYPE_PLAYING_VIDEO) { int videoTextureID[] = new int[1]; GLES20.glGenTextures(1, videoTextureID, 0); GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, videoTextureID[0]); GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0); // End bind. tex.initVideoPlayer(activity, videoTextureID[0]); } } }
Here is init VideoPlayerHelper
public class ARTarget extends Texture { static final public int REACTTYPE_NON = 0; static final public int REACTTYPE_PLAYING_VIDEO = 1; public VideoPlayerHelper videoHelper; public ARTarget(Activity activity) { // Do other thing.... if (reactType == REACTTYPE_PLAYING_VIDEO) { this.videoHelper = new VideoPlayerHelper(); this.videoHelper.init(); this.videoHelper.setActivity(activity); } } public void initVideoPlayer(Activity activity, int videoTextureID) { if (reactType == REACTTYPE_PLAYING_VIDEO && reactObjectName != null && videoTextureID != 0) { this.videoHelper.setupSurfaceTexture(videoTextureID); // Must set ID before load video. this.videoHelper.load(ARResourceName.AR_TARGET_RESOURCE_FOLDER + reactObjectName, MEDIA_TYPE.ON_TEXTURE, false, VideoPlayerHelper.CURRENT_POSITION); } } }
Thanks for your time.
Black Texture when use Samsung Galaxy 4
How many videos are you playing ?
It could be that you are using too much graphics memory for the textures (Vuforia camera video background + video textures + others...), and this is potentially causing an issue on specific devices.
Black Texture when use Samsung Galaxy 4
I render only one texture at a time (just that video).
You give me a good direction, maybe it's about memory management.
Thanks for that tool, I'll try to use.