By mhosny
on Thu, 06/26/2014 - 20:15
I'm trying to draw a texture on the screen, centered on the trackable image.
I use this code for shaders:
package com.UnseenEgypt.ueg.Utils; import android.opengl.GLES20; public class riGraphicTools { /* SHADER Image * * This shader is for rendering 2D images straight from a texture * No additional effects. * */ public static final String vs_Image = "uniform mat4 uMVPMatrix;" + "attribute vec4 vPosition;" + "attribute vec2 a_texCoord;" + "varying vec2 v_texCoord;" + "void main() {" + " gl_Position = uMVPMatrix * vPosition;" + " v_texCoord = a_texCoord;" + "}"; public static final String fs_Image = "precision mediump float;" + "varying vec2 v_texCoord;" + "uniform sampler2D s_texture;" + "void main() {" + " gl_FragColor = texture2D( s_texture, v_texCoord );" + "}"; public static int loadShader(int type, String shaderCode){ // create a vertex shader type (GLES20.GL_VERTEX_SHADER) // or a fragment shader type (GLES20.GL_FRAGMENT_SHADER) int shader = GLES20.glCreateShader(type); // add the source code to the shader and compile it GLES20.glShaderSource(shader, shaderCode); GLES20.glCompileShader(shader); // return the shader return shader; } }
And this code for texture rendering:
package com.UnseenEgypt.ueg.Utils;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.ShortBuffer;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES20;
import android.opengl.GLUtils;

/**
 * A textured 2D quad drawn with the riGraphicTools image shader. The quad is
 * sized to the loaded bitmap (in pixels) and centered on the origin; callers
 * position it via the matrix passed to {@link #Render(float[])}.
 */
public class Sprite {

    // Geometric variables.
    // NOTE(review): these are static, so every Sprite instance overwrites the
    // same arrays. That is harmless while exactly one Sprite exists, but they
    // should become instance fields before a second sprite is ever created.
    // Kept static here to avoid breaking external references.
    public static float vertices[];
    public static short indices[];
    public static float uvs[];
    public FloatBuffer vertexBuffer;
    public ShortBuffer drawListBuffer;
    public FloatBuffer uvBuffer;

    private int imageShaderProgram;    // linked GL program handle
    private int mTextureHandle;        // GL texture name returned by SetupImage
    private Context mContext;
    private int tWidth, tHeight;       // bitmap dimensions in pixels
    private float centerX, centerY;    // screen-space center, set by the renderer

    public int gettWidth() {
        return tWidth;
    }

    public void settWidth(int tWidth) {
        this.tWidth = tWidth;
    }

    public int gettHeight() {
        return tHeight;
    }

    public void settHeight(int tHeight) {
        this.tHeight = tHeight;
    }

    public float getCenterX() {
        return centerX;
    }

    public void setCenterX(float centerX) {
        this.centerX = centerX;
    }

    public float getCenterY() {
        return centerY;
    }

    public void setCenterY(float centerY) {
        this.centerY = centerY;
    }

    /**
     * Compiles and links the image shader, loads the named drawable into a GL
     * texture, and builds the quad geometry. Must be called on the GL thread
     * with a current context.
     *
     * @param con     context used to resolve the drawable resource
     * @param texName drawable resource name (without extension), e.g. "ya"
     */
    public Sprite(Context con, String texName) {
        mContext = con;

        int vertexShader = riGraphicTools.loadShader(GLES20.GL_VERTEX_SHADER,
                riGraphicTools.vs_Image);
        int fragmentShader = riGraphicTools.loadShader(GLES20.GL_FRAGMENT_SHADER,
                riGraphicTools.fs_Image);

        imageShaderProgram = GLES20.glCreateProgram();
        GLES20.glAttachShader(imageShaderProgram, vertexShader);
        GLES20.glAttachShader(imageShaderProgram, fragmentShader);

        // BUG FIX: the original bound attribute slot 0 to "a_TexCoordinate",
        // a name that does not exist in the shader (it declares "a_texCoord").
        // The binding is unnecessary anyway because Render() queries attribute
        // locations with glGetAttribLocation at draw time, so it is removed.
        GLES20.glLinkProgram(imageShaderProgram);

        // Create the texture, then size the quad from the loaded bitmap
        // (SetupImage sets tWidth/tHeight, which SetupTriangle reads).
        mTextureHandle = SetupImage(texName);
        SetupTriangle();
    }

    /**
     * Draws the quad with the given combined model-view-projection matrix.
     *
     * @param m 4x4 column-major MVP matrix
     */
    public void Render(float[] m) {
        // BUG FIX: the shader program must be activated every frame. The
        // original called glUseProgram only once, in the constructor; Vuforia's
        // drawVideoBackground() binds its own program each frame, so the quad
        // was subsequently drawn with the wrong program and never appeared.
        GLES20.glUseProgram(imageShaderProgram);

        // Position attribute: 3 floats per vertex, tightly packed.
        int mPositionHandle = GLES20.glGetAttribLocation(imageShaderProgram, "vPosition");
        GLES20.glEnableVertexAttribArray(mPositionHandle);
        GLES20.glVertexAttribPointer(mPositionHandle, 3, GLES20.GL_FLOAT,
                false, 3 * 4, vertexBuffer);

        int mTexCoordLoc = GLES20.glGetAttribLocation(imageShaderProgram, "a_texCoord");
        int mSamplerLoc = GLES20.glGetUniformLocation(imageShaderProgram, "s_texture");

        // Bind our texture to unit 0 and point the sampler at it.
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextureHandle);
        GLES20.glUniform1i(mSamplerLoc, 0);

        // Texture coordinates: 2 floats per vertex.
        GLES20.glVertexAttribPointer(mTexCoordLoc, 2, GLES20.GL_FLOAT,
                false, 0, uvBuffer);
        GLES20.glEnableVertexAttribArray(mTexCoordLoc);

        // Upload the MVP matrix and draw the two triangles.
        int mtrxhandle = GLES20.glGetUniformLocation(imageShaderProgram, "uMVPMatrix");
        GLES20.glUniformMatrix4fv(mtrxhandle, 1, false, m, 0);
        GLES20.glDrawElements(GLES20.GL_TRIANGLES, indices.length,
                GLES20.GL_UNSIGNED_SHORT, drawListBuffer);

        // BUG FIX: disable BOTH attribute arrays (the texcoord disable was
        // commented out), so stale client state cannot leak into other draws.
        GLES20.glDisableVertexAttribArray(mPositionHandle);
        GLES20.glDisableVertexAttribArray(mTexCoordLoc);
    }

    /**
     * Loads the named drawable into a new GL texture and records its size.
     *
     * @param texName drawable resource name (without extension)
     * @return the GL texture name
     */
    public int SetupImage(String texName) {
        // UV coordinates for the quad corners (matches the vertex order in
        // SetupTriangle: top-left, bottom-left, bottom-right, top-right).
        uvs = new float[] {
                0.0f, 0.0f,
                0.0f, 1.0f,
                1.0f, 1.0f,
                1.0f, 0.0f
        };

        ByteBuffer bb = ByteBuffer.allocateDirect(uvs.length * 4);
        bb.order(ByteOrder.nativeOrder());
        uvBuffer = bb.asFloatBuffer();
        uvBuffer.put(uvs);
        uvBuffer.position(0);

        // Generate one texture name; alter the count if more are ever needed.
        int[] texturenames = new int[1];
        GLES20.glGenTextures(1, texturenames, 0);

        // BUG FIX: the resource id was hard-coded to "drawable/ic_launcher",
        // ignoring the texName parameter entirely — every sprite showed the
        // launcher icon. Resolve the requested drawable instead.
        int id = mContext.getResources().getIdentifier(
                "drawable/" + texName, null, mContext.getPackageName());

        // Decode without pre-scaling so pixel dimensions match the asset.
        final BitmapFactory.Options options = new BitmapFactory.Options();
        options.inScaled = false;
        final Bitmap bmp = BitmapFactory.decodeResource(
                mContext.getResources(), id, options);

        // Bind and configure the texture, then upload the bitmap.
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texturenames[0]);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
        GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
                GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
        GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bmp, 0);

        tWidth = bmp.getWidth();
        tHeight = bmp.getHeight();

        // The pixels now live in GL; release the bitmap's memory.
        bmp.recycle();

        return texturenames[0];
    }

    /**
     * Builds a bitmap-sized quad centered on the origin (two triangles).
     * Relies on tWidth/tHeight having been set by SetupImage.
     */
    public void SetupTriangle() {
        float halfWidth = tWidth / 2.0f;
        float halfHeight = tHeight / 2.0f;

        // Corners in order: top-left, bottom-left, bottom-right, top-right.
        vertices = new float[] {
                -1f * halfWidth,       halfHeight, 0.0f,
                -1f * halfWidth, -1f * halfHeight, 0.0f,
                      halfWidth, -1f * halfHeight, 0.0f,
                      halfWidth,       halfHeight, 0.0f,
        };

        // Vertex rendering order: two CCW triangles covering the quad.
        indices = new short[] { 0, 1, 2, 0, 2, 3 };

        ByteBuffer bb = ByteBuffer.allocateDirect(vertices.length * 4);
        bb.order(ByteOrder.nativeOrder());
        vertexBuffer = bb.asFloatBuffer();
        vertexBuffer.put(vertices);
        vertexBuffer.position(0);

        ByteBuffer dlb = ByteBuffer.allocateDirect(indices.length * 2);
        dlb.order(ByteOrder.nativeOrder());
        drawListBuffer = dlb.asShortBuffer();
        drawListBuffer.put(indices);
        drawListBuffer.position(0);
    }
}
And this code for the rendering class
/*============================================================================== Copyright (c) 2012-2013 Qualcomm Connected Experiences, Inc. All Rights Reserved. ==============================================================================*/ package com.UnseenEgypt.ueg.Application; import java.util.Arrays; import java.util.Map; import javax.microedition.khronos.egl.EGLConfig; import javax.microedition.khronos.opengles.GL10; import android.content.Context; import android.opengl.GLES20; import android.opengl.GLSurfaceView; import android.opengl.Matrix; import android.util.Log; import com.qualcomm.vuforia.CameraDevice; import com.qualcomm.vuforia.Matrix44F; import com.qualcomm.vuforia.Renderer; import com.qualcomm.vuforia.State; import com.qualcomm.vuforia.Tool; import com.qualcomm.vuforia.Trackable; import com.qualcomm.vuforia.TrackableResult; import com.qualcomm.vuforia.VIDEO_BACKGROUND_REFLECTION; import com.qualcomm.vuforia.Vec2F; import com.qualcomm.vuforia.Vec3F; import com.qualcomm.vuforia.VideoBackgroundConfig; import com.qualcomm.vuforia.VideoMode; import com.qualcomm.vuforia.Vuforia; import com.UnseenEgypt.ueg.Application.Essentials.*; import com.UnseenEgypt.ueg.Utils.*; // The renderer class for the ImageTargets sample. 
public class TargetsRenderer implements GLSurfaceView.Renderer { private static final String LOGTAG = "ImageTargetRenderer"; private Context mContext; /*----------------------------------------*/ // Our matrices private final float[] mtrxProjection = new float[16]; private final float[] mtrxView = new float[16]; private final float[] mtrxProjectionAndView = new float[16]; private Map<String, TextureBox> tBs; private String [] keys = {"ya.jpg"}; /*-----------------------------------------*/ public void setTBs(Map<String, TextureBox> map){ tBs = map; } private ApplicationSession vuforiaAppSession; private Renderer mRenderer; boolean mIsActive = false; private Sprite sprite; private Targets mActivity; private static final float OBJECT_SCALE_FLOAT = 100.0f; public TargetsRenderer(Targets activity, ApplicationSession session) { mActivity = activity; mContext = mActivity.getApplicationContext(); vuforiaAppSession = session; } // Called to draw the current frame. @Override public void onDrawFrame(GL10 gl) { if (!mIsActive) return; // Call our function to render content renderFrame(); } // Called when the surface is created or recreated. @Override public void onSurfaceCreated(GL10 gl, EGLConfig config) { Log.d(LOGTAG, "GLRenderer.onSurfaceCreated"); initRendering(); // Call Vuforia function to (re)initialize rendering after first use // or after OpenGL ES context was lost (e.g. after onPause/onResume): vuforiaAppSession.onSurfaceCreated(); } // Called when the surface changed size. @Override public void onSurfaceChanged(GL10 gl, int width, int height) { Log.d(LOGTAG, "GLRenderer.onSurfaceChanged"); // Call Vuforia function to handle render surface size changes: vuforiaAppSession.onSurfaceChanged(width, height); // Redo the Viewport, making it fullscreen. 
GLES20.glViewport(0, 0, (int)width, (int)height); Matrix.setIdentityM(mtrxProjection, 0); Matrix.setIdentityM(mtrxView, 0); Matrix.setIdentityM(mtrxProjectionAndView, 0); // Setup our screen width and height for normal sprite translation. Matrix.orthoM(mtrxProjection, 0, -1.0f * width/2, width/2.0f, -1.0f * height/2, height/2.0f, -100, 100); // Set the camera position (View matrix) Matrix.setLookAtM(mtrxView, 0, 0f, 0f, 1f, 0f, 0f, -100f, 0f, 1.0f, 0.0f); } // Function for initializing the renderer. private void initRendering() { mRenderer = Renderer.getInstance(); GLES20.glClearColor(0.0f, 0.0f, 0.0f, Vuforia.requiresAlpha() ? 0.0f : 1.0f); sprite = new Sprite(mContext, "ya"); } // The render function. private void renderFrame() { GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT); State state = mRenderer.begin(); mRenderer.drawVideoBackground(); /* GLES20.glEnable(GLES20.GL_DEPTH_TEST); // handle face culling, we need to detect if we are using reflection // to determine the direction of the culling GLES20.glEnable(GLES20.GL_CULL_FACE); GLES20.glCullFace(GLES20.GL_BACK); if (Renderer.getInstance().getVideoBackgroundConfig().getReflection() == VIDEO_BACKGROUND_REFLECTION.VIDEO_BACKGROUND_REFLECTION_ON) GLES20.glFrontFace(GLES20.GL_CW); // Front camera else GLES20.glFrontFace(GLES20.GL_CCW); // Back camera */ // did we find any trackables this frame? 
for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++) { TrackableResult result = state.getTrackableResult(tIdx); Trackable trackable = result.getTrackable(); //printUserData(trackable); Matrix44F modelViewMatrix_Vuforia = Tool .convertPose2GLMatrix(result.getPose()); //Log.i(LOGTAG, "Pose Matrix "+Arrays.toString(imageTargetResult.getPose().getData())); //Log.i(LOGTAG, "ModelView Matrix "+ Arrays.toString(modelViewMatrix_Vuforia.getData())); Vec3F zeroPoint = new Vec3F(0,0,0); Vec2F projectionPoint = Tool.projectPoint(CameraDevice.getInstance() .getCameraCalibration(), result.getPose(), zeroPoint); //Log.i(LOGTAG, "Camera Projected Point " + Arrays.toString(projectionPoint.getData())); Vec2F screenPoint = cameraPointToScreenPoint(projectionPoint); Log.i(LOGTAG, "Center Point " + Arrays.toString(screenPoint.getData())); vuforiaAppSession.setmModelViewMatrix(modelViewMatrix_Vuforia); float [] tempView = new float [16]; for (int i=0; i < tempView.length; i++){ tempView[i] = mtrxView[i]; } float centerX = (screenPoint.getData()[0] - (vuforiaAppSession.getScreenWidth()/2)); float centerY = (-1.0f * screenPoint.getData()[1] + (vuforiaAppSession.getScreenHeight()/2)); Matrix.translateM(tempView, 0, centerX, centerY, 100.0f); sprite.setCenterX(centerX); sprite.setCenterY(centerY); Matrix.scaleM(tempView, 0, OBJECT_SCALE_FLOAT, OBJECT_SCALE_FLOAT, OBJECT_SCALE_FLOAT); // Calculate the projection and view transformation Matrix.multiplyMM(mtrxProjectionAndView, 0, mtrxProjection, 0, tempView, 0); tempView = null; String picName = trackable.getName().substring(6); int textureIndex = Integer.parseInt(picName); if (textureIndex != 0){ switch(textureIndex){ case 100: case 101: case 102: case 103: case 104: case 105: case 106: case 107: case 108: case 109: case 110: case 111: case 112: case 113: case 114: case 115: case 116: case 117: case 118: case 119: case 120: case 121: case 122: case 123: case 124: case 125: case 126: case 127: 
sprite.Render(mtrxProjectionAndView); break; case 300: case 301: case 302: case 303: sprite.Render(mtrxProjectionAndView); break; case 400: case 401: case 402: case 403: case 404: case 405: case 406: case 407: case 408: case 409: case 410: case 411: case 412: case 413: case 414: case 415: case 416: case 417: case 418: case 419: case 420: case 421: sprite.Render(mtrxProjectionAndView); break; case 501: case 502: case 503: case 504: case 505: case 506: case 507: case 508: case 509: case 510: case 511: case 512: case 513: sprite.Render(mtrxProjectionAndView); break; case 621: case 622: case 623: case 617: sprite.Render(mtrxProjectionAndView); break; case 647: sprite.Render(mtrxProjectionAndView); break; case 640: case 639: sprite.Render(mtrxProjectionAndView); break; case 633: case 632: case 631: case 630: case 629: sprite.Render(mtrxProjectionAndView); break; case 627: case 628: sprite.Render(mtrxProjectionAndView); break; default: break; } } SampleUtils.checkGLError("Render Frame"); } GLES20.glDisable(GLES20.GL_DEPTH_TEST); mRenderer.end(); } private Vec2F cameraPointToScreenPoint(Vec2F cameraPoint) { int screenWidth = vuforiaAppSession.getScreenWidth(); int screenHeight = vuforiaAppSession.getScreenHeight(); VideoMode videoMode = CameraDevice.getInstance().getVideoMode(CameraDevice.MODE.MODE_DEFAULT); VideoBackgroundConfig config = Renderer.getInstance().getVideoBackgroundConfig(); int xOffset = (int) (((int) screenWidth - config.getSize().getData()[0]) / 2.0f + config.getPosition().getData()[0]); int yOffset = (int) (((int) screenHeight - config.getSize().getData()[1]) / 2.0f - config.getPosition().getData()[1]); if (vuforiaAppSession.isPortrait()) { // camera image is rotated 90 degrees int rotatedX = (int) (videoMode.getHeight() - cameraPoint.getData()[1]); int rotatedY = (int) cameraPoint.getData()[0]; float f1 = rotatedX * config.getSize().getData()[0] / (float) videoMode.getHeight() + xOffset; float f2 = rotatedY * config.getSize().getData()[1] / (float) 
videoMode.getWidth() + yOffset; Vec2F vf = new Vec2F (f1, f2); return vf; } else { float f1 = cameraPoint.getData()[0] * config.getSize().getData()[0] / (float) videoMode.getWidth() + xOffset; float f2 = cameraPoint.getData()[1] * config.getSize().getData()[1] / (float) videoMode.getHeight() + yOffset; Vec2F vf = new Vec2F(f1, f2); return vf; } } private void printUserData(Trackable trackable) { String userData = (String) trackable.getUserData(); Log.d(LOGTAG, "UserData:Retreived User Data \"" + userData + "\""); } }
The problem is that the image doesn't appear when I point my phone at the target, although it draws fine on its own.
Any clue why this might happen?
Thanks in advance.