By Shiv
on Sun, 01/12/2014 - 09:52
Hi All,
I'm using Vuforia 2.8.8 and trying to replace the Teapot with VideoPlayback in the UserDefined Targets sample.
Below is the code I have tried so far:
public class UserDefinedVideoPlayback extends Activity implementsSampleApplicationControl, SampleAppMenuInterface { private static final String LOGTAG = "UserDefinedVideoPlayback"; SampleApplicationSession vuforiaAppSession; Activity mActivity; // Helpers to detect events such as double tapping: private GestureDetector mGestureDetector = null; // Movie for the Targets: public static final int NUM_TARGETS = 2; public static final int STONES = 0; public static final int CHIPS = 1; private VideoPlayerHelper mVideoPlayerHelper[] = null; private int mSeekPosition[] = null; private boolean mWasPlaying[] = null; private String mMovieName[] = null; // A boolean to indicate whether we come from full screen: private boolean mReturningFromFullScreen = false; // Our OpenGL view: private SampleApplicationGLView mGlView; // Our renderer: private UserDefinedVideoPlaybackRenderer mRenderer; // The textures we will use for rendering: private Vector<Texture> mTextures; DataSet dataSetStonesAndChips = null; private RelativeLayout mUILayout; private boolean mFlash = false; private boolean mContAutofocus = false; private boolean mExtendedTracking = false; private View mFlashOptionView; private SampleAppMenu mSampleAppMenu; private LoadingDialogHandler loadingDialogHandler = new LoadingDialogHandler( this); boolean mIsDroidDevice = false; //****************************************************************// private View mBottomBar; private View mCameraButton; // Alert dialog for displaying SDK errors private AlertDialog mDialog; int targetBuilderCounter = 1; DataSet dataSetUserDef = null; private ArrayList<View> mSettingsAdditionalViews; RefFreeFrame refFreeFrame; protected void onCreate(Bundle savedInstanceState) { Log.d(LOGTAG, "onCreate"); super.onCreate(savedInstanceState); vuforiaAppSession = new SampleApplicationSession(this); mActivity = this; startLoadingAnimation(); vuforiaAppSession .initAR(this, ActivityInfo.SCREEN_ORIENTATION_PORTRAIT); // Load any sample specific textures: mTextures = new Vector<Texture>(); loadTextures(); // Create the gesture detector that will handle the single and // double taps: mGestureDetector = new GestureDetector(getApplicationContext(), new GestureListener()); mVideoPlayerHelper = new VideoPlayerHelper[NUM_TARGETS]; mSeekPosition = new int[NUM_TARGETS]; mWasPlaying = new boolean[NUM_TARGETS]; mMovieName = new String[NUM_TARGETS]; // Create the video player helper that handles the playback of the movie // for the targets: for (int i = 0; i < NUM_TARGETS; i++) { mVideoPlayerHelper[i] = new VideoPlayerHelper(); mVideoPlayerHelper[i].init(); mVideoPlayerHelper[i].setActivity(this); } mMovieName[STONES] = "VideoPlayback/homedesign_1.mp4"; mMovieName[CHIPS] = "VideoPlayback/interior_2.mp4"; // Set the double tap listener: mGestureDetector.setOnDoubleTapListener(new OnDoubleTapListener() { // Handle the double tap public boolean onDoubleTap(MotionEvent e) { boolean isDoubleTapHandled = false; for (int i = 0; i < NUM_TARGETS; i++) { // Verify that the tap happens inside the target: if (mRenderer!= null && mRenderer.isTapOnScreenInsideTarget(i, e.getX(), e.getY())) { // Check whether we can play full screen at all: if (mVideoPlayerHelper[i].isPlayableFullscreen()) { // Pause all other media: pauseAll(i); // Request the playback in fullscreen: mVideoPlayerHelper[i].play(true, VideoPlayerHelper.CURRENT_POSITION); isDoubleTapHandled = true; } // Even though multiple videos can be loaded only one // can be playing at any point in time. 
This break // prevents that, say, overlapping videos trigger // simultaneously playback. break; } } return isDoubleTapHandled; } public boolean onDoubleTapEvent(MotionEvent e) { // We do not react to this event return false; } // Handle the single tap public boolean onSingleTapConfirmed(MotionEvent e) { boolean isSingleTapHandled = false; // Do not react if the StartupScreen is being displayed for (int i = 0; i < NUM_TARGETS; i++) { // Verify that the tap happened inside the target if (mRenderer!= null && mRenderer.isTapOnScreenInsideTarget(i, e.getX(), e.getY())) { // Check if it is playable on texture if (mVideoPlayerHelper[i].isPlayableOnTexture()) { // We can play only if the movie was paused, ready // or stopped if ((mVideoPlayerHelper[i].getStatus() == MEDIA_STATE.PAUSED) || (mVideoPlayerHelper[i].getStatus() == MEDIA_STATE.READY) || (mVideoPlayerHelper[i].getStatus() == MEDIA_STATE.STOPPED) || (mVideoPlayerHelper[i].getStatus() == MEDIA_STATE.REACHED_END)) { // Pause all other media pauseAll(i); // If it has reached the end then rewind if ((mVideoPlayerHelper[i].getStatus() == MEDIA_STATE.REACHED_END)) mSeekPosition[i] = 0; mVideoPlayerHelper[i].play(false, mSeekPosition[i]); mSeekPosition[i] = VideoPlayerHelper.CURRENT_POSITION; } else if (mVideoPlayerHelper[i].getStatus() == MEDIA_STATE.PLAYING) { // If it is playing then we pause it mVideoPlayerHelper[i].pause(); } } else if (mVideoPlayerHelper[i].isPlayableFullscreen()) { // If it isn't playable on texture // Either because it wasn't requested or because it // isn't supported then request playback fullscreen. mVideoPlayerHelper[i].play(true, VideoPlayerHelper.CURRENT_POSITION); } isSingleTapHandled = true; // Even though multiple videos can be loaded only one // can be playing at any point in time. This break // prevents that, say, overlapping videos trigger // simultaneously playback. break; } } return isSingleTapHandled; } }); } // We want to load specific textures from the APK, which we will later // use for rendering. private void loadTextures() { mTextures.add(Texture.loadTextureFromApk( "VideoPlayback/VuforiaSizzleReel_1.png", getAssets())); mTextures.add(Texture.loadTextureFromApk( "VideoPlayback/VuforiaSizzleReel_2.png", getAssets())); mTextures.add(Texture.loadTextureFromApk("VideoPlayback/play.png", getAssets())); mTextures.add(Texture.loadTextureFromApk("VideoPlayback/busy.png", getAssets())); mTextures.add(Texture.loadTextureFromApk("VideoPlayback/error.png", getAssets())); } // Called when the activity will start interacting with the user. 
protected void onResume() { Log.d(LOGTAG, "onResume"); super.onResume(); // This is needed for some Droid devices to force portrait if (mIsDroidDevice) { setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE); setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT); } try { vuforiaAppSession.resumeAR(); } catch (SampleApplicationException e) { Log.e(LOGTAG, e.getString()); } // Resume the GL view: if (mGlView != null) { mGlView.setVisibility(View.VISIBLE); mGlView.onResume(); } // Reload all the movies if (mRenderer != null) { for (int i = 0; i < NUM_TARGETS; i++) { if (!mReturningFromFullScreen) { mRenderer.requestLoad(i, mMovieName[i], mSeekPosition[i], false); } else { mRenderer.requestLoad(i, mMovieName[i], mSeekPosition[i], mWasPlaying[i]); } } } mReturningFromFullScreen = false; } // Called when returning from the full screen player protected void onActivityResult(int requestCode, int resultCode, Intent data) { if (requestCode == 1) { mActivity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT); if (resultCode == RESULT_OK) { // The following values are used to indicate the position in // which the video was being played and whether it was being // played or not: String movieBeingPlayed = data.getStringExtra("movieName"); mReturningFromFullScreen = true; // Find the movie that was being played full screen for (int i = 0; i < NUM_TARGETS; i++) { if (movieBeingPlayed.compareTo(mMovieName[i]) == 0) { mSeekPosition[i] = data.getIntExtra( "currentSeekPosition", 0); mWasPlaying[i] = data.getBooleanExtra("playing", false); } } } } } public void onConfigurationChanged(Configuration config) { Log.d(LOGTAG, "onConfigurationChanged"); super.onConfigurationChanged(config); vuforiaAppSession.onConfigurationChanged(); // Removes the current layout and inflates a proper layout // for the new screen orientation if (mUILayout != null) { mUILayout.removeAllViews(); ((ViewGroup) mUILayout.getParent()).removeView(mUILayout); } addOverlayView(false); } protected void onPause() { Log.d(LOGTAG, "onPause"); super.onPause(); if (mGlView != null) { mGlView.setVisibility(View.INVISIBLE); mGlView.onPause(); } // Store the playback state of the movies and unload them: for (int i = 0; i < NUM_TARGETS; i++) { // If the activity is paused we need to store the position in which // this was currently playing: if (mVideoPlayerHelper[i].isPlayableOnTexture()) { mSeekPosition[i] = mVideoPlayerHelper[i].getCurrentPosition(); mWasPlaying[i] = mVideoPlayerHelper[i].getStatus() == MEDIA_STATE.PLAYING ? true : false; } // We also need to release the resources used by the helper, though // we don't need to destroy it: if (mVideoPlayerHelper[i] != null) mVideoPlayerHelper[i].unload(); } mReturningFromFullScreen = false; // Turn off the flash if (mFlashOptionView != null && mFlash) { // OnCheckedChangeListener is called upon changing the checked state if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { ((Switch) mFlashOptionView).setChecked(false); } else { ((CheckBox) mFlashOptionView).setChecked(false); } } try { vuforiaAppSession.pauseAR(); } catch (SampleApplicationException e) { Log.e(LOGTAG, e.getString()); } } // The final call you receive before your activity is destroyed. 
protected void onDestroy() { Log.d(LOGTAG, "onDestroy"); super.onDestroy(); for (int i = 0; i < NUM_TARGETS; i++) { // If the activity is destroyed we need to release all resources: if (mVideoPlayerHelper[i] != null) mVideoPlayerHelper[i].deinit(); mVideoPlayerHelper[i] = null; } try { vuforiaAppSession.stopAR(); } catch (SampleApplicationException e) { Log.e(LOGTAG, e.getString()); } // Unload texture: mTextures.clear(); mTextures = null; System.gc(); } // Pause all movies except one // if the value of 'except' is -1 then // do a blanket pause private void pauseAll(int except) { // And pause all the playing videos: for (int i = 0; i < NUM_TARGETS; i++) { // We can make one exception to the pause all calls: if (i != except) { // Check if the video is playable on texture if (mVideoPlayerHelper[i].isPlayableOnTexture()) { // If it is playing then we pause it mVideoPlayerHelper[i].pause(); } } } } // Do not exit immediately and instead show the startup screen public void onBackPressed() { pauseAll(-1); super.onBackPressed(); } private void startLoadingAnimation() { LayoutInflater inflater = LayoutInflater.from(this); mUILayout = (RelativeLayout) inflater.inflate(R.layout.camera_overlay, null, false); mUILayout.setVisibility(View.VISIBLE); mUILayout.setBackgroundColor(Color.BLACK); // Gets a reference to the loading dialog loadingDialogHandler.mLoadingDialogContainer = mUILayout .findViewById(R.id.loading_indicator); // Shows the loading indicator at start loadingDialogHandler .sendEmptyMessage(LoadingDialogHandler.SHOW_LOADING_DIALOG); // Adds the inflated layout to the view addContentView(mUILayout, new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)); } // Initializes AR application components. private void initApplicationAR() { // Do application initialization refFreeFrame = new RefFreeFrame(this, vuforiaAppSession); refFreeFrame.init(); // Create OpenGL ES view: int depthSize = 16; int stencilSize = 0; boolean translucent = Vuforia.requiresAlpha(); mGlView = new SampleApplicationGLView(this); mGlView.init(translucent, depthSize, stencilSize); mRenderer = new UserDefinedVideoPlaybackRenderer(this, vuforiaAppSession); mRenderer.setTextures(mTextures); // The renderer comes has the OpenGL context, thus, loading to texture // must happen when the surface has been created. This means that we // can't load the movie from this thread (GUI) but instead we must // tell the GL thread to load it once the surface has been created. 
for (int i = 0; i < NUM_TARGETS; i++) { mRenderer.setVideoPlayerHelper(i, mVideoPlayerHelper[i]); mRenderer.requestLoad(i, mMovieName[i], 0, false); } mGlView.setRenderer(mRenderer); for (int i = 0; i < NUM_TARGETS; i++) { float[] temp = { 0f, 0f }; mRenderer.targetPositiveDimensions[i].setData(temp); mRenderer.videoPlaybackTextureID[i] = -1; } addOverlayView(true); } // Adds the Overlay view to the GLView private void addOverlayView(boolean initLayout) { // Inflates the Overlay Layout to be displayed above the Camera View LayoutInflater inflater = LayoutInflater.from(this); mUILayout = (RelativeLayout) inflater.inflate( R.layout.camera_overlay_udt, null, false); mUILayout.setVisibility(View.VISIBLE); // If this is the first time that the application runs then the // uiLayout background is set to BLACK color, will be set to // transparent once the SDK is initialized and camera ready to draw if (initLayout) { mUILayout.setBackgroundColor(Color.BLACK); } // Adds the inflated layout to the view addContentView(mUILayout, new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)); // Gets a reference to the bottom navigation bar mBottomBar = mUILayout.findViewById(R.id.bottom_bar); // Gets a reference to the Camera button mCameraButton = mUILayout.findViewById(R.id.camera_button); // Gets a reference to the loading dialog container loadingDialogHandler.mLoadingDialogContainer = mUILayout .findViewById(R.id.loading_layout); startUserDefinedTargets(); initializeBuildTargetModeViews(); mUILayout.bringToFront(); } // We do not handle the touch event here, we just forward it to the // gesture detector public boolean onTouchEvent(MotionEvent event) { boolean result = false; result = mGestureDetector.onTouchEvent(event); // Process the Gestures if (!result && mSampleAppMenu != null ) result = mSampleAppMenu.processEvent(event); return result; } @Override public boolean doInitTrackers() { // Indicate if the trackers were initialized correctly boolean result = true; // Initialize the image tracker: TrackerManager trackerManager = TrackerManager.getInstance(); Tracker tracker = trackerManager.initTracker(ImageTracker .getClassType()); if (tracker == null) { Log.d(LOGTAG, "Failed to initialize ImageTracker."); result = false; } else { Log.d(LOGTAG, "Successfully initialized ImageTracker."); } return result; } @Override public boolean doLoadTrackersData() { // Get the image tracker: TrackerManager trackerManager = TrackerManager.getInstance(); ImageTracker imageTracker = (ImageTracker) trackerManager .getTracker(ImageTracker.getClassType()); if (imageTracker == null) { Log.d( LOGTAG, "Failed to load tracking data set because the ImageTracker has not been initialized."); return false; } /*// Create the data sets: dataSetStonesAndChips = imageTracker.createDataSet(); if (dataSetStonesAndChips == null) { Log.d(LOGTAG, "Failed to create a new tracking data."); return false; } // Load the data sets: if (!dataSetStonesAndChips.load("StonesAndChips.xml", DataSet.STORAGE_TYPE.STORAGE_APPRESOURCE)) { Log.d(LOGTAG, "Failed to load data set."); return false; } // Activate the data set: if (!imageTracker.activateDataSet(dataSetStonesAndChips)) { Log.d(LOGTAG, "Failed to activate data set."); return false; } Log.d(LOGTAG, "Successfully loaded and activated data set."); return true;*/ // Create the data set: dataSetUserDef = imageTracker.createDataSet(); if (dataSetUserDef == null) { Log.d(LOGTAG, "Failed to create a new tracking data."); return false; } if (!imageTracker.activateDataSet(dataSetUserDef)) { 
Log.d(LOGTAG, "Failed to activate data set."); return false; } Log.d(LOGTAG, "Successfully loaded and activated data set."); return true; } @Override public boolean doStartTrackers() { // Indicate if the trackers were started correctly boolean result = true; Tracker imageTracker = TrackerManager.getInstance().getTracker( ImageTracker.getClassType()); if (imageTracker != null) { imageTracker.start(); Vuforia.setHint(HINT.HINT_MAX_SIMULTANEOUS_IMAGE_TARGETS, 2); } else result = false; return result; } @Override public boolean doStopTrackers() { // Indicate if the trackers were stopped correctly boolean result = true; Tracker imageTracker = TrackerManager.getInstance().getTracker( ImageTracker.getClassType()); if (imageTracker != null) imageTracker.stop(); else result = false; return result; } @Override public boolean doUnloadTrackersData() { // Indicate if the trackers were unloaded correctly boolean result = true; // Get the image tracker: TrackerManager trackerManager = TrackerManager.getInstance(); ImageTracker imageTracker = (ImageTracker) trackerManager .getTracker(ImageTracker.getClassType()); if (imageTracker == null) { Log.d( LOGTAG, "Failed to destroy the tracking data set because the ImageTracker has not been initialized."); return false; } /* if (dataSetStonesAndChips != null) { if (imageTracker.getActiveDataSet() == dataSetStonesAndChips && !imageTracker.deactivateDataSet(dataSetStonesAndChips)) { Log.d( LOGTAG, "Failed to destroy the tracking data set StonesAndChips because the data set could not be deactivated."); result = false; } else if (!imageTracker.destroyDataSet(dataSetStonesAndChips)) { Log.d(LOGTAG, "Failed to destroy the tracking data set StonesAndChips."); result = false; } dataSetStonesAndChips = null; }*/ if (dataSetUserDef != null) { if (imageTracker.getActiveDataSet() != null && !imageTracker.deactivateDataSet(dataSetUserDef)) { Log.d( LOGTAG, "Failed to destroy the tracking data set because the data set could not be deactivated."); result = false; } if (!imageTracker.destroyDataSet(dataSetUserDef)) { Log.d(LOGTAG, "Failed to destroy the tracking data set."); result = false; } Log.d(LOGTAG, "Successfully destroyed the data set."); dataSetUserDef = null; } return result; } @Override public boolean doDeinitTrackers() { // Indicate if the trackers were deinitialized correctly boolean result = true; if (refFreeFrame != null) refFreeFrame.deInit(); TrackerManager tManager = TrackerManager.getInstance(); tManager.deinitTracker(ImageTracker.getClassType()); return result; } @Override public void onInitARDone(SampleApplicationException exception) { if (exception == null) { initApplicationAR(); // Activate the renderer mRenderer.mIsActive = true; // Now add the GL surface view. It is important // that the OpenGL ES surface view gets added // BEFORE the camera is started and video // background is configured. 
addContentView(mGlView, new LayoutParams(LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT)); // Sets the UILayout to be drawn in front of the camera mUILayout.bringToFront(); // Hides the Loading Dialog loadingDialogHandler .sendEmptyMessage(LoadingDialogHandler.HIDE_LOADING_DIALOG); // Sets the layout background to transparent mUILayout.setBackgroundColor(Color.TRANSPARENT); try { vuforiaAppSession.startAR(CameraDevice.CAMERA.CAMERA_FRONT); } catch (SampleApplicationException e) { Log.e(LOGTAG, e.getString()); } boolean result = CameraDevice.getInstance().setFocusMode( CameraDevice.FOCUS_MODE.FOCUS_MODE_CONTINUOUSAUTO); if (result) mContAutofocus = true; else Log.e(LOGTAG, "Unable to enable continuous autofocus"); setSampleAppMenuAdditionalViews(); mSampleAppMenu = new SampleAppMenu(this, this, "User Defined Targets", mGlView, mUILayout, mSettingsAdditionalViews); setSampleAppMenuSettings(); } else { Log.e(LOGTAG, exception.getString()); finish(); } } @Override public void onQCARUpdate(State state) { TrackerManager trackerManager = TrackerManager.getInstance(); ImageTracker imageTracker = (ImageTracker) trackerManager .getTracker(ImageTracker.getClassType()); if (refFreeFrame.hasNewTrackableSource()) { Log.d(LOGTAG, "Attempting to transfer the trackable source to the dataset"); // Deactivate current dataset imageTracker.deactivateDataSet(imageTracker.getActiveDataSet()); // Clear the oldest target if the dataset is full or the dataset // already contains five user-defined targets. if (dataSetUserDef.hasReachedTrackableLimit() || dataSetUserDef.getNumTrackables() >= 5) dataSetUserDef.destroy(dataSetUserDef.getTrackable(0)); if (mExtendedTracking && dataSetUserDef.getNumTrackables() > 0) { // We need to stop the extended tracking for the previous target // so we can enable it for the new one int previousCreatedTrackableIndex = dataSetUserDef.getNumTrackables() - 1; dataSetUserDef.getTrackable(previousCreatedTrackableIndex) .stopExtendedTracking(); } // Add new trackable source Trackable trackable = dataSetUserDef .createTrackable(refFreeFrame.getNewTrackableSource()); // Reactivate current dataset imageTracker.activateDataSet(dataSetUserDef); if (mExtendedTracking) { trackable.startExtendedTracking(); } } } final private static int CMD_BACK = -1; final private static int CMD_EXTENDED_TRACKING = 1; final private static int CMD_AUTOFOCUS = 2; final private static int CMD_FLASH = 3; final private static int CMD_CAMERA_FRONT = 4; final private static int CMD_CAMERA_REAR = 5; // This method sets the additional views to be moved along with the GLView private void setSampleAppMenuAdditionalViews() { mSettingsAdditionalViews = new ArrayList<View>(); mSettingsAdditionalViews.add(mBottomBar); } // This method sets the menu's settings private void setSampleAppMenuSettings() { SampleAppMenuGroup group; group = mSampleAppMenu.addGroup("", false); group.addTextItem(getString(R.string.menu_back), -1); group = mSampleAppMenu.addGroup("", true); group.addSelectionItem(getString(R.string.menu_extended_tracking), CMD_EXTENDED_TRACKING, false); group.addSelectionItem(getString(R.string.menu_contAutofocus), CMD_AUTOFOCUS, mContAutofocus); mFlashOptionView = group.addSelectionItem( getString(R.string.menu_flash), CMD_FLASH, false); CameraInfo ci = new CameraInfo(); boolean deviceHasFrontCamera = false; boolean deviceHasBackCamera = false; for (int i = 0; i < Camera.getNumberOfCameras(); i++) { Camera.getCameraInfo(i, ci); if (ci.facing == CameraInfo.CAMERA_FACING_FRONT) deviceHasFrontCamera = true; else if 
(ci.facing == CameraInfo.CAMERA_FACING_BACK) deviceHasBackCamera = true; } if (deviceHasBackCamera && deviceHasFrontCamera) { group = mSampleAppMenu.addGroup(getString(R.string.menu_camera), true); group.addRadioItem(getString(R.string.menu_camera_front), CMD_CAMERA_FRONT, false); group.addRadioItem(getString(R.string.menu_camera_back), CMD_CAMERA_REAR, true); } mSampleAppMenu.attachMenu(); } @Override public boolean menuProcess(int command) { boolean result = true; switch (command) { case CMD_BACK: finish(); break; case CMD_FLASH: result = CameraDevice.getInstance().setFlashTorchMode(!mFlash); if (result) { mFlash = !mFlash; } else { showToast(getString(mFlash ? R.string.menu_flash_error_off : R.string.menu_flash_error_on)); Log.e(LOGTAG, getString(mFlash ? R.string.menu_flash_error_off : R.string.menu_flash_error_on)); } break; case CMD_AUTOFOCUS: if (mContAutofocus) { result = CameraDevice.getInstance().setFocusMode( CameraDevice.FOCUS_MODE.FOCUS_MODE_NORMAL); if (result) { mContAutofocus = false; } else { showToast(getString(R.string.menu_contAutofocus_error_off)); Log.e(LOGTAG, getString(R.string.menu_contAutofocus_error_off)); } } else { result = CameraDevice.getInstance().setFocusMode( CameraDevice.FOCUS_MODE.FOCUS_MODE_CONTINUOUSAUTO); if (result) { mContAutofocus = true; } else { showToast(getString(R.string.menu_contAutofocus_error_on)); Log.e(LOGTAG, getString(R.string.menu_contAutofocus_error_on)); } } break; case CMD_CAMERA_FRONT: case CMD_CAMERA_REAR: // Turn off the flash if (mFlashOptionView != null && mFlash) { // OnCheckedChangeListener is called upon changing the checked state if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { ((Switch) mFlashOptionView).setChecked(false); } else { ((CheckBox) mFlashOptionView).setChecked(false); } } doStopTrackers(); CameraDevice.getInstance().stop(); CameraDevice.getInstance().deinit(); try { vuforiaAppSession .startAR(command == CMD_CAMERA_FRONT ? 
CameraDevice.CAMERA.CAMERA_FRONT : CameraDevice.CAMERA.CAMERA_BACK); } catch (SampleApplicationException e) { showToast(e.getString()); Log.e(LOGTAG, e.getString()); result = false; } doStartTrackers(); break; case CMD_EXTENDED_TRACKING: /* for (int tIdx = 0; tIdx < dataSetStonesAndChips .getNumTrackables(); tIdx++) { Trackable trackable = dataSetStonesAndChips .getTrackable(tIdx);*/ if (dataSetUserDef.getNumTrackables() > 0) { int lastTrackableCreatedIndex = dataSetUserDef.getNumTrackables() - 1; Trackable trackable = dataSetUserDef .getTrackable(lastTrackableCreatedIndex); if (!mExtendedTracking) { if (!trackable.startExtendedTracking()) { Log.e(LOGTAG, "Failed to start extended tracking target"); result = false; } else { Log.d(LOGTAG, "Successfully started extended tracking target"); } } else { if (!trackable.stopExtendedTracking()) { Log.e(LOGTAG, "Failed to stop extended tracking target"); result = false; } else { Log.d(LOGTAG, "Successfully started extended tracking target"); } } } if (result) mExtendedTracking = !mExtendedTracking; break; } return result; } private void showToast(String text) { Toast.makeText(this, text, Toast.LENGTH_SHORT).show(); } boolean startUserDefinedTargets() { Log.d(LOGTAG, "startUserDefinedTargets"); TrackerManager trackerManager = TrackerManager.getInstance(); ImageTracker imageTracker = (ImageTracker) (trackerManager .getTracker(ImageTracker.getClassType())); if (imageTracker != null) { ImageTargetBuilder targetBuilder = imageTracker .getImageTargetBuilder(); if (targetBuilder != null) { // if needed, stop the target builder if (targetBuilder.getFrameQuality() != ImageTargetBuilder.FRAME_QUALITY.FRAME_QUALITY_NONE) targetBuilder.stopScan(); imageTracker.stop(); targetBuilder.startScan(); } } else return false; return true; } boolean isUserDefinedTargetsRunning() { TrackerManager trackerManager = TrackerManager.getInstance(); ImageTracker imageTracker = (ImageTracker) trackerManager .getTracker(ImageTracker.getClassType()); if (imageTracker != null) { ImageTargetBuilder targetBuilder = imageTracker .getImageTargetBuilder(); if (targetBuilder != null) { Log.e(LOGTAG, "Quality> " + targetBuilder.getFrameQuality()); return (targetBuilder.getFrameQuality() != ImageTargetBuilder.FRAME_QUALITY.FRAME_QUALITY_NONE) ? 
true : false; } } return false; } void startBuild() { TrackerManager trackerManager = TrackerManager.getInstance(); ImageTracker imageTracker = (ImageTracker) trackerManager .getTracker(ImageTracker.getClassType()); if (imageTracker != null) { ImageTargetBuilder targetBuilder = imageTracker .getImageTargetBuilder(); if (targetBuilder != null) { // Uncomment this block to show and error message if // the frame quality is Low //if (targetBuilder.getFrameQuality() == ImageTargetBuilder.FRAME_QUALITY.FRAME_QUALITY_LOW) //{ // showErrorDialogInUIThread(); //} String name; do { name = "stones"; Log.d(LOGTAG, "TRYING " + name); targetBuilderCounter++; } while (!targetBuilder.build(name, 320.0f)); refFreeFrame.setCreating(); } } } void updateRendering() { DisplayMetrics metrics = new DisplayMetrics(); getWindowManager().getDefaultDisplay().getMetrics(metrics); refFreeFrame.initGL(metrics.widthPixels, metrics.heightPixels); } // Initialize views private void initializeBuildTargetModeViews() { // Shows the bottom bar mBottomBar.setVisibility(View.VISIBLE); mCameraButton.setVisibility(View.VISIBLE); } // Button Camera clicked public void onCameraClick(View v) { if (isUserDefinedTargetsRunning()) { // Shows the loading dialog loadingDialogHandler .sendEmptyMessage(LoadingDialogHandler.SHOW_LOADING_DIALOG); // Builds the new target startBuild(); } } // Creates a texture given the filename public Texture createTexture(String nName) { return Texture.loadTextureFromApk(nName, getAssets()); } // Callback function called when the target creation finished public void targetCreated() { // Hides the loading dialog loadingDialogHandler .sendEmptyMessage(LoadingDialogHandler.HIDE_LOADING_DIALOG); if (refFreeFrame != null) { refFreeFrame.reset(); } Log.d(LOGTAG, "targetCreated"); } // Shows error message in a system dialog box on the UI thread void showErrorDialogInUIThread() { runOnUiThread(new Runnable() { public void run() { showErrorDialog(); } }); } // Shows error message in a system dialog box private void showErrorDialog() { if (mDialog != null && mDialog.isShowing()) mDialog.dismiss(); mDialog = new AlertDialog.Builder(UserDefinedVideoPlayback.this).create(); DialogInterface.OnClickListener clickListener = new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { dialog.dismiss(); } }; mDialog.setButton(DialogInterface.BUTTON_POSITIVE, getString(R.string.button_OK), clickListener); mDialog.setTitle(getString(R.string.target_quality_error_title)); String message = getString(R.string.target_quality_error_desc); // Show dialog box with error message: mDialog.setMessage(message); mDialog.show(); } // Process Single Tap event to trigger autofocus private class GestureListener extends GestureDetector.SimpleOnGestureListener { // Used to set autofocus one second after a manual focus is triggered private final Handler autofocusHandler = new Handler(); @Override public boolean onDown(MotionEvent e) { return true; } @Override public boolean onSingleTapUp(MotionEvent e) { // Generates a Handler to trigger autofocus // after 1 second autofocusHandler.postDelayed(new Runnable() { public void run() { boolean result = CameraDevice.getInstance().setFocusMode( CameraDevice.FOCUS_MODE.FOCUS_MODE_TRIGGERAUTO); if (!result) Log.e("SingleTapUp", "Unable to trigger focus"); } }, 1000L); return true; } } } /****************************************************************************************************************/ public class UserDefinedVideoPlaybackRenderer implements 
GLSurfaceView.Renderer{ private static final String LOGTAG = "VideoPlaybackRenderer"; SampleApplicationSession vuforiaAppSession; // Video Playback Rendering Specific private int videoPlaybackShaderID = 0; private int videoPlaybackVertexHandle = 0; private int videoPlaybackNormalHandle = 0; private int videoPlaybackTexCoordHandle = 0; private int videoPlaybackMVPMatrixHandle = 0; private int videoPlaybackTexSamplerOESHandle = 0; // Video Playback Textures for the two targets int videoPlaybackTextureID[] = new int[VideoPlayback.NUM_TARGETS]; // Keyframe and icon rendering specific private int keyframeShaderID = 0; private int keyframeVertexHandle = 0; private int keyframeNormalHandle = 0; private int keyframeTexCoordHandle = 0; private int keyframeMVPMatrixHandle = 0; private int keyframeTexSampler2DHandle = 0; // We cannot use the default texture coordinates of the quad since these // will change depending on the video itself private float videoQuadTextureCoords[] = { 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f, }; // This variable will hold the transformed coordinates (changes every frame) private float videoQuadTextureCoordsTransformedStones[] = { 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f, }; private float videoQuadTextureCoordsTransformedChips[] = { 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f, }; // Trackable dimensions Vec2F targetPositiveDimensions[] = new Vec2F[VideoPlayback.NUM_TARGETS]; static int NUM_QUAD_VERTEX = 4; static int NUM_QUAD_INDEX = 6; double quadVerticesArray[] = { -1.0f, -1.0f, 0.0f, 1.0f, -1.0f, 0.0f, 1.0f, 1.0f, 0.0f, -1.0f, 1.0f, 0.0f }; double quadTexCoordsArray[] = { 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 1.0f }; double quadNormalsArray[] = { 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, }; short quadIndicesArray[] = { 0, 1, 2, 2, 3, 0 }; Buffer quadVertices, quadTexCoords, quadIndices, quadNormals; public boolean mIsActive = false; private float[][] mTexCoordTransformationMatrix = null; private VideoPlayerHelper mVideoPlayerHelper[] = null; private String mMovieName[] = null; private MEDIA_TYPE mCanRequestType[] = null; private int mSeekPosition[] = null; private boolean mShouldPlayImmediately[] = null; private long mLostTrackingSince[] = null; private boolean mLoadRequested[] = null; UserDefinedVideoPlayback mActivity; // Needed to calculate whether a screen tap is inside the target Matrix44F modelViewMatrix[] = new Matrix44F[VideoPlayback.NUM_TARGETS]; private Vector<Texture> mTextures; boolean isTracking[] = new boolean[VideoPlayback.NUM_TARGETS]; MEDIA_STATE currentStatus[] = new MEDIA_STATE[VideoPlayback.NUM_TARGETS]; // These hold the aspect ratio of both the video and the // keyframe float videoQuadAspectRatio[] = new float[VideoPlayback.NUM_TARGETS]; float keyframeQuadAspectRatio[] = new float[VideoPlayback.NUM_TARGETS]; //****************************************************************************************************** private int shaderProgramID; private int vertexHandle; private int normalHandle; private int textureCoordHandle; private int mvpMatrixHandle; private int texSampler2DHandle; // Constants: static final float kObjectScale = 3.f; private Teapot mTeapot; public UserDefinedVideoPlaybackRenderer(UserDefinedVideoPlayback activity,SampleApplicationSession session) {mActivity = activity; vuforiaAppSession = session; // Create an array of the size of the number of targets we have mVideoPlayerHelper = new VideoPlayerHelper[VideoPlayback.NUM_TARGETS]; mMovieName = new String[VideoPlayback.NUM_TARGETS]; mCanRequestType = new 
MEDIA_TYPE[VideoPlayback.NUM_TARGETS]; mSeekPosition = new int[VideoPlayback.NUM_TARGETS]; mShouldPlayImmediately = new boolean[VideoPlayback.NUM_TARGETS]; mLostTrackingSince = new long[VideoPlayback.NUM_TARGETS]; mLoadRequested = new boolean[VideoPlayback.NUM_TARGETS]; mTexCoordTransformationMatrix = new float[VideoPlayback.NUM_TARGETS][16]; // Initialize the arrays to default values for (int i = 0; i < VideoPlayback.NUM_TARGETS; i++) { mVideoPlayerHelper[i] = null; mMovieName[i] = ""; mCanRequestType[i] = MEDIA_TYPE.ON_TEXTURE_FULLSCREEN; mSeekPosition[i] = 0; mShouldPlayImmediately[i] = false; mLostTrackingSince[i] = -1; mLoadRequested[i] = false; } for (int i = 0; i < VideoPlayback.NUM_TARGETS; i++) targetPositiveDimensions[i] = new Vec2F(); for (int i = 0; i < VideoPlayback.NUM_TARGETS; i++) modelViewMatrix[i] = new Matrix44F();} // Store the Player Helper object passed from the main activity public void setVideoPlayerHelper(int target, VideoPlayerHelper newVideoPlayerHelper) { mVideoPlayerHelper[target] = newVideoPlayerHelper; } public void requestLoad(int target, String movieName, int seekPosition, boolean playImmediately) { mMovieName[target] = movieName; mSeekPosition[target] = seekPosition; mShouldPlayImmediately[target] = playImmediately; mLoadRequested[target] = true; } // Called when the surface is created or recreated. public void onSurfaceCreated(GL10 gl, EGLConfig config) { // Call function to initialize rendering: // The video texture is also created on this step initRendering(); // Call Vuforia function to (re)initialize rendering after first use // or after OpenGL ES context was lost (e.g. after onPause/onResume): Vuforia.onSurfaceCreated(); for (int i = 0; i < VideoPlayback.NUM_TARGETS; i++) { if (mVideoPlayerHelper[i] != null) { // The VideoPlayerHelper needs to setup a surface texture given // the texture id // Here we inform the video player that we would like to play // the movie // both on texture and on full screen // Notice that this does not mean that the platform will be able // to do what we request // After the file has been loaded one must always check with // isPlayableOnTexture() whether // this can be played embedded in the AR scene if (!mVideoPlayerHelper[i] .setupSurfaceTexture(videoPlaybackTextureID[i])) mCanRequestType[i] = MEDIA_TYPE.FULLSCREEN; else mCanRequestType[i] = MEDIA_TYPE.ON_TEXTURE_FULLSCREEN; // And now check if a load has been requested with the // parameters passed from the main activity if (mLoadRequested[i]) { mVideoPlayerHelper[i].load(mMovieName[i], mCanRequestType[i], mShouldPlayImmediately[i], mSeekPosition[i]); mLoadRequested[i] = false; } } } } // Called when the surface changed size. 
@Override public void onSurfaceChanged(GL10 gl, int width, int height) { Log.d(LOGTAG, "GLRenderer.onSurfaceChanged"); // Call function to update rendering when render surface // parameters have changed: mActivity.updateRendering(); // Call Vuforia function to handle render surface size changes: vuforiaAppSession.onSurfaceChanged(width, height); // Upon every on pause the movie had to be unloaded to release resources // Thus, upon every surface create or surface change this has to be // reloaded // See: // http://developer.android.com/reference/android/media/MediaPlayer.html#release() for (int i = 0; i < VideoPlayback.NUM_TARGETS; i++) { if (mLoadRequested[i] && mVideoPlayerHelper[i] != null) { mVideoPlayerHelper[i].load(mMovieName[i], mCanRequestType[i], mShouldPlayImmediately[i], mSeekPosition[i]); mLoadRequested[i] = false; } } } // Called to draw the current frame. public void onDrawFrame(GL10 gl) { if (!mIsActive) return; for (int i = 0; i < VideoPlayback.NUM_TARGETS; i++) { if (mVideoPlayerHelper[i] != null) { if (mVideoPlayerHelper[i].isPlayableOnTexture()) { // First we need to update the video data. This is a built // in Android call // Here, the decoded data is uploaded to the OES texture // We only need to do this if the movie is playing if (mVideoPlayerHelper[i].getStatus() == MEDIA_STATE.PLAYING) { mVideoPlayerHelper[i].updateVideoData(); } // According to the Android API // (http://developer.android.com/reference/android/graphics/SurfaceTexture.html) // transforming the texture coordinates needs to happen // every frame. mVideoPlayerHelper[i] .getSurfaceTextureTransformMatrix(mTexCoordTransformationMatrix[i]); setVideoDimensions(i, mVideoPlayerHelper[i].getVideoWidth(), mVideoPlayerHelper[i].getVideoHeight(), mTexCoordTransformationMatrix[i]); } setStatus(i, mVideoPlayerHelper[i].getStatus().getNumericType()); } } // Call our function to render content renderFrame(); for (int i = 0; i < VideoPlayback.NUM_TARGETS; i++) { // Ask whether the target is currently being tracked and if so react // to it if (isTracking(i)) { // If it is tracking reset the timestamp for lost tracking mLostTrackingSince[i] = -1; } else { // If it isn't tracking // check whether it just lost it or if it's been a while if (mLostTrackingSince[i] < 0) mLostTrackingSince[i] = SystemClock.uptimeMillis(); else { // If it's been more than 2 seconds then pause the player if ((SystemClock.uptimeMillis() - mLostTrackingSince[i]) > 2000) { if (mVideoPlayerHelper[i] != null) mVideoPlayerHelper[i].pause(); } } } } // If you would like the video to start playing as soon as it starts // tracking // and pause as soon as tracking is lost you can do that here by // commenting // the for-loop above and instead checking whether the isTracking() // value has // changed since the last frame. Notice that you need to be careful not // to // trigger automatic playback for fullscreen since that will be // inconvenient // for your users. } @SuppressLint("InlinedApi") void renderFrame() { // Clear color and depth buffer GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT); // Get the state from Vuforia and mark the beginning of a rendering // section State state = Renderer.getInstance().begin(); // Explicitly render the Video Background Renderer.getInstance().drawVideoBackground(); GLES20.glEnable(GLES20.GL_DEPTH_TEST); // We must detect if background reflection is active and adjust the // culling direction. 
// If the reflection is active, this means the post matrix has been // reflected as well, // therefore standard counter clockwise face culling will result in // "inside out" models. GLES20.glEnable(GLES20.GL_CULL_FACE); GLES20.glCullFace(GLES20.GL_BACK); if (Renderer.getInstance().getVideoBackgroundConfig().getReflection() == VIDEO_BACKGROUND_REFLECTION.VIDEO_BACKGROUND_REFLECTION_ON) GLES20.glFrontFace(GLES20.GL_CW); // Front camera else GLES20.glFrontFace(GLES20.GL_CCW); // Back camera float temp[] = { 0.0f, 0.0f }; for (int i = 0; i < VideoPlayback.NUM_TARGETS; i++) { isTracking[i] = false; targetPositiveDimensions[i].setData(temp); } // Did we find any trackables this frame? for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++) { // Get the trackable: TrackableResult trackableResult = state.getTrackableResult(tIdx); ImageTarget imageTarget = (ImageTarget) trackableResult .getTrackable(); int currentTarget; // We store the modelview matrix to be used later by the tap // calculation if (imageTarget.getName().compareTo("stones") == 0) currentTarget = VideoPlayback.STONES; else currentTarget = VideoPlayback.CHIPS; modelViewMatrix[currentTarget] = Tool .convertPose2GLMatrix(trackableResult.getPose()); isTracking[currentTarget] = true; targetPositiveDimensions[currentTarget] = imageTarget.getSize(); // The pose delivers the center of the target, thus the dimensions // go from -width/2 to width/2, same for height temp[0] = targetPositiveDimensions[currentTarget].getData()[0] / 2.0f; temp[1] = targetPositiveDimensions[currentTarget].getData()[1] / 2.0f; targetPositiveDimensions[currentTarget].setData(temp); // If the movie is ready to start playing or it has reached the end // of playback we render the keyframe if ((currentStatus[currentTarget] == VideoPlayerHelper.MEDIA_STATE.READY) || (currentStatus[currentTarget] == VideoPlayerHelper.MEDIA_STATE.REACHED_END) || (currentStatus[currentTarget] == VideoPlayerHelper.MEDIA_STATE.NOT_READY) || (currentStatus[currentTarget] == VideoPlayerHelper.MEDIA_STATE.ERROR)) { float[] modelViewMatrixKeyframe = Tool.convertPose2GLMatrix( trackableResult.getPose()).getData(); float[] modelViewProjectionKeyframe = new float[16]; // Matrix.translateM(modelViewMatrixKeyframe, 0, 0.0f, 0.0f, // targetPositiveDimensions[currentTarget].getData()[0]); // Here we use the aspect ratio of the keyframe since it // is likely that it is not a perfect square float ratio = 1.0f; if (mTextures.get(currentTarget).mSuccess) ratio = keyframeQuadAspectRatio[currentTarget]; else ratio = targetPositiveDimensions[currentTarget].getData()[1] / targetPositiveDimensions[currentTarget].getData()[0]; Matrix.scaleM(modelViewMatrixKeyframe, 0, targetPositiveDimensions[currentTarget].getData()[0], targetPositiveDimensions[currentTarget].getData()[0] * ratio, targetPositiveDimensions[currentTarget].getData()[0]); Matrix.multiplyMM(modelViewProjectionKeyframe, 0, vuforiaAppSession.getProjectionMatrix().getData(), 0, modelViewMatrixKeyframe, 0); GLES20.glUseProgram(keyframeShaderID); // Prepare for rendering the keyframe GLES20.glVertexAttribPointer(keyframeVertexHandle, 3, GLES20.GL_FLOAT, false, 0, quadVertices); GLES20.glVertexAttribPointer(keyframeNormalHandle, 3, GLES20.GL_FLOAT, false, 0, quadNormals); GLES20.glVertexAttribPointer(keyframeTexCoordHandle, 2, GLES20.GL_FLOAT, false, 0, quadTexCoords); GLES20.glEnableVertexAttribArray(keyframeVertexHandle); GLES20.glEnableVertexAttribArray(keyframeNormalHandle); GLES20.glEnableVertexAttribArray(keyframeTexCoordHandle); 
GLES20.glActiveTexture(GLES20.GL_TEXTURE0); // The first loaded texture from the assets folder is the // keyframe GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures.get(currentTarget).mTextureID[0]); GLES20.glUniformMatrix4fv(keyframeMVPMatrixHandle, 1, false, modelViewProjectionKeyframe, 0); GLES20.glUniform1i(keyframeTexSampler2DHandle, 0); // Render GLES20.glDrawElements(GLES20.GL_TRIANGLES, NUM_QUAD_INDEX, GLES20.GL_UNSIGNED_SHORT, quadIndices); GLES20.glDisableVertexAttribArray(keyframeVertexHandle); GLES20.glDisableVertexAttribArray(keyframeNormalHandle); GLES20.glDisableVertexAttribArray(keyframeTexCoordHandle); GLES20.glUseProgram(0); } else // In any other case, such as playing or paused, we render // the actual contents { float[] modelViewMatrixVideo = Tool.convertPose2GLMatrix( trackableResult.getPose()).getData(); float[] modelViewProjectionVideo = new float[16]; // Matrix.translateM(modelViewMatrixVideo, 0, 0.0f, 0.0f, // targetPositiveDimensions[currentTarget].getData()[0]); // Here we use the aspect ratio of the video frame Matrix.scaleM(modelViewMatrixVideo, 0, targetPositiveDimensions[currentTarget].getData()[0], targetPositiveDimensions[currentTarget].getData()[0] * videoQuadAspectRatio[currentTarget], targetPositiveDimensions[currentTarget].getData()[0]); Matrix.multiplyMM(modelViewProjectionVideo, 0, vuforiaAppSession.getProjectionMatrix().getData(), 0, modelViewMatrixVideo, 0); GLES20.glUseProgram(videoPlaybackShaderID); // Prepare for rendering the keyframe GLES20.glVertexAttribPointer(videoPlaybackVertexHandle, 3, GLES20.GL_FLOAT, false, 0, quadVertices); GLES20.glVertexAttribPointer(videoPlaybackNormalHandle, 3, GLES20.GL_FLOAT, false, 0, quadNormals); if (imageTarget.getName().compareTo("stones") == 0) GLES20.glVertexAttribPointer(videoPlaybackTexCoordHandle, 2, GLES20.GL_FLOAT, false, 0, fillBuffer(videoQuadTextureCoordsTransformedStones)); else GLES20.glVertexAttribPointer(videoPlaybackTexCoordHandle, 2, GLES20.GL_FLOAT, false, 0, fillBuffer(videoQuadTextureCoordsTransformedChips)); GLES20.glEnableVertexAttribArray(videoPlaybackVertexHandle); GLES20.glEnableVertexAttribArray(videoPlaybackNormalHandle); GLES20.glEnableVertexAttribArray(videoPlaybackTexCoordHandle); GLES20.glActiveTexture(GLES20.GL_TEXTURE0); // IMPORTANT: // Notice here that the texture that we are binding is not the // typical GL_TEXTURE_2D but instead the GL_TEXTURE_EXTERNAL_OES GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, videoPlaybackTextureID[currentTarget]); GLES20.glUniformMatrix4fv(videoPlaybackMVPMatrixHandle, 1, false, modelViewProjectionVideo, 0); GLES20.glUniform1i(videoPlaybackTexSamplerOESHandle, 0); // Render GLES20.glDrawElements(GLES20.GL_TRIANGLES, NUM_QUAD_INDEX, GLES20.GL_UNSIGNED_SHORT, quadIndices); GLES20.glDisableVertexAttribArray(videoPlaybackVertexHandle); GLES20.glDisableVertexAttribArray(videoPlaybackNormalHandle); GLES20.glDisableVertexAttribArray(videoPlaybackTexCoordHandle); GLES20.glUseProgram(0); } // The following section renders the icons. 
The actual textures used // are loaded from the assets folder if ((currentStatus[currentTarget] == VideoPlayerHelper.MEDIA_STATE.READY) || (currentStatus[currentTarget] == VideoPlayerHelper.MEDIA_STATE.REACHED_END) || (currentStatus[currentTarget] == VideoPlayerHelper.MEDIA_STATE.PAUSED) || (currentStatus[currentTarget] == VideoPlayerHelper.MEDIA_STATE.NOT_READY) || (currentStatus[currentTarget] == VideoPlayerHelper.MEDIA_STATE.ERROR)) { // If the movie is ready to be played, pause, has reached end or // is not // ready then we display one of the icons float[] modelViewMatrixButton = Tool.convertPose2GLMatrix( trackableResult.getPose()).getData(); float[] modelViewProjectionButton = new float[16]; GLES20.glDepthFunc(GLES20.GL_LEQUAL); GLES20.glEnable(GLES20.GL_BLEND); GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA); // The inacuracy of the rendering process in some devices means // that // even if we use the "Less or Equal" version of the depth // function // it is likely that we will get ugly artifacts // That is the translation in the Z direction is slightly // different // Another posibility would be to use a depth func "ALWAYS" but // that is typically not a good idea Matrix .translateM( modelViewMatrixButton, 0, 0.0f, 0.0f, targetPositiveDimensions[currentTarget].getData()[1] / 10.98f); Matrix .scaleM( modelViewMatrixButton, 0, (targetPositiveDimensions[currentTarget].getData()[1] / 2.0f), (targetPositiveDimensions[currentTarget].getData()[1] / 2.0f), (targetPositiveDimensions[currentTarget].getData()[1] / 2.0f)); Matrix.multiplyMM(modelViewProjectionButton, 0, vuforiaAppSession.getProjectionMatrix().getData(), 0, modelViewMatrixButton, 0); GLES20.glUseProgram(keyframeShaderID); GLES20.glVertexAttribPointer(keyframeVertexHandle, 3, GLES20.GL_FLOAT, false, 0, quadVertices); GLES20.glVertexAttribPointer(keyframeNormalHandle, 3, GLES20.GL_FLOAT, false, 0, quadNormals); GLES20.glVertexAttribPointer(keyframeTexCoordHandle, 2, GLES20.GL_FLOAT, false, 0, quadTexCoords); GLES20.glEnableVertexAttribArray(keyframeVertexHandle); GLES20.glEnableVertexAttribArray(keyframeNormalHandle); GLES20.glEnableVertexAttribArray(keyframeTexCoordHandle); GLES20.glActiveTexture(GLES20.GL_TEXTURE0); // Depending on the status in which we are we choose the // appropriate // texture to display. 
Notice that unlike the video these are // regular // GL_TEXTURE_2D textures switch (currentStatus[currentTarget]) { case READY: GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures.get(2).mTextureID[0]); break; case REACHED_END: GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures.get(2).mTextureID[0]); break; case PAUSED: GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures.get(2).mTextureID[0]); break; case NOT_READY: GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures.get(3).mTextureID[0]); break; case ERROR: GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures.get(4).mTextureID[0]); break; default: GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, mTextures.get(3).mTextureID[0]); break; } GLES20.glUniformMatrix4fv(keyframeMVPMatrixHandle, 1, false, modelViewProjectionButton, 0); GLES20.glUniform1i(keyframeTexSampler2DHandle, 0); // Render GLES20.glDrawElements(GLES20.GL_TRIANGLES, NUM_QUAD_INDEX, GLES20.GL_UNSIGNED_SHORT, quadIndices); GLES20.glDisableVertexAttribArray(keyframeVertexHandle); GLES20.glDisableVertexAttribArray(keyframeNormalHandle); GLES20.glDisableVertexAttribArray(keyframeTexCoordHandle); GLES20.glUseProgram(0); // Finally we return the depth func to its original state GLES20.glDepthFunc(GLES20.GL_LESS); GLES20.glDisable(GLES20.GL_BLEND); } SampleUtils.checkGLError("VideoPlayback renderFrame"); } GLES20.glDisable(GLES20.GL_DEPTH_TEST); Renderer.getInstance().end(); } boolean isTapOnScreenInsideTarget(int target, float x, float y) { // Here we calculate that the touch event is inside the target Vec3F intersection; // Vec3F lineStart = new Vec3F(); // Vec3F lineEnd = new Vec3F(); DisplayMetrics metrics = new DisplayMetrics(); mActivity.getWindowManager().getDefaultDisplay().getMetrics(metrics); intersection = SampleMath.getPointToPlaneIntersection(SampleMath .Matrix44FInverse(vuforiaAppSession.getProjectionMatrix()), modelViewMatrix[target], metrics.widthPixels, metrics.heightPixels, new Vec2F(x, y), new Vec3F(0, 0, 0), new Vec3F(0, 0, 1)); // The target returns as pose the center of the trackable. The following // if-statement simply checks that the tap is within this range if ((intersection.getData()[0] >= -(targetPositiveDimensions[target] .getData()[0])) && (intersection.getData()[0] <= (targetPositiveDimensions[target] .getData()[0])) && (intersection.getData()[1] >= -(targetPositiveDimensions[target] .getData()[1])) && (intersection.getData()[1] <= (targetPositiveDimensions[target] .getData()[1]))) return true; else return false; } void setVideoDimensions(int target, float videoWidth, float videoHeight, float[] textureCoordMatrix) { // The quad originaly comes as a perfect square, however, the video // often has a different aspect ration such as 4:3 or 16:9, // To mitigate this we have two options: // 1) We can either scale the width (typically up) // 2) We can scale the height (typically down) // Which one to use is just a matter of preference. This example scales // the height down. 
// (see the render call in renderFrame) videoQuadAspectRatio[target] = videoHeight / videoWidth; float mtx[] = textureCoordMatrix; float tempUVMultRes[] = new float[2]; if (target == VideoPlayback.STONES) { tempUVMultRes = uvMultMat4f( videoQuadTextureCoordsTransformedStones[0], videoQuadTextureCoordsTransformedStones[1], videoQuadTextureCoords[0], videoQuadTextureCoords[1], mtx); videoQuadTextureCoordsTransformedStones[0] = tempUVMultRes[0]; videoQuadTextureCoordsTransformedStones[1] = tempUVMultRes[1]; tempUVMultRes = uvMultMat4f( videoQuadTextureCoordsTransformedStones[2], videoQuadTextureCoordsTransformedStones[3], videoQuadTextureCoords[2], videoQuadTextureCoords[3], mtx); videoQuadTextureCoordsTransformedStones[2] = tempUVMultRes[0]; videoQuadTextureCoordsTransformedStones[3] = tempUVMultRes[1]; tempUVMultRes = uvMultMat4f( videoQuadTextureCoordsTransformedStones[4], videoQuadTextureCoordsTransformedStones[5], videoQuadTextureCoords[4], videoQuadTextureCoords[5], mtx); videoQuadTextureCoordsTransformedStones[4] = tempUVMultRes[0]; videoQuadTextureCoordsTransformedStones[5] = tempUVMultRes[1]; tempUVMultRes = uvMultMat4f( videoQuadTextureCoordsTransformedStones[6], videoQuadTextureCoordsTransformedStones[7], videoQuadTextureCoords[6], videoQuadTextureCoords[7], mtx); videoQuadTextureCoordsTransformedStones[6] = tempUVMultRes[0]; videoQuadTextureCoordsTransformedStones[7] = tempUVMultRes[1]; } else if (target == VideoPlayback.CHIPS) { tempUVMultRes = uvMultMat4f( videoQuadTextureCoordsTransformedChips[0], videoQuadTextureCoordsTransformedChips[1], videoQuadTextureCoords[0], videoQuadTextureCoords[1], mtx); videoQuadTextureCoordsTransformedChips[0] = tempUVMultRes[0]; videoQuadTextureCoordsTransformedChips[1] = tempUVMultRes[1]; tempUVMultRes = uvMultMat4f( videoQuadTextureCoordsTransformedChips[2], videoQuadTextureCoordsTransformedChips[3], videoQuadTextureCoords[2], videoQuadTextureCoords[3], mtx); videoQuadTextureCoordsTransformedChips[2] = tempUVMultRes[0]; videoQuadTextureCoordsTransformedChips[3] = tempUVMultRes[1]; tempUVMultRes = uvMultMat4f( videoQuadTextureCoordsTransformedChips[4], videoQuadTextureCoordsTransformedChips[5], videoQuadTextureCoords[4], videoQuadTextureCoords[5], mtx); videoQuadTextureCoordsTransformedChips[4] = tempUVMultRes[0]; videoQuadTextureCoordsTransformedChips[5] = tempUVMultRes[1]; tempUVMultRes = uvMultMat4f( videoQuadTextureCoordsTransformedChips[6], videoQuadTextureCoordsTransformedChips[7], videoQuadTextureCoords[6], videoQuadTextureCoords[7], mtx); videoQuadTextureCoordsTransformedChips[6] = tempUVMultRes[0]; videoQuadTextureCoordsTransformedChips[7] = tempUVMultRes[1]; } // textureCoordMatrix = mtx; } // Multiply the UV coordinates by the given transformation matrix float[] uvMultMat4f(float transformedU, float transformedV, float u, float v, float[] pMat) { float x = pMat[0] * u + pMat[4] * v /* + pMat[ 8]*0.f */+ pMat[12] * 1.f; float y = pMat[1] * u + pMat[5] * v /* + pMat[ 9]*0.f */+ pMat[13] * 1.f; // float z = pMat[2]*u + pMat[6]*v + pMat[10]*0.f + pMat[14]*1.f; // We // dont need z and w so we comment them out // float w = pMat[3]*u + pMat[7]*v + pMat[11]*0.f + pMat[15]*1.f; float result[] = new float[2]; // transformedU = x; // transformedV = y; result[0] = x; result[1] = y; return result; } void setStatus(int target, int value) { // Transform the value passed from java to our own values switch (value) { case 0: currentStatus[target] = VideoPlayerHelper.MEDIA_STATE.REACHED_END; break; case 1: currentStatus[target] = 
VideoPlayerHelper.MEDIA_STATE.PAUSED; break; case 2: currentStatus[target] = VideoPlayerHelper.MEDIA_STATE.STOPPED; break; case 3: currentStatus[target] = VideoPlayerHelper.MEDIA_STATE.PLAYING; break; case 4: currentStatus[target] = VideoPlayerHelper.MEDIA_STATE.READY; break; case 5: currentStatus[target] = VideoPlayerHelper.MEDIA_STATE.NOT_READY; break; case 6: currentStatus[target] = VideoPlayerHelper.MEDIA_STATE.ERROR; break; default: currentStatus[target] = VideoPlayerHelper.MEDIA_STATE.NOT_READY; break; } } boolean isTracking(int target) { return isTracking[target]; } public void setTextures(Vector<Texture> textures) { mTextures = textures; } @SuppressLint("InlinedApi") void initRendering() { Log.d(LOGTAG, "VideoPlayback VideoPlaybackRenderer initRendering"); // Define clear color GLES20.glClearColor(0.0f, 0.0f, 0.0f, Vuforia.requiresAlpha() ? 0.0f : 1.0f); // Now generate the OpenGL texture objects and add settings for (Texture t : mTextures) { // Here we create the textures for the keyframe // and for all the icons GLES20.glGenTextures(1, t.mTextureID, 0); GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, t.mTextureID[0]); GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE); GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE); GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, t.mWidth, t.mHeight, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, t.mData); } // Now we create the texture for the video data from the movie // IMPORTANT: // Notice that the textures are not typical GL_TEXTURE_2D textures // but instead are GL_TEXTURE_EXTERNAL_OES extension textures // This is required by the Android SurfaceTexture for (int i = 0; i < VideoPlayback.NUM_TARGETS; i++) { GLES20.glGenTextures(1, videoPlaybackTextureID, i); GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, videoPlaybackTextureID[i]); GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR); GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR); GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0); } // The first shader is the one that will display the video data of the // movie // (it is aware of the GL_TEXTURE_EXTERNAL_OES extension) videoPlaybackShaderID = SampleUtils.createProgramFromShaderSrc( VideoPlaybackShaders.VIDEO_PLAYBACK_VERTEX_SHADER, VideoPlaybackShaders.VIDEO_PLAYBACK_FRAGMENT_SHADER); videoPlaybackVertexHandle = GLES20.glGetAttribLocation( videoPlaybackShaderID, "vertexPosition"); videoPlaybackNormalHandle = GLES20.glGetAttribLocation( videoPlaybackShaderID, "vertexNormal"); videoPlaybackTexCoordHandle = GLES20.glGetAttribLocation( videoPlaybackShaderID, "vertexTexCoord"); videoPlaybackMVPMatrixHandle = GLES20.glGetUniformLocation( videoPlaybackShaderID, "modelViewProjectionMatrix"); videoPlaybackTexSamplerOESHandle = GLES20.glGetUniformLocation( videoPlaybackShaderID, "texSamplerOES"); // This is a simpler shader with regular 2D textures keyframeShaderID = SampleUtils.createProgramFromShaderSrc( KeyFrameShaders.KEY_FRAME_VERTEX_SHADER, KeyFrameShaders.KEY_FRAME_FRAGMENT_SHADER); keyframeVertexHandle = GLES20.glGetAttribLocation(keyframeShaderID, "vertexPosition"); keyframeNormalHandle = GLES20.glGetAttribLocation(keyframeShaderID, 
"vertexNormal"); keyframeTexCoordHandle = GLES20.glGetAttribLocation(keyframeShaderID, "vertexTexCoord"); keyframeMVPMatrixHandle = GLES20.glGetUniformLocation(keyframeShaderID, "modelViewProjectionMatrix"); keyframeTexSampler2DHandle = GLES20.glGetUniformLocation( keyframeShaderID, "texSampler2D"); keyframeQuadAspectRatio[VideoPlayback.STONES] = (float) mTextures .get(0).mHeight / (float) mTextures.get(0).mWidth; keyframeQuadAspectRatio[VideoPlayback.CHIPS] = (float) mTextures.get(1).mHeight / (float) mTextures.get(1).mWidth; quadVertices = fillBuffer(quadVerticesArray); quadTexCoords = fillBuffer(quadTexCoordsArray); quadIndices = fillBuffer(quadIndicesArray); quadNormals = fillBuffer(quadNormalsArray); } private Buffer fillBuffer(double[] array) { // Convert to floats because OpenGL doesnt work on doubles, and manually // casting each input value would take too much time. ByteBuffer bb = ByteBuffer.allocateDirect(4 * array.length); // each // float // takes 4 // bytes bb.order(ByteOrder.LITTLE_ENDIAN); for (double d : array) bb.putFloat((float) d); bb.rewind(); return bb; } private Buffer fillBuffer(short[] array) { ByteBuffer bb = ByteBuffer.allocateDirect(2 * array.length); // each // short // takes 2 // bytes bb.order(ByteOrder.LITTLE_ENDIAN); for (short s : array) bb.putShort(s); bb.rewind(); return bb; } private Buffer fillBuffer(float[] array) { // Convert to floats because OpenGL doesnt work on doubles, and manually // casting each input value would take too much time. ByteBuffer bb = ByteBuffer.allocateDirect(4 * array.length); // each // float // takes 4 // bytes bb.order(ByteOrder.LITTLE_ENDIAN); for (float d : array) bb.putFloat(d); bb.rewind(); return bb; } } /*******************************************************************************************************************/What I can see is only loading icon. Thanks and RegardsShiv Prakash
Replace Teapot with VideoPlayback in UserDefined Target
Hi, did you manage to do this?
I'm currently trying to do the same thing without success for now ;)
Hey guys, I have done this and I am able to play the video. If anyone is still stuck on it, please let me know. I am currently on Vuforia 4.
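For anyone still stuck on this, the usual sticking point when merging the two samples is target naming. The VideoPlaybackRenderer picks which movie to draw by comparing the trackable name against the fixed "stones"/"chips" targets, but user-defined targets are named by the target builder at runtime (the do/while in startBuild() above always builds a target literally called "stones"). One way to make the mapping explicit is sketched below; the "video-N" naming scheme and the movieIndexForTarget() helper are hypothetical, not something posted in this thread:

// In startBuild(), give every user-defined target a predictable name:
String name;
do {
    name = "video-" + targetBuilderCounter;
    Log.d(LOGTAG, "TRYING " + name);
    targetBuilderCounter++;
} while (!targetBuilder.build(name, 320.0f));

// In the renderer, replace the "stones"/"chips" name comparison with a lookup
// of the movie slot from the trackable name (hypothetical helper):
int movieIndexForTarget(String trackableName) {
    if (trackableName != null && trackableName.startsWith("video-")) {
        try {
            int n = Integer.parseInt(trackableName.substring("video-".length()));
            return n % VideoPlayback.NUM_TARGETS; // wrap around the available movie slots
        } catch (NumberFormatException e) {
            // fall through and use the first movie
        }
    }
    return 0;
}

With something like that in place, renderFrame() would call movieIndexForTarget(imageTarget.getName()) instead of the compareTo("stones") check when choosing which video texture and seek state to use.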