Hi all,
I posted about this a while back, but wanted to include the code now that I can actually post it on the forum. Import the VideoPlayback sample with its prefabs, then replace VideoPlaybackBehaviour.cs with the version below, and remove DefaultTrackableEventHandler.cs from your ImageTarget, attaching TrackableEventHandlerVideos.cs in its place.
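If it helps, here is a small optional sanity check (entirely my own, not part of the sample) you can drop on any object in the scene to confirm that each ImageTarget ended up with the new handler attached and a video plane under it after the swap:

// Hypothetical helper, not part of the Vuforia sample: logs whether each
// ImageTarget in the scene has TrackableEventHandlerVideos attached and a
// VideoPlaybackBehaviour somewhere in its children.
using UnityEngine;

public class VideoSetupCheck : MonoBehaviour
{
    void Start()
    {
        ImageTargetBehaviour[] targets =
            (ImageTargetBehaviour[]) FindObjectsOfType(typeof(ImageTargetBehaviour));

        foreach (ImageTargetBehaviour target in targets)
        {
            bool hasHandler = target.GetComponent<TrackableEventHandlerVideos>() != null;
            bool hasVideo = target.GetComponentInChildren<VideoPlaybackBehaviour>() != null;
            Debug.Log(target.name + " - handler: " + hasHandler + ", video: " + hasVideo);
        }
    }
}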
TrackableEventHandlerVideos.cs
/*==============================================================================
Copyright (c) 2012 QUALCOMM Austria Research Center GmbH.
All Rights Reserved.
Qualcomm Confidential and Proprietary

This Vuforia(TM) sample application in source code form ("Sample Code") for the
Vuforia Software Development Kit and/or Vuforia Extension for Unity
(collectively, the "Vuforia SDK") may in all cases only be used in conjunction
with use of the Vuforia SDK, and is subject in all respects to all of the terms
and conditions of the Vuforia SDK License Agreement, which may be found at
https://ar.qualcomm.at/legal/license.

By retaining or using the Sample Code in any manner, you confirm your agreement
to all the terms and conditions of the Vuforia SDK License Agreement. If you do
not agree to all the terms and conditions of the Vuforia SDK License Agreement,
then you may not retain or use any of the Sample Code in any manner.
==============================================================================*/

using UnityEngine;

// A custom handler that implements the ITrackableEventHandler interface.
public class TrackableEventHandlerVideos : MonoBehaviour,
                                           ITrackableEventHandler
{
    #region PRIVATE_MEMBER_VARIABLES

    private TrackableBehaviour mTrackableBehaviour;
    private bool mHasBeenFound = false;
    private bool mLostTracking;
    private float mSecondsSinceLost;
    private float distanceToCamera;
    private float mVideoCurrentPosition;
    private float mCurrentVolume;
    private Transform mSphere;

    #endregion // PRIVATE_MEMBER_VARIABLES

    #region UNITY_MONOBEHAVIOUR_METHODS

    void Start()
    {
        Transform[] allChildren = GetComponentsInChildren<Transform>();
        foreach (Transform child in allChildren)
        {
            // do whatever with child transform here
            if (child.name == "Sphere")
                mSphere = child;
        }

        mTrackableBehaviour = GetComponent<TrackableBehaviour>();
        if (mTrackableBehaviour)
        {
            mTrackableBehaviour.RegisterTrackableEventHandler(this);
        }

        OnTrackingLost();
    }

    void Update()
    {
        //for testing audio levels while in editor
        //distanceToCamera = Vector3.Distance(Camera.main.transform.position, transform.root.position);
        //Debug.Log(Mathf.Clamp01(1.0f - distanceToCamera * 0.01f));

        //To spatialize audio: check if component is available, then on update set volume to normalized distance from tracker.
        if (mHasBeenFound)
        {
            if (mSphere)
                mSphere.Rotate(0.0f, -0.2666f, 0.0f);

            VideoPlaybackBehaviour video = GetComponentInChildren<VideoPlaybackBehaviour>();
            //Debug.Log("Video on " + transform.root.name + " is " + video.m_path);

            if (video != null && video.CurrentState == VideoPlayerHelper.MediaState.PLAYING &&
                mLostTracking == false && mHasBeenFound == true)
            {
                distanceToCamera = Vector3.Distance(Camera.main.transform.position, transform.root.position);
                //Debug.Log(distanceToCamera);
                mCurrentVolume = 1.0f - (Mathf.Clamp01(distanceToCamera * 0.006f) * 0.5f);
                video.VideoPlayer.SetVolume(mCurrentVolume);
            }

            //Loop automatically if marker is visible and video has reached the end
            //comment this out if you want the play button to appear when the video has reached the end
            if (video != null && video.CurrentState == VideoPlayerHelper.MediaState.REACHED_END &&
                mLostTracking == false && mHasBeenFound == true)
            {
                video.VideoPlayer.Play(false, 0);
            }
        }

        // Pause the video if tracking is lost for more than n seconds
        if (mHasBeenFound && mLostTracking)
        {
            VideoPlaybackBehaviour video = GetComponentInChildren<VideoPlaybackBehaviour>();

            if (video != null && video.CurrentState == VideoPlayerHelper.MediaState.PLAYING)
            {
                //fade out volume from current if marker is lost
                //Debug.Log(mCurrentVolume - mSecondsSinceLost);
                video.VideoPlayer.SetVolume(Mathf.Clamp01(mCurrentVolume - mSecondsSinceLost));
            }

            //n.0f is number of seconds before playback stops when marker is lost
            if (mSecondsSinceLost > 1.0f)
            {
                if (video != null && video.CurrentState == VideoPlayerHelper.MediaState.PLAYING)
                {
                    //get last position so it can resume after video is unloaded and reloaded.
                    mVideoCurrentPosition = video.VideoPlayer.GetCurrentPosition();
                    video.VideoPlayer.Pause();
                    if (video.VideoPlayer.Unload())
                    {
                        Debug.Log("UnLoaded Video: " + video.m_path);
                    }
                }
                mLostTracking = false;
            }

            mSecondsSinceLost += Time.deltaTime;
        }
    }

    #endregion // UNITY_MONOBEHAVIOUR_METHODS

    #region PUBLIC_METHODS

    // Implementation of the ITrackableEventHandler function called when the
    // tracking state changes.
    public void OnTrackableStateChanged(
                                    TrackableBehaviour.Status previousStatus,
                                    TrackableBehaviour.Status newStatus)
    {
        if (newStatus == TrackableBehaviour.Status.DETECTED ||
            newStatus == TrackableBehaviour.Status.TRACKED)
        {
            OnTrackingFound();
        }
        else
        {
            OnTrackingLost();
        }
    }

    #endregion // PUBLIC_METHODS

    #region PRIVATE_METHODS

    private void OnTrackingFound()
    {
        Renderer[] rendererComponents = GetComponentsInChildren<Renderer>();
        Collider[] colliderComponents = GetComponentsInChildren<Collider>();
        AudioSource[] audioComponents = GetComponentsInChildren<AudioSource>();

        // Enable rendering:
        foreach (Renderer component in rendererComponents)
        {
            component.enabled = true;
        }

        // Enable colliders:
        foreach (Collider component in colliderComponents)
        {
            component.enabled = true;
        }

        //Play audio:
        foreach (AudioSource component in audioComponents)
        {
            component.audio.Play();
        }

        Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " found");

        // Optionally play the video automatically when the target is found
        VideoPlaybackBehaviour video = GetComponentInChildren<VideoPlaybackBehaviour>();
        if (video != null)
        {
            //load Video on tracking, use local variable to skip to position left off at pause
            if (video.VideoPlayer.Load(video.m_path, VideoPlayerHelper.MediaType.ON_TEXTURE, true, mVideoCurrentPosition))
            {
                Debug.Log("Loaded Video: " + video.m_path);
            }

            // Play this video on texture where it left off
            if (video.VideoPlayer.IsPlayableOnTexture())
            {
                VideoPlayerHelper.MediaState state = video.VideoPlayer.GetStatus();
                if (state == VideoPlayerHelper.MediaState.PAUSED ||
                    state == VideoPlayerHelper.MediaState.READY ||
                    state == VideoPlayerHelper.MediaState.STOPPED)
                {
                    // Pause other videos before playing this one
                    //PauseOtherVideos(video);

                    video.VideoPlayer.Play(false, video.VideoPlayer.GetCurrentPosition());
                }
                else if (state == VideoPlayerHelper.MediaState.REACHED_END)
                {
                    // Pause other videos before playing this one
                    //PauseOtherVideos(video);

                    // Play this video from the beginning
                    video.VideoPlayer.Play(false, 0);
                }
            }
        }

        mHasBeenFound = true;
        mLostTracking = false;
    }

    private void OnTrackingLost()
    {
        Renderer[] rendererComponents = GetComponentsInChildren<Renderer>();
        Collider[] colliderComponents = GetComponentsInChildren<Collider>();
        AudioSource[] audioComponents = GetComponentsInChildren<AudioSource>();

        // Disable rendering:
        foreach (Renderer component in rendererComponents)
        {
            component.enabled = false;
        }

        // Disable colliders:
        foreach (Collider component in colliderComponents)
        {
            component.enabled = false;
        }

        //Pause Audio:
        foreach (AudioSource component in audioComponents)
        {
            component.audio.Pause();
        }

        Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " lost");

        mLostTracking = true;
        mSecondsSinceLost = 0;
    }

    // Pause all videos except this one
    private void PauseOtherVideos(VideoPlaybackBehaviour currentVideo)
    {
        VideoPlaybackBehaviour[] videos = (VideoPlaybackBehaviour[])
                FindObjectsOfType(typeof(VideoPlaybackBehaviour));

        foreach (VideoPlaybackBehaviour video in videos)
        {
            if (video != currentVideo)
            {
                if (video.CurrentState == VideoPlayerHelper.MediaState.PLAYING)
                {
                    video.VideoPlayer.Pause();
                }
            }
        }
    }

    #endregion // PRIVATE_METHODS
}
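In case anyone wants to tune the audio falloff, here is a minimal sketch (the class and method names are mine, not part of the sample) of the distance-to-volume mapping used in Update() above. With the 0.006f factor and the 0.5f scale, volume falls from 1.0 right at the marker down to 0.5 once the camera is roughly 167 scene units away, where the Clamp01 saturates; change those constants to match your scene scale.

// Minimal sketch of the volume falloff used in TrackableEventHandlerVideos.Update().
using UnityEngine;

public static class VolumeCurveDemo
{
    // Same mapping as in Update(): 1.0 at the marker, floored at 0.5 far away.
    public static float VolumeForDistance(float distanceToCamera)
    {
        return 1.0f - (Mathf.Clamp01(distanceToCamera * 0.006f) * 0.5f);
    }

    public static void LogSamples()
    {
        // Logs e.g. 0 -> 1.0, 50 -> 0.85, 100 -> 0.7, 167 -> 0.5, 300 -> 0.5
        foreach (float d in new float[] { 0f, 50f, 100f, 167f, 300f })
        {
            Debug.Log("distance " + d + " -> volume " + VolumeForDistance(d));
        }
    }
}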
VideoPlaybackBehaviour.cs
/*==============================================================================
Copyright (c) 2012 QUALCOMM Austria Research Center GmbH.
All Rights Reserved.
Qualcomm Confidential and Proprietary

This Vuforia(TM) sample application in source code form ("Sample Code") for the
Vuforia Software Development Kit and/or Vuforia Extension for Unity
(collectively, the "Vuforia SDK") may in all cases only be used in conjunction
with use of the Vuforia SDK, and is subject in all respects to all of the terms
and conditions of the Vuforia SDK License Agreement, which may be found at
https://ar.qualcomm.at/legal/license.

By retaining or using the Sample Code in any manner, you confirm your agreement
to all the terms and conditions of the Vuforia SDK License Agreement. If you do
not agree to all the terms and conditions of the Vuforia SDK License Agreement,
then you may not retain or use any of the Sample Code in any manner.
==============================================================================*/

using UnityEngine;
using System.Collections;

public class VideoPlaybackBehaviour : MonoBehaviour
{
    #region PUBLIC_MEMBER_VARIABLES

    // URL of the video, either a path to a local file or a remote address
    public string m_path = null;

    // Texture for the play icon
    public Texture m_playTexture = null;

    // Texture for the busy icon
    public Texture m_busyTexture = null;

    // Texture for the error icon
    public Texture m_errorTexture = null;

    #endregion // PUBLIC_MEMBER_VARIABLES

    #region PRIVATE_MEMBER_VARIABLES

    private VideoPlayerHelper mVideoPlayer = null;
    private bool mIsInited = false;
    private bool mIsPrepared = false;

    private Texture2D mVideoTexture = null;

    [SerializeField]
    [HideInInspector]
    private Texture mKeyframeTexture = null;

    private VideoPlayerHelper.MediaType mMediaType =
            VideoPlayerHelper.MediaType.ON_TEXTURE_FULLSCREEN;

    private VideoPlayerHelper.MediaState mCurrentState =
            VideoPlayerHelper.MediaState.NOT_READY;

    private float mSeekPosition = 0.0f;

    private bool isPlayableOnTexture;

    private GameObject mIconPlane = null;
    private bool mIconPlaneActive = false;

    #endregion // PRIVATE_MEMBER_VARIABLES

    #region PROPERTIES

    // Returns the video player
    public VideoPlayerHelper VideoPlayer
    {
        get { return mVideoPlayer; }
    }

    // Returns the current playback state
    public VideoPlayerHelper.MediaState CurrentState
    {
        get { return mCurrentState; }
    }

    // Type of playback (on-texture only, fullscreen only, or both)
    public VideoPlayerHelper.MediaType MediaType
    {
        get { return mMediaType; }
        set { mMediaType = value; }
    }

    // Texture displayed before video playback begins
    public Texture KeyframeTexture
    {
        get { return mKeyframeTexture; }
        set { mKeyframeTexture = value; }
    }

    #endregion // PROPERTIES

    #region UNITY_MONOBEHAVIOUR_METHODS

    void Start()
    {
        // A filename or url must be set in the inspector
        if (m_path == null || m_path.Length == 0)
        {
            Debug.Log("Please set a video url in the Inspector");
            this.enabled = false;
        }

        // Create the video player and set the filename
        mVideoPlayer = new VideoPlayerHelper();
        mVideoPlayer.SetFilename(m_path);

        // Find the icon plane (child of this object)
        mIconPlane = transform.Find("Icon").gameObject;

        // Set the current state to Not Ready
        HandleStateChange(VideoPlayerHelper.MediaState.NOT_READY);
        mCurrentState = VideoPlayerHelper.MediaState.NOT_READY;

        // Flip the plane as the video texture is mirrored on the horizontal
        transform.localScale = new Vector3(-1 * Mathf.Abs(transform.localScale.x),
                transform.localScale.y, transform.localScale.z);

        // Scale the icon
        ScaleIcon();
    }

    void Update()
    {
        if (!mIsInited)
        {
            // Initialize the video player
            if (mVideoPlayer.Init() == false)
            {
                Debug.Log("Could not initialize video player");
                HandleStateChange(VideoPlayerHelper.MediaState.ERROR);
                this.enabled = false;
                return;
            }

            // Initialize the video texture
            InitVideoTexture();

            // Load the video
            /*
            if (mVideoPlayer.Load(m_path, mMediaType, false, 0) == false)
            {
                Debug.Log("ERR1: Could not load video '" + m_path + "' for media type " + mMediaType);
                HandleStateChange(VideoPlayerHelper.MediaState.ERROR);
                this.enabled = false;
                return;
            }
            */

            // Successfully initialized
            mIsInited = true;
        }
        else if (!mIsPrepared)
        {
            // Get the video player status
            VideoPlayerHelper.MediaState state = mVideoPlayer.GetStatus();

            if (state == VideoPlayerHelper.MediaState.ERROR)
            {
                Debug.Log("ERR2: Could not load video '" + m_path + "' for media type " + mMediaType);
                HandleStateChange(VideoPlayerHelper.MediaState.ERROR);
                this.enabled = false;
            }
            else if (state < VideoPlayerHelper.MediaState.NOT_READY)
            {
                // Video player is ready

                // Can we play this video on a texture?
                isPlayableOnTexture = mVideoPlayer.IsPlayableOnTexture();

                if (isPlayableOnTexture)
                {
                    // Pass the video texture id to the video player
                    int nativeTextureID = mVideoTexture.GetNativeTextureID();
                    mVideoPlayer.SetVideoTextureID(nativeTextureID);

                    // Get the video width and height
                    int videoWidth = mVideoPlayer.GetVideoWidth();
                    int videoHeight = mVideoPlayer.GetVideoHeight();

                    if (videoWidth > 0 && videoHeight > 0)
                    {
                        // Scale the video plane to match the video aspect ratio
                        // float aspect = videoHeight / (float) videoWidth;

                        // Flip the plane as the video texture is mirrored on the horizontal
                        // transform.localScale = new Vector3(-0.1f, 0.1f, 0.1f * aspect);
                    }

                    // Seek ahead if necessary
                    if (mSeekPosition > 0)
                    {
                        mVideoPlayer.SeekTo(mSeekPosition);
                    }
                }
                else
                {
                    // Handle the state change
                    state = mVideoPlayer.GetStatus();
                    HandleStateChange(state);
                    mCurrentState = state;
                }

                // Scale the icon
                ScaleIcon();

                // Video is prepared, ready for playback
                mIsPrepared = true;
            }
        }
        else
        {
            if (isPlayableOnTexture)
            {
                // Update the video texture with the latest video frame
                VideoPlayerHelper.MediaState state = mVideoPlayer.UpdateVideoData();

                // Check for playback state change
                if (state != mCurrentState)
                {
                    HandleStateChange(state);
                    mCurrentState = state;
                }
            }
            else
            {
                // Get the current status
                VideoPlayerHelper.MediaState state = mVideoPlayer.GetStatus();

                // Check for playback state change
                if (state != mCurrentState)
                {
                    HandleStateChange(state);
                    mCurrentState = state;
                }
            }
        }

        CheckIconPlaneVisibility();
    }

    void OnApplicationPause(bool pause)
    {
        if (pause)
        {
            // Handle pause event natively
            mVideoPlayer.OnPause();

            // Store the playback position for later
            mSeekPosition = mVideoPlayer.GetCurrentPosition();

            // Deinit the video
            mVideoPlayer.Deinit();

            // Reset initialization parameters
            mIsInited = false;
            mIsPrepared = false;

            // Set the current state to Not Ready
            HandleStateChange(VideoPlayerHelper.MediaState.NOT_READY);
            mCurrentState = VideoPlayerHelper.MediaState.NOT_READY;
        }
    }

    void OnDestroy()
    {
        // Deinit the video
        mVideoPlayer.Deinit();
    }

    #endregion // UNITY_MONOBEHAVIOUR_METHODS

    #region PUBLIC_METHODS

    public void ShowBusyIcon()
    {
        mIconPlane.renderer.material.mainTexture = m_busyTexture;
    }

    #endregion // PUBLIC_METHODS

    #region PRIVATE_METHODS

    // Initialize the video texture
    private void InitVideoTexture()
    {
        // Create texture of size 0 that will be updated in the plugin (we allocate buffers in native code)
        mVideoTexture = new Texture2D(0, 0, TextureFormat.RGB565, false);
        mVideoTexture.filterMode = FilterMode.Bilinear;
        mVideoTexture.wrapMode = TextureWrapMode.Clamp;
    }

    // Handle video playback state changes
    private void HandleStateChange(VideoPlayerHelper.MediaState newState)
    {
        // If the movie is playing or paused render the video texture
        // Otherwise render the keyframe
        if (newState == VideoPlayerHelper.MediaState.PLAYING ||
            newState == VideoPlayerHelper.MediaState.PAUSED)
        {
            renderer.material.mainTexture = mVideoTexture;
            renderer.material.mainTextureScale = new Vector2(1, 1);
        }
        else
        {
            if (mKeyframeTexture != null)
            {
                renderer.material.mainTexture = mKeyframeTexture;
                renderer.material.mainTextureScale = new Vector2(1, -1);
            }
        }

        // Display the appropriate icon, or disable if not needed
        switch (newState)
        {
            case VideoPlayerHelper.MediaState.READY:
            case VideoPlayerHelper.MediaState.REACHED_END:
            case VideoPlayerHelper.MediaState.PAUSED:
            case VideoPlayerHelper.MediaState.STOPPED:
                mIconPlane.renderer.material.mainTexture = m_playTexture;
                mIconPlaneActive = true;
                break;

            case VideoPlayerHelper.MediaState.NOT_READY:
            case VideoPlayerHelper.MediaState.PLAYING_FULLSCREEN:
                mIconPlane.renderer.material.mainTexture = m_busyTexture;
                mIconPlaneActive = true;
                break;

            case VideoPlayerHelper.MediaState.ERROR:
                mIconPlane.renderer.material.mainTexture = m_errorTexture;
                mIconPlaneActive = true;
                break;

            default:
                mIconPlaneActive = false;
                break;
        }

        if (newState == VideoPlayerHelper.MediaState.PLAYING_FULLSCREEN)
        {
            // Switching to full screen, disable QCARBehaviour (only applicable for iOS)
            QCARBehaviour qcarBehaviour = (QCARBehaviour) FindObjectOfType(typeof(QCARBehaviour));
            qcarBehaviour.enabled = false;
        }
        else if (mCurrentState == VideoPlayerHelper.MediaState.PLAYING_FULLSCREEN)
        {
            // Switching away from full screen, enable QCARBehaviour (only applicable for iOS)
            QCARBehaviour qcarBehaviour = (QCARBehaviour) FindObjectOfType(typeof(QCARBehaviour));
            qcarBehaviour.enabled = true;
        }
    }

    private void ScaleIcon()
    {
        // Icon should fill 50% of the narrowest side of the video
        float videoWidth = Mathf.Abs(transform.localScale.x);
        float videoHeight = Mathf.Abs(transform.localScale.z);
        float iconWidth, iconHeight;

        if (videoWidth > videoHeight)
        {
            iconWidth = 0.5f * videoHeight / videoWidth;
            iconHeight = 0.5f;
        }
        else
        {
            iconWidth = 0.5f;
            iconHeight = 0.5f * videoWidth / videoHeight;
        }

        mIconPlane.transform.localScale = new Vector3(-iconWidth, 1.0f, iconHeight);
    }

    private void CheckIconPlaneVisibility()
    {
        // If the video object renderer is currently enabled, we might need to toggle the icon plane visibility
        if (renderer.enabled)
        {
            // Check if the icon plane renderer has to be disabled explicitly in case it was enabled by another script (e.g. TrackableEventHandler)
            if (mIconPlane.renderer.enabled != mIconPlaneActive)
                mIconPlane.renderer.enabled = mIconPlaneActive;
        }
    }

    #endregion // PRIVATE_METHODS
}
Thanks for sharing this. Happy to hear that the new "Code editor" feature of the Forum has enabled you to post it.