
[sharing] VideoPlaybackBehaviour and loading on runtime

February 11, 2013 - 7:40am #1

Hi all, 

 

I posted this a while back, but I wanted to include it now that I can actually post code on the forum. Import the VideoPlayback sample with its prefabs, then replace VideoPlaybackBehaviour with the version below, and on your ImageTarget remove DefaultTrackableEventHandler.cs and replace it with TrackableEventHandlerVideos.cs.

 

TrackableEventHandlerVideos.cs

 

/*==============================================================================
            Copyright (c) 2012 QUALCOMM Austria Research Center GmbH.
            All Rights Reserved.
            Qualcomm Confidential and Proprietary

This  Vuforia(TM) sample application in source code form ("Sample Code") for the
Vuforia Software Development Kit and/or Vuforia Extension for Unity
(collectively, the "Vuforia SDK") may in all cases only be used in conjunction
with use of the Vuforia SDK, and is subject in all respects to all of the terms
and conditions of the Vuforia SDK License Agreement, which may be found at
https://ar.qualcomm.at/legal/license.

By retaining or using the Sample Code in any manner, you confirm your agreement
to all the terms and conditions of the Vuforia SDK License Agreement.  If you do
not agree to all the terms and conditions of the Vuforia SDK License Agreement,
then you may not retain or use any of the Sample Code in any manner.
==============================================================================*/

using UnityEngine;

// A custom handler that implements the ITrackableEventHandler interface.
public class TrackableEventHandlerVideos : MonoBehaviour,
                                     ITrackableEventHandler
{
    #region PRIVATE_MEMBER_VARIABLES

    private TrackableBehaviour mTrackableBehaviour;

    private bool mHasBeenFound = false;
    private bool mLostTracking;
    private float mSecondsSinceLost;
    private float distanceToCamera;

    private float mVideoCurrentPosition;
    private float mCurrentVolume;

    private Transform mSphere;


    #endregion // PRIVATE_MEMBER_VARIABLES



    #region UNITY_MONOBEHAVIOUR_METHODS

    void Start()
    {
        Transform[] allChildren = GetComponentsInChildren<Transform>();
        foreach (Transform child in allChildren)
        {
            // Cache the child named "Sphere" so it can be rotated while the target is tracked
            if (child.name == "Sphere") mSphere = child;
        }

        mTrackableBehaviour = GetComponent<TrackableBehaviour>();
        if (mTrackableBehaviour)
        {
            mTrackableBehaviour.RegisterTrackableEventHandler(this);
        }

        OnTrackingLost();
    }


    void Update()
    {
        //for testing audio levels while in editor
        //distanceToCamera = Vector3.Distance(Camera.main.transform.position, transform.root.position);
        //Debug.Log(Mathf.Clamp01(1.0f-distanceToCamera*0.01f));

        //To spatialize audio: check if component is available, then on update set volume to normalized distance from tracker.
        if (mHasBeenFound)
        {
            if (mSphere) mSphere.Rotate(0.0f, -0.2666f, 0.0f);

            VideoPlaybackBehaviour video = GetComponentInChildren<VideoPlaybackBehaviour>();
            //Debug.Log("Video on "+ transform.root.name +" is "+ video.m_path);

            if (video != null &&
                video.CurrentState == VideoPlayerHelper.MediaState.PLAYING &&
                mLostTracking == false &&
                mHasBeenFound == true)
            {
                distanceToCamera = Vector3.Distance(Camera.main.transform.position, transform.root.position);
                //Debug.Log(distanceToCamera);
                mCurrentVolume = 1.0f-(Mathf.Clamp01(distanceToCamera*0.006f)*0.5f);
                video.VideoPlayer.SetVolume(mCurrentVolume);
            }

            //Loop automatically if marker is visible and video has reached the end
            //comment this out if you want the play button to appear when the video has reached the end
            if (video != null &&
                video.CurrentState == VideoPlayerHelper.MediaState.REACHED_END &&
                mLostTracking == false &&
                mHasBeenFound == true)
            {
                video.VideoPlayer.Play(false, 0);
            }
        }

        // Pause the video if tracking is lost for more than n seconds
        if (mHasBeenFound && mLostTracking)
        {
            VideoPlaybackBehaviour video = GetComponentInChildren<VideoPlaybackBehaviour>();
            if (video != null &&
                video.CurrentState == VideoPlayerHelper.MediaState.PLAYING)
            {
                //fade out volume from current if marker is lost
                //Debug.Log(mCurrentVolume - mSecondsSinceLost);
                video.VideoPlayer.SetVolume(Mathf.Clamp01(mCurrentVolume - mSecondsSinceLost));
            }

            //n.0f is number of seconds before playback stops when marker is lost
            if (mSecondsSinceLost > 1.0f)
            {
                if (video != null &&
                    video.CurrentState == VideoPlayerHelper.MediaState.PLAYING)
                {
                    //get last position so it can resume after video is unloaded and reloaded.
                    mVideoCurrentPosition = video.VideoPlayer.GetCurrentPosition();
                    video.VideoPlayer.Pause();

                    if (video.VideoPlayer.Unload())
                    {
                        Debug.Log("UnLoaded Video: " + video.m_path);
                    }
                }

                mLostTracking = false;
            }

            mSecondsSinceLost += Time.deltaTime;
        }
    }

    #endregion // UNITY_MONOBEHAVIOUR_METHODS



    #region PUBLIC_METHODS

    // Implementation of the ITrackableEventHandler function called when the
    // tracking state changes.
    public void OnTrackableStateChanged(
                                    TrackableBehaviour.Status previousStatus,
                                    TrackableBehaviour.Status newStatus)
    {
        if (newStatus == TrackableBehaviour.Status.DETECTED ||
            newStatus == TrackableBehaviour.Status.TRACKED)
        {
            OnTrackingFound();
        }
        else
        {
            OnTrackingLost();
        }
    }

    #endregion // PUBLIC_METHODS



    #region PRIVATE_METHODS


    private void OnTrackingFound()
    {
        Renderer[] rendererComponents = GetComponentsInChildren<Renderer>();
        Collider[] colliderComponents = GetComponentsInChildren<Collider>();
        AudioSource[] audioComponents = GetComponentsInChildren<AudioSource>();

        // Enable rendering:
        foreach (Renderer component in rendererComponents)
        {
            component.enabled = true;
        }

        // Enable colliders:
        foreach (Collider component in colliderComponents)
        {
            component.enabled = true;
        }
        // Play audio:
        foreach (AudioSource component in audioComponents)
        {
            component.Play();
        }

        Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " found");

        // Optionally play the video automatically when the target is found
        VideoPlaybackBehaviour video = GetComponentInChildren<VideoPlaybackBehaviour>();

        if (video != null)
        {
            //load Video on tracking, use local variable to skip to position left off at pause
            if (video.VideoPlayer.Load(video.m_path, VideoPlayerHelper.MediaType.ON_TEXTURE, true, mVideoCurrentPosition))
            {
                Debug.Log("Loaded Video: " + video.m_path);
            }

            if (video.VideoPlayer.IsPlayableOnTexture())
            {
                VideoPlayerHelper.MediaState state = video.VideoPlayer.GetStatus();
                if (state == VideoPlayerHelper.MediaState.PAUSED ||
                    state == VideoPlayerHelper.MediaState.READY ||
                    state == VideoPlayerHelper.MediaState.STOPPED)
                {
                    // Pause other videos before playing this one
                    //PauseOtherVideos(video);

                    // Play this video on texture where it left off
                    video.VideoPlayer.Play(false, video.VideoPlayer.GetCurrentPosition());
                }
                else if (state == VideoPlayerHelper.MediaState.REACHED_END)
                {
                    // Pause other videos before playing this one
                    //PauseOtherVideos(video);

                    // Play this video from the beginning
                    video.VideoPlayer.Play(false, 0);
                }
            }
        }

        mHasBeenFound = true;
        mLostTracking = false;
    }


    private void OnTrackingLost()
    {
        Renderer[] rendererComponents = GetComponentsInChildren<Renderer>();
        Collider[] colliderComponents = GetComponentsInChildren<Collider>();
        AudioSource[] audioComponents = GetComponentsInChildren<AudioSource>();

        // Disable rendering:
        foreach (Renderer component in rendererComponents)
        {
            component.enabled = false;
        }

        // Disable colliders:
        foreach (Collider component in colliderComponents)
        {
            component.enabled = false;
        }
		
        // Pause audio:
        foreach (AudioSource component in audioComponents)
        {
            component.Pause();
        }

        Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " lost");

        mLostTracking = true;
        mSecondsSinceLost = 0;
    }


    // Pause all videos except this one
    private void PauseOtherVideos(VideoPlaybackBehaviour currentVideo)
    {
        VideoPlaybackBehaviour[] videos = (VideoPlaybackBehaviour[])
                FindObjectsOfType(typeof(VideoPlaybackBehaviour));

        foreach (VideoPlaybackBehaviour video in videos)
        {
            if (video != currentVideo)
            {
                if (video.CurrentState == VideoPlayerHelper.MediaState.PLAYING)
                {
                    video.VideoPlayer.Pause();
                }
            }
        }
    }

    #endregion // PRIVATE_METHODS
}

 

 

 

VideoPlaybackBehaviour.cs

 

/*==============================================================================
            Copyright (c) 2012 QUALCOMM Austria Research Center GmbH.
            All Rights Reserved.
            Qualcomm Confidential and Proprietary

This  Vuforia(TM) sample application in source code form ("Sample Code") for the
Vuforia Software Development Kit and/or Vuforia Extension for Unity
(collectively, the "Vuforia SDK") may in all cases only be used in conjunction
with use of the Vuforia SDK, and is subject in all respects to all of the terms
and conditions of the Vuforia SDK License Agreement, which may be found at
https://ar.qualcomm.at/legal/license.

By retaining or using the Sample Code in any manner, you confirm your agreement
to all the terms and conditions of the Vuforia SDK License Agreement.  If you do
not agree to all the terms and conditions of the Vuforia SDK License Agreement,
then you may not retain or use any of the Sample Code in any manner.
==============================================================================*/

using UnityEngine;
using System.Collections;

public class VideoPlaybackBehaviour : MonoBehaviour
{
    #region PUBLIC_MEMBER_VARIABLES

    // URL of the video, either a path to a local file or a remote address
    public string m_path = null;

    // Texture for the play icon
    public Texture m_playTexture = null;

    // Texture for the busy icon
    public Texture m_busyTexture = null;

    // Texture for the error icon
    public Texture m_errorTexture = null;

    #endregion // PUBLIC_MEMBER_VARIABLES



    #region PRIVATE_MEMBER_VARIABLES

    private VideoPlayerHelper mVideoPlayer = null;
    private bool mIsInited = false;
    private bool mIsPrepared = false;

    private Texture2D mVideoTexture = null;

    [SerializeField]
    [HideInInspector]
    private Texture mKeyframeTexture = null;

    private VideoPlayerHelper.MediaType mMediaType =
            VideoPlayerHelper.MediaType.ON_TEXTURE_FULLSCREEN;

    private VideoPlayerHelper.MediaState mCurrentState =
            VideoPlayerHelper.MediaState.NOT_READY;

    private float mSeekPosition = 0.0f;

    private bool isPlayableOnTexture;

    private GameObject mIconPlane = null;
    private bool mIconPlaneActive = false;

    #endregion // PRIVATE_MEMBER_VARIABLES



    #region PROPERTIES

    // Returns the video player
    public VideoPlayerHelper VideoPlayer
    {
        get { return mVideoPlayer; }
    }

    // Returns the current playback state
    public VideoPlayerHelper.MediaState CurrentState
    {
        get { return mCurrentState; }
    }

    // Type of playback (on-texture only, fullscreen only, or both)
    public VideoPlayerHelper.MediaType MediaType
    {
        get { return mMediaType; }
        set { mMediaType = value; }
    }

    // Texture displayed before video playback begins
    public Texture KeyframeTexture
    {
        get { return mKeyframeTexture; }
        set { mKeyframeTexture = value; }
    }

    #endregion // PROPERTIES



    #region UNITY_MONOBEHAVIOUR_METHODS

    void Start()
    {
        // A filename or url must be set in the inspector
        if (m_path == null || m_path.Length == 0)
        {
            Debug.Log("Please set a video url in the Inspector");
            this.enabled = false;
        }

        // Create the video player and set the filename
        mVideoPlayer = new VideoPlayerHelper();
        mVideoPlayer.SetFilename(m_path);

        // Find the icon plane (child of this object)
        mIconPlane = transform.Find("Icon").gameObject;

        // Set the current state to Not Ready
        HandleStateChange(VideoPlayerHelper.MediaState.NOT_READY);
        mCurrentState = VideoPlayerHelper.MediaState.NOT_READY;

        // Flip the plane as the video texture is mirrored on the horizontal
        transform.localScale = new Vector3(-1 * Mathf.Abs(transform.localScale.x),
                transform.localScale.y, transform.localScale.z);

        // Scale the icon
        ScaleIcon();
    }


    void Update()
    {
        if (!mIsInited)
        {
            // Initialize the video player
            if (mVideoPlayer.Init() == false)
            {
                Debug.Log("Could not initialize video player");
                HandleStateChange(VideoPlayerHelper.MediaState.ERROR);
                this.enabled = false;
                return;
            }

            // Initialize the video texture
            InitVideoTexture();

            // Load the video
            /*
			if (mVideoPlayer.Load(m_path, mMediaType, false, 0) == false)
            {
                Debug.Log("ERR1: Could not load video '" + m_path + "' for media type " + mMediaType);
                HandleStateChange(VideoPlayerHelper.MediaState.ERROR);
                this.enabled = false;
                return;
            }
            */

            // Successfully initialized
            mIsInited = true;
        }
        else if (!mIsPrepared)
        {
            // Get the video player status
            VideoPlayerHelper.MediaState state = mVideoPlayer.GetStatus();

            if (state == VideoPlayerHelper.MediaState.ERROR)
            {
                Debug.Log("ERR2: Could not load video '" + m_path + "' for media type " + mMediaType);
                HandleStateChange(VideoPlayerHelper.MediaState.ERROR);
                this.enabled = false;
            }
            else if (state < VideoPlayerHelper.MediaState.NOT_READY)
            {
                // Video player is ready

                // Can we play this video on a texture?
                isPlayableOnTexture = mVideoPlayer.IsPlayableOnTexture();

                if (isPlayableOnTexture)
                {
                    // Pass the video texture id to the video player
                    int nativeTextureID = mVideoTexture.GetNativeTextureID();
                    mVideoPlayer.SetVideoTextureID(nativeTextureID);

                    // Get the video width and height
                    int videoWidth = mVideoPlayer.GetVideoWidth();
                    int videoHeight = mVideoPlayer.GetVideoHeight();

                    if (videoWidth > 0 && videoHeight > 0)
                    {
                        // Scale the video plane to match the video aspect ratio
                      //  float aspect = videoHeight / (float) videoWidth;

                        // Flip the plane as the video texture is mirrored on the horizontal
                       // transform.localScale = new Vector3(-0.1f, 0.1f, 0.1f * aspect);
                    }

                    // Seek ahead if necessary
                    if (mSeekPosition > 0)
                    {
                        mVideoPlayer.SeekTo(mSeekPosition);
                    }
                }
                else
                {
                    // Handle the state change
                    state = mVideoPlayer.GetStatus();
                    HandleStateChange(state);
                    mCurrentState = state;
                }

                // Scale the icon
                ScaleIcon();

                // Video is prepared, ready for playback
                mIsPrepared = true;
            }
        }
        else
        {
            if (isPlayableOnTexture)
            {
                // Update the video texture with the latest video frame
                VideoPlayerHelper.MediaState state = mVideoPlayer.UpdateVideoData();

                // Check for playback state change
                if (state != mCurrentState)
                {
                    HandleStateChange(state);
                    mCurrentState = state;
                }
            }
            else
            {
                // Get the current status
                VideoPlayerHelper.MediaState state = mVideoPlayer.GetStatus();
                
                // Check for playback state change
                if (state != mCurrentState)
                {
                    HandleStateChange(state);
                    mCurrentState = state;
                }
            }
        }

        CheckIconPlaneVisibility();
    }


    void OnApplicationPause(bool pause)
    {
        if (pause)
        {
            // Handle pause event natively
            mVideoPlayer.OnPause();

            // Store the playback position for later
            mSeekPosition = mVideoPlayer.GetCurrentPosition();

            // Deinit the video
            mVideoPlayer.Deinit();

            // Reset initialization parameters
            mIsInited = false;
            mIsPrepared = false;

            // Set the current state to Not Ready
            HandleStateChange(VideoPlayerHelper.MediaState.NOT_READY);
            mCurrentState = VideoPlayerHelper.MediaState.NOT_READY;
        }
    }


    void OnDestroy()
    {
        // Deinit the video
        mVideoPlayer.Deinit();
    }

    #endregion // UNITY_MONOBEHAVIOUR_METHODS



    #region PUBLIC_METHODS

    public void ShowBusyIcon()
    {
        mIconPlane.renderer.material.mainTexture = m_busyTexture;
    }

    #endregion // PUBLIC_METHODS



    #region PRIVATE_METHODS

    // Initialize the video texture
    private void InitVideoTexture()
    {
        // Create texture of size 0 that will be updated in the plugin (we allocate buffers in native code)
        mVideoTexture = new Texture2D(0, 0, TextureFormat.RGB565, false);
        mVideoTexture.filterMode = FilterMode.Bilinear;
        mVideoTexture.wrapMode = TextureWrapMode.Clamp;
    }


    // Handle video playback state changes
    private void HandleStateChange(VideoPlayerHelper.MediaState newState)
    {
        // If the movie is playing or paused render the video texture
        // Otherwise render the keyframe
        if (newState == VideoPlayerHelper.MediaState.PLAYING ||
            newState == VideoPlayerHelper.MediaState.PAUSED)
        {
            renderer.material.mainTexture = mVideoTexture;
            renderer.material.mainTextureScale = new Vector2(1, 1);
        }
        else
        {
            if (mKeyframeTexture != null)
            {
                renderer.material.mainTexture = mKeyframeTexture;
                renderer.material.mainTextureScale = new Vector2(1, -1);
            }
        }

        // Display the appropriate icon, or disable if not needed
        switch (newState)
        {
            case VideoPlayerHelper.MediaState.READY:
            case VideoPlayerHelper.MediaState.REACHED_END:
            case VideoPlayerHelper.MediaState.PAUSED:
            case VideoPlayerHelper.MediaState.STOPPED:
                mIconPlane.renderer.material.mainTexture = m_playTexture;
                mIconPlaneActive = true;
                break;

            case VideoPlayerHelper.MediaState.NOT_READY:
            case VideoPlayerHelper.MediaState.PLAYING_FULLSCREEN:
                mIconPlane.renderer.material.mainTexture = m_busyTexture;
                mIconPlaneActive = true;
                break;

            case VideoPlayerHelper.MediaState.ERROR:
                mIconPlane.renderer.material.mainTexture = m_errorTexture;
                mIconPlaneActive = true;
                break;

            default:
                mIconPlaneActive = false;
                break;
        }

        if (newState == VideoPlayerHelper.MediaState.PLAYING_FULLSCREEN)
        {
            // Switching to full screen, disable QCARBehaviour (only applicable for iOS)
            QCARBehaviour qcarBehaviour = (QCARBehaviour) FindObjectOfType(typeof(QCARBehaviour));
            qcarBehaviour.enabled = false;
        }
        else if (mCurrentState == VideoPlayerHelper.MediaState.PLAYING_FULLSCREEN)
        {
            // Switching away from full screen, enable QCARBehaviour (only applicable for iOS)
            QCARBehaviour qcarBehaviour = (QCARBehaviour) FindObjectOfType(typeof(QCARBehaviour));
            qcarBehaviour.enabled = true;
        }
    }


    private void ScaleIcon()
    {
        // Icon should fill 50% of the narrowest side of the video

        float videoWidth = Mathf.Abs(transform.localScale.x);
        float videoHeight = Mathf.Abs(transform.localScale.z);
        float iconWidth, iconHeight;

        if (videoWidth > videoHeight)
        {
            iconWidth = 0.5f * videoHeight / videoWidth;
            iconHeight = 0.5f;
        }
        else
        {
            iconWidth = 0.5f;
            iconHeight = 0.5f * videoWidth / videoHeight;
        }

        mIconPlane.transform.localScale = new Vector3(-iconWidth, 1.0f, iconHeight);
    }


    private void CheckIconPlaneVisibility()
    {
        // If the video object renderer is currently enabled, we might need to toggle the icon plane visibility
        if (renderer.enabled)
        {
            // Check if the icon plane renderer has to be disabled explicitly in case it was enabled by another script (e.g. TrackableEventHandler)
            if (mIconPlane.renderer.enabled != mIconPlaneActive)
                mIconPlane.renderer.enabled = mIconPlaneActive;
        }
    }

    #endregion // PRIVATE_METHODS
}

 

[sharing] VideoPlaybackBehaviour and loading on runtime

September 4, 2014 - 7:42am #22

You're welcome ;-)

[sharing] VideoPlaybackBehaviour and loading on runtime

September 4, 2014 - 7:16am #21

Works perfectly! Thanks for pointing me in the right direction. I was using the other AR package and didn't realise there was a separate video one too ;-)

 

[sharing] VideoPlaybackBehaviour and loading on runtime

September 4, 2014 - 6:50am #20

:-) thank you :-)

[sharing] VideoPlaybackBehaviour and loading on runtime

September 4, 2014 - 6:47am #19

[sharing] VideoPlaybackBehaviour and loading on runtime

September 4, 2014 - 6:44am #18

OK, I must be dumb... :-(

 

The original says:

"Import the VideoPlayback examples with prefabs, then replace VideoPlaybackBehaviour with this one and remove DefaultTrackableEventHandler.cs replacing it with TrackableEventHandlerVideos.cs to your ImageTarget instead"

I do not see the VideoPlayback examples (I have imported the 3.09 Unity Extension from Vuforia, but they are not present?)

 

???

 

Thanks

[sharing] VideoPlaybackBehaviour and loading on runtime

September 4, 2014 - 6:26am #17

That part is the same in both (these and the originals).  I haven't updated these scripts for Vuforia 3.0.

[sharing] VideoPlaybackBehaviour and loading on runtime

September 4, 2014 - 6:16am #16

Hi, thanks for the speedy response ;-)

When you say "use the VideoPlayback behaviour example script as a guide", do you mean the version in this thread?  Do I have to use all the other .cs files included as well?

 

Thanks

 

 

[sharing] VideoPlaybackBehaviour and loading on runtime

September 4, 2014 - 6:04am #15

Hi there,

My knowledge might be out of date, but video textures that run as part of the VideoPlaybackBehaviour components don't work with the standard video textures that Vuforia recognizes (.mp4, .mov). So they might work in Play mode, but they won't appear on your video prefab. Instead, you have to encode as .m4v (use Handbrake to build for iPhone/iPod) and use the VideoPlayback behaviour example script as a guide. The important parts are:

1. Keep the video in your StreamingAssets folder. It will not be a recognized Unity file type.

2. Make sure the playback script has the correct path to your video, including the .m4v extension (even though the extension doesn't appear in Unity); see the sketch at the end of this post.

3. It won't work in Play mode, but it should give you an "X". If it displays the hourglass icon in Play mode, it's probably not working correctly.

I haven't built something using it from scratch in a few versions now.
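
For point 2, the path you put in the VideoPlaybackBehaviour's m_path field is just the filename, and as far as I remember the sample resolves it relative to StreamingAssets on the device. Here's a rough, untested sanity check you could drop on any object ("MyClip.m4v" is only a placeholder for your own file):

using UnityEngine;
using System.IO;

public class VideoPathCheck : MonoBehaviour
{
    // Placeholder filename: replace with your own clip in Assets/StreamingAssets,
    // keeping the real .m4v extension even though Unity hides it in the Project view.
    public string m_fileName = "MyClip.m4v";

    void Start()
    {
        string fullPath = Path.Combine(Application.streamingAssetsPath, m_fileName);
        // Works in the editor and on iOS; on Android StreamingAssets lives inside the APK,
        // so this check reports false there even when the video is packaged correctly.
        Debug.Log(m_fileName + " found on disk: " + File.Exists(fullPath));
    }
}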

[sharing] VideoPlaybackBehaviour and loading on runtime

September 4, 2014 - 5:58am #14

Hi,  sorry for asking the obvious to some of you but....

 

I have an app that uses the Vuforia extension for Unity, and I have built apps that work perfectly when using Unity's build process to export to different platforms, desktop and mobile.

However, I have found a problem: whereas on desktop Vuforia lets me play a movie on a model TV when the trigger image is detected, on mobile it will not (as many before me have found) allow video to be played on textures.

So my questions are: is this problem a Vuforia one or a Unity one, and does the code supplied in this thread apply to the Vuforia extension for Unity so that movies can be played on textures on mobile devices?

 

Thanks

[sharing] VideoPlaybackBehaviour and loading on runtime

August 19, 2013 - 6:25am #13

What I mean by the splash screen is Unity's own splash screen.

I suspect that before loading into the AR scene, either during Unity's splash screen or in another scene before the AR scene, the TrackableEventHandlerVideos.cs for image tracking somehow starts loading.

I tried Unity's splash screen -> scene 1 -> AR scene, and that gave the same results. If I change back to the original code (VideoPlaybackBehaviour + TrackableEventHandler) provided by Vuforia, it is fine.

[sharing] VideoPlaybackBehaviour and loading on runtime

August 19, 2013 - 5:30am #12

You're saying it happens *during* the splash screen?  Sounds like the component hasn't been added yet by the time the script looks for it. I haven't noticed this issue before, but I believe you can track it down one of two ways:

1. Change the "script execution order" explicitly; something might be loading in the wrong order.

2. Just be sure to explicitly test whether GetComponentInChildren returned null, roughly like the sketch below.
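
For option 2, I mean something roughly like this (untested sketch; the field and method names follow the scripts earlier in this thread):

private VideoPlaybackBehaviour video;

void Start()
{
    // May legitimately return null while the scene is still loading (e.g. during the splash screen)
    video = GetComponentInChildren<VideoPlaybackBehaviour>();
}

void Update()
{
    if (video == null)
    {
        // Keep retrying instead of dereferencing null; bail out of this frame if it still isn't there
        video = GetComponentInChildren<VideoPlaybackBehaviour>();
        if (video == null) return;
    }

    // ...rest of the Update() logic from TrackableEventHandlerVideos goes here...
}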

[sharing] VideoPlaybackBehaviour and loading on runtime

August 19, 2013 - 1:01am #11

Hi,

Your code is working. However, I found another problem with it when using it on Android devices.

Testing method:

Open the app -> during the splash screen, point at the image target -> the app closes automatically.

void Start() {

    xxxx

    xxx

    video = GetComponentInChildren<VideoPlaybackBehaviour>(); // <- this line is causing the error

    xxx

}

I tried moving that code to OnTrackingFound() but the problem is still the same.

[sharing] VideoPlaybackBehaviour and loading on runtime

July 31, 2013 - 3:44am #10

You might like to try the latest version of Vuforia as the Unity VP sample has an autoplay checkbox in the inspector for the video child of the image target.

Not sure why you are getting the Busy icon.

What device is this?  Have you tried with any others?

 

N

[sharing] VideoPlaybackBehaviour and loading on runtime

July 29, 2013 - 6:27pm #9

cap10subtext wrote:

Hi Sorry for not replying sooner.  It's totally possible, you just have to compare the original Videoplayback example (specifically VideoPlaybackBehaviour, VideoPlaybackController and the custom TrackableEventHandlerVideos.cs) and you should see in the original where it says "comment this out to play back videos automatically" or something like that.  You should be able to just restore those few things (like pausing all other videos) so that it works like the original.

 

Hi, thank you for sharing this...it almost saved my life :)

I can't manage to play the video only when I tap on it... it starts automatically, and if I comment out the "play video on tracking found" part I get the "busy" icon and it plays only fullscreen...

 

Any idea?

 

Cheers

[sharing] VideoPlaybackBehaviour and loading on runtime

May 23, 2013 - 5:32am #8

Hello, thank you very much for sharing this code, it really works. I've tried it on an iPad mini (iOS 6.1) and on Android 4.1.2 (Samsung Galaxy SIII) and it works perfectly.

Thank you very much.

[sharing] VideoPlaybackBehaviour and loading on runtime

March 15, 2013 - 1:44am #7

Hey Cap10

Thanks for the post. I seem to hit a limit of 15 videos: when I add a 16th video my app doesn't start on an Android HTC One V. Do you think this is a hardware limitation? I don't understand: if the videos are only loaded when tracking is found, how is the app exceeding memory, given that memory should only be used once a target is detected and its video loaded?

[sharing] VideoPlaybackBehaviour and loading on runtime

March 14, 2013 - 7:14am #6

Hi, sorry for not replying sooner. It's totally possible; you just have to compare the original VideoPlayback example (specifically VideoPlaybackBehaviour, VideoPlaybackController and the custom TrackableEventHandlerVideos.cs) and you should see in the original where it says "comment this out to play back videos automatically" or something like that. You should be able to just restore those few things (like pausing all other videos) so that it works like the original.
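
Roughly, in OnTrackingFound() of TrackableEventHandlerVideos that would look something like the untested sketch below; the false passed to Load() follows the commented-out Load() call in the original VideoPlaybackBehaviour, where that argument controls whether playback starts as soon as the video is loaded:

if (video != null)
{
    // Load the video but do not start it: passing false (as the original sample's
    // commented-out Load() call does) leaves it waiting for the play button.
    if (video.VideoPlayer.Load(video.m_path, VideoPlayerHelper.MediaType.ON_TEXTURE, false, mVideoCurrentPosition))
    {
        Debug.Log("Loaded Video: " + video.m_path);
    }

    // Restore this if you only ever want one video playing at a time:
    //PauseOtherVideos(video);

    // ...and remove the automatic video.VideoPlayer.Play(...) calls that follow in my version.
}

If I remember right, tapping the play icon is handled by VideoPlaybackController in the original sample, so keep that script in the scene if you go this route.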

[sharing] VideoPlaybackBehaviour and loading on runtime

March 13, 2013 - 6:46am #5

Hi,

First of all thanks for your code, it is really great.

How can you modify this so that it does not play the video automatically?

 

I wish to play the video only after pressing the play button.
 
 
Cheers!
 
Thank you very much.

[sharing] VideoPlaybackBehaviour and loading on runtime

March 6, 2013 - 11:22pm #4

Nice, thanks for the update.

[sharing] VideoPlaybackBehaviour and loading on runtime

March 6, 2013 - 4:57pm #3

Just posting an updated version of the TrackableEventHandlerVideos.  I was noticing a glitch or two in which a video would not turn off correctly.

 


/*==============================================================================
            Copyright (c) 2012 QUALCOMM Austria Research Center GmbH.
            All Rights Reserved.
            Qualcomm Confidential and Proprietary

This  Vuforia(TM) sample application in source code form ("Sample Code") for the
Vuforia Software Development Kit and/or Vuforia Extension for Unity
(collectively, the "Vuforia SDK") may in all cases only be used in conjunction
with use of the Vuforia SDK, and is subject in all respects to all of the terms
and conditions of the Vuforia SDK License Agreement, which may be found at
https://ar.qualcomm.at/legal/license.

By retaining or using the Sample Code in any manner, you confirm your agreement
to all the terms and conditions of the Vuforia SDK License Agreement.  If you do
not agree to all the terms and conditions of the Vuforia SDK License Agreement,
then you may not retain or use any of the Sample Code in any manner.
==============================================================================*/

using UnityEngine;

// A custom handler that implements the ITrackableEventHandler interface.
public class TrackableEventHandlerVideos : MonoBehaviour,
                                     ITrackableEventHandler
{
    #region PRIVATE_MEMBER_VARIABLES

    private TrackableBehaviour mTrackableBehaviour;
    private VideoPlaybackBehaviour video;

    private bool mHasBeenFound = false;
    private bool mLostTracking;
    private bool videoFinished;
    private float mSecondsSinceLost;
    private float distanceToCamera;

    private float mVideoCurrentPosition;
    private float mCurrentVolume;

    private Transform mMyModel;


    #endregion // PRIVATE_MEMBER_VARIABLES



    #region UNITY_MONOBEHAVIOUR_METHODS

    void Start()
    {
        /* for custom animations on update
        Transform[] allChildren = GetComponentsInChildren<Transform>();
        foreach (Transform child in allChildren)
        {
            // do whatever with child transform here
            if (child.name == "MyModel") mMyModel = child;
        }
        */

        mTrackableBehaviour = GetComponent<TrackableBehaviour>();
        if (mTrackableBehaviour)
        {
            mTrackableBehaviour.RegisterTrackableEventHandler(this);
        }

        video = GetComponentInChildren<VideoPlaybackBehaviour>();

        OnTrackingLost();
    }


    void Update()
    {
        if (video == null) return;

        //for testing audio levels while in editor
        //distanceToCamera = Vector3.Distance(Camera.main.transform.position, transform.root.position);
        //Debug.Log(distanceToCamera);
        //Debug.Log(1.0f-(Mathf.Clamp01(distanceToCamera*0.0005f)*0.5f));

        //To spatialize audio: check if component is available, then on update set volume to normalized distance from tracker.
        if (!mLostTracking && mHasBeenFound)
        {
            /*
            //whatever custom animation is performed per update frame if tracker is found
            if (mMyModel)
            {
                mMyModel.Rotate(0.0f, -0.2666f, 0.0f);
            }
            */

            //if video is playing, get distance to camera.
            if (video.CurrentState == VideoPlayerHelper.MediaState.PLAYING)
            {
                //Debug.Log("Video on "+ transform.root.name +" is "+ video.m_path);
                distanceToCamera = Vector3.Distance(Camera.main.transform.position, transform.root.position);
                //Debug.Log(distanceToCamera);
                mCurrentVolume = 1.0f-(Mathf.Clamp01(distanceToCamera*0.0005f)*0.5f);
                video.VideoPlayer.SetVolume(mCurrentVolume);
            }
            else if (video.CurrentState == VideoPlayerHelper.MediaState.REACHED_END)
            {
                //Loop automatically if marker is visible and video has reached the end
                //comment this out if you want the play button to appear when the video has reached the end
                Debug.Log("Video Has ended, playing again");
                video.VideoPlayer.Play(false, 0);
            }
        }

        // Pause the video if tracking is lost for more than n seconds
        if (mHasBeenFound && mLostTracking && !videoFinished)
        {
            if (video.CurrentState == VideoPlayerHelper.MediaState.PLAYING)
            {
                //fade out volume from current if marker is lost
                //Debug.Log(mCurrentVolume - mSecondsSinceLost);
                video.VideoPlayer.SetVolume(Mathf.Clamp01(mCurrentVolume - mSecondsSinceLost));
            }

            //n.0f is number of seconds before playback stops when marker is lost
            if (mSecondsSinceLost > 1.0f)
            {
                if (video.CurrentState == VideoPlayerHelper.MediaState.PLAYING)
                {
                    //get last position so it can resume after video is unloaded and reloaded.
                    mVideoCurrentPosition = video.VideoPlayer.GetCurrentPosition();
                    video.VideoPlayer.Pause();

                    if (video.VideoPlayer.Unload())
                    {
                        Debug.Log("UnLoaded Video: " + video.m_path);
                        videoFinished = true;
                    }
                }
            }

            mSecondsSinceLost += Time.deltaTime;
        }
    }

    #endregion // UNITY_MONOBEHAVIOUR_METHODS



    #region PUBLIC_METHODS

    // Implementation of the ITrackableEventHandler function called when the
    // tracking state changes.
    public void OnTrackableStateChanged(
                                    TrackableBehaviour.Status previousStatus,
                                    TrackableBehaviour.Status newStatus)
    {
        if (newStatus == TrackableBehaviour.Status.DETECTED ||
            newStatus == TrackableBehaviour.Status.TRACKED)
        {
            OnTrackingFound();
        }
        else
        {
            OnTrackingLost();
        }
    }

    #endregion // PUBLIC_METHODS



    #region PRIVATE_METHODS


    private void OnTrackingFound()
    {
        Renderer[] rendererComponents = GetComponentsInChildren<Renderer>();
        Collider[] colliderComponents = GetComponentsInChildren<Collider>();
        AudioSource[] audioComponents = GetComponentsInChildren<AudioSource>();

        // Enable rendering:
        foreach (Renderer component in rendererComponents)
        {
            component.enabled = true;
        }

        // Enable colliders:
        foreach (Collider component in colliderComponents)
        {
            component.enabled = true;
        }

        // Play audio:
        foreach (AudioSource component in audioComponents)
        {
            component.Play();
        }

        Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " found");

        // Optionally play the video automatically when the target is found
        if (video != null)
        {
            //load Video on tracking, use local variable to skip to position left off at pause
            if (video.VideoPlayer.Load(video.m_path, VideoPlayerHelper.MediaType.ON_TEXTURE, true, mVideoCurrentPosition))
            {
                Debug.Log("Loaded Video: " + video.m_path);
            }

            if (video.VideoPlayer.IsPlayableOnTexture())
            {
                VideoPlayerHelper.MediaState state = video.VideoPlayer.GetStatus();
                if (state == VideoPlayerHelper.MediaState.PAUSED ||
                    state == VideoPlayerHelper.MediaState.READY ||
                    state == VideoPlayerHelper.MediaState.STOPPED)
                {
                    // Play this video on texture where it left off
                    video.VideoPlayer.Play(false, video.VideoPlayer.GetCurrentPosition());
                }
                else if (state == VideoPlayerHelper.MediaState.REACHED_END)
                {
                    // Play this video from the beginning
                    video.VideoPlayer.Play(false, 0);
                }
            }
        }

        mHasBeenFound = true;
        mLostTracking = false;
    }


    private void OnTrackingLost()
    {
        Renderer[] rendererComponents = GetComponentsInChildren<Renderer>();
        Collider[] colliderComponents = GetComponentsInChildren<Collider>();
        AudioSource[] audioComponents = GetComponentsInChildren<AudioSource>();

        // Disable rendering:
        foreach (Renderer component in rendererComponents)
        {
            component.enabled = false;
        }

        // Disable colliders:
        foreach (Collider component in colliderComponents)
        {
            component.enabled = false;
        }

        // Pause audio:
        foreach (AudioSource component in audioComponents)
        {
            component.Pause();
        }

        Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " lost");

        mLostTracking = true;
        mSecondsSinceLost = 0;
        videoFinished = false;
    }

    #endregion // PRIVATE_METHODS
}

 

[sharing] VideoPlaybackBehaviour and loading on runtime

February 11, 2013 - 9:13am #2

Thanks for sharing this. Happy to hear that the new "Code editor" feature of the Forum has enabled you to post it.
