02-22-2016 11:31 AM
02-22-2016 09:47 PM
03-09-2016 05:24 AM
/// <summary>
/// Invoked by OVRCameraRig's UpdatedAnchors callback. Allows the Hmd rotation to update the facing direction of the player.
/// </summary>
/// <param name="rig">The camera rig whose anchors were just updated by tracking.</param>
public void UpdateTransform(OVRCameraRig rig)
{
    // Use the rig handed to the callback; the original ignored this
    // parameter and read a separately cached CameraRig member instead.
    Transform root = rig.trackingSpace;
    Transform centerEye = rig.centerEyeAnchor;

    // Save the tracking-space pose before rotating this transform —
    // presumably the tracking space is parented under this transform, so
    // rotating the player would drag it along (TODO confirm hierarchy).
    Vector3 prevPos = root.position;
    Quaternion prevRot = root.rotation;

    // Yaw-only: copy the HMD heading onto the player, discarding pitch/roll.
    transform.rotation = Quaternion.Euler(0.0f, centerEye.rotation.eulerAngles.y, 0.0f);

    // Restore the tracking space so the camera itself does not move.
    root.position = prevPos;
    root.rotation = prevRot;
}
delta = deltaRotationBetween(root, centerEye)
centerEye.rotation = Quaternion.Euler(0.0f, centerEye.rotation.eulerAngles.y - delta, 0.0f);
03-11-2016 10:16 AM
"xflagx" wrote: Yes, Unity internally sets the local position, rotation, and FOV of the Camera to match tracking data and TimeWarp expectations just before Update gets called. This is done to ensure the right relationship between head motion and optical flow, without which users may get severely uncomfortable. In your code snippet, I don't see you registering your UpdateTransform method with the OVRCameraRig.UpdatedAnchors event. Try something like this:
It seems that the camera gets updated with some kind of magic behind the scenes.
using UnityEngine;
using System.Collections;
/// <summary>
/// Toggles positional tracking with the P key and overrides the rig pose
/// after tracking data is applied each frame via UpdatedAnchors.
/// </summary>
public class TrackingModeChanger : MonoBehaviour {

    /// <summary>Subscribes to every rig's UpdatedAnchors event.</summary>
    void Awake()
    {
        foreach (var rig in GameObject.FindObjectsOfType<OVRCameraRig>())
            rig.UpdatedAnchors += OnUpdatedAnchors;
    }

    /// <summary>
    /// Unsubscribes the handler. The original never removed it, which leaks
    /// this component for as long as any rig holds the delegate.
    /// </summary>
    void OnDestroy()
    {
        foreach (var rig in GameObject.FindObjectsOfType<OVRCameraRig>())
            rig.UpdatedAnchors -= OnUpdatedAnchors;
    }

    void Update ()
    {
        // Hold P to disable the positional tracker; release to re-enable.
        OVRManager.tracker.isEnabled = !Input.GetKey(KeyCode.P);
    }

    /// <summary>
    /// Runs after OVRCameraRig applies tracking, so edits here win for the
    /// current frame (avoids the wobble of fighting TimeWarp in Update).
    /// </summary>
    void OnUpdatedAnchors(OVRCameraRig rig)
    {
        // Invert the current head pose out of tracking space so the center
        // eye lands on the tracking-space origin.
        OVRPose pose = rig.centerEyeAnchor.ToOVRPose(true).Inverse();
        //FIXME: concatenate your desired pose onto "pose".
        rig.trackingSpace.FromOVRPose(pose, true);
    }
}
03-16-2016 05:14 AM
/// <summary>
/// UpdatedAnchors handler: removes the yaw offset between the tracked
/// real-world upper body and the head, so the body defines "forward".
/// </summary>
/// <param name="obj">The camera rig whose anchors were just updated.</param>
private void Rig_UpdatedAnchors(OVRCameraRig obj)
{
    // Use the rig passed to the callback; the original body referenced an
    // undeclared "rig" identifier instead of its "obj" parameter.
    OVRPose pose = obj.centerEyeAnchor.ToOVRPose(true).Inverse();

    //FIXME: concatenate your desired pose onto "pose".
    // The following works: cancel the head yaw relative to the body yaw.
    // NOTE(review): the X/Z Euler components of 1 degree look like leftover
    // test values — confirm they are intentional.
    pose.orientation = Quaternion.Inverse(Quaternion.Euler(1, realWorldUpperBody.rotation.eulerAngles.y, 1) * pose.orientation) * pose.orientation;

    obj.trackingSpace.FromOVRPose(pose, true);
}
If you don't, there will be a slight wobble due to TimeWarp.
10-19-2016 08:48 PM
10-19-2016 10:21 PM
using UnityEngine;
using System.Collections;
/// <summary>
/// Replaces the tracked anchor poses with user-specified fake poses every
/// frame, hooked in after OVRCameraRig applies real tracking data.
/// </summary>
public class FakeTracking : MonoBehaviour
{
    // Desired substitute poses, all in tracking space.
    public OVRPose centerEyePose = OVRPose.identity;
    public OVRPose leftEyePose = OVRPose.identity;
    public OVRPose rightEyePose = OVRPose.identity;
    public OVRPose leftHandPose = OVRPose.identity;
    public OVRPose rightHandPose = OVRPose.identity;
    public OVRPose trackerPose = OVRPose.identity;

    // Cached so OnDestroy can unsubscribe; the original leaked the handler.
    private OVRCameraRig _rig;

    void Awake()
    {
        _rig = GameObject.FindObjectOfType<OVRCameraRig>();
        if (_rig != null)
            _rig.UpdatedAnchors += OnUpdatedAnchors;
    }

    void OnDestroy()
    {
        if (_rig != null)
            _rig.UpdatedAnchors -= OnUpdatedAnchors;
    }

    void OnUpdatedAnchors(OVRCameraRig rig)
    {
        if (!enabled)
            return;

        //This doesn't work because VR camera poses are read-only.
        //rig.centerEyeAnchor.FromOVRPose(OVRPose.identity);
        //Instead, invert out the current pose and multiply in the desired pose.
        OVRPose pose = rig.centerEyeAnchor.ToOVRPose(true).Inverse();
        pose = centerEyePose * pose;
        rig.trackingSpace.FromOVRPose(pose, true);

        //The rest of the nodes are updated by OVRCameraRig, not Unity, so they're easy.
        rig.leftEyeAnchor.FromOVRPose(leftEyePose);
        rig.rightEyeAnchor.FromOVRPose(rightEyePose);
        rig.leftHandAnchor.FromOVRPose(leftHandPose);
        rig.rightHandAnchor.FromOVRPose(rightHandPose);
        rig.trackerAnchor.FromOVRPose(trackerPose);
    }
}
10-21-2016 09:09 AM
using UnityEngine;
using System.Collections;
/// <summary>
/// Replaces the tracked anchor poses with user-specified fake poses every
/// frame, applying a fixed pitch correction to the tracked head orientation.
/// </summary>
public class FakeTracking : MonoBehaviour
{
    // Desired substitute poses, all in tracking space.
    public OVRPose centerEyePose = OVRPose.identity;
    public OVRPose leftEyePose = OVRPose.identity;
    public OVRPose rightEyePose = OVRPose.identity;
    public OVRPose leftHandPose = OVRPose.identity;
    public OVRPose rightHandPose = OVRPose.identity;
    public OVRPose trackerPose = OVRPose.identity;

    // Pitch correction (degrees, about local X) applied to the tracked head
    // orientation. Generalized from the hard-coded -38.25f in the original.
    public float pitchCorrectionDegrees = -38.25f;

    // Cached so OnDestroy can unsubscribe; the original leaked the handler.
    private OVRCameraRig _rig;

    void Awake()
    {
        _rig = GameObject.FindObjectOfType<OVRCameraRig>();
        if (_rig != null)
            _rig.UpdatedAnchors += OnUpdatedAnchors;
    }

    void OnDestroy()
    {
        if (_rig != null)
            _rig.UpdatedAnchors -= OnUpdatedAnchors;
    }

    void OnUpdatedAnchors(OVRCameraRig rig)
    {
        if (!enabled)
            return;

        //This doesn't work because VR camera poses are read-only.
        //rig.centerEyeAnchor.FromOVRPose(OVRPose.identity);
        //Instead, invert out the current pose and multiply in the desired pose.
        // Query the tracked center-eye pose once per frame (the original
        // called ToOVRPose(true) three separate times).
        OVRPose tracked = rig.centerEyeAnchor.ToOVRPose(true);
        OVRPose pose = tracked.Inverse();

        // Pitch-corrected copy of the tracked pose, multiplied back in so
        // the net effect is a pure pitch offset on the head orientation.
        OVRPose correctedCenterEyePose;
        correctedCenterEyePose.position = tracked.position;
        correctedCenterEyePose.orientation = tracked.orientation * Quaternion.Euler(pitchCorrectionDegrees, 0.0f, 0.0f);

        pose = correctedCenterEyePose * pose;
        rig.trackingSpace.FromOVRPose(pose, true);

        //The rest of the nodes are updated by OVRCameraRig, not Unity, so they're easy.
        rig.leftEyeAnchor.FromOVRPose(leftEyePose);
        rig.rightEyeAnchor.FromOVRPose(rightEyePose);
        rig.leftHandAnchor.FromOVRPose(leftHandPose);
        rig.rightHandAnchor.FromOVRPose(rightHandPose);
        rig.trackerAnchor.FromOVRPose(trackerPose);
    }
}
10-21-2016 01:29 PM
10-21-2016 06:38 PM