RMI
13-05-2015 11:14:12
Hi,
I finally get the Oculus Rift (Development kit 2, sdk 0.4.4) working with Mogre (1.7.4).
So I put the source code here if someone else want to try.
[note : I'm French so I hope everything will be spelled correctly and readable]
I started from the SDK samples and a sample using the Oculus with SlimDX and C# found on https://oculuswrap.codeplex.com/ so some code and comments are pasted from there.
First of all, there are two ways to use the Oculus: "Direct access" or "Extended desktop".
The first way ("Direct access") need to give the Oculus direct access to BackBuffers et SwapChain and I can't get it working with mogre (but you can look at http://www.ogre3d.org/forums/viewtopic.php?f=5&t=76970 on the main ogre forum where it seem they get it work with the last Ogre version).
The second way (Extended desktop) first needs you to open the "Oculus Configuration Utility", then select "Tools => Rift display mode" and check "Extend Desktop to the HMD".
The Oculus will display as a second 1080*1920 screen in "Portrait" with a 90° rotation.
For this way we will need to render 2 cameras (one for each eye) onto two meshes that handle the distortion of the Oculus lenses, then a third camera will render these two meshes side by side on the screen/Oculus.
Now you will need a wrapper for the Oculus. I used this one : https://oculuswrap.codeplex.com/
Here start the Mogre part.
To display on the Oculus you'll need two material to render the 2 eyes ("Oculus.material")
In the code you'll need
When creating your render window you'll need to give it the Oculus resolution and to send it on the Oculus second screen
Now before calling the "Root.StartRendering()"
First Initialise the Oculus
then Initialise the mesh and render textures for the eye's camera
Finally initialise your cameras
Now you can call the
And your Oculus should display your scene
If you want the render to follow the Oculus head moves :
You'll need to subscribe to FrameRenderingQueued
I finally get the Oculus Rift (Development kit 2, sdk 0.4.4) working with Mogre (1.7.4).
So I put the source code here if someone else want to try.
[note : I'm French so I hope everything will be spelled correctly and readable]
I started from the SDK samples and a sample to use Oculus with slimdx and c# found on https://oculuswrap.codeplex.com/ so there will be some code and comments pasted for there.
First of all, there are two ways to use the Oculus: "Direct access" or "Extended desktop".
The first way ("Direct access") need to give the Oculus direct access to BackBuffers et SwapChain and I can't get it working with mogre (but you can look at http://www.ogre3d.org/forums/viewtopic.php?f=5&t=76970 on the main ogre forum where it seem they get it work with the last Ogre version).
The second way (Extended desktop) first needs you to open the "Oculus Configuration Utility", then select "Tools => Rift display mode" and check "Extend Desktop to the HMD".
The Oculus will display as a second 1080*1920 screen in "Portrait" with a 90° rotation.
For this way we will need to render 2 camera (one for each eye) on to mesh that will handle the distortion on the Oculus lens then a third camera will render this two mesh side by side on the screen/Oculus
Now you will need a wrapper for the Oculus. I used this one : https://oculuswrap.codeplex.com/
Here start the Mogre part.
To display on the Oculus you'll need two material to render the 2 eyes ("Oculus.material")
// Material for the left-eye distortion mesh.
// The empty texture_unit is filled at runtime with "RiftRenderTextureLeft"
// via SetTextureName() (see OculusDistortionMeshesInit in the C# code).
material Oculus/LeftEye
{
// The fullscreen distortion mesh must not be affected by shadows.
receive_shadows off
technique
{
pass Oculus/LeftEye
{
// No lighting either: the render texture is displayed as-is.
lighting off
// Texture name is assigned from code at startup.
texture_unit
{
}
}
}
}
// Material for the right-eye distortion mesh.
// The empty texture_unit is filled at runtime with "RiftRenderTextureRight"
// via SetTextureName() (see OculusDistortionMeshesInit in the C# code).
material Oculus/RightEye
{
// The fullscreen distortion mesh must not be affected by shadows.
receive_shadows off
technique
{
pass Oculus/RightEye
{
// No lighting either: the render texture is displayed as-is.
lighting off
// Texture name is assigned from code at startup.
texture_unit
{
}
}
}
}
In the code you'll need
// Access to the Oculus HMD and the OculusWrap runtime.
private Hmd hmd;
private Wrap oculusWrap;
// Latest tracking information read from the Oculus each frame.
private float fInterpupillaryDistance;
private Quaternion qOculusOrientation;
private Vector3 v3OculusPosition;
// 3 cameras: 1 for each eye and one final camera for the render window / Oculus.
private Camera camera;
private Camera camLeft;
private Camera camRight;
// 2 render-to-texture targets for the two eye cameras, plus the materials
// (defined in Oculus.material) that the distortion meshes are drawn with.
private TexturePtr leftEyeTexturePtr;
private TexturePtr rightEyeTexturePtr;
private RenderTexture leftRenderTexture;
private RenderTexture rightRenderTexture;
private MaterialPtr leftEyeMaterialPtr;
private MaterialPtr rightEyeMaterialPtr;
// Scene nodes used to move the eye cameras with the tracked head pose.
// FIX: the original declaration was missing its terminating semicolon.
private SceneNode snHeadNode, snEyesNode;
// Standard Mogre entry points:
// the SceneManager
public SceneManager SceneManager;
// the RenderWindow
public RenderWindow RenderWindow;
// the root
public Root Root { get; private set; }
When creating your render window you'll need to give it the Oculus resolution and to send it on the Oculus second screen
// Initialise Ogre without auto-creating a window so that the
// Oculus-specific window parameters can be supplied explicitly.
Root.Initialise(false, "Main Ogre Window");
NameValuePairList misc = new NameValuePairList();
// Send the window to the Oculus (second monitor in extended-desktop mode).
misc["monitorIndex"] = "1";
// FIX: the key must be exactly "border" — the original had a trailing
// space ("border ") so Ogre silently ignored the option.
misc["border"] = "none";
// DK2 native resolution in portrait orientation (1080x1920), fullscreen.
RenderWindow = Root.CreateRenderWindow("Oculus Rift Liver Visualization", 1080, 1920, true, misc);
Now before calling the "Root.StartRendering()"
First Initialise the Oculus
/// <summary>
/// Initialises the OculusWrap runtime, creates the HMD handle and
/// configures head tracking. Leaves <c>hmd</c> null when no Rift is
/// detected or when tracking cannot be enabled, so later code can
/// safely test <c>hmd != null</c>.
/// </summary>
private void OculusInit()
{
    oculusWrap = new Wrap();

    // Initialize the Oculus runtime.
    oculusWrap.Initialize();

    // Use the head mounted display, if it's available.
    int numberOfHeadMountedDisplays = oculusWrap.Hmd_Detect();
    if (numberOfHeadMountedDisplays > 0)
        hmd = oculusWrap.Hmd_Create(0);
    else
        return;

    if (hmd == null)
    {
        MessageBox.Show("Oculus Rift not detected.", "Uh oh", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return;
    }

    // FIX: also covers a null ProductName, not only the empty string.
    if (string.IsNullOrEmpty(hmd.ProductName))
        MessageBox.Show("The HMD is not enabled.", "There's a tear in the Rift", MessageBoxButtons.OK, MessageBoxIcon.Error);

    // Enable orientation, magnetic yaw correction and positional tracking.
    if (!hmd.ConfigureTracking(OVR.TrackingCaps.ovrTrackingCap_Orientation | OVR.TrackingCaps.ovrTrackingCap_MagYawCorrection | OVR.TrackingCaps.ovrTrackingCap_Position, OVR.TrackingCaps.None))
    {
        hmd.Dispose();
        oculusWrap.Dispose();
        // FIX: null the handles after disposing them, otherwise later
        // "hmd != null" checks would operate on a disposed object.
        hmd = null;
        oculusWrap = null;
        MessageBox.Show("TRACKING INIT ERROR");
    }
}
then Initialise the mesh and render textures for the eye's camera
/// <summary>
/// Creates the two render textures the eye cameras draw into, binds them
/// to the Oculus/LeftEye and Oculus/RightEye materials, and builds the two
/// lens-distortion meshes (one per eye) from the SDK's distortion data.
/// The meshes are attached to a child of the root scene node and rendered
/// by the orthographic main camera.
/// </summary>
private void OculusDistortionMeshesInit()
{
    // Configure Render Textures: ask the SDK for the recommended
    // per-eye texture size at full (1.0) pixel density.
    OVR.Sizei recommendedTexLeftSize = hmd.GetFovTextureSize(OVR.EyeType.Left,
        hmd.DefaultEyeFov[0], 1.0f);
    OVR.Sizei recommendedTexRightSize = hmd.GetFovTextureSize(OVR.EyeType.Right,
        hmd.DefaultEyeFov[1], 1.0f);

    leftEyeTexturePtr = TextureManager.Singleton.CreateManual("RiftRenderTextureLeft", ResourceGroupManager.DEFAULT_RESOURCE_GROUP_NAME, TextureType.TEX_TYPE_2D, (uint)recommendedTexLeftSize.Width, (uint)recommendedTexLeftSize.Height, 0, PixelFormat.PF_R8G8B8, (int)TextureUsage.TU_RENDERTARGET);
    rightEyeTexturePtr = TextureManager.Singleton.CreateManual("RiftRenderTextureRight", ResourceGroupManager.DEFAULT_RESOURCE_GROUP_NAME, TextureType.TEX_TYPE_2D, (uint)recommendedTexRightSize.Width, (uint)recommendedTexRightSize.Height, 0, PixelFormat.PF_R8G8B8, (int)TextureUsage.TU_RENDERTARGET);

    // Assign the textures to the eye materials (defined in Oculus.material).
    leftEyeMaterialPtr = MaterialManager.Singleton.GetByName("Oculus/LeftEye");
    leftEyeMaterialPtr.GetTechnique(0).GetPass(0).GetTextureUnitState(0).SetTextureName("RiftRenderTextureLeft");
    rightEyeMaterialPtr = MaterialManager.Singleton.GetByName("Oculus/RightEye");
    rightEyeMaterialPtr.GetTechnique(0).GetPass(0).GetTextureUnitState(0).SetTextureName("RiftRenderTextureRight");

    OVR.EyeRenderDesc[] eyeRenderDesc = new OVR.EyeRenderDesc[2];
    eyeRenderDesc[0] = hmd.GetRenderDesc(OVR.EyeType.Left, hmd.DefaultEyeFov[0]);
    eyeRenderDesc[1] = hmd.GetRenderDesc(OVR.EyeType.Right, hmd.DefaultEyeFov[1]);

    // Side-by-side viewports: left eye at x=0, right eye just after it.
    OVR.Recti[] viewports = new OVR.Recti[2];
    viewports[0].Position.x = 0;
    viewports[0].Position.y = 0;
    viewports[0].Size.Width = recommendedTexLeftSize.Width;
    viewports[0].Size.Height = recommendedTexLeftSize.Height;
    viewports[1].Position.x = recommendedTexLeftSize.Width;
    viewports[1].Position.y = 0;
    viewports[1].Size.Width = recommendedTexRightSize.Width;
    viewports[1].Size.Height = recommendedTexRightSize.Height;

    SceneNode meshNode = SceneManager.RootSceneNode.CreateChildSceneNode();
    OVR.DistortionVertex[] vertexDatas;
    OVR.Vector2f[] UVScaleOffset;
    ushort[] indexDatas;
    ManualObject manual;

    // Create Left Distortion Mesh.
    hmd.CreateDistortionMesh(eyeRenderDesc[0].Eye, eyeRenderDesc[0].Fov, hmd.DistortionCaps, out indexDatas, out vertexDatas);
    hmd.GetRenderScaleAndOffset(eyeRenderDesc[0].Fov, recommendedTexLeftSize, viewports[0], out UVScaleOffset);

    // Create ManualObject from the SDK's vertex/index data.
    // TODO: Destroy the manual objects!!
    manual = SceneManager.CreateManualObject("RiftRenderObjectLeft");
    manual.Begin("Oculus/LeftEye", RenderOperation.OperationTypes.OT_TRIANGLE_LIST);
    for (uint i = 0; i < vertexDatas.Length; i++)
    {
        // FIX: the original read "vertexDatas" without the [i] indexer
        // (lost in the forum paste) and could not compile.
        OVR.DistortionVertex v = vertexDatas[i];
        manual.Position(v.ScreenPosNDC.X, v.ScreenPosNDC.Y, 0);
        // UVScaleOffset[0] is the scale, UVScaleOffset[1] the offset.
        manual.TextureCoord(v.TanEyeAnglesR.X * UVScaleOffset[0].X + UVScaleOffset[1].X, v.TanEyeAnglesR.Y * UVScaleOffset[0].Y + UVScaleOffset[1].Y);
    }
    for (uint i = 0; i < indexDatas.Length; i++)
    {
        // FIX: same missing [i] indexer as above.
        manual.Index(indexDatas[i]);
    }
    // Tell Ogre the definition has finished.
    manual.End();
    meshNode.AttachObject(manual);

    // Create Right Distortion Mesh.
    hmd.CreateDistortionMesh(eyeRenderDesc[1].Eye, eyeRenderDesc[1].Fov, hmd.DistortionCaps, out indexDatas, out vertexDatas);
    hmd.GetRenderScaleAndOffset(eyeRenderDesc[1].Fov, recommendedTexRightSize, viewports[1], out UVScaleOffset);

    // Create ManualObject from the SDK's vertex/index data.
    // TODO: Destroy the manual objects!!
    manual = SceneManager.CreateManualObject("RiftRenderObjectRight");
    manual.Begin("Oculus/RightEye", RenderOperation.OperationTypes.OT_TRIANGLE_LIST);
    for (uint i = 0; i < vertexDatas.Length; i++)
    {
        // FIX: missing [i] indexer, as in the left-eye loop.
        OVR.DistortionVertex v = vertexDatas[i];
        manual.Position(v.ScreenPosNDC.X, v.ScreenPosNDC.Y, 0);
        manual.TextureCoord(v.TanEyeAnglesR.X * UVScaleOffset[0].X + UVScaleOffset[1].X, v.TanEyeAnglesR.Y * UVScaleOffset[0].Y + UVScaleOffset[1].Y);
    }
    for (uint i = 0; i < indexDatas.Length; i++)
    {
        // FIX: missing [i] indexer.
        manual.Index(indexDatas[i]);
    }
    // Tell Ogre the definition has finished.
    manual.End();
    meshNode.AttachObject(manual);

    // Place the meshes in front of the ortho camera; the negative Z scale
    // flips them so they face the camera.
    meshNode.SetPosition(0, 0, -1);
    meshNode.SetScale(1, 1, -1);
}
Finally initialise your cameras
/// <summary>
/// Creates the three cameras: an orthographic main camera that renders the
/// two distortion meshes to the window, and one perspective camera per eye
/// rendering the scene into the Rift render textures. Eye cameras are
/// separated by the user's IPD and use the SDK's projection matrices.
/// </summary>
private void SetCameras()
{
    // Node for the main (mesh-viewing) camera, at the origin.
    SceneNode mainCamNode = SceneManager.RootSceneNode.CreateChildSceneNode("Camera");
    mainCamNode.SetPosition(0, 0, 0);

    // Orthographic main camera looking at the two distortion meshes.
    // (If your scene already has a camera you can reuse it instead.)
    camera = SceneManager.CreateCamera("Camera");
    camera.ProjectionType = ProjectionType.PT_ORTHOGRAPHIC;
    camera.SetOrthoWindow(2, 2);
    camera.SetPosition(0, 0, 0);
    camera.FarClipDistance = 5000;
    camera.NearClipDistance = 0.001f;
    // The Oculus panel is physically rotated 90°, so counter-roll the view.
    camera.Roll(new Radian(-Math.PI / 2));
    mainCamNode.AttachObject(camera);

    // Window viewport for the main camera.
    // (If your scene already has one, call RenderWindow.RemoveAllViewports() first.)
    Viewport mainViewport = RenderWindow.AddViewport(camera);
    mainViewport.BackgroundColour = ColourValue.White;

    // Re-fetch viewport 0 and configure it for the Rift output.
    Viewport riftViewport = RenderWindow.GetViewport(0);
    riftViewport.BackgroundColour = ColourValue.Black;
    riftViewport.OverlaysEnabled = true;

    // Interpupillary distance in meters (SDK default 0.064 m).
    fInterpupillaryDistance = hmd.GetFloat(OVR.OVR_KEY_IPD, 0.064f);

    // Head node carrying both eye cameras.
    snHeadNode = SceneManager.RootSceneNode.CreateChildSceneNode("HeadNode");
    snHeadNode.SetPosition(0, 180, 0);

    camLeft = SceneManager.CreateCamera("LeftCamera");
    camRight = SceneManager.CreateCamera("RightCamera");
    snEyesNode = snHeadNode.CreateChildSceneNode("EyesNode");
    snEyesNode.AttachObject(camLeft);
    snEyesNode.AttachObject(camRight);

    // Shift each eye camera by half the IPD.
    camLeft.SetPosition(-fInterpupillaryDistance / 2.0f, 0.0f, 0.0f);
    camRight.SetPosition(fInterpupillaryDistance / 2.0f, 0.0f, 0.0f);

    // Wire each eye camera into its render texture.
    leftRenderTexture = leftEyeTexturePtr.GetBuffer().GetRenderTarget();
    leftRenderTexture.AddViewport(camLeft);
    leftRenderTexture.GetViewport(0).SetClearEveryFrame(true);
    leftRenderTexture.GetViewport(0).BackgroundColour = RenderWindow.GetViewport(0).BackgroundColour;
    leftRenderTexture.GetViewport(0).OverlaysEnabled = true;

    rightRenderTexture = rightEyeTexturePtr.GetBuffer().GetRenderTarget();
    rightRenderTexture.AddViewport(camRight);
    rightRenderTexture.GetViewport(0).SetClearEveryFrame(true);
    rightRenderTexture.GetViewport(0).BackgroundColour = RenderWindow.GetViewport(0).BackgroundColour;
    rightRenderTexture.GetViewport(0).OverlaysEnabled = true;

    // Derive an aspect ratio from the left eye's default FOV tangents.
    OVR.FovPort leftFov = hmd.DefaultEyeFov[0];
    OVR.FovPort rightFov = hmd.DefaultEyeFov[1];
    float tanHalfFovH = System.Math.Max(leftFov.LeftTan, leftFov.RightTan);
    float tanHalfFovV = System.Math.Max(leftFov.UpTan, leftFov.DownTan);
    float eyeAspect = tanHalfFovH / tanHalfFovV;
    camLeft.AspectRatio = eyeAspect;
    camRight.AspectRatio = eyeAspect;

    // Use the SDK-computed projection matrices for both eyes.
    OVR.Matrix4f leftProj = OVR.ovrMatrix4f_Projection(leftFov, camLeft.NearClipDistance, camLeft.FarClipDistance, 1);
    OVR.Matrix4f rightProj = OVR.ovrMatrix4f_Projection(rightFov, camRight.NearClipDistance, camRight.FarClipDistance, 1);
    camLeft.SetCustomProjectionMatrix(true,
        new Matrix4(leftProj.M11, leftProj.M12, leftProj.M13, leftProj.M14,
                    leftProj.M21, leftProj.M22, leftProj.M23, leftProj.M24,
                    leftProj.M31, leftProj.M32, leftProj.M33, leftProj.M34,
                    leftProj.M41, leftProj.M42, leftProj.M43, leftProj.M44));
    camRight.SetCustomProjectionMatrix(true,
        new Matrix4(rightProj.M11, rightProj.M12, rightProj.M13, rightProj.M14,
                    rightProj.M21, rightProj.M22, rightProj.M23, rightProj.M24,
                    rightProj.M31, rightProj.M32, rightProj.M33, rightProj.M34,
                    rightProj.M41, rightProj.M42, rightProj.M43, rightProj.M44));
}
Now you can call the
Root.StartRendering();
And your Oculus should display your scene
If you want the render to follow the Oculus head moves :
You'll need to subscribe to FrameRenderingQueued
Root.FrameRenderingQueued += Root_FrameRenderingQueued;
/// <summary>
/// Frame listener: once per frame, polls the Rift for the current head
/// pose and applies it to the eyes scene node. Always returns true so
/// rendering continues.
/// </summary>
private bool Root_FrameRenderingQueued(FrameEvent _frameEvent)
{
    // Skip entirely when no HMD was initialised.
    if (hmd != null && UpdateOculus(_frameEvent.timeSinceLastFrame))
        SetOculusPose(qOculusOrientation, v3OculusPosition);

    return true;
}
/// <summary>
/// Reads the current head pose from the Rift. When orientation or
/// position tracking data is available, stores it into
/// qOculusOrientation / v3OculusPosition. Returns true so the caller
/// always applies the last known pose.
/// </summary>
/// <param name="dt">Frame delta time (currently unused; kept for the caller's signature).</param>
private bool UpdateOculus(float dt)
{
    if (hmd == null) return true;

    OVR.FrameTiming frameTiming = hmd.BeginFrameTiming(0);
    OVR.TrackingState ts = hmd.GetTrackingState(frameTiming.ScanoutMidpointSeconds);
    if (ts.StatusFlags.HasFlag(OVR.StatusBits.ovrStatus_OrientationTracked) || ts.StatusFlags.HasFlag(OVR.StatusBits.ovrStatus_PositionTracked))
    {
        // The cpp compatibility layer is used to convert ovrPosef to Posef (see OVR_Math.h)
        OVR.Posef pose = ts.HeadPose.ThePose;
        qOculusOrientation = new Quaternion(pose.Orientation.W, pose.Orientation.X, pose.Orientation.Y, pose.Orientation.Z);
        v3OculusPosition = new Vector3(pose.Position.X, pose.Position.Y, pose.Position.Z);
    }
    OVR.ovr_WaitTillTime(frameTiming.TimewarpPointSeconds);
    // FIX: the OculusWrap Hmd class exposes EndFrameTiming() (matching
    // BeginFrameTiming above), not the raw C name ovrHmd_EndFrameTiming.
    hmd.EndFrameTiming();
    return true;
}
/// <summary>
/// Applies the tracked head pose to the eyes scene node so that both eye
/// cameras attached to it follow the HMD's movements.
/// </summary>
private void SetOculusPose(Quaternion _qOrientation, Mogre.Vector3 _v3Pos)
{
    // The two assignments are independent; order does not matter.
    snEyesNode.Position = _v3Pos;
    snEyesNode.Orientation = _qOrientation;
}